code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def _build_discrete_cmap(cmap, levels, extend, filled):
    """
    Build a discrete colormap and normalization of the data.

    :param cmap: Colormap instance or colormap name to draw colors from.
    :param levels: Sequence of level boundaries for the discrete bins.
    :param extend: One of 'both', 'min', 'max', 'neither' — which
                   out-of-range arrows get their own color.
    :param filled: Whether the plot is filled (e.g. contourf vs contour).
    :return: Tuple of (discrete colormap, matching norm).
    """
    import matplotlib as mpl
    if not filled:
        # non-filled contour plots
        extend = 'max'
    # Number of extra colors needed for the out-of-range extension(s).
    if extend == 'both':
        ext_n = 2
    elif extend in ['min', 'max']:
        ext_n = 1
    else:
        ext_n = 0
    # One color per interval between levels, plus the extension colors.
    n_colors = len(levels) + ext_n - 1
    pal = _color_palette(cmap, n_colors)  # presumably returns n_colors RGB(A) colors — confirm against helper
    new_cmap, cnorm = mpl.colors.from_levels_and_colors(
        levels, pal, extend=extend)
    # copy the old cmap name, for easier testing
    new_cmap.name = getattr(cmap, 'name', cmap)
    return new_cmap, cnorm | def function[_build_discrete_cmap, parameter[cmap, levels, extend, filled]]:
constant[
Build a discrete colormap and normalization of the data.
]
import module[matplotlib] as alias[mpl]
if <ast.UnaryOp object at 0x7da1b1f94040> begin[:]
variable[extend] assign[=] constant[max]
if compare[name[extend] equal[==] constant[both]] begin[:]
variable[ext_n] assign[=] constant[2]
variable[n_colors] assign[=] binary_operation[binary_operation[call[name[len], parameter[name[levels]]] + name[ext_n]] - constant[1]]
variable[pal] assign[=] call[name[_color_palette], parameter[name[cmap], name[n_colors]]]
<ast.Tuple object at 0x7da1b1c79870> assign[=] call[name[mpl].colors.from_levels_and_colors, parameter[name[levels], name[pal]]]
name[new_cmap].name assign[=] call[name[getattr], parameter[name[cmap], constant[name], name[cmap]]]
return[tuple[[<ast.Name object at 0x7da1b1c7ad10>, <ast.Name object at 0x7da1b1c7b0a0>]]] | keyword[def] identifier[_build_discrete_cmap] ( identifier[cmap] , identifier[levels] , identifier[extend] , identifier[filled] ):
literal[string]
keyword[import] identifier[matplotlib] keyword[as] identifier[mpl]
keyword[if] keyword[not] identifier[filled] :
identifier[extend] = literal[string]
keyword[if] identifier[extend] == literal[string] :
identifier[ext_n] = literal[int]
keyword[elif] identifier[extend] keyword[in] [ literal[string] , literal[string] ]:
identifier[ext_n] = literal[int]
keyword[else] :
identifier[ext_n] = literal[int]
identifier[n_colors] = identifier[len] ( identifier[levels] )+ identifier[ext_n] - literal[int]
identifier[pal] = identifier[_color_palette] ( identifier[cmap] , identifier[n_colors] )
identifier[new_cmap] , identifier[cnorm] = identifier[mpl] . identifier[colors] . identifier[from_levels_and_colors] (
identifier[levels] , identifier[pal] , identifier[extend] = identifier[extend] )
identifier[new_cmap] . identifier[name] = identifier[getattr] ( identifier[cmap] , literal[string] , identifier[cmap] )
keyword[return] identifier[new_cmap] , identifier[cnorm] | def _build_discrete_cmap(cmap, levels, extend, filled):
"""
Build a discrete colormap and normalization of the data.
"""
import matplotlib as mpl
if not filled:
# non-filled contour plots
extend = 'max' # depends on [control=['if'], data=[]]
if extend == 'both':
ext_n = 2 # depends on [control=['if'], data=[]]
elif extend in ['min', 'max']:
ext_n = 1 # depends on [control=['if'], data=[]]
else:
ext_n = 0
n_colors = len(levels) + ext_n - 1
pal = _color_palette(cmap, n_colors)
(new_cmap, cnorm) = mpl.colors.from_levels_and_colors(levels, pal, extend=extend)
# copy the old cmap name, for easier testing
new_cmap.name = getattr(cmap, 'name', cmap)
return (new_cmap, cnorm) |
def line_pos_from_number(self, line_number):
    """
    Computes line position on Y-Axis (at the center of the line) from line
    number.

    :param line_number: The line number for which we want to know the
                        position in pixels.
    :return: The center position of the line.
    """
    editor = self._editor
    block = editor.document().findBlockByNumber(line_number)
    if block.isValid():
        # Top of the block's bounding rectangle, shifted by the current
        # scroll offset, gives the on-screen y coordinate.
        return int(editor.blockBoundingGeometry(block).translated(
            editor.contentOffset()).top())
    # Invalid block: the requested line is outside the document.
    if line_number <= 0:
        return 0
    else:
        # Past the last line: fall back to the bottom edge of the
        # previous (last valid) block.
        return int(editor.blockBoundingGeometry(
            block.previous()).translated(editor.contentOffset()).bottom()) | def function[line_pos_from_number, parameter[self, line_number]]:
constant[
Computes line position on Y-Axis (at the center of the line) from line
number.
:param line_number: The line number for which we want to know the
position in pixels.
:return: The center position of the line.
]
variable[editor] assign[=] name[self]._editor
variable[block] assign[=] call[call[name[editor].document, parameter[]].findBlockByNumber, parameter[name[line_number]]]
if call[name[block].isValid, parameter[]] begin[:]
return[call[name[int], parameter[call[call[call[name[editor].blockBoundingGeometry, parameter[name[block]]].translated, parameter[call[name[editor].contentOffset, parameter[]]]].top, parameter[]]]]]
if compare[name[line_number] less_or_equal[<=] constant[0]] begin[:]
return[constant[0]] | keyword[def] identifier[line_pos_from_number] ( identifier[self] , identifier[line_number] ):
literal[string]
identifier[editor] = identifier[self] . identifier[_editor]
identifier[block] = identifier[editor] . identifier[document] (). identifier[findBlockByNumber] ( identifier[line_number] )
keyword[if] identifier[block] . identifier[isValid] ():
keyword[return] identifier[int] ( identifier[editor] . identifier[blockBoundingGeometry] ( identifier[block] ). identifier[translated] (
identifier[editor] . identifier[contentOffset] ()). identifier[top] ())
keyword[if] identifier[line_number] <= literal[int] :
keyword[return] literal[int]
keyword[else] :
keyword[return] identifier[int] ( identifier[editor] . identifier[blockBoundingGeometry] (
identifier[block] . identifier[previous] ()). identifier[translated] ( identifier[editor] . identifier[contentOffset] ()). identifier[bottom] ()) | def line_pos_from_number(self, line_number):
"""
Computes line position on Y-Axis (at the center of the line) from line
number.
:param line_number: The line number for which we want to know the
position in pixels.
:return: The center position of the line.
"""
editor = self._editor
block = editor.document().findBlockByNumber(line_number)
if block.isValid():
return int(editor.blockBoundingGeometry(block).translated(editor.contentOffset()).top()) # depends on [control=['if'], data=[]]
if line_number <= 0:
return 0 # depends on [control=['if'], data=[]]
else:
return int(editor.blockBoundingGeometry(block.previous()).translated(editor.contentOffset()).bottom()) |
def render_component(self, declaration):
    """ Render a row of all the attributes

    Builds the HTML documentation block for one component class:
    a table row per member, plus superclass / source / example links.

    :param declaration: Component class to document.
    :return: Rendered HTML string from COMPONENT_TMPL.
    """
    # One table row per member: member name + rendered type info.
    items = ["""<tr><td>{name}</td><td>{type}</td></tr>"""
             .format(name=m.name,
                     type=self.render_component_types(declaration, m))
             for m in self.get_component_members(declaration)]
    info = []
    parent = declaration.__mro__[1]  # immediate superclass
    #: Superclass
    info.append("<tr><td>extends component</td>"
                "<td><a href='#component-{id}'>{name}</a></td></td>"
                .format(id=parent.__name__.lower(), name=parent.__name__))
    #: Source and example, only works with enamlnative builtins
    # Normalize compiled-module paths (.pyc/.pyo) back to the .py source.
    source_path = inspect.getfile(declaration).replace(
        ".pyo", ".py").replace(".pyc", ".py")
    if 'enamlnative' in source_path:
        source_link = "https://github.com/frmdstryr/" \
                      "enaml-native/tree/master/src/{}".format(
                          source_path.split("assets/python")[1]
                      )
        info.append("<tr><td>source code</td>"
                    "<td><a href='{}' target='_blank'>show</a></td></td>"
                    .format(source_link))
        #: Examples link
        example_link = "https://www.codelv.com/projects/" \
                       "enaml-native/docs/components#{}" \
                       .format(declaration.__name__.lower())
        info.append("<tr><td>example usage</td>"
                    "<td><a href='{}' target='_blank'>view</a></td></td>"
                    .format(example_link))
    return COMPONENT_TMPL.format(id=declaration.__name__.lower(),
                                 name=declaration.__name__,
                                 info="".join(info),
                                 items="".join(items)) | def function[render_component, parameter[self, declaration]]:
constant[ Render a row of all the attributes ]
variable[items] assign[=] <ast.ListComp object at 0x7da1b1c60820>
variable[info] assign[=] list[[]]
variable[parent] assign[=] call[name[declaration].__mro__][constant[1]]
call[name[info].append, parameter[call[constant[<tr><td>extends component</td><td><a href='#component-{id}'>{name}</a></td></td>].format, parameter[]]]]
variable[source_path] assign[=] call[call[call[name[inspect].getfile, parameter[name[declaration]]].replace, parameter[constant[.pyo], constant[.py]]].replace, parameter[constant[.pyc], constant[.py]]]
if compare[constant[enamlnative] in name[source_path]] begin[:]
variable[source_link] assign[=] call[constant[https://github.com/frmdstryr/enaml-native/tree/master/src/{}].format, parameter[call[call[name[source_path].split, parameter[constant[assets/python]]]][constant[1]]]]
call[name[info].append, parameter[call[constant[<tr><td>source code</td><td><a href='{}' target='_blank'>show</a></td></td>].format, parameter[name[source_link]]]]]
variable[example_link] assign[=] call[constant[https://www.codelv.com/projects/enaml-native/docs/components#{}].format, parameter[call[name[declaration].__name__.lower, parameter[]]]]
call[name[info].append, parameter[call[constant[<tr><td>example usage</td><td><a href='{}' target='_blank'>view</a></td></td>].format, parameter[name[example_link]]]]]
return[call[name[COMPONENT_TMPL].format, parameter[]]] | keyword[def] identifier[render_component] ( identifier[self] , identifier[declaration] ):
literal[string]
identifier[items] =[ literal[string]
. identifier[format] ( identifier[name] = identifier[m] . identifier[name] ,
identifier[type] = identifier[self] . identifier[render_component_types] ( identifier[declaration] , identifier[m] ))
keyword[for] identifier[m] keyword[in] identifier[self] . identifier[get_component_members] ( identifier[declaration] )]
identifier[info] =[]
identifier[parent] = identifier[declaration] . identifier[__mro__] [ literal[int] ]
identifier[info] . identifier[append] ( literal[string]
literal[string]
. identifier[format] ( identifier[id] = identifier[parent] . identifier[__name__] . identifier[lower] (), identifier[name] = identifier[parent] . identifier[__name__] ))
identifier[source_path] = identifier[inspect] . identifier[getfile] ( identifier[declaration] ). identifier[replace] (
literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
keyword[if] literal[string] keyword[in] identifier[source_path] :
identifier[source_link] = literal[string] literal[string] . identifier[format] (
identifier[source_path] . identifier[split] ( literal[string] )[ literal[int] ]
)
identifier[info] . identifier[append] ( literal[string]
literal[string]
. identifier[format] ( identifier[source_link] ))
identifier[example_link] = literal[string] literal[string] . identifier[format] ( identifier[declaration] . identifier[__name__] . identifier[lower] ())
identifier[info] . identifier[append] ( literal[string]
literal[string]
. identifier[format] ( identifier[example_link] ))
keyword[return] identifier[COMPONENT_TMPL] . identifier[format] ( identifier[id] = identifier[declaration] . identifier[__name__] . identifier[lower] (),
identifier[name] = identifier[declaration] . identifier[__name__] ,
identifier[info] = literal[string] . identifier[join] ( identifier[info] ),
identifier[items] = literal[string] . identifier[join] ( identifier[items] )) | def render_component(self, declaration):
""" Render a row of all the attributes """
items = ['<tr><td>{name}</td><td>{type}</td></tr>'.format(name=m.name, type=self.render_component_types(declaration, m)) for m in self.get_component_members(declaration)]
info = []
parent = declaration.__mro__[1]
#: Superclass
info.append("<tr><td>extends component</td><td><a href='#component-{id}'>{name}</a></td></td>".format(id=parent.__name__.lower(), name=parent.__name__))
#: Source and example, only works with enamlnative builtins
source_path = inspect.getfile(declaration).replace('.pyo', '.py').replace('.pyc', '.py')
if 'enamlnative' in source_path:
source_link = 'https://github.com/frmdstryr/enaml-native/tree/master/src/{}'.format(source_path.split('assets/python')[1])
info.append("<tr><td>source code</td><td><a href='{}' target='_blank'>show</a></td></td>".format(source_link))
#: Examples link
example_link = 'https://www.codelv.com/projects/enaml-native/docs/components#{}'.format(declaration.__name__.lower())
info.append("<tr><td>example usage</td><td><a href='{}' target='_blank'>view</a></td></td>".format(example_link)) # depends on [control=['if'], data=['source_path']]
return COMPONENT_TMPL.format(id=declaration.__name__.lower(), name=declaration.__name__, info=''.join(info), items=''.join(items)) |
def _generate_op_module_signature(root_namespace, module_name, op_code_gen_func):
    """
    Generate op functions created by `op_code_gen_func` and write to the source file
    of `root_namespace.module_name.[submodule_name]`,
    where `submodule_name` is one of `_OP_SUBMODULE_NAME_LIST`.

    Parameters
    ----------
    root_namespace : str
        Top level module name, `mxnet` in the current cases.
    module_name : str
        Second level module name, `ndarray` and `symbol` in the current cases.
    op_code_gen_func : function
        Function for creating op functions for `ndarray` and `symbol` modules.
    """
    def get_module_file(module_name):
        """Return the generated module file based on module name."""
        path = os.path.dirname(__file__)
        module_path = module_name.split('.')
        # Generated files are prefixed with 'gen_' to mark them auto-generated.
        module_path[-1] = 'gen_' + module_path[-1]
        file_name = os.path.join(path, '..', *module_path) + '.py'
        module_file = open(file_name, 'w')
        # Import preamble required by the code generated for each module kind.
        dependencies = {'symbol': ['from ._internal import SymbolBase',
                                   'from ..base import _Null'],
                        'ndarray': ['from ._internal import NDArrayBase',
                                    'from ..base import _Null']}
        module_file.write('# File content is auto-generated. Do not modify.' + os.linesep)
        module_file.write('# pylint: skip-file' + os.linesep)
        module_file.write(os.linesep.join(dependencies[module_name.split('.')[1]]))
        return module_file
    def write_all_str(module_file, module_all_list):
        """Write the proper __all__ based on available operators."""
        module_file.write(os.linesep)
        module_file.write(os.linesep)
        all_str = '__all__ = [' + ', '.join(["'%s'"%s for s in module_all_list]) + ']'
        module_file.write(all_str)
    # Query the full operator name list from the MXNet C library.
    plist = ctypes.POINTER(ctypes.c_char_p)()
    size = ctypes.c_uint()
    check_call(_LIB.MXListAllOpNames(ctypes.byref(size),
                                     ctypes.byref(plist)))
    op_names = []
    for i in range(size.value):
        op_names.append(py_str(plist[i]))
    module_op_file = get_module_file("%s.%s.op" % (root_namespace, module_name))
    module_op_all = []
    module_internal_file = get_module_file("%s.%s._internal"%(root_namespace, module_name))
    module_internal_all = []
    # Map op-name prefix -> (open file handle, list of exported names).
    submodule_dict = {}
    for op_name_prefix in _OP_NAME_PREFIX_LIST:
        submodule_dict[op_name_prefix] =\
            (get_module_file("%s.%s.%s" % (root_namespace, module_name,
                                           op_name_prefix[1:-1])), [])
    for name in op_names:
        hdl = OpHandle()
        check_call(_LIB.NNGetOpHandle(c_str(name), ctypes.byref(hdl)))
        op_name_prefix = _get_op_name_prefix(name)
        # Route each op to its destination module: prefixed ops go to their
        # submodule (prefix stripped), '_'-prefixed ops to _internal,
        # everything else to the public op module.
        if len(op_name_prefix) > 0:
            func_name = name[len(op_name_prefix):]
            cur_module_file, cur_module_all = submodule_dict[op_name_prefix]
        elif name.startswith('_'):
            func_name = name
            cur_module_file = module_internal_file
            cur_module_all = module_internal_all
        else:
            func_name = name
            cur_module_file = module_op_file
            cur_module_all = module_op_all
        code, _ = op_code_gen_func(hdl, name, func_name, True)
        cur_module_file.write(os.linesep)
        cur_module_file.write(code)
        cur_module_all.append(func_name)
    # Finalize every generated file with its __all__ and close the handle.
    for (submodule_f, submodule_all) in submodule_dict.values():
        write_all_str(submodule_f, submodule_all)
        submodule_f.close()
    write_all_str(module_op_file, module_op_all)
    module_op_file.close()
    write_all_str(module_internal_file, module_internal_all)
    module_internal_file.close() | def function[_generate_op_module_signature, parameter[root_namespace, module_name, op_code_gen_func]]:
constant[
Generate op functions created by `op_code_gen_func` and write to the source file
of `root_namespace.module_name.[submodule_name]`,
where `submodule_name` is one of `_OP_SUBMODULE_NAME_LIST`.
Parameters
----------
root_namespace : str
Top level module name, `mxnet` in the current cases.
module_name : str
Second level module name, `ndarray` and `symbol` in the current cases.
op_code_gen_func : function
Function for creating op functions for `ndarray` and `symbol` modules.
]
def function[get_module_file, parameter[module_name]]:
constant[Return the generated module file based on module name.]
variable[path] assign[=] call[name[os].path.dirname, parameter[name[__file__]]]
variable[module_path] assign[=] call[name[module_name].split, parameter[constant[.]]]
call[name[module_path]][<ast.UnaryOp object at 0x7da1b1ef24d0>] assign[=] binary_operation[constant[gen_] + call[name[module_path]][<ast.UnaryOp object at 0x7da1b1ef0190>]]
variable[file_name] assign[=] binary_operation[call[name[os].path.join, parameter[name[path], constant[..], <ast.Starred object at 0x7da1b1ef0f70>]] + constant[.py]]
variable[module_file] assign[=] call[name[open], parameter[name[file_name], constant[w]]]
variable[dependencies] assign[=] dictionary[[<ast.Constant object at 0x7da1b1ef0b50>, <ast.Constant object at 0x7da1b1ef1b40>], [<ast.List object at 0x7da1b1ef1060>, <ast.List object at 0x7da1b1ef0490>]]
call[name[module_file].write, parameter[binary_operation[constant[# File content is auto-generated. Do not modify.] + name[os].linesep]]]
call[name[module_file].write, parameter[binary_operation[constant[# pylint: skip-file] + name[os].linesep]]]
call[name[module_file].write, parameter[call[name[os].linesep.join, parameter[call[name[dependencies]][call[call[name[module_name].split, parameter[constant[.]]]][constant[1]]]]]]]
return[name[module_file]]
def function[write_all_str, parameter[module_file, module_all_list]]:
constant[Write the proper __all__ based on available operators.]
call[name[module_file].write, parameter[name[os].linesep]]
call[name[module_file].write, parameter[name[os].linesep]]
variable[all_str] assign[=] binary_operation[binary_operation[constant[__all__ = [] + call[constant[, ].join, parameter[<ast.ListComp object at 0x7da1b1ef1e10>]]] + constant[]]]
call[name[module_file].write, parameter[name[all_str]]]
variable[plist] assign[=] call[call[name[ctypes].POINTER, parameter[name[ctypes].c_char_p]], parameter[]]
variable[size] assign[=] call[name[ctypes].c_uint, parameter[]]
call[name[check_call], parameter[call[name[_LIB].MXListAllOpNames, parameter[call[name[ctypes].byref, parameter[name[size]]], call[name[ctypes].byref, parameter[name[plist]]]]]]]
variable[op_names] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[name[size].value]]] begin[:]
call[name[op_names].append, parameter[call[name[py_str], parameter[call[name[plist]][name[i]]]]]]
variable[module_op_file] assign[=] call[name[get_module_file], parameter[binary_operation[constant[%s.%s.op] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1ef17e0>, <ast.Name object at 0x7da1b1ef1de0>]]]]]
variable[module_op_all] assign[=] list[[]]
variable[module_internal_file] assign[=] call[name[get_module_file], parameter[binary_operation[constant[%s.%s._internal] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1ef30a0>, <ast.Name object at 0x7da1b1ef2200>]]]]]
variable[module_internal_all] assign[=] list[[]]
variable[submodule_dict] assign[=] dictionary[[], []]
for taget[name[op_name_prefix]] in starred[name[_OP_NAME_PREFIX_LIST]] begin[:]
call[name[submodule_dict]][name[op_name_prefix]] assign[=] tuple[[<ast.Call object at 0x7da1b1ef1ba0>, <ast.List object at 0x7da1b1ef2410>]]
for taget[name[name]] in starred[name[op_names]] begin[:]
variable[hdl] assign[=] call[name[OpHandle], parameter[]]
call[name[check_call], parameter[call[name[_LIB].NNGetOpHandle, parameter[call[name[c_str], parameter[name[name]]], call[name[ctypes].byref, parameter[name[hdl]]]]]]]
variable[op_name_prefix] assign[=] call[name[_get_op_name_prefix], parameter[name[name]]]
if compare[call[name[len], parameter[name[op_name_prefix]]] greater[>] constant[0]] begin[:]
variable[func_name] assign[=] call[name[name]][<ast.Slice object at 0x7da1b1ffd690>]
<ast.Tuple object at 0x7da1b1ffc910> assign[=] call[name[submodule_dict]][name[op_name_prefix]]
<ast.Tuple object at 0x7da1b1fff520> assign[=] call[name[op_code_gen_func], parameter[name[hdl], name[name], name[func_name], constant[True]]]
call[name[cur_module_file].write, parameter[name[os].linesep]]
call[name[cur_module_file].write, parameter[name[code]]]
call[name[cur_module_all].append, parameter[name[func_name]]]
for taget[tuple[[<ast.Name object at 0x7da1b1ffca30>, <ast.Name object at 0x7da1b1fff040>]]] in starred[call[name[submodule_dict].values, parameter[]]] begin[:]
call[name[write_all_str], parameter[name[submodule_f], name[submodule_all]]]
call[name[submodule_f].close, parameter[]]
call[name[write_all_str], parameter[name[module_op_file], name[module_op_all]]]
call[name[module_op_file].close, parameter[]]
call[name[write_all_str], parameter[name[module_internal_file], name[module_internal_all]]]
call[name[module_internal_file].close, parameter[]] | keyword[def] identifier[_generate_op_module_signature] ( identifier[root_namespace] , identifier[module_name] , identifier[op_code_gen_func] ):
literal[string]
keyword[def] identifier[get_module_file] ( identifier[module_name] ):
literal[string]
identifier[path] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] )
identifier[module_path] = identifier[module_name] . identifier[split] ( literal[string] )
identifier[module_path] [- literal[int] ]= literal[string] + identifier[module_path] [- literal[int] ]
identifier[file_name] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] ,* identifier[module_path] )+ literal[string]
identifier[module_file] = identifier[open] ( identifier[file_name] , literal[string] )
identifier[dependencies] ={ literal[string] :[ literal[string] ,
literal[string] ],
literal[string] :[ literal[string] ,
literal[string] ]}
identifier[module_file] . identifier[write] ( literal[string] + identifier[os] . identifier[linesep] )
identifier[module_file] . identifier[write] ( literal[string] + identifier[os] . identifier[linesep] )
identifier[module_file] . identifier[write] ( identifier[os] . identifier[linesep] . identifier[join] ( identifier[dependencies] [ identifier[module_name] . identifier[split] ( literal[string] )[ literal[int] ]]))
keyword[return] identifier[module_file]
keyword[def] identifier[write_all_str] ( identifier[module_file] , identifier[module_all_list] ):
literal[string]
identifier[module_file] . identifier[write] ( identifier[os] . identifier[linesep] )
identifier[module_file] . identifier[write] ( identifier[os] . identifier[linesep] )
identifier[all_str] = literal[string] + literal[string] . identifier[join] ([ literal[string] % identifier[s] keyword[for] identifier[s] keyword[in] identifier[module_all_list] ])+ literal[string]
identifier[module_file] . identifier[write] ( identifier[all_str] )
identifier[plist] = identifier[ctypes] . identifier[POINTER] ( identifier[ctypes] . identifier[c_char_p] )()
identifier[size] = identifier[ctypes] . identifier[c_uint] ()
identifier[check_call] ( identifier[_LIB] . identifier[MXListAllOpNames] ( identifier[ctypes] . identifier[byref] ( identifier[size] ),
identifier[ctypes] . identifier[byref] ( identifier[plist] )))
identifier[op_names] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[size] . identifier[value] ):
identifier[op_names] . identifier[append] ( identifier[py_str] ( identifier[plist] [ identifier[i] ]))
identifier[module_op_file] = identifier[get_module_file] ( literal[string] %( identifier[root_namespace] , identifier[module_name] ))
identifier[module_op_all] =[]
identifier[module_internal_file] = identifier[get_module_file] ( literal[string] %( identifier[root_namespace] , identifier[module_name] ))
identifier[module_internal_all] =[]
identifier[submodule_dict] ={}
keyword[for] identifier[op_name_prefix] keyword[in] identifier[_OP_NAME_PREFIX_LIST] :
identifier[submodule_dict] [ identifier[op_name_prefix] ]=( identifier[get_module_file] ( literal[string] %( identifier[root_namespace] , identifier[module_name] ,
identifier[op_name_prefix] [ literal[int] :- literal[int] ])),[])
keyword[for] identifier[name] keyword[in] identifier[op_names] :
identifier[hdl] = identifier[OpHandle] ()
identifier[check_call] ( identifier[_LIB] . identifier[NNGetOpHandle] ( identifier[c_str] ( identifier[name] ), identifier[ctypes] . identifier[byref] ( identifier[hdl] )))
identifier[op_name_prefix] = identifier[_get_op_name_prefix] ( identifier[name] )
keyword[if] identifier[len] ( identifier[op_name_prefix] )> literal[int] :
identifier[func_name] = identifier[name] [ identifier[len] ( identifier[op_name_prefix] ):]
identifier[cur_module_file] , identifier[cur_module_all] = identifier[submodule_dict] [ identifier[op_name_prefix] ]
keyword[elif] identifier[name] . identifier[startswith] ( literal[string] ):
identifier[func_name] = identifier[name]
identifier[cur_module_file] = identifier[module_internal_file]
identifier[cur_module_all] = identifier[module_internal_all]
keyword[else] :
identifier[func_name] = identifier[name]
identifier[cur_module_file] = identifier[module_op_file]
identifier[cur_module_all] = identifier[module_op_all]
identifier[code] , identifier[_] = identifier[op_code_gen_func] ( identifier[hdl] , identifier[name] , identifier[func_name] , keyword[True] )
identifier[cur_module_file] . identifier[write] ( identifier[os] . identifier[linesep] )
identifier[cur_module_file] . identifier[write] ( identifier[code] )
identifier[cur_module_all] . identifier[append] ( identifier[func_name] )
keyword[for] ( identifier[submodule_f] , identifier[submodule_all] ) keyword[in] identifier[submodule_dict] . identifier[values] ():
identifier[write_all_str] ( identifier[submodule_f] , identifier[submodule_all] )
identifier[submodule_f] . identifier[close] ()
identifier[write_all_str] ( identifier[module_op_file] , identifier[module_op_all] )
identifier[module_op_file] . identifier[close] ()
identifier[write_all_str] ( identifier[module_internal_file] , identifier[module_internal_all] )
identifier[module_internal_file] . identifier[close] () | def _generate_op_module_signature(root_namespace, module_name, op_code_gen_func):
"""
Generate op functions created by `op_code_gen_func` and write to the source file
of `root_namespace.module_name.[submodule_name]`,
where `submodule_name` is one of `_OP_SUBMODULE_NAME_LIST`.
Parameters
----------
root_namespace : str
Top level module name, `mxnet` in the current cases.
module_name : str
Second level module name, `ndarray` and `symbol` in the current cases.
op_code_gen_func : function
Function for creating op functions for `ndarray` and `symbol` modules.
"""
def get_module_file(module_name):
"""Return the generated module file based on module name."""
path = os.path.dirname(__file__)
module_path = module_name.split('.')
module_path[-1] = 'gen_' + module_path[-1]
file_name = os.path.join(path, '..', *module_path) + '.py'
module_file = open(file_name, 'w')
dependencies = {'symbol': ['from ._internal import SymbolBase', 'from ..base import _Null'], 'ndarray': ['from ._internal import NDArrayBase', 'from ..base import _Null']}
module_file.write('# File content is auto-generated. Do not modify.' + os.linesep)
module_file.write('# pylint: skip-file' + os.linesep)
module_file.write(os.linesep.join(dependencies[module_name.split('.')[1]]))
return module_file
def write_all_str(module_file, module_all_list):
"""Write the proper __all__ based on available operators."""
module_file.write(os.linesep)
module_file.write(os.linesep)
all_str = '__all__ = [' + ', '.join(["'%s'" % s for s in module_all_list]) + ']'
module_file.write(all_str)
plist = ctypes.POINTER(ctypes.c_char_p)()
size = ctypes.c_uint()
check_call(_LIB.MXListAllOpNames(ctypes.byref(size), ctypes.byref(plist)))
op_names = []
for i in range(size.value):
op_names.append(py_str(plist[i])) # depends on [control=['for'], data=['i']]
module_op_file = get_module_file('%s.%s.op' % (root_namespace, module_name))
module_op_all = []
module_internal_file = get_module_file('%s.%s._internal' % (root_namespace, module_name))
module_internal_all = []
submodule_dict = {}
for op_name_prefix in _OP_NAME_PREFIX_LIST:
submodule_dict[op_name_prefix] = (get_module_file('%s.%s.%s' % (root_namespace, module_name, op_name_prefix[1:-1])), []) # depends on [control=['for'], data=['op_name_prefix']]
for name in op_names:
hdl = OpHandle()
check_call(_LIB.NNGetOpHandle(c_str(name), ctypes.byref(hdl)))
op_name_prefix = _get_op_name_prefix(name)
if len(op_name_prefix) > 0:
func_name = name[len(op_name_prefix):]
(cur_module_file, cur_module_all) = submodule_dict[op_name_prefix] # depends on [control=['if'], data=[]]
elif name.startswith('_'):
func_name = name
cur_module_file = module_internal_file
cur_module_all = module_internal_all # depends on [control=['if'], data=[]]
else:
func_name = name
cur_module_file = module_op_file
cur_module_all = module_op_all
(code, _) = op_code_gen_func(hdl, name, func_name, True)
cur_module_file.write(os.linesep)
cur_module_file.write(code)
cur_module_all.append(func_name) # depends on [control=['for'], data=['name']]
for (submodule_f, submodule_all) in submodule_dict.values():
write_all_str(submodule_f, submodule_all)
submodule_f.close() # depends on [control=['for'], data=[]]
write_all_str(module_op_file, module_op_all)
module_op_file.close()
write_all_str(module_internal_file, module_internal_all)
module_internal_file.close() |
def find_many(self, url, type, resource):
    """Get a list of resources

    Args:
        url (string): URL to invoke
        type (class): Class type
        resource (string): The REST Resource

    Returns:
        list of object: List of resource instances
    """
    # NOTE(review): `type` shadows the builtin; kept for API compatibility.
    # Wrap each raw item under the `resource` key of the response in `type`.
    return [type(item) for item in RestClient.get(url)[resource]] | def function[find_many, parameter[self, url, type, resource]]:
constant[Get a list of resources
Args:
url (string): URL to invoke
type (class): Class type
resource (string): The REST Resource
Returns:
list of object: List of resource instances
]
return[<ast.ListComp object at 0x7da1b0b4a6e0>] | keyword[def] identifier[find_many] ( identifier[self] , identifier[url] , identifier[type] , identifier[resource] ):
literal[string]
keyword[return] [ identifier[type] ( identifier[item] ) keyword[for] identifier[item] keyword[in] identifier[RestClient] . identifier[get] ( identifier[url] )[ identifier[resource] ]] | def find_many(self, url, type, resource):
"""Get a list of resources
Args:
url (string): URL to invoke
type (class): Class type
resource (string): The REST Resource
Returns:
list of object: List of resource instances
"""
return [type(item) for item in RestClient.get(url)[resource]] |
def _task_idle_ticks(seconds_per_cycle):
    """Yield the sleep time (seconds) remaining until each next cycle boundary."""
    t = time_ticks()
    while True:
        # Advance the target tick, then report how long is left until it.
        t += seconds_per_cycle
        # Clamp at 0 so an overrun cycle never yields a negative sleep.
        yield max(t - time_ticks(), 0) | def function[_task_idle_ticks, parameter[seconds_per_cycle]]:
constant[ 计算下次周期的沉睡时间 ]
variable[t] assign[=] call[name[time_ticks], parameter[]]
while constant[True] begin[:]
<ast.AugAssign object at 0x7da20e955300>
<ast.Yield object at 0x7da20e957550> | keyword[def] identifier[_task_idle_ticks] ( identifier[seconds_per_cycle] ):
literal[string]
identifier[t] = identifier[time_ticks] ()
keyword[while] keyword[True] :
identifier[t] += identifier[seconds_per_cycle]
keyword[yield] identifier[max] ( identifier[t] - identifier[time_ticks] (), literal[int] ) | def _task_idle_ticks(seconds_per_cycle):
""" 计算下次周期的沉睡时间 """
t = time_ticks()
while True:
t += seconds_per_cycle
yield max(t - time_ticks(), 0) # depends on [control=['while'], data=[]] |
def _send(self, message):
"""If not running connects socket and
authenticates. Adds CRLF and sends message
to Betfair.
:param message: Data to be sent to Betfair.
"""
if not self._running:
self._connect()
self.authenticate()
message_dumped = json.dumps(message) + self.__CRLF
try:
self._socket.send(message_dumped.encode())
except (socket.timeout, socket.error) as e:
self.stop()
raise SocketError('[Connect: %s]: Socket %s' % (self._unique_id, e)) | def function[_send, parameter[self, message]]:
constant[If not running connects socket and
authenticates. Adds CRLF and sends message
to Betfair.
:param message: Data to be sent to Betfair.
]
if <ast.UnaryOp object at 0x7da1b17f83a0> begin[:]
call[name[self]._connect, parameter[]]
call[name[self].authenticate, parameter[]]
variable[message_dumped] assign[=] binary_operation[call[name[json].dumps, parameter[name[message]]] + name[self].__CRLF]
<ast.Try object at 0x7da1b17f88e0> | keyword[def] identifier[_send] ( identifier[self] , identifier[message] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_running] :
identifier[self] . identifier[_connect] ()
identifier[self] . identifier[authenticate] ()
identifier[message_dumped] = identifier[json] . identifier[dumps] ( identifier[message] )+ identifier[self] . identifier[__CRLF]
keyword[try] :
identifier[self] . identifier[_socket] . identifier[send] ( identifier[message_dumped] . identifier[encode] ())
keyword[except] ( identifier[socket] . identifier[timeout] , identifier[socket] . identifier[error] ) keyword[as] identifier[e] :
identifier[self] . identifier[stop] ()
keyword[raise] identifier[SocketError] ( literal[string] %( identifier[self] . identifier[_unique_id] , identifier[e] )) | def _send(self, message):
"""If not running connects socket and
authenticates. Adds CRLF and sends message
to Betfair.
:param message: Data to be sent to Betfair.
"""
if not self._running:
self._connect()
self.authenticate() # depends on [control=['if'], data=[]]
message_dumped = json.dumps(message) + self.__CRLF
try:
self._socket.send(message_dumped.encode()) # depends on [control=['try'], data=[]]
except (socket.timeout, socket.error) as e:
self.stop()
raise SocketError('[Connect: %s]: Socket %s' % (self._unique_id, e)) # depends on [control=['except'], data=['e']] |
def get_host_advanced(name=None, ipv4addr=None, mac=None, **api_opts):
'''
Get all host information
CLI Example:
.. code-block:: bash
salt-call infoblox.get_host_advanced hostname.domain.ca
'''
infoblox = _get_infoblox(**api_opts)
host = infoblox.get_host_advanced(name=name, mac=mac, ipv4addr=ipv4addr)
return host | def function[get_host_advanced, parameter[name, ipv4addr, mac]]:
constant[
Get all host information
CLI Example:
.. code-block:: bash
salt-call infoblox.get_host_advanced hostname.domain.ca
]
variable[infoblox] assign[=] call[name[_get_infoblox], parameter[]]
variable[host] assign[=] call[name[infoblox].get_host_advanced, parameter[]]
return[name[host]] | keyword[def] identifier[get_host_advanced] ( identifier[name] = keyword[None] , identifier[ipv4addr] = keyword[None] , identifier[mac] = keyword[None] ,** identifier[api_opts] ):
literal[string]
identifier[infoblox] = identifier[_get_infoblox] (** identifier[api_opts] )
identifier[host] = identifier[infoblox] . identifier[get_host_advanced] ( identifier[name] = identifier[name] , identifier[mac] = identifier[mac] , identifier[ipv4addr] = identifier[ipv4addr] )
keyword[return] identifier[host] | def get_host_advanced(name=None, ipv4addr=None, mac=None, **api_opts):
"""
Get all host information
CLI Example:
.. code-block:: bash
salt-call infoblox.get_host_advanced hostname.domain.ca
"""
infoblox = _get_infoblox(**api_opts)
host = infoblox.get_host_advanced(name=name, mac=mac, ipv4addr=ipv4addr)
return host |
def handle(self, *args, **options):
"""Register processes."""
force = options.get('force')
retire = options.get('retire')
verbosity = int(options.get('verbosity'))
users = get_user_model().objects.filter(is_superuser=True).order_by('date_joined')
if not users.exists():
self.stderr.write("Admin does not exist: create a superuser")
exit(1)
process_paths, descriptor_paths = [], []
process_schemas, descriptor_schemas = [], []
for finder in get_finders():
process_paths.extend(finder.find_processes())
descriptor_paths.extend(finder.find_descriptors())
for proc_path in process_paths:
process_schemas.extend(
self.find_schemas(proc_path, schema_type=SCHEMA_TYPE_PROCESS, verbosity=verbosity))
for desc_path in descriptor_paths:
descriptor_schemas.extend(
self.find_schemas(desc_path, schema_type=SCHEMA_TYPE_DESCRIPTOR, verbosity=verbosity))
user_admin = users.first()
self.register_descriptors(descriptor_schemas, user_admin, force, verbosity=verbosity)
# NOTE: Descriptor schemas must be registered first, so
# processes can validate 'entity_descriptor_schema' field.
self.register_processes(process_schemas, user_admin, force, verbosity=verbosity)
if retire:
self.retire(process_schemas)
if verbosity > 0:
self.stdout.write("Running executor post-registration hook...")
manager.get_executor().post_register_hook(verbosity=verbosity) | def function[handle, parameter[self]]:
constant[Register processes.]
variable[force] assign[=] call[name[options].get, parameter[constant[force]]]
variable[retire] assign[=] call[name[options].get, parameter[constant[retire]]]
variable[verbosity] assign[=] call[name[int], parameter[call[name[options].get, parameter[constant[verbosity]]]]]
variable[users] assign[=] call[call[call[name[get_user_model], parameter[]].objects.filter, parameter[]].order_by, parameter[constant[date_joined]]]
if <ast.UnaryOp object at 0x7da1b1add9f0> begin[:]
call[name[self].stderr.write, parameter[constant[Admin does not exist: create a superuser]]]
call[name[exit], parameter[constant[1]]]
<ast.Tuple object at 0x7da1b1ad7820> assign[=] tuple[[<ast.List object at 0x7da1b1ad78e0>, <ast.List object at 0x7da1b1ad7910>]]
<ast.Tuple object at 0x7da1b1ad7970> assign[=] tuple[[<ast.List object at 0x7da1b1ad7a30>, <ast.List object at 0x7da1b1ad7a60>]]
for taget[name[finder]] in starred[call[name[get_finders], parameter[]]] begin[:]
call[name[process_paths].extend, parameter[call[name[finder].find_processes, parameter[]]]]
call[name[descriptor_paths].extend, parameter[call[name[finder].find_descriptors, parameter[]]]]
for taget[name[proc_path]] in starred[name[process_paths]] begin[:]
call[name[process_schemas].extend, parameter[call[name[self].find_schemas, parameter[name[proc_path]]]]]
for taget[name[desc_path]] in starred[name[descriptor_paths]] begin[:]
call[name[descriptor_schemas].extend, parameter[call[name[self].find_schemas, parameter[name[desc_path]]]]]
variable[user_admin] assign[=] call[name[users].first, parameter[]]
call[name[self].register_descriptors, parameter[name[descriptor_schemas], name[user_admin], name[force]]]
call[name[self].register_processes, parameter[name[process_schemas], name[user_admin], name[force]]]
if name[retire] begin[:]
call[name[self].retire, parameter[name[process_schemas]]]
if compare[name[verbosity] greater[>] constant[0]] begin[:]
call[name[self].stdout.write, parameter[constant[Running executor post-registration hook...]]]
call[call[name[manager].get_executor, parameter[]].post_register_hook, parameter[]] | keyword[def] identifier[handle] ( identifier[self] ,* identifier[args] ,** identifier[options] ):
literal[string]
identifier[force] = identifier[options] . identifier[get] ( literal[string] )
identifier[retire] = identifier[options] . identifier[get] ( literal[string] )
identifier[verbosity] = identifier[int] ( identifier[options] . identifier[get] ( literal[string] ))
identifier[users] = identifier[get_user_model] (). identifier[objects] . identifier[filter] ( identifier[is_superuser] = keyword[True] ). identifier[order_by] ( literal[string] )
keyword[if] keyword[not] identifier[users] . identifier[exists] ():
identifier[self] . identifier[stderr] . identifier[write] ( literal[string] )
identifier[exit] ( literal[int] )
identifier[process_paths] , identifier[descriptor_paths] =[],[]
identifier[process_schemas] , identifier[descriptor_schemas] =[],[]
keyword[for] identifier[finder] keyword[in] identifier[get_finders] ():
identifier[process_paths] . identifier[extend] ( identifier[finder] . identifier[find_processes] ())
identifier[descriptor_paths] . identifier[extend] ( identifier[finder] . identifier[find_descriptors] ())
keyword[for] identifier[proc_path] keyword[in] identifier[process_paths] :
identifier[process_schemas] . identifier[extend] (
identifier[self] . identifier[find_schemas] ( identifier[proc_path] , identifier[schema_type] = identifier[SCHEMA_TYPE_PROCESS] , identifier[verbosity] = identifier[verbosity] ))
keyword[for] identifier[desc_path] keyword[in] identifier[descriptor_paths] :
identifier[descriptor_schemas] . identifier[extend] (
identifier[self] . identifier[find_schemas] ( identifier[desc_path] , identifier[schema_type] = identifier[SCHEMA_TYPE_DESCRIPTOR] , identifier[verbosity] = identifier[verbosity] ))
identifier[user_admin] = identifier[users] . identifier[first] ()
identifier[self] . identifier[register_descriptors] ( identifier[descriptor_schemas] , identifier[user_admin] , identifier[force] , identifier[verbosity] = identifier[verbosity] )
identifier[self] . identifier[register_processes] ( identifier[process_schemas] , identifier[user_admin] , identifier[force] , identifier[verbosity] = identifier[verbosity] )
keyword[if] identifier[retire] :
identifier[self] . identifier[retire] ( identifier[process_schemas] )
keyword[if] identifier[verbosity] > literal[int] :
identifier[self] . identifier[stdout] . identifier[write] ( literal[string] )
identifier[manager] . identifier[get_executor] (). identifier[post_register_hook] ( identifier[verbosity] = identifier[verbosity] ) | def handle(self, *args, **options):
"""Register processes."""
force = options.get('force')
retire = options.get('retire')
verbosity = int(options.get('verbosity'))
users = get_user_model().objects.filter(is_superuser=True).order_by('date_joined')
if not users.exists():
self.stderr.write('Admin does not exist: create a superuser')
exit(1) # depends on [control=['if'], data=[]]
(process_paths, descriptor_paths) = ([], [])
(process_schemas, descriptor_schemas) = ([], [])
for finder in get_finders():
process_paths.extend(finder.find_processes())
descriptor_paths.extend(finder.find_descriptors()) # depends on [control=['for'], data=['finder']]
for proc_path in process_paths:
process_schemas.extend(self.find_schemas(proc_path, schema_type=SCHEMA_TYPE_PROCESS, verbosity=verbosity)) # depends on [control=['for'], data=['proc_path']]
for desc_path in descriptor_paths:
descriptor_schemas.extend(self.find_schemas(desc_path, schema_type=SCHEMA_TYPE_DESCRIPTOR, verbosity=verbosity)) # depends on [control=['for'], data=['desc_path']]
user_admin = users.first()
self.register_descriptors(descriptor_schemas, user_admin, force, verbosity=verbosity)
# NOTE: Descriptor schemas must be registered first, so
# processes can validate 'entity_descriptor_schema' field.
self.register_processes(process_schemas, user_admin, force, verbosity=verbosity)
if retire:
self.retire(process_schemas) # depends on [control=['if'], data=[]]
if verbosity > 0:
self.stdout.write('Running executor post-registration hook...') # depends on [control=['if'], data=[]]
manager.get_executor().post_register_hook(verbosity=verbosity) |
def analyze(self, text):
"""
Run text through the external process, and get a list of lists
("records") that contain the analysis of each word.
"""
try:
text = render_safe(text).strip()
if not text:
return []
chunks = text.split('\n')
results = []
for chunk_text in chunks:
if chunk_text.strip():
textbytes = (chunk_text + '\n').encode('utf-8')
self.send_input(textbytes)
out_line = ''
while True:
out_line = self.receive_output_line()
out_line = out_line.decode('utf-8')
if out_line == '\n':
break
record = out_line.strip('\n').split(' ')
results.append(record)
return results
except ProcessError:
self.restart_process()
return self.analyze(text) | def function[analyze, parameter[self, text]]:
constant[
Run text through the external process, and get a list of lists
("records") that contain the analysis of each word.
]
<ast.Try object at 0x7da207f9a590> | keyword[def] identifier[analyze] ( identifier[self] , identifier[text] ):
literal[string]
keyword[try] :
identifier[text] = identifier[render_safe] ( identifier[text] ). identifier[strip] ()
keyword[if] keyword[not] identifier[text] :
keyword[return] []
identifier[chunks] = identifier[text] . identifier[split] ( literal[string] )
identifier[results] =[]
keyword[for] identifier[chunk_text] keyword[in] identifier[chunks] :
keyword[if] identifier[chunk_text] . identifier[strip] ():
identifier[textbytes] =( identifier[chunk_text] + literal[string] ). identifier[encode] ( literal[string] )
identifier[self] . identifier[send_input] ( identifier[textbytes] )
identifier[out_line] = literal[string]
keyword[while] keyword[True] :
identifier[out_line] = identifier[self] . identifier[receive_output_line] ()
identifier[out_line] = identifier[out_line] . identifier[decode] ( literal[string] )
keyword[if] identifier[out_line] == literal[string] :
keyword[break]
identifier[record] = identifier[out_line] . identifier[strip] ( literal[string] ). identifier[split] ( literal[string] )
identifier[results] . identifier[append] ( identifier[record] )
keyword[return] identifier[results]
keyword[except] identifier[ProcessError] :
identifier[self] . identifier[restart_process] ()
keyword[return] identifier[self] . identifier[analyze] ( identifier[text] ) | def analyze(self, text):
"""
Run text through the external process, and get a list of lists
("records") that contain the analysis of each word.
"""
try:
text = render_safe(text).strip()
if not text:
return [] # depends on [control=['if'], data=[]]
chunks = text.split('\n')
results = []
for chunk_text in chunks:
if chunk_text.strip():
textbytes = (chunk_text + '\n').encode('utf-8')
self.send_input(textbytes)
out_line = ''
while True:
out_line = self.receive_output_line()
out_line = out_line.decode('utf-8')
if out_line == '\n':
break # depends on [control=['if'], data=[]]
record = out_line.strip('\n').split(' ')
results.append(record) # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['chunk_text']]
return results # depends on [control=['try'], data=[]]
except ProcessError:
self.restart_process()
return self.analyze(text) # depends on [control=['except'], data=[]] |
def load_scrap(self, path):
"""
Load scraper settings from file
:param path: Path to file
:type path: str
:rtype: None
:raises WEBFileException: Failed to load settings
:raises WEBParameterException: Missing parameters in file
"""
try:
conf = self.load_settings(path)
except:
# Should only be IOError
self.exception("Failed to load file")
raise WEBFileException("Failed to load from {}".format(path))
if "scheme" not in conf:
raise WEBParameterException("Missing scheme definition")
if "url" not in conf:
raise WEBParameterException("Missing url definition")
version = conf.get('version', None)
if version != "1.0":
raise WEBParameterException(
"Unsupported version {}".format(version)
)
self.scheme = conf['scheme']
self.url = conf['url']
self.timeout = conf.get('timeout', self.timeout)
if conf.get('html2text'):
self._set_html2text(conf['html2text']) | def function[load_scrap, parameter[self, path]]:
constant[
Load scraper settings from file
:param path: Path to file
:type path: str
:rtype: None
:raises WEBFileException: Failed to load settings
:raises WEBParameterException: Missing parameters in file
]
<ast.Try object at 0x7da1b26506a0>
if compare[constant[scheme] <ast.NotIn object at 0x7da2590d7190> name[conf]] begin[:]
<ast.Raise object at 0x7da2047e89a0>
if compare[constant[url] <ast.NotIn object at 0x7da2590d7190> name[conf]] begin[:]
<ast.Raise object at 0x7da18fe93130>
variable[version] assign[=] call[name[conf].get, parameter[constant[version], constant[None]]]
if compare[name[version] not_equal[!=] constant[1.0]] begin[:]
<ast.Raise object at 0x7da18fe91360>
name[self].scheme assign[=] call[name[conf]][constant[scheme]]
name[self].url assign[=] call[name[conf]][constant[url]]
name[self].timeout assign[=] call[name[conf].get, parameter[constant[timeout], name[self].timeout]]
if call[name[conf].get, parameter[constant[html2text]]] begin[:]
call[name[self]._set_html2text, parameter[call[name[conf]][constant[html2text]]]] | keyword[def] identifier[load_scrap] ( identifier[self] , identifier[path] ):
literal[string]
keyword[try] :
identifier[conf] = identifier[self] . identifier[load_settings] ( identifier[path] )
keyword[except] :
identifier[self] . identifier[exception] ( literal[string] )
keyword[raise] identifier[WEBFileException] ( literal[string] . identifier[format] ( identifier[path] ))
keyword[if] literal[string] keyword[not] keyword[in] identifier[conf] :
keyword[raise] identifier[WEBParameterException] ( literal[string] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[conf] :
keyword[raise] identifier[WEBParameterException] ( literal[string] )
identifier[version] = identifier[conf] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[version] != literal[string] :
keyword[raise] identifier[WEBParameterException] (
literal[string] . identifier[format] ( identifier[version] )
)
identifier[self] . identifier[scheme] = identifier[conf] [ literal[string] ]
identifier[self] . identifier[url] = identifier[conf] [ literal[string] ]
identifier[self] . identifier[timeout] = identifier[conf] . identifier[get] ( literal[string] , identifier[self] . identifier[timeout] )
keyword[if] identifier[conf] . identifier[get] ( literal[string] ):
identifier[self] . identifier[_set_html2text] ( identifier[conf] [ literal[string] ]) | def load_scrap(self, path):
"""
Load scraper settings from file
:param path: Path to file
:type path: str
:rtype: None
:raises WEBFileException: Failed to load settings
:raises WEBParameterException: Missing parameters in file
"""
try:
conf = self.load_settings(path) # depends on [control=['try'], data=[]]
except:
# Should only be IOError
self.exception('Failed to load file')
raise WEBFileException('Failed to load from {}'.format(path)) # depends on [control=['except'], data=[]]
if 'scheme' not in conf:
raise WEBParameterException('Missing scheme definition') # depends on [control=['if'], data=[]]
if 'url' not in conf:
raise WEBParameterException('Missing url definition') # depends on [control=['if'], data=[]]
version = conf.get('version', None)
if version != '1.0':
raise WEBParameterException('Unsupported version {}'.format(version)) # depends on [control=['if'], data=['version']]
self.scheme = conf['scheme']
self.url = conf['url']
self.timeout = conf.get('timeout', self.timeout)
if conf.get('html2text'):
self._set_html2text(conf['html2text']) # depends on [control=['if'], data=[]] |
def expand_image(image, shape):
""" Expand image from original shape to requested shape. Output shape
must be an integer multiple of input image shape for each axis. """
if (shape[0] % image.shape[0]) or (shape[1] % image.shape[1]):
raise ValueError("Output shape must be an integer multiple of input "
"image shape.")
sx = shape[1] // image.shape[1]
sy = shape[0] // image.shape[0]
ox = (sx - 1.0) / (2.0 * sx)
oy = (sy - 1.0) / (2.0 * sy)
# generate output coordinates:
y, x = np.indices(shape, dtype=np.float)
x = x / sx - ox
y = y / sy - oy
# interpolate:
return bilinear_interp(image, x, y) | def function[expand_image, parameter[image, shape]]:
constant[ Expand image from original shape to requested shape. Output shape
must be an integer multiple of input image shape for each axis. ]
if <ast.BoolOp object at 0x7da204620eb0> begin[:]
<ast.Raise object at 0x7da1b1bbff10>
variable[sx] assign[=] binary_operation[call[name[shape]][constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> call[name[image].shape][constant[1]]]
variable[sy] assign[=] binary_operation[call[name[shape]][constant[0]] <ast.FloorDiv object at 0x7da2590d6bc0> call[name[image].shape][constant[0]]]
variable[ox] assign[=] binary_operation[binary_operation[name[sx] - constant[1.0]] / binary_operation[constant[2.0] * name[sx]]]
variable[oy] assign[=] binary_operation[binary_operation[name[sy] - constant[1.0]] / binary_operation[constant[2.0] * name[sy]]]
<ast.Tuple object at 0x7da204622950> assign[=] call[name[np].indices, parameter[name[shape]]]
variable[x] assign[=] binary_operation[binary_operation[name[x] / name[sx]] - name[ox]]
variable[y] assign[=] binary_operation[binary_operation[name[y] / name[sy]] - name[oy]]
return[call[name[bilinear_interp], parameter[name[image], name[x], name[y]]]] | keyword[def] identifier[expand_image] ( identifier[image] , identifier[shape] ):
literal[string]
keyword[if] ( identifier[shape] [ literal[int] ]% identifier[image] . identifier[shape] [ literal[int] ]) keyword[or] ( identifier[shape] [ literal[int] ]% identifier[image] . identifier[shape] [ literal[int] ]):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[sx] = identifier[shape] [ literal[int] ]// identifier[image] . identifier[shape] [ literal[int] ]
identifier[sy] = identifier[shape] [ literal[int] ]// identifier[image] . identifier[shape] [ literal[int] ]
identifier[ox] =( identifier[sx] - literal[int] )/( literal[int] * identifier[sx] )
identifier[oy] =( identifier[sy] - literal[int] )/( literal[int] * identifier[sy] )
identifier[y] , identifier[x] = identifier[np] . identifier[indices] ( identifier[shape] , identifier[dtype] = identifier[np] . identifier[float] )
identifier[x] = identifier[x] / identifier[sx] - identifier[ox]
identifier[y] = identifier[y] / identifier[sy] - identifier[oy]
keyword[return] identifier[bilinear_interp] ( identifier[image] , identifier[x] , identifier[y] ) | def expand_image(image, shape):
""" Expand image from original shape to requested shape. Output shape
must be an integer multiple of input image shape for each axis. """
if shape[0] % image.shape[0] or shape[1] % image.shape[1]:
raise ValueError('Output shape must be an integer multiple of input image shape.') # depends on [control=['if'], data=[]]
sx = shape[1] // image.shape[1]
sy = shape[0] // image.shape[0]
ox = (sx - 1.0) / (2.0 * sx)
oy = (sy - 1.0) / (2.0 * sy)
# generate output coordinates:
(y, x) = np.indices(shape, dtype=np.float)
x = x / sx - ox
y = y / sy - oy
# interpolate:
return bilinear_interp(image, x, y) |
def dropout(tensor, drop_prob, is_training):
'''
Dropout except test.
'''
if not is_training:
return tensor
return tf.nn.dropout(tensor, 1.0 - drop_prob) | def function[dropout, parameter[tensor, drop_prob, is_training]]:
constant[
Dropout except test.
]
if <ast.UnaryOp object at 0x7da1b1fcaf50> begin[:]
return[name[tensor]]
return[call[name[tf].nn.dropout, parameter[name[tensor], binary_operation[constant[1.0] - name[drop_prob]]]]] | keyword[def] identifier[dropout] ( identifier[tensor] , identifier[drop_prob] , identifier[is_training] ):
literal[string]
keyword[if] keyword[not] identifier[is_training] :
keyword[return] identifier[tensor]
keyword[return] identifier[tf] . identifier[nn] . identifier[dropout] ( identifier[tensor] , literal[int] - identifier[drop_prob] ) | def dropout(tensor, drop_prob, is_training):
"""
Dropout except test.
"""
if not is_training:
return tensor # depends on [control=['if'], data=[]]
return tf.nn.dropout(tensor, 1.0 - drop_prob) |
def url(self):
'''
Because invoice URLs are generally emailed, this
includes the default site URL and the protocol specified in
settings.
'''
if self.id:
return '%s://%s%s' % (
getConstant('email__linkProtocol'),
Site.objects.get_current().domain,
reverse('viewInvoice', args=[self.id,]),
) | def function[url, parameter[self]]:
constant[
Because invoice URLs are generally emailed, this
includes the default site URL and the protocol specified in
settings.
]
if name[self].id begin[:]
return[binary_operation[constant[%s://%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b13a4a30>, <ast.Attribute object at 0x7da1b13a4850>, <ast.Call object at 0x7da1b13a6440>]]]] | keyword[def] identifier[url] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[id] :
keyword[return] literal[string] %(
identifier[getConstant] ( literal[string] ),
identifier[Site] . identifier[objects] . identifier[get_current] (). identifier[domain] ,
identifier[reverse] ( literal[string] , identifier[args] =[ identifier[self] . identifier[id] ,]),
) | def url(self):
"""
Because invoice URLs are generally emailed, this
includes the default site URL and the protocol specified in
settings.
"""
if self.id:
return '%s://%s%s' % (getConstant('email__linkProtocol'), Site.objects.get_current().domain, reverse('viewInvoice', args=[self.id])) # depends on [control=['if'], data=[]] |
def echo_html_fenye_str(rec_num, fenye_num):
'''
生成分页的导航
'''
pagination_num = int(math.ceil(rec_num * 1.0 / 10))
if pagination_num == 1 or pagination_num == 0:
fenye_str = ''
elif pagination_num > 1:
pager_mid, pager_pre, pager_next, pager_last, pager_home = '', '', '', '', ''
fenye_str = '<ul class="pagination">'
if fenye_num > 1:
pager_home = '''<li class="{0}" name='fenye' onclick='change(this);'
value='{1}'><a>First Page</a></li>'''.format('', 1)
pager_pre = ''' <li class="{0}" name='fenye' onclick='change(this);'
value='{1}'><a>Previous Page</a></li>'''.format('', fenye_num - 1)
if fenye_num > 5:
cur_num = fenye_num - 4
else:
cur_num = 1
if pagination_num > 10 and cur_num < pagination_num - 10:
show_num = cur_num + 10
else:
show_num = pagination_num + 1
for num in range(cur_num, show_num):
if num == fenye_num:
checkstr = 'active'
else:
checkstr = ''
tmp_str_df = '''<li class="{0}" name='fenye' onclick='change(this);'
value='{1}'><a>{1}</a></li>'''.format(checkstr, num)
pager_mid += tmp_str_df
if fenye_num < pagination_num:
pager_next = '''<li class="{0}" name='fenye' onclick='change(this);'
value='{1}'><a>Next Page</a></li>'''.format('', fenye_num + 1)
pager_last = '''<li class="{0}" name='fenye' onclick='change(this);'
value='{1}'><a>End Page</a></li>'''.format('', pagination_num)
fenye_str += pager_home + pager_pre + pager_mid + pager_next + pager_last
fenye_str += '</ul>'
else:
return ''
return fenye_str | def function[echo_html_fenye_str, parameter[rec_num, fenye_num]]:
constant[
生成分页的导航
]
variable[pagination_num] assign[=] call[name[int], parameter[call[name[math].ceil, parameter[binary_operation[binary_operation[name[rec_num] * constant[1.0]] / constant[10]]]]]]
if <ast.BoolOp object at 0x7da1b0668460> begin[:]
variable[fenye_str] assign[=] constant[]
return[name[fenye_str]] | keyword[def] identifier[echo_html_fenye_str] ( identifier[rec_num] , identifier[fenye_num] ):
literal[string]
identifier[pagination_num] = identifier[int] ( identifier[math] . identifier[ceil] ( identifier[rec_num] * literal[int] / literal[int] ))
keyword[if] identifier[pagination_num] == literal[int] keyword[or] identifier[pagination_num] == literal[int] :
identifier[fenye_str] = literal[string]
keyword[elif] identifier[pagination_num] > literal[int] :
identifier[pager_mid] , identifier[pager_pre] , identifier[pager_next] , identifier[pager_last] , identifier[pager_home] = literal[string] , literal[string] , literal[string] , literal[string] , literal[string]
identifier[fenye_str] = literal[string]
keyword[if] identifier[fenye_num] > literal[int] :
identifier[pager_home] = literal[string] . identifier[format] ( literal[string] , literal[int] )
identifier[pager_pre] = literal[string] . identifier[format] ( literal[string] , identifier[fenye_num] - literal[int] )
keyword[if] identifier[fenye_num] > literal[int] :
identifier[cur_num] = identifier[fenye_num] - literal[int]
keyword[else] :
identifier[cur_num] = literal[int]
keyword[if] identifier[pagination_num] > literal[int] keyword[and] identifier[cur_num] < identifier[pagination_num] - literal[int] :
identifier[show_num] = identifier[cur_num] + literal[int]
keyword[else] :
identifier[show_num] = identifier[pagination_num] + literal[int]
keyword[for] identifier[num] keyword[in] identifier[range] ( identifier[cur_num] , identifier[show_num] ):
keyword[if] identifier[num] == identifier[fenye_num] :
identifier[checkstr] = literal[string]
keyword[else] :
identifier[checkstr] = literal[string]
identifier[tmp_str_df] = literal[string] . identifier[format] ( identifier[checkstr] , identifier[num] )
identifier[pager_mid] += identifier[tmp_str_df]
keyword[if] identifier[fenye_num] < identifier[pagination_num] :
identifier[pager_next] = literal[string] . identifier[format] ( literal[string] , identifier[fenye_num] + literal[int] )
identifier[pager_last] = literal[string] . identifier[format] ( literal[string] , identifier[pagination_num] )
identifier[fenye_str] += identifier[pager_home] + identifier[pager_pre] + identifier[pager_mid] + identifier[pager_next] + identifier[pager_last]
identifier[fenye_str] += literal[string]
keyword[else] :
keyword[return] literal[string]
keyword[return] identifier[fenye_str] | def echo_html_fenye_str(rec_num, fenye_num):
"""
生成分页的导航
"""
pagination_num = int(math.ceil(rec_num * 1.0 / 10))
if pagination_num == 1 or pagination_num == 0:
fenye_str = '' # depends on [control=['if'], data=[]]
elif pagination_num > 1:
(pager_mid, pager_pre, pager_next, pager_last, pager_home) = ('', '', '', '', '')
fenye_str = '<ul class="pagination">'
if fenye_num > 1:
pager_home = '<li class="{0}" name=\'fenye\' onclick=\'change(this);\'\n value=\'{1}\'><a>First Page</a></li>'.format('', 1)
pager_pre = ' <li class="{0}" name=\'fenye\' onclick=\'change(this);\'\n value=\'{1}\'><a>Previous Page</a></li>'.format('', fenye_num - 1) # depends on [control=['if'], data=['fenye_num']]
if fenye_num > 5:
cur_num = fenye_num - 4 # depends on [control=['if'], data=['fenye_num']]
else:
cur_num = 1
if pagination_num > 10 and cur_num < pagination_num - 10:
show_num = cur_num + 10 # depends on [control=['if'], data=[]]
else:
show_num = pagination_num + 1
for num in range(cur_num, show_num):
if num == fenye_num:
checkstr = 'active' # depends on [control=['if'], data=[]]
else:
checkstr = ''
tmp_str_df = '<li class="{0}" name=\'fenye\' onclick=\'change(this);\'\n value=\'{1}\'><a>{1}</a></li>'.format(checkstr, num)
pager_mid += tmp_str_df # depends on [control=['for'], data=['num']]
if fenye_num < pagination_num:
pager_next = '<li class="{0}" name=\'fenye\' onclick=\'change(this);\'\n value=\'{1}\'><a>Next Page</a></li>'.format('', fenye_num + 1)
pager_last = '<li class="{0}" name=\'fenye\' onclick=\'change(this);\'\n value=\'{1}\'><a>End Page</a></li>'.format('', pagination_num) # depends on [control=['if'], data=['fenye_num', 'pagination_num']]
fenye_str += pager_home + pager_pre + pager_mid + pager_next + pager_last
fenye_str += '</ul>' # depends on [control=['if'], data=['pagination_num']]
else:
return ''
return fenye_str |
def _constraints(self, values):
    """Applies physical constraints to the given parameter values.

    Parameters
    ----------
    values : {arr or dict}
        A dictionary or structured array giving the values.

    Returns
    -------
    bool
        Whether or not the values satisfy physical
    """
    (mass1, mass2, phi_a, phi_s, chi_eff, chi_a,
     xi1, xi2, _) = conversions.ensurearray(
        values['mass1'], values['mass2'], values['phi_a'],
        values['phi_s'], values['chi_eff'], values['chi_a'],
        values['xi1'], values['xi2'])
    # In-plane spin components from the azimuthal/xi parameterization.
    s1x = conversions.spin1x_from_xi1_phi_a_phi_s(xi1, phi_a, phi_s)
    s1y = conversions.spin1y_from_xi1_phi_a_phi_s(xi1, phi_a, phi_s)
    s2x = conversions.spin2x_from_mass1_mass2_xi2_phi_a_phi_s(
        mass1, mass2, xi2, phi_a, phi_s)
    s2y = conversions.spin2y_from_mass1_mass2_xi2_phi_a_phi_s(
        mass1, mass2, xi2, phi_a, phi_s)
    # Aligned components from the effective/antisymmetric spin combination.
    s1z = conversions.spin1z_from_mass1_mass2_chi_eff_chi_a(
        mass1, mass2, chi_eff, chi_a)
    s2z = conversions.spin2z_from_mass1_mass2_chi_eff_chi_a(
        mass1, mass2, chi_eff, chi_a)
    # Each spin vector must lie strictly inside the unit ball.
    mag1_sq = s1x**2. + s1y**2. + s1z**2.
    mag2_sq = s2x**2. + s2y**2. + s2z**2.
    return (mag1_sq < 1.) & (mag2_sq < 1.)
constant[Applies physical constraints to the given parameter values.
Parameters
----------
values : {arr or dict}
A dictionary or structured array giving the values.
Returns
-------
bool
Whether or not the values satisfy physical
]
<ast.Tuple object at 0x7da1b1da1750> assign[=] call[name[conversions].ensurearray, parameter[call[name[values]][constant[mass1]], call[name[values]][constant[mass2]], call[name[values]][constant[phi_a]], call[name[values]][constant[phi_s]], call[name[values]][constant[chi_eff]], call[name[values]][constant[chi_a]], call[name[values]][constant[xi1]], call[name[values]][constant[xi2]]]]
variable[s1x] assign[=] call[name[conversions].spin1x_from_xi1_phi_a_phi_s, parameter[name[xi1], name[phi_a], name[phi_s]]]
variable[s2x] assign[=] call[name[conversions].spin2x_from_mass1_mass2_xi2_phi_a_phi_s, parameter[name[mass1], name[mass2], name[xi2], name[phi_a], name[phi_s]]]
variable[s1y] assign[=] call[name[conversions].spin1y_from_xi1_phi_a_phi_s, parameter[name[xi1], name[phi_a], name[phi_s]]]
variable[s2y] assign[=] call[name[conversions].spin2y_from_mass1_mass2_xi2_phi_a_phi_s, parameter[name[mass1], name[mass2], name[xi2], name[phi_a], name[phi_s]]]
variable[s1z] assign[=] call[name[conversions].spin1z_from_mass1_mass2_chi_eff_chi_a, parameter[name[mass1], name[mass2], name[chi_eff], name[chi_a]]]
variable[s2z] assign[=] call[name[conversions].spin2z_from_mass1_mass2_chi_eff_chi_a, parameter[name[mass1], name[mass2], name[chi_eff], name[chi_a]]]
variable[test] assign[=] binary_operation[compare[binary_operation[binary_operation[binary_operation[name[s1x] ** constant[2.0]] + binary_operation[name[s1y] ** constant[2.0]]] + binary_operation[name[s1z] ** constant[2.0]]] less[<] constant[1.0]] <ast.BitAnd object at 0x7da2590d6b60> compare[binary_operation[binary_operation[binary_operation[name[s2x] ** constant[2.0]] + binary_operation[name[s2y] ** constant[2.0]]] + binary_operation[name[s2z] ** constant[2.0]]] less[<] constant[1.0]]]
return[name[test]] | keyword[def] identifier[_constraints] ( identifier[self] , identifier[values] ):
literal[string]
identifier[mass1] , identifier[mass2] , identifier[phi_a] , identifier[phi_s] , identifier[chi_eff] , identifier[chi_a] , identifier[xi1] , identifier[xi2] , identifier[_] = identifier[conversions] . identifier[ensurearray] ( identifier[values] [ literal[string] ], identifier[values] [ literal[string] ],
identifier[values] [ literal[string] ], identifier[values] [ literal[string] ],
identifier[values] [ literal[string] ], identifier[values] [ literal[string] ],
identifier[values] [ literal[string] ], identifier[values] [ literal[string] ])
identifier[s1x] = identifier[conversions] . identifier[spin1x_from_xi1_phi_a_phi_s] ( identifier[xi1] , identifier[phi_a] , identifier[phi_s] )
identifier[s2x] = identifier[conversions] . identifier[spin2x_from_mass1_mass2_xi2_phi_a_phi_s] ( identifier[mass1] , identifier[mass2] ,
identifier[xi2] , identifier[phi_a] , identifier[phi_s] )
identifier[s1y] = identifier[conversions] . identifier[spin1y_from_xi1_phi_a_phi_s] ( identifier[xi1] , identifier[phi_a] , identifier[phi_s] )
identifier[s2y] = identifier[conversions] . identifier[spin2y_from_mass1_mass2_xi2_phi_a_phi_s] ( identifier[mass1] , identifier[mass2] ,
identifier[xi2] , identifier[phi_a] , identifier[phi_s] )
identifier[s1z] = identifier[conversions] . identifier[spin1z_from_mass1_mass2_chi_eff_chi_a] ( identifier[mass1] , identifier[mass2] ,
identifier[chi_eff] , identifier[chi_a] )
identifier[s2z] = identifier[conversions] . identifier[spin2z_from_mass1_mass2_chi_eff_chi_a] ( identifier[mass1] , identifier[mass2] ,
identifier[chi_eff] , identifier[chi_a] )
identifier[test] =(( identifier[s1x] ** literal[int] + identifier[s1y] ** literal[int] + identifier[s1z] ** literal[int] )< literal[int] )&(( identifier[s2x] ** literal[int] + identifier[s2y] ** literal[int] + identifier[s2z] ** literal[int] )< literal[int] )
keyword[return] identifier[test] | def _constraints(self, values):
"""Applies physical constraints to the given parameter values.
Parameters
----------
values : {arr or dict}
A dictionary or structured array giving the values.
Returns
-------
bool
Whether or not the values satisfy physical
"""
(mass1, mass2, phi_a, phi_s, chi_eff, chi_a, xi1, xi2, _) = conversions.ensurearray(values['mass1'], values['mass2'], values['phi_a'], values['phi_s'], values['chi_eff'], values['chi_a'], values['xi1'], values['xi2'])
s1x = conversions.spin1x_from_xi1_phi_a_phi_s(xi1, phi_a, phi_s)
s2x = conversions.spin2x_from_mass1_mass2_xi2_phi_a_phi_s(mass1, mass2, xi2, phi_a, phi_s)
s1y = conversions.spin1y_from_xi1_phi_a_phi_s(xi1, phi_a, phi_s)
s2y = conversions.spin2y_from_mass1_mass2_xi2_phi_a_phi_s(mass1, mass2, xi2, phi_a, phi_s)
s1z = conversions.spin1z_from_mass1_mass2_chi_eff_chi_a(mass1, mass2, chi_eff, chi_a)
s2z = conversions.spin2z_from_mass1_mass2_chi_eff_chi_a(mass1, mass2, chi_eff, chi_a)
test = (s1x ** 2.0 + s1y ** 2.0 + s1z ** 2.0 < 1.0) & (s2x ** 2.0 + s2y ** 2.0 + s2z ** 2.0 < 1.0)
return test |
def get_all_organization_names(configuration=None, **kwargs):
    # type: (Optional[Configuration], Any) -> List[str]
    """Get all organization names in HDX

    Args:
        configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
        **kwargs: See below
        sort (str): Sort the search results according to field name and sort-order. Allowed fields are ‘name’, ‘package_count’ and ‘title’. Defaults to 'name asc'.
        organizations (List[str]): List of names of the groups to return.
        all_fields (bool): Return group dictionaries instead of just names. Only core fields are returned - get some more using the include_* options. Defaults to False.
        include_extras (bool): If all_fields, include the group extra fields. Defaults to False.
        include_tags (bool): If all_fields, include the group tags. Defaults to False.
        include_groups: If all_fields, include the groups the groups are in. Defaults to False.

    Returns:
        List[str]: List of all organization names in HDX
    """
    org = Organization(configuration=configuration)
    # The id is never sent as a filter; it is only shown in error messages.
    org['id'] = 'all organizations'
    return org._write_to_hdx('list', kwargs, 'id')
constant[Get all organization names in HDX
Args:
configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
**kwargs: See below
sort (str): Sort the search results according to field name and sort-order. Allowed fields are ‘name’, ‘package_count’ and ‘title’. Defaults to 'name asc'.
organizations (List[str]): List of names of the groups to return.
all_fields (bool): Return group dictionaries instead of just names. Only core fields are returned - get some more using the include_* options. Defaults to False.
include_extras (bool): If all_fields, include the group extra fields. Defaults to False.
include_tags (bool): If all_fields, include the group tags. Defaults to False.
include_groups: If all_fields, include the groups the groups are in. Defaults to False.
Returns:
List[str]: List of all organization names in HDX
]
variable[organization] assign[=] call[name[Organization], parameter[]]
call[name[organization]][constant[id]] assign[=] constant[all organizations]
return[call[name[organization]._write_to_hdx, parameter[constant[list], name[kwargs], constant[id]]]] | keyword[def] identifier[get_all_organization_names] ( identifier[configuration] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[organization] = identifier[Organization] ( identifier[configuration] = identifier[configuration] )
identifier[organization] [ literal[string] ]= literal[string]
keyword[return] identifier[organization] . identifier[_write_to_hdx] ( literal[string] , identifier[kwargs] , literal[string] ) | def get_all_organization_names(configuration=None, **kwargs):
# type: (Optional[Configuration], Any) -> List[str]
"Get all organization names in HDX\n\n Args:\n configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.\n **kwargs: See below\n sort (str): Sort the search results according to field name and sort-order. Allowed fields are ‘name’, ‘package_count’ and ‘title’. Defaults to 'name asc'.\n organizations (List[str]): List of names of the groups to return.\n all_fields (bool): Return group dictionaries instead of just names. Only core fields are returned - get some more using the include_* options. Defaults to False.\n include_extras (bool): If all_fields, include the group extra fields. Defaults to False.\n include_tags (bool): If all_fields, include the group tags. Defaults to False.\n include_groups: If all_fields, include the groups the groups are in. Defaults to False.\n\n Returns:\n List[str]: List of all organization names in HDX\n "
organization = Organization(configuration=configuration)
organization['id'] = 'all organizations' # only for error message if produced
return organization._write_to_hdx('list', kwargs, 'id') |
def user_read_message(self, id, **kwargs):  # noqa: E501
    """Mark a specific message as read  # noqa: E501

    # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.user_read_message(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :return: ResponseContainerMessage
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the deserialized body is wanted, never the (data, status, headers)
    # triple, so force the data-only flag before delegating.
    kwargs['_return_http_data_only'] = True
    # Both the async and sync branches previously returned the result of the
    # same call unchanged, so the branch on 'async_req' was redundant: the
    # delegate already yields a thread when async_req is set and the data
    # otherwise.
    return self.user_read_message_with_http_info(id, **kwargs)  # noqa: E501
constant[Mark a specific message as read # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.user_read_message(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerMessage
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].user_read_message_with_http_info, parameter[name[id]]]] | keyword[def] identifier[user_read_message] ( identifier[self] , identifier[id] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[user_read_message_with_http_info] ( identifier[id] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[user_read_message_with_http_info] ( identifier[id] ,** identifier[kwargs] )
keyword[return] identifier[data] | def user_read_message(self, id, **kwargs): # noqa: E501
'Mark a specific message as read # noqa: E501\n\n # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.user_read_message(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: (required)\n :return: ResponseContainerMessage\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.user_read_message_with_http_info(id, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.user_read_message_with_http_info(id, **kwargs) # noqa: E501
return data |
def total_proper_motion(pmra, pmdecl, decl):
    '''This calculates the total proper motion of an object.

    Parameters
    ----------

    pmra : float or array-like
        The proper motion(s) in right ascension, measured in mas/yr.

    pmdecl : float or array-like
        The proper motion(s) in declination, measured in mas/yr.

    decl : float or array-like
        The declination of the object(s) in decimal degrees.

    Returns
    -------

    float or array-like
        The total proper motion(s) of the object(s) in mas/yr.

    '''

    # Project the RA component onto the sky with cos(declination), then
    # combine it with the declination component in quadrature.
    cosdecl = np.cos(np.radians(decl))
    pmra_sky = pmra * cosdecl
    return np.sqrt(pmdecl * pmdecl + pmra_sky * pmra_sky)
constant[This calculates the total proper motion of an object.
Parameters
----------
pmra : float or array-like
The proper motion(s) in right ascension, measured in mas/yr.
pmdecl : float or array-like
The proper motion(s) in declination, measured in mas/yr.
decl : float or array-like
The declination of the object(s) in decimal degrees.
Returns
-------
float or array-like
The total proper motion(s) of the object(s) in mas/yr.
]
variable[pm] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[name[pmdecl] * name[pmdecl]] + binary_operation[binary_operation[binary_operation[name[pmra] * name[pmra]] * call[name[np].cos, parameter[call[name[np].radians, parameter[name[decl]]]]]] * call[name[np].cos, parameter[call[name[np].radians, parameter[name[decl]]]]]]]]]
return[name[pm]] | keyword[def] identifier[total_proper_motion] ( identifier[pmra] , identifier[pmdecl] , identifier[decl] ):
literal[string]
identifier[pm] = identifier[np] . identifier[sqrt] ( identifier[pmdecl] * identifier[pmdecl] + identifier[pmra] * identifier[pmra] * identifier[np] . identifier[cos] ( identifier[np] . identifier[radians] ( identifier[decl] ))*
identifier[np] . identifier[cos] ( identifier[np] . identifier[radians] ( identifier[decl] )))
keyword[return] identifier[pm] | def total_proper_motion(pmra, pmdecl, decl):
"""This calculates the total proper motion of an object.
Parameters
----------
pmra : float or array-like
The proper motion(s) in right ascension, measured in mas/yr.
pmdecl : float or array-like
The proper motion(s) in declination, measured in mas/yr.
decl : float or array-like
The declination of the object(s) in decimal degrees.
Returns
-------
float or array-like
The total proper motion(s) of the object(s) in mas/yr.
"""
pm = np.sqrt(pmdecl * pmdecl + pmra * pmra * np.cos(np.radians(decl)) * np.cos(np.radians(decl)))
return pm |
def run(files, temp_folder):
    "Check frosted errors in the code base."
    # Bail out with the canned message when frosted is not installed.
    try:
        import frosted  # NOQA
    except ImportError:
        return NO_FROSTED_MSG

    targets = ' '.join(filter_python_files(files))
    return bash('frosted {0}'.format(targets)).value()
constant[Check frosted errors in the code base.]
<ast.Try object at 0x7da1b0f0c7c0>
variable[py_files] assign[=] call[name[filter_python_files], parameter[name[files]]]
variable[cmd] assign[=] call[constant[frosted {0}].format, parameter[call[constant[ ].join, parameter[name[py_files]]]]]
return[call[call[name[bash], parameter[name[cmd]]].value, parameter[]]] | keyword[def] identifier[run] ( identifier[files] , identifier[temp_folder] ):
literal[string]
keyword[try] :
keyword[import] identifier[frosted]
keyword[except] identifier[ImportError] :
keyword[return] identifier[NO_FROSTED_MSG]
identifier[py_files] = identifier[filter_python_files] ( identifier[files] )
identifier[cmd] = literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[py_files] ))
keyword[return] identifier[bash] ( identifier[cmd] ). identifier[value] () | def run(files, temp_folder):
"""Check frosted errors in the code base."""
try:
import frosted # NOQA # depends on [control=['try'], data=[]]
except ImportError:
return NO_FROSTED_MSG # depends on [control=['except'], data=[]]
py_files = filter_python_files(files)
cmd = 'frosted {0}'.format(' '.join(py_files))
return bash(cmd).value() |
def _git_config(cwd, user, password, output_encoding=None):
    '''
    Helper to retrieve git config options
    '''
    cache_key = 'git.config.' + cwd
    if cache_key in __context__:
        return __context__[cache_key]
    git_dir = rev_parse(cwd,
                        opts=['--git-dir'],
                        user=user,
                        password=password,
                        ignore_retcode=True,
                        output_encoding=output_encoding)
    # A relative --git-dir is resolved against the working tree root.
    if os.path.isabs(git_dir):
        config_path = os.path.join(git_dir, 'config')
    else:
        config_path = os.path.join(cwd, git_dir, 'config')
    __context__[cache_key] = config_path
    return config_path
constant[
Helper to retrieve git config options
]
variable[contextkey] assign[=] binary_operation[constant[git.config.] + name[cwd]]
if compare[name[contextkey] <ast.NotIn object at 0x7da2590d7190> name[__context__]] begin[:]
variable[git_dir] assign[=] call[name[rev_parse], parameter[name[cwd]]]
if <ast.UnaryOp object at 0x7da20c7cb160> begin[:]
variable[paths] assign[=] tuple[[<ast.Name object at 0x7da20c7c8640>, <ast.Name object at 0x7da20c7c8fa0>, <ast.Constant object at 0x7da20c7ca620>]]
call[name[__context__]][name[contextkey]] assign[=] call[name[os].path.join, parameter[<ast.Starred object at 0x7da20c7c9180>]]
return[call[name[__context__]][name[contextkey]]] | keyword[def] identifier[_git_config] ( identifier[cwd] , identifier[user] , identifier[password] , identifier[output_encoding] = keyword[None] ):
literal[string]
identifier[contextkey] = literal[string] + identifier[cwd]
keyword[if] identifier[contextkey] keyword[not] keyword[in] identifier[__context__] :
identifier[git_dir] = identifier[rev_parse] ( identifier[cwd] ,
identifier[opts] =[ literal[string] ],
identifier[user] = identifier[user] ,
identifier[password] = identifier[password] ,
identifier[ignore_retcode] = keyword[True] ,
identifier[output_encoding] = identifier[output_encoding] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isabs] ( identifier[git_dir] ):
identifier[paths] =( identifier[cwd] , identifier[git_dir] , literal[string] )
keyword[else] :
identifier[paths] =( identifier[git_dir] , literal[string] )
identifier[__context__] [ identifier[contextkey] ]= identifier[os] . identifier[path] . identifier[join] (* identifier[paths] )
keyword[return] identifier[__context__] [ identifier[contextkey] ] | def _git_config(cwd, user, password, output_encoding=None):
"""
Helper to retrieve git config options
"""
contextkey = 'git.config.' + cwd
if contextkey not in __context__:
git_dir = rev_parse(cwd, opts=['--git-dir'], user=user, password=password, ignore_retcode=True, output_encoding=output_encoding)
if not os.path.isabs(git_dir):
paths = (cwd, git_dir, 'config') # depends on [control=['if'], data=[]]
else:
paths = (git_dir, 'config')
__context__[contextkey] = os.path.join(*paths) # depends on [control=['if'], data=['contextkey', '__context__']]
return __context__[contextkey] |
def getInfo(self):
    """
    C_GetInfo

    :return: a :class:`CK_INFO` object
    """
    raw = PyKCS11.LowLevel.CK_INFO()
    rv = self.lib.C_GetInfo(raw)
    if rv != CKR_OK:
        raise PyKCS11Error(rv)

    # Copy the low-level struct into the high-level CK_INFO wrapper.
    result = CK_INFO()
    result.cryptokiVersion = (raw.cryptokiVersion.major,
                              raw.cryptokiVersion.minor)
    result.manufacturerID = raw.GetManufacturerID()
    result.flags = raw.flags
    result.libraryDescription = raw.GetLibraryDescription()
    result.libraryVersion = (raw.libraryVersion.major,
                             raw.libraryVersion.minor)
    return result
constant[
C_GetInfo
:return: a :class:`CK_INFO` object
]
variable[info] assign[=] call[name[PyKCS11].LowLevel.CK_INFO, parameter[]]
variable[rv] assign[=] call[name[self].lib.C_GetInfo, parameter[name[info]]]
if compare[name[rv] not_equal[!=] name[CKR_OK]] begin[:]
<ast.Raise object at 0x7da1b2346c80>
variable[i] assign[=] call[name[CK_INFO], parameter[]]
name[i].cryptokiVersion assign[=] tuple[[<ast.Attribute object at 0x7da1b2344ca0>, <ast.Attribute object at 0x7da1b2347790>]]
name[i].manufacturerID assign[=] call[name[info].GetManufacturerID, parameter[]]
name[i].flags assign[=] name[info].flags
name[i].libraryDescription assign[=] call[name[info].GetLibraryDescription, parameter[]]
name[i].libraryVersion assign[=] tuple[[<ast.Attribute object at 0x7da1b2345060>, <ast.Attribute object at 0x7da1b2347730>]]
return[name[i]] | keyword[def] identifier[getInfo] ( identifier[self] ):
literal[string]
identifier[info] = identifier[PyKCS11] . identifier[LowLevel] . identifier[CK_INFO] ()
identifier[rv] = identifier[self] . identifier[lib] . identifier[C_GetInfo] ( identifier[info] )
keyword[if] identifier[rv] != identifier[CKR_OK] :
keyword[raise] identifier[PyKCS11Error] ( identifier[rv] )
identifier[i] = identifier[CK_INFO] ()
identifier[i] . identifier[cryptokiVersion] =( identifier[info] . identifier[cryptokiVersion] . identifier[major] ,
identifier[info] . identifier[cryptokiVersion] . identifier[minor] )
identifier[i] . identifier[manufacturerID] = identifier[info] . identifier[GetManufacturerID] ()
identifier[i] . identifier[flags] = identifier[info] . identifier[flags]
identifier[i] . identifier[libraryDescription] = identifier[info] . identifier[GetLibraryDescription] ()
identifier[i] . identifier[libraryVersion] =( identifier[info] . identifier[libraryVersion] . identifier[major] ,
identifier[info] . identifier[libraryVersion] . identifier[minor] )
keyword[return] identifier[i] | def getInfo(self):
"""
C_GetInfo
:return: a :class:`CK_INFO` object
"""
info = PyKCS11.LowLevel.CK_INFO()
rv = self.lib.C_GetInfo(info)
if rv != CKR_OK:
raise PyKCS11Error(rv) # depends on [control=['if'], data=['rv']]
i = CK_INFO()
i.cryptokiVersion = (info.cryptokiVersion.major, info.cryptokiVersion.minor)
i.manufacturerID = info.GetManufacturerID()
i.flags = info.flags
i.libraryDescription = info.GetLibraryDescription()
i.libraryVersion = (info.libraryVersion.major, info.libraryVersion.minor)
return i |
def set_scale(self, scale, no_reset=False):
    """Scale the image in a channel.

    Thin wrapper around :meth:`scale_to`; also see :meth:`zoom_to`.

    Parameters
    ----------
    scale : tuple of float
        Scaling factors for the image in the X and Y axes.

    no_reset : bool
        Do not reset ``autozoom`` setting.

    """
    # Only the first two entries (X, Y) of ``scale`` are forwarded.
    xy_factors = scale[:2]
    return self.scale_to(*xy_factors, no_reset=no_reset)
constant[Scale the image in a channel.
Also see :meth:`zoom_to`.
Parameters
----------
scale : tuple of float
Scaling factors for the image in the X and Y axes.
no_reset : bool
Do not reset ``autozoom`` setting.
]
return[call[name[self].scale_to, parameter[<ast.Starred object at 0x7da20e956aa0>]]] | keyword[def] identifier[set_scale] ( identifier[self] , identifier[scale] , identifier[no_reset] = keyword[False] ):
literal[string]
keyword[return] identifier[self] . identifier[scale_to] (* identifier[scale] [: literal[int] ], identifier[no_reset] = identifier[no_reset] ) | def set_scale(self, scale, no_reset=False):
"""Scale the image in a channel.
Also see :meth:`zoom_to`.
Parameters
----------
scale : tuple of float
Scaling factors for the image in the X and Y axes.
no_reset : bool
Do not reset ``autozoom`` setting.
"""
return self.scale_to(*scale[:2], no_reset=no_reset) |
def add_permalink_methods(content_inst):
    '''
    Add permalink methods to object
    '''
    # Bind every registered permalink function to the instance so each one
    # behaves like a regular method on that object.
    cls = content_inst.__class__
    for method in PERMALINK_METHODS:
        setattr(content_inst, method.__name__,
                method.__get__(content_inst, cls))
constant[
Add permalink methods to object
]
for taget[name[permalink_method]] in starred[name[PERMALINK_METHODS]] begin[:]
call[name[setattr], parameter[name[content_inst], name[permalink_method].__name__, call[name[permalink_method].__get__, parameter[name[content_inst], name[content_inst].__class__]]]] | keyword[def] identifier[add_permalink_methods] ( identifier[content_inst] ):
literal[string]
keyword[for] identifier[permalink_method] keyword[in] identifier[PERMALINK_METHODS] :
identifier[setattr] (
identifier[content_inst] ,
identifier[permalink_method] . identifier[__name__] ,
identifier[permalink_method] . identifier[__get__] ( identifier[content_inst] , identifier[content_inst] . identifier[__class__] )) | def add_permalink_methods(content_inst):
"""
Add permalink methods to object
"""
for permalink_method in PERMALINK_METHODS:
setattr(content_inst, permalink_method.__name__, permalink_method.__get__(content_inst, content_inst.__class__)) # depends on [control=['for'], data=['permalink_method']] |
def parameter_table(parameters):
    """
    Create
    """
    # Accept a single parameter dict as well as a list of them.
    if not isinstance(parameters, list):
        parameters = [parameters]
    # Each row renders as: symbol, equals sign, value with siunitx unit.
    return [
        ['$' + param['tex_symbol'] + '$',
         '$=$',
         r'$\SI{' + param['disply_value'] + '}{' + param['siunitx'] + '}$']
        for param in parameters
    ]
constant[
Create
]
if <ast.UnaryOp object at 0x7da18fe92110> begin[:]
variable[parameters] assign[=] list[[<ast.Name object at 0x7da18fe92b60>]]
variable[rows] assign[=] list[[]]
for taget[name[param]] in starred[name[parameters]] begin[:]
variable[row] assign[=] list[[]]
call[name[row].append, parameter[binary_operation[binary_operation[constant[$] + call[name[param]][constant[tex_symbol]]] + constant[$]]]]
call[name[row].append, parameter[constant[$=$]]]
call[name[row].append, parameter[binary_operation[binary_operation[binary_operation[binary_operation[constant[$\SI{] + call[name[param]][constant[disply_value]]] + constant[}{]] + call[name[param]][constant[siunitx]]] + constant[}$]]]]
call[name[rows].append, parameter[name[row]]]
return[name[rows]] | keyword[def] identifier[parameter_table] ( identifier[parameters] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[parameters] , identifier[list] ): identifier[parameters] =[ identifier[parameters] ]
identifier[rows] =[]
keyword[for] identifier[param] keyword[in] identifier[parameters] :
identifier[row] =[]
identifier[row] . identifier[append] ( literal[string] + identifier[param] [ literal[string] ]+ literal[string] )
identifier[row] . identifier[append] ( literal[string] )
identifier[row] . identifier[append] ( literal[string] + identifier[param] [ literal[string] ]+ literal[string] + identifier[param] [ literal[string] ]+ literal[string] )
identifier[rows] . identifier[append] ( identifier[row] )
keyword[return] identifier[rows] | def parameter_table(parameters):
"""
Create
"""
if not isinstance(parameters, list):
parameters = [parameters] # depends on [control=['if'], data=[]]
rows = []
for param in parameters:
row = []
row.append('$' + param['tex_symbol'] + '$')
row.append('$=$')
row.append('$\\SI{' + param['disply_value'] + '}{' + param['siunitx'] + '}$')
rows.append(row) # depends on [control=['for'], data=['param']]
return rows |
def pairs_to_dict(response, decode_keys=False):
    "Create a dict given a list of key/value pairs"
    if response is None:
        return {}
    if not decode_keys:
        # Pull twice from a single iterator to pair consecutive elements;
        # this avoids materializing the two slices.
        stream = iter(response)
        return dict(izip(stream, stream))
    # Keys sit at even offsets and are decoded to native strings; the
    # values follow at the odd offsets.
    keys = imap(nativestr, response[::2])
    return dict(izip(keys, response[1::2]))
constant[Create a dict given a list of key/value pairs]
if compare[name[response] is constant[None]] begin[:]
return[dictionary[[], []]]
if name[decode_keys] begin[:]
return[call[name[dict], parameter[call[name[izip], parameter[call[name[imap], parameter[name[nativestr], call[name[response]][<ast.Slice object at 0x7da18f00ded0>]]], call[name[response]][<ast.Slice object at 0x7da18f00e620>]]]]]] | keyword[def] identifier[pairs_to_dict] ( identifier[response] , identifier[decode_keys] = keyword[False] ):
literal[string]
keyword[if] identifier[response] keyword[is] keyword[None] :
keyword[return] {}
keyword[if] identifier[decode_keys] :
keyword[return] identifier[dict] ( identifier[izip] ( identifier[imap] ( identifier[nativestr] , identifier[response] [:: literal[int] ]), identifier[response] [ literal[int] :: literal[int] ]))
keyword[else] :
identifier[it] = identifier[iter] ( identifier[response] )
keyword[return] identifier[dict] ( identifier[izip] ( identifier[it] , identifier[it] )) | def pairs_to_dict(response, decode_keys=False):
"""Create a dict given a list of key/value pairs"""
if response is None:
return {} # depends on [control=['if'], data=[]]
if decode_keys:
# the iter form is faster, but I don't know how to make that work
# with a nativestr() map
return dict(izip(imap(nativestr, response[::2]), response[1::2])) # depends on [control=['if'], data=[]]
else:
it = iter(response)
return dict(izip(it, it)) |
def register_model(self, storagemodel:object):
    """ set up an Tableservice for an StorageTableModel in your Azure Storage Account
    Will create the Table if not exist!
    required Parameter is:
    - storagemodel: StorageTableModel(Object)
    Raises:
    - NameConventionError: if the table name collides with an already
      registered model or violates Azure Storage naming rules.
    """
    modeldefinition = self.getmodeldefinition(storagemodel, False)
    if modeldefinition is None:
        # Reject registration when the table name is already taken by another model.
        if [model for model in self._modeldefinitions if model['tablename'] == storagemodel._tablename]:
            raise NameConventionError(storagemodel._tablename)
        # The table name must satisfy Azure Storage naming conventions.
        if not test_azurestorage_nameconventions(storagemodel._tablename, 'StorageTableModel'):
            raise NameConventionError(storagemodel._tablename)
        # Register the model together with its own TableService client.
        modeldefinition = {
            'modelname': storagemodel.__class__.__name__,
            'tablename': storagemodel._tablename,
            'encrypt': storagemodel._encrypt,
            'tableservice': self._account.create_table_service()
        }
        if modeldefinition['encrypt']:
            # Configure client-side encryption.
            # Create the KEK (key encryption key) used for encryption.
            # KeyWrapper is the provided sample implementation, but the user may use their own object as long as it implements the interface above.
            kek = KeyWrapper(self._key_identifier, self._secret_key)  # Key identifier
            # Create the key resolver used for decryption.
            # KeyResolver is the provided sample implementation, but the user may use whatever implementation they choose so long as the function set on the service object behaves appropriately.
            key_resolver = KeyResolver()
            key_resolver.put_key(kek)
            # Create the EncryptionResolver function that decides which entity
            # properties get en-/decrypted.
            encryptionresolver = self.__encryptionresolver__(modeldefinition['encrypt'])
            # Set the KEK and both resolvers on the service object.
            modeldefinition['tableservice'].key_encryption_key = kek
            # BUG FIX: the attribute was previously misspelled
            # 'key_resolver_funcion', so the Azure SDK never saw the resolver
            # and decryption could not work.
            modeldefinition['tableservice'].key_resolver_function = key_resolver.resolve_key
            modeldefinition['tableservice'].encryption_resolver_function = encryptionresolver
        self.__createtable__(modeldefinition)
        self._modeldefinitions.append(modeldefinition)
        log.info('model {} registered successfully. Models are {!s}.'.format(modeldefinition['modelname'], [model['modelname'] for model in self._modeldefinitions]))
    else:
        log.info('model {} already registered. Models are {!s}.'.format(modeldefinition['modelname'], [model['modelname'] for model in self._modeldefinitions]))
constant[ set up an Tableservice for an StorageTableModel in your Azure Storage Account
Will create the Table if not exist!
required Parameter is:
- storagemodel: StorageTableModel(Object)
]
variable[modeldefinition] assign[=] call[name[self].getmodeldefinition, parameter[name[storagemodel], constant[False]]]
if compare[name[modeldefinition] is constant[None]] begin[:]
constant[ test if queuename already exists ]
if <ast.ListComp object at 0x7da1b0ae03a0> begin[:]
<ast.Raise object at 0x7da1b0ae29b0>
constant[ test if queuename fits to azure naming rules ]
if <ast.UnaryOp object at 0x7da1b0ae3580> begin[:]
<ast.Raise object at 0x7da1b0ae0b20>
constant[ now register model ]
variable[modeldefinition] assign[=] dictionary[[<ast.Constant object at 0x7da1b0ae3100>, <ast.Constant object at 0x7da1b0ae1840>, <ast.Constant object at 0x7da1b0ae07f0>, <ast.Constant object at 0x7da1b0ae0610>], [<ast.Attribute object at 0x7da1b0ae1330>, <ast.Attribute object at 0x7da18dc99960>, <ast.Attribute object at 0x7da18dc9aaa0>, <ast.Call object at 0x7da18dc9a440>]]
if call[name[modeldefinition]][constant[encrypt]] begin[:]
constant[ encrypt init ]
variable[kek] assign[=] call[name[KeyWrapper], parameter[name[self]._key_identifier, name[self]._secret_key]]
variable[key_resolver] assign[=] call[name[KeyResolver], parameter[]]
call[name[key_resolver].put_key, parameter[name[kek]]]
variable[encryptionresolver] assign[=] call[name[self].__encryptionresolver__, parameter[call[name[modeldefinition]][constant[encrypt]]]]
call[name[modeldefinition]][constant[tableservice]].key_encryption_key assign[=] name[kek]
call[name[modeldefinition]][constant[tableservice]].key_resolver_funcion assign[=] name[key_resolver].resolve_key
call[name[modeldefinition]][constant[tableservice]].encryption_resolver_function assign[=] name[encryptionresolver]
pass
call[name[self].__createtable__, parameter[name[modeldefinition]]]
call[name[self]._modeldefinitions.append, parameter[name[modeldefinition]]]
call[name[log].info, parameter[call[constant[model {} registered successfully. Models are {!s}.].format, parameter[call[name[modeldefinition]][constant[modelname]], <ast.ListComp object at 0x7da18dc9a0e0>]]]] | keyword[def] identifier[register_model] ( identifier[self] , identifier[storagemodel] : identifier[object] ):
literal[string]
identifier[modeldefinition] = identifier[self] . identifier[getmodeldefinition] ( identifier[storagemodel] , keyword[False] )
keyword[if] identifier[modeldefinition] keyword[is] keyword[None] :
literal[string]
keyword[if] [ identifier[model] keyword[for] identifier[model] keyword[in] identifier[self] . identifier[_modeldefinitions] keyword[if] identifier[model] [ literal[string] ]== identifier[storagemodel] . identifier[_tablename] ]:
keyword[raise] identifier[NameConventionError] ( identifier[storagemodel] . identifier[_tablename] )
literal[string]
keyword[if] keyword[not] identifier[test_azurestorage_nameconventions] ( identifier[storagemodel] . identifier[_tablename] , literal[string] ):
keyword[raise] identifier[NameConventionError] ( identifier[storagemodel] . identifier[_tablename] )
literal[string]
identifier[modeldefinition] ={
literal[string] : identifier[storagemodel] . identifier[__class__] . identifier[__name__] ,
literal[string] : identifier[storagemodel] . identifier[_tablename] ,
literal[string] : identifier[storagemodel] . identifier[_encrypt] ,
literal[string] : identifier[self] . identifier[_account] . identifier[create_table_service] ()
}
keyword[if] identifier[modeldefinition] [ literal[string] ]:
literal[string]
identifier[kek] = identifier[KeyWrapper] ( identifier[self] . identifier[_key_identifier] , identifier[self] . identifier[_secret_key] )
identifier[key_resolver] = identifier[KeyResolver] ()
identifier[key_resolver] . identifier[put_key] ( identifier[kek] )
identifier[encryptionresolver] = identifier[self] . identifier[__encryptionresolver__] ( identifier[modeldefinition] [ literal[string] ])
identifier[modeldefinition] [ literal[string] ]. identifier[key_encryption_key] = identifier[kek]
identifier[modeldefinition] [ literal[string] ]. identifier[key_resolver_funcion] = identifier[key_resolver] . identifier[resolve_key]
identifier[modeldefinition] [ literal[string] ]. identifier[encryption_resolver_function] = identifier[encryptionresolver]
keyword[pass]
identifier[self] . identifier[__createtable__] ( identifier[modeldefinition] )
identifier[self] . identifier[_modeldefinitions] . identifier[append] ( identifier[modeldefinition] )
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[modeldefinition] [ literal[string] ],[ identifier[model] [ literal[string] ] keyword[for] identifier[model] keyword[in] identifier[self] . identifier[_modeldefinitions] ]))
keyword[else] :
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[modeldefinition] [ literal[string] ],[ identifier[model] [ literal[string] ] keyword[for] identifier[model] keyword[in] identifier[self] . identifier[_modeldefinitions] ])) | def register_model(self, storagemodel: object):
""" set up an Tableservice for an StorageTableModel in your Azure Storage Account
Will create the Table if not exist!
required Parameter is:
- storagemodel: StorageTableModel(Object)
"""
modeldefinition = self.getmodeldefinition(storagemodel, False)
if modeldefinition is None:
' test if queuename already exists '
if [model for model in self._modeldefinitions if model['tablename'] == storagemodel._tablename]:
raise NameConventionError(storagemodel._tablename) # depends on [control=['if'], data=[]]
' test if queuename fits to azure naming rules '
if not test_azurestorage_nameconventions(storagemodel._tablename, 'StorageTableModel'):
raise NameConventionError(storagemodel._tablename) # depends on [control=['if'], data=[]]
' now register model '
modeldefinition = {'modelname': storagemodel.__class__.__name__, 'tablename': storagemodel._tablename, 'encrypt': storagemodel._encrypt, 'tableservice': self._account.create_table_service()}
if modeldefinition['encrypt']:
' encrypt init '
# Create the KEK used for encryption.
# KeyWrapper is the provided sample implementation, but the user may use their own object as long as it implements the interface above.
kek = KeyWrapper(self._key_identifier, self._secret_key) # Key identifier
# Create the key resolver used for decryption.
# KeyResolver is the provided sample implementation, but the user may use whatever implementation they choose so long as the function set on the service object behaves appropriately.
key_resolver = KeyResolver()
key_resolver.put_key(kek)
# Create the EncryptionResolver Function to determine Properties to en/decrypt
encryptionresolver = self.__encryptionresolver__(modeldefinition['encrypt'])
# Set the require Encryption, KEK and key resolver on the service object.
modeldefinition['tableservice'].key_encryption_key = kek
modeldefinition['tableservice'].key_resolver_funcion = key_resolver.resolve_key
modeldefinition['tableservice'].encryption_resolver_function = encryptionresolver
pass # depends on [control=['if'], data=[]]
self.__createtable__(modeldefinition)
self._modeldefinitions.append(modeldefinition)
log.info('model {} registered successfully. Models are {!s}.'.format(modeldefinition['modelname'], [model['modelname'] for model in self._modeldefinitions])) # depends on [control=['if'], data=['modeldefinition']]
else:
log.info('model {} already registered. Models are {!s}.'.format(modeldefinition['modelname'], [model['modelname'] for model in self._modeldefinitions])) |
def idle_task(self):
    '''called in idle time'''
    try:
        # Non-blocking read of up to 1024 bytes from the DGPS port.
        data = self.port.recv(1024)
    except socket.error as e:
        # "Would block" simply means no data yet; anything else is real.
        if e.errno not in (errno.EAGAIN, errno.EWOULDBLOCK):
            raise
        return
    try:
        self.send_rtcm_msg(data)
    except Exception as e:
        print("DGPS: GPS Inject Failed:", e)
constant[called in idle time]
<ast.Try object at 0x7da20c76c970>
<ast.Try object at 0x7da20c76c520> | keyword[def] identifier[idle_task] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[data] = identifier[self] . identifier[port] . identifier[recv] ( literal[int] )
keyword[except] identifier[socket] . identifier[error] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[errno] keyword[in] [ identifier[errno] . identifier[EAGAIN] , identifier[errno] . identifier[EWOULDBLOCK] ]:
keyword[return]
keyword[raise]
keyword[try] :
identifier[self] . identifier[send_rtcm_msg] ( identifier[data] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[print] ( literal[string] , identifier[e] ) | def idle_task(self):
"""called in idle time"""
try:
data = self.port.recv(1024) # Attempt to read up to 1024 bytes. # depends on [control=['try'], data=[]]
except socket.error as e:
if e.errno in [errno.EAGAIN, errno.EWOULDBLOCK]:
return # depends on [control=['if'], data=[]]
raise # depends on [control=['except'], data=['e']]
try:
self.send_rtcm_msg(data) # depends on [control=['try'], data=[]]
except Exception as e:
print('DGPS: GPS Inject Failed:', e) # depends on [control=['except'], data=['e']] |
def _get_image_tensors(self)->([Tensor], [Tensor], [Tensor]):
    "Gets list of image tensors from lists of Image objects, as a tuple of original, generated and real(target) images."
    # One comprehension per column keeps the three result lists aligned
    # with self.image_sets by construction.
    originals = [image_set.orig.px for image_set in self.image_sets]
    generated = [image_set.gen.px for image_set in self.image_sets]
    targets = [image_set.real.px for image_set in self.image_sets]
    return originals, generated, targets
constant[Gets list of image tensors from lists of Image objects, as a tuple of original, generated and real(target) images.]
<ast.Tuple object at 0x7da18dc07940> assign[=] tuple[[<ast.List object at 0x7da1b1ec6140>, <ast.List object at 0x7da1b1ec5ed0>, <ast.List object at 0x7da1b1ec5c90>]]
for taget[name[image_set]] in starred[name[self].image_sets] begin[:]
call[name[orig_images].append, parameter[name[image_set].orig.px]]
call[name[gen_images].append, parameter[name[image_set].gen.px]]
call[name[real_images].append, parameter[name[image_set].real.px]]
return[tuple[[<ast.Name object at 0x7da1b1ec7700>, <ast.Name object at 0x7da1b1ec6260>, <ast.Name object at 0x7da1b1ec7d00>]]] | keyword[def] identifier[_get_image_tensors] ( identifier[self] )->([ identifier[Tensor] ],[ identifier[Tensor] ],[ identifier[Tensor] ]):
literal[string]
identifier[orig_images] , identifier[gen_images] , identifier[real_images] =[],[],[]
keyword[for] identifier[image_set] keyword[in] identifier[self] . identifier[image_sets] :
identifier[orig_images] . identifier[append] ( identifier[image_set] . identifier[orig] . identifier[px] )
identifier[gen_images] . identifier[append] ( identifier[image_set] . identifier[gen] . identifier[px] )
identifier[real_images] . identifier[append] ( identifier[image_set] . identifier[real] . identifier[px] )
keyword[return] identifier[orig_images] , identifier[gen_images] , identifier[real_images] | def _get_image_tensors(self) -> ([Tensor], [Tensor], [Tensor]):
"""Gets list of image tensors from lists of Image objects, as a tuple of original, generated and real(target) images."""
(orig_images, gen_images, real_images) = ([], [], [])
for image_set in self.image_sets:
orig_images.append(image_set.orig.px)
gen_images.append(image_set.gen.px)
real_images.append(image_set.real.px) # depends on [control=['for'], data=['image_set']]
return (orig_images, gen_images, real_images) |
def is_android_api(self):
    """
    Returns True if the method seems to be an Android API method.
    This method might be not very precise unless an list of known API methods
    is given.
    :return: boolean
    """
    # Method must be external to be an API
    if not self.is_external():
        return False
    if self.apilist:
        # FIXME: This will not work... need to introduce a name for lookup (like EncodedMethod.__str__ but without
        # the offset! Such a name is also needed for the lookup in permissions
        return self.method.get_name() in self.apilist
    # Packages found at https://developer.android.com/reference/packages.html
    api_prefixes = ("Landroid/", "Lcom/android/internal/util", "Ldalvik/",
                    "Ljava/", "Ljavax/", "Lorg/apache/", "Lorg/json/",
                    "Lorg/w3c/dom/", "Lorg/xml/sax", "Lorg/xmlpull/v1/",
                    "Ljunit/")
    # str.startswith accepts a tuple of prefixes, replacing the manual loop.
    return self.method.get_class_name().startswith(api_prefixes)
constant[
Returns True if the method seems to be an Android API method.
This method might be not very precise unless an list of known API methods
is given.
:return: boolean
]
if <ast.UnaryOp object at 0x7da20c7ca410> begin[:]
return[constant[False]]
variable[api_candidates] assign[=] list[[<ast.Constant object at 0x7da20c7caf50>, <ast.Constant object at 0x7da20c7cbf10>, <ast.Constant object at 0x7da20c7c8b80>, <ast.Constant object at 0x7da20c7c8c70>, <ast.Constant object at 0x7da20c7c8c10>, <ast.Constant object at 0x7da20c7c8a60>, <ast.Constant object at 0x7da20c7c9d80>, <ast.Constant object at 0x7da20c7c9f30>, <ast.Constant object at 0x7da20c7ca920>, <ast.Constant object at 0x7da20c7ca2c0>, <ast.Constant object at 0x7da20c7ca4a0>]]
if name[self].apilist begin[:]
return[compare[call[name[self].method.get_name, parameter[]] in name[self].apilist]]
return[constant[False]] | keyword[def] identifier[is_android_api] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[is_external] ():
keyword[return] keyword[False]
identifier[api_candidates] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[if] identifier[self] . identifier[apilist] :
keyword[return] identifier[self] . identifier[method] . identifier[get_name] () keyword[in] identifier[self] . identifier[apilist]
keyword[else] :
keyword[for] identifier[candidate] keyword[in] identifier[api_candidates] :
keyword[if] identifier[self] . identifier[method] . identifier[get_class_name] (). identifier[startswith] ( identifier[candidate] ):
keyword[return] keyword[True]
keyword[return] keyword[False] | def is_android_api(self):
"""
Returns True if the method seems to be an Android API method.
This method might be not very precise unless an list of known API methods
is given.
:return: boolean
"""
if not self.is_external():
# Method must be external to be an API
return False # depends on [control=['if'], data=[]]
# Packages found at https://developer.android.com/reference/packages.html
api_candidates = ['Landroid/', 'Lcom/android/internal/util', 'Ldalvik/', 'Ljava/', 'Ljavax/', 'Lorg/apache/', 'Lorg/json/', 'Lorg/w3c/dom/', 'Lorg/xml/sax', 'Lorg/xmlpull/v1/', 'Ljunit/']
if self.apilist:
# FIXME: This will not work... need to introduce a name for lookup (like EncodedMethod.__str__ but without
# the offset! Such a name is also needed for the lookup in permissions
return self.method.get_name() in self.apilist # depends on [control=['if'], data=[]]
else:
for candidate in api_candidates:
if self.method.get_class_name().startswith(candidate):
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['candidate']]
return False |
def dataset_generator(filepath,
                      dataset,
                      chunk_size=1,
                      start_idx=None,
                      end_idx=None):
  """Generate example dicts."""
  encoder = dna_encoder.DNAEncoder(chunk_size=chunk_size)
  with h5py.File(filepath, "r") as h5_file:
    # Resolve the three HDF5 datasets belonging to this split.
    inp_data = h5_file["%s_in" % dataset]
    mask_data = h5_file["%s_na" % dataset]
    out_data = h5_file["%s_out" % dataset]
    # All three must describe the same number of examples.
    assert inp_data.len() == mask_data.len() == out_data.len()
    start = 0 if start_idx is None else start_idx
    stop = inp_data.len() if end_idx is None else end_idx
    for i in range(start, stop):
      if i % 100 == 0:
        print("Generating example %d for %s" % (i, dataset))
      ex_dict = to_example_dict(encoder, inp_data[i], mask_data[i],
                                out_data[i])
      # Original data has one output for every 128 input bases. Ensure that the
      # ratio has been maintained given the chunk size and removing EOS.
      assert (len(ex_dict["inputs"]) - 1) == (
          (128 // chunk_size) * ex_dict["targets_shape"][0])
      yield ex_dict
constant[Generate example dicts.]
variable[encoder] assign[=] call[name[dna_encoder].DNAEncoder, parameter[]]
with call[name[h5py].File, parameter[name[filepath], constant[r]]] begin[:]
variable[src_keys] assign[=] <ast.ListComp object at 0x7da1b2004c70>
variable[src_values] assign[=] <ast.ListComp object at 0x7da1b2004910>
<ast.Tuple object at 0x7da1b2004a90> assign[=] name[src_values]
assert[compare[call[name[len], parameter[call[name[set], parameter[<ast.ListComp object at 0x7da1b20b2f50>]]]] equal[==] constant[1]]]
if compare[name[start_idx] is constant[None]] begin[:]
variable[start_idx] assign[=] constant[0]
if compare[name[end_idx] is constant[None]] begin[:]
variable[end_idx] assign[=] call[name[inp_data].len, parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[name[start_idx], name[end_idx]]]] begin[:]
if compare[binary_operation[name[i] <ast.Mod object at 0x7da2590d6920> constant[100]] equal[==] constant[0]] begin[:]
call[name[print], parameter[binary_operation[constant[Generating example %d for %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b203d9c0>, <ast.Name object at 0x7da1b203c6d0>]]]]]
<ast.Tuple object at 0x7da1b203fc10> assign[=] tuple[[<ast.Subscript object at 0x7da1b203cd00>, <ast.Subscript object at 0x7da1b203de70>, <ast.Subscript object at 0x7da1b203f6d0>]]
variable[ex_dict] assign[=] call[name[to_example_dict], parameter[name[encoder], name[inputs], name[mask], name[outputs]]]
assert[compare[binary_operation[call[name[len], parameter[call[name[ex_dict]][constant[inputs]]]] - constant[1]] equal[==] binary_operation[binary_operation[constant[128] <ast.FloorDiv object at 0x7da2590d6bc0> name[chunk_size]] * call[call[name[ex_dict]][constant[targets_shape]]][constant[0]]]]]
<ast.Yield object at 0x7da1b203fd90> | keyword[def] identifier[dataset_generator] ( identifier[filepath] ,
identifier[dataset] ,
identifier[chunk_size] = literal[int] ,
identifier[start_idx] = keyword[None] ,
identifier[end_idx] = keyword[None] ):
literal[string]
identifier[encoder] = identifier[dna_encoder] . identifier[DNAEncoder] ( identifier[chunk_size] = identifier[chunk_size] )
keyword[with] identifier[h5py] . identifier[File] ( identifier[filepath] , literal[string] ) keyword[as] identifier[h5_file] :
identifier[src_keys] =[ identifier[s] % identifier[dataset] keyword[for] identifier[s] keyword[in] [ literal[string] , literal[string] , literal[string] ]]
identifier[src_values] =[ identifier[h5_file] [ identifier[k] ] keyword[for] identifier[k] keyword[in] identifier[src_keys] ]
identifier[inp_data] , identifier[mask_data] , identifier[out_data] = identifier[src_values]
keyword[assert] identifier[len] ( identifier[set] ([ identifier[v] . identifier[len] () keyword[for] identifier[v] keyword[in] identifier[src_values] ]))== literal[int]
keyword[if] identifier[start_idx] keyword[is] keyword[None] :
identifier[start_idx] = literal[int]
keyword[if] identifier[end_idx] keyword[is] keyword[None] :
identifier[end_idx] = identifier[inp_data] . identifier[len] ()
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[start_idx] , identifier[end_idx] ):
keyword[if] identifier[i] % literal[int] == literal[int] :
identifier[print] ( literal[string] %( identifier[i] , identifier[dataset] ))
identifier[inputs] , identifier[mask] , identifier[outputs] = identifier[inp_data] [ identifier[i] ], identifier[mask_data] [ identifier[i] ], identifier[out_data] [ identifier[i] ]
identifier[ex_dict] = identifier[to_example_dict] ( identifier[encoder] , identifier[inputs] , identifier[mask] , identifier[outputs] )
keyword[assert] ( identifier[len] ( identifier[ex_dict] [ literal[string] ])- literal[int] )==((
literal[int] // identifier[chunk_size] )* identifier[ex_dict] [ literal[string] ][ literal[int] ])
keyword[yield] identifier[ex_dict] | def dataset_generator(filepath, dataset, chunk_size=1, start_idx=None, end_idx=None):
"""Generate example dicts."""
encoder = dna_encoder.DNAEncoder(chunk_size=chunk_size)
with h5py.File(filepath, 'r') as h5_file:
# Get input keys from h5_file
src_keys = [s % dataset for s in ['%s_in', '%s_na', '%s_out']]
src_values = [h5_file[k] for k in src_keys]
(inp_data, mask_data, out_data) = src_values
assert len(set([v.len() for v in src_values])) == 1
if start_idx is None:
start_idx = 0 # depends on [control=['if'], data=['start_idx']]
if end_idx is None:
end_idx = inp_data.len() # depends on [control=['if'], data=['end_idx']]
for i in range(start_idx, end_idx):
if i % 100 == 0:
print('Generating example %d for %s' % (i, dataset)) # depends on [control=['if'], data=[]]
(inputs, mask, outputs) = (inp_data[i], mask_data[i], out_data[i])
ex_dict = to_example_dict(encoder, inputs, mask, outputs)
# Original data has one output for every 128 input bases. Ensure that the
# ratio has been maintained given the chunk size and removing EOS.
assert len(ex_dict['inputs']) - 1 == 128 // chunk_size * ex_dict['targets_shape'][0]
yield ex_dict # depends on [control=['for'], data=['i']] # depends on [control=['with'], data=['h5_file']] |
def punsubscribe(self, pattern, *patterns):
    """Unsubscribe from specific patterns.
    Arguments can be instances of :class:`~aioredis.Channel`.
    """
    # Forward directly to whichever backend (pool or single connection)
    # is currently active.
    return self._pool_or_conn.execute_pubsub(b'PUNSUBSCRIBE',
                                             pattern, *patterns)
constant[Unsubscribe from specific patterns.
Arguments can be instances of :class:`~aioredis.Channel`.
]
variable[conn] assign[=] name[self]._pool_or_conn
return[call[name[conn].execute_pubsub, parameter[constant[b'PUNSUBSCRIBE'], name[pattern], <ast.Starred object at 0x7da18f813520>]]] | keyword[def] identifier[punsubscribe] ( identifier[self] , identifier[pattern] ,* identifier[patterns] ):
literal[string]
identifier[conn] = identifier[self] . identifier[_pool_or_conn]
keyword[return] identifier[conn] . identifier[execute_pubsub] ( literal[string] , identifier[pattern] ,* identifier[patterns] ) | def punsubscribe(self, pattern, *patterns):
"""Unsubscribe from specific patterns.
Arguments can be instances of :class:`~aioredis.Channel`.
"""
conn = self._pool_or_conn
return conn.execute_pubsub(b'PUNSUBSCRIBE', pattern, *patterns) |
def attrget(self, groupname, attrname, rownr):
    """Get the value of an attribute in the given row in a group."""
    # Thin wrapper: delegate straight to the low-level implementation.
    value = self._attrget(groupname, attrname, rownr)
    return value
constant[Get the value of an attribute in the given row in a group.]
return[call[name[self]._attrget, parameter[name[groupname], name[attrname], name[rownr]]]] | keyword[def] identifier[attrget] ( identifier[self] , identifier[groupname] , identifier[attrname] , identifier[rownr] ):
literal[string]
keyword[return] identifier[self] . identifier[_attrget] ( identifier[groupname] , identifier[attrname] , identifier[rownr] ) | def attrget(self, groupname, attrname, rownr):
"""Get the value of an attribute in the given row in a group."""
return self._attrget(groupname, attrname, rownr) |
def read_entry(self):
    """get the next value from the array, and set internal iterator so next call will be next entry
    :return: The next GenomicRange entry
    :rtype: GenomicRange
    """
    # Exhausted: the cursor has moved past the end of the array.
    if self.curr_ind >= len(self.bedarray):
        return None
    entry = self.bedarray[self.curr_ind]
    self.curr_ind += 1
    return entry
constant[get the next value from the array, and set internal iterator so next call will be next entry
:return: The next GenomicRange entry
:rtype: GenomicRange
]
if compare[call[name[len], parameter[name[self].bedarray]] less_or_equal[<=] name[self].curr_ind] begin[:]
return[constant[None]]
variable[val] assign[=] call[name[self].bedarray][name[self].curr_ind]
<ast.AugAssign object at 0x7da20c6e7a60>
return[name[val]] | keyword[def] identifier[read_entry] ( identifier[self] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[bedarray] )<= identifier[self] . identifier[curr_ind] : keyword[return] keyword[None]
identifier[val] = identifier[self] . identifier[bedarray] [ identifier[self] . identifier[curr_ind] ]
identifier[self] . identifier[curr_ind] += literal[int]
keyword[return] identifier[val] | def read_entry(self):
"""get the next value from the array, and set internal iterator so next call will be next entry
:return: The next GenomicRange entry
:rtype: GenomicRange
"""
if len(self.bedarray) <= self.curr_ind:
return None # depends on [control=['if'], data=[]]
val = self.bedarray[self.curr_ind]
self.curr_ind += 1
return val |
def calculate_P(self, T, P, method):
    r'''Method to calculate pressure-dependent liquid viscosity at
    temperature `T` and pressure `P` with a given method.
    This method has no exception handling; see `TP_dependent_property`
    for that.
    Parameters
    ----------
    T : float
        Temperature at which to calculate viscosity, [K]
    P : float
        Pressure at which to calculate viscosity, [K]
    method : str
        Name of the method to use
    Returns
    -------
    mu : float
        Viscosity of the liquid at T and P, [Pa*S]
    '''
    if method == LUCAS:
        # Lucas correction scales the saturation viscosity to pressure P.
        mu_sat = self.T_dependent_property(T)
        # Psat may be a correlation (callable) or a fixed value.
        Psat = self.Psat(T) if hasattr(self.Psat, '__call__') else self.Psat
        mu = Lucas(T, P, self.Tc, self.Pc, self.omega, Psat, mu_sat)
    elif method == COOLPROP:
        mu = PropsSI('V', 'T', T, 'P', P, self.CASRN)
    elif method in self.tabular_data:
        mu = self.interpolate_P(T, P, method)
    return mu
constant[Method to calculate pressure-dependent liquid viscosity at
temperature `T` and pressure `P` with a given method.
This method has no exception handling; see `TP_dependent_property`
for that.
Parameters
----------
T : float
Temperature at which to calculate viscosity, [K]
P : float
Pressure at which to calculate viscosity, [K]
method : str
Name of the method to use
Returns
-------
mu : float
Viscosity of the liquid at T and P, [Pa*S]
]
if compare[name[method] equal[==] name[LUCAS]] begin[:]
variable[mu] assign[=] call[name[self].T_dependent_property, parameter[name[T]]]
variable[Psat] assign[=] <ast.IfExp object at 0x7da1b021feb0>
variable[mu] assign[=] call[name[Lucas], parameter[name[T], name[P], name[self].Tc, name[self].Pc, name[self].omega, name[Psat], name[mu]]]
return[name[mu]] | keyword[def] identifier[calculate_P] ( identifier[self] , identifier[T] , identifier[P] , identifier[method] ):
literal[string]
keyword[if] identifier[method] == identifier[LUCAS] :
identifier[mu] = identifier[self] . identifier[T_dependent_property] ( identifier[T] )
identifier[Psat] = identifier[self] . identifier[Psat] ( identifier[T] ) keyword[if] identifier[hasattr] ( identifier[self] . identifier[Psat] , literal[string] ) keyword[else] identifier[self] . identifier[Psat]
identifier[mu] = identifier[Lucas] ( identifier[T] , identifier[P] , identifier[self] . identifier[Tc] , identifier[self] . identifier[Pc] , identifier[self] . identifier[omega] , identifier[Psat] , identifier[mu] )
keyword[elif] identifier[method] == identifier[COOLPROP] :
identifier[mu] = identifier[PropsSI] ( literal[string] , literal[string] , identifier[T] , literal[string] , identifier[P] , identifier[self] . identifier[CASRN] )
keyword[elif] identifier[method] keyword[in] identifier[self] . identifier[tabular_data] :
identifier[mu] = identifier[self] . identifier[interpolate_P] ( identifier[T] , identifier[P] , identifier[method] )
keyword[return] identifier[mu] | def calculate_P(self, T, P, method):
"""Method to calculate pressure-dependent liquid viscosity at
temperature `T` and pressure `P` with a given method.
This method has no exception handling; see `TP_dependent_property`
for that.
Parameters
----------
T : float
Temperature at which to calculate viscosity, [K]
P : float
Pressure at which to calculate viscosity, [K]
method : str
Name of the method to use
Returns
-------
mu : float
Viscosity of the liquid at T and P, [Pa*S]
"""
if method == LUCAS:
mu = self.T_dependent_property(T)
Psat = self.Psat(T) if hasattr(self.Psat, '__call__') else self.Psat
mu = Lucas(T, P, self.Tc, self.Pc, self.omega, Psat, mu) # depends on [control=['if'], data=[]]
elif method == COOLPROP:
mu = PropsSI('V', 'T', T, 'P', P, self.CASRN) # depends on [control=['if'], data=[]]
elif method in self.tabular_data:
mu = self.interpolate_P(T, P, method) # depends on [control=['if'], data=['method']]
return mu |
def unwrap_aliases(data_type):
    """
    Convenience method to unwrap all Alias(es) from around a DataType.
    Args:
        data_type (DataType): The target to unwrap.
    Return:
        Tuple[DataType, bool]: The underlying data type and a bool indicating
            whether the input type had at least one alias layer.
    """
    had_alias_layer = False
    # Peel alias wrappers one at a time until a concrete type remains.
    while is_alias(data_type):
        data_type = data_type.data_type
        had_alias_layer = True
    return data_type, had_alias_layer
constant[
Convenience method to unwrap all Alias(es) from around a DataType.
Args:
data_type (DataType): The target to unwrap.
Return:
Tuple[DataType, bool]: The underlying data type and a bool indicating
whether the input type had at least one alias layer.
]
variable[unwrapped_alias] assign[=] constant[False]
while call[name[is_alias], parameter[name[data_type]]] begin[:]
variable[unwrapped_alias] assign[=] constant[True]
variable[data_type] assign[=] name[data_type].data_type
return[tuple[[<ast.Name object at 0x7da2044c1570>, <ast.Name object at 0x7da2044c1420>]]] | keyword[def] identifier[unwrap_aliases] ( identifier[data_type] ):
literal[string]
identifier[unwrapped_alias] = keyword[False]
keyword[while] identifier[is_alias] ( identifier[data_type] ):
identifier[unwrapped_alias] = keyword[True]
identifier[data_type] = identifier[data_type] . identifier[data_type]
keyword[return] identifier[data_type] , identifier[unwrapped_alias] | def unwrap_aliases(data_type):
"""
Convenience method to unwrap all Alias(es) from around a DataType.
Args:
data_type (DataType): The target to unwrap.
Return:
Tuple[DataType, bool]: The underlying data type and a bool indicating
whether the input type had at least one alias layer.
"""
unwrapped_alias = False
while is_alias(data_type):
unwrapped_alias = True
data_type = data_type.data_type # depends on [control=['while'], data=[]]
return (data_type, unwrapped_alias) |
def add_tags(self, tags, **kwargs):
    """
    :param tags: Tags to add to the object
    :type tags: list of strings

    Adds each of the specified tags to the remote object. Takes no
    action for tags that are already listed for the object.

    The tags are added to the copy of the object in the project
    associated with the handler.
    """
    # Tag changes are project-scoped: the request carries both the
    # handler's project (self._proj) and the tags to add. Extra keyword
    # arguments are forwarded untouched to the underlying API call.
    self._add_tags(self._dxid, {"project": self._proj, "tags": tags},
                   **kwargs)
constant[
:param tags: Tags to add to the object
:type tags: list of strings
Adds each of the specified tags to the remote object. Takes no
action for tags that are already listed for the object.
The tags are added to the copy of the object in the project
associated with the handler.
]
call[name[self]._add_tags, parameter[name[self]._dxid, dictionary[[<ast.Constant object at 0x7da204344d00>, <ast.Constant object at 0x7da2043469e0>], [<ast.Attribute object at 0x7da204346320>, <ast.Name object at 0x7da204346ce0>]]]] | keyword[def] identifier[add_tags] ( identifier[self] , identifier[tags] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[_add_tags] ( identifier[self] . identifier[_dxid] ,{ literal[string] : identifier[self] . identifier[_proj] , literal[string] : identifier[tags] },
** identifier[kwargs] ) | def add_tags(self, tags, **kwargs):
"""
:param tags: Tags to add to the object
:type tags: list of strings
Adds each of the specified tags to the remote object. Takes no
action for tags that are already listed for the object.
The tags are added to the copy of the object in the project
associated with the handler.
"""
self._add_tags(self._dxid, {'project': self._proj, 'tags': tags}, **kwargs) |
def encode(self, entity):
    """
    Encode this record field as its CWR string representation.

    :param entity: entity (or value) holding the field to encode
    :return: cwr string
    """
    # Expand first so the field named by self.name is present, then
    # format that single value.
    expanded = self.expand_entity(entity)
    return self.format(expanded[self.name])
constant[
Encode this
:param entity:
:return: cwr string
]
variable[entity] assign[=] call[name[self].expand_entity, parameter[name[entity]]]
variable[value] assign[=] call[name[entity]][name[self].name]
variable[result] assign[=] call[name[self].format, parameter[name[value]]]
return[name[result]] | keyword[def] identifier[encode] ( identifier[self] , identifier[entity] ):
literal[string]
identifier[entity] = identifier[self] . identifier[expand_entity] ( identifier[entity] )
identifier[value] = identifier[entity] [ identifier[self] . identifier[name] ]
identifier[result] = identifier[self] . identifier[format] ( identifier[value] )
keyword[return] identifier[result] | def encode(self, entity):
"""
Encode this
:param entity:
:return: cwr string
"""
entity = self.expand_entity(entity)
value = entity[self.name]
result = self.format(value)
return result |
def on_for_degrees(self, steering, speed, degrees, brake=True, block=True):
    """
    Rotate the motors according to the provided ``steering``.

    The distance each motor will travel follows the rules of :meth:`MoveTank.on_for_degrees`.
    """
    # Translate the single steering/speed pair into a per-motor speed
    # pair, then delegate to the tank implementation, wrapping the
    # already-computed native-unit values so they are not re-scaled.
    (left_speed, right_speed) = self.get_speed_steering(steering, speed)
    MoveTank.on_for_degrees(self, SpeedNativeUnits(left_speed), SpeedNativeUnits(right_speed), degrees, brake, block)
MoveTank.on_for_degrees(self, SpeedNativeUnits(left_speed), SpeedNativeUnits(right_speed), degrees, brake, block) | def function[on_for_degrees, parameter[self, steering, speed, degrees, brake, block]]:
constant[
Rotate the motors according to the provided ``steering``.
The distance each motor will travel follows the rules of :meth:`MoveTank.on_for_degrees`.
]
<ast.Tuple object at 0x7da2045668c0> assign[=] call[name[self].get_speed_steering, parameter[name[steering], name[speed]]]
call[name[MoveTank].on_for_degrees, parameter[name[self], call[name[SpeedNativeUnits], parameter[name[left_speed]]], call[name[SpeedNativeUnits], parameter[name[right_speed]]], name[degrees], name[brake], name[block]]] | keyword[def] identifier[on_for_degrees] ( identifier[self] , identifier[steering] , identifier[speed] , identifier[degrees] , identifier[brake] = keyword[True] , identifier[block] = keyword[True] ):
literal[string]
( identifier[left_speed] , identifier[right_speed] )= identifier[self] . identifier[get_speed_steering] ( identifier[steering] , identifier[speed] )
identifier[MoveTank] . identifier[on_for_degrees] ( identifier[self] , identifier[SpeedNativeUnits] ( identifier[left_speed] ), identifier[SpeedNativeUnits] ( identifier[right_speed] ), identifier[degrees] , identifier[brake] , identifier[block] ) | def on_for_degrees(self, steering, speed, degrees, brake=True, block=True):
"""
Rotate the motors according to the provided ``steering``.
The distance each motor will travel follows the rules of :meth:`MoveTank.on_for_degrees`.
"""
(left_speed, right_speed) = self.get_speed_steering(steering, speed)
MoveTank.on_for_degrees(self, SpeedNativeUnits(left_speed), SpeedNativeUnits(right_speed), degrees, brake, block) |
def quick_search(self, request, **kw):
    '''Run a quick search with the specified request and return the items.

    :param request: see :ref:`api-search-request`
    :param **kw: See Options below
    :returns: :py:class:`planet.api.models.Items`
    :raises planet.api.exceptions.APIException: On API error.

    :Options:

    * page_size (int): Size of response pages
    * sort (string): Sorting order in the form `field (asc|desc)`
    '''
    body = json.dumps(request)
    params = self._params(kw)
    # Build the POST request explicitly, then dispatch and unwrap the body.
    search_request = models.Request(
        self._url('data/v1/quick-search'), self.auth,
        params=params, body_type=models.Items,
        data=body, method='POST')
    return self.dispatcher.response(search_request).get_body()
constant[Execute a quick search with the specified request.
:param request: see :ref:`api-search-request`
:param **kw: See Options below
:returns: :py:class:`planet.api.models.Items`
:raises planet.api.exceptions.APIException: On API error.
:Options:
* page_size (int): Size of response pages
* sort (string): Sorting order in the form `field (asc|desc)`
]
variable[body] assign[=] call[name[json].dumps, parameter[name[request]]]
variable[params] assign[=] call[name[self]._params, parameter[name[kw]]]
return[call[call[name[self].dispatcher.response, parameter[call[name[models].Request, parameter[call[name[self]._url, parameter[constant[data/v1/quick-search]]], name[self].auth]]]].get_body, parameter[]]] | keyword[def] identifier[quick_search] ( identifier[self] , identifier[request] ,** identifier[kw] ):
literal[string]
identifier[body] = identifier[json] . identifier[dumps] ( identifier[request] )
identifier[params] = identifier[self] . identifier[_params] ( identifier[kw] )
keyword[return] identifier[self] . identifier[dispatcher] . identifier[response] ( identifier[models] . identifier[Request] (
identifier[self] . identifier[_url] ( literal[string] ), identifier[self] . identifier[auth] , identifier[params] = identifier[params] ,
identifier[body_type] = identifier[models] . identifier[Items] , identifier[data] = identifier[body] , identifier[method] = literal[string] )). identifier[get_body] () | def quick_search(self, request, **kw):
"""Execute a quick search with the specified request.
:param request: see :ref:`api-search-request`
:param **kw: See Options below
:returns: :py:class:`planet.api.models.Items`
:raises planet.api.exceptions.APIException: On API error.
:Options:
* page_size (int): Size of response pages
* sort (string): Sorting order in the form `field (asc|desc)`
"""
body = json.dumps(request)
params = self._params(kw)
return self.dispatcher.response(models.Request(self._url('data/v1/quick-search'), self.auth, params=params, body_type=models.Items, data=body, method='POST')).get_body() |
async def executescript(self, sql_script: str) -> None:
    """Execute a user script.

    :param sql_script: SQL script text; delegated to the underlying
        cursor's ``executescript`` through ``self._execute``.
    """
    await self._execute(self._cursor.executescript, sql_script)
literal[string]
keyword[await] identifier[self] . identifier[_execute] ( identifier[self] . identifier[_cursor] . identifier[executescript] , identifier[sql_script] ) | async def executescript(self, sql_script: str) -> None:
"""Execute a user script."""
await self._execute(self._cursor.executescript, sql_script) |
def request(self, *args, **kwargs):
    """Issue the HTTP request, wrapping any error in ``RequestException``.

    All positional and keyword arguments are forwarded to the underlying
    HTTP client; a ``timeout=TIMEOUT`` keyword is always supplied.

    :raises RequestException: wrapping the original exception together
        with the arguments of the failed call.
    """
    try:
        return self._http.request(*args, timeout=TIMEOUT, **kwargs)
    except Exception as exc:
        # Chain explicitly so the original traceback is preserved as the
        # __cause__ of the wrapper, not merely implicit context.
        raise RequestException(exc, args, kwargs) from exc
constant[Issue the HTTP request capturing any errors that may occur.]
<ast.Try object at 0x7da20e957700> | keyword[def] identifier[request] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[_http] . identifier[request] (* identifier[args] , identifier[timeout] = identifier[TIMEOUT] ,** identifier[kwargs] )
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
keyword[raise] identifier[RequestException] ( identifier[exc] , identifier[args] , identifier[kwargs] ) | def request(self, *args, **kwargs):
"""Issue the HTTP request capturing any errors that may occur."""
try:
return self._http.request(*args, timeout=TIMEOUT, **kwargs) # depends on [control=['try'], data=[]]
except Exception as exc:
raise RequestException(exc, args, kwargs) # depends on [control=['except'], data=['exc']] |
def plugin_get(name):
    """
    Return the plugin class whose Meta.label matches ``name``.

    @param name: the cms label.
    @raises RuntimeError: if no plugin carries that label.
    """
    for candidate in plugins_base_get():
        if candidate.Meta.label == name:
            return candidate
    raise RuntimeError('CMS "%s" not known.' % name)
constant[
Return plugin class.
@param name: the cms label.
]
variable[plugins] assign[=] call[name[plugins_base_get], parameter[]]
for taget[name[plugin]] in starred[name[plugins]] begin[:]
if compare[name[plugin].Meta.label equal[==] name[name]] begin[:]
return[name[plugin]]
<ast.Raise object at 0x7da1b1da2290> | keyword[def] identifier[plugin_get] ( identifier[name] ):
literal[string]
identifier[plugins] = identifier[plugins_base_get] ()
keyword[for] identifier[plugin] keyword[in] identifier[plugins] :
keyword[if] identifier[plugin] . identifier[Meta] . identifier[label] == identifier[name] :
keyword[return] identifier[plugin]
keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[name] ) | def plugin_get(name):
"""
Return plugin class.
@param name: the cms label.
"""
plugins = plugins_base_get()
for plugin in plugins:
if plugin.Meta.label == name:
return plugin # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['plugin']]
raise RuntimeError('CMS "%s" not known.' % name) |
def handle_valid(self, form=None, *args, **kwargs):
    """
    Called after the form has validated.

    Saves the form when it supports saving (e.g. a ModelForm subclass)
    and then gives the form a chance to run its own ``handle_valid``.
    """
    if form is None:
        return
    # Best effort: a plain Form has neither hook, so both are optional.
    if hasattr(form, 'save'):
        form.save()
    if hasattr(form, 'handle_valid'):
        form.handle_valid(*args, **kwargs)
constant[
Called after the form has validated.
]
if call[name[hasattr], parameter[name[form], constant[save]]] begin[:]
call[name[form].save, parameter[]]
if call[name[hasattr], parameter[name[form], constant[handle_valid]]] begin[:]
call[name[form].handle_valid, parameter[<ast.Starred object at 0x7da2044c2e60>]] | keyword[def] identifier[handle_valid] ( identifier[self] , identifier[form] = keyword[None] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[form] , literal[string] ):
identifier[form] . identifier[save] ()
keyword[if] identifier[hasattr] ( identifier[form] , literal[string] ):
identifier[form] . identifier[handle_valid] (* identifier[args] ,** identifier[kwargs] ) | def handle_valid(self, form=None, *args, **kwargs):
"""
Called after the form has validated.
"""
# Take a chance and try save a subclass of a ModelForm.
if hasattr(form, 'save'):
form.save() # depends on [control=['if'], data=[]]
# Also try and call handle_valid method of the form itself.
if hasattr(form, 'handle_valid'):
form.handle_valid(*args, **kwargs) # depends on [control=['if'], data=[]] |
def show_term_protect(name=None, instance_id=None, call=None, quiet=False):
    '''
    Show the details from EC2 concerning an instance's termination protection state

    :param name: VM name, used to resolve the instance id when
        ``instance_id`` is not supplied
    :param instance_id: EC2 instance id to query directly
    :param call: must be ``'action'``; enforced below
    :param quiet: when True, log the result at DEBUG instead of INFO
    '''
    # This is a per-instance action, so refuse provider-level invocation.
    if call != 'action':
        raise SaltCloudSystemExit(
            'The show_term_protect action must be called with -a or --action.'
        )

    # Resolve the instance id from the VM name if not given explicitly.
    if not instance_id:
        instance_id = _get_node(name)['instanceId']
    params = {'Action': 'DescribeInstanceAttribute',
              'InstanceId': instance_id,
              'Attribute': 'disableApiTermination'}
    # return_root=True so the attribute's value element appears in the
    # flattened result items scanned below.
    result = aws.query(params,
                       location=get_location(),
                       provider=get_provider(),
                       return_root=True,
                       opts=__opts__,
                       sigver='4')

    disable_protect = False
    # The first item carrying a 'value' key holds the
    # disableApiTermination setting, as the string 'true' or 'false'.
    for item in result:
        if 'value' in item:
            disable_protect = item['value']
            break

    log.log(
        logging.DEBUG if quiet is True else logging.INFO,
        'Termination Protection is %s for %s',
        disable_protect == 'true' and 'enabled' or 'disabled', name
    )

    return disable_protect
constant[
Show the details from EC2 concerning an instance's termination protection state
]
if compare[name[call] not_equal[!=] constant[action]] begin[:]
<ast.Raise object at 0x7da18dc04ee0>
if <ast.UnaryOp object at 0x7da18dc062f0> begin[:]
variable[instance_id] assign[=] call[call[name[_get_node], parameter[name[name]]]][constant[instanceId]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18dc072e0>, <ast.Constant object at 0x7da18dc041c0>, <ast.Constant object at 0x7da18dc06950>], [<ast.Constant object at 0x7da18dc074f0>, <ast.Name object at 0x7da18dc063b0>, <ast.Constant object at 0x7da18dc070d0>]]
variable[result] assign[=] call[name[aws].query, parameter[name[params]]]
variable[disable_protect] assign[=] constant[False]
for taget[name[item]] in starred[name[result]] begin[:]
if compare[constant[value] in name[item]] begin[:]
variable[disable_protect] assign[=] call[name[item]][constant[value]]
break
call[name[log].log, parameter[<ast.IfExp object at 0x7da18dc04a30>, constant[Termination Protection is %s for %s], <ast.BoolOp object at 0x7da18dc07580>, name[name]]]
return[name[disable_protect]] | keyword[def] identifier[show_term_protect] ( identifier[name] = keyword[None] , identifier[instance_id] = keyword[None] , identifier[call] = keyword[None] , identifier[quiet] = keyword[False] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
)
keyword[if] keyword[not] identifier[instance_id] :
identifier[instance_id] = identifier[_get_node] ( identifier[name] )[ literal[string] ]
identifier[params] ={ literal[string] : literal[string] ,
literal[string] : identifier[instance_id] ,
literal[string] : literal[string] }
identifier[result] = identifier[aws] . identifier[query] ( identifier[params] ,
identifier[location] = identifier[get_location] (),
identifier[provider] = identifier[get_provider] (),
identifier[return_root] = keyword[True] ,
identifier[opts] = identifier[__opts__] ,
identifier[sigver] = literal[string] )
identifier[disable_protect] = keyword[False]
keyword[for] identifier[item] keyword[in] identifier[result] :
keyword[if] literal[string] keyword[in] identifier[item] :
identifier[disable_protect] = identifier[item] [ literal[string] ]
keyword[break]
identifier[log] . identifier[log] (
identifier[logging] . identifier[DEBUG] keyword[if] identifier[quiet] keyword[is] keyword[True] keyword[else] identifier[logging] . identifier[INFO] ,
literal[string] ,
identifier[disable_protect] == literal[string] keyword[and] literal[string] keyword[or] literal[string] , identifier[name]
)
keyword[return] identifier[disable_protect] | def show_term_protect(name=None, instance_id=None, call=None, quiet=False):
"""
Show the details from EC2 concerning an instance's termination protection state
"""
if call != 'action':
raise SaltCloudSystemExit('The show_term_protect action must be called with -a or --action.') # depends on [control=['if'], data=[]]
if not instance_id:
instance_id = _get_node(name)['instanceId'] # depends on [control=['if'], data=[]]
params = {'Action': 'DescribeInstanceAttribute', 'InstanceId': instance_id, 'Attribute': 'disableApiTermination'}
result = aws.query(params, location=get_location(), provider=get_provider(), return_root=True, opts=__opts__, sigver='4')
disable_protect = False
for item in result:
if 'value' in item:
disable_protect = item['value']
break # depends on [control=['if'], data=['item']] # depends on [control=['for'], data=['item']]
log.log(logging.DEBUG if quiet is True else logging.INFO, 'Termination Protection is %s for %s', disable_protect == 'true' and 'enabled' or 'disabled', name)
return disable_protect |
def service_record(self, service_name):
    """
    Args:
        service_name: the name of the service in the service registry

    Returns:
        the entire service record from the service registry, or None if
        the record was not found
    """
    # dict.get already yields None for a missing key; this also avoids
    # the Python-2-only dict.has_key() (removed in Python 3) and a
    # second call to self.services().
    return self.services().get(service_name)
constant[
Args:
service_name: the name of the service in the service registry
Returns:
the entire service record from the service registry or None if the record was not found
]
if <ast.UnaryOp object at 0x7da1b1bc1a20> begin[:]
return[constant[None]]
return[call[call[name[self].services, parameter[]]][name[service_name]]] | keyword[def] identifier[service_record] ( identifier[self] , identifier[service_name] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[services] (). identifier[has_key] ( identifier[service_name] ):
keyword[return] keyword[None]
keyword[return] identifier[self] . identifier[services] ()[ identifier[service_name] ] | def service_record(self, service_name):
"""
Args:
service_name: the name of the service in the service registry
Returns:
the entire service record from the service registry or None if the record was not found
"""
if not self.services().has_key(service_name):
return None # depends on [control=['if'], data=[]]
return self.services()[service_name] |
def from_dict(d):
    """Build a SearchAPIResponse from its dict representation."""
    warnings_ = d.get('warnings', [])

    raw_query = d.get('query') or None
    query = Person.from_dict(raw_query) if raw_query else None

    raw_person = d.get('person') or None
    person = Person.from_dict(raw_person) if raw_person else None

    # Falsy values (None / empty list) pass through unchanged.
    records = d.get('records')
    if records:
        records = [Record.from_dict(rec) for rec in records]

    suggested_searches = d.get('suggested_searches')
    if suggested_searches:
        suggested_searches = [Record.from_dict(rec)
                              for rec in suggested_searches]

    return SearchAPIResponse(query=query, person=person, records=records,
                             suggested_searches=suggested_searches,
                             warnings_=warnings_)
constant[Transform the dict to a response object and return the response.]
variable[warnings_] assign[=] call[name[d].get, parameter[constant[warnings], list[[]]]]
variable[query] assign[=] <ast.BoolOp object at 0x7da1b13aa7a0>
if name[query] begin[:]
variable[query] assign[=] call[name[Person].from_dict, parameter[name[query]]]
variable[person] assign[=] <ast.BoolOp object at 0x7da1b13a8550>
if name[person] begin[:]
variable[person] assign[=] call[name[Person].from_dict, parameter[name[person]]]
variable[records] assign[=] call[name[d].get, parameter[constant[records]]]
if name[records] begin[:]
variable[records] assign[=] <ast.ListComp object at 0x7da1b13a8940>
variable[suggested_searches] assign[=] call[name[d].get, parameter[constant[suggested_searches]]]
if name[suggested_searches] begin[:]
variable[suggested_searches] assign[=] <ast.ListComp object at 0x7da1b13aab60>
return[call[name[SearchAPIResponse], parameter[]]] | keyword[def] identifier[from_dict] ( identifier[d] ):
literal[string]
identifier[warnings_] = identifier[d] . identifier[get] ( literal[string] ,[])
identifier[query] = identifier[d] . identifier[get] ( literal[string] ) keyword[or] keyword[None]
keyword[if] identifier[query] :
identifier[query] = identifier[Person] . identifier[from_dict] ( identifier[query] )
identifier[person] = identifier[d] . identifier[get] ( literal[string] ) keyword[or] keyword[None]
keyword[if] identifier[person] :
identifier[person] = identifier[Person] . identifier[from_dict] ( identifier[person] )
identifier[records] = identifier[d] . identifier[get] ( literal[string] )
keyword[if] identifier[records] :
identifier[records] =[ identifier[Record] . identifier[from_dict] ( identifier[record] ) keyword[for] identifier[record] keyword[in] identifier[records] ]
identifier[suggested_searches] = identifier[d] . identifier[get] ( literal[string] )
keyword[if] identifier[suggested_searches] :
identifier[suggested_searches] =[ identifier[Record] . identifier[from_dict] ( identifier[record] )
keyword[for] identifier[record] keyword[in] identifier[suggested_searches] ]
keyword[return] identifier[SearchAPIResponse] ( identifier[query] = identifier[query] , identifier[person] = identifier[person] , identifier[records] = identifier[records] ,
identifier[suggested_searches] = identifier[suggested_searches] ,
identifier[warnings_] = identifier[warnings_] ) | def from_dict(d):
"""Transform the dict to a response object and return the response."""
warnings_ = d.get('warnings', [])
query = d.get('query') or None
if query:
query = Person.from_dict(query) # depends on [control=['if'], data=[]]
person = d.get('person') or None
if person:
person = Person.from_dict(person) # depends on [control=['if'], data=[]]
records = d.get('records')
if records:
records = [Record.from_dict(record) for record in records] # depends on [control=['if'], data=[]]
suggested_searches = d.get('suggested_searches')
if suggested_searches:
suggested_searches = [Record.from_dict(record) for record in suggested_searches] # depends on [control=['if'], data=[]]
return SearchAPIResponse(query=query, person=person, records=records, suggested_searches=suggested_searches, warnings_=warnings_) |
def geocode(self,
            query,
            lang='en',
            exactly_one=True,
            timeout=DEFAULT_SENTINEL):
    """
    Return a location point for a `3 words` query. If the `3 words` address
    doesn't exist, a :class:`geopy.exc.GeocoderQueryError` exception will be
    thrown.

    :param str query: The 3-word address you wish to geocode.

    :param str lang: two character language codes as supported by
        the API (https://docs.what3words.com/api/v2/#lang).

    :param bool exactly_one: Return one result or a list of results, if
        available. Due to the address scheme there is always exactly one
        result for each `3 words` address, so this parameter is rather
        useless for this geocoder.

        .. versionchanged:: 1.14.0
            ``exactly_one=False`` now returns a list of a single location.
            This option wasn't respected before.

    :param int timeout: Time, in seconds, to wait for the geocoding service
        to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
        exception. Set this only if you wish to override, on this call
        only, the value set during the geocoder's initialization.

    :rtype: :class:`geopy.location.Location` or a list of them, if
        ``exactly_one=False``.

    """
    # Reject anything that is not a word.word.word address up front.
    if not self._check_query(query):
        raise exc.GeocoderQueryError(
            "Search string must be 'word.word.word'"
        )

    query_string = urlencode({
        'addr': self.format_string % query,
        'lang': lang.lower(),
        'key': self.api_key,
    })
    url = "%s?%s" % (self.geocode_api, query_string)
    logger.debug("%s.geocode: %s", self.__class__.__name__, url)
    response = self._call_geocoder(url, timeout=timeout)
    return self._parse_json(response, exactly_one=exactly_one)
constant[
Return a location point for a `3 words` query. If the `3 words` address
doesn't exist, a :class:`geopy.exc.GeocoderQueryError` exception will be
thrown.
:param str query: The 3-word address you wish to geocode.
:param str lang: two character language codes as supported by
the API (https://docs.what3words.com/api/v2/#lang).
:param bool exactly_one: Return one result or a list of results, if
available. Due to the address scheme there is always exactly one
result for each `3 words` address, so this parameter is rather
useless for this geocoder.
.. versionchanged:: 1.14.0
``exactly_one=False`` now returns a list of a single location.
This option wasn't respected before.
:param int timeout: Time, in seconds, to wait for the geocoding service
to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
exception. Set this only if you wish to override, on this call
only, the value set during the geocoder's initialization.
:rtype: :class:`geopy.location.Location` or a list of them, if
``exactly_one=False``.
]
if <ast.UnaryOp object at 0x7da20c6c60b0> begin[:]
<ast.Raise object at 0x7da20c6c7bb0>
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c4160>, <ast.Constant object at 0x7da20c6c6dd0>, <ast.Constant object at 0x7da20c6c6890>], [<ast.BinOp object at 0x7da20c6c7790>, <ast.Call object at 0x7da20c6c7940>, <ast.Attribute object at 0x7da20c6c6110>]]
variable[url] assign[=] call[constant[?].join, parameter[tuple[[<ast.Attribute object at 0x7da20c6c5750>, <ast.Call object at 0x7da20c6c4d90>]]]]
call[name[logger].debug, parameter[constant[%s.geocode: %s], name[self].__class__.__name__, name[url]]]
return[call[name[self]._parse_json, parameter[call[name[self]._call_geocoder, parameter[name[url]]]]]] | keyword[def] identifier[geocode] ( identifier[self] ,
identifier[query] ,
identifier[lang] = literal[string] ,
identifier[exactly_one] = keyword[True] ,
identifier[timeout] = identifier[DEFAULT_SENTINEL] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_check_query] ( identifier[query] ):
keyword[raise] identifier[exc] . identifier[GeocoderQueryError] (
literal[string]
)
identifier[params] ={
literal[string] : identifier[self] . identifier[format_string] % identifier[query] ,
literal[string] : identifier[lang] . identifier[lower] (),
literal[string] : identifier[self] . identifier[api_key] ,
}
identifier[url] = literal[string] . identifier[join] (( identifier[self] . identifier[geocode_api] , identifier[urlencode] ( identifier[params] )))
identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[__class__] . identifier[__name__] , identifier[url] )
keyword[return] identifier[self] . identifier[_parse_json] (
identifier[self] . identifier[_call_geocoder] ( identifier[url] , identifier[timeout] = identifier[timeout] ),
identifier[exactly_one] = identifier[exactly_one]
) | def geocode(self, query, lang='en', exactly_one=True, timeout=DEFAULT_SENTINEL):
"""
Return a location point for a `3 words` query. If the `3 words` address
doesn't exist, a :class:`geopy.exc.GeocoderQueryError` exception will be
thrown.
:param str query: The 3-word address you wish to geocode.
:param str lang: two character language codes as supported by
the API (https://docs.what3words.com/api/v2/#lang).
:param bool exactly_one: Return one result or a list of results, if
available. Due to the address scheme there is always exactly one
result for each `3 words` address, so this parameter is rather
useless for this geocoder.
.. versionchanged:: 1.14.0
``exactly_one=False`` now returns a list of a single location.
This option wasn't respected before.
:param int timeout: Time, in seconds, to wait for the geocoding service
to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
exception. Set this only if you wish to override, on this call
only, the value set during the geocoder's initialization.
:rtype: :class:`geopy.location.Location` or a list of them, if
``exactly_one=False``.
"""
if not self._check_query(query):
raise exc.GeocoderQueryError("Search string must be 'word.word.word'") # depends on [control=['if'], data=[]]
params = {'addr': self.format_string % query, 'lang': lang.lower(), 'key': self.api_key}
url = '?'.join((self.geocode_api, urlencode(params)))
logger.debug('%s.geocode: %s', self.__class__.__name__, url)
return self._parse_json(self._call_geocoder(url, timeout=timeout), exactly_one=exactly_one) |
def update(self, **kwargs):
    """
    Update a resource by passing in modifications via keyword arguments.
    """
    payload = self._generate_input_dict(**kwargs)
    # PUT against the collection endpoint: drop the trailing id segment
    # from this resource's url and pluralize the remaining path.
    collection_url = '/'.join(self.url.split('/')[:-1]) + 's'
    self.load(self.client.put(collection_url, data=payload))
    return self
constant[
Update a resource by passing in modifications via keyword arguments.
]
variable[data] assign[=] call[name[self]._generate_input_dict, parameter[]]
call[name[self].load, parameter[call[name[self].client.put, parameter[binary_operation[call[constant[/].join, parameter[call[call[name[self].url.split, parameter[constant[/]]]][<ast.Slice object at 0x7da2041d87c0>]]] + constant[s]]]]]]
return[name[self]] | keyword[def] identifier[update] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[data] = identifier[self] . identifier[_generate_input_dict] (** identifier[kwargs] )
identifier[self] . identifier[load] ( identifier[self] . identifier[client] . identifier[put] ( literal[string] . identifier[join] ( identifier[self] . identifier[url] . identifier[split] ( literal[string] )[:- literal[int] ])+ literal[string] , identifier[data] = identifier[data] ))
keyword[return] identifier[self] | def update(self, **kwargs):
"""
Update a resource by passing in modifications via keyword arguments.
"""
data = self._generate_input_dict(**kwargs)
self.load(self.client.put('/'.join(self.url.split('/')[:-1]) + 's', data=data))
return self |
def __create_si(self, result) -> StoreItem:
    """Create a StoreItem from a result out of CosmosDB.

    :param result: raw CosmosDB result dict
    :return StoreItem:
    """
    # The payload lives under 'document'; restore the e_tag that Cosmos
    # keeps in its system field '_etag' before rebuilding the item.
    document = result.get('document')
    document['e_tag'] = result.get('_etag')
    return StoreItem(**document)
constant[Create a StoreItem from a result out of CosmosDB.
:param result:
:return StoreItem:
]
variable[doc] assign[=] call[name[result].get, parameter[constant[document]]]
call[name[doc]][constant[e_tag]] assign[=] call[name[result].get, parameter[constant[_etag]]]
return[call[name[StoreItem], parameter[]]] | keyword[def] identifier[__create_si] ( identifier[self] , identifier[result] )-> identifier[StoreItem] :
literal[string]
identifier[doc] = identifier[result] . identifier[get] ( literal[string] )
identifier[doc] [ literal[string] ]= identifier[result] . identifier[get] ( literal[string] )
keyword[return] identifier[StoreItem] (** identifier[doc] ) | def __create_si(self, result) -> StoreItem:
"""Create a StoreItem from a result out of CosmosDB.
:param result:
:return StoreItem:
"""
# get the document item from the result and turn into a dict
doc = result.get('document')
# readd the e_tag from Cosmos
doc['e_tag'] = result.get('_etag')
# create and return the StoreItem
return StoreItem(**doc) |
def get_child_ids(self):
    """gets the ids for the child parts"""
    if not self.has_magic_children():
        raise IllegalState()
    # Lazily generate the child parts on first access.
    if self._child_parts is None:
        self.generate_children()
    child_ids = [part.get_id() for part in self._child_parts]
    # NOTE(review): proxy is set to the runtime here, mirroring the
    # original code -- confirm a proxy object was not intended instead.
    return IdList(child_ids,
                  runtime=self.my_osid_object._runtime,
                  proxy=self.my_osid_object._runtime)
constant[gets the ids for the child parts]
if call[name[self].has_magic_children, parameter[]] begin[:]
if compare[name[self]._child_parts is constant[None]] begin[:]
call[name[self].generate_children, parameter[]]
variable[child_ids] assign[=] call[name[list], parameter[]]
for taget[name[part]] in starred[name[self]._child_parts] begin[:]
call[name[child_ids].append, parameter[call[name[part].get_id, parameter[]]]]
return[call[name[IdList], parameter[name[child_ids]]]]
<ast.Raise object at 0x7da2041d9240> | keyword[def] identifier[get_child_ids] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[has_magic_children] ():
keyword[if] identifier[self] . identifier[_child_parts] keyword[is] keyword[None] :
identifier[self] . identifier[generate_children] ()
identifier[child_ids] = identifier[list] ()
keyword[for] identifier[part] keyword[in] identifier[self] . identifier[_child_parts] :
identifier[child_ids] . identifier[append] ( identifier[part] . identifier[get_id] ())
keyword[return] identifier[IdList] ( identifier[child_ids] ,
identifier[runtime] = identifier[self] . identifier[my_osid_object] . identifier[_runtime] ,
identifier[proxy] = identifier[self] . identifier[my_osid_object] . identifier[_runtime] )
keyword[raise] identifier[IllegalState] () | def get_child_ids(self):
"""gets the ids for the child parts"""
if self.has_magic_children():
if self._child_parts is None:
self.generate_children() # depends on [control=['if'], data=[]]
child_ids = list()
for part in self._child_parts:
child_ids.append(part.get_id()) # depends on [control=['for'], data=['part']]
return IdList(child_ids, runtime=self.my_osid_object._runtime, proxy=self.my_osid_object._runtime) # depends on [control=['if'], data=[]]
raise IllegalState() |
def get_member(self, jid, default=None):
    """Get a chatroom member by JID.

    :param jid: JID to match against each entry of ``self.params['MEMBERS']``.
    :param default: value returned when no member matches (defaults to None).
    :return: the single matching member dict, or *default* when absent.
    :raises Exception: if more than one member shares the same JID.
    """
    # Materialize the matches as a list: under Python 3, filter() returns
    # a lazy iterator that has no len(), which made the original checks
    # raise TypeError.
    matches = [m for m in self.params['MEMBERS'] if m['JID'] == jid]
    if len(matches) == 1:
        return matches[0]
    elif len(matches) == 0:
        return default
    else:
        # Fixed typo in the original message ("Multple" -> "Multiple").
        raise Exception('Multiple members have the same JID of [%s]' % (jid,))
constant[Get a chatroom member by JID]
variable[member] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da18f810ac0>, call[name[self].params][constant[MEMBERS]]]]
if compare[call[name[len], parameter[name[member]]] equal[==] constant[1]] begin[:]
return[call[name[member]][constant[0]]] | keyword[def] identifier[get_member] ( identifier[self] , identifier[jid] , identifier[default] = keyword[None] ):
literal[string]
identifier[member] = identifier[filter] ( keyword[lambda] identifier[m] : identifier[m] [ literal[string] ]== identifier[jid] , identifier[self] . identifier[params] [ literal[string] ])
keyword[if] identifier[len] ( identifier[member] )== literal[int] :
keyword[return] identifier[member] [ literal[int] ]
keyword[elif] identifier[len] ( identifier[member] )== literal[int] :
keyword[return] identifier[default]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] %( identifier[jid] ,)) | def get_member(self, jid, default=None):
"""Get a chatroom member by JID"""
member = filter(lambda m: m['JID'] == jid, self.params['MEMBERS'])
if len(member) == 1:
return member[0] # depends on [control=['if'], data=[]]
elif len(member) == 0:
return default # depends on [control=['if'], data=[]]
else:
raise Exception('Multple members have the same JID of [%s]' % (jid,)) |
def get(self, key):
    """Return the object named by key, or None if it does not exist.

    LoggingDatastore records the lookup at INFO level and the resulting
    value at DEBUG level before handing it back to the caller.
    """
    log = self.logger
    log.info('%s: get %s' % (self, key))
    result = super(LoggingDatastore, self).get(key)
    log.debug('%s: %s' % (self, result))
    return result
return value | def function[get, parameter[self, key]]:
constant[Return the object named by key or None if it does not exist.
LoggingDatastore logs the access.
]
call[name[self].logger.info, parameter[binary_operation[constant[%s: get %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c990880>, <ast.Name object at 0x7da20c990df0>]]]]]
variable[value] assign[=] call[call[name[super], parameter[name[LoggingDatastore], name[self]]].get, parameter[name[key]]]
call[name[self].logger.debug, parameter[binary_operation[constant[%s: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c992860>, <ast.Name object at 0x7da20c9906d0>]]]]]
return[name[value]] | keyword[def] identifier[get] ( identifier[self] , identifier[key] ):
literal[string]
identifier[self] . identifier[logger] . identifier[info] ( literal[string] %( identifier[self] , identifier[key] ))
identifier[value] = identifier[super] ( identifier[LoggingDatastore] , identifier[self] ). identifier[get] ( identifier[key] )
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] %( identifier[self] , identifier[value] ))
keyword[return] identifier[value] | def get(self, key):
"""Return the object named by key or None if it does not exist.
LoggingDatastore logs the access.
"""
self.logger.info('%s: get %s' % (self, key))
value = super(LoggingDatastore, self).get(key)
self.logger.debug('%s: %s' % (self, value))
return value |
def forward(self,
            inputs: torch.Tensor,
            tags: torch.Tensor,
            mask: torch.ByteTensor = None) -> torch.Tensor:
    """
    Computes the log likelihood of the tag sequence given the inputs.
    """
    # pylint: disable=arguments-differ
    if mask is None:
        # No mask supplied: treat every position of every sequence as valid.
        mask = torch.ones(*tags.size(), dtype=torch.long)
    # Partition function over all tag sequences, then the score of the
    # observed sequence; their difference is the per-instance log likelihood.
    denominator = self._input_likelihood(inputs, mask)
    numerator = self._joint_likelihood(inputs, tags, mask)
    return torch.sum(numerator - denominator)
return torch.sum(log_numerator - log_denominator) | def function[forward, parameter[self, inputs, tags, mask]]:
constant[
Computes the log likelihood.
]
if compare[name[mask] is constant[None]] begin[:]
variable[mask] assign[=] call[name[torch].ones, parameter[<ast.Starred object at 0x7da1b1f96f50>]]
variable[log_denominator] assign[=] call[name[self]._input_likelihood, parameter[name[inputs], name[mask]]]
variable[log_numerator] assign[=] call[name[self]._joint_likelihood, parameter[name[inputs], name[tags], name[mask]]]
return[call[name[torch].sum, parameter[binary_operation[name[log_numerator] - name[log_denominator]]]]] | keyword[def] identifier[forward] ( identifier[self] ,
identifier[inputs] : identifier[torch] . identifier[Tensor] ,
identifier[tags] : identifier[torch] . identifier[Tensor] ,
identifier[mask] : identifier[torch] . identifier[ByteTensor] = keyword[None] )-> identifier[torch] . identifier[Tensor] :
literal[string]
keyword[if] identifier[mask] keyword[is] keyword[None] :
identifier[mask] = identifier[torch] . identifier[ones] (* identifier[tags] . identifier[size] (), identifier[dtype] = identifier[torch] . identifier[long] )
identifier[log_denominator] = identifier[self] . identifier[_input_likelihood] ( identifier[inputs] , identifier[mask] )
identifier[log_numerator] = identifier[self] . identifier[_joint_likelihood] ( identifier[inputs] , identifier[tags] , identifier[mask] )
keyword[return] identifier[torch] . identifier[sum] ( identifier[log_numerator] - identifier[log_denominator] ) | def forward(self, inputs: torch.Tensor, tags: torch.Tensor, mask: torch.ByteTensor=None) -> torch.Tensor:
"""
Computes the log likelihood.
"""
# pylint: disable=arguments-differ
if mask is None:
mask = torch.ones(*tags.size(), dtype=torch.long) # depends on [control=['if'], data=['mask']]
log_denominator = self._input_likelihood(inputs, mask)
log_numerator = self._joint_likelihood(inputs, tags, mask)
return torch.sum(log_numerator - log_denominator) |
def to_json(self):
    """Serialize the command array.

    Returns:
        str: Json for commands array object and all of the commands
        inside the array.
    """
    inner = ",".join(cmd.to_json() for cmd in self._commands)
    return "{\"commands\": [" + inner + "]}"
return "{\"commands\": [" + commands + "]}" | def function[to_json, parameter[self]]:
constant[
Returns:
str: Json for commands array object and all of the commands inside the array.]
variable[commands] assign[=] call[constant[,].join, parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b112be20>, name[self]._commands]]]]
return[binary_operation[binary_operation[constant[{"commands": [] + name[commands]] + constant[]}]]] | keyword[def] identifier[to_json] ( identifier[self] ):
literal[string]
identifier[commands] = literal[string] . identifier[join] ( identifier[map] ( keyword[lambda] identifier[x] : identifier[x] . identifier[to_json] (), identifier[self] . identifier[_commands] ))
keyword[return] literal[string] + identifier[commands] + literal[string] | def to_json(self):
"""
Returns:
str: Json for commands array object and all of the commands inside the array."""
commands = ','.join(map(lambda x: x.to_json(), self._commands))
return '{"commands": [' + commands + ']}' |
def execute(client, output_file, output_paths=None):
    """Run the generated workflow using cwltool library.

    :param client: repository client; its ``path`` is treated as the repo
        root and it must expose ``remove_unmodified()``.
    :param output_file: path of the generated CWL workflow to execute.
    :param output_paths: optional collection of expected output paths;
        unmodified ones are removed and reported at the end.
    """
    output_paths = output_paths or set()
    # cwltool is imported lazily so it is only required when a workflow
    # is actually executed.
    import cwltool.factory
    from cwltool import workflow
    from cwltool.context import LoadingContext, RuntimeContext
    from cwltool.utils import visit_class
    def construct_tool_object(toolpath_object, *args, **kwargs):
        """Fix missing locations."""
        protocol = 'file://'
        def addLocation(d):
            # Derive 'location' from 'path' for File/Directory entries
            # that only carry a plain filesystem path.
            if 'location' not in d and 'path' in d:
                d['location'] = protocol + d['path']
        visit_class(toolpath_object, ('File', 'Directory'), addLocation)
        return workflow.default_make_tool(toolpath_object, *args, **kwargs)
    # cwltool inspects sys.argv; shadow it for the duration of the run
    # and restore it afterwards.
    argv = sys.argv
    sys.argv = ['cwltool']
    # Keep all environment variables.
    # NOTE(review): RuntimeContext/LoadingContext are passed a 'kwargs'
    # dict here -- confirm this matches the cwltool version pinned by
    # the project (newer cwltool takes the dict positionally).
    runtime_context = RuntimeContext(
        kwargs={
            'rm_tmpdir': False,
            'move_outputs': 'leave',
            'preserve_entire_environment': True,
        }
    )
    loading_context = LoadingContext(
        kwargs={
            'construct_tool_object': construct_tool_object,
        }
    )
    factory = cwltool.factory.Factory(
        loading_context=loading_context,
        runtime_context=runtime_context,
    )
    # Build and invoke the workflow; cwltool leaves results in its own
    # scratch output directories (move_outputs='leave').
    process = factory.make(os.path.relpath(str(output_file)))
    outputs = process()
    sys.argv = argv
    # Move outputs to correct location in the repository.
    output_dirs = process.factory.executor.output_dirs
    def remove_prefix(location, prefix='file://'):
        # Strip the URI scheme so locations compare as filesystem paths.
        if location.startswith(prefix):
            return location[len(prefix):]
        return location
    locations = {
        remove_prefix(output['location'])
        for output in outputs.values()
    }
    with progressbar(
        locations,
        label='Moving outputs',
    ) as bar:
        for location in bar:
            for output_dir in output_dirs:
                if location.startswith(output_dir):
                    # Path of the output relative to its scratch dir,
                    # re-rooted under the repository.
                    output_path = location[len(output_dir):].lstrip(
                        os.path.sep
                    )
                    destination = client.path / output_path
                    if destination.is_dir():
                        # Replace an existing directory wholesale: remove
                        # it and move the new tree into its parent.
                        shutil.rmtree(str(destination))
                        destination = destination.parent
                    shutil.move(location, str(destination))
                    # NOTE(review): 'continue' here only skips to the next
                    # output_dir; a 'break' may have been intended once the
                    # location is matched -- confirm.
                    continue
    unchanged_paths = client.remove_unmodified(output_paths)
    if unchanged_paths:
        click.echo(
            'Unchanged files:\n\n\t{0}'.format(
                '\n\t'.join(
                    click.style(path, fg='yellow') for path in unchanged_paths
                )
            )
        )
) | def function[execute, parameter[client, output_file, output_paths]]:
constant[Run the generated workflow using cwltool library.]
variable[output_paths] assign[=] <ast.BoolOp object at 0x7da18ede6b00>
import module[cwltool.factory]
from relative_module[cwltool] import module[workflow]
from relative_module[cwltool.context] import module[LoadingContext], module[RuntimeContext]
from relative_module[cwltool.utils] import module[visit_class]
def function[construct_tool_object, parameter[toolpath_object]]:
constant[Fix missing locations.]
variable[protocol] assign[=] constant[file://]
def function[addLocation, parameter[d]]:
if <ast.BoolOp object at 0x7da18ede4610> begin[:]
call[name[d]][constant[location]] assign[=] binary_operation[name[protocol] + call[name[d]][constant[path]]]
call[name[visit_class], parameter[name[toolpath_object], tuple[[<ast.Constant object at 0x7da18ede4460>, <ast.Constant object at 0x7da18ede5e40>]], name[addLocation]]]
return[call[name[workflow].default_make_tool, parameter[name[toolpath_object], <ast.Starred object at 0x7da18ede4af0>]]]
variable[argv] assign[=] name[sys].argv
name[sys].argv assign[=] list[[<ast.Constant object at 0x7da18ede67a0>]]
variable[runtime_context] assign[=] call[name[RuntimeContext], parameter[]]
variable[loading_context] assign[=] call[name[LoadingContext], parameter[]]
variable[factory] assign[=] call[name[cwltool].factory.Factory, parameter[]]
variable[process] assign[=] call[name[factory].make, parameter[call[name[os].path.relpath, parameter[call[name[str], parameter[name[output_file]]]]]]]
variable[outputs] assign[=] call[name[process], parameter[]]
name[sys].argv assign[=] name[argv]
variable[output_dirs] assign[=] name[process].factory.executor.output_dirs
def function[remove_prefix, parameter[location, prefix]]:
if call[name[location].startswith, parameter[name[prefix]]] begin[:]
return[call[name[location]][<ast.Slice object at 0x7da1b042dde0>]]
return[name[location]]
variable[locations] assign[=] <ast.SetComp object at 0x7da1b042dc30>
with call[name[progressbar], parameter[name[locations]]] begin[:]
for taget[name[location]] in starred[name[bar]] begin[:]
for taget[name[output_dir]] in starred[name[output_dirs]] begin[:]
if call[name[location].startswith, parameter[name[output_dir]]] begin[:]
variable[output_path] assign[=] call[call[name[location]][<ast.Slice object at 0x7da1b042eaa0>].lstrip, parameter[name[os].path.sep]]
variable[destination] assign[=] binary_operation[name[client].path / name[output_path]]
if call[name[destination].is_dir, parameter[]] begin[:]
call[name[shutil].rmtree, parameter[call[name[str], parameter[name[destination]]]]]
variable[destination] assign[=] name[destination].parent
call[name[shutil].move, parameter[name[location], call[name[str], parameter[name[destination]]]]]
continue
variable[unchanged_paths] assign[=] call[name[client].remove_unmodified, parameter[name[output_paths]]]
if name[unchanged_paths] begin[:]
call[name[click].echo, parameter[call[constant[Unchanged files:
{0}].format, parameter[call[constant[
].join, parameter[<ast.GeneratorExp object at 0x7da20c6a8c70>]]]]]] | keyword[def] identifier[execute] ( identifier[client] , identifier[output_file] , identifier[output_paths] = keyword[None] ):
literal[string]
identifier[output_paths] = identifier[output_paths] keyword[or] identifier[set] ()
keyword[import] identifier[cwltool] . identifier[factory]
keyword[from] identifier[cwltool] keyword[import] identifier[workflow]
keyword[from] identifier[cwltool] . identifier[context] keyword[import] identifier[LoadingContext] , identifier[RuntimeContext]
keyword[from] identifier[cwltool] . identifier[utils] keyword[import] identifier[visit_class]
keyword[def] identifier[construct_tool_object] ( identifier[toolpath_object] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[protocol] = literal[string]
keyword[def] identifier[addLocation] ( identifier[d] ):
keyword[if] literal[string] keyword[not] keyword[in] identifier[d] keyword[and] literal[string] keyword[in] identifier[d] :
identifier[d] [ literal[string] ]= identifier[protocol] + identifier[d] [ literal[string] ]
identifier[visit_class] ( identifier[toolpath_object] ,( literal[string] , literal[string] ), identifier[addLocation] )
keyword[return] identifier[workflow] . identifier[default_make_tool] ( identifier[toolpath_object] ,* identifier[args] ,** identifier[kwargs] )
identifier[argv] = identifier[sys] . identifier[argv]
identifier[sys] . identifier[argv] =[ literal[string] ]
identifier[runtime_context] = identifier[RuntimeContext] (
identifier[kwargs] ={
literal[string] : keyword[False] ,
literal[string] : literal[string] ,
literal[string] : keyword[True] ,
}
)
identifier[loading_context] = identifier[LoadingContext] (
identifier[kwargs] ={
literal[string] : identifier[construct_tool_object] ,
}
)
identifier[factory] = identifier[cwltool] . identifier[factory] . identifier[Factory] (
identifier[loading_context] = identifier[loading_context] ,
identifier[runtime_context] = identifier[runtime_context] ,
)
identifier[process] = identifier[factory] . identifier[make] ( identifier[os] . identifier[path] . identifier[relpath] ( identifier[str] ( identifier[output_file] )))
identifier[outputs] = identifier[process] ()
identifier[sys] . identifier[argv] = identifier[argv]
identifier[output_dirs] = identifier[process] . identifier[factory] . identifier[executor] . identifier[output_dirs]
keyword[def] identifier[remove_prefix] ( identifier[location] , identifier[prefix] = literal[string] ):
keyword[if] identifier[location] . identifier[startswith] ( identifier[prefix] ):
keyword[return] identifier[location] [ identifier[len] ( identifier[prefix] ):]
keyword[return] identifier[location]
identifier[locations] ={
identifier[remove_prefix] ( identifier[output] [ literal[string] ])
keyword[for] identifier[output] keyword[in] identifier[outputs] . identifier[values] ()
}
keyword[with] identifier[progressbar] (
identifier[locations] ,
identifier[label] = literal[string] ,
) keyword[as] identifier[bar] :
keyword[for] identifier[location] keyword[in] identifier[bar] :
keyword[for] identifier[output_dir] keyword[in] identifier[output_dirs] :
keyword[if] identifier[location] . identifier[startswith] ( identifier[output_dir] ):
identifier[output_path] = identifier[location] [ identifier[len] ( identifier[output_dir] ):]. identifier[lstrip] (
identifier[os] . identifier[path] . identifier[sep]
)
identifier[destination] = identifier[client] . identifier[path] / identifier[output_path]
keyword[if] identifier[destination] . identifier[is_dir] ():
identifier[shutil] . identifier[rmtree] ( identifier[str] ( identifier[destination] ))
identifier[destination] = identifier[destination] . identifier[parent]
identifier[shutil] . identifier[move] ( identifier[location] , identifier[str] ( identifier[destination] ))
keyword[continue]
identifier[unchanged_paths] = identifier[client] . identifier[remove_unmodified] ( identifier[output_paths] )
keyword[if] identifier[unchanged_paths] :
identifier[click] . identifier[echo] (
literal[string] . identifier[format] (
literal[string] . identifier[join] (
identifier[click] . identifier[style] ( identifier[path] , identifier[fg] = literal[string] ) keyword[for] identifier[path] keyword[in] identifier[unchanged_paths]
)
)
) | def execute(client, output_file, output_paths=None):
"""Run the generated workflow using cwltool library."""
output_paths = output_paths or set()
import cwltool.factory
from cwltool import workflow
from cwltool.context import LoadingContext, RuntimeContext
from cwltool.utils import visit_class
def construct_tool_object(toolpath_object, *args, **kwargs):
"""Fix missing locations."""
protocol = 'file://'
def addLocation(d):
if 'location' not in d and 'path' in d:
d['location'] = protocol + d['path'] # depends on [control=['if'], data=[]]
visit_class(toolpath_object, ('File', 'Directory'), addLocation)
return workflow.default_make_tool(toolpath_object, *args, **kwargs)
argv = sys.argv
sys.argv = ['cwltool']
# Keep all environment variables.
runtime_context = RuntimeContext(kwargs={'rm_tmpdir': False, 'move_outputs': 'leave', 'preserve_entire_environment': True})
loading_context = LoadingContext(kwargs={'construct_tool_object': construct_tool_object})
factory = cwltool.factory.Factory(loading_context=loading_context, runtime_context=runtime_context)
process = factory.make(os.path.relpath(str(output_file)))
outputs = process()
sys.argv = argv
# Move outputs to correct location in the repository.
output_dirs = process.factory.executor.output_dirs
def remove_prefix(location, prefix='file://'):
if location.startswith(prefix):
return location[len(prefix):] # depends on [control=['if'], data=[]]
return location
locations = {remove_prefix(output['location']) for output in outputs.values()}
with progressbar(locations, label='Moving outputs') as bar:
for location in bar:
for output_dir in output_dirs:
if location.startswith(output_dir):
output_path = location[len(output_dir):].lstrip(os.path.sep)
destination = client.path / output_path
if destination.is_dir():
shutil.rmtree(str(destination))
destination = destination.parent # depends on [control=['if'], data=[]]
shutil.move(location, str(destination))
continue # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['output_dir']] # depends on [control=['for'], data=['location']] # depends on [control=['with'], data=['bar']]
unchanged_paths = client.remove_unmodified(output_paths)
if unchanged_paths:
click.echo('Unchanged files:\n\n\t{0}'.format('\n\t'.join((click.style(path, fg='yellow') for path in unchanged_paths)))) # depends on [control=['if'], data=[]] |
def create_peaklist(self, spectrum, chain, chain_idx, source):
    """Create peak list file.

    :param spectrum: Spectrum object instance.
    :type spectrum: :class:`~nmrstarlib.plsimulator.Spectrum`
    :param dict chain: Chain object that contains chemical shift values and assignment information.
    :param int chain_idx: Protein chain index.
    :param str source: :class:`~nmrstarlib.nmrstarlib.StarFile` source.
    :return: Peak list object, or None when no spin system reaches the
        spectrum's minimum peak count.
    :rtype: :class:`~nmrstarlib.plsimulator.PeakList`
    """
    peaklist = plsimulator.PeakList(spectrum.name, spectrum.labels, source, chain_idx)
    spin_systems = []
    for site in self.create_sequence_sites(chain, spectrum.seq_site_length):
        spin_system = plsimulator.SpinSystem()
        for template in spectrum.peak_templates:
            peak = plsimulator.Peak(template.dimension_labels)
            for dim in template:
                shift = site[dim.position].get(dim.label, None)
                label = "{}{}{}".format(site[dim.position]["AA3Code"],
                                        site[dim.position]["Seq_ID"],
                                        dim.label)
                # Only dimensions with a known chemical shift contribute.
                if shift and label:
                    peak.append(plsimulator.Dimension(dim.label, dim.position,
                                                      label, float(shift)))
            # Keep the peak only when every template dimension was filled.
            if len(peak) == len(template):
                spin_system.append(peak)
                peaklist.append(peak)
        spin_systems.append(spin_system)
    # Drop the whole list when no spin system is informative enough.
    if all(len(ss) < spectrum.min_spin_system_peaks for ss in spin_systems):
        return None
    if self.noise_generator is not None:
        # Split spin systems into percentage chunks; each chunk gets noise
        # drawn with its own split index.
        for chunk_idx, chunk in enumerate(self.split_by_percent(spin_systems)):
            for spin_system in chunk:
                for peak in spin_system:
                    peak.apply_noise(self.noise_generator, chunk_idx)
    return peaklist
return peaklist | def function[create_peaklist, parameter[self, spectrum, chain, chain_idx, source]]:
constant[Create peak list file.
:param spectrum: Spectrum object instance.
:type spectrum: :class:`~nmrstarlib.plsimulator.Spectrum`
:param dict chain: Chain object that contains chemical shift values and assignment information.
:param int chain_idx: Protein chain index.
:param str source: :class:`~nmrstarlib.nmrstarlib.StarFile` source.
:return: Peak list object.
:rtype: :class:`~nmrstarlib.plsimulator.PeakList`
]
variable[sequence_sites] assign[=] call[name[self].create_sequence_sites, parameter[name[chain], name[spectrum].seq_site_length]]
variable[spin_systems] assign[=] list[[]]
variable[peaklist] assign[=] call[name[plsimulator].PeakList, parameter[name[spectrum].name, name[spectrum].labels, name[source], name[chain_idx]]]
for taget[name[seq_site]] in starred[name[sequence_sites]] begin[:]
variable[spin_system] assign[=] call[name[plsimulator].SpinSystem, parameter[]]
for taget[name[template]] in starred[name[spectrum].peak_templates] begin[:]
variable[peak] assign[=] call[name[plsimulator].Peak, parameter[name[template].dimension_labels]]
for taget[name[dim]] in starred[name[template]] begin[:]
variable[chemshift] assign[=] call[call[name[seq_site]][name[dim].position].get, parameter[name[dim].label, constant[None]]]
variable[assignment] assign[=] call[constant[{}{}{}].format, parameter[call[call[name[seq_site]][name[dim].position]][constant[AA3Code]], call[call[name[seq_site]][name[dim].position]][constant[Seq_ID]], name[dim].label]]
if <ast.BoolOp object at 0x7da20c6aa950> begin[:]
variable[peak_dim] assign[=] call[name[plsimulator].Dimension, parameter[name[dim].label, name[dim].position, name[assignment], call[name[float], parameter[name[chemshift]]]]]
call[name[peak].append, parameter[name[peak_dim]]]
if compare[call[name[len], parameter[name[peak]]] equal[==] call[name[len], parameter[name[template]]]] begin[:]
call[name[spin_system].append, parameter[name[peak]]]
call[name[peaklist].append, parameter[name[peak]]]
call[name[spin_systems].append, parameter[name[spin_system]]]
if call[name[all], parameter[<ast.GeneratorExp object at 0x7da20c6ab040>]] begin[:]
return[constant[None]]
if compare[name[self].noise_generator is_not constant[None]] begin[:]
variable[spin_systems_chunks] assign[=] call[name[self].split_by_percent, parameter[name[spin_systems]]]
for taget[tuple[[<ast.Name object at 0x7da18f09d600>, <ast.Name object at 0x7da18f09fe80>]]] in starred[call[name[enumerate], parameter[name[spin_systems_chunks]]]] begin[:]
for taget[name[spin_system]] in starred[name[chunk]] begin[:]
for taget[name[peak]] in starred[name[spin_system]] begin[:]
call[name[peak].apply_noise, parameter[name[self].noise_generator, name[split_idx]]]
return[name[peaklist]] | keyword[def] identifier[create_peaklist] ( identifier[self] , identifier[spectrum] , identifier[chain] , identifier[chain_idx] , identifier[source] ):
literal[string]
identifier[sequence_sites] = identifier[self] . identifier[create_sequence_sites] ( identifier[chain] , identifier[spectrum] . identifier[seq_site_length] )
identifier[spin_systems] =[]
identifier[peaklist] = identifier[plsimulator] . identifier[PeakList] ( identifier[spectrum] . identifier[name] , identifier[spectrum] . identifier[labels] , identifier[source] , identifier[chain_idx] )
keyword[for] identifier[seq_site] keyword[in] identifier[sequence_sites] :
identifier[spin_system] = identifier[plsimulator] . identifier[SpinSystem] ()
keyword[for] identifier[template] keyword[in] identifier[spectrum] . identifier[peak_templates] :
identifier[peak] = identifier[plsimulator] . identifier[Peak] ( identifier[template] . identifier[dimension_labels] )
keyword[for] identifier[dim] keyword[in] identifier[template] :
identifier[chemshift] = identifier[seq_site] [ identifier[dim] . identifier[position] ]. identifier[get] ( identifier[dim] . identifier[label] , keyword[None] )
identifier[assignment] = literal[string] . identifier[format] ( identifier[seq_site] [ identifier[dim] . identifier[position] ][ literal[string] ],
identifier[seq_site] [ identifier[dim] . identifier[position] ][ literal[string] ],
identifier[dim] . identifier[label] )
keyword[if] identifier[chemshift] keyword[and] identifier[assignment] :
identifier[peak_dim] = identifier[plsimulator] . identifier[Dimension] ( identifier[dim] . identifier[label] , identifier[dim] . identifier[position] , identifier[assignment] , identifier[float] ( identifier[chemshift] ))
identifier[peak] . identifier[append] ( identifier[peak_dim] )
keyword[else] :
keyword[continue]
keyword[if] identifier[len] ( identifier[peak] )== identifier[len] ( identifier[template] ):
identifier[spin_system] . identifier[append] ( identifier[peak] )
identifier[peaklist] . identifier[append] ( identifier[peak] )
keyword[else] :
keyword[continue]
identifier[spin_systems] . identifier[append] ( identifier[spin_system] )
keyword[if] identifier[all] ( identifier[len] ( identifier[i] )< identifier[spectrum] . identifier[min_spin_system_peaks] keyword[for] identifier[i] keyword[in] identifier[spin_systems] ):
keyword[return] keyword[None]
keyword[if] identifier[self] . identifier[noise_generator] keyword[is] keyword[not] keyword[None] :
identifier[spin_systems_chunks] = identifier[self] . identifier[split_by_percent] ( identifier[spin_systems] )
keyword[for] identifier[split_idx] , identifier[chunk] keyword[in] identifier[enumerate] ( identifier[spin_systems_chunks] ):
keyword[for] identifier[spin_system] keyword[in] identifier[chunk] :
keyword[for] identifier[peak] keyword[in] identifier[spin_system] :
identifier[peak] . identifier[apply_noise] ( identifier[self] . identifier[noise_generator] , identifier[split_idx] )
keyword[return] identifier[peaklist] | def create_peaklist(self, spectrum, chain, chain_idx, source):
"""Create peak list file.
:param spectrum: Spectrum object instance.
:type spectrum: :class:`~nmrstarlib.plsimulator.Spectrum`
:param dict chain: Chain object that contains chemical shift values and assignment information.
:param int chain_idx: Protein chain index.
:param str source: :class:`~nmrstarlib.nmrstarlib.StarFile` source.
:return: Peak list object.
:rtype: :class:`~nmrstarlib.plsimulator.PeakList`
"""
sequence_sites = self.create_sequence_sites(chain, spectrum.seq_site_length)
spin_systems = []
peaklist = plsimulator.PeakList(spectrum.name, spectrum.labels, source, chain_idx)
for seq_site in sequence_sites:
spin_system = plsimulator.SpinSystem()
for template in spectrum.peak_templates:
peak = plsimulator.Peak(template.dimension_labels)
for dim in template:
chemshift = seq_site[dim.position].get(dim.label, None)
assignment = '{}{}{}'.format(seq_site[dim.position]['AA3Code'], seq_site[dim.position]['Seq_ID'], dim.label)
if chemshift and assignment:
peak_dim = plsimulator.Dimension(dim.label, dim.position, assignment, float(chemshift))
peak.append(peak_dim) # depends on [control=['if'], data=[]]
else:
continue # depends on [control=['for'], data=['dim']]
if len(peak) == len(template):
spin_system.append(peak)
peaklist.append(peak) # depends on [control=['if'], data=[]]
else:
continue # depends on [control=['for'], data=['template']]
spin_systems.append(spin_system) # depends on [control=['for'], data=['seq_site']]
if all((len(i) < spectrum.min_spin_system_peaks for i in spin_systems)):
return None # depends on [control=['if'], data=[]]
if self.noise_generator is not None:
spin_systems_chunks = self.split_by_percent(spin_systems)
for (split_idx, chunk) in enumerate(spin_systems_chunks):
for spin_system in chunk:
for peak in spin_system:
peak.apply_noise(self.noise_generator, split_idx) # depends on [control=['for'], data=['peak']] # depends on [control=['for'], data=['spin_system']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
return peaklist |
def sendCommand(self, **msg):
    """
    Sends a raw command to the Slack server, generating a message ID automatically.
    """
    assert 'type' in msg, 'Message type is required.'
    message_id = self.next_message_id
    msg['id'] = message_id
    # Advance the counter, wrapping back to 1 before it reaches maxint.
    bumped = self.next_message_id + 1
    self.next_message_id = 1 if bumped >= maxint else bumped
    self.sendMessage(json.dumps(msg))
    return message_id
return msg['id'] | def function[sendCommand, parameter[self]]:
constant[
Sends a raw command to the Slack server, generating a message ID automatically.
]
assert[compare[constant[type] in name[msg]]]
call[name[msg]][constant[id]] assign[=] name[self].next_message_id
<ast.AugAssign object at 0x7da1b2445300>
if compare[name[self].next_message_id greater_or_equal[>=] name[maxint]] begin[:]
name[self].next_message_id assign[=] constant[1]
call[name[self].sendMessage, parameter[call[name[json].dumps, parameter[name[msg]]]]]
return[call[name[msg]][constant[id]]] | keyword[def] identifier[sendCommand] ( identifier[self] ,** identifier[msg] ):
literal[string]
keyword[assert] literal[string] keyword[in] identifier[msg] , literal[string]
identifier[msg] [ literal[string] ]= identifier[self] . identifier[next_message_id]
identifier[self] . identifier[next_message_id] += literal[int]
keyword[if] identifier[self] . identifier[next_message_id] >= identifier[maxint] :
identifier[self] . identifier[next_message_id] = literal[int]
identifier[self] . identifier[sendMessage] ( identifier[json] . identifier[dumps] ( identifier[msg] ))
keyword[return] identifier[msg] [ literal[string] ] | def sendCommand(self, **msg):
"""
Sends a raw command to the Slack server, generating a message ID automatically.
"""
assert 'type' in msg, 'Message type is required.'
msg['id'] = self.next_message_id
self.next_message_id += 1
if self.next_message_id >= maxint:
self.next_message_id = 1 # depends on [control=['if'], data=[]]
self.sendMessage(json.dumps(msg))
return msg['id'] |
def search_kv_store(self, key):
    """Search for a key in the key-value store.
    :param key: string
    :rtype: string
    """
    payload = {'key': key, 'operation': 'RETRIEVE'}
    response = self.post_json(self.make_url("/useragent-kv"), payload)
    return response['value']
return self.post_json(self.make_url("/useragent-kv"), data)['value'] | def function[search_kv_store, parameter[self, key]]:
constant[Search for a key in the key-value store.
:param key: string
:rtype: string
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da2047ea740>, <ast.Constant object at 0x7da2047e97e0>], [<ast.Constant object at 0x7da2047e93f0>, <ast.Name object at 0x7da2047e8be0>]]
return[call[call[name[self].post_json, parameter[call[name[self].make_url, parameter[constant[/useragent-kv]]], name[data]]]][constant[value]]] | keyword[def] identifier[search_kv_store] ( identifier[self] , identifier[key] ):
literal[string]
identifier[data] ={
literal[string] : literal[string] ,
literal[string] : identifier[key]
}
keyword[return] identifier[self] . identifier[post_json] ( identifier[self] . identifier[make_url] ( literal[string] ), identifier[data] )[ literal[string] ] | def search_kv_store(self, key):
"""Search for a key in the key-value store.
:param key: string
:rtype: string
"""
data = {'operation': 'RETRIEVE', 'key': key}
return self.post_json(self.make_url('/useragent-kv'), data)['value'] |
def write_text(self, name, s):
    """Write string data to cur_dir/name using write_file()."""
    # Encode to bytes and hand write_file() a file-like buffer.
    self.write_file(name, io.BytesIO(compat.to_bytes(s)))
constant[Write string data to cur_dir/name using write_file().]
variable[buf] assign[=] call[name[io].BytesIO, parameter[call[name[compat].to_bytes, parameter[name[s]]]]]
call[name[self].write_file, parameter[name[name], name[buf]]] | keyword[def] identifier[write_text] ( identifier[self] , identifier[name] , identifier[s] ):
literal[string]
identifier[buf] = identifier[io] . identifier[BytesIO] ( identifier[compat] . identifier[to_bytes] ( identifier[s] ))
identifier[self] . identifier[write_file] ( identifier[name] , identifier[buf] ) | def write_text(self, name, s):
"""Write string data to cur_dir/name using write_file()."""
buf = io.BytesIO(compat.to_bytes(s))
self.write_file(name, buf) |
def norm(self):
    """ Returns the norm of the quaternion
    norm = w**2 + x**2 + y**2 + z**2
    """
    squared = sum(component ** 2 for component in (self.w, self.x, self.y, self.z))
    return squared ** 0.5
constant[ Returns the norm of the quaternion
norm = w**2 + x**2 + y**2 + z**2
]
variable[tmp] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[self].w ** constant[2]] + binary_operation[name[self].x ** constant[2]]] + binary_operation[name[self].y ** constant[2]]] + binary_operation[name[self].z ** constant[2]]]
return[binary_operation[name[tmp] ** constant[0.5]]] | keyword[def] identifier[norm] ( identifier[self] ):
literal[string]
identifier[tmp] = identifier[self] . identifier[w] ** literal[int] + identifier[self] . identifier[x] ** literal[int] + identifier[self] . identifier[y] ** literal[int] + identifier[self] . identifier[z] ** literal[int]
keyword[return] identifier[tmp] ** literal[int] | def norm(self):
""" Returns the norm of the quaternion
norm = w**2 + x**2 + y**2 + z**2
"""
tmp = self.w ** 2 + self.x ** 2 + self.y ** 2 + self.z ** 2
return tmp ** 0.5 |
def evaluateR2derivs(Pot,R,z,phi=None,t=0.):
    """
    NAME:
       evaluateR2derivs
    PURPOSE:
       convenience function to evaluate a possible sum of potentials
    INPUT:
       Pot - a potential or list of potentials (dissipative forces in such a list are ignored)
       R - cylindrical Galactocentric distance (can be Quantity)
       z - distance above the plane (can be Quantity)
       phi - azimuth (optional; can be Quantity)
       t - time (optional; can be Quantity)
    OUTPUT:
       d2Phi/d2R(R,z,phi,t)
    HISTORY:
       2012-07-25 - Written - Bovy (IAS)
    """
    nonAxi= _isNonAxi(Pot)
    if nonAxi and phi is None:
        raise PotentialError("The (list of) Potential instances is non-axisymmetric, but you did not provide phi")
    if isinstance(Pot,list):
        # Dissipative forces carry no potential, so they are skipped.
        # Start from 0. (float) so an empty/all-dissipative list still
        # returns a float, matching the original accumulator; the
        # original shadowed the builtin `sum`, which this avoids.
        return sum((pot.R2deriv(R,z,phi=phi,t=t,use_physical=False)
                    for pot in Pot
                    if not isinstance(pot,DissipativeForce)), 0.)
    elif isinstance(Pot,Potential):
        return Pot.R2deriv(R,z,phi=phi,t=t,use_physical=False)
    else: #pragma: no cover
        raise PotentialError("Input to 'evaluateR2derivs' is neither a Potential-instance or a list of such instances")
constant[
NAME:
evaluateR2derivs
PURPOSE:
convenience function to evaluate a possible sum of potentials
INPUT:
Pot - a potential or list of potentials (dissipative forces in such a list are ignored)
R - cylindrical Galactocentric distance (can be Quantity)
z - distance above the plane (can be Quantity)
phi - azimuth (optional; can be Quantity)
t - time (optional; can be Quantity)
OUTPUT:
d2Phi/d2R(R,z,phi,t)
HISTORY:
2012-07-25 - Written - Bovy (IAS)
]
variable[isList] assign[=] call[name[isinstance], parameter[name[Pot], name[list]]]
variable[nonAxi] assign[=] call[name[_isNonAxi], parameter[name[Pot]]]
if <ast.BoolOp object at 0x7da1b0c94d00> begin[:]
<ast.Raise object at 0x7da1b0c96c80>
if name[isList] begin[:]
variable[sum] assign[=] constant[0.0]
for taget[name[pot]] in starred[name[Pot]] begin[:]
if <ast.UnaryOp object at 0x7da1b0c95180> begin[:]
<ast.AugAssign object at 0x7da1b0c95c60>
return[name[sum]] | keyword[def] identifier[evaluateR2derivs] ( identifier[Pot] , identifier[R] , identifier[z] , identifier[phi] = keyword[None] , identifier[t] = literal[int] ):
literal[string]
identifier[isList] = identifier[isinstance] ( identifier[Pot] , identifier[list] )
identifier[nonAxi] = identifier[_isNonAxi] ( identifier[Pot] )
keyword[if] identifier[nonAxi] keyword[and] identifier[phi] keyword[is] keyword[None] :
keyword[raise] identifier[PotentialError] ( literal[string] )
keyword[if] identifier[isList] :
identifier[sum] = literal[int]
keyword[for] identifier[pot] keyword[in] identifier[Pot] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[pot] , identifier[DissipativeForce] ):
identifier[sum] += identifier[pot] . identifier[R2deriv] ( identifier[R] , identifier[z] , identifier[phi] = identifier[phi] , identifier[t] = identifier[t] , identifier[use_physical] = keyword[False] )
keyword[return] identifier[sum]
keyword[elif] identifier[isinstance] ( identifier[Pot] , identifier[Potential] ):
keyword[return] identifier[Pot] . identifier[R2deriv] ( identifier[R] , identifier[z] , identifier[phi] = identifier[phi] , identifier[t] = identifier[t] , identifier[use_physical] = keyword[False] )
keyword[else] :
keyword[raise] identifier[PotentialError] ( literal[string] ) | def evaluateR2derivs(Pot, R, z, phi=None, t=0.0):
"""
NAME:
evaluateR2derivs
PURPOSE:
convenience function to evaluate a possible sum of potentials
INPUT:
Pot - a potential or list of potentials (dissipative forces in such a list are ignored)
R - cylindrical Galactocentric distance (can be Quantity)
z - distance above the plane (can be Quantity)
phi - azimuth (optional; can be Quantity)
t - time (optional; can be Quantity)
OUTPUT:
d2Phi/d2R(R,z,phi,t)
HISTORY:
2012-07-25 - Written - Bovy (IAS)
"""
isList = isinstance(Pot, list)
nonAxi = _isNonAxi(Pot)
if nonAxi and phi is None:
raise PotentialError('The (list of) Potential instances is non-axisymmetric, but you did not provide phi') # depends on [control=['if'], data=[]]
if isList:
sum = 0.0
for pot in Pot:
if not isinstance(pot, DissipativeForce):
sum += pot.R2deriv(R, z, phi=phi, t=t, use_physical=False) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pot']]
return sum # depends on [control=['if'], data=[]]
elif isinstance(Pot, Potential):
return Pot.R2deriv(R, z, phi=phi, t=t, use_physical=False) # depends on [control=['if'], data=[]]
else: #pragma: no cover
raise PotentialError("Input to 'evaluateR2derivs' is neither a Potential-instance or a list of such instances") |
def get_usage_type(self, ip):
    ''' Get usage_type '''
    record = self.get_all(ip)
    # Falsy lookup results (e.g. None) are returned unchanged.
    return record.usage_type if record else record
constant[ Get usage_type ]
variable[rec] assign[=] call[name[self].get_all, parameter[name[ip]]]
return[<ast.BoolOp object at 0x7da1b0d88460>] | keyword[def] identifier[get_usage_type] ( identifier[self] , identifier[ip] ):
literal[string]
identifier[rec] = identifier[self] . identifier[get_all] ( identifier[ip] )
keyword[return] identifier[rec] keyword[and] identifier[rec] . identifier[usage_type] | def get_usage_type(self, ip):
""" Get usage_type """
rec = self.get_all(ip)
return rec and rec.usage_type |
def _sincedb_start_position(self):
"""Retrieves the starting position from the sincedb sql db
for a given file
"""
if not self._sincedb_path:
return None
self._sincedb_init()
self._log_debug('retrieving start_position from sincedb')
conn = sqlite3.connect(self._sincedb_path, isolation_level=None)
cursor = conn.cursor()
cursor.execute('select position from sincedb where fid = :fid and filename = :filename', {
'fid': self._fid,
'filename': self._filename
})
start_position = None
for row in cursor.fetchall():
start_position, = row
return start_position | def function[_sincedb_start_position, parameter[self]]:
constant[Retrieves the starting position from the sincedb sql db
for a given file
]
if <ast.UnaryOp object at 0x7da207f03190> begin[:]
return[constant[None]]
call[name[self]._sincedb_init, parameter[]]
call[name[self]._log_debug, parameter[constant[retrieving start_position from sincedb]]]
variable[conn] assign[=] call[name[sqlite3].connect, parameter[name[self]._sincedb_path]]
variable[cursor] assign[=] call[name[conn].cursor, parameter[]]
call[name[cursor].execute, parameter[constant[select position from sincedb where fid = :fid and filename = :filename], dictionary[[<ast.Constant object at 0x7da207f025f0>, <ast.Constant object at 0x7da207f012d0>], [<ast.Attribute object at 0x7da207f01b40>, <ast.Attribute object at 0x7da207f025c0>]]]]
variable[start_position] assign[=] constant[None]
for taget[name[row]] in starred[call[name[cursor].fetchall, parameter[]]] begin[:]
<ast.Tuple object at 0x7da207f015a0> assign[=] name[row]
return[name[start_position]] | keyword[def] identifier[_sincedb_start_position] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_sincedb_path] :
keyword[return] keyword[None]
identifier[self] . identifier[_sincedb_init] ()
identifier[self] . identifier[_log_debug] ( literal[string] )
identifier[conn] = identifier[sqlite3] . identifier[connect] ( identifier[self] . identifier[_sincedb_path] , identifier[isolation_level] = keyword[None] )
identifier[cursor] = identifier[conn] . identifier[cursor] ()
identifier[cursor] . identifier[execute] ( literal[string] ,{
literal[string] : identifier[self] . identifier[_fid] ,
literal[string] : identifier[self] . identifier[_filename]
})
identifier[start_position] = keyword[None]
keyword[for] identifier[row] keyword[in] identifier[cursor] . identifier[fetchall] ():
identifier[start_position] ,= identifier[row]
keyword[return] identifier[start_position] | def _sincedb_start_position(self):
"""Retrieves the starting position from the sincedb sql db
for a given file
"""
if not self._sincedb_path:
return None # depends on [control=['if'], data=[]]
self._sincedb_init()
self._log_debug('retrieving start_position from sincedb')
conn = sqlite3.connect(self._sincedb_path, isolation_level=None)
cursor = conn.cursor()
cursor.execute('select position from sincedb where fid = :fid and filename = :filename', {'fid': self._fid, 'filename': self._filename})
start_position = None
for row in cursor.fetchall():
(start_position,) = row # depends on [control=['for'], data=['row']]
return start_position |
def stats_per100(self, kind='R', summary=False):
    """Returns a DataFrame of per-100-possession stats.
    :param kind: table variant forwarded to ``_get_stats_table``
        ('R' presumably selects regular-season stats — TODO confirm
        against the helper's accepted values).
    :param summary: forwarded to ``_get_stats_table``; presumably
        selects summary rows when True — verify in that helper.
    """
    return self._get_stats_table('per_poss', kind=kind, summary=summary)
constant[Returns a DataFrame of per-100-possession stats.]
return[call[name[self]._get_stats_table, parameter[constant[per_poss]]]] | keyword[def] identifier[stats_per100] ( identifier[self] , identifier[kind] = literal[string] , identifier[summary] = keyword[False] ):
literal[string]
keyword[return] identifier[self] . identifier[_get_stats_table] ( literal[string] , identifier[kind] = identifier[kind] , identifier[summary] = identifier[summary] ) | def stats_per100(self, kind='R', summary=False):
"""Returns a DataFrame of per-100-possession stats."""
return self._get_stats_table('per_poss', kind=kind, summary=summary) |
def renders_impl(self, template_content, context,
                 at_encoding=anytemplate.compat.ENCODING, **kwargs):
    """
    Render given template string and return the result.
    Writes the template content to a temporary file, renders it with
    :meth:`render_impl`, and removes the temporary file afterwards.
    :param template_content: Template content
    :param context: A dict or dict-like object to instantiate given
        template file
    :param at_encoding: Template encoding
    :param kwargs: Keyword arguments such as:
        - at_paths: Template search paths
        - Other keyword arguments passed to the template engine to render
          templates with specific features enabled.
    :return: Rendered string
    :raises IOError, OSError: if the temp file cannot be written or the
        rendering fails with an OS-level error (re-raised after logging)
    """
    tmpdir = os.environ.get("TMPDIR", "/tmp")
    res = template_content
    # Bug fix: opath must be bound before the try block; otherwise a
    # failing mkstemp left it undefined and the except/finally clauses
    # raised NameError instead of the real error.
    opath = None
    try:
        (ofd, opath) = tempfile.mkstemp(prefix="at-tenjin-tmpl-",
                                        dir=tmpdir)
        os.write(ofd, template_content.encode(at_encoding))
        os.close(ofd)
        res = self.render_impl(opath, context, **kwargs)
    except (IOError, OSError) as exc:
        LOGGER.error("Failed to render from tempral template: %s"
                     " [exc=%r]", opath, exc)
        raise
    finally:
        if opath is not None:
            try:
                os.remove(opath)
                os.removedirs(os.path.dirname(opath))
            except (IOError, OSError):
                pass
    return res
constant[
Render given template string and return the result.
:param template_content: Template content
:param context: A dict or dict-like object to instantiate given
template file
:param at_encoding: Template encoding
:param kwargs: Keyword arguments such as:
- at_paths: Template search paths
- Other keyword arguments passed to the template engine to render
templates with specific features enabled.
:return: Rendered string
]
variable[tmpdir] assign[=] call[name[os].environ.get, parameter[constant[TMPDIR], constant[/tmp]]]
variable[res] assign[=] name[template_content]
<ast.Try object at 0x7da207f9b8b0>
return[name[res]] | keyword[def] identifier[renders_impl] ( identifier[self] , identifier[template_content] , identifier[context] ,
identifier[at_encoding] = identifier[anytemplate] . identifier[compat] . identifier[ENCODING] ,** identifier[kwargs] ):
literal[string]
identifier[tmpdir] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[string] )
identifier[res] = identifier[template_content]
keyword[try] :
( identifier[ofd] , identifier[opath] )= identifier[tempfile] . identifier[mkstemp] ( identifier[prefix] = literal[string] ,
identifier[dir] = identifier[tmpdir] )
identifier[os] . identifier[write] ( identifier[ofd] , identifier[template_content] . identifier[encode] ( identifier[at_encoding] ))
identifier[os] . identifier[close] ( identifier[ofd] )
identifier[res] = identifier[self] . identifier[render_impl] ( identifier[opath] , identifier[context] ,** identifier[kwargs] )
keyword[except] ( identifier[IOError] , identifier[OSError] ) keyword[as] identifier[exc] :
identifier[LOGGER] . identifier[error] ( literal[string]
literal[string] , identifier[opath] , identifier[exc] )
keyword[raise]
keyword[finally] :
keyword[try] :
identifier[os] . identifier[remove] ( identifier[opath] )
identifier[os] . identifier[removedirs] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[opath] ))
keyword[except] ( identifier[IOError] , identifier[OSError] ):
keyword[pass]
keyword[return] identifier[res] | def renders_impl(self, template_content, context, at_encoding=anytemplate.compat.ENCODING, **kwargs):
"""
Render given template string and return the result.
:param template_content: Template content
:param context: A dict or dict-like object to instantiate given
template file
:param at_encoding: Template encoding
:param kwargs: Keyword arguments such as:
- at_paths: Template search paths
- Other keyword arguments passed to the template engine to render
templates with specific features enabled.
:return: Rendered string
"""
tmpdir = os.environ.get('TMPDIR', '/tmp')
res = template_content
try:
(ofd, opath) = tempfile.mkstemp(prefix='at-tenjin-tmpl-', dir=tmpdir)
os.write(ofd, template_content.encode(at_encoding))
os.close(ofd)
res = self.render_impl(opath, context, **kwargs) # depends on [control=['try'], data=[]]
except (IOError, OSError) as exc:
LOGGER.error('Failed to render from tempral template: %s [exc=%r]', opath, exc)
raise # depends on [control=['except'], data=['exc']]
finally:
try:
os.remove(opath)
os.removedirs(os.path.dirname(opath)) # depends on [control=['try'], data=[]]
except (IOError, OSError):
pass # depends on [control=['except'], data=[]]
return res |
def unkown_field(self, value=None):
    """Corresponds to IDD Field `unkown_field` Empty field in data.
    Args:
        value (str): value for IDD Field `unkown_field`
            if `value` is None it will not be checked against the
            specification and is assumed to be a missing value
    Raises:
        ValueError: if `value` is not a valid value
    """
    # Guard clause: a missing value is stored as-is without validation.
    if value is None:
        self._unkown_field = None
        return
    try:
        converted = str(value)
    except ValueError:
        raise ValueError('value {} need to be of type str '
                         'for field `unkown_field`'.format(value))
    if ',' in converted:
        raise ValueError('value should not contain a comma '
                         'for field `unkown_field`')
    self._unkown_field = converted
constant[Corresponds to IDD Field `unkown_field` Empty field in data.
Args:
value (str): value for IDD Field `unkown_field`
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
]
if compare[name[value] is_not constant[None]] begin[:]
<ast.Try object at 0x7da18c4cce80>
if compare[constant[,] in name[value]] begin[:]
<ast.Raise object at 0x7da18c4ce290>
name[self]._unkown_field assign[=] name[value] | keyword[def] identifier[unkown_field] ( identifier[self] , identifier[value] = keyword[None] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[value] = identifier[str] ( identifier[value] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[value] ))
keyword[if] literal[string] keyword[in] identifier[value] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[self] . identifier[_unkown_field] = identifier[value] | def unkown_field(self, value=None):
"""Corresponds to IDD Field `unkown_field` Empty field in data.
Args:
value (str): value for IDD Field `unkown_field`
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
"""
if value is not None:
try:
value = str(value) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('value {} need to be of type str for field `unkown_field`'.format(value)) # depends on [control=['except'], data=[]]
if ',' in value:
raise ValueError('value should not contain a comma for field `unkown_field`') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['value']]
self._unkown_field = value |
def _sumLists(a, b):
"""
Algorithm to check validity of NBI and NIF.
Receives string with a umber to validate.
"""
val = 0
for i in map(lambda a, b: a * b, a, b):
val += i
return val | def function[_sumLists, parameter[a, b]]:
constant[
Algorithm to check validity of NBI and NIF.
Receives string with a umber to validate.
]
variable[val] assign[=] constant[0]
for taget[name[i]] in starred[call[name[map], parameter[<ast.Lambda object at 0x7da18eb55ae0>, name[a], name[b]]]] begin[:]
<ast.AugAssign object at 0x7da2047eb070>
return[name[val]] | keyword[def] identifier[_sumLists] ( identifier[a] , identifier[b] ):
literal[string]
identifier[val] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[map] ( keyword[lambda] identifier[a] , identifier[b] : identifier[a] * identifier[b] , identifier[a] , identifier[b] ):
identifier[val] += identifier[i]
keyword[return] identifier[val] | def _sumLists(a, b):
"""
Algorithm to check validity of NBI and NIF.
Receives string with a umber to validate.
"""
val = 0
for i in map(lambda a, b: a * b, a, b):
val += i # depends on [control=['for'], data=['i']]
return val |
def do_debug(self, arg):
    """debug code
    Enter a recursive debugger that steps through the code
    argument (which is an arbitrary expression or statement to be
    executed in the current environment).
    """
    # Suspend this debugger's own tracing so the nested Pdb takes over.
    self.settrace(False)
    # Evaluate the argument in the currently-inspected frame's scope.
    globals = self.curframe.f_globals
    locals = self.get_locals(self.curframe)
    # The nested debugger shares our I/O streams and completion key.
    p = Pdb(self.completekey, self.stdin, self.stdout, debug=True)
    # Wrap the prompt in parentheses so nesting depth is visible.
    p.prompt = "(%s) " % self.prompt.strip()
    self.message("ENTERING RECURSIVE DEBUGGER")
    # sys.call_tracing runs p.run with tracing enabled for its duration,
    # which is required for the nested debugger to get trace events.
    sys.call_tracing(p.run, (arg, globals, locals))
    self.message("LEAVING RECURSIVE DEBUGGER")
    self.settrace(True)
    # Carry over the nested session's last command (cmd.Cmd repeats
    # lastcmd on empty input).
    self.lastcmd = p.lastcmd
constant[debug code
Enter a recursive debugger that steps through the code
argument (which is an arbitrary expression or statement to be
executed in the current environment).
]
call[name[self].settrace, parameter[constant[False]]]
variable[globals] assign[=] name[self].curframe.f_globals
variable[locals] assign[=] call[name[self].get_locals, parameter[name[self].curframe]]
variable[p] assign[=] call[name[Pdb], parameter[name[self].completekey, name[self].stdin, name[self].stdout]]
name[p].prompt assign[=] binary_operation[constant[(%s) ] <ast.Mod object at 0x7da2590d6920> call[name[self].prompt.strip, parameter[]]]
call[name[self].message, parameter[constant[ENTERING RECURSIVE DEBUGGER]]]
call[name[sys].call_tracing, parameter[name[p].run, tuple[[<ast.Name object at 0x7da1b0e91060>, <ast.Name object at 0x7da1b0e92aa0>, <ast.Name object at 0x7da1b0e915d0>]]]]
call[name[self].message, parameter[constant[LEAVING RECURSIVE DEBUGGER]]]
call[name[self].settrace, parameter[constant[True]]]
name[self].lastcmd assign[=] name[p].lastcmd | keyword[def] identifier[do_debug] ( identifier[self] , identifier[arg] ):
literal[string]
identifier[self] . identifier[settrace] ( keyword[False] )
identifier[globals] = identifier[self] . identifier[curframe] . identifier[f_globals]
identifier[locals] = identifier[self] . identifier[get_locals] ( identifier[self] . identifier[curframe] )
identifier[p] = identifier[Pdb] ( identifier[self] . identifier[completekey] , identifier[self] . identifier[stdin] , identifier[self] . identifier[stdout] , identifier[debug] = keyword[True] )
identifier[p] . identifier[prompt] = literal[string] % identifier[self] . identifier[prompt] . identifier[strip] ()
identifier[self] . identifier[message] ( literal[string] )
identifier[sys] . identifier[call_tracing] ( identifier[p] . identifier[run] ,( identifier[arg] , identifier[globals] , identifier[locals] ))
identifier[self] . identifier[message] ( literal[string] )
identifier[self] . identifier[settrace] ( keyword[True] )
identifier[self] . identifier[lastcmd] = identifier[p] . identifier[lastcmd] | def do_debug(self, arg):
"""debug code
Enter a recursive debugger that steps through the code
argument (which is an arbitrary expression or statement to be
executed in the current environment).
"""
self.settrace(False)
globals = self.curframe.f_globals
locals = self.get_locals(self.curframe)
p = Pdb(self.completekey, self.stdin, self.stdout, debug=True)
p.prompt = '(%s) ' % self.prompt.strip()
self.message('ENTERING RECURSIVE DEBUGGER')
sys.call_tracing(p.run, (arg, globals, locals))
self.message('LEAVING RECURSIVE DEBUGGER')
self.settrace(True)
self.lastcmd = p.lastcmd |
def members(self, is_manager=None):
    """
    Retrieve members of the scope.
    :param is_manager: (optional) set to True to return only Scope members that are also managers.
    :type is_manager: bool
    :return: List of members (usernames)
    Examples
    --------
    >>> members = project.members()
    >>> managers = project.members(is_manager=True)
    """
    # Consistency/robustness fix: the original indexed member['is_active']
    # in the first branch (KeyError on entries missing the key) but used
    # .get() in the second; use .get() uniformly.
    active = [member for member in self._json_data['members']
              if member.get('is_active', False)]
    if not is_manager:
        return active
    return [member for member in active if member.get('is_manager', False)]
constant[
Retrieve members of the scope.
:param is_manager: (optional) set to True to return only Scope members that are also managers.
:type is_manager: bool
:return: List of members (usernames)
Examples
--------
>>> members = project.members()
>>> managers = project.members(is_manager=True)
]
if <ast.UnaryOp object at 0x7da204620b20> begin[:]
return[<ast.ListComp object at 0x7da204622cb0>] | keyword[def] identifier[members] ( identifier[self] , identifier[is_manager] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[is_manager] :
keyword[return] [ identifier[member] keyword[for] identifier[member] keyword[in] identifier[self] . identifier[_json_data] [ literal[string] ] keyword[if] identifier[member] [ literal[string] ]]
keyword[else] :
keyword[return] [ identifier[member] keyword[for] identifier[member] keyword[in] identifier[self] . identifier[_json_data] [ literal[string] ] keyword[if]
identifier[member] . identifier[get] ( literal[string] , keyword[False] ) keyword[and] identifier[member] . identifier[get] ( literal[string] , keyword[False] )] | def members(self, is_manager=None):
"""
Retrieve members of the scope.
:param is_manager: (optional) set to True to return only Scope members that are also managers.
:type is_manager: bool
:return: List of members (usernames)
Examples
--------
>>> members = project.members()
>>> managers = project.members(is_manager=True)
"""
if not is_manager:
return [member for member in self._json_data['members'] if member['is_active']] # depends on [control=['if'], data=[]]
else:
return [member for member in self._json_data['members'] if member.get('is_active', False) and member.get('is_manager', False)] |
def add_headerReference(self, type_, rId):
    """Return newly added CT_HdrFtrRef element of *type_* with *rId*.
    The element tag is `w:headerReference`.
    """
    ref = self._add_headerReference()
    ref.type_ = type_
    ref.rId = rId
    return ref
constant[Return newly added CT_HdrFtrRef element of *type_* with *rId*.
The element tag is `w:headerReference`.
]
variable[headerReference] assign[=] call[name[self]._add_headerReference, parameter[]]
name[headerReference].type_ assign[=] name[type_]
name[headerReference].rId assign[=] name[rId]
return[name[headerReference]] | keyword[def] identifier[add_headerReference] ( identifier[self] , identifier[type_] , identifier[rId] ):
literal[string]
identifier[headerReference] = identifier[self] . identifier[_add_headerReference] ()
identifier[headerReference] . identifier[type_] = identifier[type_]
identifier[headerReference] . identifier[rId] = identifier[rId]
keyword[return] identifier[headerReference] | def add_headerReference(self, type_, rId):
"""Return newly added CT_HdrFtrRef element of *type_* with *rId*.
The element tag is `w:headerReference`.
"""
headerReference = self._add_headerReference()
headerReference.type_ = type_
headerReference.rId = rId
return headerReference |
def _map_center(self, coord, val):
''' Identitify the center of the Image correspond to one coordinate. '''
if self.ppd in [4, 8, 16, 32, 64]:
res = {'lat': 0, 'long': 360}
return res[coord] / 2.0
elif self.ppd in [128]:
res = {'lat': 90, 'long': 90}
return (val // res[coord] + 1) * res[coord] - res[coord] / 2.0
elif self.ppd in [256]:
res = {'lat': 60, 'long': 90}
return (val // res[coord] + 1) * res[coord] - res[coord] / 2.0 | def function[_map_center, parameter[self, coord, val]]:
constant[ Identitify the center of the Image correspond to one coordinate. ]
if compare[name[self].ppd in list[[<ast.Constant object at 0x7da2045662c0>, <ast.Constant object at 0x7da204565c60>, <ast.Constant object at 0x7da204564790>, <ast.Constant object at 0x7da2045664a0>, <ast.Constant object at 0x7da204564f70>]]] begin[:]
variable[res] assign[=] dictionary[[<ast.Constant object at 0x7da204565900>, <ast.Constant object at 0x7da2045640a0>], [<ast.Constant object at 0x7da204565ba0>, <ast.Constant object at 0x7da204566560>]]
return[binary_operation[call[name[res]][name[coord]] / constant[2.0]]] | keyword[def] identifier[_map_center] ( identifier[self] , identifier[coord] , identifier[val] ):
literal[string]
keyword[if] identifier[self] . identifier[ppd] keyword[in] [ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]:
identifier[res] ={ literal[string] : literal[int] , literal[string] : literal[int] }
keyword[return] identifier[res] [ identifier[coord] ]/ literal[int]
keyword[elif] identifier[self] . identifier[ppd] keyword[in] [ literal[int] ]:
identifier[res] ={ literal[string] : literal[int] , literal[string] : literal[int] }
keyword[return] ( identifier[val] // identifier[res] [ identifier[coord] ]+ literal[int] )* identifier[res] [ identifier[coord] ]- identifier[res] [ identifier[coord] ]/ literal[int]
keyword[elif] identifier[self] . identifier[ppd] keyword[in] [ literal[int] ]:
identifier[res] ={ literal[string] : literal[int] , literal[string] : literal[int] }
keyword[return] ( identifier[val] // identifier[res] [ identifier[coord] ]+ literal[int] )* identifier[res] [ identifier[coord] ]- identifier[res] [ identifier[coord] ]/ literal[int] | def _map_center(self, coord, val):
""" Identitify the center of the Image correspond to one coordinate. """
if self.ppd in [4, 8, 16, 32, 64]:
res = {'lat': 0, 'long': 360}
return res[coord] / 2.0 # depends on [control=['if'], data=[]]
elif self.ppd in [128]:
res = {'lat': 90, 'long': 90}
return (val // res[coord] + 1) * res[coord] - res[coord] / 2.0 # depends on [control=['if'], data=[]]
elif self.ppd in [256]:
res = {'lat': 60, 'long': 90}
return (val // res[coord] + 1) * res[coord] - res[coord] / 2.0 # depends on [control=['if'], data=[]] |
def getSessionFromFile(self, file, verbose=None):
    """
    Loads a session from a local file and returns the session file name
    :param file: Session file location as an absolute path
    :param verbose: print more
    :returns: 200: successful operation
    """
    # GET /session with the file path as a query parameter.
    # parse_params=False presumably makes the `api` helper forward PARAMS
    # verbatim rather than re-encoding them — confirm in that helper.
    response=api(url=self.___url+'session', PARAMS={'file':file}, method="GET", verbose=verbose, parse_params=False)
    return response
constant[
Loads a session from a local file and returns the session file name
:param file: Session file location as an absolute path
:param verbose: print more
:returns: 200: successful operation
]
variable[response] assign[=] call[name[api], parameter[]]
return[name[response]] | keyword[def] identifier[getSessionFromFile] ( identifier[self] , identifier[file] , identifier[verbose] = keyword[None] ):
literal[string]
identifier[response] = identifier[api] ( identifier[url] = identifier[self] . identifier[___url] + literal[string] , identifier[PARAMS] ={ literal[string] : identifier[file] }, identifier[method] = literal[string] , identifier[verbose] = identifier[verbose] , identifier[parse_params] = keyword[False] )
keyword[return] identifier[response] | def getSessionFromFile(self, file, verbose=None):
"""
Loads a session from a local file and returns the session file name
:param file: Session file location as an absolute path
:param verbose: print more
:returns: 200: successful operation
"""
response = api(url=self.___url + 'session', PARAMS={'file': file}, method='GET', verbose=verbose, parse_params=False)
return response |
def stepceil_with_units(param, step, unit):
    """Return the smallest multiple of ``step`` (expressed in ``unit``)
    that is greater than or equal to ``param``.
    This function is unit-aware and works without explicit translation
    so long as ``param`` and ``unit`` are of the same dimensionality;
    the result carries Pint units of ``unit``.
    """
    # Hoist the loop-invariant unit conversion: the original called
    # param.to(unit) on every iteration of the while loop.
    target = param.to(unit)
    counter = 0 * unit
    while counter < target:
        counter += step * unit
    return counter
constant[This function returns the smallest multiple of 'step' greater than or
equal to 'param' and outputs the result in Pint units.
This function is unit-aware and functions without requiring translation
so long as 'param' and 'unit' are of the same dimensionality.
]
variable[counter] assign[=] binary_operation[constant[0] * name[unit]]
while compare[name[counter] less[<] call[name[param].to, parameter[name[unit]]]] begin[:]
<ast.AugAssign object at 0x7da1b06ffc10>
return[name[counter]] | keyword[def] identifier[stepceil_with_units] ( identifier[param] , identifier[step] , identifier[unit] ):
literal[string]
identifier[counter] = literal[int] * identifier[unit]
keyword[while] identifier[counter] < identifier[param] . identifier[to] ( identifier[unit] ):
identifier[counter] += identifier[step] * identifier[unit]
keyword[return] identifier[counter] | def stepceil_with_units(param, step, unit):
"""This function returns the smallest multiple of 'step' greater than or
equal to 'param' and outputs the result in Pint units.
This function is unit-aware and functions without requiring translation
so long as 'param' and 'unit' are of the same dimensionality.
"""
counter = 0 * unit
while counter < param.to(unit):
counter += step * unit # depends on [control=['while'], data=['counter']]
return counter |
def nl_socket_modify_err_cb(sk, kind, func, arg):
    """Modify the error callback handler associated with the socket.
    https://github.com/thom311/libnl/blob/libnl3_2_25/lib/socket.c#L649
    Positional arguments:
    sk -- Netlink socket (nl_sock class instance).
    kind -- kind of callback (integer).
    func -- callback function.
    arg -- argument to be passed to callback function.
    Returns:
    0 on success or a negative error code.
    """
    outcome = nl_cb_err(sk.s_cb, kind, func, arg)
    return int(outcome)
constant[Modify the error callback handler associated with the socket.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/socket.c#L649
Positional arguments:
sk -- Netlink socket (nl_sock class instance).
kind -- kind of callback (integer).
func -- callback function.
arg -- argument to be passed to callback function.
Returns:
0 on success or a negative error code.
]
return[call[name[int], parameter[call[name[nl_cb_err], parameter[name[sk].s_cb, name[kind], name[func], name[arg]]]]]] | keyword[def] identifier[nl_socket_modify_err_cb] ( identifier[sk] , identifier[kind] , identifier[func] , identifier[arg] ):
literal[string]
keyword[return] identifier[int] ( identifier[nl_cb_err] ( identifier[sk] . identifier[s_cb] , identifier[kind] , identifier[func] , identifier[arg] )) | def nl_socket_modify_err_cb(sk, kind, func, arg):
"""Modify the error callback handler associated with the socket.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/socket.c#L649
Positional arguments:
sk -- Netlink socket (nl_sock class instance).
kind -- kind of callback (integer).
func -- callback function.
arg -- argument to be passed to callback function.
Returns:
0 on success or a negative error code.
"""
return int(nl_cb_err(sk.s_cb, kind, func, arg)) |
def add_group(self, group_name, payment_address):
    """ Return new group_id in base64 """
    # Refuse to mint a second identifier for an already-known group name.
    if self.is_group_name_exists(group_name):
        raise Exception("the group \"%s\" is already present" % str(group_name))
    # 32 cryptographically random bytes, base64-encoded (bytes object).
    new_id = base64.b64encode(secrets.token_bytes(32))
    record = {
        "group_name": group_name,
        "group_id": new_id.decode("ascii"),
        "payment_address": payment_address,
    }
    self.m["groups"].append(record)
    return new_id
constant[ Return new group_id in base64 ]
if call[name[self].is_group_name_exists, parameter[name[group_name]]] begin[:]
<ast.Raise object at 0x7da1b153e3b0>
variable[group_id_base64] assign[=] call[name[base64].b64encode, parameter[call[name[secrets].token_bytes, parameter[constant[32]]]]]
<ast.AugAssign object at 0x7da1b153cb80>
return[name[group_id_base64]] | keyword[def] identifier[add_group] ( identifier[self] , identifier[group_name] , identifier[payment_address] ):
literal[string]
keyword[if] ( identifier[self] . identifier[is_group_name_exists] ( identifier[group_name] )):
keyword[raise] identifier[Exception] ( literal[string] % identifier[str] ( identifier[group_name] ))
identifier[group_id_base64] = identifier[base64] . identifier[b64encode] ( identifier[secrets] . identifier[token_bytes] ( literal[int] ))
identifier[self] . identifier[m] [ literal[string] ]+=[{ literal[string] : identifier[group_name] ,
literal[string] : identifier[group_id_base64] . identifier[decode] ( literal[string] ),
literal[string] : identifier[payment_address] }]
keyword[return] identifier[group_id_base64] | def add_group(self, group_name, payment_address):
""" Return new group_id in base64 """
if self.is_group_name_exists(group_name):
raise Exception('the group "%s" is already present' % str(group_name)) # depends on [control=['if'], data=[]]
group_id_base64 = base64.b64encode(secrets.token_bytes(32))
self.m['groups'] += [{'group_name': group_name, 'group_id': group_id_base64.decode('ascii'), 'payment_address': payment_address}]
return group_id_base64 |
def write_binary(filename, data):
    """Write *data* to *filename*, creating missing parent directories.

    Parameters:
    filename -- destination path; any missing parent directories are created.
    data -- bytes-like object written verbatim.
    """
    parent = os.path.dirname(filename)
    # Guard against '' (a bare filename with no directory part): the old
    # `if not exists: makedirs` raised on makedirs('') and was also racy
    # when two writers created the directory concurrently; exist_ok=True
    # makes the create idempotent.
    if parent:
        os.makedirs(parent, exist_ok=True)
    with open(filename, 'wb') as f:
        f.write(data)
constant[Create path to filename and saves binary data]
variable[dir] assign[=] call[name[os].path.dirname, parameter[name[filename]]]
if <ast.UnaryOp object at 0x7da1b23b1690> begin[:]
call[name[os].makedirs, parameter[name[dir]]]
with call[name[open], parameter[name[filename], constant[wb]]] begin[:]
call[name[f].write, parameter[name[data]]] | keyword[def] identifier[write_binary] ( identifier[filename] , identifier[data] ):
literal[string]
identifier[dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[filename] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[dir] ):
identifier[os] . identifier[makedirs] ( identifier[dir] )
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[data] ) | def write_binary(filename, data):
"""Create path to filename and saves binary data"""
dir = os.path.dirname(filename)
if not os.path.exists(dir):
os.makedirs(dir) # depends on [control=['if'], data=[]]
with open(filename, 'wb') as f:
f.write(data) # depends on [control=['with'], data=['f']] |
def create_revlookup_query(self, *fulltext_searchterms, **keyvalue_searchterms):
    '''
    Build the query-string part of a reverse-lookup request (everything
    after the question mark), e.g. ?URL=*dkrz*&CHECKSUM=*abc*.

    When a list of allowed search keys is configured, only those keys may
    be used; otherwise every key-value pair is passed on to the reverse
    lookup servlet unchanged.

    :param fulltext_searchterms: Optional. Full-text search terms. Not
        implemented yet; supplying any raises ReverseLookupException.
    :param keyvalue_searchterms: Optional. Key-value pairs; each value is
        searched for in the field named by its key. Wildcards are accepted
        (see the reverse lookup servlet documentation for syntax).
    :raise ReverseLookupException: on full-text terms, on an empty search,
        or on a key outside the allowed set.
    :return: The query string, after the "?".
    '''
    LOGGER.debug('create_revlookup_query...')
    allowed_search_keys = self.__allowed_search_keys
    restrict_to_allowed = len(allowed_search_keys) > 0
    # None entries are dropped before deciding whether anything was given.
    fulltext_searchterms = b2handle.util.remove_value_none_from_list(fulltext_searchterms)
    if len(fulltext_searchterms) > 0:
        raise ReverseLookupException(msg=(
            'Full-text search is not implemented yet.' +
            ' The provided searchterms ' + str(fulltext_searchterms) +
            ' can not be used.'))
    keyvalue_searchterms = b2handle.util.remove_value_none_from_dict(keyvalue_searchterms)
    # Full-text terms would already have raised above, so an empty
    # key-value dict means no usable search terms at all.
    if len(keyvalue_searchterms) == 0:
        raise ReverseLookupException(msg=(
            'No search terms have been specified. Please specify' +
            ' at least one key-value-pair.'))
    counter = 0
    query = '?'
    for key, value in keyvalue_searchterms.items():
        if restrict_to_allowed and key not in allowed_search_keys:
            raise ReverseLookupException(msg=(
                'Cannot search for key "' + key + '". Only searches ' +
                'for keys ' + str(allowed_search_keys) + ' are implemented.'))
        query = query + '&' + key + '=' + value
        counter += 1
    # The first pair was appended with '&'; collapse '?&' back to '?'.
    query = query.replace('?&', '?')
    LOGGER.debug('create_revlookup_query: query: ' + query)
    if counter == 0:  # defensive; an empty dict already raised above
        raise ReverseLookupException(msg='No valid search terms have been specified.')
    return query
constant[
Create the part of the solr request that comes after the question mark,
e.g. ?URL=*dkrz*&CHECKSUM=*abc*. If allowed search keys are
configured, only these are used. If no'allowed search keys are
specified, all key-value pairs are passed on to the reverse lookup
servlet.
:param fulltext_searchterms: Optional. Any term specified will be used
as search term. Not implemented yet, so will be ignored.
:param keyvalue_searchterms: Optional. Key-value pairs. Any key-value
pair will be used to search for the value in the field "key".
Wildcards accepted (refer to the documentation of the reverse
lookup servlet for syntax.)
:return: The query string, after the "?". If no valid search terms were
specified, None is returned.
]
call[name[LOGGER].debug, parameter[constant[create_revlookup_query...]]]
variable[allowed_search_keys] assign[=] name[self].__allowed_search_keys
variable[only_search_for_allowed_keys] assign[=] constant[False]
if compare[call[name[len], parameter[name[allowed_search_keys]]] greater[>] constant[0]] begin[:]
variable[only_search_for_allowed_keys] assign[=] constant[True]
variable[fulltext_searchterms_given] assign[=] constant[True]
variable[fulltext_searchterms] assign[=] call[name[b2handle].util.remove_value_none_from_list, parameter[name[fulltext_searchterms]]]
if compare[call[name[len], parameter[name[fulltext_searchterms]]] equal[==] constant[0]] begin[:]
variable[fulltext_searchterms_given] assign[=] constant[False]
if name[fulltext_searchterms_given] begin[:]
variable[msg] assign[=] binary_operation[binary_operation[binary_operation[constant[Full-text search is not implemented yet.] + constant[ The provided searchterms ]] + call[name[str], parameter[name[fulltext_searchterms]]]] + constant[ can not be used.]]
<ast.Raise object at 0x7da1b0ff0f10>
variable[keyvalue_searchterms_given] assign[=] constant[True]
variable[keyvalue_searchterms] assign[=] call[name[b2handle].util.remove_value_none_from_dict, parameter[name[keyvalue_searchterms]]]
if compare[call[name[len], parameter[name[keyvalue_searchterms]]] equal[==] constant[0]] begin[:]
variable[keyvalue_searchterms_given] assign[=] constant[False]
if <ast.BoolOp object at 0x7da1b0ff0b20> begin[:]
variable[msg] assign[=] binary_operation[constant[No search terms have been specified. Please specify] + constant[ at least one key-value-pair.]]
<ast.Raise object at 0x7da1b0d1ec20>
variable[counter] assign[=] constant[0]
variable[query] assign[=] constant[?]
for taget[tuple[[<ast.Name object at 0x7da1b0d1f790>, <ast.Name object at 0x7da1b0d1c6a0>]]] in starred[call[name[keyvalue_searchterms].items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b0d1e860> begin[:]
variable[msg] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[Cannot search for key "] + name[key]] + constant[". Only searches ]] + constant[for keys ]] + call[name[str], parameter[name[allowed_search_keys]]]] + constant[ are implemented.]]
<ast.Raise object at 0x7da1b0d1d3c0>
variable[query] assign[=] call[name[query].replace, parameter[constant[?&], constant[?]]]
call[name[LOGGER].debug, parameter[binary_operation[constant[create_revlookup_query: query: ] + name[query]]]]
if compare[name[counter] equal[==] constant[0]] begin[:]
variable[msg] assign[=] constant[No valid search terms have been specified.]
<ast.Raise object at 0x7da1b0d1c1f0>
return[name[query]] | keyword[def] identifier[create_revlookup_query] ( identifier[self] ,* identifier[fulltext_searchterms] ,** identifier[keyvalue_searchterms] ):
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
identifier[allowed_search_keys] = identifier[self] . identifier[__allowed_search_keys]
identifier[only_search_for_allowed_keys] = keyword[False]
keyword[if] identifier[len] ( identifier[allowed_search_keys] )> literal[int] :
identifier[only_search_for_allowed_keys] = keyword[True]
identifier[fulltext_searchterms_given] = keyword[True]
identifier[fulltext_searchterms] = identifier[b2handle] . identifier[util] . identifier[remove_value_none_from_list] ( identifier[fulltext_searchterms] )
keyword[if] identifier[len] ( identifier[fulltext_searchterms] )== literal[int] :
identifier[fulltext_searchterms_given] = keyword[False]
keyword[if] identifier[fulltext_searchterms_given] :
identifier[msg] = literal[string] + literal[string] + identifier[str] ( identifier[fulltext_searchterms] )+ literal[string]
keyword[raise] identifier[ReverseLookupException] ( identifier[msg] = identifier[msg] )
identifier[keyvalue_searchterms_given] = keyword[True]
identifier[keyvalue_searchterms] = identifier[b2handle] . identifier[util] . identifier[remove_value_none_from_dict] ( identifier[keyvalue_searchterms] )
keyword[if] identifier[len] ( identifier[keyvalue_searchterms] )== literal[int] :
identifier[keyvalue_searchterms_given] = keyword[False]
keyword[if] keyword[not] identifier[keyvalue_searchterms_given] keyword[and] keyword[not] identifier[fulltext_searchterms_given] :
identifier[msg] = literal[string] + literal[string]
keyword[raise] identifier[ReverseLookupException] ( identifier[msg] = identifier[msg] )
identifier[counter] = literal[int]
identifier[query] = literal[string]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[keyvalue_searchterms] . identifier[items] ():
keyword[if] identifier[only_search_for_allowed_keys] keyword[and] identifier[key] keyword[not] keyword[in] identifier[allowed_search_keys] :
identifier[msg] = literal[string] + identifier[key] + literal[string] + literal[string] + identifier[str] ( identifier[allowed_search_keys] )+ literal[string]
keyword[raise] identifier[ReverseLookupException] ( identifier[msg] = identifier[msg] )
keyword[else] :
identifier[query] = identifier[query] + literal[string] + identifier[key] + literal[string] + identifier[value]
identifier[counter] += literal[int]
identifier[query] = identifier[query] . identifier[replace] ( literal[string] , literal[string] )
identifier[LOGGER] . identifier[debug] ( literal[string] + identifier[query] )
keyword[if] identifier[counter] == literal[int] :
identifier[msg] = literal[string]
keyword[raise] identifier[ReverseLookupException] ( identifier[msg] = identifier[msg] )
keyword[return] identifier[query] | def create_revlookup_query(self, *fulltext_searchterms, **keyvalue_searchterms):
"""
Create the part of the solr request that comes after the question mark,
e.g. ?URL=*dkrz*&CHECKSUM=*abc*. If allowed search keys are
configured, only these are used. If no'allowed search keys are
specified, all key-value pairs are passed on to the reverse lookup
servlet.
:param fulltext_searchterms: Optional. Any term specified will be used
as search term. Not implemented yet, so will be ignored.
:param keyvalue_searchterms: Optional. Key-value pairs. Any key-value
pair will be used to search for the value in the field "key".
Wildcards accepted (refer to the documentation of the reverse
lookup servlet for syntax.)
:return: The query string, after the "?". If no valid search terms were
specified, None is returned.
"""
LOGGER.debug('create_revlookup_query...')
allowed_search_keys = self.__allowed_search_keys
only_search_for_allowed_keys = False
if len(allowed_search_keys) > 0:
only_search_for_allowed_keys = True # depends on [control=['if'], data=[]]
fulltext_searchterms_given = True
fulltext_searchterms = b2handle.util.remove_value_none_from_list(fulltext_searchterms)
if len(fulltext_searchterms) == 0:
fulltext_searchterms_given = False # depends on [control=['if'], data=[]]
if fulltext_searchterms_given:
msg = 'Full-text search is not implemented yet.' + ' The provided searchterms ' + str(fulltext_searchterms) + ' can not be used.'
raise ReverseLookupException(msg=msg) # depends on [control=['if'], data=[]]
keyvalue_searchterms_given = True
keyvalue_searchterms = b2handle.util.remove_value_none_from_dict(keyvalue_searchterms)
if len(keyvalue_searchterms) == 0:
keyvalue_searchterms_given = False # depends on [control=['if'], data=[]]
if not keyvalue_searchterms_given and (not fulltext_searchterms_given):
msg = 'No search terms have been specified. Please specify' + ' at least one key-value-pair.'
raise ReverseLookupException(msg=msg) # depends on [control=['if'], data=[]]
counter = 0
query = '?'
for (key, value) in keyvalue_searchterms.items():
if only_search_for_allowed_keys and key not in allowed_search_keys:
msg = 'Cannot search for key "' + key + '". Only searches ' + 'for keys ' + str(allowed_search_keys) + ' are implemented.'
raise ReverseLookupException(msg=msg) # depends on [control=['if'], data=[]]
else:
query = query + '&' + key + '=' + value
counter += 1 # depends on [control=['for'], data=[]]
query = query.replace('?&', '?')
LOGGER.debug('create_revlookup_query: query: ' + query)
if counter == 0: # unreachable?
msg = 'No valid search terms have been specified.'
raise ReverseLookupException(msg=msg) # depends on [control=['if'], data=[]]
return query |
def sync(self):
    """
    Refresh this company's fields from the Ariane server, looking it up
    by id when set (prioritary), otherwise by name. When neither is set,
    nothing is done; on a server-side error a warning is logged and the
    local fields are left unchanged.
    :return:
    """
    LOGGER.debug("Company.sync")
    # Prefer the id as lookup key; fall back to the name.
    if self.id is not None:
        lookup = {'id': self.id}
    elif self.name is not None:
        lookup = {'name': self.name}
    else:
        lookup = None
    if lookup is None:
        return
    request = {'http_operation': 'GET', 'operation_path': 'get', 'parameters': lookup}
    response = CompanyService.requester.call(request)
    if response.rc != 0:
        LOGGER.warning(
            'Company.sync - Problem while syncing company (name:' + self.name +
            ', id:' + str(self.id) + '). Reason: ' + str(response.response_content) +
            '-' + str(response.error_message) + " (" + str(response.rc) + ")"
        )
        return
    payload = response.response_content
    self.id = payload['companyID']
    self.name = payload['companyName']
    self.description = payload['companyDescription']
    self.applications_ids = payload['companyApplicationsID']
    self.ost_ids = payload['companyOSTypesID']
constant[
synchronize self from Ariane server according its id (prioritary) or name
:return:
]
call[name[LOGGER].debug, parameter[constant[Company.sync]]]
variable[params] assign[=] constant[None]
if compare[name[self].id is_not constant[None]] begin[:]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b1365150>], [<ast.Attribute object at 0x7da1b13650f0>]]
if compare[name[params] is_not constant[None]] begin[:]
variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da2054a7dc0>, <ast.Constant object at 0x7da2054a5b10>, <ast.Constant object at 0x7da2054a69b0>], [<ast.Constant object at 0x7da2054a6680>, <ast.Constant object at 0x7da2054a5090>, <ast.Name object at 0x7da2054a5c60>]]
variable[response] assign[=] call[name[CompanyService].requester.call, parameter[name[args]]]
if compare[name[response].rc not_equal[!=] constant[0]] begin[:]
call[name[LOGGER].warning, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[Company.sync - Problem while syncing company (name:] + name[self].name] + constant[, id:]] + call[name[str], parameter[name[self].id]]] + constant[). Reason: ]] + call[name[str], parameter[name[response].response_content]]] + constant[-]] + call[name[str], parameter[name[response].error_message]]] + constant[ (]] + call[name[str], parameter[name[response].rc]]] + constant[)]]]] | keyword[def] identifier[sync] ( identifier[self] ):
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
identifier[params] = keyword[None]
keyword[if] identifier[self] . identifier[id] keyword[is] keyword[not] keyword[None] :
identifier[params] ={ literal[string] : identifier[self] . identifier[id] }
keyword[elif] identifier[self] . identifier[name] keyword[is] keyword[not] keyword[None] :
identifier[params] ={ literal[string] : identifier[self] . identifier[name] }
keyword[if] identifier[params] keyword[is] keyword[not] keyword[None] :
identifier[args] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[params] }
identifier[response] = identifier[CompanyService] . identifier[requester] . identifier[call] ( identifier[args] )
keyword[if] identifier[response] . identifier[rc] != literal[int] :
identifier[LOGGER] . identifier[warning] (
literal[string] + identifier[self] . identifier[name] + literal[string] + identifier[str] ( identifier[self] . identifier[id] )+
literal[string] + identifier[str] ( identifier[response] . identifier[response_content] )+ literal[string] + identifier[str] ( identifier[response] . identifier[error_message] )+
literal[string] + identifier[str] ( identifier[response] . identifier[rc] )+ literal[string]
)
keyword[else] :
identifier[json_obj] = identifier[response] . identifier[response_content]
identifier[self] . identifier[id] = identifier[json_obj] [ literal[string] ]
identifier[self] . identifier[name] = identifier[json_obj] [ literal[string] ]
identifier[self] . identifier[description] = identifier[json_obj] [ literal[string] ]
identifier[self] . identifier[applications_ids] = identifier[json_obj] [ literal[string] ]
identifier[self] . identifier[ost_ids] = identifier[json_obj] [ literal[string] ] | def sync(self):
"""
synchronize self from Ariane server according its id (prioritary) or name
:return:
"""
LOGGER.debug('Company.sync')
params = None
if self.id is not None:
params = {'id': self.id} # depends on [control=['if'], data=[]]
elif self.name is not None:
params = {'name': self.name} # depends on [control=['if'], data=[]]
if params is not None:
args = {'http_operation': 'GET', 'operation_path': 'get', 'parameters': params}
response = CompanyService.requester.call(args)
if response.rc != 0:
LOGGER.warning('Company.sync - Problem while syncing company (name:' + self.name + ', id:' + str(self.id) + '). Reason: ' + str(response.response_content) + '-' + str(response.error_message) + ' (' + str(response.rc) + ')') # depends on [control=['if'], data=[]]
else:
json_obj = response.response_content
self.id = json_obj['companyID']
self.name = json_obj['companyName']
self.description = json_obj['companyDescription']
self.applications_ids = json_obj['companyApplicationsID']
self.ost_ids = json_obj['companyOSTypesID'] # depends on [control=['if'], data=['params']] |
def fit(self, **skip_gram_params):
    """
    Train and return a gensim Word2Vec model over the precomputed walks.

    :param skip_gram_params: Parameteres for gensim.models.Word2Vec - do not supply 'size' it is taken from the Node2Vec 'dimensions' parameter
    :type skip_gram_params: dict
    :return: A gensim word2vec model
    """
    # Fill in worker count and embedding size only when the caller did not.
    skip_gram_params.setdefault('workers', self.workers)
    skip_gram_params.setdefault('size', self.dimensions)
    return gensim.models.Word2Vec(self.walks, **skip_gram_params)
constant[
Creates the embeddings using gensim's Word2Vec.
:param skip_gram_params: Parameteres for gensim.models.Word2Vec - do not supply 'size' it is taken from the Node2Vec 'dimensions' parameter
:type skip_gram_params: dict
:return: A gensim word2vec model
]
if compare[constant[workers] <ast.NotIn object at 0x7da2590d7190> name[skip_gram_params]] begin[:]
call[name[skip_gram_params]][constant[workers]] assign[=] name[self].workers
if compare[constant[size] <ast.NotIn object at 0x7da2590d7190> name[skip_gram_params]] begin[:]
call[name[skip_gram_params]][constant[size]] assign[=] name[self].dimensions
return[call[name[gensim].models.Word2Vec, parameter[name[self].walks]]] | keyword[def] identifier[fit] ( identifier[self] ,** identifier[skip_gram_params] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[skip_gram_params] :
identifier[skip_gram_params] [ literal[string] ]= identifier[self] . identifier[workers]
keyword[if] literal[string] keyword[not] keyword[in] identifier[skip_gram_params] :
identifier[skip_gram_params] [ literal[string] ]= identifier[self] . identifier[dimensions]
keyword[return] identifier[gensim] . identifier[models] . identifier[Word2Vec] ( identifier[self] . identifier[walks] ,** identifier[skip_gram_params] ) | def fit(self, **skip_gram_params):
"""
Creates the embeddings using gensim's Word2Vec.
:param skip_gram_params: Parameteres for gensim.models.Word2Vec - do not supply 'size' it is taken from the Node2Vec 'dimensions' parameter
:type skip_gram_params: dict
:return: A gensim word2vec model
"""
if 'workers' not in skip_gram_params:
skip_gram_params['workers'] = self.workers # depends on [control=['if'], data=['skip_gram_params']]
if 'size' not in skip_gram_params:
skip_gram_params['size'] = self.dimensions # depends on [control=['if'], data=['skip_gram_params']]
return gensim.models.Word2Vec(self.walks, **skip_gram_params) |
def _interleave(self):
    """
    Materialize the blocks into one ndarray, laid out in item order.

    Every item must be covered by some block; otherwise an
    AssertionError is raised.
    """
    from pandas.core.dtypes.common import is_sparse
    dtype = _interleaved_dtype(self.blocks)
    # TODO: https://github.com/pandas-dev/pandas/issues/22791
    # Give EAs some input on what happens here. Sparse needs this.
    if is_sparse(dtype):
        dtype = dtype.subtype
    elif is_extension_array_dtype(dtype):
        dtype = 'object'
    out = np.empty(self.shape, dtype=dtype)
    covered = np.zeros(self.shape[0])
    for block in self.blocks:
        positions = block.mgr_locs.indexer
        out[positions] = block.get_values(dtype)
        covered[positions] = 1
    if not covered.all():
        raise AssertionError('Some items were not contained in blocks')
    return out
constant[
Return ndarray from blocks with specified item order
Items must be contained in the blocks
]
from relative_module[pandas.core.dtypes.common] import module[is_sparse]
variable[dtype] assign[=] call[name[_interleaved_dtype], parameter[name[self].blocks]]
if call[name[is_sparse], parameter[name[dtype]]] begin[:]
variable[dtype] assign[=] name[dtype].subtype
variable[result] assign[=] call[name[np].empty, parameter[name[self].shape]]
variable[itemmask] assign[=] call[name[np].zeros, parameter[call[name[self].shape][constant[0]]]]
for taget[name[blk]] in starred[name[self].blocks] begin[:]
variable[rl] assign[=] name[blk].mgr_locs
call[name[result]][name[rl].indexer] assign[=] call[name[blk].get_values, parameter[name[dtype]]]
call[name[itemmask]][name[rl].indexer] assign[=] constant[1]
if <ast.UnaryOp object at 0x7da1b2346410> begin[:]
<ast.Raise object at 0x7da1b2347bb0>
return[name[result]] | keyword[def] identifier[_interleave] ( identifier[self] ):
literal[string]
keyword[from] identifier[pandas] . identifier[core] . identifier[dtypes] . identifier[common] keyword[import] identifier[is_sparse]
identifier[dtype] = identifier[_interleaved_dtype] ( identifier[self] . identifier[blocks] )
keyword[if] identifier[is_sparse] ( identifier[dtype] ):
identifier[dtype] = identifier[dtype] . identifier[subtype]
keyword[elif] identifier[is_extension_array_dtype] ( identifier[dtype] ):
identifier[dtype] = literal[string]
identifier[result] = identifier[np] . identifier[empty] ( identifier[self] . identifier[shape] , identifier[dtype] = identifier[dtype] )
identifier[itemmask] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[shape] [ literal[int] ])
keyword[for] identifier[blk] keyword[in] identifier[self] . identifier[blocks] :
identifier[rl] = identifier[blk] . identifier[mgr_locs]
identifier[result] [ identifier[rl] . identifier[indexer] ]= identifier[blk] . identifier[get_values] ( identifier[dtype] )
identifier[itemmask] [ identifier[rl] . identifier[indexer] ]= literal[int]
keyword[if] keyword[not] identifier[itemmask] . identifier[all] ():
keyword[raise] identifier[AssertionError] ( literal[string] )
keyword[return] identifier[result] | def _interleave(self):
"""
Return ndarray from blocks with specified item order
Items must be contained in the blocks
"""
from pandas.core.dtypes.common import is_sparse
dtype = _interleaved_dtype(self.blocks)
# TODO: https://github.com/pandas-dev/pandas/issues/22791
# Give EAs some input on what happens here. Sparse needs this.
if is_sparse(dtype):
dtype = dtype.subtype # depends on [control=['if'], data=[]]
elif is_extension_array_dtype(dtype):
dtype = 'object' # depends on [control=['if'], data=[]]
result = np.empty(self.shape, dtype=dtype)
itemmask = np.zeros(self.shape[0])
for blk in self.blocks:
rl = blk.mgr_locs
result[rl.indexer] = blk.get_values(dtype)
itemmask[rl.indexer] = 1 # depends on [control=['for'], data=['blk']]
if not itemmask.all():
raise AssertionError('Some items were not contained in blocks') # depends on [control=['if'], data=[]]
return result |
def tupleize(self, contents):
    """
    Convert a file's text into a list of easy-to-process tuples, one per
    CPP line.

    Each tuple begins with the line's preprocessor directive name
    (#if, #include, #define, etc., without the leading '#'); the
    remaining elements are the directive-specific pieces produced by the
    matching regular expression in Table.
    """
    global CPP_Expression, Table
    # Join backslash-continued lines before scanning for directives.
    flattened = line_continuations.sub('', contents)
    tuples = []
    for match in CPP_Expression.findall(flattened):
        directive = match[0]
        tuples.append((directive,) + Table[directive].match(match[1]).groups())
    return tuples
constant[
Turns the contents of a file into a list of easily-processed
tuples describing the CPP lines in the file.
The first element of each tuple is the line's preprocessor
directive (#if, #include, #define, etc., minus the initial '#').
The remaining elements are specific to the type of directive, as
pulled apart by the regular expression.
]
<ast.Global object at 0x7da18f00c6a0>
variable[contents] assign[=] call[name[line_continuations].sub, parameter[constant[], name[contents]]]
variable[cpp_tuples] assign[=] call[name[CPP_Expression].findall, parameter[name[contents]]]
return[<ast.ListComp object at 0x7da18f00d810>] | keyword[def] identifier[tupleize] ( identifier[self] , identifier[contents] ):
literal[string]
keyword[global] identifier[CPP_Expression] , identifier[Table]
identifier[contents] = identifier[line_continuations] . identifier[sub] ( literal[string] , identifier[contents] )
identifier[cpp_tuples] = identifier[CPP_Expression] . identifier[findall] ( identifier[contents] )
keyword[return] [( identifier[m] [ literal[int] ],)+ identifier[Table] [ identifier[m] [ literal[int] ]]. identifier[match] ( identifier[m] [ literal[int] ]). identifier[groups] () keyword[for] identifier[m] keyword[in] identifier[cpp_tuples] ] | def tupleize(self, contents):
"""
Turns the contents of a file into a list of easily-processed
tuples describing the CPP lines in the file.
The first element of each tuple is the line's preprocessor
directive (#if, #include, #define, etc., minus the initial '#').
The remaining elements are specific to the type of directive, as
pulled apart by the regular expression.
"""
global CPP_Expression, Table
contents = line_continuations.sub('', contents)
cpp_tuples = CPP_Expression.findall(contents)
return [(m[0],) + Table[m[0]].match(m[1]).groups() for m in cpp_tuples] |
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
encoding='utf-8', default=None, **kw):
"""
Serialize ``obj`` to a JSON formatted ``str``.
If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If ``ensure_ascii`` is ``False``, then the return value will be a
``unicode`` instance subject to normal Python ``str`` to ``unicode``
coercion rules instead of being escaped to an ASCII ``str``.
If ``check_circular`` is ``False``, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is ``False``, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If ``indent`` is a non-negative integer, then JSON array elements and
object members will be pretty-printed with that indent level. An indent
level of 0 will only insert newlines. ``None`` is the most compact
representation.
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
then it will be used instead of the default ``(', ', ': ')`` separators.
``(',', ':')`` is the most compact JSON representation.
``encoding`` is the character encoding for str instances, default is UTF-8.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg.
"""
# cached encoder
if (skipkeys is False and ensure_ascii is True and
check_circular is True and allow_nan is True and
cls is None and indent is None and separators is None and
encoding == 'utf-8' and default is None and not kw):
return _default_encoder.encode(obj)
if cls is None:
cls = JSONEncoder
return cls(
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding, default=default,
**kw).encode(obj) | def function[dumps, parameter[obj, skipkeys, ensure_ascii, check_circular, allow_nan, cls, indent, separators, encoding, default]]:
constant[
Serialize ``obj`` to a JSON formatted ``str``.
If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If ``ensure_ascii`` is ``False``, then the return value will be a
``unicode`` instance subject to normal Python ``str`` to ``unicode``
coercion rules instead of being escaped to an ASCII ``str``.
If ``check_circular`` is ``False``, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is ``False``, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If ``indent`` is a non-negative integer, then JSON array elements and
object members will be pretty-printed with that indent level. An indent
level of 0 will only insert newlines. ``None`` is the most compact
representation.
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
then it will be used instead of the default ``(', ', ': ')`` separators.
``(',', ':')`` is the most compact JSON representation.
``encoding`` is the character encoding for str instances, default is UTF-8.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg.
]
if <ast.BoolOp object at 0x7da1b004dff0> begin[:]
return[call[name[_default_encoder].encode, parameter[name[obj]]]]
if compare[name[cls] is constant[None]] begin[:]
variable[cls] assign[=] name[JSONEncoder]
return[call[call[name[cls], parameter[]].encode, parameter[name[obj]]]] | keyword[def] identifier[dumps] ( identifier[obj] , identifier[skipkeys] = keyword[False] , identifier[ensure_ascii] = keyword[True] , identifier[check_circular] = keyword[True] ,
identifier[allow_nan] = keyword[True] , identifier[cls] = keyword[None] , identifier[indent] = keyword[None] , identifier[separators] = keyword[None] ,
identifier[encoding] = literal[string] , identifier[default] = keyword[None] ,** identifier[kw] ):
literal[string]
keyword[if] ( identifier[skipkeys] keyword[is] keyword[False] keyword[and] identifier[ensure_ascii] keyword[is] keyword[True] keyword[and]
identifier[check_circular] keyword[is] keyword[True] keyword[and] identifier[allow_nan] keyword[is] keyword[True] keyword[and]
identifier[cls] keyword[is] keyword[None] keyword[and] identifier[indent] keyword[is] keyword[None] keyword[and] identifier[separators] keyword[is] keyword[None] keyword[and]
identifier[encoding] == literal[string] keyword[and] identifier[default] keyword[is] keyword[None] keyword[and] keyword[not] identifier[kw] ):
keyword[return] identifier[_default_encoder] . identifier[encode] ( identifier[obj] )
keyword[if] identifier[cls] keyword[is] keyword[None] :
identifier[cls] = identifier[JSONEncoder]
keyword[return] identifier[cls] (
identifier[skipkeys] = identifier[skipkeys] , identifier[ensure_ascii] = identifier[ensure_ascii] ,
identifier[check_circular] = identifier[check_circular] , identifier[allow_nan] = identifier[allow_nan] , identifier[indent] = identifier[indent] ,
identifier[separators] = identifier[separators] , identifier[encoding] = identifier[encoding] , identifier[default] = identifier[default] ,
** identifier[kw] ). identifier[encode] ( identifier[obj] ) | def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None, separators=None, encoding='utf-8', default=None, **kw):
"""
Serialize ``obj`` to a JSON formatted ``str``.
If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If ``ensure_ascii`` is ``False``, then the return value will be a
``unicode`` instance subject to normal Python ``str`` to ``unicode``
coercion rules instead of being escaped to an ASCII ``str``.
If ``check_circular`` is ``False``, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is ``False``, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If ``indent`` is a non-negative integer, then JSON array elements and
object members will be pretty-printed with that indent level. An indent
level of 0 will only insert newlines. ``None`` is the most compact
representation.
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
then it will be used instead of the default ``(', ', ': ')`` separators.
``(',', ':')`` is the most compact JSON representation.
``encoding`` is the character encoding for str instances, default is UTF-8.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg.
"""
# cached encoder
if skipkeys is False and ensure_ascii is True and (check_circular is True) and (allow_nan is True) and (cls is None) and (indent is None) and (separators is None) and (encoding == 'utf-8') and (default is None) and (not kw):
return _default_encoder.encode(obj) # depends on [control=['if'], data=[]]
if cls is None:
cls = JSONEncoder # depends on [control=['if'], data=['cls']]
return cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii, check_circular=check_circular, allow_nan=allow_nan, indent=indent, separators=separators, encoding=encoding, default=default, **kw).encode(obj) |
def to_dict(self, flat=True):
        """
        Return the contents as a regular dict.

        :param flat: If set to `False` the dict returned will have lists
                     with all the values in it. Otherwise it will only
                     contain the first value for each key.
        :return: a :class:`dict`
        """
        if flat:
            # One value per key -- ``items()`` yields the first value stored
            # for each key.
            return dict(self.items())
return dict(self.lists()) | def function[to_dict, parameter[self, flat]]:
constant[
Return the contents as regular dict. If `flat` is `True` the
returned dict will only have the first item present, if `flat` is
`False` all values will be returned as lists.
:param flat: If set to `False` the dict returned will have lists
with all the values in it. Otherwise it will only
contain the last value for each key.
:return: a :class:`dict`
]
if name[flat] begin[:]
return[call[name[dict], parameter[call[name[self].items, parameter[]]]]]
return[call[name[dict], parameter[call[name[self].lists, parameter[]]]]] | keyword[def] identifier[to_dict] ( identifier[self] , identifier[flat] = keyword[True] ):
literal[string]
keyword[if] identifier[flat] :
keyword[return] identifier[dict] ( identifier[self] . identifier[items] ())
keyword[return] identifier[dict] ( identifier[self] . identifier[lists] ()) | def to_dict(self, flat=True):
"""
Return the contents as regular dict. If `flat` is `True` the
returned dict will only have the first item present, if `flat` is
`False` all values will be returned as lists.
:param flat: If set to `False` the dict returned will have lists
with all the values in it. Otherwise it will only
contain the last value for each key.
:return: a :class:`dict`
"""
if flat:
return dict(self.items()) # depends on [control=['if'], data=[]]
return dict(self.lists()) |
def _analyzeDontMeasure(self, chunkSize, willMeasureLater, *sinks):
        """ Figure out the best diffs to use to reach all our required volumes.

        Breadth-first sweep over the diff graph: ``height`` is the number of
        chained diffs away from a full (``None``-based) send, and each pass
        replaces a node's chosen ``diff`` whenever a cheaper path to it is
        found.

        :param chunkSize: unused in this variant; only referenced by the
            commented-out measuring code kept below for reference.
        :param willMeasureLater: when True, edges with estimated sizes get
            only a slight cost penalty (they will be re-measured later);
            otherwise they are penalized heavily.
        :param sinks: edge providers to search; ``sinks[-1]`` is used for
            error display when a volume ends up with no usable diff.
        """
        # The virtual root: ``None`` stands for a full, non-incremental send.
        nodes = [None]
        height = 1
        def sortKey(node):
            # Real nodes sort by (intermediate flag, accumulated total size).
            # ``None`` only ever appears alone in the first pass, so the
            # mixed key types are never actually compared.
            if node is None:
                return None
            return (node.intermediate, self._totalSize(node))
        while len(nodes) > 0:
            logger.debug("Analyzing %d nodes for height %d...", len(nodes), height)
            nodes.sort(key=sortKey)
            for fromNode in nodes:
                # Only expand nodes that belong to a previous level.
                if self._height(fromNode) >= height:
                    continue
                # Skip nodes whose chosen diff has no known size yet.
                if fromNode is not None and fromNode.diffSize is None:
                    continue
                fromVol = fromNode.volume if fromNode else None
                logger.debug("Following edges from %s", fromVol)
                for sink in sinks:
                    # logger.debug(
                    #     "Listing edges in %s",
                    #     sink
                    # )
                    for edge in sink.getEdges(fromVol):
                        toVol = edge.toVol
                        # logger.debug("Edge: %s", edge)
                        # Skip any edges already in the destination
                        if sink != self.dest and self.dest.hasEdge(edge):
                            continue
                        if toVol in self.nodes:
                            toNode = self.nodes[toVol]
                        # Don't transfer any edges we won't need in the destination
                        # elif sink != self.dest:
                        #     logger.debug("Won't transfer unnecessary %s", edge)
                        #     continue
                        else:
                            # First time this volume is reached: record it as
                            # an intermediate node.
                            toNode = _Node(toVol, True)
                            self.nodes[toVol] = toNode
                        logger.debug("Considering %s", edge)
                        # Penalize estimated sizes so measured edges win ties.
                        edgeSize = edge.size
                        if edge.sizeIsEstimated:
                            if willMeasureLater:
                                # Slight preference for accurate sizes
                                edgeSize *= 1.2
                            else:
                                # Large preference for accurate sizes
                                edgeSize *= 2
                        newCost = self._cost(sink, edgeSize, fromNode, height)
                        if toNode.diff is None:
                            oldCost = None
                        else:
                            oldCost = self._cost(
                                toNode.sink,
                                toNode.diffSize,
                                self._getNode(toNode.previous),
                                self._height(toNode)
                            )
                        # Don't use a more-expensive path
                        if oldCost is not None and oldCost <= newCost:
                            continue
                        # Don't create circular paths
                        if self._wouldLoop(fromVol, toVol):
                            # logger.debug("Ignoring looping edge: %s", toVol.display(sink))
                            continue
                        # if measureSize and sink != self.dest and edge.sizeIsEstimated:
                        #     sink.measureSize(edge, chunkSize)
                        #     newCost = self._cost(sink, edge.size, fromSize, height)
                        #     if oldCost is not None and oldCost <= newCost:
                        #         continue
                        logger.debug(
                            "Replacing edge (%s -> %s cost)\n%s",
                            humanize(oldCost),
                            humanize(newCost),
                            toNode.display(sink)
                        )
                        # logger.debug("Cost elements: %s", dict(
                        #     sink=str(sink),
                        #     edgeSize=humanize(edgeSize),
                        #     fromSize=humanize(fromSize),
                        #     height=height,
                        #     ))
                        # Adopt the cheaper edge for this node.
                        toNode.diff = edge
            # Next level: every node now sitting exactly at this height.
            nodes = [node for node in self.nodes.values() if self._height(node) == height]
            height += 1
        self._prune()
        for node in self.nodes.values():
            node.height = self._height(node)
            if node.diff is None:
                logger.error(
                    "No source diffs for %s",
                    node.volume.display(sinks[-1], detail="line"),
) | def function[_analyzeDontMeasure, parameter[self, chunkSize, willMeasureLater]]:
constant[ Figure out the best diffs to use to reach all our required volumes. ]
variable[nodes] assign[=] list[[<ast.Constant object at 0x7da1b27ca680>]]
variable[height] assign[=] constant[1]
def function[sortKey, parameter[node]]:
if compare[name[node] is constant[None]] begin[:]
return[constant[None]]
return[tuple[[<ast.Attribute object at 0x7da1b27cb550>, <ast.Call object at 0x7da1b27cb8e0>]]]
while compare[call[name[len], parameter[name[nodes]]] greater[>] constant[0]] begin[:]
call[name[logger].debug, parameter[constant[Analyzing %d nodes for height %d...], call[name[len], parameter[name[nodes]]], name[height]]]
call[name[nodes].sort, parameter[]]
for taget[name[fromNode]] in starred[name[nodes]] begin[:]
if compare[call[name[self]._height, parameter[name[fromNode]]] greater_or_equal[>=] name[height]] begin[:]
continue
if <ast.BoolOp object at 0x7da1b27cb490> begin[:]
continue
variable[fromVol] assign[=] <ast.IfExp object at 0x7da1b27cb3a0>
call[name[logger].debug, parameter[constant[Following edges from %s], name[fromVol]]]
for taget[name[sink]] in starred[name[sinks]] begin[:]
for taget[name[edge]] in starred[call[name[sink].getEdges, parameter[name[fromVol]]]] begin[:]
variable[toVol] assign[=] name[edge].toVol
if <ast.BoolOp object at 0x7da20e9b3a90> begin[:]
continue
if compare[name[toVol] in name[self].nodes] begin[:]
variable[toNode] assign[=] call[name[self].nodes][name[toVol]]
call[name[logger].debug, parameter[constant[Considering %s], name[edge]]]
variable[edgeSize] assign[=] name[edge].size
if name[edge].sizeIsEstimated begin[:]
if name[willMeasureLater] begin[:]
<ast.AugAssign object at 0x7da1b2724700>
variable[newCost] assign[=] call[name[self]._cost, parameter[name[sink], name[edgeSize], name[fromNode], name[height]]]
if compare[name[toNode].diff is constant[None]] begin[:]
variable[oldCost] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b2724910> begin[:]
continue
if call[name[self]._wouldLoop, parameter[name[fromVol], name[toVol]]] begin[:]
continue
call[name[logger].debug, parameter[constant[Replacing edge (%s -> %s cost)
%s], call[name[humanize], parameter[name[oldCost]]], call[name[humanize], parameter[name[newCost]]], call[name[toNode].display, parameter[name[sink]]]]]
name[toNode].diff assign[=] name[edge]
variable[nodes] assign[=] <ast.ListComp object at 0x7da1b27252d0>
<ast.AugAssign object at 0x7da1b2725930>
call[name[self]._prune, parameter[]]
for taget[name[node]] in starred[call[name[self].nodes.values, parameter[]]] begin[:]
name[node].height assign[=] call[name[self]._height, parameter[name[node]]]
if compare[name[node].diff is constant[None]] begin[:]
call[name[logger].error, parameter[constant[No source diffs for %s], call[name[node].volume.display, parameter[call[name[sinks]][<ast.UnaryOp object at 0x7da1b2726ce0>]]]]] | keyword[def] identifier[_analyzeDontMeasure] ( identifier[self] , identifier[chunkSize] , identifier[willMeasureLater] ,* identifier[sinks] ):
literal[string]
identifier[nodes] =[ keyword[None] ]
identifier[height] = literal[int]
keyword[def] identifier[sortKey] ( identifier[node] ):
keyword[if] identifier[node] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] ( identifier[node] . identifier[intermediate] , identifier[self] . identifier[_totalSize] ( identifier[node] ))
keyword[while] identifier[len] ( identifier[nodes] )> literal[int] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[len] ( identifier[nodes] ), identifier[height] )
identifier[nodes] . identifier[sort] ( identifier[key] = identifier[sortKey] )
keyword[for] identifier[fromNode] keyword[in] identifier[nodes] :
keyword[if] identifier[self] . identifier[_height] ( identifier[fromNode] )>= identifier[height] :
keyword[continue]
keyword[if] identifier[fromNode] keyword[is] keyword[not] keyword[None] keyword[and] identifier[fromNode] . identifier[diffSize] keyword[is] keyword[None] :
keyword[continue]
identifier[fromVol] = identifier[fromNode] . identifier[volume] keyword[if] identifier[fromNode] keyword[else] keyword[None]
identifier[logger] . identifier[debug] ( literal[string] , identifier[fromVol] )
keyword[for] identifier[sink] keyword[in] identifier[sinks] :
keyword[for] identifier[edge] keyword[in] identifier[sink] . identifier[getEdges] ( identifier[fromVol] ):
identifier[toVol] = identifier[edge] . identifier[toVol]
keyword[if] identifier[sink] != identifier[self] . identifier[dest] keyword[and] identifier[self] . identifier[dest] . identifier[hasEdge] ( identifier[edge] ):
keyword[continue]
keyword[if] identifier[toVol] keyword[in] identifier[self] . identifier[nodes] :
identifier[toNode] = identifier[self] . identifier[nodes] [ identifier[toVol] ]
keyword[else] :
identifier[toNode] = identifier[_Node] ( identifier[toVol] , keyword[True] )
identifier[self] . identifier[nodes] [ identifier[toVol] ]= identifier[toNode]
identifier[logger] . identifier[debug] ( literal[string] , identifier[edge] )
identifier[edgeSize] = identifier[edge] . identifier[size]
keyword[if] identifier[edge] . identifier[sizeIsEstimated] :
keyword[if] identifier[willMeasureLater] :
identifier[edgeSize] *= literal[int]
keyword[else] :
identifier[edgeSize] *= literal[int]
identifier[newCost] = identifier[self] . identifier[_cost] ( identifier[sink] , identifier[edgeSize] , identifier[fromNode] , identifier[height] )
keyword[if] identifier[toNode] . identifier[diff] keyword[is] keyword[None] :
identifier[oldCost] = keyword[None]
keyword[else] :
identifier[oldCost] = identifier[self] . identifier[_cost] (
identifier[toNode] . identifier[sink] ,
identifier[toNode] . identifier[diffSize] ,
identifier[self] . identifier[_getNode] ( identifier[toNode] . identifier[previous] ),
identifier[self] . identifier[_height] ( identifier[toNode] )
)
keyword[if] identifier[oldCost] keyword[is] keyword[not] keyword[None] keyword[and] identifier[oldCost] <= identifier[newCost] :
keyword[continue]
keyword[if] identifier[self] . identifier[_wouldLoop] ( identifier[fromVol] , identifier[toVol] ):
keyword[continue]
identifier[logger] . identifier[debug] (
literal[string] ,
identifier[humanize] ( identifier[oldCost] ),
identifier[humanize] ( identifier[newCost] ),
identifier[toNode] . identifier[display] ( identifier[sink] )
)
identifier[toNode] . identifier[diff] = identifier[edge]
identifier[nodes] =[ identifier[node] keyword[for] identifier[node] keyword[in] identifier[self] . identifier[nodes] . identifier[values] () keyword[if] identifier[self] . identifier[_height] ( identifier[node] )== identifier[height] ]
identifier[height] += literal[int]
identifier[self] . identifier[_prune] ()
keyword[for] identifier[node] keyword[in] identifier[self] . identifier[nodes] . identifier[values] ():
identifier[node] . identifier[height] = identifier[self] . identifier[_height] ( identifier[node] )
keyword[if] identifier[node] . identifier[diff] keyword[is] keyword[None] :
identifier[logger] . identifier[error] (
literal[string] ,
identifier[node] . identifier[volume] . identifier[display] ( identifier[sinks] [- literal[int] ], identifier[detail] = literal[string] ),
) | def _analyzeDontMeasure(self, chunkSize, willMeasureLater, *sinks):
""" Figure out the best diffs to use to reach all our required volumes. """
nodes = [None]
height = 1
def sortKey(node):
if node is None:
return None # depends on [control=['if'], data=[]]
return (node.intermediate, self._totalSize(node))
while len(nodes) > 0:
logger.debug('Analyzing %d nodes for height %d...', len(nodes), height)
nodes.sort(key=sortKey)
for fromNode in nodes:
if self._height(fromNode) >= height:
continue # depends on [control=['if'], data=[]]
if fromNode is not None and fromNode.diffSize is None:
continue # depends on [control=['if'], data=[]]
fromVol = fromNode.volume if fromNode else None
logger.debug('Following edges from %s', fromVol)
for sink in sinks:
# logger.debug(
# "Listing edges in %s",
# sink
# )
for edge in sink.getEdges(fromVol):
toVol = edge.toVol
# logger.debug("Edge: %s", edge)
# Skip any edges already in the destination
if sink != self.dest and self.dest.hasEdge(edge):
continue # depends on [control=['if'], data=[]]
if toVol in self.nodes:
toNode = self.nodes[toVol] # depends on [control=['if'], data=['toVol']]
else:
# Don't transfer any edges we won't need in the destination
# elif sink != self.dest:
# logger.debug("Won't transfer unnecessary %s", edge)
# continue
toNode = _Node(toVol, True)
self.nodes[toVol] = toNode
logger.debug('Considering %s', edge)
edgeSize = edge.size
if edge.sizeIsEstimated:
if willMeasureLater:
# Slight preference for accurate sizes
edgeSize *= 1.2 # depends on [control=['if'], data=[]]
else:
# Large preference for accurate sizes
edgeSize *= 2 # depends on [control=['if'], data=[]]
newCost = self._cost(sink, edgeSize, fromNode, height)
if toNode.diff is None:
oldCost = None # depends on [control=['if'], data=[]]
else:
oldCost = self._cost(toNode.sink, toNode.diffSize, self._getNode(toNode.previous), self._height(toNode))
# Don't use a more-expensive path
if oldCost is not None and oldCost <= newCost:
continue # depends on [control=['if'], data=[]]
# Don't create circular paths
if self._wouldLoop(fromVol, toVol):
# logger.debug("Ignoring looping edge: %s", toVol.display(sink))
continue # depends on [control=['if'], data=[]]
# if measureSize and sink != self.dest and edge.sizeIsEstimated:
# sink.measureSize(edge, chunkSize)
# newCost = self._cost(sink, edge.size, fromSize, height)
# if oldCost is not None and oldCost <= newCost:
# continue
logger.debug('Replacing edge (%s -> %s cost)\n%s', humanize(oldCost), humanize(newCost), toNode.display(sink))
# logger.debug("Cost elements: %s", dict(
# sink=str(sink),
# edgeSize=humanize(edgeSize),
# fromSize=humanize(fromSize),
# height=height,
# ))
toNode.diff = edge # depends on [control=['for'], data=['edge']] # depends on [control=['for'], data=['sink']] # depends on [control=['for'], data=['fromNode']]
nodes = [node for node in self.nodes.values() if self._height(node) == height]
height += 1 # depends on [control=['while'], data=[]]
self._prune()
for node in self.nodes.values():
node.height = self._height(node)
if node.diff is None:
logger.error('No source diffs for %s', node.volume.display(sinks[-1], detail='line')) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']] |
def filter(self, table, cg_snapshots, filter_string):
        """Naive case-insensitive search.

        Return the cg_snapshots whose ``name`` contains ``filter_string``,
        compared case-insensitively.  ``table`` is accepted but not used by
        this filter.
        """
        # Lower-case the needle once; each snapshot name is lower-cased in
        # the comprehension so matching is case-insensitive.
        query = filter_string.lower()
        return [cg_snapshot for cg_snapshot in cg_snapshots
if query in cg_snapshot.name.lower()] | def function[filter, parameter[self, table, cg_snapshots, filter_string]]:
constant[Naive case-insensitive search.]
variable[query] assign[=] call[name[filter_string].lower, parameter[]]
return[<ast.ListComp object at 0x7da18ede6e60>] | keyword[def] identifier[filter] ( identifier[self] , identifier[table] , identifier[cg_snapshots] , identifier[filter_string] ):
literal[string]
identifier[query] = identifier[filter_string] . identifier[lower] ()
keyword[return] [ identifier[cg_snapshot] keyword[for] identifier[cg_snapshot] keyword[in] identifier[cg_snapshots]
keyword[if] identifier[query] keyword[in] identifier[cg_snapshot] . identifier[name] . identifier[lower] ()] | def filter(self, table, cg_snapshots, filter_string):
"""Naive case-insensitive search."""
query = filter_string.lower()
return [cg_snapshot for cg_snapshot in cg_snapshots if query in cg_snapshot.name.lower()] |
def remove_all_custom_funcs(self):
""" Remove all instances of CustomFeature from the active feature list.
"""
custom_feats = [f for f in self.active_features if isinstance(f, CustomFeature)]
for f in custom_feats:
self.active_features.remove(f) | def function[remove_all_custom_funcs, parameter[self]]:
constant[ Remove all instances of CustomFeature from the active feature list.
]
variable[custom_feats] assign[=] <ast.ListComp object at 0x7da18f723be0>
for taget[name[f]] in starred[name[custom_feats]] begin[:]
call[name[self].active_features.remove, parameter[name[f]]] | keyword[def] identifier[remove_all_custom_funcs] ( identifier[self] ):
literal[string]
identifier[custom_feats] =[ identifier[f] keyword[for] identifier[f] keyword[in] identifier[self] . identifier[active_features] keyword[if] identifier[isinstance] ( identifier[f] , identifier[CustomFeature] )]
keyword[for] identifier[f] keyword[in] identifier[custom_feats] :
identifier[self] . identifier[active_features] . identifier[remove] ( identifier[f] ) | def remove_all_custom_funcs(self):
""" Remove all instances of CustomFeature from the active feature list.
"""
custom_feats = [f for f in self.active_features if isinstance(f, CustomFeature)]
for f in custom_feats:
self.active_features.remove(f) # depends on [control=['for'], data=['f']] |
def _getBatchDirectory(self, projectRootDirectory):
"""
Check the project file for the REPLACE_FOLDER card. If it exists, append it's value to create the batch directory path.
This is the directory output is written to when run in batch mode.
"""
# Set output directory to main directory as default
batchDirectory = projectRootDirectory
# Get the replace folder card
replaceFolderCard = self.getCard('REPLACE_FOLDER')
if replaceFolderCard:
replaceDir = replaceFolderCard.value.strip('"')
batchDirectory = os.path.join(batchDirectory, replaceDir)
# Create directory if it doesn't exist
if not os.path.isdir(batchDirectory):
os.mkdir(batchDirectory)
log.info('Creating directory for batch output: {0}'.format(batchDirectory))
return batchDirectory | def function[_getBatchDirectory, parameter[self, projectRootDirectory]]:
constant[
Check the project file for the REPLACE_FOLDER card. If it exists, append it's value to create the batch directory path.
This is the directory output is written to when run in batch mode.
]
variable[batchDirectory] assign[=] name[projectRootDirectory]
variable[replaceFolderCard] assign[=] call[name[self].getCard, parameter[constant[REPLACE_FOLDER]]]
if name[replaceFolderCard] begin[:]
variable[replaceDir] assign[=] call[name[replaceFolderCard].value.strip, parameter[constant["]]]
variable[batchDirectory] assign[=] call[name[os].path.join, parameter[name[batchDirectory], name[replaceDir]]]
if <ast.UnaryOp object at 0x7da18f09ece0> begin[:]
call[name[os].mkdir, parameter[name[batchDirectory]]]
call[name[log].info, parameter[call[constant[Creating directory for batch output: {0}].format, parameter[name[batchDirectory]]]]]
return[name[batchDirectory]] | keyword[def] identifier[_getBatchDirectory] ( identifier[self] , identifier[projectRootDirectory] ):
literal[string]
identifier[batchDirectory] = identifier[projectRootDirectory]
identifier[replaceFolderCard] = identifier[self] . identifier[getCard] ( literal[string] )
keyword[if] identifier[replaceFolderCard] :
identifier[replaceDir] = identifier[replaceFolderCard] . identifier[value] . identifier[strip] ( literal[string] )
identifier[batchDirectory] = identifier[os] . identifier[path] . identifier[join] ( identifier[batchDirectory] , identifier[replaceDir] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[batchDirectory] ):
identifier[os] . identifier[mkdir] ( identifier[batchDirectory] )
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[batchDirectory] ))
keyword[return] identifier[batchDirectory] | def _getBatchDirectory(self, projectRootDirectory):
"""
Check the project file for the REPLACE_FOLDER card. If it exists, append it's value to create the batch directory path.
This is the directory output is written to when run in batch mode.
"""
# Set output directory to main directory as default
batchDirectory = projectRootDirectory
# Get the replace folder card
replaceFolderCard = self.getCard('REPLACE_FOLDER')
if replaceFolderCard:
replaceDir = replaceFolderCard.value.strip('"')
batchDirectory = os.path.join(batchDirectory, replaceDir) # depends on [control=['if'], data=[]]
# Create directory if it doesn't exist
if not os.path.isdir(batchDirectory):
os.mkdir(batchDirectory)
log.info('Creating directory for batch output: {0}'.format(batchDirectory)) # depends on [control=['if'], data=[]]
return batchDirectory |
def check_for_system_time_change(self):  # pragma: no cover, hardly testable with unit tests...
        """Detect whether the system clock jumped since the last loop turn.

        Updates ``self.t_each_loop`` to the current time as a side effect so
        the next call measures against this turn.

        :return: 0 when the clock drifted less than 900 seconds since the
            previous call, otherwise the signed drift in seconds
        :rtype: int
        """
        now = time.time()
        drift = now - self.t_each_loop
        # Remember this turn's timestamp for the next comparison.
        self.t_each_loop = now
        # Only drifts above 15 minutes are compensated for.
        # todo: confirm that 15 minutes is a good choice...
        if abs(drift) > 900:  # pragma: no cover, not with unit tests...
            return drift
return 0 | def function[check_for_system_time_change, parameter[self]]:
constant[Check if our system time change. If so, change our
:return: 0 if the difference < 900, difference else
:rtype: int
]
variable[now] assign[=] call[name[time].time, parameter[]]
variable[difference] assign[=] binary_operation[name[now] - name[self].t_each_loop]
name[self].t_each_loop assign[=] name[now]
if compare[call[name[abs], parameter[name[difference]]] greater[>] constant[900]] begin[:]
return[name[difference]]
return[constant[0]] | keyword[def] identifier[check_for_system_time_change] ( identifier[self] ):
literal[string]
identifier[now] = identifier[time] . identifier[time] ()
identifier[difference] = identifier[now] - identifier[self] . identifier[t_each_loop]
identifier[self] . identifier[t_each_loop] = identifier[now]
keyword[if] identifier[abs] ( identifier[difference] )> literal[int] :
keyword[return] identifier[difference]
keyword[return] literal[int] | def check_for_system_time_change(self): # pragma: no cover, hardly testable with unit tests...
'Check if our system time change. If so, change our\n\n :return: 0 if the difference < 900, difference else\n :rtype: int\n '
now = time.time()
difference = now - self.t_each_loop
# Now set the new value for the tick loop
self.t_each_loop = now
# If we have more than 15 min time change, we need to compensate it
# todo: confirm that 15 minutes is a good choice...
if abs(difference) > 900: # pragma: no cover, not with unit tests...
return difference # depends on [control=['if'], data=[]]
return 0 |
def description_from_content(self):
        """
        Returns the first block or sentence of the first content-like
        field.

        Scans the model's fields for the first non-empty ``RichTextField``
        (falling back to any ``TextField``), renders it through the
        rich-text filters, and truncates the result at the first block-level
        break or sentence end.  Falls back to ``str(self)`` when no content
        is found.
        """
        description = ""
        # Use the first RichTextField, or TextField if none found.
        for field_type in (RichTextField, models.TextField):
            if not description:
                for field in self._meta.fields:
                    # Skip the model's own "description" field so the text is
                    # derived from actual content.
                    if (isinstance(field, field_type) and
                            field.name != "description"):
                        description = getattr(self, field.name)
                        if description:
                            # Imported here rather than at module level --
                            # presumably to avoid a circular import; confirm.
                            from yacms.core.templatetags.yacms_tags \
                                import richtext_filters
                            description = richtext_filters(description)
                            break
        # Fall back to the title if description couldn't be determined.
        if not description:
            description = str(self)
        # Strip everything after the first block or sentence.
        ends = ("</p>", "<br />", "<br/>", "<br>", "</ul>",
                "\n", ". ", "! ", "? ")
        for end in ends:
            pos = description.lower().find(end)
            if pos > -1:
                # Cut at the first terminator; TagCloser presumably re-closes
                # any HTML tags left open by the cut -- TODO confirm.
                description = TagCloser(description[:pos]).html
                break
        else:
            # No terminator found anywhere: limit to 100 words instead.
            description = truncatewords_html(description, 100)
        try:
            # Python 2: normalize to unicode.  Under Python 3 ``unicode`` is
            # undefined, and the resulting NameError is swallowed on purpose.
            description = unicode(description)
        except NameError:
            pass # Python 3.
return description | def function[description_from_content, parameter[self]]:
constant[
Returns the first block or sentence of the first content-like
field.
]
variable[description] assign[=] constant[]
for taget[name[field_type]] in starred[tuple[[<ast.Name object at 0x7da1b15f1540>, <ast.Attribute object at 0x7da1b15f12a0>]]] begin[:]
if <ast.UnaryOp object at 0x7da1b15f08e0> begin[:]
for taget[name[field]] in starred[name[self]._meta.fields] begin[:]
if <ast.BoolOp object at 0x7da1b15f1ba0> begin[:]
variable[description] assign[=] call[name[getattr], parameter[name[self], name[field].name]]
if name[description] begin[:]
from relative_module[yacms.core.templatetags.yacms_tags] import module[richtext_filters]
variable[description] assign[=] call[name[richtext_filters], parameter[name[description]]]
break
if <ast.UnaryOp object at 0x7da1b15f2fb0> begin[:]
variable[description] assign[=] call[name[str], parameter[name[self]]]
variable[ends] assign[=] tuple[[<ast.Constant object at 0x7da1b15f02e0>, <ast.Constant object at 0x7da1b15f0d30>, <ast.Constant object at 0x7da1b15f0df0>, <ast.Constant object at 0x7da1b15f2e30>, <ast.Constant object at 0x7da1b15f0e80>, <ast.Constant object at 0x7da1b15f2b60>, <ast.Constant object at 0x7da1b15f2bc0>, <ast.Constant object at 0x7da1b15f23b0>, <ast.Constant object at 0x7da1b15f2a10>]]
for taget[name[end]] in starred[name[ends]] begin[:]
variable[pos] assign[=] call[call[name[description].lower, parameter[]].find, parameter[name[end]]]
if compare[name[pos] greater[>] <ast.UnaryOp object at 0x7da1b15f32e0>] begin[:]
variable[description] assign[=] call[name[TagCloser], parameter[call[name[description]][<ast.Slice object at 0x7da1b15f18d0>]]].html
break
<ast.Try object at 0x7da1b15f2620>
return[name[description]] | keyword[def] identifier[description_from_content] ( identifier[self] ):
literal[string]
identifier[description] = literal[string]
keyword[for] identifier[field_type] keyword[in] ( identifier[RichTextField] , identifier[models] . identifier[TextField] ):
keyword[if] keyword[not] identifier[description] :
keyword[for] identifier[field] keyword[in] identifier[self] . identifier[_meta] . identifier[fields] :
keyword[if] ( identifier[isinstance] ( identifier[field] , identifier[field_type] ) keyword[and]
identifier[field] . identifier[name] != literal[string] ):
identifier[description] = identifier[getattr] ( identifier[self] , identifier[field] . identifier[name] )
keyword[if] identifier[description] :
keyword[from] identifier[yacms] . identifier[core] . identifier[templatetags] . identifier[yacms_tags] keyword[import] identifier[richtext_filters]
identifier[description] = identifier[richtext_filters] ( identifier[description] )
keyword[break]
keyword[if] keyword[not] identifier[description] :
identifier[description] = identifier[str] ( identifier[self] )
identifier[ends] =( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] )
keyword[for] identifier[end] keyword[in] identifier[ends] :
identifier[pos] = identifier[description] . identifier[lower] (). identifier[find] ( identifier[end] )
keyword[if] identifier[pos] >- literal[int] :
identifier[description] = identifier[TagCloser] ( identifier[description] [: identifier[pos] ]). identifier[html]
keyword[break]
keyword[else] :
identifier[description] = identifier[truncatewords_html] ( identifier[description] , literal[int] )
keyword[try] :
identifier[description] = identifier[unicode] ( identifier[description] )
keyword[except] identifier[NameError] :
keyword[pass]
keyword[return] identifier[description] | def description_from_content(self):
"""
Returns the first block or sentence of the first content-like
field.
"""
description = ''
# Use the first RichTextField, or TextField if none found.
for field_type in (RichTextField, models.TextField):
if not description:
for field in self._meta.fields:
if isinstance(field, field_type) and field.name != 'description':
description = getattr(self, field.name)
if description:
from yacms.core.templatetags.yacms_tags import richtext_filters
description = richtext_filters(description)
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field_type']]
# Fall back to the title if description couldn't be determined.
if not description:
description = str(self) # depends on [control=['if'], data=[]]
# Strip everything after the first block or sentence.
ends = ('</p>', '<br />', '<br/>', '<br>', '</ul>', '\n', '. ', '! ', '? ')
for end in ends:
pos = description.lower().find(end)
if pos > -1:
description = TagCloser(description[:pos]).html
break # depends on [control=['if'], data=['pos']] # depends on [control=['for'], data=['end']]
else:
description = truncatewords_html(description, 100)
try:
description = unicode(description) # depends on [control=['try'], data=[]]
except NameError:
pass # Python 3. # depends on [control=['except'], data=[]]
return description |
def from_tibiadata(cls, content):
"""Parses a TibiaData.com response into a :class:`World`
Parameters
----------
content: :class:`str`
The raw JSON content from TibiaData
Returns
-------
:class:`World`
The World described in the page, or ``None``.
Raises
------
InvalidContent
If the provided content is not a TibiaData world response.
"""
json_data = parse_json(content)
try:
world_data = json_data["world"]
world_info = world_data["world_information"]
world = cls(world_info["name"])
if "location" not in world_info:
return None
world.online_count = world_info["players_online"]
world.status = "Online" if world.online_count > 0 else "Offline"
world.record_count = world_info["online_record"]["players"]
world.record_date = parse_tibiadata_datetime(world_info["online_record"]["date"])
world.creation_date = world_info["creation_date"]
world.location = try_enum(WorldLocation, world_info["location"])
world.pvp_type = try_enum(PvpType, world_info["pvp_type"])
world.transfer_type = try_enum(TransferType, world_info.get("transfer_type"), TransferType.REGULAR)
world.premium_only = "premium_type" in world_info
world.world_quest_titles = world_info.get("world_quest_titles", [])
world._parse_battleye_status(world_info.get("battleye_status", ""))
world.experimental = world_info.get("Game World Type:", "Regular") != "Regular"
for player in world_data.get("players_online", []):
world.online_players.append(OnlineCharacter(player["name"], world.name, player["level"],
player["vocation"]))
return world
except KeyError:
raise InvalidContent("content is not a world json response from TibiaData") | def function[from_tibiadata, parameter[cls, content]]:
constant[Parses a TibiaData.com response into a :class:`World`
Parameters
----------
content: :class:`str`
The raw JSON content from TibiaData
Returns
-------
:class:`World`
The World described in the page, or ``None``.
Raises
------
InvalidContent
If the provided content is not a TibiaData world response.
]
variable[json_data] assign[=] call[name[parse_json], parameter[name[content]]]
<ast.Try object at 0x7da1b0bd3310> | keyword[def] identifier[from_tibiadata] ( identifier[cls] , identifier[content] ):
literal[string]
identifier[json_data] = identifier[parse_json] ( identifier[content] )
keyword[try] :
identifier[world_data] = identifier[json_data] [ literal[string] ]
identifier[world_info] = identifier[world_data] [ literal[string] ]
identifier[world] = identifier[cls] ( identifier[world_info] [ literal[string] ])
keyword[if] literal[string] keyword[not] keyword[in] identifier[world_info] :
keyword[return] keyword[None]
identifier[world] . identifier[online_count] = identifier[world_info] [ literal[string] ]
identifier[world] . identifier[status] = literal[string] keyword[if] identifier[world] . identifier[online_count] > literal[int] keyword[else] literal[string]
identifier[world] . identifier[record_count] = identifier[world_info] [ literal[string] ][ literal[string] ]
identifier[world] . identifier[record_date] = identifier[parse_tibiadata_datetime] ( identifier[world_info] [ literal[string] ][ literal[string] ])
identifier[world] . identifier[creation_date] = identifier[world_info] [ literal[string] ]
identifier[world] . identifier[location] = identifier[try_enum] ( identifier[WorldLocation] , identifier[world_info] [ literal[string] ])
identifier[world] . identifier[pvp_type] = identifier[try_enum] ( identifier[PvpType] , identifier[world_info] [ literal[string] ])
identifier[world] . identifier[transfer_type] = identifier[try_enum] ( identifier[TransferType] , identifier[world_info] . identifier[get] ( literal[string] ), identifier[TransferType] . identifier[REGULAR] )
identifier[world] . identifier[premium_only] = literal[string] keyword[in] identifier[world_info]
identifier[world] . identifier[world_quest_titles] = identifier[world_info] . identifier[get] ( literal[string] ,[])
identifier[world] . identifier[_parse_battleye_status] ( identifier[world_info] . identifier[get] ( literal[string] , literal[string] ))
identifier[world] . identifier[experimental] = identifier[world_info] . identifier[get] ( literal[string] , literal[string] )!= literal[string]
keyword[for] identifier[player] keyword[in] identifier[world_data] . identifier[get] ( literal[string] ,[]):
identifier[world] . identifier[online_players] . identifier[append] ( identifier[OnlineCharacter] ( identifier[player] [ literal[string] ], identifier[world] . identifier[name] , identifier[player] [ literal[string] ],
identifier[player] [ literal[string] ]))
keyword[return] identifier[world]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[InvalidContent] ( literal[string] ) | def from_tibiadata(cls, content):
"""Parses a TibiaData.com response into a :class:`World`
Parameters
----------
content: :class:`str`
The raw JSON content from TibiaData
Returns
-------
:class:`World`
The World described in the page, or ``None``.
Raises
------
InvalidContent
If the provided content is not a TibiaData world response.
"""
json_data = parse_json(content)
try:
world_data = json_data['world']
world_info = world_data['world_information']
world = cls(world_info['name'])
if 'location' not in world_info:
return None # depends on [control=['if'], data=[]]
world.online_count = world_info['players_online']
world.status = 'Online' if world.online_count > 0 else 'Offline'
world.record_count = world_info['online_record']['players']
world.record_date = parse_tibiadata_datetime(world_info['online_record']['date'])
world.creation_date = world_info['creation_date']
world.location = try_enum(WorldLocation, world_info['location'])
world.pvp_type = try_enum(PvpType, world_info['pvp_type'])
world.transfer_type = try_enum(TransferType, world_info.get('transfer_type'), TransferType.REGULAR)
world.premium_only = 'premium_type' in world_info
world.world_quest_titles = world_info.get('world_quest_titles', [])
world._parse_battleye_status(world_info.get('battleye_status', ''))
world.experimental = world_info.get('Game World Type:', 'Regular') != 'Regular'
for player in world_data.get('players_online', []):
world.online_players.append(OnlineCharacter(player['name'], world.name, player['level'], player['vocation'])) # depends on [control=['for'], data=['player']]
return world # depends on [control=['try'], data=[]]
except KeyError:
raise InvalidContent('content is not a world json response from TibiaData') # depends on [control=['except'], data=[]] |
def com_google_fonts_check_name_no_copyright_on_description(ttFont):
"""Description strings in the name table must not contain copyright info."""
failed = False
for name in ttFont['name'].names:
if 'opyright' in name.string.decode(name.getEncoding())\
and name.nameID == NameID.DESCRIPTION:
failed = True
if failed:
yield FAIL, ("Namerecords with ID={} (NameID.DESCRIPTION)"
" should be removed (perhaps these were added by"
" a longstanding FontLab Studio 5.x bug that"
" copied copyright notices to them.)"
"").format(NameID.DESCRIPTION)
else:
yield PASS, ("Description strings in the name table"
" do not contain any copyright string.") | def function[com_google_fonts_check_name_no_copyright_on_description, parameter[ttFont]]:
constant[Description strings in the name table must not contain copyright info.]
variable[failed] assign[=] constant[False]
for taget[name[name]] in starred[call[name[ttFont]][constant[name]].names] begin[:]
if <ast.BoolOp object at 0x7da20c7cabf0> begin[:]
variable[failed] assign[=] constant[True]
if name[failed] begin[:]
<ast.Yield object at 0x7da20c7c8f10> | keyword[def] identifier[com_google_fonts_check_name_no_copyright_on_description] ( identifier[ttFont] ):
literal[string]
identifier[failed] = keyword[False]
keyword[for] identifier[name] keyword[in] identifier[ttFont] [ literal[string] ]. identifier[names] :
keyword[if] literal[string] keyword[in] identifier[name] . identifier[string] . identifier[decode] ( identifier[name] . identifier[getEncoding] ()) keyword[and] identifier[name] . identifier[nameID] == identifier[NameID] . identifier[DESCRIPTION] :
identifier[failed] = keyword[True]
keyword[if] identifier[failed] :
keyword[yield] identifier[FAIL] ,( literal[string]
literal[string]
literal[string]
literal[string]
literal[string] ). identifier[format] ( identifier[NameID] . identifier[DESCRIPTION] )
keyword[else] :
keyword[yield] identifier[PASS] ,( literal[string]
literal[string] ) | def com_google_fonts_check_name_no_copyright_on_description(ttFont):
"""Description strings in the name table must not contain copyright info."""
failed = False
for name in ttFont['name'].names:
if 'opyright' in name.string.decode(name.getEncoding()) and name.nameID == NameID.DESCRIPTION:
failed = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
if failed:
yield (FAIL, 'Namerecords with ID={} (NameID.DESCRIPTION) should be removed (perhaps these were added by a longstanding FontLab Studio 5.x bug that copied copyright notices to them.)'.format(NameID.DESCRIPTION)) # depends on [control=['if'], data=[]]
else:
yield (PASS, 'Description strings in the name table do not contain any copyright string.') |
def step(self, observations):
""" Sample action from an action space for given state """
log_histogram = self(observations)
actions = self.q_head.sample(log_histogram)
return {
'actions': actions,
'log_histogram': log_histogram
} | def function[step, parameter[self, observations]]:
constant[ Sample action from an action space for given state ]
variable[log_histogram] assign[=] call[name[self], parameter[name[observations]]]
variable[actions] assign[=] call[name[self].q_head.sample, parameter[name[log_histogram]]]
return[dictionary[[<ast.Constant object at 0x7da1b17fad70>, <ast.Constant object at 0x7da1b17f8820>], [<ast.Name object at 0x7da1b17fb4f0>, <ast.Name object at 0x7da1b17f8a30>]]] | keyword[def] identifier[step] ( identifier[self] , identifier[observations] ):
literal[string]
identifier[log_histogram] = identifier[self] ( identifier[observations] )
identifier[actions] = identifier[self] . identifier[q_head] . identifier[sample] ( identifier[log_histogram] )
keyword[return] {
literal[string] : identifier[actions] ,
literal[string] : identifier[log_histogram]
} | def step(self, observations):
""" Sample action from an action space for given state """
log_histogram = self(observations)
actions = self.q_head.sample(log_histogram)
return {'actions': actions, 'log_histogram': log_histogram} |
def average_sweep(self,T1=0,T2=None,sweeps=None,stdErr=False):
"""
given an array of sweeps, return X,Y,Err average.
This returns *SWEEPS* of data, not just 1 data point.
"""
T1=T1*self.rate
if T2 is None:
T2 = self.sweepSize-1
else:
T2 = T2*self.rate
if sweeps is None:
sweeps = range(self.sweeps)
Ys=np.empty((len(sweeps),(T2-T1)))
for i in range(len(sweeps)):
self.setSweep(sweeps[i])
Ys[i]=self.dataY[T1:T2]
Av = np.average(Ys,0)
Es = np.std(Ys,0)
Xs = self.dataX[T1:T2]
if stdErr: #otherwise return stdev
Es = Es/np.sqrt(len(sweeps))
return Xs,Av,Es | def function[average_sweep, parameter[self, T1, T2, sweeps, stdErr]]:
constant[
given an array of sweeps, return X,Y,Err average.
This returns *SWEEPS* of data, not just 1 data point.
]
variable[T1] assign[=] binary_operation[name[T1] * name[self].rate]
if compare[name[T2] is constant[None]] begin[:]
variable[T2] assign[=] binary_operation[name[self].sweepSize - constant[1]]
if compare[name[sweeps] is constant[None]] begin[:]
variable[sweeps] assign[=] call[name[range], parameter[name[self].sweeps]]
variable[Ys] assign[=] call[name[np].empty, parameter[tuple[[<ast.Call object at 0x7da1afe0e560>, <ast.BinOp object at 0x7da1afe0f760>]]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[sweeps]]]]]] begin[:]
call[name[self].setSweep, parameter[call[name[sweeps]][name[i]]]]
call[name[Ys]][name[i]] assign[=] call[name[self].dataY][<ast.Slice object at 0x7da1afe0e9b0>]
variable[Av] assign[=] call[name[np].average, parameter[name[Ys], constant[0]]]
variable[Es] assign[=] call[name[np].std, parameter[name[Ys], constant[0]]]
variable[Xs] assign[=] call[name[self].dataX][<ast.Slice object at 0x7da1afe0ead0>]
if name[stdErr] begin[:]
variable[Es] assign[=] binary_operation[name[Es] / call[name[np].sqrt, parameter[call[name[len], parameter[name[sweeps]]]]]]
return[tuple[[<ast.Name object at 0x7da1afe0ee30>, <ast.Name object at 0x7da1afe0da50>, <ast.Name object at 0x7da1afe0df00>]]] | keyword[def] identifier[average_sweep] ( identifier[self] , identifier[T1] = literal[int] , identifier[T2] = keyword[None] , identifier[sweeps] = keyword[None] , identifier[stdErr] = keyword[False] ):
literal[string]
identifier[T1] = identifier[T1] * identifier[self] . identifier[rate]
keyword[if] identifier[T2] keyword[is] keyword[None] :
identifier[T2] = identifier[self] . identifier[sweepSize] - literal[int]
keyword[else] :
identifier[T2] = identifier[T2] * identifier[self] . identifier[rate]
keyword[if] identifier[sweeps] keyword[is] keyword[None] :
identifier[sweeps] = identifier[range] ( identifier[self] . identifier[sweeps] )
identifier[Ys] = identifier[np] . identifier[empty] (( identifier[len] ( identifier[sweeps] ),( identifier[T2] - identifier[T1] )))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[sweeps] )):
identifier[self] . identifier[setSweep] ( identifier[sweeps] [ identifier[i] ])
identifier[Ys] [ identifier[i] ]= identifier[self] . identifier[dataY] [ identifier[T1] : identifier[T2] ]
identifier[Av] = identifier[np] . identifier[average] ( identifier[Ys] , literal[int] )
identifier[Es] = identifier[np] . identifier[std] ( identifier[Ys] , literal[int] )
identifier[Xs] = identifier[self] . identifier[dataX] [ identifier[T1] : identifier[T2] ]
keyword[if] identifier[stdErr] :
identifier[Es] = identifier[Es] / identifier[np] . identifier[sqrt] ( identifier[len] ( identifier[sweeps] ))
keyword[return] identifier[Xs] , identifier[Av] , identifier[Es] | def average_sweep(self, T1=0, T2=None, sweeps=None, stdErr=False):
"""
given an array of sweeps, return X,Y,Err average.
This returns *SWEEPS* of data, not just 1 data point.
"""
T1 = T1 * self.rate
if T2 is None:
T2 = self.sweepSize - 1 # depends on [control=['if'], data=['T2']]
else:
T2 = T2 * self.rate
if sweeps is None:
sweeps = range(self.sweeps) # depends on [control=['if'], data=['sweeps']]
Ys = np.empty((len(sweeps), T2 - T1))
for i in range(len(sweeps)):
self.setSweep(sweeps[i])
Ys[i] = self.dataY[T1:T2] # depends on [control=['for'], data=['i']]
Av = np.average(Ys, 0)
Es = np.std(Ys, 0)
Xs = self.dataX[T1:T2]
if stdErr: #otherwise return stdev
Es = Es / np.sqrt(len(sweeps)) # depends on [control=['if'], data=[]]
return (Xs, Av, Es) |
def contourf(self, *args, **kwargs):
"""Plot contours.
If a 3D or higher Data object is passed, a lower dimensional
channel can be plotted, provided the ``squeeze`` of the channel
has ``ndim==2`` and the first two axes do not span dimensions
other than those spanned by that channel.
Parameters
----------
data : 2D WrightTools.data.Data object
Data to plot.
channel : int or string (optional)
Channel index or name. Default is 0.
dynamic_range : boolean (optional)
Force plotting of all contours, overloading for major extent. Only applies to signed
data. Default is False.
autolabel : {'none', 'both', 'x', 'y'} (optional)
Parameterize application of labels directly from data object. Default is none.
xlabel : string (optional)
xlabel. Default is None.
ylabel : string (optional)
ylabel. Default is None.
**kwargs
matplotlib.axes.Axes.contourf__ optional keyword arguments.
__ https://matplotlib.org/api/_as_gen/matplotlib.axes.Axes.contourf.html
Returns
-------
matplotlib.contour.QuadContourSet
"""
args, kwargs = self._parse_plot_args(*args, **kwargs, plot_type="contourf")
# Overloading contourf in an attempt to fix aliasing problems when saving vector graphics
# see https://stackoverflow.com/questions/15822159
# also see https://stackoverflow.com/a/32911283
# set_edgecolor('face') does indeed remove all of the aliasing problems
# unfortunately, it also seems to distort the plot in a subtle but important way
# it shifts the entire colorbar down w.r.t. the data (by one contour? not clear)
# so for now, I am trying to fix the problem by adding contour just below contourf
# this does not perfectly get rid of the aliasing, but it doesn't distort the data
# which is more important
# I anticipate that this method will be tinkered with in the future
# so I've left the things I have tried and abandoned as comments---good luck!
# ---Blaise 2017-07-30
kwargs["antialiased"] = False
kwargs["extend"] = "both"
contours = super().contourf(*args, **kwargs)
# fill lines
zorder = contours.collections[0].zorder - 0.1
levels = (contours.levels[1:] + contours.levels[:-1]) / 2
matplotlib.axes.Axes.contour(
self, *args[:3], levels=levels, cmap=contours.cmap, zorder=zorder
)
# decoration
self.set_facecolor([0.75] * 3)
# PathCollection modifications
for c in contours.collections:
pass
# c.set_rasterized(True)
# c.set_edgecolor('face')
return contours | def function[contourf, parameter[self]]:
constant[Plot contours.
If a 3D or higher Data object is passed, a lower dimensional
channel can be plotted, provided the ``squeeze`` of the channel
has ``ndim==2`` and the first two axes do not span dimensions
other than those spanned by that channel.
Parameters
----------
data : 2D WrightTools.data.Data object
Data to plot.
channel : int or string (optional)
Channel index or name. Default is 0.
dynamic_range : boolean (optional)
Force plotting of all contours, overloading for major extent. Only applies to signed
data. Default is False.
autolabel : {'none', 'both', 'x', 'y'} (optional)
Parameterize application of labels directly from data object. Default is none.
xlabel : string (optional)
xlabel. Default is None.
ylabel : string (optional)
ylabel. Default is None.
**kwargs
matplotlib.axes.Axes.contourf__ optional keyword arguments.
__ https://matplotlib.org/api/_as_gen/matplotlib.axes.Axes.contourf.html
Returns
-------
matplotlib.contour.QuadContourSet
]
<ast.Tuple object at 0x7da20c6a84f0> assign[=] call[name[self]._parse_plot_args, parameter[<ast.Starred object at 0x7da20c6ab550>]]
call[name[kwargs]][constant[antialiased]] assign[=] constant[False]
call[name[kwargs]][constant[extend]] assign[=] constant[both]
variable[contours] assign[=] call[call[name[super], parameter[]].contourf, parameter[<ast.Starred object at 0x7da20c6aad10>]]
variable[zorder] assign[=] binary_operation[call[name[contours].collections][constant[0]].zorder - constant[0.1]]
variable[levels] assign[=] binary_operation[binary_operation[call[name[contours].levels][<ast.Slice object at 0x7da20c6ab9a0>] + call[name[contours].levels][<ast.Slice object at 0x7da20c6abaf0>]] / constant[2]]
call[name[matplotlib].axes.Axes.contour, parameter[name[self], <ast.Starred object at 0x7da20c6a9930>]]
call[name[self].set_facecolor, parameter[binary_operation[list[[<ast.Constant object at 0x7da20c6ab310>]] * constant[3]]]]
for taget[name[c]] in starred[name[contours].collections] begin[:]
pass
return[name[contours]] | keyword[def] identifier[contourf] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[args] , identifier[kwargs] = identifier[self] . identifier[_parse_plot_args] (* identifier[args] ,** identifier[kwargs] , identifier[plot_type] = literal[string] )
identifier[kwargs] [ literal[string] ]= keyword[False]
identifier[kwargs] [ literal[string] ]= literal[string]
identifier[contours] = identifier[super] (). identifier[contourf] (* identifier[args] ,** identifier[kwargs] )
identifier[zorder] = identifier[contours] . identifier[collections] [ literal[int] ]. identifier[zorder] - literal[int]
identifier[levels] =( identifier[contours] . identifier[levels] [ literal[int] :]+ identifier[contours] . identifier[levels] [:- literal[int] ])/ literal[int]
identifier[matplotlib] . identifier[axes] . identifier[Axes] . identifier[contour] (
identifier[self] ,* identifier[args] [: literal[int] ], identifier[levels] = identifier[levels] , identifier[cmap] = identifier[contours] . identifier[cmap] , identifier[zorder] = identifier[zorder]
)
identifier[self] . identifier[set_facecolor] ([ literal[int] ]* literal[int] )
keyword[for] identifier[c] keyword[in] identifier[contours] . identifier[collections] :
keyword[pass]
keyword[return] identifier[contours] | def contourf(self, *args, **kwargs):
"""Plot contours.
If a 3D or higher Data object is passed, a lower dimensional
channel can be plotted, provided the ``squeeze`` of the channel
has ``ndim==2`` and the first two axes do not span dimensions
other than those spanned by that channel.
Parameters
----------
data : 2D WrightTools.data.Data object
Data to plot.
channel : int or string (optional)
Channel index or name. Default is 0.
dynamic_range : boolean (optional)
Force plotting of all contours, overloading for major extent. Only applies to signed
data. Default is False.
autolabel : {'none', 'both', 'x', 'y'} (optional)
Parameterize application of labels directly from data object. Default is none.
xlabel : string (optional)
xlabel. Default is None.
ylabel : string (optional)
ylabel. Default is None.
**kwargs
matplotlib.axes.Axes.contourf__ optional keyword arguments.
__ https://matplotlib.org/api/_as_gen/matplotlib.axes.Axes.contourf.html
Returns
-------
matplotlib.contour.QuadContourSet
"""
(args, kwargs) = self._parse_plot_args(*args, **kwargs, plot_type='contourf')
# Overloading contourf in an attempt to fix aliasing problems when saving vector graphics
# see https://stackoverflow.com/questions/15822159
# also see https://stackoverflow.com/a/32911283
# set_edgecolor('face') does indeed remove all of the aliasing problems
# unfortunately, it also seems to distort the plot in a subtle but important way
# it shifts the entire colorbar down w.r.t. the data (by one contour? not clear)
# so for now, I am trying to fix the problem by adding contour just below contourf
# this does not perfectly get rid of the aliasing, but it doesn't distort the data
# which is more important
# I anticipate that this method will be tinkered with in the future
# so I've left the things I have tried and abandoned as comments---good luck!
# ---Blaise 2017-07-30
kwargs['antialiased'] = False
kwargs['extend'] = 'both'
contours = super().contourf(*args, **kwargs)
# fill lines
zorder = contours.collections[0].zorder - 0.1
levels = (contours.levels[1:] + contours.levels[:-1]) / 2
matplotlib.axes.Axes.contour(self, *args[:3], levels=levels, cmap=contours.cmap, zorder=zorder)
# decoration
self.set_facecolor([0.75] * 3)
# PathCollection modifications
for c in contours.collections:
pass # depends on [control=['for'], data=[]]
# c.set_rasterized(True)
# c.set_edgecolor('face')
return contours |
def add_completions(
    replace_list: list, belstr: str, replace_span: Span, completion_text: str
) -> List[Mapping[str, Any]]:
    """Create completions to return given replacement list

    Builds one completion entry per candidate by splicing the candidate's
    replacement text into ``belstr`` over ``replace_span`` and computing the
    cursor location to place after the inserted text.

    Args:
        replace_list: list of completion replacement values
        belstr: BEL String
        replace_span: start, stop of belstr to replace
        completion_text: text to use for completion - used for creating highlight

    Returns:
        [{
            "replacement": replacement,
            "cursor_loc": cursor_loc,
            "highlight": highlight,
            "label": label,
        }]
    """
    completions = []
    for r in replace_list:
        # if '(' not in belstr:
        #     replacement = f'{r["replacement"]}()'
        #     cursor_loc = len(replacement) - 1  # inside parenthesis
        # elif r['type'] == 'Function' and replace_span[1] == len(belstr) - 1:

        # Index of the last character of belstr (0 when belstr is empty);
        # used below to detect an insertion at the very end of the string.
        if len(belstr) > 0:
            belstr_end = len(belstr) - 1
        else:
            belstr_end = 0
        log.debug(
            f'Replace list {r} Replace_span {replace_span} BELstr: {belstr} Len: {belstr_end} Test1 {r["type"] == "Function"} Test2 {replace_span[1] + 1 == len(belstr)}'
        )
        # Put a space between comma and following function arg
        if (
            r["type"] == "Function"
            and replace_span[0] > 0
            and belstr[replace_span[0] - 1] == ","
        ):
            log.debug("prior char is a comma")
            # Insert " <name>()" after the comma, keeping the tail of belstr.
            replacement = (
                belstr[0 : replace_span[0]]
                + " "
                + f"{r['replacement']}()"
                + belstr[replace_span[1] + 1 :]
            )
            # Cursor lands just after the closing parenthesis.
            cursor_loc = len(
                belstr[0 : replace_span[0]] + " " + f"{r['replacement']}()"
            )
        # Put a space between comma and following NSArg or StrArg
        elif replace_span[0] > 0 and belstr[replace_span[0] - 1] == ",":
            log.debug("prior char is a comma")
            replacement = (
                belstr[0 : replace_span[0]]
                + " "
                + r["replacement"]
                + belstr[replace_span[1] + 1 :]
            )
            cursor_loc = len(belstr[0 : replace_span[0]] + " " + r["replacement"])
        # Add function to end of belstr
        elif r["type"] == "Function" and replace_span[1] >= belstr_end:
            replacement = belstr[0 : replace_span[0]] + f"{r['replacement']}()"
            cursor_loc = len(replacement) - 1  # inside parenthesis
            log.debug(f"Replacement: {replacement}")
        # Insert replacement in beginning or middle of belstr
        else:
            replacement = (
                belstr[0 : replace_span[0]]
                + r["replacement"]
                + belstr[replace_span[1] + 1 :]
            )
            cursor_loc = len(
                belstr[0 : replace_span[0]] + r["replacement"]
            )  # move cursor just past replacement
        completions.append(
            {
                "replacement": replacement,
                "cursor_loc": cursor_loc,
                "highlight": r["highlight"],
                "label": r["label"],
            }
        )
    return completions
constant[Create completions to return given replacement list
Args:
replace_list: list of completion replacement values
belstr: BEL String
replace_span: start, stop of belstr to replace
completion_text: text to use for completion - used for creating highlight
Returns:
[{
"replacement": replacement,
"cursor_loc": cursor_loc,
"highlight": highlight,
"label": label,
}]
]
variable[completions] assign[=] list[[]]
for taget[name[r]] in starred[name[replace_list]] begin[:]
if compare[call[name[len], parameter[name[belstr]]] greater[>] constant[0]] begin[:]
variable[belstr_end] assign[=] binary_operation[call[name[len], parameter[name[belstr]]] - constant[1]]
call[name[log].debug, parameter[<ast.JoinedStr object at 0x7da1b18b9600>]]
if <ast.BoolOp object at 0x7da1b18b9300> begin[:]
call[name[log].debug, parameter[constant[prior char is a comma]]]
variable[replacement] assign[=] binary_operation[binary_operation[binary_operation[call[name[belstr]][<ast.Slice object at 0x7da1b18b8f70>] + constant[ ]] + <ast.JoinedStr object at 0x7da1b18b9a20>] + call[name[belstr]][<ast.Slice object at 0x7da1b1969ff0>]]
variable[cursor_loc] assign[=] call[name[len], parameter[binary_operation[binary_operation[call[name[belstr]][<ast.Slice object at 0x7da1b1969630>] + constant[ ]] + <ast.JoinedStr object at 0x7da1b196bf40>]]]
call[name[completions].append, parameter[dictionary[[<ast.Constant object at 0x7da1b195f0a0>, <ast.Constant object at 0x7da1b195f2b0>, <ast.Constant object at 0x7da1b195d120>, <ast.Constant object at 0x7da1b195cbb0>], [<ast.Name object at 0x7da1b195cc10>, <ast.Name object at 0x7da1b195de10>, <ast.Subscript object at 0x7da1b195d930>, <ast.Subscript object at 0x7da1b195e0e0>]]]]
return[name[completions]] | keyword[def] identifier[add_completions] (
identifier[replace_list] : identifier[list] , identifier[belstr] : identifier[str] , identifier[replace_span] : identifier[Span] , identifier[completion_text] : identifier[str]
)-> identifier[List] [ identifier[Mapping] [ identifier[str] , identifier[Any] ]]:
literal[string]
identifier[completions] =[]
keyword[for] identifier[r] keyword[in] identifier[replace_list] :
keyword[if] identifier[len] ( identifier[belstr] )> literal[int] :
identifier[belstr_end] = identifier[len] ( identifier[belstr] )- literal[int]
keyword[else] :
identifier[belstr_end] = literal[int]
identifier[log] . identifier[debug] (
literal[string]
)
keyword[if] (
identifier[r] [ literal[string] ]== literal[string]
keyword[and] identifier[replace_span] [ literal[int] ]> literal[int]
keyword[and] identifier[belstr] [ identifier[replace_span] [ literal[int] ]- literal[int] ]== literal[string]
):
identifier[log] . identifier[debug] ( literal[string] )
identifier[replacement] =(
identifier[belstr] [ literal[int] : identifier[replace_span] [ literal[int] ]]
+ literal[string]
+ literal[string]
+ identifier[belstr] [ identifier[replace_span] [ literal[int] ]+ literal[int] :]
)
identifier[cursor_loc] = identifier[len] (
identifier[belstr] [ literal[int] : identifier[replace_span] [ literal[int] ]]+ literal[string] + literal[string]
)
keyword[elif] identifier[replace_span] [ literal[int] ]> literal[int] keyword[and] identifier[belstr] [ identifier[replace_span] [ literal[int] ]- literal[int] ]== literal[string] :
identifier[log] . identifier[debug] ( literal[string] )
identifier[replacement] =(
identifier[belstr] [ literal[int] : identifier[replace_span] [ literal[int] ]]
+ literal[string]
+ identifier[r] [ literal[string] ]
+ identifier[belstr] [ identifier[replace_span] [ literal[int] ]+ literal[int] :]
)
identifier[cursor_loc] = identifier[len] ( identifier[belstr] [ literal[int] : identifier[replace_span] [ literal[int] ]]+ literal[string] + identifier[r] [ literal[string] ])
keyword[elif] identifier[r] [ literal[string] ]== literal[string] keyword[and] identifier[replace_span] [ literal[int] ]>= identifier[belstr_end] :
identifier[replacement] = identifier[belstr] [ literal[int] : identifier[replace_span] [ literal[int] ]]+ literal[string]
identifier[cursor_loc] = identifier[len] ( identifier[replacement] )- literal[int]
identifier[log] . identifier[debug] ( literal[string] )
keyword[else] :
identifier[replacement] =(
identifier[belstr] [ literal[int] : identifier[replace_span] [ literal[int] ]]
+ identifier[r] [ literal[string] ]
+ identifier[belstr] [ identifier[replace_span] [ literal[int] ]+ literal[int] :]
)
identifier[cursor_loc] = identifier[len] (
identifier[belstr] [ literal[int] : identifier[replace_span] [ literal[int] ]]+ identifier[r] [ literal[string] ]
)
identifier[completions] . identifier[append] (
{
literal[string] : identifier[replacement] ,
literal[string] : identifier[cursor_loc] ,
literal[string] : identifier[r] [ literal[string] ],
literal[string] : identifier[r] [ literal[string] ],
}
)
keyword[return] identifier[completions] | def add_completions(replace_list: list, belstr: str, replace_span: Span, completion_text: str) -> List[Mapping[str, Any]]:
"""Create completions to return given replacement list
Args:
replace_list: list of completion replacement values
belstr: BEL String
replace_span: start, stop of belstr to replace
completion_text: text to use for completion - used for creating highlight
Returns:
[{
"replacement": replacement,
"cursor_loc": cursor_loc,
"highlight": highlight,
"label": label,
}]
"""
completions = []
for r in replace_list:
# if '(' not in belstr:
# replacement = f'{r["replacement"]}()'
# cursor_loc = len(replacement) - 1 # inside parenthesis
# elif r['type'] == 'Function' and replace_span[1] == len(belstr) - 1:
if len(belstr) > 0:
belstr_end = len(belstr) - 1 # depends on [control=['if'], data=[]]
else:
belstr_end = 0
log.debug(f"Replace list {r} Replace_span {replace_span} BELstr: {belstr} Len: {belstr_end} Test1 {r['type'] == 'Function'} Test2 {replace_span[1] + 1 == len(belstr)}")
# Put a space between comma and following function arg
if r['type'] == 'Function' and replace_span[0] > 0 and (belstr[replace_span[0] - 1] == ','):
log.debug('prior char is a comma')
replacement = belstr[0:replace_span[0]] + ' ' + f"{r['replacement']}()" + belstr[replace_span[1] + 1:]
cursor_loc = len(belstr[0:replace_span[0]] + ' ' + f"{r['replacement']}()") # depends on [control=['if'], data=[]]
# Put a space between comman and following NSArg or StrArg
elif replace_span[0] > 0 and belstr[replace_span[0] - 1] == ',':
log.debug('prior char is a comma')
replacement = belstr[0:replace_span[0]] + ' ' + r['replacement'] + belstr[replace_span[1] + 1:]
cursor_loc = len(belstr[0:replace_span[0]] + ' ' + r['replacement']) # depends on [control=['if'], data=[]]
# Add function to end of belstr
elif r['type'] == 'Function' and replace_span[1] >= belstr_end:
replacement = belstr[0:replace_span[0]] + f"{r['replacement']}()"
cursor_loc = len(replacement) - 1 # inside parenthesis
log.debug(f'Replacement: {replacement}') # depends on [control=['if'], data=[]]
else:
# Insert replacement in beginning or middle of belstr
replacement = belstr[0:replace_span[0]] + r['replacement'] + belstr[replace_span[1] + 1:]
cursor_loc = len(belstr[0:replace_span[0]] + r['replacement']) # move cursor just past replacement
completions.append({'replacement': replacement, 'cursor_loc': cursor_loc, 'highlight': r['highlight'], 'label': r['label']}) # depends on [control=['for'], data=['r']]
return completions |
def quat_rotate(rotation, vector):
    """Rotate a vector according to a quaternion. Equivalent to the C++ method tf::quatRotate

    :param rotation: the rotation quaternion, (x, y, z, w) order
    :param vector: the vector to rotate
    :return: the rotated vector as a 3-element list
    """

    def _quat_times_point(quat, point):
        # Product of a quaternion with a point treated as a pure quaternion.
        qx, qy, qz, qw = quat[0], quat[1], quat[2], quat[3]
        px, py, pz = point[0], point[1], point[2]
        return (
            qw * px + qy * pz - qz * py,
            qw * py + qz * px - qx * pz,
            qw * pz + qx * py - qy * px,
            -qx * px - qy * py - qz * pz,
        )

    # v' = q * v * q^-1
    rotated = _quat_times_point(rotation, vector)
    rotated = tf.transformations.quaternion_multiply(
        rotated, tf.transformations.quaternion_inverse(rotation)
    )
    return [rotated[0], rotated[1], rotated[2]]
constant[
Rotate a vector according to a quaternion. Equivalent to the C++ method tf::quatRotate
:param rotation: the rotation
:param vector: the vector to rotate
:return: the rotated vector
]
def function[quat_mult_point, parameter[q, w]]:
return[tuple[[<ast.BinOp object at 0x7da207f003a0>, <ast.BinOp object at 0x7da207f00af0>, <ast.BinOp object at 0x7da207f03f40>, <ast.BinOp object at 0x7da207f00be0>]]]
variable[q] assign[=] call[name[quat_mult_point], parameter[name[rotation], name[vector]]]
variable[q] assign[=] call[name[tf].transformations.quaternion_multiply, parameter[name[q], call[name[tf].transformations.quaternion_inverse, parameter[name[rotation]]]]]
return[list[[<ast.Subscript object at 0x7da207f000d0>, <ast.Subscript object at 0x7da207f01f60>, <ast.Subscript object at 0x7da207f02260>]]] | keyword[def] identifier[quat_rotate] ( identifier[rotation] , identifier[vector] ):
literal[string]
keyword[def] identifier[quat_mult_point] ( identifier[q] , identifier[w] ):
keyword[return] ( identifier[q] [ literal[int] ]* identifier[w] [ literal[int] ]+ identifier[q] [ literal[int] ]* identifier[w] [ literal[int] ]- identifier[q] [ literal[int] ]* identifier[w] [ literal[int] ],
identifier[q] [ literal[int] ]* identifier[w] [ literal[int] ]+ identifier[q] [ literal[int] ]* identifier[w] [ literal[int] ]- identifier[q] [ literal[int] ]* identifier[w] [ literal[int] ],
identifier[q] [ literal[int] ]* identifier[w] [ literal[int] ]+ identifier[q] [ literal[int] ]* identifier[w] [ literal[int] ]- identifier[q] [ literal[int] ]* identifier[w] [ literal[int] ],
- identifier[q] [ literal[int] ]* identifier[w] [ literal[int] ]- identifier[q] [ literal[int] ]* identifier[w] [ literal[int] ]- identifier[q] [ literal[int] ]* identifier[w] [ literal[int] ])
identifier[q] = identifier[quat_mult_point] ( identifier[rotation] , identifier[vector] )
identifier[q] = identifier[tf] . identifier[transformations] . identifier[quaternion_multiply] (
identifier[q] , identifier[tf] . identifier[transformations] . identifier[quaternion_inverse] ( identifier[rotation] ))
keyword[return] [ identifier[q] [ literal[int] ], identifier[q] [ literal[int] ], identifier[q] [ literal[int] ]] | def quat_rotate(rotation, vector):
"""
Rotate a vector according to a quaternion. Equivalent to the C++ method tf::quatRotate
:param rotation: the rotation
:param vector: the vector to rotate
:return: the rotated vector
"""
def quat_mult_point(q, w):
return (q[3] * w[0] + q[1] * w[2] - q[2] * w[1], q[3] * w[1] + q[2] * w[0] - q[0] * w[2], q[3] * w[2] + q[0] * w[1] - q[1] * w[0], -q[0] * w[0] - q[1] * w[1] - q[2] * w[2])
q = quat_mult_point(rotation, vector)
q = tf.transformations.quaternion_multiply(q, tf.transformations.quaternion_inverse(rotation))
return [q[0], q[1], q[2]] |
def maybe_start_recording(tokens, index):
    """Return a new _RSTCommentBlockRecorder when its time to record."""
    token = tokens[index]
    # Only the opening marker of an RST comment block starts a recorder.
    if token.type != TokenType.BeginRSTComment:
        return None
    return _RSTCommentBlockRecorder(index, token.line)
constant[Return a new _RSTCommentBlockRecorder when its time to record.]
if compare[call[name[tokens]][name[index]].type equal[==] name[TokenType].BeginRSTComment] begin[:]
return[call[name[_RSTCommentBlockRecorder], parameter[name[index], call[name[tokens]][name[index]].line]]]
return[constant[None]] | keyword[def] identifier[maybe_start_recording] ( identifier[tokens] , identifier[index] ):
literal[string]
keyword[if] identifier[tokens] [ identifier[index] ]. identifier[type] == identifier[TokenType] . identifier[BeginRSTComment] :
keyword[return] identifier[_RSTCommentBlockRecorder] ( identifier[index] , identifier[tokens] [ identifier[index] ]. identifier[line] )
keyword[return] keyword[None] | def maybe_start_recording(tokens, index):
"""Return a new _RSTCommentBlockRecorder when its time to record."""
if tokens[index].type == TokenType.BeginRSTComment:
return _RSTCommentBlockRecorder(index, tokens[index].line) # depends on [control=['if'], data=[]]
return None |
def _filter_messages(messages, products=None, levels=None):
    """filter messages for desired products and levels.

    A message passes when its product (field 3) and level (field 4) are in
    the respective filter lists; an empty/omitted list matches everything.
    Each match yields a (start, end) byte segment, where end is taken from
    the next message's offset (None for the final message).
    """
    products = [] if products is None else products
    levels = [] if levels is None else levels
    total = len(messages)
    segments = []
    for idx, msg in enumerate(messages):
        product_ok = not products or msg[3] in products
        level_ok = not levels or msg[4] in levels
        if product_ok and level_ok:
            begin = int(msg[1])
            # Last message has no successor, so its segment is open-ended.
            finish = None if idx == total - 1 else int(messages[idx + 1][1])
            segments.append((begin, finish))
    return _join(segments)
constant[filter messages for desired products and levels.]
if compare[name[products] is constant[None]] begin[:]
variable[products] assign[=] list[[]]
if compare[name[levels] is constant[None]] begin[:]
variable[levels] assign[=] list[[]]
variable[segments] assign[=] list[[]]
variable[bounds] assign[=] call[name[len], parameter[name[messages]]]
for taget[tuple[[<ast.Name object at 0x7da1b0a610f0>, <ast.Name object at 0x7da1b0a63790>]]] in starred[call[name[enumerate], parameter[name[messages]]]] begin[:]
if <ast.BoolOp object at 0x7da1b0a626b0> begin[:]
variable[start] assign[=] call[name[int], parameter[call[name[message]][constant[1]]]]
if compare[name[i] equal[==] binary_operation[name[bounds] - constant[1]]] begin[:]
variable[end] assign[=] constant[None]
call[name[segments].append, parameter[tuple[[<ast.Name object at 0x7da1b0a61f00>, <ast.Name object at 0x7da1b0a61450>]]]]
return[call[name[_join], parameter[name[segments]]]] | keyword[def] identifier[_filter_messages] ( identifier[messages] , identifier[products] = keyword[None] , identifier[levels] = keyword[None] ):
literal[string]
keyword[if] identifier[products] keyword[is] keyword[None] :
identifier[products] =[]
keyword[if] identifier[levels] keyword[is] keyword[None] :
identifier[levels] =[]
identifier[segments] =[]
identifier[bounds] = identifier[len] ( identifier[messages] )
keyword[for] identifier[i] , identifier[message] keyword[in] identifier[enumerate] ( identifier[messages] ):
keyword[if] ( identifier[message] [ literal[int] ] keyword[in] identifier[products] keyword[or] identifier[len] ( identifier[products] )== literal[int] ) keyword[and] ( identifier[message] [ literal[int] ] keyword[in] identifier[levels] keyword[or] identifier[len] ( identifier[levels] )== literal[int] ):
identifier[start] = identifier[int] ( identifier[message] [ literal[int] ])
keyword[if] identifier[i] ==( identifier[bounds] - literal[int] ):
identifier[end] = keyword[None]
keyword[else] :
identifier[end] = identifier[int] ( identifier[messages] [ identifier[i] + literal[int] ][ literal[int] ])
identifier[segments] . identifier[append] (( identifier[start] , identifier[end] ))
keyword[return] identifier[_join] ( identifier[segments] ) | def _filter_messages(messages, products=None, levels=None):
"""filter messages for desired products and levels."""
if products is None:
products = [] # depends on [control=['if'], data=['products']]
if levels is None:
levels = [] # depends on [control=['if'], data=['levels']]
segments = []
bounds = len(messages)
for (i, message) in enumerate(messages):
if (message[3] in products or len(products) == 0) and (message[4] in levels or len(levels) == 0):
start = int(message[1])
if i == bounds - 1:
end = None # depends on [control=['if'], data=[]]
else:
end = int(messages[i + 1][1])
segments.append((start, end)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return _join(segments) |
def __set_proxy(self, config):
    """
    Set proxy if needed

    Parses ``config["proxy"]`` as ``host:port`` and applies it to the
    underlying connection. Does nothing when no proxy is configured.

    Args:
        config: Config dict; may contain a "proxy" key with a "host:port" string.

    Raises:
        ValueError: If the proxy string is not exactly "host:port" or the
            port is not an integer (ONEVIEW_CLIENT_INVALID_PROXY message).
    """
    if "proxy" in config and config["proxy"]:
        proxy = config["proxy"]
        splitted = proxy.split(':')
        if len(splitted) != 2:
            raise ValueError(ONEVIEW_CLIENT_INVALID_PROXY)
        proxy_host = splitted[0]
        try:
            proxy_port = int(splitted[1])
        except ValueError:
            # A non-numeric port (e.g. "host:abc") previously leaked a raw
            # "invalid literal for int()" error; surface the same informative
            # invalid-proxy message as the malformed-format case instead.
            raise ValueError(ONEVIEW_CLIENT_INVALID_PROXY) from None
        self.__connection.set_proxy(proxy_host, proxy_port)
constant[
Set proxy if needed
Args:
config: Config dict
]
if <ast.BoolOp object at 0x7da18bcc9420> begin[:]
variable[proxy] assign[=] call[name[config]][constant[proxy]]
variable[splitted] assign[=] call[name[proxy].split, parameter[constant[:]]]
if compare[call[name[len], parameter[name[splitted]]] not_equal[!=] constant[2]] begin[:]
<ast.Raise object at 0x7da20c76fca0>
variable[proxy_host] assign[=] call[name[splitted]][constant[0]]
variable[proxy_port] assign[=] call[name[int], parameter[call[name[splitted]][constant[1]]]]
call[name[self].__connection.set_proxy, parameter[name[proxy_host], name[proxy_port]]] | keyword[def] identifier[__set_proxy] ( identifier[self] , identifier[config] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[config] keyword[and] identifier[config] [ literal[string] ]:
identifier[proxy] = identifier[config] [ literal[string] ]
identifier[splitted] = identifier[proxy] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[splitted] )!= literal[int] :
keyword[raise] identifier[ValueError] ( identifier[ONEVIEW_CLIENT_INVALID_PROXY] )
identifier[proxy_host] = identifier[splitted] [ literal[int] ]
identifier[proxy_port] = identifier[int] ( identifier[splitted] [ literal[int] ])
identifier[self] . identifier[__connection] . identifier[set_proxy] ( identifier[proxy_host] , identifier[proxy_port] ) | def __set_proxy(self, config):
"""
Set proxy if needed
Args:
config: Config dict
"""
if 'proxy' in config and config['proxy']:
proxy = config['proxy']
splitted = proxy.split(':')
if len(splitted) != 2:
raise ValueError(ONEVIEW_CLIENT_INVALID_PROXY) # depends on [control=['if'], data=[]]
proxy_host = splitted[0]
proxy_port = int(splitted[1])
self.__connection.set_proxy(proxy_host, proxy_port) # depends on [control=['if'], data=[]] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.