code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def nr_profiles(arr, genomes):
    """
    Get a condensed cgMLST pairwise distance matrix for specified Genomes_
    where condensed means redundant cgMLST profiles are only represented once in the distance matrix.

    Args:
        arr (numpy.ndarray): 2D profile matrix with one row per entry in ``genomes``
        genomes (list): List of Genome_ names to retrieve condensed distance matrix for

    Returns:
        (numpy.ndarray, list): tuple of condensed cgMLST distance matrix and list of grouped Genomes_
    """
    gs_collapse = []      # groups of genomes that share an identical profile
    genome_idx_dict = {}  # representative genome -> index of its group in gs_collapse
    indices = []          # row indices of the unique profiles to keep
    patt_dict = {}        # raw profile bytes -> representative (first-seen) genome
    for i, g in enumerate(genomes):
        # tobytes() yields a hashable key for the whole row.
        # (tostring() was a deprecated alias, removed in NumPy 2.0.)
        p = arr[i, :].tobytes()
        if p in patt_dict:
            # Redundant profile: attach this genome to the existing group.
            parent = patt_dict[p]
            idx = genome_idx_dict[parent]
            gs_collapse[idx].append(g)
        else:
            # First occurrence: keep this row and open a new group.
            indices.append(i)
            patt_dict[p] = g
            genome_idx_dict[g] = len(gs_collapse)
            gs_collapse.append([g])
    return arr[indices, :], gs_collapse
constant[
Get a condensed cgMLST pairwise distance matrix for specified Genomes_
where condensed means redundant cgMLST profiles are only represented once in the distance matrix.
Args:
user_name (list): List of Genome_ names to retrieve condensed distance matrix for
Returns:
(numpy.array, list): tuple of condensed cgMLST distance matrix and list of grouped Genomes_
]
variable[gs_collapse] assign[=] list[[]]
variable[genome_idx_dict] assign[=] dictionary[[], []]
variable[indices] assign[=] list[[]]
variable[patt_dict] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b1a8fcd0>, <ast.Name object at 0x7da1b1a8feb0>]]] in starred[call[name[enumerate], parameter[name[genomes]]]] begin[:]
variable[p] assign[=] call[call[name[arr]][tuple[[<ast.Name object at 0x7da1b1a8fca0>, <ast.Slice object at 0x7da1b1a8fe50>]]].tostring, parameter[]]
if compare[name[p] in name[patt_dict]] begin[:]
variable[parent] assign[=] call[name[patt_dict]][name[p]]
variable[idx] assign[=] call[name[genome_idx_dict]][name[parent]]
call[call[name[gs_collapse]][name[idx]].append, parameter[name[g]]]
return[tuple[[<ast.Subscript object at 0x7da1b1ba9630>, <ast.Name object at 0x7da1b1bab250>]]] | keyword[def] identifier[nr_profiles] ( identifier[arr] , identifier[genomes] ):
literal[string]
identifier[gs_collapse] =[]
identifier[genome_idx_dict] ={}
identifier[indices] =[]
identifier[patt_dict] ={}
keyword[for] identifier[i] , identifier[g] keyword[in] identifier[enumerate] ( identifier[genomes] ):
identifier[p] = identifier[arr] [ identifier[i] ,:]. identifier[tostring] ()
keyword[if] identifier[p] keyword[in] identifier[patt_dict] :
identifier[parent] = identifier[patt_dict] [ identifier[p] ]
identifier[idx] = identifier[genome_idx_dict] [ identifier[parent] ]
identifier[gs_collapse] [ identifier[idx] ]. identifier[append] ( identifier[g] )
keyword[else] :
identifier[indices] . identifier[append] ( identifier[i] )
identifier[patt_dict] [ identifier[p] ]= identifier[g]
identifier[genome_idx_dict] [ identifier[g] ]= identifier[len] ( identifier[gs_collapse] )
identifier[gs_collapse] . identifier[append] ([ identifier[g] ])
keyword[return] identifier[arr] [ identifier[indices] ,:], identifier[gs_collapse] | def nr_profiles(arr, genomes):
"""
Get a condensed cgMLST pairwise distance matrix for specified Genomes_
where condensed means redundant cgMLST profiles are only represented once in the distance matrix.
Args:
user_name (list): List of Genome_ names to retrieve condensed distance matrix for
Returns:
(numpy.array, list): tuple of condensed cgMLST distance matrix and list of grouped Genomes_
"""
gs_collapse = []
genome_idx_dict = {}
indices = []
patt_dict = {}
for (i, g) in enumerate(genomes):
p = arr[i, :].tostring()
if p in patt_dict:
parent = patt_dict[p]
idx = genome_idx_dict[parent]
gs_collapse[idx].append(g) # depends on [control=['if'], data=['p', 'patt_dict']]
else:
indices.append(i)
patt_dict[p] = g
genome_idx_dict[g] = len(gs_collapse)
gs_collapse.append([g]) # depends on [control=['for'], data=[]]
return (arr[indices, :], gs_collapse) |
def render_lights_debug(self, camera_matrix, projection):
    """Draw wireframe outlines of every point-light volume."""
    ctx = self.ctx
    shader = self.debug_shader
    # Alpha-blend the debug lines over the scene.
    ctx.enable(moderngl.BLEND)
    ctx.blend_func = (moderngl.SRC_ALPHA, moderngl.ONE_MINUS_SRC_ALPHA)
    for point_light in self.point_lights:
        modelview = matrix44.multiply(point_light.matrix, camera_matrix)
        shader["m_proj"].write(projection.tobytes())
        shader["m_mv"].write(modelview.astype('f4').tobytes())
        shader["size"].value = point_light.radius
        self.unit_cube.render(shader, mode=moderngl.LINE_STRIP)
    ctx.disable(moderngl.BLEND)
constant[Render outlines of light volumes]
call[name[self].ctx.enable, parameter[name[moderngl].BLEND]]
name[self].ctx.blend_func assign[=] tuple[[<ast.Attribute object at 0x7da2041da020>, <ast.Attribute object at 0x7da2041dbe20>]]
for taget[name[light]] in starred[name[self].point_lights] begin[:]
variable[m_mv] assign[=] call[name[matrix44].multiply, parameter[name[light].matrix, name[camera_matrix]]]
variable[light_size] assign[=] name[light].radius
call[call[name[self].debug_shader][constant[m_proj]].write, parameter[call[name[projection].tobytes, parameter[]]]]
call[call[name[self].debug_shader][constant[m_mv]].write, parameter[call[call[name[m_mv].astype, parameter[constant[f4]]].tobytes, parameter[]]]]
call[name[self].debug_shader][constant[size]].value assign[=] name[light_size]
call[name[self].unit_cube.render, parameter[name[self].debug_shader]]
call[name[self].ctx.disable, parameter[name[moderngl].BLEND]] | keyword[def] identifier[render_lights_debug] ( identifier[self] , identifier[camera_matrix] , identifier[projection] ):
literal[string]
identifier[self] . identifier[ctx] . identifier[enable] ( identifier[moderngl] . identifier[BLEND] )
identifier[self] . identifier[ctx] . identifier[blend_func] = identifier[moderngl] . identifier[SRC_ALPHA] , identifier[moderngl] . identifier[ONE_MINUS_SRC_ALPHA]
keyword[for] identifier[light] keyword[in] identifier[self] . identifier[point_lights] :
identifier[m_mv] = identifier[matrix44] . identifier[multiply] ( identifier[light] . identifier[matrix] , identifier[camera_matrix] )
identifier[light_size] = identifier[light] . identifier[radius]
identifier[self] . identifier[debug_shader] [ literal[string] ]. identifier[write] ( identifier[projection] . identifier[tobytes] ())
identifier[self] . identifier[debug_shader] [ literal[string] ]. identifier[write] ( identifier[m_mv] . identifier[astype] ( literal[string] ). identifier[tobytes] ())
identifier[self] . identifier[debug_shader] [ literal[string] ]. identifier[value] = identifier[light_size]
identifier[self] . identifier[unit_cube] . identifier[render] ( identifier[self] . identifier[debug_shader] , identifier[mode] = identifier[moderngl] . identifier[LINE_STRIP] )
identifier[self] . identifier[ctx] . identifier[disable] ( identifier[moderngl] . identifier[BLEND] ) | def render_lights_debug(self, camera_matrix, projection):
"""Render outlines of light volumes"""
self.ctx.enable(moderngl.BLEND)
self.ctx.blend_func = (moderngl.SRC_ALPHA, moderngl.ONE_MINUS_SRC_ALPHA)
for light in self.point_lights:
m_mv = matrix44.multiply(light.matrix, camera_matrix)
light_size = light.radius
self.debug_shader['m_proj'].write(projection.tobytes())
self.debug_shader['m_mv'].write(m_mv.astype('f4').tobytes())
self.debug_shader['size'].value = light_size
self.unit_cube.render(self.debug_shader, mode=moderngl.LINE_STRIP) # depends on [control=['for'], data=['light']]
self.ctx.disable(moderngl.BLEND) |
def quniform(low, high, q, random_state):
    '''
    Draw a uniform sample from [low, high] and quantize it to the nearest multiple of q.

    low: a float giving the lower bound
    high: a float giving the upper bound
    q: sample step (quantization granularity)
    random_state: an object of numpy.random.RandomState
    '''
    sample = uniform(low, high, random_state)
    # Snap the raw sample onto the q-spaced grid.
    return np.round(sample / q) * q
constant[
low: an float that represent an lower bound
high: an float that represent an upper bound
q: sample step
random_state: an object of numpy.random.RandomState
]
return[binary_operation[call[name[np].round, parameter[binary_operation[call[name[uniform], parameter[name[low], name[high], name[random_state]]] / name[q]]]] * name[q]]] | keyword[def] identifier[quniform] ( identifier[low] , identifier[high] , identifier[q] , identifier[random_state] ):
literal[string]
keyword[return] identifier[np] . identifier[round] ( identifier[uniform] ( identifier[low] , identifier[high] , identifier[random_state] )/ identifier[q] )* identifier[q] | def quniform(low, high, q, random_state):
"""
low: an float that represent an lower bound
high: an float that represent an upper bound
q: sample step
random_state: an object of numpy.random.RandomState
"""
return np.round(uniform(low, high, random_state) / q) * q |
def _request_address(self):
"""Get address of a POST request to the service."""
if not self._request_address_val:
template = (
'https://sb-ssl.google.com/safebrowsing/api/lookup'
'?client={0}&key={1}&appver={2}&pver={3}'
)
self._request_address_val = template.format(
self.client_name,
self.api_key,
self.app_version,
self.protocol_version
)
return self._request_address_val | def function[_request_address, parameter[self]]:
constant[Get address of a POST request to the service.]
if <ast.UnaryOp object at 0x7da1b25d35b0> begin[:]
variable[template] assign[=] constant[https://sb-ssl.google.com/safebrowsing/api/lookup?client={0}&key={1}&appver={2}&pver={3}]
name[self]._request_address_val assign[=] call[name[template].format, parameter[name[self].client_name, name[self].api_key, name[self].app_version, name[self].protocol_version]]
return[name[self]._request_address_val] | keyword[def] identifier[_request_address] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_request_address_val] :
identifier[template] =(
literal[string]
literal[string]
)
identifier[self] . identifier[_request_address_val] = identifier[template] . identifier[format] (
identifier[self] . identifier[client_name] ,
identifier[self] . identifier[api_key] ,
identifier[self] . identifier[app_version] ,
identifier[self] . identifier[protocol_version]
)
keyword[return] identifier[self] . identifier[_request_address_val] | def _request_address(self):
"""Get address of a POST request to the service."""
if not self._request_address_val:
template = 'https://sb-ssl.google.com/safebrowsing/api/lookup?client={0}&key={1}&appver={2}&pver={3}'
self._request_address_val = template.format(self.client_name, self.api_key, self.app_version, self.protocol_version) # depends on [control=['if'], data=[]]
return self._request_address_val |
def _create_attrcontent_class(name, fields, inheritance=(object,), data_structure=None, extra_functions=None, docstring=""):
    '''Helper function that creates a class for attribute contents.
    This function creates is a boilerplate to create all the expected methods of
    an attributes. The basic methods work in the same way for all classes.
    Once it executes it defines a dynamic class with the methods "__init__",
    "__repr__" and "__eq__" based on the fields passed in the ``fields`` parameter.
    If the ``data_structure`` parameter is present, the classmethod ``get_representation_size``
    and the class variable ``_REPR`` will also be present.
    It is also possible to define the inheritance using this method by passing
    a list of classes in the ``inheritance`` parameter.
    If the ``extra_functions`` argument is present, they will be added to the
    class.
    Note:
        If the ``extra_functions`` has defined any of dinamically created methods,
        they will *replace* the ones created.
    Args:
        name (str): Name of the class that will be created.
        fields (tuple(str)): The attributes that will be added to the class.
        inherited (tuple(object)): List of objects that will be inherited by
            the new class
        extra_functions (dict(str : function)): A dictionary where the key
            will be the name of the function in the class and the content
            of the key is a function that will be bound to the class
        doctring (str): Class' docstring
    Returns:
        A new class with the ``name`` as it's name.
    '''
    def create_func_from_str(f_name, args, content, docstring=""):
        '''Helper function to create functions from strings.
        To improve performance, the standard functions are created at runtime
        based on the string derived from the content. This way the function, from
        the interpreter point of view, looks like statically defined.
        Note:
            This function should be used only for methods that will receive
            ``self`` (instace methods). The ``self`` argument is added automatically.
        Args:
            f_name (str): Function name
            args (list(str)): List of extra arguments that the function will receive
            content (str): Content of the function
            docstring (str): Function's docstring
        Returns:
            A new function object that can be inserted in the class.
        '''
        # Run the generated source in a private namespace so the new function
        # neither leaks names into nor reads names from this module.
        exec_namespace = {"__name__" : f"{f_name}"}
        # Every generated function is an instance method: "self" is always first.
        new_args = ", ".join(["self"] + args)
        func_str = f"def {f_name}({new_args}): {content}"
        exec(func_str, exec_namespace)
        func = exec_namespace[f_name]
        func.__doc__ = docstring
        return func
    #creates the functions necessary for the new class
    slots = fields
    # __init__ unpacks a single "content" tuple into the declared fields,
    # defaulting each field to None.
    init_content = ", ".join([f"self.{field}" for field in fields]) + " = content"
    __init__ = create_func_from_str("__init__", [f"content=(None,)*{len(fields)}"], init_content)
    # __repr__ renders "ClassName(field1=..., field2=...)".
    temp = ", ".join([f"{field}={{self.{field}}}" for field in fields])
    # NOTE(review): "repr" shadows the builtin of the same name, but only
    # locally inside this factory.
    repr = "return " + f"f\'{{self.__class__.__name__}}({temp})\'"
    __repr__ = create_func_from_str("__repr__", [], repr)
    # __eq__ compares all fields pairwise; instances of other types are unequal.
    temp = " and ".join([f"self.{field} == other.{field}" for field in fields])
    eq = f"return {temp} if isinstance(other, {name}) else False"
    __eq__ = create_func_from_str("__eq__", ["other"], eq)
    @classmethod
    def get_representation_size(cls):
        # Size in bytes of the packed binary representation (see _REPR below).
        return cls._REPR.size
    #adapted from namedtuple code
    # Modify function metadata to help with introspection and debugging
    for method in (__init__, get_representation_size.__func__, __eq__,
                   __repr__):
        method.__qualname__ = f'{name}.{method.__name__}'
    #map class namespace for the class creation
    namespace = {"__slots__" : slots,
                 "__init__" : __init__,
                 "__repr__" : __repr__,
                 "__eq__" : __eq__
    }
    if data_structure is not None:
        # Pre-compiled struct plus the classmethod that exposes its size.
        namespace["_REPR"] = struct.Struct(data_structure)
        namespace["get_representation_size"] = get_representation_size
    if docstring:
        namespace["__doc__"] = docstring
    #some new mappings can be set or overload the ones defined
    if extra_functions is not None:
        for method in extra_functions.values():
            # Fix up __qualname__ for plain functions, then classmethods;
            # anything else (plain attributes) is left untouched.
            try:
                method.__qualname__ = f'{name}.{method.__name__}'
            except AttributeError:
                try:
                    method.__func__.__qualname__ = f'{name}.{method.__func__.__name__}'
                except AttributeError:
                    #if we got here, it is not a method or classmethod, must be an attribute
                    #TODO feels like a hack, change it
                    #TODO design a test for this
                    pass
        # extra_functions come last so they override the generated methods.
        namespace = {**namespace, **extra_functions}
    #TODO check if docstring was provided, issue a warning
    new_class = type(name, inheritance, namespace)
    # adapted from namedtuple code
    # For pickling to work, the __module__ variable needs to be set to the frame
    # where the named tuple is created. Bypass this step in environments where
    # sys._getframe is not defined (Jython for example) or sys._getframe is not
    # defined for arguments greater than 0 (IronPython), or where the user has
    # specified a particular module.
    try:
        new_class.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__')
    except (AttributeError, ValueError):
        pass
    return new_class
constant[Helper function that creates a class for attribute contents.
This function creates is a boilerplate to create all the expected methods of
an attributes. The basic methods work in the same way for all classes.
Once it executes it defines a dynamic class with the methods "__init__",
"__repr__" and "__eq__" based on the fields passed in the ``fields`` parameter.
If the ``data_structure`` parameter is present, the classmethod ``get_representation_size``
and the class variable ``_REPR`` will also be present.
It is also possible to define the inheritance using this method by passing
a list of classes in the ``inheritance`` parameter.
If the ``extra_functions`` argument is present, they will be added to the
class.
Note:
If the ``extra_functions`` has defined any of dinamically created methods,
they will *replace* the ones created.
Args:
name (str): Name of the class that will be created.
fields (tuple(str)): The attributes that will be added to the class.
inherited (tuple(object)): List of objects that will be inherited by
the new class
extra_functions (dict(str : function)): A dictionary where the key
will be the name of the function in the class and the content
of the key is a function that will be bound to the class
doctring (str): Class' docstring
Returns:
A new class with the ``name`` as it's name.
]
def function[create_func_from_str, parameter[f_name, args, content, docstring]]:
constant[Helper function to create functions from strings.
To improve performance, the standard functions are created at runtime
based on the string derived from the content. This way the function, from
the interpreter point of view, looks like statically defined.
Note:
This function should be used only for methods that will receive
``self`` (instace methods). The ``self`` argument is added automatically.
Args:
f_name (str): Function name
args (list(str)): List of extra arguments that the function will receive
content (str): Content of the function
docstring (str): Function's docstring
Returns:
A new function object that can be inserted in the class.
]
variable[exec_namespace] assign[=] dictionary[[<ast.Constant object at 0x7da1b15b6620>], [<ast.JoinedStr object at 0x7da1b15b61d0>]]
variable[new_args] assign[=] call[constant[, ].join, parameter[binary_operation[list[[<ast.Constant object at 0x7da1b15b5150>]] + name[args]]]]
variable[func_str] assign[=] <ast.JoinedStr object at 0x7da1b15b5c00>
call[name[exec], parameter[name[func_str], name[exec_namespace]]]
variable[func] assign[=] call[name[exec_namespace]][name[f_name]]
name[func].__doc__ assign[=] name[docstring]
return[name[func]]
variable[slots] assign[=] name[fields]
variable[init_content] assign[=] binary_operation[call[constant[, ].join, parameter[<ast.ListComp object at 0x7da1b15b7b50>]] + constant[ = content]]
variable[__init__] assign[=] call[name[create_func_from_str], parameter[constant[__init__], list[[<ast.JoinedStr object at 0x7da1b1354070>]], name[init_content]]]
variable[temp] assign[=] call[constant[, ].join, parameter[<ast.ListComp object at 0x7da1b13579a0>]]
variable[repr] assign[=] binary_operation[constant[return ] + <ast.JoinedStr object at 0x7da1b1356b90>]
variable[__repr__] assign[=] call[name[create_func_from_str], parameter[constant[__repr__], list[[]], name[repr]]]
variable[temp] assign[=] call[constant[ and ].join, parameter[<ast.ListComp object at 0x7da1b1355a80>]]
variable[eq] assign[=] <ast.JoinedStr object at 0x7da1b1356f20>
variable[__eq__] assign[=] call[name[create_func_from_str], parameter[constant[__eq__], list[[<ast.Constant object at 0x7da1b1357700>]], name[eq]]]
def function[get_representation_size, parameter[cls]]:
return[name[cls]._REPR.size]
for taget[name[method]] in starred[tuple[[<ast.Name object at 0x7da1b1356410>, <ast.Attribute object at 0x7da1b1354220>, <ast.Name object at 0x7da1b1355930>, <ast.Name object at 0x7da1b1356b30>]]] begin[:]
name[method].__qualname__ assign[=] <ast.JoinedStr object at 0x7da1b1354d60>
variable[namespace] assign[=] dictionary[[<ast.Constant object at 0x7da1b1355900>, <ast.Constant object at 0x7da1b13551b0>, <ast.Constant object at 0x7da1b1357100>, <ast.Constant object at 0x7da1b1355c90>], [<ast.Name object at 0x7da1b1355330>, <ast.Name object at 0x7da1b1354d90>, <ast.Name object at 0x7da1b1355810>, <ast.Name object at 0x7da1b1355090>]]
if compare[name[data_structure] is_not constant[None]] begin[:]
call[name[namespace]][constant[_REPR]] assign[=] call[name[struct].Struct, parameter[name[data_structure]]]
call[name[namespace]][constant[get_representation_size]] assign[=] name[get_representation_size]
if name[docstring] begin[:]
call[name[namespace]][constant[__doc__]] assign[=] name[docstring]
if compare[name[extra_functions] is_not constant[None]] begin[:]
for taget[name[method]] in starred[call[name[extra_functions].values, parameter[]]] begin[:]
<ast.Try object at 0x7da1b15b1c00>
variable[namespace] assign[=] dictionary[[None, None], [<ast.Name object at 0x7da1b1307220>, <ast.Name object at 0x7da1b1307460>]]
variable[new_class] assign[=] call[name[type], parameter[name[name], name[inheritance], name[namespace]]]
<ast.Try object at 0x7da1b13061a0>
return[name[new_class]] | keyword[def] identifier[_create_attrcontent_class] ( identifier[name] , identifier[fields] , identifier[inheritance] =( identifier[object] ,), identifier[data_structure] = keyword[None] , identifier[extra_functions] = keyword[None] , identifier[docstring] = literal[string] ):
literal[string]
keyword[def] identifier[create_func_from_str] ( identifier[f_name] , identifier[args] , identifier[content] , identifier[docstring] = literal[string] ):
literal[string]
identifier[exec_namespace] ={ literal[string] : literal[string] }
identifier[new_args] = literal[string] . identifier[join] ([ literal[string] ]+ identifier[args] )
identifier[func_str] = literal[string]
identifier[exec] ( identifier[func_str] , identifier[exec_namespace] )
identifier[func] = identifier[exec_namespace] [ identifier[f_name] ]
identifier[func] . identifier[__doc__] = identifier[docstring]
keyword[return] identifier[func]
identifier[slots] = identifier[fields]
identifier[init_content] = literal[string] . identifier[join] ([ literal[string] keyword[for] identifier[field] keyword[in] identifier[fields] ])+ literal[string]
identifier[__init__] = identifier[create_func_from_str] ( literal[string] ,[ literal[string] ], identifier[init_content] )
identifier[temp] = literal[string] . identifier[join] ([ literal[string] keyword[for] identifier[field] keyword[in] identifier[fields] ])
identifier[repr] = literal[string] + literal[string]
identifier[__repr__] = identifier[create_func_from_str] ( literal[string] ,[], identifier[repr] )
identifier[temp] = literal[string] . identifier[join] ([ literal[string] keyword[for] identifier[field] keyword[in] identifier[fields] ])
identifier[eq] = literal[string]
identifier[__eq__] = identifier[create_func_from_str] ( literal[string] ,[ literal[string] ], identifier[eq] )
@ identifier[classmethod]
keyword[def] identifier[get_representation_size] ( identifier[cls] ):
keyword[return] identifier[cls] . identifier[_REPR] . identifier[size]
keyword[for] identifier[method] keyword[in] ( identifier[__init__] , identifier[get_representation_size] . identifier[__func__] , identifier[__eq__] ,
identifier[__repr__] ):
identifier[method] . identifier[__qualname__] = literal[string]
identifier[namespace] ={ literal[string] : identifier[slots] ,
literal[string] : identifier[__init__] ,
literal[string] : identifier[__repr__] ,
literal[string] : identifier[__eq__]
}
keyword[if] identifier[data_structure] keyword[is] keyword[not] keyword[None] :
identifier[namespace] [ literal[string] ]= identifier[struct] . identifier[Struct] ( identifier[data_structure] )
identifier[namespace] [ literal[string] ]= identifier[get_representation_size]
keyword[if] identifier[docstring] :
identifier[namespace] [ literal[string] ]= identifier[docstring]
keyword[if] identifier[extra_functions] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[method] keyword[in] identifier[extra_functions] . identifier[values] ():
keyword[try] :
identifier[method] . identifier[__qualname__] = literal[string]
keyword[except] identifier[AttributeError] :
keyword[try] :
identifier[method] . identifier[__func__] . identifier[__qualname__] = literal[string]
keyword[except] identifier[AttributeError] :
keyword[pass]
identifier[namespace] ={** identifier[namespace] ,** identifier[extra_functions] }
identifier[new_class] = identifier[type] ( identifier[name] , identifier[inheritance] , identifier[namespace] )
keyword[try] :
identifier[new_class] . identifier[__module__] = identifier[_sys] . identifier[_getframe] ( literal[int] ). identifier[f_globals] . identifier[get] ( literal[string] , literal[string] )
keyword[except] ( identifier[AttributeError] , identifier[ValueError] ):
keyword[pass]
keyword[return] identifier[new_class] | def _create_attrcontent_class(name, fields, inheritance=(object,), data_structure=None, extra_functions=None, docstring=''):
"""Helper function that creates a class for attribute contents.
This function creates is a boilerplate to create all the expected methods of
an attributes. The basic methods work in the same way for all classes.
Once it executes it defines a dynamic class with the methods "__init__",
"__repr__" and "__eq__" based on the fields passed in the ``fields`` parameter.
If the ``data_structure`` parameter is present, the classmethod ``get_representation_size``
and the class variable ``_REPR`` will also be present.
It is also possible to define the inheritance using this method by passing
a list of classes in the ``inheritance`` parameter.
If the ``extra_functions`` argument is present, they will be added to the
class.
Note:
If the ``extra_functions`` has defined any of dinamically created methods,
they will *replace* the ones created.
Args:
name (str): Name of the class that will be created.
fields (tuple(str)): The attributes that will be added to the class.
inherited (tuple(object)): List of objects that will be inherited by
the new class
extra_functions (dict(str : function)): A dictionary where the key
will be the name of the function in the class and the content
of the key is a function that will be bound to the class
doctring (str): Class' docstring
Returns:
A new class with the ``name`` as it's name.
"""
def create_func_from_str(f_name, args, content, docstring=''):
"""Helper function to create functions from strings.
To improve performance, the standard functions are created at runtime
based on the string derived from the content. This way the function, from
the interpreter point of view, looks like statically defined.
Note:
This function should be used only for methods that will receive
``self`` (instace methods). The ``self`` argument is added automatically.
Args:
f_name (str): Function name
args (list(str)): List of extra arguments that the function will receive
content (str): Content of the function
docstring (str): Function's docstring
Returns:
A new function object that can be inserted in the class.
"""
exec_namespace = {'__name__': f'{f_name}'}
new_args = ', '.join(['self'] + args)
func_str = f'def {f_name}({new_args}): {content}'
exec(func_str, exec_namespace)
func = exec_namespace[f_name]
func.__doc__ = docstring
return func
#creates the functions necessary for the new class
slots = fields
init_content = ', '.join([f'self.{field}' for field in fields]) + ' = content'
__init__ = create_func_from_str('__init__', [f'content=(None,)*{len(fields)}'], init_content)
temp = ', '.join([f'{field}={{self.{field}}}' for field in fields])
repr = 'return ' + f"f'{{self.__class__.__name__}}({temp})'"
__repr__ = create_func_from_str('__repr__', [], repr)
temp = ' and '.join([f'self.{field} == other.{field}' for field in fields])
eq = f'return {temp} if isinstance(other, {name}) else False'
__eq__ = create_func_from_str('__eq__', ['other'], eq)
@classmethod
def get_representation_size(cls):
return cls._REPR.size
#adapted from namedtuple code
# Modify function metadata to help with introspection and debugging
for method in (__init__, get_representation_size.__func__, __eq__, __repr__):
method.__qualname__ = f'{name}.{method.__name__}' # depends on [control=['for'], data=['method']]
#map class namespace for the class creation
namespace = {'__slots__': slots, '__init__': __init__, '__repr__': __repr__, '__eq__': __eq__}
if data_structure is not None:
namespace['_REPR'] = struct.Struct(data_structure)
namespace['get_representation_size'] = get_representation_size # depends on [control=['if'], data=['data_structure']]
if docstring:
namespace['__doc__'] = docstring # depends on [control=['if'], data=[]]
#some new mappings can be set or overload the ones defined
if extra_functions is not None:
for method in extra_functions.values():
try:
method.__qualname__ = f'{name}.{method.__name__}' # depends on [control=['try'], data=[]]
except AttributeError:
try:
method.__func__.__qualname__ = f'{name}.{method.__func__.__name__}' # depends on [control=['try'], data=[]]
except AttributeError:
#if we got here, it is not a method or classmethod, must be an attribute
#TODO feels like a hack, change it
#TODO design a test for this
pass # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['method']]
namespace = {**namespace, **extra_functions} # depends on [control=['if'], data=['extra_functions']]
#TODO check if docstring was provided, issue a warning
new_class = type(name, inheritance, namespace)
# adapted from namedtuple code
# For pickling to work, the __module__ variable needs to be set to the frame
# where the named tuple is created. Bypass this step in environments where
# sys._getframe is not defined (Jython for example) or sys._getframe is not
# defined for arguments greater than 0 (IronPython), or where the user has
# specified a particular module.
try:
new_class.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__') # depends on [control=['try'], data=[]]
except (AttributeError, ValueError):
pass # depends on [control=['except'], data=[]]
return new_class |
def bind_queues(self, bindings):
    """
    Declare a set of bindings between queues and exchanges.

    Args:
        bindings (list of dict): A list of binding definitions. Each dictionary
            must contain the "queue" key whose value is the name of the queue
            to create the binding on, as well as the "exchange" key whose value
            should be the name of the exchange to bind to. Additional acceptable
            keys are any keyword arguments accepted by
            :meth:`pika.channel.Channel.queue_bind`.

    Raises:
        NoFreeChannels: If there are no available channels on this connection.
            If this occurs, you can either reduce the number of consumers on this
            connection or create an additional connection.
        BadDeclaration: If a binding could not be declared. This can occur if the
            queue or exchange don't exist, or if they do, but the current user does
            not have permissions to create bindings.
    """
    # NOTE(review): the bare yields suggest this runs under a generator-based
    # coroutine decorator such as Twisted's inlineCallbacks — confirm on the
    # enclosing class.
    channel = yield self._allocate_channel()
    try:
        for binding in bindings:
            try:
                yield channel.queue_bind(**binding)
            except pika.exceptions.ChannelClosed as e:
                # Surface broker rejections as our own exception type,
                # carrying the binding that failed and the original error.
                raise BadDeclaration("binding", binding, e)
    finally:
        # Best-effort cleanup: the channel may already be closed (e.g. after
        # a failed declaration), so AMQP-level errors are ignored here.
        try:
            channel.close()
        except pika.exceptions.AMQPError:
            pass
constant[
Declare a set of bindings between queues and exchanges.
Args:
bindings (list of dict): A list of binding definitions. Each dictionary
must contain the "queue" key whose value is the name of the queue
to create the binding on, as well as the "exchange" key whose value
should be the name of the exchange to bind to. Additional acceptable
keys are any keyword arguments accepted by
:meth:`pika.channel.Channel.queue_bind`.
Raises:
NoFreeChannels: If there are no available channels on this connection.
If this occurs, you can either reduce the number of consumers on this
connection or create an additional connection.
BadDeclaration: If a binding could not be declared. This can occur if the
queue or exchange don't exist, or if they do, but the current user does
not have permissions to create bindings.
]
variable[channel] assign[=] <ast.Yield object at 0x7da1b056a140>
<ast.Try object at 0x7da1b0569f30> | keyword[def] identifier[bind_queues] ( identifier[self] , identifier[bindings] ):
literal[string]
identifier[channel] = keyword[yield] identifier[self] . identifier[_allocate_channel] ()
keyword[try] :
keyword[for] identifier[binding] keyword[in] identifier[bindings] :
keyword[try] :
keyword[yield] identifier[channel] . identifier[queue_bind] (** identifier[binding] )
keyword[except] identifier[pika] . identifier[exceptions] . identifier[ChannelClosed] keyword[as] identifier[e] :
keyword[raise] identifier[BadDeclaration] ( literal[string] , identifier[binding] , identifier[e] )
keyword[finally] :
keyword[try] :
identifier[channel] . identifier[close] ()
keyword[except] identifier[pika] . identifier[exceptions] . identifier[AMQPError] :
keyword[pass] | def bind_queues(self, bindings):
"""
Declare a set of bindings between queues and exchanges.
Args:
bindings (list of dict): A list of binding definitions. Each dictionary
must contain the "queue" key whose value is the name of the queue
to create the binding on, as well as the "exchange" key whose value
should be the name of the exchange to bind to. Additional acceptable
keys are any keyword arguments accepted by
:meth:`pika.channel.Channel.queue_bind`.
Raises:
NoFreeChannels: If there are no available channels on this connection.
If this occurs, you can either reduce the number of consumers on this
connection or create an additional connection.
BadDeclaration: If a binding could not be declared. This can occur if the
queue or exchange don't exist, or if they do, but the current user does
not have permissions to create bindings.
"""
channel = (yield self._allocate_channel())
try:
for binding in bindings:
try:
yield channel.queue_bind(**binding) # depends on [control=['try'], data=[]]
except pika.exceptions.ChannelClosed as e:
raise BadDeclaration('binding', binding, e) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['binding']] # depends on [control=['try'], data=[]]
finally:
try:
channel.close() # depends on [control=['try'], data=[]]
except pika.exceptions.AMQPError:
pass # depends on [control=['except'], data=[]] |
def run_bcl2fastq(run_folder, ss_csv, config):
    """Run bcl2fastq for de-multiplexing and fastq generation.

    run_folder -- directory of Illumina outputs
    ss_csv -- Samplesheet CSV file describing samples.
    config -- nested configuration dict; ("algorithm", "num_cores") and the
        "process" section are consulted below.

    Returns the directory the fastq files were written to.
    """
    bc_dir = os.path.join(run_folder, "Data", "Intensities", "BaseCalls")
    output_dir = os.path.join(run_folder, "fastq")
    # Run the configuration step only once: configureBclToFastq.pl writes a
    # Makefile into output_dir, so its presence marks setup as already done.
    if not os.path.exists(os.path.join(output_dir, "Makefile")):
        subprocess.check_call(["configureBclToFastq.pl", "--no-eamss",
                               "--input-dir", bc_dir, "--output-dir", output_dir,
                               "--sample-sheet", ss_csv])
    with utils.chdir(output_dir):
        # Parallelize the conversion over the configured core count
        # (falls back to 1 when the config key is absent).
        cores = str(utils.get_in(config, ("algorithm", "num_cores"), 1))
        cmd = ["make", "-j", cores]
        if "submit_cmd" in config["process"] and "bcl2fastq_batch" in config["process"]:
            # Cluster path: submit the make invocation to the batch
            # scheduler and block until it completes.
            _submit_and_wait(cmd, cores, config, output_dir)
        else:
            # Local path: run make directly inside output_dir.
            subprocess.check_call(cmd)
    return output_dir | def function[run_bcl2fastq, parameter[run_folder, ss_csv, config]]:
constant[Run bcl2fastq for de-multiplexing and fastq generation.
run_folder -- directory of Illumina outputs
ss_csv -- Samplesheet CSV file describing samples.
]
variable[bc_dir] assign[=] call[name[os].path.join, parameter[name[run_folder], constant[Data], constant[Intensities], constant[BaseCalls]]]
variable[output_dir] assign[=] call[name[os].path.join, parameter[name[run_folder], constant[fastq]]]
if <ast.UnaryOp object at 0x7da20c76f370> begin[:]
call[name[subprocess].check_call, parameter[list[[<ast.Constant object at 0x7da20c76d8a0>, <ast.Constant object at 0x7da20c76d210>, <ast.Constant object at 0x7da20c76dde0>, <ast.Name object at 0x7da20c76e5f0>, <ast.Constant object at 0x7da20c76e350>, <ast.Name object at 0x7da20c76d5a0>, <ast.Constant object at 0x7da20c76cf40>, <ast.Name object at 0x7da20c76f3d0>]]]]
with call[name[utils].chdir, parameter[name[output_dir]]] begin[:]
variable[cores] assign[=] call[name[str], parameter[call[name[utils].get_in, parameter[name[config], tuple[[<ast.Constant object at 0x7da20c76d390>, <ast.Constant object at 0x7da20c76d570>]], constant[1]]]]]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da20c76ffa0>, <ast.Constant object at 0x7da20c76d2a0>, <ast.Name object at 0x7da20c76fc10>]]
if <ast.BoolOp object at 0x7da20c76f790> begin[:]
call[name[_submit_and_wait], parameter[name[cmd], name[cores], name[config], name[output_dir]]]
return[name[output_dir]] | keyword[def] identifier[run_bcl2fastq] ( identifier[run_folder] , identifier[ss_csv] , identifier[config] ):
literal[string]
identifier[bc_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[run_folder] , literal[string] , literal[string] , literal[string] )
identifier[output_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[run_folder] , literal[string] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[join] ( identifier[output_dir] , literal[string] )):
identifier[subprocess] . identifier[check_call] ([ literal[string] , literal[string] ,
literal[string] , identifier[bc_dir] , literal[string] , identifier[output_dir] ,
literal[string] , identifier[ss_csv] ])
keyword[with] identifier[utils] . identifier[chdir] ( identifier[output_dir] ):
identifier[cores] = identifier[str] ( identifier[utils] . identifier[get_in] ( identifier[config] ,( literal[string] , literal[string] ), literal[int] ))
identifier[cmd] =[ literal[string] , literal[string] , identifier[cores] ]
keyword[if] literal[string] keyword[in] identifier[config] [ literal[string] ] keyword[and] literal[string] keyword[in] identifier[config] [ literal[string] ]:
identifier[_submit_and_wait] ( identifier[cmd] , identifier[cores] , identifier[config] , identifier[output_dir] )
keyword[else] :
identifier[subprocess] . identifier[check_call] ( identifier[cmd] )
keyword[return] identifier[output_dir] | def run_bcl2fastq(run_folder, ss_csv, config):
"""Run bcl2fastq for de-multiplexing and fastq generation.
run_folder -- directory of Illumina outputs
ss_csv -- Samplesheet CSV file describing samples.
"""
bc_dir = os.path.join(run_folder, 'Data', 'Intensities', 'BaseCalls')
output_dir = os.path.join(run_folder, 'fastq')
if not os.path.exists(os.path.join(output_dir, 'Makefile')):
subprocess.check_call(['configureBclToFastq.pl', '--no-eamss', '--input-dir', bc_dir, '--output-dir', output_dir, '--sample-sheet', ss_csv]) # depends on [control=['if'], data=[]]
with utils.chdir(output_dir):
cores = str(utils.get_in(config, ('algorithm', 'num_cores'), 1))
cmd = ['make', '-j', cores]
if 'submit_cmd' in config['process'] and 'bcl2fastq_batch' in config['process']:
_submit_and_wait(cmd, cores, config, output_dir) # depends on [control=['if'], data=[]]
else:
subprocess.check_call(cmd) # depends on [control=['with'], data=[]]
return output_dir |
def mirror_file(self, path_to, path_from, from_quick_server=True):
    """Mirrors a file to a different location. Each time the file changes
    while the process is running it will be copied to 'path_to',
    overwriting the destination.

    Parameters
    ----------
    path_to : string
        The mirror destination.

    path_from : string
        The mirror origin.

    from_quick_server : bool
        If set the origin path is relative to *this* script otherwise it is
        relative to the process.

    Raises
    ------
    ValueError
        If a previously selected mirror implementation is not recognized.
    """
    # Resolve the origin relative to this module's directory when requested.
    full_path = path_from if not from_quick_server else os.path.join(
        os.path.dirname(__file__), path_from)
    # First call: choose an implementation. Try symlinking first and fall
    # back to polling when that fails. NOTE(review): self._mirror appears to
    # be populated by the _symlink_mirror/_poll_mirror helpers — confirm.
    if self._mirror is None:
        if not self._symlink_mirror(path_to, full_path, init=True):
            self._poll_mirror(path_to, full_path, init=True)
        return
    # Subsequent calls: dispatch on the implementation chosen at init time.
    impl = self._mirror["impl"]
    if impl == "symlink":
        self._symlink_mirror(path_to, full_path, init=False)
    elif impl == "poll":
        self._poll_mirror(path_to, full_path, init=False)
    else:
        raise ValueError("unknown mirror implementation: {0}".format(impl)) | def function[mirror_file, parameter[self, path_to, path_from, from_quick_server]]:
constant[Mirrors a file to a different location. Each time the file changes
while the process is running it will be copied to 'path_to',
overwriting the destination.
Parameters
----------
path_to : string
The mirror destination.
path_from : string
The mirror origin.
from_quick_server : bool
If set the origin path is relative to *this* script otherwise it is
relative to the process.
]
variable[full_path] assign[=] <ast.IfExp object at 0x7da2044c2860>
if compare[name[self]._mirror is constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da2044c3640> begin[:]
call[name[self]._poll_mirror, parameter[name[path_to], name[full_path]]]
return[None]
variable[impl] assign[=] call[name[self]._mirror][constant[impl]]
if compare[name[impl] equal[==] constant[symlink]] begin[:]
call[name[self]._symlink_mirror, parameter[name[path_to], name[full_path]]] | keyword[def] identifier[mirror_file] ( identifier[self] , identifier[path_to] , identifier[path_from] , identifier[from_quick_server] = keyword[True] ):
literal[string]
identifier[full_path] = identifier[path_from] keyword[if] keyword[not] identifier[from_quick_server] keyword[else] identifier[os] . identifier[path] . identifier[join] (
identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ), identifier[path_from] )
keyword[if] identifier[self] . identifier[_mirror] keyword[is] keyword[None] :
keyword[if] keyword[not] identifier[self] . identifier[_symlink_mirror] ( identifier[path_to] , identifier[full_path] , identifier[init] = keyword[True] ):
identifier[self] . identifier[_poll_mirror] ( identifier[path_to] , identifier[full_path] , identifier[init] = keyword[True] )
keyword[return]
identifier[impl] = identifier[self] . identifier[_mirror] [ literal[string] ]
keyword[if] identifier[impl] == literal[string] :
identifier[self] . identifier[_symlink_mirror] ( identifier[path_to] , identifier[full_path] , identifier[init] = keyword[False] )
keyword[elif] identifier[impl] == literal[string] :
identifier[self] . identifier[_poll_mirror] ( identifier[path_to] , identifier[full_path] , identifier[init] = keyword[False] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[impl] )) | def mirror_file(self, path_to, path_from, from_quick_server=True):
"""Mirrors a file to a different location. Each time the file changes
while the process is running it will be copied to 'path_to',
overwriting the destination.
Parameters
----------
path_to : string
The mirror destination.
path_from : string
The mirror origin.
from_quick_server : bool
If set the origin path is relative to *this* script otherwise it is
relative to the process.
"""
full_path = path_from if not from_quick_server else os.path.join(os.path.dirname(__file__), path_from)
if self._mirror is None:
if not self._symlink_mirror(path_to, full_path, init=True):
self._poll_mirror(path_to, full_path, init=True) # depends on [control=['if'], data=[]]
return # depends on [control=['if'], data=[]]
impl = self._mirror['impl']
if impl == 'symlink':
self._symlink_mirror(path_to, full_path, init=False) # depends on [control=['if'], data=[]]
elif impl == 'poll':
self._poll_mirror(path_to, full_path, init=False) # depends on [control=['if'], data=[]]
else:
raise ValueError('unknown mirror implementation: {0}'.format(impl)) |
def cvt_iter(a):
    '''
    Materialize a one-shot iterator/generator as a tuple so that it can be
    iterated more than once (e.g. zip objects in PY3). None and the already
    re-iterable tuple/list types are passed through unchanged.
    '''
    if a is None:
        return None
    if isinstance(a, (tuple, list)):
        return a
    # One-shot iterable: snapshot its contents.
    return tuple(a)
return a | def function[cvt_iter, parameter[a]]:
constant[
Convert an iterator/generator to a tuple so that it can be iterated again.
E.g., convert zip in PY3.
]
if compare[name[a] is constant[None]] begin[:]
return[name[a]]
if <ast.UnaryOp object at 0x7da18fe90fd0> begin[:]
variable[a] assign[=] call[name[tuple], parameter[name[a]]]
return[name[a]] | keyword[def] identifier[cvt_iter] ( identifier[a] ):
literal[string]
keyword[if] identifier[a] keyword[is] keyword[None] :
keyword[return] identifier[a]
keyword[if] keyword[not] identifier[isinstance] ( identifier[a] ,( identifier[tuple] , identifier[list] )):
identifier[a] = identifier[tuple] ( identifier[a] )
keyword[return] identifier[a] | def cvt_iter(a):
"""
Convert an iterator/generator to a tuple so that it can be iterated again.
E.g., convert zip in PY3.
"""
if a is None:
return a # depends on [control=['if'], data=['a']]
if not isinstance(a, (tuple, list)):
# convert iterator/generator to tuple
a = tuple(a) # depends on [control=['if'], data=[]]
return a |
def apply(model_path):
    """Run inference from a training model checkpoint.

    Builds an offline predictor from the checkpoint at *model_path*, feeds
    it the image file 'lena.png' from the working directory, and writes the
    resulting output image to 'applied_default.jpg'.

    NOTE(review): the function name shadows the Python 2 builtin ``apply``.
    """
    pred_config = PredictConfig(
        session_init=get_model_loader(model_path),
        model=Model(),
        input_names=['input_img'],
        output_names=['prediction_img'])
    pred = OfflinePredictor(pred_config)
    img = cv2.imread('lena.png')
    # pred returns one array per declared output name; take the single
    # 'prediction_img' output.
    prediction = pred([img])[0]
    # prediction[0]: first entry of the (single-image) batch.
    cv2.imwrite('applied_default.jpg', prediction[0]) | def function[apply, parameter[model_path]]:
constant[Run inference from a training model checkpoint. ]
variable[pred_config] assign[=] call[name[PredictConfig], parameter[]]
variable[pred] assign[=] call[name[OfflinePredictor], parameter[name[pred_config]]]
variable[img] assign[=] call[name[cv2].imread, parameter[constant[lena.png]]]
variable[prediction] assign[=] call[call[name[pred], parameter[list[[<ast.Name object at 0x7da18f09d9c0>]]]]][constant[0]]
call[name[cv2].imwrite, parameter[constant[applied_default.jpg], call[name[prediction]][constant[0]]]] | keyword[def] identifier[apply] ( identifier[model_path] ):
literal[string]
identifier[pred_config] = identifier[PredictConfig] (
identifier[session_init] = identifier[get_model_loader] ( identifier[model_path] ),
identifier[model] = identifier[Model] (),
identifier[input_names] =[ literal[string] ],
identifier[output_names] =[ literal[string] ])
identifier[pred] = identifier[OfflinePredictor] ( identifier[pred_config] )
identifier[img] = identifier[cv2] . identifier[imread] ( literal[string] )
identifier[prediction] = identifier[pred] ([ identifier[img] ])[ literal[int] ]
identifier[cv2] . identifier[imwrite] ( literal[string] , identifier[prediction] [ literal[int] ]) | def apply(model_path):
"""Run inference from a training model checkpoint. """
pred_config = PredictConfig(session_init=get_model_loader(model_path), model=Model(), input_names=['input_img'], output_names=['prediction_img'])
pred = OfflinePredictor(pred_config)
img = cv2.imread('lena.png')
prediction = pred([img])[0]
cv2.imwrite('applied_default.jpg', prediction[0]) |
def get_numId(li, w_namespace):
    """Look up the numId attribute value carried by an li tag.

    Together with the ilvl, the numId indexes into the numbering dictionary
    to decide how the list renders (unordered, digits, lower alpha, etc).
    Returns -1 when the tag contains no w:numId element.
    """
    num_id_nodes = li.xpath('.//w:numId', namespaces=li.nsmap)
    if not num_id_nodes:
        return -1
    first = num_id_nodes[0]
    return first.get('%sval' % w_namespace)
return numIds[0].get('%sval' % w_namespace) | def function[get_numId, parameter[li, w_namespace]]:
constant[
The numId on an li tag maps to the numbering dictionary along side the ilvl
to determine what the list should look like (unordered, digits, lower
alpha, etc)
]
variable[numIds] assign[=] call[name[li].xpath, parameter[constant[.//w:numId]]]
if compare[call[name[len], parameter[name[numIds]]] equal[==] constant[0]] begin[:]
return[<ast.UnaryOp object at 0x7da1b0286c50>]
return[call[call[name[numIds]][constant[0]].get, parameter[binary_operation[constant[%sval] <ast.Mod object at 0x7da2590d6920> name[w_namespace]]]]] | keyword[def] identifier[get_numId] ( identifier[li] , identifier[w_namespace] ):
literal[string]
identifier[numIds] = identifier[li] . identifier[xpath] ( literal[string] , identifier[namespaces] = identifier[li] . identifier[nsmap] )
keyword[if] identifier[len] ( identifier[numIds] )== literal[int] :
keyword[return] - literal[int]
keyword[return] identifier[numIds] [ literal[int] ]. identifier[get] ( literal[string] % identifier[w_namespace] ) | def get_numId(li, w_namespace):
"""
The numId on an li tag maps to the numbering dictionary along side the ilvl
to determine what the list should look like (unordered, digits, lower
alpha, etc)
"""
numIds = li.xpath('.//w:numId', namespaces=li.nsmap)
if len(numIds) == 0:
return -1 # depends on [control=['if'], data=[]]
return numIds[0].get('%sval' % w_namespace) |
def get_text(nodelist):
    """Return the values of all text nodes in *nodelist* joined by ", ".

    nodelist -- iterable of DOM nodes; nodes whose nodeType is not
        Node.TEXT_NODE are skipped.
    Returns the empty string when there are no text nodes.
    """
    # str.join replaces the original append-then-trim loop: it avoids the
    # quadratic repeated string concatenation and the manual removal of the
    # trailing ", " separator, while producing the identical result.
    return ', '.join(node.nodeValue for node in nodelist
                     if node.nodeType == Node.TEXT_NODE)
return s | def function[get_text, parameter[nodelist]]:
constant[Return a concatenation of text fields from list of nodes
]
variable[s] assign[=] constant[]
for taget[name[node]] in starred[name[nodelist]] begin[:]
if compare[name[node].nodeType equal[==] name[Node].TEXT_NODE] begin[:]
<ast.AugAssign object at 0x7da20e9570d0>
if compare[call[name[len], parameter[name[s]]] greater[>] constant[0]] begin[:]
variable[s] assign[=] call[name[s]][<ast.Slice object at 0x7da20e954940>]
return[name[s]] | keyword[def] identifier[get_text] ( identifier[nodelist] ):
literal[string]
identifier[s] = literal[string]
keyword[for] identifier[node] keyword[in] identifier[nodelist] :
keyword[if] identifier[node] . identifier[nodeType] == identifier[Node] . identifier[TEXT_NODE] :
identifier[s] += identifier[node] . identifier[nodeValue] + literal[string]
keyword[if] identifier[len] ( identifier[s] )> literal[int] : identifier[s] = identifier[s] [:- literal[int] ]
keyword[return] identifier[s] | def get_text(nodelist):
"""Return a concatenation of text fields from list of nodes
"""
s = ''
for node in nodelist:
if node.nodeType == Node.TEXT_NODE:
s += node.nodeValue + ', ' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']]
if len(s) > 0:
s = s[:-2] # depends on [control=['if'], data=[]]
return s |
def ok(prompt='OK ', loc=None, glo=None, cmd=""):
    '''
    Invoke the peforth interpreter.
    An statement: peforth.ok(prompt='OK ', loc=locals(), glo=globals(), cmd="")
    is like a breakpoint. The prompt indicates which breakpoint it is if there are
    many. Arguments loc (locals) and glo (globals) along with the prompt are the
    debuggee's informations that is packed as a tuple (loc,glo,prompt) left on TOS
    of the FORTH vm when the breakpoint is called. Replace the loc=locals() with
    loc=dict(locals()) to get a snapshot copy instead of a reference, as well as
    the glo. 'exit' command to stop debugging.
    '''
    # None sentinels instead of mutable default arguments ({}): the old
    # shared default dicts could be mutated through the tuple pushed below
    # and would then leak state across calls.
    if loc is None:
        loc = {}
    if glo is None:
        glo = {}
    if loc or glo:
        vm.push((loc, glo, prompt))  # parent's data for the breakpoint
    while True:
        if cmd == "":
            # Input can be single line (default) or multiple lines. Press
            # Ctrl-D to toggle between the two modes. Place a Ctrl-D before
            # the last <Enter> key to end the input in multiple-line mode.
            if vm.tick('accept') and not vm.multiple:
                vm.execute('accept')
                cmd = vm.pop().strip()
            elif vm.tick('<accept>') and vm.multiple:
                vm.execute('<accept>')
                cmd = vm.pop().strip()
            else:
                cmd = input("").strip()
        # pass the command line to forth VM
        if cmd == "":
            print(prompt, end="")
            continue
        elif cmd == chr(4):  # Ctrl-D toggles single/multiple-line input mode
            vm.multiple = not vm.multiple
            if not vm.multiple:
                print(prompt, end="")
        else:
            vm.dictate(cmd)
            if vm.multiple:
                vm.multiple = False  # switch back to the normal mode
            print(prompt, end="")
        # Clear the consumed command so the next iteration reads fresh input.
        # (Bug fix: this reset used to sit inside the final else branch only,
        # so a Ctrl-D command was never cleared and the loop spun forever.)
        cmd = ""
        # Master switch vm.exit is a flag of boolean. When it's True
        # then exit to the caller that usually is python interpreter.
        if vm.exit:
            vm.exit = False  # Avoid exit immediately when called again
            break
    return vm  # support function cascade
return(vm) # support function cascade | def function[ok, parameter[prompt, loc, glo, cmd]]:
constant[
Invoke the peforth interpreter.
An statement: peforth.ok(prompt='OK ', loc=locals(), glo=globals(), cmd="")
is like a breakpoint. The prompt indicates which breakpoint it is if there are
many. Arguments loc (locals) and glo (globals) along with the prompt are the
debuggee's informations that is packed as a tuple (loc,glo,prompt) left on TOS
of the FORTH vm when the breakpoint is called. Replace the loc=locals() with
loc=dict(locals()) to get a snapshot copy instead of a reference, as well as
the glo. 'exit' command to stop debugging.
]
if <ast.BoolOp object at 0x7da1b261f010> begin[:]
call[name[vm].push, parameter[tuple[[<ast.Name object at 0x7da1b261f5e0>, <ast.Name object at 0x7da1b261fdf0>, <ast.Name object at 0x7da1b261de70>]]]]
while constant[True] begin[:]
if compare[name[cmd] equal[==] constant[]] begin[:]
if <ast.BoolOp object at 0x7da1b261f220> begin[:]
call[name[vm].execute, parameter[constant[accept]]]
variable[cmd] assign[=] call[call[name[vm].pop, parameter[]].strip, parameter[]]
if compare[name[cmd] equal[==] constant[]] begin[:]
call[name[print], parameter[name[prompt]]]
continue
variable[cmd] assign[=] constant[]
if name[vm].exit begin[:]
name[vm].exit assign[=] constant[False]
break
return[name[vm]] | keyword[def] identifier[ok] ( identifier[prompt] = literal[string] , identifier[loc] ={}, identifier[glo] ={}, identifier[cmd] = literal[string] ):
literal[string]
keyword[if] identifier[loc] keyword[or] identifier[glo] : identifier[vm] . identifier[push] (( identifier[loc] , identifier[glo] , identifier[prompt] ))
keyword[while] keyword[True] :
keyword[if] identifier[cmd] == literal[string] :
keyword[if] identifier[vm] . identifier[tick] ( literal[string] ) keyword[and] keyword[not] identifier[vm] . identifier[multiple] :
identifier[vm] . identifier[execute] ( literal[string] )
identifier[cmd] = identifier[vm] . identifier[pop] (). identifier[strip] ()
keyword[elif] identifier[vm] . identifier[tick] ( literal[string] ) keyword[and] identifier[vm] . identifier[multiple] :
identifier[vm] . identifier[execute] ( literal[string] )
identifier[cmd] = identifier[vm] . identifier[pop] (). identifier[strip] ()
keyword[else] :
identifier[cmd] = identifier[input] ( literal[string] ). identifier[strip] ()
keyword[if] identifier[cmd] == literal[string] :
identifier[print] ( identifier[prompt] , identifier[end] = literal[string] )
keyword[continue]
keyword[elif] identifier[cmd] == identifier[chr] ( literal[int] ):
identifier[vm] . identifier[multiple] = keyword[not] identifier[vm] . identifier[multiple]
keyword[if] keyword[not] identifier[vm] . identifier[multiple] : identifier[print] ( identifier[prompt] , identifier[end] = literal[string] )
keyword[else] :
identifier[vm] . identifier[dictate] ( identifier[cmd] )
keyword[if] identifier[vm] . identifier[multiple] : identifier[vm] . identifier[multiple] = keyword[False]
identifier[print] ( identifier[prompt] , identifier[end] = literal[string] )
identifier[cmd] = literal[string]
keyword[if] identifier[vm] . identifier[exit] :
identifier[vm] . identifier[exit] = keyword[False]
keyword[break]
keyword[return] ( identifier[vm] ) | def ok(prompt='OK ', loc={}, glo={}, cmd=''):
"""
Invoke the peforth interpreter.
An statement: peforth.ok(prompt='OK ', loc=locals(), glo=globals(), cmd="")
is like a breakpoint. The prompt indicates which breakpoint it is if there are
many. Arguments loc (locals) and glo (globals) along with the prompt are the
debuggee's informations that is packed as a tuple (loc,glo,prompt) left on TOS
of the FORTH vm when the breakpoint is called. Replace the loc=locals() with
loc=dict(locals()) to get a snapshot copy instead of a reference, as well as
the glo. 'exit' command to stop debugging.
"""
if loc or glo:
vm.push((loc, glo, prompt)) # parent's data # depends on [control=['if'], data=[]]
while True:
if cmd == '': #
if vm.tick('accept') and (not vm.multiple): # Input can be single line (default) or
vm.execute('accept') # multiple lines. Press Ctrl-D to toggle
cmd = vm.pop().strip() # between the two modes. Place a Ctrl-D # depends on [control=['if'], data=[]]
elif vm.tick('<accept>') and vm.multiple: # before the last <Enter> key to end the
vm.execute('<accept>') # input when in multiple-line mode.
cmd = vm.pop().strip() # # depends on [control=['if'], data=[]]
else: #
cmd = input('').strip() # # depends on [control=['if'], data=['cmd']]
# pass the command line to forth VM
if cmd == '':
print(prompt, end='')
continue # depends on [control=['if'], data=[]]
elif cmd == chr(4):
vm.multiple = not vm.multiple
if not vm.multiple:
print(prompt, end='') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
vm.dictate(cmd)
if vm.multiple:
vm.multiple = False # switch back to the normal mode # depends on [control=['if'], data=[]]
print(prompt, end='')
cmd = ''
# Master switch vm.exit is a flag of boolean. When it's True
# then exit to the caller that usually is python interpreter.
if vm.exit:
vm.exit = False # Avoid exit immediately when called again
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
return vm # support function cascade |
def mode(data):
"""
Return the modal value of a iterable with discrete values.
If there is more than 1 modal value, arbritrarily return the first top n.
"""
c = Counter(data)
mode, freq = c.most_common(1)[0]
return mode | def function[mode, parameter[data]]:
constant[
Return the modal value of a iterable with discrete values.
If there is more than 1 modal value, arbritrarily return the first top n.
]
variable[c] assign[=] call[name[Counter], parameter[name[data]]]
<ast.Tuple object at 0x7da1b2346d70> assign[=] call[call[name[c].most_common, parameter[constant[1]]]][constant[0]]
return[name[mode]] | keyword[def] identifier[mode] ( identifier[data] ):
literal[string]
identifier[c] = identifier[Counter] ( identifier[data] )
identifier[mode] , identifier[freq] = identifier[c] . identifier[most_common] ( literal[int] )[ literal[int] ]
keyword[return] identifier[mode] | def mode(data):
"""
Return the modal value of a iterable with discrete values.
If there is more than 1 modal value, arbritrarily return the first top n.
"""
c = Counter(data)
(mode, freq) = c.most_common(1)[0]
return mode |
def value_to_jam(value, methods=False):
    """Makes a token to refer to a Python value inside Jam language code.

    The token is merely a string that can be passed around in Jam code and
    eventually passed back. For example, we might want to pass PropertySet
    instance to a tag function and it might eventually call back
    to virtual_target.add_suffix_and_prefix, passing the same instance.
    For values that are classes, we'll also make class methods callable
    from Jam.

    Note that this is necessary to make a bit more of existing Jamfiles work.
    This trick should not be used to much, or else the performance benefits of
    Python port will be eaten.
    """
    global __value_id
    # Reuse the token if this value was already exported.
    r = __python_to_jam.get(value, None)
    if r:
        return r
    # Mint a fresh token and record the mapping in both directions.
    exported_name = '###_' + str(__value_id)
    __value_id = __value_id + 1
    __python_to_jam[value] = exported_name
    __jam_to_python[exported_name] = value
    # NOTE(review): types.InstanceType exists only on Python 2 (old-style
    # class instances), so this branch is dead on Python 3 — confirm intended.
    if methods and type(value) == types.InstanceType:
        for field_name in dir(value):
            field = getattr(value, field_name)
            # Expose every public callable attribute as a Jam rule named
            # "<token>.<method>".
            if callable(field) and not field_name.startswith("__"):
                bjam.import_rule("", exported_name + "." + field_name, field)
    return exported_name | def function[value_to_jam, parameter[value, methods]]:
constant[Makes a token to refer to a Python value inside Jam language code.
The token is merely a string that can be passed around in Jam code and
eventually passed back. For example, we might want to pass PropertySet
instance to a tag function and it might eventually call back
to virtual_target.add_suffix_and_prefix, passing the same instance.
For values that are classes, we'll also make class methods callable
from Jam.
Note that this is necessary to make a bit more of existing Jamfiles work.
This trick should not be used to much, or else the performance benefits of
Python port will be eaten.
]
<ast.Global object at 0x7da204347070>
variable[r] assign[=] call[name[__python_to_jam].get, parameter[name[value], constant[None]]]
if name[r] begin[:]
return[name[r]]
variable[exported_name] assign[=] binary_operation[constant[###_] + call[name[str], parameter[name[__value_id]]]]
variable[__value_id] assign[=] binary_operation[name[__value_id] + constant[1]]
call[name[__python_to_jam]][name[value]] assign[=] name[exported_name]
call[name[__jam_to_python]][name[exported_name]] assign[=] name[value]
if <ast.BoolOp object at 0x7da204961a50> begin[:]
for taget[name[field_name]] in starred[call[name[dir], parameter[name[value]]]] begin[:]
variable[field] assign[=] call[name[getattr], parameter[name[value], name[field_name]]]
if <ast.BoolOp object at 0x7da204962950> begin[:]
call[name[bjam].import_rule, parameter[constant[], binary_operation[binary_operation[name[exported_name] + constant[.]] + name[field_name]], name[field]]]
return[name[exported_name]] | keyword[def] identifier[value_to_jam] ( identifier[value] , identifier[methods] = keyword[False] ):
literal[string]
keyword[global] identifier[__value_id]
identifier[r] = identifier[__python_to_jam] . identifier[get] ( identifier[value] , keyword[None] )
keyword[if] identifier[r] :
keyword[return] identifier[r]
identifier[exported_name] = literal[string] + identifier[str] ( identifier[__value_id] )
identifier[__value_id] = identifier[__value_id] + literal[int]
identifier[__python_to_jam] [ identifier[value] ]= identifier[exported_name]
identifier[__jam_to_python] [ identifier[exported_name] ]= identifier[value]
keyword[if] identifier[methods] keyword[and] identifier[type] ( identifier[value] )== identifier[types] . identifier[InstanceType] :
keyword[for] identifier[field_name] keyword[in] identifier[dir] ( identifier[value] ):
identifier[field] = identifier[getattr] ( identifier[value] , identifier[field_name] )
keyword[if] identifier[callable] ( identifier[field] ) keyword[and] keyword[not] identifier[field_name] . identifier[startswith] ( literal[string] ):
identifier[bjam] . identifier[import_rule] ( literal[string] , identifier[exported_name] + literal[string] + identifier[field_name] , identifier[field] )
keyword[return] identifier[exported_name] | def value_to_jam(value, methods=False):
"""Makes a token to refer to a Python value inside Jam language code.
The token is merely a string that can be passed around in Jam code and
eventually passed back. For example, we might want to pass PropertySet
instance to a tag function and it might eventually call back
to virtual_target.add_suffix_and_prefix, passing the same instance.
For values that are classes, we'll also make class methods callable
from Jam.
Note that this is necessary to make a bit more of existing Jamfiles work.
This trick should not be used to much, or else the performance benefits of
Python port will be eaten.
"""
global __value_id
r = __python_to_jam.get(value, None)
if r:
return r # depends on [control=['if'], data=[]]
exported_name = '###_' + str(__value_id)
__value_id = __value_id + 1
__python_to_jam[value] = exported_name
__jam_to_python[exported_name] = value
if methods and type(value) == types.InstanceType:
for field_name in dir(value):
field = getattr(value, field_name)
if callable(field) and (not field_name.startswith('__')):
bjam.import_rule('', exported_name + '.' + field_name, field) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field_name']] # depends on [control=['if'], data=[]]
return exported_name |
def pretty_string(s, embedded, current_line, uni_lit=False,
                  min_trip_str=20, max_line=100):
    """There are a lot of reasons why we might not want to or
    be able to return a triple-quoted string. We can always
    punt back to the default normal string.

    s -- the string to render
    embedded -- nesting depth of the expression holding the string
    current_line -- source line text emitted so far
    uni_lit -- if true, prefix the repr of bytes values with 'b'
    min_trip_str -- strings with a shorter repr are never triple-quoted
    max_line -- target maximum source line length
    """
    default = repr(s)
    # Punt on abnormal strings
    if (isinstance(s, special_unicode) or not isinstance(s, basestring)):
        return default
    if uni_lit and isinstance(s, bytes):
        return 'b' + default
    len_s = len(default)
    if current_line.strip():
        len_current = len(current_line)
        second_line_start = s.find('\n') + 1
        if embedded > 1 and not second_line_start:
            return default
        if len_s < min_trip_str:
            return default
        line_indent = len_current - len(current_line.lstrip())
        # Could be on a line by itself...
        if embedded and not second_line_start:
            return default
        total_len = len_current + len_s
        if total_len < max_line and not _properly_indented(s, line_indent):
            return default
    fancy = string_triplequote_repr(s)
    # Sometimes this doesn't work. One reason is that
    # the AST has no understanding of whether \r\n was
    # entered that way in the string or was a cr/lf in the
    # file. So we punt just so we can round-trip properly.
    try:
        if eval(fancy) == s and '\r' not in fancy:
            return fancy
    except Exception:
        # Narrowed from a bare `except:`: the punt should swallow ordinary
        # round-trip failures, not KeyboardInterrupt/SystemExit.
        pass
    return default
return default | def function[pretty_string, parameter[s, embedded, current_line, uni_lit, min_trip_str, max_line]]:
constant[There are a lot of reasons why we might not want to or
be able to return a triple-quoted string. We can always
punt back to the default normal string.
]
variable[default] assign[=] call[name[repr], parameter[name[s]]]
if <ast.BoolOp object at 0x7da1b1d445b0> begin[:]
return[name[default]]
if <ast.BoolOp object at 0x7da1b1d47760> begin[:]
return[binary_operation[constant[b] + name[default]]]
variable[len_s] assign[=] call[name[len], parameter[name[default]]]
if call[name[current_line].strip, parameter[]] begin[:]
variable[len_current] assign[=] call[name[len], parameter[name[current_line]]]
variable[second_line_start] assign[=] binary_operation[call[name[s].find, parameter[constant[
]]] + constant[1]]
if <ast.BoolOp object at 0x7da1b1d47fd0> begin[:]
return[name[default]]
if compare[name[len_s] less[<] name[min_trip_str]] begin[:]
return[name[default]]
variable[line_indent] assign[=] binary_operation[name[len_current] - call[name[len], parameter[call[name[current_line].lstrip, parameter[]]]]]
if <ast.BoolOp object at 0x7da1b1d47280> begin[:]
return[name[default]]
variable[total_len] assign[=] binary_operation[name[len_current] + name[len_s]]
if <ast.BoolOp object at 0x7da1b1e0a080> begin[:]
return[name[default]]
variable[fancy] assign[=] call[name[string_triplequote_repr], parameter[name[s]]]
<ast.Try object at 0x7da1b1eea500>
return[name[default]] | keyword[def] identifier[pretty_string] ( identifier[s] , identifier[embedded] , identifier[current_line] , identifier[uni_lit] = keyword[False] ,
identifier[min_trip_str] = literal[int] , identifier[max_line] = literal[int] ):
literal[string]
identifier[default] = identifier[repr] ( identifier[s] )
keyword[if] ( identifier[isinstance] ( identifier[s] , identifier[special_unicode] ) keyword[or] keyword[not] identifier[isinstance] ( identifier[s] , identifier[basestring] )):
keyword[return] identifier[default]
keyword[if] identifier[uni_lit] keyword[and] identifier[isinstance] ( identifier[s] , identifier[bytes] ):
keyword[return] literal[string] + identifier[default]
identifier[len_s] = identifier[len] ( identifier[default] )
keyword[if] identifier[current_line] . identifier[strip] ():
identifier[len_current] = identifier[len] ( identifier[current_line] )
identifier[second_line_start] = identifier[s] . identifier[find] ( literal[string] )+ literal[int]
keyword[if] identifier[embedded] > literal[int] keyword[and] keyword[not] identifier[second_line_start] :
keyword[return] identifier[default]
keyword[if] identifier[len_s] < identifier[min_trip_str] :
keyword[return] identifier[default]
identifier[line_indent] = identifier[len_current] - identifier[len] ( identifier[current_line] . identifier[lstrip] ())
keyword[if] identifier[embedded] keyword[and] keyword[not] identifier[second_line_start] :
keyword[return] identifier[default]
identifier[total_len] = identifier[len_current] + identifier[len_s]
keyword[if] identifier[total_len] < identifier[max_line] keyword[and] keyword[not] identifier[_properly_indented] ( identifier[s] , identifier[line_indent] ):
keyword[return] identifier[default]
identifier[fancy] = identifier[string_triplequote_repr] ( identifier[s] )
keyword[try] :
keyword[if] identifier[eval] ( identifier[fancy] )== identifier[s] keyword[and] literal[string] keyword[not] keyword[in] identifier[fancy] :
keyword[return] identifier[fancy]
keyword[except] :
keyword[pass]
keyword[return] identifier[default] | def pretty_string(s, embedded, current_line, uni_lit=False, min_trip_str=20, max_line=100):
"""There are a lot of reasons why we might not want to or
be able to return a triple-quoted string. We can always
punt back to the default normal string.
"""
default = repr(s)
# Punt on abnormal strings
if isinstance(s, special_unicode) or not isinstance(s, basestring):
return default # depends on [control=['if'], data=[]]
if uni_lit and isinstance(s, bytes):
return 'b' + default # depends on [control=['if'], data=[]]
len_s = len(default)
if current_line.strip():
len_current = len(current_line)
second_line_start = s.find('\n') + 1
if embedded > 1 and (not second_line_start):
return default # depends on [control=['if'], data=[]]
if len_s < min_trip_str:
return default # depends on [control=['if'], data=[]]
line_indent = len_current - len(current_line.lstrip())
# Could be on a line by itself...
if embedded and (not second_line_start):
return default # depends on [control=['if'], data=[]]
total_len = len_current + len_s
if total_len < max_line and (not _properly_indented(s, line_indent)):
return default # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
fancy = string_triplequote_repr(s)
# Sometimes this doesn't work. One reason is that
# the AST has no understanding of whether \r\n was
# entered that way in the string or was a cr/lf in the
# file. So we punt just so we can round-trip properly.
try:
if eval(fancy) == s and '\r' not in fancy:
return fancy # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
return default |
def zip_sequences(G, allseqs, color="white"):
"""
Fuse certain nodes together, if they contain same data except for the
sequence name.
"""
for s in zip(*allseqs):
groups = defaultdict(list)
for x in s:
part = x.split('_', 1)[1]
groups[part].append(x)
for part, g in groups.items():
with G.subgraph(name="cluster_" + part) as c:
for x in g:
c.node(x)
c.attr(style="invis") | def function[zip_sequences, parameter[G, allseqs, color]]:
constant[
Fuse certain nodes together, if they contain same data except for the
sequence name.
]
for taget[name[s]] in starred[call[name[zip], parameter[<ast.Starred object at 0x7da20c9920e0>]]] begin[:]
variable[groups] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[name[x]] in starred[name[s]] begin[:]
variable[part] assign[=] call[call[name[x].split, parameter[constant[_], constant[1]]]][constant[1]]
call[call[name[groups]][name[part]].append, parameter[name[x]]]
for taget[tuple[[<ast.Name object at 0x7da20c991120>, <ast.Name object at 0x7da20c990b20>]]] in starred[call[name[groups].items, parameter[]]] begin[:]
with call[name[G].subgraph, parameter[]] begin[:]
for taget[name[x]] in starred[name[g]] begin[:]
call[name[c].node, parameter[name[x]]]
call[name[c].attr, parameter[]] | keyword[def] identifier[zip_sequences] ( identifier[G] , identifier[allseqs] , identifier[color] = literal[string] ):
literal[string]
keyword[for] identifier[s] keyword[in] identifier[zip] (* identifier[allseqs] ):
identifier[groups] = identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[x] keyword[in] identifier[s] :
identifier[part] = identifier[x] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ]
identifier[groups] [ identifier[part] ]. identifier[append] ( identifier[x] )
keyword[for] identifier[part] , identifier[g] keyword[in] identifier[groups] . identifier[items] ():
keyword[with] identifier[G] . identifier[subgraph] ( identifier[name] = literal[string] + identifier[part] ) keyword[as] identifier[c] :
keyword[for] identifier[x] keyword[in] identifier[g] :
identifier[c] . identifier[node] ( identifier[x] )
identifier[c] . identifier[attr] ( identifier[style] = literal[string] ) | def zip_sequences(G, allseqs, color='white'):
"""
Fuse certain nodes together, if they contain same data except for the
sequence name.
"""
for s in zip(*allseqs):
groups = defaultdict(list)
for x in s:
part = x.split('_', 1)[1]
groups[part].append(x) # depends on [control=['for'], data=['x']]
for (part, g) in groups.items():
with G.subgraph(name='cluster_' + part) as c:
for x in g:
c.node(x) # depends on [control=['for'], data=['x']]
c.attr(style='invis') # depends on [control=['with'], data=['c']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['s']] |
def tarbell_spreadsheet(command, args):
"""
Open context spreadsheet
"""
with ensure_settings(command, args) as settings, ensure_project(command, args) as site:
try:
# First, try to get the Google Spreadsheet URL
spreadsheet_url = _google_spreadsheet_url(site.project.SPREADSHEET_KEY)
except AttributeError:
# The project doesn't seem to be using a Google Spreadsheet.
# Try the URL or path specified in the CONTEXT_SOURCE_FILE setting
try:
spreadsheet_url = _context_source_file_url(
site.project.CONTEXT_SOURCE_FILE)
print(spreadsheet_url)
except AttributeError:
puts(colored.red("No Google spreadsheet or context source file "
"has been configured.\n"))
return
# Use the webbrowser package to try to open the file whether it's a
# remote URL on the web, or a local file. On some platforms it will
# successfully open local files in the default application.
# This seems preferable to trying to do os detection and calling
# the system-specific command for opening files in default
# applications.
# See
# http://stackoverflow.com/questions/434597/open-document-with-default-application-in-python
webbrowser.open(spreadsheet_url) | def function[tarbell_spreadsheet, parameter[command, args]]:
constant[
Open context spreadsheet
]
with call[name[ensure_settings], parameter[name[command], name[args]]] begin[:]
<ast.Try object at 0x7da1b1968f40>
call[name[webbrowser].open, parameter[name[spreadsheet_url]]] | keyword[def] identifier[tarbell_spreadsheet] ( identifier[command] , identifier[args] ):
literal[string]
keyword[with] identifier[ensure_settings] ( identifier[command] , identifier[args] ) keyword[as] identifier[settings] , identifier[ensure_project] ( identifier[command] , identifier[args] ) keyword[as] identifier[site] :
keyword[try] :
identifier[spreadsheet_url] = identifier[_google_spreadsheet_url] ( identifier[site] . identifier[project] . identifier[SPREADSHEET_KEY] )
keyword[except] identifier[AttributeError] :
keyword[try] :
identifier[spreadsheet_url] = identifier[_context_source_file_url] (
identifier[site] . identifier[project] . identifier[CONTEXT_SOURCE_FILE] )
identifier[print] ( identifier[spreadsheet_url] )
keyword[except] identifier[AttributeError] :
identifier[puts] ( identifier[colored] . identifier[red] ( literal[string]
literal[string] ))
keyword[return]
identifier[webbrowser] . identifier[open] ( identifier[spreadsheet_url] ) | def tarbell_spreadsheet(command, args):
"""
Open context spreadsheet
"""
with ensure_settings(command, args) as settings, ensure_project(command, args) as site:
try:
# First, try to get the Google Spreadsheet URL
spreadsheet_url = _google_spreadsheet_url(site.project.SPREADSHEET_KEY) # depends on [control=['try'], data=[]]
except AttributeError:
# The project doesn't seem to be using a Google Spreadsheet.
# Try the URL or path specified in the CONTEXT_SOURCE_FILE setting
try:
spreadsheet_url = _context_source_file_url(site.project.CONTEXT_SOURCE_FILE)
print(spreadsheet_url) # depends on [control=['try'], data=[]]
except AttributeError:
puts(colored.red('No Google spreadsheet or context source file has been configured.\n'))
return # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
# Use the webbrowser package to try to open the file whether it's a
# remote URL on the web, or a local file. On some platforms it will
# successfully open local files in the default application.
# This seems preferable to trying to do os detection and calling
# the system-specific command for opening files in default
# applications.
# See
# http://stackoverflow.com/questions/434597/open-document-with-default-application-in-python
webbrowser.open(spreadsheet_url) # depends on [control=['with'], data=[]] |
def update_elbs(self):
"""Update list of ELBs for the account / region
Returns:
`None`
"""
self.log.debug('Updating ELBs for {}/{}'.format(
self.account.account_name,
self.region
))
# ELBs known to CINQ
elbs_from_db = ELB.get_all(self.account, self.region)
try:
# ELBs known to AWS
elb_client = self.session.client('elb', region_name=self.region)
load_balancer_instances = elb_client.describe_load_balancers()['LoadBalancerDescriptions']
elbs_from_api = {}
for load_balancer in load_balancer_instances:
key = '{}::{}'.format(self.region, load_balancer['LoadBalancerName'])
elbs_from_api[key] = load_balancer
# Process ELBs known to AWS
for elb_identifier in elbs_from_api:
data = elbs_from_api[elb_identifier]
# ELB already in DB?
if elb_identifier in elbs_from_db:
elb = elbs_from_db[elb_identifier]
if elb.update(data):
self.log.info(
'Updating info for ELB {} in {}/{}'.format(
elb.resource.resource_id,
self.account.account_name,
self.region
)
)
db.session.add(elb.resource)
else:
# Not previously seen this ELB, so add it
if 'Tags' in data:
try:
tags = {tag['Key']: tag['Value'] for tag in data['Tags']}
except AttributeError:
tags = {}
else:
tags = {}
vpc_data = (data['VPCId'] if ('VPCId' in data and data['VPCId']) else 'no vpc')
properties = {
'lb_name': data['LoadBalancerName'],
'dns_name': data['DNSName'],
'instances': ' '.join(
[instance['InstanceId'] for instance in data['Instances']]
),
'num_instances': len(
[instance['InstanceId'] for instance in data['Instances']]
),
'vpc_id': vpc_data,
'state': 'not_reported'
}
if 'CanonicalHostedZoneName' in data:
properties['canonical_hosted_zone_name'] = data['CanonicalHostedZoneName']
else:
properties['canonical_hosted_zone_name'] = None
# LoadBalancerName doesn't have to be unique across all regions
# Use region::LoadBalancerName as resource_id
resource_id = '{}::{}'.format(self.region, data['LoadBalancerName'])
# All done, create
elb = ELB.create(
resource_id,
account_id=self.account.account_id,
location=self.region,
properties=properties,
tags=tags
)
# elbs[elb.resource.resource_id] = elb
self.log.info(
'Added new ELB {}/{}/{}'.format(
self.account.account_name,
self.region,
elb.resource.resource_id
)
)
# Delete no longer existing ELBs
elb_keys_from_db = set(list(elbs_from_db.keys()))
self.log.debug('elb_keys_from_db = %s', elb_keys_from_db)
elb_keys_from_api = set(list(elbs_from_api.keys()))
self.log.debug('elb_keys_from_api = %s', elb_keys_from_api)
for elb_identifier in elb_keys_from_db - elb_keys_from_api:
db.session.delete(elbs_from_db[elb_identifier].resource)
self.log.info('Deleted ELB {}/{}/{}'.format(
self.account.account_name,
self.region,
elb_identifier
)
)
db.session.commit()
except:
self.log.exception('There was a problem during ELB collection for {}/{}'.format(
self.account.account_name,
self.region
))
db.session.rollback() | def function[update_elbs, parameter[self]]:
constant[Update list of ELBs for the account / region
Returns:
`None`
]
call[name[self].log.debug, parameter[call[constant[Updating ELBs for {}/{}].format, parameter[name[self].account.account_name, name[self].region]]]]
variable[elbs_from_db] assign[=] call[name[ELB].get_all, parameter[name[self].account, name[self].region]]
<ast.Try object at 0x7da1b204a260> | keyword[def] identifier[update_elbs] ( identifier[self] ):
literal[string]
identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] (
identifier[self] . identifier[account] . identifier[account_name] ,
identifier[self] . identifier[region]
))
identifier[elbs_from_db] = identifier[ELB] . identifier[get_all] ( identifier[self] . identifier[account] , identifier[self] . identifier[region] )
keyword[try] :
identifier[elb_client] = identifier[self] . identifier[session] . identifier[client] ( literal[string] , identifier[region_name] = identifier[self] . identifier[region] )
identifier[load_balancer_instances] = identifier[elb_client] . identifier[describe_load_balancers] ()[ literal[string] ]
identifier[elbs_from_api] ={}
keyword[for] identifier[load_balancer] keyword[in] identifier[load_balancer_instances] :
identifier[key] = literal[string] . identifier[format] ( identifier[self] . identifier[region] , identifier[load_balancer] [ literal[string] ])
identifier[elbs_from_api] [ identifier[key] ]= identifier[load_balancer]
keyword[for] identifier[elb_identifier] keyword[in] identifier[elbs_from_api] :
identifier[data] = identifier[elbs_from_api] [ identifier[elb_identifier] ]
keyword[if] identifier[elb_identifier] keyword[in] identifier[elbs_from_db] :
identifier[elb] = identifier[elbs_from_db] [ identifier[elb_identifier] ]
keyword[if] identifier[elb] . identifier[update] ( identifier[data] ):
identifier[self] . identifier[log] . identifier[info] (
literal[string] . identifier[format] (
identifier[elb] . identifier[resource] . identifier[resource_id] ,
identifier[self] . identifier[account] . identifier[account_name] ,
identifier[self] . identifier[region]
)
)
identifier[db] . identifier[session] . identifier[add] ( identifier[elb] . identifier[resource] )
keyword[else] :
keyword[if] literal[string] keyword[in] identifier[data] :
keyword[try] :
identifier[tags] ={ identifier[tag] [ literal[string] ]: identifier[tag] [ literal[string] ] keyword[for] identifier[tag] keyword[in] identifier[data] [ literal[string] ]}
keyword[except] identifier[AttributeError] :
identifier[tags] ={}
keyword[else] :
identifier[tags] ={}
identifier[vpc_data] =( identifier[data] [ literal[string] ] keyword[if] ( literal[string] keyword[in] identifier[data] keyword[and] identifier[data] [ literal[string] ]) keyword[else] literal[string] )
identifier[properties] ={
literal[string] : identifier[data] [ literal[string] ],
literal[string] : identifier[data] [ literal[string] ],
literal[string] : literal[string] . identifier[join] (
[ identifier[instance] [ literal[string] ] keyword[for] identifier[instance] keyword[in] identifier[data] [ literal[string] ]]
),
literal[string] : identifier[len] (
[ identifier[instance] [ literal[string] ] keyword[for] identifier[instance] keyword[in] identifier[data] [ literal[string] ]]
),
literal[string] : identifier[vpc_data] ,
literal[string] : literal[string]
}
keyword[if] literal[string] keyword[in] identifier[data] :
identifier[properties] [ literal[string] ]= identifier[data] [ literal[string] ]
keyword[else] :
identifier[properties] [ literal[string] ]= keyword[None]
identifier[resource_id] = literal[string] . identifier[format] ( identifier[self] . identifier[region] , identifier[data] [ literal[string] ])
identifier[elb] = identifier[ELB] . identifier[create] (
identifier[resource_id] ,
identifier[account_id] = identifier[self] . identifier[account] . identifier[account_id] ,
identifier[location] = identifier[self] . identifier[region] ,
identifier[properties] = identifier[properties] ,
identifier[tags] = identifier[tags]
)
identifier[self] . identifier[log] . identifier[info] (
literal[string] . identifier[format] (
identifier[self] . identifier[account] . identifier[account_name] ,
identifier[self] . identifier[region] ,
identifier[elb] . identifier[resource] . identifier[resource_id]
)
)
identifier[elb_keys_from_db] = identifier[set] ( identifier[list] ( identifier[elbs_from_db] . identifier[keys] ()))
identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[elb_keys_from_db] )
identifier[elb_keys_from_api] = identifier[set] ( identifier[list] ( identifier[elbs_from_api] . identifier[keys] ()))
identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[elb_keys_from_api] )
keyword[for] identifier[elb_identifier] keyword[in] identifier[elb_keys_from_db] - identifier[elb_keys_from_api] :
identifier[db] . identifier[session] . identifier[delete] ( identifier[elbs_from_db] [ identifier[elb_identifier] ]. identifier[resource] )
identifier[self] . identifier[log] . identifier[info] ( literal[string] . identifier[format] (
identifier[self] . identifier[account] . identifier[account_name] ,
identifier[self] . identifier[region] ,
identifier[elb_identifier]
)
)
identifier[db] . identifier[session] . identifier[commit] ()
keyword[except] :
identifier[self] . identifier[log] . identifier[exception] ( literal[string] . identifier[format] (
identifier[self] . identifier[account] . identifier[account_name] ,
identifier[self] . identifier[region]
))
identifier[db] . identifier[session] . identifier[rollback] () | def update_elbs(self):
"""Update list of ELBs for the account / region
Returns:
`None`
"""
self.log.debug('Updating ELBs for {}/{}'.format(self.account.account_name, self.region))
# ELBs known to CINQ
elbs_from_db = ELB.get_all(self.account, self.region)
try:
# ELBs known to AWS
elb_client = self.session.client('elb', region_name=self.region)
load_balancer_instances = elb_client.describe_load_balancers()['LoadBalancerDescriptions']
elbs_from_api = {}
for load_balancer in load_balancer_instances:
key = '{}::{}'.format(self.region, load_balancer['LoadBalancerName'])
elbs_from_api[key] = load_balancer # depends on [control=['for'], data=['load_balancer']]
# Process ELBs known to AWS
for elb_identifier in elbs_from_api:
data = elbs_from_api[elb_identifier]
# ELB already in DB?
if elb_identifier in elbs_from_db:
elb = elbs_from_db[elb_identifier]
if elb.update(data):
self.log.info('Updating info for ELB {} in {}/{}'.format(elb.resource.resource_id, self.account.account_name, self.region))
db.session.add(elb.resource) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['elb_identifier', 'elbs_from_db']]
else:
# Not previously seen this ELB, so add it
if 'Tags' in data:
try:
tags = {tag['Key']: tag['Value'] for tag in data['Tags']} # depends on [control=['try'], data=[]]
except AttributeError:
tags = {} # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['data']]
else:
tags = {}
vpc_data = data['VPCId'] if 'VPCId' in data and data['VPCId'] else 'no vpc'
properties = {'lb_name': data['LoadBalancerName'], 'dns_name': data['DNSName'], 'instances': ' '.join([instance['InstanceId'] for instance in data['Instances']]), 'num_instances': len([instance['InstanceId'] for instance in data['Instances']]), 'vpc_id': vpc_data, 'state': 'not_reported'}
if 'CanonicalHostedZoneName' in data:
properties['canonical_hosted_zone_name'] = data['CanonicalHostedZoneName'] # depends on [control=['if'], data=['data']]
else:
properties['canonical_hosted_zone_name'] = None
# LoadBalancerName doesn't have to be unique across all regions
# Use region::LoadBalancerName as resource_id
resource_id = '{}::{}'.format(self.region, data['LoadBalancerName'])
# All done, create
elb = ELB.create(resource_id, account_id=self.account.account_id, location=self.region, properties=properties, tags=tags)
# elbs[elb.resource.resource_id] = elb
self.log.info('Added new ELB {}/{}/{}'.format(self.account.account_name, self.region, elb.resource.resource_id)) # depends on [control=['for'], data=['elb_identifier']]
# Delete no longer existing ELBs
elb_keys_from_db = set(list(elbs_from_db.keys()))
self.log.debug('elb_keys_from_db = %s', elb_keys_from_db)
elb_keys_from_api = set(list(elbs_from_api.keys()))
self.log.debug('elb_keys_from_api = %s', elb_keys_from_api)
for elb_identifier in elb_keys_from_db - elb_keys_from_api:
db.session.delete(elbs_from_db[elb_identifier].resource)
self.log.info('Deleted ELB {}/{}/{}'.format(self.account.account_name, self.region, elb_identifier)) # depends on [control=['for'], data=['elb_identifier']]
db.session.commit() # depends on [control=['try'], data=[]]
except:
self.log.exception('There was a problem during ELB collection for {}/{}'.format(self.account.account_name, self.region))
db.session.rollback() # depends on [control=['except'], data=[]] |
def CreateStorageWriterForFile(cls, session, path):
"""Creates a storage writer based on the file.
Args:
session (Session): session the storage changes are part of.
path (str): path to the storage file.
Returns:
StorageWriter: a storage writer or None if the storage file cannot be
opened or the storage format is not supported.
"""
if sqlite_file.SQLiteStorageFile.CheckSupportedFormat(path):
return sqlite_writer.SQLiteStorageFileWriter(session, path)
return None | def function[CreateStorageWriterForFile, parameter[cls, session, path]]:
constant[Creates a storage writer based on the file.
Args:
session (Session): session the storage changes are part of.
path (str): path to the storage file.
Returns:
StorageWriter: a storage writer or None if the storage file cannot be
opened or the storage format is not supported.
]
if call[name[sqlite_file].SQLiteStorageFile.CheckSupportedFormat, parameter[name[path]]] begin[:]
return[call[name[sqlite_writer].SQLiteStorageFileWriter, parameter[name[session], name[path]]]]
return[constant[None]] | keyword[def] identifier[CreateStorageWriterForFile] ( identifier[cls] , identifier[session] , identifier[path] ):
literal[string]
keyword[if] identifier[sqlite_file] . identifier[SQLiteStorageFile] . identifier[CheckSupportedFormat] ( identifier[path] ):
keyword[return] identifier[sqlite_writer] . identifier[SQLiteStorageFileWriter] ( identifier[session] , identifier[path] )
keyword[return] keyword[None] | def CreateStorageWriterForFile(cls, session, path):
"""Creates a storage writer based on the file.
Args:
session (Session): session the storage changes are part of.
path (str): path to the storage file.
Returns:
StorageWriter: a storage writer or None if the storage file cannot be
opened or the storage format is not supported.
"""
if sqlite_file.SQLiteStorageFile.CheckSupportedFormat(path):
return sqlite_writer.SQLiteStorageFileWriter(session, path) # depends on [control=['if'], data=[]]
return None |
def _ssl_agent(self):
"""
Get a Twisted Agent that performs Client SSL authentication for Koji.
"""
# Load "cert" into a PrivateCertificate.
certfile = self.lookup(self.profile, 'cert')
certfile = os.path.expanduser(certfile)
with open(certfile) as certfp:
pemdata = certfp.read()
client_cert = PrivateCertificate.loadPEM(pemdata)
trustRoot = None # Use Twisted's platformTrust().
# Optionally load "serverca" into a Certificate.
servercafile = self.lookup(self.profile, 'serverca')
if servercafile:
servercafile = os.path.expanduser(servercafile)
trustRoot = RootCATrustRoot(servercafile)
policy = ClientCertPolicy(trustRoot=trustRoot, client_cert=client_cert)
return Agent(reactor, policy) | def function[_ssl_agent, parameter[self]]:
constant[
Get a Twisted Agent that performs Client SSL authentication for Koji.
]
variable[certfile] assign[=] call[name[self].lookup, parameter[name[self].profile, constant[cert]]]
variable[certfile] assign[=] call[name[os].path.expanduser, parameter[name[certfile]]]
with call[name[open], parameter[name[certfile]]] begin[:]
variable[pemdata] assign[=] call[name[certfp].read, parameter[]]
variable[client_cert] assign[=] call[name[PrivateCertificate].loadPEM, parameter[name[pemdata]]]
variable[trustRoot] assign[=] constant[None]
variable[servercafile] assign[=] call[name[self].lookup, parameter[name[self].profile, constant[serverca]]]
if name[servercafile] begin[:]
variable[servercafile] assign[=] call[name[os].path.expanduser, parameter[name[servercafile]]]
variable[trustRoot] assign[=] call[name[RootCATrustRoot], parameter[name[servercafile]]]
variable[policy] assign[=] call[name[ClientCertPolicy], parameter[]]
return[call[name[Agent], parameter[name[reactor], name[policy]]]] | keyword[def] identifier[_ssl_agent] ( identifier[self] ):
literal[string]
identifier[certfile] = identifier[self] . identifier[lookup] ( identifier[self] . identifier[profile] , literal[string] )
identifier[certfile] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[certfile] )
keyword[with] identifier[open] ( identifier[certfile] ) keyword[as] identifier[certfp] :
identifier[pemdata] = identifier[certfp] . identifier[read] ()
identifier[client_cert] = identifier[PrivateCertificate] . identifier[loadPEM] ( identifier[pemdata] )
identifier[trustRoot] = keyword[None]
identifier[servercafile] = identifier[self] . identifier[lookup] ( identifier[self] . identifier[profile] , literal[string] )
keyword[if] identifier[servercafile] :
identifier[servercafile] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[servercafile] )
identifier[trustRoot] = identifier[RootCATrustRoot] ( identifier[servercafile] )
identifier[policy] = identifier[ClientCertPolicy] ( identifier[trustRoot] = identifier[trustRoot] , identifier[client_cert] = identifier[client_cert] )
keyword[return] identifier[Agent] ( identifier[reactor] , identifier[policy] ) | def _ssl_agent(self):
"""
Get a Twisted Agent that performs Client SSL authentication for Koji.
"""
# Load "cert" into a PrivateCertificate.
certfile = self.lookup(self.profile, 'cert')
certfile = os.path.expanduser(certfile)
with open(certfile) as certfp:
pemdata = certfp.read()
client_cert = PrivateCertificate.loadPEM(pemdata) # depends on [control=['with'], data=['certfp']]
trustRoot = None # Use Twisted's platformTrust().
# Optionally load "serverca" into a Certificate.
servercafile = self.lookup(self.profile, 'serverca')
if servercafile:
servercafile = os.path.expanduser(servercafile)
trustRoot = RootCATrustRoot(servercafile) # depends on [control=['if'], data=[]]
policy = ClientCertPolicy(trustRoot=trustRoot, client_cert=client_cert)
return Agent(reactor, policy) |
def ips_with_roles(self, roles, env=None, match_all=False):
"""
Returns a function that, when called, gets servers with the given roles.
If env is given, then the environment must match as well. If match_all is True,
then only return servers who have all of the given roles. Otherwise, return
servers that have one or more of the given roles.
"""
def func():
return [s['external_ip'] for s in self.servers_with_roles(roles, env, match_all)]
return func | def function[ips_with_roles, parameter[self, roles, env, match_all]]:
constant[
Returns a function that, when called, gets servers with the given roles.
If env is given, then the environment must match as well. If match_all is True,
then only return servers who have all of the given roles. Otherwise, return
servers that have one or more of the given roles.
]
def function[func, parameter[]]:
return[<ast.ListComp object at 0x7da18fe92f50>]
return[name[func]] | keyword[def] identifier[ips_with_roles] ( identifier[self] , identifier[roles] , identifier[env] = keyword[None] , identifier[match_all] = keyword[False] ):
literal[string]
keyword[def] identifier[func] ():
keyword[return] [ identifier[s] [ literal[string] ] keyword[for] identifier[s] keyword[in] identifier[self] . identifier[servers_with_roles] ( identifier[roles] , identifier[env] , identifier[match_all] )]
keyword[return] identifier[func] | def ips_with_roles(self, roles, env=None, match_all=False):
"""
Returns a function that, when called, gets servers with the given roles.
If env is given, then the environment must match as well. If match_all is True,
then only return servers who have all of the given roles. Otherwise, return
servers that have one or more of the given roles.
"""
def func():
return [s['external_ip'] for s in self.servers_with_roles(roles, env, match_all)]
return func |
def copy_files(project_vars, project_dir, files):
"""
Copies files from the template into their target location. Unicode files
get their variables replaced here and files with a shebang are set to be
executable.
"""
for root, name, content, is_unicode in files:
project_name = project_vars['project_name_snake']
if is_unicode:
content = replace_content(content, project_vars)
file_path = make_file_path(project_dir, project_name, root, name)
makedirs(make_dir_path(project_dir, root, project_name), exist_ok=True)
if is_unicode:
with open(file_path, 'w') as f:
f.write(content)
if content.startswith('#!'):
chmod(file_path, 0o755)
else:
with open(file_path, 'wb') as f:
f.write(content) | def function[copy_files, parameter[project_vars, project_dir, files]]:
constant[
Copies files from the template into their target location. Unicode files
get their variables replaced here and files with a shebang are set to be
executable.
]
for taget[tuple[[<ast.Name object at 0x7da18f812200>, <ast.Name object at 0x7da18f8101f0>, <ast.Name object at 0x7da18f8103d0>, <ast.Name object at 0x7da18f812a70>]]] in starred[name[files]] begin[:]
variable[project_name] assign[=] call[name[project_vars]][constant[project_name_snake]]
if name[is_unicode] begin[:]
variable[content] assign[=] call[name[replace_content], parameter[name[content], name[project_vars]]]
variable[file_path] assign[=] call[name[make_file_path], parameter[name[project_dir], name[project_name], name[root], name[name]]]
call[name[makedirs], parameter[call[name[make_dir_path], parameter[name[project_dir], name[root], name[project_name]]]]]
if name[is_unicode] begin[:]
with call[name[open], parameter[name[file_path], constant[w]]] begin[:]
call[name[f].write, parameter[name[content]]]
if call[name[content].startswith, parameter[constant[#!]]] begin[:]
call[name[chmod], parameter[name[file_path], constant[493]]] | keyword[def] identifier[copy_files] ( identifier[project_vars] , identifier[project_dir] , identifier[files] ):
literal[string]
keyword[for] identifier[root] , identifier[name] , identifier[content] , identifier[is_unicode] keyword[in] identifier[files] :
identifier[project_name] = identifier[project_vars] [ literal[string] ]
keyword[if] identifier[is_unicode] :
identifier[content] = identifier[replace_content] ( identifier[content] , identifier[project_vars] )
identifier[file_path] = identifier[make_file_path] ( identifier[project_dir] , identifier[project_name] , identifier[root] , identifier[name] )
identifier[makedirs] ( identifier[make_dir_path] ( identifier[project_dir] , identifier[root] , identifier[project_name] ), identifier[exist_ok] = keyword[True] )
keyword[if] identifier[is_unicode] :
keyword[with] identifier[open] ( identifier[file_path] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[content] )
keyword[if] identifier[content] . identifier[startswith] ( literal[string] ):
identifier[chmod] ( identifier[file_path] , literal[int] )
keyword[else] :
keyword[with] identifier[open] ( identifier[file_path] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[content] ) | def copy_files(project_vars, project_dir, files):
"""
Copies files from the template into their target location. Unicode files
get their variables replaced here and files with a shebang are set to be
executable.
"""
for (root, name, content, is_unicode) in files:
project_name = project_vars['project_name_snake']
if is_unicode:
content = replace_content(content, project_vars) # depends on [control=['if'], data=[]]
file_path = make_file_path(project_dir, project_name, root, name)
makedirs(make_dir_path(project_dir, root, project_name), exist_ok=True)
if is_unicode:
with open(file_path, 'w') as f:
f.write(content) # depends on [control=['with'], data=['f']]
if content.startswith('#!'):
chmod(file_path, 493) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
with open(file_path, 'wb') as f:
f.write(content) # depends on [control=['with'], data=['f']] # depends on [control=['for'], data=[]] |
def get_avatar_metadata(self):
"""Gets the metadata for an asset.
return: (osid.Metadata) - metadata for the asset
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceForm.get_group_metadata_template
metadata = dict(self._mdata['avatar'])
metadata.update({'existing_id_values': self._my_map['avatarId']})
return Metadata(**metadata) | def function[get_avatar_metadata, parameter[self]]:
constant[Gets the metadata for an asset.
return: (osid.Metadata) - metadata for the asset
*compliance: mandatory -- This method must be implemented.*
]
variable[metadata] assign[=] call[name[dict], parameter[call[name[self]._mdata][constant[avatar]]]]
call[name[metadata].update, parameter[dictionary[[<ast.Constant object at 0x7da1b0a23fd0>], [<ast.Subscript object at 0x7da1b0a21090>]]]]
return[call[name[Metadata], parameter[]]] | keyword[def] identifier[get_avatar_metadata] ( identifier[self] ):
literal[string]
identifier[metadata] = identifier[dict] ( identifier[self] . identifier[_mdata] [ literal[string] ])
identifier[metadata] . identifier[update] ({ literal[string] : identifier[self] . identifier[_my_map] [ literal[string] ]})
keyword[return] identifier[Metadata] (** identifier[metadata] ) | def get_avatar_metadata(self):
"""Gets the metadata for an asset.
return: (osid.Metadata) - metadata for the asset
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceForm.get_group_metadata_template
metadata = dict(self._mdata['avatar'])
metadata.update({'existing_id_values': self._my_map['avatarId']})
return Metadata(**metadata) |
def QA_data_min_resample(min_data, type_='5min'):
"""分钟线采样成大周期
分钟线采样成子级别的分钟线
time+ OHLC==> resample
Arguments:
min {[type]} -- [description]
raw_type {[type]} -- [description]
new_type {[type]} -- [description]
"""
try:
min_data = min_data.reset_index().set_index('datetime', drop=False)
except:
min_data = min_data.set_index('datetime', drop=False)
CONVERSION = {
'code': 'first',
'open': 'first',
'high': 'max',
'low': 'min',
'close': 'last',
'vol': 'sum',
'amount': 'sum'
} if 'vol' in min_data.columns else {
'code': 'first',
'open': 'first',
'high': 'max',
'low': 'min',
'close': 'last',
'volume': 'sum',
'amount': 'sum'
}
resx = pd.DataFrame()
for item in set(min_data.index.date):
min_data_p = min_data.loc[str(item)]
n = min_data_p['{} 21:00:00'.format(item):].resample(
type_,
base=30,
closed='right',
loffset=type_
).apply(CONVERSION)
d = min_data_p[:'{} 11:30:00'.format(item)].resample(
type_,
base=30,
closed='right',
loffset=type_
).apply(CONVERSION)
f = min_data_p['{} 13:00:00'.format(item):].resample(
type_,
closed='right',
loffset=type_
).apply(CONVERSION)
resx = resx.append(d).append(f)
return resx.dropna().reset_index().set_index(['datetime', 'code']) | def function[QA_data_min_resample, parameter[min_data, type_]]:
constant[分钟线采样成大周期
分钟线采样成子级别的分钟线
time+ OHLC==> resample
Arguments:
min {[type]} -- [description]
raw_type {[type]} -- [description]
new_type {[type]} -- [description]
]
<ast.Try object at 0x7da1b20400a0>
variable[CONVERSION] assign[=] <ast.IfExp object at 0x7da1b2042920>
variable[resx] assign[=] call[name[pd].DataFrame, parameter[]]
for taget[name[item]] in starred[call[name[set], parameter[name[min_data].index.date]]] begin[:]
variable[min_data_p] assign[=] call[name[min_data].loc][call[name[str], parameter[name[item]]]]
variable[n] assign[=] call[call[call[name[min_data_p]][<ast.Slice object at 0x7da1b2042b90>].resample, parameter[name[type_]]].apply, parameter[name[CONVERSION]]]
variable[d] assign[=] call[call[call[name[min_data_p]][<ast.Slice object at 0x7da1b1face80>].resample, parameter[name[type_]]].apply, parameter[name[CONVERSION]]]
variable[f] assign[=] call[call[call[name[min_data_p]][<ast.Slice object at 0x7da1b1fad9f0>].resample, parameter[name[type_]]].apply, parameter[name[CONVERSION]]]
variable[resx] assign[=] call[call[name[resx].append, parameter[name[d]]].append, parameter[name[f]]]
return[call[call[call[name[resx].dropna, parameter[]].reset_index, parameter[]].set_index, parameter[list[[<ast.Constant object at 0x7da1b1facd60>, <ast.Constant object at 0x7da1b1facdc0>]]]]] | keyword[def] identifier[QA_data_min_resample] ( identifier[min_data] , identifier[type_] = literal[string] ):
literal[string]
keyword[try] :
identifier[min_data] = identifier[min_data] . identifier[reset_index] (). identifier[set_index] ( literal[string] , identifier[drop] = keyword[False] )
keyword[except] :
identifier[min_data] = identifier[min_data] . identifier[set_index] ( literal[string] , identifier[drop] = keyword[False] )
identifier[CONVERSION] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
} keyword[if] literal[string] keyword[in] identifier[min_data] . identifier[columns] keyword[else] {
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
}
identifier[resx] = identifier[pd] . identifier[DataFrame] ()
keyword[for] identifier[item] keyword[in] identifier[set] ( identifier[min_data] . identifier[index] . identifier[date] ):
identifier[min_data_p] = identifier[min_data] . identifier[loc] [ identifier[str] ( identifier[item] )]
identifier[n] = identifier[min_data_p] [ literal[string] . identifier[format] ( identifier[item] ):]. identifier[resample] (
identifier[type_] ,
identifier[base] = literal[int] ,
identifier[closed] = literal[string] ,
identifier[loffset] = identifier[type_]
). identifier[apply] ( identifier[CONVERSION] )
identifier[d] = identifier[min_data_p] [: literal[string] . identifier[format] ( identifier[item] )]. identifier[resample] (
identifier[type_] ,
identifier[base] = literal[int] ,
identifier[closed] = literal[string] ,
identifier[loffset] = identifier[type_]
). identifier[apply] ( identifier[CONVERSION] )
identifier[f] = identifier[min_data_p] [ literal[string] . identifier[format] ( identifier[item] ):]. identifier[resample] (
identifier[type_] ,
identifier[closed] = literal[string] ,
identifier[loffset] = identifier[type_]
). identifier[apply] ( identifier[CONVERSION] )
identifier[resx] = identifier[resx] . identifier[append] ( identifier[d] ). identifier[append] ( identifier[f] )
keyword[return] identifier[resx] . identifier[dropna] (). identifier[reset_index] (). identifier[set_index] ([ literal[string] , literal[string] ]) | def QA_data_min_resample(min_data, type_='5min'):
"""分钟线采样成大周期
分钟线采样成子级别的分钟线
time+ OHLC==> resample
Arguments:
min {[type]} -- [description]
raw_type {[type]} -- [description]
new_type {[type]} -- [description]
"""
try:
min_data = min_data.reset_index().set_index('datetime', drop=False) # depends on [control=['try'], data=[]]
except:
min_data = min_data.set_index('datetime', drop=False) # depends on [control=['except'], data=[]]
CONVERSION = {'code': 'first', 'open': 'first', 'high': 'max', 'low': 'min', 'close': 'last', 'vol': 'sum', 'amount': 'sum'} if 'vol' in min_data.columns else {'code': 'first', 'open': 'first', 'high': 'max', 'low': 'min', 'close': 'last', 'volume': 'sum', 'amount': 'sum'}
resx = pd.DataFrame()
for item in set(min_data.index.date):
min_data_p = min_data.loc[str(item)]
n = min_data_p['{} 21:00:00'.format(item):].resample(type_, base=30, closed='right', loffset=type_).apply(CONVERSION)
d = min_data_p[:'{} 11:30:00'.format(item)].resample(type_, base=30, closed='right', loffset=type_).apply(CONVERSION)
f = min_data_p['{} 13:00:00'.format(item):].resample(type_, closed='right', loffset=type_).apply(CONVERSION)
resx = resx.append(d).append(f) # depends on [control=['for'], data=['item']]
return resx.dropna().reset_index().set_index(['datetime', 'code']) |
def pause():
'''Pause playback.
Calls PlaybackController.pause()'''
server = getServer()
server.core.playback.pause()
pos = server.core.playback.get_time_position()
print('Paused at {}'.format(formatTimeposition(pos))) | def function[pause, parameter[]]:
constant[Pause playback.
Calls PlaybackController.pause()]
variable[server] assign[=] call[name[getServer], parameter[]]
call[name[server].core.playback.pause, parameter[]]
variable[pos] assign[=] call[name[server].core.playback.get_time_position, parameter[]]
call[name[print], parameter[call[constant[Paused at {}].format, parameter[call[name[formatTimeposition], parameter[name[pos]]]]]]] | keyword[def] identifier[pause] ():
literal[string]
identifier[server] = identifier[getServer] ()
identifier[server] . identifier[core] . identifier[playback] . identifier[pause] ()
identifier[pos] = identifier[server] . identifier[core] . identifier[playback] . identifier[get_time_position] ()
identifier[print] ( literal[string] . identifier[format] ( identifier[formatTimeposition] ( identifier[pos] ))) | def pause():
"""Pause playback.
Calls PlaybackController.pause()"""
server = getServer()
server.core.playback.pause()
pos = server.core.playback.get_time_position()
print('Paused at {}'.format(formatTimeposition(pos))) |
def initialize_uninitialized_global_variables(sess):
"""
Only initializes the variables of a TensorFlow session that were not
already initialized.
:param sess: the TensorFlow session
:return:
"""
# List all global variables
global_vars = tf.global_variables()
# Find initialized status for all variables
is_var_init = [tf.is_variable_initialized(var) for var in global_vars]
is_initialized = sess.run(is_var_init)
# List all variables that were not initialized previously
not_initialized_vars = [var for (var, init) in
zip(global_vars, is_initialized) if not init]
# Initialize all uninitialized variables found, if any
if len(not_initialized_vars):
sess.run(tf.variables_initializer(not_initialized_vars)) | def function[initialize_uninitialized_global_variables, parameter[sess]]:
constant[
Only initializes the variables of a TensorFlow session that were not
already initialized.
:param sess: the TensorFlow session
:return:
]
variable[global_vars] assign[=] call[name[tf].global_variables, parameter[]]
variable[is_var_init] assign[=] <ast.ListComp object at 0x7da20c6e7c40>
variable[is_initialized] assign[=] call[name[sess].run, parameter[name[is_var_init]]]
variable[not_initialized_vars] assign[=] <ast.ListComp object at 0x7da20c6e6350>
if call[name[len], parameter[name[not_initialized_vars]]] begin[:]
call[name[sess].run, parameter[call[name[tf].variables_initializer, parameter[name[not_initialized_vars]]]]] | keyword[def] identifier[initialize_uninitialized_global_variables] ( identifier[sess] ):
literal[string]
identifier[global_vars] = identifier[tf] . identifier[global_variables] ()
identifier[is_var_init] =[ identifier[tf] . identifier[is_variable_initialized] ( identifier[var] ) keyword[for] identifier[var] keyword[in] identifier[global_vars] ]
identifier[is_initialized] = identifier[sess] . identifier[run] ( identifier[is_var_init] )
identifier[not_initialized_vars] =[ identifier[var] keyword[for] ( identifier[var] , identifier[init] ) keyword[in]
identifier[zip] ( identifier[global_vars] , identifier[is_initialized] ) keyword[if] keyword[not] identifier[init] ]
keyword[if] identifier[len] ( identifier[not_initialized_vars] ):
identifier[sess] . identifier[run] ( identifier[tf] . identifier[variables_initializer] ( identifier[not_initialized_vars] )) | def initialize_uninitialized_global_variables(sess):
"""
Only initializes the variables of a TensorFlow session that were not
already initialized.
:param sess: the TensorFlow session
:return:
"""
# List all global variables
global_vars = tf.global_variables()
# Find initialized status for all variables
is_var_init = [tf.is_variable_initialized(var) for var in global_vars]
is_initialized = sess.run(is_var_init)
# List all variables that were not initialized previously
not_initialized_vars = [var for (var, init) in zip(global_vars, is_initialized) if not init]
# Initialize all uninitialized variables found, if any
if len(not_initialized_vars):
sess.run(tf.variables_initializer(not_initialized_vars)) # depends on [control=['if'], data=[]] |
def created(message):
"""Create a Deleted response builder with specified message."""
def create(value, _context, **_params):
return Created(value, message)
return create | def function[created, parameter[message]]:
constant[Create a Deleted response builder with specified message.]
def function[create, parameter[value, _context]]:
return[call[name[Created], parameter[name[value], name[message]]]]
return[name[create]] | keyword[def] identifier[created] ( identifier[message] ):
literal[string]
keyword[def] identifier[create] ( identifier[value] , identifier[_context] ,** identifier[_params] ):
keyword[return] identifier[Created] ( identifier[value] , identifier[message] )
keyword[return] identifier[create] | def created(message):
"""Create a Deleted response builder with specified message."""
def create(value, _context, **_params):
return Created(value, message)
return create |
def GetAPFSFileEntryByPathSpec(self, path_spec):
"""Retrieves the APFS file entry for a path specification.
Args:
path_spec (PathSpec): a path specification.
Returns:
pyfsapfs.file_entry: file entry.
Raises:
PathSpecError: if the path specification is missing location and
identifier.
"""
# Opening a file by identifier is faster than opening a file by location.
location = getattr(path_spec, 'location', None)
identifier = getattr(path_spec, 'identifier', None)
if identifier is not None:
fsapfs_file_entry = self._fsapfs_volume.get_file_entry_by_identifier(
identifier)
elif location is not None:
fsapfs_file_entry = self._fsapfs_volume.get_file_entry_by_path(location)
else:
raise errors.PathSpecError(
'Path specification missing location and identifier.')
return fsapfs_file_entry | def function[GetAPFSFileEntryByPathSpec, parameter[self, path_spec]]:
constant[Retrieves the APFS file entry for a path specification.
Args:
path_spec (PathSpec): a path specification.
Returns:
pyfsapfs.file_entry: file entry.
Raises:
PathSpecError: if the path specification is missing location and
identifier.
]
variable[location] assign[=] call[name[getattr], parameter[name[path_spec], constant[location], constant[None]]]
variable[identifier] assign[=] call[name[getattr], parameter[name[path_spec], constant[identifier], constant[None]]]
if compare[name[identifier] is_not constant[None]] begin[:]
variable[fsapfs_file_entry] assign[=] call[name[self]._fsapfs_volume.get_file_entry_by_identifier, parameter[name[identifier]]]
return[name[fsapfs_file_entry]] | keyword[def] identifier[GetAPFSFileEntryByPathSpec] ( identifier[self] , identifier[path_spec] ):
literal[string]
identifier[location] = identifier[getattr] ( identifier[path_spec] , literal[string] , keyword[None] )
identifier[identifier] = identifier[getattr] ( identifier[path_spec] , literal[string] , keyword[None] )
keyword[if] identifier[identifier] keyword[is] keyword[not] keyword[None] :
identifier[fsapfs_file_entry] = identifier[self] . identifier[_fsapfs_volume] . identifier[get_file_entry_by_identifier] (
identifier[identifier] )
keyword[elif] identifier[location] keyword[is] keyword[not] keyword[None] :
identifier[fsapfs_file_entry] = identifier[self] . identifier[_fsapfs_volume] . identifier[get_file_entry_by_path] ( identifier[location] )
keyword[else] :
keyword[raise] identifier[errors] . identifier[PathSpecError] (
literal[string] )
keyword[return] identifier[fsapfs_file_entry] | def GetAPFSFileEntryByPathSpec(self, path_spec):
"""Retrieves the APFS file entry for a path specification.
Args:
path_spec (PathSpec): a path specification.
Returns:
pyfsapfs.file_entry: file entry.
Raises:
PathSpecError: if the path specification is missing location and
identifier.
"""
# Opening a file by identifier is faster than opening a file by location.
location = getattr(path_spec, 'location', None)
identifier = getattr(path_spec, 'identifier', None)
if identifier is not None:
fsapfs_file_entry = self._fsapfs_volume.get_file_entry_by_identifier(identifier) # depends on [control=['if'], data=['identifier']]
elif location is not None:
fsapfs_file_entry = self._fsapfs_volume.get_file_entry_by_path(location) # depends on [control=['if'], data=['location']]
else:
raise errors.PathSpecError('Path specification missing location and identifier.')
return fsapfs_file_entry |
def write_text(filename: str, text: str) -> None:
"""
Writes text to a file.
"""
with open(filename, 'w') as f: # type: TextIO
print(text, file=f) | def function[write_text, parameter[filename, text]]:
constant[
Writes text to a file.
]
with call[name[open], parameter[name[filename], constant[w]]] begin[:]
call[name[print], parameter[name[text]]] | keyword[def] identifier[write_text] ( identifier[filename] : identifier[str] , identifier[text] : identifier[str] )-> keyword[None] :
literal[string]
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] :
identifier[print] ( identifier[text] , identifier[file] = identifier[f] ) | def write_text(filename: str, text: str) -> None:
"""
Writes text to a file.
"""
with open(filename, 'w') as f: # type: TextIO
print(text, file=f) # depends on [control=['with'], data=['f']] |
def _csv_header(self):
"""
Extract the expected CSV header from the exposure metadata
"""
fields = ['id', 'number', 'taxonomy', 'lon', 'lat']
for name in self.cost_types['name']:
fields.append(name)
if 'per_area' in self.cost_types['type']:
fields.append('area')
if self.occupancy_periods:
fields.extend(self.occupancy_periods.split())
fields.extend(self.tagcol.tagnames)
return set(fields) | def function[_csv_header, parameter[self]]:
constant[
Extract the expected CSV header from the exposure metadata
]
variable[fields] assign[=] list[[<ast.Constant object at 0x7da2046206d0>, <ast.Constant object at 0x7da2046212a0>, <ast.Constant object at 0x7da2046227d0>, <ast.Constant object at 0x7da204622b00>, <ast.Constant object at 0x7da204620e80>]]
for taget[name[name]] in starred[call[name[self].cost_types][constant[name]]] begin[:]
call[name[fields].append, parameter[name[name]]]
if compare[constant[per_area] in call[name[self].cost_types][constant[type]]] begin[:]
call[name[fields].append, parameter[constant[area]]]
if name[self].occupancy_periods begin[:]
call[name[fields].extend, parameter[call[name[self].occupancy_periods.split, parameter[]]]]
call[name[fields].extend, parameter[name[self].tagcol.tagnames]]
return[call[name[set], parameter[name[fields]]]] | keyword[def] identifier[_csv_header] ( identifier[self] ):
literal[string]
identifier[fields] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[for] identifier[name] keyword[in] identifier[self] . identifier[cost_types] [ literal[string] ]:
identifier[fields] . identifier[append] ( identifier[name] )
keyword[if] literal[string] keyword[in] identifier[self] . identifier[cost_types] [ literal[string] ]:
identifier[fields] . identifier[append] ( literal[string] )
keyword[if] identifier[self] . identifier[occupancy_periods] :
identifier[fields] . identifier[extend] ( identifier[self] . identifier[occupancy_periods] . identifier[split] ())
identifier[fields] . identifier[extend] ( identifier[self] . identifier[tagcol] . identifier[tagnames] )
keyword[return] identifier[set] ( identifier[fields] ) | def _csv_header(self):
"""
Extract the expected CSV header from the exposure metadata
"""
fields = ['id', 'number', 'taxonomy', 'lon', 'lat']
for name in self.cost_types['name']:
fields.append(name) # depends on [control=['for'], data=['name']]
if 'per_area' in self.cost_types['type']:
fields.append('area') # depends on [control=['if'], data=[]]
if self.occupancy_periods:
fields.extend(self.occupancy_periods.split()) # depends on [control=['if'], data=[]]
fields.extend(self.tagcol.tagnames)
return set(fields) |
def __resolveport(self, definitions):
"""
Resolve port_type reference.
@param definitions: A definitions object.
@type definitions: L{Definitions}
"""
ref = qualify(self.type, self.root, definitions.tns)
port_type = definitions.port_types.get(ref)
if port_type is None:
raise Exception("portType '%s', not-found" % (self.type,))
# Later on we will require access to the message data referenced by
# this port_type instance, and in order for those data references to be
# available, port_type first needs to dereference its message
# identification string. The only scenario where the port_type might
# possibly not have already resolved its references, and where this
# explicit resolve() call is required, is if we are dealing with a
# recursive WSDL import chain.
port_type.resolve(definitions)
self.type = port_type | def function[__resolveport, parameter[self, definitions]]:
constant[
Resolve port_type reference.
@param definitions: A definitions object.
@type definitions: L{Definitions}
]
variable[ref] assign[=] call[name[qualify], parameter[name[self].type, name[self].root, name[definitions].tns]]
variable[port_type] assign[=] call[name[definitions].port_types.get, parameter[name[ref]]]
if compare[name[port_type] is constant[None]] begin[:]
<ast.Raise object at 0x7da18f58f7f0>
call[name[port_type].resolve, parameter[name[definitions]]]
name[self].type assign[=] name[port_type] | keyword[def] identifier[__resolveport] ( identifier[self] , identifier[definitions] ):
literal[string]
identifier[ref] = identifier[qualify] ( identifier[self] . identifier[type] , identifier[self] . identifier[root] , identifier[definitions] . identifier[tns] )
identifier[port_type] = identifier[definitions] . identifier[port_types] . identifier[get] ( identifier[ref] )
keyword[if] identifier[port_type] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] %( identifier[self] . identifier[type] ,))
identifier[port_type] . identifier[resolve] ( identifier[definitions] )
identifier[self] . identifier[type] = identifier[port_type] | def __resolveport(self, definitions):
"""
Resolve port_type reference.
@param definitions: A definitions object.
@type definitions: L{Definitions}
"""
ref = qualify(self.type, self.root, definitions.tns)
port_type = definitions.port_types.get(ref)
if port_type is None:
raise Exception("portType '%s', not-found" % (self.type,)) # depends on [control=['if'], data=[]]
# Later on we will require access to the message data referenced by
# this port_type instance, and in order for those data references to be
# available, port_type first needs to dereference its message
# identification string. The only scenario where the port_type might
# possibly not have already resolved its references, and where this
# explicit resolve() call is required, is if we are dealing with a
# recursive WSDL import chain.
port_type.resolve(definitions)
self.type = port_type |
def _context_menu_make(self, pos):
""" Reimplemented to add an action for raw copy.
"""
menu = super(FrontendWidget, self)._context_menu_make(pos)
for before_action in menu.actions():
if before_action.shortcut().matches(QtGui.QKeySequence.Paste) == \
QtGui.QKeySequence.ExactMatch:
menu.insertAction(before_action, self._copy_raw_action)
break
return menu | def function[_context_menu_make, parameter[self, pos]]:
constant[ Reimplemented to add an action for raw copy.
]
variable[menu] assign[=] call[call[name[super], parameter[name[FrontendWidget], name[self]]]._context_menu_make, parameter[name[pos]]]
for taget[name[before_action]] in starred[call[name[menu].actions, parameter[]]] begin[:]
if compare[call[call[name[before_action].shortcut, parameter[]].matches, parameter[name[QtGui].QKeySequence.Paste]] equal[==] name[QtGui].QKeySequence.ExactMatch] begin[:]
call[name[menu].insertAction, parameter[name[before_action], name[self]._copy_raw_action]]
break
return[name[menu]] | keyword[def] identifier[_context_menu_make] ( identifier[self] , identifier[pos] ):
literal[string]
identifier[menu] = identifier[super] ( identifier[FrontendWidget] , identifier[self] ). identifier[_context_menu_make] ( identifier[pos] )
keyword[for] identifier[before_action] keyword[in] identifier[menu] . identifier[actions] ():
keyword[if] identifier[before_action] . identifier[shortcut] (). identifier[matches] ( identifier[QtGui] . identifier[QKeySequence] . identifier[Paste] )== identifier[QtGui] . identifier[QKeySequence] . identifier[ExactMatch] :
identifier[menu] . identifier[insertAction] ( identifier[before_action] , identifier[self] . identifier[_copy_raw_action] )
keyword[break]
keyword[return] identifier[menu] | def _context_menu_make(self, pos):
""" Reimplemented to add an action for raw copy.
"""
menu = super(FrontendWidget, self)._context_menu_make(pos)
for before_action in menu.actions():
if before_action.shortcut().matches(QtGui.QKeySequence.Paste) == QtGui.QKeySequence.ExactMatch:
menu.insertAction(before_action, self._copy_raw_action)
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['before_action']]
return menu |
def get_stp_brief_info_output_spanning_tree_info_spanning_tree_mode_rstp_rstp_migrate_time(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_stp_brief_info = ET.Element("get_stp_brief_info")
config = get_stp_brief_info
output = ET.SubElement(get_stp_brief_info, "output")
spanning_tree_info = ET.SubElement(output, "spanning-tree-info")
spanning_tree_mode = ET.SubElement(spanning_tree_info, "spanning-tree-mode")
rstp = ET.SubElement(spanning_tree_mode, "rstp")
rstp = ET.SubElement(rstp, "rstp")
migrate_time = ET.SubElement(rstp, "migrate-time")
migrate_time.text = kwargs.pop('migrate_time')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[get_stp_brief_info_output_spanning_tree_info_spanning_tree_mode_rstp_rstp_migrate_time, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[get_stp_brief_info] assign[=] call[name[ET].Element, parameter[constant[get_stp_brief_info]]]
variable[config] assign[=] name[get_stp_brief_info]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[get_stp_brief_info], constant[output]]]
variable[spanning_tree_info] assign[=] call[name[ET].SubElement, parameter[name[output], constant[spanning-tree-info]]]
variable[spanning_tree_mode] assign[=] call[name[ET].SubElement, parameter[name[spanning_tree_info], constant[spanning-tree-mode]]]
variable[rstp] assign[=] call[name[ET].SubElement, parameter[name[spanning_tree_mode], constant[rstp]]]
variable[rstp] assign[=] call[name[ET].SubElement, parameter[name[rstp], constant[rstp]]]
variable[migrate_time] assign[=] call[name[ET].SubElement, parameter[name[rstp], constant[migrate-time]]]
name[migrate_time].text assign[=] call[name[kwargs].pop, parameter[constant[migrate_time]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[get_stp_brief_info_output_spanning_tree_info_spanning_tree_mode_rstp_rstp_migrate_time] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[get_stp_brief_info] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[get_stp_brief_info]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[get_stp_brief_info] , literal[string] )
identifier[spanning_tree_info] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[spanning_tree_mode] = identifier[ET] . identifier[SubElement] ( identifier[spanning_tree_info] , literal[string] )
identifier[rstp] = identifier[ET] . identifier[SubElement] ( identifier[spanning_tree_mode] , literal[string] )
identifier[rstp] = identifier[ET] . identifier[SubElement] ( identifier[rstp] , literal[string] )
identifier[migrate_time] = identifier[ET] . identifier[SubElement] ( identifier[rstp] , literal[string] )
identifier[migrate_time] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def get_stp_brief_info_output_spanning_tree_info_spanning_tree_mode_rstp_rstp_migrate_time(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
get_stp_brief_info = ET.Element('get_stp_brief_info')
config = get_stp_brief_info
output = ET.SubElement(get_stp_brief_info, 'output')
spanning_tree_info = ET.SubElement(output, 'spanning-tree-info')
spanning_tree_mode = ET.SubElement(spanning_tree_info, 'spanning-tree-mode')
rstp = ET.SubElement(spanning_tree_mode, 'rstp')
rstp = ET.SubElement(rstp, 'rstp')
migrate_time = ET.SubElement(rstp, 'migrate-time')
migrate_time.text = kwargs.pop('migrate_time')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def annual_frequency_of_exceedence(poe, t_haz):
"""
:param poe: array of probabilities of exceedence
:param t_haz: hazard investigation time
:returns: array of frequencies (with +inf values where poe=1)
"""
with warnings.catch_warnings():
warnings.simplefilter("ignore")
# avoid RuntimeWarning: divide by zero encountered in log
return - numpy.log(1. - poe) / t_haz | def function[annual_frequency_of_exceedence, parameter[poe, t_haz]]:
constant[
:param poe: array of probabilities of exceedence
:param t_haz: hazard investigation time
:returns: array of frequencies (with +inf values where poe=1)
]
with call[name[warnings].catch_warnings, parameter[]] begin[:]
call[name[warnings].simplefilter, parameter[constant[ignore]]]
return[binary_operation[<ast.UnaryOp object at 0x7da20e955a50> / name[t_haz]]] | keyword[def] identifier[annual_frequency_of_exceedence] ( identifier[poe] , identifier[t_haz] ):
literal[string]
keyword[with] identifier[warnings] . identifier[catch_warnings] ():
identifier[warnings] . identifier[simplefilter] ( literal[string] )
keyword[return] - identifier[numpy] . identifier[log] ( literal[int] - identifier[poe] )/ identifier[t_haz] | def annual_frequency_of_exceedence(poe, t_haz):
"""
:param poe: array of probabilities of exceedence
:param t_haz: hazard investigation time
:returns: array of frequencies (with +inf values where poe=1)
"""
with warnings.catch_warnings():
warnings.simplefilter('ignore')
# avoid RuntimeWarning: divide by zero encountered in log
return -numpy.log(1.0 - poe) / t_haz # depends on [control=['with'], data=[]] |
def rfc2426(self):
"""RFC2426-encode the field content.
:return: the field in the RFC 2426 format.
:returntype: `str`"""
return rfc2425encode("n",u';'.join(quote_semicolon(val) for val in
(self.family,self.given,self.middle,self.prefix,self.suffix))) | def function[rfc2426, parameter[self]]:
constant[RFC2426-encode the field content.
:return: the field in the RFC 2426 format.
:returntype: `str`]
return[call[name[rfc2425encode], parameter[constant[n], call[constant[;].join, parameter[<ast.GeneratorExp object at 0x7da20c990a30>]]]]] | keyword[def] identifier[rfc2426] ( identifier[self] ):
literal[string]
keyword[return] identifier[rfc2425encode] ( literal[string] , literal[string] . identifier[join] ( identifier[quote_semicolon] ( identifier[val] ) keyword[for] identifier[val] keyword[in]
( identifier[self] . identifier[family] , identifier[self] . identifier[given] , identifier[self] . identifier[middle] , identifier[self] . identifier[prefix] , identifier[self] . identifier[suffix] ))) | def rfc2426(self):
"""RFC2426-encode the field content.
:return: the field in the RFC 2426 format.
:returntype: `str`"""
return rfc2425encode('n', u';'.join((quote_semicolon(val) for val in (self.family, self.given, self.middle, self.prefix, self.suffix)))) |
def merge(self, merge_func=None, merge_key=None, stash='active'):
"""
Merge the states in a given stash.
:param stash: The stash (default: 'active')
:param merge_func: If provided, instead of using state.merge, call this function with
the states as the argument. Should return the merged state.
:param merge_key: If provided, should be a function that takes a state and returns a key that will compare
equal for all states that are allowed to be merged together, as a first aproximation.
By default: uses PC, callstack, and open file descriptors.
:returns: The simulation manager, for chaining.
:rtype: SimulationManager
"""
self.prune(from_stash=stash)
to_merge = self._fetch_states(stash=stash)
not_to_merge = []
if merge_key is None: merge_key = self._merge_key
merge_groups = [ ]
while to_merge:
base_key = merge_key(to_merge[0])
g, to_merge = self._filter_states(lambda s: base_key == merge_key(s), to_merge)
if len(g) <= 1:
not_to_merge.extend(g)
else:
merge_groups.append(g)
for g in merge_groups:
try:
m = self._merge_states(g) if merge_func is None else merge_func(*g)
not_to_merge.append(m)
except SimMergeError:
l.warning("SimMergeError while merging %d states", len(g), exc_info=True)
not_to_merge.extend(g)
self._clear_states(stash)
self._store_states(stash, not_to_merge)
return self | def function[merge, parameter[self, merge_func, merge_key, stash]]:
constant[
Merge the states in a given stash.
:param stash: The stash (default: 'active')
:param merge_func: If provided, instead of using state.merge, call this function with
the states as the argument. Should return the merged state.
:param merge_key: If provided, should be a function that takes a state and returns a key that will compare
equal for all states that are allowed to be merged together, as a first aproximation.
By default: uses PC, callstack, and open file descriptors.
:returns: The simulation manager, for chaining.
:rtype: SimulationManager
]
call[name[self].prune, parameter[]]
variable[to_merge] assign[=] call[name[self]._fetch_states, parameter[]]
variable[not_to_merge] assign[=] list[[]]
if compare[name[merge_key] is constant[None]] begin[:]
variable[merge_key] assign[=] name[self]._merge_key
variable[merge_groups] assign[=] list[[]]
while name[to_merge] begin[:]
variable[base_key] assign[=] call[name[merge_key], parameter[call[name[to_merge]][constant[0]]]]
<ast.Tuple object at 0x7da20c6a9540> assign[=] call[name[self]._filter_states, parameter[<ast.Lambda object at 0x7da18ede78e0>, name[to_merge]]]
if compare[call[name[len], parameter[name[g]]] less_or_equal[<=] constant[1]] begin[:]
call[name[not_to_merge].extend, parameter[name[g]]]
for taget[name[g]] in starred[name[merge_groups]] begin[:]
<ast.Try object at 0x7da20c7960e0>
call[name[self]._clear_states, parameter[name[stash]]]
call[name[self]._store_states, parameter[name[stash], name[not_to_merge]]]
return[name[self]] | keyword[def] identifier[merge] ( identifier[self] , identifier[merge_func] = keyword[None] , identifier[merge_key] = keyword[None] , identifier[stash] = literal[string] ):
literal[string]
identifier[self] . identifier[prune] ( identifier[from_stash] = identifier[stash] )
identifier[to_merge] = identifier[self] . identifier[_fetch_states] ( identifier[stash] = identifier[stash] )
identifier[not_to_merge] =[]
keyword[if] identifier[merge_key] keyword[is] keyword[None] : identifier[merge_key] = identifier[self] . identifier[_merge_key]
identifier[merge_groups] =[]
keyword[while] identifier[to_merge] :
identifier[base_key] = identifier[merge_key] ( identifier[to_merge] [ literal[int] ])
identifier[g] , identifier[to_merge] = identifier[self] . identifier[_filter_states] ( keyword[lambda] identifier[s] : identifier[base_key] == identifier[merge_key] ( identifier[s] ), identifier[to_merge] )
keyword[if] identifier[len] ( identifier[g] )<= literal[int] :
identifier[not_to_merge] . identifier[extend] ( identifier[g] )
keyword[else] :
identifier[merge_groups] . identifier[append] ( identifier[g] )
keyword[for] identifier[g] keyword[in] identifier[merge_groups] :
keyword[try] :
identifier[m] = identifier[self] . identifier[_merge_states] ( identifier[g] ) keyword[if] identifier[merge_func] keyword[is] keyword[None] keyword[else] identifier[merge_func] (* identifier[g] )
identifier[not_to_merge] . identifier[append] ( identifier[m] )
keyword[except] identifier[SimMergeError] :
identifier[l] . identifier[warning] ( literal[string] , identifier[len] ( identifier[g] ), identifier[exc_info] = keyword[True] )
identifier[not_to_merge] . identifier[extend] ( identifier[g] )
identifier[self] . identifier[_clear_states] ( identifier[stash] )
identifier[self] . identifier[_store_states] ( identifier[stash] , identifier[not_to_merge] )
keyword[return] identifier[self] | def merge(self, merge_func=None, merge_key=None, stash='active'):
"""
Merge the states in a given stash.
:param stash: The stash (default: 'active')
:param merge_func: If provided, instead of using state.merge, call this function with
the states as the argument. Should return the merged state.
:param merge_key: If provided, should be a function that takes a state and returns a key that will compare
equal for all states that are allowed to be merged together, as a first aproximation.
By default: uses PC, callstack, and open file descriptors.
:returns: The simulation manager, for chaining.
:rtype: SimulationManager
"""
self.prune(from_stash=stash)
to_merge = self._fetch_states(stash=stash)
not_to_merge = []
if merge_key is None:
merge_key = self._merge_key # depends on [control=['if'], data=['merge_key']]
merge_groups = []
while to_merge:
base_key = merge_key(to_merge[0])
(g, to_merge) = self._filter_states(lambda s: base_key == merge_key(s), to_merge)
if len(g) <= 1:
not_to_merge.extend(g) # depends on [control=['if'], data=[]]
else:
merge_groups.append(g) # depends on [control=['while'], data=[]]
for g in merge_groups:
try:
m = self._merge_states(g) if merge_func is None else merge_func(*g)
not_to_merge.append(m) # depends on [control=['try'], data=[]]
except SimMergeError:
l.warning('SimMergeError while merging %d states', len(g), exc_info=True)
not_to_merge.extend(g) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['g']]
self._clear_states(stash)
self._store_states(stash, not_to_merge)
return self |
def _merge_keys(kwargs):
'''
The log_config is a mixture of the CLI options --log-driver and --log-opt
(which we support in Salt as log_driver and log_opt, respectively), but it
must be submitted to the host config in the format {'Type': log_driver,
'Config': log_opt}. So, we need to construct this argument to be passed to
the API from those two arguments.
'''
log_driver = kwargs.pop('log_driver', helpers.NOTSET)
log_opt = kwargs.pop('log_opt', helpers.NOTSET)
if 'log_config' not in kwargs:
if log_driver is not helpers.NOTSET \
or log_opt is not helpers.NOTSET:
kwargs['log_config'] = {
'Type': log_driver
if log_driver is not helpers.NOTSET
else 'none',
'Config': log_opt
if log_opt is not helpers.NOTSET
else {}
} | def function[_merge_keys, parameter[kwargs]]:
constant[
The log_config is a mixture of the CLI options --log-driver and --log-opt
(which we support in Salt as log_driver and log_opt, respectively), but it
must be submitted to the host config in the format {'Type': log_driver,
'Config': log_opt}. So, we need to construct this argument to be passed to
the API from those two arguments.
]
variable[log_driver] assign[=] call[name[kwargs].pop, parameter[constant[log_driver], name[helpers].NOTSET]]
variable[log_opt] assign[=] call[name[kwargs].pop, parameter[constant[log_opt], name[helpers].NOTSET]]
if compare[constant[log_config] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
if <ast.BoolOp object at 0x7da2054a7850> begin[:]
call[name[kwargs]][constant[log_config]] assign[=] dictionary[[<ast.Constant object at 0x7da18dc06380>, <ast.Constant object at 0x7da18dc07a60>], [<ast.IfExp object at 0x7da18dc070a0>, <ast.IfExp object at 0x7da18dc04df0>]] | keyword[def] identifier[_merge_keys] ( identifier[kwargs] ):
literal[string]
identifier[log_driver] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[helpers] . identifier[NOTSET] )
identifier[log_opt] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[helpers] . identifier[NOTSET] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
keyword[if] identifier[log_driver] keyword[is] keyword[not] identifier[helpers] . identifier[NOTSET] keyword[or] identifier[log_opt] keyword[is] keyword[not] identifier[helpers] . identifier[NOTSET] :
identifier[kwargs] [ literal[string] ]={
literal[string] : identifier[log_driver]
keyword[if] identifier[log_driver] keyword[is] keyword[not] identifier[helpers] . identifier[NOTSET]
keyword[else] literal[string] ,
literal[string] : identifier[log_opt]
keyword[if] identifier[log_opt] keyword[is] keyword[not] identifier[helpers] . identifier[NOTSET]
keyword[else] {}
} | def _merge_keys(kwargs):
"""
The log_config is a mixture of the CLI options --log-driver and --log-opt
(which we support in Salt as log_driver and log_opt, respectively), but it
must be submitted to the host config in the format {'Type': log_driver,
'Config': log_opt}. So, we need to construct this argument to be passed to
the API from those two arguments.
"""
log_driver = kwargs.pop('log_driver', helpers.NOTSET)
log_opt = kwargs.pop('log_opt', helpers.NOTSET)
if 'log_config' not in kwargs:
if log_driver is not helpers.NOTSET or log_opt is not helpers.NOTSET:
kwargs['log_config'] = {'Type': log_driver if log_driver is not helpers.NOTSET else 'none', 'Config': log_opt if log_opt is not helpers.NOTSET else {}} # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['kwargs']] |
def load(self, *rules):
"""
Load rules from a Rule module, class or instance
:param rules:
:type rules:
:return:
:rtype:
"""
for rule in rules:
if inspect.ismodule(rule):
self.load_module(rule)
elif inspect.isclass(rule):
self.load_class(rule)
else:
self.append(rule) | def function[load, parameter[self]]:
constant[
Load rules from a Rule module, class or instance
:param rules:
:type rules:
:return:
:rtype:
]
for taget[name[rule]] in starred[name[rules]] begin[:]
if call[name[inspect].ismodule, parameter[name[rule]]] begin[:]
call[name[self].load_module, parameter[name[rule]]] | keyword[def] identifier[load] ( identifier[self] ,* identifier[rules] ):
literal[string]
keyword[for] identifier[rule] keyword[in] identifier[rules] :
keyword[if] identifier[inspect] . identifier[ismodule] ( identifier[rule] ):
identifier[self] . identifier[load_module] ( identifier[rule] )
keyword[elif] identifier[inspect] . identifier[isclass] ( identifier[rule] ):
identifier[self] . identifier[load_class] ( identifier[rule] )
keyword[else] :
identifier[self] . identifier[append] ( identifier[rule] ) | def load(self, *rules):
"""
Load rules from a Rule module, class or instance
:param rules:
:type rules:
:return:
:rtype:
"""
for rule in rules:
if inspect.ismodule(rule):
self.load_module(rule) # depends on [control=['if'], data=[]]
elif inspect.isclass(rule):
self.load_class(rule) # depends on [control=['if'], data=[]]
else:
self.append(rule) # depends on [control=['for'], data=['rule']] |
def normalize_options(options):
"""
Turns a mapping of 'option: arg' to a list and prefix the options.
arg can be a list of arguments.
for example:
dict = {
o1: a1,
o2: ,
o3: [a31, a32]
o4: []
}
will be transformed to:
[
prefix_option(o1), a1, prefix_option(o2),
prefix_option(o3), a31, prefix_option(o3), a32
prefix_option(o4)
]
note that empty arguments are omitted
Args:
options (dict): A mapping between options and arguments
Returns:
lst: A normalized version of 'options' as mentioned above
"""
normalized_options = []
def _add(option, arg=None):
normalized_options.append(option)
arg and normalized_options.append(arg)
for option, arg in options.viewitems():
prefixed_option = Build.prefix_option(option)
if isinstance(arg, list) and arg:
for a in arg:
_add(prefixed_option, a)
else:
_add(prefixed_option, arg)
return normalized_options | def function[normalize_options, parameter[options]]:
constant[
Turns a mapping of 'option: arg' to a list and prefix the options.
arg can be a list of arguments.
for example:
dict = {
o1: a1,
o2: ,
o3: [a31, a32]
o4: []
}
will be transformed to:
[
prefix_option(o1), a1, prefix_option(o2),
prefix_option(o3), a31, prefix_option(o3), a32
prefix_option(o4)
]
note that empty arguments are omitted
Args:
options (dict): A mapping between options and arguments
Returns:
lst: A normalized version of 'options' as mentioned above
]
variable[normalized_options] assign[=] list[[]]
def function[_add, parameter[option, arg]]:
call[name[normalized_options].append, parameter[name[option]]]
<ast.BoolOp object at 0x7da1b2347a90>
for taget[tuple[[<ast.Name object at 0x7da1b2347c10>, <ast.Name object at 0x7da1b2346b00>]]] in starred[call[name[options].viewitems, parameter[]]] begin[:]
variable[prefixed_option] assign[=] call[name[Build].prefix_option, parameter[name[option]]]
if <ast.BoolOp object at 0x7da1b2345180> begin[:]
for taget[name[a]] in starred[name[arg]] begin[:]
call[name[_add], parameter[name[prefixed_option], name[a]]]
return[name[normalized_options]] | keyword[def] identifier[normalize_options] ( identifier[options] ):
literal[string]
identifier[normalized_options] =[]
keyword[def] identifier[_add] ( identifier[option] , identifier[arg] = keyword[None] ):
identifier[normalized_options] . identifier[append] ( identifier[option] )
identifier[arg] keyword[and] identifier[normalized_options] . identifier[append] ( identifier[arg] )
keyword[for] identifier[option] , identifier[arg] keyword[in] identifier[options] . identifier[viewitems] ():
identifier[prefixed_option] = identifier[Build] . identifier[prefix_option] ( identifier[option] )
keyword[if] identifier[isinstance] ( identifier[arg] , identifier[list] ) keyword[and] identifier[arg] :
keyword[for] identifier[a] keyword[in] identifier[arg] :
identifier[_add] ( identifier[prefixed_option] , identifier[a] )
keyword[else] :
identifier[_add] ( identifier[prefixed_option] , identifier[arg] )
keyword[return] identifier[normalized_options] | def normalize_options(options):
"""
Turns a mapping of 'option: arg' to a list and prefix the options.
arg can be a list of arguments.
for example:
dict = {
o1: a1,
o2: ,
o3: [a31, a32]
o4: []
}
will be transformed to:
[
prefix_option(o1), a1, prefix_option(o2),
prefix_option(o3), a31, prefix_option(o3), a32
prefix_option(o4)
]
note that empty arguments are omitted
Args:
options (dict): A mapping between options and arguments
Returns:
lst: A normalized version of 'options' as mentioned above
"""
normalized_options = []
def _add(option, arg=None):
normalized_options.append(option)
arg and normalized_options.append(arg)
for (option, arg) in options.viewitems():
prefixed_option = Build.prefix_option(option)
if isinstance(arg, list) and arg:
for a in arg:
_add(prefixed_option, a) # depends on [control=['for'], data=['a']] # depends on [control=['if'], data=[]]
else:
_add(prefixed_option, arg) # depends on [control=['for'], data=[]]
return normalized_options |
def build(client, repository_tag, docker_file, tag=None, use_cache=False):
"""
Build a docker image
"""
if not isinstance(client, docker.Client):
raise TypeError("client needs to be of type docker.Client.")
if not isinstance(docker_file, six.string_types) or not os.path.exists(docker_file):
# TODO: need to add path stuff for git and http etc.
raise Exception("docker file path doesn't exist: {0}".format(docker_file))
if not isinstance(repository_tag, six.string_types):
raise TypeError('repository must be a string')
if not tag:
tag = 'latest'
if not isinstance(use_cache, bool):
raise TypeError("use_cache must be a bool. {0} was passed.".format(use_cache))
no_cache = not use_cache
if ':' not in repository_tag:
repository_tag = "{0}:{1}".format(repository_tag, tag)
file_obj = None
try:
if os.path.isfile(docker_file):
path = os.getcwd()
docker_file = "./{0}".format(os.path.relpath(docker_file))
# TODO: support using file_obj in the future. Needed for post pre hooks and the injector.
# with open(docker_file) as Dockerfile:
# testing = Dockerfile.read()
# file_obj = BytesIO(testing.encode('utf-8'))
response = client.build(
path=path,
nocache=no_cache,
# custom_context=True,
dockerfile=docker_file,
# fileobj=file_obj,
tag=repository_tag,
rm=True,
stream=True
)
else:
response = client.build(path=docker_file, tag=repository_tag, rm=True, nocache=no_cache, stream=True)
except Exception as e:
raise e
finally:
if file_obj:
file_obj.close()
parse_stream(response)
client.close()
return Image(client, repository_tag) | def function[build, parameter[client, repository_tag, docker_file, tag, use_cache]]:
constant[
Build a docker image
]
if <ast.UnaryOp object at 0x7da18ede4ac0> begin[:]
<ast.Raise object at 0x7da18ede4e80>
if <ast.BoolOp object at 0x7da18ede7c40> begin[:]
<ast.Raise object at 0x7da18ede7f70>
if <ast.UnaryOp object at 0x7da18ede5a80> begin[:]
<ast.Raise object at 0x7da18ede4dc0>
if <ast.UnaryOp object at 0x7da18ede7520> begin[:]
variable[tag] assign[=] constant[latest]
if <ast.UnaryOp object at 0x7da18ede5ba0> begin[:]
<ast.Raise object at 0x7da18ede6620>
variable[no_cache] assign[=] <ast.UnaryOp object at 0x7da18ede7a00>
if compare[constant[:] <ast.NotIn object at 0x7da2590d7190> name[repository_tag]] begin[:]
variable[repository_tag] assign[=] call[constant[{0}:{1}].format, parameter[name[repository_tag], name[tag]]]
variable[file_obj] assign[=] constant[None]
<ast.Try object at 0x7da18f58d450>
call[name[parse_stream], parameter[name[response]]]
call[name[client].close, parameter[]]
return[call[name[Image], parameter[name[client], name[repository_tag]]]] | keyword[def] identifier[build] ( identifier[client] , identifier[repository_tag] , identifier[docker_file] , identifier[tag] = keyword[None] , identifier[use_cache] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[client] , identifier[docker] . identifier[Client] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[docker_file] , identifier[six] . identifier[string_types] ) keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[docker_file] ):
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[docker_file] ))
keyword[if] keyword[not] identifier[isinstance] ( identifier[repository_tag] , identifier[six] . identifier[string_types] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[tag] :
identifier[tag] = literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[use_cache] , identifier[bool] ):
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[use_cache] ))
identifier[no_cache] = keyword[not] identifier[use_cache]
keyword[if] literal[string] keyword[not] keyword[in] identifier[repository_tag] :
identifier[repository_tag] = literal[string] . identifier[format] ( identifier[repository_tag] , identifier[tag] )
identifier[file_obj] = keyword[None]
keyword[try] :
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[docker_file] ):
identifier[path] = identifier[os] . identifier[getcwd] ()
identifier[docker_file] = literal[string] . identifier[format] ( identifier[os] . identifier[path] . identifier[relpath] ( identifier[docker_file] ))
identifier[response] = identifier[client] . identifier[build] (
identifier[path] = identifier[path] ,
identifier[nocache] = identifier[no_cache] ,
identifier[dockerfile] = identifier[docker_file] ,
identifier[tag] = identifier[repository_tag] ,
identifier[rm] = keyword[True] ,
identifier[stream] = keyword[True]
)
keyword[else] :
identifier[response] = identifier[client] . identifier[build] ( identifier[path] = identifier[docker_file] , identifier[tag] = identifier[repository_tag] , identifier[rm] = keyword[True] , identifier[nocache] = identifier[no_cache] , identifier[stream] = keyword[True] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[raise] identifier[e]
keyword[finally] :
keyword[if] identifier[file_obj] :
identifier[file_obj] . identifier[close] ()
identifier[parse_stream] ( identifier[response] )
identifier[client] . identifier[close] ()
keyword[return] identifier[Image] ( identifier[client] , identifier[repository_tag] ) | def build(client, repository_tag, docker_file, tag=None, use_cache=False):
"""
Build a docker image
"""
if not isinstance(client, docker.Client):
raise TypeError('client needs to be of type docker.Client.') # depends on [control=['if'], data=[]]
if not isinstance(docker_file, six.string_types) or not os.path.exists(docker_file):
# TODO: need to add path stuff for git and http etc.
raise Exception("docker file path doesn't exist: {0}".format(docker_file)) # depends on [control=['if'], data=[]]
if not isinstance(repository_tag, six.string_types):
raise TypeError('repository must be a string') # depends on [control=['if'], data=[]]
if not tag:
tag = 'latest' # depends on [control=['if'], data=[]]
if not isinstance(use_cache, bool):
raise TypeError('use_cache must be a bool. {0} was passed.'.format(use_cache)) # depends on [control=['if'], data=[]]
no_cache = not use_cache
if ':' not in repository_tag:
repository_tag = '{0}:{1}'.format(repository_tag, tag) # depends on [control=['if'], data=['repository_tag']]
file_obj = None
try:
if os.path.isfile(docker_file):
path = os.getcwd()
docker_file = './{0}'.format(os.path.relpath(docker_file))
# TODO: support using file_obj in the future. Needed for post pre hooks and the injector.
# with open(docker_file) as Dockerfile:
# testing = Dockerfile.read()
# file_obj = BytesIO(testing.encode('utf-8'))
# custom_context=True,
# fileobj=file_obj,
response = client.build(path=path, nocache=no_cache, dockerfile=docker_file, tag=repository_tag, rm=True, stream=True) # depends on [control=['if'], data=[]]
else:
response = client.build(path=docker_file, tag=repository_tag, rm=True, nocache=no_cache, stream=True) # depends on [control=['try'], data=[]]
except Exception as e:
raise e # depends on [control=['except'], data=['e']]
finally:
if file_obj:
file_obj.close() # depends on [control=['if'], data=[]]
parse_stream(response)
client.close()
return Image(client, repository_tag) |
def install_python(home_dir, lib_dir, inc_dir, bin_dir, site_packages, clear, symlink=True):
"""Install just the base environment, no distutils patches etc"""
if sys.executable.startswith(bin_dir):
print('Please use the *system* python to run this script')
return
if clear:
rmtree(lib_dir)
## FIXME: why not delete it?
## Maybe it should delete everything with #!/path/to/venv/python in it
logger.notify('Not deleting %s', bin_dir)
if hasattr(sys, 'real_prefix'):
logger.notify('Using real prefix %r' % sys.real_prefix)
prefix = sys.real_prefix
elif hasattr(sys, 'base_prefix'):
logger.notify('Using base prefix %r' % sys.base_prefix)
prefix = sys.base_prefix
else:
prefix = sys.prefix
mkdir(lib_dir)
fix_lib64(lib_dir, symlink)
stdlib_dirs = [os.path.dirname(os.__file__)]
if is_win:
stdlib_dirs.append(join(os.path.dirname(stdlib_dirs[0]), 'DLLs'))
elif is_darwin:
stdlib_dirs.append(join(stdlib_dirs[0], 'site-packages'))
if hasattr(os, 'symlink'):
logger.info('Symlinking Python bootstrap modules')
else:
logger.info('Copying Python bootstrap modules')
logger.indent += 2
try:
# copy required files...
for stdlib_dir in stdlib_dirs:
if not os.path.isdir(stdlib_dir):
continue
for fn in os.listdir(stdlib_dir):
bn = os.path.splitext(fn)[0]
if fn != 'site-packages' and bn in REQUIRED_FILES:
copyfile(join(stdlib_dir, fn), join(lib_dir, fn), symlink)
# ...and modules
copy_required_modules(home_dir, symlink)
finally:
logger.indent -= 2
# ...copy tcl/tk
if is_win:
copy_tcltk(prefix, home_dir, symlink)
mkdir(join(lib_dir, 'site-packages'))
import site
site_filename = site.__file__
if site_filename.endswith('.pyc') or site_filename.endswith('.pyo'):
site_filename = site_filename[:-1]
elif site_filename.endswith('$py.class'):
site_filename = site_filename.replace('$py.class', '.py')
site_filename_dst = change_prefix(site_filename, home_dir)
site_dir = os.path.dirname(site_filename_dst)
writefile(site_filename_dst, SITE_PY)
writefile(join(site_dir, 'orig-prefix.txt'), prefix)
site_packages_filename = join(site_dir, 'no-global-site-packages.txt')
if not site_packages:
writefile(site_packages_filename, '')
if is_pypy or is_win:
stdinc_dir = join(prefix, 'include')
else:
stdinc_dir = join(prefix, 'include', py_version + abiflags)
if os.path.exists(stdinc_dir):
copyfile(stdinc_dir, inc_dir, symlink)
else:
logger.debug('No include dir %s' % stdinc_dir)
platinc_dir = distutils.sysconfig.get_python_inc(plat_specific=1)
if platinc_dir != stdinc_dir:
platinc_dest = distutils.sysconfig.get_python_inc(
plat_specific=1, prefix=home_dir)
if platinc_dir == platinc_dest:
# Do platinc_dest manually due to a CPython bug;
# not http://bugs.python.org/issue3386 but a close cousin
platinc_dest = subst_path(platinc_dir, prefix, home_dir)
if platinc_dest:
# PyPy's stdinc_dir and prefix are relative to the original binary
# (traversing virtualenvs), whereas the platinc_dir is relative to
# the inner virtualenv and ignores the prefix argument.
# This seems more evolved than designed.
copyfile(platinc_dir, platinc_dest, symlink)
# pypy never uses exec_prefix, just ignore it
if sys.exec_prefix != prefix and not is_pypy:
if is_win:
exec_dir = join(sys.exec_prefix, 'lib')
elif is_jython:
exec_dir = join(sys.exec_prefix, 'Lib')
else:
exec_dir = join(sys.exec_prefix, 'lib', py_version)
for fn in os.listdir(exec_dir):
copyfile(join(exec_dir, fn), join(lib_dir, fn), symlink)
if is_jython:
# Jython has either jython-dev.jar and javalib/ dir, or just
# jython.jar
for name in 'jython-dev.jar', 'javalib', 'jython.jar':
src = join(prefix, name)
if os.path.exists(src):
copyfile(src, join(home_dir, name), symlink)
# XXX: registry should always exist after Jython 2.5rc1
src = join(prefix, 'registry')
if os.path.exists(src):
copyfile(src, join(home_dir, 'registry'), symlink=False)
copyfile(join(prefix, 'cachedir'), join(home_dir, 'cachedir'),
symlink=False)
mkdir(bin_dir)
py_executable = join(bin_dir, os.path.basename(sys.executable))
if 'Python.framework' in prefix:
# OS X framework builds cause validation to break
# https://github.com/pypa/virtualenv/issues/322
if os.environ.get('__PYVENV_LAUNCHER__'):
del os.environ["__PYVENV_LAUNCHER__"]
if re.search(r'/Python(?:-32|-64)*$', py_executable):
# The name of the python executable is not quite what
# we want, rename it.
py_executable = os.path.join(
os.path.dirname(py_executable), 'python')
logger.notify('New %s executable in %s', expected_exe, py_executable)
pcbuild_dir = os.path.dirname(sys.executable)
pyd_pth = os.path.join(lib_dir, 'site-packages', 'virtualenv_builddir_pyd.pth')
if is_win and os.path.exists(os.path.join(pcbuild_dir, 'build.bat')):
logger.notify('Detected python running from build directory %s', pcbuild_dir)
logger.notify('Writing .pth file linking to build directory for *.pyd files')
writefile(pyd_pth, pcbuild_dir)
else:
pcbuild_dir = None
if os.path.exists(pyd_pth):
logger.info('Deleting %s (not Windows env or not build directory python)' % pyd_pth)
os.unlink(pyd_pth)
if sys.executable != py_executable:
## FIXME: could I just hard link?
executable = sys.executable
shutil.copyfile(executable, py_executable)
make_exe(py_executable)
if is_win or is_cygwin:
pythonw = os.path.join(os.path.dirname(sys.executable), 'pythonw.exe')
if os.path.exists(pythonw):
logger.info('Also created pythonw.exe')
shutil.copyfile(pythonw, os.path.join(os.path.dirname(py_executable), 'pythonw.exe'))
python_d = os.path.join(os.path.dirname(sys.executable), 'python_d.exe')
python_d_dest = os.path.join(os.path.dirname(py_executable), 'python_d.exe')
if os.path.exists(python_d):
logger.info('Also created python_d.exe')
shutil.copyfile(python_d, python_d_dest)
elif os.path.exists(python_d_dest):
logger.info('Removed python_d.exe as it is no longer at the source')
os.unlink(python_d_dest)
# we need to copy the DLL to enforce that windows will load the correct one.
# may not exist if we are cygwin.
py_executable_dll = 'python%s%s.dll' % (
sys.version_info[0], sys.version_info[1])
py_executable_dll_d = 'python%s%s_d.dll' % (
sys.version_info[0], sys.version_info[1])
pythondll = os.path.join(os.path.dirname(sys.executable), py_executable_dll)
pythondll_d = os.path.join(os.path.dirname(sys.executable), py_executable_dll_d)
pythondll_d_dest = os.path.join(os.path.dirname(py_executable), py_executable_dll_d)
if os.path.exists(pythondll):
logger.info('Also created %s' % py_executable_dll)
shutil.copyfile(pythondll, os.path.join(os.path.dirname(py_executable), py_executable_dll))
if os.path.exists(pythondll_d):
logger.info('Also created %s' % py_executable_dll_d)
shutil.copyfile(pythondll_d, pythondll_d_dest)
elif os.path.exists(pythondll_d_dest):
logger.info('Removed %s as the source does not exist' % pythondll_d_dest)
os.unlink(pythondll_d_dest)
if is_pypy:
# make a symlink python --> pypy-c
python_executable = os.path.join(os.path.dirname(py_executable), 'python')
if sys.platform in ('win32', 'cygwin'):
python_executable += '.exe'
logger.info('Also created executable %s' % python_executable)
copyfile(py_executable, python_executable, symlink)
if is_win:
for name in ['libexpat.dll', 'libpypy.dll', 'libpypy-c.dll',
'libeay32.dll', 'ssleay32.dll', 'sqlite3.dll',
'tcl85.dll', 'tk85.dll']:
src = join(prefix, name)
if os.path.exists(src):
copyfile(src, join(bin_dir, name), symlink)
for d in sys.path:
if d.endswith('lib_pypy'):
break
else:
logger.fatal('Could not find lib_pypy in sys.path')
raise SystemExit(3)
logger.info('Copying lib_pypy')
copyfile(d, os.path.join(home_dir, 'lib_pypy'), symlink)
    # --- Provide the conventionally-named interpreter in bin/ ---
    # If the copied executable's basename is not the expected name (e.g.
    # "python2.7" copied but "python" expected), create a second copy under
    # the expected name -- unless a file by that name already exists.
    if os.path.splitext(os.path.basename(py_executable))[0] != expected_exe:
        secondary_exe = os.path.join(os.path.dirname(py_executable),
                                     expected_exe)
        py_executable_ext = os.path.splitext(py_executable)[1]
        if py_executable_ext.lower() == '.exe':
            # python2.4 gives an extension of '.4' :P
            secondary_exe += py_executable_ext
        if os.path.exists(secondary_exe):
            logger.warn('Not overwriting existing %s script %s (you must use %s)'
                        % (expected_exe, secondary_exe, py_executable))
        else:
            logger.notify('Also creating executable in %s' % secondary_exe)
            shutil.copyfile(sys.executable, secondary_exe)
            make_exe(secondary_exe)

    # --- MacOSX framework builds: use the embedded interpreter ---
    if '.framework' in prefix:
        if 'Python.framework' in prefix:
            logger.debug('MacOSX Python framework detected')
            # Make sure we use the embedded interpreter inside
            # the framework, even if sys.executable points to
            # the stub executable in ${sys.prefix}/bin
            # See http://groups.google.com/group/python-virtualenv/
            # browse_thread/thread/17cab2f85da75951
            original_python = os.path.join(
                prefix, 'Resources/Python.app/Contents/MacOS/Python')
        if 'EPD' in prefix:
            logger.debug('EPD framework detected')
            original_python = os.path.join(prefix, 'bin/python')
        # NOTE(review): if prefix contains '.framework' but matches neither
        # branch above, original_python is presumably unbound here -- this
        # code path appears to assume Python.framework or EPD only.
        shutil.copy(original_python, py_executable)

        # Copy the framework's dylib into the virtual
        # environment
        virtual_lib = os.path.join(home_dir, '.Python')
        if os.path.exists(virtual_lib):
            os.unlink(virtual_lib)
        copyfile(
            os.path.join(prefix, 'Python'),
            virtual_lib,
            symlink)

        # And then change the install_name of the copied python executable
        # so it loads the dylib copied above instead of the framework's.
        try:
            mach_o_change(py_executable,
                          os.path.join(prefix, 'Python'),
                          '@executable_path/../.Python')
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit;
        # kept as-is -- intent here is "fall back on any failure".
        except:
            e = sys.exc_info()[1]
            logger.warn("Could not call mach_o_change: %s. "
                        "Trying to call install_name_tool instead." % e)
            try:
                call_subprocess(
                    ["install_name_tool", "-change",
                     os.path.join(prefix, 'Python'),
                     '@executable_path/../.Python',
                     py_executable])
            # NOTE(review): bare except again; re-raised, so failures still
            # propagate after the fatal log message.
            except:
                logger.fatal("Could not call install_name_tool -- you must "
                             "have Apple's development tools installed")
                raise

    # --- POSIX: make 'python', 'pythonX' and 'pythonX.Y' all resolve ---
    if not is_win:
        # Ensure that 'python', 'pythonX' and 'pythonX.Y' all exist
        py_exe_version_major = 'python%s' % sys.version_info[0]
        py_exe_version_major_minor = 'python%s.%s' % (
            sys.version_info[0], sys.version_info[1])
        py_exe_no_version = 'python'
        required_symlinks = [ py_exe_no_version, py_exe_version_major,
                              py_exe_version_major_minor ]

        py_executable_base = os.path.basename(py_executable)

        if py_executable_base in required_symlinks:
            # Don't try to symlink to yourself.
            required_symlinks.remove(py_executable_base)

        for pth in required_symlinks:
            full_pth = join(bin_dir, pth)
            if os.path.exists(full_pth):
                os.unlink(full_pth)
            if symlink:
                os.symlink(py_executable_base, full_pth)
            else:
                copyfile(py_executable, full_pth, symlink)

    # --- Smoke-test the new interpreter and verify its sys.prefix ---
    if is_win and ' ' in py_executable:
        # There's a bug with subprocess on Windows when using a first
        # argument that has a space in it. Instead we have to quote
        # the value:
        py_executable = '"%s"' % py_executable
    # NOTE: keep this check as one line, cmd.exe doesn't cope with line breaks
    cmd = [py_executable, '-c', 'import sys;out=sys.stdout;'
        'getattr(out, "buffer", out).write(sys.prefix.encode("utf-8"))']
    logger.info('Testing executable with %s %s "%s"' % tuple(cmd))
    try:
        proc = subprocess.Popen(cmd,
                            stdout=subprocess.PIPE)
        proc_stdout, proc_stderr = proc.communicate()
    except OSError:
        e = sys.exc_info()[1]
        if e.errno == errno.EACCES:
            # Exit code 100 marks a non-functional environment (see below).
            logger.fatal('ERROR: The executable %s could not be run: %s' % (py_executable, e))
            sys.exit(100)
        else:
            raise e

    # Normalize both paths so the comparison is case/relative-path safe.
    proc_stdout = proc_stdout.strip().decode("utf-8")
    proc_stdout = os.path.normcase(os.path.abspath(proc_stdout))
    norm_home_dir = os.path.normcase(os.path.abspath(home_dir))
    if hasattr(norm_home_dir, 'decode'):
        # Python 2: home_dir may be bytes; decode with the FS encoding.
        norm_home_dir = norm_home_dir.decode(sys.getfilesystemencoding())
    if proc_stdout != norm_home_dir:
        logger.fatal(
            'ERROR: The executable %s is not functioning' % py_executable)
        logger.fatal(
            'ERROR: It thinks sys.prefix is %r (should be %r)'
            % (proc_stdout, norm_home_dir))
        logger.fatal(
            'ERROR: virtualenv is not compatible with this system or executable')
        if is_win:
            logger.fatal(
                'Note: some Windows users have reported this error when they '
                'installed Python for "Only this user" or have multiple '
                'versions of Python installed. Copying the appropriate '
                'PythonXX.dll to the virtualenv Scripts/ directory may fix '
                'this problem.')
        sys.exit(100)
    else:
        logger.info('Got sys.prefix result: %r' % proc_stdout)

    # Warn if a user-level distutils config could override install paths.
    pydistutils = os.path.expanduser('~/.pydistutils.cfg')
    if os.path.exists(pydistutils):
        logger.notify('Please make sure you remove any previous custom paths from '
                      'your %s file.' % pydistutils)
    ## FIXME: really this should be calculated earlier

    fix_local_scheme(home_dir, symlink)

    # With --system-site-packages, remove the marker file (written earlier)
    # that would otherwise isolate the env from global site-packages.
    if site_packages:
        if os.path.exists(site_packages_filename):
            logger.info('Deleting %s' % site_packages_filename)
            os.unlink(site_packages_filename)

    return py_executable
constant[Install just the base environment, no distutils patches etc]
if call[name[sys].executable.startswith, parameter[name[bin_dir]]] begin[:]
call[name[print], parameter[constant[Please use the *system* python to run this script]]]
return[None]
if name[clear] begin[:]
call[name[rmtree], parameter[name[lib_dir]]]
call[name[logger].notify, parameter[constant[Not deleting %s], name[bin_dir]]]
if call[name[hasattr], parameter[name[sys], constant[real_prefix]]] begin[:]
call[name[logger].notify, parameter[binary_operation[constant[Using real prefix %r] <ast.Mod object at 0x7da2590d6920> name[sys].real_prefix]]]
variable[prefix] assign[=] name[sys].real_prefix
call[name[mkdir], parameter[name[lib_dir]]]
call[name[fix_lib64], parameter[name[lib_dir], name[symlink]]]
variable[stdlib_dirs] assign[=] list[[<ast.Call object at 0x7da1b05877c0>]]
if name[is_win] begin[:]
call[name[stdlib_dirs].append, parameter[call[name[join], parameter[call[name[os].path.dirname, parameter[call[name[stdlib_dirs]][constant[0]]]], constant[DLLs]]]]]
if call[name[hasattr], parameter[name[os], constant[symlink]]] begin[:]
call[name[logger].info, parameter[constant[Symlinking Python bootstrap modules]]]
<ast.AugAssign object at 0x7da1b0586ce0>
<ast.Try object at 0x7da1b0586f80>
if name[is_win] begin[:]
call[name[copy_tcltk], parameter[name[prefix], name[home_dir], name[symlink]]]
call[name[mkdir], parameter[call[name[join], parameter[name[lib_dir], constant[site-packages]]]]]
import module[site]
variable[site_filename] assign[=] name[site].__file__
if <ast.BoolOp object at 0x7da1b0578af0> begin[:]
variable[site_filename] assign[=] call[name[site_filename]][<ast.Slice object at 0x7da1b0578910>]
variable[site_filename_dst] assign[=] call[name[change_prefix], parameter[name[site_filename], name[home_dir]]]
variable[site_dir] assign[=] call[name[os].path.dirname, parameter[name[site_filename_dst]]]
call[name[writefile], parameter[name[site_filename_dst], name[SITE_PY]]]
call[name[writefile], parameter[call[name[join], parameter[name[site_dir], constant[orig-prefix.txt]]], name[prefix]]]
variable[site_packages_filename] assign[=] call[name[join], parameter[name[site_dir], constant[no-global-site-packages.txt]]]
if <ast.UnaryOp object at 0x7da1b0578d90> begin[:]
call[name[writefile], parameter[name[site_packages_filename], constant[]]]
if <ast.BoolOp object at 0x7da1b0579150> begin[:]
variable[stdinc_dir] assign[=] call[name[join], parameter[name[prefix], constant[include]]]
if call[name[os].path.exists, parameter[name[stdinc_dir]]] begin[:]
call[name[copyfile], parameter[name[stdinc_dir], name[inc_dir], name[symlink]]]
variable[platinc_dir] assign[=] call[name[distutils].sysconfig.get_python_inc, parameter[]]
if compare[name[platinc_dir] not_equal[!=] name[stdinc_dir]] begin[:]
variable[platinc_dest] assign[=] call[name[distutils].sysconfig.get_python_inc, parameter[]]
if compare[name[platinc_dir] equal[==] name[platinc_dest]] begin[:]
variable[platinc_dest] assign[=] call[name[subst_path], parameter[name[platinc_dir], name[prefix], name[home_dir]]]
if name[platinc_dest] begin[:]
call[name[copyfile], parameter[name[platinc_dir], name[platinc_dest], name[symlink]]]
if <ast.BoolOp object at 0x7da1b05fe0b0> begin[:]
if name[is_win] begin[:]
variable[exec_dir] assign[=] call[name[join], parameter[name[sys].exec_prefix, constant[lib]]]
for taget[name[fn]] in starred[call[name[os].listdir, parameter[name[exec_dir]]]] begin[:]
call[name[copyfile], parameter[call[name[join], parameter[name[exec_dir], name[fn]]], call[name[join], parameter[name[lib_dir], name[fn]]], name[symlink]]]
if name[is_jython] begin[:]
for taget[name[name]] in starred[tuple[[<ast.Constant object at 0x7da1b05fec80>, <ast.Constant object at 0x7da1b05fece0>, <ast.Constant object at 0x7da1b05fecb0>]]] begin[:]
variable[src] assign[=] call[name[join], parameter[name[prefix], name[name]]]
if call[name[os].path.exists, parameter[name[src]]] begin[:]
call[name[copyfile], parameter[name[src], call[name[join], parameter[name[home_dir], name[name]]], name[symlink]]]
variable[src] assign[=] call[name[join], parameter[name[prefix], constant[registry]]]
if call[name[os].path.exists, parameter[name[src]]] begin[:]
call[name[copyfile], parameter[name[src], call[name[join], parameter[name[home_dir], constant[registry]]]]]
call[name[copyfile], parameter[call[name[join], parameter[name[prefix], constant[cachedir]]], call[name[join], parameter[name[home_dir], constant[cachedir]]]]]
call[name[mkdir], parameter[name[bin_dir]]]
variable[py_executable] assign[=] call[name[join], parameter[name[bin_dir], call[name[os].path.basename, parameter[name[sys].executable]]]]
if compare[constant[Python.framework] in name[prefix]] begin[:]
if call[name[os].environ.get, parameter[constant[__PYVENV_LAUNCHER__]]] begin[:]
<ast.Delete object at 0x7da1b055c1c0>
if call[name[re].search, parameter[constant[/Python(?:-32|-64)*$], name[py_executable]]] begin[:]
variable[py_executable] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[py_executable]]], constant[python]]]
call[name[logger].notify, parameter[constant[New %s executable in %s], name[expected_exe], name[py_executable]]]
variable[pcbuild_dir] assign[=] call[name[os].path.dirname, parameter[name[sys].executable]]
variable[pyd_pth] assign[=] call[name[os].path.join, parameter[name[lib_dir], constant[site-packages], constant[virtualenv_builddir_pyd.pth]]]
if <ast.BoolOp object at 0x7da1b055d5a0> begin[:]
call[name[logger].notify, parameter[constant[Detected python running from build directory %s], name[pcbuild_dir]]]
call[name[logger].notify, parameter[constant[Writing .pth file linking to build directory for *.pyd files]]]
call[name[writefile], parameter[name[pyd_pth], name[pcbuild_dir]]]
if compare[name[sys].executable not_equal[!=] name[py_executable]] begin[:]
variable[executable] assign[=] name[sys].executable
call[name[shutil].copyfile, parameter[name[executable], name[py_executable]]]
call[name[make_exe], parameter[name[py_executable]]]
if <ast.BoolOp object at 0x7da1b055fa30> begin[:]
variable[pythonw] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[sys].executable]], constant[pythonw.exe]]]
if call[name[os].path.exists, parameter[name[pythonw]]] begin[:]
call[name[logger].info, parameter[constant[Also created pythonw.exe]]]
call[name[shutil].copyfile, parameter[name[pythonw], call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[py_executable]]], constant[pythonw.exe]]]]]
variable[python_d] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[sys].executable]], constant[python_d.exe]]]
variable[python_d_dest] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[py_executable]]], constant[python_d.exe]]]
if call[name[os].path.exists, parameter[name[python_d]]] begin[:]
call[name[logger].info, parameter[constant[Also created python_d.exe]]]
call[name[shutil].copyfile, parameter[name[python_d], name[python_d_dest]]]
variable[py_executable_dll] assign[=] binary_operation[constant[python%s%s.dll] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b05ae9b0>, <ast.Subscript object at 0x7da1b05ae8f0>]]]
variable[py_executable_dll_d] assign[=] binary_operation[constant[python%s%s_d.dll] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b05ae740>, <ast.Subscript object at 0x7da1b05ae680>]]]
variable[pythondll] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[sys].executable]], name[py_executable_dll]]]
variable[pythondll_d] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[sys].executable]], name[py_executable_dll_d]]]
variable[pythondll_d_dest] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[py_executable]]], name[py_executable_dll_d]]]
if call[name[os].path.exists, parameter[name[pythondll]]] begin[:]
call[name[logger].info, parameter[binary_operation[constant[Also created %s] <ast.Mod object at 0x7da2590d6920> name[py_executable_dll]]]]
call[name[shutil].copyfile, parameter[name[pythondll], call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[py_executable]]], name[py_executable_dll]]]]]
if call[name[os].path.exists, parameter[name[pythondll_d]]] begin[:]
call[name[logger].info, parameter[binary_operation[constant[Also created %s] <ast.Mod object at 0x7da2590d6920> name[py_executable_dll_d]]]]
call[name[shutil].copyfile, parameter[name[pythondll_d], name[pythondll_d_dest]]]
if name[is_pypy] begin[:]
variable[python_executable] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[py_executable]]], constant[python]]]
if compare[name[sys].platform in tuple[[<ast.Constant object at 0x7da1b05acd90>, <ast.Constant object at 0x7da1b05acd60>]]] begin[:]
<ast.AugAssign object at 0x7da1b05acd30>
call[name[logger].info, parameter[binary_operation[constant[Also created executable %s] <ast.Mod object at 0x7da2590d6920> name[python_executable]]]]
call[name[copyfile], parameter[name[py_executable], name[python_executable], name[symlink]]]
if name[is_win] begin[:]
for taget[name[name]] in starred[list[[<ast.Constant object at 0x7da1b05ac8e0>, <ast.Constant object at 0x7da1b05ac8b0>, <ast.Constant object at 0x7da1b05ac880>, <ast.Constant object at 0x7da1b05ac850>, <ast.Constant object at 0x7da1b05ac820>, <ast.Constant object at 0x7da1b05ac7f0>, <ast.Constant object at 0x7da1b05ac7c0>, <ast.Constant object at 0x7da1b05ac790>]]] begin[:]
variable[src] assign[=] call[name[join], parameter[name[prefix], name[name]]]
if call[name[os].path.exists, parameter[name[src]]] begin[:]
call[name[copyfile], parameter[name[src], call[name[join], parameter[name[bin_dir], name[name]]], name[symlink]]]
for taget[name[d]] in starred[name[sys].path] begin[:]
if call[name[d].endswith, parameter[constant[lib_pypy]]] begin[:]
break
call[name[logger].info, parameter[constant[Copying lib_pypy]]]
call[name[copyfile], parameter[name[d], call[name[os].path.join, parameter[name[home_dir], constant[lib_pypy]]], name[symlink]]]
if compare[call[call[name[os].path.splitext, parameter[call[name[os].path.basename, parameter[name[py_executable]]]]]][constant[0]] not_equal[!=] name[expected_exe]] begin[:]
variable[secondary_exe] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[py_executable]]], name[expected_exe]]]
variable[py_executable_ext] assign[=] call[call[name[os].path.splitext, parameter[name[py_executable]]]][constant[1]]
if compare[call[name[py_executable_ext].lower, parameter[]] equal[==] constant[.exe]] begin[:]
<ast.AugAssign object at 0x7da1b05499f0>
if call[name[os].path.exists, parameter[name[secondary_exe]]] begin[:]
call[name[logger].warn, parameter[binary_operation[constant[Not overwriting existing %s script %s (you must use %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0549060>, <ast.Name object at 0x7da1b0549090>, <ast.Name object at 0x7da1b0548fd0>]]]]]
if compare[constant[.framework] in name[prefix]] begin[:]
if compare[constant[Python.framework] in name[prefix]] begin[:]
call[name[logger].debug, parameter[constant[MacOSX Python framework detected]]]
variable[original_python] assign[=] call[name[os].path.join, parameter[name[prefix], constant[Resources/Python.app/Contents/MacOS/Python]]]
if compare[constant[EPD] in name[prefix]] begin[:]
call[name[logger].debug, parameter[constant[EPD framework detected]]]
variable[original_python] assign[=] call[name[os].path.join, parameter[name[prefix], constant[bin/python]]]
call[name[shutil].copy, parameter[name[original_python], name[py_executable]]]
variable[virtual_lib] assign[=] call[name[os].path.join, parameter[name[home_dir], constant[.Python]]]
if call[name[os].path.exists, parameter[name[virtual_lib]]] begin[:]
call[name[os].unlink, parameter[name[virtual_lib]]]
call[name[copyfile], parameter[call[name[os].path.join, parameter[name[prefix], constant[Python]]], name[virtual_lib], name[symlink]]]
<ast.Try object at 0x7da1b05481c0>
if <ast.UnaryOp object at 0x7da1b05b7b20> begin[:]
variable[py_exe_version_major] assign[=] binary_operation[constant[python%s] <ast.Mod object at 0x7da2590d6920> call[name[sys].version_info][constant[0]]]
variable[py_exe_version_major_minor] assign[=] binary_operation[constant[python%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b05b47f0>, <ast.Subscript object at 0x7da1b05b7160>]]]
variable[py_exe_no_version] assign[=] constant[python]
variable[required_symlinks] assign[=] list[[<ast.Name object at 0x7da1b05b5b40>, <ast.Name object at 0x7da1b05b5d80>, <ast.Name object at 0x7da1b05b7dc0>]]
variable[py_executable_base] assign[=] call[name[os].path.basename, parameter[name[py_executable]]]
if compare[name[py_executable_base] in name[required_symlinks]] begin[:]
call[name[required_symlinks].remove, parameter[name[py_executable_base]]]
for taget[name[pth]] in starred[name[required_symlinks]] begin[:]
variable[full_pth] assign[=] call[name[join], parameter[name[bin_dir], name[pth]]]
if call[name[os].path.exists, parameter[name[full_pth]]] begin[:]
call[name[os].unlink, parameter[name[full_pth]]]
if name[symlink] begin[:]
call[name[os].symlink, parameter[name[py_executable_base], name[full_pth]]]
if <ast.BoolOp object at 0x7da1b05b5e40> begin[:]
variable[py_executable] assign[=] binary_operation[constant["%s"] <ast.Mod object at 0x7da2590d6920> name[py_executable]]
variable[cmd] assign[=] list[[<ast.Name object at 0x7da1b05b7a60>, <ast.Constant object at 0x7da1b05b5330>, <ast.Constant object at 0x7da1b05b6b00>]]
call[name[logger].info, parameter[binary_operation[constant[Testing executable with %s %s "%s"] <ast.Mod object at 0x7da2590d6920> call[name[tuple], parameter[name[cmd]]]]]]
<ast.Try object at 0x7da1b05b7310>
variable[proc_stdout] assign[=] call[call[name[proc_stdout].strip, parameter[]].decode, parameter[constant[utf-8]]]
variable[proc_stdout] assign[=] call[name[os].path.normcase, parameter[call[name[os].path.abspath, parameter[name[proc_stdout]]]]]
variable[norm_home_dir] assign[=] call[name[os].path.normcase, parameter[call[name[os].path.abspath, parameter[name[home_dir]]]]]
if call[name[hasattr], parameter[name[norm_home_dir], constant[decode]]] begin[:]
variable[norm_home_dir] assign[=] call[name[norm_home_dir].decode, parameter[call[name[sys].getfilesystemencoding, parameter[]]]]
if compare[name[proc_stdout] not_equal[!=] name[norm_home_dir]] begin[:]
call[name[logger].fatal, parameter[binary_operation[constant[ERROR: The executable %s is not functioning] <ast.Mod object at 0x7da2590d6920> name[py_executable]]]]
call[name[logger].fatal, parameter[binary_operation[constant[ERROR: It thinks sys.prefix is %r (should be %r)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b05e3880>, <ast.Name object at 0x7da1b05e3d00>]]]]]
call[name[logger].fatal, parameter[constant[ERROR: virtualenv is not compatible with this system or executable]]]
if name[is_win] begin[:]
call[name[logger].fatal, parameter[constant[Note: some Windows users have reported this error when they installed Python for "Only this user" or have multiple versions of Python installed. Copying the appropriate PythonXX.dll to the virtualenv Scripts/ directory may fix this problem.]]]
call[name[sys].exit, parameter[constant[100]]]
variable[pydistutils] assign[=] call[name[os].path.expanduser, parameter[constant[~/.pydistutils.cfg]]]
if call[name[os].path.exists, parameter[name[pydistutils]]] begin[:]
call[name[logger].notify, parameter[binary_operation[constant[Please make sure you remove any previous custom paths from your %s file.] <ast.Mod object at 0x7da2590d6920> name[pydistutils]]]]
call[name[fix_local_scheme], parameter[name[home_dir], name[symlink]]]
if name[site_packages] begin[:]
if call[name[os].path.exists, parameter[name[site_packages_filename]]] begin[:]
call[name[logger].info, parameter[binary_operation[constant[Deleting %s] <ast.Mod object at 0x7da2590d6920> name[site_packages_filename]]]]
call[name[os].unlink, parameter[name[site_packages_filename]]]
return[name[py_executable]] | keyword[def] identifier[install_python] ( identifier[home_dir] , identifier[lib_dir] , identifier[inc_dir] , identifier[bin_dir] , identifier[site_packages] , identifier[clear] , identifier[symlink] = keyword[True] ):
literal[string]
keyword[if] identifier[sys] . identifier[executable] . identifier[startswith] ( identifier[bin_dir] ):
identifier[print] ( literal[string] )
keyword[return]
keyword[if] identifier[clear] :
identifier[rmtree] ( identifier[lib_dir] )
identifier[logger] . identifier[notify] ( literal[string] , identifier[bin_dir] )
keyword[if] identifier[hasattr] ( identifier[sys] , literal[string] ):
identifier[logger] . identifier[notify] ( literal[string] % identifier[sys] . identifier[real_prefix] )
identifier[prefix] = identifier[sys] . identifier[real_prefix]
keyword[elif] identifier[hasattr] ( identifier[sys] , literal[string] ):
identifier[logger] . identifier[notify] ( literal[string] % identifier[sys] . identifier[base_prefix] )
identifier[prefix] = identifier[sys] . identifier[base_prefix]
keyword[else] :
identifier[prefix] = identifier[sys] . identifier[prefix]
identifier[mkdir] ( identifier[lib_dir] )
identifier[fix_lib64] ( identifier[lib_dir] , identifier[symlink] )
identifier[stdlib_dirs] =[ identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[__file__] )]
keyword[if] identifier[is_win] :
identifier[stdlib_dirs] . identifier[append] ( identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[stdlib_dirs] [ literal[int] ]), literal[string] ))
keyword[elif] identifier[is_darwin] :
identifier[stdlib_dirs] . identifier[append] ( identifier[join] ( identifier[stdlib_dirs] [ literal[int] ], literal[string] ))
keyword[if] identifier[hasattr] ( identifier[os] , literal[string] ):
identifier[logger] . identifier[info] ( literal[string] )
keyword[else] :
identifier[logger] . identifier[info] ( literal[string] )
identifier[logger] . identifier[indent] += literal[int]
keyword[try] :
keyword[for] identifier[stdlib_dir] keyword[in] identifier[stdlib_dirs] :
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[stdlib_dir] ):
keyword[continue]
keyword[for] identifier[fn] keyword[in] identifier[os] . identifier[listdir] ( identifier[stdlib_dir] ):
identifier[bn] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[fn] )[ literal[int] ]
keyword[if] identifier[fn] != literal[string] keyword[and] identifier[bn] keyword[in] identifier[REQUIRED_FILES] :
identifier[copyfile] ( identifier[join] ( identifier[stdlib_dir] , identifier[fn] ), identifier[join] ( identifier[lib_dir] , identifier[fn] ), identifier[symlink] )
identifier[copy_required_modules] ( identifier[home_dir] , identifier[symlink] )
keyword[finally] :
identifier[logger] . identifier[indent] -= literal[int]
keyword[if] identifier[is_win] :
identifier[copy_tcltk] ( identifier[prefix] , identifier[home_dir] , identifier[symlink] )
identifier[mkdir] ( identifier[join] ( identifier[lib_dir] , literal[string] ))
keyword[import] identifier[site]
identifier[site_filename] = identifier[site] . identifier[__file__]
keyword[if] identifier[site_filename] . identifier[endswith] ( literal[string] ) keyword[or] identifier[site_filename] . identifier[endswith] ( literal[string] ):
identifier[site_filename] = identifier[site_filename] [:- literal[int] ]
keyword[elif] identifier[site_filename] . identifier[endswith] ( literal[string] ):
identifier[site_filename] = identifier[site_filename] . identifier[replace] ( literal[string] , literal[string] )
identifier[site_filename_dst] = identifier[change_prefix] ( identifier[site_filename] , identifier[home_dir] )
identifier[site_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[site_filename_dst] )
identifier[writefile] ( identifier[site_filename_dst] , identifier[SITE_PY] )
identifier[writefile] ( identifier[join] ( identifier[site_dir] , literal[string] ), identifier[prefix] )
identifier[site_packages_filename] = identifier[join] ( identifier[site_dir] , literal[string] )
keyword[if] keyword[not] identifier[site_packages] :
identifier[writefile] ( identifier[site_packages_filename] , literal[string] )
keyword[if] identifier[is_pypy] keyword[or] identifier[is_win] :
identifier[stdinc_dir] = identifier[join] ( identifier[prefix] , literal[string] )
keyword[else] :
identifier[stdinc_dir] = identifier[join] ( identifier[prefix] , literal[string] , identifier[py_version] + identifier[abiflags] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[stdinc_dir] ):
identifier[copyfile] ( identifier[stdinc_dir] , identifier[inc_dir] , identifier[symlink] )
keyword[else] :
identifier[logger] . identifier[debug] ( literal[string] % identifier[stdinc_dir] )
identifier[platinc_dir] = identifier[distutils] . identifier[sysconfig] . identifier[get_python_inc] ( identifier[plat_specific] = literal[int] )
keyword[if] identifier[platinc_dir] != identifier[stdinc_dir] :
identifier[platinc_dest] = identifier[distutils] . identifier[sysconfig] . identifier[get_python_inc] (
identifier[plat_specific] = literal[int] , identifier[prefix] = identifier[home_dir] )
keyword[if] identifier[platinc_dir] == identifier[platinc_dest] :
identifier[platinc_dest] = identifier[subst_path] ( identifier[platinc_dir] , identifier[prefix] , identifier[home_dir] )
keyword[if] identifier[platinc_dest] :
identifier[copyfile] ( identifier[platinc_dir] , identifier[platinc_dest] , identifier[symlink] )
keyword[if] identifier[sys] . identifier[exec_prefix] != identifier[prefix] keyword[and] keyword[not] identifier[is_pypy] :
keyword[if] identifier[is_win] :
identifier[exec_dir] = identifier[join] ( identifier[sys] . identifier[exec_prefix] , literal[string] )
keyword[elif] identifier[is_jython] :
identifier[exec_dir] = identifier[join] ( identifier[sys] . identifier[exec_prefix] , literal[string] )
keyword[else] :
identifier[exec_dir] = identifier[join] ( identifier[sys] . identifier[exec_prefix] , literal[string] , identifier[py_version] )
keyword[for] identifier[fn] keyword[in] identifier[os] . identifier[listdir] ( identifier[exec_dir] ):
identifier[copyfile] ( identifier[join] ( identifier[exec_dir] , identifier[fn] ), identifier[join] ( identifier[lib_dir] , identifier[fn] ), identifier[symlink] )
keyword[if] identifier[is_jython] :
keyword[for] identifier[name] keyword[in] literal[string] , literal[string] , literal[string] :
identifier[src] = identifier[join] ( identifier[prefix] , identifier[name] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[src] ):
identifier[copyfile] ( identifier[src] , identifier[join] ( identifier[home_dir] , identifier[name] ), identifier[symlink] )
identifier[src] = identifier[join] ( identifier[prefix] , literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[src] ):
identifier[copyfile] ( identifier[src] , identifier[join] ( identifier[home_dir] , literal[string] ), identifier[symlink] = keyword[False] )
identifier[copyfile] ( identifier[join] ( identifier[prefix] , literal[string] ), identifier[join] ( identifier[home_dir] , literal[string] ),
identifier[symlink] = keyword[False] )
identifier[mkdir] ( identifier[bin_dir] )
identifier[py_executable] = identifier[join] ( identifier[bin_dir] , identifier[os] . identifier[path] . identifier[basename] ( identifier[sys] . identifier[executable] ))
keyword[if] literal[string] keyword[in] identifier[prefix] :
keyword[if] identifier[os] . identifier[environ] . identifier[get] ( literal[string] ):
keyword[del] identifier[os] . identifier[environ] [ literal[string] ]
keyword[if] identifier[re] . identifier[search] ( literal[string] , identifier[py_executable] ):
identifier[py_executable] = identifier[os] . identifier[path] . identifier[join] (
identifier[os] . identifier[path] . identifier[dirname] ( identifier[py_executable] ), literal[string] )
identifier[logger] . identifier[notify] ( literal[string] , identifier[expected_exe] , identifier[py_executable] )
identifier[pcbuild_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[sys] . identifier[executable] )
identifier[pyd_pth] = identifier[os] . identifier[path] . identifier[join] ( identifier[lib_dir] , literal[string] , literal[string] )
keyword[if] identifier[is_win] keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[join] ( identifier[pcbuild_dir] , literal[string] )):
identifier[logger] . identifier[notify] ( literal[string] , identifier[pcbuild_dir] )
identifier[logger] . identifier[notify] ( literal[string] )
identifier[writefile] ( identifier[pyd_pth] , identifier[pcbuild_dir] )
keyword[else] :
identifier[pcbuild_dir] = keyword[None]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[pyd_pth] ):
identifier[logger] . identifier[info] ( literal[string] % identifier[pyd_pth] )
identifier[os] . identifier[unlink] ( identifier[pyd_pth] )
keyword[if] identifier[sys] . identifier[executable] != identifier[py_executable] :
identifier[executable] = identifier[sys] . identifier[executable]
identifier[shutil] . identifier[copyfile] ( identifier[executable] , identifier[py_executable] )
identifier[make_exe] ( identifier[py_executable] )
keyword[if] identifier[is_win] keyword[or] identifier[is_cygwin] :
identifier[pythonw] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[sys] . identifier[executable] ), literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[pythonw] ):
identifier[logger] . identifier[info] ( literal[string] )
identifier[shutil] . identifier[copyfile] ( identifier[pythonw] , identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[py_executable] ), literal[string] ))
identifier[python_d] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[sys] . identifier[executable] ), literal[string] )
identifier[python_d_dest] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[py_executable] ), literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[python_d] ):
identifier[logger] . identifier[info] ( literal[string] )
identifier[shutil] . identifier[copyfile] ( identifier[python_d] , identifier[python_d_dest] )
keyword[elif] identifier[os] . identifier[path] . identifier[exists] ( identifier[python_d_dest] ):
identifier[logger] . identifier[info] ( literal[string] )
identifier[os] . identifier[unlink] ( identifier[python_d_dest] )
identifier[py_executable_dll] = literal[string] %(
identifier[sys] . identifier[version_info] [ literal[int] ], identifier[sys] . identifier[version_info] [ literal[int] ])
identifier[py_executable_dll_d] = literal[string] %(
identifier[sys] . identifier[version_info] [ literal[int] ], identifier[sys] . identifier[version_info] [ literal[int] ])
identifier[pythondll] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[sys] . identifier[executable] ), identifier[py_executable_dll] )
identifier[pythondll_d] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[sys] . identifier[executable] ), identifier[py_executable_dll_d] )
identifier[pythondll_d_dest] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[py_executable] ), identifier[py_executable_dll_d] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[pythondll] ):
identifier[logger] . identifier[info] ( literal[string] % identifier[py_executable_dll] )
identifier[shutil] . identifier[copyfile] ( identifier[pythondll] , identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[py_executable] ), identifier[py_executable_dll] ))
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[pythondll_d] ):
identifier[logger] . identifier[info] ( literal[string] % identifier[py_executable_dll_d] )
identifier[shutil] . identifier[copyfile] ( identifier[pythondll_d] , identifier[pythondll_d_dest] )
keyword[elif] identifier[os] . identifier[path] . identifier[exists] ( identifier[pythondll_d_dest] ):
identifier[logger] . identifier[info] ( literal[string] % identifier[pythondll_d_dest] )
identifier[os] . identifier[unlink] ( identifier[pythondll_d_dest] )
keyword[if] identifier[is_pypy] :
identifier[python_executable] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[py_executable] ), literal[string] )
keyword[if] identifier[sys] . identifier[platform] keyword[in] ( literal[string] , literal[string] ):
identifier[python_executable] += literal[string]
identifier[logger] . identifier[info] ( literal[string] % identifier[python_executable] )
identifier[copyfile] ( identifier[py_executable] , identifier[python_executable] , identifier[symlink] )
keyword[if] identifier[is_win] :
keyword[for] identifier[name] keyword[in] [ literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ]:
identifier[src] = identifier[join] ( identifier[prefix] , identifier[name] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[src] ):
identifier[copyfile] ( identifier[src] , identifier[join] ( identifier[bin_dir] , identifier[name] ), identifier[symlink] )
keyword[for] identifier[d] keyword[in] identifier[sys] . identifier[path] :
keyword[if] identifier[d] . identifier[endswith] ( literal[string] ):
keyword[break]
keyword[else] :
identifier[logger] . identifier[fatal] ( literal[string] )
keyword[raise] identifier[SystemExit] ( literal[int] )
identifier[logger] . identifier[info] ( literal[string] )
identifier[copyfile] ( identifier[d] , identifier[os] . identifier[path] . identifier[join] ( identifier[home_dir] , literal[string] ), identifier[symlink] )
keyword[if] identifier[os] . identifier[path] . identifier[splitext] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[py_executable] ))[ literal[int] ]!= identifier[expected_exe] :
identifier[secondary_exe] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[py_executable] ),
identifier[expected_exe] )
identifier[py_executable_ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[py_executable] )[ literal[int] ]
keyword[if] identifier[py_executable_ext] . identifier[lower] ()== literal[string] :
identifier[secondary_exe] += identifier[py_executable_ext]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[secondary_exe] ):
identifier[logger] . identifier[warn] ( literal[string]
%( identifier[expected_exe] , identifier[secondary_exe] , identifier[py_executable] ))
keyword[else] :
identifier[logger] . identifier[notify] ( literal[string] % identifier[secondary_exe] )
identifier[shutil] . identifier[copyfile] ( identifier[sys] . identifier[executable] , identifier[secondary_exe] )
identifier[make_exe] ( identifier[secondary_exe] )
keyword[if] literal[string] keyword[in] identifier[prefix] :
keyword[if] literal[string] keyword[in] identifier[prefix] :
identifier[logger] . identifier[debug] ( literal[string] )
identifier[original_python] = identifier[os] . identifier[path] . identifier[join] (
identifier[prefix] , literal[string] )
keyword[if] literal[string] keyword[in] identifier[prefix] :
identifier[logger] . identifier[debug] ( literal[string] )
identifier[original_python] = identifier[os] . identifier[path] . identifier[join] ( identifier[prefix] , literal[string] )
identifier[shutil] . identifier[copy] ( identifier[original_python] , identifier[py_executable] )
identifier[virtual_lib] = identifier[os] . identifier[path] . identifier[join] ( identifier[home_dir] , literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[virtual_lib] ):
identifier[os] . identifier[unlink] ( identifier[virtual_lib] )
identifier[copyfile] (
identifier[os] . identifier[path] . identifier[join] ( identifier[prefix] , literal[string] ),
identifier[virtual_lib] ,
identifier[symlink] )
keyword[try] :
identifier[mach_o_change] ( identifier[py_executable] ,
identifier[os] . identifier[path] . identifier[join] ( identifier[prefix] , literal[string] ),
literal[string] )
keyword[except] :
identifier[e] = identifier[sys] . identifier[exc_info] ()[ literal[int] ]
identifier[logger] . identifier[warn] ( literal[string]
literal[string] % identifier[e] )
keyword[try] :
identifier[call_subprocess] (
[ literal[string] , literal[string] ,
identifier[os] . identifier[path] . identifier[join] ( identifier[prefix] , literal[string] ),
literal[string] ,
identifier[py_executable] ])
keyword[except] :
identifier[logger] . identifier[fatal] ( literal[string]
literal[string] )
keyword[raise]
keyword[if] keyword[not] identifier[is_win] :
identifier[py_exe_version_major] = literal[string] % identifier[sys] . identifier[version_info] [ literal[int] ]
identifier[py_exe_version_major_minor] = literal[string] %(
identifier[sys] . identifier[version_info] [ literal[int] ], identifier[sys] . identifier[version_info] [ literal[int] ])
identifier[py_exe_no_version] = literal[string]
identifier[required_symlinks] =[ identifier[py_exe_no_version] , identifier[py_exe_version_major] ,
identifier[py_exe_version_major_minor] ]
identifier[py_executable_base] = identifier[os] . identifier[path] . identifier[basename] ( identifier[py_executable] )
keyword[if] identifier[py_executable_base] keyword[in] identifier[required_symlinks] :
identifier[required_symlinks] . identifier[remove] ( identifier[py_executable_base] )
keyword[for] identifier[pth] keyword[in] identifier[required_symlinks] :
identifier[full_pth] = identifier[join] ( identifier[bin_dir] , identifier[pth] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[full_pth] ):
identifier[os] . identifier[unlink] ( identifier[full_pth] )
keyword[if] identifier[symlink] :
identifier[os] . identifier[symlink] ( identifier[py_executable_base] , identifier[full_pth] )
keyword[else] :
identifier[copyfile] ( identifier[py_executable] , identifier[full_pth] , identifier[symlink] )
keyword[if] identifier[is_win] keyword[and] literal[string] keyword[in] identifier[py_executable] :
identifier[py_executable] = literal[string] % identifier[py_executable]
identifier[cmd] =[ identifier[py_executable] , literal[string] , literal[string]
literal[string] ]
identifier[logger] . identifier[info] ( literal[string] % identifier[tuple] ( identifier[cmd] ))
keyword[try] :
identifier[proc] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] ,
identifier[stdout] = identifier[subprocess] . identifier[PIPE] )
identifier[proc_stdout] , identifier[proc_stderr] = identifier[proc] . identifier[communicate] ()
keyword[except] identifier[OSError] :
identifier[e] = identifier[sys] . identifier[exc_info] ()[ literal[int] ]
keyword[if] identifier[e] . identifier[errno] == identifier[errno] . identifier[EACCES] :
identifier[logger] . identifier[fatal] ( literal[string] %( identifier[py_executable] , identifier[e] ))
identifier[sys] . identifier[exit] ( literal[int] )
keyword[else] :
keyword[raise] identifier[e]
identifier[proc_stdout] = identifier[proc_stdout] . identifier[strip] (). identifier[decode] ( literal[string] )
identifier[proc_stdout] = identifier[os] . identifier[path] . identifier[normcase] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[proc_stdout] ))
identifier[norm_home_dir] = identifier[os] . identifier[path] . identifier[normcase] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[home_dir] ))
keyword[if] identifier[hasattr] ( identifier[norm_home_dir] , literal[string] ):
identifier[norm_home_dir] = identifier[norm_home_dir] . identifier[decode] ( identifier[sys] . identifier[getfilesystemencoding] ())
keyword[if] identifier[proc_stdout] != identifier[norm_home_dir] :
identifier[logger] . identifier[fatal] (
literal[string] % identifier[py_executable] )
identifier[logger] . identifier[fatal] (
literal[string]
%( identifier[proc_stdout] , identifier[norm_home_dir] ))
identifier[logger] . identifier[fatal] (
literal[string] )
keyword[if] identifier[is_win] :
identifier[logger] . identifier[fatal] (
literal[string]
literal[string]
literal[string]
literal[string]
literal[string] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[else] :
identifier[logger] . identifier[info] ( literal[string] % identifier[proc_stdout] )
identifier[pydistutils] = identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[pydistutils] ):
identifier[logger] . identifier[notify] ( literal[string]
literal[string] % identifier[pydistutils] )
identifier[fix_local_scheme] ( identifier[home_dir] , identifier[symlink] )
keyword[if] identifier[site_packages] :
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[site_packages_filename] ):
identifier[logger] . identifier[info] ( literal[string] % identifier[site_packages_filename] )
identifier[os] . identifier[unlink] ( identifier[site_packages_filename] )
keyword[return] identifier[py_executable] | def install_python(home_dir, lib_dir, inc_dir, bin_dir, site_packages, clear, symlink=True):
"""Install just the base environment, no distutils patches etc"""
if sys.executable.startswith(bin_dir):
print('Please use the *system* python to run this script')
return # depends on [control=['if'], data=[]]
if clear:
rmtree(lib_dir)
## FIXME: why not delete it?
## Maybe it should delete everything with #!/path/to/venv/python in it
logger.notify('Not deleting %s', bin_dir) # depends on [control=['if'], data=[]]
if hasattr(sys, 'real_prefix'):
logger.notify('Using real prefix %r' % sys.real_prefix)
prefix = sys.real_prefix # depends on [control=['if'], data=[]]
elif hasattr(sys, 'base_prefix'):
logger.notify('Using base prefix %r' % sys.base_prefix)
prefix = sys.base_prefix # depends on [control=['if'], data=[]]
else:
prefix = sys.prefix
mkdir(lib_dir)
fix_lib64(lib_dir, symlink)
stdlib_dirs = [os.path.dirname(os.__file__)]
if is_win:
stdlib_dirs.append(join(os.path.dirname(stdlib_dirs[0]), 'DLLs')) # depends on [control=['if'], data=[]]
elif is_darwin:
stdlib_dirs.append(join(stdlib_dirs[0], 'site-packages')) # depends on [control=['if'], data=[]]
if hasattr(os, 'symlink'):
logger.info('Symlinking Python bootstrap modules') # depends on [control=['if'], data=[]]
else:
logger.info('Copying Python bootstrap modules')
logger.indent += 2
try:
# copy required files...
for stdlib_dir in stdlib_dirs:
if not os.path.isdir(stdlib_dir):
continue # depends on [control=['if'], data=[]]
for fn in os.listdir(stdlib_dir):
bn = os.path.splitext(fn)[0]
if fn != 'site-packages' and bn in REQUIRED_FILES:
copyfile(join(stdlib_dir, fn), join(lib_dir, fn), symlink) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['fn']] # depends on [control=['for'], data=['stdlib_dir']]
# ...and modules
copy_required_modules(home_dir, symlink) # depends on [control=['try'], data=[]]
finally:
logger.indent -= 2
# ...copy tcl/tk
if is_win:
copy_tcltk(prefix, home_dir, symlink) # depends on [control=['if'], data=[]]
mkdir(join(lib_dir, 'site-packages'))
import site
site_filename = site.__file__
if site_filename.endswith('.pyc') or site_filename.endswith('.pyo'):
site_filename = site_filename[:-1] # depends on [control=['if'], data=[]]
elif site_filename.endswith('$py.class'):
site_filename = site_filename.replace('$py.class', '.py') # depends on [control=['if'], data=[]]
site_filename_dst = change_prefix(site_filename, home_dir)
site_dir = os.path.dirname(site_filename_dst)
writefile(site_filename_dst, SITE_PY)
writefile(join(site_dir, 'orig-prefix.txt'), prefix)
site_packages_filename = join(site_dir, 'no-global-site-packages.txt')
if not site_packages:
writefile(site_packages_filename, '') # depends on [control=['if'], data=[]]
if is_pypy or is_win:
stdinc_dir = join(prefix, 'include') # depends on [control=['if'], data=[]]
else:
stdinc_dir = join(prefix, 'include', py_version + abiflags)
if os.path.exists(stdinc_dir):
copyfile(stdinc_dir, inc_dir, symlink) # depends on [control=['if'], data=[]]
else:
logger.debug('No include dir %s' % stdinc_dir)
platinc_dir = distutils.sysconfig.get_python_inc(plat_specific=1)
if platinc_dir != stdinc_dir:
platinc_dest = distutils.sysconfig.get_python_inc(plat_specific=1, prefix=home_dir)
if platinc_dir == platinc_dest:
# Do platinc_dest manually due to a CPython bug;
# not http://bugs.python.org/issue3386 but a close cousin
platinc_dest = subst_path(platinc_dir, prefix, home_dir) # depends on [control=['if'], data=['platinc_dir', 'platinc_dest']]
if platinc_dest:
# PyPy's stdinc_dir and prefix are relative to the original binary
# (traversing virtualenvs), whereas the platinc_dir is relative to
# the inner virtualenv and ignores the prefix argument.
# This seems more evolved than designed.
copyfile(platinc_dir, platinc_dest, symlink) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['platinc_dir']]
# pypy never uses exec_prefix, just ignore it
if sys.exec_prefix != prefix and (not is_pypy):
if is_win:
exec_dir = join(sys.exec_prefix, 'lib') # depends on [control=['if'], data=[]]
elif is_jython:
exec_dir = join(sys.exec_prefix, 'Lib') # depends on [control=['if'], data=[]]
else:
exec_dir = join(sys.exec_prefix, 'lib', py_version)
for fn in os.listdir(exec_dir):
copyfile(join(exec_dir, fn), join(lib_dir, fn), symlink) # depends on [control=['for'], data=['fn']] # depends on [control=['if'], data=[]]
if is_jython:
# Jython has either jython-dev.jar and javalib/ dir, or just
# jython.jar
for name in ('jython-dev.jar', 'javalib', 'jython.jar'):
src = join(prefix, name)
if os.path.exists(src):
copyfile(src, join(home_dir, name), symlink) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
# XXX: registry should always exist after Jython 2.5rc1
src = join(prefix, 'registry')
if os.path.exists(src):
copyfile(src, join(home_dir, 'registry'), symlink=False) # depends on [control=['if'], data=[]]
copyfile(join(prefix, 'cachedir'), join(home_dir, 'cachedir'), symlink=False) # depends on [control=['if'], data=[]]
mkdir(bin_dir)
py_executable = join(bin_dir, os.path.basename(sys.executable))
if 'Python.framework' in prefix:
# OS X framework builds cause validation to break
# https://github.com/pypa/virtualenv/issues/322
if os.environ.get('__PYVENV_LAUNCHER__'):
del os.environ['__PYVENV_LAUNCHER__'] # depends on [control=['if'], data=[]]
if re.search('/Python(?:-32|-64)*$', py_executable):
# The name of the python executable is not quite what
# we want, rename it.
py_executable = os.path.join(os.path.dirname(py_executable), 'python') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
logger.notify('New %s executable in %s', expected_exe, py_executable)
pcbuild_dir = os.path.dirname(sys.executable)
pyd_pth = os.path.join(lib_dir, 'site-packages', 'virtualenv_builddir_pyd.pth')
if is_win and os.path.exists(os.path.join(pcbuild_dir, 'build.bat')):
logger.notify('Detected python running from build directory %s', pcbuild_dir)
logger.notify('Writing .pth file linking to build directory for *.pyd files')
writefile(pyd_pth, pcbuild_dir) # depends on [control=['if'], data=[]]
else:
pcbuild_dir = None
if os.path.exists(pyd_pth):
logger.info('Deleting %s (not Windows env or not build directory python)' % pyd_pth)
os.unlink(pyd_pth) # depends on [control=['if'], data=[]]
if sys.executable != py_executable:
## FIXME: could I just hard link?
executable = sys.executable
shutil.copyfile(executable, py_executable)
make_exe(py_executable)
if is_win or is_cygwin:
pythonw = os.path.join(os.path.dirname(sys.executable), 'pythonw.exe')
if os.path.exists(pythonw):
logger.info('Also created pythonw.exe')
shutil.copyfile(pythonw, os.path.join(os.path.dirname(py_executable), 'pythonw.exe')) # depends on [control=['if'], data=[]]
python_d = os.path.join(os.path.dirname(sys.executable), 'python_d.exe')
python_d_dest = os.path.join(os.path.dirname(py_executable), 'python_d.exe')
if os.path.exists(python_d):
logger.info('Also created python_d.exe')
shutil.copyfile(python_d, python_d_dest) # depends on [control=['if'], data=[]]
elif os.path.exists(python_d_dest):
logger.info('Removed python_d.exe as it is no longer at the source')
os.unlink(python_d_dest) # depends on [control=['if'], data=[]]
# we need to copy the DLL to enforce that windows will load the correct one.
# may not exist if we are cygwin.
py_executable_dll = 'python%s%s.dll' % (sys.version_info[0], sys.version_info[1])
py_executable_dll_d = 'python%s%s_d.dll' % (sys.version_info[0], sys.version_info[1])
pythondll = os.path.join(os.path.dirname(sys.executable), py_executable_dll)
pythondll_d = os.path.join(os.path.dirname(sys.executable), py_executable_dll_d)
pythondll_d_dest = os.path.join(os.path.dirname(py_executable), py_executable_dll_d)
if os.path.exists(pythondll):
logger.info('Also created %s' % py_executable_dll)
shutil.copyfile(pythondll, os.path.join(os.path.dirname(py_executable), py_executable_dll)) # depends on [control=['if'], data=[]]
if os.path.exists(pythondll_d):
logger.info('Also created %s' % py_executable_dll_d)
shutil.copyfile(pythondll_d, pythondll_d_dest) # depends on [control=['if'], data=[]]
elif os.path.exists(pythondll_d_dest):
logger.info('Removed %s as the source does not exist' % pythondll_d_dest)
os.unlink(pythondll_d_dest) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if is_pypy:
# make a symlink python --> pypy-c
python_executable = os.path.join(os.path.dirname(py_executable), 'python')
if sys.platform in ('win32', 'cygwin'):
python_executable += '.exe' # depends on [control=['if'], data=[]]
logger.info('Also created executable %s' % python_executable)
copyfile(py_executable, python_executable, symlink)
if is_win:
for name in ['libexpat.dll', 'libpypy.dll', 'libpypy-c.dll', 'libeay32.dll', 'ssleay32.dll', 'sqlite3.dll', 'tcl85.dll', 'tk85.dll']:
src = join(prefix, name)
if os.path.exists(src):
copyfile(src, join(bin_dir, name), symlink) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
for d in sys.path:
if d.endswith('lib_pypy'):
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['d']]
else:
logger.fatal('Could not find lib_pypy in sys.path')
raise SystemExit(3)
logger.info('Copying lib_pypy')
copyfile(d, os.path.join(home_dir, 'lib_pypy'), symlink) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['py_executable']]
if os.path.splitext(os.path.basename(py_executable))[0] != expected_exe:
secondary_exe = os.path.join(os.path.dirname(py_executable), expected_exe)
py_executable_ext = os.path.splitext(py_executable)[1]
if py_executable_ext.lower() == '.exe':
# python2.4 gives an extension of '.4' :P
secondary_exe += py_executable_ext # depends on [control=['if'], data=[]]
if os.path.exists(secondary_exe):
logger.warn('Not overwriting existing %s script %s (you must use %s)' % (expected_exe, secondary_exe, py_executable)) # depends on [control=['if'], data=[]]
else:
logger.notify('Also creating executable in %s' % secondary_exe)
shutil.copyfile(sys.executable, secondary_exe)
make_exe(secondary_exe) # depends on [control=['if'], data=['expected_exe']]
if '.framework' in prefix:
if 'Python.framework' in prefix:
logger.debug('MacOSX Python framework detected')
# Make sure we use the embedded interpreter inside
# the framework, even if sys.executable points to
# the stub executable in ${sys.prefix}/bin
# See http://groups.google.com/group/python-virtualenv/
# browse_thread/thread/17cab2f85da75951
original_python = os.path.join(prefix, 'Resources/Python.app/Contents/MacOS/Python') # depends on [control=['if'], data=['prefix']]
if 'EPD' in prefix:
logger.debug('EPD framework detected')
original_python = os.path.join(prefix, 'bin/python') # depends on [control=['if'], data=['prefix']]
shutil.copy(original_python, py_executable)
# Copy the framework's dylib into the virtual
# environment
virtual_lib = os.path.join(home_dir, '.Python')
if os.path.exists(virtual_lib):
os.unlink(virtual_lib) # depends on [control=['if'], data=[]]
copyfile(os.path.join(prefix, 'Python'), virtual_lib, symlink)
# And then change the install_name of the copied python executable
try:
mach_o_change(py_executable, os.path.join(prefix, 'Python'), '@executable_path/../.Python') # depends on [control=['try'], data=[]]
except:
e = sys.exc_info()[1]
logger.warn('Could not call mach_o_change: %s. Trying to call install_name_tool instead.' % e)
try:
call_subprocess(['install_name_tool', '-change', os.path.join(prefix, 'Python'), '@executable_path/../.Python', py_executable]) # depends on [control=['try'], data=[]]
except:
logger.fatal("Could not call install_name_tool -- you must have Apple's development tools installed")
raise # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['prefix']]
if not is_win:
# Ensure that 'python', 'pythonX' and 'pythonX.Y' all exist
py_exe_version_major = 'python%s' % sys.version_info[0]
py_exe_version_major_minor = 'python%s.%s' % (sys.version_info[0], sys.version_info[1])
py_exe_no_version = 'python'
required_symlinks = [py_exe_no_version, py_exe_version_major, py_exe_version_major_minor]
py_executable_base = os.path.basename(py_executable)
if py_executable_base in required_symlinks:
# Don't try to symlink to yourself.
required_symlinks.remove(py_executable_base) # depends on [control=['if'], data=['py_executable_base', 'required_symlinks']]
for pth in required_symlinks:
full_pth = join(bin_dir, pth)
if os.path.exists(full_pth):
os.unlink(full_pth) # depends on [control=['if'], data=[]]
if symlink:
os.symlink(py_executable_base, full_pth) # depends on [control=['if'], data=[]]
else:
copyfile(py_executable, full_pth, symlink) # depends on [control=['for'], data=['pth']] # depends on [control=['if'], data=[]]
if is_win and ' ' in py_executable:
# There's a bug with subprocess on Windows when using a first
# argument that has a space in it. Instead we have to quote
# the value:
py_executable = '"%s"' % py_executable # depends on [control=['if'], data=[]]
# NOTE: keep this check as one line, cmd.exe doesn't cope with line breaks
cmd = [py_executable, '-c', 'import sys;out=sys.stdout;getattr(out, "buffer", out).write(sys.prefix.encode("utf-8"))']
logger.info('Testing executable with %s %s "%s"' % tuple(cmd))
try:
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
(proc_stdout, proc_stderr) = proc.communicate() # depends on [control=['try'], data=[]]
except OSError:
e = sys.exc_info()[1]
if e.errno == errno.EACCES:
logger.fatal('ERROR: The executable %s could not be run: %s' % (py_executable, e))
sys.exit(100) # depends on [control=['if'], data=[]]
else:
raise e # depends on [control=['except'], data=[]]
proc_stdout = proc_stdout.strip().decode('utf-8')
proc_stdout = os.path.normcase(os.path.abspath(proc_stdout))
norm_home_dir = os.path.normcase(os.path.abspath(home_dir))
if hasattr(norm_home_dir, 'decode'):
norm_home_dir = norm_home_dir.decode(sys.getfilesystemencoding()) # depends on [control=['if'], data=[]]
if proc_stdout != norm_home_dir:
logger.fatal('ERROR: The executable %s is not functioning' % py_executable)
logger.fatal('ERROR: It thinks sys.prefix is %r (should be %r)' % (proc_stdout, norm_home_dir))
logger.fatal('ERROR: virtualenv is not compatible with this system or executable')
if is_win:
logger.fatal('Note: some Windows users have reported this error when they installed Python for "Only this user" or have multiple versions of Python installed. Copying the appropriate PythonXX.dll to the virtualenv Scripts/ directory may fix this problem.') # depends on [control=['if'], data=[]]
sys.exit(100) # depends on [control=['if'], data=['proc_stdout', 'norm_home_dir']]
else:
logger.info('Got sys.prefix result: %r' % proc_stdout)
pydistutils = os.path.expanduser('~/.pydistutils.cfg')
if os.path.exists(pydistutils):
logger.notify('Please make sure you remove any previous custom paths from your %s file.' % pydistutils) # depends on [control=['if'], data=[]]
## FIXME: really this should be calculated earlier
fix_local_scheme(home_dir, symlink)
if site_packages:
if os.path.exists(site_packages_filename):
logger.info('Deleting %s' % site_packages_filename)
os.unlink(site_packages_filename) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return py_executable |
def position_for_index(self, index):
    """Return the slot in ``self.elements`` where *index* belongs.

    ``self.elements`` interleaves (index, value) pairs, so the returned
    position is always even.  Used internally by insert and upsert: if
    *index* is already present, the position of the existing pair is
    returned as if its value were to be updated — detecting that
    duplicate is the caller's responsibility.
    """
    if not self.elements:
        return 0

    lo = 0
    hi = len(self.elements) // 2  # number of (index, value) pairs
    width = hi - lo
    mid = width // 2
    mid_index = self.elements[mid * 2]

    # Classic binary search over the pair slots [lo, hi).
    while width > 1:
        if mid_index < index:
            lo = mid
        elif mid_index > index:
            hi = mid
        else:
            break  # exact match found
        width = hi - lo
        mid = lo + width // 2
        mid_index = self.elements[mid * 2]

    # An equal or greater index means the pair at ``mid`` is the slot;
    # a smaller one means the new pair belongs just after it.
    if mid_index >= index:
        return mid * 2
    return (mid + 1) * 2
constant[Calculates the position within the vector to insert a given index.
This is used internally by insert and upsert. If there are duplicate
indexes then the position is returned as if the value for that index
were to be updated, but it is the callers responsibility to check
whether there is a duplicate at that index
]
if <ast.UnaryOp object at 0x7da1b2468af0> begin[:]
return[constant[0]]
variable[start] assign[=] constant[0]
variable[end] assign[=] call[name[int], parameter[binary_operation[call[name[len], parameter[name[self].elements]] / constant[2]]]]
variable[slice_length] assign[=] binary_operation[name[end] - name[start]]
variable[pivot_point] assign[=] call[name[int], parameter[binary_operation[name[slice_length] / constant[2]]]]
variable[pivot_index] assign[=] call[name[self].elements][binary_operation[name[pivot_point] * constant[2]]]
while compare[name[slice_length] greater[>] constant[1]] begin[:]
if compare[name[pivot_index] less[<] name[index]] begin[:]
variable[start] assign[=] name[pivot_point]
variable[slice_length] assign[=] binary_operation[name[end] - name[start]]
variable[pivot_point] assign[=] binary_operation[name[start] + call[name[int], parameter[binary_operation[name[slice_length] / constant[2]]]]]
variable[pivot_index] assign[=] call[name[self].elements][binary_operation[name[pivot_point] * constant[2]]]
if compare[name[pivot_index] equal[==] name[index]] begin[:]
return[binary_operation[name[pivot_point] * constant[2]]] | keyword[def] identifier[position_for_index] ( identifier[self] , identifier[index] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[elements] :
keyword[return] literal[int]
identifier[start] = literal[int]
identifier[end] = identifier[int] ( identifier[len] ( identifier[self] . identifier[elements] )/ literal[int] )
identifier[slice_length] = identifier[end] - identifier[start]
identifier[pivot_point] = identifier[int] ( identifier[slice_length] / literal[int] )
identifier[pivot_index] = identifier[self] . identifier[elements] [ identifier[pivot_point] * literal[int] ]
keyword[while] identifier[slice_length] > literal[int] :
keyword[if] identifier[pivot_index] < identifier[index] :
identifier[start] = identifier[pivot_point]
keyword[elif] identifier[pivot_index] > identifier[index] :
identifier[end] = identifier[pivot_point]
keyword[else] :
keyword[break]
identifier[slice_length] = identifier[end] - identifier[start]
identifier[pivot_point] = identifier[start] + identifier[int] ( identifier[slice_length] / literal[int] )
identifier[pivot_index] = identifier[self] . identifier[elements] [ identifier[pivot_point] * literal[int] ]
keyword[if] identifier[pivot_index] == identifier[index] :
keyword[return] identifier[pivot_point] * literal[int]
keyword[elif] identifier[pivot_index] > identifier[index] :
keyword[return] identifier[pivot_point] * literal[int]
keyword[else] :
keyword[return] ( identifier[pivot_point] + literal[int] )* literal[int] | def position_for_index(self, index):
"""Calculates the position within the vector to insert a given index.
This is used internally by insert and upsert. If there are duplicate
indexes then the position is returned as if the value for that index
were to be updated, but it is the callers responsibility to check
whether there is a duplicate at that index
"""
if not self.elements:
return 0 # depends on [control=['if'], data=[]]
start = 0
end = int(len(self.elements) / 2)
slice_length = end - start
pivot_point = int(slice_length / 2)
pivot_index = self.elements[pivot_point * 2]
while slice_length > 1:
if pivot_index < index:
start = pivot_point # depends on [control=['if'], data=[]]
elif pivot_index > index:
end = pivot_point # depends on [control=['if'], data=[]]
else:
break
slice_length = end - start
pivot_point = start + int(slice_length / 2)
pivot_index = self.elements[pivot_point * 2] # depends on [control=['while'], data=['slice_length']]
if pivot_index == index:
return pivot_point * 2 # depends on [control=['if'], data=[]]
elif pivot_index > index:
return pivot_point * 2 # depends on [control=['if'], data=[]]
else:
return (pivot_point + 1) * 2 |
def get_package_hashes(
    package,
    version=None,
    algorithm=DEFAULT_ALGORITHM,
    python_versions=(),
    verbose=False,
    include_prereleases=False,
    lookup_memory=None,
    index_url=DEFAULT_INDEX_URL,
):
    """Return the release-file hashes for *package* from the package index.

    The result is a dict of the form::

        {"package": <canonical name>,
         "version": <resolved version>,
         "hashes": [{"url": ..., "hash": ...}, ...]}

    When *version* is omitted, the latest version is resolved from the
    index metadata (optionally including pre-releases).  *python_versions*
    restricts the considered release files; *lookup_memory* is an optional
    cache of previously fetched package metadata keyed by package name.

    Raises ``PackageError`` when the version has no metadata or no
    matching release files.
    """
    # Prefer cached metadata when the caller supplied a lookup cache.
    if lookup_memory is not None and package in lookup_memory:
        data = lookup_memory[package]
    else:
        data = get_package_data(package, index_url, verbose)

    if not version:
        version = get_latest_version(data, include_prereleases)
        assert version  # get_latest_version is expected to always resolve one
        if verbose:
            _verbose("Latest version for {0} is {1}".format(package, version))

    # However the caller case-typed it, use the canonical name
    # recorded by the package index.
    package = data["info"]["name"]

    if version not in data["releases"]:
        raise PackageError("No data found for version {0}".format(version))
    releases = data["releases"][version]

    if python_versions:
        releases = filter_releases(releases, python_versions)

    if not releases:
        if python_versions:
            raise PackageError(
                "No releases could be found for "
                "{0} matching Python versions {1}".format(version, python_versions)
            )
        raise PackageError("No releases could be found for {0}".format(version))

    hashes = list(
        get_releases_hashes(releases=releases, algorithm=algorithm, verbose=verbose)
    )
    return {"package": package, "version": version, "hashes": hashes}
constant[
Gets the hashes for the given package.
>>> get_package_hashes('hashin')
{
'package': 'hashin',
'version': '0.10',
'hashes': [
{
'url': 'https://pypi.org/packages/[...]',
'hash': '45d1c5d2237a3b4f78b4198709fb2ecf[...]'
},
{
'url': 'https://pypi.org/packages/[...]',
'hash': '0d63bf4c115154781846ecf573049324[...]'
},
{
'url': 'https://pypi.org/packages/[...]',
'hash': 'c32e6d9fb09dc36ab9222c4606a1f43a[...]'
}
]
}
]
if <ast.BoolOp object at 0x7da1b0ebd4b0> begin[:]
variable[data] assign[=] call[name[lookup_memory]][name[package]]
if <ast.UnaryOp object at 0x7da1b0ebd690> begin[:]
variable[version] assign[=] call[name[get_latest_version], parameter[name[data], name[include_prereleases]]]
assert[name[version]]
if name[verbose] begin[:]
call[name[_verbose], parameter[call[constant[Latest version for {0} is {1}].format, parameter[name[package], name[version]]]]]
variable[package] assign[=] call[call[name[data]][constant[info]]][constant[name]]
<ast.Try object at 0x7da204567d30>
if name[python_versions] begin[:]
variable[releases] assign[=] call[name[filter_releases], parameter[name[releases], name[python_versions]]]
if <ast.UnaryOp object at 0x7da20c991d50> begin[:]
if name[python_versions] begin[:]
<ast.Raise object at 0x7da20c991840>
variable[hashes] assign[=] call[name[list], parameter[call[name[get_releases_hashes], parameter[]]]]
return[dictionary[[<ast.Constant object at 0x7da204566170>, <ast.Constant object at 0x7da204565ff0>, <ast.Constant object at 0x7da2045662c0>], [<ast.Name object at 0x7da2045672b0>, <ast.Name object at 0x7da204565960>, <ast.Name object at 0x7da204564340>]]] | keyword[def] identifier[get_package_hashes] (
identifier[package] ,
identifier[version] = keyword[None] ,
identifier[algorithm] = identifier[DEFAULT_ALGORITHM] ,
identifier[python_versions] =(),
identifier[verbose] = keyword[False] ,
identifier[include_prereleases] = keyword[False] ,
identifier[lookup_memory] = keyword[None] ,
identifier[index_url] = identifier[DEFAULT_INDEX_URL] ,
):
literal[string]
keyword[if] identifier[lookup_memory] keyword[is] keyword[not] keyword[None] keyword[and] identifier[package] keyword[in] identifier[lookup_memory] :
identifier[data] = identifier[lookup_memory] [ identifier[package] ]
keyword[else] :
identifier[data] = identifier[get_package_data] ( identifier[package] , identifier[index_url] , identifier[verbose] )
keyword[if] keyword[not] identifier[version] :
identifier[version] = identifier[get_latest_version] ( identifier[data] , identifier[include_prereleases] )
keyword[assert] identifier[version]
keyword[if] identifier[verbose] :
identifier[_verbose] ( literal[string] . identifier[format] ( identifier[package] , identifier[version] ))
identifier[package] = identifier[data] [ literal[string] ][ literal[string] ]
keyword[try] :
identifier[releases] = identifier[data] [ literal[string] ][ identifier[version] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[PackageError] ( literal[string] . identifier[format] ( identifier[version] ))
keyword[if] identifier[python_versions] :
identifier[releases] = identifier[filter_releases] ( identifier[releases] , identifier[python_versions] )
keyword[if] keyword[not] identifier[releases] :
keyword[if] identifier[python_versions] :
keyword[raise] identifier[PackageError] (
literal[string]
literal[string] . identifier[format] ( identifier[version] , identifier[python_versions] )
)
keyword[else] :
keyword[raise] identifier[PackageError] ( literal[string] . identifier[format] ( identifier[version] ))
identifier[hashes] = identifier[list] (
identifier[get_releases_hashes] ( identifier[releases] = identifier[releases] , identifier[algorithm] = identifier[algorithm] , identifier[verbose] = identifier[verbose] )
)
keyword[return] { literal[string] : identifier[package] , literal[string] : identifier[version] , literal[string] : identifier[hashes] } | def get_package_hashes(package, version=None, algorithm=DEFAULT_ALGORITHM, python_versions=(), verbose=False, include_prereleases=False, lookup_memory=None, index_url=DEFAULT_INDEX_URL):
"""
Gets the hashes for the given package.
>>> get_package_hashes('hashin')
{
'package': 'hashin',
'version': '0.10',
'hashes': [
{
'url': 'https://pypi.org/packages/[...]',
'hash': '45d1c5d2237a3b4f78b4198709fb2ecf[...]'
},
{
'url': 'https://pypi.org/packages/[...]',
'hash': '0d63bf4c115154781846ecf573049324[...]'
},
{
'url': 'https://pypi.org/packages/[...]',
'hash': 'c32e6d9fb09dc36ab9222c4606a1f43a[...]'
}
]
}
"""
if lookup_memory is not None and package in lookup_memory:
data = lookup_memory[package] # depends on [control=['if'], data=[]]
else:
data = get_package_data(package, index_url, verbose)
if not version:
version = get_latest_version(data, include_prereleases)
assert version
if verbose:
_verbose('Latest version for {0} is {1}'.format(package, version)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Independent of how you like to case type it, pick the correct
# name from the PyPI index.
package = data['info']['name']
try:
releases = data['releases'][version] # depends on [control=['try'], data=[]]
except KeyError:
raise PackageError('No data found for version {0}'.format(version)) # depends on [control=['except'], data=[]]
if python_versions:
releases = filter_releases(releases, python_versions) # depends on [control=['if'], data=[]]
if not releases:
if python_versions:
raise PackageError('No releases could be found for {0} matching Python versions {1}'.format(version, python_versions)) # depends on [control=['if'], data=[]]
else:
raise PackageError('No releases could be found for {0}'.format(version)) # depends on [control=['if'], data=[]]
hashes = list(get_releases_hashes(releases=releases, algorithm=algorithm, verbose=verbose))
return {'package': package, 'version': version, 'hashes': hashes} |
def AAM(cpu, imm=None):
"""
ASCII adjust AX after multiply.
Adjusts the result of the multiplication of two unpacked BCD values
to create a pair of unpacked (base 10) BCD values. The AX register is
the implied source and destination operand for this instruction. The AAM
instruction is only useful when it follows a MUL instruction that multiplies
(binary multiplication) two unpacked BCD values and stores a word result
in the AX register. The AAM instruction then adjusts the contents of the
AX register to contain the correct 2-digit unpacked (base 10) BCD result.
The SF, ZF, and PF flags are set according to the resulting binary value in the AL register.
This instruction executes as described in compatibility mode and legacy mode.
It is not valid in 64-bit mode.::
tempAL = AL;
AH = tempAL / 10;
AL = tempAL MOD 10;
:param cpu: current CPU.
"""
if imm is None:
imm = 10
else:
imm = imm.read()
cpu.AH = Operators.UDIV(cpu.AL, imm)
cpu.AL = Operators.UREM(cpu.AL, imm)
# Defined flags: ...sz.p.
cpu._calculate_logic_flags(8, cpu.AL) | def function[AAM, parameter[cpu, imm]]:
constant[
ASCII adjust AX after multiply.
Adjusts the result of the multiplication of two unpacked BCD values
to create a pair of unpacked (base 10) BCD values. The AX register is
the implied source and destination operand for this instruction. The AAM
instruction is only useful when it follows a MUL instruction that multiplies
(binary multiplication) two unpacked BCD values and stores a word result
in the AX register. The AAM instruction then adjusts the contents of the
AX register to contain the correct 2-digit unpacked (base 10) BCD result.
The SF, ZF, and PF flags are set according to the resulting binary value in the AL register.
This instruction executes as described in compatibility mode and legacy mode.
It is not valid in 64-bit mode.::
tempAL = AL;
AH = tempAL / 10;
AL = tempAL MOD 10;
:param cpu: current CPU.
]
if compare[name[imm] is constant[None]] begin[:]
variable[imm] assign[=] constant[10]
name[cpu].AH assign[=] call[name[Operators].UDIV, parameter[name[cpu].AL, name[imm]]]
name[cpu].AL assign[=] call[name[Operators].UREM, parameter[name[cpu].AL, name[imm]]]
call[name[cpu]._calculate_logic_flags, parameter[constant[8], name[cpu].AL]] | keyword[def] identifier[AAM] ( identifier[cpu] , identifier[imm] = keyword[None] ):
literal[string]
keyword[if] identifier[imm] keyword[is] keyword[None] :
identifier[imm] = literal[int]
keyword[else] :
identifier[imm] = identifier[imm] . identifier[read] ()
identifier[cpu] . identifier[AH] = identifier[Operators] . identifier[UDIV] ( identifier[cpu] . identifier[AL] , identifier[imm] )
identifier[cpu] . identifier[AL] = identifier[Operators] . identifier[UREM] ( identifier[cpu] . identifier[AL] , identifier[imm] )
identifier[cpu] . identifier[_calculate_logic_flags] ( literal[int] , identifier[cpu] . identifier[AL] ) | def AAM(cpu, imm=None):
"""
ASCII adjust AX after multiply.
Adjusts the result of the multiplication of two unpacked BCD values
to create a pair of unpacked (base 10) BCD values. The AX register is
the implied source and destination operand for this instruction. The AAM
instruction is only useful when it follows a MUL instruction that multiplies
(binary multiplication) two unpacked BCD values and stores a word result
in the AX register. The AAM instruction then adjusts the contents of the
AX register to contain the correct 2-digit unpacked (base 10) BCD result.
The SF, ZF, and PF flags are set according to the resulting binary value in the AL register.
This instruction executes as described in compatibility mode and legacy mode.
It is not valid in 64-bit mode.::
tempAL = AL;
AH = tempAL / 10;
AL = tempAL MOD 10;
:param cpu: current CPU.
"""
if imm is None:
imm = 10 # depends on [control=['if'], data=['imm']]
else:
imm = imm.read()
cpu.AH = Operators.UDIV(cpu.AL, imm)
cpu.AL = Operators.UREM(cpu.AL, imm)
# Defined flags: ...sz.p.
cpu._calculate_logic_flags(8, cpu.AL) |
def get_metric_definitions(self, webspace_name, website_name):
'''
Get metric definitions of metrics available of this web site.
webspace_name:
The name of the webspace.
website_name:
The name of the website.
'''
return self._perform_get(self._get_metric_definitions_path(webspace_name, website_name),
MetricDefinitions) | def function[get_metric_definitions, parameter[self, webspace_name, website_name]]:
constant[
Get metric definitions of metrics available of this web site.
webspace_name:
The name of the webspace.
website_name:
The name of the website.
]
return[call[name[self]._perform_get, parameter[call[name[self]._get_metric_definitions_path, parameter[name[webspace_name], name[website_name]]], name[MetricDefinitions]]]] | keyword[def] identifier[get_metric_definitions] ( identifier[self] , identifier[webspace_name] , identifier[website_name] ):
literal[string]
keyword[return] identifier[self] . identifier[_perform_get] ( identifier[self] . identifier[_get_metric_definitions_path] ( identifier[webspace_name] , identifier[website_name] ),
identifier[MetricDefinitions] ) | def get_metric_definitions(self, webspace_name, website_name):
"""
Get metric definitions of metrics available of this web site.
webspace_name:
The name of the webspace.
website_name:
The name of the website.
"""
return self._perform_get(self._get_metric_definitions_path(webspace_name, website_name), MetricDefinitions) |
def use_comparative_hierarchy_view(self):
"""Pass through to provider HierarchyLookupSession.use_comparative_hierarchy_view"""
self._hierarchy_view = COMPARATIVE
# self._get_provider_session('hierarchy_lookup_session') # To make sure the session is tracked
for session in self._get_provider_sessions():
try:
session.use_comparative_hierarchy_view()
except AttributeError:
pass | def function[use_comparative_hierarchy_view, parameter[self]]:
constant[Pass through to provider HierarchyLookupSession.use_comparative_hierarchy_view]
name[self]._hierarchy_view assign[=] name[COMPARATIVE]
for taget[name[session]] in starred[call[name[self]._get_provider_sessions, parameter[]]] begin[:]
<ast.Try object at 0x7da2041d9030> | keyword[def] identifier[use_comparative_hierarchy_view] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_hierarchy_view] = identifier[COMPARATIVE]
keyword[for] identifier[session] keyword[in] identifier[self] . identifier[_get_provider_sessions] ():
keyword[try] :
identifier[session] . identifier[use_comparative_hierarchy_view] ()
keyword[except] identifier[AttributeError] :
keyword[pass] | def use_comparative_hierarchy_view(self):
"""Pass through to provider HierarchyLookupSession.use_comparative_hierarchy_view"""
self._hierarchy_view = COMPARATIVE
# self._get_provider_session('hierarchy_lookup_session') # To make sure the session is tracked
for session in self._get_provider_sessions():
try:
session.use_comparative_hierarchy_view() # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['session']] |
def lisp_to_nested_expression(lisp_string: str) -> List:
"""
Takes a logical form as a lisp string and returns a nested list representation of the lisp.
For example, "(count (division first))" would get mapped to ['count', ['division', 'first']].
"""
stack: List = []
current_expression: List = []
tokens = lisp_string.split()
for token in tokens:
while token[0] == '(':
nested_expression: List = []
current_expression.append(nested_expression)
stack.append(current_expression)
current_expression = nested_expression
token = token[1:]
current_expression.append(token.replace(')', ''))
while token[-1] == ')':
current_expression = stack.pop()
token = token[:-1]
return current_expression[0] | def function[lisp_to_nested_expression, parameter[lisp_string]]:
constant[
Takes a logical form as a lisp string and returns a nested list representation of the lisp.
For example, "(count (division first))" would get mapped to ['count', ['division', 'first']].
]
<ast.AnnAssign object at 0x7da20c794a00>
<ast.AnnAssign object at 0x7da20c794850>
variable[tokens] assign[=] call[name[lisp_string].split, parameter[]]
for taget[name[token]] in starred[name[tokens]] begin[:]
while compare[call[name[token]][constant[0]] equal[==] constant[(]] begin[:]
<ast.AnnAssign object at 0x7da20c794250>
call[name[current_expression].append, parameter[name[nested_expression]]]
call[name[stack].append, parameter[name[current_expression]]]
variable[current_expression] assign[=] name[nested_expression]
variable[token] assign[=] call[name[token]][<ast.Slice object at 0x7da20c795ea0>]
call[name[current_expression].append, parameter[call[name[token].replace, parameter[constant[)], constant[]]]]]
while compare[call[name[token]][<ast.UnaryOp object at 0x7da20c795990>] equal[==] constant[)]] begin[:]
variable[current_expression] assign[=] call[name[stack].pop, parameter[]]
variable[token] assign[=] call[name[token]][<ast.Slice object at 0x7da20c796710>]
return[call[name[current_expression]][constant[0]]] | keyword[def] identifier[lisp_to_nested_expression] ( identifier[lisp_string] : identifier[str] )-> identifier[List] :
literal[string]
identifier[stack] : identifier[List] =[]
identifier[current_expression] : identifier[List] =[]
identifier[tokens] = identifier[lisp_string] . identifier[split] ()
keyword[for] identifier[token] keyword[in] identifier[tokens] :
keyword[while] identifier[token] [ literal[int] ]== literal[string] :
identifier[nested_expression] : identifier[List] =[]
identifier[current_expression] . identifier[append] ( identifier[nested_expression] )
identifier[stack] . identifier[append] ( identifier[current_expression] )
identifier[current_expression] = identifier[nested_expression]
identifier[token] = identifier[token] [ literal[int] :]
identifier[current_expression] . identifier[append] ( identifier[token] . identifier[replace] ( literal[string] , literal[string] ))
keyword[while] identifier[token] [- literal[int] ]== literal[string] :
identifier[current_expression] = identifier[stack] . identifier[pop] ()
identifier[token] = identifier[token] [:- literal[int] ]
keyword[return] identifier[current_expression] [ literal[int] ] | def lisp_to_nested_expression(lisp_string: str) -> List:
"""
Takes a logical form as a lisp string and returns a nested list representation of the lisp.
For example, "(count (division first))" would get mapped to ['count', ['division', 'first']].
"""
stack: List = []
current_expression: List = []
tokens = lisp_string.split()
for token in tokens:
while token[0] == '(':
nested_expression: List = []
current_expression.append(nested_expression)
stack.append(current_expression)
current_expression = nested_expression
token = token[1:] # depends on [control=['while'], data=[]]
current_expression.append(token.replace(')', ''))
while token[-1] == ')':
current_expression = stack.pop()
token = token[:-1] # depends on [control=['while'], data=[]] # depends on [control=['for'], data=['token']]
return current_expression[0] |
def _reset(self, indices):
"""Resets environments at indices shouldn't pre-process or record.
Subclasses should override this to do the actual reset if something other
than the default implementation is desired.
Args:
indices: list of indices of underlying envs to call reset on.
Returns:
np.ndarray of stacked observations from the reset-ed envs.
"""
# Pre-conditions: common_preconditions, see `assert_common_preconditions`.
self.assert_common_preconditions()
# This returns a numpy array with first dimension `len(indices)` and the
# rest being the dimensionality of the observation.
return np.stack([self._envs[index].reset() for index in indices]) | def function[_reset, parameter[self, indices]]:
constant[Resets environments at indices shouldn't pre-process or record.
Subclasses should override this to do the actual reset if something other
than the default implementation is desired.
Args:
indices: list of indices of underlying envs to call reset on.
Returns:
np.ndarray of stacked observations from the reset-ed envs.
]
call[name[self].assert_common_preconditions, parameter[]]
return[call[name[np].stack, parameter[<ast.ListComp object at 0x7da1b1e174c0>]]] | keyword[def] identifier[_reset] ( identifier[self] , identifier[indices] ):
literal[string]
identifier[self] . identifier[assert_common_preconditions] ()
keyword[return] identifier[np] . identifier[stack] ([ identifier[self] . identifier[_envs] [ identifier[index] ]. identifier[reset] () keyword[for] identifier[index] keyword[in] identifier[indices] ]) | def _reset(self, indices):
"""Resets environments at indices shouldn't pre-process or record.
Subclasses should override this to do the actual reset if something other
than the default implementation is desired.
Args:
indices: list of indices of underlying envs to call reset on.
Returns:
np.ndarray of stacked observations from the reset-ed envs.
"""
# Pre-conditions: common_preconditions, see `assert_common_preconditions`.
self.assert_common_preconditions()
# This returns a numpy array with first dimension `len(indices)` and the
# rest being the dimensionality of the observation.
return np.stack([self._envs[index].reset() for index in indices]) |
def DbExportDevice(self, argin):
""" Export a device to the database
:param argin: Str[0] = Device name
Str[1] = CORBA IOR
Str[2] = Device server process host name
Str[3] = Device server process PID or string ``null``
Str[4] = Device server process version
:type: tango.DevVarStringArray
:return:
:rtype: tango.DevVoid """
self._log.debug("In DbExportDevice()")
if len(argin) < 5:
self.warn_stream("DataBase::DbExportDevice(): insufficient export info for device ")
th_exc(DB_IncorrectArguments,
"insufficient export info for device",
"DataBase::ExportDevice()")
dev_name, IOR, host, pid, version = argin[:5]
dev_name = dev_name.lower()
if pid.lower() == 'null':
pid = "-1"
self.db.export_device(dev_name, IOR, host, pid, version) | def function[DbExportDevice, parameter[self, argin]]:
constant[ Export a device to the database
:param argin: Str[0] = Device name
Str[1] = CORBA IOR
Str[2] = Device server process host name
Str[3] = Device server process PID or string ``null``
Str[4] = Device server process version
:type: tango.DevVarStringArray
:return:
:rtype: tango.DevVoid ]
call[name[self]._log.debug, parameter[constant[In DbExportDevice()]]]
if compare[call[name[len], parameter[name[argin]]] less[<] constant[5]] begin[:]
call[name[self].warn_stream, parameter[constant[DataBase::DbExportDevice(): insufficient export info for device ]]]
call[name[th_exc], parameter[name[DB_IncorrectArguments], constant[insufficient export info for device], constant[DataBase::ExportDevice()]]]
<ast.Tuple object at 0x7da20c990f70> assign[=] call[name[argin]][<ast.Slice object at 0x7da20c9908e0>]
variable[dev_name] assign[=] call[name[dev_name].lower, parameter[]]
if compare[call[name[pid].lower, parameter[]] equal[==] constant[null]] begin[:]
variable[pid] assign[=] constant[-1]
call[name[self].db.export_device, parameter[name[dev_name], name[IOR], name[host], name[pid], name[version]]] | keyword[def] identifier[DbExportDevice] ( identifier[self] , identifier[argin] ):
literal[string]
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] )
keyword[if] identifier[len] ( identifier[argin] )< literal[int] :
identifier[self] . identifier[warn_stream] ( literal[string] )
identifier[th_exc] ( identifier[DB_IncorrectArguments] ,
literal[string] ,
literal[string] )
identifier[dev_name] , identifier[IOR] , identifier[host] , identifier[pid] , identifier[version] = identifier[argin] [: literal[int] ]
identifier[dev_name] = identifier[dev_name] . identifier[lower] ()
keyword[if] identifier[pid] . identifier[lower] ()== literal[string] :
identifier[pid] = literal[string]
identifier[self] . identifier[db] . identifier[export_device] ( identifier[dev_name] , identifier[IOR] , identifier[host] , identifier[pid] , identifier[version] ) | def DbExportDevice(self, argin):
""" Export a device to the database
:param argin: Str[0] = Device name
Str[1] = CORBA IOR
Str[2] = Device server process host name
Str[3] = Device server process PID or string ``null``
Str[4] = Device server process version
:type: tango.DevVarStringArray
:return:
:rtype: tango.DevVoid """
self._log.debug('In DbExportDevice()')
if len(argin) < 5:
self.warn_stream('DataBase::DbExportDevice(): insufficient export info for device ')
th_exc(DB_IncorrectArguments, 'insufficient export info for device', 'DataBase::ExportDevice()') # depends on [control=['if'], data=[]]
(dev_name, IOR, host, pid, version) = argin[:5]
dev_name = dev_name.lower()
if pid.lower() == 'null':
pid = '-1' # depends on [control=['if'], data=[]]
self.db.export_device(dev_name, IOR, host, pid, version) |
def system_monitor_SFM_threshold_marginal_threshold(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
system_monitor = ET.SubElement(config, "system-monitor", xmlns="urn:brocade.com:mgmt:brocade-system-monitor")
SFM = ET.SubElement(system_monitor, "SFM")
threshold = ET.SubElement(SFM, "threshold")
marginal_threshold = ET.SubElement(threshold, "marginal-threshold")
marginal_threshold.text = kwargs.pop('marginal_threshold')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[system_monitor_SFM_threshold_marginal_threshold, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[system_monitor] assign[=] call[name[ET].SubElement, parameter[name[config], constant[system-monitor]]]
variable[SFM] assign[=] call[name[ET].SubElement, parameter[name[system_monitor], constant[SFM]]]
variable[threshold] assign[=] call[name[ET].SubElement, parameter[name[SFM], constant[threshold]]]
variable[marginal_threshold] assign[=] call[name[ET].SubElement, parameter[name[threshold], constant[marginal-threshold]]]
name[marginal_threshold].text assign[=] call[name[kwargs].pop, parameter[constant[marginal_threshold]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[system_monitor_SFM_threshold_marginal_threshold] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[system_monitor] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[SFM] = identifier[ET] . identifier[SubElement] ( identifier[system_monitor] , literal[string] )
identifier[threshold] = identifier[ET] . identifier[SubElement] ( identifier[SFM] , literal[string] )
identifier[marginal_threshold] = identifier[ET] . identifier[SubElement] ( identifier[threshold] , literal[string] )
identifier[marginal_threshold] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def system_monitor_SFM_threshold_marginal_threshold(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
system_monitor = ET.SubElement(config, 'system-monitor', xmlns='urn:brocade.com:mgmt:brocade-system-monitor')
SFM = ET.SubElement(system_monitor, 'SFM')
threshold = ET.SubElement(SFM, 'threshold')
marginal_threshold = ET.SubElement(threshold, 'marginal-threshold')
marginal_threshold.text = kwargs.pop('marginal_threshold')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def hardware_custom_profile_kap_custom_profile_xstp_xstp_hello_interval(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hardware = ET.SubElement(config, "hardware", xmlns="urn:brocade.com:mgmt:brocade-hardware")
custom_profile = ET.SubElement(hardware, "custom-profile")
kap_custom_profile = ET.SubElement(custom_profile, "kap-custom-profile")
name_key = ET.SubElement(kap_custom_profile, "name")
name_key.text = kwargs.pop('name')
xstp = ET.SubElement(kap_custom_profile, "xstp")
xstp_hello_interval = ET.SubElement(xstp, "xstp_hello_interval")
xstp_hello_interval.text = kwargs.pop('xstp_hello_interval')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[hardware_custom_profile_kap_custom_profile_xstp_xstp_hello_interval, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[hardware] assign[=] call[name[ET].SubElement, parameter[name[config], constant[hardware]]]
variable[custom_profile] assign[=] call[name[ET].SubElement, parameter[name[hardware], constant[custom-profile]]]
variable[kap_custom_profile] assign[=] call[name[ET].SubElement, parameter[name[custom_profile], constant[kap-custom-profile]]]
variable[name_key] assign[=] call[name[ET].SubElement, parameter[name[kap_custom_profile], constant[name]]]
name[name_key].text assign[=] call[name[kwargs].pop, parameter[constant[name]]]
variable[xstp] assign[=] call[name[ET].SubElement, parameter[name[kap_custom_profile], constant[xstp]]]
variable[xstp_hello_interval] assign[=] call[name[ET].SubElement, parameter[name[xstp], constant[xstp_hello_interval]]]
name[xstp_hello_interval].text assign[=] call[name[kwargs].pop, parameter[constant[xstp_hello_interval]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[hardware_custom_profile_kap_custom_profile_xstp_xstp_hello_interval] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[hardware] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[custom_profile] = identifier[ET] . identifier[SubElement] ( identifier[hardware] , literal[string] )
identifier[kap_custom_profile] = identifier[ET] . identifier[SubElement] ( identifier[custom_profile] , literal[string] )
identifier[name_key] = identifier[ET] . identifier[SubElement] ( identifier[kap_custom_profile] , literal[string] )
identifier[name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[xstp] = identifier[ET] . identifier[SubElement] ( identifier[kap_custom_profile] , literal[string] )
identifier[xstp_hello_interval] = identifier[ET] . identifier[SubElement] ( identifier[xstp] , literal[string] )
identifier[xstp_hello_interval] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def hardware_custom_profile_kap_custom_profile_xstp_xstp_hello_interval(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
hardware = ET.SubElement(config, 'hardware', xmlns='urn:brocade.com:mgmt:brocade-hardware')
custom_profile = ET.SubElement(hardware, 'custom-profile')
kap_custom_profile = ET.SubElement(custom_profile, 'kap-custom-profile')
name_key = ET.SubElement(kap_custom_profile, 'name')
name_key.text = kwargs.pop('name')
xstp = ET.SubElement(kap_custom_profile, 'xstp')
xstp_hello_interval = ET.SubElement(xstp, 'xstp_hello_interval')
xstp_hello_interval.text = kwargs.pop('xstp_hello_interval')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def decode_conjure_enum_type(cls, obj, conjure_type):
"""Decodes json into a conjure enum type.
Args:
obj: the json object to decode
conjure_type: a class object which is the enum type
we're decoding into.
Returns:
An instance of enum of type conjure_type.
"""
if not (isinstance(obj, str) or str(type(obj)) == "<type 'unicode'>"):
raise Exception(
'Expected to find str type but found {} instead'.format(
type(obj)))
if obj in conjure_type.__members__:
return conjure_type[obj]
else:
return conjure_type["UNKNOWN"] | def function[decode_conjure_enum_type, parameter[cls, obj, conjure_type]]:
constant[Decodes json into a conjure enum type.
Args:
obj: the json object to decode
conjure_type: a class object which is the enum type
we're decoding into.
Returns:
An instance of enum of type conjure_type.
]
if <ast.UnaryOp object at 0x7da1b0e15990> begin[:]
<ast.Raise object at 0x7da1b0e14370>
if compare[name[obj] in name[conjure_type].__members__] begin[:]
return[call[name[conjure_type]][name[obj]]] | keyword[def] identifier[decode_conjure_enum_type] ( identifier[cls] , identifier[obj] , identifier[conjure_type] ):
literal[string]
keyword[if] keyword[not] ( identifier[isinstance] ( identifier[obj] , identifier[str] ) keyword[or] identifier[str] ( identifier[type] ( identifier[obj] ))== literal[string] ):
keyword[raise] identifier[Exception] (
literal[string] . identifier[format] (
identifier[type] ( identifier[obj] )))
keyword[if] identifier[obj] keyword[in] identifier[conjure_type] . identifier[__members__] :
keyword[return] identifier[conjure_type] [ identifier[obj] ]
keyword[else] :
keyword[return] identifier[conjure_type] [ literal[string] ] | def decode_conjure_enum_type(cls, obj, conjure_type):
"""Decodes json into a conjure enum type.
Args:
obj: the json object to decode
conjure_type: a class object which is the enum type
we're decoding into.
Returns:
An instance of enum of type conjure_type.
"""
if not (isinstance(obj, str) or str(type(obj)) == "<type 'unicode'>"):
raise Exception('Expected to find str type but found {} instead'.format(type(obj))) # depends on [control=['if'], data=[]]
if obj in conjure_type.__members__:
return conjure_type[obj] # depends on [control=['if'], data=['obj']]
else:
return conjure_type['UNKNOWN'] |
def publish_message(self,
exchange,
routing_key,
properties,
body,
connection=None):
"""Publish a message to RabbitMQ on the same channel the original
message was received on. If
`publisher confirmations <https://www.rabbitmq.com/confirms.html>`_
are enabled, the method will return a
:class:`~tornado.concurrent.Future` that will resolve a :class:`bool`
that indicates if the publishing was successful.
.. versionchanged:: 4.0.0
Return a :class:`~tornado.concurrent.Future` if
`publisher confirmations <https://www.rabbitmq.com/confirms.html>`_
are enabled. Removed the ``channel`` parameter.
:param exchange: The exchange to publish to
:type exchange: :class:`str`
:param routing_key: The routing key to publish with
:type routing_key: :class:`str`
:param properties: The message properties
:type properties: :class:`dict`
:param body: The message body
:type body: :class:`bytes` or :class:`str`
:param connection: The connection to use. If it is not
specified, the channel that the message was delivered on is used.
:type connection: :class:`str`
:rtype: :class:`tornado.concurrent.Future` or :data:`None`
"""
conn = self._publish_connection(connection)
self.logger.debug('Publishing message to %s:%s (%s)', exchange,
routing_key, conn.name)
basic_properties = self._get_pika_properties(properties)
with self._measurement.track_duration('publish.{}.{}'.format(
exchange, routing_key)):
conn.channel.basic_publish(
exchange=exchange,
routing_key=routing_key,
properties=basic_properties,
body=body,
mandatory=conn.publisher_confirmations)
return self._publisher_confirmation_future(
conn.name, exchange, routing_key, basic_properties) | def function[publish_message, parameter[self, exchange, routing_key, properties, body, connection]]:
constant[Publish a message to RabbitMQ on the same channel the original
message was received on. If
`publisher confirmations <https://www.rabbitmq.com/confirms.html>`_
are enabled, the method will return a
:class:`~tornado.concurrent.Future` that will resolve a :class:`bool`
that indicates if the publishing was successful.
.. versionchanged:: 4.0.0
Return a :class:`~tornado.concurrent.Future` if
`publisher confirmations <https://www.rabbitmq.com/confirms.html>`_
are enabled. Removed the ``channel`` parameter.
:param exchange: The exchange to publish to
:type exchange: :class:`str`
:param routing_key: The routing key to publish with
:type routing_key: :class:`str`
:param properties: The message properties
:type properties: :class:`dict`
:param body: The message body
:type body: :class:`bytes` or :class:`str`
:param connection: The connection to use. If it is not
specified, the channel that the message was delivered on is used.
:type connection: :class:`str`
:rtype: :class:`tornado.concurrent.Future` or :data:`None`
]
variable[conn] assign[=] call[name[self]._publish_connection, parameter[name[connection]]]
call[name[self].logger.debug, parameter[constant[Publishing message to %s:%s (%s)], name[exchange], name[routing_key], name[conn].name]]
variable[basic_properties] assign[=] call[name[self]._get_pika_properties, parameter[name[properties]]]
with call[name[self]._measurement.track_duration, parameter[call[constant[publish.{}.{}].format, parameter[name[exchange], name[routing_key]]]]] begin[:]
call[name[conn].channel.basic_publish, parameter[]]
return[call[name[self]._publisher_confirmation_future, parameter[name[conn].name, name[exchange], name[routing_key], name[basic_properties]]]] | keyword[def] identifier[publish_message] ( identifier[self] ,
identifier[exchange] ,
identifier[routing_key] ,
identifier[properties] ,
identifier[body] ,
identifier[connection] = keyword[None] ):
literal[string]
identifier[conn] = identifier[self] . identifier[_publish_connection] ( identifier[connection] )
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] , identifier[exchange] ,
identifier[routing_key] , identifier[conn] . identifier[name] )
identifier[basic_properties] = identifier[self] . identifier[_get_pika_properties] ( identifier[properties] )
keyword[with] identifier[self] . identifier[_measurement] . identifier[track_duration] ( literal[string] . identifier[format] (
identifier[exchange] , identifier[routing_key] )):
identifier[conn] . identifier[channel] . identifier[basic_publish] (
identifier[exchange] = identifier[exchange] ,
identifier[routing_key] = identifier[routing_key] ,
identifier[properties] = identifier[basic_properties] ,
identifier[body] = identifier[body] ,
identifier[mandatory] = identifier[conn] . identifier[publisher_confirmations] )
keyword[return] identifier[self] . identifier[_publisher_confirmation_future] (
identifier[conn] . identifier[name] , identifier[exchange] , identifier[routing_key] , identifier[basic_properties] ) | def publish_message(self, exchange, routing_key, properties, body, connection=None):
"""Publish a message to RabbitMQ on the same channel the original
message was received on. If
`publisher confirmations <https://www.rabbitmq.com/confirms.html>`_
are enabled, the method will return a
:class:`~tornado.concurrent.Future` that will resolve a :class:`bool`
that indicates if the publishing was successful.
.. versionchanged:: 4.0.0
Return a :class:`~tornado.concurrent.Future` if
`publisher confirmations <https://www.rabbitmq.com/confirms.html>`_
are enabled. Removed the ``channel`` parameter.
:param exchange: The exchange to publish to
:type exchange: :class:`str`
:param routing_key: The routing key to publish with
:type routing_key: :class:`str`
:param properties: The message properties
:type properties: :class:`dict`
:param body: The message body
:type body: :class:`bytes` or :class:`str`
:param connection: The connection to use. If it is not
specified, the channel that the message was delivered on is used.
:type connection: :class:`str`
:rtype: :class:`tornado.concurrent.Future` or :data:`None`
"""
conn = self._publish_connection(connection)
self.logger.debug('Publishing message to %s:%s (%s)', exchange, routing_key, conn.name)
basic_properties = self._get_pika_properties(properties)
with self._measurement.track_duration('publish.{}.{}'.format(exchange, routing_key)):
conn.channel.basic_publish(exchange=exchange, routing_key=routing_key, properties=basic_properties, body=body, mandatory=conn.publisher_confirmations)
return self._publisher_confirmation_future(conn.name, exchange, routing_key, basic_properties) # depends on [control=['with'], data=[]] |
def make(assembly, samples):
""" Make phylip and nexus formats. This is hackish since I'm recycling the
code whole-hog from pyrad V3. Probably could be good to go back through
and clean up the conversion code some time.
"""
## get the longest name
longname = max([len(i) for i in assembly.samples.keys()])
names = [i.name for i in samples]
partitions = makephy(assembly, samples, longname)
makenex(assembly, names, longname, partitions) | def function[make, parameter[assembly, samples]]:
constant[ Make phylip and nexus formats. This is hackish since I'm recycling the
code whole-hog from pyrad V3. Probably could be good to go back through
and clean up the conversion code some time.
]
variable[longname] assign[=] call[name[max], parameter[<ast.ListComp object at 0x7da1afe339d0>]]
variable[names] assign[=] <ast.ListComp object at 0x7da1afe33730>
variable[partitions] assign[=] call[name[makephy], parameter[name[assembly], name[samples], name[longname]]]
call[name[makenex], parameter[name[assembly], name[names], name[longname], name[partitions]]] | keyword[def] identifier[make] ( identifier[assembly] , identifier[samples] ):
literal[string]
identifier[longname] = identifier[max] ([ identifier[len] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[assembly] . identifier[samples] . identifier[keys] ()])
identifier[names] =[ identifier[i] . identifier[name] keyword[for] identifier[i] keyword[in] identifier[samples] ]
identifier[partitions] = identifier[makephy] ( identifier[assembly] , identifier[samples] , identifier[longname] )
identifier[makenex] ( identifier[assembly] , identifier[names] , identifier[longname] , identifier[partitions] ) | def make(assembly, samples):
""" Make phylip and nexus formats. This is hackish since I'm recycling the
code whole-hog from pyrad V3. Probably could be good to go back through
and clean up the conversion code some time.
"""
## get the longest name
longname = max([len(i) for i in assembly.samples.keys()])
names = [i.name for i in samples]
partitions = makephy(assembly, samples, longname)
makenex(assembly, names, longname, partitions) |
def copy(cls, data):
"""Set the clipboard data ('Copy').
Parameters: data to set (string)
Optional: datatype if it's not a string
Returns: True / False on successful copy, Any exception raised (like
passes the NSPasteboardCommunicationError) should be caught
by the caller.
"""
pp = pprint.PrettyPrinter()
copy_data = 'Data to copy (put in pasteboard): %s'
logging.debug(copy_data % pp.pformat(data))
# Clear the pasteboard first:
cleared = cls.clearAll()
if not cleared:
logging.warning('Clipboard could not clear properly')
return False
# Prepare to write the data
# If we just use writeObjects the sequence to write to the clipboard is
# a) Call clearContents()
# b) Call writeObjects() with a list of objects to write to the
# clipboard
if not isinstance(data, types.ListType):
data = [data]
pb = AppKit.NSPasteboard.generalPasteboard()
pb_set_ok = pb.writeObjects_(data)
return bool(pb_set_ok) | def function[copy, parameter[cls, data]]:
constant[Set the clipboard data ('Copy').
Parameters: data to set (string)
Optional: datatype if it's not a string
Returns: True / False on successful copy, Any exception raised (like
passes the NSPasteboardCommunicationError) should be caught
by the caller.
]
variable[pp] assign[=] call[name[pprint].PrettyPrinter, parameter[]]
variable[copy_data] assign[=] constant[Data to copy (put in pasteboard): %s]
call[name[logging].debug, parameter[binary_operation[name[copy_data] <ast.Mod object at 0x7da2590d6920> call[name[pp].pformat, parameter[name[data]]]]]]
variable[cleared] assign[=] call[name[cls].clearAll, parameter[]]
if <ast.UnaryOp object at 0x7da18dc06f80> begin[:]
call[name[logging].warning, parameter[constant[Clipboard could not clear properly]]]
return[constant[False]]
if <ast.UnaryOp object at 0x7da18dc065c0> begin[:]
variable[data] assign[=] list[[<ast.Name object at 0x7da18dc05690>]]
variable[pb] assign[=] call[name[AppKit].NSPasteboard.generalPasteboard, parameter[]]
variable[pb_set_ok] assign[=] call[name[pb].writeObjects_, parameter[name[data]]]
return[call[name[bool], parameter[name[pb_set_ok]]]] | keyword[def] identifier[copy] ( identifier[cls] , identifier[data] ):
literal[string]
identifier[pp] = identifier[pprint] . identifier[PrettyPrinter] ()
identifier[copy_data] = literal[string]
identifier[logging] . identifier[debug] ( identifier[copy_data] % identifier[pp] . identifier[pformat] ( identifier[data] ))
identifier[cleared] = identifier[cls] . identifier[clearAll] ()
keyword[if] keyword[not] identifier[cleared] :
identifier[logging] . identifier[warning] ( literal[string] )
keyword[return] keyword[False]
keyword[if] keyword[not] identifier[isinstance] ( identifier[data] , identifier[types] . identifier[ListType] ):
identifier[data] =[ identifier[data] ]
identifier[pb] = identifier[AppKit] . identifier[NSPasteboard] . identifier[generalPasteboard] ()
identifier[pb_set_ok] = identifier[pb] . identifier[writeObjects_] ( identifier[data] )
keyword[return] identifier[bool] ( identifier[pb_set_ok] ) | def copy(cls, data):
"""Set the clipboard data ('Copy').
Parameters: data to set (string)
Optional: datatype if it's not a string
Returns: True / False on successful copy, Any exception raised (like
passes the NSPasteboardCommunicationError) should be caught
by the caller.
"""
pp = pprint.PrettyPrinter()
copy_data = 'Data to copy (put in pasteboard): %s'
logging.debug(copy_data % pp.pformat(data))
# Clear the pasteboard first:
cleared = cls.clearAll()
if not cleared:
logging.warning('Clipboard could not clear properly')
return False # depends on [control=['if'], data=[]]
# Prepare to write the data
# If we just use writeObjects the sequence to write to the clipboard is
# a) Call clearContents()
# b) Call writeObjects() with a list of objects to write to the
# clipboard
if not isinstance(data, types.ListType):
data = [data] # depends on [control=['if'], data=[]]
pb = AppKit.NSPasteboard.generalPasteboard()
pb_set_ok = pb.writeObjects_(data)
return bool(pb_set_ok) |
def _getSyntaxBySourceFileName(self, name):
"""Get syntax by source name of file, which is going to be highlighted
"""
for regExp, xmlFileName in self._extensionToXmlFileName.items():
if regExp.match(name):
return self._getSyntaxByXmlFileName(xmlFileName)
else:
raise KeyError("No syntax for " + name) | def function[_getSyntaxBySourceFileName, parameter[self, name]]:
constant[Get syntax by source name of file, which is going to be highlighted
]
for taget[tuple[[<ast.Name object at 0x7da20c6c4550>, <ast.Name object at 0x7da20c6c7130>]]] in starred[call[name[self]._extensionToXmlFileName.items, parameter[]]] begin[:]
if call[name[regExp].match, parameter[name[name]]] begin[:]
return[call[name[self]._getSyntaxByXmlFileName, parameter[name[xmlFileName]]]] | keyword[def] identifier[_getSyntaxBySourceFileName] ( identifier[self] , identifier[name] ):
literal[string]
keyword[for] identifier[regExp] , identifier[xmlFileName] keyword[in] identifier[self] . identifier[_extensionToXmlFileName] . identifier[items] ():
keyword[if] identifier[regExp] . identifier[match] ( identifier[name] ):
keyword[return] identifier[self] . identifier[_getSyntaxByXmlFileName] ( identifier[xmlFileName] )
keyword[else] :
keyword[raise] identifier[KeyError] ( literal[string] + identifier[name] ) | def _getSyntaxBySourceFileName(self, name):
"""Get syntax by source name of file, which is going to be highlighted
"""
for (regExp, xmlFileName) in self._extensionToXmlFileName.items():
if regExp.match(name):
return self._getSyntaxByXmlFileName(xmlFileName) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
else:
raise KeyError('No syntax for ' + name) |
def app_templates_dirs(self):
"""
Build a cached dict with settings.INSTALLED_APPS as keys
and the 'templates' directory of each application as values.
"""
app_templates_dirs = OrderedDict()
for app_config in apps.get_app_configs():
templates_dir = os.path.join(
getattr(app_config, 'path', '/'), 'templates')
if os.path.isdir(templates_dir):
templates_dir = upath(templates_dir)
app_templates_dirs[app_config.name] = templates_dir
app_templates_dirs[app_config.label] = templates_dir
return app_templates_dirs | def function[app_templates_dirs, parameter[self]]:
constant[
Build a cached dict with settings.INSTALLED_APPS as keys
and the 'templates' directory of each application as values.
]
variable[app_templates_dirs] assign[=] call[name[OrderedDict], parameter[]]
for taget[name[app_config]] in starred[call[name[apps].get_app_configs, parameter[]]] begin[:]
variable[templates_dir] assign[=] call[name[os].path.join, parameter[call[name[getattr], parameter[name[app_config], constant[path], constant[/]]], constant[templates]]]
if call[name[os].path.isdir, parameter[name[templates_dir]]] begin[:]
variable[templates_dir] assign[=] call[name[upath], parameter[name[templates_dir]]]
call[name[app_templates_dirs]][name[app_config].name] assign[=] name[templates_dir]
call[name[app_templates_dirs]][name[app_config].label] assign[=] name[templates_dir]
return[name[app_templates_dirs]] | keyword[def] identifier[app_templates_dirs] ( identifier[self] ):
literal[string]
identifier[app_templates_dirs] = identifier[OrderedDict] ()
keyword[for] identifier[app_config] keyword[in] identifier[apps] . identifier[get_app_configs] ():
identifier[templates_dir] = identifier[os] . identifier[path] . identifier[join] (
identifier[getattr] ( identifier[app_config] , literal[string] , literal[string] ), literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[templates_dir] ):
identifier[templates_dir] = identifier[upath] ( identifier[templates_dir] )
identifier[app_templates_dirs] [ identifier[app_config] . identifier[name] ]= identifier[templates_dir]
identifier[app_templates_dirs] [ identifier[app_config] . identifier[label] ]= identifier[templates_dir]
keyword[return] identifier[app_templates_dirs] | def app_templates_dirs(self):
"""
Build a cached dict with settings.INSTALLED_APPS as keys
and the 'templates' directory of each application as values.
"""
app_templates_dirs = OrderedDict()
for app_config in apps.get_app_configs():
templates_dir = os.path.join(getattr(app_config, 'path', '/'), 'templates')
if os.path.isdir(templates_dir):
templates_dir = upath(templates_dir)
app_templates_dirs[app_config.name] = templates_dir
app_templates_dirs[app_config.label] = templates_dir # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['app_config']]
return app_templates_dirs |
def ripple_carry_add(A, B, cin=0):
"""Return symbolic logic for an N-bit ripple carry adder."""
if len(A) != len(B):
raise ValueError("expected A and B to be equal length")
ss, cs = list(), list()
for i, a in enumerate(A):
c = (cin if i == 0 else cs[i-1])
ss.append(a ^ B[i] ^ c)
cs.append(a & B[i] | a & c | B[i] & c)
return farray(ss), farray(cs) | def function[ripple_carry_add, parameter[A, B, cin]]:
constant[Return symbolic logic for an N-bit ripple carry adder.]
if compare[call[name[len], parameter[name[A]]] not_equal[!=] call[name[len], parameter[name[B]]]] begin[:]
<ast.Raise object at 0x7da1b0d0c190>
<ast.Tuple object at 0x7da1b0d0e0b0> assign[=] tuple[[<ast.Call object at 0x7da1b0d0cfa0>, <ast.Call object at 0x7da1b0d0c5e0>]]
for taget[tuple[[<ast.Name object at 0x7da1b0d0e740>, <ast.Name object at 0x7da1b0d0dab0>]]] in starred[call[name[enumerate], parameter[name[A]]]] begin[:]
variable[c] assign[=] <ast.IfExp object at 0x7da1b0d0d2a0>
call[name[ss].append, parameter[binary_operation[binary_operation[name[a] <ast.BitXor object at 0x7da2590d6b00> call[name[B]][name[i]]] <ast.BitXor object at 0x7da2590d6b00> name[c]]]]
call[name[cs].append, parameter[binary_operation[binary_operation[binary_operation[name[a] <ast.BitAnd object at 0x7da2590d6b60> call[name[B]][name[i]]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[name[a] <ast.BitAnd object at 0x7da2590d6b60> name[c]]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[call[name[B]][name[i]] <ast.BitAnd object at 0x7da2590d6b60> name[c]]]]]
return[tuple[[<ast.Call object at 0x7da1b0ebc130>, <ast.Call object at 0x7da1b0ebcd00>]]] | keyword[def] identifier[ripple_carry_add] ( identifier[A] , identifier[B] , identifier[cin] = literal[int] ):
literal[string]
keyword[if] identifier[len] ( identifier[A] )!= identifier[len] ( identifier[B] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[ss] , identifier[cs] = identifier[list] (), identifier[list] ()
keyword[for] identifier[i] , identifier[a] keyword[in] identifier[enumerate] ( identifier[A] ):
identifier[c] =( identifier[cin] keyword[if] identifier[i] == literal[int] keyword[else] identifier[cs] [ identifier[i] - literal[int] ])
identifier[ss] . identifier[append] ( identifier[a] ^ identifier[B] [ identifier[i] ]^ identifier[c] )
identifier[cs] . identifier[append] ( identifier[a] & identifier[B] [ identifier[i] ]| identifier[a] & identifier[c] | identifier[B] [ identifier[i] ]& identifier[c] )
keyword[return] identifier[farray] ( identifier[ss] ), identifier[farray] ( identifier[cs] ) | def ripple_carry_add(A, B, cin=0):
"""Return symbolic logic for an N-bit ripple carry adder."""
if len(A) != len(B):
raise ValueError('expected A and B to be equal length') # depends on [control=['if'], data=[]]
(ss, cs) = (list(), list())
for (i, a) in enumerate(A):
c = cin if i == 0 else cs[i - 1]
ss.append(a ^ B[i] ^ c)
cs.append(a & B[i] | a & c | B[i] & c) # depends on [control=['for'], data=[]]
return (farray(ss), farray(cs)) |
def swap_buffers(self):
"""
Headless window currently don't support double buffering.
We only increment the frame counter here.
"""
self.frames += 1
if self.headless_frames and self.frames >= self.headless_frames:
self.close() | def function[swap_buffers, parameter[self]]:
constant[
Headless window currently don't support double buffering.
We only increment the frame counter here.
]
<ast.AugAssign object at 0x7da20c76e140>
if <ast.BoolOp object at 0x7da20c76e350> begin[:]
call[name[self].close, parameter[]] | keyword[def] identifier[swap_buffers] ( identifier[self] ):
literal[string]
identifier[self] . identifier[frames] += literal[int]
keyword[if] identifier[self] . identifier[headless_frames] keyword[and] identifier[self] . identifier[frames] >= identifier[self] . identifier[headless_frames] :
identifier[self] . identifier[close] () | def swap_buffers(self):
"""
Headless window currently don't support double buffering.
We only increment the frame counter here.
"""
self.frames += 1
if self.headless_frames and self.frames >= self.headless_frames:
self.close() # depends on [control=['if'], data=[]] |
def register_to_openmath(self, py_class, converter):
"""Register a conversion from Python to OpenMath
:param py_class: A Python class the conversion is attached to, or None
:type py_class: None, type
:param converter: A conversion function or an OpenMath object
:type converter: Callable, OMAny
:rtype: None
``converter`` will used to convert any object of type ``py_class``,
or any object if ``py_class`` is ``None``. If ``converter`` is an
OpenMath object, it is returned immediately. If it is a callable, it
is called with the Python object as paramter; in this case, it must
either return an OpenMath object, or raise an exception. The
special exception ``CannotConvertError`` can be used to signify that
``converter`` does not know how to convert the current object, and that
``to_openmath`` shall continue with the other converters. Any other
exception stops conversion immediately.
Converters registered by this function are called in order from the
most recent to the oldest.
"""
if py_class is not None and not isclass(py_class):
raise TypeError('Expected class, found %r' % py_class)
if not callable(converter) and not isinstance(converter, om.OMAny):
raise TypeError('Expected callable or openmath.OMAny object, found %r' % converter)
self._conv_to_om.append((py_class, converter)) | def function[register_to_openmath, parameter[self, py_class, converter]]:
constant[Register a conversion from Python to OpenMath
:param py_class: A Python class the conversion is attached to, or None
:type py_class: None, type
:param converter: A conversion function or an OpenMath object
:type converter: Callable, OMAny
:rtype: None
``converter`` will used to convert any object of type ``py_class``,
or any object if ``py_class`` is ``None``. If ``converter`` is an
OpenMath object, it is returned immediately. If it is a callable, it
is called with the Python object as paramter; in this case, it must
either return an OpenMath object, or raise an exception. The
special exception ``CannotConvertError`` can be used to signify that
``converter`` does not know how to convert the current object, and that
``to_openmath`` shall continue with the other converters. Any other
exception stops conversion immediately.
Converters registered by this function are called in order from the
most recent to the oldest.
]
if <ast.BoolOp object at 0x7da1b0370c70> begin[:]
<ast.Raise object at 0x7da1b02ad660>
if <ast.BoolOp object at 0x7da1b02ad270> begin[:]
<ast.Raise object at 0x7da1b02aeb30>
call[name[self]._conv_to_om.append, parameter[tuple[[<ast.Name object at 0x7da1b0210160>, <ast.Name object at 0x7da1b0213760>]]]] | keyword[def] identifier[register_to_openmath] ( identifier[self] , identifier[py_class] , identifier[converter] ):
literal[string]
keyword[if] identifier[py_class] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[isclass] ( identifier[py_class] ):
keyword[raise] identifier[TypeError] ( literal[string] % identifier[py_class] )
keyword[if] keyword[not] identifier[callable] ( identifier[converter] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[converter] , identifier[om] . identifier[OMAny] ):
keyword[raise] identifier[TypeError] ( literal[string] % identifier[converter] )
identifier[self] . identifier[_conv_to_om] . identifier[append] (( identifier[py_class] , identifier[converter] )) | def register_to_openmath(self, py_class, converter):
"""Register a conversion from Python to OpenMath
:param py_class: A Python class the conversion is attached to, or None
:type py_class: None, type
:param converter: A conversion function or an OpenMath object
:type converter: Callable, OMAny
:rtype: None
``converter`` will used to convert any object of type ``py_class``,
or any object if ``py_class`` is ``None``. If ``converter`` is an
OpenMath object, it is returned immediately. If it is a callable, it
is called with the Python object as paramter; in this case, it must
either return an OpenMath object, or raise an exception. The
special exception ``CannotConvertError`` can be used to signify that
``converter`` does not know how to convert the current object, and that
``to_openmath`` shall continue with the other converters. Any other
exception stops conversion immediately.
Converters registered by this function are called in order from the
most recent to the oldest.
"""
if py_class is not None and (not isclass(py_class)):
raise TypeError('Expected class, found %r' % py_class) # depends on [control=['if'], data=[]]
if not callable(converter) and (not isinstance(converter, om.OMAny)):
raise TypeError('Expected callable or openmath.OMAny object, found %r' % converter) # depends on [control=['if'], data=[]]
self._conv_to_om.append((py_class, converter)) |
def _query(self, params, direct=False):
"""
:param params: dict
:return: pybomb.clients.response
"""
params["api_key"] = self._api_key
if "format" not in params:
params["format"] = self._default_format
response = self._query_api(params, direct)
self._validate_response(response)
return Response.from_response_data(response) | def function[_query, parameter[self, params, direct]]:
constant[
:param params: dict
:return: pybomb.clients.response
]
call[name[params]][constant[api_key]] assign[=] name[self]._api_key
if compare[constant[format] <ast.NotIn object at 0x7da2590d7190> name[params]] begin[:]
call[name[params]][constant[format]] assign[=] name[self]._default_format
variable[response] assign[=] call[name[self]._query_api, parameter[name[params], name[direct]]]
call[name[self]._validate_response, parameter[name[response]]]
return[call[name[Response].from_response_data, parameter[name[response]]]] | keyword[def] identifier[_query] ( identifier[self] , identifier[params] , identifier[direct] = keyword[False] ):
literal[string]
identifier[params] [ literal[string] ]= identifier[self] . identifier[_api_key]
keyword[if] literal[string] keyword[not] keyword[in] identifier[params] :
identifier[params] [ literal[string] ]= identifier[self] . identifier[_default_format]
identifier[response] = identifier[self] . identifier[_query_api] ( identifier[params] , identifier[direct] )
identifier[self] . identifier[_validate_response] ( identifier[response] )
keyword[return] identifier[Response] . identifier[from_response_data] ( identifier[response] ) | def _query(self, params, direct=False):
"""
:param params: dict
:return: pybomb.clients.response
"""
params['api_key'] = self._api_key
if 'format' not in params:
params['format'] = self._default_format # depends on [control=['if'], data=['params']]
response = self._query_api(params, direct)
self._validate_response(response)
return Response.from_response_data(response) |
def get_all_users(configuration=None, **kwargs):
# type: (Optional[Configuration], Any) -> List['User']
"""Get all users in HDX
Args:
configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
**kwargs: See below
q (str): Restrict to names containing a string. Defaults to all users.
order_by (str): Field by which to sort - any user field or edits (number_of_edits). Defaults to 'name'.
Returns:
List[User]: List of all users in HDX
"""
user = User(configuration=configuration)
user['id'] = 'all users' # only for error message if produced
result = user._write_to_hdx('list', kwargs, 'id')
users = list()
if result:
for userdict in result:
user = User(userdict, configuration=configuration)
users.append(user)
else:
logger.debug(result)
return users | def function[get_all_users, parameter[configuration]]:
constant[Get all users in HDX
Args:
configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
**kwargs: See below
q (str): Restrict to names containing a string. Defaults to all users.
order_by (str): Field by which to sort - any user field or edits (number_of_edits). Defaults to 'name'.
Returns:
List[User]: List of all users in HDX
]
variable[user] assign[=] call[name[User], parameter[]]
call[name[user]][constant[id]] assign[=] constant[all users]
variable[result] assign[=] call[name[user]._write_to_hdx, parameter[constant[list], name[kwargs], constant[id]]]
variable[users] assign[=] call[name[list], parameter[]]
if name[result] begin[:]
for taget[name[userdict]] in starred[name[result]] begin[:]
variable[user] assign[=] call[name[User], parameter[name[userdict]]]
call[name[users].append, parameter[name[user]]]
return[name[users]] | keyword[def] identifier[get_all_users] ( identifier[configuration] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[user] = identifier[User] ( identifier[configuration] = identifier[configuration] )
identifier[user] [ literal[string] ]= literal[string]
identifier[result] = identifier[user] . identifier[_write_to_hdx] ( literal[string] , identifier[kwargs] , literal[string] )
identifier[users] = identifier[list] ()
keyword[if] identifier[result] :
keyword[for] identifier[userdict] keyword[in] identifier[result] :
identifier[user] = identifier[User] ( identifier[userdict] , identifier[configuration] = identifier[configuration] )
identifier[users] . identifier[append] ( identifier[user] )
keyword[else] :
identifier[logger] . identifier[debug] ( identifier[result] )
keyword[return] identifier[users] | def get_all_users(configuration=None, **kwargs):
# type: (Optional[Configuration], Any) -> List['User']
"Get all users in HDX\n\n Args:\n configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.\n **kwargs: See below\n q (str): Restrict to names containing a string. Defaults to all users.\n order_by (str): Field by which to sort - any user field or edits (number_of_edits). Defaults to 'name'.\n\n Returns:\n List[User]: List of all users in HDX\n "
user = User(configuration=configuration)
user['id'] = 'all users' # only for error message if produced
result = user._write_to_hdx('list', kwargs, 'id')
users = list()
if result:
for userdict in result:
user = User(userdict, configuration=configuration)
users.append(user) # depends on [control=['for'], data=['userdict']] # depends on [control=['if'], data=[]]
else:
logger.debug(result)
return users |
def init(self):
    """Open a connection to the RabbitMQ server and return a channel.

    Returns:
        A blocking ``pika`` channel on success, or ``None`` when the
        exporter is disabled or the connection attempt fails.
    """
    # Exporter disabled: nothing to connect to.
    if not self.export_enable:
        return None
    try:
        # Build the AMQP URL from the configured credentials/host/port.
        url = ('amqp://' + self.user +
               ':' + self.password +
               '@' + self.host +
               ':' + self.port + '/')
        conn = pika.BlockingConnection(pika.URLParameters(url))
        return conn.channel()
    except Exception as e:
        # Broad catch is deliberate: any failure is logged and reported
        # to the caller as "no channel available".
        logger.critical("Connection to rabbitMQ failed : %s " % e)
        return None
constant[Init the connection to the rabbitmq server.]
if <ast.UnaryOp object at 0x7da2047eb370> begin[:]
return[constant[None]]
<ast.Try object at 0x7da2047ea710> | keyword[def] identifier[init] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[export_enable] :
keyword[return] keyword[None]
keyword[try] :
identifier[parameters] = identifier[pika] . identifier[URLParameters] (
literal[string] + identifier[self] . identifier[user] +
literal[string] + identifier[self] . identifier[password] +
literal[string] + identifier[self] . identifier[host] +
literal[string] + identifier[self] . identifier[port] + literal[string] )
identifier[connection] = identifier[pika] . identifier[BlockingConnection] ( identifier[parameters] )
identifier[channel] = identifier[connection] . identifier[channel] ()
keyword[return] identifier[channel]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[critical] ( literal[string] % identifier[e] )
keyword[return] keyword[None] | def init(self):
"""Init the connection to the rabbitmq server."""
if not self.export_enable:
return None # depends on [control=['if'], data=[]]
try:
parameters = pika.URLParameters('amqp://' + self.user + ':' + self.password + '@' + self.host + ':' + self.port + '/')
connection = pika.BlockingConnection(parameters)
channel = connection.channel()
return channel # depends on [control=['try'], data=[]]
except Exception as e:
logger.critical('Connection to rabbitMQ failed : %s ' % e)
return None # depends on [control=['except'], data=['e']] |
def process_pattern(fn):
    """Return a list of paths matching a pattern (or None on error).

    The pattern is first split into a directory and a filename pattern;
    the directory listing is then filtered with ``fnmatch``.
    """
    directory, pattern = validate_pattern(fn)
    # validate_pattern signals an invalid pattern with directory=None.
    if directory is None:
        return None
    matches = fnmatch.filter(auto(listdir, directory), pattern)
    if not matches:
        print_err("cannot access '{}': No such file or directory".format(fn))
        return None
    return [directory + '/' + match for match in matches]
constant[Return a list of paths matching a pattern (or None on error).
]
<ast.Tuple object at 0x7da20c6c7460> assign[=] call[name[validate_pattern], parameter[name[fn]]]
if compare[name[directory] is_not constant[None]] begin[:]
variable[filenames] assign[=] call[name[fnmatch].filter, parameter[call[name[auto], parameter[name[listdir], name[directory]]], name[pattern]]]
if name[filenames] begin[:]
return[<ast.ListComp object at 0x7da20c6c6380>] | keyword[def] identifier[process_pattern] ( identifier[fn] ):
literal[string]
identifier[directory] , identifier[pattern] = identifier[validate_pattern] ( identifier[fn] )
keyword[if] identifier[directory] keyword[is] keyword[not] keyword[None] :
identifier[filenames] = identifier[fnmatch] . identifier[filter] ( identifier[auto] ( identifier[listdir] , identifier[directory] ), identifier[pattern] )
keyword[if] identifier[filenames] :
keyword[return] [ identifier[directory] + literal[string] + identifier[sfn] keyword[for] identifier[sfn] keyword[in] identifier[filenames] ]
keyword[else] :
identifier[print_err] ( literal[string] . identifier[format] ( identifier[fn] )) | def process_pattern(fn):
"""Return a list of paths matching a pattern (or None on error).
"""
(directory, pattern) = validate_pattern(fn)
if directory is not None:
filenames = fnmatch.filter(auto(listdir, directory), pattern)
if filenames:
return [directory + '/' + sfn for sfn in filenames] # depends on [control=['if'], data=[]]
else:
print_err("cannot access '{}': No such file or directory".format(fn)) # depends on [control=['if'], data=['directory']] |
def on_linecolor(self, *args):
    """If I don't yet have the instructions for drawing the selection box
    in my canvas, put them there. In any case, set the
    :class:`Color` instruction to match my current ``linecolor``.
    """
    # Fast path: the instruction group was already built on a previous
    # call, so only the colour needs refreshing.
    if hasattr(self, 'color'):
        self.color.rgba = self.linecolor
        return
    # Callbacks that keep the drawn box in sync with the widget geometry.
    def upd_box_translate(*args):
        # Move the box's local origin to the widget position.
        self.box_translate.xy = self.pos
    def upd_box_points(*args):
        # Closed rectangle outline in local coordinates (last point
        # repeats the first so the line closes).
        self.box.points = [0, 0, self.width, 0, self.width, self.height, 0, self.height, 0, 0]
    self.boxgrp = boxgrp = InstructionGroup()
    self.color = Color(*self.linecolor)
    self.box_translate = Translate(*self.pos)
    # Instruction order matters: push the matrix, apply the translation,
    # set the colour, then draw the outline.
    boxgrp.add(PushMatrix())
    boxgrp.add(self.box_translate)
    boxgrp.add(self.color)
    self.box = Line()
    upd_box_points()
    self.bind(
        size=upd_box_points,
        pos=upd_box_translate
    )
    boxgrp.add(self.box)
    # Reset the draw colour to white so later instructions are unaffected.
    boxgrp.add(Color(1., 1., 1.))
    boxgrp.add(PopMatrix())
constant[If I don't yet have the instructions for drawing the selection box
in my canvas, put them there. In any case, set the
:class:`Color` instruction to match my current ``linecolor``.
]
if call[name[hasattr], parameter[name[self], constant[color]]] begin[:]
name[self].color.rgba assign[=] name[self].linecolor
return[None]
def function[upd_box_translate, parameter[]]:
name[self].box_translate.xy assign[=] name[self].pos
def function[upd_box_points, parameter[]]:
name[self].box.points assign[=] list[[<ast.Constant object at 0x7da1b0babbb0>, <ast.Constant object at 0x7da1b0babb80>, <ast.Attribute object at 0x7da1b0baba30>, <ast.Constant object at 0x7da1b0baba90>, <ast.Attribute object at 0x7da1b0bab940>, <ast.Attribute object at 0x7da1b0babf10>, <ast.Constant object at 0x7da1b0bab820>, <ast.Attribute object at 0x7da1b0bab850>, <ast.Constant object at 0x7da1b0bab910>, <ast.Constant object at 0x7da1b0bab8e0>]]
name[self].boxgrp assign[=] call[name[InstructionGroup], parameter[]]
name[self].color assign[=] call[name[Color], parameter[<ast.Starred object at 0x7da1b0babd30>]]
name[self].box_translate assign[=] call[name[Translate], parameter[<ast.Starred object at 0x7da1b0baae90>]]
call[name[boxgrp].add, parameter[call[name[PushMatrix], parameter[]]]]
call[name[boxgrp].add, parameter[name[self].box_translate]]
call[name[boxgrp].add, parameter[name[self].color]]
name[self].box assign[=] call[name[Line], parameter[]]
call[name[upd_box_points], parameter[]]
call[name[self].bind, parameter[]]
call[name[boxgrp].add, parameter[name[self].box]]
call[name[boxgrp].add, parameter[call[name[Color], parameter[constant[1.0], constant[1.0], constant[1.0]]]]]
call[name[boxgrp].add, parameter[call[name[PopMatrix], parameter[]]]] | keyword[def] identifier[on_linecolor] ( identifier[self] ,* identifier[args] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[color] . identifier[rgba] = identifier[self] . identifier[linecolor]
keyword[return]
keyword[def] identifier[upd_box_translate] (* identifier[args] ):
identifier[self] . identifier[box_translate] . identifier[xy] = identifier[self] . identifier[pos]
keyword[def] identifier[upd_box_points] (* identifier[args] ):
identifier[self] . identifier[box] . identifier[points] =[ literal[int] , literal[int] , identifier[self] . identifier[width] , literal[int] , identifier[self] . identifier[width] , identifier[self] . identifier[height] , literal[int] , identifier[self] . identifier[height] , literal[int] , literal[int] ]
identifier[self] . identifier[boxgrp] = identifier[boxgrp] = identifier[InstructionGroup] ()
identifier[self] . identifier[color] = identifier[Color] (* identifier[self] . identifier[linecolor] )
identifier[self] . identifier[box_translate] = identifier[Translate] (* identifier[self] . identifier[pos] )
identifier[boxgrp] . identifier[add] ( identifier[PushMatrix] ())
identifier[boxgrp] . identifier[add] ( identifier[self] . identifier[box_translate] )
identifier[boxgrp] . identifier[add] ( identifier[self] . identifier[color] )
identifier[self] . identifier[box] = identifier[Line] ()
identifier[upd_box_points] ()
identifier[self] . identifier[bind] (
identifier[size] = identifier[upd_box_points] ,
identifier[pos] = identifier[upd_box_translate]
)
identifier[boxgrp] . identifier[add] ( identifier[self] . identifier[box] )
identifier[boxgrp] . identifier[add] ( identifier[Color] ( literal[int] , literal[int] , literal[int] ))
identifier[boxgrp] . identifier[add] ( identifier[PopMatrix] ()) | def on_linecolor(self, *args):
"""If I don't yet have the instructions for drawing the selection box
in my canvas, put them there. In any case, set the
:class:`Color` instruction to match my current ``linecolor``.
"""
if hasattr(self, 'color'):
self.color.rgba = self.linecolor
return # depends on [control=['if'], data=[]]
def upd_box_translate(*args):
self.box_translate.xy = self.pos
def upd_box_points(*args):
self.box.points = [0, 0, self.width, 0, self.width, self.height, 0, self.height, 0, 0]
self.boxgrp = boxgrp = InstructionGroup()
self.color = Color(*self.linecolor)
self.box_translate = Translate(*self.pos)
boxgrp.add(PushMatrix())
boxgrp.add(self.box_translate)
boxgrp.add(self.color)
self.box = Line()
upd_box_points()
self.bind(size=upd_box_points, pos=upd_box_translate)
boxgrp.add(self.box)
boxgrp.add(Color(1.0, 1.0, 1.0))
boxgrp.add(PopMatrix()) |
def with_prefix(self, root_path):
    """Returns a new conflict with a prepended prefix as a path.

    The conflict type and body are carried over unchanged.
    """
    prefixed_path = root_path + self.path
    return Conflict(self.conflict_type, prefixed_path, self.body)
constant[Returns a new conflict with a prepended prefix as a path.]
return[call[name[Conflict], parameter[name[self].conflict_type, binary_operation[name[root_path] + name[self].path], name[self].body]]] | keyword[def] identifier[with_prefix] ( identifier[self] , identifier[root_path] ):
literal[string]
keyword[return] identifier[Conflict] ( identifier[self] . identifier[conflict_type] , identifier[root_path] + identifier[self] . identifier[path] , identifier[self] . identifier[body] ) | def with_prefix(self, root_path):
"""Returns a new conflict with a prepended prefix as a path."""
return Conflict(self.conflict_type, root_path + self.path, self.body) |
def check_path_allowed(path):
    """
    If the server is non local raise an error if
    the path is outside project directories
    Raise a 403 in case of error

    :param path: filesystem path to validate
    :raises aiohttp.web.HTTPForbidden: when the server is configured as
        non-local and *path* lies outside the project directory
    """
    config = Config.instance().get_section_config("Server")
    project_directory = os.path.normpath(get_default_project_directory())
    # Compare whole path components, not raw characters:
    # os.path.commonprefix() is character-wise, so a sibling such as
    # "/projects-evil/x" would wrongly share the full "/projects" prefix
    # and bypass the containment check. normpath() also collapses any
    # ".." segments before the comparison.
    norm_path = os.path.normpath(path)
    if norm_path == project_directory or norm_path.startswith(project_directory + os.sep):
        return
    if "local" in config and config.getboolean("local") is False:
        raise aiohttp.web.HTTPForbidden(text="The path is not allowed")
constant[
If the server is non local raise an error if
the path is outside project directories
Raise a 403 in case of error
]
variable[config] assign[=] call[call[name[Config].instance, parameter[]].get_section_config, parameter[constant[Server]]]
variable[project_directory] assign[=] call[name[get_default_project_directory], parameter[]]
if compare[call[name[len], parameter[call[name[os].path.commonprefix, parameter[list[[<ast.Name object at 0x7da2044c2f20>, <ast.Name object at 0x7da2044c2f50>]]]]]] equal[==] call[name[len], parameter[name[project_directory]]]] begin[:]
return[None]
if <ast.BoolOp object at 0x7da2044c0b80> begin[:]
<ast.Raise object at 0x7da2044c14e0> | keyword[def] identifier[check_path_allowed] ( identifier[path] ):
literal[string]
identifier[config] = identifier[Config] . identifier[instance] (). identifier[get_section_config] ( literal[string] )
identifier[project_directory] = identifier[get_default_project_directory] ()
keyword[if] identifier[len] ( identifier[os] . identifier[path] . identifier[commonprefix] ([ identifier[project_directory] , identifier[path] ]))== identifier[len] ( identifier[project_directory] ):
keyword[return]
keyword[if] literal[string] keyword[in] identifier[config] keyword[and] identifier[config] . identifier[getboolean] ( literal[string] ) keyword[is] keyword[False] :
keyword[raise] identifier[aiohttp] . identifier[web] . identifier[HTTPForbidden] ( identifier[text] = literal[string] ) | def check_path_allowed(path):
"""
If the server is non local raise an error if
the path is outside project directories
Raise a 403 in case of error
"""
config = Config.instance().get_section_config('Server')
project_directory = get_default_project_directory()
if len(os.path.commonprefix([project_directory, path])) == len(project_directory):
return # depends on [control=['if'], data=[]]
if 'local' in config and config.getboolean('local') is False:
raise aiohttp.web.HTTPForbidden(text='The path is not allowed') # depends on [control=['if'], data=[]] |
def do_include(self, t):
    """
    Default handling of a #include line.

    Resolves the include target, records the located file in
    ``self.result`` and prepends the tokenized file contents (bracketed
    by ``scons_current_file`` markers) to the pending tuple queue.
    """
    t = self.resolve_include(t)
    include_file = self.find_include_file(t)
    if not include_file:
        return
    self.result.append(include_file)
    contents = self.read_file(include_file)
    # Bracket the included tokens with markers so the scanner can track
    # which file each subsequent tuple came from.
    enter = [('scons_current_file', include_file)]
    leave = [('scons_current_file', self.current_file)]
    # Slice-assign so any external reference to self.tuples stays valid.
    self.tuples[:] = enter + self.tupleize(contents) + leave + self.tuples
constant[
Default handling of a #include line.
]
variable[t] assign[=] call[name[self].resolve_include, parameter[name[t]]]
variable[include_file] assign[=] call[name[self].find_include_file, parameter[name[t]]]
if name[include_file] begin[:]
call[name[self].result.append, parameter[name[include_file]]]
variable[contents] assign[=] call[name[self].read_file, parameter[name[include_file]]]
variable[new_tuples] assign[=] binary_operation[binary_operation[list[[<ast.Tuple object at 0x7da18fe91270>]] + call[name[self].tupleize, parameter[name[contents]]]] + list[[<ast.Tuple object at 0x7da18fe92320>]]]
call[name[self].tuples][<ast.Slice object at 0x7da18fe91ab0>] assign[=] binary_operation[name[new_tuples] + name[self].tuples] | keyword[def] identifier[do_include] ( identifier[self] , identifier[t] ):
literal[string]
identifier[t] = identifier[self] . identifier[resolve_include] ( identifier[t] )
identifier[include_file] = identifier[self] . identifier[find_include_file] ( identifier[t] )
keyword[if] identifier[include_file] :
identifier[self] . identifier[result] . identifier[append] ( identifier[include_file] )
identifier[contents] = identifier[self] . identifier[read_file] ( identifier[include_file] )
identifier[new_tuples] =[( literal[string] , identifier[include_file] )]+ identifier[self] . identifier[tupleize] ( identifier[contents] )+[( literal[string] , identifier[self] . identifier[current_file] )]
identifier[self] . identifier[tuples] [:]= identifier[new_tuples] + identifier[self] . identifier[tuples] | def do_include(self, t):
"""
Default handling of a #include line.
"""
t = self.resolve_include(t)
include_file = self.find_include_file(t)
if include_file:
#print("include_file =", include_file)
self.result.append(include_file)
contents = self.read_file(include_file)
new_tuples = [('scons_current_file', include_file)] + self.tupleize(contents) + [('scons_current_file', self.current_file)]
self.tuples[:] = new_tuples + self.tuples # depends on [control=['if'], data=[]] |
def create(config, name, group, type):
    """Create an LDAP user.

    Args:
        config: CLI configuration object (part of the command signature).
        name: Two-element sequence holding the user's first and last name.
        group: Name of the LDAP group for the new account.
        type: Account type, either 'user' or 'service'.

    Raises:
        click.BadOptionUsage: If *type* is not 'user' or 'service'.
    """
    if type not in ('user', 'service'):
        # BadOptionUsage's signature is (option_name, message); passing
        # only the message raises a TypeError on click >= 7 instead of
        # the intended usage error.
        raise click.BadOptionUsage('--type', "--type must be 'user' or 'service'")
    client = Client()
    client.prepare_connection()
    user_api = API(client)
    group_api = GroupApi(client)
    user_api.create(name[0], name[1], group, type, group_api)
constant[Create an LDAP user.]
if compare[name[type] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da2044c2a70>, <ast.Constant object at 0x7da2044c22c0>]]] begin[:]
<ast.Raise object at 0x7da2044c2c20>
variable[client] assign[=] call[name[Client], parameter[]]
call[name[client].prepare_connection, parameter[]]
variable[user_api] assign[=] call[name[API], parameter[name[client]]]
variable[group_api] assign[=] call[name[GroupApi], parameter[name[client]]]
call[name[user_api].create, parameter[call[name[name]][constant[0]], call[name[name]][constant[1]], name[group], name[type], name[group_api]]] | keyword[def] identifier[create] ( identifier[config] , identifier[name] , identifier[group] , identifier[type] ):
literal[string]
keyword[if] identifier[type] keyword[not] keyword[in] ( literal[string] , literal[string] ):
keyword[raise] identifier[click] . identifier[BadOptionUsage] ( literal[string] )
identifier[client] = identifier[Client] ()
identifier[client] . identifier[prepare_connection] ()
identifier[user_api] = identifier[API] ( identifier[client] )
identifier[group_api] = identifier[GroupApi] ( identifier[client] )
identifier[user_api] . identifier[create] ( identifier[name] [ literal[int] ], identifier[name] [ literal[int] ], identifier[group] , identifier[type] , identifier[group_api] ) | def create(config, name, group, type):
"""Create an LDAP user."""
if type not in ('user', 'service'):
raise click.BadOptionUsage("--type must be 'user' or 'service'") # depends on [control=['if'], data=[]]
client = Client()
client.prepare_connection()
user_api = API(client)
group_api = GroupApi(client)
user_api.create(name[0], name[1], group, type, group_api) |
def find_by_id(self, object_type, field, value):
    """
    Find resource by a specific ID.
    Results are a dict in the format:
    {
        'id': <resource URI fragment>,
        'identifier': <resource identifier>,
        'title': <title of the resource>,
        'levelOfDescription': <level of description>,
    }
    :param str object_type: One of 'digital_object_components' or 'archival_objects'
    :param str field: Name of the field to search. One of 'component_id' or 'ref_id'.
    :param value: Value of the field to search for
    :return: List of dicts containing results.
    :raises ValueError: if object_type or field is not one of the allowed values
    """
    if object_type not in ("digital_object_components", "archival_objects"):
        raise ValueError(
            "object_type must be 'digital_object_components' or 'archival_objects'"
        )
    if field not in ("ref_id", "component_id"):
        raise ValueError("field must be 'component_id' or 'ref_id'")
    response = self._get(
        self.repository + "/find_by_id/" + object_type,
        params={field + "[]": value, "resolve[]": object_type},
    )
    formatted = []
    for record in response.json()[object_type]:
        resolved = record["_resolved"]
        # Archival objects carry a ref_id; digital object components
        # fall back to their component_id (empty string if absent).
        if "ref_id" in resolved:
            identifier = resolved["ref_id"]
        else:
            identifier = resolved.get("component_id", "")
        formatted.append({
            "id": record["ref"],
            "type": self.resource_type(record["ref"]),
            "identifier": identifier,
            "title": resolved.get("title", ""),
            "levelOfDescription": resolved.get("level", ""),
            "fullrecord": resolved,
        })
    return formatted
constant[
Find resource by a specific ID.
Results are a dict in the format:
{
'id': <resource URI fragment>,
'identifier': <resource identifier>,
'title': <title of the resource>,
'levelOfDescription': <level of description>,
}
:param str object_type: One of 'digital_object_components' or 'archival_objects'
:param str field: Name of the field to search. One of 'component_id' or 'ref_id'.
:param value: Value of the field to search for
:return: List of dicts containing results.
]
def function[format_record, parameter[record]]:
variable[resolved] assign[=] call[name[record]][constant[_resolved]]
variable[identifier] assign[=] <ast.IfExp object at 0x7da2047eb3d0>
return[dictionary[[<ast.Constant object at 0x7da18fe92c20>, <ast.Constant object at 0x7da18fe910c0>, <ast.Constant object at 0x7da18fe90730>, <ast.Constant object at 0x7da18fe90fa0>, <ast.Constant object at 0x7da18fe90100>, <ast.Constant object at 0x7da20c6e7580>], [<ast.Subscript object at 0x7da20c6e5780>, <ast.Call object at 0x7da20c6e4e50>, <ast.Name object at 0x7da20c6e4b50>, <ast.Call object at 0x7da20c6e7a30>, <ast.Call object at 0x7da20c6e7400>, <ast.Name object at 0x7da20c6e4bb0>]]]
if compare[name[object_type] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da20c6e6320>, <ast.Constant object at 0x7da20c6e5b40>]]] begin[:]
<ast.Raise object at 0x7da20c6e52d0>
if compare[name[field] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da20c6e7a60>, <ast.Constant object at 0x7da20c6e5870>]]] begin[:]
<ast.Raise object at 0x7da20c6e4850>
variable[params] assign[=] dictionary[[<ast.BinOp object at 0x7da1b1800f40>, <ast.Constant object at 0x7da1b1802920>], [<ast.Name object at 0x7da1b1801240>, <ast.Name object at 0x7da1b1802c50>]]
variable[url] assign[=] binary_operation[binary_operation[name[self].repository + constant[/find_by_id/]] + name[object_type]]
variable[response] assign[=] call[name[self]._get, parameter[name[url]]]
variable[hits] assign[=] call[name[response].json, parameter[]]
return[<ast.ListComp object at 0x7da20c6e5120>] | keyword[def] identifier[find_by_id] ( identifier[self] , identifier[object_type] , identifier[field] , identifier[value] ):
literal[string]
keyword[def] identifier[format_record] ( identifier[record] ):
identifier[resolved] = identifier[record] [ literal[string] ]
identifier[identifier] =(
identifier[resolved] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[resolved]
keyword[else] identifier[resolved] . identifier[get] ( literal[string] , literal[string] )
)
keyword[return] {
literal[string] : identifier[record] [ literal[string] ],
literal[string] : identifier[self] . identifier[resource_type] ( identifier[record] [ literal[string] ]),
literal[string] : identifier[identifier] ,
literal[string] : identifier[resolved] . identifier[get] ( literal[string] , literal[string] ),
literal[string] : identifier[resolved] . identifier[get] ( literal[string] , literal[string] ),
literal[string] : identifier[resolved] ,
}
keyword[if] identifier[object_type] keyword[not] keyword[in] ( literal[string] , literal[string] ):
keyword[raise] identifier[ValueError] (
literal[string]
)
keyword[if] identifier[field] keyword[not] keyword[in] ( literal[string] , literal[string] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[params] ={ identifier[field] + literal[string] : identifier[value] , literal[string] : identifier[object_type] }
identifier[url] = identifier[self] . identifier[repository] + literal[string] + identifier[object_type]
identifier[response] = identifier[self] . identifier[_get] ( identifier[url] , identifier[params] = identifier[params] )
identifier[hits] = identifier[response] . identifier[json] ()
keyword[return] [ identifier[format_record] ( identifier[r] ) keyword[for] identifier[r] keyword[in] identifier[hits] [ identifier[object_type] ]] | def find_by_id(self, object_type, field, value):
"""
Find resource by a specific ID.
Results are a dict in the format:
{
'id': <resource URI fragment>,
'identifier': <resource identifier>,
'title': <title of the resource>,
'levelOfDescription': <level of description>,
}
:param str object_type: One of 'digital_object_components' or 'archival_objects'
:param str field: Name of the field to search. One of 'component_id' or 'ref_id'.
:param value: Value of the field to search for
:return: List of dicts containing results.
"""
def format_record(record):
resolved = record['_resolved']
identifier = resolved['ref_id'] if 'ref_id' in resolved else resolved.get('component_id', '')
return {'id': record['ref'], 'type': self.resource_type(record['ref']), 'identifier': identifier, 'title': resolved.get('title', ''), 'levelOfDescription': resolved.get('level', ''), 'fullrecord': resolved}
if object_type not in ('digital_object_components', 'archival_objects'):
raise ValueError("object_type must be 'digital_object_components' or 'archival_objects'") # depends on [control=['if'], data=[]]
if field not in ('ref_id', 'component_id'):
raise ValueError("field must be 'component_id' or 'ref_id'") # depends on [control=['if'], data=[]]
params = {field + '[]': value, 'resolve[]': object_type}
url = self.repository + '/find_by_id/' + object_type
response = self._get(url, params=params)
hits = response.json()
return [format_record(r) for r in hits[object_type]] |
def absent(name, Name,
           region=None, key=None, keyid=None, profile=None):
    '''
    Ensure trail with passed properties is absent.
    name
        The name of the state definition.
    Name
        Name of the trail.
    region
        Region to connect to.
    key
        Secret key to be used.
    keyid
        Access key to be used.
    profile
        A dict with region, key and keyid, or a pillar key (string) that
        contains a dict with region, key and keyid.
    '''
    ret = {'name': Name, 'result': True, 'comment': '', 'changes': {}}
    exists = __salt__['boto_cloudtrail.exists'](
        Name, region=region, key=key, keyid=keyid, profile=profile)
    # Lookup failure: report it and bail out.
    if 'error' in exists:
        ret['result'] = False
        ret['comment'] = 'Failed to delete trail: {0}.'.format(exists['error']['message'])
        return ret
    # Nothing to do when the trail is already gone.
    if exists and not exists['exists']:
        ret['comment'] = 'CloudTrail {0} does not exist.'.format(Name)
        return ret
    # Dry-run mode: describe the pending change without applying it.
    if __opts__['test']:
        ret['result'] = None
        ret['comment'] = 'CloudTrail {0} is set to be removed.'.format(Name)
        return ret
    deleted = __salt__['boto_cloudtrail.delete'](
        Name, region=region, key=key, keyid=keyid, profile=profile)
    if not deleted['deleted']:
        ret['result'] = False
        ret['comment'] = 'Failed to delete trail: {0}.'.format(deleted['error']['message'])
        return ret
    ret['changes'] = {'old': {'trail': Name}, 'new': {'trail': None}}
    ret['comment'] = 'CloudTrail {0} deleted.'.format(Name)
    return ret
constant[
Ensure trail with passed properties is absent.
name
The name of the state definition.
Name
Name of the trail.
region
Region to connect to.
key
Secret key to be used.
keyid
Access key to be used.
profile
A dict with region, key and keyid, or a pillar key (string) that
contains a dict with region, key and keyid.
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da20cabeec0>, <ast.Constant object at 0x7da20cabe290>, <ast.Constant object at 0x7da20cabdb40>, <ast.Constant object at 0x7da20cabc310>], [<ast.Name object at 0x7da20cabe7d0>, <ast.Constant object at 0x7da20cabe560>, <ast.Constant object at 0x7da20cabd180>, <ast.Dict object at 0x7da20cabda50>]]
variable[r] assign[=] call[call[name[__salt__]][constant[boto_cloudtrail.exists]], parameter[name[Name]]]
if compare[constant[error] in name[r]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[ret]][constant[comment]] assign[=] call[constant[Failed to delete trail: {0}.].format, parameter[call[call[name[r]][constant[error]]][constant[message]]]]
return[name[ret]]
if <ast.BoolOp object at 0x7da20cabdd80> begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[CloudTrail {0} does not exist.].format, parameter[name[Name]]]
return[name[ret]]
if call[name[__opts__]][constant[test]] begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[CloudTrail {0} is set to be removed.].format, parameter[name[Name]]]
call[name[ret]][constant[result]] assign[=] constant[None]
return[name[ret]]
variable[r] assign[=] call[call[name[__salt__]][constant[boto_cloudtrail.delete]], parameter[name[Name]]]
if <ast.UnaryOp object at 0x7da20e9b1510> begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[ret]][constant[comment]] assign[=] call[constant[Failed to delete trail: {0}.].format, parameter[call[call[name[r]][constant[error]]][constant[message]]]]
return[name[ret]]
call[call[name[ret]][constant[changes]]][constant[old]] assign[=] dictionary[[<ast.Constant object at 0x7da20e9b30a0>], [<ast.Name object at 0x7da20e9b3af0>]]
call[call[name[ret]][constant[changes]]][constant[new]] assign[=] dictionary[[<ast.Constant object at 0x7da18f812470>], [<ast.Constant object at 0x7da18f812a10>]]
call[name[ret]][constant[comment]] assign[=] call[constant[CloudTrail {0} deleted.].format, parameter[name[Name]]]
return[name[ret]] | keyword[def] identifier[absent] ( identifier[name] , identifier[Name] ,
identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[Name] ,
literal[string] : keyword[True] ,
literal[string] : literal[string] ,
literal[string] :{}
}
identifier[r] = identifier[__salt__] [ literal[string] ]( identifier[Name] ,
identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
keyword[if] literal[string] keyword[in] identifier[r] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[r] [ literal[string] ][ literal[string] ])
keyword[return] identifier[ret]
keyword[if] identifier[r] keyword[and] keyword[not] identifier[r] [ literal[string] ]:
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[Name] )
keyword[return] identifier[ret]
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[Name] )
identifier[ret] [ literal[string] ]= keyword[None]
keyword[return] identifier[ret]
identifier[r] = identifier[__salt__] [ literal[string] ]( identifier[Name] ,
identifier[region] = identifier[region] , identifier[key] = identifier[key] ,
identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
keyword[if] keyword[not] identifier[r] [ literal[string] ]:
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[r] [ literal[string] ][ literal[string] ])
keyword[return] identifier[ret]
identifier[ret] [ literal[string] ][ literal[string] ]={ literal[string] : identifier[Name] }
identifier[ret] [ literal[string] ][ literal[string] ]={ literal[string] : keyword[None] }
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[Name] )
keyword[return] identifier[ret] | def absent(name, Name, region=None, key=None, keyid=None, profile=None):
"""
Ensure trail with passed properties is absent.
name
The name of the state definition.
Name
Name of the trail.
region
Region to connect to.
key
Secret key to be used.
keyid
Access key to be used.
profile
A dict with region, key and keyid, or a pillar key (string) that
contains a dict with region, key and keyid.
"""
ret = {'name': Name, 'result': True, 'comment': '', 'changes': {}}
r = __salt__['boto_cloudtrail.exists'](Name, region=region, key=key, keyid=keyid, profile=profile)
if 'error' in r:
ret['result'] = False
ret['comment'] = 'Failed to delete trail: {0}.'.format(r['error']['message'])
return ret # depends on [control=['if'], data=['r']]
if r and (not r['exists']):
ret['comment'] = 'CloudTrail {0} does not exist.'.format(Name)
return ret # depends on [control=['if'], data=[]]
if __opts__['test']:
ret['comment'] = 'CloudTrail {0} is set to be removed.'.format(Name)
ret['result'] = None
return ret # depends on [control=['if'], data=[]]
r = __salt__['boto_cloudtrail.delete'](Name, region=region, key=key, keyid=keyid, profile=profile)
if not r['deleted']:
ret['result'] = False
ret['comment'] = 'Failed to delete trail: {0}.'.format(r['error']['message'])
return ret # depends on [control=['if'], data=[]]
ret['changes']['old'] = {'trail': Name}
ret['changes']['new'] = {'trail': None}
ret['comment'] = 'CloudTrail {0} deleted.'.format(Name)
return ret |
def is_full_slice(obj, l):
    """
    We have a full length slice.

    True only when *obj* is a ``slice`` starting at 0, stopping exactly
    at *l*, with the default (None) step.
    """
    if not isinstance(obj, slice):
        return False
    return obj.start == 0 and obj.stop == l and obj.step is None
constant[
We have a full length slice.
]
return[<ast.BoolOp object at 0x7da20cabe290>] | keyword[def] identifier[is_full_slice] ( identifier[obj] , identifier[l] ):
literal[string]
keyword[return] ( identifier[isinstance] ( identifier[obj] , identifier[slice] ) keyword[and] identifier[obj] . identifier[start] == literal[int] keyword[and] identifier[obj] . identifier[stop] == identifier[l] keyword[and]
identifier[obj] . identifier[step] keyword[is] keyword[None] ) | def is_full_slice(obj, l):
"""
We have a full length slice.
"""
return isinstance(obj, slice) and obj.start == 0 and (obj.stop == l) and (obj.step is None) |
def __deserialize_datetime(self, payload, obj_type):
    # type: (str, Union[T, str]) -> Any
    """Deserialize a datetime string in ISO8601 format to a
    date/datetime object.

    :param payload: data to be deserialized in ISO8601 format
    :type payload: str
    :param obj_type: primitive datatype str
    :type obj_type: Union[object, str]
    :return: deserialized primitive datatype object
    :rtype: object
    :raises: :py:class:`ask_sdk_core.exceptions.SerializationException`
    """
    obj_cast = cast(Any, obj_type)
    try:
        # dateutil is an optional dependency; fall back to returning the
        # raw payload when it is not installed.
        from dateutil.parser import parse
    except ImportError:
        return payload
    try:
        parsed = parse(payload)
    except ValueError:
        raise SerializationException(
            "Failed to parse {} into '{}' object".format(
                payload, obj_cast.__name__))
    # Collapse to a plain date when the caller asked for one.
    return parsed.date() if obj_type is date else parsed
constant[Deserialize datetime instance in ISO8601 format to
date/datetime object.
:param payload: data to be deserialized in ISO8601 format
:type payload: str
:param obj_type: primitive datatype str
:type obj_type: Union[object, str]
:return: deserialized primitive datatype object
:rtype: object
:raises: :py:class:`ask_sdk_core.exceptions.SerializationException`
]
variable[obj_cast] assign[=] call[name[cast], parameter[name[Any], name[obj_type]]]
<ast.Try object at 0x7da1b18ddf30> | keyword[def] identifier[__deserialize_datetime] ( identifier[self] , identifier[payload] , identifier[obj_type] ):
literal[string]
identifier[obj_cast] = identifier[cast] ( identifier[Any] , identifier[obj_type] )
keyword[try] :
keyword[from] identifier[dateutil] . identifier[parser] keyword[import] identifier[parse]
identifier[parsed_datetime] = identifier[parse] ( identifier[payload] )
keyword[if] identifier[obj_type] keyword[is] identifier[date] :
keyword[return] identifier[parsed_datetime] . identifier[date] ()
keyword[else] :
keyword[return] identifier[parsed_datetime]
keyword[except] identifier[ImportError] :
keyword[return] identifier[payload]
keyword[except] identifier[ValueError] :
keyword[raise] identifier[SerializationException] (
literal[string] . identifier[format] (
identifier[payload] , identifier[obj_cast] . identifier[__name__] )) | def __deserialize_datetime(self, payload, obj_type):
# type: (str, Union[T, str]) -> Any
'Deserialize datetime instance in ISO8601 format to\n date/datetime object.\n\n :param payload: data to be deserialized in ISO8601 format\n :type payload: str\n :param obj_type: primitive datatype str\n :type obj_type: Union[object, str]\n :return: deserialized primitive datatype object\n :rtype: object\n :raises: :py:class:`ask_sdk_core.exceptions.SerializationException`\n '
obj_cast = cast(Any, obj_type)
try:
from dateutil.parser import parse
parsed_datetime = parse(payload)
if obj_type is date:
return parsed_datetime.date() # depends on [control=['if'], data=[]]
else:
return parsed_datetime # depends on [control=['try'], data=[]]
except ImportError:
return payload # depends on [control=['except'], data=[]]
except ValueError:
raise SerializationException("Failed to parse {} into '{}' object".format(payload, obj_cast.__name__)) # depends on [control=['except'], data=[]] |
def opendocs(where='index', how='default'):
    '''
    Rebuild the documentation and open the page `where` in your browser.

    The second argument selects how the page is opened:
    `d` or `default`: let the browser choose tab vs. window.
    `t` or `tab`: force a new tab.
    `n`, `w` or `window`: force a new window.
    Any other value rebuilds the docs but opens nothing.
    '''
    import webbrowser
    docs_dir = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        'docs')
    page = os.path.join(docs_dir, '_build/html/%s.html' % where)
    builddocs('html')
    url = 'file://%s' % os.path.abspath(page)
    # Map each accepted mode string to the matching webbrowser call.
    dispatch = {
        'd': webbrowser.open,
        'default': webbrowser.open,
        't': webbrowser.open_new_tab,
        'tab': webbrowser.open_new_tab,
        'n': webbrowser.open_new,
        'w': webbrowser.open_new,
        'window': webbrowser.open_new,
    }
    opener = dispatch.get(how)
    if opener is not None:
        opener(url)
constant[
Rebuild documentation and opens it in your browser.
Use the first argument to specify how it should be opened:
`d` or `default`: Open in new tab or new window, using the default
method of your browser.
`t` or `tab`: Open documentation in new tab.
`n`, `w` or `window`: Open documentation in new window.
]
import module[webbrowser]
variable[docs_dir] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[call[name[os].path.abspath, parameter[name[__file__]]]]], constant[docs]]]
variable[index] assign[=] call[name[os].path.join, parameter[name[docs_dir], binary_operation[constant[_build/html/%s.html] <ast.Mod object at 0x7da2590d6920> name[where]]]]
call[name[builddocs], parameter[constant[html]]]
variable[url] assign[=] binary_operation[constant[file://%s] <ast.Mod object at 0x7da2590d6920> call[name[os].path.abspath, parameter[name[index]]]]
if compare[name[how] in tuple[[<ast.Constant object at 0x7da1b0538e20>, <ast.Constant object at 0x7da1b0539a50>]]] begin[:]
call[name[webbrowser].open, parameter[name[url]]] | keyword[def] identifier[opendocs] ( identifier[where] = literal[string] , identifier[how] = literal[string] ):
literal[string]
keyword[import] identifier[webbrowser]
identifier[docs_dir] = identifier[os] . identifier[path] . identifier[join] (
identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[__file__] )),
literal[string] )
identifier[index] = identifier[os] . identifier[path] . identifier[join] ( identifier[docs_dir] , literal[string] % identifier[where] )
identifier[builddocs] ( literal[string] )
identifier[url] = literal[string] % identifier[os] . identifier[path] . identifier[abspath] ( identifier[index] )
keyword[if] identifier[how] keyword[in] ( literal[string] , literal[string] ):
identifier[webbrowser] . identifier[open] ( identifier[url] )
keyword[elif] identifier[how] keyword[in] ( literal[string] , literal[string] ):
identifier[webbrowser] . identifier[open_new_tab] ( identifier[url] )
keyword[elif] identifier[how] keyword[in] ( literal[string] , literal[string] , literal[string] ):
identifier[webbrowser] . identifier[open_new] ( identifier[url] ) | def opendocs(where='index', how='default'):
"""
Rebuild documentation and opens it in your browser.
Use the first argument to specify how it should be opened:
`d` or `default`: Open in new tab or new window, using the default
method of your browser.
`t` or `tab`: Open documentation in new tab.
`n`, `w` or `window`: Open documentation in new window.
"""
import webbrowser
docs_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'docs')
index = os.path.join(docs_dir, '_build/html/%s.html' % where)
builddocs('html')
url = 'file://%s' % os.path.abspath(index)
if how in ('d', 'default'):
webbrowser.open(url) # depends on [control=['if'], data=[]]
elif how in ('t', 'tab'):
webbrowser.open_new_tab(url) # depends on [control=['if'], data=[]]
elif how in ('n', 'w', 'window'):
webbrowser.open_new(url) # depends on [control=['if'], data=[]] |
def StartKvmSession(self, serviceProfile=None, blade=None, rackUnit=None, frameTitle=None, dumpXml=None):
        """
        Starts KVM session.
        launches the KVM session for the specific service profile, blade or rackUnit.
        - serviceProfile specifies an object of type lsServer. Launches KVM session with which the service profile is associated.
        - blade specifies an object of type computeBlade. Launches KVM session of blade server.
        - rackUnit specifies an object of type computeRackUnit. Launches KVM session of rack Unit.
        - frameTitle specifies the title of the frame window.

        Exactly one of serviceProfile, blade or rackUnit must be provided.
        Raises UcsValidationException for bad arguments, unsupported UCS
        versions or a missing management IP, and UcsException when an
        underlying UCS query fails.
        """
        from UcsBase import WriteUcsWarning, _GenericMO, UcsUtils, UcsValidationException, UcsException
        from Mos import MgmtIf
        import os, subprocess, urllib

        if (self._transactionInProgress):
            raise UcsValidationException(
                "UCS transaction in progress. Cannot execute StartKvmSession. Complete or Undo UCS transaction.")
        # Exactly one target (blade, rackUnit or serviceProfile) is allowed.
        if ((blade != None and rackUnit != None) or (serviceProfile != None and rackUnit != None) or (
                blade != None and serviceProfile != None)):
            raise UcsValidationException("Please provide only one parameter from blade, rackUnit and service profile.")
        if (serviceProfile == None and blade == None and rackUnit == None):
            raise UcsValidationException(
                "Please provide at least one parameter from blade, rackUnit and service profile.")
        # Token-based KVM launch is only available from UCSM 1.4(1a) onwards.
        minVersion = UcsVersion('1.4(1a)')
        if self._version < minVersion:
            raise UcsValidationException(
                "StartKvmSession not supported for Ucs version older than %s. You are connected to Ucs Version %s" % (
                    minVersion, self._version))

        # Query-string parameter names understood by the kvm.jnlp endpoint.
        PARAM_CENTRALE_PASSWORD = "centralePassword"
        PARAM_CENTRALE_USER = "centraleUser"
        PARAM_DN = "dn"
        PARAM_FRAME_TITLE = "frameTitle"
        PARAM_KVM_IP_ADDR = "kvmIpAddr"
        PARAM_KVM_PASSWORD = "kvmPassword"
        PARAM_KVM_PN_DN = "kvmPnDn"
        PARAM_KVM_USER = "kvmUser"
        PARAM_TEMP_UNPW = "tempunpw"
        PARAM_KVM_DN = "kvmDn"

        sp_bool = False
        nvc = {}
        dn = None
        pnDn = None
        ipAddress = None
        if ((blade != None) or (rackUnit != None)):
            # A physical server was passed directly; resolve its external
            # management (CIMC) IP from its enabled mgmt interface.
            if (blade != None):
                pnDn = blade.getattr("Dn")
            else:
                pnDn = rackUnit.getattr("Dn")
            nvc[PARAM_DN] = pnDn
            if (frameTitle == None):
                frameTitle = self._ucs + ':' + pnDn + ' KVM Console'
            nvc[PARAM_FRAME_TITLE] = frameTitle
            nvc[PARAM_KVM_PN_DN] = pnDn
            cs = self.ConfigScope(dn=pnDn, inClass=NamingId.MGMT_IF, inFilter=None, inRecursive=YesOrNo.FALSE,
                                  inHierarchical=YesOrNo.FALSE, dumpXml=dumpXml)
            if (cs.errorCode == 0):
                for mgmtIf in cs.OutConfigs.GetChild():
                    if ((mgmtIf.getattr("Subject") == MgmtIf.CONST_SUBJECT_BLADE) and (
                            mgmtIf.getattr("AdminState") == MgmtIf.CONST_ADMIN_STATE_ENABLE)):
                        ipAddress = mgmtIf.getattr("ExtIp")
            else:
                raise UcsException(cs.errorCode, cs.errorDescr)
            # If the blade does not have an IP, check if a service profile is associated.
            if ((ipAddress == None) or (ipAddress == '0.0.0.0')):
                crDn = self.ConfigResolveDn(pnDn, inHierarchical=YesOrNo.TRUE, dumpXml=dumpXml)
                if (crDn.errorCode != 0):
                    raise UcsException(crDn.errorCode, crDn.errorDescr)
                for mo in crDn.OutConfig.GetChild():
                    dn = mo.getattr("AssignedToDn")
                if dn != None:
                    sp_bool = True
        if (sp_bool or serviceProfile != None):
            # Resolve the service profile (supplied directly or discovered
            # above) and pull the KVM IP from its vNIC IPv4 addresses.
            if dn == None:
                dn = serviceProfile.getattr("Dn")
            if (frameTitle == None):
                frameTitle = self._ucs + ':' + dn + ' KVM Console'
            nvc[PARAM_FRAME_TITLE] = frameTitle
            nvc[PARAM_KVM_DN] = dn
            crDn = self.ConfigResolveDn(dn, inHierarchical=YesOrNo.TRUE, dumpXml=dumpXml)
            if (crDn.errorCode != 0):
                raise UcsException(crDn.errorCode, crDn.errorDescr)
            spMo = None
            for mo in crDn.OutConfig.GetChild():
                spMo = mo
            if spMo == None:
                raise Exception('Service Profile not found.')
            if spMo.getattr("PnDn") == None:
                raise UcsValidationException('Service Profile is not associated with blade or rackUnit.')
            pnDn = spMo.getattr("PnDn")
            nvc[PARAM_DN] = pnDn
            # TODO: replace the literal classId with the proper constant after generating mos.py / Constant.py.
            crc = self.ConfigResolveChildren('vnicIpV4Addr', dn, None, inHierarchical=YesOrNo.FALSE,
                                             dumpXml=dumpXml)
            if (crc.errorCode == 0):
                for mo in crc.OutConfigs.GetChild():
                    gmo = _GenericMO(mo=mo, option=WriteXmlOption.All)
                    if not ('Addr' in gmo.properties):
                        continue
                    ipAddress = gmo.GetAttribute('Addr')
                    if ipAddress != None:
                        break
            else:
                raise UcsException(crc.errorCode, crc.errorDescr)
            # Last resort for an explicitly supplied service profile: use the
            # management interface of the backing physical server.
            if (((ipAddress == None) or (ipAddress == '0.0.0.0')) and (serviceProfile != None)):
                cs = self.ConfigScope(dn=pnDn, inClass=NamingId.MGMT_IF, inFilter=None, inRecursive=YesOrNo.FALSE,
                                      inHierarchical=YesOrNo.FALSE, dumpXml=dumpXml)
                if (cs.errorCode == 0):
                    for mgmtIf in cs.OutConfigs.GetChild():
                        if ((mgmtIf.getattr("Subject") == MgmtIf.CONST_SUBJECT_BLADE) and (
                                mgmtIf.getattr("AdminState") == MgmtIf.CONST_ADMIN_STATE_ENABLE)):
                            ipAddress = mgmtIf.getattr("ExtIp")
                else:
                    raise UcsException(cs.errorCode, cs.errorDescr)
        if ((ipAddress == None) or (ipAddress == '0.0.0.0')):
            raise UcsValidationException("No assigned IP address to use.")
        nvc[PARAM_KVM_IP_ADDR] = ipAddress
        # Fetch two one-time authentication tokens for the KVM session.
        cat = self.AaaGetNComputeAuthTokenByDn(pnDn, 2, dumpXml=dumpXml)
        if (cat.errorCode == 0):
            nvc[PARAM_CENTRALE_PASSWORD] = cat.OutTokens.split(',')[0]
            nvc[PARAM_CENTRALE_USER] = cat.OutUser
            nvc[PARAM_KVM_PASSWORD] = cat.OutTokens.split(',')[1]
            nvc[PARAM_KVM_USER] = cat.OutUser
        else:
            raise UcsException(cat.errorCode, cat.errorDescr)
        nvc[PARAM_TEMP_UNPW] = "true"
        # Build the launch URL and start the KVM viewer via Java Web Start.
        kvmUrl = '%s/ucsm/kvm.jnlp?%s' % (self.Uri(), urllib.urlencode(nvc))
        installPath = UcsUtils.GetJavaInstallationPath()
        if installPath != None:
            subprocess.call([installPath, kvmUrl])
        else:
            # TODO: Add Warning/Error messages in Logger.
            WriteUcsWarning("Java is not installed on System.")
            # NOTE(review): Popen on a bare URL string cannot spawn a process;
            # kept for backward compatibility - confirm intended behaviour.
            subprocess.Popen(kvmUrl)
constant[
Starts KVM session.
launches the KVM session for the specific service profile, blade or rackUnit.
- serviceProfile specifies an object of type lsServer. Launches KVM session with which the service profile is associated.
- blade specifies an object of type computeBlade. Launches KVM session of blade server.
- rackUnit specifies an object of type computeRackUnit. Launches KVM session of rack Unit.
- frameTitle specifies the title of the frame window.
]
from relative_module[UcsBase] import module[WriteUcsWarning], module[_GenericMO], module[UcsUtils], module[UcsValidationException], module[UcsException]
from relative_module[Mos] import module[MgmtIf]
import module[os], module[subprocess], module[urllib]
if name[self]._transactionInProgress begin[:]
<ast.Raise object at 0x7da1b24690f0>
if <ast.BoolOp object at 0x7da1b2468fa0> begin[:]
<ast.Raise object at 0x7da1b2468280>
if <ast.BoolOp object at 0x7da1b2469870> begin[:]
<ast.Raise object at 0x7da1b2469a50>
variable[minVersion] assign[=] call[name[UcsVersion], parameter[constant[1.4(1a)]]]
if compare[name[self]._version less[<] name[minVersion]] begin[:]
<ast.Raise object at 0x7da1b2469480>
variable[PARAM_CENTRALE_PASSWORD] assign[=] constant[centralePassword]
variable[PARAM_CENTRALE_USER] assign[=] constant[centraleUser]
variable[PARAM_DN] assign[=] constant[dn]
variable[PARAM_FRAME_TITLE] assign[=] constant[frameTitle]
variable[PARAM_KVM_IP_ADDR] assign[=] constant[kvmIpAddr]
variable[PARAM_KVM_PASSWORD] assign[=] constant[kvmPassword]
variable[PARAM_KVM_PN_DN] assign[=] constant[kvmPnDn]
variable[PARAM_KVM_USER] assign[=] constant[kvmUser]
variable[PARAM_TEMP_UNPW] assign[=] constant[tempunpw]
variable[PARAM_KVM_DN] assign[=] constant[kvmDn]
variable[_lock] assign[=] call[name[Lock], parameter[]]
variable[sp_bool] assign[=] constant[False]
variable[nvc] assign[=] dictionary[[], []]
variable[dn] assign[=] constant[None]
variable[pnDn] assign[=] constant[None]
variable[ipAddress] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b259e560> begin[:]
if compare[name[blade] not_equal[!=] constant[None]] begin[:]
variable[pnDn] assign[=] call[name[blade].getattr, parameter[constant[Dn]]]
call[name[nvc]][name[PARAM_DN]] assign[=] name[pnDn]
if compare[name[frameTitle] equal[==] constant[None]] begin[:]
variable[frameTitle] assign[=] binary_operation[binary_operation[binary_operation[name[self]._ucs + constant[:]] + name[pnDn]] + constant[ KVM Console]]
call[name[nvc]][name[PARAM_FRAME_TITLE]] assign[=] name[frameTitle]
call[name[nvc]][name[PARAM_KVM_PN_DN]] assign[=] name[pnDn]
variable[cs] assign[=] call[name[self].ConfigScope, parameter[]]
if compare[name[cs].errorCode equal[==] constant[0]] begin[:]
for taget[name[mgmtIf]] in starred[call[name[cs].OutConfigs.GetChild, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b259f0a0> begin[:]
variable[ipAddress] assign[=] call[name[mgmtIf].getattr, parameter[constant[ExtIp]]]
if <ast.BoolOp object at 0x7da1b24e7d30> begin[:]
variable[crDn] assign[=] call[name[self].ConfigResolveDn, parameter[name[pnDn]]]
if compare[name[crDn].errorCode not_equal[!=] constant[0]] begin[:]
<ast.Raise object at 0x7da1b25e8190>
for taget[name[mo]] in starred[call[name[crDn].OutConfig.GetChild, parameter[]]] begin[:]
variable[dn] assign[=] call[name[mo].getattr, parameter[constant[AssignedToDn]]]
if compare[name[dn] not_equal[!=] constant[None]] begin[:]
variable[sp_bool] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b25e87c0> begin[:]
if compare[name[dn] equal[==] constant[None]] begin[:]
variable[dn] assign[=] call[name[serviceProfile].getattr, parameter[constant[Dn]]]
if compare[name[frameTitle] equal[==] constant[None]] begin[:]
variable[frameTitle] assign[=] binary_operation[binary_operation[binary_operation[name[self]._ucs + constant[:]] + name[dn]] + constant[ KVM Console]]
call[name[nvc]][name[PARAM_FRAME_TITLE]] assign[=] name[frameTitle]
call[name[nvc]][name[PARAM_KVM_DN]] assign[=] name[dn]
variable[crDn] assign[=] call[name[self].ConfigResolveDn, parameter[name[dn]]]
if compare[name[crDn].errorCode not_equal[!=] constant[0]] begin[:]
<ast.Raise object at 0x7da1b25eac80>
variable[spMo] assign[=] constant[None]
for taget[name[mo]] in starred[call[name[crDn].OutConfig.GetChild, parameter[]]] begin[:]
variable[spMo] assign[=] name[mo]
if compare[name[spMo] equal[==] constant[None]] begin[:]
<ast.Raise object at 0x7da1b25eb070>
if compare[call[name[spMo].getattr, parameter[constant[PnDn]]] equal[==] constant[None]] begin[:]
<ast.Raise object at 0x7da1b25ea050>
variable[pnDn] assign[=] call[name[spMo].getattr, parameter[constant[PnDn]]]
call[name[nvc]][name[PARAM_DN]] assign[=] name[pnDn]
variable[crc] assign[=] call[name[self].ConfigResolveChildren, parameter[constant[vnicIpV4Addr], name[dn], constant[None]]]
if compare[name[crc].errorCode equal[==] constant[0]] begin[:]
for taget[name[mo]] in starred[call[name[crc].OutConfigs.GetChild, parameter[]]] begin[:]
variable[gmo] assign[=] call[name[_GenericMO], parameter[]]
if <ast.UnaryOp object at 0x7da1b25ef730> begin[:]
continue
variable[ipAddress] assign[=] call[name[gmo].GetAttribute, parameter[constant[Addr]]]
if compare[name[ipAddress] not_equal[!=] constant[None]] begin[:]
break
if <ast.BoolOp object at 0x7da1b25ed720> begin[:]
variable[cs] assign[=] call[name[self].ConfigScope, parameter[]]
if compare[name[cs].errorCode equal[==] constant[0]] begin[:]
for taget[name[mgmtIf]] in starred[call[name[cs].OutConfigs.GetChild, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b25eefe0> begin[:]
variable[ipAddress] assign[=] call[name[mgmtIf].getattr, parameter[constant[ExtIp]]]
if <ast.BoolOp object at 0x7da1b2405ba0> begin[:]
<ast.Raise object at 0x7da1b2407f40>
call[name[nvc]][name[PARAM_KVM_IP_ADDR]] assign[=] name[ipAddress]
variable[cat] assign[=] call[name[self].AaaGetNComputeAuthTokenByDn, parameter[name[pnDn], constant[2]]]
if compare[name[cat].errorCode equal[==] constant[0]] begin[:]
call[name[nvc]][name[PARAM_CENTRALE_PASSWORD]] assign[=] call[call[name[cat].OutTokens.split, parameter[constant[,]]]][constant[0]]
call[name[nvc]][name[PARAM_CENTRALE_USER]] assign[=] name[cat].OutUser
call[name[nvc]][name[PARAM_KVM_PASSWORD]] assign[=] call[call[name[cat].OutTokens.split, parameter[constant[,]]]][constant[1]]
call[name[nvc]][name[PARAM_KVM_USER]] assign[=] name[cat].OutUser
call[name[nvc]][name[PARAM_TEMP_UNPW]] assign[=] constant[true]
variable[kvmUrl] assign[=] binary_operation[constant[%s/ucsm/kvm.jnlp?%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b2404820>, <ast.Call object at 0x7da1b2407430>]]]
variable[installPath] assign[=] call[name[UcsUtils].GetJavaInstallationPath, parameter[]]
if compare[name[installPath] not_equal[!=] constant[None]] begin[:]
call[name[subprocess].call, parameter[list[[<ast.Name object at 0x7da1b243bf70>, <ast.Name object at 0x7da1b243bf40>]]]] | keyword[def] identifier[StartKvmSession] ( identifier[self] , identifier[serviceProfile] = keyword[None] , identifier[blade] = keyword[None] , identifier[rackUnit] = keyword[None] , identifier[frameTitle] = keyword[None] , identifier[dumpXml] = keyword[None] ):
literal[string]
keyword[from] identifier[UcsBase] keyword[import] identifier[WriteUcsWarning] , identifier[_GenericMO] , identifier[UcsUtils] , identifier[UcsValidationException] , identifier[UcsException]
keyword[from] identifier[Mos] keyword[import] identifier[MgmtIf]
keyword[import] identifier[os] , identifier[subprocess] , identifier[urllib]
keyword[if] ( identifier[self] . identifier[_transactionInProgress] ):
keyword[raise] identifier[UcsValidationException] (
literal[string] )
keyword[if] (( identifier[blade] != keyword[None] keyword[and] identifier[rackUnit] != keyword[None] ) keyword[or] ( identifier[serviceProfile] != keyword[None] keyword[and] identifier[rackUnit] != keyword[None] ) keyword[or] (
identifier[blade] != keyword[None] keyword[and] identifier[serviceProfile] != keyword[None] )):
keyword[raise] identifier[UcsValidationException] ( literal[string] )
keyword[if] ( identifier[serviceProfile] == keyword[None] keyword[and] identifier[blade] == keyword[None] keyword[and] identifier[rackUnit] == keyword[None] ):
keyword[raise] identifier[UcsValidationException] (
literal[string] )
identifier[minVersion] = identifier[UcsVersion] ( literal[string] )
keyword[if] identifier[self] . identifier[_version] < identifier[minVersion] :
keyword[raise] identifier[UcsValidationException] (
literal[string] %(
identifier[minVersion] , identifier[self] . identifier[_version] ))
identifier[PARAM_CENTRALE_PASSWORD] = literal[string]
identifier[PARAM_CENTRALE_USER] = literal[string]
identifier[PARAM_DN] = literal[string]
identifier[PARAM_FRAME_TITLE] = literal[string]
identifier[PARAM_KVM_IP_ADDR] = literal[string]
identifier[PARAM_KVM_PASSWORD] = literal[string]
identifier[PARAM_KVM_PN_DN] = literal[string]
identifier[PARAM_KVM_USER] = literal[string]
identifier[PARAM_TEMP_UNPW] = literal[string]
identifier[PARAM_KVM_DN] = literal[string]
identifier[_lock] = identifier[Lock] ()
identifier[sp_bool] = keyword[False]
identifier[nvc] ={}
identifier[dn] = keyword[None]
identifier[pnDn] = keyword[None]
identifier[ipAddress] = keyword[None]
keyword[if] (( identifier[blade] != keyword[None] ) keyword[or] ( identifier[rackUnit] != keyword[None] )):
keyword[if] ( identifier[blade] != keyword[None] ):
identifier[pnDn] = identifier[blade] . identifier[getattr] ( literal[string] )
keyword[else] :
identifier[pnDn] = identifier[rackUnit] . identifier[getattr] ( literal[string] )
identifier[nvc] [ identifier[PARAM_DN] ]= identifier[pnDn]
keyword[if] ( identifier[frameTitle] == keyword[None] ):
identifier[frameTitle] = identifier[self] . identifier[_ucs] + literal[string] + identifier[pnDn] + literal[string]
identifier[nvc] [ identifier[PARAM_FRAME_TITLE] ]= identifier[frameTitle]
identifier[nvc] [ identifier[PARAM_KVM_PN_DN] ]= identifier[pnDn]
identifier[cs] = identifier[self] . identifier[ConfigScope] ( identifier[dn] = identifier[pnDn] , identifier[inClass] = identifier[NamingId] . identifier[MGMT_IF] , identifier[inFilter] = keyword[None] , identifier[inRecursive] = identifier[YesOrNo] . identifier[FALSE] ,
identifier[inHierarchical] = identifier[YesOrNo] . identifier[FALSE] , identifier[dumpXml] = identifier[dumpXml] )
keyword[if] ( identifier[cs] . identifier[errorCode] == literal[int] ):
keyword[for] identifier[mgmtIf] keyword[in] identifier[cs] . identifier[OutConfigs] . identifier[GetChild] ():
keyword[if] (( identifier[mgmtIf] . identifier[getattr] ( literal[string] )== identifier[MgmtIf] . identifier[CONST_SUBJECT_BLADE] ) keyword[and] (
identifier[mgmtIf] . identifier[getattr] ( literal[string] )== identifier[MgmtIf] . identifier[CONST_ADMIN_STATE_ENABLE] )):
identifier[ipAddress] = identifier[mgmtIf] . identifier[getattr] ( literal[string] )
keyword[else] :
keyword[raise] identifier[UcsException] ( identifier[cs] . identifier[errorCode] , identifier[cs] . identifier[errorDescr] )
keyword[if] (( identifier[ipAddress] == keyword[None] ) keyword[or] ( identifier[ipAddress] == literal[string] )):
identifier[crDn] = identifier[self] . identifier[ConfigResolveDn] ( identifier[pnDn] , identifier[inHierarchical] = identifier[YesOrNo] . identifier[TRUE] , identifier[dumpXml] = identifier[dumpXml] )
keyword[if] ( identifier[crDn] . identifier[errorCode] != literal[int] ):
keyword[raise] identifier[UcsException] ( identifier[crDn] . identifier[errorCode] , identifier[crDn] . identifier[errorDescr] )
keyword[for] identifier[mo] keyword[in] identifier[crDn] . identifier[OutConfig] . identifier[GetChild] ():
identifier[dn] = identifier[mo] . identifier[getattr] ( literal[string] )
keyword[if] identifier[dn] != keyword[None] :
identifier[sp_bool] = keyword[True]
keyword[if] ( identifier[sp_bool] keyword[or] identifier[serviceProfile] != keyword[None] ):
keyword[if] identifier[dn] == keyword[None] :
identifier[dn] = identifier[serviceProfile] . identifier[getattr] ( literal[string] )
keyword[if] ( identifier[frameTitle] == keyword[None] ):
identifier[frameTitle] = identifier[self] . identifier[_ucs] + literal[string] + identifier[dn] + literal[string]
identifier[nvc] [ identifier[PARAM_FRAME_TITLE] ]= identifier[frameTitle]
identifier[nvc] [ identifier[PARAM_KVM_DN] ]= identifier[dn]
identifier[crDn] = identifier[self] . identifier[ConfigResolveDn] ( identifier[dn] , identifier[inHierarchical] = identifier[YesOrNo] . identifier[TRUE] , identifier[dumpXml] = identifier[dumpXml] )
keyword[if] ( identifier[crDn] . identifier[errorCode] != literal[int] ):
keyword[raise] identifier[UcsException] ( identifier[crDn] . identifier[errorCode] , identifier[crDn] . identifier[errorDescr] )
identifier[spMo] = keyword[None]
keyword[for] identifier[mo] keyword[in] identifier[crDn] . identifier[OutConfig] . identifier[GetChild] ():
identifier[spMo] = identifier[mo]
keyword[if] identifier[spMo] == keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[if] identifier[spMo] . identifier[getattr] ( literal[string] )== keyword[None] :
keyword[raise] identifier[UcsValidationException] ( literal[string] )
identifier[pnDn] = identifier[spMo] . identifier[getattr] ( literal[string] )
identifier[nvc] [ identifier[PARAM_DN] ]= identifier[pnDn]
identifier[crc] = identifier[self] . identifier[ConfigResolveChildren] ( literal[string] , identifier[dn] , keyword[None] , identifier[inHierarchical] = identifier[YesOrNo] . identifier[FALSE] ,
identifier[dumpXml] = identifier[dumpXml] )
keyword[if] ( identifier[crc] . identifier[errorCode] == literal[int] ):
keyword[for] identifier[mo] keyword[in] identifier[crc] . identifier[OutConfigs] . identifier[GetChild] ():
identifier[gmo] = identifier[_GenericMO] ( identifier[mo] = identifier[mo] , identifier[option] = identifier[WriteXmlOption] . identifier[All] )
keyword[if] keyword[not] ( literal[string] keyword[in] identifier[gmo] . identifier[properties] ):
keyword[continue]
identifier[ipAddress] = identifier[gmo] . identifier[GetAttribute] ( literal[string] )
keyword[if] identifier[ipAddress] != keyword[None] :
keyword[break]
keyword[else] :
keyword[raise] identifier[UcsException] ( identifier[crc] . identifier[errorCode] , identifier[crc] . identifier[errorDescr] )
keyword[if] ((( identifier[ipAddress] == keyword[None] ) keyword[or] ( identifier[ipAddress] == literal[string] )) keyword[and] ( identifier[serviceProfile] != keyword[None] )):
identifier[cs] = identifier[self] . identifier[ConfigScope] ( identifier[dn] = identifier[pnDn] , identifier[inClass] = identifier[NamingId] . identifier[MGMT_IF] , identifier[inFilter] = keyword[None] , identifier[inRecursive] = identifier[YesOrNo] . identifier[FALSE] ,
identifier[inHierarchical] = identifier[YesOrNo] . identifier[FALSE] , identifier[dumpXml] = identifier[dumpXml] )
keyword[if] ( identifier[cs] . identifier[errorCode] == literal[int] ):
keyword[for] identifier[mgmtIf] keyword[in] identifier[cs] . identifier[OutConfigs] . identifier[GetChild] ():
keyword[if] (( identifier[mgmtIf] . identifier[getattr] ( literal[string] )== identifier[MgmtIf] . identifier[CONST_SUBJECT_BLADE] ) keyword[and] (
identifier[mgmtIf] . identifier[getattr] ( literal[string] )== identifier[MgmtIf] . identifier[CONST_ADMIN_STATE_ENABLE] )):
identifier[ipAddress] = identifier[mgmtIf] . identifier[getattr] ( literal[string] )
keyword[else] :
keyword[raise] identifier[UcsException] ( identifier[cs] . identifier[errorCode] , identifier[cs] . identifier[errorDescr] )
keyword[if] (( identifier[ipAddress] == keyword[None] ) keyword[or] ( identifier[ipAddress] == literal[string] )):
keyword[raise] identifier[UcsValidationException] ( literal[string] )
identifier[nvc] [ identifier[PARAM_KVM_IP_ADDR] ]= identifier[ipAddress]
identifier[cat] = identifier[self] . identifier[AaaGetNComputeAuthTokenByDn] ( identifier[pnDn] , literal[int] , identifier[dumpXml] = identifier[dumpXml] )
keyword[if] ( identifier[cat] . identifier[errorCode] == literal[int] ):
identifier[nvc] [ identifier[PARAM_CENTRALE_PASSWORD] ]= identifier[cat] . identifier[OutTokens] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[nvc] [ identifier[PARAM_CENTRALE_USER] ]= identifier[cat] . identifier[OutUser]
identifier[nvc] [ identifier[PARAM_KVM_PASSWORD] ]= identifier[cat] . identifier[OutTokens] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[nvc] [ identifier[PARAM_KVM_USER] ]= identifier[cat] . identifier[OutUser]
keyword[else] :
keyword[raise] identifier[UcsException] ( identifier[cat] . identifier[errorCode] , identifier[cat] . identifier[errorDescr] )
identifier[nvc] [ identifier[PARAM_TEMP_UNPW] ]= literal[string]
identifier[kvmUrl] = literal[string] %( identifier[self] . identifier[Uri] (), identifier[urllib] . identifier[urlencode] ( identifier[nvc] ))
identifier[installPath] = identifier[UcsUtils] . identifier[GetJavaInstallationPath] ()
keyword[if] identifier[installPath] != keyword[None] :
identifier[subprocess] . identifier[call] ([ identifier[installPath] , identifier[kvmUrl] ])
keyword[else] :
identifier[WriteUcsWarning] ( literal[string] )
identifier[p] = identifier[subprocess] . identifier[Popen] ( identifier[kvmUrl] ) | def StartKvmSession(self, serviceProfile=None, blade=None, rackUnit=None, frameTitle=None, dumpXml=None):
"""
Starts KVM session.
launches the KVM session for the specific service profile, blade or rackUnit.
- serviceProfile specifies an object of type lsServer. Launches KVM session with which the service profile is associated.
- blade specifies an object of type computeBlade. Launches KVM session of blade server.
- rackUnit specifies an object of type computeRackUnit. Launches KVM session of rack Unit.
- frameTitle specifies the title of the frame window.
"""
from UcsBase import WriteUcsWarning, _GenericMO, UcsUtils, UcsValidationException, UcsException
from Mos import MgmtIf
import os, subprocess, urllib
if self._transactionInProgress:
raise UcsValidationException('UCS transaction in progress. Cannot execute StartKvmSession. Complete or Undo UCS transaction.') # depends on [control=['if'], data=[]] # raise Exception("UCS transaction in progress. Cannot execute StartKvmSession. Complete or Undo UCS transaction.")
if blade != None and rackUnit != None or (serviceProfile != None and rackUnit != None) or (blade != None and serviceProfile != None):
raise UcsValidationException('Please provide only one parameter from blade, rackUnit and service profile.') # depends on [control=['if'], data=[]] # raise Exception("Please provide only one parameter from blade, rackUnit and service profile.")
if serviceProfile == None and blade == None and (rackUnit == None):
raise UcsValidationException('Please provide at least one parameter from blade, rackUnit and service profile.') # depends on [control=['if'], data=[]] # raise Exception("Please provide at least one parameter from blade, rackUnit and service profile.")
minVersion = UcsVersion('1.4(1a)')
if self._version < minVersion:
raise UcsValidationException('StartKvmSession not supported for Ucs version older than %s. You are connected to Ucs Version %s' % (minVersion, self._version)) # depends on [control=['if'], data=['minVersion']] # raise Exception("StartKvmSession not supported for Ucs version older than %s. You are connected to Ucs Version %s" %(minVersion, self._version))
PARAM_CENTRALE_PASSWORD = 'centralePassword'
PARAM_CENTRALE_USER = 'centraleUser'
PARAM_DN = 'dn'
PARAM_FRAME_TITLE = 'frameTitle'
PARAM_KVM_IP_ADDR = 'kvmIpAddr'
PARAM_KVM_PASSWORD = 'kvmPassword'
PARAM_KVM_PN_DN = 'kvmPnDn'
PARAM_KVM_USER = 'kvmUser'
PARAM_TEMP_UNPW = 'tempunpw'
PARAM_KVM_DN = 'kvmDn'
_lock = Lock()
sp_bool = False
nvc = {}
dn = None
pnDn = None
ipAddress = None
if blade != None or rackUnit != None:
if blade != None:
pnDn = blade.getattr('Dn') # depends on [control=['if'], data=['blade']]
else:
pnDn = rackUnit.getattr('Dn')
nvc[PARAM_DN] = pnDn
if frameTitle == None:
frameTitle = self._ucs + ':' + pnDn + ' KVM Console' # depends on [control=['if'], data=['frameTitle']]
nvc[PARAM_FRAME_TITLE] = frameTitle
nvc[PARAM_KVM_PN_DN] = pnDn
cs = self.ConfigScope(dn=pnDn, inClass=NamingId.MGMT_IF, inFilter=None, inRecursive=YesOrNo.FALSE, inHierarchical=YesOrNo.FALSE, dumpXml=dumpXml)
if cs.errorCode == 0:
for mgmtIf in cs.OutConfigs.GetChild():
if mgmtIf.getattr('Subject') == MgmtIf.CONST_SUBJECT_BLADE and mgmtIf.getattr('AdminState') == MgmtIf.CONST_ADMIN_STATE_ENABLE:
ipAddress = mgmtIf.getattr('ExtIp') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['mgmtIf']] # depends on [control=['if'], data=[]]
else:
raise UcsException(cs.errorCode, cs.errorDescr) # raise Exception('[Error]: StartKvmSession [Code]:' + cs.errorCode + ' [Description]:' + cs.errorDescr)
# If the blade does not have an IP, check if a service profile is associated
if ipAddress == None or ipAddress == '0.0.0.0':
crDn = self.ConfigResolveDn(pnDn, inHierarchical=YesOrNo.TRUE, dumpXml=dumpXml)
if crDn.errorCode != 0:
raise UcsException(crDn.errorCode, crDn.errorDescr) # depends on [control=['if'], data=[]] # raise Exception('[Error]: StartKvmSession [Code]:' + crDn.errorCode + ' [Description]:' + crDn.errorDescr)
for mo in crDn.OutConfig.GetChild():
dn = mo.getattr('AssignedToDn') # depends on [control=['for'], data=['mo']]
if dn != None:
sp_bool = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if sp_bool or serviceProfile != None:
if dn == None:
dn = serviceProfile.getattr('Dn')
if frameTitle == None:
frameTitle = self._ucs + ':' + dn + ' KVM Console' # depends on [control=['if'], data=['frameTitle']]
nvc[PARAM_FRAME_TITLE] = frameTitle # depends on [control=['if'], data=['dn']]
nvc[PARAM_KVM_DN] = dn
crDn = self.ConfigResolveDn(dn, inHierarchical=YesOrNo.TRUE, dumpXml=dumpXml)
if crDn.errorCode != 0:
raise UcsException(crDn.errorCode, crDn.errorDescr) # depends on [control=['if'], data=[]] # raise Exception('[Error]: StartKvmSession [Code]:' + crDn.errorCode + ' [Description]:' + crDn.errorDescr)
spMo = None
for mo in crDn.OutConfig.GetChild():
spMo = mo # depends on [control=['for'], data=['mo']]
if spMo == None:
raise Exception('Service Profile not found.') # depends on [control=['if'], data=[]]
if spMo.getattr('PnDn') == None:
raise UcsValidationException('Service Profile is not associated with blade or rackUnit.') # depends on [control=['if'], data=[]] # raise Exception('Service Profile is not associated with blade or rackUnit.')
pnDn = spMo.getattr('PnDn')
nvc[PARAM_DN] = pnDn
crc = self.ConfigResolveChildren('vnicIpV4Addr', dn, None, inHierarchical=YesOrNo.FALSE, dumpXml=dumpXml) # TODO:replace classId with proper constantafter generating mos.py or Constant.py
if crc.errorCode == 0:
for mo in crc.OutConfigs.GetChild():
gmo = _GenericMO(mo=mo, option=WriteXmlOption.All)
if not 'Addr' in gmo.properties:
continue # depends on [control=['if'], data=[]]
ipAddress = gmo.GetAttribute('Addr')
if ipAddress != None:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['mo']] # depends on [control=['if'], data=[]]
else:
raise UcsException(crc.errorCode, crc.errorDescr) # depends on [control=['if'], data=[]] # raise Exception('[Error]: StartKvmSession [Code]:' + crc.errorCode + ' [Description]:' + crc.errorDescr)
if (ipAddress == None or ipAddress == '0.0.0.0') and serviceProfile != None:
cs = self.ConfigScope(dn=pnDn, inClass=NamingId.MGMT_IF, inFilter=None, inRecursive=YesOrNo.FALSE, inHierarchical=YesOrNo.FALSE, dumpXml=dumpXml)
if cs.errorCode == 0:
for mgmtIf in cs.OutConfigs.GetChild():
if mgmtIf.getattr('Subject') == MgmtIf.CONST_SUBJECT_BLADE and mgmtIf.getattr('AdminState') == MgmtIf.CONST_ADMIN_STATE_ENABLE:
ipAddress = mgmtIf.getattr('ExtIp') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['mgmtIf']] # depends on [control=['if'], data=[]]
else:
raise UcsException(cs.errorCode, cs.errorDescr) # depends on [control=['if'], data=[]] # raise Exception('[Error]: StartKvmSession [Code]:' + cs.errorCode + ' [Description]:' + cs.errorDescr)
if ipAddress == None or ipAddress == '0.0.0.0':
raise UcsValidationException('No assigned IP address to use.') # depends on [control=['if'], data=[]] # raise Exception("No assigned IP address to use.")
nvc[PARAM_KVM_IP_ADDR] = ipAddress
cat = self.AaaGetNComputeAuthTokenByDn(pnDn, 2, dumpXml=dumpXml)
if cat.errorCode == 0:
nvc[PARAM_CENTRALE_PASSWORD] = cat.OutTokens.split(',')[0]
nvc[PARAM_CENTRALE_USER] = cat.OutUser
nvc[PARAM_KVM_PASSWORD] = cat.OutTokens.split(',')[1]
nvc[PARAM_KVM_USER] = cat.OutUser # depends on [control=['if'], data=[]]
else:
raise UcsException(cat.errorCode, cat.errorDescr) # raise Exception('[Error]: StartKvmSession [Code]:' + cat.errorCode + ' [Description]:' + cat.errorDescr)
nvc[PARAM_TEMP_UNPW] = 'true'
kvmUrl = '%s/ucsm/kvm.jnlp?%s' % (self.Uri(), urllib.urlencode(nvc))
installPath = UcsUtils.GetJavaInstallationPath()
if installPath != None:
subprocess.call([installPath, kvmUrl]) # depends on [control=['if'], data=['installPath']]
else: # TODO: Add Warning/Error messages in Logger.
WriteUcsWarning('Java is not installed on System.')
p = subprocess.Popen(kvmUrl) |
def until_state(self, state, timeout=None):
    """Return a tornado Future that will resolve when the requested state is set.

    If the instance is already in *state*, an immediately-resolved future is
    returned.  Otherwise the pending future registered for that state is
    handed back, optionally wrapped in a deadline when *timeout* is given.
    """
    if state not in self._valid_states:
        raise ValueError('State must be one of {0}, not {1}'
                         .format(self._valid_states, state))
    if state == self._state:
        # Already in the requested state: resolve immediately.
        resolved = tornado_Future()
        resolved.set_result(True)
        return resolved
    waiter = self._waiting_futures[state]
    if not timeout:
        return waiter
    deadline = self._ioloop.time() + timeout
    return with_timeout(deadline, waiter, self._ioloop)
constant[Return a tornado Future that will resolve when the requested state is set]
if compare[name[state] <ast.NotIn object at 0x7da2590d7190> name[self]._valid_states] begin[:]
<ast.Raise object at 0x7da20c6aa110>
if compare[name[state] not_equal[!=] name[self]._state] begin[:]
if name[timeout] begin[:]
return[call[name[with_timeout], parameter[binary_operation[call[name[self]._ioloop.time, parameter[]] + name[timeout]], call[name[self]._waiting_futures][name[state]], name[self]._ioloop]]] | keyword[def] identifier[until_state] ( identifier[self] , identifier[state] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[if] identifier[state] keyword[not] keyword[in] identifier[self] . identifier[_valid_states] :
keyword[raise] identifier[ValueError] ( literal[string]
. identifier[format] ( identifier[self] . identifier[_valid_states] , identifier[state] ))
keyword[if] identifier[state] != identifier[self] . identifier[_state] :
keyword[if] identifier[timeout] :
keyword[return] identifier[with_timeout] ( identifier[self] . identifier[_ioloop] . identifier[time] ()+ identifier[timeout] ,
identifier[self] . identifier[_waiting_futures] [ identifier[state] ],
identifier[self] . identifier[_ioloop] )
keyword[else] :
keyword[return] identifier[self] . identifier[_waiting_futures] [ identifier[state] ]
keyword[else] :
identifier[f] = identifier[tornado_Future] ()
identifier[f] . identifier[set_result] ( keyword[True] )
keyword[return] identifier[f] | def until_state(self, state, timeout=None):
"""Return a tornado Future that will resolve when the requested state is set"""
if state not in self._valid_states:
raise ValueError('State must be one of {0}, not {1}'.format(self._valid_states, state)) # depends on [control=['if'], data=['state']]
if state != self._state:
if timeout:
return with_timeout(self._ioloop.time() + timeout, self._waiting_futures[state], self._ioloop) # depends on [control=['if'], data=[]]
else:
return self._waiting_futures[state] # depends on [control=['if'], data=['state']]
else:
f = tornado_Future()
f.set_result(True)
return f |
def is_objective_required(self, objective_id=None, required_objective_id=None):
    """Tests if an objective is required before proceeding with an
    objective.
    arg: objective_id (osid.id.Id): Id of the dependent Objective
    arg: required_objective_id (osid.id.Id): Id of the required
        Objective
    return: (boolean) - true if objective_id depends on
        required_objective_id, false otherwise
    raise: NotFound - objective_id not found
    raise: NullArgument - objective_id is null
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    compliance: mandatory - This method must be implemented.
    """
    if objective_id is None or required_objective_id is None:
        raise NullArgument()
    # any() short-circuits on the first matching requisite instead of
    # materializing the whole requisite-id list before a membership test.
    return any(requisite.get_id() == required_objective_id
               for requisite in self.get_all_requisite_objectives(objective_id))
constant[Tests if an objective is required before proceeding with an
objective.
arg: objective_id (osid.id.Id): Id of the dependent Objective
arg: required_objective_id (osid.id.Id): Id of the required
Objective
return: (boolean) - true if objective_id depends on
required_objective_id, false otherwise
raise: NotFound - objective_id not found
raise: NullArgument - objective_id is null
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
compliance: mandatory - This method must be implemented.
]
if <ast.BoolOp object at 0x7da18c4ceb30> begin[:]
<ast.Raise object at 0x7da18c4cd570>
variable[requisite_objective_ids] assign[=] call[name[list], parameter[]]
for taget[name[requisite]] in starred[call[name[self].get_all_requisite_objectives, parameter[name[objective_id]]]] begin[:]
call[name[requisite_objective_ids].append, parameter[call[name[requisite].get_id, parameter[]]]]
return[compare[name[required_objective_id] in name[requisite_objective_ids]]] | keyword[def] identifier[is_objective_required] ( identifier[self] , identifier[objective_id] = keyword[None] , identifier[required_objective_id] = keyword[None] ):
literal[string]
keyword[if] identifier[objective_id] keyword[is] keyword[None] keyword[or] identifier[required_objective_id] keyword[is] keyword[None] :
keyword[raise] identifier[NullArgument] ()
identifier[requisite_objective_ids] = identifier[list] ()
keyword[for] identifier[requisite] keyword[in] identifier[self] . identifier[get_all_requisite_objectives] ( identifier[objective_id] ):
identifier[requisite_objective_ids] . identifier[append] ( identifier[requisite] . identifier[get_id] ())
keyword[return] identifier[required_objective_id] keyword[in] identifier[requisite_objective_ids] | def is_objective_required(self, objective_id=None, required_objective_id=None):
"""Tests if an objective is required before proceeding with an
objective.
arg: objective_id (osid.id.Id): Id of the dependent Objective
arg: required_objective_id (osid.id.Id): Id of the required
Objective
return: (boolean) - true if objective_id depends on
required_objective_id, false otherwise
raise: NotFound - objective_id not found
raise: NullArgument - objective_id is null
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
compliance: mandatory - This method must be implemented.
"""
if objective_id is None or required_objective_id is None:
raise NullArgument() # depends on [control=['if'], data=[]]
requisite_objective_ids = list()
for requisite in self.get_all_requisite_objectives(objective_id):
requisite_objective_ids.append(requisite.get_id()) # depends on [control=['for'], data=['requisite']]
return required_objective_id in requisite_objective_ids |
def from_config(cls, cp, variable_params):
    """Build a sampling-transforms instance from a config file.

    The parameters to sample in (and the variable parameters they replace)
    are read from the ``sampling_params`` section; the transforms themselves
    are loaded from the ``sampling_transforms`` section(s) via
    ``transforms.read_transforms_from_config``.

    Parameters
    ----------
    cp : WorkflowConfigParser
        Config file parser to read.
    variable_params : list
        List of parameter names of the original variable params.

    Returns
    -------
    SamplingTransforms
        A sampling transforms class.

    Raises
    ------
    ValueError
        If the config file has no ``sampling_params`` section.
    """
    if not cp.has_section('sampling_params'):
        raise ValueError("no sampling_params section found in config file")
    # Sampling parameters and the variable parameters they stand in for.
    params, replaced = read_sampling_params_from_config(cp)
    xforms = transforms.read_transforms_from_config(cp, 'sampling_transforms')
    logging.info("Sampling in {} in place of {}".format(
        ', '.join(params), ', '.join(replaced)))
    return cls(variable_params, params, replaced, xforms)
constant[Gets sampling transforms specified in a config file.
Sampling parameters and the parameters they replace are read from the
``sampling_params`` section, if it exists. Sampling transforms are
read from the ``sampling_transforms`` section(s), using
``transforms.read_transforms_from_config``.
An ``AssertionError`` is raised if no ``sampling_params`` section
exists in the config file.
Parameters
----------
cp : WorkflowConfigParser
Config file parser to read.
variable_params : list
List of parameter names of the original variable params.
Returns
-------
SamplingTransforms
A sampling transforms class.
]
if <ast.UnaryOp object at 0x7da18f8120e0> begin[:]
<ast.Raise object at 0x7da2054a4a00>
<ast.Tuple object at 0x7da2054a7550> assign[=] call[name[read_sampling_params_from_config], parameter[name[cp]]]
variable[sampling_transforms] assign[=] call[name[transforms].read_transforms_from_config, parameter[name[cp], constant[sampling_transforms]]]
call[name[logging].info, parameter[call[constant[Sampling in {} in place of {}].format, parameter[call[constant[, ].join, parameter[name[sampling_params]]], call[constant[, ].join, parameter[name[replace_parameters]]]]]]]
return[call[name[cls], parameter[name[variable_params], name[sampling_params], name[replace_parameters], name[sampling_transforms]]]] | keyword[def] identifier[from_config] ( identifier[cls] , identifier[cp] , identifier[variable_params] ):
literal[string]
keyword[if] keyword[not] identifier[cp] . identifier[has_section] ( literal[string] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[sampling_params] , identifier[replace_parameters] = identifier[read_sampling_params_from_config] ( identifier[cp] )
identifier[sampling_transforms] = identifier[transforms] . identifier[read_transforms_from_config] (
identifier[cp] , literal[string] )
identifier[logging] . identifier[info] ( literal[string] . identifier[format] (
literal[string] . identifier[join] ( identifier[sampling_params] ), literal[string] . identifier[join] ( identifier[replace_parameters] )))
keyword[return] identifier[cls] ( identifier[variable_params] , identifier[sampling_params] ,
identifier[replace_parameters] , identifier[sampling_transforms] ) | def from_config(cls, cp, variable_params):
"""Gets sampling transforms specified in a config file.
Sampling parameters and the parameters they replace are read from the
``sampling_params`` section, if it exists. Sampling transforms are
read from the ``sampling_transforms`` section(s), using
``transforms.read_transforms_from_config``.
An ``AssertionError`` is raised if no ``sampling_params`` section
exists in the config file.
Parameters
----------
cp : WorkflowConfigParser
Config file parser to read.
variable_params : list
List of parameter names of the original variable params.
Returns
-------
SamplingTransforms
A sampling transforms class.
"""
if not cp.has_section('sampling_params'):
raise ValueError('no sampling_params section found in config file') # depends on [control=['if'], data=[]]
# get sampling transformations
(sampling_params, replace_parameters) = read_sampling_params_from_config(cp)
sampling_transforms = transforms.read_transforms_from_config(cp, 'sampling_transforms')
logging.info('Sampling in {} in place of {}'.format(', '.join(sampling_params), ', '.join(replace_parameters)))
return cls(variable_params, sampling_params, replace_parameters, sampling_transforms) |
def enable_device(self):
    """
    re-enable the connected device and allow user activity in device again
    :return: bool
    """
    response = self.__send_command(const.CMD_ENABLEDEVICE)
    # Guard clause: a falsy status means the device rejected the command.
    if not response.get('status'):
        raise ZKErrorResponse("Can't enable device")
    self.is_enabled = True
    return True
constant[
re-enable the connected device and allow user activity in device again
:return: bool
]
variable[cmd_response] assign[=] call[name[self].__send_command, parameter[name[const].CMD_ENABLEDEVICE]]
if call[name[cmd_response].get, parameter[constant[status]]] begin[:]
name[self].is_enabled assign[=] constant[True]
return[constant[True]] | keyword[def] identifier[enable_device] ( identifier[self] ):
literal[string]
identifier[cmd_response] = identifier[self] . identifier[__send_command] ( identifier[const] . identifier[CMD_ENABLEDEVICE] )
keyword[if] identifier[cmd_response] . identifier[get] ( literal[string] ):
identifier[self] . identifier[is_enabled] = keyword[True]
keyword[return] keyword[True]
keyword[else] :
keyword[raise] identifier[ZKErrorResponse] ( literal[string] ) | def enable_device(self):
"""
re-enable the connected device and allow user activity in device again
:return: bool
"""
cmd_response = self.__send_command(const.CMD_ENABLEDEVICE)
if cmd_response.get('status'):
self.is_enabled = True
return True # depends on [control=['if'], data=[]]
else:
raise ZKErrorResponse("Can't enable device") |
def delete_hierarchy(self, hierarchy_id):
    """Deletes a ``Hierarchy``.
    arg: hierarchy_id (osid.id.Id): the ``Id`` of the
        ``Hierarchy`` to remove
    raise: NotFound - ``hierarchy_id`` not found
    raise: NullArgument - ``hierarchy_id`` is ``null``
    raise: OperationFailed - unable to complete request
    raise: PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Validate the argument before touching the backend.  The original code
    # *returned* an InvalidArgument instance instead of raising it, silently
    # handing callers an exception object as the "result"; the documented
    # contract is to raise.
    if not isinstance(hierarchy_id, ABCId):
        raise InvalidArgument('the argument is not a valid OSID Id')
    collection = JSONClientValidated('hierarchy',
                                     collection='Hierarchy',
                                     runtime=self._runtime)
    # Should we delete the underlying Relationship Family here???
    collection.delete_one({'_id': ObjectId(hierarchy_id.get_identifier())})
constant[Deletes a ``Hierarchy``.
arg: hierarchy_id (osid.id.Id): the ``Id`` of the
``Hierarchy`` to remove
raise: NotFound - ``hierarchy_id`` not found
raise: NullArgument - ``hierarchy_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
variable[collection] assign[=] call[name[JSONClientValidated], parameter[constant[hierarchy]]]
if <ast.UnaryOp object at 0x7da18ede4340> begin[:]
return[call[name[InvalidArgument], parameter[constant[the argument is not a valid OSID Id]]]]
call[name[collection].delete_one, parameter[dictionary[[<ast.Constant object at 0x7da18ede6bf0>], [<ast.Call object at 0x7da18ede4e80>]]]] | keyword[def] identifier[delete_hierarchy] ( identifier[self] , identifier[hierarchy_id] ):
literal[string]
identifier[collection] = identifier[JSONClientValidated] ( literal[string] ,
identifier[collection] = literal[string] ,
identifier[runtime] = identifier[self] . identifier[_runtime] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[hierarchy_id] , identifier[ABCId] ):
keyword[return] identifier[InvalidArgument] ( literal[string] )
identifier[collection] . identifier[delete_one] ({ literal[string] : identifier[ObjectId] ( identifier[hierarchy_id] . identifier[get_identifier] ())}) | def delete_hierarchy(self, hierarchy_id):
"""Deletes a ``Hierarchy``.
arg: hierarchy_id (osid.id.Id): the ``Id`` of the
``Hierarchy`` to remove
raise: NotFound - ``hierarchy_id`` not found
raise: NullArgument - ``hierarchy_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
collection = JSONClientValidated('hierarchy', collection='Hierarchy', runtime=self._runtime)
if not isinstance(hierarchy_id, ABCId):
return InvalidArgument('the argument is not a valid OSID Id') # depends on [control=['if'], data=[]]
# Should we delete the underlying Relationship Family here???
collection.delete_one({'_id': ObjectId(hierarchy_id.get_identifier())}) |
def add_spanning_tree(self, st):
    """
    Add a spanning tree to the graph.
    @type st: dictionary
    @param st: Spanning tree mapping each node to its parent (or None
        for the root).
    """
    self.add_nodes(list(st.keys()))
    # Every non-root entry contributes one (parent, child) edge.
    for child, parent in st.items():
        if parent is not None:
            self.add_edge((parent, child))
constant[
Add a spanning tree to the graph.
@type st: dictionary
@param st: Spanning tree.
]
call[name[self].add_nodes, parameter[call[name[list], parameter[call[name[st].keys, parameter[]]]]]]
for taget[name[each]] in starred[name[st]] begin[:]
if compare[call[name[st]][name[each]] is_not constant[None]] begin[:]
call[name[self].add_edge, parameter[tuple[[<ast.Subscript object at 0x7da1b17ce6e0>, <ast.Name object at 0x7da1b17cf6a0>]]]] | keyword[def] identifier[add_spanning_tree] ( identifier[self] , identifier[st] ):
literal[string]
identifier[self] . identifier[add_nodes] ( identifier[list] ( identifier[st] . identifier[keys] ()))
keyword[for] identifier[each] keyword[in] identifier[st] :
keyword[if] ( identifier[st] [ identifier[each] ] keyword[is] keyword[not] keyword[None] ):
identifier[self] . identifier[add_edge] (( identifier[st] [ identifier[each] ], identifier[each] )) | def add_spanning_tree(self, st):
"""
Add a spanning tree to the graph.
@type st: dictionary
@param st: Spanning tree.
"""
self.add_nodes(list(st.keys()))
for each in st:
if st[each] is not None:
self.add_edge((st[each], each)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['each']] |
def edge_refine_triangulation_by_triangles(self, triangles):
    """
    return points defining a refined triangulation obtained by bisecting
    every edge that belongs to any triangle in the supplied list of
    triangle indices.

    Notes
    -----
    Triangles are referenced by a single index ``i``; their vertices are
    given by ``self.simplices[i]``.  Edges are collected into a set in a
    canonical (lexicographically smaller tuple) orientation, so an edge
    shared by two listed triangles is bisected exactly once.
    """
    simplices = self.simplices
    edge_set = set()
    for tri_index in np.array(triangles).reshape(-1):
        v0, v1, v2 = simplices[tri_index]
        for a, b in ((v0, v1), (v1, v2), (v0, v2)):
            # Store each edge in a canonical orientation to deduplicate.
            edge_set.add(min((a, b), (b, a)))
    edges = np.array(list(edge_set))
    mid_lons, mid_lats = self.segment_midpoints(edges)
    new_lons = np.concatenate((self.lons, mid_lons), axis=0)
    new_lats = np.concatenate((self.lats, mid_lats), axis=0)
    return new_lons, new_lats
constant[
return points defining a refined triangulation obtained by bisection of all edges
in the triangulation that are associated with the triangles in the list
of indices provided.
Notes
-----
The triangles are here represented as a single index.
The vertices of triangle i are given by self.simplices[i].
]
variable[simplices] assign[=] name[self].simplices
variable[segments] assign[=] call[name[set], parameter[]]
for taget[name[index]] in starred[call[call[name[np].array, parameter[name[triangles]]].reshape, parameter[<ast.UnaryOp object at 0x7da20c76fd60>]]] begin[:]
variable[tri] assign[=] call[name[simplices]][name[index]]
call[name[segments].add, parameter[call[name[min], parameter[call[name[tuple], parameter[tuple[[<ast.Subscript object at 0x7da20c76f490>, <ast.Subscript object at 0x7da20c76c5b0>]]]], call[name[tuple], parameter[tuple[[<ast.Subscript object at 0x7da20c76f0d0>, <ast.Subscript object at 0x7da20c76d540>]]]]]]]]
call[name[segments].add, parameter[call[name[min], parameter[call[name[tuple], parameter[tuple[[<ast.Subscript object at 0x7da20c76fbb0>, <ast.Subscript object at 0x7da20c76cf10>]]]], call[name[tuple], parameter[tuple[[<ast.Subscript object at 0x7da20c76df30>, <ast.Subscript object at 0x7da20c76f2b0>]]]]]]]]
call[name[segments].add, parameter[call[name[min], parameter[call[name[tuple], parameter[tuple[[<ast.Subscript object at 0x7da20c76f340>, <ast.Subscript object at 0x7da20c76f550>]]]], call[name[tuple], parameter[tuple[[<ast.Subscript object at 0x7da20c76e9e0>, <ast.Subscript object at 0x7da20c76ebc0>]]]]]]]]
variable[segs] assign[=] call[name[np].array, parameter[call[name[list], parameter[name[segments]]]]]
<ast.Tuple object at 0x7da20c76ff70> assign[=] call[name[self].segment_midpoints, parameter[name[segs]]]
variable[lonv1] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.Attribute object at 0x7da20c76cd90>, <ast.Name object at 0x7da20c76ee60>]]]]
variable[latv1] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.Attribute object at 0x7da20c76fdc0>, <ast.Name object at 0x7da20c76f4f0>]]]]
return[tuple[[<ast.Name object at 0x7da1b23d05e0>, <ast.Name object at 0x7da1b23d0400>]]] | keyword[def] identifier[edge_refine_triangulation_by_triangles] ( identifier[self] , identifier[triangles] ):
literal[string]
identifier[simplices] = identifier[self] . identifier[simplices]
identifier[segments] = identifier[set] ()
keyword[for] identifier[index] keyword[in] identifier[np] . identifier[array] ( identifier[triangles] ). identifier[reshape] (- literal[int] ):
identifier[tri] = identifier[simplices] [ identifier[index] ]
identifier[segments] . identifier[add] ( identifier[min] ( identifier[tuple] (( identifier[tri] [ literal[int] ], identifier[tri] [ literal[int] ])), identifier[tuple] (( identifier[tri] [ literal[int] ], identifier[tri] [ literal[int] ]))))
identifier[segments] . identifier[add] ( identifier[min] ( identifier[tuple] (( identifier[tri] [ literal[int] ], identifier[tri] [ literal[int] ])), identifier[tuple] (( identifier[tri] [ literal[int] ], identifier[tri] [ literal[int] ]))))
identifier[segments] . identifier[add] ( identifier[min] ( identifier[tuple] (( identifier[tri] [ literal[int] ], identifier[tri] [ literal[int] ])), identifier[tuple] (( identifier[tri] [ literal[int] ], identifier[tri] [ literal[int] ]))))
identifier[segs] = identifier[np] . identifier[array] ( identifier[list] ( identifier[segments] ))
identifier[mlons] , identifier[mlats] = identifier[self] . identifier[segment_midpoints] ( identifier[segs] )
identifier[lonv1] = identifier[np] . identifier[concatenate] (( identifier[self] . identifier[lons] , identifier[mlons] ), identifier[axis] = literal[int] )
identifier[latv1] = identifier[np] . identifier[concatenate] (( identifier[self] . identifier[lats] , identifier[mlats] ), identifier[axis] = literal[int] )
keyword[return] identifier[lonv1] , identifier[latv1] | def edge_refine_triangulation_by_triangles(self, triangles):
"""
return points defining a refined triangulation obtained by bisection of all edges
in the triangulation that are associated with the triangles in the list
of indices provided.
Notes
-----
The triangles are here represented as a single index.
The vertices of triangle i are given by self.simplices[i].
"""
## Note there should be no duplicates in the list of triangles
## but because we remove duplicates from the list of all segments,
## there is no pressing need to check this.
# identify the segments
simplices = self.simplices
segments = set()
for index in np.array(triangles).reshape(-1):
tri = simplices[index]
segments.add(min(tuple((tri[0], tri[1])), tuple((tri[1], tri[0]))))
segments.add(min(tuple((tri[1], tri[2])), tuple((tri[2], tri[1]))))
segments.add(min(tuple((tri[0], tri[2])), tuple((tri[2], tri[0])))) # depends on [control=['for'], data=['index']]
segs = np.array(list(segments))
(mlons, mlats) = self.segment_midpoints(segs)
lonv1 = np.concatenate((self.lons, mlons), axis=0)
latv1 = np.concatenate((self.lats, mlats), axis=0)
return (lonv1, latv1) |
def cluster_create(version,
                   name='main',
                   port=None,
                   locale=None,
                   encoding=None,
                   datadir=None,
                   allow_group_access=None,
                   data_checksums=None,
                   wal_segsize=None):
    '''
    Adds a cluster to the Postgres server.

    .. warning:

        Only works for debian family distros so far.

    CLI Example:

    .. code-block:: bash

        salt '*' postgres.cluster_create '9.3'

        salt '*' postgres.cluster_create '9.3' 'main'

        salt '*' postgres.cluster_create '9.3' locale='fr_FR'

        salt '*' postgres.cluster_create '11' data_checksums=True wal_segsize='32'
    '''
    cmd = [salt.utils.path.which('pg_createcluster')]
    if port:
        cmd += ['--port', six.text_type(port)]
    if locale:
        cmd += ['--locale', locale]
    if encoding:
        cmd += ['--encoding', encoding]
    if datadir:
        cmd += ['--datadir', datadir]
    cmd += [version, name]
    # initdb-specific options are passed after '--'
    if allow_group_access or data_checksums or wal_segsize:
        cmd += ['--']
    if allow_group_access is True:
        cmd += ['--allow-group-access']
    if data_checksums is True:
        cmd += ['--data-checksums']
    if wal_segsize:
        # Coerce to text (as done for ``port`` above) so that an integer
        # segment size does not make pipes.quote raise TypeError.
        cmd += ['--wal-segsize', six.text_type(wal_segsize)]
    cmdstr = ' '.join([pipes.quote(c) for c in cmd])
    ret = __salt__['cmd.run_all'](cmdstr, python_shell=False)
    if ret.get('retcode', 0) != 0:
        log.error('Error creating a Postgresql cluster %s/%s', version, name)
        return False
    return ret
constant[
Adds a cluster to the Postgres server.
.. warning:
Only works for debian family distros so far.
CLI Example:
.. code-block:: bash
salt '*' postgres.cluster_create '9.3'
salt '*' postgres.cluster_create '9.3' 'main'
salt '*' postgres.cluster_create '9.3' locale='fr_FR'
salt '*' postgres.cluster_create '11' data_checksums=True wal_segsize='32'
]
variable[cmd] assign[=] list[[<ast.Call object at 0x7da1b216af20>]]
if name[port] begin[:]
<ast.AugAssign object at 0x7da1b216af50>
if name[locale] begin[:]
<ast.AugAssign object at 0x7da1b216b7c0>
if name[encoding] begin[:]
<ast.AugAssign object at 0x7da1b216a200>
if name[datadir] begin[:]
<ast.AugAssign object at 0x7da1b216a170>
<ast.AugAssign object at 0x7da1b216a530>
if <ast.BoolOp object at 0x7da1b2168460> begin[:]
<ast.AugAssign object at 0x7da1b2169180>
if compare[name[allow_group_access] is constant[True]] begin[:]
<ast.AugAssign object at 0x7da1b216ad40>
if compare[name[data_checksums] is constant[True]] begin[:]
<ast.AugAssign object at 0x7da1b2168160>
if name[wal_segsize] begin[:]
<ast.AugAssign object at 0x7da1b21690c0>
variable[cmdstr] assign[=] call[constant[ ].join, parameter[<ast.ListComp object at 0x7da1b216b0d0>]]
variable[ret] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[cmdstr]]]
if compare[call[name[ret].get, parameter[constant[retcode], constant[0]]] not_equal[!=] constant[0]] begin[:]
call[name[log].error, parameter[constant[Error creating a Postgresql cluster %s/%s], name[version], name[name]]]
return[constant[False]]
return[name[ret]] | keyword[def] identifier[cluster_create] ( identifier[version] ,
identifier[name] = literal[string] ,
identifier[port] = keyword[None] ,
identifier[locale] = keyword[None] ,
identifier[encoding] = keyword[None] ,
identifier[datadir] = keyword[None] ,
identifier[allow_group_access] = keyword[None] ,
identifier[data_checksums] = keyword[None] ,
identifier[wal_segsize] = keyword[None] ):
literal[string]
identifier[cmd] =[ identifier[salt] . identifier[utils] . identifier[path] . identifier[which] ( literal[string] )]
keyword[if] identifier[port] :
identifier[cmd] +=[ literal[string] , identifier[six] . identifier[text_type] ( identifier[port] )]
keyword[if] identifier[locale] :
identifier[cmd] +=[ literal[string] , identifier[locale] ]
keyword[if] identifier[encoding] :
identifier[cmd] +=[ literal[string] , identifier[encoding] ]
keyword[if] identifier[datadir] :
identifier[cmd] +=[ literal[string] , identifier[datadir] ]
identifier[cmd] +=[ identifier[version] , identifier[name] ]
keyword[if] identifier[allow_group_access] keyword[or] identifier[data_checksums] keyword[or] identifier[wal_segsize] :
identifier[cmd] +=[ literal[string] ]
keyword[if] identifier[allow_group_access] keyword[is] keyword[True] :
identifier[cmd] +=[ literal[string] ]
keyword[if] identifier[data_checksums] keyword[is] keyword[True] :
identifier[cmd] +=[ literal[string] ]
keyword[if] identifier[wal_segsize] :
identifier[cmd] +=[ literal[string] , identifier[wal_segsize] ]
identifier[cmdstr] = literal[string] . identifier[join] ([ identifier[pipes] . identifier[quote] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[cmd] ])
identifier[ret] = identifier[__salt__] [ literal[string] ]( identifier[cmdstr] , identifier[python_shell] = keyword[False] )
keyword[if] identifier[ret] . identifier[get] ( literal[string] , literal[int] )!= literal[int] :
identifier[log] . identifier[error] ( literal[string] , identifier[version] , identifier[name] )
keyword[return] keyword[False]
keyword[return] identifier[ret] | def cluster_create(version, name='main', port=None, locale=None, encoding=None, datadir=None, allow_group_access=None, data_checksums=None, wal_segsize=None):
"""
Adds a cluster to the Postgres server.
.. warning:
Only works for debian family distros so far.
CLI Example:
.. code-block:: bash
salt '*' postgres.cluster_create '9.3'
salt '*' postgres.cluster_create '9.3' 'main'
salt '*' postgres.cluster_create '9.3' locale='fr_FR'
salt '*' postgres.cluster_create '11' data_checksums=True wal_segsize='32'
"""
cmd = [salt.utils.path.which('pg_createcluster')]
if port:
cmd += ['--port', six.text_type(port)] # depends on [control=['if'], data=[]]
if locale:
cmd += ['--locale', locale] # depends on [control=['if'], data=[]]
if encoding:
cmd += ['--encoding', encoding] # depends on [control=['if'], data=[]]
if datadir:
cmd += ['--datadir', datadir] # depends on [control=['if'], data=[]]
cmd += [version, name]
# initdb-specific options are passed after '--'
if allow_group_access or data_checksums or wal_segsize:
cmd += ['--'] # depends on [control=['if'], data=[]]
if allow_group_access is True:
cmd += ['--allow-group-access'] # depends on [control=['if'], data=[]]
if data_checksums is True:
cmd += ['--data-checksums'] # depends on [control=['if'], data=[]]
if wal_segsize:
cmd += ['--wal-segsize', wal_segsize] # depends on [control=['if'], data=[]]
cmdstr = ' '.join([pipes.quote(c) for c in cmd])
ret = __salt__['cmd.run_all'](cmdstr, python_shell=False)
if ret.get('retcode', 0) != 0:
log.error('Error creating a Postgresql cluster %s/%s', version, name)
return False # depends on [control=['if'], data=[]]
return ret |
def to_dict(self):
    """Return common list python object.

    :returns: Dictionary of groups and data
    :rtype: dict
    """
    # Each key of ``self.data`` is a tuple of group values; append the
    # stored value so each entry becomes one flat row.
    rows = [list(group_key) + [group_value]
            for group_key, group_value in list(self.data.items())]
    return {'groups': self.groups, 'data': rows}
constant[Return common list python object.
:returns: Dictionary of groups and data
:rtype: dict
]
variable[list_data] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b2344e20>, <ast.Name object at 0x7da1b2346350>]]] in starred[call[name[list], parameter[call[name[self].data.items, parameter[]]]]] begin[:]
variable[row] assign[=] call[name[list], parameter[name[key]]]
call[name[row].append, parameter[name[value]]]
call[name[list_data].append, parameter[name[row]]]
return[dictionary[[<ast.Constant object at 0x7da1b2345480>, <ast.Constant object at 0x7da1b2347df0>], [<ast.Attribute object at 0x7da1b2347af0>, <ast.Name object at 0x7da1b2345150>]]] | keyword[def] identifier[to_dict] ( identifier[self] ):
literal[string]
identifier[list_data] =[]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[list] ( identifier[self] . identifier[data] . identifier[items] ()):
identifier[row] = identifier[list] ( identifier[key] )
identifier[row] . identifier[append] ( identifier[value] )
identifier[list_data] . identifier[append] ( identifier[row] )
keyword[return] {
literal[string] : identifier[self] . identifier[groups] ,
literal[string] : identifier[list_data]
} | def to_dict(self):
"""Return common list python object.
:returns: Dictionary of groups and data
:rtype: dict
"""
list_data = []
for (key, value) in list(self.data.items()):
row = list(key)
row.append(value)
list_data.append(row) # depends on [control=['for'], data=[]]
return {'groups': self.groups, 'data': list_data} |
def save_html(out_file, plot_html):
    """ Save html plots to an output file.

    Parameters
    ----------
    out_file : str or file-like
        Path of the output file, or an already-open writable file object.
        When a path is given, the file is opened and closed by this function.
    plot_html : object
        Plot object whose ``data`` attribute holds the HTML fragment to embed.
    """
    internal_open = False
    if isinstance(out_file, str):  # accept a path as well as an open handle
        out_file = open(out_file, "w")
        internal_open = True
    try:
        out_file.write("<html><head><script>\n")

        # dump the js code bundled with the package next to this module
        bundle_path = os.path.join(os.path.split(__file__)[0], "resources", "bundle.js")
        with io.open(bundle_path, encoding="utf-8") as f:
            bundle_data = f.read()
        out_file.write(bundle_data)
        out_file.write("</script></head><body>\n")
        out_file.write(plot_html.data)
        out_file.write("</body></html>\n")
    finally:
        # only close handles we opened ourselves, even if a write failed
        if internal_open:
            out_file.close()
constant[ Save html plots to an output file.
]
variable[internal_open] assign[=] constant[False]
if compare[call[name[type], parameter[name[out_file]]] equal[==] name[str]] begin[:]
variable[out_file] assign[=] call[name[open], parameter[name[out_file], constant[w]]]
variable[internal_open] assign[=] constant[True]
call[name[out_file].write, parameter[constant[<html><head><script>
]]]
variable[bundle_path] assign[=] call[name[os].path.join, parameter[call[call[name[os].path.split, parameter[name[__file__]]]][constant[0]], constant[resources], constant[bundle.js]]]
with call[name[io].open, parameter[name[bundle_path]]] begin[:]
variable[bundle_data] assign[=] call[name[f].read, parameter[]]
call[name[out_file].write, parameter[name[bundle_data]]]
call[name[out_file].write, parameter[constant[</script></head><body>
]]]
call[name[out_file].write, parameter[name[plot_html].data]]
call[name[out_file].write, parameter[constant[</body></html>
]]]
if name[internal_open] begin[:]
call[name[out_file].close, parameter[]] | keyword[def] identifier[save_html] ( identifier[out_file] , identifier[plot_html] ):
literal[string]
identifier[internal_open] = keyword[False]
keyword[if] identifier[type] ( identifier[out_file] )== identifier[str] :
identifier[out_file] = identifier[open] ( identifier[out_file] , literal[string] )
identifier[internal_open] = keyword[True]
identifier[out_file] . identifier[write] ( literal[string] )
identifier[bundle_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[split] ( identifier[__file__] )[ literal[int] ], literal[string] , literal[string] )
keyword[with] identifier[io] . identifier[open] ( identifier[bundle_path] , identifier[encoding] = literal[string] ) keyword[as] identifier[f] :
identifier[bundle_data] = identifier[f] . identifier[read] ()
identifier[out_file] . identifier[write] ( identifier[bundle_data] )
identifier[out_file] . identifier[write] ( literal[string] )
identifier[out_file] . identifier[write] ( identifier[plot_html] . identifier[data] )
identifier[out_file] . identifier[write] ( literal[string] )
keyword[if] identifier[internal_open] :
identifier[out_file] . identifier[close] () | def save_html(out_file, plot_html):
""" Save html plots to an output file.
"""
internal_open = False
if type(out_file) == str:
out_file = open(out_file, 'w')
internal_open = True # depends on [control=['if'], data=[]]
out_file.write('<html><head><script>\n')
# dump the js code
bundle_path = os.path.join(os.path.split(__file__)[0], 'resources', 'bundle.js')
with io.open(bundle_path, encoding='utf-8') as f:
bundle_data = f.read() # depends on [control=['with'], data=['f']]
out_file.write(bundle_data)
out_file.write('</script></head><body>\n')
out_file.write(plot_html.data)
out_file.write('</body></html>\n')
if internal_open:
out_file.close() # depends on [control=['if'], data=[]] |
def send_now(users, label, extra_context=None, sender=None):
    """
    Creates a new notice.

    This is intended to be how other apps create new notices.

    notification.send(user, "friends_invite_sent", {
        "spam": "eggs",
        "foo": "bar",
    )

    Returns True if at least one backend delivered the notice to any user.
    """
    sent = False
    if extra_context is None:
        extra_context = {}

    notice_type = NoticeType.objects.get(label=label)
    current_language = get_language()

    for user in users:
        # get user language for user from language store defined in
        # NOTIFICATION_LANGUAGE_MODULE setting
        try:
            language = get_notification_language(user)
        except LanguageStoreNotAvailable:
            language = None

        if language is not None:
            # activate the user's language
            activate(language)
        else:
            # make sure a previous user's activated language does not leak
            # into the notifications of a user without a stored language
            activate(current_language)

        for backend in NOTIFICATION_BACKENDS.values():
            if backend.can_send(user, notice_type):
                backend.deliver(user, sender, notice_type, extra_context)
                sent = True

    # reset environment to original language
    activate(current_language)
    return sent
constant[
Creates a new notice.
This is intended to be how other apps create new notices.
notification.send(user, "friends_invite_sent", {
"spam": "eggs",
"foo": "bar",
)
]
variable[sent] assign[=] constant[False]
if compare[name[extra_context] is constant[None]] begin[:]
variable[extra_context] assign[=] dictionary[[], []]
variable[notice_type] assign[=] call[name[NoticeType].objects.get, parameter[]]
variable[current_language] assign[=] call[name[get_language], parameter[]]
for taget[name[user]] in starred[name[users]] begin[:]
<ast.Try object at 0x7da1b008b820>
if compare[name[language] is_not constant[None]] begin[:]
call[name[activate], parameter[name[language]]]
for taget[name[backend]] in starred[call[name[NOTIFICATION_BACKENDS].values, parameter[]]] begin[:]
if call[name[backend].can_send, parameter[name[user], name[notice_type]]] begin[:]
call[name[backend].deliver, parameter[name[user], name[sender], name[notice_type], name[extra_context]]]
variable[sent] assign[=] constant[True]
call[name[activate], parameter[name[current_language]]]
return[name[sent]] | keyword[def] identifier[send_now] ( identifier[users] , identifier[label] , identifier[extra_context] = keyword[None] , identifier[sender] = keyword[None] ):
literal[string]
identifier[sent] = keyword[False]
keyword[if] identifier[extra_context] keyword[is] keyword[None] :
identifier[extra_context] ={}
identifier[notice_type] = identifier[NoticeType] . identifier[objects] . identifier[get] ( identifier[label] = identifier[label] )
identifier[current_language] = identifier[get_language] ()
keyword[for] identifier[user] keyword[in] identifier[users] :
keyword[try] :
identifier[language] = identifier[get_notification_language] ( identifier[user] )
keyword[except] identifier[LanguageStoreNotAvailable] :
identifier[language] = keyword[None]
keyword[if] identifier[language] keyword[is] keyword[not] keyword[None] :
identifier[activate] ( identifier[language] )
keyword[for] identifier[backend] keyword[in] identifier[NOTIFICATION_BACKENDS] . identifier[values] ():
keyword[if] identifier[backend] . identifier[can_send] ( identifier[user] , identifier[notice_type] ):
identifier[backend] . identifier[deliver] ( identifier[user] , identifier[sender] , identifier[notice_type] , identifier[extra_context] )
identifier[sent] = keyword[True]
identifier[activate] ( identifier[current_language] )
keyword[return] identifier[sent] | def send_now(users, label, extra_context=None, sender=None):
"""
Creates a new notice.
This is intended to be how other apps create new notices.
notification.send(user, "friends_invite_sent", {
"spam": "eggs",
"foo": "bar",
)
"""
sent = False
if extra_context is None:
extra_context = {} # depends on [control=['if'], data=['extra_context']]
notice_type = NoticeType.objects.get(label=label)
current_language = get_language()
for user in users:
# get user language for user from language store defined in
# NOTIFICATION_LANGUAGE_MODULE setting
try:
language = get_notification_language(user) # depends on [control=['try'], data=[]]
except LanguageStoreNotAvailable:
language = None # depends on [control=['except'], data=[]]
if language is not None:
# activate the user's language
activate(language) # depends on [control=['if'], data=['language']]
for backend in NOTIFICATION_BACKENDS.values():
if backend.can_send(user, notice_type):
backend.deliver(user, sender, notice_type, extra_context)
sent = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['backend']] # depends on [control=['for'], data=['user']]
# reset environment to original language
activate(current_language)
return sent |
def approx_contains(self, point, atol):
    """Return ``True`` if ``point`` is "almost" contained in this set.

    Parameters
    ----------
    point : `array-like` or float
        Point to be tested. Its length must be equal to `ndim`.
        In the 1d case, ``point`` can be given as a float.
    atol : float
        Maximum allowed distance in maximum norm from ``point``
        to ``self``.

    Examples
    --------
    >>> min_pt, max_pt = [-1, 0, 2], [-0.5, 0, 3]
    >>> rbox = IntervalProd(min_pt, max_pt)
    >>> # Numerical error
    >>> rbox.approx_contains([-1 + np.sqrt(0.5)**2, 0., 2.9], atol=0)
    False
    >>> rbox.approx_contains([-1 + np.sqrt(0.5)**2, 0., 2.9], atol=1e-9)
    True
    """
    try:
        # Duck-typed check of type. The builtin ``float`` replaces the
        # ``np.float`` alias (removed in NumPy 1.24), and
        # asarray/atleast_1d replaces ``copy=False, ndmin=1`` (in NumPy 2.0
        # ``copy=False`` raises when a copy is unavoidable, which would
        # wrongly trip the except clause below).
        point = np.atleast_1d(np.asarray(point, dtype=float))
    except (ValueError, TypeError):
        return False

    if point.size == 0:
        return True
    elif point.shape != (self.ndim,):
        return False

    return self.dist(point, exponent=np.inf) <= atol
constant[Return ``True`` if ``point`` is "almost" contained in this set.
Parameters
----------
point : `array-like` or float
Point to be tested. Its length must be equal to `ndim`.
In the 1d case, ``point`` can be given as a float.
atol : float
Maximum allowed distance in maximum norm from ``point``
to ``self``.
Examples
--------
>>> min_pt, max_pt = [-1, 0, 2], [-0.5, 0, 3]
>>> rbox = IntervalProd(min_pt, max_pt)
>>> # Numerical error
>>> rbox.approx_contains([-1 + np.sqrt(0.5)**2, 0., 2.9], atol=0)
False
>>> rbox.approx_contains([-1 + np.sqrt(0.5)**2, 0., 2.9], atol=1e-9)
True
]
<ast.Try object at 0x7da1b1ec6710>
if compare[name[point].size equal[==] constant[0]] begin[:]
return[constant[True]]
return[compare[call[name[self].dist, parameter[name[point]]] less_or_equal[<=] name[atol]]] | keyword[def] identifier[approx_contains] ( identifier[self] , identifier[point] , identifier[atol] ):
literal[string]
keyword[try] :
identifier[point] = identifier[np] . identifier[array] ( identifier[point] , identifier[dtype] = identifier[np] . identifier[float] , identifier[copy] = keyword[False] , identifier[ndmin] = literal[int] )
keyword[except] ( identifier[ValueError] , identifier[TypeError] ):
keyword[return] keyword[False]
keyword[if] identifier[point] . identifier[size] == literal[int] :
keyword[return] keyword[True]
keyword[elif] identifier[point] . identifier[shape] !=( identifier[self] . identifier[ndim] ,):
keyword[return] keyword[False]
keyword[return] identifier[self] . identifier[dist] ( identifier[point] , identifier[exponent] = identifier[np] . identifier[inf] )<= identifier[atol] | def approx_contains(self, point, atol):
"""Return ``True`` if ``point`` is "almost" contained in this set.
Parameters
----------
point : `array-like` or float
Point to be tested. Its length must be equal to `ndim`.
In the 1d case, ``point`` can be given as a float.
atol : float
Maximum allowed distance in maximum norm from ``point``
to ``self``.
Examples
--------
>>> min_pt, max_pt = [-1, 0, 2], [-0.5, 0, 3]
>>> rbox = IntervalProd(min_pt, max_pt)
>>> # Numerical error
>>> rbox.approx_contains([-1 + np.sqrt(0.5)**2, 0., 2.9], atol=0)
False
>>> rbox.approx_contains([-1 + np.sqrt(0.5)**2, 0., 2.9], atol=1e-9)
True
"""
try:
# Duck-typed check of type
point = np.array(point, dtype=np.float, copy=False, ndmin=1) # depends on [control=['try'], data=[]]
except (ValueError, TypeError):
return False # depends on [control=['except'], data=[]]
if point.size == 0:
return True # depends on [control=['if'], data=[]]
elif point.shape != (self.ndim,):
return False # depends on [control=['if'], data=[]]
return self.dist(point, exponent=np.inf) <= atol |
def swap_yaml_string(file_path, swaps):
    """
    Swap a string in a yaml file without touching the existing formatting.
    """
    original_text = file_to_string(file_path)
    updated_text = original_text
    for swap in swaps:
        key, replacement = swap[0], swap[1]
        # Match an optionally quoted value following ``key: `` and replace
        # the whole (quoted) value with the new one.
        pattern = re.compile(r'(?<={0}: )(["\']?)(.*)\1'.format(key),
                             re.MULTILINE)
        updated_text = re.sub(pattern, replacement, updated_text)

    modified = updated_text != original_text
    if modified:
        # Only touch the file when something actually changed.
        string_to_file(file_path, updated_text)
    return (updated_text, modified)
constant[
Swap a string in a yaml file without touching the existing formatting.
]
variable[original_file] assign[=] call[name[file_to_string], parameter[name[file_path]]]
variable[new_file] assign[=] name[original_file]
variable[changed] assign[=] constant[False]
for taget[name[item]] in starred[name[swaps]] begin[:]
variable[match] assign[=] call[name[re].compile, parameter[call[constant[(?<={0}: )(["\']?)(.*)\1].format, parameter[call[name[item]][constant[0]]]], name[re].MULTILINE]]
variable[new_file] assign[=] call[name[re].sub, parameter[name[match], call[name[item]][constant[1]], name[new_file]]]
if compare[name[new_file] not_equal[!=] name[original_file]] begin[:]
variable[changed] assign[=] constant[True]
call[name[string_to_file], parameter[name[file_path], name[new_file]]]
return[tuple[[<ast.Name object at 0x7da1b0b44910>, <ast.Name object at 0x7da1b0b466e0>]]] | keyword[def] identifier[swap_yaml_string] ( identifier[file_path] , identifier[swaps] ):
literal[string]
identifier[original_file] = identifier[file_to_string] ( identifier[file_path] )
identifier[new_file] = identifier[original_file]
identifier[changed] = keyword[False]
keyword[for] identifier[item] keyword[in] identifier[swaps] :
identifier[match] = identifier[re] . identifier[compile] ( literal[string] . identifier[format] ( identifier[item] [ literal[int] ]),
identifier[re] . identifier[MULTILINE] )
identifier[new_file] = identifier[re] . identifier[sub] ( identifier[match] , identifier[item] [ literal[int] ], identifier[new_file] )
keyword[if] identifier[new_file] != identifier[original_file] :
identifier[changed] = keyword[True]
identifier[string_to_file] ( identifier[file_path] , identifier[new_file] )
keyword[return] ( identifier[new_file] , identifier[changed] ) | def swap_yaml_string(file_path, swaps):
"""
Swap a string in a yaml file without touching the existing formatting.
"""
original_file = file_to_string(file_path)
new_file = original_file
changed = False
for item in swaps:
match = re.compile('(?<={0}: )(["\\\']?)(.*)\\1'.format(item[0]), re.MULTILINE)
new_file = re.sub(match, item[1], new_file) # depends on [control=['for'], data=['item']]
if new_file != original_file:
changed = True # depends on [control=['if'], data=[]]
string_to_file(file_path, new_file)
return (new_file, changed) |
def line_tokenizer(self, text):
    """
    From a .txt file, outputs lines as string in list.

    input: 21. u2-wa-a-ru at-ta e2-kal2-la-ka _e2_-ka wu-e-er
           22. ... u2-ul szi-...
           23. ... x ...

    output:['21. u2-wa-a-ru at-ta e2-kal2-la-ka _e2_-ka wu-e-er',
            '22. ... u2-ul szi-...',
            '23. ... x ...',]

    :param: .txt file containing untokenized string
    :return: lines as strings in list
    """
    # Validate once, up front, instead of re-asserting inside the loop.
    assert isinstance(text, str), 'Incoming argument must be a string.'

    line_output = []
    # Read-only mode is sufficient; the original 'r+' needlessly required
    # write permission on the input file.
    with open(text, mode='r', encoding='utf8') as file:
        lines = file.readlines()

    for line in lines:
        # Strip out damage characters
        if not self.damage:  # Add 'xn' -- missing sign or number?
            line = ''.join(c for c in line if c not in "#[]?!*")
        line_output.append(line.rstrip())
    return line_output
constant[
From a .txt file, outputs lines as string in list.
input: 21. u2-wa-a-ru at-ta e2-kal2-la-ka _e2_-ka wu-e-er
22. ... u2-ul szi-...
23. ... x ...
output:['21. u2-wa-a-ru at-ta e2-kal2-la-ka _e2_-ka wu-e-er',
'22. ... u2-ul szi-...',
'23. ... x ...',]
:param: .txt file containing untokenized string
:return: lines as strings in list
]
variable[line_output] assign[=] list[[]]
with call[name[open], parameter[name[text]]] begin[:]
variable[lines] assign[=] call[name[file].readlines, parameter[]]
assert[call[name[isinstance], parameter[name[text], name[str]]]]
for taget[name[line]] in starred[name[lines]] begin[:]
if <ast.UnaryOp object at 0x7da20eb29a50> begin[:]
variable[line] assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da18eb54f70>]]
call[name[re].match, parameter[constant[^\d*\.|\d\'\.], name[line]]]
call[name[line_output].append, parameter[call[name[line].rstrip, parameter[]]]]
return[name[line_output]] | keyword[def] identifier[line_tokenizer] ( identifier[self] , identifier[text] ):
literal[string]
identifier[line_output] =[]
keyword[with] identifier[open] ( identifier[text] , identifier[mode] = literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[file] :
identifier[lines] = identifier[file] . identifier[readlines] ()
keyword[assert] identifier[isinstance] ( identifier[text] , identifier[str] ), literal[string]
keyword[for] identifier[line] keyword[in] identifier[lines] :
keyword[if] keyword[not] identifier[self] . identifier[damage] :
identifier[line] = literal[string] . identifier[join] ( identifier[c] keyword[for] identifier[c] keyword[in] identifier[line] keyword[if] identifier[c] keyword[not] keyword[in] literal[string] )
identifier[re] . identifier[match] ( literal[string] , identifier[line] )
identifier[line_output] . identifier[append] ( identifier[line] . identifier[rstrip] ())
keyword[return] identifier[line_output] | def line_tokenizer(self, text):
"""
From a .txt file, outputs lines as string in list.
input: 21. u2-wa-a-ru at-ta e2-kal2-la-ka _e2_-ka wu-e-er
22. ... u2-ul szi-...
23. ... x ...
output:['21. u2-wa-a-ru at-ta e2-kal2-la-ka _e2_-ka wu-e-er',
'22. ... u2-ul szi-...',
'23. ... x ...',]
:param: .txt file containing untokenized string
:return: lines as strings in list
"""
line_output = []
with open(text, mode='r+', encoding='utf8') as file:
lines = file.readlines()
assert isinstance(text, str), 'Incoming argument must be a string.' # depends on [control=['with'], data=['file']]
for line in lines:
# Strip out damage characters
if not self.damage: # Add 'xn' -- missing sign or number?
line = ''.join((c for c in line if c not in '#[]?!*'))
re.match("^\\d*\\.|\\d\\'\\.", line)
line_output.append(line.rstrip()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
return line_output |
def teardown_logical_port_connectivity(self, context, port_db,
                                       hosting_device_id):
    """Removes connectivity for a logical port.

    Unplugs the corresponding data interface from the VM.
    """
    # Without a port id there is nothing to detach; warn and bail out.
    if port_db is None or port_db.get('id') is None:
        LOG.warning("Port id is None! Cannot remove port "
                    "from hosting_device:%s", hosting_device_id)
        return

    h_port_id = port_db.hosting_info.hosting_port.id
    try:
        self._dev_mgr.svc_vm_mgr.interface_detach(hosting_device_id,
                                                  h_port_id)
        # Clean up the hosting port asynchronously in the greenthread pool.
        self._gt_pool.spawn_n(self._cleanup_hosting_port, context,
                              h_port_id)
        LOG.debug("Teardown logicalport completed for port:%s", port_db.id)
    except Exception as e:
        # Best-effort teardown: log the failure and continue.
        LOG.error("Failed to detach interface corresponding to port:"
                  "%(p_id)s on hosting device:%(hd_id)s due to "
                  "error %(error)s", {'p_id': h_port_id,
                                      'hd_id': hosting_device_id,
                                      'error': str(e)})
constant[Removes connectivity for a logical port.
Unplugs the corresponding data interface from the VM.
]
if <ast.BoolOp object at 0x7da18dc98970> begin[:]
call[name[LOG].warning, parameter[constant[Port id is None! Cannot remove port from hosting_device:%s], name[hosting_device_id]]]
return[None]
variable[hosting_port_id] assign[=] name[port_db].hosting_info.hosting_port.id
<ast.Try object at 0x7da1b1c61d80> | keyword[def] identifier[teardown_logical_port_connectivity] ( identifier[self] , identifier[context] , identifier[port_db] ,
identifier[hosting_device_id] ):
literal[string]
keyword[if] identifier[port_db] keyword[is] keyword[None] keyword[or] identifier[port_db] . identifier[get] ( literal[string] ) keyword[is] keyword[None] :
identifier[LOG] . identifier[warning] ( literal[string]
literal[string] , identifier[hosting_device_id] )
keyword[return]
identifier[hosting_port_id] = identifier[port_db] . identifier[hosting_info] . identifier[hosting_port] . identifier[id]
keyword[try] :
identifier[self] . identifier[_dev_mgr] . identifier[svc_vm_mgr] . identifier[interface_detach] ( identifier[hosting_device_id] ,
identifier[hosting_port_id] )
identifier[self] . identifier[_gt_pool] . identifier[spawn_n] ( identifier[self] . identifier[_cleanup_hosting_port] , identifier[context] ,
identifier[hosting_port_id] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[port_db] . identifier[id] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[LOG] . identifier[error] ( literal[string]
literal[string]
literal[string] ,{ literal[string] : identifier[hosting_port_id] ,
literal[string] : identifier[hosting_device_id] ,
literal[string] : identifier[str] ( identifier[e] )}) | def teardown_logical_port_connectivity(self, context, port_db, hosting_device_id):
"""Removes connectivity for a logical port.
Unplugs the corresponding data interface from the VM.
"""
if port_db is None or port_db.get('id') is None:
LOG.warning('Port id is None! Cannot remove port from hosting_device:%s', hosting_device_id)
return # depends on [control=['if'], data=[]]
hosting_port_id = port_db.hosting_info.hosting_port.id
try:
self._dev_mgr.svc_vm_mgr.interface_detach(hosting_device_id, hosting_port_id)
self._gt_pool.spawn_n(self._cleanup_hosting_port, context, hosting_port_id)
LOG.debug('Teardown logicalport completed for port:%s', port_db.id) # depends on [control=['try'], data=[]]
except Exception as e:
LOG.error('Failed to detach interface corresponding to port:%(p_id)s on hosting device:%(hd_id)s due to error %(error)s', {'p_id': hosting_port_id, 'hd_id': hosting_device_id, 'error': str(e)}) # depends on [control=['except'], data=['e']] |
def jobs(self, job_key=None, timeoutSecs=10, **kwargs):
    '''
    Fetch all the jobs or a single job from the /Jobs endpoint.
    '''
    # NOTE: 'job_key' is accepted but currently not forwarded to the
    # endpoint (kept for interface compatibility).
    params = {}
    h2o_methods.check_params_update_kwargs(params, kwargs, 'jobs', True)
    return self.do_json_request('3/Jobs.json', timeout=timeoutSecs,
                                params=params)
constant[
Fetch all the jobs or a single job from the /Jobs endpoint.
]
variable[params_dict] assign[=] dictionary[[], []]
call[name[h2o_methods].check_params_update_kwargs, parameter[name[params_dict], name[kwargs], constant[jobs], constant[True]]]
variable[result] assign[=] call[name[self].do_json_request, parameter[constant[3/Jobs.json]]]
return[name[result]] | keyword[def] identifier[jobs] ( identifier[self] , identifier[job_key] = keyword[None] , identifier[timeoutSecs] = literal[int] ,** identifier[kwargs] ):
literal[string]
identifier[params_dict] ={
}
identifier[h2o_methods] . identifier[check_params_update_kwargs] ( identifier[params_dict] , identifier[kwargs] , literal[string] , keyword[True] )
identifier[result] = identifier[self] . identifier[do_json_request] ( literal[string] , identifier[timeout] = identifier[timeoutSecs] , identifier[params] = identifier[params_dict] )
keyword[return] identifier[result] | def jobs(self, job_key=None, timeoutSecs=10, **kwargs):
"""
Fetch all the jobs or a single job from the /Jobs endpoint.
"""
# 'job_key': job_key
params_dict = {}
h2o_methods.check_params_update_kwargs(params_dict, kwargs, 'jobs', True)
result = self.do_json_request('3/Jobs.json', timeout=timeoutSecs, params=params_dict)
return result |
def spin(self):
    """:class:`.BinaryQuadraticModel`: Spin-valued (Ising) view of this model.

    Gives access to the biases of the model expressed over spin variables,
    regardless of the vartype the model was created with.  For a model
    created with the binary vartype, the spin counterpart is constructed
    lazily on first access and cached for all subsequent reads; its
    ``binary`` attribute is wired back to this model.

    Note:
        Mutating methods such as :meth:`.add_variable`,
        :meth:`.add_variables_from` or :meth:`.add_interaction` should only
        be used on the base model.
    """
    # The presence of a non-None _spin attribute implies it is up to date:
    # methods that invalidate it remove the attribute entirely.
    cached = getattr(self, '_spin', None)
    if cached is not None:
        return cached

    if self.vartype is Vartype.SPIN:
        spin = self
        self._spin = spin
    else:
        spin = self.change_vartype(Vartype.SPIN, inplace=False)
        self._spin = spin
        self._counterpart = spin
        # make spin.binary refer back to this (binary-valued) model
        spin._binary = self

    return spin
constant[:class:`.BinaryQuadraticModel`: An instance of the Ising model subclass
of the :class:`.BinaryQuadraticModel` superclass, corresponding to
a binary quadratic model with spins as its variables.
Enables access to biases for the spin-valued binary quadratic model
regardless of the :class:`vartype` set when the model was created.
If the model was created with the :attr:`.binary` vartype,
the Ising model subclass is instantiated upon the first use of the
:attr:`.spin` property and used in any subsequent reads.
Examples:
This example creates a QUBO model and uses the :attr:`.spin` property
to instantiate the corresponding Ising model.
>>> import dimod
...
>>> bqm_qubo = dimod.BinaryQuadraticModel({0: -1, 1: -1}, {(0, 1): 2}, 0.0, dimod.BINARY)
>>> bqm_spin = bqm_qubo.spin
>>> bqm_spin # doctest: +SKIP
BinaryQuadraticModel({0: 0.0, 1: 0.0}, {(0, 1): 0.5}, -0.5, Vartype.SPIN)
>>> bqm_spin.spin is bqm_spin
True
Note:
Methods like :meth:`.add_variable`, :meth:`.add_variables_from`,
:meth:`.add_interaction`, etc. should only be used on the base model.
]
<ast.Try object at 0x7da1b07615d0>
if compare[name[self].vartype is name[Vartype].SPIN] begin[:]
name[self]._spin assign[=] name[self]
return[name[spin]] | keyword[def] identifier[spin] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[spin] = identifier[self] . identifier[_spin]
keyword[if] identifier[spin] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[spin]
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[if] identifier[self] . identifier[vartype] keyword[is] identifier[Vartype] . identifier[SPIN] :
identifier[self] . identifier[_spin] = identifier[spin] = identifier[self]
keyword[else] :
identifier[self] . identifier[_counterpart] = identifier[self] . identifier[_spin] = identifier[spin] = identifier[self] . identifier[change_vartype] ( identifier[Vartype] . identifier[SPIN] , identifier[inplace] = keyword[False] )
identifier[spin] . identifier[_binary] = identifier[self]
keyword[return] identifier[spin] | def spin(self):
""":class:`.BinaryQuadraticModel`: An instance of the Ising model subclass
of the :class:`.BinaryQuadraticModel` superclass, corresponding to
a binary quadratic model with spins as its variables.
Enables access to biases for the spin-valued binary quadratic model
regardless of the :class:`vartype` set when the model was created.
If the model was created with the :attr:`.binary` vartype,
the Ising model subclass is instantiated upon the first use of the
:attr:`.spin` property and used in any subsequent reads.
Examples:
This example creates a QUBO model and uses the :attr:`.spin` property
to instantiate the corresponding Ising model.
>>> import dimod
...
>>> bqm_qubo = dimod.BinaryQuadraticModel({0: -1, 1: -1}, {(0, 1): 2}, 0.0, dimod.BINARY)
>>> bqm_spin = bqm_qubo.spin
>>> bqm_spin # doctest: +SKIP
BinaryQuadraticModel({0: 0.0, 1: 0.0}, {(0, 1): 0.5}, -0.5, Vartype.SPIN)
>>> bqm_spin.spin is bqm_spin
True
Note:
Methods like :meth:`.add_variable`, :meth:`.add_variables_from`,
:meth:`.add_interaction`, etc. should only be used on the base model.
"""
# NB: The existence of the _spin property implies that it is up to date, methods that
# invalidate it will erase the property
try:
spin = self._spin
if spin is not None:
return spin # depends on [control=['if'], data=['spin']] # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
if self.vartype is Vartype.SPIN:
self._spin = spin = self # depends on [control=['if'], data=[]]
else:
self._counterpart = self._spin = spin = self.change_vartype(Vartype.SPIN, inplace=False)
# we also want to go ahead and set spin.binary to refer back to self
spin._binary = self
return spin |
def feature(self, type_=None, identifier=None, description=None, entry_name=None, limit=None, as_df=False):
    """Query :class:`.models.Feature` objects in the database.

    Available feature types can be listed with
    ``pyuniprot.query().feature_types``.

    :param type_: type(s) of feature
    :type type_: str or tuple(str) or None
    :param identifier: feature identifier(s)
    :type identifier: str or tuple(str) or None
    :param description: description(s) of feature(s)
    :type description: str or tuple(str) or None
    :param entry_name: name(s) in :class:`.models.Entry`
    :type entry_name: str or tuple(str) or None
    :param limit:
        - if `isinstance(limit,int)==True` -> limit
        - if `isinstance(limit,tuple)==True` -> format:= tuple(page_number, results_per_page)
        - if limit == None -> all results
    :type limit: int or tuple(int) or None
    :param bool as_df: if `True` results are returned as :class:`pandas.DataFrame`
    :return:
        - if `as_df == False` -> list(:class:`.models.Feature`)
        - if `as_df == True` -> :class:`pandas.DataFrame`
    :rtype: list(:class:`.models.Feature`) or :class:`pandas.DataFrame`
    """
    query = self.session.query(models.Feature)

    # Filters applied directly on Feature columns.
    query = self.get_model_queries(query, (
        (type_, models.Feature.type_),
        (identifier, models.Feature.identifier),
        (description, models.Feature.description),
    ))

    # Filter via the one-to-many relation to Entry.
    query = self.get_one_to_many_queries(query, ((entry_name, models.Entry.name),))

    return self._limit_and_df(query, limit, as_df)
constant[Method to query :class:`.models.Feature` objects in database
Check available features types with ``pyuniprot.query().feature_types``
:param type_: type(s) of feature
:type type_: str or tuple(str) or None
:param identifier: feature identifier(s)
:type identifier: str or tuple(str) or None
:param description: description(s) of feature(s)
:type description: str or tuple(str) or None
:param entry_name: name(s) in :class:`.models.Entry`
:type entry_name: str or tuple(str) or None
:param limit:
- if `isinstance(limit,int)==True` -> limit
- if `isinstance(limit,tuple)==True` -> format:= tuple(page_number, results_per_page)
- if limit == None -> all results
:type limit: int or tuple(int) or None
:param bool as_df: if `True` results are returned as :class:`pandas.DataFrame`
:return:
- if `as_df == False` -> list(:class:`.models.Feature`)
- if `as_df == True` -> :class:`pandas.DataFrame`
:rtype: list(:class:`.models.Feature`) or :class:`pandas.DataFrame`
]
variable[q] assign[=] call[name[self].session.query, parameter[name[models].Feature]]
variable[model_queries_config] assign[=] tuple[[<ast.Tuple object at 0x7da18f810be0>, <ast.Tuple object at 0x7da18f811c90>, <ast.Tuple object at 0x7da18f811000>]]
variable[q] assign[=] call[name[self].get_model_queries, parameter[name[q], name[model_queries_config]]]
variable[q] assign[=] call[name[self].get_one_to_many_queries, parameter[name[q], tuple[[<ast.Tuple object at 0x7da18f813490>]]]]
return[call[name[self]._limit_and_df, parameter[name[q], name[limit], name[as_df]]]] | keyword[def] identifier[feature] ( identifier[self] , identifier[type_] = keyword[None] , identifier[identifier] = keyword[None] , identifier[description] = keyword[None] , identifier[entry_name] = keyword[None] , identifier[limit] = keyword[None] , identifier[as_df] = keyword[False] ):
literal[string]
identifier[q] = identifier[self] . identifier[session] . identifier[query] ( identifier[models] . identifier[Feature] )
identifier[model_queries_config] =(
( identifier[type_] , identifier[models] . identifier[Feature] . identifier[type_] ),
( identifier[identifier] , identifier[models] . identifier[Feature] . identifier[identifier] ),
( identifier[description] , identifier[models] . identifier[Feature] . identifier[description] )
)
identifier[q] = identifier[self] . identifier[get_model_queries] ( identifier[q] , identifier[model_queries_config] )
identifier[q] = identifier[self] . identifier[get_one_to_many_queries] ( identifier[q] ,(( identifier[entry_name] , identifier[models] . identifier[Entry] . identifier[name] ),))
keyword[return] identifier[self] . identifier[_limit_and_df] ( identifier[q] , identifier[limit] , identifier[as_df] ) | def feature(self, type_=None, identifier=None, description=None, entry_name=None, limit=None, as_df=False):
"""Method to query :class:`.models.Feature` objects in database
Check available features types with ``pyuniprot.query().feature_types``
:param type_: type(s) of feature
:type type_: str or tuple(str) or None
:param identifier: feature identifier(s)
:type identifier: str or tuple(str) or None
:param description: description(s) of feature(s)
:type description: str or tuple(str) or None
:param entry_name: name(s) in :class:`.models.Entry`
:type entry_name: str or tuple(str) or None
:param limit:
- if `isinstance(limit,int)==True` -> limit
- if `isinstance(limit,tuple)==True` -> format:= tuple(page_number, results_per_page)
- if limit == None -> all results
:type limit: int or tuple(int) or None
:param bool as_df: if `True` results are returned as :class:`pandas.DataFrame`
:return:
- if `as_df == False` -> list(:class:`.models.Feature`)
- if `as_df == True` -> :class:`pandas.DataFrame`
:rtype: list(:class:`.models.Feature`) or :class:`pandas.DataFrame`
"""
q = self.session.query(models.Feature)
model_queries_config = ((type_, models.Feature.type_), (identifier, models.Feature.identifier), (description, models.Feature.description))
q = self.get_model_queries(q, model_queries_config)
q = self.get_one_to_many_queries(q, ((entry_name, models.Entry.name),))
return self._limit_and_df(q, limit, as_df) |
def _create_spec_config(self, table_name, spec_documents):
'''
Dynamo implementation of spec config creation
Called by `create_archive_table()` in
:py:class:`manager.BaseDataManager` Simply adds two rows to the spec
table
Parameters
----------
table_name :
base table name (not including .spec suffix)
spec_documents : list
list of dictionary documents defining the manager spec
'''
_spec_table = self._resource.Table(table_name + '.spec')
for doc in spec_documents:
_spec_table.put_item(Item=doc) | def function[_create_spec_config, parameter[self, table_name, spec_documents]]:
constant[
Dynamo implementation of spec config creation
Called by `create_archive_table()` in
:py:class:`manager.BaseDataManager` Simply adds two rows to the spec
table
Parameters
----------
table_name :
base table name (not including .spec suffix)
spec_documents : list
list of dictionary documents defining the manager spec
]
variable[_spec_table] assign[=] call[name[self]._resource.Table, parameter[binary_operation[name[table_name] + constant[.spec]]]]
for taget[name[doc]] in starred[name[spec_documents]] begin[:]
call[name[_spec_table].put_item, parameter[]] | keyword[def] identifier[_create_spec_config] ( identifier[self] , identifier[table_name] , identifier[spec_documents] ):
literal[string]
identifier[_spec_table] = identifier[self] . identifier[_resource] . identifier[Table] ( identifier[table_name] + literal[string] )
keyword[for] identifier[doc] keyword[in] identifier[spec_documents] :
identifier[_spec_table] . identifier[put_item] ( identifier[Item] = identifier[doc] ) | def _create_spec_config(self, table_name, spec_documents):
"""
Dynamo implementation of spec config creation
Called by `create_archive_table()` in
:py:class:`manager.BaseDataManager` Simply adds two rows to the spec
table
Parameters
----------
table_name :
base table name (not including .spec suffix)
spec_documents : list
list of dictionary documents defining the manager spec
"""
_spec_table = self._resource.Table(table_name + '.spec')
for doc in spec_documents:
_spec_table.put_item(Item=doc) # depends on [control=['for'], data=['doc']] |
def build_metamodel(self, id_generator=None):
    '''
    Build and return a *xtuml.MetaModel* populated with all previously
    loaded input.

    An optional *id_generator* is forwarded to the MetaModel constructor.
    '''
    metamodel = xtuml.MetaModel(id_generator)
    self.populate(metamodel)
    return metamodel
constant[
Build and return a *xtuml.MetaModel* containing previously loaded input.
]
variable[m] assign[=] call[name[xtuml].MetaModel, parameter[name[id_generator]]]
call[name[self].populate, parameter[name[m]]]
return[name[m]] | keyword[def] identifier[build_metamodel] ( identifier[self] , identifier[id_generator] = keyword[None] ):
literal[string]
identifier[m] = identifier[xtuml] . identifier[MetaModel] ( identifier[id_generator] )
identifier[self] . identifier[populate] ( identifier[m] )
keyword[return] identifier[m] | def build_metamodel(self, id_generator=None):
"""
Build and return a *xtuml.MetaModel* containing previously loaded input.
"""
m = xtuml.MetaModel(id_generator)
self.populate(m)
return m |
def is_indel(reference_bases, alternate_bases):
    """Return True if the variant described by the given alleles is an INDEL.

    A variant counts as an INDEL when the reference allele spans more than
    one base, or when any alternate allele is missing (None) or differs in
    length from the reference allele.
    """
    if len(reference_bases) > 1:
        return True
    return any(
        alt is None or len(alt) != len(reference_bases)
        for alt in alternate_bases
    )
constant[ Return whether or not the variant is an INDEL ]
if compare[call[name[len], parameter[name[reference_bases]]] greater[>] constant[1]] begin[:]
return[constant[True]]
for taget[name[alt]] in starred[name[alternate_bases]] begin[:]
if compare[name[alt] is constant[None]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[is_indel] ( identifier[reference_bases] , identifier[alternate_bases] ):
literal[string]
keyword[if] identifier[len] ( identifier[reference_bases] )> literal[int] :
keyword[return] keyword[True]
keyword[for] identifier[alt] keyword[in] identifier[alternate_bases] :
keyword[if] identifier[alt] keyword[is] keyword[None] :
keyword[return] keyword[True]
keyword[elif] identifier[len] ( identifier[alt] )!= identifier[len] ( identifier[reference_bases] ):
keyword[return] keyword[True]
keyword[return] keyword[False] | def is_indel(reference_bases, alternate_bases):
""" Return whether or not the variant is an INDEL """
if len(reference_bases) > 1:
return True # depends on [control=['if'], data=[]]
for alt in alternate_bases:
if alt is None:
return True # depends on [control=['if'], data=[]]
elif len(alt) != len(reference_bases):
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['alt']]
return False |
def rollforward(self, date):
    """Roll date forward to nearest start of year"""
    # Dates already sitting on the offset boundary pass through unchanged;
    # anything else is advanced to the next start of year.
    return date if self.onOffset(date) else date + YearBegin(month=self.month)
constant[Roll date forward to nearest start of year]
if call[name[self].onOffset, parameter[name[date]]] begin[:]
return[name[date]] | keyword[def] identifier[rollforward] ( identifier[self] , identifier[date] ):
literal[string]
keyword[if] identifier[self] . identifier[onOffset] ( identifier[date] ):
keyword[return] identifier[date]
keyword[else] :
keyword[return] identifier[date] + identifier[YearBegin] ( identifier[month] = identifier[self] . identifier[month] ) | def rollforward(self, date):
"""Roll date forward to nearest start of year"""
if self.onOffset(date):
return date # depends on [control=['if'], data=[]]
else:
return date + YearBegin(month=self.month) |
def setup_scrollarea(self):
    """Set up the scroll area that will contain the FigureThumbnails."""
    # Thumbnails live in a grid whose outer columns (0 and 2) stretch,
    # keeping the content horizontally centered in column 1.
    self.view = QWidget()
    self.scene = QGridLayout(self.view)
    self.scene.setColumnStretch(0, 100)
    self.scene.setColumnStretch(2, 100)

    scrollarea = QScrollArea()
    scrollarea.setWidget(self.view)
    scrollarea.setWidgetResizable(True)
    scrollarea.setFrameStyle(0)
    scrollarea.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
    scrollarea.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
    scrollarea.setSizePolicy(
        QSizePolicy(QSizePolicy.Ignored, QSizePolicy.Preferred))

    # Set the vertical scrollbar explicitly: this is required to avoid a
    # "RuntimeError: no access to protected functions or signals for
    # objects not created from Python" on Linux.
    scrollarea.setVerticalScrollBar(QScrollBar())

    self.scrollarea = scrollarea
    return scrollarea
constant[Setup the scrollarea that will contain the FigureThumbnails.]
name[self].view assign[=] call[name[QWidget], parameter[]]
name[self].scene assign[=] call[name[QGridLayout], parameter[name[self].view]]
call[name[self].scene.setColumnStretch, parameter[constant[0], constant[100]]]
call[name[self].scene.setColumnStretch, parameter[constant[2], constant[100]]]
name[self].scrollarea assign[=] call[name[QScrollArea], parameter[]]
call[name[self].scrollarea.setWidget, parameter[name[self].view]]
call[name[self].scrollarea.setWidgetResizable, parameter[constant[True]]]
call[name[self].scrollarea.setFrameStyle, parameter[constant[0]]]
call[name[self].scrollarea.setVerticalScrollBarPolicy, parameter[name[Qt].ScrollBarAlwaysOff]]
call[name[self].scrollarea.setHorizontalScrollBarPolicy, parameter[name[Qt].ScrollBarAlwaysOff]]
call[name[self].scrollarea.setSizePolicy, parameter[call[name[QSizePolicy], parameter[name[QSizePolicy].Ignored, name[QSizePolicy].Preferred]]]]
call[name[self].scrollarea.setVerticalScrollBar, parameter[call[name[QScrollBar], parameter[]]]]
return[name[self].scrollarea] | keyword[def] identifier[setup_scrollarea] ( identifier[self] ):
literal[string]
identifier[self] . identifier[view] = identifier[QWidget] ()
identifier[self] . identifier[scene] = identifier[QGridLayout] ( identifier[self] . identifier[view] )
identifier[self] . identifier[scene] . identifier[setColumnStretch] ( literal[int] , literal[int] )
identifier[self] . identifier[scene] . identifier[setColumnStretch] ( literal[int] , literal[int] )
identifier[self] . identifier[scrollarea] = identifier[QScrollArea] ()
identifier[self] . identifier[scrollarea] . identifier[setWidget] ( identifier[self] . identifier[view] )
identifier[self] . identifier[scrollarea] . identifier[setWidgetResizable] ( keyword[True] )
identifier[self] . identifier[scrollarea] . identifier[setFrameStyle] ( literal[int] )
identifier[self] . identifier[scrollarea] . identifier[setVerticalScrollBarPolicy] ( identifier[Qt] . identifier[ScrollBarAlwaysOff] )
identifier[self] . identifier[scrollarea] . identifier[setHorizontalScrollBarPolicy] ( identifier[Qt] . identifier[ScrollBarAlwaysOff] )
identifier[self] . identifier[scrollarea] . identifier[setSizePolicy] ( identifier[QSizePolicy] ( identifier[QSizePolicy] . identifier[Ignored] ,
identifier[QSizePolicy] . identifier[Preferred] ))
identifier[self] . identifier[scrollarea] . identifier[setVerticalScrollBar] ( identifier[QScrollBar] ())
keyword[return] identifier[self] . identifier[scrollarea] | def setup_scrollarea(self):
"""Setup the scrollarea that will contain the FigureThumbnails."""
self.view = QWidget()
self.scene = QGridLayout(self.view)
self.scene.setColumnStretch(0, 100)
self.scene.setColumnStretch(2, 100)
self.scrollarea = QScrollArea()
self.scrollarea.setWidget(self.view)
self.scrollarea.setWidgetResizable(True)
self.scrollarea.setFrameStyle(0)
self.scrollarea.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.scrollarea.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.scrollarea.setSizePolicy(QSizePolicy(QSizePolicy.Ignored, QSizePolicy.Preferred))
# Set the vertical scrollbar explicitely :
# This is required to avoid a "RuntimeError: no access to protected
# functions or signals for objects not created from Python" in Linux.
self.scrollarea.setVerticalScrollBar(QScrollBar())
return self.scrollarea |
def use_embedded_pkgs(embedded_lib_path=None):
    """Temporarily prepend embedded packages to sys.path.

    Generator used as a context manager: on entry the embedded library
    path (resolved via get_embedded_lib_path() when not given) is inserted
    into sys.path; on exit the original sys.path is restored, whatever
    happened in between.
    """
    if embedded_lib_path is None:
        embedded_lib_path = get_embedded_lib_path()
    saved_sys_path = sys.path[:]
    # Insert at position 1, not 0, so the script directory keeps priority
    # (https://stackoverflow.com/a/10097543).
    sys.path.insert(1, embedded_lib_path)
    try:
        yield
    finally:
        sys.path = saved_sys_path
constant[Temporarily prepend embedded packages to sys.path.]
if compare[name[embedded_lib_path] is constant[None]] begin[:]
variable[embedded_lib_path] assign[=] call[name[get_embedded_lib_path], parameter[]]
variable[old_sys_path] assign[=] call[name[list], parameter[name[sys].path]]
call[name[sys].path.insert, parameter[constant[1], name[embedded_lib_path]]]
<ast.Try object at 0x7da1b06ff460> | keyword[def] identifier[use_embedded_pkgs] ( identifier[embedded_lib_path] = keyword[None] ):
literal[string]
keyword[if] identifier[embedded_lib_path] keyword[is] keyword[None] :
identifier[embedded_lib_path] = identifier[get_embedded_lib_path] ()
identifier[old_sys_path] = identifier[list] ( identifier[sys] . identifier[path] )
identifier[sys] . identifier[path] . identifier[insert] (
literal[int] ,
identifier[embedded_lib_path]
)
keyword[try] :
keyword[yield]
keyword[finally] :
identifier[sys] . identifier[path] = identifier[old_sys_path] | def use_embedded_pkgs(embedded_lib_path=None):
"""Temporarily prepend embedded packages to sys.path."""
if embedded_lib_path is None:
embedded_lib_path = get_embedded_lib_path() # depends on [control=['if'], data=['embedded_lib_path']]
old_sys_path = list(sys.path) # https://stackoverflow.com/a/10097543
sys.path.insert(1, embedded_lib_path)
try:
yield # depends on [control=['try'], data=[]]
finally:
sys.path = old_sys_path |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.