code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def to_binary(self,filename):
    """Write the parameter ensemble to a jco-style binary file.

    Parameters
    ----------
    filename : str
        the filename to write

    Returns
    -------
    None

    Note
    ----
    this function back-transforms inplace with respect to
    log10 before writing, then re-applies the transform afterwards
    """
    was_transformed = self.istransformed
    if was_transformed:
        # write in native (non-log10) space
        self._back_transform(inplace=True)
    if self.isnull().values.any():
        warnings.warn("NaN in par ensemble",PyemuWarning)
    self.as_pyemu_matrix().to_coo(filename)
    if was_transformed:
        # restore the log10 transform the caller had in effect
        self._transform(inplace=True)
constant[write the parameter ensemble to a jco-style binary file
Parameters
----------
filename : str
the filename to write
Returns
-------
None
Note
----
this function back-transforms inplace with respect to
log10 before writing
]
variable[retrans] assign[=] constant[False]
if name[self].istransformed begin[:]
call[name[self]._back_transform, parameter[]]
variable[retrans] assign[=] constant[True]
if call[call[name[self].isnull, parameter[]].values.any, parameter[]] begin[:]
call[name[warnings].warn, parameter[constant[NaN in par ensemble], name[PyemuWarning]]]
call[call[name[self].as_pyemu_matrix, parameter[]].to_coo, parameter[name[filename]]]
if name[retrans] begin[:]
call[name[self]._transform, parameter[]] | keyword[def] identifier[to_binary] ( identifier[self] , identifier[filename] ):
literal[string]
identifier[retrans] = keyword[False]
keyword[if] identifier[self] . identifier[istransformed] :
identifier[self] . identifier[_back_transform] ( identifier[inplace] = keyword[True] )
identifier[retrans] = keyword[True]
keyword[if] identifier[self] . identifier[isnull] (). identifier[values] . identifier[any] ():
identifier[warnings] . identifier[warn] ( literal[string] , identifier[PyemuWarning] )
identifier[self] . identifier[as_pyemu_matrix] (). identifier[to_coo] ( identifier[filename] )
keyword[if] identifier[retrans] :
identifier[self] . identifier[_transform] ( identifier[inplace] = keyword[True] ) | def to_binary(self, filename):
"""write the parameter ensemble to a jco-style binary file
Parameters
----------
filename : str
the filename to write
Returns
-------
None
Note
----
this function back-transforms inplace with respect to
log10 before writing
"""
retrans = False
if self.istransformed:
self._back_transform(inplace=True)
retrans = True # depends on [control=['if'], data=[]]
if self.isnull().values.any():
warnings.warn('NaN in par ensemble', PyemuWarning) # depends on [control=['if'], data=[]]
self.as_pyemu_matrix().to_coo(filename)
if retrans:
self._transform(inplace=True) # depends on [control=['if'], data=[]] |
def _get_attributes(self):
"""Return a generator for instance and class attribute.
.. code-block:: python3
for instance_attribute, class_attribute in self._get_attributes():
print("Instance Attribute: {}".format(instance_attribute))
print("Class Attribute: {}".format(class_attribute))
Returns:
generator: Tuples with instance attribute and class attribute
"""
return map((lambda i, c: (i[1], c[1])),
self._get_instance_attributes(),
self.get_class_attributes()) | def function[_get_attributes, parameter[self]]:
constant[Return a generator for instance and class attribute.
.. code-block:: python3
for instance_attribute, class_attribute in self._get_attributes():
print("Instance Attribute: {}".format(instance_attribute))
print("Class Attribute: {}".format(class_attribute))
Returns:
generator: Tuples with instance attribute and class attribute
]
return[call[name[map], parameter[<ast.Lambda object at 0x7da20e9b04c0>, call[name[self]._get_instance_attributes, parameter[]], call[name[self].get_class_attributes, parameter[]]]]] | keyword[def] identifier[_get_attributes] ( identifier[self] ):
literal[string]
keyword[return] identifier[map] (( keyword[lambda] identifier[i] , identifier[c] :( identifier[i] [ literal[int] ], identifier[c] [ literal[int] ])),
identifier[self] . identifier[_get_instance_attributes] (),
identifier[self] . identifier[get_class_attributes] ()) | def _get_attributes(self):
"""Return a generator for instance and class attribute.
.. code-block:: python3
for instance_attribute, class_attribute in self._get_attributes():
print("Instance Attribute: {}".format(instance_attribute))
print("Class Attribute: {}".format(class_attribute))
Returns:
generator: Tuples with instance attribute and class attribute
"""
return map(lambda i, c: (i[1], c[1]), self._get_instance_attributes(), self.get_class_attributes()) |
def create_node(vm_):
'''
Build and submit the XML to create a node

Assembles a ``<ve>`` XML document describing the requested instance
(name, description, cpu, ram, bandwidth, public IPs, disk, image
platform and admin credentials) from the VM profile ``vm_``, fires the
salt-cloud "requesting" event, then POSTs the document to the ``ve``
API endpoint via ``query``.

Args:
    vm_ (dict): VM profile; must contain at least ``name``. All other
        values are looked up through ``config.get_cloud_config_value``
        with the defaults shown inline below.

Returns:
    The response returned by ``query`` for the POST request.
'''
# Start the tree
content = ET.Element('ve')
# Name of the instance
name = ET.SubElement(content, 'name')
name.text = vm_['name']
# Description, defaults to name
desc = ET.SubElement(content, 'description')
desc.text = config.get_cloud_config_value(
'desc', vm_, __opts__, default=vm_['name'], search_global=False
)
# How many CPU cores, and how fast they are
cpu = ET.SubElement(content, 'cpu')
cpu.attrib['number'] = config.get_cloud_config_value(
'cpu_number', vm_, __opts__, default='1', search_global=False
)
cpu.attrib['power'] = config.get_cloud_config_value(
'cpu_power', vm_, __opts__, default='1000', search_global=False
)
# How many megabytes of RAM
ram = ET.SubElement(content, 'ram-size')
ram.text = config.get_cloud_config_value(
'ram', vm_, __opts__, default='256', search_global=False
)
# Bandwidth available, in kbps
bandwidth = ET.SubElement(content, 'bandwidth')
bandwidth.text = config.get_cloud_config_value(
'bandwidth', vm_, __opts__, default='100', search_global=False
)
# How many public IPs will be assigned to this instance
ip_num = ET.SubElement(content, 'no-of-public-ip')
ip_num.text = config.get_cloud_config_value(
'ip_num', vm_, __opts__, default='1', search_global=False
)
# Size of the instance disk
disk = ET.SubElement(content, 've-disk')
disk.attrib['local'] = 'true'
disk.attrib['size'] = config.get_cloud_config_value(
'disk_size', vm_, __opts__, default='10', search_global=False
)
# Attributes for the image
# Resolve the image details so the platform/os-info nodes can be filled in
vm_image = config.get_cloud_config_value(
'image', vm_, __opts__, search_global=False
)
image = show_image({'image': vm_image}, call='function')
platform = ET.SubElement(content, 'platform')
template = ET.SubElement(platform, 'template-info')
template.attrib['name'] = vm_image
os_info = ET.SubElement(platform, 'os-info')
os_info.attrib['technology'] = image[vm_image]['technology']
os_info.attrib['type'] = image[vm_image]['osType']
# Username and password
admin = ET.SubElement(content, 'admin')
admin.attrib['login'] = config.get_cloud_config_value(
'ssh_username', vm_, __opts__, default='root'
)
admin.attrib['password'] = config.get_cloud_config_value(
'password', vm_, __opts__, search_global=False
)
# Serialize the tree (ET.tostring with an encoding returns bytes,
# including an XML declaration) and announce the request.
data = ET.tostring(content, encoding='UTF-8')
# NOTE(review): ``list(data)`` expands the serialized payload into a list
# of ints (py3) / chars (py2) for event filtering - confirm this is the
# intended "kwargs" shape for cloud.filter_event.
__utils__['cloud.fire_event'](
'event',
'requesting instance',
'salt/cloud/{0}/requesting'.format(vm_['name']),
args={
'kwargs': __utils__['cloud.filter_event']('requesting', data, list(data)),
},
sock_dir=__opts__['sock_dir'],
transport=__opts__['transport']
)
# Submit the creation request to the API.
node = query(action='ve', method='POST', data=data)
return node | def function[create_node, parameter[vm_]]:
constant[
Build and submit the XML to create a node
]
variable[content] assign[=] call[name[ET].Element, parameter[constant[ve]]]
variable[name] assign[=] call[name[ET].SubElement, parameter[name[content], constant[name]]]
name[name].text assign[=] call[name[vm_]][constant[name]]
variable[desc] assign[=] call[name[ET].SubElement, parameter[name[content], constant[description]]]
name[desc].text assign[=] call[name[config].get_cloud_config_value, parameter[constant[desc], name[vm_], name[__opts__]]]
variable[cpu] assign[=] call[name[ET].SubElement, parameter[name[content], constant[cpu]]]
call[name[cpu].attrib][constant[number]] assign[=] call[name[config].get_cloud_config_value, parameter[constant[cpu_number], name[vm_], name[__opts__]]]
call[name[cpu].attrib][constant[power]] assign[=] call[name[config].get_cloud_config_value, parameter[constant[cpu_power], name[vm_], name[__opts__]]]
variable[ram] assign[=] call[name[ET].SubElement, parameter[name[content], constant[ram-size]]]
name[ram].text assign[=] call[name[config].get_cloud_config_value, parameter[constant[ram], name[vm_], name[__opts__]]]
variable[bandwidth] assign[=] call[name[ET].SubElement, parameter[name[content], constant[bandwidth]]]
name[bandwidth].text assign[=] call[name[config].get_cloud_config_value, parameter[constant[bandwidth], name[vm_], name[__opts__]]]
variable[ip_num] assign[=] call[name[ET].SubElement, parameter[name[content], constant[no-of-public-ip]]]
name[ip_num].text assign[=] call[name[config].get_cloud_config_value, parameter[constant[ip_num], name[vm_], name[__opts__]]]
variable[disk] assign[=] call[name[ET].SubElement, parameter[name[content], constant[ve-disk]]]
call[name[disk].attrib][constant[local]] assign[=] constant[true]
call[name[disk].attrib][constant[size]] assign[=] call[name[config].get_cloud_config_value, parameter[constant[disk_size], name[vm_], name[__opts__]]]
variable[vm_image] assign[=] call[name[config].get_cloud_config_value, parameter[constant[image], name[vm_], name[__opts__]]]
variable[image] assign[=] call[name[show_image], parameter[dictionary[[<ast.Constant object at 0x7da1b21e16f0>], [<ast.Name object at 0x7da1b21e1db0>]]]]
variable[platform] assign[=] call[name[ET].SubElement, parameter[name[content], constant[platform]]]
variable[template] assign[=] call[name[ET].SubElement, parameter[name[platform], constant[template-info]]]
call[name[template].attrib][constant[name]] assign[=] name[vm_image]
variable[os_info] assign[=] call[name[ET].SubElement, parameter[name[platform], constant[os-info]]]
call[name[os_info].attrib][constant[technology]] assign[=] call[call[name[image]][name[vm_image]]][constant[technology]]
call[name[os_info].attrib][constant[type]] assign[=] call[call[name[image]][name[vm_image]]][constant[osType]]
variable[admin] assign[=] call[name[ET].SubElement, parameter[name[content], constant[admin]]]
call[name[admin].attrib][constant[login]] assign[=] call[name[config].get_cloud_config_value, parameter[constant[ssh_username], name[vm_], name[__opts__]]]
call[name[admin].attrib][constant[password]] assign[=] call[name[config].get_cloud_config_value, parameter[constant[password], name[vm_], name[__opts__]]]
variable[data] assign[=] call[name[ET].tostring, parameter[name[content]]]
call[call[name[__utils__]][constant[cloud.fire_event]], parameter[constant[event], constant[requesting instance], call[constant[salt/cloud/{0}/requesting].format, parameter[call[name[vm_]][constant[name]]]]]]
variable[node] assign[=] call[name[query], parameter[]]
return[name[node]] | keyword[def] identifier[create_node] ( identifier[vm_] ):
literal[string]
identifier[content] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[name] = identifier[ET] . identifier[SubElement] ( identifier[content] , literal[string] )
identifier[name] . identifier[text] = identifier[vm_] [ literal[string] ]
identifier[desc] = identifier[ET] . identifier[SubElement] ( identifier[content] , literal[string] )
identifier[desc] . identifier[text] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[default] = identifier[vm_] [ literal[string] ], identifier[search_global] = keyword[False]
)
identifier[cpu] = identifier[ET] . identifier[SubElement] ( identifier[content] , literal[string] )
identifier[cpu] . identifier[attrib] [ literal[string] ]= identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[default] = literal[string] , identifier[search_global] = keyword[False]
)
identifier[cpu] . identifier[attrib] [ literal[string] ]= identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[default] = literal[string] , identifier[search_global] = keyword[False]
)
identifier[ram] = identifier[ET] . identifier[SubElement] ( identifier[content] , literal[string] )
identifier[ram] . identifier[text] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[default] = literal[string] , identifier[search_global] = keyword[False]
)
identifier[bandwidth] = identifier[ET] . identifier[SubElement] ( identifier[content] , literal[string] )
identifier[bandwidth] . identifier[text] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[default] = literal[string] , identifier[search_global] = keyword[False]
)
identifier[ip_num] = identifier[ET] . identifier[SubElement] ( identifier[content] , literal[string] )
identifier[ip_num] . identifier[text] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[default] = literal[string] , identifier[search_global] = keyword[False]
)
identifier[disk] = identifier[ET] . identifier[SubElement] ( identifier[content] , literal[string] )
identifier[disk] . identifier[attrib] [ literal[string] ]= literal[string]
identifier[disk] . identifier[attrib] [ literal[string] ]= identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[default] = literal[string] , identifier[search_global] = keyword[False]
)
identifier[vm_image] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[False]
)
identifier[image] = identifier[show_image] ({ literal[string] : identifier[vm_image] }, identifier[call] = literal[string] )
identifier[platform] = identifier[ET] . identifier[SubElement] ( identifier[content] , literal[string] )
identifier[template] = identifier[ET] . identifier[SubElement] ( identifier[platform] , literal[string] )
identifier[template] . identifier[attrib] [ literal[string] ]= identifier[vm_image]
identifier[os_info] = identifier[ET] . identifier[SubElement] ( identifier[platform] , literal[string] )
identifier[os_info] . identifier[attrib] [ literal[string] ]= identifier[image] [ identifier[vm_image] ][ literal[string] ]
identifier[os_info] . identifier[attrib] [ literal[string] ]= identifier[image] [ identifier[vm_image] ][ literal[string] ]
identifier[admin] = identifier[ET] . identifier[SubElement] ( identifier[content] , literal[string] )
identifier[admin] . identifier[attrib] [ literal[string] ]= identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[default] = literal[string]
)
identifier[admin] . identifier[attrib] [ literal[string] ]= identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[False]
)
identifier[data] = identifier[ET] . identifier[tostring] ( identifier[content] , identifier[encoding] = literal[string] )
identifier[__utils__] [ literal[string] ](
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[vm_] [ literal[string] ]),
identifier[args] ={
literal[string] : identifier[__utils__] [ literal[string] ]( literal[string] , identifier[data] , identifier[list] ( identifier[data] )),
},
identifier[sock_dir] = identifier[__opts__] [ literal[string] ],
identifier[transport] = identifier[__opts__] [ literal[string] ]
)
identifier[node] = identifier[query] ( identifier[action] = literal[string] , identifier[method] = literal[string] , identifier[data] = identifier[data] )
keyword[return] identifier[node] | def create_node(vm_):
"""
Build and submit the XML to create a node
"""
# Start the tree
content = ET.Element('ve')
# Name of the instance
name = ET.SubElement(content, 'name')
name.text = vm_['name']
# Description, defaults to name
desc = ET.SubElement(content, 'description')
desc.text = config.get_cloud_config_value('desc', vm_, __opts__, default=vm_['name'], search_global=False)
# How many CPU cores, and how fast they are
cpu = ET.SubElement(content, 'cpu')
cpu.attrib['number'] = config.get_cloud_config_value('cpu_number', vm_, __opts__, default='1', search_global=False)
cpu.attrib['power'] = config.get_cloud_config_value('cpu_power', vm_, __opts__, default='1000', search_global=False)
# How many megabytes of RAM
ram = ET.SubElement(content, 'ram-size')
ram.text = config.get_cloud_config_value('ram', vm_, __opts__, default='256', search_global=False)
# Bandwidth available, in kbps
bandwidth = ET.SubElement(content, 'bandwidth')
bandwidth.text = config.get_cloud_config_value('bandwidth', vm_, __opts__, default='100', search_global=False)
# How many public IPs will be assigned to this instance
ip_num = ET.SubElement(content, 'no-of-public-ip')
ip_num.text = config.get_cloud_config_value('ip_num', vm_, __opts__, default='1', search_global=False)
# Size of the instance disk
disk = ET.SubElement(content, 've-disk')
disk.attrib['local'] = 'true'
disk.attrib['size'] = config.get_cloud_config_value('disk_size', vm_, __opts__, default='10', search_global=False)
# Attributes for the image
vm_image = config.get_cloud_config_value('image', vm_, __opts__, search_global=False)
image = show_image({'image': vm_image}, call='function')
platform = ET.SubElement(content, 'platform')
template = ET.SubElement(platform, 'template-info')
template.attrib['name'] = vm_image
os_info = ET.SubElement(platform, 'os-info')
os_info.attrib['technology'] = image[vm_image]['technology']
os_info.attrib['type'] = image[vm_image]['osType']
# Username and password
admin = ET.SubElement(content, 'admin')
admin.attrib['login'] = config.get_cloud_config_value('ssh_username', vm_, __opts__, default='root')
admin.attrib['password'] = config.get_cloud_config_value('password', vm_, __opts__, search_global=False)
data = ET.tostring(content, encoding='UTF-8')
__utils__['cloud.fire_event']('event', 'requesting instance', 'salt/cloud/{0}/requesting'.format(vm_['name']), args={'kwargs': __utils__['cloud.filter_event']('requesting', data, list(data))}, sock_dir=__opts__['sock_dir'], transport=__opts__['transport'])
node = query(action='ve', method='POST', data=data)
return node |
def param_squared_mean(ns_run, logw=None, simulate=False, param_ind=0):
    """Second moment of a single parameter's posterior distribution
    (the posterior mean of the parameter squared).

    Parameters
    ----------
    ns_run: dict
        Nested sampling run dict (see the data_processing module
        docstring for more details).
    logw: None or 1d numpy array, optional
        Log weights of samples; computed from ``ns_run`` when omitted.
    simulate: bool, optional
        Forwarded to ``ns_run_utils.get_logw`` when ``logw`` must be
        computed.
    param_ind: int, optional
        Index of the parameter, i.e. the column of ``ns_run['theta']``
        which contains it.

    Returns
    -------
    float
    """
    if logw is None:
        logw = nestcheck.ns_run_utils.get_logw(ns_run, simulate=simulate)
    # Subtract the maximum log-weight before exponentiating so the
    # normalisation is safe against overflow.
    weights = np.exp(logw - np.max(logw))
    weights = weights / np.sum(weights)
    param_vals = ns_run['theta'][:, param_ind]
    return np.sum(weights * param_vals ** 2)
constant[Mean of the square of single parameter (second moment of its
posterior distribution).
Parameters
----------
ns_run: dict
Nested sampling run dict (see the data_processing module
docstring for more details).
logw: None or 1d numpy array, optional
Log weights of samples.
simulate: bool, optional
Passed to ns_run_utils.get_logw if logw needs to be
calculated.
param_ind: int, optional
Index of parameter for which the second moment should be
calculated. This corresponds to the column of ns_run['theta']
which contains the parameter.
Returns
-------
float
]
if compare[name[logw] is constant[None]] begin[:]
variable[logw] assign[=] call[name[nestcheck].ns_run_utils.get_logw, parameter[name[ns_run]]]
variable[w_relative] assign[=] call[name[np].exp, parameter[binary_operation[name[logw] - call[name[logw].max, parameter[]]]]]
<ast.AugAssign object at 0x7da20c76ca00>
return[call[name[np].sum, parameter[binary_operation[name[w_relative] * binary_operation[call[call[name[ns_run]][constant[theta]]][tuple[[<ast.Slice object at 0x7da20c6aa0b0>, <ast.Name object at 0x7da20c6abb50>]]] ** constant[2]]]]]] | keyword[def] identifier[param_squared_mean] ( identifier[ns_run] , identifier[logw] = keyword[None] , identifier[simulate] = keyword[False] , identifier[param_ind] = literal[int] ):
literal[string]
keyword[if] identifier[logw] keyword[is] keyword[None] :
identifier[logw] = identifier[nestcheck] . identifier[ns_run_utils] . identifier[get_logw] ( identifier[ns_run] , identifier[simulate] = identifier[simulate] )
identifier[w_relative] = identifier[np] . identifier[exp] ( identifier[logw] - identifier[logw] . identifier[max] ())
identifier[w_relative] /= identifier[np] . identifier[sum] ( identifier[w_relative] )
keyword[return] identifier[np] . identifier[sum] ( identifier[w_relative] *( identifier[ns_run] [ literal[string] ][:, identifier[param_ind] ]** literal[int] )) | def param_squared_mean(ns_run, logw=None, simulate=False, param_ind=0):
"""Mean of the square of single parameter (second moment of its
posterior distribution).
Parameters
----------
ns_run: dict
Nested sampling run dict (see the data_processing module
docstring for more details).
logw: None or 1d numpy array, optional
Log weights of samples.
simulate: bool, optional
Passed to ns_run_utils.get_logw if logw needs to be
calculated.
param_ind: int, optional
Index of parameter for which the second moment should be
calculated. This corresponds to the column of ns_run['theta']
which contains the parameter.
Returns
-------
float
"""
if logw is None:
logw = nestcheck.ns_run_utils.get_logw(ns_run, simulate=simulate) # depends on [control=['if'], data=['logw']]
w_relative = np.exp(logw - logw.max()) # protect against overflow
w_relative /= np.sum(w_relative)
return np.sum(w_relative * ns_run['theta'][:, param_ind] ** 2) |
def resolve_exported_function(self, pid, modName, procName):
    """
    Resolves the exported DLL function for the given process.

    @type  pid: int
    @param pid: Process global ID.

    @type  modName: str
    @param modName: Name of the module that exports the function.

    @type  procName: str
    @param procName: Name of the exported function to resolve.

    @rtype:  int, None
    @return: On success, the address of the exported function.
        On failure, returns C{None}.
    """
    process = self.system.get_process(pid)
    module = process.get_module_by_name(modName)
    if not module:
        # Module list may be stale - rescan once and retry the lookup.
        process.scan_modules()
        module = process.get_module_by_name(modName)
    if not module:
        return None
    return module.resolve(procName)
constant[
Resolves the exported DLL function for the given process.
@type pid: int
@param pid: Process global ID.
@type modName: str
@param modName: Name of the module that exports the function.
@type procName: str
@param procName: Name of the exported function to resolve.
@rtype: int, None
@return: On success, the address of the exported function.
On failure, returns C{None}.
]
variable[aProcess] assign[=] call[name[self].system.get_process, parameter[name[pid]]]
variable[aModule] assign[=] call[name[aProcess].get_module_by_name, parameter[name[modName]]]
if <ast.UnaryOp object at 0x7da18fe93310> begin[:]
call[name[aProcess].scan_modules, parameter[]]
variable[aModule] assign[=] call[name[aProcess].get_module_by_name, parameter[name[modName]]]
if name[aModule] begin[:]
variable[address] assign[=] call[name[aModule].resolve, parameter[name[procName]]]
return[name[address]]
return[constant[None]] | keyword[def] identifier[resolve_exported_function] ( identifier[self] , identifier[pid] , identifier[modName] , identifier[procName] ):
literal[string]
identifier[aProcess] = identifier[self] . identifier[system] . identifier[get_process] ( identifier[pid] )
identifier[aModule] = identifier[aProcess] . identifier[get_module_by_name] ( identifier[modName] )
keyword[if] keyword[not] identifier[aModule] :
identifier[aProcess] . identifier[scan_modules] ()
identifier[aModule] = identifier[aProcess] . identifier[get_module_by_name] ( identifier[modName] )
keyword[if] identifier[aModule] :
identifier[address] = identifier[aModule] . identifier[resolve] ( identifier[procName] )
keyword[return] identifier[address]
keyword[return] keyword[None] | def resolve_exported_function(self, pid, modName, procName):
"""
Resolves the exported DLL function for the given process.
@type pid: int
@param pid: Process global ID.
@type modName: str
@param modName: Name of the module that exports the function.
@type procName: str
@param procName: Name of the exported function to resolve.
@rtype: int, None
@return: On success, the address of the exported function.
On failure, returns C{None}.
"""
aProcess = self.system.get_process(pid)
aModule = aProcess.get_module_by_name(modName)
if not aModule:
aProcess.scan_modules()
aModule = aProcess.get_module_by_name(modName) # depends on [control=['if'], data=[]]
if aModule:
address = aModule.resolve(procName)
return address # depends on [control=['if'], data=[]]
return None |
def task_list(task_array):
    """Return a task list.

    The task_array should be 2-dimensional; the first item should be the task
    text, and the second the boolean completion state.

    >>> task_list([["Be born", True], ["Be dead", False]])
    '- [X] Be born\\n- [ ] Be dead'

    When displayed using `print`, this will appear as:

    - [X] Be born
    - [ ] Be dead
    """
    rendered = []
    for text, done in task_array:
        # Completed tasks get a checked box; pending ones a blank marker.
        prefix = "- [X] " if done else "- [ ] "
        rendered.append(prefix + esc_format(text))
    return "\n".join(rendered)
constant[Return a task list.
The task_array should be 2-dimensional; the first item should be the task
text, and the second the boolean completion state.
>>> task_list([["Be born", True], ["Be dead", False]])
'- [X] Be born\n- [ ] Be dead'
When displayed using `print`, this will appear as:
- [X] Be born
- [ ] Be dead
]
variable[tasks] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c76c5b0>, <ast.Name object at 0x7da20c76f0a0>]]] in starred[name[task_array]] begin[:]
variable[task] assign[=] binary_operation[constant[- [ ] ] + call[name[esc_format], parameter[name[item]]]]
if name[completed] begin[:]
variable[task] assign[=] binary_operation[binary_operation[call[name[task]][<ast.Slice object at 0x7da20c76e560>] + constant[X]] + call[name[task]][<ast.Slice object at 0x7da20c76f370>]]
call[name[tasks].append, parameter[name[task]]]
return[call[constant[
].join, parameter[name[tasks]]]] | keyword[def] identifier[task_list] ( identifier[task_array] ):
literal[string]
identifier[tasks] =[]
keyword[for] identifier[item] , identifier[completed] keyword[in] identifier[task_array] :
identifier[task] = literal[string] + identifier[esc_format] ( identifier[item] )
keyword[if] identifier[completed] :
identifier[task] = identifier[task] [: literal[int] ]+ literal[string] + identifier[task] [ literal[int] :]
identifier[tasks] . identifier[append] ( identifier[task] )
keyword[return] literal[string] . identifier[join] ( identifier[tasks] ) | def task_list(task_array):
"""Return a task list.
The task_array should be 2-dimensional; the first item should be the task
text, and the second the boolean completion state.
>>> task_list([["Be born", True], ["Be dead", False]])
'- [X] Be born\\n- [ ] Be dead'
When displayed using `print`, this will appear as:
- [X] Be born
- [ ] Be dead
"""
tasks = []
for (item, completed) in task_array:
task = '- [ ] ' + esc_format(item)
if completed:
task = task[:3] + 'X' + task[4:] # depends on [control=['if'], data=[]]
tasks.append(task) # depends on [control=['for'], data=[]]
return '\n'.join(tasks) |
def handler(self, value):
    """
    Setter for **self.__handler** attribute.

    :param value: Attribute value. ( SocketServer.BaseRequestHandler or None )
    """

    if value is not None:
        assert issubclass(value, SocketServer.BaseRequestHandler), \
            "'{0}' attribute: '{1}' is not 'SocketServer.BaseRequestHandler' subclass!".format("handler", value)
    self.__handler = value
    if self.__handler is not None:
        # Fix: guard the back-reference assignment. The original set
        # ``container`` unconditionally, so clearing the handler with
        # None raised AttributeError even though the type-check above
        # explicitly tolerates a None value.
        self.__handler.container = self
constant[
Setter for **self.__handler** attribute.
:param value: Attribute value. ( SocketServer.BaseRequestHandler )
]
if compare[name[value] is_not constant[None]] begin[:]
assert[call[name[issubclass], parameter[name[value], name[SocketServer].BaseRequestHandler]]]
name[self].__handler assign[=] name[value]
name[self].__handler.container assign[=] name[self] | keyword[def] identifier[handler] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[assert] identifier[issubclass] ( identifier[value] , identifier[SocketServer] . identifier[BaseRequestHandler] ), literal[string] . identifier[format] ( literal[string] , identifier[value] )
identifier[self] . identifier[__handler] = identifier[value]
identifier[self] . identifier[__handler] . identifier[container] = identifier[self] | def handler(self, value):
"""
Setter for **self.__handler** attribute.
:param value: Attribute value. ( SocketServer.BaseRequestHandler )
"""
if value is not None:
assert issubclass(value, SocketServer.BaseRequestHandler), "'{0}' attribute: '{1}' is not 'SocketServer.BaseRequestHandler' subclass!".format('handler', value) # depends on [control=['if'], data=['value']]
self.__handler = value
self.__handler.container = self |
def search_upwards(self, fpath=None, repodirname='.svn', upwards={}):
"""
Traverse filesystem upwards, searching for .svn directories
with matching UUIDs (Recursive)
Args:
fpath (str): file path to search upwards from
repodirname (str): directory name to search for (``.svn``)
upwards (dict): dict of already-searched directories
example::
repo/.svn
repo/dir1/.svn
repo/dir1/dir2/.svn
>> search_upwards('repo/')
<< 'repo/'
>> search_upwards('repo/dir1')
<< 'repo/'
>> search_upwards('repo/dir1/dir2')
<< 'repo/'
repo/.svn
repo/dirA/
repo/dirA/dirB/.svn
>> search_upwards('repo/dirA')
<< 'repo/'
>> search_upwards('repo/dirA/dirB')
>> 'repo/dirB')
"""
fpath = fpath or self.fpath
uuid = self.unique_id
last_path = self
path_comp = fpath.split(os.path.sep)
# [0:-1], [0:-2], [0:-1*len(path_comp)]
for n in xrange(1, len(path_comp)-1):
checkpath = os.path.join(*path_comp[0:-1 * n])
repodir = os.path.join(checkpath, repodirname)
upw_uuid = upwards.get(repodir)
if upw_uuid:
if upw_uuid == uuid:
last_path = SvnRepository(checkpath)
continue
else:
break
elif os.path.exists(repodir):
repo = SvnRepository(checkpath)
upw_uuid = repo.unique_id
upwards[repodir] = upw_uuid
# TODO: match on REVISION too
if upw_uuid == uuid:
last_path = repo
continue
else:
break
return last_path | def function[search_upwards, parameter[self, fpath, repodirname, upwards]]:
constant[
Traverse filesystem upwards, searching for .svn directories
with matching UUIDs (Recursive)
Args:
fpath (str): file path to search upwards from
repodirname (str): directory name to search for (``.svn``)
upwards (dict): dict of already-searched directories
example::
repo/.svn
repo/dir1/.svn
repo/dir1/dir2/.svn
>> search_upwards('repo/')
<< 'repo/'
>> search_upwards('repo/dir1')
<< 'repo/'
>> search_upwards('repo/dir1/dir2')
<< 'repo/'
repo/.svn
repo/dirA/
repo/dirA/dirB/.svn
>> search_upwards('repo/dirA')
<< 'repo/'
>> search_upwards('repo/dirA/dirB')
>> 'repo/dirB')
]
variable[fpath] assign[=] <ast.BoolOp object at 0x7da20e955840>
variable[uuid] assign[=] name[self].unique_id
variable[last_path] assign[=] name[self]
variable[path_comp] assign[=] call[name[fpath].split, parameter[name[os].path.sep]]
for taget[name[n]] in starred[call[name[xrange], parameter[constant[1], binary_operation[call[name[len], parameter[name[path_comp]]] - constant[1]]]]] begin[:]
variable[checkpath] assign[=] call[name[os].path.join, parameter[<ast.Starred object at 0x7da20e9575b0>]]
variable[repodir] assign[=] call[name[os].path.join, parameter[name[checkpath], name[repodirname]]]
variable[upw_uuid] assign[=] call[name[upwards].get, parameter[name[repodir]]]
if name[upw_uuid] begin[:]
if compare[name[upw_uuid] equal[==] name[uuid]] begin[:]
variable[last_path] assign[=] call[name[SvnRepository], parameter[name[checkpath]]]
continue
return[name[last_path]] | keyword[def] identifier[search_upwards] ( identifier[self] , identifier[fpath] = keyword[None] , identifier[repodirname] = literal[string] , identifier[upwards] ={}):
literal[string]
identifier[fpath] = identifier[fpath] keyword[or] identifier[self] . identifier[fpath]
identifier[uuid] = identifier[self] . identifier[unique_id]
identifier[last_path] = identifier[self]
identifier[path_comp] = identifier[fpath] . identifier[split] ( identifier[os] . identifier[path] . identifier[sep] )
keyword[for] identifier[n] keyword[in] identifier[xrange] ( literal[int] , identifier[len] ( identifier[path_comp] )- literal[int] ):
identifier[checkpath] = identifier[os] . identifier[path] . identifier[join] (* identifier[path_comp] [ literal[int] :- literal[int] * identifier[n] ])
identifier[repodir] = identifier[os] . identifier[path] . identifier[join] ( identifier[checkpath] , identifier[repodirname] )
identifier[upw_uuid] = identifier[upwards] . identifier[get] ( identifier[repodir] )
keyword[if] identifier[upw_uuid] :
keyword[if] identifier[upw_uuid] == identifier[uuid] :
identifier[last_path] = identifier[SvnRepository] ( identifier[checkpath] )
keyword[continue]
keyword[else] :
keyword[break]
keyword[elif] identifier[os] . identifier[path] . identifier[exists] ( identifier[repodir] ):
identifier[repo] = identifier[SvnRepository] ( identifier[checkpath] )
identifier[upw_uuid] = identifier[repo] . identifier[unique_id]
identifier[upwards] [ identifier[repodir] ]= identifier[upw_uuid]
keyword[if] identifier[upw_uuid] == identifier[uuid] :
identifier[last_path] = identifier[repo]
keyword[continue]
keyword[else] :
keyword[break]
keyword[return] identifier[last_path] | def search_upwards(self, fpath=None, repodirname='.svn', upwards={}):
"""
Traverse filesystem upwards, searching for .svn directories
with matching UUIDs (Recursive)
Args:
fpath (str): file path to search upwards from
repodirname (str): directory name to search for (``.svn``)
upwards (dict): dict of already-searched directories
example::
repo/.svn
repo/dir1/.svn
repo/dir1/dir2/.svn
>> search_upwards('repo/')
<< 'repo/'
>> search_upwards('repo/dir1')
<< 'repo/'
>> search_upwards('repo/dir1/dir2')
<< 'repo/'
repo/.svn
repo/dirA/
repo/dirA/dirB/.svn
>> search_upwards('repo/dirA')
<< 'repo/'
>> search_upwards('repo/dirA/dirB')
>> 'repo/dirB')
"""
fpath = fpath or self.fpath
uuid = self.unique_id
last_path = self
path_comp = fpath.split(os.path.sep)
# [0:-1], [0:-2], [0:-1*len(path_comp)]
for n in xrange(1, len(path_comp) - 1):
checkpath = os.path.join(*path_comp[0:-1 * n])
repodir = os.path.join(checkpath, repodirname)
upw_uuid = upwards.get(repodir)
if upw_uuid:
if upw_uuid == uuid:
last_path = SvnRepository(checkpath)
continue # depends on [control=['if'], data=[]]
else:
break # depends on [control=['if'], data=[]]
elif os.path.exists(repodir):
repo = SvnRepository(checkpath)
upw_uuid = repo.unique_id
upwards[repodir] = upw_uuid
# TODO: match on REVISION too
if upw_uuid == uuid:
last_path = repo
continue # depends on [control=['if'], data=[]]
else:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['n']]
return last_path |
def read_lines_from_file(cls_name, filename):
"""Read lines from file, parsing out header and metadata."""
with tf.io.gfile.GFile(filename, "rb") as f:
lines = [tf.compat.as_text(line)[:-1] for line in f]
header_line = "%s%s" % (_HEADER_PREFIX, cls_name)
if lines[0] != header_line:
raise ValueError("File {fname} does not seem to have been created from "
"{name}.save_to_file.".format(
fname=filename, name=cls_name))
metadata_dict = json.loads(lines[1][len(_METADATA_PREFIX):])
return lines[2:], metadata_dict | def function[read_lines_from_file, parameter[cls_name, filename]]:
constant[Read lines from file, parsing out header and metadata.]
with call[name[tf].io.gfile.GFile, parameter[name[filename], constant[rb]]] begin[:]
variable[lines] assign[=] <ast.ListComp object at 0x7da18bc72740>
variable[header_line] assign[=] binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bc73850>, <ast.Name object at 0x7da18bc702b0>]]]
if compare[call[name[lines]][constant[0]] not_equal[!=] name[header_line]] begin[:]
<ast.Raise object at 0x7da18bc716f0>
variable[metadata_dict] assign[=] call[name[json].loads, parameter[call[call[name[lines]][constant[1]]][<ast.Slice object at 0x7da18bc70d60>]]]
return[tuple[[<ast.Subscript object at 0x7da18bc73af0>, <ast.Name object at 0x7da18bc728c0>]]] | keyword[def] identifier[read_lines_from_file] ( identifier[cls_name] , identifier[filename] ):
literal[string]
keyword[with] identifier[tf] . identifier[io] . identifier[gfile] . identifier[GFile] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] :
identifier[lines] =[ identifier[tf] . identifier[compat] . identifier[as_text] ( identifier[line] )[:- literal[int] ] keyword[for] identifier[line] keyword[in] identifier[f] ]
identifier[header_line] = literal[string] %( identifier[_HEADER_PREFIX] , identifier[cls_name] )
keyword[if] identifier[lines] [ literal[int] ]!= identifier[header_line] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] (
identifier[fname] = identifier[filename] , identifier[name] = identifier[cls_name] ))
identifier[metadata_dict] = identifier[json] . identifier[loads] ( identifier[lines] [ literal[int] ][ identifier[len] ( identifier[_METADATA_PREFIX] ):])
keyword[return] identifier[lines] [ literal[int] :], identifier[metadata_dict] | def read_lines_from_file(cls_name, filename):
"""Read lines from file, parsing out header and metadata."""
with tf.io.gfile.GFile(filename, 'rb') as f:
lines = [tf.compat.as_text(line)[:-1] for line in f] # depends on [control=['with'], data=['f']]
header_line = '%s%s' % (_HEADER_PREFIX, cls_name)
if lines[0] != header_line:
raise ValueError('File {fname} does not seem to have been created from {name}.save_to_file.'.format(fname=filename, name=cls_name)) # depends on [control=['if'], data=[]]
metadata_dict = json.loads(lines[1][len(_METADATA_PREFIX):])
return (lines[2:], metadata_dict) |
def rest_of_string(self, offset=0):
"""A copy of the current position till the end of the source string."""
if self.has_space(offset=offset):
return self.string[self.pos + offset:]
else:
return '' | def function[rest_of_string, parameter[self, offset]]:
constant[A copy of the current position till the end of the source string.]
if call[name[self].has_space, parameter[]] begin[:]
return[call[name[self].string][<ast.Slice object at 0x7da1b28aec80>]] | keyword[def] identifier[rest_of_string] ( identifier[self] , identifier[offset] = literal[int] ):
literal[string]
keyword[if] identifier[self] . identifier[has_space] ( identifier[offset] = identifier[offset] ):
keyword[return] identifier[self] . identifier[string] [ identifier[self] . identifier[pos] + identifier[offset] :]
keyword[else] :
keyword[return] literal[string] | def rest_of_string(self, offset=0):
"""A copy of the current position till the end of the source string."""
if self.has_space(offset=offset):
return self.string[self.pos + offset:] # depends on [control=['if'], data=[]]
else:
return '' |
def data(self, root):
'''Convert etree.Element into a dictionary'''
value = self.dict()
children = [node for node in root if isinstance(node.tag, basestring)]
for attr, attrval in root.attrib.items():
attr = attr if self.attr_prefix is None else self.attr_prefix + attr
value[attr] = self._fromstring(attrval)
if root.text and self.text_content is not None:
text = root.text.strip()
if text:
if self.simple_text and len(children) == len(root.attrib) == 0:
value = self._fromstring(text)
else:
value[self.text_content] = self._fromstring(text)
count = Counter(child.tag for child in children)
for child in children:
if count[child.tag] == 1:
value.update(self.data(child))
else:
result = value.setdefault(child.tag, self.list())
result += self.data(child).values()
# if simple_text, elements with no children nor attrs become '', not {}
if isinstance(value, dict) and not value and self.simple_text:
value = ''
return self.dict([(root.tag, value)]) | def function[data, parameter[self, root]]:
constant[Convert etree.Element into a dictionary]
variable[value] assign[=] call[name[self].dict, parameter[]]
variable[children] assign[=] <ast.ListComp object at 0x7da1b11a7490>
for taget[tuple[[<ast.Name object at 0x7da1b11a6f80>, <ast.Name object at 0x7da1b11a4550>]]] in starred[call[name[root].attrib.items, parameter[]]] begin[:]
variable[attr] assign[=] <ast.IfExp object at 0x7da1b11a7310>
call[name[value]][name[attr]] assign[=] call[name[self]._fromstring, parameter[name[attrval]]]
if <ast.BoolOp object at 0x7da1b1193400> begin[:]
variable[text] assign[=] call[name[root].text.strip, parameter[]]
if name[text] begin[:]
if <ast.BoolOp object at 0x7da1b1190640> begin[:]
variable[value] assign[=] call[name[self]._fromstring, parameter[name[text]]]
variable[count] assign[=] call[name[Counter], parameter[<ast.GeneratorExp object at 0x7da1b1190310>]]
for taget[name[child]] in starred[name[children]] begin[:]
if compare[call[name[count]][name[child].tag] equal[==] constant[1]] begin[:]
call[name[value].update, parameter[call[name[self].data, parameter[name[child]]]]]
if <ast.BoolOp object at 0x7da1b1190880> begin[:]
variable[value] assign[=] constant[]
return[call[name[self].dict, parameter[list[[<ast.Tuple object at 0x7da1b11920e0>]]]]] | keyword[def] identifier[data] ( identifier[self] , identifier[root] ):
literal[string]
identifier[value] = identifier[self] . identifier[dict] ()
identifier[children] =[ identifier[node] keyword[for] identifier[node] keyword[in] identifier[root] keyword[if] identifier[isinstance] ( identifier[node] . identifier[tag] , identifier[basestring] )]
keyword[for] identifier[attr] , identifier[attrval] keyword[in] identifier[root] . identifier[attrib] . identifier[items] ():
identifier[attr] = identifier[attr] keyword[if] identifier[self] . identifier[attr_prefix] keyword[is] keyword[None] keyword[else] identifier[self] . identifier[attr_prefix] + identifier[attr]
identifier[value] [ identifier[attr] ]= identifier[self] . identifier[_fromstring] ( identifier[attrval] )
keyword[if] identifier[root] . identifier[text] keyword[and] identifier[self] . identifier[text_content] keyword[is] keyword[not] keyword[None] :
identifier[text] = identifier[root] . identifier[text] . identifier[strip] ()
keyword[if] identifier[text] :
keyword[if] identifier[self] . identifier[simple_text] keyword[and] identifier[len] ( identifier[children] )== identifier[len] ( identifier[root] . identifier[attrib] )== literal[int] :
identifier[value] = identifier[self] . identifier[_fromstring] ( identifier[text] )
keyword[else] :
identifier[value] [ identifier[self] . identifier[text_content] ]= identifier[self] . identifier[_fromstring] ( identifier[text] )
identifier[count] = identifier[Counter] ( identifier[child] . identifier[tag] keyword[for] identifier[child] keyword[in] identifier[children] )
keyword[for] identifier[child] keyword[in] identifier[children] :
keyword[if] identifier[count] [ identifier[child] . identifier[tag] ]== literal[int] :
identifier[value] . identifier[update] ( identifier[self] . identifier[data] ( identifier[child] ))
keyword[else] :
identifier[result] = identifier[value] . identifier[setdefault] ( identifier[child] . identifier[tag] , identifier[self] . identifier[list] ())
identifier[result] += identifier[self] . identifier[data] ( identifier[child] ). identifier[values] ()
keyword[if] identifier[isinstance] ( identifier[value] , identifier[dict] ) keyword[and] keyword[not] identifier[value] keyword[and] identifier[self] . identifier[simple_text] :
identifier[value] = literal[string]
keyword[return] identifier[self] . identifier[dict] ([( identifier[root] . identifier[tag] , identifier[value] )]) | def data(self, root):
"""Convert etree.Element into a dictionary"""
value = self.dict()
children = [node for node in root if isinstance(node.tag, basestring)]
for (attr, attrval) in root.attrib.items():
attr = attr if self.attr_prefix is None else self.attr_prefix + attr
value[attr] = self._fromstring(attrval) # depends on [control=['for'], data=[]]
if root.text and self.text_content is not None:
text = root.text.strip()
if text:
if self.simple_text and len(children) == len(root.attrib) == 0:
value = self._fromstring(text) # depends on [control=['if'], data=[]]
else:
value[self.text_content] = self._fromstring(text) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
count = Counter((child.tag for child in children))
for child in children:
if count[child.tag] == 1:
value.update(self.data(child)) # depends on [control=['if'], data=[]]
else:
result = value.setdefault(child.tag, self.list())
result += self.data(child).values() # depends on [control=['for'], data=['child']]
# if simple_text, elements with no children nor attrs become '', not {}
if isinstance(value, dict) and (not value) and self.simple_text:
value = '' # depends on [control=['if'], data=[]]
return self.dict([(root.tag, value)]) |
def ParseConfig(self, command, function=None, unique=1):
"""
Use the specified function to parse the output of the command
in order to modify the current environment. The 'command' can
be a string or a list of strings representing a command and
its arguments. 'Function' is an optional argument that takes
the environment, the output of the command, and the unique flag.
If no function is specified, MergeFlags, which treats the output
as the result of a typical 'X-config' command (i.e. gtk-config),
will merge the output into the appropriate variables.
"""
if function is None:
def parse_conf(env, cmd, unique=unique):
return env.MergeFlags(cmd, unique)
function = parse_conf
if SCons.Util.is_List(command):
command = ' '.join(command)
command = self.subst(command)
return function(self, self.backtick(command)) | def function[ParseConfig, parameter[self, command, function, unique]]:
constant[
Use the specified function to parse the output of the command
in order to modify the current environment. The 'command' can
be a string or a list of strings representing a command and
its arguments. 'Function' is an optional argument that takes
the environment, the output of the command, and the unique flag.
If no function is specified, MergeFlags, which treats the output
as the result of a typical 'X-config' command (i.e. gtk-config),
will merge the output into the appropriate variables.
]
if compare[name[function] is constant[None]] begin[:]
def function[parse_conf, parameter[env, cmd, unique]]:
return[call[name[env].MergeFlags, parameter[name[cmd], name[unique]]]]
variable[function] assign[=] name[parse_conf]
if call[name[SCons].Util.is_List, parameter[name[command]]] begin[:]
variable[command] assign[=] call[constant[ ].join, parameter[name[command]]]
variable[command] assign[=] call[name[self].subst, parameter[name[command]]]
return[call[name[function], parameter[name[self], call[name[self].backtick, parameter[name[command]]]]]] | keyword[def] identifier[ParseConfig] ( identifier[self] , identifier[command] , identifier[function] = keyword[None] , identifier[unique] = literal[int] ):
literal[string]
keyword[if] identifier[function] keyword[is] keyword[None] :
keyword[def] identifier[parse_conf] ( identifier[env] , identifier[cmd] , identifier[unique] = identifier[unique] ):
keyword[return] identifier[env] . identifier[MergeFlags] ( identifier[cmd] , identifier[unique] )
identifier[function] = identifier[parse_conf]
keyword[if] identifier[SCons] . identifier[Util] . identifier[is_List] ( identifier[command] ):
identifier[command] = literal[string] . identifier[join] ( identifier[command] )
identifier[command] = identifier[self] . identifier[subst] ( identifier[command] )
keyword[return] identifier[function] ( identifier[self] , identifier[self] . identifier[backtick] ( identifier[command] )) | def ParseConfig(self, command, function=None, unique=1):
"""
Use the specified function to parse the output of the command
in order to modify the current environment. The 'command' can
be a string or a list of strings representing a command and
its arguments. 'Function' is an optional argument that takes
the environment, the output of the command, and the unique flag.
If no function is specified, MergeFlags, which treats the output
as the result of a typical 'X-config' command (i.e. gtk-config),
will merge the output into the appropriate variables.
"""
if function is None:
def parse_conf(env, cmd, unique=unique):
return env.MergeFlags(cmd, unique)
function = parse_conf # depends on [control=['if'], data=['function']]
if SCons.Util.is_List(command):
command = ' '.join(command) # depends on [control=['if'], data=[]]
command = self.subst(command)
return function(self, self.backtick(command)) |
def n1ql_index_drop(self, ix, primary=False, **kwargs):
"""
Delete an index from the cluster.
:param str ix: the name of the index
:param bool primary: if this index is a primary index
:param bool ignore_missing: Do not raise an exception if the index
does not exist
:raise: :exc:`~.NotFoundError` if the index does not exist and
`ignore_missing` was not specified
"""
info = self._mk_index_def(ix, primary)
return IxmgmtRequest(self._cb, 'drop', info, **kwargs).execute() | def function[n1ql_index_drop, parameter[self, ix, primary]]:
constant[
Delete an index from the cluster.
:param str ix: the name of the index
:param bool primary: if this index is a primary index
:param bool ignore_missing: Do not raise an exception if the index
does not exist
:raise: :exc:`~.NotFoundError` if the index does not exist and
`ignore_missing` was not specified
]
variable[info] assign[=] call[name[self]._mk_index_def, parameter[name[ix], name[primary]]]
return[call[call[name[IxmgmtRequest], parameter[name[self]._cb, constant[drop], name[info]]].execute, parameter[]]] | keyword[def] identifier[n1ql_index_drop] ( identifier[self] , identifier[ix] , identifier[primary] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[info] = identifier[self] . identifier[_mk_index_def] ( identifier[ix] , identifier[primary] )
keyword[return] identifier[IxmgmtRequest] ( identifier[self] . identifier[_cb] , literal[string] , identifier[info] ,** identifier[kwargs] ). identifier[execute] () | def n1ql_index_drop(self, ix, primary=False, **kwargs):
"""
Delete an index from the cluster.
:param str ix: the name of the index
:param bool primary: if this index is a primary index
:param bool ignore_missing: Do not raise an exception if the index
does not exist
:raise: :exc:`~.NotFoundError` if the index does not exist and
`ignore_missing` was not specified
"""
info = self._mk_index_def(ix, primary)
return IxmgmtRequest(self._cb, 'drop', info, **kwargs).execute() |
def agent(server="http://localhost:8000"):
"""Run in agent mode.
This gathers data, and sends it to a server given by the server argument.
"""
import xmlrpc.client
sp = xmlrpc.client.ServerProxy(server)
hw = getallhwinfo()
fields = header_fields()
for f in fields:
if not f in hw:
hw[f] = ''
try:
sp.puthwinfo(xmlrpc.client.dumps((hw,)))
except xmlrpc.client.Error as v:
print("ERROR occured: ", v) | def function[agent, parameter[server]]:
constant[Run in agent mode.
This gathers data, and sends it to a server given by the server argument.
]
import module[xmlrpc.client]
variable[sp] assign[=] call[name[xmlrpc].client.ServerProxy, parameter[name[server]]]
variable[hw] assign[=] call[name[getallhwinfo], parameter[]]
variable[fields] assign[=] call[name[header_fields], parameter[]]
for taget[name[f]] in starred[name[fields]] begin[:]
if <ast.UnaryOp object at 0x7da20c6abe80> begin[:]
call[name[hw]][name[f]] assign[=] constant[]
<ast.Try object at 0x7da20c6aa1a0> | keyword[def] identifier[agent] ( identifier[server] = literal[string] ):
literal[string]
keyword[import] identifier[xmlrpc] . identifier[client]
identifier[sp] = identifier[xmlrpc] . identifier[client] . identifier[ServerProxy] ( identifier[server] )
identifier[hw] = identifier[getallhwinfo] ()
identifier[fields] = identifier[header_fields] ()
keyword[for] identifier[f] keyword[in] identifier[fields] :
keyword[if] keyword[not] identifier[f] keyword[in] identifier[hw] :
identifier[hw] [ identifier[f] ]= literal[string]
keyword[try] :
identifier[sp] . identifier[puthwinfo] ( identifier[xmlrpc] . identifier[client] . identifier[dumps] (( identifier[hw] ,)))
keyword[except] identifier[xmlrpc] . identifier[client] . identifier[Error] keyword[as] identifier[v] :
identifier[print] ( literal[string] , identifier[v] ) | def agent(server='http://localhost:8000'):
"""Run in agent mode.
This gathers data, and sends it to a server given by the server argument.
"""
import xmlrpc.client
sp = xmlrpc.client.ServerProxy(server)
hw = getallhwinfo()
fields = header_fields()
for f in fields:
if not f in hw:
hw[f] = '' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']]
try:
sp.puthwinfo(xmlrpc.client.dumps((hw,))) # depends on [control=['try'], data=[]]
except xmlrpc.client.Error as v:
print('ERROR occured: ', v) # depends on [control=['except'], data=['v']] |
async def async_init(self):
"""
Handle here the asynchronous part of the init.
"""
self.pool = await aioredis.create_pool(
(self.host, self.port),
db=self.db_id,
minsize=self.min_pool_size,
maxsize=self.max_pool_size,
loop=asyncio.get_event_loop(),
) | <ast.AsyncFunctionDef object at 0x7da18ede49d0> | keyword[async] keyword[def] identifier[async_init] ( identifier[self] ):
literal[string]
identifier[self] . identifier[pool] = keyword[await] identifier[aioredis] . identifier[create_pool] (
( identifier[self] . identifier[host] , identifier[self] . identifier[port] ),
identifier[db] = identifier[self] . identifier[db_id] ,
identifier[minsize] = identifier[self] . identifier[min_pool_size] ,
identifier[maxsize] = identifier[self] . identifier[max_pool_size] ,
identifier[loop] = identifier[asyncio] . identifier[get_event_loop] (),
) | async def async_init(self):
"""
Handle here the asynchronous part of the init.
"""
self.pool = await aioredis.create_pool((self.host, self.port), db=self.db_id, minsize=self.min_pool_size, maxsize=self.max_pool_size, loop=asyncio.get_event_loop()) |
def cas2mach(cas, h):
""" CAS Mach conversion """
tas = cas2tas(cas, h)
M = tas2mach(tas, h)
return M | def function[cas2mach, parameter[cas, h]]:
constant[ CAS Mach conversion ]
variable[tas] assign[=] call[name[cas2tas], parameter[name[cas], name[h]]]
variable[M] assign[=] call[name[tas2mach], parameter[name[tas], name[h]]]
return[name[M]] | keyword[def] identifier[cas2mach] ( identifier[cas] , identifier[h] ):
literal[string]
identifier[tas] = identifier[cas2tas] ( identifier[cas] , identifier[h] )
identifier[M] = identifier[tas2mach] ( identifier[tas] , identifier[h] )
keyword[return] identifier[M] | def cas2mach(cas, h):
""" CAS Mach conversion """
tas = cas2tas(cas, h)
M = tas2mach(tas, h)
return M |
def ReadCronJobRuns(self, job_id):
"""Reads all cron job runs for a given job id."""
runs = [
run for run in itervalues(self.cronjob_runs)
if run.cron_job_id == job_id
]
return sorted(runs, key=lambda run: run.started_at, reverse=True) | def function[ReadCronJobRuns, parameter[self, job_id]]:
constant[Reads all cron job runs for a given job id.]
variable[runs] assign[=] <ast.ListComp object at 0x7da1b1d91ea0>
return[call[name[sorted], parameter[name[runs]]]] | keyword[def] identifier[ReadCronJobRuns] ( identifier[self] , identifier[job_id] ):
literal[string]
identifier[runs] =[
identifier[run] keyword[for] identifier[run] keyword[in] identifier[itervalues] ( identifier[self] . identifier[cronjob_runs] )
keyword[if] identifier[run] . identifier[cron_job_id] == identifier[job_id]
]
keyword[return] identifier[sorted] ( identifier[runs] , identifier[key] = keyword[lambda] identifier[run] : identifier[run] . identifier[started_at] , identifier[reverse] = keyword[True] ) | def ReadCronJobRuns(self, job_id):
"""Reads all cron job runs for a given job id."""
runs = [run for run in itervalues(self.cronjob_runs) if run.cron_job_id == job_id]
return sorted(runs, key=lambda run: run.started_at, reverse=True) |
def select_authors_by_epithet(query):
"""Pass exact name (case insensitive) of epithet name, return ordered set
of author ids.
"""
for epithet, ids in AUTHOR_EPITHET.items():
if epithet.casefold() == query.casefold():
return set(ids) | def function[select_authors_by_epithet, parameter[query]]:
constant[Pass exact name (case insensitive) of epithet name, return ordered set
of author ids.
]
for taget[tuple[[<ast.Name object at 0x7da2045657b0>, <ast.Name object at 0x7da2045652d0>]]] in starred[call[name[AUTHOR_EPITHET].items, parameter[]]] begin[:]
if compare[call[name[epithet].casefold, parameter[]] equal[==] call[name[query].casefold, parameter[]]] begin[:]
return[call[name[set], parameter[name[ids]]]] | keyword[def] identifier[select_authors_by_epithet] ( identifier[query] ):
literal[string]
keyword[for] identifier[epithet] , identifier[ids] keyword[in] identifier[AUTHOR_EPITHET] . identifier[items] ():
keyword[if] identifier[epithet] . identifier[casefold] ()== identifier[query] . identifier[casefold] ():
keyword[return] identifier[set] ( identifier[ids] ) | def select_authors_by_epithet(query):
"""Pass exact name (case insensitive) of epithet name, return ordered set
of author ids.
"""
for (epithet, ids) in AUTHOR_EPITHET.items():
if epithet.casefold() == query.casefold():
return set(ids) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def GetSoapXMLForComplexType(self, type_name, value):
"""Return an XML string representing a SOAP complex type.
Args:
type_name: The name of the type with namespace prefix if necessary.
value: A python dictionary to hydrate the type instance with.
Returns:
A string containing the SOAP XML for the type.
"""
schema = self.suds_client.wsdl.schema
definition_type = schema.elements[(type_name, self._namespace_override)]
marshaller = suds.mx.literal.Literal(schema)
content = suds.mx.Content(
tag=type_name, value=value,
name=type_name, type=definition_type)
data = marshaller.process(content)
return data | def function[GetSoapXMLForComplexType, parameter[self, type_name, value]]:
constant[Return an XML string representing a SOAP complex type.
Args:
type_name: The name of the type with namespace prefix if necessary.
value: A python dictionary to hydrate the type instance with.
Returns:
A string containing the SOAP XML for the type.
]
variable[schema] assign[=] name[self].suds_client.wsdl.schema
variable[definition_type] assign[=] call[name[schema].elements][tuple[[<ast.Name object at 0x7da1b1c0e110>, <ast.Attribute object at 0x7da1b1c0d5a0>]]]
variable[marshaller] assign[=] call[name[suds].mx.literal.Literal, parameter[name[schema]]]
variable[content] assign[=] call[name[suds].mx.Content, parameter[]]
variable[data] assign[=] call[name[marshaller].process, parameter[name[content]]]
return[name[data]] | keyword[def] identifier[GetSoapXMLForComplexType] ( identifier[self] , identifier[type_name] , identifier[value] ):
literal[string]
identifier[schema] = identifier[self] . identifier[suds_client] . identifier[wsdl] . identifier[schema]
identifier[definition_type] = identifier[schema] . identifier[elements] [( identifier[type_name] , identifier[self] . identifier[_namespace_override] )]
identifier[marshaller] = identifier[suds] . identifier[mx] . identifier[literal] . identifier[Literal] ( identifier[schema] )
identifier[content] = identifier[suds] . identifier[mx] . identifier[Content] (
identifier[tag] = identifier[type_name] , identifier[value] = identifier[value] ,
identifier[name] = identifier[type_name] , identifier[type] = identifier[definition_type] )
identifier[data] = identifier[marshaller] . identifier[process] ( identifier[content] )
keyword[return] identifier[data] | def GetSoapXMLForComplexType(self, type_name, value):
"""Return an XML string representing a SOAP complex type.
Args:
type_name: The name of the type with namespace prefix if necessary.
value: A python dictionary to hydrate the type instance with.
Returns:
A string containing the SOAP XML for the type.
"""
schema = self.suds_client.wsdl.schema
definition_type = schema.elements[type_name, self._namespace_override]
marshaller = suds.mx.literal.Literal(schema)
content = suds.mx.Content(tag=type_name, value=value, name=type_name, type=definition_type)
data = marshaller.process(content)
return data |
def compress(pdf_in, pdf_output):
"""
mac下安装 brew install ghostscript
:param pdf_in:
:param pdf_output:
:return:
"""
cmd = 'gs -dNOPAUSE -dBATCH -sDEVICE=pdfwrite -dCompatibilityLevel=1.4 -dPDFSETTINGS=/screen -sOutputFile=%s %s'
cmd = cmd % (pdf_output, pdf_in)
os.system(cmd) | def function[compress, parameter[pdf_in, pdf_output]]:
constant[
mac下安装 brew install ghostscript
:param pdf_in:
:param pdf_output:
:return:
]
variable[cmd] assign[=] constant[gs -dNOPAUSE -dBATCH -sDEVICE=pdfwrite -dCompatibilityLevel=1.4 -dPDFSETTINGS=/screen -sOutputFile=%s %s]
variable[cmd] assign[=] binary_operation[name[cmd] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c796890>, <ast.Name object at 0x7da18ede6d10>]]]
call[name[os].system, parameter[name[cmd]]] | keyword[def] identifier[compress] ( identifier[pdf_in] , identifier[pdf_output] ):
literal[string]
identifier[cmd] = literal[string]
identifier[cmd] = identifier[cmd] %( identifier[pdf_output] , identifier[pdf_in] )
identifier[os] . identifier[system] ( identifier[cmd] ) | def compress(pdf_in, pdf_output):
"""
mac下安装 brew install ghostscript
:param pdf_in:
:param pdf_output:
:return:
"""
cmd = 'gs -dNOPAUSE -dBATCH -sDEVICE=pdfwrite -dCompatibilityLevel=1.4 -dPDFSETTINGS=/screen -sOutputFile=%s %s'
cmd = cmd % (pdf_output, pdf_in)
os.system(cmd) |
def formatException(type, value, tb):
    """
    Format a traceback, darkening frames from site-packages directories.

    Lines whose ``File "..."`` header points into one of the interpreter's
    site-packages paths are kept verbatim; every other line has its stripped
    core colored yellow so user code stands out.
    https://stackoverflow.com/a/46071447/5156190

    :param type: exception class
    :param value: exception instance
    :param tb: traceback object
    :return: the formatted, colorized traceback as a single string
    """
    # Absolute paths to site-packages (sys.path[0] is the script's own
    # directory, so it is skipped).
    packages = tuple(join(abspath(p), "") for p in sys.path[1:])
    # Highlight lines not referring to files in site-packages
    lines = []
    for line in format_exception(type, value, tb):
        matches = re.search(r"^  File \"([^\"]+)\", line \d+, in .+", line)
        if matches and matches.group(1).startswith(packages):
            # Library frame: keep as-is. The original did `lines += line`,
            # which appended each *character* of the string and only worked
            # because of the final "".join; append the whole line instead.
            lines.append(line)
        else:
            # User frame: color the stripped core, keep surrounding whitespace.
            matches = re.search(r"^(\s*)(.*?)(\s*)$", line, re.DOTALL)
            lines.append(matches.group(1) + colored(matches.group(2), "yellow") + matches.group(3))
    return "".join(lines).rstrip()
constant[
Format traceback, darkening entries from global site-packages directories
and user-specific site-packages directory.
https://stackoverflow.com/a/46071447/5156190
]
variable[packages] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da18f09c550>]]
variable[lines] assign[=] list[[]]
for taget[name[line]] in starred[call[name[format_exception], parameter[name[type], name[value], name[tb]]]] begin[:]
variable[matches] assign[=] call[name[re].search, parameter[constant[^ File \"([^\"]+)\", line \d+, in .+], name[line]]]
if <ast.BoolOp object at 0x7da18f09f310> begin[:]
<ast.AugAssign object at 0x7da1b13597e0>
return[call[call[constant[].join, parameter[name[lines]]].rstrip, parameter[]]] | keyword[def] identifier[formatException] ( identifier[type] , identifier[value] , identifier[tb] ):
literal[string]
identifier[packages] = identifier[tuple] ( identifier[join] ( identifier[abspath] ( identifier[p] ), literal[string] ) keyword[for] identifier[p] keyword[in] identifier[sys] . identifier[path] [ literal[int] :])
identifier[lines] =[]
keyword[for] identifier[line] keyword[in] identifier[format_exception] ( identifier[type] , identifier[value] , identifier[tb] ):
identifier[matches] = identifier[re] . identifier[search] ( literal[string] , identifier[line] )
keyword[if] identifier[matches] keyword[and] identifier[matches] . identifier[group] ( literal[int] ). identifier[startswith] ( identifier[packages] ):
identifier[lines] += identifier[line]
keyword[else] :
identifier[matches] = identifier[re] . identifier[search] ( literal[string] , identifier[line] , identifier[re] . identifier[DOTALL] )
identifier[lines] . identifier[append] ( identifier[matches] . identifier[group] ( literal[int] )+ identifier[colored] ( identifier[matches] . identifier[group] ( literal[int] ), literal[string] )+ identifier[matches] . identifier[group] ( literal[int] ))
keyword[return] literal[string] . identifier[join] ( identifier[lines] ). identifier[rstrip] () | def formatException(type, value, tb):
"""
Format traceback, darkening entries from global site-packages directories
and user-specific site-packages directory.
https://stackoverflow.com/a/46071447/5156190
"""
# Absolute paths to site-packages
packages = tuple((join(abspath(p), '') for p in sys.path[1:]))
# Highlight lines not referring to files in site-packages
lines = []
for line in format_exception(type, value, tb):
matches = re.search('^ File \\"([^\\"]+)\\", line \\d+, in .+', line)
if matches and matches.group(1).startswith(packages):
lines += line # depends on [control=['if'], data=[]]
else:
matches = re.search('^(\\s*)(.*?)(\\s*)$', line, re.DOTALL)
lines.append(matches.group(1) + colored(matches.group(2), 'yellow') + matches.group(3)) # depends on [control=['for'], data=['line']]
return ''.join(lines).rstrip() |
def get_month_namedays(self, month=None):
    """Return the tuple of names for the given month (1-12).

    Falls back to the current calendar month when *month* is None.
    """
    selected = datetime.now().month if month is None else month
    # NAMEDAYS is zero-indexed, months are one-indexed.
    return self.NAMEDAYS[selected - 1]
return self.NAMEDAYS[month-1] | def function[get_month_namedays, parameter[self, month]]:
constant[Return names as a tuple based on given month.
If no month given, use current one]
if compare[name[month] is constant[None]] begin[:]
variable[month] assign[=] call[name[datetime].now, parameter[]].month
return[call[name[self].NAMEDAYS][binary_operation[name[month] - constant[1]]]] | keyword[def] identifier[get_month_namedays] ( identifier[self] , identifier[month] = keyword[None] ):
literal[string]
keyword[if] identifier[month] keyword[is] keyword[None] :
identifier[month] = identifier[datetime] . identifier[now] (). identifier[month]
keyword[return] identifier[self] . identifier[NAMEDAYS] [ identifier[month] - literal[int] ] | def get_month_namedays(self, month=None):
"""Return names as a tuple based on given month.
If no month given, use current one"""
if month is None:
month = datetime.now().month # depends on [control=['if'], data=['month']]
return self.NAMEDAYS[month - 1] |
def _is_shortcut(self, event):
    """
    Checks whether *event* is the completion shortcut: the trigger key
    pressed together with Ctrl (MetaModifier is used on macOS, where Qt
    maps Ctrl/Cmd differently).

    :param event: QKeyEvent
    :return: bool
    """
    if sys.platform == 'darwin':
        required = QtCore.Qt.MetaModifier
    else:
        required = QtCore.Qt.ControlModifier
    has_modifier = int(event.modifiers() & required) == required
    return event.key() == self._trigger_key and has_modifier
constant[
Checks if the event's key and modifiers make the completion shortcut
(Ctrl+Space)
:param event: QKeyEvent
:return: bool
]
variable[modifier] assign[=] <ast.IfExp object at 0x7da20c7c9b40>
variable[valid_modifier] assign[=] compare[call[name[int], parameter[binary_operation[call[name[event].modifiers, parameter[]] <ast.BitAnd object at 0x7da2590d6b60> name[modifier]]]] equal[==] name[modifier]]
variable[valid_key] assign[=] compare[call[name[event].key, parameter[]] equal[==] name[self]._trigger_key]
return[<ast.BoolOp object at 0x7da20c7cb700>] | keyword[def] identifier[_is_shortcut] ( identifier[self] , identifier[event] ):
literal[string]
identifier[modifier] =( identifier[QtCore] . identifier[Qt] . identifier[MetaModifier] keyword[if] identifier[sys] . identifier[platform] == literal[string] keyword[else]
identifier[QtCore] . identifier[Qt] . identifier[ControlModifier] )
identifier[valid_modifier] = identifier[int] ( identifier[event] . identifier[modifiers] ()& identifier[modifier] )== identifier[modifier]
identifier[valid_key] = identifier[event] . identifier[key] ()== identifier[self] . identifier[_trigger_key]
keyword[return] identifier[valid_key] keyword[and] identifier[valid_modifier] | def _is_shortcut(self, event):
"""
Checks if the event's key and modifiers make the completion shortcut
(Ctrl+Space)
:param event: QKeyEvent
:return: bool
"""
modifier = QtCore.Qt.MetaModifier if sys.platform == 'darwin' else QtCore.Qt.ControlModifier
valid_modifier = int(event.modifiers() & modifier) == modifier
valid_key = event.key() == self._trigger_key
return valid_key and valid_modifier |
def p_Body(p):
    '''
    Body : Line
    | Body Line
    '''
    # PLY grammar action: a lone Line starts a new Body, otherwise the
    # existing Body is grown with the next Line. NOTE: the docstring IS the
    # grammar rule that PLY parses — it must stay exactly as written.
    if isinstance(p[1], Body):
        p[0] = Body(p[1], p[2])
    else:
        p[0] = Body(None, p[1])
constant[
Body : Line
| Body Line
]
if <ast.UnaryOp object at 0x7da18bcc8fd0> begin[:]
call[name[p]][constant[0]] assign[=] call[name[Body], parameter[constant[None], call[name[p]][constant[1]]]] | keyword[def] identifier[p_Body] ( identifier[p] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[p] [ literal[int] ], identifier[Body] ):
identifier[p] [ literal[int] ]= identifier[Body] ( keyword[None] , identifier[p] [ literal[int] ])
keyword[else] :
identifier[p] [ literal[int] ]= identifier[Body] ( identifier[p] [ literal[int] ], identifier[p] [ literal[int] ]) | def p_Body(p):
"""
Body : Line
| Body Line
"""
if not isinstance(p[1], Body):
p[0] = Body(None, p[1]) # depends on [control=['if'], data=[]]
else:
p[0] = Body(p[1], p[2]) |
def read_file(self, filepath=None, filename=None):
    """
    Read JSON content from a file and return it as a dict.

    Falls back to ``self.filepath``/``self.filename`` when the arguments
    are omitted, then normalizes the ``.json`` suffix and the trailing
    path separator before delegating to ``self._read_json``.

    :param filepath: Path where the file is
    :param filename: File name
    :return: Dictionary read from the file
    :raises EnvironmentError, ValueError
    """
    target_name = self._ends_with(filename if filename else self.filename, ".json")
    target_path = self._ends_with(filepath if filepath else self.filepath, os.path.sep)
    try:
        return self._read_json(target_path, target_name)
    except EnvironmentError as error:
        self.logger.error("Error while opening or reading the file: {}".format(error))
        raise
    except ValueError as error:
        self.logger.error("File contents cannot be decoded to JSON: {}".format(error))
        raise
constant[
Tries to read JSON content from filename and convert it to a dict.
:param filepath: Path where the file is
:param filename: File name
:return: Dictionary read from the file
:raises EnvironmentError, ValueError
]
variable[name] assign[=] <ast.IfExp object at 0x7da1b0ed1e10>
variable[path] assign[=] <ast.IfExp object at 0x7da1b0ed17e0>
variable[name] assign[=] call[name[self]._ends_with, parameter[name[name], constant[.json]]]
variable[path] assign[=] call[name[self]._ends_with, parameter[name[path], name[os].path.sep]]
<ast.Try object at 0x7da1b0ed10f0> | keyword[def] identifier[read_file] ( identifier[self] , identifier[filepath] = keyword[None] , identifier[filename] = keyword[None] ):
literal[string]
identifier[name] = identifier[filename] keyword[if] identifier[filename] keyword[else] identifier[self] . identifier[filename]
identifier[path] = identifier[filepath] keyword[if] identifier[filepath] keyword[else] identifier[self] . identifier[filepath]
identifier[name] = identifier[self] . identifier[_ends_with] ( identifier[name] , literal[string] )
identifier[path] = identifier[self] . identifier[_ends_with] ( identifier[path] , identifier[os] . identifier[path] . identifier[sep] )
keyword[try] :
keyword[return] identifier[self] . identifier[_read_json] ( identifier[path] , identifier[name] )
keyword[except] identifier[EnvironmentError] keyword[as] identifier[error] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[error] ))
keyword[raise]
keyword[except] identifier[ValueError] keyword[as] identifier[error] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[error] ))
keyword[raise] | def read_file(self, filepath=None, filename=None):
"""
Tries to read JSON content from filename and convert it to a dict.
:param filepath: Path where the file is
:param filename: File name
:return: Dictionary read from the file
:raises EnvironmentError, ValueError
"""
name = filename if filename else self.filename
path = filepath if filepath else self.filepath
name = self._ends_with(name, '.json')
path = self._ends_with(path, os.path.sep)
try:
return self._read_json(path, name) # depends on [control=['try'], data=[]]
except EnvironmentError as error:
self.logger.error('Error while opening or reading the file: {}'.format(error))
raise # depends on [control=['except'], data=['error']]
except ValueError as error:
self.logger.error('File contents cannot be decoded to JSON: {}'.format(error))
raise # depends on [control=['except'], data=['error']] |
def _receive(self, root, directory, dirs, files, include, exclude):
    """Internal function processing each yield from os.walk.

    Filters one directory level through the include/exclude FileSetState
    machinery and prunes ``dirs`` in place so the walk skips subtrees that
    cannot match.

    :param root: top directory the walk started from
    :param directory: current directory being processed
    :param dirs: mutable list of subdirectory names (pruned in place)
    :param files: file names found in the current directory
    :param include: parent include FileSetState, or a falsy value at the top
    :param exclude: parent exclude FileSetState, or a falsy value at the top
    :return: tuple of (matched file names as a set, include state, exclude state)
    """
    self._received += 1
    if not self.symlinks:
        # Drop symlinked files unless symlink traversal is enabled.
        where = root + os.path.sep + directory + os.path.sep
        files = [
            file_name for file_name in files
            if not os.path.islink(where + file_name)
        ]
    # NOTE(review): FileSetState appears to chain a per-directory state off
    # the parent one, falling back to the instance-wide self.include /
    # self.exclude patterns at the top level — confirm in FileSetState.
    include = FileSetState("Include", directory, include, None
                           if include else self.include)
    exclude = FileSetState("Exclude", directory, exclude, None
                           if exclude else self.exclude)
    if exclude.matches_all_files_all_subdirs():
        # Exclude everything and do no traverse any subdirectories
        del dirs[0:]
        matched = set()
    else:
        if include.no_possible_matches_in_subdirs():
            # Do no traverse any subdirectories
            del dirs[0:]
        # Files must match an include pattern and not match any exclude.
        matched = include.match(set(files))
        matched -= exclude.match(matched)
    return matched, include, exclude
constant[Internal function processing each yield from os.walk.]
<ast.AugAssign object at 0x7da1b0ad82e0>
if <ast.UnaryOp object at 0x7da1b0ad8610> begin[:]
variable[where] assign[=] binary_operation[binary_operation[binary_operation[name[root] + name[os].path.sep] + name[directory]] + name[os].path.sep]
variable[files] assign[=] <ast.ListComp object at 0x7da1b0ad8f40>
variable[include] assign[=] call[name[FileSetState], parameter[constant[Include], name[directory], name[include], <ast.IfExp object at 0x7da1b0adb730>]]
variable[exclude] assign[=] call[name[FileSetState], parameter[constant[Exclude], name[directory], name[exclude], <ast.IfExp object at 0x7da1b0ad9f60>]]
if call[name[exclude].matches_all_files_all_subdirs, parameter[]] begin[:]
<ast.Delete object at 0x7da1b0ad9000>
variable[matched] assign[=] call[name[set], parameter[]]
return[tuple[[<ast.Name object at 0x7da1b0ae2800>, <ast.Name object at 0x7da1b0ae1db0>, <ast.Name object at 0x7da1b0ae0fd0>]]] | keyword[def] identifier[_receive] ( identifier[self] , identifier[root] , identifier[directory] , identifier[dirs] , identifier[files] , identifier[include] , identifier[exclude] ):
literal[string]
identifier[self] . identifier[_received] += literal[int]
keyword[if] keyword[not] identifier[self] . identifier[symlinks] :
identifier[where] = identifier[root] + identifier[os] . identifier[path] . identifier[sep] + identifier[directory] + identifier[os] . identifier[path] . identifier[sep]
identifier[files] =[
identifier[file_name] keyword[for] identifier[file_name] keyword[in] identifier[files]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[islink] ( identifier[where] + identifier[file_name] )
]
identifier[include] = identifier[FileSetState] ( literal[string] , identifier[directory] , identifier[include] , keyword[None]
keyword[if] identifier[include] keyword[else] identifier[self] . identifier[include] )
identifier[exclude] = identifier[FileSetState] ( literal[string] , identifier[directory] , identifier[exclude] , keyword[None]
keyword[if] identifier[exclude] keyword[else] identifier[self] . identifier[exclude] )
keyword[if] identifier[exclude] . identifier[matches_all_files_all_subdirs] ():
keyword[del] identifier[dirs] [ literal[int] :]
identifier[matched] = identifier[set] ()
keyword[else] :
keyword[if] identifier[include] . identifier[no_possible_matches_in_subdirs] ():
keyword[del] identifier[dirs] [ literal[int] :]
identifier[matched] = identifier[include] . identifier[match] ( identifier[set] ( identifier[files] ))
identifier[matched] -= identifier[exclude] . identifier[match] ( identifier[matched] )
keyword[return] identifier[matched] , identifier[include] , identifier[exclude] | def _receive(self, root, directory, dirs, files, include, exclude):
"""Internal function processing each yield from os.walk."""
self._received += 1
if not self.symlinks:
where = root + os.path.sep + directory + os.path.sep
files = [file_name for file_name in files if not os.path.islink(where + file_name)] # depends on [control=['if'], data=[]]
include = FileSetState('Include', directory, include, None if include else self.include)
exclude = FileSetState('Exclude', directory, exclude, None if exclude else self.exclude)
if exclude.matches_all_files_all_subdirs():
# Exclude everything and do no traverse any subdirectories
del dirs[0:]
matched = set() # depends on [control=['if'], data=[]]
else:
if include.no_possible_matches_in_subdirs():
# Do no traverse any subdirectories
del dirs[0:] # depends on [control=['if'], data=[]]
matched = include.match(set(files))
matched -= exclude.match(matched)
return (matched, include, exclude) |
def to_array(self):
    """
    Serializes this TextMessage to a dictionary.

    Note: the receiver/reply_id dispatch was corrupted in the original (the
    ``elif`` lines had been folded into trailing comments, leaving
    ``isinstance(self.receiver, None)`` and an unconditional
    ``None(self.receiver)`` call, both of which raise TypeError at runtime).
    This restores the intended type dispatch.

    :return: dictionary representation of this object.
    :rtype: dict
    """
    array = super(TextMessage, self).to_array()
    array['text'] = u(self.text)  # py2: type unicode, py3: type str
    if self.receiver is not None:
        if isinstance(self.receiver, str):
            array['chat_id'] = u(self.receiver)  # py2: type unicode, py3: type str
        elif isinstance(self.receiver, int):
            array['chat_id'] = int(self.receiver)  # type int
        else:
            raise TypeError('Unknown type, must be one of None, str, int.')
    # end if
    if self.reply_id is not None:
        if isinstance(self.reply_id, DEFAULT_MESSAGE_ID):
            array['reply_to_message_id'] = DEFAULT_MESSAGE_ID(self.reply_id)  # type DEFAULT_MESSAGE_ID
        elif isinstance(self.reply_id, int):
            array['reply_to_message_id'] = int(self.reply_id)  # type int
        else:
            raise TypeError('Unknown type, must be one of DEFAULT_MESSAGE_ID, int.')
    # end if
    if self.parse_mode is not None:
        array['parse_mode'] = u(self.parse_mode)  # py2: type unicode, py3: type str
    if self.disable_web_page_preview is not None:
        array['disable_web_page_preview'] = bool(self.disable_web_page_preview)  # type bool
    if self.disable_notification is not None:
        array['disable_notification'] = bool(self.disable_notification)  # type bool
    if self.reply_markup is not None:
        # All supported markup types serialize themselves via to_array().
        if isinstance(self.reply_markup, (InlineKeyboardMarkup, ReplyKeyboardMarkup,
                                          ReplyKeyboardRemove, ForceReply)):
            array['reply_markup'] = self.reply_markup.to_array()
        else:
            raise TypeError('Unknown type, must be one of InlineKeyboardMarkup, ReplyKeyboardMarkup, ReplyKeyboardRemove, ForceReply.')
    # end if
    return array
constant[
Serializes this TextMessage to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
]
variable[array] assign[=] call[call[name[super], parameter[name[TextMessage], name[self]]].to_array, parameter[]]
call[name[array]][constant[text]] assign[=] call[name[u], parameter[name[self].text]]
if compare[name[self].receiver is_not constant[None]] begin[:]
if call[name[isinstance], parameter[name[self].receiver, constant[None]]] begin[:]
call[name[array]][constant[chat_id]] assign[=] call[constant[None], parameter[name[self].receiver]]
call[name[array]][constant[chat_id]] assign[=] call[name[u], parameter[name[self].receiver]]
if compare[name[self].reply_id is_not constant[None]] begin[:]
if call[name[isinstance], parameter[name[self].reply_id, name[DEFAULT_MESSAGE_ID]]] begin[:]
call[name[array]][constant[reply_to_message_id]] assign[=] call[name[DEFAULT_MESSAGE_ID], parameter[name[self].reply_id]]
call[name[array]][constant[reply_to_message_id]] assign[=] call[name[int], parameter[name[self].reply_id]]
<ast.Raise object at 0x7da1b0498d00>
if compare[name[self].parse_mode is_not constant[None]] begin[:]
call[name[array]][constant[parse_mode]] assign[=] call[name[u], parameter[name[self].parse_mode]]
if compare[name[self].disable_web_page_preview is_not constant[None]] begin[:]
call[name[array]][constant[disable_web_page_preview]] assign[=] call[name[bool], parameter[name[self].disable_web_page_preview]]
if compare[name[self].disable_notification is_not constant[None]] begin[:]
call[name[array]][constant[disable_notification]] assign[=] call[name[bool], parameter[name[self].disable_notification]]
if compare[name[self].reply_markup is_not constant[None]] begin[:]
if call[name[isinstance], parameter[name[self].reply_markup, name[InlineKeyboardMarkup]]] begin[:]
call[name[array]][constant[reply_markup]] assign[=] call[name[self].reply_markup.to_array, parameter[]]
return[name[array]] | keyword[def] identifier[to_array] ( identifier[self] ):
literal[string]
identifier[array] = identifier[super] ( identifier[TextMessage] , identifier[self] ). identifier[to_array] ()
identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[text] )
keyword[if] identifier[self] . identifier[receiver] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[self] . identifier[receiver] , keyword[None] ):
identifier[array] [ literal[string] ]= keyword[None] ( identifier[self] . identifier[receiver] )
identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[receiver] )
keyword[elif] identifier[isinstance] ( identifier[self] . identifier[receiver] , identifier[int] ):
identifier[array] [ literal[string] ]= identifier[int] ( identifier[self] . identifier[receiver] )
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[self] . identifier[reply_id] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[self] . identifier[reply_id] , identifier[DEFAULT_MESSAGE_ID] ):
identifier[array] [ literal[string] ]= identifier[DEFAULT_MESSAGE_ID] ( identifier[self] . identifier[reply_id] )
identifier[array] [ literal[string] ]= identifier[int] ( identifier[self] . identifier[reply_id] )
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[self] . identifier[parse_mode] keyword[is] keyword[not] keyword[None] :
identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[parse_mode] )
keyword[if] identifier[self] . identifier[disable_web_page_preview] keyword[is] keyword[not] keyword[None] :
identifier[array] [ literal[string] ]= identifier[bool] ( identifier[self] . identifier[disable_web_page_preview] )
keyword[if] identifier[self] . identifier[disable_notification] keyword[is] keyword[not] keyword[None] :
identifier[array] [ literal[string] ]= identifier[bool] ( identifier[self] . identifier[disable_notification] )
keyword[if] identifier[self] . identifier[reply_markup] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[self] . identifier[reply_markup] , identifier[InlineKeyboardMarkup] ):
identifier[array] [ literal[string] ]= identifier[self] . identifier[reply_markup] . identifier[to_array] ()
keyword[elif] identifier[isinstance] ( identifier[self] . identifier[reply_markup] , identifier[ReplyKeyboardMarkup] ):
identifier[array] [ literal[string] ]= identifier[self] . identifier[reply_markup] . identifier[to_array] ()
keyword[elif] identifier[isinstance] ( identifier[self] . identifier[reply_markup] , identifier[ReplyKeyboardRemove] ):
identifier[array] [ literal[string] ]= identifier[self] . identifier[reply_markup] . identifier[to_array] ()
keyword[elif] identifier[isinstance] ( identifier[self] . identifier[reply_markup] , identifier[ForceReply] ):
identifier[array] [ literal[string] ]= identifier[self] . identifier[reply_markup] . identifier[to_array] ()
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[return] identifier[array] | def to_array(self):
"""
Serializes this TextMessage to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
"""
array = super(TextMessage, self).to_array()
array['text'] = u(self.text) # py2: type unicode, py3: type str
if self.receiver is not None:
if isinstance(self.receiver, None):
array['chat_id'] = None(self.receiver) # type Noneelif isinstance(self.receiver, str):
array['chat_id'] = u(self.receiver) # py2: type unicode, py3: type str # depends on [control=['if'], data=[]]
elif isinstance(self.receiver, int):
array['chat_id'] = int(self.receiver) # type intelse:
raise TypeError('Unknown type, must be one of None, str, int.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# end if
if self.reply_id is not None:
if isinstance(self.reply_id, DEFAULT_MESSAGE_ID):
array['reply_to_message_id'] = DEFAULT_MESSAGE_ID(self.reply_id) # type DEFAULT_MESSAGE_IDelif isinstance(self.reply_id, int):
array['reply_to_message_id'] = int(self.reply_id) # type intelse:
raise TypeError('Unknown type, must be one of DEFAULT_MESSAGE_ID, int.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# end if
if self.parse_mode is not None:
array['parse_mode'] = u(self.parse_mode) # py2: type unicode, py3: type str # depends on [control=['if'], data=[]]
if self.disable_web_page_preview is not None:
array['disable_web_page_preview'] = bool(self.disable_web_page_preview) # type bool # depends on [control=['if'], data=[]]
if self.disable_notification is not None:
array['disable_notification'] = bool(self.disable_notification) # type bool # depends on [control=['if'], data=[]]
if self.reply_markup is not None:
if isinstance(self.reply_markup, InlineKeyboardMarkup):
array['reply_markup'] = self.reply_markup.to_array() # type InlineKeyboardMarkup # depends on [control=['if'], data=[]]
elif isinstance(self.reply_markup, ReplyKeyboardMarkup):
array['reply_markup'] = self.reply_markup.to_array() # type ReplyKeyboardMarkup # depends on [control=['if'], data=[]]
elif isinstance(self.reply_markup, ReplyKeyboardRemove):
array['reply_markup'] = self.reply_markup.to_array() # type ReplyKeyboardRemove # depends on [control=['if'], data=[]]
elif isinstance(self.reply_markup, ForceReply):
array['reply_markup'] = self.reply_markup.to_array() # type ForceReply # depends on [control=['if'], data=[]]
else:
raise TypeError('Unknown type, must be one of InlineKeyboardMarkup, ReplyKeyboardMarkup, ReplyKeyboardRemove, ForceReply.') # depends on [control=['if'], data=[]]
# end if
return array |
def overall_rating(object, category=""):
    """
    Usage:
        {% overall_rating obj [category] as var %}

    Returns the overall rating of *object* as a Decimal; with a category
    the single OverallRating row is used, otherwise ratings are averaged.
    Returns 0 when no rating row exists for the category.
    """
    try:
        ct = ContentType.objects.get_for_model(object)
        if category:
            row = OverallRating.objects.get(
                object_id=object.pk,
                content_type=ct,
                category=category_value(object, category),
            )
            value = row.rating or 0
        else:
            value = OverallRating.objects.filter(
                object_id=object.pk,
                content_type=ct,
            ).aggregate(r=models.Avg("rating"))["r"]
        return Decimal(str(value or "0"))
    except OverallRating.DoesNotExist:
        return 0
constant[
Usage:
{% overall_rating obj [category] as var %}
]
<ast.Try object at 0x7da1b11a22c0>
return[name[rating]] | keyword[def] identifier[overall_rating] ( identifier[object] , identifier[category] = literal[string] ):
literal[string]
keyword[try] :
identifier[ct] = identifier[ContentType] . identifier[objects] . identifier[get_for_model] ( identifier[object] )
keyword[if] identifier[category] :
identifier[rating] = identifier[OverallRating] . identifier[objects] . identifier[get] (
identifier[object_id] = identifier[object] . identifier[pk] ,
identifier[content_type] = identifier[ct] ,
identifier[category] = identifier[category_value] ( identifier[object] , identifier[category] )
). identifier[rating] keyword[or] literal[int]
keyword[else] :
identifier[rating] = identifier[OverallRating] . identifier[objects] . identifier[filter] (
identifier[object_id] = identifier[object] . identifier[pk] ,
identifier[content_type] = identifier[ct]
). identifier[aggregate] ( identifier[r] = identifier[models] . identifier[Avg] ( literal[string] ))[ literal[string] ]
identifier[rating] = identifier[Decimal] ( identifier[str] ( identifier[rating] keyword[or] literal[string] ))
keyword[except] identifier[OverallRating] . identifier[DoesNotExist] :
identifier[rating] = literal[int]
keyword[return] identifier[rating] | def overall_rating(object, category=''):
"""
Usage:
{% overall_rating obj [category] as var %}
"""
try:
ct = ContentType.objects.get_for_model(object)
if category:
rating = OverallRating.objects.get(object_id=object.pk, content_type=ct, category=category_value(object, category)).rating or 0 # depends on [control=['if'], data=[]]
else:
rating = OverallRating.objects.filter(object_id=object.pk, content_type=ct).aggregate(r=models.Avg('rating'))['r']
rating = Decimal(str(rating or '0')) # depends on [control=['try'], data=[]]
except OverallRating.DoesNotExist:
rating = 0 # depends on [control=['except'], data=[]]
return rating |
def loop_node_skeleton(self, test, node):
    """Common handling of looped structures, while and for."""
    body_stmts = self.stmt_star_handler(
        node.body,
        prev_node_to_avoid=self.nodes[-1]
    )
    test.connect(body_stmts.first_statement)
    test.connect_predecessors(body_stmts.last_statements)

    # last_nodes is later used to connect the loop to the next statement in
    # the parent node; that wiring happens in stmt_star_handler.
    last_nodes = list(body_stmts.break_statements)
    if node.orelse:
        orelse_stmts = self.stmt_star_handler(
            node.orelse,
            prev_node_to_avoid=self.nodes[-1]
        )
        test.connect(orelse_stmts.first_statement)
        last_nodes.extend(orelse_stmts.last_statements)
    else:
        # Without an orelse, the test itself needs an edge to the next node.
        last_nodes.append(test)
    return ControlFlowNode(test, last_nodes, [])
constant[Common handling of looped structures, while and for.]
variable[body_connect_stmts] assign[=] call[name[self].stmt_star_handler, parameter[name[node].body]]
call[name[test].connect, parameter[name[body_connect_stmts].first_statement]]
call[name[test].connect_predecessors, parameter[name[body_connect_stmts].last_statements]]
variable[last_nodes] assign[=] call[name[list], parameter[]]
call[name[last_nodes].extend, parameter[name[body_connect_stmts].break_statements]]
if name[node].orelse begin[:]
variable[orelse_connect_stmts] assign[=] call[name[self].stmt_star_handler, parameter[name[node].orelse]]
call[name[test].connect, parameter[name[orelse_connect_stmts].first_statement]]
call[name[last_nodes].extend, parameter[name[orelse_connect_stmts].last_statements]]
return[call[name[ControlFlowNode], parameter[name[test], name[last_nodes], call[name[list], parameter[]]]]] | keyword[def] identifier[loop_node_skeleton] ( identifier[self] , identifier[test] , identifier[node] ):
literal[string]
identifier[body_connect_stmts] = identifier[self] . identifier[stmt_star_handler] (
identifier[node] . identifier[body] ,
identifier[prev_node_to_avoid] = identifier[self] . identifier[nodes] [- literal[int] ]
)
identifier[test] . identifier[connect] ( identifier[body_connect_stmts] . identifier[first_statement] )
identifier[test] . identifier[connect_predecessors] ( identifier[body_connect_stmts] . identifier[last_statements] )
identifier[last_nodes] = identifier[list] ()
identifier[last_nodes] . identifier[extend] ( identifier[body_connect_stmts] . identifier[break_statements] )
keyword[if] identifier[node] . identifier[orelse] :
identifier[orelse_connect_stmts] = identifier[self] . identifier[stmt_star_handler] (
identifier[node] . identifier[orelse] ,
identifier[prev_node_to_avoid] = identifier[self] . identifier[nodes] [- literal[int] ]
)
identifier[test] . identifier[connect] ( identifier[orelse_connect_stmts] . identifier[first_statement] )
identifier[last_nodes] . identifier[extend] ( identifier[orelse_connect_stmts] . identifier[last_statements] )
keyword[else] :
identifier[last_nodes] . identifier[append] ( identifier[test] )
keyword[return] identifier[ControlFlowNode] ( identifier[test] , identifier[last_nodes] , identifier[list] ()) | def loop_node_skeleton(self, test, node):
"""Common handling of looped structures, while and for."""
body_connect_stmts = self.stmt_star_handler(node.body, prev_node_to_avoid=self.nodes[-1])
test.connect(body_connect_stmts.first_statement)
test.connect_predecessors(body_connect_stmts.last_statements)
# last_nodes is used for making connections to the next node in the parent node
# this is handled in stmt_star_handler
last_nodes = list()
last_nodes.extend(body_connect_stmts.break_statements)
if node.orelse:
orelse_connect_stmts = self.stmt_star_handler(node.orelse, prev_node_to_avoid=self.nodes[-1])
test.connect(orelse_connect_stmts.first_statement)
last_nodes.extend(orelse_connect_stmts.last_statements) # depends on [control=['if'], data=[]]
else:
last_nodes.append(test) # if there is no orelse, test needs an edge to the next_node
return ControlFlowNode(test, last_nodes, list()) |
def set_version(self, new_version: str):
    """
    Set the version for this given file.

    Rewrites every line of ``self.file_path`` that contains
    ``self.magic_line``: the text between the magic prefix and the last
    ``self.strip_end_chars`` characters of the line is replaced with
    *new_version*.  I/O errors are reported to stdout and swallowed, so
    a failed update leaves the file unchanged.

    :param new_version: The new version string to set.
    """
    try:
        # Context manager guarantees the handle is closed even when
        # readlines() raises (the previous version leaked it then).
        with open(self.file_path, 'r') as f:
            lines = f.readlines()
    except Exception as e:
        print(str(e))
        return
    for idx, line in enumerate(lines):
        if self.magic_line in line:
            # NOTE(review): slicing line[0:len(magic_line)] assumes the
            # magic prefix sits at the very start of the line -- confirm
            # against the files this class is pointed at.
            start = len(self.magic_line)
            end = len(line) - self.strip_end_chars
            start_str = line[0:start]
            end_str = line[end:]
            lines[idx] = start_str + new_version + end_str
    try:
        with open(self.file_path, 'w') as f:
            f.writelines(lines)
    except Exception as e:
        print(str(e))
        return
constant[
Set the version for this given file.
:param new_version: The new version string to set.
]
<ast.Try object at 0x7da18ede40a0>
for taget[tuple[[<ast.Name object at 0x7da18ede4370>, <ast.Name object at 0x7da18ede4250>]]] in starred[call[name[enumerate], parameter[name[lines]]]] begin[:]
if compare[name[self].magic_line in name[line]] begin[:]
variable[start] assign[=] call[name[len], parameter[name[self].magic_line]]
variable[end] assign[=] binary_operation[call[name[len], parameter[name[line]]] - name[self].strip_end_chars]
variable[start_str] assign[=] call[name[line]][<ast.Slice object at 0x7da18ede6740>]
variable[end_str] assign[=] call[name[line]][<ast.Slice object at 0x7da18ede6920>]
call[name[lines]][name[idx]] assign[=] binary_operation[binary_operation[name[start_str] + name[new_version]] + name[end_str]]
<ast.Try object at 0x7da18ede7190> | keyword[def] identifier[set_version] ( identifier[self] , identifier[new_version] : identifier[str] ):
literal[string]
keyword[try] :
identifier[f] = identifier[open] ( identifier[self] . identifier[file_path] , literal[string] )
identifier[lines] = identifier[f] . identifier[readlines] ()
identifier[f] . identifier[close] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[print] ( identifier[str] ( identifier[e] ))
keyword[return]
keyword[for] identifier[idx] , identifier[line] keyword[in] identifier[enumerate] ( identifier[lines] ):
keyword[if] identifier[self] . identifier[magic_line] keyword[in] identifier[line] :
identifier[start] = identifier[len] ( identifier[self] . identifier[magic_line] )
identifier[end] = identifier[len] ( identifier[line] )- identifier[self] . identifier[strip_end_chars]
identifier[start_str] = identifier[line] [ literal[int] : identifier[start] ]
identifier[end_str] = identifier[line] [ identifier[end] :]
identifier[lines] [ identifier[idx] ]= identifier[start_str] + identifier[new_version] + identifier[end_str]
keyword[try] :
identifier[f] = identifier[open] ( identifier[self] . identifier[file_path] , literal[string] )
identifier[f] . identifier[writelines] ( identifier[lines] )
identifier[f] . identifier[close] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[print] ( identifier[str] ( identifier[e] ))
keyword[return] | def set_version(self, new_version: str):
"""
Set the version for this given file.
:param new_version: The new version string to set.
"""
try:
f = open(self.file_path, 'r')
lines = f.readlines()
f.close() # depends on [control=['try'], data=[]]
except Exception as e:
print(str(e))
return # depends on [control=['except'], data=['e']]
for (idx, line) in enumerate(lines):
if self.magic_line in line:
start = len(self.magic_line)
end = len(line) - self.strip_end_chars
start_str = line[0:start]
end_str = line[end:]
lines[idx] = start_str + new_version + end_str # depends on [control=['if'], data=['line']] # depends on [control=['for'], data=[]]
try:
f = open(self.file_path, 'w')
f.writelines(lines)
f.close() # depends on [control=['try'], data=[]]
except Exception as e:
print(str(e))
return # depends on [control=['except'], data=['e']] |
def from_Solis(filepath, name=None, parent=None, verbose=True) -> Data:
    """Create a data object from Andor Solis software (ascii exports).
    Parameters
    ----------
    filepath : path-like
        Path to .txt file.
        Can be either a local or remote file (http/ftp).
        Can be compressed with gz/bz2, decompression based on file name.
    name : string (optional)
        Name to give to the created data object. If None, filename is used.
        Default is None.
    parent : WrightTools.Collection (optional)
        Collection to place new data object within. Default is None.
    verbose : boolean (optional)
        Toggle talkback. Default is True.
    Returns
    -------
    data
        New data object.
    """
    # parse filepath
    filestr = os.fspath(filepath)
    filepath = pathlib.Path(filepath)
    if not ".asc" in filepath.suffixes:
        wt_exceptions.WrongFileTypeWarning.warn(filepath, ".asc")
    # parse name
    if not name:
        name = filepath.name.split(".")[0]
    # create data
    # np.DataSource(None) handles remote URLs and gz/bz2 decompression
    # transparently, using a temporary directory.
    ds = np.DataSource(None)
    f = ds.open(filestr, "rt")
    axis0 = []
    arr = []
    attrs = {}
    # Data section: comma-separated rows.  The first column goes to axis0,
    # the remaining columns form one row of the signal array.  A blank line
    # ends the section.
    while True:
        # NOTE(review): strip()[:-1] drops the last character of each row --
        # presumably a trailing comma left by the Solis export; confirm that
        # every data row actually ends with one.
        line = f.readline().strip()[:-1]
        if len(line) == 0:
            break
        else:
            line = line.split(",")
            line = [float(x) for x in line]
            axis0.append(line.pop(0))
            arr.append(line)
    # Footer section: "key: value" metadata lines; reading stops once the
    # third blank line has been seen.  Lines without a ":" are ignored.
    i = 0
    while i < 3:
        line = f.readline().strip()
        if len(line) == 0:
            i += 1
        else:
            try:
                key, val = line.split(":", 1)
            except ValueError:
                pass
            else:
                attrs[key.strip()] = val.strip()
    f.close()
    created = attrs["Date and Time"]  # is this UTC?
    created = time.strptime(created, "%a %b %d %H:%M:%S %Y")
    created = timestamp.TimeStamp(time.mktime(created)).RFC3339
    kwargs = {"name": name, "kind": "Solis", "source": filestr, "created": created}
    if parent is None:
        data = Data(**kwargs)
    else:
        data = parent.create_data(**kwargs)
    arr = np.array(arr)
    arr /= float(attrs["Exposure Time (secs)"])
    # signal has units of Hz because time normalized
    arr = data.create_channel(name="signal", values=arr, signed=False, units="Hz")
    axis0 = np.array(axis0)
    # Zero groove density -> first column is treated as a bare pixel index
    # ("xindex", unitless); otherwise it is a wavelength axis ("wm" in nm).
    if float(attrs["Grating Groove Density (l/mm)"]) == 0:
        xname = "xindex"
        xunits = None
    else:
        xname = "wm"
        xunits = "nm"
    data.create_variable(name=xname, values=axis0[:, None], units=xunits)
    data.create_variable(name="yindex", values=np.arange(arr.shape[1])[None, :], units=None)
    data.transform(data.variables[0].natural_name, "yindex")
    # Copy all footer metadata onto the data object verbatim.
    for key, val in attrs.items():
        data.attrs[key] = val
    # finish
    if verbose:
        print("data created at {0}".format(data.fullpath))
        print(" axes: {0}".format(data.axis_names))
        print(" shape: {0}".format(data.shape))
    return data
constant[Create a data object from Andor Solis software (ascii exports).
Parameters
----------
filepath : path-like
Path to .txt file.
Can be either a local or remote file (http/ftp).
Can be compressed with gz/bz2, decompression based on file name.
name : string (optional)
Name to give to the created data object. If None, filename is used.
Default is None.
parent : WrightTools.Collection (optional)
Collection to place new data object within. Default is None.
verbose : boolean (optional)
Toggle talkback. Default is True.
Returns
-------
data
New data object.
]
variable[filestr] assign[=] call[name[os].fspath, parameter[name[filepath]]]
variable[filepath] assign[=] call[name[pathlib].Path, parameter[name[filepath]]]
if <ast.UnaryOp object at 0x7da1b0cff3a0> begin[:]
call[name[wt_exceptions].WrongFileTypeWarning.warn, parameter[name[filepath], constant[.asc]]]
if <ast.UnaryOp object at 0x7da1b0cffbe0> begin[:]
variable[name] assign[=] call[call[name[filepath].name.split, parameter[constant[.]]]][constant[0]]
variable[ds] assign[=] call[name[np].DataSource, parameter[constant[None]]]
variable[f] assign[=] call[name[ds].open, parameter[name[filestr], constant[rt]]]
variable[axis0] assign[=] list[[]]
variable[arr] assign[=] list[[]]
variable[attrs] assign[=] dictionary[[], []]
while constant[True] begin[:]
variable[line] assign[=] call[call[call[name[f].readline, parameter[]].strip, parameter[]]][<ast.Slice object at 0x7da1b0b7cc70>]
if compare[call[name[len], parameter[name[line]]] equal[==] constant[0]] begin[:]
break
variable[i] assign[=] constant[0]
while compare[name[i] less[<] constant[3]] begin[:]
variable[line] assign[=] call[call[name[f].readline, parameter[]].strip, parameter[]]
if compare[call[name[len], parameter[name[line]]] equal[==] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b0b7d6f0>
call[name[f].close, parameter[]]
variable[created] assign[=] call[name[attrs]][constant[Date and Time]]
variable[created] assign[=] call[name[time].strptime, parameter[name[created], constant[%a %b %d %H:%M:%S %Y]]]
variable[created] assign[=] call[name[timestamp].TimeStamp, parameter[call[name[time].mktime, parameter[name[created]]]]].RFC3339
variable[kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b0b7ef50>, <ast.Constant object at 0x7da1b0b7f5b0>, <ast.Constant object at 0x7da1b0b7d8d0>, <ast.Constant object at 0x7da1b0b7e8c0>], [<ast.Name object at 0x7da1b0b7e6b0>, <ast.Constant object at 0x7da1b0b7e590>, <ast.Name object at 0x7da1b0b7eb90>, <ast.Name object at 0x7da1b0b7d1b0>]]
if compare[name[parent] is constant[None]] begin[:]
variable[data] assign[=] call[name[Data], parameter[]]
variable[arr] assign[=] call[name[np].array, parameter[name[arr]]]
<ast.AugAssign object at 0x7da1b0b7de10>
variable[arr] assign[=] call[name[data].create_channel, parameter[]]
variable[axis0] assign[=] call[name[np].array, parameter[name[axis0]]]
if compare[call[name[float], parameter[call[name[attrs]][constant[Grating Groove Density (l/mm)]]]] equal[==] constant[0]] begin[:]
variable[xname] assign[=] constant[xindex]
variable[xunits] assign[=] constant[None]
call[name[data].create_variable, parameter[]]
call[name[data].create_variable, parameter[]]
call[name[data].transform, parameter[call[name[data].variables][constant[0]].natural_name, constant[yindex]]]
for taget[tuple[[<ast.Name object at 0x7da20c6a9ba0>, <ast.Name object at 0x7da20c6a9360>]]] in starred[call[name[attrs].items, parameter[]]] begin[:]
call[name[data].attrs][name[key]] assign[=] name[val]
if name[verbose] begin[:]
call[name[print], parameter[call[constant[data created at {0}].format, parameter[name[data].fullpath]]]]
call[name[print], parameter[call[constant[ axes: {0}].format, parameter[name[data].axis_names]]]]
call[name[print], parameter[call[constant[ shape: {0}].format, parameter[name[data].shape]]]]
return[name[data]] | keyword[def] identifier[from_Solis] ( identifier[filepath] , identifier[name] = keyword[None] , identifier[parent] = keyword[None] , identifier[verbose] = keyword[True] )-> identifier[Data] :
literal[string]
identifier[filestr] = identifier[os] . identifier[fspath] ( identifier[filepath] )
identifier[filepath] = identifier[pathlib] . identifier[Path] ( identifier[filepath] )
keyword[if] keyword[not] literal[string] keyword[in] identifier[filepath] . identifier[suffixes] :
identifier[wt_exceptions] . identifier[WrongFileTypeWarning] . identifier[warn] ( identifier[filepath] , literal[string] )
keyword[if] keyword[not] identifier[name] :
identifier[name] = identifier[filepath] . identifier[name] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[ds] = identifier[np] . identifier[DataSource] ( keyword[None] )
identifier[f] = identifier[ds] . identifier[open] ( identifier[filestr] , literal[string] )
identifier[axis0] =[]
identifier[arr] =[]
identifier[attrs] ={}
keyword[while] keyword[True] :
identifier[line] = identifier[f] . identifier[readline] (). identifier[strip] ()[:- literal[int] ]
keyword[if] identifier[len] ( identifier[line] )== literal[int] :
keyword[break]
keyword[else] :
identifier[line] = identifier[line] . identifier[split] ( literal[string] )
identifier[line] =[ identifier[float] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[line] ]
identifier[axis0] . identifier[append] ( identifier[line] . identifier[pop] ( literal[int] ))
identifier[arr] . identifier[append] ( identifier[line] )
identifier[i] = literal[int]
keyword[while] identifier[i] < literal[int] :
identifier[line] = identifier[f] . identifier[readline] (). identifier[strip] ()
keyword[if] identifier[len] ( identifier[line] )== literal[int] :
identifier[i] += literal[int]
keyword[else] :
keyword[try] :
identifier[key] , identifier[val] = identifier[line] . identifier[split] ( literal[string] , literal[int] )
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[else] :
identifier[attrs] [ identifier[key] . identifier[strip] ()]= identifier[val] . identifier[strip] ()
identifier[f] . identifier[close] ()
identifier[created] = identifier[attrs] [ literal[string] ]
identifier[created] = identifier[time] . identifier[strptime] ( identifier[created] , literal[string] )
identifier[created] = identifier[timestamp] . identifier[TimeStamp] ( identifier[time] . identifier[mktime] ( identifier[created] )). identifier[RFC3339]
identifier[kwargs] ={ literal[string] : identifier[name] , literal[string] : literal[string] , literal[string] : identifier[filestr] , literal[string] : identifier[created] }
keyword[if] identifier[parent] keyword[is] keyword[None] :
identifier[data] = identifier[Data] (** identifier[kwargs] )
keyword[else] :
identifier[data] = identifier[parent] . identifier[create_data] (** identifier[kwargs] )
identifier[arr] = identifier[np] . identifier[array] ( identifier[arr] )
identifier[arr] /= identifier[float] ( identifier[attrs] [ literal[string] ])
identifier[arr] = identifier[data] . identifier[create_channel] ( identifier[name] = literal[string] , identifier[values] = identifier[arr] , identifier[signed] = keyword[False] , identifier[units] = literal[string] )
identifier[axis0] = identifier[np] . identifier[array] ( identifier[axis0] )
keyword[if] identifier[float] ( identifier[attrs] [ literal[string] ])== literal[int] :
identifier[xname] = literal[string]
identifier[xunits] = keyword[None]
keyword[else] :
identifier[xname] = literal[string]
identifier[xunits] = literal[string]
identifier[data] . identifier[create_variable] ( identifier[name] = identifier[xname] , identifier[values] = identifier[axis0] [:, keyword[None] ], identifier[units] = identifier[xunits] )
identifier[data] . identifier[create_variable] ( identifier[name] = literal[string] , identifier[values] = identifier[np] . identifier[arange] ( identifier[arr] . identifier[shape] [ literal[int] ])[ keyword[None] ,:], identifier[units] = keyword[None] )
identifier[data] . identifier[transform] ( identifier[data] . identifier[variables] [ literal[int] ]. identifier[natural_name] , literal[string] )
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[attrs] . identifier[items] ():
identifier[data] . identifier[attrs] [ identifier[key] ]= identifier[val]
keyword[if] identifier[verbose] :
identifier[print] ( literal[string] . identifier[format] ( identifier[data] . identifier[fullpath] ))
identifier[print] ( literal[string] . identifier[format] ( identifier[data] . identifier[axis_names] ))
identifier[print] ( literal[string] . identifier[format] ( identifier[data] . identifier[shape] ))
keyword[return] identifier[data] | def from_Solis(filepath, name=None, parent=None, verbose=True) -> Data:
"""Create a data object from Andor Solis software (ascii exports).
Parameters
----------
filepath : path-like
Path to .txt file.
Can be either a local or remote file (http/ftp).
Can be compressed with gz/bz2, decompression based on file name.
name : string (optional)
Name to give to the created data object. If None, filename is used.
Default is None.
parent : WrightTools.Collection (optional)
Collection to place new data object within. Default is None.
verbose : boolean (optional)
Toggle talkback. Default is True.
Returns
-------
data
New data object.
"""
# parse filepath
filestr = os.fspath(filepath)
filepath = pathlib.Path(filepath)
if not '.asc' in filepath.suffixes:
wt_exceptions.WrongFileTypeWarning.warn(filepath, '.asc') # depends on [control=['if'], data=[]]
# parse name
if not name:
name = filepath.name.split('.')[0] # depends on [control=['if'], data=[]]
# create data
ds = np.DataSource(None)
f = ds.open(filestr, 'rt')
axis0 = []
arr = []
attrs = {}
while True:
line = f.readline().strip()[:-1]
if len(line) == 0:
break # depends on [control=['if'], data=[]]
else:
line = line.split(',')
line = [float(x) for x in line]
axis0.append(line.pop(0))
arr.append(line) # depends on [control=['while'], data=[]]
i = 0
while i < 3:
line = f.readline().strip()
if len(line) == 0:
i += 1 # depends on [control=['if'], data=[]]
else:
try:
(key, val) = line.split(':', 1) # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]]
else:
attrs[key.strip()] = val.strip() # depends on [control=['while'], data=['i']]
f.close()
created = attrs['Date and Time'] # is this UTC?
created = time.strptime(created, '%a %b %d %H:%M:%S %Y')
created = timestamp.TimeStamp(time.mktime(created)).RFC3339
kwargs = {'name': name, 'kind': 'Solis', 'source': filestr, 'created': created}
if parent is None:
data = Data(**kwargs) # depends on [control=['if'], data=[]]
else:
data = parent.create_data(**kwargs)
arr = np.array(arr)
arr /= float(attrs['Exposure Time (secs)'])
# signal has units of Hz because time normalized
arr = data.create_channel(name='signal', values=arr, signed=False, units='Hz')
axis0 = np.array(axis0)
if float(attrs['Grating Groove Density (l/mm)']) == 0:
xname = 'xindex'
xunits = None # depends on [control=['if'], data=[]]
else:
xname = 'wm'
xunits = 'nm'
data.create_variable(name=xname, values=axis0[:, None], units=xunits)
data.create_variable(name='yindex', values=np.arange(arr.shape[1])[None, :], units=None)
data.transform(data.variables[0].natural_name, 'yindex')
for (key, val) in attrs.items():
data.attrs[key] = val # depends on [control=['for'], data=[]]
# finish
if verbose:
print('data created at {0}'.format(data.fullpath))
print(' axes: {0}'.format(data.axis_names))
print(' shape: {0}'.format(data.shape)) # depends on [control=['if'], data=[]]
return data |
def _xy_locs(mask):
"""Mask should be a set of bools from comparison with a feature layer."""
y, x = mask.nonzero()
return list(zip(x, y)) | def function[_xy_locs, parameter[mask]]:
constant[Mask should be a set of bools from comparison with a feature layer.]
<ast.Tuple object at 0x7da18bcc9240> assign[=] call[name[mask].nonzero, parameter[]]
return[call[name[list], parameter[call[name[zip], parameter[name[x], name[y]]]]]] | keyword[def] identifier[_xy_locs] ( identifier[mask] ):
literal[string]
identifier[y] , identifier[x] = identifier[mask] . identifier[nonzero] ()
keyword[return] identifier[list] ( identifier[zip] ( identifier[x] , identifier[y] )) | def _xy_locs(mask):
"""Mask should be a set of bools from comparison with a feature layer."""
(y, x) = mask.nonzero()
return list(zip(x, y)) |
def list_packages_in_eups_table(table_text):
    """Extract the names of packages required by an EUPS table file.

    Parameters
    ----------
    table_text : `str`
        The text content of an EUPS table file.

    Returns
    -------
    names : `list` [`str`]
        Names of the packages the table file declares as required.
    """
    logger = logging.getLogger(__name__)
    # ``setupRequired(<product>)`` lines declare required products; because
    # the pattern has a single capture group, findall() returns exactly the
    # captured product names, in order of appearance.
    required = re.compile(r'setupRequired\((?P<name>\w+)\)')
    listed_packages = required.findall(table_text)
    logger.debug('Packages listed in the table file: %r', listed_packages)
    return listed_packages
constant[List the names of packages that are required by an EUPS table file.
Parameters
----------
table_text : `str`
The text content of an EUPS table file.
Returns
-------
names : `list` [`str`]
List of package names that are required byy the EUPS table file.
]
variable[logger] assign[=] call[name[logging].getLogger, parameter[name[__name__]]]
variable[pattern] assign[=] call[name[re].compile, parameter[constant[setupRequired\((?P<name>\w+)\)]]]
variable[listed_packages] assign[=] <ast.ListComp object at 0x7da1b24e0a30>
call[name[logger].debug, parameter[constant[Packages listed in the table file: %r], name[listed_packages]]]
return[name[listed_packages]] | keyword[def] identifier[list_packages_in_eups_table] ( identifier[table_text] ):
literal[string]
identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[__name__] )
identifier[pattern] = identifier[re] . identifier[compile] ( literal[string] )
identifier[listed_packages] =[ identifier[m] . identifier[group] ( literal[string] ) keyword[for] identifier[m] keyword[in] identifier[pattern] . identifier[finditer] ( identifier[table_text] )]
identifier[logger] . identifier[debug] ( literal[string] , identifier[listed_packages] )
keyword[return] identifier[listed_packages] | def list_packages_in_eups_table(table_text):
"""List the names of packages that are required by an EUPS table file.
Parameters
----------
table_text : `str`
The text content of an EUPS table file.
Returns
-------
names : `list` [`str`]
List of package names that are required byy the EUPS table file.
"""
logger = logging.getLogger(__name__)
# This pattern matches required product names in EUPS table files.
pattern = re.compile('setupRequired\\((?P<name>\\w+)\\)')
listed_packages = [m.group('name') for m in pattern.finditer(table_text)]
logger.debug('Packages listed in the table file: %r', listed_packages)
return listed_packages |
def DbGetDeviceFamilyList(self, argin):
    """Return the device-name families matching the specified wildcard.

    :param argin: The wildcard
    :type: tango.DevString
    :return: Family list
    :rtype: tango.DevVarStringArray
    """
    self._log.debug("In DbGetDeviceFamilyList()")
    # Normalize the caller-supplied wildcard before querying the database.
    pattern = replace_wildcard(argin)
    return self.db.get_device_family_list(pattern)
constant[ Get a list of device name families for device name matching the
specified wildcard
:param argin: The wildcard
:type: tango.DevString
:return: Family list
:rtype: tango.DevVarStringArray ]
call[name[self]._log.debug, parameter[constant[In DbGetDeviceFamilyList()]]]
variable[argin] assign[=] call[name[replace_wildcard], parameter[name[argin]]]
return[call[name[self].db.get_device_family_list, parameter[name[argin]]]] | keyword[def] identifier[DbGetDeviceFamilyList] ( identifier[self] , identifier[argin] ):
literal[string]
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] )
identifier[argin] = identifier[replace_wildcard] ( identifier[argin] )
keyword[return] identifier[self] . identifier[db] . identifier[get_device_family_list] ( identifier[argin] ) | def DbGetDeviceFamilyList(self, argin):
""" Get a list of device name families for device name matching the
specified wildcard
:param argin: The wildcard
:type: tango.DevString
:return: Family list
:rtype: tango.DevVarStringArray """
self._log.debug('In DbGetDeviceFamilyList()')
argin = replace_wildcard(argin)
return self.db.get_device_family_list(argin) |
def doit(self):
    """Do (most of) it function of the model class."""
    print(' . doit')
    lines = Lines()
    add = lines.add
    add(1, 'cpdef inline void doit(self, int idx) %s:' % _nogil)
    add(2, 'self.idx_sim = idx')
    sequences = self.model.sequences
    # Only emit the optional steps the concrete model actually defines.
    if getattr(sequences, 'inputs', None) is not None:
        add(2, 'self.load_data()')
    if self.model.INLET_METHODS:
        add(2, 'self.update_inlets()')
    # Solver-based models expose solve(); all others use plain run().
    add(2, 'self.solve()' if hasattr(self.model, 'solve') else 'self.run()')
    if getattr(sequences, 'states', None) is not None:
        add(2, 'self.new2old()')
    if self.model.OUTLET_METHODS:
        add(2, 'self.update_outlets()')
    return lines
constant[Do (most of) it function of the model class.]
call[name[print], parameter[constant[ . doit]]]
variable[lines] assign[=] call[name[Lines], parameter[]]
call[name[lines].add, parameter[constant[1], binary_operation[constant[cpdef inline void doit(self, int idx) %s:] <ast.Mod object at 0x7da2590d6920> name[_nogil]]]]
call[name[lines].add, parameter[constant[2], constant[self.idx_sim = idx]]]
if compare[call[name[getattr], parameter[name[self].model.sequences, constant[inputs], constant[None]]] is_not constant[None]] begin[:]
call[name[lines].add, parameter[constant[2], constant[self.load_data()]]]
if name[self].model.INLET_METHODS begin[:]
call[name[lines].add, parameter[constant[2], constant[self.update_inlets()]]]
if call[name[hasattr], parameter[name[self].model, constant[solve]]] begin[:]
call[name[lines].add, parameter[constant[2], constant[self.solve()]]]
if name[self].model.OUTLET_METHODS begin[:]
call[name[lines].add, parameter[constant[2], constant[self.update_outlets()]]]
return[name[lines]] | keyword[def] identifier[doit] ( identifier[self] ):
literal[string]
identifier[print] ( literal[string] )
identifier[lines] = identifier[Lines] ()
identifier[lines] . identifier[add] ( literal[int] , literal[string] % identifier[_nogil] )
identifier[lines] . identifier[add] ( literal[int] , literal[string] )
keyword[if] identifier[getattr] ( identifier[self] . identifier[model] . identifier[sequences] , literal[string] , keyword[None] ) keyword[is] keyword[not] keyword[None] :
identifier[lines] . identifier[add] ( literal[int] , literal[string] )
keyword[if] identifier[self] . identifier[model] . identifier[INLET_METHODS] :
identifier[lines] . identifier[add] ( literal[int] , literal[string] )
keyword[if] identifier[hasattr] ( identifier[self] . identifier[model] , literal[string] ):
identifier[lines] . identifier[add] ( literal[int] , literal[string] )
keyword[else] :
identifier[lines] . identifier[add] ( literal[int] , literal[string] )
keyword[if] identifier[getattr] ( identifier[self] . identifier[model] . identifier[sequences] , literal[string] , keyword[None] ) keyword[is] keyword[not] keyword[None] :
identifier[lines] . identifier[add] ( literal[int] , literal[string] )
keyword[if] identifier[self] . identifier[model] . identifier[OUTLET_METHODS] :
identifier[lines] . identifier[add] ( literal[int] , literal[string] )
keyword[return] identifier[lines] | def doit(self):
"""Do (most of) it function of the model class."""
print(' . doit')
lines = Lines()
lines.add(1, 'cpdef inline void doit(self, int idx) %s:' % _nogil)
lines.add(2, 'self.idx_sim = idx')
if getattr(self.model.sequences, 'inputs', None) is not None:
lines.add(2, 'self.load_data()') # depends on [control=['if'], data=[]]
if self.model.INLET_METHODS:
lines.add(2, 'self.update_inlets()') # depends on [control=['if'], data=[]]
if hasattr(self.model, 'solve'):
lines.add(2, 'self.solve()') # depends on [control=['if'], data=[]]
else:
lines.add(2, 'self.run()')
if getattr(self.model.sequences, 'states', None) is not None:
lines.add(2, 'self.new2old()') # depends on [control=['if'], data=[]]
if self.model.OUTLET_METHODS:
lines.add(2, 'self.update_outlets()') # depends on [control=['if'], data=[]]
return lines |
def firmware_image_destroy(self, image_id, **kwargs):  # noqa: E501
    """Delete an image  # noqa: E501

    Delete a firmware image.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass asynchronous=True

    >>> thread = api.firmware_image_destroy(image_id, asynchronous=True)
    >>> result = thread.get()

    :param asynchronous bool
    :param str image_id: The firmware image ID (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the synchronous and asynchronous paths delegate to the same
    # *_with_http_info helper, which inspects the 'asynchronous' flag in
    # kwargs itself and returns either the request thread or the data.
    return self.firmware_image_destroy_with_http_info(image_id, **kwargs)  # noqa: E501
return data | def function[firmware_image_destroy, parameter[self, image_id]]:
constant[Delete an image # noqa: E501
Delete a firmware image. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass asynchronous=True
>>> thread = api.firmware_image_destroy(image_id, asynchronous=True)
>>> result = thread.get()
:param asynchronous bool
:param str image_id: The firmware image ID (required)
:return: None
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[asynchronous]]] begin[:]
return[call[name[self].firmware_image_destroy_with_http_info, parameter[name[image_id]]]] | keyword[def] identifier[firmware_image_destroy] ( identifier[self] , identifier[image_id] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[firmware_image_destroy_with_http_info] ( identifier[image_id] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[firmware_image_destroy_with_http_info] ( identifier[image_id] ,** identifier[kwargs] )
keyword[return] identifier[data] | def firmware_image_destroy(self, image_id, **kwargs): # noqa: E501
'Delete an image # noqa: E501\n\n Delete a firmware image. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass asynchronous=True\n >>> thread = api.firmware_image_destroy(image_id, asynchronous=True)\n >>> result = thread.get()\n\n :param asynchronous bool\n :param str image_id: The firmware image ID (required)\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('asynchronous'):
return self.firmware_image_destroy_with_http_info(image_id, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.firmware_image_destroy_with_http_info(image_id, **kwargs) # noqa: E501
return data |
def set_defaults(self, *args, **kwargs):
    """
    node.set_defaults(a=b...) yields a new calculation node identical to the
    given node except that its default values are updated with the given
    key-value pairs.  Arguments are collapsed left-to-right, so later
    arguments overwrite earlier ones.
    """
    merged = merge(self.defaults, args, kwargs)
    dup = copy.copy(self)
    # The node presumably forbids normal attribute assignment, so write
    # the new persistent map through object.__setattr__ directly.
    object.__setattr__(dup, 'defaults', ps.pmap(merged))
    return dup
return new_cnode | def function[set_defaults, parameter[self]]:
constant[
node.set_defaults(a=b...) yields a new calculation node identical to the given node except
with the default values matching the given key-value pairs. Arguments are collapsed left-to
right with later arguments overwriting earlier arguments.
]
variable[args] assign[=] call[name[merge], parameter[name[self].defaults, name[args], name[kwargs]]]
variable[new_cnode] assign[=] call[name[copy].copy, parameter[name[self]]]
call[name[object].__setattr__, parameter[name[new_cnode], constant[defaults], call[name[ps].pmap, parameter[name[args]]]]]
return[name[new_cnode]] | keyword[def] identifier[set_defaults] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[args] = identifier[merge] ( identifier[self] . identifier[defaults] , identifier[args] , identifier[kwargs] )
identifier[new_cnode] = identifier[copy] . identifier[copy] ( identifier[self] )
identifier[object] . identifier[__setattr__] ( identifier[new_cnode] , literal[string] , identifier[ps] . identifier[pmap] ( identifier[args] ))
keyword[return] identifier[new_cnode] | def set_defaults(self, *args, **kwargs):
"""
node.set_defaults(a=b...) yields a new calculation node identical to the given node except
with the default values matching the given key-value pairs. Arguments are collapsed left-to
right with later arguments overwriting earlier arguments.
"""
args = merge(self.defaults, args, kwargs)
new_cnode = copy.copy(self)
object.__setattr__(new_cnode, 'defaults', ps.pmap(args))
return new_cnode |
def _apply_rate(self, max_rate, aggressive=False):
    """
    Try to adjust the rate (characters/second)
    of the fragments of the list,
    so that it does not exceed the given ``max_rate``.

    This is done by testing whether some slack
    can be borrowed from the fragment before
    the faster current one.
    If ``aggressive`` is ``True``,
    the slack might be retrieved from the fragment after
    the faster current one,
    if the previous fragment could not contribute enough slack.

    :param max_rate: the maximum admissible rate (characters/second)
    :param bool aggressive: if ``True``, also borrow slack from the
        following fragment when the preceding one cannot contribute enough
    """
    self.log(u"Called _apply_rate")
    self.log([u" Aggressive: %s", aggressive])
    self.log([u" Max rate: %.3f", max_rate])
    # With at most one regular fragment there is no neighbor to borrow
    # slack from, so there is nothing to fix.
    regular_fragments = list(self.smflist.regular_fragments)
    if len(regular_fragments) <= 1:
        self.log(u" The list contains at most one regular fragment, returning")
        return
    # Flag fragments whose rate exceeds max_rate by more than a small
    # tolerance (0.001 cps), so fragments that are only off by rounding
    # are not touched.  Fragments with rate None are skipped.
    faster_fragments = [(i, f) for i, f in regular_fragments if (f.rate is not None) and (f.rate >= max_rate + Decimal("0.001"))]
    if len(faster_fragments) == 0:
        self.log(u" No regular fragment faster than max rate, returning")
        return
    self.log_warn(u" Some fragments have rate faster than max rate:")
    self.log([u" %s", [i for i, f in faster_fragments]])
    self.log(u"Fixing rate for faster fragments...")
    for frag_index, fragment in faster_fragments:
        self.smflist.fix_fragment_rate(frag_index, max_rate, aggressive=aggressive)
    self.log(u"Fixing rate for faster fragments... done")
    # fix_fragment_rate is best-effort: re-scan with the same criterion and
    # warn (do not raise) about fragments that still exceed the maximum rate.
    faster_fragments = [(i, f) for i, f in regular_fragments if (f.rate is not None) and (f.rate >= max_rate + Decimal("0.001"))]
    if len(faster_fragments) > 0:
        self.log_warn(u" Some fragments still have rate faster than max rate:")
        self.log([u" %s", [i for i, f in faster_fragments]])
constant[
Try to adjust the rate (characters/second)
of the fragments of the list,
so that it does not exceed the given ``max_rate``.
This is done by testing whether some slack
can be borrowed from the fragment before
the faster current one.
If ``aggressive`` is ``True``,
the slack might be retrieved from the fragment after
the faster current one,
if the previous fragment could not contribute enough slack.
]
call[name[self].log, parameter[constant[Called _apply_rate]]]
call[name[self].log, parameter[list[[<ast.Constant object at 0x7da1b17efbe0>, <ast.Name object at 0x7da1b17efbb0>]]]]
call[name[self].log, parameter[list[[<ast.Constant object at 0x7da1b17ed990>, <ast.Name object at 0x7da1b17ed9c0>]]]]
variable[regular_fragments] assign[=] call[name[list], parameter[name[self].smflist.regular_fragments]]
if compare[call[name[len], parameter[name[regular_fragments]]] less_or_equal[<=] constant[1]] begin[:]
call[name[self].log, parameter[constant[ The list contains at most one regular fragment, returning]]]
return[None]
variable[faster_fragments] assign[=] <ast.ListComp object at 0x7da1b17ec100>
if compare[call[name[len], parameter[name[faster_fragments]]] equal[==] constant[0]] begin[:]
call[name[self].log, parameter[constant[ No regular fragment faster than max rate, returning]]]
return[None]
call[name[self].log_warn, parameter[constant[ Some fragments have rate faster than max rate:]]]
call[name[self].log, parameter[list[[<ast.Constant object at 0x7da1b17ee1d0>, <ast.ListComp object at 0x7da1b17ee200>]]]]
call[name[self].log, parameter[constant[Fixing rate for faster fragments...]]]
for taget[tuple[[<ast.Name object at 0x7da1b17ee920>, <ast.Name object at 0x7da1b17ee950>]]] in starred[name[faster_fragments]] begin[:]
call[name[self].smflist.fix_fragment_rate, parameter[name[frag_index], name[max_rate]]]
call[name[self].log, parameter[constant[Fixing rate for faster fragments... done]]]
variable[faster_fragments] assign[=] <ast.ListComp object at 0x7da1b17ef430>
if compare[call[name[len], parameter[name[faster_fragments]]] greater[>] constant[0]] begin[:]
call[name[self].log_warn, parameter[constant[ Some fragments still have rate faster than max rate:]]]
call[name[self].log, parameter[list[[<ast.Constant object at 0x7da1b17ed3f0>, <ast.ListComp object at 0x7da1b17ed3c0>]]]] | keyword[def] identifier[_apply_rate] ( identifier[self] , identifier[max_rate] , identifier[aggressive] = keyword[False] ):
literal[string]
identifier[self] . identifier[log] ( literal[string] )
identifier[self] . identifier[log] ([ literal[string] , identifier[aggressive] ])
identifier[self] . identifier[log] ([ literal[string] , identifier[max_rate] ])
identifier[regular_fragments] = identifier[list] ( identifier[self] . identifier[smflist] . identifier[regular_fragments] )
keyword[if] identifier[len] ( identifier[regular_fragments] )<= literal[int] :
identifier[self] . identifier[log] ( literal[string] )
keyword[return]
identifier[faster_fragments] =[( identifier[i] , identifier[f] ) keyword[for] identifier[i] , identifier[f] keyword[in] identifier[regular_fragments] keyword[if] ( identifier[f] . identifier[rate] keyword[is] keyword[not] keyword[None] ) keyword[and] ( identifier[f] . identifier[rate] >= identifier[max_rate] + identifier[Decimal] ( literal[string] ))]
keyword[if] identifier[len] ( identifier[faster_fragments] )== literal[int] :
identifier[self] . identifier[log] ( literal[string] )
keyword[return]
identifier[self] . identifier[log_warn] ( literal[string] )
identifier[self] . identifier[log] ([ literal[string] ,[ identifier[i] keyword[for] identifier[i] , identifier[f] keyword[in] identifier[faster_fragments] ]])
identifier[self] . identifier[log] ( literal[string] )
keyword[for] identifier[frag_index] , identifier[fragment] keyword[in] identifier[faster_fragments] :
identifier[self] . identifier[smflist] . identifier[fix_fragment_rate] ( identifier[frag_index] , identifier[max_rate] , identifier[aggressive] = identifier[aggressive] )
identifier[self] . identifier[log] ( literal[string] )
identifier[faster_fragments] =[( identifier[i] , identifier[f] ) keyword[for] identifier[i] , identifier[f] keyword[in] identifier[regular_fragments] keyword[if] ( identifier[f] . identifier[rate] keyword[is] keyword[not] keyword[None] ) keyword[and] ( identifier[f] . identifier[rate] >= identifier[max_rate] + identifier[Decimal] ( literal[string] ))]
keyword[if] identifier[len] ( identifier[faster_fragments] )> literal[int] :
identifier[self] . identifier[log_warn] ( literal[string] )
identifier[self] . identifier[log] ([ literal[string] ,[ identifier[i] keyword[for] identifier[i] , identifier[f] keyword[in] identifier[faster_fragments] ]]) | def _apply_rate(self, max_rate, aggressive=False):
"""
Try to adjust the rate (characters/second)
of the fragments of the list,
so that it does not exceed the given ``max_rate``.
This is done by testing whether some slack
can be borrowed from the fragment before
the faster current one.
If ``aggressive`` is ``True``,
the slack might be retrieved from the fragment after
the faster current one,
if the previous fragment could not contribute enough slack.
"""
self.log(u'Called _apply_rate')
self.log([u' Aggressive: %s', aggressive])
self.log([u' Max rate: %.3f', max_rate])
regular_fragments = list(self.smflist.regular_fragments)
if len(regular_fragments) <= 1:
self.log(u' The list contains at most one regular fragment, returning')
return # depends on [control=['if'], data=[]]
faster_fragments = [(i, f) for (i, f) in regular_fragments if f.rate is not None and f.rate >= max_rate + Decimal('0.001')]
if len(faster_fragments) == 0:
self.log(u' No regular fragment faster than max rate, returning')
return # depends on [control=['if'], data=[]]
self.log_warn(u' Some fragments have rate faster than max rate:')
self.log([u' %s', [i for (i, f) in faster_fragments]])
self.log(u'Fixing rate for faster fragments...')
for (frag_index, fragment) in faster_fragments:
self.smflist.fix_fragment_rate(frag_index, max_rate, aggressive=aggressive) # depends on [control=['for'], data=[]]
self.log(u'Fixing rate for faster fragments... done')
faster_fragments = [(i, f) for (i, f) in regular_fragments if f.rate is not None and f.rate >= max_rate + Decimal('0.001')]
if len(faster_fragments) > 0:
self.log_warn(u' Some fragments still have rate faster than max rate:')
self.log([u' %s', [i for (i, f) in faster_fragments]]) # depends on [control=['if'], data=[]] |
def template_files(path, exts=None):
    """
    Return a list of filenames found at @path.

    The list of filenames can be filtered by extensions.

    Arguments:
        path: Existing filepath we want to list.  May be absolute, or
            relative to ``determine_path()``.
        exts: List of extensions to filter by, each including the
            leading dot (e.g. ``'.html'``).  If ``None`` or empty, no
            file matches (every extension is filtered out).

    Returns:
        A list of paths (each joined with the original ``path``) for the
        matching files, or ``[]`` if ``path`` does not name a directory.
    """
    # Resolve relative paths against the application directory; absolute
    # paths are used as-is.  (Previously an absolute path left ``_path``
    # unassigned, raising NameError on the os.path.exists() check below.)
    if os.path.isabs(path):
        _path = path
    else:
        _path = os.path.join(determine_path(), path)
    if not (os.path.exists(_path) and os.path.isdir(_path)):
        return []
    if not exts:
        exts = []
    files = os.listdir(_path)
    files = [f for f in files if os.path.splitext(f)[-1] in exts]
    files = [os.path.join(path, f) for f in files]
    return files
constant[
Return a list of filenames found at @path.
The list of filenames can be filtered by extensions.
Arguments:
path: Existing filepath we want to list.
exts: List of extensions to filter by.
Returns:
A list of filenames found in the path.
]
if <ast.UnaryOp object at 0x7da2041d98a0> begin[:]
variable[_path] assign[=] call[name[os].path.join, parameter[call[name[determine_path], parameter[]], name[path]]]
if <ast.UnaryOp object at 0x7da2041da050> begin[:]
return[list[[]]]
if <ast.UnaryOp object at 0x7da2041dace0> begin[:]
variable[exts] assign[=] list[[]]
variable[files] assign[=] call[name[os].listdir, parameter[name[_path]]]
variable[files] assign[=] <ast.ListComp object at 0x7da2041da2c0>
variable[files] assign[=] <ast.ListComp object at 0x7da2041da2f0>
return[name[files]] | keyword[def] identifier[template_files] ( identifier[path] , identifier[exts] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isabs] ( identifier[path] ):
identifier[_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[determine_path] (), identifier[path] )
keyword[if] keyword[not] ( identifier[os] . identifier[path] . identifier[exists] ( identifier[_path] ) keyword[and] identifier[os] . identifier[path] . identifier[isdir] ( identifier[_path] )):
keyword[return] []
keyword[if] keyword[not] identifier[exts] :
identifier[exts] =[]
identifier[files] = identifier[os] . identifier[listdir] ( identifier[_path] )
identifier[files] =[ identifier[f] keyword[for] identifier[f] keyword[in] identifier[files] keyword[if] identifier[os] . identifier[path] . identifier[splitext] ( identifier[f] )[- literal[int] ] keyword[in] identifier[exts] ]
identifier[files] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[f] ) keyword[for] identifier[f] keyword[in] identifier[files] ]
keyword[return] identifier[files] | def template_files(path, exts=None):
"""
Return a list of filenames found at @path.
The list of filenames can be filtered by extensions.
Arguments:
path: Existing filepath we want to list.
exts: List of extensions to filter by.
Returns:
A list of filenames found in the path.
"""
if not os.path.isabs(path):
_path = os.path.join(determine_path(), path) # depends on [control=['if'], data=[]]
if not (os.path.exists(_path) and os.path.isdir(_path)):
return [] # depends on [control=['if'], data=[]]
if not exts:
exts = [] # depends on [control=['if'], data=[]]
files = os.listdir(_path)
files = [f for f in files if os.path.splitext(f)[-1] in exts]
files = [os.path.join(path, f) for f in files]
return files |
def write_contribs(def_dict_list: Dict[str, List[str]]) -> None:
    """Write to file, in current dir, 'contributors.md'."""
    note = '# Contributors\nCLTK Core authors, ordered alphabetically by first name\n\n'  # type: str  # pylint: disable=line-too-long
    # Collect the markdown pieces and join once, instead of growing a
    # string with repeated concatenation.
    parts = [note]  # type: List[str]
    for contrib in def_dict_list:
        parts.append('## ' + contrib + '\n')
        for module in def_dict_list[contrib]:
            parts.append('* ' + module + '\n')
        parts.append('\n')
    file_name = 'contributors.md'  # type: str
    with open(file_name, 'w') as file_open:  # type: IO
        file_open.write(''.join(parts))
    logger.info('Wrote contribs file at "%s".', file_name)
constant[Write to file, in current dir, 'contributors.md'.]
variable[file_str] assign[=] constant[]
variable[note] assign[=] constant[# Contributors
CLTK Core authors, ordered alphabetically by first name
]
<ast.AugAssign object at 0x7da1b26afac0>
for taget[name[contrib]] in starred[name[def_dict_list]] begin[:]
<ast.AugAssign object at 0x7da1b26acb80>
for taget[name[module]] in starred[call[name[def_dict_list]][name[contrib]]] begin[:]
<ast.AugAssign object at 0x7da1b26ae950>
<ast.AugAssign object at 0x7da20c76cd00>
variable[file_name] assign[=] constant[contributors.md]
with call[name[open], parameter[name[file_name], constant[w]]] begin[:]
call[name[file_open].write, parameter[name[file_str]]]
call[name[logger].info, parameter[constant[Wrote contribs file at "%s".], name[file_name]]] | keyword[def] identifier[write_contribs] ( identifier[def_dict_list] : identifier[Dict] [ identifier[str] , identifier[List] [ identifier[str] ]])-> keyword[None] :
literal[string]
identifier[file_str] = literal[string]
identifier[note] = literal[string]
identifier[file_str] += identifier[note]
keyword[for] identifier[contrib] keyword[in] identifier[def_dict_list] :
identifier[file_str] += literal[string] + identifier[contrib] + literal[string]
keyword[for] identifier[module] keyword[in] identifier[def_dict_list] [ identifier[contrib] ]:
identifier[file_str] += literal[string] + identifier[module] + literal[string]
identifier[file_str] += literal[string]
identifier[file_name] = literal[string]
keyword[with] identifier[open] ( identifier[file_name] , literal[string] ) keyword[as] identifier[file_open] :
identifier[file_open] . identifier[write] ( identifier[file_str] )
identifier[logger] . identifier[info] ( literal[string] , identifier[file_name] ) | def write_contribs(def_dict_list: Dict[str, List[str]]) -> None:
"""Write to file, in current dir, 'contributors.md'."""
file_str = '' # type: str
note = '# Contributors\nCLTK Core authors, ordered alphabetically by first name\n\n' # type: str # pylint: disable=line-too-long
file_str += note
for contrib in def_dict_list:
file_str += '## ' + contrib + '\n'
for module in def_dict_list[contrib]:
file_str += '* ' + module + '\n' # depends on [control=['for'], data=['module']]
file_str += '\n' # depends on [control=['for'], data=['contrib']]
file_name = 'contributors.md' # type: str
with open(file_name, 'w') as file_open: # type: IO
file_open.write(file_str) # depends on [control=['with'], data=['file_open']]
logger.info('Wrote contribs file at "%s".', file_name) |
def is_canfulfill_intent_name(name):
    # type: (str) -> Callable[[HandlerInput], bool]
    """A predicate function returning a boolean, when name matches the
    intent name in a CanFulfill Intent Request.

    The function can be applied on a
    :py:class:`ask_sdk_core.handler_input.HandlerInput`, to
    check if the input is of
    :py:class:`ask_sdk_model.intent_request.CanFulfillIntentRequest` type and if the
    name of the request matches with the passed name.

    :param name: Name to be matched with the CanFulfill Intent Request Name
    :type name: str
    :return: Predicate function that can be used to check name of the
        request
    :rtype: Callable[[HandlerInput], bool]
    """
    def can_handle_wrapper(handler_input):
        # type: (HandlerInput) -> bool
        # Guard clause: only CanFulfillIntentRequest instances can match.
        request = handler_input.request_envelope.request
        if not isinstance(request, CanFulfillIntentRequest):
            return False
        return request.intent.name == name
    return can_handle_wrapper
constant[A predicate function returning a boolean, when name matches the
intent name in a CanFulfill Intent Request.
The function can be applied on a
:py:class:`ask_sdk_core.handler_input.HandlerInput`, to
check if the input is of
:py:class:`ask_sdk_model.intent_request.CanFulfillIntentRequest` type and if the
name of the request matches with the passed name.
:param name: Name to be matched with the CanFulfill Intent Request Name
:type name: str
:return: Predicate function that can be used to check name of the
request
:rtype: Callable[[HandlerInput], bool]
]
def function[can_handle_wrapper, parameter[handler_input]]:
return[<ast.BoolOp object at 0x7da1b18ac0a0>]
return[name[can_handle_wrapper]] | keyword[def] identifier[is_canfulfill_intent_name] ( identifier[name] ):
literal[string]
keyword[def] identifier[can_handle_wrapper] ( identifier[handler_input] ):
keyword[return] ( identifier[isinstance] (
identifier[handler_input] . identifier[request_envelope] . identifier[request] , identifier[CanFulfillIntentRequest] ) keyword[and]
identifier[handler_input] . identifier[request_envelope] . identifier[request] . identifier[intent] . identifier[name] == identifier[name] )
keyword[return] identifier[can_handle_wrapper] | def is_canfulfill_intent_name(name):
# type: (str) -> Callable[[HandlerInput], bool]
'A predicate function returning a boolean, when name matches the\n intent name in a CanFulfill Intent Request.\n\n The function can be applied on a\n :py:class:`ask_sdk_core.handler_input.HandlerInput`, to\n check if the input is of\n :py:class:`ask_sdk_model.intent_request.CanFulfillIntentRequest` type and if the\n name of the request matches with the passed name.\n\n :param name: Name to be matched with the CanFulfill Intent Request Name\n :type name: str\n :return: Predicate function that can be used to check name of the\n request\n :rtype: Callable[[HandlerInput], bool]\n '
def can_handle_wrapper(handler_input):
# type: (HandlerInput) -> bool
return isinstance(handler_input.request_envelope.request, CanFulfillIntentRequest) and handler_input.request_envelope.request.intent.name == name
return can_handle_wrapper |
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_threshold_high_threshold(self, **kwargs):
    """Auto Generated Code
    """
    # Build the <config> payload for
    # threshold-monitor-hidden/threshold-monitor/sfp/policy/area/threshold/high-threshold,
    # consuming the key values from kwargs as each element is created.
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "sfp")
    policy = ET.SubElement(node, "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    threshold = ET.SubElement(area, "threshold")
    ET.SubElement(threshold, "high-threshold").text = kwargs.pop('high_threshold')
    # Dispatch the assembled config through the caller-supplied callback,
    # defaulting to this object's configured callback.
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[threshold_monitor_hidden] assign[=] call[name[ET].SubElement, parameter[name[config], constant[threshold-monitor-hidden]]]
variable[threshold_monitor] assign[=] call[name[ET].SubElement, parameter[name[threshold_monitor_hidden], constant[threshold-monitor]]]
variable[sfp] assign[=] call[name[ET].SubElement, parameter[name[threshold_monitor], constant[sfp]]]
variable[policy] assign[=] call[name[ET].SubElement, parameter[name[sfp], constant[policy]]]
variable[policy_name_key] assign[=] call[name[ET].SubElement, parameter[name[policy], constant[policy_name]]]
name[policy_name_key].text assign[=] call[name[kwargs].pop, parameter[constant[policy_name]]]
variable[area] assign[=] call[name[ET].SubElement, parameter[name[policy], constant[area]]]
variable[type_key] assign[=] call[name[ET].SubElement, parameter[name[area], constant[type]]]
name[type_key].text assign[=] call[name[kwargs].pop, parameter[constant[type]]]
variable[area_value_key] assign[=] call[name[ET].SubElement, parameter[name[area], constant[area_value]]]
name[area_value_key].text assign[=] call[name[kwargs].pop, parameter[constant[area_value]]]
variable[threshold] assign[=] call[name[ET].SubElement, parameter[name[area], constant[threshold]]]
variable[high_threshold] assign[=] call[name[ET].SubElement, parameter[name[threshold], constant[high-threshold]]]
name[high_threshold].text assign[=] call[name[kwargs].pop, parameter[constant[high_threshold]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[threshold_monitor_hidden_threshold_monitor_sfp_policy_area_threshold_high_threshold] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[threshold_monitor_hidden] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[threshold_monitor] = identifier[ET] . identifier[SubElement] ( identifier[threshold_monitor_hidden] , literal[string] )
identifier[sfp] = identifier[ET] . identifier[SubElement] ( identifier[threshold_monitor] , literal[string] )
identifier[policy] = identifier[ET] . identifier[SubElement] ( identifier[sfp] , literal[string] )
identifier[policy_name_key] = identifier[ET] . identifier[SubElement] ( identifier[policy] , literal[string] )
identifier[policy_name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[area] = identifier[ET] . identifier[SubElement] ( identifier[policy] , literal[string] )
identifier[type_key] = identifier[ET] . identifier[SubElement] ( identifier[area] , literal[string] )
identifier[type_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[area_value_key] = identifier[ET] . identifier[SubElement] ( identifier[area] , literal[string] )
identifier[area_value_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[threshold] = identifier[ET] . identifier[SubElement] ( identifier[area] , literal[string] )
identifier[high_threshold] = identifier[ET] . identifier[SubElement] ( identifier[threshold] , literal[string] )
identifier[high_threshold] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_threshold_high_threshold(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
threshold_monitor_hidden = ET.SubElement(config, 'threshold-monitor-hidden', xmlns='urn:brocade.com:mgmt:brocade-threshold-monitor')
threshold_monitor = ET.SubElement(threshold_monitor_hidden, 'threshold-monitor')
sfp = ET.SubElement(threshold_monitor, 'sfp')
policy = ET.SubElement(sfp, 'policy')
policy_name_key = ET.SubElement(policy, 'policy_name')
policy_name_key.text = kwargs.pop('policy_name')
area = ET.SubElement(policy, 'area')
type_key = ET.SubElement(area, 'type')
type_key.text = kwargs.pop('type')
area_value_key = ET.SubElement(area, 'area_value')
area_value_key.text = kwargs.pop('area_value')
threshold = ET.SubElement(area, 'threshold')
high_threshold = ET.SubElement(threshold, 'high-threshold')
high_threshold.text = kwargs.pop('high_threshold')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def json_data(self, instance, default=None):
    """Get a JSON compatible value
    """
    # Empty/missing values serialize as the empty string.
    stored = self.get(instance)
    if not stored:
        return ""
    # The field may hold a lazily computed value; resolve it first.
    resolved = stored() if callable(stored) else stored
    return api.to_iso_date(resolved, default=default)
constant[Get a JSON compatible value
]
variable[value] assign[=] call[name[self].get, parameter[name[instance]]]
if <ast.UnaryOp object at 0x7da20c6a9a20> begin[:]
return[constant[]]
if call[name[callable], parameter[name[value]]] begin[:]
variable[value] assign[=] call[name[value], parameter[]]
return[call[name[api].to_iso_date, parameter[name[value]]]] | keyword[def] identifier[json_data] ( identifier[self] , identifier[instance] , identifier[default] = keyword[None] ):
literal[string]
identifier[value] = identifier[self] . identifier[get] ( identifier[instance] )
keyword[if] keyword[not] identifier[value] :
keyword[return] literal[string]
keyword[if] identifier[callable] ( identifier[value] ):
identifier[value] = identifier[value] ()
keyword[return] identifier[api] . identifier[to_iso_date] ( identifier[value] , identifier[default] = identifier[default] ) | def json_data(self, instance, default=None):
"""Get a JSON compatible value
"""
value = self.get(instance)
if not value:
return '' # depends on [control=['if'], data=[]]
if callable(value):
value = value() # depends on [control=['if'], data=[]]
return api.to_iso_date(value, default=default) |
def dump_index(args, idx):
    """Create a metatab file for the index.

    Builds a MetatabDoc with one 'Package' term per entry returned by
    ``idx.list()`` and writes it as CSV to the path given by ``args.dump``.
    """
    # NOTE: the previous function-local ``import csv`` and ``import sys``
    # were never used and have been removed.
    from metatab import MetatabDoc

    doc = MetatabDoc()

    pack_section = doc.new_section('Packages', ['Identifier', 'Name', 'Nvname', 'Version', 'Format'])

    r = doc['Root']
    r.new_term('Root.Title', 'Package Index')

    for p in idx.list():
        pack_section.new_term('Package',
                              p['url'],
                              identifier=p['ident'],
                              name=p['name'],
                              nvname=p['nvname'],
                              version=p['version'],
                              format=p['format'])

    doc.write_csv(args.dump)
doc.write_csv(args.dump) | def function[dump_index, parameter[args, idx]]:
constant[Create a metatab file for the index]
import module[csv]
import module[sys]
from relative_module[metatab] import module[MetatabDoc]
variable[doc] assign[=] call[name[MetatabDoc], parameter[]]
variable[pack_section] assign[=] call[name[doc].new_section, parameter[constant[Packages], list[[<ast.Constant object at 0x7da1b185ab30>, <ast.Constant object at 0x7da1b185b5e0>, <ast.Constant object at 0x7da1b18581f0>, <ast.Constant object at 0x7da1b1858eb0>, <ast.Constant object at 0x7da1b185a890>]]]]
variable[r] assign[=] call[name[doc]][constant[Root]]
call[name[r].new_term, parameter[constant[Root.Title], constant[Package Index]]]
for taget[name[p]] in starred[call[name[idx].list, parameter[]]] begin[:]
call[name[pack_section].new_term, parameter[constant[Package], call[name[p]][constant[url]]]]
call[name[doc].write_csv, parameter[name[args].dump]] | keyword[def] identifier[dump_index] ( identifier[args] , identifier[idx] ):
literal[string]
keyword[import] identifier[csv]
keyword[import] identifier[sys]
keyword[from] identifier[metatab] keyword[import] identifier[MetatabDoc]
identifier[doc] = identifier[MetatabDoc] ()
identifier[pack_section] = identifier[doc] . identifier[new_section] ( literal[string] ,[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ])
identifier[r] = identifier[doc] [ literal[string] ]
identifier[r] . identifier[new_term] ( literal[string] , literal[string] )
keyword[for] identifier[p] keyword[in] identifier[idx] . identifier[list] ():
identifier[pack_section] . identifier[new_term] ( literal[string] ,
identifier[p] [ literal[string] ],
identifier[identifier] = identifier[p] [ literal[string] ],
identifier[name] = identifier[p] [ literal[string] ],
identifier[nvname] = identifier[p] [ literal[string] ],
identifier[version] = identifier[p] [ literal[string] ],
identifier[format] = identifier[p] [ literal[string] ])
identifier[doc] . identifier[write_csv] ( identifier[args] . identifier[dump] ) | def dump_index(args, idx):
"""Create a metatab file for the index"""
import csv
import sys
from metatab import MetatabDoc
doc = MetatabDoc()
pack_section = doc.new_section('Packages', ['Identifier', 'Name', 'Nvname', 'Version', 'Format'])
r = doc['Root']
r.new_term('Root.Title', 'Package Index')
for p in idx.list():
pack_section.new_term('Package', p['url'], identifier=p['ident'], name=p['name'], nvname=p['nvname'], version=p['version'], format=p['format']) # depends on [control=['for'], data=['p']]
doc.write_csv(args.dump) |
def area(self):
    """Square Foot Area (sqft).

    Multiplies the source 'Shape_Area' attribute by 10.76391 (the
    m^2 -> sqft factor; presumably the source value is in square
    meters -- confirm against the data source) and rounds the result.
    Returns None when the attribute is missing or falsy.
    """
    shape_area = self.parse['attributes'].get('Shape_Area')
    if not shape_area:
        return None
    return round(float(shape_area) * 10.76391)
constant[Square Foot Area (sqft)]
variable[area] assign[=] call[call[name[self].parse][constant[attributes]].get, parameter[constant[Shape_Area]]]
if name[area] begin[:]
return[call[name[round], parameter[binary_operation[call[name[float], parameter[name[area]]] * constant[10.76391]]]]] | keyword[def] identifier[area] ( identifier[self] ):
literal[string]
identifier[area] = identifier[self] . identifier[parse] [ literal[string] ]. identifier[get] ( literal[string] )
keyword[if] identifier[area] :
keyword[return] identifier[round] ( identifier[float] ( identifier[area] )* literal[int] ) | def area(self):
"""Square Foot Area (sqft)"""
area = self.parse['attributes'].get('Shape_Area')
if area:
return round(float(area) * 10.76391) # depends on [control=['if'], data=[]] |
def extract_path_info(
    environ_or_baseurl,
    path_or_url,
    charset="utf-8",
    errors="werkzeug.url_quote",
    collapse_http_schemes=True,
):
    """Extracts the path info from the given URL (or WSGI environment) and
    path. The path info returned is a unicode string, not a bytestring
    suitable for a WSGI environment. The URLs might also be IRIs.

    If the path info could not be determined, `None` is returned.

    Some examples:

    >>> extract_path_info('http://example.com/app', '/app/hello')
    u'/hello'
    >>> extract_path_info('http://example.com/app',
    ...                   'https://example.com/app/hello')
    u'/hello'
    >>> extract_path_info('http://example.com/app',
    ...                   'https://example.com/app/hello',
    ...                   collapse_http_schemes=False) is None
    True

    Instead of providing a base URL you can also pass a WSGI environment.

    :param environ_or_baseurl: a WSGI environment dict, a base URL or
                               base IRI. This is the root of the
                               application.
    :param path_or_url: an absolute path from the server root, a
                        relative path (in which case it's the path info)
                        or a full URL. Also accepts IRIs and unicode
                        parameters.
    :param charset: the charset for byte data in URLs
    :param errors: the error handling on decode
    :param collapse_http_schemes: if set to `False` the algorithm does
                                  not assume that http and https on the
                                  same server point to the same
                                  resource.

    .. versionchanged:: 0.15
        The ``errors`` parameter defaults to leaving invalid bytes
        quoted instead of replacing them.

    .. versionadded:: 0.6
    """
    def _normalize_netloc(scheme, netloc):
        # Drop any userinfo ("user:pass@") prefix, then split off an
        # explicit port if one is present.
        parts = netloc.split(u"@", 1)[-1].split(u":", 1)
        if len(parts) == 2:
            netloc, port = parts
            # Discard the port when it is the scheme's default, so e.g.
            # "example.com:80" and "example.com" compare equal for http.
            if (scheme == u"http" and port == u"80") or (
                scheme == u"https" and port == u"443"
            ):
                port = None
        else:
            netloc = parts[0]
            port = None
        if port is not None:
            netloc += u":" + port
        return netloc
    # make sure whatever we are working on is a IRI and parse it
    path = uri_to_iri(path_or_url, charset, errors)
    if isinstance(environ_or_baseurl, dict):
        # A WSGI environ was passed: derive the application root URL from it.
        environ_or_baseurl = get_current_url(environ_or_baseurl, root_only=True)
    base_iri = uri_to_iri(environ_or_baseurl, charset, errors)
    base_scheme, base_netloc, base_path = url_parse(base_iri)[:3]
    # Resolve the given path against the base IRI so relative paths and
    # absolute server-root paths are handled uniformly.
    cur_scheme, cur_netloc, cur_path, = url_parse(url_join(base_iri, path))[:3]
    # normalize the network location
    base_netloc = _normalize_netloc(base_scheme, base_netloc)
    cur_netloc = _normalize_netloc(cur_scheme, cur_netloc)
    # is that IRI even on a known HTTP scheme?
    if collapse_http_schemes:
        # http and https are considered interchangeable, but both URLs
        # must still use one of the two.
        for scheme in base_scheme, cur_scheme:
            if scheme not in (u"http", u"https"):
                return None
    else:
        # Schemes must match exactly (and be HTTP(S)).
        if not (base_scheme in (u"http", u"https") and base_scheme == cur_scheme):
            return None
    # are the netlocs compatible?
    if base_netloc != cur_netloc:
        return None
    # are we below the application path?
    base_path = base_path.rstrip(u"/")
    if not cur_path.startswith(base_path):
        return None
    # The path info is whatever follows the application root, with exactly
    # one leading slash.
    return u"/" + cur_path[len(base_path) :].lstrip(u"/")
constant[Extracts the path info from the given URL (or WSGI environment) and
path. The path info returned is a unicode string, not a bytestring
suitable for a WSGI environment. The URLs might also be IRIs.
If the path info could not be determined, `None` is returned.
Some examples:
>>> extract_path_info('http://example.com/app', '/app/hello')
u'/hello'
>>> extract_path_info('http://example.com/app',
... 'https://example.com/app/hello')
u'/hello'
>>> extract_path_info('http://example.com/app',
... 'https://example.com/app/hello',
... collapse_http_schemes=False) is None
True
Instead of providing a base URL you can also pass a WSGI environment.
:param environ_or_baseurl: a WSGI environment dict, a base URL or
base IRI. This is the root of the
application.
:param path_or_url: an absolute path from the server root, a
relative path (in which case it's the path info)
or a full URL. Also accepts IRIs and unicode
parameters.
:param charset: the charset for byte data in URLs
:param errors: the error handling on decode
:param collapse_http_schemes: if set to `False` the algorithm does
not assume that http and https on the
same server point to the same
resource.
.. versionchanged:: 0.15
The ``errors`` parameter defaults to leaving invalid bytes
quoted instead of replacing them.
.. versionadded:: 0.6
]
def function[_normalize_netloc, parameter[scheme, netloc]]:
variable[parts] assign[=] call[call[call[name[netloc].split, parameter[constant[@], constant[1]]]][<ast.UnaryOp object at 0x7da20c76e7a0>].split, parameter[constant[:], constant[1]]]
if compare[call[name[len], parameter[name[parts]]] equal[==] constant[2]] begin[:]
<ast.Tuple object at 0x7da20c76c520> assign[=] name[parts]
if <ast.BoolOp object at 0x7da20c76ee60> begin[:]
variable[port] assign[=] constant[None]
if compare[name[port] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da20c76cdc0>
return[name[netloc]]
variable[path] assign[=] call[name[uri_to_iri], parameter[name[path_or_url], name[charset], name[errors]]]
if call[name[isinstance], parameter[name[environ_or_baseurl], name[dict]]] begin[:]
variable[environ_or_baseurl] assign[=] call[name[get_current_url], parameter[name[environ_or_baseurl]]]
variable[base_iri] assign[=] call[name[uri_to_iri], parameter[name[environ_or_baseurl], name[charset], name[errors]]]
<ast.Tuple object at 0x7da20c76e6b0> assign[=] call[call[name[url_parse], parameter[name[base_iri]]]][<ast.Slice object at 0x7da20c76f100>]
<ast.Tuple object at 0x7da20c76e0e0> assign[=] call[call[name[url_parse], parameter[call[name[url_join], parameter[name[base_iri], name[path]]]]]][<ast.Slice object at 0x7da20c76cca0>]
variable[base_netloc] assign[=] call[name[_normalize_netloc], parameter[name[base_scheme], name[base_netloc]]]
variable[cur_netloc] assign[=] call[name[_normalize_netloc], parameter[name[cur_scheme], name[cur_netloc]]]
if name[collapse_http_schemes] begin[:]
for taget[name[scheme]] in starred[tuple[[<ast.Name object at 0x7da20c76ef20>, <ast.Name object at 0x7da20c76e230>]]] begin[:]
if compare[name[scheme] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da20c76d420>, <ast.Constant object at 0x7da20c76c430>]]] begin[:]
return[constant[None]]
if compare[name[base_netloc] not_equal[!=] name[cur_netloc]] begin[:]
return[constant[None]]
variable[base_path] assign[=] call[name[base_path].rstrip, parameter[constant[/]]]
if <ast.UnaryOp object at 0x7da20c76fdf0> begin[:]
return[constant[None]]
return[binary_operation[constant[/] + call[call[name[cur_path]][<ast.Slice object at 0x7da20c76ec50>].lstrip, parameter[constant[/]]]]] | keyword[def] identifier[extract_path_info] (
identifier[environ_or_baseurl] ,
identifier[path_or_url] ,
identifier[charset] = literal[string] ,
identifier[errors] = literal[string] ,
identifier[collapse_http_schemes] = keyword[True] ,
):
literal[string]
keyword[def] identifier[_normalize_netloc] ( identifier[scheme] , identifier[netloc] ):
identifier[parts] = identifier[netloc] . identifier[split] ( literal[string] , literal[int] )[- literal[int] ]. identifier[split] ( literal[string] , literal[int] )
keyword[if] identifier[len] ( identifier[parts] )== literal[int] :
identifier[netloc] , identifier[port] = identifier[parts]
keyword[if] ( identifier[scheme] == literal[string] keyword[and] identifier[port] == literal[string] ) keyword[or] (
identifier[scheme] == literal[string] keyword[and] identifier[port] == literal[string]
):
identifier[port] = keyword[None]
keyword[else] :
identifier[netloc] = identifier[parts] [ literal[int] ]
identifier[port] = keyword[None]
keyword[if] identifier[port] keyword[is] keyword[not] keyword[None] :
identifier[netloc] += literal[string] + identifier[port]
keyword[return] identifier[netloc]
identifier[path] = identifier[uri_to_iri] ( identifier[path_or_url] , identifier[charset] , identifier[errors] )
keyword[if] identifier[isinstance] ( identifier[environ_or_baseurl] , identifier[dict] ):
identifier[environ_or_baseurl] = identifier[get_current_url] ( identifier[environ_or_baseurl] , identifier[root_only] = keyword[True] )
identifier[base_iri] = identifier[uri_to_iri] ( identifier[environ_or_baseurl] , identifier[charset] , identifier[errors] )
identifier[base_scheme] , identifier[base_netloc] , identifier[base_path] = identifier[url_parse] ( identifier[base_iri] )[: literal[int] ]
identifier[cur_scheme] , identifier[cur_netloc] , identifier[cur_path] ,= identifier[url_parse] ( identifier[url_join] ( identifier[base_iri] , identifier[path] ))[: literal[int] ]
identifier[base_netloc] = identifier[_normalize_netloc] ( identifier[base_scheme] , identifier[base_netloc] )
identifier[cur_netloc] = identifier[_normalize_netloc] ( identifier[cur_scheme] , identifier[cur_netloc] )
keyword[if] identifier[collapse_http_schemes] :
keyword[for] identifier[scheme] keyword[in] identifier[base_scheme] , identifier[cur_scheme] :
keyword[if] identifier[scheme] keyword[not] keyword[in] ( literal[string] , literal[string] ):
keyword[return] keyword[None]
keyword[else] :
keyword[if] keyword[not] ( identifier[base_scheme] keyword[in] ( literal[string] , literal[string] ) keyword[and] identifier[base_scheme] == identifier[cur_scheme] ):
keyword[return] keyword[None]
keyword[if] identifier[base_netloc] != identifier[cur_netloc] :
keyword[return] keyword[None]
identifier[base_path] = identifier[base_path] . identifier[rstrip] ( literal[string] )
keyword[if] keyword[not] identifier[cur_path] . identifier[startswith] ( identifier[base_path] ):
keyword[return] keyword[None]
keyword[return] literal[string] + identifier[cur_path] [ identifier[len] ( identifier[base_path] ):]. identifier[lstrip] ( literal[string] ) | def extract_path_info(environ_or_baseurl, path_or_url, charset='utf-8', errors='werkzeug.url_quote', collapse_http_schemes=True):
"""Extracts the path info from the given URL (or WSGI environment) and
path. The path info returned is a unicode string, not a bytestring
suitable for a WSGI environment. The URLs might also be IRIs.
If the path info could not be determined, `None` is returned.
Some examples:
>>> extract_path_info('http://example.com/app', '/app/hello')
u'/hello'
>>> extract_path_info('http://example.com/app',
... 'https://example.com/app/hello')
u'/hello'
>>> extract_path_info('http://example.com/app',
... 'https://example.com/app/hello',
... collapse_http_schemes=False) is None
True
Instead of providing a base URL you can also pass a WSGI environment.
:param environ_or_baseurl: a WSGI environment dict, a base URL or
base IRI. This is the root of the
application.
:param path_or_url: an absolute path from the server root, a
relative path (in which case it's the path info)
or a full URL. Also accepts IRIs and unicode
parameters.
:param charset: the charset for byte data in URLs
:param errors: the error handling on decode
:param collapse_http_schemes: if set to `False` the algorithm does
not assume that http and https on the
same server point to the same
resource.
.. versionchanged:: 0.15
The ``errors`` parameter defaults to leaving invalid bytes
quoted instead of replacing them.
.. versionadded:: 0.6
"""
def _normalize_netloc(scheme, netloc):
parts = netloc.split(u'@', 1)[-1].split(u':', 1)
if len(parts) == 2:
(netloc, port) = parts
if scheme == u'http' and port == u'80' or (scheme == u'https' and port == u'443'):
port = None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
netloc = parts[0]
port = None
if port is not None:
netloc += u':' + port # depends on [control=['if'], data=['port']]
return netloc
# make sure whatever we are working on is a IRI and parse it
path = uri_to_iri(path_or_url, charset, errors)
if isinstance(environ_or_baseurl, dict):
environ_or_baseurl = get_current_url(environ_or_baseurl, root_only=True) # depends on [control=['if'], data=[]]
base_iri = uri_to_iri(environ_or_baseurl, charset, errors)
(base_scheme, base_netloc, base_path) = url_parse(base_iri)[:3]
(cur_scheme, cur_netloc, cur_path) = url_parse(url_join(base_iri, path))[:3]
# normalize the network location
base_netloc = _normalize_netloc(base_scheme, base_netloc)
cur_netloc = _normalize_netloc(cur_scheme, cur_netloc)
# is that IRI even on a known HTTP scheme?
if collapse_http_schemes:
for scheme in (base_scheme, cur_scheme):
if scheme not in (u'http', u'https'):
return None # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['scheme']] # depends on [control=['if'], data=[]]
elif not (base_scheme in (u'http', u'https') and base_scheme == cur_scheme):
return None # depends on [control=['if'], data=[]]
# are the netlocs compatible?
if base_netloc != cur_netloc:
return None # depends on [control=['if'], data=[]]
# are we below the application path?
base_path = base_path.rstrip(u'/')
if not cur_path.startswith(base_path):
return None # depends on [control=['if'], data=[]]
return u'/' + cur_path[len(base_path):].lstrip(u'/') |
def upload_file(source,
                name,
                extra_args=None,
                region=None,
                key=None,
                keyid=None,
                profile=None):
    '''
    Upload a local file as an S3 object.

    CLI Example:

    .. code-block:: bash

        salt myminion boto_s3.upload_file \\
                         /path/to/local/file \\
                         my_bucket/path/to/object \\
                         region=us-east-1 \\
                         key=key \\
                         keyid=keyid \\
                         profile=profile \\
    '''
    # ``name`` is "<bucket>/<object key>"; split on the first slash only so
    # the object key may itself contain slashes.
    bucket, _, s3_key = name.partition('/')
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    try:
        conn.upload_file(source, bucket, s3_key, ExtraArgs=extra_args)
    except boto3.exceptions.S3UploadFailedError as exc:
        # Report the boto error in the usual salt return shape.
        return {'error': __utils__['boto3.get_error'](exc)}
    else:
        log.info('S3 object uploaded to %s', name)
        return {'result': True}
constant[
Upload a local file as an S3 object.
CLI Example:
.. code-block:: bash
salt myminion boto_s3.upload_file \
/path/to/local/file \
my_bucket/path/to/object \
region=us-east-1 \
key=key \
keyid=keyid \
profile=profile \
]
<ast.Tuple object at 0x7da18c4cd210> assign[=] call[name[name].partition, parameter[constant[/]]]
variable[conn] assign[=] call[name[_get_conn], parameter[]]
<ast.Try object at 0x7da18c4cf100>
call[name[log].info, parameter[constant[S3 object uploaded to %s], name[name]]]
return[dictionary[[<ast.Constant object at 0x7da18c4ce830>], [<ast.Constant object at 0x7da18c4cecb0>]]] | keyword[def] identifier[upload_file] (
identifier[source] ,
identifier[name] ,
identifier[extra_args] = keyword[None] ,
identifier[region] = keyword[None] ,
identifier[key] = keyword[None] ,
identifier[keyid] = keyword[None] ,
identifier[profile] = keyword[None] ,
):
literal[string]
identifier[bucket] , identifier[_] , identifier[s3_key] = identifier[name] . identifier[partition] ( literal[string] )
identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
keyword[try] :
identifier[conn] . identifier[upload_file] ( identifier[source] , identifier[bucket] , identifier[s3_key] , identifier[ExtraArgs] = identifier[extra_args] )
keyword[except] identifier[boto3] . identifier[exceptions] . identifier[S3UploadFailedError] keyword[as] identifier[e] :
keyword[return] { literal[string] : identifier[__utils__] [ literal[string] ]( identifier[e] )}
identifier[log] . identifier[info] ( literal[string] , identifier[name] )
keyword[return] { literal[string] : keyword[True] } | def upload_file(source, name, extra_args=None, region=None, key=None, keyid=None, profile=None):
"""
Upload a local file as an S3 object.
CLI Example:
.. code-block:: bash
salt myminion boto_s3.upload_file \\
/path/to/local/file \\
my_bucket/path/to/object \\
region=us-east-1 \\
key=key \\
keyid=keyid \\
profile=profile \\
"""
(bucket, _, s3_key) = name.partition('/')
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
try:
conn.upload_file(source, bucket, s3_key, ExtraArgs=extra_args) # depends on [control=['try'], data=[]]
except boto3.exceptions.S3UploadFailedError as e:
return {'error': __utils__['boto3.get_error'](e)} # depends on [control=['except'], data=['e']]
log.info('S3 object uploaded to %s', name)
return {'result': True} |
def get_parameter_defaults(self, include_flags=True):
    """
    Get a dict mapping parameter names to their defaults (if set).

    :rtype: dict[str, object]
    """
    defaults = {}
    for name, parameter in self.parameters.items():
        if parameter.default is None:
            # Parameters without an explicit default are omitted.
            continue
        if not include_flags and parameter.type == 'flag':
            continue
        defaults[name] = parameter.default
    return defaults
constant[
Get a dict mapping parameter names to their defaults (if set).
:rtype: dict[str, object]
]
return[<ast.DictComp object at 0x7da18dc06320>] | keyword[def] identifier[get_parameter_defaults] ( identifier[self] , identifier[include_flags] = keyword[True] ):
literal[string]
keyword[return] {
identifier[name] : identifier[parameter] . identifier[default]
keyword[for] ( identifier[name] , identifier[parameter] )
keyword[in] identifier[self] . identifier[parameters] . identifier[items] ()
keyword[if] identifier[parameter] . identifier[default] keyword[is] keyword[not] keyword[None] keyword[and] ( identifier[include_flags] keyword[or] identifier[parameter] . identifier[type] != literal[string] )
} | def get_parameter_defaults(self, include_flags=True):
"""
Get a dict mapping parameter names to their defaults (if set).
:rtype: dict[str, object]
"""
return {name: parameter.default for (name, parameter) in self.parameters.items() if parameter.default is not None and (include_flags or parameter.type != 'flag')} |
def _add_pending_action(self, p_action, p_size):
    """
    Creates an action waiting for execution and forwards it to the mainloop.
    """
    def callback(*args):
        # Run the stored action when the mainloop gets to it, then clear
        # the pending key state.
        self.resolve_action(p_action, p_size)
        self.keystate = None

    urwid.emit_signal(self, 'add_pending_action', callback)
constant[
Creates action waiting for execution and forwards it to the mainloop.
]
def function[generate_callback, parameter[]]:
def function[callback, parameter[]]:
call[name[self].resolve_action, parameter[name[p_action], name[p_size]]]
name[self].keystate assign[=] constant[None]
return[name[callback]]
call[name[urwid].emit_signal, parameter[name[self], constant[add_pending_action], call[name[generate_callback], parameter[]]]] | keyword[def] identifier[_add_pending_action] ( identifier[self] , identifier[p_action] , identifier[p_size] ):
literal[string]
keyword[def] identifier[generate_callback] ():
keyword[def] identifier[callback] (* identifier[args] ):
identifier[self] . identifier[resolve_action] ( identifier[p_action] , identifier[p_size] )
identifier[self] . identifier[keystate] = keyword[None]
keyword[return] identifier[callback]
identifier[urwid] . identifier[emit_signal] ( identifier[self] , literal[string] , identifier[generate_callback] ()) | def _add_pending_action(self, p_action, p_size):
"""
Creates action waiting for execution and forwards it to the mainloop.
"""
def generate_callback():
def callback(*args):
self.resolve_action(p_action, p_size)
self.keystate = None
return callback
urwid.emit_signal(self, 'add_pending_action', generate_callback()) |
def coefficients(deriv, acc):
    """
    Calculate the finite difference coefficients for a given derivative
    order and accuracy order, assuming a uniform underlying grid. This
    function is available at the `findiff` package level.

    :param deriv: int > 0: The derivative order.
    :param acc: even int > 0: The accuracy order.

    :return: dict with the finite difference coefficients and corresponding offsets
    """
    # An odd accuracy order is bumped up to the next even value.
    if acc % 2 == 1:
        acc += 1
    # Size of the central stencil and the number of points on each side.
    num_central = 2 * math.floor((deriv + 1) / 2) - 1 + acc
    side = num_central // 2
    # One-sided stencils take one extra point for even derivative orders.
    num_coef = num_central + 1 if deriv % 2 == 0 else num_central
    return {
        "center": _calc_coef(side, side, deriv),
        "forward": _calc_coef(0, num_coef - 1, deriv),
        "backward": _calc_coef(num_coef - 1, 0, deriv),
    }
constant[
Calculates the finite difference coefficients for given derivative order and accuracy order.
Assumes that the underlying grid is uniform. This function is available at the `findiff`
package level.
:param deriv: int > 0: The derivative order.
:param acc: even int > 0: The accuracy order.
:return: dict with the finite difference coefficients and corresponding offsets
]
if compare[binary_operation[name[acc] <ast.Mod object at 0x7da2590d6920> constant[2]] equal[==] constant[1]] begin[:]
<ast.AugAssign object at 0x7da1b0b35fc0>
variable[ret] assign[=] dictionary[[], []]
variable[num_central] assign[=] binary_operation[binary_operation[binary_operation[constant[2] * call[name[math].floor, parameter[binary_operation[binary_operation[name[deriv] + constant[1]] / constant[2]]]]] - constant[1]] + name[acc]]
variable[num_side] assign[=] binary_operation[name[num_central] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]
call[name[ret]][constant[center]] assign[=] call[name[_calc_coef], parameter[name[num_side], name[num_side], name[deriv]]]
if compare[binary_operation[name[deriv] <ast.Mod object at 0x7da2590d6920> constant[2]] equal[==] constant[0]] begin[:]
variable[num_coef] assign[=] binary_operation[name[num_central] + constant[1]]
call[name[ret]][constant[forward]] assign[=] call[name[_calc_coef], parameter[constant[0], binary_operation[name[num_coef] - constant[1]], name[deriv]]]
call[name[ret]][constant[backward]] assign[=] call[name[_calc_coef], parameter[binary_operation[name[num_coef] - constant[1]], constant[0], name[deriv]]]
return[name[ret]] | keyword[def] identifier[coefficients] ( identifier[deriv] , identifier[acc] ):
literal[string]
keyword[if] identifier[acc] % literal[int] == literal[int] :
identifier[acc] += literal[int]
identifier[ret] ={}
identifier[num_central] = literal[int] * identifier[math] . identifier[floor] (( identifier[deriv] + literal[int] )/ literal[int] )- literal[int] + identifier[acc]
identifier[num_side] = identifier[num_central] // literal[int]
identifier[ret] [ literal[string] ]= identifier[_calc_coef] ( identifier[num_side] , identifier[num_side] , identifier[deriv] )
keyword[if] identifier[deriv] % literal[int] == literal[int] :
identifier[num_coef] = identifier[num_central] + literal[int]
keyword[else] :
identifier[num_coef] = identifier[num_central]
identifier[ret] [ literal[string] ]= identifier[_calc_coef] ( literal[int] , identifier[num_coef] - literal[int] , identifier[deriv] )
identifier[ret] [ literal[string] ]= identifier[_calc_coef] ( identifier[num_coef] - literal[int] , literal[int] , identifier[deriv] )
keyword[return] identifier[ret] | def coefficients(deriv, acc):
"""
Calculates the finite difference coefficients for given derivative order and accuracy order.
Assumes that the underlying grid is uniform. This function is available at the `findiff`
package level.
:param deriv: int > 0: The derivative order.
:param acc: even int > 0: The accuracy order.
:return: dict with the finite difference coefficients and corresponding offsets
"""
if acc % 2 == 1:
acc += 1 # depends on [control=['if'], data=[]]
ret = {}
# Determine central coefficients
num_central = 2 * math.floor((deriv + 1) / 2) - 1 + acc
num_side = num_central // 2
ret['center'] = _calc_coef(num_side, num_side, deriv)
# Determine forward coefficients
if deriv % 2 == 0:
num_coef = num_central + 1 # depends on [control=['if'], data=[]]
else:
num_coef = num_central
ret['forward'] = _calc_coef(0, num_coef - 1, deriv)
# Determine backward coefficients
ret['backward'] = _calc_coef(num_coef - 1, 0, deriv)
return ret |
def save_formset(self, request, form, formset, change):
    """
    Save the inline formset, stamping each Photo with the currently
    authenticated user as its author.

    Because the formset is saved with ``commit=False``, objects marked
    for deletion and many-to-many data are not persisted automatically;
    both are handled explicitly below, as required by Django's
    ``ModelAdmin.save_formset`` contract.
    """
    instances = formset.save(commit=False)
    # commit=False leaves objects flagged for deletion untouched;
    # delete them explicitly or the admin delete checkbox is a no-op.
    for obj in formset.deleted_objects:
        obj.delete()
    for instance in instances:
        if isinstance(instance, Photo):
            instance.author = request.user
        instance.save()
    # commit=False also skips many-to-many data; persist it now.
    formset.save_m2m()
constant[
For each photo set it's author to currently authenticated user.
]
variable[instances] assign[=] call[name[formset].save, parameter[]]
for taget[name[instance]] in starred[name[instances]] begin[:]
if call[name[isinstance], parameter[name[instance], name[Photo]]] begin[:]
name[instance].author assign[=] name[request].user
call[name[instance].save, parameter[]] | keyword[def] identifier[save_formset] ( identifier[self] , identifier[request] , identifier[form] , identifier[formset] , identifier[change] ):
literal[string]
identifier[instances] = identifier[formset] . identifier[save] ( identifier[commit] = keyword[False] )
keyword[for] identifier[instance] keyword[in] identifier[instances] :
keyword[if] identifier[isinstance] ( identifier[instance] , identifier[Photo] ):
identifier[instance] . identifier[author] = identifier[request] . identifier[user]
identifier[instance] . identifier[save] () | def save_formset(self, request, form, formset, change):
"""
For each photo set it's author to currently authenticated user.
"""
instances = formset.save(commit=False)
for instance in instances:
if isinstance(instance, Photo):
instance.author = request.user # depends on [control=['if'], data=[]]
instance.save() # depends on [control=['for'], data=['instance']] |
def fuse_exit():
    '''
    Shut down the FUSE mount and make the blocking call to FUSE(...)
    return, much like sending SIGINT to the process.

    Flags the native FUSE session as terminated, which causes any
    running FUSE event loop to exit on the next opportunity.
    (see fuse.c::fuse_exit)
    '''
    context = _libfuse.fuse_get_context()
    session_ptr = ctypes.c_void_p(context.contents.fuse)
    _libfuse.fuse_exit(session_ptr)
constant[
This will shutdown the FUSE mount and cause the call to FUSE(...) to
return, similar to sending SIGINT to the process.
Flags the native FUSE session as terminated and will cause any running FUSE
event loops to exit on the next opportunity. (see fuse.c::fuse_exit)
]
variable[fuse_ptr] assign[=] call[name[ctypes].c_void_p, parameter[call[name[_libfuse].fuse_get_context, parameter[]].contents.fuse]]
call[name[_libfuse].fuse_exit, parameter[name[fuse_ptr]]] | keyword[def] identifier[fuse_exit] ():
literal[string]
identifier[fuse_ptr] = identifier[ctypes] . identifier[c_void_p] ( identifier[_libfuse] . identifier[fuse_get_context] (). identifier[contents] . identifier[fuse] )
identifier[_libfuse] . identifier[fuse_exit] ( identifier[fuse_ptr] ) | def fuse_exit():
"""
This will shutdown the FUSE mount and cause the call to FUSE(...) to
return, similar to sending SIGINT to the process.
Flags the native FUSE session as terminated and will cause any running FUSE
event loops to exit on the next opportunity. (see fuse.c::fuse_exit)
"""
fuse_ptr = ctypes.c_void_p(_libfuse.fuse_get_context().contents.fuse)
_libfuse.fuse_exit(fuse_ptr) |
def remove_component(self, entity: int, component_type: Any) -> int:
    """Remove a Component instance from an Entity, by type.

    A Component instance is removed by providing its type. For example:
    world.delete_component(enemy_a, Velocity) removes the Velocity
    instance from the Entity enemy_a.

    Raises a KeyError if either the given entity or Component type does
    not exist in the database.

    :param entity: The Entity to remove the Component from.
    :param component_type: The type of the Component to remove.
    """
    owners = self._components[component_type]
    owners.discard(entity)
    if not owners:
        # No Entity carries this Component type any more; drop the key.
        del self._components[component_type]
    carried = self._entities[entity]
    del carried[component_type]
    if not carried:
        # The Entity holds no Components at all now; drop it too.
        del self._entities[entity]
    self.clear_cache()
    return entity
constant[Remove a Component instance from an Entity, by type.
A Component instance can be removed by providing it's type.
For example: world.delete_component(enemy_a, Velocity) will remove
the Velocity instance from the Entity enemy_a.
Raises a KeyError if either the given entity or Component type does
not exist in the database.
:param entity: The Entity to remove the Component from.
:param component_type: The type of the Component to remove.
]
call[call[name[self]._components][name[component_type]].discard, parameter[name[entity]]]
if <ast.UnaryOp object at 0x7da1b0ef06a0> begin[:]
<ast.Delete object at 0x7da1b0d54280>
<ast.Delete object at 0x7da1b0d543d0>
if <ast.UnaryOp object at 0x7da1b0d57e20> begin[:]
<ast.Delete object at 0x7da1b0d54ee0>
call[name[self].clear_cache, parameter[]]
return[name[entity]] | keyword[def] identifier[remove_component] ( identifier[self] , identifier[entity] : identifier[int] , identifier[component_type] : identifier[Any] )-> identifier[int] :
literal[string]
identifier[self] . identifier[_components] [ identifier[component_type] ]. identifier[discard] ( identifier[entity] )
keyword[if] keyword[not] identifier[self] . identifier[_components] [ identifier[component_type] ]:
keyword[del] identifier[self] . identifier[_components] [ identifier[component_type] ]
keyword[del] identifier[self] . identifier[_entities] [ identifier[entity] ][ identifier[component_type] ]
keyword[if] keyword[not] identifier[self] . identifier[_entities] [ identifier[entity] ]:
keyword[del] identifier[self] . identifier[_entities] [ identifier[entity] ]
identifier[self] . identifier[clear_cache] ()
keyword[return] identifier[entity] | def remove_component(self, entity: int, component_type: Any) -> int:
"""Remove a Component instance from an Entity, by type.
A Component instance can be removed by providing it's type.
For example: world.delete_component(enemy_a, Velocity) will remove
the Velocity instance from the Entity enemy_a.
Raises a KeyError if either the given entity or Component type does
not exist in the database.
:param entity: The Entity to remove the Component from.
:param component_type: The type of the Component to remove.
"""
self._components[component_type].discard(entity)
if not self._components[component_type]:
del self._components[component_type] # depends on [control=['if'], data=[]]
del self._entities[entity][component_type]
if not self._entities[entity]:
del self._entities[entity] # depends on [control=['if'], data=[]]
self.clear_cache()
return entity |
def readSettings(settings_path=None):
    '''
    Reads the settings corresponding to the plugin from where the method is called.
    This function has to be called in the __init__ method of the plugin class.
    Settings are stored in a settings.json file in the plugin folder.
    Here is an example of such a file:

    [
    {"name":"mysetting",
     "label": "My setting",
     "description": "A setting to customize my plugin",
     "type": "string",
     "default": "dummy string",
     "group": "Group 1"
     "onEdit": "def f():\\n\\tprint "Value edited in settings dialog"
     "onChange": "def f():\\n\\tprint "New settings value has been saved"
    },
    {"name":"anothersetting",
     "label": "Another setting",
     "description": "Another setting to customize my plugin",
     "type": "number",
     "default": 0,
     "group": "Group 2"
    },
    {"name":"achoicesetting",
     "label": "A choice setting",
     "description": "A setting to select from a set of possible options",
     "type": "choice",
     "default": "option 1",
     "options":["option 1", "option 2", "option 3"],
     "group": "Group 2"
    }
    ]

    Available types for settings are: string, bool, number, choice, crs and text (a multiline string)

    The onEdit property contains a function that will be executed when the user edits the value
    in the settings dialog. It should return False if, after it has been executed, the setting
    should not be modified and should recover its original value.

    The onChange property contains a function that will be executed when the setting is changed after
    closing the settings dialog, or programmatically by calling the setPluginSetting method

    Both onEdit and onChange are optional properties
    '''
    # NOTE: the docstring must be the first statement of the function
    # (PEP 257); placing it after ``global`` left ``__doc__`` unset.
    global _settings
    # The plugin namespace is the top-level package of the caller.
    namespace = _callerName().split(".")[0]
    # Default to the settings.json file sitting next to the calling module.
    settings_path = settings_path or os.path.join(os.path.dirname(_callerPath()), "settings.json")
    with open(settings_path) as f:
        _settings[namespace] = json.load(f)
<ast.Global object at 0x7da1b0f58820>
constant[
Reads the settings corresponding to the plugin from where the method is called.
This function has to be called in the __init__ method of the plugin class.
Settings are stored in a settings.json file in the plugin folder.
Here is an eample of such a file:
[
{"name":"mysetting",
"label": "My setting",
"description": "A setting to customize my plugin",
"type": "string",
"default": "dummy string",
"group": "Group 1"
"onEdit": "def f():\n\tprint "Value edited in settings dialog"
"onChange": "def f():\n\tprint "New settings value has been saved"
},
{"name":"anothersetting",
"label": "Another setting",
"description": "Another setting to customize my plugin",
"type": "number",
"default": 0,
"group": "Group 2"
},
{"name":"achoicesetting",
"label": "A choice setting",
"description": "A setting to select from a set of possible options",
"type": "choice",
"default": "option 1",
"options":["option 1", "option 2", "option 3"],
"group": "Group 2"
}
]
Available types for settings are: string, bool, number, choice, crs and text (a multiline string)
The onEdit property contains a function that will be executed when the user edits the value
in the settings dialog. It shouldl return false if, after it has been executed, the setting
should not be modified and should recover its original value.
The onEdit property contains a function that will be executed when the setting is changed after
closing the settings dialog, or programatically by callin the setPluginSetting method
Both onEdit and onChange are optional properties
]
variable[namespace] assign[=] call[call[call[name[_callerName], parameter[]].split, parameter[constant[.]]]][constant[0]]
variable[settings_path] assign[=] <ast.BoolOp object at 0x7da1b0f5a560>
with call[name[open], parameter[name[settings_path]]] begin[:]
call[name[_settings]][name[namespace]] assign[=] call[name[json].load, parameter[name[f]]] | keyword[def] identifier[readSettings] ( identifier[settings_path] = keyword[None] ):
keyword[global] identifier[_settings]
literal[string]
identifier[namespace] = identifier[_callerName] (). identifier[split] ( literal[string] )[ literal[int] ]
identifier[settings_path] = identifier[settings_path] keyword[or] identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[_callerPath] ()), literal[string] )
keyword[with] identifier[open] ( identifier[settings_path] ) keyword[as] identifier[f] :
identifier[_settings] [ identifier[namespace] ]= identifier[json] . identifier[load] ( identifier[f] ) | def readSettings(settings_path=None):
global _settings
'\n Reads the settings corresponding to the plugin from where the method is called.\n This function has to be called in the __init__ method of the plugin class.\n Settings are stored in a settings.json file in the plugin folder.\n Here is an eample of such a file:\n\n [\n {"name":"mysetting",\n "label": "My setting",\n "description": "A setting to customize my plugin",\n "type": "string",\n "default": "dummy string",\n "group": "Group 1"\n "onEdit": "def f():\\n\\tprint "Value edited in settings dialog"\n "onChange": "def f():\\n\\tprint "New settings value has been saved"\n },\n {"name":"anothersetting",\n "label": "Another setting",\n "description": "Another setting to customize my plugin",\n "type": "number",\n "default": 0,\n "group": "Group 2"\n },\n {"name":"achoicesetting",\n "label": "A choice setting",\n "description": "A setting to select from a set of possible options",\n "type": "choice",\n "default": "option 1",\n "options":["option 1", "option 2", "option 3"],\n "group": "Group 2"\n }\n ]\n\n Available types for settings are: string, bool, number, choice, crs and text (a multiline string)\n\n The onEdit property contains a function that will be executed when the user edits the value\n in the settings dialog. It shouldl return false if, after it has been executed, the setting\n should not be modified and should recover its original value.\n\n The onEdit property contains a function that will be executed when the setting is changed after\n closing the settings dialog, or programatically by callin the setPluginSetting method\n\n Both onEdit and onChange are optional properties\n\n '
namespace = _callerName().split('.')[0]
settings_path = settings_path or os.path.join(os.path.dirname(_callerPath()), 'settings.json')
with open(settings_path) as f:
_settings[namespace] = json.load(f) # depends on [control=['with'], data=['f']] |
def load_from_db(cls, callback_etat=print, out=None):
"""Launch data fetching then load data received.
The method _load_remote_db should be overridden.
If out is given, datas are set in it, instead of returning a new base object.
"""
dic = cls._load_remote_db(callback_etat)
callback_etat("Chargement...", 2, 3)
if out is None:
return cls(dic)
cls.__init__(out, datas=dic) | def function[load_from_db, parameter[cls, callback_etat, out]]:
constant[Launch data fetching then load data received.
The method _load_remote_db should be overridden.
If out is given, datas are set in it, instead of returning a new base object.
]
variable[dic] assign[=] call[name[cls]._load_remote_db, parameter[name[callback_etat]]]
call[name[callback_etat], parameter[constant[Chargement...], constant[2], constant[3]]]
if compare[name[out] is constant[None]] begin[:]
return[call[name[cls], parameter[name[dic]]]]
call[name[cls].__init__, parameter[name[out]]] | keyword[def] identifier[load_from_db] ( identifier[cls] , identifier[callback_etat] = identifier[print] , identifier[out] = keyword[None] ):
literal[string]
identifier[dic] = identifier[cls] . identifier[_load_remote_db] ( identifier[callback_etat] )
identifier[callback_etat] ( literal[string] , literal[int] , literal[int] )
keyword[if] identifier[out] keyword[is] keyword[None] :
keyword[return] identifier[cls] ( identifier[dic] )
identifier[cls] . identifier[__init__] ( identifier[out] , identifier[datas] = identifier[dic] ) | def load_from_db(cls, callback_etat=print, out=None):
"""Launch data fetching then load data received.
The method _load_remote_db should be overridden.
If out is given, datas are set in it, instead of returning a new base object.
"""
dic = cls._load_remote_db(callback_etat)
callback_etat('Chargement...', 2, 3)
if out is None:
return cls(dic) # depends on [control=['if'], data=[]]
cls.__init__(out, datas=dic) |
def get_refund_transaction(self):
"""Retrieve the refund transaction for this transaction, immediately
after refunding.
After calling `refund()` to refund a transaction, call this method to
retrieve the new transaction representing the refund.
"""
try:
url = self._refund_transaction_url
except AttributeError:
raise ValueError("No refund transaction is available for this transaction")
resp, elem = self.element_for_url(url)
value = self.value_for_element(elem)
return value | def function[get_refund_transaction, parameter[self]]:
constant[Retrieve the refund transaction for this transaction, immediately
after refunding.
After calling `refund()` to refund a transaction, call this method to
retrieve the new transaction representing the refund.
]
<ast.Try object at 0x7da1b040c490>
<ast.Tuple object at 0x7da1b040f940> assign[=] call[name[self].element_for_url, parameter[name[url]]]
variable[value] assign[=] call[name[self].value_for_element, parameter[name[elem]]]
return[name[value]] | keyword[def] identifier[get_refund_transaction] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[url] = identifier[self] . identifier[_refund_transaction_url]
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[resp] , identifier[elem] = identifier[self] . identifier[element_for_url] ( identifier[url] )
identifier[value] = identifier[self] . identifier[value_for_element] ( identifier[elem] )
keyword[return] identifier[value] | def get_refund_transaction(self):
"""Retrieve the refund transaction for this transaction, immediately
after refunding.
After calling `refund()` to refund a transaction, call this method to
retrieve the new transaction representing the refund.
"""
try:
url = self._refund_transaction_url # depends on [control=['try'], data=[]]
except AttributeError:
raise ValueError('No refund transaction is available for this transaction') # depends on [control=['except'], data=[]]
(resp, elem) = self.element_for_url(url)
value = self.value_for_element(elem)
return value |
def generate_p_star(num_groups):
"""Describe the order in which groups move
Arguments
---------
num_groups : int
Returns
-------
np.ndarray
Matrix P* - size (g-by-g)
"""
p_star = np.eye(num_groups, num_groups)
rd.shuffle(p_star)
return p_star | def function[generate_p_star, parameter[num_groups]]:
constant[Describe the order in which groups move
Arguments
---------
num_groups : int
Returns
-------
np.ndarray
Matrix P* - size (g-by-g)
]
variable[p_star] assign[=] call[name[np].eye, parameter[name[num_groups], name[num_groups]]]
call[name[rd].shuffle, parameter[name[p_star]]]
return[name[p_star]] | keyword[def] identifier[generate_p_star] ( identifier[num_groups] ):
literal[string]
identifier[p_star] = identifier[np] . identifier[eye] ( identifier[num_groups] , identifier[num_groups] )
identifier[rd] . identifier[shuffle] ( identifier[p_star] )
keyword[return] identifier[p_star] | def generate_p_star(num_groups):
"""Describe the order in which groups move
Arguments
---------
num_groups : int
Returns
-------
np.ndarray
Matrix P* - size (g-by-g)
"""
p_star = np.eye(num_groups, num_groups)
rd.shuffle(p_star)
return p_star |
def read(self, pos, size, **kwargs):
"""
Read a packet from the stream.
:param int pos: The packet number to read from the sequence of the stream. May be None to append to the stream.
:param size: The size to read. May be symbolic.
:param short_reads: Whether to replace the size with a symbolic value constrained to less than or equal to the original size. If unspecified, will be chosen based on the state option.
:return: A tuple of the data read (a bitvector of the length that is the maximum length of the read) and the actual size of the read.
"""
short_reads = kwargs.pop('short_reads', None)
# sanity check on read/write modes
if self.write_mode is None:
self.write_mode = False
elif self.write_mode is True:
raise SimFileError("Cannot read and write to the same SimPackets")
# sanity check on packet number and determine if data is already present
if pos is None:
pos = len(self.content)
if pos < 0:
raise SimFileError("SimPacket.read(%d): Negative packet number?" % pos)
elif pos > len(self.content):
raise SimFileError("SimPacket.read(%d): Packet number is past frontier of %d?" % (pos, len(self.content)))
elif pos != len(self.content):
_, realsize = self.content[pos]
self.state.solver.add(size <= realsize)
if not self.state.solver.satisfiable():
raise SimFileError("Packet read size constraint made state unsatisfiable???")
return self.content[pos] + (pos+1,)
# typecheck
if type(size) is int:
size = self.state.solver.BVV(size, self.state.arch.bits)
# The read is on the frontier. let's generate a new packet.
orig_size = size
max_size = None
# if short reads are enabled, replace size with a symbol
if short_reads is True or (short_reads is None and sim_options.SHORT_READS in self.state.options):
size = self.state.solver.BVS('packetsize_%d_%s' % (len(self.content), self.ident), self.state.arch.bits, key=('file', self.ident, 'packetsize', len(self.content)))
self.state.solver.add(size <= orig_size)
# figure out the maximum size of the read
if not self.state.solver.symbolic(size):
max_size = self.state.solver.eval(size)
elif self.state.solver.satisfiable(extra_constraints=(size <= self.state.libc.max_packet_size,)):
l.info("Constraining symbolic packet size to be less than %d", self.state.libc.max_packet_size)
if not self.state.solver.is_true(orig_size <= self.state.libc.max_packet_size):
self.state.solver.add(size <= self.state.libc.max_packet_size)
if not self.state.solver.symbolic(orig_size):
max_size = min(self.state.solver.eval(orig_size), self.state.libc.max_packet_size)
else:
max_size = self.state.solver.max(size)
else:
max_size = self.state.solver.min(size)
l.warning("Could not constrain symbolic packet size to <= %d; using minimum %d for size", self.state.libc.max_packet_size, max_size)
self.state.solver.add(size == max_size)
# generate the packet data and return it
data = self.state.solver.BVS('packet_%d_%s' % (len(self.content), self.ident), max_size * self.state.arch.byte_width, key=('file', self.ident, 'packet', len(self.content)))
packet = (data, size)
self.content.append(packet)
return packet + (pos+1,) | def function[read, parameter[self, pos, size]]:
constant[
Read a packet from the stream.
:param int pos: The packet number to read from the sequence of the stream. May be None to append to the stream.
:param size: The size to read. May be symbolic.
:param short_reads: Whether to replace the size with a symbolic value constrained to less than or equal to the original size. If unspecified, will be chosen based on the state option.
:return: A tuple of the data read (a bitvector of the length that is the maximum length of the read) and the actual size of the read.
]
variable[short_reads] assign[=] call[name[kwargs].pop, parameter[constant[short_reads], constant[None]]]
if compare[name[self].write_mode is constant[None]] begin[:]
name[self].write_mode assign[=] constant[False]
if compare[name[pos] is constant[None]] begin[:]
variable[pos] assign[=] call[name[len], parameter[name[self].content]]
if compare[name[pos] less[<] constant[0]] begin[:]
<ast.Raise object at 0x7da18ede6d10>
if compare[call[name[type], parameter[name[size]]] is name[int]] begin[:]
variable[size] assign[=] call[name[self].state.solver.BVV, parameter[name[size], name[self].state.arch.bits]]
variable[orig_size] assign[=] name[size]
variable[max_size] assign[=] constant[None]
if <ast.BoolOp object at 0x7da20c76cb20> begin[:]
variable[size] assign[=] call[name[self].state.solver.BVS, parameter[binary_operation[constant[packetsize_%d_%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da20c76e830>, <ast.Attribute object at 0x7da20c76cc40>]]], name[self].state.arch.bits]]
call[name[self].state.solver.add, parameter[compare[name[size] less_or_equal[<=] name[orig_size]]]]
if <ast.UnaryOp object at 0x7da20c76d960> begin[:]
variable[max_size] assign[=] call[name[self].state.solver.eval, parameter[name[size]]]
variable[data] assign[=] call[name[self].state.solver.BVS, parameter[binary_operation[constant[packet_%d_%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18dc98580>, <ast.Attribute object at 0x7da18dc9b490>]]], binary_operation[name[max_size] * name[self].state.arch.byte_width]]]
variable[packet] assign[=] tuple[[<ast.Name object at 0x7da2047e9ff0>, <ast.Name object at 0x7da2047e80a0>]]
call[name[self].content.append, parameter[name[packet]]]
return[binary_operation[name[packet] + tuple[[<ast.BinOp object at 0x7da2047eaa40>]]]] | keyword[def] identifier[read] ( identifier[self] , identifier[pos] , identifier[size] ,** identifier[kwargs] ):
literal[string]
identifier[short_reads] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[self] . identifier[write_mode] keyword[is] keyword[None] :
identifier[self] . identifier[write_mode] = keyword[False]
keyword[elif] identifier[self] . identifier[write_mode] keyword[is] keyword[True] :
keyword[raise] identifier[SimFileError] ( literal[string] )
keyword[if] identifier[pos] keyword[is] keyword[None] :
identifier[pos] = identifier[len] ( identifier[self] . identifier[content] )
keyword[if] identifier[pos] < literal[int] :
keyword[raise] identifier[SimFileError] ( literal[string] % identifier[pos] )
keyword[elif] identifier[pos] > identifier[len] ( identifier[self] . identifier[content] ):
keyword[raise] identifier[SimFileError] ( literal[string] %( identifier[pos] , identifier[len] ( identifier[self] . identifier[content] )))
keyword[elif] identifier[pos] != identifier[len] ( identifier[self] . identifier[content] ):
identifier[_] , identifier[realsize] = identifier[self] . identifier[content] [ identifier[pos] ]
identifier[self] . identifier[state] . identifier[solver] . identifier[add] ( identifier[size] <= identifier[realsize] )
keyword[if] keyword[not] identifier[self] . identifier[state] . identifier[solver] . identifier[satisfiable] ():
keyword[raise] identifier[SimFileError] ( literal[string] )
keyword[return] identifier[self] . identifier[content] [ identifier[pos] ]+( identifier[pos] + literal[int] ,)
keyword[if] identifier[type] ( identifier[size] ) keyword[is] identifier[int] :
identifier[size] = identifier[self] . identifier[state] . identifier[solver] . identifier[BVV] ( identifier[size] , identifier[self] . identifier[state] . identifier[arch] . identifier[bits] )
identifier[orig_size] = identifier[size]
identifier[max_size] = keyword[None]
keyword[if] identifier[short_reads] keyword[is] keyword[True] keyword[or] ( identifier[short_reads] keyword[is] keyword[None] keyword[and] identifier[sim_options] . identifier[SHORT_READS] keyword[in] identifier[self] . identifier[state] . identifier[options] ):
identifier[size] = identifier[self] . identifier[state] . identifier[solver] . identifier[BVS] ( literal[string] %( identifier[len] ( identifier[self] . identifier[content] ), identifier[self] . identifier[ident] ), identifier[self] . identifier[state] . identifier[arch] . identifier[bits] , identifier[key] =( literal[string] , identifier[self] . identifier[ident] , literal[string] , identifier[len] ( identifier[self] . identifier[content] )))
identifier[self] . identifier[state] . identifier[solver] . identifier[add] ( identifier[size] <= identifier[orig_size] )
keyword[if] keyword[not] identifier[self] . identifier[state] . identifier[solver] . identifier[symbolic] ( identifier[size] ):
identifier[max_size] = identifier[self] . identifier[state] . identifier[solver] . identifier[eval] ( identifier[size] )
keyword[elif] identifier[self] . identifier[state] . identifier[solver] . identifier[satisfiable] ( identifier[extra_constraints] =( identifier[size] <= identifier[self] . identifier[state] . identifier[libc] . identifier[max_packet_size] ,)):
identifier[l] . identifier[info] ( literal[string] , identifier[self] . identifier[state] . identifier[libc] . identifier[max_packet_size] )
keyword[if] keyword[not] identifier[self] . identifier[state] . identifier[solver] . identifier[is_true] ( identifier[orig_size] <= identifier[self] . identifier[state] . identifier[libc] . identifier[max_packet_size] ):
identifier[self] . identifier[state] . identifier[solver] . identifier[add] ( identifier[size] <= identifier[self] . identifier[state] . identifier[libc] . identifier[max_packet_size] )
keyword[if] keyword[not] identifier[self] . identifier[state] . identifier[solver] . identifier[symbolic] ( identifier[orig_size] ):
identifier[max_size] = identifier[min] ( identifier[self] . identifier[state] . identifier[solver] . identifier[eval] ( identifier[orig_size] ), identifier[self] . identifier[state] . identifier[libc] . identifier[max_packet_size] )
keyword[else] :
identifier[max_size] = identifier[self] . identifier[state] . identifier[solver] . identifier[max] ( identifier[size] )
keyword[else] :
identifier[max_size] = identifier[self] . identifier[state] . identifier[solver] . identifier[min] ( identifier[size] )
identifier[l] . identifier[warning] ( literal[string] , identifier[self] . identifier[state] . identifier[libc] . identifier[max_packet_size] , identifier[max_size] )
identifier[self] . identifier[state] . identifier[solver] . identifier[add] ( identifier[size] == identifier[max_size] )
identifier[data] = identifier[self] . identifier[state] . identifier[solver] . identifier[BVS] ( literal[string] %( identifier[len] ( identifier[self] . identifier[content] ), identifier[self] . identifier[ident] ), identifier[max_size] * identifier[self] . identifier[state] . identifier[arch] . identifier[byte_width] , identifier[key] =( literal[string] , identifier[self] . identifier[ident] , literal[string] , identifier[len] ( identifier[self] . identifier[content] )))
identifier[packet] =( identifier[data] , identifier[size] )
identifier[self] . identifier[content] . identifier[append] ( identifier[packet] )
keyword[return] identifier[packet] +( identifier[pos] + literal[int] ,) | def read(self, pos, size, **kwargs):
"""
Read a packet from the stream.
:param int pos: The packet number to read from the sequence of the stream. May be None to append to the stream.
:param size: The size to read. May be symbolic.
:param short_reads: Whether to replace the size with a symbolic value constrained to less than or equal to the original size. If unspecified, will be chosen based on the state option.
:return: A tuple of the data read (a bitvector of the length that is the maximum length of the read) and the actual size of the read.
"""
short_reads = kwargs.pop('short_reads', None)
# sanity check on read/write modes
if self.write_mode is None:
self.write_mode = False # depends on [control=['if'], data=[]]
elif self.write_mode is True:
raise SimFileError('Cannot read and write to the same SimPackets') # depends on [control=['if'], data=[]]
# sanity check on packet number and determine if data is already present
if pos is None:
pos = len(self.content) # depends on [control=['if'], data=['pos']]
if pos < 0:
raise SimFileError('SimPacket.read(%d): Negative packet number?' % pos) # depends on [control=['if'], data=['pos']]
elif pos > len(self.content):
raise SimFileError('SimPacket.read(%d): Packet number is past frontier of %d?' % (pos, len(self.content))) # depends on [control=['if'], data=['pos']]
elif pos != len(self.content):
(_, realsize) = self.content[pos]
self.state.solver.add(size <= realsize)
if not self.state.solver.satisfiable():
raise SimFileError('Packet read size constraint made state unsatisfiable???') # depends on [control=['if'], data=[]]
return self.content[pos] + (pos + 1,) # depends on [control=['if'], data=['pos']]
# typecheck
if type(size) is int:
size = self.state.solver.BVV(size, self.state.arch.bits) # depends on [control=['if'], data=[]]
# The read is on the frontier. let's generate a new packet.
orig_size = size
max_size = None
# if short reads are enabled, replace size with a symbol
if short_reads is True or (short_reads is None and sim_options.SHORT_READS in self.state.options):
size = self.state.solver.BVS('packetsize_%d_%s' % (len(self.content), self.ident), self.state.arch.bits, key=('file', self.ident, 'packetsize', len(self.content)))
self.state.solver.add(size <= orig_size) # depends on [control=['if'], data=[]]
# figure out the maximum size of the read
if not self.state.solver.symbolic(size):
max_size = self.state.solver.eval(size) # depends on [control=['if'], data=[]]
elif self.state.solver.satisfiable(extra_constraints=(size <= self.state.libc.max_packet_size,)):
l.info('Constraining symbolic packet size to be less than %d', self.state.libc.max_packet_size)
if not self.state.solver.is_true(orig_size <= self.state.libc.max_packet_size):
self.state.solver.add(size <= self.state.libc.max_packet_size) # depends on [control=['if'], data=[]]
if not self.state.solver.symbolic(orig_size):
max_size = min(self.state.solver.eval(orig_size), self.state.libc.max_packet_size) # depends on [control=['if'], data=[]]
else:
max_size = self.state.solver.max(size) # depends on [control=['if'], data=[]]
else:
max_size = self.state.solver.min(size)
l.warning('Could not constrain symbolic packet size to <= %d; using minimum %d for size', self.state.libc.max_packet_size, max_size)
self.state.solver.add(size == max_size)
# generate the packet data and return it
data = self.state.solver.BVS('packet_%d_%s' % (len(self.content), self.ident), max_size * self.state.arch.byte_width, key=('file', self.ident, 'packet', len(self.content)))
packet = (data, size)
self.content.append(packet)
return packet + (pos + 1,) |
def join_right_in(self, *objs):
"""
Create a join condition, connect B and C
"""
if not objs:
return self.table.c[self.fielda]!=self.table.c[self.fielda]
else:
keys = get_objs_columns(objs, self.reference_fieldname)
return (self.table.c[self.fieldb] == self.reference_class.c[self.reference_fieldname]) & (self.table.c[self.fielda].in_(keys)) | def function[join_right_in, parameter[self]]:
constant[
Create a join condition, connect B and C
]
if <ast.UnaryOp object at 0x7da18c4cd420> begin[:]
return[compare[call[name[self].table.c][name[self].fielda] not_equal[!=] call[name[self].table.c][name[self].fielda]]] | keyword[def] identifier[join_right_in] ( identifier[self] ,* identifier[objs] ):
literal[string]
keyword[if] keyword[not] identifier[objs] :
keyword[return] identifier[self] . identifier[table] . identifier[c] [ identifier[self] . identifier[fielda] ]!= identifier[self] . identifier[table] . identifier[c] [ identifier[self] . identifier[fielda] ]
keyword[else] :
identifier[keys] = identifier[get_objs_columns] ( identifier[objs] , identifier[self] . identifier[reference_fieldname] )
keyword[return] ( identifier[self] . identifier[table] . identifier[c] [ identifier[self] . identifier[fieldb] ]== identifier[self] . identifier[reference_class] . identifier[c] [ identifier[self] . identifier[reference_fieldname] ])&( identifier[self] . identifier[table] . identifier[c] [ identifier[self] . identifier[fielda] ]. identifier[in_] ( identifier[keys] )) | def join_right_in(self, *objs):
"""
Create a join condition, connect B and C
"""
if not objs:
return self.table.c[self.fielda] != self.table.c[self.fielda] # depends on [control=['if'], data=[]]
else:
keys = get_objs_columns(objs, self.reference_fieldname)
return (self.table.c[self.fieldb] == self.reference_class.c[self.reference_fieldname]) & self.table.c[self.fielda].in_(keys) |
async def text(self,
*,
encoding: Optional[str] = None,
errors: str = 'strict') -> str:
"""Read response payload and decode."""
return await self._aws_text(encoding=encoding, errors=errors) | <ast.AsyncFunctionDef object at 0x7da1b05328f0> | keyword[async] keyword[def] identifier[text] ( identifier[self] ,
*,
identifier[encoding] : identifier[Optional] [ identifier[str] ]= keyword[None] ,
identifier[errors] : identifier[str] = literal[string] )-> identifier[str] :
literal[string]
keyword[return] keyword[await] identifier[self] . identifier[_aws_text] ( identifier[encoding] = identifier[encoding] , identifier[errors] = identifier[errors] ) | async def text(self, *, encoding: Optional[str]=None, errors: str='strict') -> str:
"""Read response payload and decode."""
return await self._aws_text(encoding=encoding, errors=errors) |
def get_script(script_name):
'''get_script will return a build script_name, if it is included
in singularity/build/scripts, otherwise will alert the user and return None
:param script_name: the name of the script to look for
'''
install_dir = get_installdir()
script_path = "%s/build/scripts/%s" %(install_dir,script_name)
if os.path.exists(script_path):
return script_path
else:
bot.error("Script %s is not included in singularity-python!" %script_path)
return None | def function[get_script, parameter[script_name]]:
constant[get_script will return a build script_name, if it is included
in singularity/build/scripts, otherwise will alert the user and return None
:param script_name: the name of the script to look for
]
variable[install_dir] assign[=] call[name[get_installdir], parameter[]]
variable[script_path] assign[=] binary_operation[constant[%s/build/scripts/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2041d8e80>, <ast.Name object at 0x7da2041d8970>]]]
if call[name[os].path.exists, parameter[name[script_path]]] begin[:]
return[name[script_path]] | keyword[def] identifier[get_script] ( identifier[script_name] ):
literal[string]
identifier[install_dir] = identifier[get_installdir] ()
identifier[script_path] = literal[string] %( identifier[install_dir] , identifier[script_name] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[script_path] ):
keyword[return] identifier[script_path]
keyword[else] :
identifier[bot] . identifier[error] ( literal[string] % identifier[script_path] )
keyword[return] keyword[None] | def get_script(script_name):
"""get_script will return a build script_name, if it is included
in singularity/build/scripts, otherwise will alert the user and return None
:param script_name: the name of the script to look for
"""
install_dir = get_installdir()
script_path = '%s/build/scripts/%s' % (install_dir, script_name)
if os.path.exists(script_path):
return script_path # depends on [control=['if'], data=[]]
else:
bot.error('Script %s is not included in singularity-python!' % script_path)
return None |
def add(event, reactors, saltenv='base', test=None):
'''
Add a new reactor
CLI Example:
.. code-block:: bash
salt-run reactor.add 'salt/cloud/*/destroyed' reactors='/srv/reactor/destroy/*.sls'
'''
if isinstance(reactors, string_types):
reactors = [reactors]
sevent = salt.utils.event.get_event(
'master',
__opts__['sock_dir'],
__opts__['transport'],
opts=__opts__,
listen=True)
master_key = salt.utils.master.get_master_key('root', __opts__)
__jid_event__.fire_event({'event': event,
'reactors': reactors,
'key': master_key},
'salt/reactors/manage/add')
res = sevent.get_event(wait=30, tag='salt/reactors/manage/add-complete')
return res['result'] | def function[add, parameter[event, reactors, saltenv, test]]:
constant[
Add a new reactor
CLI Example:
.. code-block:: bash
salt-run reactor.add 'salt/cloud/*/destroyed' reactors='/srv/reactor/destroy/*.sls'
]
if call[name[isinstance], parameter[name[reactors], name[string_types]]] begin[:]
variable[reactors] assign[=] list[[<ast.Name object at 0x7da1b2184e20>]]
variable[sevent] assign[=] call[name[salt].utils.event.get_event, parameter[constant[master], call[name[__opts__]][constant[sock_dir]], call[name[__opts__]][constant[transport]]]]
variable[master_key] assign[=] call[name[salt].utils.master.get_master_key, parameter[constant[root], name[__opts__]]]
call[name[__jid_event__].fire_event, parameter[dictionary[[<ast.Constant object at 0x7da18ede7370>, <ast.Constant object at 0x7da18ede7fd0>, <ast.Constant object at 0x7da18ede4e80>], [<ast.Name object at 0x7da18ede5cf0>, <ast.Name object at 0x7da18ede73a0>, <ast.Name object at 0x7da18ede6230>]], constant[salt/reactors/manage/add]]]
variable[res] assign[=] call[name[sevent].get_event, parameter[]]
return[call[name[res]][constant[result]]] | keyword[def] identifier[add] ( identifier[event] , identifier[reactors] , identifier[saltenv] = literal[string] , identifier[test] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[reactors] , identifier[string_types] ):
identifier[reactors] =[ identifier[reactors] ]
identifier[sevent] = identifier[salt] . identifier[utils] . identifier[event] . identifier[get_event] (
literal[string] ,
identifier[__opts__] [ literal[string] ],
identifier[__opts__] [ literal[string] ],
identifier[opts] = identifier[__opts__] ,
identifier[listen] = keyword[True] )
identifier[master_key] = identifier[salt] . identifier[utils] . identifier[master] . identifier[get_master_key] ( literal[string] , identifier[__opts__] )
identifier[__jid_event__] . identifier[fire_event] ({ literal[string] : identifier[event] ,
literal[string] : identifier[reactors] ,
literal[string] : identifier[master_key] },
literal[string] )
identifier[res] = identifier[sevent] . identifier[get_event] ( identifier[wait] = literal[int] , identifier[tag] = literal[string] )
keyword[return] identifier[res] [ literal[string] ] | def add(event, reactors, saltenv='base', test=None):
"""
Add a new reactor
CLI Example:
.. code-block:: bash
salt-run reactor.add 'salt/cloud/*/destroyed' reactors='/srv/reactor/destroy/*.sls'
"""
if isinstance(reactors, string_types):
reactors = [reactors] # depends on [control=['if'], data=[]]
sevent = salt.utils.event.get_event('master', __opts__['sock_dir'], __opts__['transport'], opts=__opts__, listen=True)
master_key = salt.utils.master.get_master_key('root', __opts__)
__jid_event__.fire_event({'event': event, 'reactors': reactors, 'key': master_key}, 'salt/reactors/manage/add')
res = sevent.get_event(wait=30, tag='salt/reactors/manage/add-complete')
return res['result'] |
def get_feature_permission(request, feature, operation=None):
"""Check if a feature-specific field can be displayed.
This method check a permission for a feature-specific field.
Such field is usually provided through Neutron extension.
:param request: Request Object
:param feature: feature name defined in FEATURE_MAP
:param operation (optional): Operation type. The valid value should be
defined in FEATURE_MAP[feature]['policies']
It must be specified if FEATURE_MAP[feature] has 'policies'.
"""
network_config = getattr(settings, 'OPENSTACK_NEUTRON_NETWORK', {})
feature_info = FEATURE_MAP.get(feature)
if not feature_info:
raise ValueError("The requested feature '%(feature)s' is unknown. "
"Please make sure to specify a feature defined "
"in FEATURE_MAP.")
# Check dashboard settings
feature_config = feature_info.get('config')
if feature_config:
if not network_config.get(feature_config['name'],
feature_config['default']):
return False
# Check policy
feature_policies = feature_info.get('policies')
if feature_policies:
policy_name = feature_policies.get(operation)
if not policy_name:
raise ValueError("The 'operation' parameter for "
"get_feature_permission '%(feature)s' "
"is invalid. It should be one of %(allowed)s"
% {'feature': feature,
'allowed': ' '.join(feature_policies.keys())})
role = (('network', policy_name),)
if not policy.check(role, request):
return False
# Check if a required extension is enabled
feature_extension = feature_info.get('extension')
if feature_extension:
try:
return is_extension_supported(request, feature_extension)
except Exception:
LOG.info("Failed to check Neutron '%s' extension is not supported",
feature_extension)
return False
# If all checks are passed, now a given feature is allowed.
return True | def function[get_feature_permission, parameter[request, feature, operation]]:
constant[Check if a feature-specific field can be displayed.
This method check a permission for a feature-specific field.
Such field is usually provided through Neutron extension.
:param request: Request Object
:param feature: feature name defined in FEATURE_MAP
:param operation (optional): Operation type. The valid value should be
defined in FEATURE_MAP[feature]['policies']
It must be specified if FEATURE_MAP[feature] has 'policies'.
]
variable[network_config] assign[=] call[name[getattr], parameter[name[settings], constant[OPENSTACK_NEUTRON_NETWORK], dictionary[[], []]]]
variable[feature_info] assign[=] call[name[FEATURE_MAP].get, parameter[name[feature]]]
if <ast.UnaryOp object at 0x7da1b19827a0> begin[:]
<ast.Raise object at 0x7da1b1983f70>
variable[feature_config] assign[=] call[name[feature_info].get, parameter[constant[config]]]
if name[feature_config] begin[:]
if <ast.UnaryOp object at 0x7da1b1980f10> begin[:]
return[constant[False]]
variable[feature_policies] assign[=] call[name[feature_info].get, parameter[constant[policies]]]
if name[feature_policies] begin[:]
variable[policy_name] assign[=] call[name[feature_policies].get, parameter[name[operation]]]
if <ast.UnaryOp object at 0x7da1b1980c10> begin[:]
<ast.Raise object at 0x7da1b1980b80>
variable[role] assign[=] tuple[[<ast.Tuple object at 0x7da1b1982860>]]
if <ast.UnaryOp object at 0x7da1b1981420> begin[:]
return[constant[False]]
variable[feature_extension] assign[=] call[name[feature_info].get, parameter[constant[extension]]]
if name[feature_extension] begin[:]
<ast.Try object at 0x7da1b1983d90>
return[constant[True]] | keyword[def] identifier[get_feature_permission] ( identifier[request] , identifier[feature] , identifier[operation] = keyword[None] ):
literal[string]
identifier[network_config] = identifier[getattr] ( identifier[settings] , literal[string] ,{})
identifier[feature_info] = identifier[FEATURE_MAP] . identifier[get] ( identifier[feature] )
keyword[if] keyword[not] identifier[feature_info] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string]
literal[string] )
identifier[feature_config] = identifier[feature_info] . identifier[get] ( literal[string] )
keyword[if] identifier[feature_config] :
keyword[if] keyword[not] identifier[network_config] . identifier[get] ( identifier[feature_config] [ literal[string] ],
identifier[feature_config] [ literal[string] ]):
keyword[return] keyword[False]
identifier[feature_policies] = identifier[feature_info] . identifier[get] ( literal[string] )
keyword[if] identifier[feature_policies] :
identifier[policy_name] = identifier[feature_policies] . identifier[get] ( identifier[operation] )
keyword[if] keyword[not] identifier[policy_name] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string]
literal[string]
%{ literal[string] : identifier[feature] ,
literal[string] : literal[string] . identifier[join] ( identifier[feature_policies] . identifier[keys] ())})
identifier[role] =(( literal[string] , identifier[policy_name] ),)
keyword[if] keyword[not] identifier[policy] . identifier[check] ( identifier[role] , identifier[request] ):
keyword[return] keyword[False]
identifier[feature_extension] = identifier[feature_info] . identifier[get] ( literal[string] )
keyword[if] identifier[feature_extension] :
keyword[try] :
keyword[return] identifier[is_extension_supported] ( identifier[request] , identifier[feature_extension] )
keyword[except] identifier[Exception] :
identifier[LOG] . identifier[info] ( literal[string] ,
identifier[feature_extension] )
keyword[return] keyword[False]
keyword[return] keyword[True] | def get_feature_permission(request, feature, operation=None):
"""Check if a feature-specific field can be displayed.
This method check a permission for a feature-specific field.
Such field is usually provided through Neutron extension.
:param request: Request Object
:param feature: feature name defined in FEATURE_MAP
:param operation (optional): Operation type. The valid value should be
defined in FEATURE_MAP[feature]['policies']
It must be specified if FEATURE_MAP[feature] has 'policies'.
"""
network_config = getattr(settings, 'OPENSTACK_NEUTRON_NETWORK', {})
feature_info = FEATURE_MAP.get(feature)
if not feature_info:
raise ValueError("The requested feature '%(feature)s' is unknown. Please make sure to specify a feature defined in FEATURE_MAP.") # depends on [control=['if'], data=[]]
# Check dashboard settings
feature_config = feature_info.get('config')
if feature_config:
if not network_config.get(feature_config['name'], feature_config['default']):
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Check policy
feature_policies = feature_info.get('policies')
if feature_policies:
policy_name = feature_policies.get(operation)
if not policy_name:
raise ValueError("The 'operation' parameter for get_feature_permission '%(feature)s' is invalid. It should be one of %(allowed)s" % {'feature': feature, 'allowed': ' '.join(feature_policies.keys())}) # depends on [control=['if'], data=[]]
role = (('network', policy_name),)
if not policy.check(role, request):
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Check if a required extension is enabled
feature_extension = feature_info.get('extension')
if feature_extension:
try:
return is_extension_supported(request, feature_extension) # depends on [control=['try'], data=[]]
except Exception:
LOG.info("Failed to check Neutron '%s' extension is not supported", feature_extension)
return False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# If all checks are passed, now a given feature is allowed.
return True |
def dispatch(self, request, response):
    """Entry-point of the dispatch cycle for this resource.

    Performs common work such as authentication, decoding, etc. before
    handing complete control of the result to a function with the
    same name as the request method.

    :param request: The incoming request object (provides ``user`` and
        ``method`` attributes).
    :param response: The response object to be populated.
    :returns: Whatever the method-specific handler selected by
        :meth:`route` returns.
    """
    # Assert authentication and attempt to get a valid user object.
    # Runs first: nothing below may execute for an unauthenticated caller.
    self.require_authentication(request)
    # Assert accessibility of the resource for this user and HTTP method.
    self.require_accessibility(request.user, request.method)
    # Facilitate CORS by applying various headers.
    # This must be done on every request.
    # TODO: Provide cross_domain configuration that turns this off.
    self._process_cross_domain_request(request, response)
    # Route the HTTP/1.1 request to an appropriate method.
    return self.route(request, response)
constant[Entry-point of the dispatch cycle for this resource.
Performs common work such as authentication, decoding, etc. before
handing complete control of the result to a function with the
same name as the request method.
]
call[name[self].require_authentication, parameter[name[request]]]
call[name[self].require_accessibility, parameter[name[request].user, name[request].method]]
call[name[self]._process_cross_domain_request, parameter[name[request], name[response]]]
return[call[name[self].route, parameter[name[request], name[response]]]] | keyword[def] identifier[dispatch] ( identifier[self] , identifier[request] , identifier[response] ):
literal[string]
identifier[self] . identifier[require_authentication] ( identifier[request] )
identifier[self] . identifier[require_accessibility] ( identifier[request] . identifier[user] , identifier[request] . identifier[method] )
identifier[self] . identifier[_process_cross_domain_request] ( identifier[request] , identifier[response] )
keyword[return] identifier[self] . identifier[route] ( identifier[request] , identifier[response] ) | def dispatch(self, request, response):
"""Entry-point of the dispatch cycle for this resource.
Performs common work such as authentication, decoding, etc. before
handing complete control of the result to a function with the
same name as the request method.
"""
# Assert authentication and attempt to get a valid user object.
self.require_authentication(request)
# Assert accessibiltiy of the resource in question.
self.require_accessibility(request.user, request.method)
# Facilitate CORS by applying various headers.
# This must be done on every request.
# TODO: Provide cross_domain configuration that turns this off.
self._process_cross_domain_request(request, response)
# Route the HTTP/1.1 request to an appropriate method.
return self.route(request, response) |
def mkdir(*args):
    """Create a directory specified by a sequence of subdirectories

    >>> mkdir("/tmp", "foo", "bar", "baz")
    '/tmp/foo/bar/baz'
    >>> os.path.isdir('/tmp/foo/bar/baz')
    True

    :param args: path components, joined with ``os.path.join``
    :returns: the joined path, which now exists as a directory
        (the empty string when called with no arguments)
    """
    path = os.path.join(*args) if args else ''
    if path:
        # os.makedirs creates every missing intermediate level in one call
        # and, with exist_ok=True, tolerates concurrent creation -- the
        # previous per-level isdir()-then-mkdir() loop was racy.
        os.makedirs(path, exist_ok=True)
    return path
constant[Create a directory specified by a sequence of subdirectories
>>> mkdir("/tmp", "foo", "bar", "baz")
'/tmp/foo/bar/baz'
>>> os.path.isdir('/tmp/foo/bar/baz')
True
]
variable[path] assign[=] constant[]
for taget[name[chunk]] in starred[name[args]] begin[:]
variable[path] assign[=] call[name[os].path.join, parameter[name[path], name[chunk]]]
if <ast.UnaryOp object at 0x7da20c76e8f0> begin[:]
call[name[os].mkdir, parameter[name[path]]]
return[name[path]] | keyword[def] identifier[mkdir] (* identifier[args] ):
literal[string]
identifier[path] = literal[string]
keyword[for] identifier[chunk] keyword[in] identifier[args] :
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[chunk] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[path] ):
identifier[os] . identifier[mkdir] ( identifier[path] )
keyword[return] identifier[path] | def mkdir(*args):
"""Create a directory specified by a sequence of subdirectories
>>> mkdir("/tmp", "foo", "bar", "baz")
'/tmp/foo/bar/baz'
>>> os.path.isdir('/tmp/foo/bar/baz')
True
"""
path = ''
for chunk in args:
path = os.path.join(path, chunk)
if not os.path.isdir(path):
os.mkdir(path) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['chunk']]
return path |
def _updateEndpoints(self, *args, **kwargs):
    """
    Updates all endpoints except the one from which this slot was called.

    :param args: optional payload to forward to every other endpoint's
        setter; when empty, the current value is pulled from the sender's
        getter instead.

    Note: this method is probably not completely threadsafe. A lock may
    be needed around the ``ignoreEvents`` guard.
    """
    sender = self.sender()
    if self.ignoreEvents:
        return
    self.ignoreEvents = True
    try:
        sender_id = id(sender)
        for binding in self.bindings.values():
            # Skip the endpoint that emitted this signal in the first place.
            if binding.instanceId == sender_id:
                continue
            if args:
                binding.setter(*args, **kwargs)
            else:
                # No payload supplied: read the sender's current value.
                binding.setter(self.bindings[sender_id].getter())
    finally:
        # Always clear the re-entrancy guard, even if a setter raises;
        # otherwise every future update would be silently ignored.
        self.ignoreEvents = False
constant[
Updates all endpoints except the one from which this slot was called.
Note: this method is probably not complete threadsafe. Maybe a lock is needed when setter self.ignoreEvents
]
variable[sender] assign[=] call[name[self].sender, parameter[]]
if <ast.UnaryOp object at 0x7da18bc716f0> begin[:]
name[self].ignoreEvents assign[=] constant[True]
for taget[name[binding]] in starred[call[name[self].bindings.values, parameter[]]] begin[:]
if compare[name[binding].instanceId equal[==] call[name[id], parameter[name[sender]]]] begin[:]
continue
if name[args] begin[:]
call[name[binding].setter, parameter[<ast.Starred object at 0x7da20c6a8130>]]
name[self].ignoreEvents assign[=] constant[False] | keyword[def] identifier[_updateEndpoints] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[sender] = identifier[self] . identifier[sender] ()
keyword[if] keyword[not] identifier[self] . identifier[ignoreEvents] :
identifier[self] . identifier[ignoreEvents] = keyword[True]
keyword[for] identifier[binding] keyword[in] identifier[self] . identifier[bindings] . identifier[values] ():
keyword[if] identifier[binding] . identifier[instanceId] == identifier[id] ( identifier[sender] ):
keyword[continue]
keyword[if] identifier[args] :
identifier[binding] . identifier[setter] (* identifier[args] ,** identifier[kwargs] )
keyword[else] :
identifier[binding] . identifier[setter] ( identifier[self] . identifier[bindings] [ identifier[id] ( identifier[sender] )]. identifier[getter] ())
identifier[self] . identifier[ignoreEvents] = keyword[False] | def _updateEndpoints(self, *args, **kwargs):
"""
Updates all endpoints except the one from which this slot was called.
Note: this method is probably not complete threadsafe. Maybe a lock is needed when setter self.ignoreEvents
"""
sender = self.sender()
if not self.ignoreEvents:
self.ignoreEvents = True
for binding in self.bindings.values():
if binding.instanceId == id(sender):
continue # depends on [control=['if'], data=[]]
if args:
binding.setter(*args, **kwargs) # depends on [control=['if'], data=[]]
else:
binding.setter(self.bindings[id(sender)].getter()) # depends on [control=['for'], data=['binding']]
self.ignoreEvents = False # depends on [control=['if'], data=[]] |
def get_commands_aliases_and_macros_for_completion(self) -> List[str]:
    """Return a list of visible commands, aliases, and macros for tab completion"""
    names = set(self.get_visible_commands())
    names.update(self.get_alias_names())
    names.update(self.get_macro_names())
    return list(names)
constant[Return a list of visible commands, aliases, and macros for tab completion]
variable[visible_commands] assign[=] call[name[set], parameter[call[name[self].get_visible_commands, parameter[]]]]
variable[alias_names] assign[=] call[name[set], parameter[call[name[self].get_alias_names, parameter[]]]]
variable[macro_names] assign[=] call[name[set], parameter[call[name[self].get_macro_names, parameter[]]]]
return[call[name[list], parameter[binary_operation[binary_operation[name[visible_commands] <ast.BitOr object at 0x7da2590d6aa0> name[alias_names]] <ast.BitOr object at 0x7da2590d6aa0> name[macro_names]]]]] | keyword[def] identifier[get_commands_aliases_and_macros_for_completion] ( identifier[self] )-> identifier[List] [ identifier[str] ]:
literal[string]
identifier[visible_commands] = identifier[set] ( identifier[self] . identifier[get_visible_commands] ())
identifier[alias_names] = identifier[set] ( identifier[self] . identifier[get_alias_names] ())
identifier[macro_names] = identifier[set] ( identifier[self] . identifier[get_macro_names] ())
keyword[return] identifier[list] ( identifier[visible_commands] | identifier[alias_names] | identifier[macro_names] ) | def get_commands_aliases_and_macros_for_completion(self) -> List[str]:
"""Return a list of visible commands, aliases, and macros for tab completion"""
visible_commands = set(self.get_visible_commands())
alias_names = set(self.get_alias_names())
macro_names = set(self.get_macro_names())
return list(visible_commands | alias_names | macro_names) |
def destroy_elb(app='', env='dev', region='us-east-1', **_):
    """Destroy ELB Resources.

    Args:
        app (str): Spinnaker Application name.
        env (str): Deployment environment.
        region (str): AWS region.

    Returns:
        True upon successful completion.
    """
    # Resolve the VPC for this account/region before rendering the task.
    vpc = get_vpc_id(account=env, region=region)
    task_json = get_template(
        template_file='destroy/destroy_elb.json.j2',
        app=app,
        env=env,
        region=region,
        vpc=vpc)
    wait_for_task(task_json)
    return True
constant[Destroy ELB Resources.
Args:
app (str): Spinnaker Application name.
env (str): Deployment environment.
region (str): AWS region.
Returns:
True upon successful completion.
]
variable[task_json] assign[=] call[name[get_template], parameter[]]
call[name[wait_for_task], parameter[name[task_json]]]
return[constant[True]] | keyword[def] identifier[destroy_elb] ( identifier[app] = literal[string] , identifier[env] = literal[string] , identifier[region] = literal[string] ,** identifier[_] ):
literal[string]
identifier[task_json] = identifier[get_template] (
identifier[template_file] = literal[string] ,
identifier[app] = identifier[app] ,
identifier[env] = identifier[env] ,
identifier[region] = identifier[region] ,
identifier[vpc] = identifier[get_vpc_id] ( identifier[account] = identifier[env] , identifier[region] = identifier[region] ))
identifier[wait_for_task] ( identifier[task_json] )
keyword[return] keyword[True] | def destroy_elb(app='', env='dev', region='us-east-1', **_):
"""Destroy ELB Resources.
Args:
app (str): Spinnaker Application name.
env (str): Deployment environment.
region (str): AWS region.
Returns:
True upon successful completion.
"""
task_json = get_template(template_file='destroy/destroy_elb.json.j2', app=app, env=env, region=region, vpc=get_vpc_id(account=env, region=region))
wait_for_task(task_json)
return True |
def _FlowProcessingRequestHandlerLoop(self, handler):
  """The main loop for the flow processing request queue.

  Leases pending flow processing requests and fans each one out to the
  worker pool until the stop flag is set or an unexpected error occurs.
  """
  while not self.flow_processing_request_handler_stop:
    try:
      requests = self._LeaseFlowProcessingReqests()
      if not requests:
        # Nothing to do right now; back off before polling again.
        time.sleep(self._FLOW_REQUEST_POLL_TIME_SECS)
        continue
      for request in requests:
        self.flow_processing_request_handler_pool.AddTask(
            target=handler, args=(request,))
    except Exception as e:  # pylint: disable=broad-except
      # Any unexpected failure terminates the loop after logging it.
      logging.exception("_FlowProcessingRequestHandlerLoop raised %s.", e)
      break
constant[The main loop for the flow processing request queue.]
while <ast.UnaryOp object at 0x7da1b1b05450> begin[:]
<ast.Try object at 0x7da1b1b066b0> | keyword[def] identifier[_FlowProcessingRequestHandlerLoop] ( identifier[self] , identifier[handler] ):
literal[string]
keyword[while] keyword[not] identifier[self] . identifier[flow_processing_request_handler_stop] :
keyword[try] :
identifier[msgs] = identifier[self] . identifier[_LeaseFlowProcessingReqests] ()
keyword[if] identifier[msgs] :
keyword[for] identifier[m] keyword[in] identifier[msgs] :
identifier[self] . identifier[flow_processing_request_handler_pool] . identifier[AddTask] (
identifier[target] = identifier[handler] , identifier[args] =( identifier[m] ,))
keyword[else] :
identifier[time] . identifier[sleep] ( identifier[self] . identifier[_FLOW_REQUEST_POLL_TIME_SECS] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logging] . identifier[exception] ( literal[string] , identifier[e] )
keyword[break] | def _FlowProcessingRequestHandlerLoop(self, handler):
"""The main loop for the flow processing request queue."""
while not self.flow_processing_request_handler_stop:
try:
msgs = self._LeaseFlowProcessingReqests()
if msgs:
for m in msgs:
self.flow_processing_request_handler_pool.AddTask(target=handler, args=(m,)) # depends on [control=['for'], data=['m']] # depends on [control=['if'], data=[]]
else:
time.sleep(self._FLOW_REQUEST_POLL_TIME_SECS) # depends on [control=['try'], data=[]]
except Exception as e: # pylint: disable=broad-except
logging.exception('_FlowProcessingRequestHandlerLoop raised %s.', e)
break # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]] |
def terminate(self):
    """Terminate the child process.

    It is not an error to call this method when the child has already
    exited.
    """
    try:
        self.send_signal(signal.SIGTERM)
    except pyuv.error.ProcessError as exc:
        # ESRCH means the process no longer exists; swallow that case.
        if exc.args[0] == pyuv.errno.UV_ESRCH:
            return
        raise
constant[Terminate the child process.
It is not an error to call this method when the child has already exited.
]
<ast.Try object at 0x7da20c990190> | keyword[def] identifier[terminate] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[self] . identifier[send_signal] ( identifier[signal] . identifier[SIGTERM] )
keyword[except] identifier[pyuv] . identifier[error] . identifier[ProcessError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[args] [ literal[int] ]!= identifier[pyuv] . identifier[errno] . identifier[UV_ESRCH] :
keyword[raise] | def terminate(self):
"""Terminate the child process.
It is not an error to call this method when the child has already exited.
"""
try:
self.send_signal(signal.SIGTERM) # depends on [control=['try'], data=[]]
except pyuv.error.ProcessError as e:
if e.args[0] != pyuv.errno.UV_ESRCH:
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] |
def show_instance(name, call=None):
    '''
    Show the details of an instance, by name

    CLI Example:

    .. code-block:: bash

        salt-cloud -a show_instance vm_name

    .. versionadded:: 2015.8.0
    '''
    response = _query('grid', 'server/get', args={'name': name})
    ret = {}
    for item in response['list']:
        # Use a distinct variable so the ``name`` parameter used for the
        # query above is not shadowed inside the loop.
        server_name = item['name']
        ret[server_name] = item
        ret[server_name]['image_info'] = item['image']
        ret[server_name]['image'] = item['image']['friendlyName']
        ret[server_name]['size'] = item['ram']['name']
        ret[server_name]['public_ips'] = [item['ip']['ip']]
        ret[server_name]['private_ips'] = []
        ret[server_name]['state_info'] = item['state']
        if 'active' in item['state']['description']:
            ret[server_name]['state'] = 'RUNNING'
    return ret
constant[
Start a machine by name
CLI Example:
.. code-block:: bash
salt-cloud -a show_instance vm_name
.. versionadded:: 2015.8.0
]
variable[response] assign[=] call[name[_query], parameter[constant[grid], constant[server/get]]]
variable[ret] assign[=] dictionary[[], []]
for taget[name[item]] in starred[call[name[response]][constant[list]]] begin[:]
variable[name] assign[=] call[name[item]][constant[name]]
call[name[ret]][name[name]] assign[=] name[item]
call[call[name[ret]][name[name]]][constant[image_info]] assign[=] call[name[item]][constant[image]]
call[call[name[ret]][name[name]]][constant[image]] assign[=] call[call[name[item]][constant[image]]][constant[friendlyName]]
call[call[name[ret]][name[name]]][constant[size]] assign[=] call[call[name[item]][constant[ram]]][constant[name]]
call[call[name[ret]][name[name]]][constant[public_ips]] assign[=] list[[<ast.Subscript object at 0x7da1b1c18e20>]]
call[call[name[ret]][name[name]]][constant[private_ips]] assign[=] list[[]]
call[call[name[ret]][name[name]]][constant[state_info]] assign[=] call[name[item]][constant[state]]
if compare[constant[active] in call[call[name[item]][constant[state]]][constant[description]]] begin[:]
call[call[name[ret]][name[name]]][constant[state]] assign[=] constant[RUNNING]
return[name[ret]] | keyword[def] identifier[show_instance] ( identifier[name] , identifier[call] = keyword[None] ):
literal[string]
identifier[response] = identifier[_query] ( literal[string] , literal[string] , identifier[args] ={ literal[string] : identifier[name] })
identifier[ret] ={}
keyword[for] identifier[item] keyword[in] identifier[response] [ literal[string] ]:
identifier[name] = identifier[item] [ literal[string] ]
identifier[ret] [ identifier[name] ]= identifier[item]
identifier[ret] [ identifier[name] ][ literal[string] ]= identifier[item] [ literal[string] ]
identifier[ret] [ identifier[name] ][ literal[string] ]= identifier[item] [ literal[string] ][ literal[string] ]
identifier[ret] [ identifier[name] ][ literal[string] ]= identifier[item] [ literal[string] ][ literal[string] ]
identifier[ret] [ identifier[name] ][ literal[string] ]=[ identifier[item] [ literal[string] ][ literal[string] ]]
identifier[ret] [ identifier[name] ][ literal[string] ]=[]
identifier[ret] [ identifier[name] ][ literal[string] ]= identifier[item] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[item] [ literal[string] ][ literal[string] ]:
identifier[ret] [ identifier[name] ][ literal[string] ]= literal[string]
keyword[return] identifier[ret] | def show_instance(name, call=None):
"""
Start a machine by name
CLI Example:
.. code-block:: bash
salt-cloud -a show_instance vm_name
.. versionadded:: 2015.8.0
"""
response = _query('grid', 'server/get', args={'name': name})
ret = {}
for item in response['list']:
name = item['name']
ret[name] = item
ret[name]['image_info'] = item['image']
ret[name]['image'] = item['image']['friendlyName']
ret[name]['size'] = item['ram']['name']
ret[name]['public_ips'] = [item['ip']['ip']]
ret[name]['private_ips'] = []
ret[name]['state_info'] = item['state']
if 'active' in item['state']['description']:
ret[name]['state'] = 'RUNNING' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
return ret |
def import_puppetclasses(self, synchronous=True, **kwargs):
    """Import puppet classes from puppet Capsule.

    :param synchronous: What should happen if the server returns an HTTP
        202 (accepted) status code? Wait for the task to complete if
        ``True``. Immediately return the server's response otherwise.
    :param kwargs: Arguments to pass to requests.
    :returns: The server's response, with all JSON decoded.
    :raises: ``requests.exceptions.HTTPError`` If the server responds with
        an HTTP 4XX or 5XX message.
    """
    kwargs = kwargs.copy()
    kwargs.update(self._server_config.get_client_kwargs())
    # ``environment`` only selects the URL variant; it is consumed here and
    # must not be forwarded to requests.
    if 'environment' in kwargs:
        environment = kwargs.pop('environment')
        env_id = (environment.id if isinstance(environment, Environment)
                  else environment)
        path = '{0}/environments/{1}/import_puppetclasses'.format(
            self.path(), env_id)
    else:
        path = '{0}/import_puppetclasses'.format(self.path())
    return _handle_response(
        client.post(path, **kwargs), self._server_config, synchronous)
constant[Import puppet classes from puppet Capsule.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
]
variable[kwargs] assign[=] call[name[kwargs].copy, parameter[]]
call[name[kwargs].update, parameter[call[name[self]._server_config.get_client_kwargs, parameter[]]]]
if compare[constant[environment] in name[kwargs]] begin[:]
if call[name[isinstance], parameter[call[name[kwargs]][constant[environment]], name[Environment]]] begin[:]
variable[environment_id] assign[=] call[name[kwargs].pop, parameter[constant[environment]]].id
variable[path] assign[=] call[constant[{0}/environments/{1}/import_puppetclasses].format, parameter[call[name[self].path, parameter[]], name[environment_id]]]
return[call[name[_handle_response], parameter[call[name[client].post, parameter[name[path]]], name[self]._server_config, name[synchronous]]]] | keyword[def] identifier[import_puppetclasses] ( identifier[self] , identifier[synchronous] = keyword[True] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] = identifier[kwargs] . identifier[copy] ()
identifier[kwargs] . identifier[update] ( identifier[self] . identifier[_server_config] . identifier[get_client_kwargs] ())
keyword[if] literal[string] keyword[in] identifier[kwargs] :
keyword[if] identifier[isinstance] ( identifier[kwargs] [ literal[string] ], identifier[Environment] ):
identifier[environment_id] = identifier[kwargs] . identifier[pop] ( literal[string] ). identifier[id]
keyword[else] :
identifier[environment_id] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[path] = literal[string] . identifier[format] (
identifier[self] . identifier[path] (), identifier[environment_id] )
keyword[else] :
identifier[path] = literal[string] . identifier[format] ( identifier[self] . identifier[path] ())
keyword[return] identifier[_handle_response] (
identifier[client] . identifier[post] ( identifier[path] ,** identifier[kwargs] ), identifier[self] . identifier[_server_config] , identifier[synchronous] ) | def import_puppetclasses(self, synchronous=True, **kwargs):
"""Import puppet classes from puppet Capsule.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
"""
kwargs = kwargs.copy()
kwargs.update(self._server_config.get_client_kwargs())
# Check if environment_id was sent and substitute it to the path
# but do not pass it to requests
if 'environment' in kwargs:
if isinstance(kwargs['environment'], Environment):
environment_id = kwargs.pop('environment').id # depends on [control=['if'], data=[]]
else:
environment_id = kwargs.pop('environment')
path = '{0}/environments/{1}/import_puppetclasses'.format(self.path(), environment_id) # depends on [control=['if'], data=['kwargs']]
else:
path = '{0}/import_puppetclasses'.format(self.path())
return _handle_response(client.post(path, **kwargs), self._server_config, synchronous) |
def finish(self):
        """
        Respond to nsqd that you’ve processed this message successfully
        (or would like to silently discard it).

        :raises NSQException: if a response has already been sent for
            this message.
        """
        if self._has_responded:
            raise NSQException('already responded')
        self._has_responded = True
        # Notify on_finish subscribers that this message is done.
        self.on_finish.send(self)
constant[
Respond to nsqd that you’ve processed this message successfully
(or would like to silently discard it).
]
if name[self]._has_responded begin[:]
<ast.Raise object at 0x7da1b05b47c0>
name[self]._has_responded assign[=] constant[True]
call[name[self].on_finish.send, parameter[name[self]]] | keyword[def] identifier[finish] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_has_responded] :
keyword[raise] identifier[NSQException] ( literal[string] )
identifier[self] . identifier[_has_responded] = keyword[True]
identifier[self] . identifier[on_finish] . identifier[send] ( identifier[self] ) | def finish(self):
"""
Respond to nsqd that you’ve processed this message successfully
(or would like to silently discard it).
"""
if self._has_responded:
raise NSQException('already responded') # depends on [control=['if'], data=[]]
self._has_responded = True
self.on_finish.send(self) |
def resizeToContents(self):
    """
    Resizes the list widget to fit its contents vertically.
    """
    total = self.count()
    if not total:
        # Nothing to show: collapse down to the minimum height.
        self.setFixedHeight(self.minimumHeight())
        return
    # Height reaches just below the last visible item, never under 28px.
    last_rect = self.visualItemRect(self.item(total - 1))
    self.setFixedHeight(max(28, last_rect.bottom() + 8))
constant[
Resizes the list widget to fit its contents vertically.
]
if call[name[self].count, parameter[]] begin[:]
variable[item] assign[=] call[name[self].item, parameter[binary_operation[call[name[self].count, parameter[]] - constant[1]]]]
variable[rect] assign[=] call[name[self].visualItemRect, parameter[name[item]]]
variable[height] assign[=] binary_operation[call[name[rect].bottom, parameter[]] + constant[8]]
variable[height] assign[=] call[name[max], parameter[constant[28], name[height]]]
call[name[self].setFixedHeight, parameter[name[height]]] | keyword[def] identifier[resizeToContents] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[count] ():
identifier[item] = identifier[self] . identifier[item] ( identifier[self] . identifier[count] ()- literal[int] )
identifier[rect] = identifier[self] . identifier[visualItemRect] ( identifier[item] )
identifier[height] = identifier[rect] . identifier[bottom] ()+ literal[int]
identifier[height] = identifier[max] ( literal[int] , identifier[height] )
identifier[self] . identifier[setFixedHeight] ( identifier[height] )
keyword[else] :
identifier[self] . identifier[setFixedHeight] ( identifier[self] . identifier[minimumHeight] ()) | def resizeToContents(self):
"""
Resizes the list widget to fit its contents vertically.
"""
if self.count():
item = self.item(self.count() - 1)
rect = self.visualItemRect(item)
height = rect.bottom() + 8
height = max(28, height)
self.setFixedHeight(height) # depends on [control=['if'], data=[]]
else:
self.setFixedHeight(self.minimumHeight()) |
def step(self, actions):
  """Makes a step in all environments.

  Does any preprocessing and records frames.

  Args:
    actions: Batch of actions.

  Returns:
    (obs, rewards, dones) - batches of observations, rewards and done flags
    respectively.

  Raises:
    ValueError: when the data for current epoch has already been loaded.
  """
  if self._store_rollouts and \
      self._rollouts_by_epoch_and_split[self.current_epoch]:
    raise ValueError(
        "Data for current epoch has already been loaded from disk."
    )

  (obs, raw_rewards, dones) = self._step(actions)
  obs = self._preprocess_observations(obs)

  # Clip rewards into the declared range and round them to whole numbers.
  (low, high) = self.reward_range
  rewards = np.around(np.clip(raw_rewards, low, high))

  if self._store_rollouts:
    raw_rewards = raw_rewards.astype(np.float64)
    encoded_obs = self._encode_observations(obs)
    # Close out the frames pending from the previous step with the actions
    # that produced this transition.
    for (rollout, pending_frame, action) in zip(
        self._current_batch_rollouts, self._current_batch_frames, actions
    ):
      rollout.append(pending_frame._replace(action=action))
    # orud = (observation, reward, unclipped_reward, done); the action is
    # filled in on the next step.
    self._current_batch_frames = [
        Frame(*orud, action=None)
        for orud in zip(encoded_obs, rewards, raw_rewards, dones)
    ]
  return (obs, rewards, dones)
constant[Makes a step in all environments.
Does any preprocessing and records frames.
Args:
actions: Batch of actions.
Returns:
(obs, rewards, dones) - batches of observations, rewards and done flags
respectively.
Raises:
ValueError: when the data for current epoch has already been loaded.
]
if <ast.BoolOp object at 0x7da1b201e200> begin[:]
<ast.Raise object at 0x7da1b201cc70>
<ast.Tuple object at 0x7da1b201f8b0> assign[=] call[name[self]._step, parameter[name[actions]]]
variable[obs] assign[=] call[name[self]._preprocess_observations, parameter[name[obs]]]
<ast.Tuple object at 0x7da1b201d630> assign[=] name[self].reward_range
variable[rewards] assign[=] call[name[np].around, parameter[call[name[np].clip, parameter[name[unclipped_rewards], name[min_reward], name[max_reward]]]]]
if name[self]._store_rollouts begin[:]
variable[unclipped_rewards] assign[=] call[name[unclipped_rewards].astype, parameter[name[np].float64]]
variable[encoded_obs] assign[=] call[name[self]._encode_observations, parameter[name[obs]]]
for taget[tuple[[<ast.Name object at 0x7da207f03a30>, <ast.Name object at 0x7da207f03fd0>, <ast.Name object at 0x7da207f00250>]]] in starred[call[name[zip], parameter[name[self]._current_batch_rollouts, name[self]._current_batch_frames, name[actions]]]] begin[:]
call[name[rollout].append, parameter[call[name[frame]._replace, parameter[]]]]
name[self]._current_batch_frames assign[=] <ast.ListComp object at 0x7da207f03ac0>
return[tuple[[<ast.Name object at 0x7da207f01de0>, <ast.Name object at 0x7da207f01b10>, <ast.Name object at 0x7da207f00520>]]] | keyword[def] identifier[step] ( identifier[self] , identifier[actions] ):
literal[string]
keyword[if] identifier[self] . identifier[_store_rollouts] keyword[and] identifier[self] . identifier[_rollouts_by_epoch_and_split] [ identifier[self] . identifier[current_epoch] ]:
keyword[raise] identifier[ValueError] (
literal[string]
)
( identifier[obs] , identifier[unclipped_rewards] , identifier[dones] )= identifier[self] . identifier[_step] ( identifier[actions] )
identifier[obs] = identifier[self] . identifier[_preprocess_observations] ( identifier[obs] )
( identifier[min_reward] , identifier[max_reward] )= identifier[self] . identifier[reward_range]
identifier[rewards] = identifier[np] . identifier[around] ( identifier[np] . identifier[clip] ( identifier[unclipped_rewards] , identifier[min_reward] , identifier[max_reward] ))
keyword[if] identifier[self] . identifier[_store_rollouts] :
identifier[unclipped_rewards] = identifier[unclipped_rewards] . identifier[astype] ( identifier[np] . identifier[float64] )
identifier[encoded_obs] = identifier[self] . identifier[_encode_observations] ( identifier[obs] )
keyword[for] ( identifier[rollout] , identifier[frame] , identifier[action] ) keyword[in] identifier[zip] (
identifier[self] . identifier[_current_batch_rollouts] , identifier[self] . identifier[_current_batch_frames] , identifier[actions]
):
identifier[rollout] . identifier[append] ( identifier[frame] . identifier[_replace] ( identifier[action] = identifier[action] ))
identifier[self] . identifier[_current_batch_frames] =[
identifier[Frame] (* identifier[orud] , identifier[action] = keyword[None] )
keyword[for] identifier[orud] keyword[in] identifier[zip] ( identifier[encoded_obs] , identifier[rewards] , identifier[unclipped_rewards] , identifier[dones] )
]
keyword[return] ( identifier[obs] , identifier[rewards] , identifier[dones] ) | def step(self, actions):
"""Makes a step in all environments.
Does any preprocessing and records frames.
Args:
actions: Batch of actions.
Returns:
(obs, rewards, dones) - batches of observations, rewards and done flags
respectively.
Raises:
ValueError: when the data for current epoch has already been loaded.
"""
if self._store_rollouts and self._rollouts_by_epoch_and_split[self.current_epoch]:
raise ValueError('Data for current epoch has already been loaded from disk.') # depends on [control=['if'], data=[]]
(obs, unclipped_rewards, dones) = self._step(actions)
obs = self._preprocess_observations(obs)
(min_reward, max_reward) = self.reward_range
rewards = np.around(np.clip(unclipped_rewards, min_reward, max_reward))
if self._store_rollouts:
unclipped_rewards = unclipped_rewards.astype(np.float64)
encoded_obs = self._encode_observations(obs)
for (rollout, frame, action) in zip(self._current_batch_rollouts, self._current_batch_frames, actions):
rollout.append(frame._replace(action=action)) # depends on [control=['for'], data=[]]
# orud = (observation, reward, unclipped_reward, done)
self._current_batch_frames = [Frame(*orud, action=None) for orud in zip(encoded_obs, rewards, unclipped_rewards, dones)] # depends on [control=['if'], data=[]]
return (obs, rewards, dones) |
def forum_topic(self):
    """
    | Comment: The topic this ticket originated from, if any
    """
    # Without an API client or a topic id there is nothing to fetch.
    if not (self.api and self.forum_topic_id):
        return None
    return self.api._get_topic(self.forum_topic_id)
constant[
| Comment: The topic this ticket originated from, if any
]
if <ast.BoolOp object at 0x7da204346710> begin[:]
return[call[name[self].api._get_topic, parameter[name[self].forum_topic_id]]] | keyword[def] identifier[forum_topic] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[api] keyword[and] identifier[self] . identifier[forum_topic_id] :
keyword[return] identifier[self] . identifier[api] . identifier[_get_topic] ( identifier[self] . identifier[forum_topic_id] ) | def forum_topic(self):
"""
| Comment: The topic this ticket originated from, if any
"""
if self.api and self.forum_topic_id:
return self.api._get_topic(self.forum_topic_id) # depends on [control=['if'], data=[]] |
def __cal_ma_bias_ratio_point(cls, data, sample=5,
                              positive_or_negative=False):
    """Locate a turning point in the tail of a bias-ratio series.

    :param list data: series to examine
    :param int sample: number of trailing samples to inspect
    :param bool positive_or_negative: True for positive bias, False for negative
    :rtype: tuple
    :returns: (is_turning_point, days_since_extreme, extreme_value)
    """
    window = data[-sample:]
    if positive_or_negative:  # positive bias
        extreme = max(window)            # look for the peak
        precondition = max(window) > 0   # window maximum must be positive
    else:
        extreme = min(window)            # look for the trough
        precondition = max(window) < 0   # window maximum must be negative
    offset = window.index(extreme)
    # Turning point: the extreme is recent (within the last 3 samples) but
    # not the very last sample, and the sign precondition holds.
    is_turn = sample - offset < 4 and offset != sample - 1 and precondition
    return (is_turn, sample - offset - 1, extreme)
constant[判斷轉折點位置
:param list data: 計算資料
:param int sample: 計算的區間樣本數量
:param bool positive_or_negative: 正乖離 為 True,負乖離 為 False
:rtype: tuple
:returns: (True or False, 第幾個轉折日, 轉折點值)
]
variable[sample_data] assign[=] call[name[data]][<ast.Slice object at 0x7da1b0687760>]
if name[positive_or_negative] begin[:]
variable[ckvalue] assign[=] call[name[max], parameter[name[sample_data]]]
variable[preckvalue] assign[=] compare[call[name[max], parameter[name[sample_data]]] greater[>] constant[0]]
return[tuple[[<ast.BoolOp object at 0x7da1b0687e80>, <ast.BinOp object at 0x7da1b0687190>, <ast.Name object at 0x7da1b0687c70>]]] | keyword[def] identifier[__cal_ma_bias_ratio_point] ( identifier[cls] , identifier[data] , identifier[sample] = literal[int] ,
identifier[positive_or_negative] = keyword[False] ):
literal[string]
identifier[sample_data] = identifier[data] [- identifier[sample] :]
keyword[if] identifier[positive_or_negative] :
identifier[ckvalue] = identifier[max] ( identifier[sample_data] )
identifier[preckvalue] = identifier[max] ( identifier[sample_data] )> literal[int]
keyword[else] :
identifier[ckvalue] = identifier[min] ( identifier[sample_data] )
identifier[preckvalue] = identifier[max] ( identifier[sample_data] )< literal[int]
keyword[return] ( identifier[sample] - identifier[sample_data] . identifier[index] ( identifier[ckvalue] )< literal[int] keyword[and] identifier[sample_data] . identifier[index] ( identifier[ckvalue] )!= identifier[sample] - literal[int] keyword[and] identifier[preckvalue] ,
identifier[sample] - identifier[sample_data] . identifier[index] ( identifier[ckvalue] )- literal[int] ,
identifier[ckvalue] ) | def __cal_ma_bias_ratio_point(cls, data, sample=5, positive_or_negative=False):
"""判斷轉折點位置
:param list data: 計算資料
:param int sample: 計算的區間樣本數量
:param bool positive_or_negative: 正乖離 為 True,負乖離 為 False
:rtype: tuple
:returns: (True or False, 第幾個轉折日, 轉折點值)
"""
sample_data = data[-sample:]
if positive_or_negative: # 正
ckvalue = max(sample_data) # 尋找最大值
preckvalue = max(sample_data) > 0 # 區間最大值必須為正 # depends on [control=['if'], data=[]]
else:
ckvalue = min(sample_data) # 尋找最小值
preckvalue = max(sample_data) < 0 # 區間最大值必須為負
return (sample - sample_data.index(ckvalue) < 4 and sample_data.index(ckvalue) != sample - 1 and preckvalue, sample - sample_data.index(ckvalue) - 1, ckvalue) |
def get_actions(self, request):
    """
    Return a dictionary mapping the names of all actions for this
    ModelAdmin to a tuple of (callable, name, description) for each action.
    """
    # self.actions explicitly set to None disables all actions for this page.
    if self.actions is None:
        return OrderedDict()
    collected = []
    # Site-wide actions come first.
    for name, func in self.admin_site.actions:
        desc = getattr(func, 'short_description', name.replace('_', ' '))
        collected.append((func, name, desc))
    # Then actions from this ModelAdmin and every ancestor, walking the MRO
    # from the most generic class down to self so subclasses win.
    for klass in reversed(self.__class__.mro()):
        class_actions = getattr(klass, 'actions', [])
        # getattr may yield None; only iterate real sequences.
        if class_actions:
            collected.extend(self.get_action(action) for action in class_actions)
    # Build an ordered mapping keyed by action name, dropping any None
    # entries that get_action may have produced.
    result = OrderedDict()
    for entry in collected:
        if not entry:
            continue
        func, name, desc = entry
        result[name] = (func, name, desc)
    # The bulk-delete action is never exposed here.
    result.pop('delete_selected', None)
    return result
constant[
Return a dictionary mapping the names of all actions for this
ModelAdmin to a tuple of (callable, name, description) for each action.
]
if compare[name[self].actions is constant[None]] begin[:]
return[call[name[OrderedDict], parameter[]]]
variable[actions] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b0d326e0>, <ast.Name object at 0x7da1b0d30190>]]] in starred[name[self].admin_site.actions] begin[:]
variable[description] assign[=] call[name[getattr], parameter[name[func], constant[short_description], call[name[name].replace, parameter[constant[_], constant[ ]]]]]
call[name[actions].append, parameter[tuple[[<ast.Name object at 0x7da1b0d32380>, <ast.Name object at 0x7da1b0d30280>, <ast.Name object at 0x7da1b0d31750>]]]]
for taget[name[klass]] in starred[call[call[name[self].__class__.mro, parameter[]]][<ast.Slice object at 0x7da1b0d32440>]] begin[:]
variable[class_actions] assign[=] call[name[getattr], parameter[name[klass], constant[actions], list[[]]]]
if <ast.UnaryOp object at 0x7da1b0d30430> begin[:]
continue
call[name[actions].extend, parameter[<ast.GeneratorExp object at 0x7da1b0d32350>]]
variable[actions] assign[=] call[name[filter], parameter[constant[None], name[actions]]]
variable[actions] assign[=] call[name[OrderedDict], parameter[<ast.GeneratorExp object at 0x7da1b0e4f3a0>]]
if compare[constant[delete_selected] in name[actions]] begin[:]
<ast.Delete object at 0x7da1b0e4fb20>
return[name[actions]] | keyword[def] identifier[get_actions] ( identifier[self] , identifier[request] ):
literal[string]
keyword[if] identifier[self] . identifier[actions] keyword[is] keyword[None] :
keyword[return] identifier[OrderedDict] ()
identifier[actions] =[]
keyword[for] ( identifier[name] , identifier[func] ) keyword[in] identifier[self] . identifier[admin_site] . identifier[actions] :
identifier[description] = identifier[getattr] (
identifier[func] , literal[string] , identifier[name] . identifier[replace] ( literal[string] , literal[string] ))
identifier[actions] . identifier[append] (( identifier[func] , identifier[name] , identifier[description] ))
keyword[for] identifier[klass] keyword[in] identifier[self] . identifier[__class__] . identifier[mro] ()[::- literal[int] ]:
identifier[class_actions] = identifier[getattr] ( identifier[klass] , literal[string] ,[])
keyword[if] keyword[not] identifier[class_actions] :
keyword[continue]
identifier[actions] . identifier[extend] ( identifier[self] . identifier[get_action] ( identifier[action] ) keyword[for] identifier[action] keyword[in] identifier[class_actions] )
identifier[actions] = identifier[filter] ( keyword[None] , identifier[actions] )
identifier[actions] = identifier[OrderedDict] (
( identifier[name] ,( identifier[func] , identifier[name] , identifier[desc] ))
keyword[for] identifier[func] , identifier[name] , identifier[desc] keyword[in] identifier[actions]
)
keyword[if] literal[string] keyword[in] identifier[actions] :
keyword[del] identifier[actions] [ literal[string] ]
keyword[return] identifier[actions] | def get_actions(self, request):
"""
Return a dictionary mapping the names of all actions for this
ModelAdmin to a tuple of (callable, name, description) for each action.
"""
# If self.actions is explicitly set to None that means that we don't
# want *any* actions enabled on this page.
if self.actions is None:
return OrderedDict() # depends on [control=['if'], data=[]]
actions = []
# Gather actions from the admin site first
for (name, func) in self.admin_site.actions:
description = getattr(func, 'short_description', name.replace('_', ' '))
actions.append((func, name, description)) # depends on [control=['for'], data=[]]
# Then gather them from the model admin and all parent classes,
# starting with self and working back up.
for klass in self.__class__.mro()[::-1]:
class_actions = getattr(klass, 'actions', [])
# Avoid trying to iterate over None
if not class_actions:
continue # depends on [control=['if'], data=[]]
actions.extend((self.get_action(action) for action in class_actions)) # depends on [control=['for'], data=['klass']]
# get_action might have returned None, so filter any of those out.
actions = filter(None, actions)
# Convert the actions into an OrderedDict keyed by name.
actions = OrderedDict(((name, (func, name, desc)) for (func, name, desc) in actions))
if 'delete_selected' in actions:
del actions['delete_selected'] # depends on [control=['if'], data=['actions']]
return actions |
def create_permission_request(self, customer, pos_id, pos_tid, scope,
                              ledger=None, text=None, callback_uri=None,
                              expires_in=None):
    """Create permission request
    The call is idempotent; that is, if one posts the same pos_id and
    pos_tid twice, only one Permission request is created.
    """
    payload = dict(
        customer=customer,
        pos_id=pos_id,
        pos_tid=pos_tid,
        scope=scope,
        ledger=ledger,
        text=text,
        callback_uri=callback_uri,
        expires_in=expires_in,
    )
    url = self.merchant_api_base_url + '/permission_request/'
    response = self.do_req('POST', url, payload)
    return response.json()
constant[Create permission request
The call is idempotent; that is, if one posts the same pos_id and
pos_tid twice, only one Permission request is created.
]
variable[arguments] assign[=] dictionary[[<ast.Constant object at 0x7da2044c38b0>, <ast.Constant object at 0x7da2044c0340>, <ast.Constant object at 0x7da2044c3d30>, <ast.Constant object at 0x7da2044c22c0>, <ast.Constant object at 0x7da2044c2ef0>, <ast.Constant object at 0x7da2044c0790>, <ast.Constant object at 0x7da2044c3040>, <ast.Constant object at 0x7da2044c34f0>], [<ast.Name object at 0x7da2044c04f0>, <ast.Name object at 0x7da2044c0550>, <ast.Name object at 0x7da2044c25c0>, <ast.Name object at 0x7da2044c2da0>, <ast.Name object at 0x7da2044c21d0>, <ast.Name object at 0x7da2044c1300>, <ast.Name object at 0x7da2044c2920>, <ast.Name object at 0x7da2044c2500>]]
return[call[call[name[self].do_req, parameter[constant[POST], binary_operation[name[self].merchant_api_base_url + constant[/permission_request/]], name[arguments]]].json, parameter[]]] | keyword[def] identifier[create_permission_request] ( identifier[self] , identifier[customer] , identifier[pos_id] , identifier[pos_tid] , identifier[scope] ,
identifier[ledger] = keyword[None] , identifier[text] = keyword[None] , identifier[callback_uri] = keyword[None] ,
identifier[expires_in] = keyword[None] ):
literal[string]
identifier[arguments] ={ literal[string] : identifier[customer] ,
literal[string] : identifier[pos_id] ,
literal[string] : identifier[pos_tid] ,
literal[string] : identifier[scope] ,
literal[string] : identifier[ledger] ,
literal[string] : identifier[text] ,
literal[string] : identifier[callback_uri] ,
literal[string] : identifier[expires_in] }
keyword[return] identifier[self] . identifier[do_req] ( literal[string] ,
identifier[self] . identifier[merchant_api_base_url] + literal[string] ,
identifier[arguments] ). identifier[json] () | def create_permission_request(self, customer, pos_id, pos_tid, scope, ledger=None, text=None, callback_uri=None, expires_in=None):
"""Create permission request
The call is idempotent; that is, if one posts the same pos_id and
pos_tid twice, only one Permission request is created.
"""
arguments = {'customer': customer, 'pos_id': pos_id, 'pos_tid': pos_tid, 'scope': scope, 'ledger': ledger, 'text': text, 'callback_uri': callback_uri, 'expires_in': expires_in}
return self.do_req('POST', self.merchant_api_base_url + '/permission_request/', arguments).json() |
def _tick(self):
    """Write progress info and move cursor to beginning of line."""
    # Only show progress when verbose enough on a real terminal, or when
    # the user explicitly asked for it.
    show = (self.verbose >= 3 and not IS_REDIRECTED) or self.options.get("progress")
    if not show:
        return
    stats = self.get_stats()
    prefix = DRY_RUN_PREFIX if self.dry_run else ""
    message = "{}Touched {}/{} entries in {} directories...\r".format(
        prefix,
        stats["entries_touched"],
        stats["entries_seen"],
        stats["local_dirs"],
    )
    sys.stdout.write(message)
    # Flush so the carriage-return overwrite is visible immediately.
    sys.stdout.flush()
constant[Write progress info and move cursor to beginning of line.]
if <ast.BoolOp object at 0x7da1b0505c60> begin[:]
variable[stats] assign[=] call[name[self].get_stats, parameter[]]
variable[prefix] assign[=] <ast.IfExp object at 0x7da1b0506080>
call[name[sys].stdout.write, parameter[call[constant[{}Touched {}/{} entries in {} directories...
].format, parameter[name[prefix], call[name[stats]][constant[entries_touched]], call[name[stats]][constant[entries_seen]], call[name[stats]][constant[local_dirs]]]]]]
call[name[sys].stdout.flush, parameter[]]
return[None] | keyword[def] identifier[_tick] ( identifier[self] ):
literal[string]
keyword[if] ( identifier[self] . identifier[verbose] >= literal[int] keyword[and] keyword[not] identifier[IS_REDIRECTED] ) keyword[or] identifier[self] . identifier[options] . identifier[get] ( literal[string] ):
identifier[stats] = identifier[self] . identifier[get_stats] ()
identifier[prefix] = identifier[DRY_RUN_PREFIX] keyword[if] identifier[self] . identifier[dry_run] keyword[else] literal[string]
identifier[sys] . identifier[stdout] . identifier[write] (
literal[string] . identifier[format] (
identifier[prefix] ,
identifier[stats] [ literal[string] ],
identifier[stats] [ literal[string] ],
identifier[stats] [ literal[string] ],
)
)
identifier[sys] . identifier[stdout] . identifier[flush] ()
keyword[return] | def _tick(self):
"""Write progress info and move cursor to beginning of line."""
if self.verbose >= 3 and (not IS_REDIRECTED) or self.options.get('progress'):
stats = self.get_stats()
prefix = DRY_RUN_PREFIX if self.dry_run else ''
sys.stdout.write('{}Touched {}/{} entries in {} directories...\r'.format(prefix, stats['entries_touched'], stats['entries_seen'], stats['local_dirs'])) # depends on [control=['if'], data=[]]
sys.stdout.flush()
return |
def unselect_all(self):
    """ Clearing the selected_item also clears the focused_item. """
    # Snapshot the current selection first so the affected items can be
    # redrawn (de-highlighted) after the selection is cleared.
    items = self._get_selected_items()
    # Suppress per-item selection events while clearing; a single aggregate
    # 'selection-changed' signal is emitted below instead.
    with self._suppress_selection_events():
        self._selection.clear()
    self.queue_draw_item(*items)
    self.emit('selection-changed', self._get_selected_items())
constant[ Clearing the selected_item also clears the focused_item. ]
variable[items] assign[=] call[name[self]._get_selected_items, parameter[]]
with call[name[self]._suppress_selection_events, parameter[]] begin[:]
call[name[self]._selection.clear, parameter[]]
call[name[self].queue_draw_item, parameter[<ast.Starred object at 0x7da1b1a3dbd0>]]
call[name[self].emit, parameter[constant[selection-changed], call[name[self]._get_selected_items, parameter[]]]] | keyword[def] identifier[unselect_all] ( identifier[self] ):
literal[string]
identifier[items] = identifier[self] . identifier[_get_selected_items] ()
keyword[with] identifier[self] . identifier[_suppress_selection_events] ():
identifier[self] . identifier[_selection] . identifier[clear] ()
identifier[self] . identifier[queue_draw_item] (* identifier[items] )
identifier[self] . identifier[emit] ( literal[string] , identifier[self] . identifier[_get_selected_items] ()) | def unselect_all(self):
""" Clearing the selected_item also clears the focused_item. """
items = self._get_selected_items()
with self._suppress_selection_events():
self._selection.clear() # depends on [control=['with'], data=[]]
self.queue_draw_item(*items)
self.emit('selection-changed', self._get_selected_items()) |
def b58check_unpack(b58_s):
    """ Takes in a base 58 check string and returns: the version byte, the
        original encoded binary string, and the checksum.

        :param b58_s: base58check-encoded string
        :raises ValueError: if the embedded 4-byte checksum does not match
            the checksum recomputed from the decoded payload
        :returns: (version_byte, encoded_value, checksum) binary strings
    """
    # Leading '1' characters encode leading zero bytes; they vanish in the
    # numeric base conversion and must be re-added afterwards.
    num_leading_zeros = len(re.match(r'^1*', b58_s).group(0))
    # convert from b58 to b16
    hex_s = change_charset(b58_s, B58_KEYSPACE, HEX_KEYSPACE)
    # if an odd number of hex characters are present, add a zero to the front
    if len(hex_s) % 2 == 1:
        hex_s = "0" + hex_s
    # convert from b16 to b2
    bin_s = unhexlify(hex_s)
    # add in the leading zeros; bytes literal required: unhexlify returns
    # bytes on Python 3, and mixing str with bytes raises TypeError there
    # (b'\x00' is identical to '\x00' on Python 2)
    bin_s = b'\x00' * num_leading_zeros + bin_s
    # make sure the newly calculated checksum equals the embedded checksum
    newly_calculated_checksum = bin_checksum(bin_s[:-4])
    embedded_checksum = bin_s[-4:]
    if newly_calculated_checksum != embedded_checksum:
        raise ValueError('b58check value has an invalid checksum')
    # return values: 1-byte version prefix, payload, 4-byte checksum
    version_byte = bin_s[:1]
    encoded_value = bin_s[1:-4]
    checksum = bin_s[-4:]
    return version_byte, encoded_value, checksum
constant[ Takes in a base 58 check string and returns: the version byte, the
original encoded binary string, and the checksum.
]
variable[num_leading_zeros] assign[=] call[name[len], parameter[call[call[name[re].match, parameter[constant[^1*], name[b58_s]]].group, parameter[constant[0]]]]]
variable[hex_s] assign[=] call[name[change_charset], parameter[name[b58_s], name[B58_KEYSPACE], name[HEX_KEYSPACE]]]
if compare[binary_operation[call[name[len], parameter[name[hex_s]]] <ast.Mod object at 0x7da2590d6920> constant[2]] equal[==] constant[1]] begin[:]
variable[hex_s] assign[=] binary_operation[constant[0] + name[hex_s]]
variable[bin_s] assign[=] call[name[unhexlify], parameter[name[hex_s]]]
variable[bin_s] assign[=] binary_operation[binary_operation[constant[ ] * name[num_leading_zeros]] + name[bin_s]]
variable[newly_calculated_checksum] assign[=] call[name[bin_checksum], parameter[call[name[bin_s]][<ast.Slice object at 0x7da1b101ad10>]]]
variable[embedded_checksum] assign[=] call[name[bin_s]][<ast.Slice object at 0x7da1b101a650>]
if <ast.UnaryOp object at 0x7da1b1019f00> begin[:]
<ast.Raise object at 0x7da1b1019150>
variable[version_byte] assign[=] call[name[bin_s]][<ast.Slice object at 0x7da1b10191e0>]
variable[encoded_value] assign[=] call[name[bin_s]][<ast.Slice object at 0x7da1b101bfa0>]
variable[checksum] assign[=] call[name[bin_s]][<ast.Slice object at 0x7da1b10195d0>]
return[tuple[[<ast.Name object at 0x7da1b101bf10>, <ast.Name object at 0x7da1b101bb20>, <ast.Name object at 0x7da1b101a680>]]] | keyword[def] identifier[b58check_unpack] ( identifier[b58_s] ):
literal[string]
identifier[num_leading_zeros] = identifier[len] ( identifier[re] . identifier[match] ( literal[string] , identifier[b58_s] ). identifier[group] ( literal[int] ))
identifier[hex_s] = identifier[change_charset] ( identifier[b58_s] , identifier[B58_KEYSPACE] , identifier[HEX_KEYSPACE] )
keyword[if] identifier[len] ( identifier[hex_s] )% literal[int] == literal[int] :
identifier[hex_s] = literal[string] + identifier[hex_s]
identifier[bin_s] = identifier[unhexlify] ( identifier[hex_s] )
identifier[bin_s] = literal[string] * identifier[num_leading_zeros] + identifier[bin_s]
identifier[newly_calculated_checksum] = identifier[bin_checksum] ( identifier[bin_s] [:- literal[int] ])
identifier[embedded_checksum] = identifier[bin_s] [- literal[int] :]
keyword[if] keyword[not] ( identifier[newly_calculated_checksum] == identifier[embedded_checksum] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[version_byte] = identifier[bin_s] [: literal[int] ]
identifier[encoded_value] = identifier[bin_s] [ literal[int] :- literal[int] ]
identifier[checksum] = identifier[bin_s] [- literal[int] :]
keyword[return] identifier[version_byte] , identifier[encoded_value] , identifier[checksum] | def b58check_unpack(b58_s):
""" Takes in a base 58 check string and returns: the version byte, the
original encoded binary string, and the checksum.
"""
num_leading_zeros = len(re.match('^1*', b58_s).group(0))
# convert from b58 to b16
hex_s = change_charset(b58_s, B58_KEYSPACE, HEX_KEYSPACE)
# if an odd number of hex characters are present, add a zero to the front
if len(hex_s) % 2 == 1:
hex_s = '0' + hex_s # depends on [control=['if'], data=[]]
# convert from b16 to b2
bin_s = unhexlify(hex_s)
# add in the leading zeros
bin_s = '\x00' * num_leading_zeros + bin_s
# make sure the newly calculated checksum equals the embedded checksum
newly_calculated_checksum = bin_checksum(bin_s[:-4])
embedded_checksum = bin_s[-4:]
if not newly_calculated_checksum == embedded_checksum:
raise ValueError('b58check value has an invalid checksum') # depends on [control=['if'], data=[]]
# return values
version_byte = bin_s[:1]
encoded_value = bin_s[1:-4]
checksum = bin_s[-4:]
return (version_byte, encoded_value, checksum) |
def BuildPanel(self):
    """ builds basic GUI panel and popup menu"""
    # Convert the pixel size to inches, as matplotlib's Figure expects.
    figsize = (1.0*self.size[0]/self.dpi, 1.0*self.size[1]/self.dpi)
    self.fig = Figure(figsize, dpi=self.dpi)
    # Axes fill the whole figure area (no margins).
    self.axes = self.fig.add_axes([0.0, 0.0, 1.0, 1.0])
    self.canvas = FigureCanvasWxAgg(self, -1, self.fig)
    self.fig.set_facecolor('#FFFFFD')
    # Share the plot objects with the configuration object so other
    # components can reach them.
    self.conf.axes = self.axes
    self.conf.fig = self.fig
    self.conf.canvas= self.canvas
    # self.canvas.SetCursor(wx.StockCursor(wx.CURSOR_ARROW))
    # This way of adding to sizer allows resizing
    sizer = wx.BoxSizer(wx.HORIZONTAL)
    sizer.Add(self.canvas, 1, wx.ALL|wx.GROW)
    self.SetSizer(sizer)
    self.Fit()
    # Hook up canvas event handlers (defined elsewhere on this class).
    self.addCanvasEvents()
constant[ builds basic GUI panel and popup menu]
variable[figsize] assign[=] tuple[[<ast.BinOp object at 0x7da20e9b29e0>, <ast.BinOp object at 0x7da20e9b2a40>]]
name[self].fig assign[=] call[name[Figure], parameter[name[figsize]]]
name[self].axes assign[=] call[name[self].fig.add_axes, parameter[list[[<ast.Constant object at 0x7da20e9b1690>, <ast.Constant object at 0x7da20e9b0370>, <ast.Constant object at 0x7da20e9b05e0>, <ast.Constant object at 0x7da20e9b3880>]]]]
name[self].canvas assign[=] call[name[FigureCanvasWxAgg], parameter[name[self], <ast.UnaryOp object at 0x7da20e9b3be0>, name[self].fig]]
call[name[self].fig.set_facecolor, parameter[constant[#FFFFFD]]]
name[self].conf.axes assign[=] name[self].axes
name[self].conf.fig assign[=] name[self].fig
name[self].conf.canvas assign[=] name[self].canvas
variable[sizer] assign[=] call[name[wx].BoxSizer, parameter[name[wx].HORIZONTAL]]
call[name[sizer].Add, parameter[name[self].canvas, constant[1], binary_operation[name[wx].ALL <ast.BitOr object at 0x7da2590d6aa0> name[wx].GROW]]]
call[name[self].SetSizer, parameter[name[sizer]]]
call[name[self].Fit, parameter[]]
call[name[self].addCanvasEvents, parameter[]] | keyword[def] identifier[BuildPanel] ( identifier[self] ):
literal[string]
identifier[figsize] =( literal[int] * identifier[self] . identifier[size] [ literal[int] ]/ identifier[self] . identifier[dpi] , literal[int] * identifier[self] . identifier[size] [ literal[int] ]/ identifier[self] . identifier[dpi] )
identifier[self] . identifier[fig] = identifier[Figure] ( identifier[figsize] , identifier[dpi] = identifier[self] . identifier[dpi] )
identifier[self] . identifier[axes] = identifier[self] . identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[self] . identifier[canvas] = identifier[FigureCanvasWxAgg] ( identifier[self] ,- literal[int] , identifier[self] . identifier[fig] )
identifier[self] . identifier[fig] . identifier[set_facecolor] ( literal[string] )
identifier[self] . identifier[conf] . identifier[axes] = identifier[self] . identifier[axes]
identifier[self] . identifier[conf] . identifier[fig] = identifier[self] . identifier[fig]
identifier[self] . identifier[conf] . identifier[canvas] = identifier[self] . identifier[canvas]
identifier[sizer] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[HORIZONTAL] )
identifier[sizer] . identifier[Add] ( identifier[self] . identifier[canvas] , literal[int] , identifier[wx] . identifier[ALL] | identifier[wx] . identifier[GROW] )
identifier[self] . identifier[SetSizer] ( identifier[sizer] )
identifier[self] . identifier[Fit] ()
identifier[self] . identifier[addCanvasEvents] () | def BuildPanel(self):
""" builds basic GUI panel and popup menu"""
figsize = (1.0 * self.size[0] / self.dpi, 1.0 * self.size[1] / self.dpi)
self.fig = Figure(figsize, dpi=self.dpi)
self.axes = self.fig.add_axes([0.0, 0.0, 1.0, 1.0])
self.canvas = FigureCanvasWxAgg(self, -1, self.fig)
self.fig.set_facecolor('#FFFFFD')
self.conf.axes = self.axes
self.conf.fig = self.fig
self.conf.canvas = self.canvas
# self.canvas.SetCursor(wx.StockCursor(wx.CURSOR_ARROW))
# This way of adding to sizer allows resizing
sizer = wx.BoxSizer(wx.HORIZONTAL)
sizer.Add(self.canvas, 1, wx.ALL | wx.GROW)
self.SetSizer(sizer)
self.Fit()
self.addCanvasEvents() |
def load_xml_attrs(self):
    """
    Load XML attributes as object attributes.
    :returns: List of parsed attributes.
    :rtype: list
    """
    parsed_names = []
    if hasattr(self, 'xml_element'):
        for raw_name, value in self.xml_element.attrib.items():
            # Strip the XML namespace; only the local tag matters here.
            _uri, tag = Element.get_namespace_and_tag(raw_name)
            attr_name = tag.replace('-', '_')
            parsed_names.append(attr_name)
            setattr(self, attr_name, value)
    self.attrs = parsed_names
    return self.attrs
constant[
Load XML attributes as object attributes.
:returns: List of parsed attributes.
:rtype: list
]
variable[attrs_list] assign[=] call[name[list], parameter[]]
if call[name[hasattr], parameter[name[self], constant[xml_element]]] begin[:]
variable[xml_attrs] assign[=] name[self].xml_element.attrib
for taget[tuple[[<ast.Name object at 0x7da1b0af1cc0>, <ast.Name object at 0x7da1b0af1660>]]] in starred[call[name[iter], parameter[call[name[xml_attrs].items, parameter[]]]]] begin[:]
<ast.Tuple object at 0x7da1b0af1120> assign[=] call[name[Element].get_namespace_and_tag, parameter[name[variable]]]
variable[tag] assign[=] call[name[tag].replace, parameter[constant[-], constant[_]]]
call[name[attrs_list].append, parameter[name[tag]]]
call[name[setattr], parameter[name[self], name[tag], name[value]]]
name[self].attrs assign[=] name[attrs_list]
return[name[self].attrs] | keyword[def] identifier[load_xml_attrs] ( identifier[self] ):
literal[string]
identifier[attrs_list] = identifier[list] ()
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[xml_attrs] = identifier[self] . identifier[xml_element] . identifier[attrib]
keyword[for] identifier[variable] , identifier[value] keyword[in] identifier[iter] ( identifier[xml_attrs] . identifier[items] ()):
identifier[uri] , identifier[tag] = identifier[Element] . identifier[get_namespace_and_tag] ( identifier[variable] )
identifier[tag] = identifier[tag] . identifier[replace] ( literal[string] , literal[string] )
identifier[attrs_list] . identifier[append] ( identifier[tag] )
identifier[setattr] ( identifier[self] , identifier[tag] , identifier[value] )
identifier[self] . identifier[attrs] = identifier[attrs_list]
keyword[return] identifier[self] . identifier[attrs] | def load_xml_attrs(self):
"""
Load XML attributes as object attributes.
:returns: List of parsed attributes.
:rtype: list
"""
attrs_list = list()
if hasattr(self, 'xml_element'):
xml_attrs = self.xml_element.attrib
for (variable, value) in iter(xml_attrs.items()):
(uri, tag) = Element.get_namespace_and_tag(variable)
tag = tag.replace('-', '_')
attrs_list.append(tag)
setattr(self, tag, value) # depends on [control=['for'], data=[]]
self.attrs = attrs_list # depends on [control=['if'], data=[]]
return self.attrs |
def submit_openmetric(self, metric_name, metric, scraper_config, hostname=None):
    """
    Forward one scraped OpenMetrics metric to Datadog.
    gauge/counter/rate metrics are namespaced with scraper_config['namespace']
    and reported sample by sample (counters become monotonic counts when
    scraper_config['send_monotonic_counter'] is set, otherwise gauges).
    histogram and summary metrics are delegated to the dedicated submitters,
    and any other type is logged as an error.
    Invalid sample values are skipped with a debug log entry; the hostname
    for each sample is resolved through self._get_hostname and the tags
    through self._metric_tags.
    """
    if metric.type in ["gauge", "counter", "rate"]:
        namespaced_name = '{}.{}'.format(scraper_config['namespace'], metric_name)
        for sample in metric.samples:
            sample_value = sample[self.SAMPLE_VALUE]
            if not self._is_value_valid(sample_value):
                self.log.debug("Metric value is not supported for metric {}".format(sample[self.SAMPLE_NAME]))
                continue
            resolved_host = self._get_hostname(hostname, sample, scraper_config)
            # Tags are derived per sample so label values are reflected.
            sample_tags = self._metric_tags(
                metric_name, sample_value, sample, scraper_config, hostname=resolved_host
            )
            if metric.type == "counter" and scraper_config['send_monotonic_counter']:
                self.monotonic_count(namespaced_name, sample_value, tags=sample_tags, hostname=resolved_host)
            elif metric.type == "rate":
                self.rate(namespaced_name, sample_value, tags=sample_tags, hostname=resolved_host)
            else:
                self.gauge(namespaced_name, sample_value, tags=sample_tags, hostname=resolved_host)
    elif metric.type == "histogram":
        self._submit_gauges_from_histogram(metric_name, metric, scraper_config)
    elif metric.type == "summary":
        self._submit_gauges_from_summary(metric_name, metric, scraper_config)
    else:
        self.log.error("Metric type {} unsupported for metric {}.".format(metric.type, metric_name))
constant[
For each sample in the metric, report it as a gauge with all labels as tags
except if a labels dict is passed, in which case keys are label names we'll extract
and corresponding values are tag names we'll use (eg: {'node': 'node'}).
Histograms generate a set of values instead of a unique metric.
send_histograms_buckets is used to specify if yes or no you want to
send the buckets as tagged values when dealing with histograms.
`custom_tags` is an array of 'tag:value' that will be added to the
metric when sending the gauge to Datadog.
]
if compare[name[metric].type in list[[<ast.Constant object at 0x7da204566710>, <ast.Constant object at 0x7da2045662c0>, <ast.Constant object at 0x7da204566800>]]] begin[:]
variable[metric_name_with_namespace] assign[=] call[constant[{}.{}].format, parameter[call[name[scraper_config]][constant[namespace]], name[metric_name]]]
for taget[name[sample]] in starred[name[metric].samples] begin[:]
variable[val] assign[=] call[name[sample]][name[self].SAMPLE_VALUE]
if <ast.UnaryOp object at 0x7da2045655a0> begin[:]
call[name[self].log.debug, parameter[call[constant[Metric value is not supported for metric {}].format, parameter[call[name[sample]][name[self].SAMPLE_NAME]]]]]
continue
variable[custom_hostname] assign[=] call[name[self]._get_hostname, parameter[name[hostname], name[sample], name[scraper_config]]]
variable[tags] assign[=] call[name[self]._metric_tags, parameter[name[metric_name], name[val], name[sample], name[scraper_config]]]
if <ast.BoolOp object at 0x7da204567460> begin[:]
call[name[self].monotonic_count, parameter[name[metric_name_with_namespace], name[val]]] | keyword[def] identifier[submit_openmetric] ( identifier[self] , identifier[metric_name] , identifier[metric] , identifier[scraper_config] , identifier[hostname] = keyword[None] ):
literal[string]
keyword[if] identifier[metric] . identifier[type] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[metric_name_with_namespace] = literal[string] . identifier[format] ( identifier[scraper_config] [ literal[string] ], identifier[metric_name] )
keyword[for] identifier[sample] keyword[in] identifier[metric] . identifier[samples] :
identifier[val] = identifier[sample] [ identifier[self] . identifier[SAMPLE_VALUE] ]
keyword[if] keyword[not] identifier[self] . identifier[_is_value_valid] ( identifier[val] ):
identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[sample] [ identifier[self] . identifier[SAMPLE_NAME] ]))
keyword[continue]
identifier[custom_hostname] = identifier[self] . identifier[_get_hostname] ( identifier[hostname] , identifier[sample] , identifier[scraper_config] )
identifier[tags] = identifier[self] . identifier[_metric_tags] ( identifier[metric_name] , identifier[val] , identifier[sample] , identifier[scraper_config] , identifier[hostname] = identifier[custom_hostname] )
keyword[if] identifier[metric] . identifier[type] == literal[string] keyword[and] identifier[scraper_config] [ literal[string] ]:
identifier[self] . identifier[monotonic_count] ( identifier[metric_name_with_namespace] , identifier[val] , identifier[tags] = identifier[tags] , identifier[hostname] = identifier[custom_hostname] )
keyword[elif] identifier[metric] . identifier[type] == literal[string] :
identifier[self] . identifier[rate] ( identifier[metric_name_with_namespace] , identifier[val] , identifier[tags] = identifier[tags] , identifier[hostname] = identifier[custom_hostname] )
keyword[else] :
identifier[self] . identifier[gauge] ( identifier[metric_name_with_namespace] , identifier[val] , identifier[tags] = identifier[tags] , identifier[hostname] = identifier[custom_hostname] )
keyword[elif] identifier[metric] . identifier[type] == literal[string] :
identifier[self] . identifier[_submit_gauges_from_histogram] ( identifier[metric_name] , identifier[metric] , identifier[scraper_config] )
keyword[elif] identifier[metric] . identifier[type] == literal[string] :
identifier[self] . identifier[_submit_gauges_from_summary] ( identifier[metric_name] , identifier[metric] , identifier[scraper_config] )
keyword[else] :
identifier[self] . identifier[log] . identifier[error] ( literal[string] . identifier[format] ( identifier[metric] . identifier[type] , identifier[metric_name] )) | def submit_openmetric(self, metric_name, metric, scraper_config, hostname=None):
"""
For each sample in the metric, report it as a gauge with all labels as tags
except if a labels dict is passed, in which case keys are label names we'll extract
and corresponding values are tag names we'll use (eg: {'node': 'node'}).
Histograms generate a set of values instead of a unique metric.
send_histograms_buckets is used to specify if yes or no you want to
send the buckets as tagged values when dealing with histograms.
`custom_tags` is an array of 'tag:value' that will be added to the
metric when sending the gauge to Datadog.
"""
if metric.type in ['gauge', 'counter', 'rate']:
metric_name_with_namespace = '{}.{}'.format(scraper_config['namespace'], metric_name)
for sample in metric.samples:
val = sample[self.SAMPLE_VALUE]
if not self._is_value_valid(val):
self.log.debug('Metric value is not supported for metric {}'.format(sample[self.SAMPLE_NAME]))
continue # depends on [control=['if'], data=[]]
custom_hostname = self._get_hostname(hostname, sample, scraper_config)
# Determine the tags to send
tags = self._metric_tags(metric_name, val, sample, scraper_config, hostname=custom_hostname)
if metric.type == 'counter' and scraper_config['send_monotonic_counter']:
self.monotonic_count(metric_name_with_namespace, val, tags=tags, hostname=custom_hostname) # depends on [control=['if'], data=[]]
elif metric.type == 'rate':
self.rate(metric_name_with_namespace, val, tags=tags, hostname=custom_hostname) # depends on [control=['if'], data=[]]
else:
self.gauge(metric_name_with_namespace, val, tags=tags, hostname=custom_hostname) # depends on [control=['for'], data=['sample']] # depends on [control=['if'], data=[]]
elif metric.type == 'histogram':
self._submit_gauges_from_histogram(metric_name, metric, scraper_config) # depends on [control=['if'], data=[]]
elif metric.type == 'summary':
self._submit_gauges_from_summary(metric_name, metric, scraper_config) # depends on [control=['if'], data=[]]
else:
self.log.error('Metric type {} unsupported for metric {}.'.format(metric.type, metric_name)) |
def qtax(mt, x, t, q, m=1):
    """Annuity value with geometrically increasing payments ('geometrica').
    Discounts at the adjusted rate j = (i - q) / (1 + q) so a payment stream
    growing geometrically by rate q can be valued as a level annuity on an
    adjusted table, then adds the standard m-thly payment correction
    (m - 1) / (2m) scaled by (1 - nEx).
    NOTE(review): semantics inferred from the formula; confirm against the
    actuarial conventions used by ``tax`` and ``nEx`` in this module.
    """
    growth = float(q)
    adjusted_rate = (mt.i - growth) / (1 + growth)
    adjusted_table = Actuarial(nt=mt.nt, i=adjusted_rate)
    level_value = tax(adjusted_table, x, t)
    mthly_correction = (float(m - 1) / float(m * 2)) * (1 - nEx(mt, x, t))
    return level_value + mthly_correction
constant[ geometrica ]
variable[q] assign[=] call[name[float], parameter[name[q]]]
variable[j] assign[=] binary_operation[binary_operation[name[mt].i - name[q]] / binary_operation[constant[1] + name[q]]]
variable[mtj] assign[=] call[name[Actuarial], parameter[]]
return[binary_operation[call[name[tax], parameter[name[mtj], name[x], name[t]]] + binary_operation[binary_operation[call[name[float], parameter[binary_operation[name[m] - constant[1]]]] / call[name[float], parameter[binary_operation[name[m] * constant[2]]]]] * binary_operation[constant[1] - call[name[nEx], parameter[name[mt], name[x], name[t]]]]]]] | keyword[def] identifier[qtax] ( identifier[mt] , identifier[x] , identifier[t] , identifier[q] , identifier[m] = literal[int] ):
literal[string]
identifier[q] = identifier[float] ( identifier[q] )
identifier[j] =( identifier[mt] . identifier[i] - identifier[q] )/( literal[int] + identifier[q] )
identifier[mtj] = identifier[Actuarial] ( identifier[nt] = identifier[mt] . identifier[nt] , identifier[i] = identifier[j] )
keyword[return] identifier[tax] ( identifier[mtj] , identifier[x] , identifier[t] )+(( identifier[float] ( identifier[m] - literal[int] )/ identifier[float] ( identifier[m] * literal[int] ))*( literal[int] - identifier[nEx] ( identifier[mt] , identifier[x] , identifier[t] ))) | def qtax(mt, x, t, q, m=1):
""" geometrica """
q = float(q)
j = (mt.i - q) / (1 + q)
mtj = Actuarial(nt=mt.nt, i=j)
return tax(mtj, x, t) + float(m - 1) / float(m * 2) * (1 - nEx(mt, x, t)) |
def coalescence_waiting_times(self, backward=True):
    '''Generator over the waiting times between successive coalescence events.
    Every internal node (more than one child) contributes its distance from
    the root as an event time, and the single deepest leaf distance is
    appended as the final event; successive differences are then yielded.
    Args:
        ``backward`` (``bool``): ``True`` to go backward in time (i.e., leaves to root), otherwise ``False``
    '''
    if not isinstance(backward, bool):
        raise TypeError("backward must be a bool")
    event_times = []
    deepest_leaf = float('-inf')
    for node, dist in self.distances_from_root():
        child_count = len(node.children)
        if child_count > 1:
            event_times.append(dist)
        elif child_count == 0 and dist > deepest_leaf:
            deepest_leaf = dist
    event_times.append(deepest_leaf)
    event_times.sort(reverse=backward)
    # Pairwise differences between consecutive (sorted) event times.
    for earlier, later in zip(event_times, event_times[1:]):
        yield abs(earlier - later)
constant[Generator over the waiting times of successive coalescence events
Args:
``backward`` (``bool``): ``True`` to go backward in time (i.e., leaves to root), otherwise ``False``
]
if <ast.UnaryOp object at 0x7da1b0b5f730> begin[:]
<ast.Raise object at 0x7da1b0b5c190>
variable[times] assign[=] call[name[list], parameter[]]
variable[lowest_leaf_dist] assign[=] call[name[float], parameter[constant[-inf]]]
for taget[tuple[[<ast.Name object at 0x7da1b0b5f7f0>, <ast.Name object at 0x7da1b0b5fd90>]]] in starred[call[name[self].distances_from_root, parameter[]]] begin[:]
if compare[call[name[len], parameter[name[n].children]] greater[>] constant[1]] begin[:]
call[name[times].append, parameter[name[d]]]
call[name[times].append, parameter[name[lowest_leaf_dist]]]
call[name[times].sort, parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[times]]] - constant[1]]]]] begin[:]
<ast.Yield object at 0x7da1b0b5ece0> | keyword[def] identifier[coalescence_waiting_times] ( identifier[self] , identifier[backward] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[backward] , identifier[bool] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[times] = identifier[list] (); identifier[lowest_leaf_dist] = identifier[float] ( literal[string] )
keyword[for] identifier[n] , identifier[d] keyword[in] identifier[self] . identifier[distances_from_root] ():
keyword[if] identifier[len] ( identifier[n] . identifier[children] )> literal[int] :
identifier[times] . identifier[append] ( identifier[d] )
keyword[elif] identifier[len] ( identifier[n] . identifier[children] )== literal[int] keyword[and] identifier[d] > identifier[lowest_leaf_dist] :
identifier[lowest_leaf_dist] = identifier[d]
identifier[times] . identifier[append] ( identifier[lowest_leaf_dist] )
identifier[times] . identifier[sort] ( identifier[reverse] = identifier[backward] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[times] )- literal[int] ):
keyword[yield] identifier[abs] ( identifier[times] [ identifier[i] ]- identifier[times] [ identifier[i] + literal[int] ]) | def coalescence_waiting_times(self, backward=True):
"""Generator over the waiting times of successive coalescence events
Args:
``backward`` (``bool``): ``True`` to go backward in time (i.e., leaves to root), otherwise ``False``
"""
if not isinstance(backward, bool):
raise TypeError('backward must be a bool') # depends on [control=['if'], data=[]]
times = list()
lowest_leaf_dist = float('-inf')
for (n, d) in self.distances_from_root():
if len(n.children) > 1:
times.append(d) # depends on [control=['if'], data=[]]
elif len(n.children) == 0 and d > lowest_leaf_dist:
lowest_leaf_dist = d # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
times.append(lowest_leaf_dist)
times.sort(reverse=backward)
for i in range(len(times) - 1):
yield abs(times[i] - times[i + 1]) # depends on [control=['for'], data=['i']] |
def labels_to_indexed_coref_values(content_objs, labels):
    '''Convert labels to ``(value, i, j)`` triples indexed into ``content_objs``.
    ``content_objs`` is a sequence of ``(content_id, FeatureCollection)``
    pairs; each returned triple holds the label's coreference value (in
    ``{-1, 1}``) and the positional indices ``i, j`` of its two content ids,
    with ``0 <= i, j < len(content_objs)``.  Labels referring to a content id
    not present in ``content_objs`` are dropped.
    '''
    index_of = {cid: pos for pos, (cid, _) in enumerate(content_objs)}
    return [
        (lab.value.value, index_of[lab.content_id1], index_of[lab.content_id2])
        for lab in labels
        if lab.content_id1 in index_of and lab.content_id2 in index_of
    ]
constant[[(content_id, FeatureCollection)] -> [Label] -> [({-1,1}, i, j)]
where 0 <= i, j < len(content_objs).
]
variable[cids_to_idx] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da2054a4250>, <ast.Tuple object at 0x7da2054a6ec0>]]] in starred[call[name[enumerate], parameter[name[content_objs]]]] begin[:]
call[name[cids_to_idx]][name[content_id]] assign[=] name[i]
variable[idx] assign[=] <ast.Lambda object at 0x7da2054a75b0>
variable[labs] assign[=] list[[]]
for taget[name[lab]] in starred[name[labels]] begin[:]
if <ast.BoolOp object at 0x7da1b15a20e0> begin[:]
call[name[labs].append, parameter[tuple[[<ast.Attribute object at 0x7da1b15a3c10>, <ast.Call object at 0x7da1b15a10c0>, <ast.Call object at 0x7da1b15a0fd0>]]]]
return[name[labs]] | keyword[def] identifier[labels_to_indexed_coref_values] ( identifier[content_objs] , identifier[labels] ):
literal[string]
identifier[cids_to_idx] ={}
keyword[for] identifier[i] ,( identifier[content_id] , identifier[_] ) keyword[in] identifier[enumerate] ( identifier[content_objs] ):
identifier[cids_to_idx] [ identifier[content_id] ]= identifier[i]
identifier[idx] = keyword[lambda] identifier[cid] : identifier[cids_to_idx] [ identifier[cid] ]
identifier[labs] =[]
keyword[for] identifier[lab] keyword[in] identifier[labels] :
keyword[if] identifier[lab] . identifier[content_id1] keyword[in] identifier[cids_to_idx] keyword[and] identifier[lab] . identifier[content_id2] keyword[in] identifier[cids_to_idx] :
identifier[labs] . identifier[append] (( identifier[lab] . identifier[value] . identifier[value] , identifier[idx] ( identifier[lab] . identifier[content_id1] ), identifier[idx] ( identifier[lab] . identifier[content_id2] )))
keyword[return] identifier[labs] | def labels_to_indexed_coref_values(content_objs, labels):
"""[(content_id, FeatureCollection)] -> [Label] -> [({-1,1}, i, j)]
where 0 <= i, j < len(content_objs).
"""
cids_to_idx = {}
for (i, (content_id, _)) in enumerate(content_objs):
cids_to_idx[content_id] = i # depends on [control=['for'], data=[]]
idx = lambda cid: cids_to_idx[cid]
labs = []
for lab in labels:
if lab.content_id1 in cids_to_idx and lab.content_id2 in cids_to_idx:
labs.append((lab.value.value, idx(lab.content_id1), idx(lab.content_id2))) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['lab']]
return labs |
def group(args):
    """
    %prog group tabfile > tabfile.grouped
    Given a tab-delimited file, either group all elements within the file or
    group the elements in the value column(s) based on the key (groupby) column
    For example, convert this | into this
    ---------------------------------------
    a 2 3 4 | a,2,3,4,5,6
    a 5 6 | b,7,8
    b 7 8 | c,9,10,11
    c 9 |
    c 10 11 |
    If grouping by a particular column,
    convert this | into this:
    ---------------------------------------------
    a 2 3 4 | a 2,5 3,6 4
    a 5 6 | b 7 8
    b 7 8 | c 9,10 11
    c 9 |
    c 10 11 |
    By default, it uniqifies all the grouped elements
    """
    # Deferred imports: these helpers are only needed when this subcommand runs.
    from jcvi.utils.cbook import AutoVivification
    from jcvi.utils.grouper import Grouper
    p = OptionParser(group.__doc__)
    p.set_sep()
    p.add_option("--groupby", default=None, type='int',
                 help="Default column to groupby [default: %default]")
    p.add_option("--groupsep", default=',',
                 help="Separator to join the grouped elements [default: `%default`]")
    p.add_option("--nouniq", default=False, action="store_true",
                 help="Do not uniqify the grouped elements [default: %default]")
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(not p.print_help())
    tabfile, = args
    sep = opts.sep
    groupby = opts.groupby
    groupsep = opts.groupsep
    # Column indices seen so far; grown lazily to the widest row encountered.
    cols = []
    # Two modes: keyed grouping (nested auto-dict keyed by the groupby value)
    # vs. whole-row clustering (disjoint-set style Grouper over all atoms).
    grouper = AutoVivification() if groupby is not None else Grouper()
    fp = must_open(tabfile)
    for row in fp:
        row = row.rstrip()
        atoms = row.split(sep)
        if groupby is not None:
            if len(cols) < len(atoms):
                # NOTE(review): ``xrange`` makes this Python 2-only code.
                cols = [x for x in xrange(len(atoms))]
            if groupby not in cols:
                logging.error("groupby col index `{0}` is out of range".format(groupby))
                sys.exit()
            key = atoms[groupby]
            for col in cols:
                if col == groupby:
                    continue
                # First time this (key, col) pair is seen: pick the container.
                # list preserves duplicates (--nouniq); set deduplicates.
                if not grouper[key][col]:
                    grouper[key][col] = [] if opts.nouniq else set()
                if col < len(atoms):
                    # Values already containing the group separator are split
                    # so their parts merge with values from other rows.
                    if groupsep in atoms[col]:
                        for atom in atoms[col].split(groupsep):
                            if opts.nouniq:
                                grouper[key][col].append(atom)
                            else:
                                grouper[key][col].add(atom)
                    else:
                        if opts.nouniq:
                            grouper[key][col].append(atoms[col])
                        else:
                            grouper[key][col].add(atoms[col])
        else:
            # Whole-row mode: every atom on a row joins one connected group.
            grouper.join(*atoms)
    for key in grouper:
        if groupby is not None:
            line = []
            for col in cols:
                if col == groupby:
                    line.append(key)
                elif col in grouper[key].keys():
                    line.append(groupsep.join(grouper[key][col]))
                else:
                    # Column never seen for this key (ragged input rows).
                    line.append("na")
            print(sep.join(line))
        else:
            # Presumably each "key" iterated from Grouper is one connected
            # group of atoms — verify against jcvi.utils.grouper.Grouper.
            print(groupsep.join(key))
constant[
%prog group tabfile > tabfile.grouped
Given a tab-delimited file, either group all elements within the file or
group the elements in the value column(s) based on the key (groupby) column
For example, convert this | into this
---------------------------------------
a 2 3 4 | a,2,3,4,5,6
a 5 6 | b,7,8
b 7 8 | c,9,10,11
c 9 |
c 10 11 |
If grouping by a particular column,
convert this | into this:
---------------------------------------------
a 2 3 4 | a 2,5 3,6 4
a 5 6 | b 7 8
b 7 8 | c 9,10 11
c 9 |
c 10 11 |
By default, it uniqifies all the grouped elements
]
from relative_module[jcvi.utils.cbook] import module[AutoVivification]
from relative_module[jcvi.utils.grouper] import module[Grouper]
variable[p] assign[=] call[name[OptionParser], parameter[name[group].__doc__]]
call[name[p].set_sep, parameter[]]
call[name[p].add_option, parameter[constant[--groupby]]]
call[name[p].add_option, parameter[constant[--groupsep]]]
call[name[p].add_option, parameter[constant[--nouniq]]]
<ast.Tuple object at 0x7da20eb29a80> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[1]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da20c7c9ed0>]]
<ast.Tuple object at 0x7da18eb568c0> assign[=] name[args]
variable[sep] assign[=] name[opts].sep
variable[groupby] assign[=] name[opts].groupby
variable[groupsep] assign[=] name[opts].groupsep
variable[cols] assign[=] list[[]]
variable[grouper] assign[=] <ast.IfExp object at 0x7da18bc73f70>
variable[fp] assign[=] call[name[must_open], parameter[name[tabfile]]]
for taget[name[row]] in starred[name[fp]] begin[:]
variable[row] assign[=] call[name[row].rstrip, parameter[]]
variable[atoms] assign[=] call[name[row].split, parameter[name[sep]]]
if compare[name[groupby] is_not constant[None]] begin[:]
if compare[call[name[len], parameter[name[cols]]] less[<] call[name[len], parameter[name[atoms]]]] begin[:]
variable[cols] assign[=] <ast.ListComp object at 0x7da18bc71c60>
if compare[name[groupby] <ast.NotIn object at 0x7da2590d7190> name[cols]] begin[:]
call[name[logging].error, parameter[call[constant[groupby col index `{0}` is out of range].format, parameter[name[groupby]]]]]
call[name[sys].exit, parameter[]]
variable[key] assign[=] call[name[atoms]][name[groupby]]
for taget[name[col]] in starred[name[cols]] begin[:]
if compare[name[col] equal[==] name[groupby]] begin[:]
continue
if <ast.UnaryOp object at 0x7da18f812740> begin[:]
call[call[name[grouper]][name[key]]][name[col]] assign[=] <ast.IfExp object at 0x7da18f811510>
if compare[name[col] less[<] call[name[len], parameter[name[atoms]]]] begin[:]
if compare[name[groupsep] in call[name[atoms]][name[col]]] begin[:]
for taget[name[atom]] in starred[call[call[name[atoms]][name[col]].split, parameter[name[groupsep]]]] begin[:]
if name[opts].nouniq begin[:]
call[call[call[name[grouper]][name[key]]][name[col]].append, parameter[name[atom]]]
for taget[name[key]] in starred[name[grouper]] begin[:]
if compare[name[groupby] is_not constant[None]] begin[:]
variable[line] assign[=] list[[]]
for taget[name[col]] in starred[name[cols]] begin[:]
if compare[name[col] equal[==] name[groupby]] begin[:]
call[name[line].append, parameter[name[key]]]
call[name[print], parameter[call[name[sep].join, parameter[name[line]]]]] | keyword[def] identifier[group] ( identifier[args] ):
literal[string]
keyword[from] identifier[jcvi] . identifier[utils] . identifier[cbook] keyword[import] identifier[AutoVivification]
keyword[from] identifier[jcvi] . identifier[utils] . identifier[grouper] keyword[import] identifier[Grouper]
identifier[p] = identifier[OptionParser] ( identifier[group] . identifier[__doc__] )
identifier[p] . identifier[set_sep] ()
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[None] , identifier[type] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[tabfile] ,= identifier[args]
identifier[sep] = identifier[opts] . identifier[sep]
identifier[groupby] = identifier[opts] . identifier[groupby]
identifier[groupsep] = identifier[opts] . identifier[groupsep]
identifier[cols] =[]
identifier[grouper] = identifier[AutoVivification] () keyword[if] identifier[groupby] keyword[is] keyword[not] keyword[None] keyword[else] identifier[Grouper] ()
identifier[fp] = identifier[must_open] ( identifier[tabfile] )
keyword[for] identifier[row] keyword[in] identifier[fp] :
identifier[row] = identifier[row] . identifier[rstrip] ()
identifier[atoms] = identifier[row] . identifier[split] ( identifier[sep] )
keyword[if] identifier[groupby] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[len] ( identifier[cols] )< identifier[len] ( identifier[atoms] ):
identifier[cols] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[xrange] ( identifier[len] ( identifier[atoms] ))]
keyword[if] identifier[groupby] keyword[not] keyword[in] identifier[cols] :
identifier[logging] . identifier[error] ( literal[string] . identifier[format] ( identifier[groupby] ))
identifier[sys] . identifier[exit] ()
identifier[key] = identifier[atoms] [ identifier[groupby] ]
keyword[for] identifier[col] keyword[in] identifier[cols] :
keyword[if] identifier[col] == identifier[groupby] :
keyword[continue]
keyword[if] keyword[not] identifier[grouper] [ identifier[key] ][ identifier[col] ]:
identifier[grouper] [ identifier[key] ][ identifier[col] ]=[] keyword[if] identifier[opts] . identifier[nouniq] keyword[else] identifier[set] ()
keyword[if] identifier[col] < identifier[len] ( identifier[atoms] ):
keyword[if] identifier[groupsep] keyword[in] identifier[atoms] [ identifier[col] ]:
keyword[for] identifier[atom] keyword[in] identifier[atoms] [ identifier[col] ]. identifier[split] ( identifier[groupsep] ):
keyword[if] identifier[opts] . identifier[nouniq] :
identifier[grouper] [ identifier[key] ][ identifier[col] ]. identifier[append] ( identifier[atom] )
keyword[else] :
identifier[grouper] [ identifier[key] ][ identifier[col] ]. identifier[add] ( identifier[atom] )
keyword[else] :
keyword[if] identifier[opts] . identifier[nouniq] :
identifier[grouper] [ identifier[key] ][ identifier[col] ]. identifier[append] ( identifier[atoms] [ identifier[col] ])
keyword[else] :
identifier[grouper] [ identifier[key] ][ identifier[col] ]. identifier[add] ( identifier[atoms] [ identifier[col] ])
keyword[else] :
identifier[grouper] . identifier[join] (* identifier[atoms] )
keyword[for] identifier[key] keyword[in] identifier[grouper] :
keyword[if] identifier[groupby] keyword[is] keyword[not] keyword[None] :
identifier[line] =[]
keyword[for] identifier[col] keyword[in] identifier[cols] :
keyword[if] identifier[col] == identifier[groupby] :
identifier[line] . identifier[append] ( identifier[key] )
keyword[elif] identifier[col] keyword[in] identifier[grouper] [ identifier[key] ]. identifier[keys] ():
identifier[line] . identifier[append] ( identifier[groupsep] . identifier[join] ( identifier[grouper] [ identifier[key] ][ identifier[col] ]))
keyword[else] :
identifier[line] . identifier[append] ( literal[string] )
identifier[print] ( identifier[sep] . identifier[join] ( identifier[line] ))
keyword[else] :
identifier[print] ( identifier[groupsep] . identifier[join] ( identifier[key] )) | def group(args):
"""
%prog group tabfile > tabfile.grouped
Given a tab-delimited file, either group all elements within the file or
group the elements in the value column(s) based on the key (groupby) column
For example, convert this | into this
---------------------------------------
a 2 3 4 | a,2,3,4,5,6
a 5 6 | b,7,8
b 7 8 | c,9,10,11
c 9 |
c 10 11 |
If grouping by a particular column,
convert this | into this:
---------------------------------------------
a 2 3 4 | a 2,5 3,6 4
a 5 6 | b 7 8
b 7 8 | c 9,10 11
c 9 |
c 10 11 |
By default, it uniqifies all the grouped elements
"""
from jcvi.utils.cbook import AutoVivification
from jcvi.utils.grouper import Grouper
p = OptionParser(group.__doc__)
p.set_sep()
p.add_option('--groupby', default=None, type='int', help='Default column to groupby [default: %default]')
p.add_option('--groupsep', default=',', help='Separator to join the grouped elements [default: `%default`]')
p.add_option('--nouniq', default=False, action='store_true', help='Do not uniqify the grouped elements [default: %default]')
(opts, args) = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(tabfile,) = args
sep = opts.sep
groupby = opts.groupby
groupsep = opts.groupsep
cols = []
grouper = AutoVivification() if groupby is not None else Grouper()
fp = must_open(tabfile)
for row in fp:
row = row.rstrip()
atoms = row.split(sep)
if groupby is not None:
if len(cols) < len(atoms):
cols = [x for x in xrange(len(atoms))] # depends on [control=['if'], data=[]]
if groupby not in cols:
logging.error('groupby col index `{0}` is out of range'.format(groupby))
sys.exit() # depends on [control=['if'], data=['groupby']]
key = atoms[groupby]
for col in cols:
if col == groupby:
continue # depends on [control=['if'], data=[]]
if not grouper[key][col]:
grouper[key][col] = [] if opts.nouniq else set() # depends on [control=['if'], data=[]]
if col < len(atoms):
if groupsep in atoms[col]:
for atom in atoms[col].split(groupsep):
if opts.nouniq:
grouper[key][col].append(atom) # depends on [control=['if'], data=[]]
else:
grouper[key][col].add(atom) # depends on [control=['for'], data=['atom']] # depends on [control=['if'], data=['groupsep']]
elif opts.nouniq:
grouper[key][col].append(atoms[col]) # depends on [control=['if'], data=[]]
else:
grouper[key][col].add(atoms[col]) # depends on [control=['if'], data=['col']] # depends on [control=['for'], data=['col']] # depends on [control=['if'], data=['groupby']]
else:
grouper.join(*atoms) # depends on [control=['for'], data=['row']]
for key in grouper:
if groupby is not None:
line = []
for col in cols:
if col == groupby:
line.append(key) # depends on [control=['if'], data=[]]
elif col in grouper[key].keys():
line.append(groupsep.join(grouper[key][col])) # depends on [control=['if'], data=['col']]
else:
line.append('na') # depends on [control=['for'], data=['col']]
print(sep.join(line)) # depends on [control=['if'], data=['groupby']]
else:
print(groupsep.join(key)) # depends on [control=['for'], data=['key']] |
def master(self, name):
    """Return a dictionary describing the state of the named master."""
    return wait_convert(
        self.execute(b'MASTER', name, encoding='utf-8'),
        parse_sentinel_master,
    )
constant[Returns a dictionary containing the specified masters state.]
variable[fut] assign[=] call[name[self].execute, parameter[constant[b'MASTER'], name[name]]]
return[call[name[wait_convert], parameter[name[fut], name[parse_sentinel_master]]]] | keyword[def] identifier[master] ( identifier[self] , identifier[name] ):
literal[string]
identifier[fut] = identifier[self] . identifier[execute] ( literal[string] , identifier[name] , identifier[encoding] = literal[string] )
keyword[return] identifier[wait_convert] ( identifier[fut] , identifier[parse_sentinel_master] ) | def master(self, name):
"""Returns a dictionary containing the specified masters state."""
fut = self.execute(b'MASTER', name, encoding='utf-8')
return wait_convert(fut, parse_sentinel_master) |
def getdict(self, key):
    """Convert a multi values header to a case-insensitive dict:

    .. code-block:: python

        >>> resp = Message({
        ...     'Response': 'Success',
        ...     'ChanVariable': [
        ...         'FROM_DID=', 'SIPURI=sip:42@10.10.10.1:4242'],
        ... })
        >>> print(resp.chanvariable)
        ['FROM_DID=', 'SIPURI=sip:42@10.10.10.1:4242']
        >>> value = resp.getdict('chanvariable')
        >>> print(value['sipuri'])
        sip:42@10.10.10.1:4242
    """
    values = self.get(key, None)
    if not isinstance(values, list):
        raise TypeError("{0} must be a list. got {1}".format(key, values))
    # Entries look like "Name=value"; split on the first '=' only so the
    # value part may itself contain '=' characters.
    result = utils.CaseInsensitiveDict()
    for entry in values:
        name, value = entry.split('=', 1)
        result[name] = value
    return result
constant[Convert a multi values header to a case-insensitive dict:
.. code-block:: python
>>> resp = Message({
... 'Response': 'Success',
... 'ChanVariable': [
... 'FROM_DID=', 'SIPURI=sip:42@10.10.10.1:4242'],
... })
>>> print(resp.chanvariable)
['FROM_DID=', 'SIPURI=sip:42@10.10.10.1:4242']
>>> value = resp.getdict('chanvariable')
>>> print(value['sipuri'])
sip:42@10.10.10.1:4242
]
variable[values] assign[=] call[name[self].get, parameter[name[key], constant[None]]]
if <ast.UnaryOp object at 0x7da20c6e4e50> begin[:]
<ast.Raise object at 0x7da20c6e66e0>
variable[result] assign[=] call[name[utils].CaseInsensitiveDict, parameter[]]
for taget[name[item]] in starred[name[values]] begin[:]
<ast.Tuple object at 0x7da18c4cf460> assign[=] call[name[item].split, parameter[constant[=], constant[1]]]
call[name[result]][name[k]] assign[=] name[v]
return[name[result]] | keyword[def] identifier[getdict] ( identifier[self] , identifier[key] ):
literal[string]
identifier[values] = identifier[self] . identifier[get] ( identifier[key] , keyword[None] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[values] , identifier[list] ):
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[key] , identifier[values] ))
identifier[result] = identifier[utils] . identifier[CaseInsensitiveDict] ()
keyword[for] identifier[item] keyword[in] identifier[values] :
identifier[k] , identifier[v] = identifier[item] . identifier[split] ( literal[string] , literal[int] )
identifier[result] [ identifier[k] ]= identifier[v]
keyword[return] identifier[result] | def getdict(self, key):
"""Convert a multi values header to a case-insensitive dict:
.. code-block:: python
>>> resp = Message({
... 'Response': 'Success',
... 'ChanVariable': [
... 'FROM_DID=', 'SIPURI=sip:42@10.10.10.1:4242'],
... })
>>> print(resp.chanvariable)
['FROM_DID=', 'SIPURI=sip:42@10.10.10.1:4242']
>>> value = resp.getdict('chanvariable')
>>> print(value['sipuri'])
sip:42@10.10.10.1:4242
"""
values = self.get(key, None)
if not isinstance(values, list):
raise TypeError('{0} must be a list. got {1}'.format(key, values)) # depends on [control=['if'], data=[]]
result = utils.CaseInsensitiveDict()
for item in values:
(k, v) = item.split('=', 1)
result[k] = v # depends on [control=['for'], data=['item']]
return result |
def rn_theory(af, b):
    """ R(n) ratio expected from theory for given noise type
    alpha = b + 2
    """
    # From IEEE1139-2008
    # alpha   beta ADEV_mu MDEV_mu Rn_mu
    # -2      -4      1       1      0      Random Walk FM
    # -1      -3      0       0      0      Flicker FM
    #  0      -2     -1      -1      0      White FM
    #  1      -1     -2      -2      0      Flicker PM
    #  2       0     -2      -3     -1      White PM
    # (a=-3 flicker walk FM)
    # (a=-4 random run FM)
    if b == -1:
        # Flicker PM: f_h = 0.5/tau0 (assumed!) and af = tau/tau0,
        # so f_h*tau = 0.5/tau0 * af*tau0 = 0.5*af
        pi_sq = pow(np.pi, 2)
        avar = (1.038 + 3 * np.log(2 * np.pi * 0.5 * af)) / (4.0 * pi_sq)
        mvar = 3 * np.log(256.0 / 27.0) / (8.0 * pi_sq)
        return mvar / avar
    if b == 0:
        return af ** -1
    return af ** 0
constant[ R(n) ratio expected from theory for given noise type
alpha = b + 2
]
if compare[name[b] equal[==] constant[0]] begin[:]
return[call[name[pow], parameter[name[af], <ast.UnaryOp object at 0x7da2046211e0>]]] | keyword[def] identifier[rn_theory] ( identifier[af] , identifier[b] ):
literal[string]
keyword[if] identifier[b] == literal[int] :
keyword[return] identifier[pow] ( identifier[af] ,- literal[int] )
keyword[elif] identifier[b] ==- literal[int] :
identifier[avar] =( literal[int] + literal[int] * identifier[np] . identifier[log] ( literal[int] * identifier[np] . identifier[pi] * literal[int] * identifier[af] ))/( literal[int] * identifier[pow] ( identifier[np] . identifier[pi] , literal[int] ))
identifier[mvar] = literal[int] * identifier[np] . identifier[log] ( literal[int] / literal[int] )/( literal[int] * identifier[pow] ( identifier[np] . identifier[pi] , literal[int] ))
keyword[return] identifier[mvar] / identifier[avar]
keyword[else] :
keyword[return] identifier[pow] ( identifier[af] , literal[int] ) | def rn_theory(af, b):
""" R(n) ratio expected from theory for given noise type
alpha = b + 2
"""
# From IEEE1139-2008
# alpha beta ADEV_mu MDEV_mu Rn_mu
# -2 -4 1 1 0 Random Walk FM
# -1 -3 0 0 0 Flicker FM
# 0 -2 -1 -1 0 White FM
# 1 -1 -2 -2 0 Flicker PM
# 2 0 -2 -3 -1 White PM
# (a=-3 flicker walk FM)
# (a=-4 random run FM)
if b == 0:
return pow(af, -1) # depends on [control=['if'], data=[]]
elif b == -1:
# f_h = 0.5/tau0 (assumed!)
# af = tau/tau0
# so f_h*tau = 0.5/tau0 * af*tau0 = 0.5*af
avar = (1.038 + 3 * np.log(2 * np.pi * 0.5 * af)) / (4.0 * pow(np.pi, 2))
mvar = 3 * np.log(256.0 / 27.0) / (8.0 * pow(np.pi, 2))
return mvar / avar # depends on [control=['if'], data=[]]
else:
return pow(af, 0) |
def media_upload(self, filename, *args, **kwargs):
    """ :reference: https://developer.twitter.com/en/docs/media/upload-media/api-reference/post-media-upload
        :allowed_param:
    """
    # An open file object may be supplied via the 'file' kwarg; otherwise
    # _pack_image reads the file named by `filename`.
    file_obj = kwargs.pop('file', None)
    headers, post_data = API._pack_image(filename, 4883,
                                         form_field='media', f=file_obj)
    kwargs['headers'] = headers
    kwargs['post_data'] = post_data
    return bind_api(
        api=self,
        path='/media/upload.json',
        method='POST',
        payload_type='media',
        allowed_param=[],
        require_auth=True,
        upload_api=True
    )(*args, **kwargs)
constant[ :reference: https://developer.twitter.com/en/docs/media/upload-media/api-reference/post-media-upload
:allowed_param:
]
variable[f] assign[=] call[name[kwargs].pop, parameter[constant[file], constant[None]]]
<ast.Tuple object at 0x7da2054a4df0> assign[=] call[name[API]._pack_image, parameter[name[filename], constant[4883]]]
call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da2054a4040>, <ast.Constant object at 0x7da2054a67d0>], [<ast.Name object at 0x7da2054a47c0>, <ast.Name object at 0x7da2054a52d0>]]]]
return[call[call[name[bind_api], parameter[]], parameter[<ast.Starred object at 0x7da2054a7340>]]] | keyword[def] identifier[media_upload] ( identifier[self] , identifier[filename] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[f] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
identifier[headers] , identifier[post_data] = identifier[API] . identifier[_pack_image] ( identifier[filename] , literal[int] , identifier[form_field] = literal[string] , identifier[f] = identifier[f] )
identifier[kwargs] . identifier[update] ({ literal[string] : identifier[headers] , literal[string] : identifier[post_data] })
keyword[return] identifier[bind_api] (
identifier[api] = identifier[self] ,
identifier[path] = literal[string] ,
identifier[method] = literal[string] ,
identifier[payload_type] = literal[string] ,
identifier[allowed_param] =[],
identifier[require_auth] = keyword[True] ,
identifier[upload_api] = keyword[True]
)(* identifier[args] ,** identifier[kwargs] ) | def media_upload(self, filename, *args, **kwargs):
""" :reference: https://developer.twitter.com/en/docs/media/upload-media/api-reference/post-media-upload
:allowed_param:
"""
f = kwargs.pop('file', None)
(headers, post_data) = API._pack_image(filename, 4883, form_field='media', f=f)
kwargs.update({'headers': headers, 'post_data': post_data})
return bind_api(api=self, path='/media/upload.json', method='POST', payload_type='media', allowed_param=[], require_auth=True, upload_api=True)(*args, **kwargs) |
def oper(inst):
    """ Returns operands of an ASM instruction.
    Even "indirect" operands, like SP if RET or CALL is used.
    """
    fields = inst.strip(' \t\n').split(' ')
    mnemo = fields[0].lower()  # instruction mnemonic
    op = ''.join(fields[1:]).split(',')

    if mnemo in {'call', 'jp', 'jr'} and len(op) > 1:
        # Conditional jump/call: drop the condition, add the flags register.
        op = op[1:] + ['f']
    elif mnemo == 'djnz':
        op.append('b')
    elif mnemo in {'push', 'pop', 'call'}:
        # SP is also affected by push, pop and call
        op.append('sp')
    elif mnemo in {'or', 'and', 'xor', 'neg', 'cpl', 'rrca', 'rlca'}:
        op.append('a')
    elif mnemo in {'rra', 'rla'}:
        op.extend(['a', 'f'])
    elif mnemo in ('rr', 'rl'):
        op.append('f')
    elif mnemo in {'adc', 'sbc'}:
        if len(op) == 1:
            # Implicit accumulator form: adc/sbc also use A and the carry flag.
            op = ['a', 'f'] + op
    elif mnemo in {'add', 'sub'}:
        if len(op) == 1:
            op = ['a'] + op
    elif mnemo in {'ldd', 'ldi', 'lddr', 'ldir'}:
        op = ['hl', 'de', 'bc']
    elif mnemo in {'cpd', 'cpi', 'cpdr', 'cpir'}:
        op = ['a', 'hl', 'bc']
    elif mnemo == 'exx':
        op = ['*', 'bc', 'de', 'hl', 'b', 'c', 'd', 'e', 'h', 'l']
    elif mnemo in {'ret', 'reti', 'retn'}:
        op += ['sp']
    elif mnemo == 'out':
        if len(op) and RE_OUTC.match(op[0]):
            op[0] = 'c'
        else:
            op.pop(0)
    elif mnemo == 'in':
        if len(op) > 1 and RE_OUTC.match(op[1]):
            op[1] = 'c'
        else:
            op.pop(1)

    # Normalize indirect 16-bit operands: ' ( dE ) ' => '(de)'
    for idx, operand in enumerate(op):
        if RE_INDIR16.match(operand) is not None:
            op[idx] = '(' + operand.strip()[1:-1].strip().lower() + ')'
    return op
constant[ Returns operands of an ASM instruction.
Even "indirect" operands, like SP if RET or CALL is used.
]
variable[i] assign[=] call[call[name[inst].strip, parameter[constant[
]]].split, parameter[constant[ ]]]
variable[I] assign[=] call[call[name[i]][constant[0]].lower, parameter[]]
variable[i] assign[=] call[constant[].join, parameter[call[name[i]][<ast.Slice object at 0x7da18f58dab0>]]]
variable[op] assign[=] call[name[i].split, parameter[constant[,]]]
if <ast.BoolOp object at 0x7da18f58c220> begin[:]
variable[op] assign[=] binary_operation[call[name[op]][<ast.Slice object at 0x7da18f58f6a0>] + list[[<ast.Constant object at 0x7da18f58fcd0>]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[op]]]]]] begin[:]
variable[tmp] assign[=] call[name[RE_INDIR16].match, parameter[call[name[op]][name[i]]]]
if compare[name[tmp] is_not constant[None]] begin[:]
call[name[op]][name[i]] assign[=] binary_operation[binary_operation[constant[(] + call[call[call[call[call[name[op]][name[i]].strip, parameter[]]][<ast.Slice object at 0x7da18f813c40>].strip, parameter[]].lower, parameter[]]] + constant[)]]
return[name[op]] | keyword[def] identifier[oper] ( identifier[inst] ):
literal[string]
identifier[i] = identifier[inst] . identifier[strip] ( literal[string] ). identifier[split] ( literal[string] )
identifier[I] = identifier[i] [ literal[int] ]. identifier[lower] ()
identifier[i] = literal[string] . identifier[join] ( identifier[i] [ literal[int] :])
identifier[op] = identifier[i] . identifier[split] ( literal[string] )
keyword[if] identifier[I] keyword[in] { literal[string] , literal[string] , literal[string] } keyword[and] identifier[len] ( identifier[op] )> literal[int] :
identifier[op] = identifier[op] [ literal[int] :]+[ literal[string] ]
keyword[elif] identifier[I] == literal[string] :
identifier[op] . identifier[append] ( literal[string] )
keyword[elif] identifier[I] keyword[in] { literal[string] , literal[string] , literal[string] }:
identifier[op] . identifier[append] ( literal[string] )
keyword[elif] identifier[I] keyword[in] { literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] }:
identifier[op] . identifier[append] ( literal[string] )
keyword[elif] identifier[I] keyword[in] { literal[string] , literal[string] }:
identifier[op] . identifier[extend] ([ literal[string] , literal[string] ])
keyword[elif] identifier[I] keyword[in] ( literal[string] , literal[string] ):
identifier[op] . identifier[append] ( literal[string] )
keyword[elif] identifier[I] keyword[in] { literal[string] , literal[string] }:
keyword[if] identifier[len] ( identifier[op] )== literal[int] :
identifier[op] =[ literal[string] , literal[string] ]+ identifier[op]
keyword[elif] identifier[I] keyword[in] { literal[string] , literal[string] }:
keyword[if] identifier[len] ( identifier[op] )== literal[int] :
identifier[op] =[ literal[string] ]+ identifier[op]
keyword[elif] identifier[I] keyword[in] { literal[string] , literal[string] , literal[string] , literal[string] }:
identifier[op] =[ literal[string] , literal[string] , literal[string] ]
keyword[elif] identifier[I] keyword[in] { literal[string] , literal[string] , literal[string] , literal[string] }:
identifier[op] =[ literal[string] , literal[string] , literal[string] ]
keyword[elif] identifier[I] == literal[string] :
identifier[op] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[elif] identifier[I] keyword[in] { literal[string] , literal[string] , literal[string] }:
identifier[op] +=[ literal[string] ]
keyword[elif] identifier[I] == literal[string] :
keyword[if] identifier[len] ( identifier[op] ) keyword[and] identifier[RE_OUTC] . identifier[match] ( identifier[op] [ literal[int] ]):
identifier[op] [ literal[int] ]= literal[string]
keyword[else] :
identifier[op] . identifier[pop] ( literal[int] )
keyword[elif] identifier[I] == literal[string] :
keyword[if] identifier[len] ( identifier[op] )> literal[int] keyword[and] identifier[RE_OUTC] . identifier[match] ( identifier[op] [ literal[int] ]):
identifier[op] [ literal[int] ]= literal[string]
keyword[else] :
identifier[op] . identifier[pop] ( literal[int] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[op] )):
identifier[tmp] = identifier[RE_INDIR16] . identifier[match] ( identifier[op] [ identifier[i] ])
keyword[if] identifier[tmp] keyword[is] keyword[not] keyword[None] :
identifier[op] [ identifier[i] ]= literal[string] + identifier[op] [ identifier[i] ]. identifier[strip] ()[ literal[int] :- literal[int] ]. identifier[strip] (). identifier[lower] ()+ literal[string]
keyword[return] identifier[op] | def oper(inst):
""" Returns operands of an ASM instruction.
Even "indirect" operands, like SP if RET or CALL is used.
"""
i = inst.strip(' \t\n').split(' ')
I = i[0].lower() # Instruction
i = ''.join(i[1:])
op = i.split(',')
if I in {'call', 'jp', 'jr'} and len(op) > 1:
op = op[1:] + ['f'] # depends on [control=['if'], data=[]]
elif I == 'djnz':
op.append('b') # depends on [control=['if'], data=[]]
elif I in {'push', 'pop', 'call'}:
op.append('sp') # Sp is also affected by push, pop and call # depends on [control=['if'], data=[]]
elif I in {'or', 'and', 'xor', 'neg', 'cpl', 'rrca', 'rlca'}:
op.append('a') # depends on [control=['if'], data=[]]
elif I in {'rra', 'rla'}:
op.extend(['a', 'f']) # depends on [control=['if'], data=[]]
elif I in ('rr', 'rl'):
op.append('f') # depends on [control=['if'], data=[]]
elif I in {'adc', 'sbc'}:
if len(op) == 1:
op = ['a', 'f'] + op # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif I in {'add', 'sub'}:
if len(op) == 1:
op = ['a'] + op # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif I in {'ldd', 'ldi', 'lddr', 'ldir'}:
op = ['hl', 'de', 'bc'] # depends on [control=['if'], data=[]]
elif I in {'cpd', 'cpi', 'cpdr', 'cpir'}:
op = ['a', 'hl', 'bc'] # depends on [control=['if'], data=[]]
elif I == 'exx':
op = ['*', 'bc', 'de', 'hl', 'b', 'c', 'd', 'e', 'h', 'l'] # depends on [control=['if'], data=[]]
elif I in {'ret', 'reti', 'retn'}:
op += ['sp'] # depends on [control=['if'], data=[]]
elif I == 'out':
if len(op) and RE_OUTC.match(op[0]):
op[0] = 'c' # depends on [control=['if'], data=[]]
else:
op.pop(0) # depends on [control=['if'], data=[]]
elif I == 'in':
if len(op) > 1 and RE_OUTC.match(op[1]):
op[1] = 'c' # depends on [control=['if'], data=[]]
else:
op.pop(1) # depends on [control=['if'], data=[]]
for i in range(len(op)):
tmp = RE_INDIR16.match(op[i])
if tmp is not None:
op[i] = '(' + op[i].strip()[1:-1].strip().lower() + ')' # ' ( dE ) ' => '(de)' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return op |
def method_name(func):
"""Method wrapper that adds the name of the method being called to its arguments list in Pascal case
"""
@wraps(func)
def _method_name(*args, **kwargs):
name = to_pascal_case(func.__name__)
return func(name=name, *args, **kwargs)
return _method_name | def function[method_name, parameter[func]]:
constant[Method wrapper that adds the name of the method being called to its arguments list in Pascal case
]
def function[_method_name, parameter[]]:
variable[name] assign[=] call[name[to_pascal_case], parameter[name[func].__name__]]
return[call[name[func], parameter[<ast.Starred object at 0x7da1b1649060>]]]
return[name[_method_name]] | keyword[def] identifier[method_name] ( identifier[func] ):
literal[string]
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[_method_name] (* identifier[args] ,** identifier[kwargs] ):
identifier[name] = identifier[to_pascal_case] ( identifier[func] . identifier[__name__] )
keyword[return] identifier[func] ( identifier[name] = identifier[name] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[_method_name] | def method_name(func):
"""Method wrapper that adds the name of the method being called to its arguments list in Pascal case
"""
@wraps(func)
def _method_name(*args, **kwargs):
name = to_pascal_case(func.__name__)
return func(*args, name=name, **kwargs)
return _method_name |
def create_env(env, args, recreate=False, ignore_activated=False, quiet=False):
    """Create virtual environment.

    :param env: Virtual environment name.
    :param args: Pass given arguments to ``virtualenv`` script.
    :param recreate: Recreate virtual environment? By default: False
    :param ignore_activated:
        Ignore already activated virtual environment and create new one. By
        default: False
    :param quiet: Do not output messages into terminal. By default: False
    """
    cmd = None
    result = True

    inside_env = hasattr(sys, 'real_prefix') or os.environ.get('VIRTUAL_ENV')
    env_exists = os.path.isdir(env)

    if not quiet:
        print_message('== Step 1. Create virtual environment ==')

    # Build the command only when a (re)creation is actually needed.
    needs_creation = (recreate
                      or (not inside_env and not env_exists)
                      or (ignore_activated and not env_exists))
    if needs_creation:
        cmd = ('virtualenv', ) + args + (env, )
    elif not quiet:
        if inside_env:
            message = 'Working inside of virtual environment, done...'
        else:
            message = 'Virtual environment {0!r} already created, done...'
        print_message(message.format(env))

    if cmd:
        with disable_error_handler():
            result = not run_cmd(cmd, echo=not quiet)

    if not quiet:
        print_message()

    return result
constant[Create virtual environment.
:param env: Virtual environment name.
:param args: Pass given arguments to ``virtualenv`` script.
:param recerate: Recreate virtual environment? By default: False
:param ignore_activated:
Ignore already activated virtual environment and create new one. By
default: False
:param quiet: Do not output messages into terminal. By default: False
]
variable[cmd] assign[=] constant[None]
variable[result] assign[=] constant[True]
variable[inside_env] assign[=] <ast.BoolOp object at 0x7da1b008dc60>
variable[env_exists] assign[=] call[name[os].path.isdir, parameter[name[env]]]
if <ast.UnaryOp object at 0x7da1b008c2e0> begin[:]
call[name[print_message], parameter[constant[== Step 1. Create virtual environment ==]]]
if <ast.BoolOp object at 0x7da1b008d600> begin[:]
variable[cmd] assign[=] binary_operation[binary_operation[tuple[[<ast.Constant object at 0x7da1b008f7c0>]] + name[args]] + tuple[[<ast.Name object at 0x7da1b008f070>]]]
if <ast.BoolOp object at 0x7da1b008f550> begin[:]
if name[inside_env] begin[:]
variable[message] assign[=] constant[Working inside of virtual environment, done...]
call[name[print_message], parameter[call[name[message].format, parameter[name[env]]]]]
if name[cmd] begin[:]
with call[name[disable_error_handler], parameter[]] begin[:]
variable[result] assign[=] <ast.UnaryOp object at 0x7da1b008faf0>
if <ast.UnaryOp object at 0x7da1b008d750> begin[:]
call[name[print_message], parameter[]]
return[name[result]] | keyword[def] identifier[create_env] ( identifier[env] , identifier[args] , identifier[recreate] = keyword[False] , identifier[ignore_activated] = keyword[False] , identifier[quiet] = keyword[False] ):
literal[string]
identifier[cmd] = keyword[None]
identifier[result] = keyword[True]
identifier[inside_env] = identifier[hasattr] ( identifier[sys] , literal[string] ) keyword[or] identifier[os] . identifier[environ] . identifier[get] ( literal[string] )
identifier[env_exists] = identifier[os] . identifier[path] . identifier[isdir] ( identifier[env] )
keyword[if] keyword[not] identifier[quiet] :
identifier[print_message] ( literal[string] )
keyword[if] (
identifier[recreate] keyword[or] ( keyword[not] identifier[inside_env] keyword[and] keyword[not] identifier[env_exists] )
) keyword[or] (
identifier[ignore_activated] keyword[and] keyword[not] identifier[env_exists]
):
identifier[cmd] =( literal[string] ,)+ identifier[args] +( identifier[env] ,)
keyword[if] keyword[not] identifier[cmd] keyword[and] keyword[not] identifier[quiet] :
keyword[if] identifier[inside_env] :
identifier[message] = literal[string]
keyword[else] :
identifier[message] = literal[string]
identifier[print_message] ( identifier[message] . identifier[format] ( identifier[env] ))
keyword[if] identifier[cmd] :
keyword[with] identifier[disable_error_handler] ():
identifier[result] = keyword[not] identifier[run_cmd] ( identifier[cmd] , identifier[echo] = keyword[not] identifier[quiet] )
keyword[if] keyword[not] identifier[quiet] :
identifier[print_message] ()
keyword[return] identifier[result] | def create_env(env, args, recreate=False, ignore_activated=False, quiet=False):
"""Create virtual environment.
:param env: Virtual environment name.
:param args: Pass given arguments to ``virtualenv`` script.
:param recerate: Recreate virtual environment? By default: False
:param ignore_activated:
Ignore already activated virtual environment and create new one. By
default: False
:param quiet: Do not output messages into terminal. By default: False
"""
cmd = None
result = True
inside_env = hasattr(sys, 'real_prefix') or os.environ.get('VIRTUAL_ENV')
env_exists = os.path.isdir(env)
if not quiet:
print_message('== Step 1. Create virtual environment ==') # depends on [control=['if'], data=[]]
if (recreate or (not inside_env and (not env_exists))) or (ignore_activated and (not env_exists)):
cmd = ('virtualenv',) + args + (env,) # depends on [control=['if'], data=[]]
if not cmd and (not quiet):
if inside_env:
message = 'Working inside of virtual environment, done...' # depends on [control=['if'], data=[]]
else:
message = 'Virtual environment {0!r} already created, done...'
print_message(message.format(env)) # depends on [control=['if'], data=[]]
if cmd:
with disable_error_handler():
result = not run_cmd(cmd, echo=not quiet) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]]
if not quiet:
print_message() # depends on [control=['if'], data=[]]
return result |
def _handle_actionconstantpool(self, _):
    """Handle the ActionConstantPool action."""
    action = _make_object("ActionConstantPool")
    # The pool is a count followed by that many strings read from the source.
    action.Count = pool_size = unpack_ui16(self._src)
    action.ConstantPool = [self._get_struct_string()
                           for _ in range(pool_size)]
    yield action
constant[Handle the ActionConstantPool action.]
variable[obj] assign[=] call[name[_make_object], parameter[constant[ActionConstantPool]]]
name[obj].Count assign[=] call[name[unpack_ui16], parameter[name[self]._src]]
name[obj].ConstantPool assign[=] list[[]]
for taget[name[_]] in starred[call[name[range], parameter[name[count]]]] begin[:]
call[name[pool].append, parameter[call[name[self]._get_struct_string, parameter[]]]]
<ast.Yield object at 0x7da18bcc93c0> | keyword[def] identifier[_handle_actionconstantpool] ( identifier[self] , identifier[_] ):
literal[string]
identifier[obj] = identifier[_make_object] ( literal[string] )
identifier[obj] . identifier[Count] = identifier[count] = identifier[unpack_ui16] ( identifier[self] . identifier[_src] )
identifier[obj] . identifier[ConstantPool] = identifier[pool] =[]
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[count] ):
identifier[pool] . identifier[append] ( identifier[self] . identifier[_get_struct_string] ())
keyword[yield] identifier[obj] | def _handle_actionconstantpool(self, _):
"""Handle the ActionConstantPool action."""
obj = _make_object('ActionConstantPool')
obj.Count = count = unpack_ui16(self._src)
obj.ConstantPool = pool = []
for _ in range(count):
pool.append(self._get_struct_string()) # depends on [control=['for'], data=[]]
yield obj |
def _make_jsmin(python_only=False):
    """
    Generate JS minifier based on `jsmin.c by Douglas Crockford`_

    .. _jsmin.c by Douglas Crockford:
       http://www.crockford.com/javascript/jsmin.c

    :Parameters:
      `python_only` : ``bool``
        Use only the python variant. If true, the c extension is not even
        tried to be loaded.

    :Return: Minifier (``jsmin(script, keep_bang_comments=False)``)
    :Rtype: ``callable``
    """
    # pylint: disable = R0912, R0914, W0612
    # Prefer the compiled C extension unless the caller opted out.
    if not python_only:
        try:
            import _rjsmin # pylint: disable = F0401
        except ImportError:
            pass
        else:
            return _rjsmin.jsmin
    # Python 2/3 compatibility shim: on py3 xrange does not exist.
    try:
        xrange
    except NameError:
        xrange = range # pylint: disable = W0622
    # Regex fragments for the pieces of JS lexical grammar the minifier
    # must recognize (comments, strings, regex literals, whitespace).
    space_chars = r'[\000-\011\013\014\016-\040]'
    line_comment = r'(?://[^\r\n]*)'
    space_comment = r'(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)'
    space_comment_nobang = r'(?:/\*(?!!)[^*]*\*+(?:[^/*][^*]*\*+)*/)'
    bang_comment = r'(?:/\*![^*]*\*+(?:[^/*][^*]*\*+)*/)'
    string1 = \
        r'(?:\047[^\047\\\r\n]*(?:\\(?:[^\r\n]|\r?\n|\r)[^\047\\\r\n]*)*\047)'
    string2 = r'(?:"[^"\\\r\n]*(?:\\(?:[^\r\n]|\r?\n|\r)[^"\\\r\n]*)*")'
    strings = r'(?:%s|%s)' % (string1, string2)
    charclass = r'(?:\[[^\\\]\r\n]*(?:\\[^\r\n][^\\\]\r\n]*)*\])'
    nospecial = r'[^/\\\[\r\n]'
    regex = r'(?:/(?![\r\n/*])%s*(?:(?:\\[^\r\n]|%s)%s*)*/)' % (
        nospecial, charclass, nospecial
    )
    space = r'(?:%s|%s)' % (space_chars, space_comment)
    space_nobang = r'(?:%s|%s)' % (space_chars, space_comment_nobang)
    newline = r'(?:%s?[\r\n])' % line_comment
    def fix_charclass(result):
        """ Fixup string of chars to fit into a regex char class """
        # Move a literal '-' to the end so it is not read as a range.
        pos = result.find('-')
        if pos >= 0:
            result = r'%s%s-' % (result[:pos], result[pos + 1:])
        def sequentize(string):
            """
            Notate consecutive characters as sequence
            (1-4 instead of 1234)
            """
            first, last, result = None, None, []
            for char in map(ord, string):
                if last is None:
                    first = last = char
                elif last + 1 == char:
                    last = char
                else:
                    result.append((first, last))
                    first = last = char
            if last is not None:
                result.append((first, last))
            return ''.join(['%s%s%s' % (
                chr(first),
                last > first + 1 and '-' or '',
                last != first and chr(last) or ''
            ) for first, last in result])
        # Escape control chars/quotes as octal and regex metachars with '\'.
        return _re.sub(
            r'([\000-\040\047])', # \047 for better portability
            lambda m: '\\%03o' % ord(m.group(1)), (
                sequentize(result)
                .replace('\\', '\\\\')
                .replace('[', '\\[')
                .replace(']', '\\]')
            )
        )
    def id_literal_(what):
        """ Make id_literal like char class """
        match = _re.compile(what).match
        result = ''.join([
            chr(c) for c in xrange(127) if not match(chr(c))
        ])
        return '[^%s]' % fix_charclass(result)
    def not_id_literal_(keep):
        """ Make negated id_literal like char class """
        match = _re.compile(id_literal_(keep)).match
        result = ''.join([
            chr(c) for c in xrange(127) if not match(chr(c))
        ])
        return r'[%s]' % fix_charclass(result)
    # Character classes describing what may precede/follow a removable
    # space or newline without changing JS semantics.
    not_id_literal = not_id_literal_(r'[a-zA-Z0-9_$]')
    preregex1 = r'[(,=:\[!&|?{};\r\n]'
    preregex2 = r'%(not_id_literal)sreturn' % locals()
    id_literal = id_literal_(r'[a-zA-Z0-9_$]')
    id_literal_open = id_literal_(r'[a-zA-Z0-9_${\[(!+-]')
    id_literal_close = id_literal_(r'[a-zA-Z0-9_$}\])"\047+-]')
    dull = r'[^\047"/\000-\040]'
    # One big substitution regex: each alternative either keeps a token
    # (groups) or matches whitespace/comments to be collapsed.
    space_sub_simple = _re.compile((
        # noqa pylint: disable = C0330
        r'(%(dull)s+)'
        r'|(%(strings)s%(dull)s*)'
        r'|(?<=%(preregex1)s)'
        r'%(space)s*(?:%(newline)s%(space)s*)*'
        r'(%(regex)s%(dull)s*)'
        r'|(?<=%(preregex2)s)'
        r'%(space)s*(?:%(newline)s%(space)s)*'
        r'(%(regex)s%(dull)s*)'
        r'|(?<=%(id_literal_close)s)'
        r'%(space)s*(?:(%(newline)s)%(space)s*)+'
        r'(?=%(id_literal_open)s)'
        r'|(?<=%(id_literal)s)(%(space)s)+(?=%(id_literal)s)'
        r'|(?<=\+)(%(space)s)+(?=\+)'
        r'|(?<=-)(%(space)s)+(?=-)'
        r'|%(space)s+'
        r'|(?:%(newline)s%(space)s*)+'
    ) % locals()).sub
    #print space_sub_simple.__self__.pattern
    def space_subber_simple(match):
        """ Substitution callback """
        # pylint: disable = R0911
        # Groups 0-3: tokens kept verbatim; group 4: required newline;
        # groups 5-7: required single space; anything else is dropped.
        groups = match.groups()
        if groups[0]:
            return groups[0]
        elif groups[1]:
            return groups[1]
        elif groups[2]:
            return groups[2]
        elif groups[3]:
            return groups[3]
        elif groups[4]:
            return '\n'
        elif groups[5] or groups[6] or groups[7]:
            return ' '
        else:
            return ''
    # Variant that additionally preserves /*!...*/ "bang" comments.
    space_sub_banged = _re.compile((
        # noqa pylint: disable = C0330
        r'(%(dull)s+)'
        r'|(%(strings)s%(dull)s*)'
        r'|(%(bang_comment)s%(dull)s*)'
        r'|(?<=%(preregex1)s)'
        r'%(space)s*(?:%(newline)s%(space)s*)*'
        r'(%(regex)s%(dull)s*)'
        r'|(?<=%(preregex2)s)'
        r'%(space)s*(?:%(newline)s%(space)s)*'
        r'(%(regex)s%(dull)s*)'
        r'|(?<=%(id_literal_close)s)'
        r'%(space)s*(?:(%(newline)s)%(space)s*)+'
        r'(?=%(id_literal_open)s)'
        r'|(?<=%(id_literal)s)(%(space)s)+(?=%(id_literal)s)'
        r'|(?<=\+)(%(space)s)+(?=\+)'
        r'|(?<=-)(%(space)s)+(?=-)'
        r'|%(space)s+'
        r'|(?:%(newline)s%(space)s*)+'
    ) % dict(locals(), space=space_nobang)).sub
    #print space_sub_banged.__self__.pattern
    def space_subber_banged(match):
        """ Substitution callback """
        # pylint: disable = R0911
        # Same as space_subber_simple, shifted by one because group 2 now
        # carries a preserved bang comment.
        groups = match.groups()
        if groups[0]:
            return groups[0]
        elif groups[1]:
            return groups[1]
        elif groups[2]:
            return groups[2]
        elif groups[3]:
            return groups[3]
        elif groups[4]:
            return groups[4]
        elif groups[5]:
            return '\n'
        elif groups[6] or groups[7] or groups[8]:
            return ' '
        else:
            return ''
    def jsmin(script, keep_bang_comments=False): # pylint: disable = W0621
        r"""
        Minify javascript based on `jsmin.c by Douglas Crockford`_\.

        Instead of parsing the stream char by char, it uses a regular
        expression approach which minifies the whole script with one big
        substitution regex.

        .. _jsmin.c by Douglas Crockford:
           http://www.crockford.com/javascript/jsmin.c

        :Parameters:
          `script` : ``str``
            Script to minify

          `keep_bang_comments` : ``bool``
            Keep comments starting with an exclamation mark? (``/*!...*/``)

        :Return: Minified script
        :Rtype: ``str``
        """
        # The script is padded with newlines so the lookbehind/lookahead
        # assertions also apply at the very start/end of the input.
        if keep_bang_comments:
            return space_sub_banged(
                space_subber_banged, '\n%s\n' % script
            ).strip()
        else:
            return space_sub_simple(
                space_subber_simple, '\n%s\n' % script
            ).strip()
    return jsmin
constant[
Generate JS minifier based on `jsmin.c by Douglas Crockford`_
.. _jsmin.c by Douglas Crockford:
http://www.crockford.com/javascript/jsmin.c
:Parameters:
`python_only` : ``bool``
Use only the python variant. If true, the c extension is not even
tried to be loaded.
:Return: Minifier
:Rtype: ``callable``
]
if <ast.UnaryOp object at 0x7da1b0e3b550> begin[:]
<ast.Try object at 0x7da1b0e3a890>
<ast.Try object at 0x7da1b0e38370>
variable[space_chars] assign[=] constant[[\000-\011\013\014\016-\040]]
variable[line_comment] assign[=] constant[(?://[^\r\n]*)]
variable[space_comment] assign[=] constant[(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)]
variable[space_comment_nobang] assign[=] constant[(?:/\*(?!!)[^*]*\*+(?:[^/*][^*]*\*+)*/)]
variable[bang_comment] assign[=] constant[(?:/\*![^*]*\*+(?:[^/*][^*]*\*+)*/)]
variable[string1] assign[=] constant[(?:\047[^\047\\\r\n]*(?:\\(?:[^\r\n]|\r?\n|\r)[^\047\\\r\n]*)*\047)]
variable[string2] assign[=] constant[(?:"[^"\\\r\n]*(?:\\(?:[^\r\n]|\r?\n|\r)[^"\\\r\n]*)*")]
variable[strings] assign[=] binary_operation[constant[(?:%s|%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0e397e0>, <ast.Name object at 0x7da1b0e3b2b0>]]]
variable[charclass] assign[=] constant[(?:\[[^\\\]\r\n]*(?:\\[^\r\n][^\\\]\r\n]*)*\])]
variable[nospecial] assign[=] constant[[^/\\\[\r\n]]
variable[regex] assign[=] binary_operation[constant[(?:/(?![\r\n/*])%s*(?:(?:\\[^\r\n]|%s)%s*)*/)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0be1090>, <ast.Name object at 0x7da1b0be25f0>, <ast.Name object at 0x7da1b0be0af0>]]]
variable[space] assign[=] binary_operation[constant[(?:%s|%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0be30a0>, <ast.Name object at 0x7da1b0be2980>]]]
variable[space_nobang] assign[=] binary_operation[constant[(?:%s|%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0be3be0>, <ast.Name object at 0x7da1b0be3190>]]]
variable[newline] assign[=] binary_operation[constant[(?:%s?[\r\n])] <ast.Mod object at 0x7da2590d6920> name[line_comment]]
def function[fix_charclass, parameter[result]]:
constant[ Fixup string of chars to fit into a regex char class ]
variable[pos] assign[=] call[name[result].find, parameter[constant[-]]]
if compare[name[pos] greater_or_equal[>=] constant[0]] begin[:]
variable[result] assign[=] binary_operation[constant[%s%s-] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b0be25c0>, <ast.Subscript object at 0x7da1b0be2500>]]]
def function[sequentize, parameter[string]]:
constant[
Notate consecutive characters as sequence
(1-4 instead of 1234)
]
<ast.Tuple object at 0x7da1b0be15a0> assign[=] tuple[[<ast.Constant object at 0x7da1b0be0d60>, <ast.Constant object at 0x7da1b0be1ba0>, <ast.List object at 0x7da1b0be2050>]]
for taget[name[char]] in starred[call[name[map], parameter[name[ord], name[string]]]] begin[:]
if compare[name[last] is constant[None]] begin[:]
variable[first] assign[=] name[char]
if compare[name[last] is_not constant[None]] begin[:]
call[name[result].append, parameter[tuple[[<ast.Name object at 0x7da1b0be0c10>, <ast.Name object at 0x7da1b0be2320>]]]]
return[call[constant[].join, parameter[<ast.ListComp object at 0x7da1b0be0bb0>]]]
return[call[name[_re].sub, parameter[constant[([\000-\040\047])], <ast.Lambda object at 0x7da1b0be2fe0>, call[call[call[call[name[sequentize], parameter[name[result]]].replace, parameter[constant[\], constant[\\]]].replace, parameter[constant[[], constant[\[]]].replace, parameter[constant[]], constant[\]]]]]]]
def function[id_literal_, parameter[what]]:
constant[ Make id_literal like char class ]
variable[match] assign[=] call[name[_re].compile, parameter[name[what]]].match
variable[result] assign[=] call[constant[].join, parameter[<ast.ListComp object at 0x7da1b0be2d10>]]
return[binary_operation[constant[[^%s]] <ast.Mod object at 0x7da2590d6920> call[name[fix_charclass], parameter[name[result]]]]]
def function[not_id_literal_, parameter[keep]]:
constant[ Make negated id_literal like char class ]
variable[match] assign[=] call[name[_re].compile, parameter[call[name[id_literal_], parameter[name[keep]]]]].match
variable[result] assign[=] call[constant[].join, parameter[<ast.ListComp object at 0x7da1b0bd9f30>]]
return[binary_operation[constant[[%s]] <ast.Mod object at 0x7da2590d6920> call[name[fix_charclass], parameter[name[result]]]]]
variable[not_id_literal] assign[=] call[name[not_id_literal_], parameter[constant[[a-zA-Z0-9_$]]]]
variable[preregex1] assign[=] constant[[(,=:\[!&|?{};\r\n]]
variable[preregex2] assign[=] binary_operation[constant[%(not_id_literal)sreturn] <ast.Mod object at 0x7da2590d6920> call[name[locals], parameter[]]]
variable[id_literal] assign[=] call[name[id_literal_], parameter[constant[[a-zA-Z0-9_$]]]]
variable[id_literal_open] assign[=] call[name[id_literal_], parameter[constant[[a-zA-Z0-9_${\[(!+-]]]]
variable[id_literal_close] assign[=] call[name[id_literal_], parameter[constant[[a-zA-Z0-9_$}\])"\047+-]]]]
variable[dull] assign[=] constant[[^\047"/\000-\040]]
variable[space_sub_simple] assign[=] call[name[_re].compile, parameter[binary_operation[constant[(%(dull)s+)|(%(strings)s%(dull)s*)|(?<=%(preregex1)s)%(space)s*(?:%(newline)s%(space)s*)*(%(regex)s%(dull)s*)|(?<=%(preregex2)s)%(space)s*(?:%(newline)s%(space)s)*(%(regex)s%(dull)s*)|(?<=%(id_literal_close)s)%(space)s*(?:(%(newline)s)%(space)s*)+(?=%(id_literal_open)s)|(?<=%(id_literal)s)(%(space)s)+(?=%(id_literal)s)|(?<=\+)(%(space)s)+(?=\+)|(?<=-)(%(space)s)+(?=-)|%(space)s+|(?:%(newline)s%(space)s*)+] <ast.Mod object at 0x7da2590d6920> call[name[locals], parameter[]]]]].sub
def function[space_subber_simple, parameter[match]]:
constant[ Substitution callback ]
variable[groups] assign[=] call[name[match].groups, parameter[]]
if call[name[groups]][constant[0]] begin[:]
return[call[name[groups]][constant[0]]]
variable[space_sub_banged] assign[=] call[name[_re].compile, parameter[binary_operation[constant[(%(dull)s+)|(%(strings)s%(dull)s*)|(%(bang_comment)s%(dull)s*)|(?<=%(preregex1)s)%(space)s*(?:%(newline)s%(space)s*)*(%(regex)s%(dull)s*)|(?<=%(preregex2)s)%(space)s*(?:%(newline)s%(space)s)*(%(regex)s%(dull)s*)|(?<=%(id_literal_close)s)%(space)s*(?:(%(newline)s)%(space)s*)+(?=%(id_literal_open)s)|(?<=%(id_literal)s)(%(space)s)+(?=%(id_literal)s)|(?<=\+)(%(space)s)+(?=\+)|(?<=-)(%(space)s)+(?=-)|%(space)s+|(?:%(newline)s%(space)s*)+] <ast.Mod object at 0x7da2590d6920> call[name[dict], parameter[call[name[locals], parameter[]]]]]]].sub
def function[space_subber_banged, parameter[match]]:
constant[ Substitution callback ]
variable[groups] assign[=] call[name[match].groups, parameter[]]
if call[name[groups]][constant[0]] begin[:]
return[call[name[groups]][constant[0]]]
def function[jsmin, parameter[script, keep_bang_comments]]:
constant[
Minify javascript based on `jsmin.c by Douglas Crockford`_\.
Instead of parsing the stream char by char, it uses a regular
expression approach which minifies the whole script with one big
substitution regex.
.. _jsmin.c by Douglas Crockford:
http://www.crockford.com/javascript/jsmin.c
:Parameters:
`script` : ``str``
Script to minify
`keep_bang_comments` : ``bool``
Keep comments starting with an exclamation mark? (``/*!...*/``)
:Return: Minified script
:Rtype: ``str``
]
if name[keep_bang_comments] begin[:]
return[call[call[name[space_sub_banged], parameter[name[space_subber_banged], binary_operation[constant[
%s
] <ast.Mod object at 0x7da2590d6920> name[script]]]].strip, parameter[]]]
return[name[jsmin]] | keyword[def] identifier[_make_jsmin] ( identifier[python_only] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[python_only] :
keyword[try] :
keyword[import] identifier[_rjsmin]
keyword[except] identifier[ImportError] :
keyword[pass]
keyword[else] :
keyword[return] identifier[_rjsmin] . identifier[jsmin]
keyword[try] :
identifier[xrange]
keyword[except] identifier[NameError] :
identifier[xrange] = identifier[range]
identifier[space_chars] = literal[string]
identifier[line_comment] = literal[string]
identifier[space_comment] = literal[string]
identifier[space_comment_nobang] = literal[string]
identifier[bang_comment] = literal[string]
identifier[string1] = literal[string]
identifier[string2] = literal[string]
identifier[strings] = literal[string] %( identifier[string1] , identifier[string2] )
identifier[charclass] = literal[string]
identifier[nospecial] = literal[string]
identifier[regex] = literal[string] %(
identifier[nospecial] , identifier[charclass] , identifier[nospecial]
)
identifier[space] = literal[string] %( identifier[space_chars] , identifier[space_comment] )
identifier[space_nobang] = literal[string] %( identifier[space_chars] , identifier[space_comment_nobang] )
identifier[newline] = literal[string] % identifier[line_comment]
keyword[def] identifier[fix_charclass] ( identifier[result] ):
literal[string]
identifier[pos] = identifier[result] . identifier[find] ( literal[string] )
keyword[if] identifier[pos] >= literal[int] :
identifier[result] = literal[string] %( identifier[result] [: identifier[pos] ], identifier[result] [ identifier[pos] + literal[int] :])
keyword[def] identifier[sequentize] ( identifier[string] ):
literal[string]
identifier[first] , identifier[last] , identifier[result] = keyword[None] , keyword[None] ,[]
keyword[for] identifier[char] keyword[in] identifier[map] ( identifier[ord] , identifier[string] ):
keyword[if] identifier[last] keyword[is] keyword[None] :
identifier[first] = identifier[last] = identifier[char]
keyword[elif] identifier[last] + literal[int] == identifier[char] :
identifier[last] = identifier[char]
keyword[else] :
identifier[result] . identifier[append] (( identifier[first] , identifier[last] ))
identifier[first] = identifier[last] = identifier[char]
keyword[if] identifier[last] keyword[is] keyword[not] keyword[None] :
identifier[result] . identifier[append] (( identifier[first] , identifier[last] ))
keyword[return] literal[string] . identifier[join] ([ literal[string] %(
identifier[chr] ( identifier[first] ),
identifier[last] > identifier[first] + literal[int] keyword[and] literal[string] keyword[or] literal[string] ,
identifier[last] != identifier[first] keyword[and] identifier[chr] ( identifier[last] ) keyword[or] literal[string]
) keyword[for] identifier[first] , identifier[last] keyword[in] identifier[result] ])
keyword[return] identifier[_re] . identifier[sub] (
literal[string] ,
keyword[lambda] identifier[m] : literal[string] % identifier[ord] ( identifier[m] . identifier[group] ( literal[int] )),(
identifier[sequentize] ( identifier[result] )
. identifier[replace] ( literal[string] , literal[string] )
. identifier[replace] ( literal[string] , literal[string] )
. identifier[replace] ( literal[string] , literal[string] )
)
)
keyword[def] identifier[id_literal_] ( identifier[what] ):
literal[string]
identifier[match] = identifier[_re] . identifier[compile] ( identifier[what] ). identifier[match]
identifier[result] = literal[string] . identifier[join] ([
identifier[chr] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[xrange] ( literal[int] ) keyword[if] keyword[not] identifier[match] ( identifier[chr] ( identifier[c] ))
])
keyword[return] literal[string] % identifier[fix_charclass] ( identifier[result] )
keyword[def] identifier[not_id_literal_] ( identifier[keep] ):
literal[string]
identifier[match] = identifier[_re] . identifier[compile] ( identifier[id_literal_] ( identifier[keep] )). identifier[match]
identifier[result] = literal[string] . identifier[join] ([
identifier[chr] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[xrange] ( literal[int] ) keyword[if] keyword[not] identifier[match] ( identifier[chr] ( identifier[c] ))
])
keyword[return] literal[string] % identifier[fix_charclass] ( identifier[result] )
identifier[not_id_literal] = identifier[not_id_literal_] ( literal[string] )
identifier[preregex1] = literal[string]
identifier[preregex2] = literal[string] % identifier[locals] ()
identifier[id_literal] = identifier[id_literal_] ( literal[string] )
identifier[id_literal_open] = identifier[id_literal_] ( literal[string] )
identifier[id_literal_close] = identifier[id_literal_] ( literal[string] )
identifier[dull] = literal[string]
identifier[space_sub_simple] = identifier[_re] . identifier[compile] ((
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
)% identifier[locals] ()). identifier[sub]
keyword[def] identifier[space_subber_simple] ( identifier[match] ):
literal[string]
identifier[groups] = identifier[match] . identifier[groups] ()
keyword[if] identifier[groups] [ literal[int] ]:
keyword[return] identifier[groups] [ literal[int] ]
keyword[elif] identifier[groups] [ literal[int] ]:
keyword[return] identifier[groups] [ literal[int] ]
keyword[elif] identifier[groups] [ literal[int] ]:
keyword[return] identifier[groups] [ literal[int] ]
keyword[elif] identifier[groups] [ literal[int] ]:
keyword[return] identifier[groups] [ literal[int] ]
keyword[elif] identifier[groups] [ literal[int] ]:
keyword[return] literal[string]
keyword[elif] identifier[groups] [ literal[int] ] keyword[or] identifier[groups] [ literal[int] ] keyword[or] identifier[groups] [ literal[int] ]:
keyword[return] literal[string]
keyword[else] :
keyword[return] literal[string]
identifier[space_sub_banged] = identifier[_re] . identifier[compile] ((
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
)% identifier[dict] ( identifier[locals] (), identifier[space] = identifier[space_nobang] )). identifier[sub]
keyword[def] identifier[space_subber_banged] ( identifier[match] ):
literal[string]
identifier[groups] = identifier[match] . identifier[groups] ()
keyword[if] identifier[groups] [ literal[int] ]:
keyword[return] identifier[groups] [ literal[int] ]
keyword[elif] identifier[groups] [ literal[int] ]:
keyword[return] identifier[groups] [ literal[int] ]
keyword[elif] identifier[groups] [ literal[int] ]:
keyword[return] identifier[groups] [ literal[int] ]
keyword[elif] identifier[groups] [ literal[int] ]:
keyword[return] identifier[groups] [ literal[int] ]
keyword[elif] identifier[groups] [ literal[int] ]:
keyword[return] identifier[groups] [ literal[int] ]
keyword[elif] identifier[groups] [ literal[int] ]:
keyword[return] literal[string]
keyword[elif] identifier[groups] [ literal[int] ] keyword[or] identifier[groups] [ literal[int] ] keyword[or] identifier[groups] [ literal[int] ]:
keyword[return] literal[string]
keyword[else] :
keyword[return] literal[string]
keyword[def] identifier[jsmin] ( identifier[script] , identifier[keep_bang_comments] = keyword[False] ):
literal[string]
keyword[if] identifier[keep_bang_comments] :
keyword[return] identifier[space_sub_banged] (
identifier[space_subber_banged] , literal[string] % identifier[script]
). identifier[strip] ()
keyword[else] :
keyword[return] identifier[space_sub_simple] (
identifier[space_subber_simple] , literal[string] % identifier[script]
). identifier[strip] ()
keyword[return] identifier[jsmin] | def _make_jsmin(python_only=False):
"""
Generate JS minifier based on `jsmin.c by Douglas Crockford`_
.. _jsmin.c by Douglas Crockford:
http://www.crockford.com/javascript/jsmin.c
:Parameters:
`python_only` : ``bool``
Use only the python variant. If true, the c extension is not even
tried to be loaded.
:Return: Minifier
:Rtype: ``callable``
"""
# pylint: disable = R0912, R0914, W0612
if not python_only:
try:
import _rjsmin # pylint: disable = F0401 # depends on [control=['try'], data=[]]
except ImportError:
pass # depends on [control=['except'], data=[]]
else:
return _rjsmin.jsmin # depends on [control=['if'], data=[]]
try:
xrange # depends on [control=['try'], data=[]]
except NameError:
xrange = range # pylint: disable = W0622 # depends on [control=['except'], data=[]]
space_chars = '[\\000-\\011\\013\\014\\016-\\040]'
line_comment = '(?://[^\\r\\n]*)'
space_comment = '(?:/\\*[^*]*\\*+(?:[^/*][^*]*\\*+)*/)'
space_comment_nobang = '(?:/\\*(?!!)[^*]*\\*+(?:[^/*][^*]*\\*+)*/)'
bang_comment = '(?:/\\*![^*]*\\*+(?:[^/*][^*]*\\*+)*/)'
string1 = '(?:\\047[^\\047\\\\\\r\\n]*(?:\\\\(?:[^\\r\\n]|\\r?\\n|\\r)[^\\047\\\\\\r\\n]*)*\\047)'
string2 = '(?:"[^"\\\\\\r\\n]*(?:\\\\(?:[^\\r\\n]|\\r?\\n|\\r)[^"\\\\\\r\\n]*)*")'
strings = '(?:%s|%s)' % (string1, string2)
charclass = '(?:\\[[^\\\\\\]\\r\\n]*(?:\\\\[^\\r\\n][^\\\\\\]\\r\\n]*)*\\])'
nospecial = '[^/\\\\\\[\\r\\n]'
regex = '(?:/(?![\\r\\n/*])%s*(?:(?:\\\\[^\\r\\n]|%s)%s*)*/)' % (nospecial, charclass, nospecial)
space = '(?:%s|%s)' % (space_chars, space_comment)
space_nobang = '(?:%s|%s)' % (space_chars, space_comment_nobang)
newline = '(?:%s?[\\r\\n])' % line_comment
def fix_charclass(result):
""" Fixup string of chars to fit into a regex char class """
pos = result.find('-')
if pos >= 0:
result = '%s%s-' % (result[:pos], result[pos + 1:]) # depends on [control=['if'], data=['pos']]
def sequentize(string):
"""
Notate consecutive characters as sequence
(1-4 instead of 1234)
"""
(first, last, result) = (None, None, [])
for char in map(ord, string):
if last is None:
first = last = char # depends on [control=['if'], data=['last']]
elif last + 1 == char:
last = char # depends on [control=['if'], data=['char']]
else:
result.append((first, last))
first = last = char # depends on [control=['for'], data=['char']]
if last is not None:
result.append((first, last)) # depends on [control=['if'], data=['last']]
return ''.join(['%s%s%s' % (chr(first), last > first + 1 and '-' or '', last != first and chr(last) or '') for (first, last) in result]) # \047 for better portability
return _re.sub('([\\000-\\040\\047])', lambda m: '\\%03o' % ord(m.group(1)), sequentize(result).replace('\\', '\\\\').replace('[', '\\[').replace(']', '\\]'))
def id_literal_(what):
""" Make id_literal like char class """
match = _re.compile(what).match
result = ''.join([chr(c) for c in xrange(127) if not match(chr(c))])
return '[^%s]' % fix_charclass(result)
def not_id_literal_(keep):
""" Make negated id_literal like char class """
match = _re.compile(id_literal_(keep)).match
result = ''.join([chr(c) for c in xrange(127) if not match(chr(c))])
return '[%s]' % fix_charclass(result)
not_id_literal = not_id_literal_('[a-zA-Z0-9_$]')
preregex1 = '[(,=:\\[!&|?{};\\r\\n]'
preregex2 = '%(not_id_literal)sreturn' % locals()
id_literal = id_literal_('[a-zA-Z0-9_$]')
id_literal_open = id_literal_('[a-zA-Z0-9_${\\[(!+-]')
id_literal_close = id_literal_('[a-zA-Z0-9_$}\\])"\\047+-]')
dull = '[^\\047"/\\000-\\040]'
# noqa pylint: disable = C0330
space_sub_simple = _re.compile('(%(dull)s+)|(%(strings)s%(dull)s*)|(?<=%(preregex1)s)%(space)s*(?:%(newline)s%(space)s*)*(%(regex)s%(dull)s*)|(?<=%(preregex2)s)%(space)s*(?:%(newline)s%(space)s)*(%(regex)s%(dull)s*)|(?<=%(id_literal_close)s)%(space)s*(?:(%(newline)s)%(space)s*)+(?=%(id_literal_open)s)|(?<=%(id_literal)s)(%(space)s)+(?=%(id_literal)s)|(?<=\\+)(%(space)s)+(?=\\+)|(?<=-)(%(space)s)+(?=-)|%(space)s+|(?:%(newline)s%(space)s*)+' % locals()).sub
#print space_sub_simple.__self__.pattern
def space_subber_simple(match):
""" Substitution callback """
# pylint: disable = R0911
groups = match.groups()
if groups[0]:
return groups[0] # depends on [control=['if'], data=[]]
elif groups[1]:
return groups[1] # depends on [control=['if'], data=[]]
elif groups[2]:
return groups[2] # depends on [control=['if'], data=[]]
elif groups[3]:
return groups[3] # depends on [control=['if'], data=[]]
elif groups[4]:
return '\n' # depends on [control=['if'], data=[]]
elif groups[5] or groups[6] or groups[7]:
return ' ' # depends on [control=['if'], data=[]]
else:
return ''
# noqa pylint: disable = C0330
space_sub_banged = _re.compile('(%(dull)s+)|(%(strings)s%(dull)s*)|(%(bang_comment)s%(dull)s*)|(?<=%(preregex1)s)%(space)s*(?:%(newline)s%(space)s*)*(%(regex)s%(dull)s*)|(?<=%(preregex2)s)%(space)s*(?:%(newline)s%(space)s)*(%(regex)s%(dull)s*)|(?<=%(id_literal_close)s)%(space)s*(?:(%(newline)s)%(space)s*)+(?=%(id_literal_open)s)|(?<=%(id_literal)s)(%(space)s)+(?=%(id_literal)s)|(?<=\\+)(%(space)s)+(?=\\+)|(?<=-)(%(space)s)+(?=-)|%(space)s+|(?:%(newline)s%(space)s*)+' % dict(locals(), space=space_nobang)).sub
#print space_sub_banged.__self__.pattern
def space_subber_banged(match):
""" Substitution callback """
# pylint: disable = R0911
groups = match.groups()
if groups[0]:
return groups[0] # depends on [control=['if'], data=[]]
elif groups[1]:
return groups[1] # depends on [control=['if'], data=[]]
elif groups[2]:
return groups[2] # depends on [control=['if'], data=[]]
elif groups[3]:
return groups[3] # depends on [control=['if'], data=[]]
elif groups[4]:
return groups[4] # depends on [control=['if'], data=[]]
elif groups[5]:
return '\n' # depends on [control=['if'], data=[]]
elif groups[6] or groups[7] or groups[8]:
return ' ' # depends on [control=['if'], data=[]]
else:
return ''
def jsmin(script, keep_bang_comments=False): # pylint: disable = W0621
'\n Minify javascript based on `jsmin.c by Douglas Crockford`_\\.\n\n Instead of parsing the stream char by char, it uses a regular\n expression approach which minifies the whole script with one big\n substitution regex.\n\n .. _jsmin.c by Douglas Crockford:\n http://www.crockford.com/javascript/jsmin.c\n\n :Parameters:\n `script` : ``str``\n Script to minify\n\n `keep_bang_comments` : ``bool``\n Keep comments starting with an exclamation mark? (``/*!...*/``)\n\n :Return: Minified script\n :Rtype: ``str``\n '
if keep_bang_comments:
return space_sub_banged(space_subber_banged, '\n%s\n' % script).strip() # depends on [control=['if'], data=[]]
else:
return space_sub_simple(space_subber_simple, '\n%s\n' % script).strip()
return jsmin |
def _discover_ideal_backend(orm_backend):
"""Auto-discover the ideal backend based on what is installed.
Right now, handles discovery of:
* PeeWee
* SQLAlchemy
Args:
orm_backend (str): The ``orm_backend`` value that was passed to the
``create_app`` function. That is, the ORM Backend the User
indicated they wanted to use.
Returns:
str|fleaker.missing.MissingSentinel: Returns a string for the ideal
backend if it found one, or :obj:`fleaker.MISSING` if we couldn't
find one.
Raises:
RuntimeError: Raised if no user provided ORM Backend is given and BOTH
PeeWee and SQLAlchemy are installed.
"""
if orm_backend:
return orm_backend
if peewee is not MISSING and sqlalchemy is not MISSING:
raise RuntimeError('Both PeeWee and SQLAlchemy detected as installed, '
'but no explicit backend provided! Please specify '
'one!')
if peewee is not MISSING:
return _PEEWEE_BACKEND
elif sqlalchemy is not MISSING:
return _SQLALCHEMY_BACKEND
else:
return MISSING | def function[_discover_ideal_backend, parameter[orm_backend]]:
constant[Auto-discover the ideal backend based on what is installed.
Right now, handles discovery of:
* PeeWee
* SQLAlchemy
Args:
orm_backend (str): The ``orm_backend`` value that was passed to the
``create_app`` function. That is, the ORM Backend the User
indicated they wanted to use.
Returns:
str|fleaker.missing.MissingSentinel: Returns a string for the ideal
backend if it found one, or :obj:`fleaker.MISSING` if we couldn't
find one.
Raises:
RuntimeError: Raised if no user provided ORM Backend is given and BOTH
PeeWee and SQLAlchemy are installed.
]
if name[orm_backend] begin[:]
return[name[orm_backend]]
if <ast.BoolOp object at 0x7da18f09dde0> begin[:]
<ast.Raise object at 0x7da18ede6d70>
if compare[name[peewee] is_not name[MISSING]] begin[:]
return[name[_PEEWEE_BACKEND]] | keyword[def] identifier[_discover_ideal_backend] ( identifier[orm_backend] ):
literal[string]
keyword[if] identifier[orm_backend] :
keyword[return] identifier[orm_backend]
keyword[if] identifier[peewee] keyword[is] keyword[not] identifier[MISSING] keyword[and] identifier[sqlalchemy] keyword[is] keyword[not] identifier[MISSING] :
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string]
literal[string] )
keyword[if] identifier[peewee] keyword[is] keyword[not] identifier[MISSING] :
keyword[return] identifier[_PEEWEE_BACKEND]
keyword[elif] identifier[sqlalchemy] keyword[is] keyword[not] identifier[MISSING] :
keyword[return] identifier[_SQLALCHEMY_BACKEND]
keyword[else] :
keyword[return] identifier[MISSING] | def _discover_ideal_backend(orm_backend):
"""Auto-discover the ideal backend based on what is installed.
Right now, handles discovery of:
* PeeWee
* SQLAlchemy
Args:
orm_backend (str): The ``orm_backend`` value that was passed to the
``create_app`` function. That is, the ORM Backend the User
indicated they wanted to use.
Returns:
str|fleaker.missing.MissingSentinel: Returns a string for the ideal
backend if it found one, or :obj:`fleaker.MISSING` if we couldn't
find one.
Raises:
RuntimeError: Raised if no user provided ORM Backend is given and BOTH
PeeWee and SQLAlchemy are installed.
"""
if orm_backend:
return orm_backend # depends on [control=['if'], data=[]]
if peewee is not MISSING and sqlalchemy is not MISSING:
raise RuntimeError('Both PeeWee and SQLAlchemy detected as installed, but no explicit backend provided! Please specify one!') # depends on [control=['if'], data=[]]
if peewee is not MISSING:
return _PEEWEE_BACKEND # depends on [control=['if'], data=[]]
elif sqlalchemy is not MISSING:
return _SQLALCHEMY_BACKEND # depends on [control=['if'], data=[]]
else:
return MISSING |
def info(self, **kwargs):
"""
Get the basic movie information for a specific movie id.
Args:
language: (optional) ISO 639-1 code.
append_to_response: (optional) Comma separated, any movie method.
Returns:
A dict representation of the JSON returned from the API.
"""
path = self._get_id_path('info')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response | def function[info, parameter[self]]:
constant[
Get the basic movie information for a specific movie id.
Args:
language: (optional) ISO 639-1 code.
append_to_response: (optional) Comma separated, any movie method.
Returns:
A dict representation of the JSON returned from the API.
]
variable[path] assign[=] call[name[self]._get_id_path, parameter[constant[info]]]
variable[response] assign[=] call[name[self]._GET, parameter[name[path], name[kwargs]]]
call[name[self]._set_attrs_to_values, parameter[name[response]]]
return[name[response]] | keyword[def] identifier[info] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[path] = identifier[self] . identifier[_get_id_path] ( literal[string] )
identifier[response] = identifier[self] . identifier[_GET] ( identifier[path] , identifier[kwargs] )
identifier[self] . identifier[_set_attrs_to_values] ( identifier[response] )
keyword[return] identifier[response] | def info(self, **kwargs):
"""
Get the basic movie information for a specific movie id.
Args:
language: (optional) ISO 639-1 code.
append_to_response: (optional) Comma separated, any movie method.
Returns:
A dict representation of the JSON returned from the API.
"""
path = self._get_id_path('info')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.