| code (string, lengths 75–104k) | code_sememe (string, lengths 47–309k) | token_type (string, lengths 215–214k) | code_dependency (string, lengths 75–155k) |
|---|---|---|---|
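Each row below pairs a Python function (code) with three machine-derived views of the same source: an AST-style semantic dump (code_sememe), a keyword/identifier/literal token stream (token_type), and the original code annotated with control-dependency comments (code_dependency).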
def update(self, parent=None):
"""
Updates the resource. This will trigger an api PATCH request.
:param parent ResourceBase: the parent of the resource - used for nesting the request url, optional
:raises ResourceError: if the resource does not have an id (does not exist yet)
:returns: the resource itself
"""
if not self.id:
raise self.ResourceError('cannot update a resource without an ID')
data = self.__class__._process_request(
connection.patch,
parent=parent,
id=self.id,
payload=self.payload()
)
return self._reload(data)
|
def function[update, parameter[self, parent]]:
constant[
Updates the resource. This will trigger an api PATCH request.
:param parent ResourceBase: the parent of the resource - used for nesting the request url, optional
:raises ResourceError: if the resource does not have an id (does not exist yet)
:returns: the resource itself
]
if <ast.UnaryOp object at 0x7da1b0863370> begin[:]
<ast.Raise object at 0x7da1b0860610>
variable[data] assign[=] call[name[self].__class__._process_request, parameter[name[connection].patch]]
return[call[name[self]._reload, parameter[name[data]]]]
|
keyword[def] identifier[update] ( identifier[self] , identifier[parent] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[id] :
keyword[raise] identifier[self] . identifier[ResourceError] ( literal[string] )
identifier[data] = identifier[self] . identifier[__class__] . identifier[_process_request] (
identifier[connection] . identifier[patch] ,
identifier[parent] = identifier[parent] ,
identifier[id] = identifier[self] . identifier[id] ,
identifier[payload] = identifier[self] . identifier[payload] ()
)
keyword[return] identifier[self] . identifier[_reload] ( identifier[data] )
|
def update(self, parent=None):
"""
Updates the resource. This will trigger an api PATCH request.
:param parent ResourceBase: the parent of the resource - used for nesting the request url, optional
:raises ResourceError: if the resource does not have an id (does not exist yet)
:returns: the resource itself
"""
if not self.id:
raise self.ResourceError('cannot update a resource without an ID') # depends on [control=['if'], data=[]]
data = self.__class__._process_request(connection.patch, parent=parent, id=self.id, payload=self.payload())
return self._reload(data)
|
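A minimal, self-contained sketch of the guard-then-PATCH pattern in the row above. The `Resource` class, the `patch` callable standing in for `connection.patch`, and the echoed payload are all hypothetical stand-ins, not the original library's API:

```python
class ResourceError(Exception):
    pass

class Resource:
    """Hypothetical stand-in for the ResourceBase pattern above."""
    def __init__(self, id=None, **fields):
        self.id = id
        self.fields = fields

    def payload(self):
        return dict(self.fields)

    def update(self, patch):
        # same guard as the row above: no server-side identity, no PATCH
        if not self.id:
            raise ResourceError('cannot update a resource without an ID')
        data = patch(id=self.id, payload=self.payload())
        self.fields.update(data)   # analogue of self._reload(data)
        return self

fake_patch = lambda **kw: kw['payload']   # transport stub: echo payload back
r = Resource(id=1, name='old')
r.fields['name'] = 'new'
print(r.update(fake_patch).fields)        # {'name': 'new'}
```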
def get_hash160data(self, rawtx, output_index):
"""TODO doc string"""
tx = deserialize.unsignedtx(rawtx)
output_index = deserialize.positive_integer(output_index)
data = control.get_hash160_data(tx, output_index)
return serialize.data(data)
|
def function[get_hash160data, parameter[self, rawtx, output_index]]:
constant[TODO doc string]
variable[tx] assign[=] call[name[deserialize].unsignedtx, parameter[name[rawtx]]]
variable[output_index] assign[=] call[name[deserialize].positive_integer, parameter[name[output_index]]]
variable[data] assign[=] call[name[control].get_hash160_data, parameter[name[tx], name[output_index]]]
return[call[name[serialize].data, parameter[name[data]]]]
|
keyword[def] identifier[get_hash160data] ( identifier[self] , identifier[rawtx] , identifier[output_index] ):
literal[string]
identifier[tx] = identifier[deserialize] . identifier[unsignedtx] ( identifier[rawtx] )
identifier[output_index] = identifier[deserialize] . identifier[positive_integer] ( identifier[output_index] )
identifier[data] = identifier[control] . identifier[get_hash160_data] ( identifier[tx] , identifier[output_index] )
keyword[return] identifier[serialize] . identifier[data] ( identifier[data] )
|
def get_hash160data(self, rawtx, output_index):
"""TODO doc string"""
tx = deserialize.unsignedtx(rawtx)
output_index = deserialize.positive_integer(output_index)
data = control.get_hash160_data(tx, output_index)
return serialize.data(data)
|
def gather_metrics(self, runs):
"""Write a JSON file with the result of every runs
"""
for run_dirs in runs.values():
with open(JSON_METRICS_FILE, 'w') as ostr:
ostr.write('[\n')
for i in range(len(run_dirs)):
with open(osp.join(run_dirs[i], YAML_REPORT_FILE)) as istr:
data = yaml.safe_load(istr)
data.pop('category', None)
data.pop('command', None)
data['id'] = run_dirs[i]
json.dump(data, ostr, indent=2)
if i != len(run_dirs) - 1:
ostr.write(',')
ostr.write('\n')
ostr.write(']\n')
|
def function[gather_metrics, parameter[self, runs]]:
constant[Write a JSON file with the result of every runs
]
for taget[name[run_dirs]] in starred[call[name[runs].values, parameter[]]] begin[:]
with call[name[open], parameter[name[JSON_METRICS_FILE], constant[w]]] begin[:]
call[name[ostr].write, parameter[constant[[
]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[run_dirs]]]]]] begin[:]
with call[name[open], parameter[call[name[osp].join, parameter[call[name[run_dirs]][name[i]], name[YAML_REPORT_FILE]]]]] begin[:]
variable[data] assign[=] call[name[yaml].safe_load, parameter[name[istr]]]
call[name[data].pop, parameter[constant[category], constant[None]]]
call[name[data].pop, parameter[constant[command], constant[None]]]
call[name[data]][constant[id]] assign[=] call[name[run_dirs]][name[i]]
call[name[json].dump, parameter[name[data], name[ostr]]]
if compare[name[i] not_equal[!=] binary_operation[call[name[len], parameter[name[run_dirs]]] - constant[1]]] begin[:]
call[name[ostr].write, parameter[constant[,]]]
call[name[ostr].write, parameter[constant[
]]]
call[name[ostr].write, parameter[constant[]
]]]
|
keyword[def] identifier[gather_metrics] ( identifier[self] , identifier[runs] ):
literal[string]
keyword[for] identifier[run_dirs] keyword[in] identifier[runs] . identifier[values] ():
keyword[with] identifier[open] ( identifier[JSON_METRICS_FILE] , literal[string] ) keyword[as] identifier[ostr] :
identifier[ostr] . identifier[write] ( literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[run_dirs] )):
keyword[with] identifier[open] ( identifier[osp] . identifier[join] ( identifier[run_dirs] [ identifier[i] ], identifier[YAML_REPORT_FILE] )) keyword[as] identifier[istr] :
identifier[data] = identifier[yaml] . identifier[safe_load] ( identifier[istr] )
identifier[data] . identifier[pop] ( literal[string] , keyword[None] )
identifier[data] . identifier[pop] ( literal[string] , keyword[None] )
identifier[data] [ literal[string] ]= identifier[run_dirs] [ identifier[i] ]
identifier[json] . identifier[dump] ( identifier[data] , identifier[ostr] , identifier[indent] = literal[int] )
keyword[if] identifier[i] != identifier[len] ( identifier[run_dirs] )- literal[int] :
identifier[ostr] . identifier[write] ( literal[string] )
identifier[ostr] . identifier[write] ( literal[string] )
identifier[ostr] . identifier[write] ( literal[string] )
|
def gather_metrics(self, runs):
"""Write a JSON file with the result of every runs
"""
for run_dirs in runs.values():
with open(JSON_METRICS_FILE, 'w') as ostr:
ostr.write('[\n')
for i in range(len(run_dirs)):
with open(osp.join(run_dirs[i], YAML_REPORT_FILE)) as istr:
data = yaml.safe_load(istr)
data.pop('category', None)
data.pop('command', None)
data['id'] = run_dirs[i]
json.dump(data, ostr, indent=2) # depends on [control=['with'], data=['istr']]
if i != len(run_dirs) - 1:
ostr.write(',') # depends on [control=['if'], data=[]]
ostr.write('\n') # depends on [control=['for'], data=['i']]
ostr.write(']\n') # depends on [control=['with'], data=['open', 'ostr']] # depends on [control=['for'], data=['run_dirs']]
|
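The row above assembles a JSON array by hand: write `'['`, emit each record with `json.dump`, insert a comma between consecutive items, then close with `']'`. A self-contained sketch of that comma logic, written to an in-memory buffer:

```python
import io
import json

records = [{'id': 'run-0', 'time': 1.2}, {'id': 'run-1', 'time': 0.9}]

ostr = io.StringIO()
ostr.write('[\n')
for i, rec in enumerate(records):
    json.dump(rec, ostr, indent=2)
    if i != len(records) - 1:
        ostr.write(',')   # comma after every item except the last
    ostr.write('\n')
ostr.write(']\n')

# The hand-built output is itself valid JSON; json.dump(records, f, indent=2)
# would produce the same array in a single call.
assert json.loads(ostr.getvalue()) == records
```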
def _get_axis_mode(self, axis):
"will get the axis mode for the current series"
if all([isinstance(getattr(s, axis), TimeVariable) for s in self._series]):
return 'time'
return None
|
def function[_get_axis_mode, parameter[self, axis]]:
constant[will get the axis mode for the current series]
if call[name[all], parameter[<ast.ListComp object at 0x7da1b021e440>]] begin[:]
return[constant[time]]
return[constant[None]]
|
keyword[def] identifier[_get_axis_mode] ( identifier[self] , identifier[axis] ):
literal[string]
keyword[if] identifier[all] ([ identifier[isinstance] ( identifier[getattr] ( identifier[s] , identifier[axis] ), identifier[TimeVariable] ) keyword[for] identifier[s] keyword[in] identifier[self] . identifier[_series] ]):
keyword[return] literal[string]
keyword[return] keyword[None]
|
def _get_axis_mode(self, axis):
"""will get the axis mode for the current series"""
if all([isinstance(getattr(s, axis), TimeVariable) for s in self._series]):
return 'time' # depends on [control=['if'], data=[]]
return None
|
def extract_srcset(self, srcset):
"""
Handle ``srcset="image.png 1x, image@2x.jpg 2x"``
"""
urls = []
for item in srcset.split(','):
if item:
urls.append(unquote_utf8(item.rsplit(' ', 1)[0]))
return urls
|
def function[extract_srcset, parameter[self, srcset]]:
constant[
Handle ``srcset="image.png 1x, image@2x.jpg 2x"``
]
variable[urls] assign[=] list[[]]
for taget[name[item]] in starred[call[name[srcset].split, parameter[constant[,]]]] begin[:]
if name[item] begin[:]
call[name[urls].append, parameter[call[name[unquote_utf8], parameter[call[call[name[item].rsplit, parameter[constant[ ], constant[1]]]][constant[0]]]]]]
return[name[urls]]
|
keyword[def] identifier[extract_srcset] ( identifier[self] , identifier[srcset] ):
literal[string]
identifier[urls] =[]
keyword[for] identifier[item] keyword[in] identifier[srcset] . identifier[split] ( literal[string] ):
keyword[if] identifier[item] :
identifier[urls] . identifier[append] ( identifier[unquote_utf8] ( identifier[item] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ]))
keyword[return] identifier[urls]
|
def extract_srcset(self, srcset):
"""
Handle ``srcset="image.png 1x, image@2x.jpg 2x"``
"""
urls = []
for item in srcset.split(','):
if item:
urls.append(unquote_utf8(item.rsplit(' ', 1)[0])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
return urls
|
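A usage sketch for the row above, with `unquote_utf8` assumed to behave like `urllib.parse.unquote` (that helper is not shown in the row):

```python
from urllib.parse import unquote  # assumed stand-in for unquote_utf8

def extract_srcset(srcset):
    # keep only the URL half of each "url descriptor" pair
    urls = []
    for item in srcset.split(','):
        if item:
            urls.append(unquote(item.rsplit(' ', 1)[0]))
    return urls

print(extract_srcset('image.png 1x, image@2x.jpg 2x'))
# ['image.png', ' image@2x.jpg'] -- note the leading space survives,
# since items after the comma are never stripped
```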
def report_and_save_keywords(self,relaxation_kwds,precomputed_kwds):
"""Save relaxation keywords to .txt and .pyc file"""
report_name = os.path.join(self.backup_dir,'relaxation_keywords.txt')
pretty_relax_kwds = pprint.pformat(relaxation_kwds,indent=4)
with open(report_name,'w') as wf:
wf.write(pretty_relax_kwds)
wf.close()
origin_name = os.path.join(self.backup_dir,'relaxation_keywords.pyc')
with open(origin_name,'wb') as ro:
pickle.dump(relaxation_kwds,ro,protocol=pickle.HIGHEST_PROTOCOL)
ro.close()
if relaxation_kwds['presave']:
precomp_kwds_name = os.path.join(self.backup_dir,
'precomputed_keywords.pyc')
with open(precomp_kwds_name, 'wb') as po:
pickle.dump(precomputed_kwds, po,
protocol=pickle.HIGHEST_PROTOCOL)
po.close()
|
def function[report_and_save_keywords, parameter[self, relaxation_kwds, precomputed_kwds]]:
constant[Save relaxation keywords to .txt and .pyc file]
variable[report_name] assign[=] call[name[os].path.join, parameter[name[self].backup_dir, constant[relaxation_keywords.txt]]]
variable[pretty_relax_kwds] assign[=] call[name[pprint].pformat, parameter[name[relaxation_kwds]]]
with call[name[open], parameter[name[report_name], constant[w]]] begin[:]
call[name[wf].write, parameter[name[pretty_relax_kwds]]]
call[name[wf].close, parameter[]]
variable[origin_name] assign[=] call[name[os].path.join, parameter[name[self].backup_dir, constant[relaxation_keywords.pyc]]]
with call[name[open], parameter[name[origin_name], constant[wb]]] begin[:]
call[name[pickle].dump, parameter[name[relaxation_kwds], name[ro]]]
call[name[ro].close, parameter[]]
if call[name[relaxation_kwds]][constant[presave]] begin[:]
variable[precomp_kwds_name] assign[=] call[name[os].path.join, parameter[name[self].backup_dir, constant[precomputed_keywords.pyc]]]
with call[name[open], parameter[name[precomp_kwds_name], constant[wb]]] begin[:]
call[name[pickle].dump, parameter[name[precomputed_kwds], name[po]]]
call[name[po].close, parameter[]]
|
keyword[def] identifier[report_and_save_keywords] ( identifier[self] , identifier[relaxation_kwds] , identifier[precomputed_kwds] ):
literal[string]
identifier[report_name] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[backup_dir] , literal[string] )
identifier[pretty_relax_kwds] = identifier[pprint] . identifier[pformat] ( identifier[relaxation_kwds] , identifier[indent] = literal[int] )
keyword[with] identifier[open] ( identifier[report_name] , literal[string] ) keyword[as] identifier[wf] :
identifier[wf] . identifier[write] ( identifier[pretty_relax_kwds] )
identifier[wf] . identifier[close] ()
identifier[origin_name] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[backup_dir] , literal[string] )
keyword[with] identifier[open] ( identifier[origin_name] , literal[string] ) keyword[as] identifier[ro] :
identifier[pickle] . identifier[dump] ( identifier[relaxation_kwds] , identifier[ro] , identifier[protocol] = identifier[pickle] . identifier[HIGHEST_PROTOCOL] )
identifier[ro] . identifier[close] ()
keyword[if] identifier[relaxation_kwds] [ literal[string] ]:
identifier[precomp_kwds_name] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[backup_dir] ,
literal[string] )
keyword[with] identifier[open] ( identifier[precomp_kwds_name] , literal[string] ) keyword[as] identifier[po] :
identifier[pickle] . identifier[dump] ( identifier[precomputed_kwds] , identifier[po] ,
identifier[protocol] = identifier[pickle] . identifier[HIGHEST_PROTOCOL] )
identifier[po] . identifier[close] ()
|
def report_and_save_keywords(self, relaxation_kwds, precomputed_kwds):
"""Save relaxation keywords to .txt and .pyc file"""
report_name = os.path.join(self.backup_dir, 'relaxation_keywords.txt')
pretty_relax_kwds = pprint.pformat(relaxation_kwds, indent=4)
with open(report_name, 'w') as wf:
wf.write(pretty_relax_kwds) # depends on [control=['with'], data=['wf']]
wf.close()
origin_name = os.path.join(self.backup_dir, 'relaxation_keywords.pyc')
with open(origin_name, 'wb') as ro:
pickle.dump(relaxation_kwds, ro, protocol=pickle.HIGHEST_PROTOCOL) # depends on [control=['with'], data=['ro']]
ro.close()
if relaxation_kwds['presave']:
precomp_kwds_name = os.path.join(self.backup_dir, 'precomputed_keywords.pyc')
with open(precomp_kwds_name, 'wb') as po:
pickle.dump(precomputed_kwds, po, protocol=pickle.HIGHEST_PROTOCOL) # depends on [control=['with'], data=['po']]
po.close() # depends on [control=['if'], data=[]]
|
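A sketch of the twin-save pattern above: a human-readable pprint dump next to a binary pickle of the same dict. The filenames and the example dict are hypothetical; note also that the explicit `close()` calls in the row are redundant inside a `with` block:

```python
import os
import pickle
import pprint
import tempfile

kwds = {'step_size': 0.1, 'n_iter': 200, 'presave': True}
backup_dir = tempfile.mkdtemp()

with open(os.path.join(backup_dir, 'keywords.txt'), 'w') as wf:
    wf.write(pprint.pformat(kwds, indent=4))       # human-readable report
with open(os.path.join(backup_dir, 'keywords.pyc'), 'wb') as ro:
    pickle.dump(kwds, ro, protocol=pickle.HIGHEST_PROTOCOL)  # exact copy

with open(os.path.join(backup_dir, 'keywords.pyc'), 'rb') as rf:
    assert pickle.load(rf) == kwds
```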
def init_distributed(cuda):
"""
Initializes distributed backend.
:param cuda: (bool) if True initializes nccl backend, if False initializes
gloo backend
"""
world_size = int(os.environ.get('WORLD_SIZE', 1))
distributed = (world_size > 1)
if distributed:
backend = 'nccl' if cuda else 'gloo'
dist.init_process_group(backend=backend,
init_method='env://')
assert dist.is_initialized()
return distributed
|
def function[init_distributed, parameter[cuda]]:
constant[
Initializes distributed backend.
:param cuda: (bool) if True initializes nccl backend, if False initializes
gloo backend
]
variable[world_size] assign[=] call[name[int], parameter[call[name[os].environ.get, parameter[constant[WORLD_SIZE], constant[1]]]]]
variable[distributed] assign[=] compare[name[world_size] greater[>] constant[1]]
if name[distributed] begin[:]
variable[backend] assign[=] <ast.IfExp object at 0x7da2054a5960>
call[name[dist].init_process_group, parameter[]]
assert[call[name[dist].is_initialized, parameter[]]]
return[name[distributed]]
|
keyword[def] identifier[init_distributed] ( identifier[cuda] ):
literal[string]
identifier[world_size] = identifier[int] ( identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[int] ))
identifier[distributed] =( identifier[world_size] > literal[int] )
keyword[if] identifier[distributed] :
identifier[backend] = literal[string] keyword[if] identifier[cuda] keyword[else] literal[string]
identifier[dist] . identifier[init_process_group] ( identifier[backend] = identifier[backend] ,
identifier[init_method] = literal[string] )
keyword[assert] identifier[dist] . identifier[is_initialized] ()
keyword[return] identifier[distributed]
|
def init_distributed(cuda):
"""
Initializes distributed backend.
:param cuda: (bool) if True initializes nccl backend, if False initializes
gloo backend
"""
world_size = int(os.environ.get('WORLD_SIZE', 1))
distributed = world_size > 1
if distributed:
backend = 'nccl' if cuda else 'gloo'
dist.init_process_group(backend=backend, init_method='env://')
assert dist.is_initialized() # depends on [control=['if'], data=[]]
return distributed
|
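With `init_method='env://'`, `torch.distributed` reads its rendezvous information from the `MASTER_ADDR`, `MASTER_PORT`, `RANK` and `WORLD_SIZE` environment variables, which launchers such as `torchrun` set for every worker. A single-process sketch, assuming PyTorch is available:

```python
import os
import torch.distributed as dist

os.environ.setdefault('MASTER_ADDR', '127.0.0.1')
os.environ.setdefault('MASTER_PORT', '29500')
os.environ.setdefault('RANK', '0')
os.environ.setdefault('WORLD_SIZE', '1')

# init_distributed() above is a no-op here (world_size == 1); with
# WORLD_SIZE > 1 it picks 'nccl' for CUDA and 'gloo' otherwise.
world_size = int(os.environ['WORLD_SIZE'])
if world_size > 1:
    dist.init_process_group(backend='gloo', init_method='env://')
    assert dist.is_initialized()
```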
def get_json_tree_path(self, *args, **kwargs):
"""
Return path to ricecooker json tree file. Override this method to use
a custom filename, e.g., for channel with multiple languages.
"""
json_tree_path = os.path.join(self.TREES_DATA_DIR, self.RICECOOKER_JSON_TREE)
return json_tree_path
|
def function[get_json_tree_path, parameter[self]]:
constant[
Return path to ricecooker json tree file. Override this method to use
a custom filename, e.g., for channel with multiple languages.
]
variable[json_tree_path] assign[=] call[name[os].path.join, parameter[name[self].TREES_DATA_DIR, name[self].RICECOOKER_JSON_TREE]]
return[name[json_tree_path]]
|
keyword[def] identifier[get_json_tree_path] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[json_tree_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[TREES_DATA_DIR] , identifier[self] . identifier[RICECOOKER_JSON_TREE] )
keyword[return] identifier[json_tree_path]
|
def get_json_tree_path(self, *args, **kwargs):
"""
Return path to ricecooker json tree file. Override this method to use
a custom filename, e.g., for channel with multiple languages.
"""
json_tree_path = os.path.join(self.TREES_DATA_DIR, self.RICECOOKER_JSON_TREE)
return json_tree_path
|
def _apply_memory_config(config_spec, memory):
'''
Sets memory size to the given value
config_spec
vm.ConfigSpec object
memory
Memory size and unit
'''
log.trace('Configuring virtual machine memory '
'settings memory=%s', memory)
if 'size' in memory and 'unit' in memory:
try:
if memory['unit'].lower() == 'kb':
memory_mb = memory['size'] / 1024
elif memory['unit'].lower() == 'mb':
memory_mb = memory['size']
elif memory['unit'].lower() == 'gb':
memory_mb = int(float(memory['size']) * 1024)
except (TypeError, ValueError):
memory_mb = int(memory['size'])
config_spec.memoryMB = memory_mb
if 'reservation_max' in memory:
config_spec.memoryReservationLockedToMax = memory['reservation_max']
if 'hotadd' in memory:
config_spec.memoryHotAddEnabled = memory['hotadd']
|
def function[_apply_memory_config, parameter[config_spec, memory]]:
constant[
Sets memory size to the given value
config_spec
vm.ConfigSpec object
memory
Memory size and unit
]
call[name[log].trace, parameter[constant[Configuring virtual machine memory settings memory=%s], name[memory]]]
if <ast.BoolOp object at 0x7da1b21878e0> begin[:]
<ast.Try object at 0x7da1b2184eb0>
name[config_spec].memoryMB assign[=] name[memory_mb]
if compare[constant[reservation_max] in name[memory]] begin[:]
name[config_spec].memoryReservationLockedToMax assign[=] call[name[memory]][constant[reservation_max]]
if compare[constant[hotadd] in name[memory]] begin[:]
name[config_spec].memoryHotAddEnabled assign[=] call[name[memory]][constant[hotadd]]
|
keyword[def] identifier[_apply_memory_config] ( identifier[config_spec] , identifier[memory] ):
literal[string]
identifier[log] . identifier[trace] ( literal[string]
literal[string] , identifier[memory] )
keyword[if] literal[string] keyword[in] identifier[memory] keyword[and] literal[string] keyword[in] identifier[memory] :
keyword[try] :
keyword[if] identifier[memory] [ literal[string] ]. identifier[lower] ()== literal[string] :
identifier[memory_mb] = identifier[memory] [ literal[string] ]/ literal[int]
keyword[elif] identifier[memory] [ literal[string] ]. identifier[lower] ()== literal[string] :
identifier[memory_mb] = identifier[memory] [ literal[string] ]
keyword[elif] identifier[memory] [ literal[string] ]. identifier[lower] ()== literal[string] :
identifier[memory_mb] = identifier[int] ( identifier[float] ( identifier[memory] [ literal[string] ])* literal[int] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
identifier[memory_mb] = identifier[int] ( identifier[memory] [ literal[string] ])
identifier[config_spec] . identifier[memoryMB] = identifier[memory_mb]
keyword[if] literal[string] keyword[in] identifier[memory] :
identifier[config_spec] . identifier[memoryReservationLockedToMax] = identifier[memory] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[memory] :
identifier[config_spec] . identifier[memoryHotAddEnabled] = identifier[memory] [ literal[string] ]
|
def _apply_memory_config(config_spec, memory):
"""
Sets memory size to the given value
config_spec
vm.ConfigSpec object
memory
Memory size and unit
"""
log.trace('Configuring virtual machine memory settings memory=%s', memory)
if 'size' in memory and 'unit' in memory:
try:
if memory['unit'].lower() == 'kb':
memory_mb = memory['size'] / 1024 # depends on [control=['if'], data=[]]
elif memory['unit'].lower() == 'mb':
memory_mb = memory['size'] # depends on [control=['if'], data=[]]
elif memory['unit'].lower() == 'gb':
memory_mb = int(float(memory['size']) * 1024) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
memory_mb = int(memory['size']) # depends on [control=['except'], data=[]]
config_spec.memoryMB = memory_mb # depends on [control=['if'], data=[]]
if 'reservation_max' in memory:
config_spec.memoryReservationLockedToMax = memory['reservation_max'] # depends on [control=['if'], data=['memory']]
if 'hotadd' in memory:
config_spec.memoryHotAddEnabled = memory['hotadd'] # depends on [control=['if'], data=['memory']]
|
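A stand-alone sketch of the unit handling above: normalise a `{'size': ..., 'unit': ...}` mapping to megabytes, falling back to a bare `int` cast when the arithmetic fails (e.g. the size arrives as a string). This is a simplified reading, not a byte-for-byte copy of the row:

```python
def memory_to_mb(memory):
    try:
        unit = memory['unit'].lower()
        if unit == 'kb':
            return memory['size'] / 1024
        if unit == 'mb':
            return memory['size']
        if unit == 'gb':
            return int(float(memory['size']) * 1024)
    except (TypeError, ValueError):
        pass
    return int(memory['size'])

print(memory_to_mb({'size': 4, 'unit': 'GB'}))     # 4096
print(memory_to_mb({'size': 2048, 'unit': 'kb'}))  # 2.0
```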
def ystep(self):
r"""Minimise Augmented Lagrangian with respect to
:math:`\mathbf{y}`.
"""
self.Y = self.Pcn(self.AX + self.U)
|
def function[ystep, parameter[self]]:
constant[Minimise Augmented Lagrangian with respect to
:math:`\mathbf{y}`.
]
name[self].Y assign[=] call[name[self].Pcn, parameter[binary_operation[name[self].AX + name[self].U]]]
|
keyword[def] identifier[ystep] ( identifier[self] ):
literal[string]
identifier[self] . identifier[Y] = identifier[self] . identifier[Pcn] ( identifier[self] . identifier[AX] + identifier[self] . identifier[U] )
|
def ystep(self):
"""Minimise Augmented Lagrangian with respect to
:math:`\\mathbf{y}`.
"""
self.Y = self.Pcn(self.AX + self.U)
|
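A reading of the row above: if `Pcn` is Euclidean projection onto a constraint set C (an assumption; the names `AX` and `U` match scaled-form ADMM), the y-step computes

$$
\mathbf{Y}^{(k+1)} \;=\; \operatorname{proj}_C\!\bigl(A\mathbf{X}^{(k+1)} + \mathbf{U}^{(k)}\bigr)
\;=\; \arg\min_{\mathbf{Y}\in C}\,\bigl\|\mathbf{Y} - \bigl(A\mathbf{X}^{(k+1)} + \mathbf{U}^{(k)}\bigr)\bigr\|_2^2,
$$

i.e. the closest feasible point to $A\mathbf{X} + \mathbf{U}$, which is exactly `self.Pcn(self.AX + self.U)`.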
def authorize_redirect(self, oauth_scope, callback_uri=None,
ax_attrs=["name","email","language","username"]):
"""Authenticates and authorizes for the given Google resource.
Some of the available resources are:
* Gmail Contacts - http://www.google.com/m8/feeds/
* Calendar - http://www.google.com/calendar/feeds/
* Finance - http://finance.google.com/finance/feeds/
You can authorize multiple resources by separating the resource
URLs with a space.
"""
callback_uri = callback_uri or self.request.uri
args = self._openid_args(callback_uri, ax_attrs=ax_attrs,
oauth_scope=oauth_scope)
self.redirect(self._OPENID_ENDPOINT + "?" + urllib.urlencode(args))
|
def function[authorize_redirect, parameter[self, oauth_scope, callback_uri, ax_attrs]]:
constant[Authenticates and authorizes for the given Google resource.
Some of the available resources are:
* Gmail Contacts - http://www.google.com/m8/feeds/
* Calendar - http://www.google.com/calendar/feeds/
* Finance - http://finance.google.com/finance/feeds/
You can authorize multiple resources by separating the resource
URLs with a space.
]
variable[callback_uri] assign[=] <ast.BoolOp object at 0x7da20e956cb0>
variable[args] assign[=] call[name[self]._openid_args, parameter[name[callback_uri]]]
call[name[self].redirect, parameter[binary_operation[binary_operation[name[self]._OPENID_ENDPOINT + constant[?]] + call[name[urllib].urlencode, parameter[name[args]]]]]]
|
keyword[def] identifier[authorize_redirect] ( identifier[self] , identifier[oauth_scope] , identifier[callback_uri] = keyword[None] ,
identifier[ax_attrs] =[ literal[string] , literal[string] , literal[string] , literal[string] ]):
literal[string]
identifier[callback_uri] = identifier[callback_uri] keyword[or] identifier[self] . identifier[request] . identifier[uri]
identifier[args] = identifier[self] . identifier[_openid_args] ( identifier[callback_uri] , identifier[ax_attrs] = identifier[ax_attrs] ,
identifier[oauth_scope] = identifier[oauth_scope] )
identifier[self] . identifier[redirect] ( identifier[self] . identifier[_OPENID_ENDPOINT] + literal[string] + identifier[urllib] . identifier[urlencode] ( identifier[args] ))
|
def authorize_redirect(self, oauth_scope, callback_uri=None, ax_attrs=['name', 'email', 'language', 'username']):
"""Authenticates and authorizes for the given Google resource.
Some of the available resources are:
* Gmail Contacts - http://www.google.com/m8/feeds/
* Calendar - http://www.google.com/calendar/feeds/
* Finance - http://finance.google.com/finance/feeds/
You can authorize multiple resources by separating the resource
URLs with a space.
"""
callback_uri = callback_uri or self.request.uri
args = self._openid_args(callback_uri, ax_attrs=ax_attrs, oauth_scope=oauth_scope)
self.redirect(self._OPENID_ENDPOINT + '?' + urllib.urlencode(args))
|
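The redirect URL above is simply the endpoint plus a query string; `urllib.urlencode` is the Python 2 spelling, which lives at `urllib.parse.urlencode` on Python 3. A sketch with a hypothetical endpoint and a minimal set of OpenID parameters:

```python
from urllib.parse import urlencode  # Python 3 home of urllib.urlencode

OPENID_ENDPOINT = 'https://www.google.com/accounts/o8/ud'  # hypothetical value
args = {
    'openid.mode': 'checkid_setup',
    'openid.return_to': 'https://example.com/auth/callback',
}
redirect_url = OPENID_ENDPOINT + '?' + urlencode(args)
print(redirect_url)
```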
def make_ccw(points):
"""
Static method. Returns a counterclock wise ordered sequence of
points. If there are any repeated point, the method will raise
an error.
Due to the 3D character of the package, the order or the points
will be tried following this order:
1. z=0 pprojection
2. x=0 pprojection
3. y=0 pprojection
:param points: Points to form a polygon (xyz or xy)
:type points: ndarray with points (xyz or xy) in rows
:returns: ccw version of the points.
:rtype: ndarray (shape=(N, 2 or 3))
"""
from scipy.spatial import ConvexHull
from pyny3d.utils import sort_numpy
# Repeated points
points_aux = sort_numpy(points)
check = np.sum(np.abs(np.diff(points_aux, axis=0)), axis=1)
if check.min() == 0: raise ValueError('Repeated point: \n'+str(points))
# Convexity
hull = None
for cols in [(0, 1), (1, 2), (0, 2)]:
try:
hull = ConvexHull(points[:, cols])
except:
pass
if hull is not None: return points[hull.vertices]
if hull is None: raise ValueError('Wrong polygon: \n'+str(points))
|
def function[make_ccw, parameter[points]]:
constant[
Static method. Returns a counterclock wise ordered sequence of
points. If there are any repeated point, the method will raise
an error.
Due to the 3D character of the package, the order or the points
will be tried following this order:
1. z=0 pprojection
2. x=0 pprojection
3. y=0 pprojection
:param points: Points to form a polygon (xyz or xy)
:type points: ndarray with points (xyz or xy) in rows
:returns: ccw version of the points.
:rtype: ndarray (shape=(N, 2 or 3))
]
from relative_module[scipy.spatial] import module[ConvexHull]
from relative_module[pyny3d.utils] import module[sort_numpy]
variable[points_aux] assign[=] call[name[sort_numpy], parameter[name[points]]]
variable[check] assign[=] call[name[np].sum, parameter[call[name[np].abs, parameter[call[name[np].diff, parameter[name[points_aux]]]]]]]
if compare[call[name[check].min, parameter[]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da1b24ff880>
variable[hull] assign[=] constant[None]
for taget[name[cols]] in starred[list[[<ast.Tuple object at 0x7da1b24ad6f0>, <ast.Tuple object at 0x7da1b24ae9e0>, <ast.Tuple object at 0x7da18dc04df0>]]] begin[:]
<ast.Try object at 0x7da18dc05fc0>
if compare[name[hull] is_not constant[None]] begin[:]
return[call[name[points]][name[hull].vertices]]
if compare[name[hull] is constant[None]] begin[:]
<ast.Raise object at 0x7da18dc051b0>
|
keyword[def] identifier[make_ccw] ( identifier[points] ):
literal[string]
keyword[from] identifier[scipy] . identifier[spatial] keyword[import] identifier[ConvexHull]
keyword[from] identifier[pyny3d] . identifier[utils] keyword[import] identifier[sort_numpy]
identifier[points_aux] = identifier[sort_numpy] ( identifier[points] )
identifier[check] = identifier[np] . identifier[sum] ( identifier[np] . identifier[abs] ( identifier[np] . identifier[diff] ( identifier[points_aux] , identifier[axis] = literal[int] )), identifier[axis] = literal[int] )
keyword[if] identifier[check] . identifier[min] ()== literal[int] : keyword[raise] identifier[ValueError] ( literal[string] + identifier[str] ( identifier[points] ))
identifier[hull] = keyword[None]
keyword[for] identifier[cols] keyword[in] [( literal[int] , literal[int] ),( literal[int] , literal[int] ),( literal[int] , literal[int] )]:
keyword[try] :
identifier[hull] = identifier[ConvexHull] ( identifier[points] [:, identifier[cols] ])
keyword[except] :
keyword[pass]
keyword[if] identifier[hull] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[points] [ identifier[hull] . identifier[vertices] ]
keyword[if] identifier[hull] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] + identifier[str] ( identifier[points] ))
|
def make_ccw(points):
"""
Static method. Returns a counterclock wise ordered sequence of
points. If there are any repeated point, the method will raise
an error.
Due to the 3D character of the package, the order or the points
will be tried following this order:
1. z=0 pprojection
2. x=0 pprojection
3. y=0 pprojection
:param points: Points to form a polygon (xyz or xy)
:type points: ndarray with points (xyz or xy) in rows
:returns: ccw version of the points.
:rtype: ndarray (shape=(N, 2 or 3))
"""
from scipy.spatial import ConvexHull
from pyny3d.utils import sort_numpy # Repeated points
points_aux = sort_numpy(points)
check = np.sum(np.abs(np.diff(points_aux, axis=0)), axis=1)
if check.min() == 0:
raise ValueError('Repeated point: \n' + str(points)) # depends on [control=['if'], data=[]] # Convexity
hull = None
for cols in [(0, 1), (1, 2), (0, 2)]:
try:
hull = ConvexHull(points[:, cols]) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
if hull is not None:
return points[hull.vertices] # depends on [control=['if'], data=['hull']] # depends on [control=['for'], data=['cols']]
if hull is None:
raise ValueError('Wrong polygon: \n' + str(points)) # depends on [control=['if'], data=[]]
|
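The property `make_ccw` relies on: for 2-D input, scipy documents that `hull.vertices` is returned in counterclockwise order. A small check with a shuffled unit square:

```python
import numpy as np
from scipy.spatial import ConvexHull

points = np.array([[0, 0], [1, 1], [1, 0], [0, 1]], dtype=float)
hull = ConvexHull(points)
print(points[hull.vertices])
# rows come back ordered counterclockwise around the square
```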
def history_report(history, config=None, html=True):
"""
Test a model and save a history report.
Parameters
----------
history : memote.HistoryManager
The manager grants access to previous results.
config : dict, optional
The final test report configuration.
html : bool, optional
Whether to render the report as full HTML or JSON (default True).
"""
if config is None:
config = ReportConfiguration.load()
report = HistoryReport(history=history, configuration=config)
if html:
return report.render_html()
else:
return report.render_json()
|
def function[history_report, parameter[history, config, html]]:
constant[
Test a model and save a history report.
Parameters
----------
history : memote.HistoryManager
The manager grants access to previous results.
config : dict, optional
The final test report configuration.
html : bool, optional
Whether to render the report as full HTML or JSON (default True).
]
if compare[name[config] is constant[None]] begin[:]
variable[config] assign[=] call[name[ReportConfiguration].load, parameter[]]
variable[report] assign[=] call[name[HistoryReport], parameter[]]
if name[html] begin[:]
return[call[name[report].render_html, parameter[]]]
|
keyword[def] identifier[history_report] ( identifier[history] , identifier[config] = keyword[None] , identifier[html] = keyword[True] ):
literal[string]
keyword[if] identifier[config] keyword[is] keyword[None] :
identifier[config] = identifier[ReportConfiguration] . identifier[load] ()
identifier[report] = identifier[HistoryReport] ( identifier[history] = identifier[history] , identifier[configuration] = identifier[config] )
keyword[if] identifier[html] :
keyword[return] identifier[report] . identifier[render_html] ()
keyword[else] :
keyword[return] identifier[report] . identifier[render_json] ()
|
def history_report(history, config=None, html=True):
"""
Test a model and save a history report.
Parameters
----------
history : memote.HistoryManager
The manager grants access to previous results.
config : dict, optional
The final test report configuration.
html : bool, optional
Whether to render the report as full HTML or JSON (default True).
"""
if config is None:
config = ReportConfiguration.load() # depends on [control=['if'], data=['config']]
report = HistoryReport(history=history, configuration=config)
if html:
return report.render_html() # depends on [control=['if'], data=[]]
else:
return report.render_json()
|
def update_pass(user_id, newpass):
'''
Update the password of a user.
'''
out_dic = {'success': False, 'code': '00'}
entry = TabMember.update(user_pass=tools.md5(newpass)).where(TabMember.uid == user_id)
entry.execute()
out_dic['success'] = True
return out_dic
|
def function[update_pass, parameter[user_id, newpass]]:
constant[
Update the password of a user.
]
variable[out_dic] assign[=] dictionary[[<ast.Constant object at 0x7da1b04f59c0>, <ast.Constant object at 0x7da1b04f4610>], [<ast.Constant object at 0x7da1b04f61a0>, <ast.Constant object at 0x7da1b04f5c30>]]
variable[entry] assign[=] call[call[name[TabMember].update, parameter[]].where, parameter[compare[name[TabMember].uid equal[==] name[user_id]]]]
call[name[entry].execute, parameter[]]
call[name[out_dic]][constant[success]] assign[=] constant[True]
return[name[out_dic]]
|
keyword[def] identifier[update_pass] ( identifier[user_id] , identifier[newpass] ):
literal[string]
identifier[out_dic] ={ literal[string] : keyword[False] , literal[string] : literal[string] }
identifier[entry] = identifier[TabMember] . identifier[update] ( identifier[user_pass] = identifier[tools] . identifier[md5] ( identifier[newpass] )). identifier[where] ( identifier[TabMember] . identifier[uid] == identifier[user_id] )
identifier[entry] . identifier[execute] ()
identifier[out_dic] [ literal[string] ]= keyword[True]
keyword[return] identifier[out_dic]
|
def update_pass(user_id, newpass):
"""
Update the password of a user.
"""
out_dic = {'success': False, 'code': '00'}
entry = TabMember.update(user_pass=tools.md5(newpass)).where(TabMember.uid == user_id)
entry.execute()
out_dic['success'] = True
return out_dic
|
def distribution(self, **slice_kwargs):
"""
Calculates the number of papers in each slice, as defined by
``slice_kwargs``.
Examples
--------
.. code-block:: python
>>> corpus.distribution(step_size=1, window_size=1)
[5, 5]
Parameters
----------
slice_kwargs : kwargs
Keyword arguments to be passed to :meth:`.Corpus.slice`\.
Returns
-------
list
"""
values = []
keys = []
for key, size in self.slice(count_only=True, **slice_kwargs):
values.append(size)
keys.append(key)
return keys, values
|
def function[distribution, parameter[self]]:
constant[
Calculates the number of papers in each slice, as defined by
``slice_kwargs``.
Examples
--------
.. code-block:: python
>>> corpus.distribution(step_size=1, window_size=1)
[5, 5]
Parameters
----------
slice_kwargs : kwargs
Keyword arguments to be passed to :meth:`.Corpus.slice`\.
Returns
-------
list
]
variable[values] assign[=] list[[]]
variable[keys] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b11942b0>, <ast.Name object at 0x7da1b1195570>]]] in starred[call[name[self].slice, parameter[]]] begin[:]
call[name[values].append, parameter[name[size]]]
call[name[keys].append, parameter[name[key]]]
return[tuple[[<ast.Name object at 0x7da1b1194580>, <ast.Name object at 0x7da1b1194940>]]]
|
keyword[def] identifier[distribution] ( identifier[self] ,** identifier[slice_kwargs] ):
literal[string]
identifier[values] =[]
identifier[keys] =[]
keyword[for] identifier[key] , identifier[size] keyword[in] identifier[self] . identifier[slice] ( identifier[count_only] = keyword[True] ,** identifier[slice_kwargs] ):
identifier[values] . identifier[append] ( identifier[size] )
identifier[keys] . identifier[append] ( identifier[key] )
keyword[return] identifier[keys] , identifier[values]
|
def distribution(self, **slice_kwargs):
"""
Calculates the number of papers in each slice, as defined by
``slice_kwargs``.
Examples
--------
.. code-block:: python
>>> corpus.distribution(step_size=1, window_size=1)
[5, 5]
Parameters
----------
slice_kwargs : kwargs
Keyword arguments to be passed to :meth:`.Corpus.slice`\\.
Returns
-------
list
"""
values = []
keys = []
for (key, size) in self.slice(count_only=True, **slice_kwargs):
values.append(size)
keys.append(key) # depends on [control=['for'], data=[]]
return (keys, values)
|
def intersperse(iterable, element):
"""Generator yielding all elements of `iterable`, but with `element`
inserted between each two consecutive elements"""
iterable = iter(iterable)
yield next(iterable)
while True:
next_from_iterable = next(iterable)
yield element
yield next_from_iterable
|
def function[intersperse, parameter[iterable, element]]:
constant[Generator yielding all elements of `iterable`, but with `element`
inserted between each two consecutive elements]
variable[iterable] assign[=] call[name[iter], parameter[name[iterable]]]
<ast.Yield object at 0x7da204346530>
while constant[True] begin[:]
variable[next_from_iterable] assign[=] call[name[next], parameter[name[iterable]]]
<ast.Yield object at 0x7da2043461a0>
<ast.Yield object at 0x7da204346440>
|
keyword[def] identifier[intersperse] ( identifier[iterable] , identifier[element] ):
literal[string]
identifier[iterable] = identifier[iter] ( identifier[iterable] )
keyword[yield] identifier[next] ( identifier[iterable] )
keyword[while] keyword[True] :
identifier[next_from_iterable] = identifier[next] ( identifier[iterable] )
keyword[yield] identifier[element]
keyword[yield] identifier[next_from_iterable]
|
def intersperse(iterable, element):
"""Generator yielding all elements of `iterable`, but with `element`
inserted between each two consecutive elements"""
iterable = iter(iterable)
yield next(iterable)
while True:
next_from_iterable = next(iterable)
yield element
yield next_from_iterable # depends on [control=['while'], data=[]]
|
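One caveat on the row above: under PEP 479 (the default since Python 3.7), a StopIteration escaping a generator body is converted to RuntimeError, so the bare `next(iterable)` calls crash once the input is exhausted. A sketch of an equivalent generator that terminates cleanly:

```python
def intersperse_safe(iterable, element):
    # PEP 479-safe variant: catch exhaustion instead of letting
    # StopIteration escape from next().
    it = iter(iterable)
    try:
        yield next(it)
    except StopIteration:
        return
    for item in it:
        yield element
        yield item

print(list(intersperse_safe([1, 2, 3], 0)))  # [1, 0, 2, 0, 3]
```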
def pack(array, sub_field_array, mask, inplace=False):
""" Packs a sub field's array into another array using a mask
Parameters:
----------
array : numpy.ndarray
The array in which the sub field array will be packed into
array_in : numpy.ndarray
sub field array to pack
mask : mask (ie: 0b00001111)
Mask of the sub field
inplace : {bool}, optional
If true a new array is returned. (the default is False, which modifies the array in place)
Raises
------
OverflowError
If the values contained in the sub field array are greater than its mask's number of bits
allows
"""
lsb = least_significant_bit(mask)
max_value = int(mask >> lsb)
if sub_field_array.max() > max_value:
raise OverflowError(
"value ({}) is greater than allowed (max: {})".format(
sub_field_array.max(), max_value
)
)
if inplace:
array[:] = array & ~mask
array[:] = array | ((sub_field_array << lsb) & mask).astype(array.dtype)
else:
array = array & ~mask
return array | ((sub_field_array << lsb) & mask).astype(array.dtype)
|
def function[pack, parameter[array, sub_field_array, mask, inplace]]:
constant[ Packs a sub field's array into another array using a mask
Parameters:
----------
array : numpy.ndarray
The array in which the sub field array will be packed into
array_in : numpy.ndarray
sub field array to pack
mask : mask (ie: 0b00001111)
Mask of the sub field
inplace : {bool}, optional
If true a new array is returned. (the default is False, which modifies the array in place)
Raises
------
OverflowError
If the values contained in the sub field array are greater than its mask's number of bits
allows
]
variable[lsb] assign[=] call[name[least_significant_bit], parameter[name[mask]]]
variable[max_value] assign[=] call[name[int], parameter[binary_operation[name[mask] <ast.RShift object at 0x7da2590d6a40> name[lsb]]]]
if compare[call[name[sub_field_array].max, parameter[]] greater[>] name[max_value]] begin[:]
<ast.Raise object at 0x7da20c9911b0>
if name[inplace] begin[:]
call[name[array]][<ast.Slice object at 0x7da20c9911e0>] assign[=] binary_operation[name[array] <ast.BitAnd object at 0x7da2590d6b60> <ast.UnaryOp object at 0x7da20c990700>]
call[name[array]][<ast.Slice object at 0x7da18fe927d0>] assign[=] binary_operation[name[array] <ast.BitOr object at 0x7da2590d6aa0> call[binary_operation[binary_operation[name[sub_field_array] <ast.LShift object at 0x7da2590d69e0> name[lsb]] <ast.BitAnd object at 0x7da2590d6b60> name[mask]].astype, parameter[name[array].dtype]]]
|
keyword[def] identifier[pack] ( identifier[array] , identifier[sub_field_array] , identifier[mask] , identifier[inplace] = keyword[False] ):
literal[string]
identifier[lsb] = identifier[least_significant_bit] ( identifier[mask] )
identifier[max_value] = identifier[int] ( identifier[mask] >> identifier[lsb] )
keyword[if] identifier[sub_field_array] . identifier[max] ()> identifier[max_value] :
keyword[raise] identifier[OverflowError] (
literal[string] . identifier[format] (
identifier[sub_field_array] . identifier[max] (), identifier[max_value]
)
)
keyword[if] identifier[inplace] :
identifier[array] [:]= identifier[array] &~ identifier[mask]
identifier[array] [:]= identifier[array] |(( identifier[sub_field_array] << identifier[lsb] )& identifier[mask] ). identifier[astype] ( identifier[array] . identifier[dtype] )
keyword[else] :
identifier[array] = identifier[array] &~ identifier[mask]
keyword[return] identifier[array] |(( identifier[sub_field_array] << identifier[lsb] )& identifier[mask] ). identifier[astype] ( identifier[array] . identifier[dtype] )
|
def pack(array, sub_field_array, mask, inplace=False):
""" Packs a sub field's array into another array using a mask
Parameters:
----------
array : numpy.ndarray
The array in which the sub field array will be packed into
array_in : numpy.ndarray
sub field array to pack
mask : mask (ie: 0b00001111)
Mask of the sub field
inplace : {bool}, optional
If true a new array is returned. (the default is False, which modifies the array in place)
Raises
------
OverflowError
If the values contained in the sub field array are greater than its mask's number of bits
allows
"""
lsb = least_significant_bit(mask)
max_value = int(mask >> lsb)
if sub_field_array.max() > max_value:
raise OverflowError('value ({}) is greater than allowed (max: {})'.format(sub_field_array.max(), max_value)) # depends on [control=['if'], data=['max_value']]
if inplace:
array[:] = array & ~mask
array[:] = array | (sub_field_array << lsb & mask).astype(array.dtype) # depends on [control=['if'], data=[]]
else:
array = array & ~mask
return array | (sub_field_array << lsb & mask).astype(array.dtype)
|
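The helper `least_significant_bit` is not shown in the row; it presumably returns the bit index of the lowest set bit of the mask. A plain-integer sketch of the same shift-and-mask packing, with that helper stubbed via a two's-complement trick:

```python
def least_significant_bit(mask):
    # index of the lowest set bit, e.g. 0b00001100 -> 2 (assumed helper)
    return (mask & -mask).bit_length() - 1

mask = 0b00001100                  # the sub field occupies bits 2-3
lsb = least_significant_bit(mask)  # 2
max_value = mask >> lsb            # 0b11: largest storable sub-field value

value, sub = 0b11110000, 0b10
assert sub <= max_value            # the OverflowError guard above
packed = (value & ~mask) | ((sub << lsb) & mask)
print(bin(packed))                 # 0b11111000
```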
def is_sortable_index(self, index_name, catalog):
"""Returns whether the index is sortable
"""
index = self.get_index(index_name, catalog)
if not index:
return False
return index.meta_type in ["FieldIndex", "DateIndex"]
|
def function[is_sortable_index, parameter[self, index_name, catalog]]:
constant[Returns whether the index is sortable
]
variable[index] assign[=] call[name[self].get_index, parameter[name[index_name], name[catalog]]]
if <ast.UnaryOp object at 0x7da1b2317340> begin[:]
return[constant[False]]
return[compare[name[index].meta_type in list[[<ast.Constant object at 0x7da1b2315210>, <ast.Constant object at 0x7da1b2314490>]]]]
|
keyword[def] identifier[is_sortable_index] ( identifier[self] , identifier[index_name] , identifier[catalog] ):
literal[string]
identifier[index] = identifier[self] . identifier[get_index] ( identifier[index_name] , identifier[catalog] )
keyword[if] keyword[not] identifier[index] :
keyword[return] keyword[False]
keyword[return] identifier[index] . identifier[meta_type] keyword[in] [ literal[string] , literal[string] ]
|
def is_sortable_index(self, index_name, catalog):
"""Returns whether the index is sortable
"""
index = self.get_index(index_name, catalog)
if not index:
return False # depends on [control=['if'], data=[]]
return index.meta_type in ['FieldIndex', 'DateIndex']
|
def get_index(self, name):
"""get an index by name
TODO: Combine indexes of relevant catalogs depending on the portal_type
which is searched for.
"""
catalog = self.get_catalog()
index = catalog._catalog.getIndex(name)
logger.debug("get_index={} of catalog '{}' --> {}".format(
name, catalog.__name__, index))
return index
|
def function[get_index, parameter[self, name]]:
constant[get an index by name
TODO: Combine indexes of relevant catalogs depending on the portal_type
which is searched for.
]
variable[catalog] assign[=] call[name[self].get_catalog, parameter[]]
variable[index] assign[=] call[name[catalog]._catalog.getIndex, parameter[name[name]]]
call[name[logger].debug, parameter[call[constant[get_index={} of catalog '{}' --> {}].format, parameter[name[name], name[catalog].__name__, name[index]]]]]
return[name[index]]
|
keyword[def] identifier[get_index] ( identifier[self] , identifier[name] ):
literal[string]
identifier[catalog] = identifier[self] . identifier[get_catalog] ()
identifier[index] = identifier[catalog] . identifier[_catalog] . identifier[getIndex] ( identifier[name] )
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] (
identifier[name] , identifier[catalog] . identifier[__name__] , identifier[index] ))
keyword[return] identifier[index]
|
def get_index(self, name):
"""get an index by name
TODO: Combine indexes of relevant catalogs depending on the portal_type
which is searched for.
"""
catalog = self.get_catalog()
index = catalog._catalog.getIndex(name)
logger.debug("get_index={} of catalog '{}' --> {}".format(name, catalog.__name__, index))
return index
|
def dataframe_to_smp(dataframe,smp_filename,name_col="name",
datetime_col="datetime",value_col="value",
datetime_format="dd/mm/yyyy",
value_format="{0:15.6E}",
max_name_len=12):
""" write a dataframe as an smp file
Parameters
----------
dataframe : pandas.DataFrame
smp_filename : str
smp file to write
name_col: str
the column in the dataframe the marks the site namne
datetime_col: str
the column in the dataframe that is a datetime instance
value_col: str
the column in the dataframe that is the values
datetime_format: str
either 'dd/mm/yyyy' or 'mm/dd/yyy'
value_format: str
a python float-compatible format
"""
formatters = {"name":lambda x:"{0:<20s}".format(str(x)[:max_name_len]),
"value":lambda x:value_format.format(x)}
if datetime_format.lower().startswith("d"):
dt_fmt = "%d/%m/%Y %H:%M:%S"
elif datetime_format.lower().startswith("m"):
dt_fmt = "%m/%d/%Y %H:%M:%S"
else:
raise Exception("unrecognized datetime_format: " +\
"{0}".format(str(datetime_format)))
for col in [name_col,datetime_col,value_col]:
assert col in dataframe.columns
dataframe.loc[:,"datetime_str"] = dataframe.loc[:,"datetime"].\
apply(lambda x:x.strftime(dt_fmt))
if isinstance(smp_filename,str):
smp_filename = open(smp_filename,'w')
# need this to remove the leading space that pandas puts in front
s = dataframe.loc[:,[name_col,"datetime_str",value_col]].\
to_string(col_space=0,
formatters=formatters,
justify=None,
header=False,
index=False)
for ss in s.split('\n'):
smp_filename.write("{0:<s}\n".format(ss.strip()))
dataframe.pop("datetime_str")
|
def function[dataframe_to_smp, parameter[dataframe, smp_filename, name_col, datetime_col, value_col, datetime_format, value_format, max_name_len]]:
constant[ write a dataframe as an smp file
Parameters
----------
dataframe : pandas.DataFrame
smp_filename : str
smp file to write
name_col: str
the column in the dataframe the marks the site namne
datetime_col: str
the column in the dataframe that is a datetime instance
value_col: str
the column in the dataframe that is the values
datetime_format: str
either 'dd/mm/yyyy' or 'mm/dd/yyy'
value_format: str
a python float-compatible format
]
variable[formatters] assign[=] dictionary[[<ast.Constant object at 0x7da20e74b340>, <ast.Constant object at 0x7da20e748820>], [<ast.Lambda object at 0x7da20e74a890>, <ast.Lambda object at 0x7da20e7484f0>]]
if call[call[name[datetime_format].lower, parameter[]].startswith, parameter[constant[d]]] begin[:]
variable[dt_fmt] assign[=] constant[%d/%m/%Y %H:%M:%S]
for taget[name[col]] in starred[list[[<ast.Name object at 0x7da18ede6bc0>, <ast.Name object at 0x7da18ede44f0>, <ast.Name object at 0x7da18ede4a00>]]] begin[:]
assert[compare[name[col] in name[dataframe].columns]]
call[name[dataframe].loc][tuple[[<ast.Slice object at 0x7da18ede4820>, <ast.Constant object at 0x7da18ede6e90>]]] assign[=] call[call[name[dataframe].loc][tuple[[<ast.Slice object at 0x7da18ede48e0>, <ast.Constant object at 0x7da18ede5420>]]].apply, parameter[<ast.Lambda object at 0x7da18ede4c10>]]
if call[name[isinstance], parameter[name[smp_filename], name[str]]] begin[:]
variable[smp_filename] assign[=] call[name[open], parameter[name[smp_filename], constant[w]]]
variable[s] assign[=] call[call[name[dataframe].loc][tuple[[<ast.Slice object at 0x7da18ede5540>, <ast.List object at 0x7da18ede4550>]]].to_string, parameter[]]
for taget[name[ss]] in starred[call[name[s].split, parameter[constant[
]]]] begin[:]
call[name[smp_filename].write, parameter[call[constant[{0:<s}
].format, parameter[call[name[ss].strip, parameter[]]]]]]
call[name[dataframe].pop, parameter[constant[datetime_str]]]
|
keyword[def] identifier[dataframe_to_smp] ( identifier[dataframe] , identifier[smp_filename] , identifier[name_col] = literal[string] ,
identifier[datetime_col] = literal[string] , identifier[value_col] = literal[string] ,
identifier[datetime_format] = literal[string] ,
identifier[value_format] = literal[string] ,
identifier[max_name_len] = literal[int] ):
literal[string]
identifier[formatters] ={ literal[string] : keyword[lambda] identifier[x] : literal[string] . identifier[format] ( identifier[str] ( identifier[x] )[: identifier[max_name_len] ]),
literal[string] : keyword[lambda] identifier[x] : identifier[value_format] . identifier[format] ( identifier[x] )}
keyword[if] identifier[datetime_format] . identifier[lower] (). identifier[startswith] ( literal[string] ):
identifier[dt_fmt] = literal[string]
keyword[elif] identifier[datetime_format] . identifier[lower] (). identifier[startswith] ( literal[string] ):
identifier[dt_fmt] = literal[string]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] + literal[string] . identifier[format] ( identifier[str] ( identifier[datetime_format] )))
keyword[for] identifier[col] keyword[in] [ identifier[name_col] , identifier[datetime_col] , identifier[value_col] ]:
keyword[assert] identifier[col] keyword[in] identifier[dataframe] . identifier[columns]
identifier[dataframe] . identifier[loc] [:, literal[string] ]= identifier[dataframe] . identifier[loc] [:, literal[string] ]. identifier[apply] ( keyword[lambda] identifier[x] : identifier[x] . identifier[strftime] ( identifier[dt_fmt] ))
keyword[if] identifier[isinstance] ( identifier[smp_filename] , identifier[str] ):
identifier[smp_filename] = identifier[open] ( identifier[smp_filename] , literal[string] )
identifier[s] = identifier[dataframe] . identifier[loc] [:,[ identifier[name_col] , literal[string] , identifier[value_col] ]]. identifier[to_string] ( identifier[col_space] = literal[int] ,
identifier[formatters] = identifier[formatters] ,
identifier[justify] = keyword[None] ,
identifier[header] = keyword[False] ,
identifier[index] = keyword[False] )
keyword[for] identifier[ss] keyword[in] identifier[s] . identifier[split] ( literal[string] ):
identifier[smp_filename] . identifier[write] ( literal[string] . identifier[format] ( identifier[ss] . identifier[strip] ()))
identifier[dataframe] . identifier[pop] ( literal[string] )
|
def dataframe_to_smp(dataframe, smp_filename, name_col='name', datetime_col='datetime', value_col='value', datetime_format='dd/mm/yyyy', value_format='{0:15.6E}', max_name_len=12):
""" write a dataframe as an smp file
Parameters
----------
dataframe : pandas.DataFrame
smp_filename : str
smp file to write
name_col: str
the column in the dataframe the marks the site namne
datetime_col: str
the column in the dataframe that is a datetime instance
value_col: str
the column in the dataframe that is the values
datetime_format: str
either 'dd/mm/yyyy' or 'mm/dd/yyy'
value_format: str
a python float-compatible format
"""
formatters = {'name': lambda x: '{0:<20s}'.format(str(x)[:max_name_len]), 'value': lambda x: value_format.format(x)}
if datetime_format.lower().startswith('d'):
dt_fmt = '%d/%m/%Y %H:%M:%S' # depends on [control=['if'], data=[]]
elif datetime_format.lower().startswith('m'):
dt_fmt = '%m/%d/%Y %H:%M:%S' # depends on [control=['if'], data=[]]
else:
raise Exception('unrecognized datetime_format: ' + '{0}'.format(str(datetime_format)))
for col in [name_col, datetime_col, value_col]:
assert col in dataframe.columns # depends on [control=['for'], data=['col']]
dataframe.loc[:, 'datetime_str'] = dataframe.loc[:, 'datetime'].apply(lambda x: x.strftime(dt_fmt))
if isinstance(smp_filename, str):
smp_filename = open(smp_filename, 'w')
# need this to remove the leading space that pandas puts in front
s = dataframe.loc[:, [name_col, 'datetime_str', value_col]].to_string(col_space=0, formatters=formatters, justify=None, header=False, index=False)
for ss in s.split('\n'):
smp_filename.write('{0:<s}\n'.format(ss.strip())) # depends on [control=['for'], data=['ss']] # depends on [control=['if'], data=[]]
dataframe.pop('datetime_str')
|
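A usage sketch for the row above, assuming `dataframe_to_smp` is importable. Note that the body hard-codes `dataframe.loc[:, "datetime"]`, so the frame must carry a column literally named `datetime` holding datetime objects, regardless of `datetime_col`; it also accepts an already-open handle, since only `str` filenames are opened:

```python
import datetime
import io
import pandas as pd

df = pd.DataFrame({
    'name': ['well_01', 'well_01'],
    'datetime': [datetime.datetime(2020, 1, 1), datetime.datetime(2020, 1, 2)],
    'value': [1.5, 2.5],
})
buf = io.StringIO()
# dataframe_to_smp(df, buf)  # hypothetical call; writes lines like:
# well_01              01/01/2020 00:00:00    1.500000E+00
```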
def merge_to(self, target):
"""Starts merging the contents of this medium and all intermediate
differencing media in the chain to the given target medium.
The target medium must be either a descendant of this medium or
its ancestor (otherwise this method will immediately return a failure).
It follows that there are two logical directions of the merge operation:
from ancestor to descendant (*forward merge*) and from descendant to
ancestor (*backward merge*). Let us consider the following medium
chain:
Base <- Diff_1 <- Diff_2
Here, calling this method on the Base medium object with
Diff_2 as an argument will be a forward merge; calling it on
Diff_2 with Base as an argument will be a backward
merge. Note that in both cases the contents of the resulting medium
will be the same, the only difference is the medium object that takes
the result of the merge operation. In case of the forward merge in the
above example, the result will be written to Diff_2; in case of
the backward merge, the result will be written to Base. In
other words, the result of the operation is always stored in the target
medium.
Upon successful operation completion, the storage units of all media in
the chain between this (source) medium and the target medium, including
the source medium itself, will be automatically deleted and the
relevant medium objects (including this medium) will become
uninitialized. This means that any attempt to call any of
their methods or attributes will fail with the
"Object not ready" (E_ACCESSDENIED) error. Applied to the above
example, the forward merge of Base to Diff_2 will
delete and uninitialize both Base and Diff_1 media.
Note that Diff_2 in this case will become a base medium
itself since it will no longer be based on any other medium.
Considering the above, all of the following conditions must be met in
order for the merge operation to succeed:
Neither this (source) medium nor any intermediate
differencing medium in the chain between it and the target
medium is attached to any virtual machine.
Neither the source medium nor the target medium is an
:py:attr:`MediumType.immutable` medium.
The part of the medium tree from the source medium to the
target medium is a linear chain, i.e. all medium in this
chain have exactly one child which is the next medium in this
chain. The only exception from this rule is the target medium in
the forward merge operation; it is allowed to have any number of
child media because the merge operation will not change its
logical contents (as it is seen by the guest OS or by children).
None of the involved media are in
:py:attr:`MediumState.locked_read` or
:py:attr:`MediumState.locked_write` state.
This (source) medium and all intermediates will be placed to :py:attr:`MediumState.deleting` state and the target medium will be
placed to :py:attr:`MediumState.locked_write` state and for the
duration of this operation.
in target of type :class:`IMedium`
Target medium.
return progress of type :class:`IProgress`
Progress object to track the operation completion.
"""
if not isinstance(target, IMedium):
raise TypeError("target can only be an instance of type IMedium")
progress = self._call("mergeTo",
in_p=[target])
progress = IProgress(progress)
return progress
|
def function[merge_to, parameter[self, target]]:
constant[Starts merging the contents of this medium and all intermediate
differencing media in the chain to the given target medium.
The target medium must be either a descendant of this medium or
its ancestor (otherwise this method will immediately return a failure).
It follows that there are two logical directions of the merge operation:
from ancestor to descendant (*forward merge*) and from descendant to
ancestor (*backward merge*). Let us consider the following medium
chain:
Base <- Diff_1 <- Diff_2
Here, calling this method on the Base medium object with
Diff_2 as an argument will be a forward merge; calling it on
Diff_2 with Base as an argument will be a backward
merge. Note that in both cases the contents of the resulting medium
will be the same, the only difference is the medium object that takes
the result of the merge operation. In case of the forward merge in the
above example, the result will be written to Diff_2; in case of
the backward merge, the result will be written to Base. In
other words, the result of the operation is always stored in the target
medium.
Upon successful operation completion, the storage units of all media in
the chain between this (source) medium and the target medium, including
the source medium itself, will be automatically deleted and the
relevant medium objects (including this medium) will become
uninitialized. This means that any attempt to call any of
their methods or attributes will fail with the
"Object not ready" (E_ACCESSDENIED) error. Applied to the above
example, the forward merge of Base to Diff_2 will
delete and uninitialize both Base and Diff_1 media.
Note that Diff_2 in this case will become a base medium
itself since it will no longer be based on any other medium.
Considering the above, all of the following conditions must be met in
order for the merge operation to succeed:
Neither this (source) medium nor any intermediate
differencing medium in the chain between it and the target
medium is attached to any virtual machine.
Neither the source medium nor the target medium is an
:py:attr:`MediumType.immutable` medium.
The part of the medium tree from the source medium to the
        target medium is a linear chain, i.e. all media in this
chain have exactly one child which is the next medium in this
chain. The only exception from this rule is the target medium in
the forward merge operation; it is allowed to have any number of
child media because the merge operation will not change its
logical contents (as it is seen by the guest OS or by children).
None of the involved media are in
:py:attr:`MediumState.locked_read` or
:py:attr:`MediumState.locked_write` state.
        This (source) medium and all intermediates will be placed to :py:attr:`MediumState.deleting` state and the target medium will be
        placed to :py:attr:`MediumState.locked_write` state for the
        duration of this operation.
in target of type :class:`IMedium`
Target medium.
return progress of type :class:`IProgress`
Progress object to track the operation completion.
]
if <ast.UnaryOp object at 0x7da18eb54520> begin[:]
<ast.Raise object at 0x7da20c6c73d0>
variable[progress] assign[=] call[name[self]._call, parameter[constant[mergeTo]]]
variable[progress] assign[=] call[name[IProgress], parameter[name[progress]]]
return[name[progress]]
|
keyword[def] identifier[merge_to] ( identifier[self] , identifier[target] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[target] , identifier[IMedium] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[progress] = identifier[self] . identifier[_call] ( literal[string] ,
identifier[in_p] =[ identifier[target] ])
identifier[progress] = identifier[IProgress] ( identifier[progress] )
keyword[return] identifier[progress]
|
def merge_to(self, target):
"""Starts merging the contents of this medium and all intermediate
differencing media in the chain to the given target medium.
The target medium must be either a descendant of this medium or
its ancestor (otherwise this method will immediately return a failure).
It follows that there are two logical directions of the merge operation:
from ancestor to descendant (*forward merge*) and from descendant to
ancestor (*backward merge*). Let us consider the following medium
chain:
Base <- Diff_1 <- Diff_2
Here, calling this method on the Base medium object with
Diff_2 as an argument will be a forward merge; calling it on
Diff_2 with Base as an argument will be a backward
merge. Note that in both cases the contents of the resulting medium
will be the same, the only difference is the medium object that takes
the result of the merge operation. In case of the forward merge in the
above example, the result will be written to Diff_2; in case of
the backward merge, the result will be written to Base. In
other words, the result of the operation is always stored in the target
medium.
Upon successful operation completion, the storage units of all media in
the chain between this (source) medium and the target medium, including
the source medium itself, will be automatically deleted and the
relevant medium objects (including this medium) will become
uninitialized. This means that any attempt to call any of
their methods or attributes will fail with the
"Object not ready" (E_ACCESSDENIED) error. Applied to the above
example, the forward merge of Base to Diff_2 will
delete and uninitialize both Base and Diff_1 media.
Note that Diff_2 in this case will become a base medium
itself since it will no longer be based on any other medium.
Considering the above, all of the following conditions must be met in
order for the merge operation to succeed:
Neither this (source) medium nor any intermediate
differencing medium in the chain between it and the target
medium is attached to any virtual machine.
Neither the source medium nor the target medium is an
:py:attr:`MediumType.immutable` medium.
The part of the medium tree from the source medium to the
        target medium is a linear chain, i.e. all media in this
chain have exactly one child which is the next medium in this
chain. The only exception from this rule is the target medium in
the forward merge operation; it is allowed to have any number of
child media because the merge operation will not change its
logical contents (as it is seen by the guest OS or by children).
None of the involved media are in
:py:attr:`MediumState.locked_read` or
:py:attr:`MediumState.locked_write` state.
        This (source) medium and all intermediates will be placed to :py:attr:`MediumState.deleting` state and the target medium will be
        placed to :py:attr:`MediumState.locked_write` state for the
        duration of this operation.
in target of type :class:`IMedium`
Target medium.
return progress of type :class:`IProgress`
Progress object to track the operation completion.
"""
if not isinstance(target, IMedium):
raise TypeError('target can only be an instance of type IMedium') # depends on [control=['if'], data=[]]
progress = self._call('mergeTo', in_p=[target])
progress = IProgress(progress)
return progress
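
A hedged usage sketch of merge_to: the `virtualbox` Python bindings, the .vdi file names, and the pre-existing registered chain Base <- Diff_1 <- Diff_2 are all assumptions for illustration.

# Usage sketch only: assumes the `virtualbox` bindings and an existing,
# registered medium chain Base <- Diff_1 <- Diff_2 (paths are illustrative).
import virtualbox
from virtualbox.library import DeviceType, AccessMode

vbox = virtualbox.VirtualBox()
base = vbox.open_medium("Base.vdi", DeviceType.hard_disk,
                        AccessMode.read_write, False)
diff_2 = vbox.open_medium("Diff_2.vdi", DeviceType.hard_disk,
                          AccessMode.read_write, False)

# Forward merge: Base and Diff_1 are deleted and the result lands in Diff_2.
progress = base.merge_to(diff_2)
progress.wait_for_completion(-1)  # block until the merge finishes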
|
def send_data_to_server(self, data, time_out=5):
"""
Sends given data to the Server.
:param data: Data to send.
:type data: unicode
:param time_out: Connection timeout in seconds.
:type time_out: float
:return: Method success.
:rtype: bool
"""
if not data.endswith(self.__connection_end):
data = "{0}{1}".format(data, foundations.strings.to_string(self.__connection_end).decode("string_escape"))
connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
connection.settimeout(time_out)
connection.connect((foundations.strings.to_string(self.__address), int(self.__port)))
connection.send(data)
self.__engine.notifications_manager.notify(
"{0} | Socket connection command dispatched!".format(self.__class__.__name__))
connection.close()
return True
|
def function[send_data_to_server, parameter[self, data, time_out]]:
constant[
Sends given data to the Server.
:param data: Data to send.
:type data: unicode
:param time_out: Connection timeout in seconds.
:type time_out: float
:return: Method success.
:rtype: bool
]
if <ast.UnaryOp object at 0x7da1b0ab8280> begin[:]
variable[data] assign[=] call[constant[{0}{1}].format, parameter[name[data], call[call[name[foundations].strings.to_string, parameter[name[self].__connection_end]].decode, parameter[constant[string_escape]]]]]
variable[connection] assign[=] call[name[socket].socket, parameter[name[socket].AF_INET, name[socket].SOCK_STREAM]]
call[name[connection].settimeout, parameter[name[time_out]]]
call[name[connection].connect, parameter[tuple[[<ast.Call object at 0x7da1b0ab8c40>, <ast.Call object at 0x7da1b0ab9540>]]]]
call[name[connection].send, parameter[name[data]]]
call[name[self].__engine.notifications_manager.notify, parameter[call[constant[{0} | Socket connection command dispatched!].format, parameter[name[self].__class__.__name__]]]]
call[name[connection].close, parameter[]]
return[constant[True]]
|
keyword[def] identifier[send_data_to_server] ( identifier[self] , identifier[data] , identifier[time_out] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[data] . identifier[endswith] ( identifier[self] . identifier[__connection_end] ):
identifier[data] = literal[string] . identifier[format] ( identifier[data] , identifier[foundations] . identifier[strings] . identifier[to_string] ( identifier[self] . identifier[__connection_end] ). identifier[decode] ( literal[string] ))
identifier[connection] = identifier[socket] . identifier[socket] ( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[SOCK_STREAM] )
identifier[connection] . identifier[settimeout] ( identifier[time_out] )
identifier[connection] . identifier[connect] (( identifier[foundations] . identifier[strings] . identifier[to_string] ( identifier[self] . identifier[__address] ), identifier[int] ( identifier[self] . identifier[__port] )))
identifier[connection] . identifier[send] ( identifier[data] )
identifier[self] . identifier[__engine] . identifier[notifications_manager] . identifier[notify] (
literal[string] . identifier[format] ( identifier[self] . identifier[__class__] . identifier[__name__] ))
identifier[connection] . identifier[close] ()
keyword[return] keyword[True]
|
def send_data_to_server(self, data, time_out=5):
"""
Sends given data to the Server.
:param data: Data to send.
:type data: unicode
:param time_out: Connection timeout in seconds.
:type time_out: float
:return: Method success.
:rtype: bool
"""
if not data.endswith(self.__connection_end):
data = '{0}{1}'.format(data, foundations.strings.to_string(self.__connection_end).decode('string_escape')) # depends on [control=['if'], data=[]]
connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
connection.settimeout(time_out)
connection.connect((foundations.strings.to_string(self.__address), int(self.__port)))
connection.send(data)
self.__engine.notifications_manager.notify('{0} | Socket connection command dispatched!'.format(self.__class__.__name__))
connection.close()
return True
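
A minimal usage sketch, assuming a preconfigured instance (here called `tcp_client`, a hypothetical name) whose address, port, and connection terminator were set elsewhere; the command string depends entirely on what the listening server understands.

# Usage sketch only: `tcp_client` and the command string are illustrative.
if tcp_client.send_data_to_server("activate_application()", time_out=2.5):
    print("Command dispatched to the server.")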
|
def in_constraints(self):
"""
returns a list of the expressions in which this parameter constrains another
"""
expressions = []
for uniqueid in self._in_constraints:
expressions.append(self._bundle.get_parameter(context='constraint', uniqueid=uniqueid))
return expressions
|
def function[in_constraints, parameter[self]]:
constant[
returns a list of the expressions in which this parameter constrains another
]
variable[expressions] assign[=] list[[]]
for taget[name[uniqueid]] in starred[name[self]._in_constraints] begin[:]
call[name[expressions].append, parameter[call[name[self]._bundle.get_parameter, parameter[]]]]
return[name[expressions]]
|
keyword[def] identifier[in_constraints] ( identifier[self] ):
literal[string]
identifier[expressions] =[]
keyword[for] identifier[uniqueid] keyword[in] identifier[self] . identifier[_in_constraints] :
identifier[expressions] . identifier[append] ( identifier[self] . identifier[_bundle] . identifier[get_parameter] ( identifier[context] = literal[string] , identifier[uniqueid] = identifier[uniqueid] ))
keyword[return] identifier[expressions]
|
def in_constraints(self):
"""
returns a list of the expressions in which this parameter constrains another
"""
expressions = []
for uniqueid in self._in_constraints:
expressions.append(self._bundle.get_parameter(context='constraint', uniqueid=uniqueid)) # depends on [control=['for'], data=['uniqueid']]
return expressions
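
A minimal usage sketch; `param` is a hypothetical parameter object fetched from a bundle elsewhere.

# Usage sketch only: `param` is a hypothetical parameter instance.
for expr in param.in_constraints():
    # each item is a 'constraint'-context parameter that involves `param`
    print(expr)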
|
def get(cls, payment_service_provider_credential_id, custom_headers=None):
"""
:type api_context: context.ApiContext
:type payment_service_provider_credential_id: int
:type custom_headers: dict[str, str]|None
:rtype: BunqResponsePaymentServiceProviderCredential
"""
if custom_headers is None:
custom_headers = {}
api_client = client.ApiClient(cls._get_api_context())
endpoint_url = cls._ENDPOINT_URL_READ.format(
payment_service_provider_credential_id)
response_raw = api_client.get(endpoint_url, {}, custom_headers)
return BunqResponsePaymentServiceProviderCredential.cast_from_bunq_response(
cls._from_json(response_raw, cls._OBJECT_TYPE_GET)
)
|
def function[get, parameter[cls, payment_service_provider_credential_id, custom_headers]]:
constant[
:type api_context: context.ApiContext
:type payment_service_provider_credential_id: int
:type custom_headers: dict[str, str]|None
:rtype: BunqResponsePaymentServiceProviderCredential
]
if compare[name[custom_headers] is constant[None]] begin[:]
variable[custom_headers] assign[=] dictionary[[], []]
variable[api_client] assign[=] call[name[client].ApiClient, parameter[call[name[cls]._get_api_context, parameter[]]]]
variable[endpoint_url] assign[=] call[name[cls]._ENDPOINT_URL_READ.format, parameter[name[payment_service_provider_credential_id]]]
variable[response_raw] assign[=] call[name[api_client].get, parameter[name[endpoint_url], dictionary[[], []], name[custom_headers]]]
return[call[name[BunqResponsePaymentServiceProviderCredential].cast_from_bunq_response, parameter[call[name[cls]._from_json, parameter[name[response_raw], name[cls]._OBJECT_TYPE_GET]]]]]
|
keyword[def] identifier[get] ( identifier[cls] , identifier[payment_service_provider_credential_id] , identifier[custom_headers] = keyword[None] ):
literal[string]
keyword[if] identifier[custom_headers] keyword[is] keyword[None] :
identifier[custom_headers] ={}
identifier[api_client] = identifier[client] . identifier[ApiClient] ( identifier[cls] . identifier[_get_api_context] ())
identifier[endpoint_url] = identifier[cls] . identifier[_ENDPOINT_URL_READ] . identifier[format] (
identifier[payment_service_provider_credential_id] )
identifier[response_raw] = identifier[api_client] . identifier[get] ( identifier[endpoint_url] ,{}, identifier[custom_headers] )
keyword[return] identifier[BunqResponsePaymentServiceProviderCredential] . identifier[cast_from_bunq_response] (
identifier[cls] . identifier[_from_json] ( identifier[response_raw] , identifier[cls] . identifier[_OBJECT_TYPE_GET] )
)
|
def get(cls, payment_service_provider_credential_id, custom_headers=None):
"""
:type api_context: context.ApiContext
:type payment_service_provider_credential_id: int
:type custom_headers: dict[str, str]|None
:rtype: BunqResponsePaymentServiceProviderCredential
"""
if custom_headers is None:
custom_headers = {} # depends on [control=['if'], data=['custom_headers']]
api_client = client.ApiClient(cls._get_api_context())
endpoint_url = cls._ENDPOINT_URL_READ.format(payment_service_provider_credential_id)
response_raw = api_client.get(endpoint_url, {}, custom_headers)
return BunqResponsePaymentServiceProviderCredential.cast_from_bunq_response(cls._from_json(response_raw, cls._OBJECT_TYPE_GET))
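
A hedged usage sketch, assuming an ApiContext was created or restored beforehand so that `cls._get_api_context()` resolves; the numeric ID is illustrative.

# Usage sketch only: assumes a loaded bunq ApiContext; the ID is illustrative.
response = PaymentServiceProviderCredential.get(1234)
credential = response.value  # the unwrapped PaymentServiceProviderCredential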
|
def key(self):
"""Embedded supports curies."""
if self.curie is None:
return self.name
return ":".join((self.curie.name, self.name))
|
def function[key, parameter[self]]:
constant[Embedded supports curies.]
if compare[name[self].curie is constant[None]] begin[:]
return[name[self].name]
return[call[constant[:].join, parameter[tuple[[<ast.Attribute object at 0x7da1b1a4b8b0>, <ast.Attribute object at 0x7da1b1a48040>]]]]]
|
keyword[def] identifier[key] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[curie] keyword[is] keyword[None] :
keyword[return] identifier[self] . identifier[name]
keyword[return] literal[string] . identifier[join] (( identifier[self] . identifier[curie] . identifier[name] , identifier[self] . identifier[name] ))
|
def key(self):
"""Embedded supports curies."""
if self.curie is None:
return self.name # depends on [control=['if'], data=[]]
return ':'.join((self.curie.name, self.name))
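
The behaviour is easy to pin down with stand-in classes; the `Curie` and `Embedded` shims below are hypothetical and only mirror the two attributes the method reads.

# Self-contained sketch: the classes below are illustrative stand-ins.
class Curie:
    def __init__(self, name):
        self.name = name

class Embedded:
    def __init__(self, name, curie=None):
        self.name = name
        self.curie = curie

    def key(self):
        if self.curie is None:
            return self.name
        return ":".join((self.curie.name, self.name))

assert Embedded("orders").key() == "orders"
assert Embedded("orders", Curie("acme")).key() == "acme:orders"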
|
def is_logged_in(username=None):
"""Checks if user is logged in if `username`
is passed check if specified user is logged in
username can be a list"""
if username:
if not isinstance(username, (list, tuple)):
username = [username]
return 'simple_logged_in' in session and get_username() in username
return 'simple_logged_in' in session
|
def function[is_logged_in, parameter[username]]:
    constant[Checks whether a user is logged in. If `username`
    is passed, checks whether that specific user is logged in;
    `username` can be a list.]
if name[username] begin[:]
if <ast.UnaryOp object at 0x7da1b07bdc60> begin[:]
variable[username] assign[=] list[[<ast.Name object at 0x7da1b07bd300>]]
return[<ast.BoolOp object at 0x7da1b07bf070>]
return[compare[constant[simple_logged_in] in name[session]]]
|
keyword[def] identifier[is_logged_in] ( identifier[username] = keyword[None] ):
literal[string]
keyword[if] identifier[username] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[username] ,( identifier[list] , identifier[tuple] )):
identifier[username] =[ identifier[username] ]
keyword[return] literal[string] keyword[in] identifier[session] keyword[and] identifier[get_username] () keyword[in] identifier[username]
keyword[return] literal[string] keyword[in] identifier[session]
|
def is_logged_in(username=None):
"""Checks if user is logged in if `username`
is passed check if specified user is logged in
username can be a list"""
if username:
if not isinstance(username, (list, tuple)):
username = [username] # depends on [control=['if'], data=[]]
return 'simple_logged_in' in session and get_username() in username # depends on [control=['if'], data=[]]
return 'simple_logged_in' in session
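
A hedged sketch of how the check might gate a Flask view; the `app` object and the session wiring behind `get_username()` are assumed to exist elsewhere.

from flask import abort

# Usage sketch only: assumes a Flask `app` and the session-based login above.
@app.route("/admin")
def admin_page():
    if not is_logged_in(["admin", "root"]):  # any listed user may pass
        abort(403)
    return "welcome"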
|
def transpose(self):
"""Create a transpose of this matrix."""
ma4 = Matrix4(self.get_col(0),
self.get_col(1),
self.get_col(2),
self.get_col(3))
return ma4
|
def function[transpose, parameter[self]]:
constant[Create a transpose of this matrix.]
variable[ma4] assign[=] call[name[Matrix4], parameter[call[name[self].get_col, parameter[constant[0]]], call[name[self].get_col, parameter[constant[1]]], call[name[self].get_col, parameter[constant[2]]], call[name[self].get_col, parameter[constant[3]]]]]
return[name[ma4]]
|
keyword[def] identifier[transpose] ( identifier[self] ):
literal[string]
identifier[ma4] = identifier[Matrix4] ( identifier[self] . identifier[get_col] ( literal[int] ),
identifier[self] . identifier[get_col] ( literal[int] ),
identifier[self] . identifier[get_col] ( literal[int] ),
identifier[self] . identifier[get_col] ( literal[int] ))
keyword[return] identifier[ma4]
|
def transpose(self):
"""Create a transpose of this matrix."""
ma4 = Matrix4(self.get_col(0), self.get_col(1), self.get_col(2), self.get_col(3))
return ma4
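
The column-to-row idea generalizes to plain nested lists, as this self-contained illustration shows (no Matrix4 required).

# Self-contained illustration of the same column-to-row idea.
m = [[1, 2, 3, 4],
     [5, 6, 7, 8],
     [9, 10, 11, 12],
     [13, 14, 15, 16]]
transposed = [list(col) for col in zip(*m)]
assert transposed[0] == [1, 5, 9, 13]  # the first column became the first row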
|
def set_coordinates(self, x, y, z=None):
"""Set all coordinate dimensions at once."""
self.x = x
self.y = y
self.z = z
|
def function[set_coordinates, parameter[self, x, y, z]]:
constant[Set all coordinate dimensions at once.]
name[self].x assign[=] name[x]
name[self].y assign[=] name[y]
name[self].z assign[=] name[z]
|
keyword[def] identifier[set_coordinates] ( identifier[self] , identifier[x] , identifier[y] , identifier[z] = keyword[None] ):
literal[string]
identifier[self] . identifier[x] = identifier[x]
identifier[self] . identifier[y] = identifier[y]
identifier[self] . identifier[z] = identifier[z]
|
def set_coordinates(self, x, y, z=None):
"""Set all coordinate dimensions at once."""
self.x = x
self.y = y
self.z = z
|
def _parse_conf(conf_file=_DEFAULT_CONF):
'''
Parse a logrotate configuration file.
Includes will also be parsed, and their configuration will be stored in the
return dict, as if they were part of the main config file. A dict of which
configs came from which includes will be stored in the 'include files' dict
inside the return dict, for later reference by the user or module.
'''
ret = {}
mode = 'single'
multi_names = []
multi = {}
prev_comps = None
with salt.utils.files.fopen(conf_file, 'r') as ifile:
for line in ifile:
line = salt.utils.stringutils.to_unicode(line).strip()
if not line:
continue
if line.startswith('#'):
continue
comps = line.split()
if '{' in line and '}' not in line:
mode = 'multi'
if len(comps) == 1 and prev_comps:
multi_names = prev_comps
else:
multi_names = comps
multi_names.pop()
continue
if '}' in line:
mode = 'single'
for multi_name in multi_names:
ret[multi_name] = multi
multi_names = []
multi = {}
continue
if mode == 'single':
key = ret
else:
key = multi
if comps[0] == 'include':
if 'include files' not in ret:
ret['include files'] = {}
for include in os.listdir(comps[1]):
if include not in ret['include files']:
ret['include files'][include] = []
include_path = os.path.join(comps[1], include)
include_conf = _parse_conf(include_path)
for file_key in include_conf:
ret[file_key] = include_conf[file_key]
ret['include files'][include].append(file_key)
prev_comps = comps
if len(comps) > 2:
key[comps[0]] = ' '.join(comps[1:])
elif len(comps) > 1:
key[comps[0]] = _convert_if_int(comps[1])
else:
key[comps[0]] = True
return ret
|
def function[_parse_conf, parameter[conf_file]]:
constant[
Parse a logrotate configuration file.
Includes will also be parsed, and their configuration will be stored in the
return dict, as if they were part of the main config file. A dict of which
configs came from which includes will be stored in the 'include files' dict
inside the return dict, for later reference by the user or module.
]
variable[ret] assign[=] dictionary[[], []]
variable[mode] assign[=] constant[single]
variable[multi_names] assign[=] list[[]]
variable[multi] assign[=] dictionary[[], []]
variable[prev_comps] assign[=] constant[None]
with call[name[salt].utils.files.fopen, parameter[name[conf_file], constant[r]]] begin[:]
for taget[name[line]] in starred[name[ifile]] begin[:]
variable[line] assign[=] call[call[name[salt].utils.stringutils.to_unicode, parameter[name[line]]].strip, parameter[]]
if <ast.UnaryOp object at 0x7da18c4ccf10> begin[:]
continue
if call[name[line].startswith, parameter[constant[#]]] begin[:]
continue
variable[comps] assign[=] call[name[line].split, parameter[]]
if <ast.BoolOp object at 0x7da18c4cf7f0> begin[:]
variable[mode] assign[=] constant[multi]
if <ast.BoolOp object at 0x7da18c4cd030> begin[:]
variable[multi_names] assign[=] name[prev_comps]
continue
if compare[constant[}] in name[line]] begin[:]
variable[mode] assign[=] constant[single]
for taget[name[multi_name]] in starred[name[multi_names]] begin[:]
call[name[ret]][name[multi_name]] assign[=] name[multi]
variable[multi_names] assign[=] list[[]]
variable[multi] assign[=] dictionary[[], []]
continue
if compare[name[mode] equal[==] constant[single]] begin[:]
variable[key] assign[=] name[ret]
if compare[call[name[comps]][constant[0]] equal[==] constant[include]] begin[:]
if compare[constant[include files] <ast.NotIn object at 0x7da2590d7190> name[ret]] begin[:]
call[name[ret]][constant[include files]] assign[=] dictionary[[], []]
for taget[name[include]] in starred[call[name[os].listdir, parameter[call[name[comps]][constant[1]]]]] begin[:]
if compare[name[include] <ast.NotIn object at 0x7da2590d7190> call[name[ret]][constant[include files]]] begin[:]
call[call[name[ret]][constant[include files]]][name[include]] assign[=] list[[]]
variable[include_path] assign[=] call[name[os].path.join, parameter[call[name[comps]][constant[1]], name[include]]]
variable[include_conf] assign[=] call[name[_parse_conf], parameter[name[include_path]]]
for taget[name[file_key]] in starred[name[include_conf]] begin[:]
call[name[ret]][name[file_key]] assign[=] call[name[include_conf]][name[file_key]]
call[call[call[name[ret]][constant[include files]]][name[include]].append, parameter[name[file_key]]]
variable[prev_comps] assign[=] name[comps]
if compare[call[name[len], parameter[name[comps]]] greater[>] constant[2]] begin[:]
call[name[key]][call[name[comps]][constant[0]]] assign[=] call[constant[ ].join, parameter[call[name[comps]][<ast.Slice object at 0x7da18f7203d0>]]]
return[name[ret]]
|
keyword[def] identifier[_parse_conf] ( identifier[conf_file] = identifier[_DEFAULT_CONF] ):
literal[string]
identifier[ret] ={}
identifier[mode] = literal[string]
identifier[multi_names] =[]
identifier[multi] ={}
identifier[prev_comps] = keyword[None]
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[conf_file] , literal[string] ) keyword[as] identifier[ifile] :
keyword[for] identifier[line] keyword[in] identifier[ifile] :
identifier[line] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] ( identifier[line] ). identifier[strip] ()
keyword[if] keyword[not] identifier[line] :
keyword[continue]
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
keyword[continue]
identifier[comps] = identifier[line] . identifier[split] ()
keyword[if] literal[string] keyword[in] identifier[line] keyword[and] literal[string] keyword[not] keyword[in] identifier[line] :
identifier[mode] = literal[string]
keyword[if] identifier[len] ( identifier[comps] )== literal[int] keyword[and] identifier[prev_comps] :
identifier[multi_names] = identifier[prev_comps]
keyword[else] :
identifier[multi_names] = identifier[comps]
identifier[multi_names] . identifier[pop] ()
keyword[continue]
keyword[if] literal[string] keyword[in] identifier[line] :
identifier[mode] = literal[string]
keyword[for] identifier[multi_name] keyword[in] identifier[multi_names] :
identifier[ret] [ identifier[multi_name] ]= identifier[multi]
identifier[multi_names] =[]
identifier[multi] ={}
keyword[continue]
keyword[if] identifier[mode] == literal[string] :
identifier[key] = identifier[ret]
keyword[else] :
identifier[key] = identifier[multi]
keyword[if] identifier[comps] [ literal[int] ]== literal[string] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[ret] :
identifier[ret] [ literal[string] ]={}
keyword[for] identifier[include] keyword[in] identifier[os] . identifier[listdir] ( identifier[comps] [ literal[int] ]):
keyword[if] identifier[include] keyword[not] keyword[in] identifier[ret] [ literal[string] ]:
identifier[ret] [ literal[string] ][ identifier[include] ]=[]
identifier[include_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[comps] [ literal[int] ], identifier[include] )
identifier[include_conf] = identifier[_parse_conf] ( identifier[include_path] )
keyword[for] identifier[file_key] keyword[in] identifier[include_conf] :
identifier[ret] [ identifier[file_key] ]= identifier[include_conf] [ identifier[file_key] ]
identifier[ret] [ literal[string] ][ identifier[include] ]. identifier[append] ( identifier[file_key] )
identifier[prev_comps] = identifier[comps]
keyword[if] identifier[len] ( identifier[comps] )> literal[int] :
identifier[key] [ identifier[comps] [ literal[int] ]]= literal[string] . identifier[join] ( identifier[comps] [ literal[int] :])
keyword[elif] identifier[len] ( identifier[comps] )> literal[int] :
identifier[key] [ identifier[comps] [ literal[int] ]]= identifier[_convert_if_int] ( identifier[comps] [ literal[int] ])
keyword[else] :
identifier[key] [ identifier[comps] [ literal[int] ]]= keyword[True]
keyword[return] identifier[ret]
|
def _parse_conf(conf_file=_DEFAULT_CONF):
"""
Parse a logrotate configuration file.
Includes will also be parsed, and their configuration will be stored in the
return dict, as if they were part of the main config file. A dict of which
configs came from which includes will be stored in the 'include files' dict
inside the return dict, for later reference by the user or module.
"""
ret = {}
mode = 'single'
multi_names = []
multi = {}
prev_comps = None
with salt.utils.files.fopen(conf_file, 'r') as ifile:
for line in ifile:
line = salt.utils.stringutils.to_unicode(line).strip()
if not line:
continue # depends on [control=['if'], data=[]]
if line.startswith('#'):
continue # depends on [control=['if'], data=[]]
comps = line.split()
if '{' in line and '}' not in line:
mode = 'multi'
if len(comps) == 1 and prev_comps:
multi_names = prev_comps # depends on [control=['if'], data=[]]
else:
multi_names = comps
multi_names.pop()
continue # depends on [control=['if'], data=[]]
if '}' in line:
mode = 'single'
for multi_name in multi_names:
ret[multi_name] = multi # depends on [control=['for'], data=['multi_name']]
multi_names = []
multi = {}
continue # depends on [control=['if'], data=[]]
if mode == 'single':
key = ret # depends on [control=['if'], data=[]]
else:
key = multi
if comps[0] == 'include':
if 'include files' not in ret:
ret['include files'] = {} # depends on [control=['if'], data=['ret']]
for include in os.listdir(comps[1]):
if include not in ret['include files']:
ret['include files'][include] = [] # depends on [control=['if'], data=['include']]
include_path = os.path.join(comps[1], include)
include_conf = _parse_conf(include_path)
for file_key in include_conf:
ret[file_key] = include_conf[file_key]
ret['include files'][include].append(file_key) # depends on [control=['for'], data=['file_key']] # depends on [control=['for'], data=['include']] # depends on [control=['if'], data=[]]
prev_comps = comps
if len(comps) > 2:
key[comps[0]] = ' '.join(comps[1:]) # depends on [control=['if'], data=[]]
elif len(comps) > 1:
key[comps[0]] = _convert_if_int(comps[1]) # depends on [control=['if'], data=[]]
else:
key[comps[0]] = True # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['ifile']]
return ret
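
A hedged usage sketch; the path and keys are illustrative and depend on the logrotate configuration being parsed.

# Usage sketch only: path and keys are illustrative.
conf = _parse_conf('/etc/logrotate.conf')
print(conf.get('rotate'))          # single-value directive, int-converted
print(conf.get('/var/log/wtmp'))   # dict of directives from a { ... } block
print(conf.get('include files'))   # maps each include to the keys it defined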
|
def delete_user(self, user_id, **kwargs): # noqa: E501
"""Delete a user. # noqa: E501
An endpoint for deleting a user. **Example usage:** `curl -X DELETE https://api.us-east-1.mbedcloud.com/v3/users/{user-id} -H 'Authorization: Bearer API_KEY'` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass asynchronous=True
>>> thread = api.delete_user(user_id, asynchronous=True)
>>> result = thread.get()
:param asynchronous bool
:param str user_id: The ID of the user to be deleted. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('asynchronous'):
return self.delete_user_with_http_info(user_id, **kwargs) # noqa: E501
else:
(data) = self.delete_user_with_http_info(user_id, **kwargs) # noqa: E501
return data
|
def function[delete_user, parameter[self, user_id]]:
constant[Delete a user. # noqa: E501
An endpoint for deleting a user. **Example usage:** `curl -X DELETE https://api.us-east-1.mbedcloud.com/v3/users/{user-id} -H 'Authorization: Bearer API_KEY'` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass asynchronous=True
>>> thread = api.delete_user(user_id, asynchronous=True)
>>> result = thread.get()
:param asynchronous bool
:param str user_id: The ID of the user to be deleted. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[asynchronous]]] begin[:]
return[call[name[self].delete_user_with_http_info, parameter[name[user_id]]]]
|
keyword[def] identifier[delete_user] ( identifier[self] , identifier[user_id] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[delete_user_with_http_info] ( identifier[user_id] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[delete_user_with_http_info] ( identifier[user_id] ,** identifier[kwargs] )
keyword[return] identifier[data]
|
def delete_user(self, user_id, **kwargs): # noqa: E501
"Delete a user. # noqa: E501\n\n An endpoint for deleting a user. **Example usage:** `curl -X DELETE https://api.us-east-1.mbedcloud.com/v3/users/{user-id} -H 'Authorization: Bearer API_KEY'` # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass asynchronous=True\n >>> thread = api.delete_user(user_id, asynchronous=True)\n >>> result = thread.get()\n\n :param asynchronous bool\n :param str user_id: The ID of the user to be deleted. (required)\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('asynchronous'):
return self.delete_user_with_http_info(user_id, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.delete_user_with_http_info(user_id, **kwargs) # noqa: E501
return data
|
def is_linear(self):
"""
Tests whether all filters in the list are linear. CascadeFilter and
ParallelFilter instances are also linear if all filters they group are
linear.
"""
return all(isinstance(filt, LinearFilter) or
(hasattr(filt, "is_linear") and filt.is_linear())
for filt in self.callables)
|
def function[is_linear, parameter[self]]:
constant[
Tests whether all filters in the list are linear. CascadeFilter and
ParallelFilter instances are also linear if all filters they group are
linear.
]
return[call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b0617ee0>]]]
|
keyword[def] identifier[is_linear] ( identifier[self] ):
literal[string]
keyword[return] identifier[all] ( identifier[isinstance] ( identifier[filt] , identifier[LinearFilter] ) keyword[or]
( identifier[hasattr] ( identifier[filt] , literal[string] ) keyword[and] identifier[filt] . identifier[is_linear] ())
keyword[for] identifier[filt] keyword[in] identifier[self] . identifier[callables] )
|
def is_linear(self):
"""
Tests whether all filters in the list are linear. CascadeFilter and
ParallelFilter instances are also linear if all filters they group are
linear.
"""
return all((isinstance(filt, LinearFilter) or (hasattr(filt, 'is_linear') and filt.is_linear()) for filt in self.callables))
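
A hedged usage sketch assuming AudioLazy-style objects, where `z` builds linear Z-transform filters and `CascadeFilter` is list-like over the filters it groups.

# Usage sketch only: assumes AudioLazy-style objects; both grouped
# filters below are linear, so the cascade reports linear as well.
from audiolazy import z, CascadeFilter

filt = CascadeFilter([1 / (1 - 0.5 * z ** -1), 2 + z ** -2])
assert filt.is_linear()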
|
def replace_variables(sentence: List[str],
sentence_variables: Dict[str, str]) -> Tuple[List[str], List[str]]:
"""
Replaces abstract variables in text with their concrete counterparts.
"""
tokens = []
tags = []
for token in sentence:
if token not in sentence_variables:
tokens.append(token)
tags.append("O")
else:
for word in sentence_variables[token].split():
tokens.append(word)
tags.append(token)
return tokens, tags
|
def function[replace_variables, parameter[sentence, sentence_variables]]:
constant[
Replaces abstract variables in text with their concrete counterparts.
]
variable[tokens] assign[=] list[[]]
variable[tags] assign[=] list[[]]
for taget[name[token]] in starred[name[sentence]] begin[:]
if compare[name[token] <ast.NotIn object at 0x7da2590d7190> name[sentence_variables]] begin[:]
call[name[tokens].append, parameter[name[token]]]
call[name[tags].append, parameter[constant[O]]]
return[tuple[[<ast.Name object at 0x7da18f810b20>, <ast.Name object at 0x7da18f812c80>]]]
|
keyword[def] identifier[replace_variables] ( identifier[sentence] : identifier[List] [ identifier[str] ],
identifier[sentence_variables] : identifier[Dict] [ identifier[str] , identifier[str] ])-> identifier[Tuple] [ identifier[List] [ identifier[str] ], identifier[List] [ identifier[str] ]]:
literal[string]
identifier[tokens] =[]
identifier[tags] =[]
keyword[for] identifier[token] keyword[in] identifier[sentence] :
keyword[if] identifier[token] keyword[not] keyword[in] identifier[sentence_variables] :
identifier[tokens] . identifier[append] ( identifier[token] )
identifier[tags] . identifier[append] ( literal[string] )
keyword[else] :
keyword[for] identifier[word] keyword[in] identifier[sentence_variables] [ identifier[token] ]. identifier[split] ():
identifier[tokens] . identifier[append] ( identifier[word] )
identifier[tags] . identifier[append] ( identifier[token] )
keyword[return] identifier[tokens] , identifier[tags]
|
def replace_variables(sentence: List[str], sentence_variables: Dict[str, str]) -> Tuple[List[str], List[str]]:
"""
Replaces abstract variables in text with their concrete counterparts.
"""
tokens = []
tags = []
for token in sentence:
if token not in sentence_variables:
tokens.append(token)
tags.append('O') # depends on [control=['if'], data=['token']]
else:
for word in sentence_variables[token].split():
tokens.append(word)
tags.append(token) # depends on [control=['for'], data=['word']] # depends on [control=['for'], data=['token']]
return (tokens, tags)
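
A self-contained example of the tagging behaviour; the variable name and its value are made up.

# Self-contained example: the variable name and value are illustrative.
tokens, tags = replace_variables(
    ["flights", "to", "city_name0"],
    {"city_name0": "new york"},
)
assert tokens == ["flights", "to", "new", "york"]
assert tags == ["O", "O", "city_name0", "city_name0"]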
|
def generate_signed_url_v4(
credentials,
resource,
expiration,
api_access_endpoint=DEFAULT_ENDPOINT,
method="GET",
content_md5=None,
content_type=None,
response_type=None,
response_disposition=None,
generation=None,
headers=None,
query_parameters=None,
_request_timestamp=None, # for testing only
):
"""Generate a V4 signed URL to provide query-string auth'n to a resource.
.. note::
Assumes ``credentials`` implements the
:class:`google.auth.credentials.Signing` interface. Also assumes
``credentials`` has a ``service_account_email`` property which
identifies the credentials.
.. note::
If you are on Google Compute Engine, you can't generate a signed URL.
Follow `Issue 922`_ for updates on this. If you'd like to be able to
generate a signed URL from GCE, you can use a standard service account
from a JSON file rather than a GCE service account.
See headers `reference`_ for more details on optional arguments.
.. _Issue 922: https://github.com/GoogleCloudPlatform/\
google-cloud-python/issues/922
.. _reference: https://cloud.google.com/storage/docs/reference-headers
:type credentials: :class:`google.auth.credentials.Signing`
:param credentials: Credentials object with an associated private key to
sign text.
:type resource: str
:param resource: A pointer to a specific resource
(typically, ``/bucket-name/path/to/blob.txt``).
:type expiration: Union[Integer, datetime.datetime, datetime.timedelta]
:param expiration: Point in time when the signed URL should expire.
:type api_access_endpoint: str
:param api_access_endpoint: Optional URI base. Defaults to
"https://storage.googleapis.com/"
:type method: str
:param method: The HTTP verb that will be used when requesting the URL.
Defaults to ``'GET'``. If method is ``'RESUMABLE'`` then the
signature will additionally contain the `x-goog-resumable`
header, and the method changed to POST. See the signed URL
docs regarding this flow:
https://cloud.google.com/storage/docs/access-control/signed-urls
:type content_md5: str
:param content_md5: (Optional) The MD5 hash of the object referenced by
``resource``.
:type content_type: str
:param content_type: (Optional) The content type of the object referenced
by ``resource``.
:type response_type: str
:param response_type: (Optional) Content type of responses to requests for
the signed URL. Used to over-ride the content type of
the underlying resource.
:type response_disposition: str
:param response_disposition: (Optional) Content disposition of responses to
requests for the signed URL.
:type generation: str
:param generation: (Optional) A value that indicates which generation of
the resource to fetch.
:type headers: dict
:param headers:
(Optional) Additional HTTP headers to be included as part of the
signed URLs. See:
https://cloud.google.com/storage/docs/xml-api/reference-headers
Requests using the signed URL *must* pass the specified header
(name and value) with each request for the URL.
:type query_parameters: dict
:param query_parameters:
        (Optional) Additional query parameters to be included as part of the
signed URLs. See:
https://cloud.google.com/storage/docs/xml-api/reference-headers#query
:raises: :exc:`TypeError` when expiration is not a valid type.
:raises: :exc:`AttributeError` if credentials is not an instance
of :class:`google.auth.credentials.Signing`.
:rtype: str
:returns: A signed URL you can use to access the resource
until expiration.
"""
ensure_signed_credentials(credentials)
expiration_seconds = get_expiration_seconds_v4(expiration)
if _request_timestamp is None:
now = NOW()
request_timestamp = now.strftime("%Y%m%dT%H%M%SZ")
datestamp = now.date().strftime("%Y%m%d")
else:
request_timestamp = _request_timestamp
datestamp = _request_timestamp[:8]
client_email = credentials.signer_email
credential_scope = "{}/auto/storage/goog4_request".format(datestamp)
credential = "{}/{}".format(client_email, credential_scope)
if headers is None:
headers = {}
if content_type is not None:
headers["Content-Type"] = content_type
if content_md5 is not None:
headers["Content-MD5"] = content_md5
header_names = [key.lower() for key in headers]
if "host" not in header_names:
headers["Host"] = "storage.googleapis.com"
if method.upper() == "RESUMABLE":
method = "POST"
headers["x-goog-resumable"] = "start"
canonical_headers, ordered_headers = get_canonical_headers(headers)
canonical_header_string = (
"\n".join(canonical_headers) + "\n"
) # Yes, Virginia, the extra newline is part of the spec.
signed_headers = ";".join([key for key, _ in ordered_headers])
if query_parameters is None:
query_parameters = {}
else:
query_parameters = {key: value or "" for key, value in query_parameters.items()}
query_parameters["X-Goog-Algorithm"] = "GOOG4-RSA-SHA256"
query_parameters["X-Goog-Credential"] = credential
query_parameters["X-Goog-Date"] = request_timestamp
query_parameters["X-Goog-Expires"] = expiration_seconds
query_parameters["X-Goog-SignedHeaders"] = signed_headers
if response_type is not None:
query_parameters["response-content-type"] = response_type
if response_disposition is not None:
query_parameters["response-content-disposition"] = response_disposition
if generation is not None:
query_parameters["generation"] = generation
ordered_query_parameters = sorted(query_parameters.items())
canonical_query_string = six.moves.urllib.parse.urlencode(ordered_query_parameters)
canonical_elements = [
method,
resource,
canonical_query_string,
canonical_header_string,
signed_headers,
"UNSIGNED-PAYLOAD",
]
canonical_request = "\n".join(canonical_elements)
canonical_request_hash = hashlib.sha256(
canonical_request.encode("ascii")
).hexdigest()
string_elements = [
"GOOG4-RSA-SHA256",
request_timestamp,
credential_scope,
canonical_request_hash,
]
string_to_sign = "\n".join(string_elements)
signature_bytes = credentials.sign_bytes(string_to_sign.encode("ascii"))
signature = binascii.hexlify(signature_bytes).decode("ascii")
return "{}{}?{}&X-Goog-Signature={}".format(
api_access_endpoint, resource, canonical_query_string, signature
)
|
def function[generate_signed_url_v4, parameter[credentials, resource, expiration, api_access_endpoint, method, content_md5, content_type, response_type, response_disposition, generation, headers, query_parameters, _request_timestamp]]:
constant[Generate a V4 signed URL to provide query-string auth'n to a resource.
.. note::
Assumes ``credentials`` implements the
:class:`google.auth.credentials.Signing` interface. Also assumes
``credentials`` has a ``service_account_email`` property which
identifies the credentials.
.. note::
If you are on Google Compute Engine, you can't generate a signed URL.
Follow `Issue 922`_ for updates on this. If you'd like to be able to
generate a signed URL from GCE, you can use a standard service account
from a JSON file rather than a GCE service account.
See headers `reference`_ for more details on optional arguments.
.. _Issue 922: https://github.com/GoogleCloudPlatform/ google-cloud-python/issues/922
.. _reference: https://cloud.google.com/storage/docs/reference-headers
:type credentials: :class:`google.auth.credentials.Signing`
:param credentials: Credentials object with an associated private key to
sign text.
:type resource: str
:param resource: A pointer to a specific resource
(typically, ``/bucket-name/path/to/blob.txt``).
:type expiration: Union[Integer, datetime.datetime, datetime.timedelta]
:param expiration: Point in time when the signed URL should expire.
:type api_access_endpoint: str
:param api_access_endpoint: Optional URI base. Defaults to
"https://storage.googleapis.com/"
:type method: str
:param method: The HTTP verb that will be used when requesting the URL.
Defaults to ``'GET'``. If method is ``'RESUMABLE'`` then the
signature will additionally contain the `x-goog-resumable`
header, and the method changed to POST. See the signed URL
docs regarding this flow:
https://cloud.google.com/storage/docs/access-control/signed-urls
:type content_md5: str
:param content_md5: (Optional) The MD5 hash of the object referenced by
``resource``.
:type content_type: str
:param content_type: (Optional) The content type of the object referenced
by ``resource``.
:type response_type: str
:param response_type: (Optional) Content type of responses to requests for
the signed URL. Used to over-ride the content type of
the underlying resource.
:type response_disposition: str
:param response_disposition: (Optional) Content disposition of responses to
requests for the signed URL.
:type generation: str
:param generation: (Optional) A value that indicates which generation of
the resource to fetch.
:type headers: dict
:param headers:
(Optional) Additional HTTP headers to be included as part of the
signed URLs. See:
https://cloud.google.com/storage/docs/xml-api/reference-headers
Requests using the signed URL *must* pass the specified header
(name and value) with each request for the URL.
:type query_parameters: dict
:param query_parameters:
        (Optional) Additional query parameters to be included as part of the
signed URLs. See:
https://cloud.google.com/storage/docs/xml-api/reference-headers#query
:raises: :exc:`TypeError` when expiration is not a valid type.
:raises: :exc:`AttributeError` if credentials is not an instance
of :class:`google.auth.credentials.Signing`.
:rtype: str
:returns: A signed URL you can use to access the resource
until expiration.
]
call[name[ensure_signed_credentials], parameter[name[credentials]]]
variable[expiration_seconds] assign[=] call[name[get_expiration_seconds_v4], parameter[name[expiration]]]
if compare[name[_request_timestamp] is constant[None]] begin[:]
variable[now] assign[=] call[name[NOW], parameter[]]
variable[request_timestamp] assign[=] call[name[now].strftime, parameter[constant[%Y%m%dT%H%M%SZ]]]
variable[datestamp] assign[=] call[call[name[now].date, parameter[]].strftime, parameter[constant[%Y%m%d]]]
variable[client_email] assign[=] name[credentials].signer_email
variable[credential_scope] assign[=] call[constant[{}/auto/storage/goog4_request].format, parameter[name[datestamp]]]
variable[credential] assign[=] call[constant[{}/{}].format, parameter[name[client_email], name[credential_scope]]]
if compare[name[headers] is constant[None]] begin[:]
variable[headers] assign[=] dictionary[[], []]
if compare[name[content_type] is_not constant[None]] begin[:]
call[name[headers]][constant[Content-Type]] assign[=] name[content_type]
if compare[name[content_md5] is_not constant[None]] begin[:]
call[name[headers]][constant[Content-MD5]] assign[=] name[content_md5]
variable[header_names] assign[=] <ast.ListComp object at 0x7da18f811150>
if compare[constant[host] <ast.NotIn object at 0x7da2590d7190> name[header_names]] begin[:]
call[name[headers]][constant[Host]] assign[=] constant[storage.googleapis.com]
if compare[call[name[method].upper, parameter[]] equal[==] constant[RESUMABLE]] begin[:]
variable[method] assign[=] constant[POST]
call[name[headers]][constant[x-goog-resumable]] assign[=] constant[start]
<ast.Tuple object at 0x7da18f8137f0> assign[=] call[name[get_canonical_headers], parameter[name[headers]]]
variable[canonical_header_string] assign[=] binary_operation[call[constant[
].join, parameter[name[canonical_headers]]] + constant[
]]
variable[signed_headers] assign[=] call[constant[;].join, parameter[<ast.ListComp object at 0x7da18f811f30>]]
if compare[name[query_parameters] is constant[None]] begin[:]
variable[query_parameters] assign[=] dictionary[[], []]
call[name[query_parameters]][constant[X-Goog-Algorithm]] assign[=] constant[GOOG4-RSA-SHA256]
call[name[query_parameters]][constant[X-Goog-Credential]] assign[=] name[credential]
call[name[query_parameters]][constant[X-Goog-Date]] assign[=] name[request_timestamp]
call[name[query_parameters]][constant[X-Goog-Expires]] assign[=] name[expiration_seconds]
call[name[query_parameters]][constant[X-Goog-SignedHeaders]] assign[=] name[signed_headers]
if compare[name[response_type] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[response-content-type]] assign[=] name[response_type]
if compare[name[response_disposition] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[response-content-disposition]] assign[=] name[response_disposition]
if compare[name[generation] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[generation]] assign[=] name[generation]
variable[ordered_query_parameters] assign[=] call[name[sorted], parameter[call[name[query_parameters].items, parameter[]]]]
variable[canonical_query_string] assign[=] call[name[six].moves.urllib.parse.urlencode, parameter[name[ordered_query_parameters]]]
variable[canonical_elements] assign[=] list[[<ast.Name object at 0x7da18f812f20>, <ast.Name object at 0x7da18f813a00>, <ast.Name object at 0x7da18f811060>, <ast.Name object at 0x7da18f813640>, <ast.Name object at 0x7da18f812b90>, <ast.Constant object at 0x7da18f810f10>]]
variable[canonical_request] assign[=] call[constant[
].join, parameter[name[canonical_elements]]]
variable[canonical_request_hash] assign[=] call[call[name[hashlib].sha256, parameter[call[name[canonical_request].encode, parameter[constant[ascii]]]]].hexdigest, parameter[]]
variable[string_elements] assign[=] list[[<ast.Constant object at 0x7da204344670>, <ast.Name object at 0x7da204345720>, <ast.Name object at 0x7da2043476d0>, <ast.Name object at 0x7da204345090>]]
variable[string_to_sign] assign[=] call[constant[
].join, parameter[name[string_elements]]]
variable[signature_bytes] assign[=] call[name[credentials].sign_bytes, parameter[call[name[string_to_sign].encode, parameter[constant[ascii]]]]]
variable[signature] assign[=] call[call[name[binascii].hexlify, parameter[name[signature_bytes]]].decode, parameter[constant[ascii]]]
return[call[constant[{}{}?{}&X-Goog-Signature={}].format, parameter[name[api_access_endpoint], name[resource], name[canonical_query_string], name[signature]]]]
|
keyword[def] identifier[generate_signed_url_v4] (
identifier[credentials] ,
identifier[resource] ,
identifier[expiration] ,
identifier[api_access_endpoint] = identifier[DEFAULT_ENDPOINT] ,
identifier[method] = literal[string] ,
identifier[content_md5] = keyword[None] ,
identifier[content_type] = keyword[None] ,
identifier[response_type] = keyword[None] ,
identifier[response_disposition] = keyword[None] ,
identifier[generation] = keyword[None] ,
identifier[headers] = keyword[None] ,
identifier[query_parameters] = keyword[None] ,
identifier[_request_timestamp] = keyword[None] ,
):
literal[string]
identifier[ensure_signed_credentials] ( identifier[credentials] )
identifier[expiration_seconds] = identifier[get_expiration_seconds_v4] ( identifier[expiration] )
keyword[if] identifier[_request_timestamp] keyword[is] keyword[None] :
identifier[now] = identifier[NOW] ()
identifier[request_timestamp] = identifier[now] . identifier[strftime] ( literal[string] )
identifier[datestamp] = identifier[now] . identifier[date] (). identifier[strftime] ( literal[string] )
keyword[else] :
identifier[request_timestamp] = identifier[_request_timestamp]
identifier[datestamp] = identifier[_request_timestamp] [: literal[int] ]
identifier[client_email] = identifier[credentials] . identifier[signer_email]
identifier[credential_scope] = literal[string] . identifier[format] ( identifier[datestamp] )
identifier[credential] = literal[string] . identifier[format] ( identifier[client_email] , identifier[credential_scope] )
keyword[if] identifier[headers] keyword[is] keyword[None] :
identifier[headers] ={}
keyword[if] identifier[content_type] keyword[is] keyword[not] keyword[None] :
identifier[headers] [ literal[string] ]= identifier[content_type]
keyword[if] identifier[content_md5] keyword[is] keyword[not] keyword[None] :
identifier[headers] [ literal[string] ]= identifier[content_md5]
identifier[header_names] =[ identifier[key] . identifier[lower] () keyword[for] identifier[key] keyword[in] identifier[headers] ]
keyword[if] literal[string] keyword[not] keyword[in] identifier[header_names] :
identifier[headers] [ literal[string] ]= literal[string]
keyword[if] identifier[method] . identifier[upper] ()== literal[string] :
identifier[method] = literal[string]
identifier[headers] [ literal[string] ]= literal[string]
identifier[canonical_headers] , identifier[ordered_headers] = identifier[get_canonical_headers] ( identifier[headers] )
identifier[canonical_header_string] =(
literal[string] . identifier[join] ( identifier[canonical_headers] )+ literal[string]
)
identifier[signed_headers] = literal[string] . identifier[join] ([ identifier[key] keyword[for] identifier[key] , identifier[_] keyword[in] identifier[ordered_headers] ])
keyword[if] identifier[query_parameters] keyword[is] keyword[None] :
identifier[query_parameters] ={}
keyword[else] :
identifier[query_parameters] ={ identifier[key] : identifier[value] keyword[or] literal[string] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[query_parameters] . identifier[items] ()}
identifier[query_parameters] [ literal[string] ]= literal[string]
identifier[query_parameters] [ literal[string] ]= identifier[credential]
identifier[query_parameters] [ literal[string] ]= identifier[request_timestamp]
identifier[query_parameters] [ literal[string] ]= identifier[expiration_seconds]
identifier[query_parameters] [ literal[string] ]= identifier[signed_headers]
keyword[if] identifier[response_type] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[response_type]
keyword[if] identifier[response_disposition] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[response_disposition]
keyword[if] identifier[generation] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[generation]
identifier[ordered_query_parameters] = identifier[sorted] ( identifier[query_parameters] . identifier[items] ())
identifier[canonical_query_string] = identifier[six] . identifier[moves] . identifier[urllib] . identifier[parse] . identifier[urlencode] ( identifier[ordered_query_parameters] )
identifier[canonical_elements] =[
identifier[method] ,
identifier[resource] ,
identifier[canonical_query_string] ,
identifier[canonical_header_string] ,
identifier[signed_headers] ,
literal[string] ,
]
identifier[canonical_request] = literal[string] . identifier[join] ( identifier[canonical_elements] )
identifier[canonical_request_hash] = identifier[hashlib] . identifier[sha256] (
identifier[canonical_request] . identifier[encode] ( literal[string] )
). identifier[hexdigest] ()
identifier[string_elements] =[
literal[string] ,
identifier[request_timestamp] ,
identifier[credential_scope] ,
identifier[canonical_request_hash] ,
]
identifier[string_to_sign] = literal[string] . identifier[join] ( identifier[string_elements] )
identifier[signature_bytes] = identifier[credentials] . identifier[sign_bytes] ( identifier[string_to_sign] . identifier[encode] ( literal[string] ))
identifier[signature] = identifier[binascii] . identifier[hexlify] ( identifier[signature_bytes] ). identifier[decode] ( literal[string] )
keyword[return] literal[string] . identifier[format] (
identifier[api_access_endpoint] , identifier[resource] , identifier[canonical_query_string] , identifier[signature]
)
|
def generate_signed_url_v4(credentials, resource, expiration, api_access_endpoint=DEFAULT_ENDPOINT, method='GET', content_md5=None, content_type=None, response_type=None, response_disposition=None, generation=None, headers=None, query_parameters=None, _request_timestamp=None): # for testing only
'Generate a V4 signed URL to provide query-string auth\'n to a resource.\n\n .. note::\n\n Assumes ``credentials`` implements the\n :class:`google.auth.credentials.Signing` interface. Also assumes\n ``credentials`` has a ``service_account_email`` property which\n identifies the credentials.\n\n .. note::\n\n If you are on Google Compute Engine, you can\'t generate a signed URL.\n Follow `Issue 922`_ for updates on this. If you\'d like to be able to\n generate a signed URL from GCE, you can use a standard service account\n from a JSON file rather than a GCE service account.\n\n See headers `reference`_ for more details on optional arguments.\n\n .. _Issue 922: https://github.com/GoogleCloudPlatform/ google-cloud-python/issues/922\n .. _reference: https://cloud.google.com/storage/docs/reference-headers\n\n :type credentials: :class:`google.auth.credentials.Signing`\n :param credentials: Credentials object with an associated private key to\n sign text.\n\n :type resource: str\n :param resource: A pointer to a specific resource\n (typically, ``/bucket-name/path/to/blob.txt``).\n\n :type expiration: Union[Integer, datetime.datetime, datetime.timedelta]\n :param expiration: Point in time when the signed URL should expire.\n\n :type api_access_endpoint: str\n :param api_access_endpoint: Optional URI base. Defaults to\n "https://storage.googleapis.com/"\n\n :type method: str\n :param method: The HTTP verb that will be used when requesting the URL.\n Defaults to ``\'GET\'``. If method is ``\'RESUMABLE\'`` then the\n signature will additionally contain the `x-goog-resumable`\n header, and the method changed to POST. See the signed URL\n docs regarding this flow:\n https://cloud.google.com/storage/docs/access-control/signed-urls\n\n\n :type content_md5: str\n :param content_md5: (Optional) The MD5 hash of the object referenced by\n ``resource``.\n\n :type content_type: str\n :param content_type: (Optional) The content type of the object referenced\n by ``resource``.\n\n :type response_type: str\n :param response_type: (Optional) Content type of responses to requests for\n the signed URL. Used to over-ride the content type of\n the underlying resource.\n\n :type response_disposition: str\n :param response_disposition: (Optional) Content disposition of responses to\n requests for the signed URL.\n\n :type generation: str\n :param generation: (Optional) A value that indicates which generation of\n the resource to fetch.\n\n :type headers: dict\n :param headers:\n (Optional) Additional HTTP headers to be included as part of the\n signed URLs. See:\n https://cloud.google.com/storage/docs/xml-api/reference-headers\n Requests using the signed URL *must* pass the specified header\n (name and value) with each request for the URL.\n\n :type query_parameters: dict\n :param query_parameters:\n (Optional) Additional query paramtersto be included as part of the\n signed URLs. See:\n https://cloud.google.com/storage/docs/xml-api/reference-headers#query\n\n :raises: :exc:`TypeError` when expiration is not a valid type.\n :raises: :exc:`AttributeError` if credentials is not an instance\n of :class:`google.auth.credentials.Signing`.\n\n :rtype: str\n :returns: A signed URL you can use to access the resource\n until expiration.\n '
ensure_signed_credentials(credentials)
expiration_seconds = get_expiration_seconds_v4(expiration)
if _request_timestamp is None:
now = NOW()
request_timestamp = now.strftime('%Y%m%dT%H%M%SZ')
datestamp = now.date().strftime('%Y%m%d') # depends on [control=['if'], data=[]]
else:
request_timestamp = _request_timestamp
datestamp = _request_timestamp[:8]
client_email = credentials.signer_email
credential_scope = '{}/auto/storage/goog4_request'.format(datestamp)
credential = '{}/{}'.format(client_email, credential_scope)
if headers is None:
headers = {} # depends on [control=['if'], data=['headers']]
if content_type is not None:
headers['Content-Type'] = content_type # depends on [control=['if'], data=['content_type']]
if content_md5 is not None:
headers['Content-MD5'] = content_md5 # depends on [control=['if'], data=['content_md5']]
header_names = [key.lower() for key in headers]
if 'host' not in header_names:
headers['Host'] = 'storage.googleapis.com' # depends on [control=['if'], data=[]]
if method.upper() == 'RESUMABLE':
method = 'POST'
headers['x-goog-resumable'] = 'start' # depends on [control=['if'], data=[]]
(canonical_headers, ordered_headers) = get_canonical_headers(headers)
canonical_header_string = '\n'.join(canonical_headers) + '\n' # Yes, Virginia, the extra newline is part of the spec.
signed_headers = ';'.join([key for (key, _) in ordered_headers])
if query_parameters is None:
query_parameters = {} # depends on [control=['if'], data=['query_parameters']]
else:
query_parameters = {key: value or '' for (key, value) in query_parameters.items()}
query_parameters['X-Goog-Algorithm'] = 'GOOG4-RSA-SHA256'
query_parameters['X-Goog-Credential'] = credential
query_parameters['X-Goog-Date'] = request_timestamp
query_parameters['X-Goog-Expires'] = expiration_seconds
query_parameters['X-Goog-SignedHeaders'] = signed_headers
if response_type is not None:
query_parameters['response-content-type'] = response_type # depends on [control=['if'], data=['response_type']]
if response_disposition is not None:
query_parameters['response-content-disposition'] = response_disposition # depends on [control=['if'], data=['response_disposition']]
if generation is not None:
query_parameters['generation'] = generation # depends on [control=['if'], data=['generation']]
ordered_query_parameters = sorted(query_parameters.items())
canonical_query_string = six.moves.urllib.parse.urlencode(ordered_query_parameters)
canonical_elements = [method, resource, canonical_query_string, canonical_header_string, signed_headers, 'UNSIGNED-PAYLOAD']
canonical_request = '\n'.join(canonical_elements)
canonical_request_hash = hashlib.sha256(canonical_request.encode('ascii')).hexdigest()
string_elements = ['GOOG4-RSA-SHA256', request_timestamp, credential_scope, canonical_request_hash]
string_to_sign = '\n'.join(string_elements)
signature_bytes = credentials.sign_bytes(string_to_sign.encode('ascii'))
signature = binascii.hexlify(signature_bytes).decode('ascii')
return '{}{}?{}&X-Goog-Signature={}'.format(api_access_endpoint, resource, canonical_query_string, signature)
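# A minimal, self-contained sketch of the V4 signing layers used above:
# canonical request -> SHA-256 -> string to sign. All values below (resource,
# timestamp, credential scope) are made-up placeholders; only the layering is
# the point, and the final RSA signing of the string is omitted.
import hashlib

canonical_request = '\n'.join([
    'GET',                                  # HTTP method
    '/example-bucket/object.txt',           # resource path
    'X-Goog-Algorithm=GOOG4-RSA-SHA256',    # canonical query string
    'host:storage.googleapis.com\n',        # canonical headers, extra newline included
    'host',                                 # signed header names
    'UNSIGNED-PAYLOAD',
])
request_hash = hashlib.sha256(canonical_request.encode('ascii')).hexdigest()
string_to_sign = '\n'.join([
    'GOOG4-RSA-SHA256',
    '20190101T000000Z',                     # request timestamp
    '20190101/auto/storage/goog4_request',  # credential scope
    request_hash,
])
print(string_to_sign)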
|
def parse_nvra(nvra):
"""
Parse RPM N-E:V-R.A string to a dict.
:param nvra: N-E:V-R.A string. This can be a file name or a file path including the '.rpm' suffix.
:type nvra: str
:rtype: dict, with "name", "epoch", "version", "release", and "arch" elements.
"""
if nvra.endswith(".rpm"):
nvra = nvra[:-4]
result = RPM_NVRA_RE.match(nvra).groupdict()
result["epoch"] = result["epoch"] or 0
result["epoch"] = int(result["epoch"])
return result
|
def function[parse_nvra, parameter[nvra]]:
constant[
Parse RPM N-E:V-R.A string to a dict.
:param nvra: N-E:V-R.A string. This can be a file name or a file path including the '.rpm' suffix.
:type nvra: str
:rtype: dict, with "name", "epoch", "version", "release", and "arch" elements.
]
if call[name[nvra].endswith, parameter[constant[.rpm]]] begin[:]
variable[nvra] assign[=] call[name[nvra]][<ast.Slice object at 0x7da18ede49a0>]
variable[result] assign[=] call[call[name[RPM_NVRA_RE].match, parameter[name[nvra]]].groupdict, parameter[]]
call[name[result]][constant[epoch]] assign[=] <ast.BoolOp object at 0x7da18dc054e0>
call[name[result]][constant[epoch]] assign[=] call[name[int], parameter[call[name[result]][constant[epoch]]]]
return[name[result]]
|
keyword[def] identifier[parse_nvra] ( identifier[nvra] ):
literal[string]
keyword[if] identifier[nvra] . identifier[endswith] ( literal[string] ):
identifier[nvra] = identifier[nvra] [:- literal[int] ]
identifier[result] = identifier[RPM_NVRA_RE] . identifier[match] ( identifier[nvra] ). identifier[groupdict] ()
identifier[result] [ literal[string] ]= identifier[result] [ literal[string] ] keyword[or] literal[int]
identifier[result] [ literal[string] ]= identifier[int] ( identifier[result] [ literal[string] ])
keyword[return] identifier[result]
|
def parse_nvra(nvra):
"""
Parse RPM N-E:V-R.A string to a dict.
:param nvra: N-E:V-R.A string. This can be a file name or a file path including the '.rpm' suffix.
:type nvra: str
:rtype: dict, with "name", "epoch", "version", "release", and "arch" elements.
"""
if nvra.endswith('.rpm'):
nvra = nvra[:-4] # depends on [control=['if'], data=[]]
result = RPM_NVRA_RE.match(nvra).groupdict()
result['epoch'] = result['epoch'] or 0
result['epoch'] = int(result['epoch'])
return result
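# A minimal usage sketch for parse_nvra. RPM_NVRA_RE is not shown in this
# record, so the pattern below is an assumed stand-in that captures the
# usual name-[epoch:]version-release.arch layout.
import re

RPM_NVRA_RE = re.compile(
    r'^(?P<name>.+)-(?:(?P<epoch>\d+):)?(?P<version>[^-]+)'
    r'-(?P<release>[^-]+)\.(?P<arch>[^.]+)$')

print(parse_nvra('bash-0:4.4.19-7.el8.x86_64.rpm'))
# -> {'name': 'bash', 'epoch': 0, 'version': '4.4.19',
#     'release': '7.el8', 'arch': 'x86_64'}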
|
def setup_page(self):
"""Setup config page widgets and options."""
settings_group = QGroupBox(_("Settings"))
hist_spin = self.create_spinbox(
_("History depth: "), _(" entries"),
'max_entries', min_=10, max_=10000, step=10,
tip=_("Set maximum line count"))
sourcecode_group = QGroupBox(_("Source code"))
wrap_mode_box = self.create_checkbox(_("Wrap lines"), 'wrap')
linenumbers_mode_box = self.create_checkbox(_("Show line numbers"),
'line_numbers')
go_to_eof_box = self.create_checkbox(
_("Scroll automatically to last entry"), 'go_to_eof')
settings_layout = QVBoxLayout()
settings_layout.addWidget(hist_spin)
settings_group.setLayout(settings_layout)
sourcecode_layout = QVBoxLayout()
sourcecode_layout.addWidget(wrap_mode_box)
sourcecode_layout.addWidget(linenumbers_mode_box)
sourcecode_layout.addWidget(go_to_eof_box)
sourcecode_group.setLayout(sourcecode_layout)
vlayout = QVBoxLayout()
vlayout.addWidget(settings_group)
vlayout.addWidget(sourcecode_group)
vlayout.addStretch(1)
self.setLayout(vlayout)
|
def function[setup_page, parameter[self]]:
    constant[Set up config page widgets and options.]
variable[settings_group] assign[=] call[name[QGroupBox], parameter[call[name[_], parameter[constant[Settings]]]]]
variable[hist_spin] assign[=] call[name[self].create_spinbox, parameter[call[name[_], parameter[constant[History depth: ]]], call[name[_], parameter[constant[ entries]]], constant[max_entries]]]
variable[sourcecode_group] assign[=] call[name[QGroupBox], parameter[call[name[_], parameter[constant[Source code]]]]]
variable[wrap_mode_box] assign[=] call[name[self].create_checkbox, parameter[call[name[_], parameter[constant[Wrap lines]]], constant[wrap]]]
variable[linenumbers_mode_box] assign[=] call[name[self].create_checkbox, parameter[call[name[_], parameter[constant[Show line numbers]]], constant[line_numbers]]]
variable[go_to_eof_box] assign[=] call[name[self].create_checkbox, parameter[call[name[_], parameter[constant[Scroll automatically to last entry]]], constant[go_to_eof]]]
variable[settings_layout] assign[=] call[name[QVBoxLayout], parameter[]]
call[name[settings_layout].addWidget, parameter[name[hist_spin]]]
call[name[settings_group].setLayout, parameter[name[settings_layout]]]
variable[sourcecode_layout] assign[=] call[name[QVBoxLayout], parameter[]]
call[name[sourcecode_layout].addWidget, parameter[name[wrap_mode_box]]]
call[name[sourcecode_layout].addWidget, parameter[name[linenumbers_mode_box]]]
call[name[sourcecode_layout].addWidget, parameter[name[go_to_eof_box]]]
call[name[sourcecode_group].setLayout, parameter[name[sourcecode_layout]]]
variable[vlayout] assign[=] call[name[QVBoxLayout], parameter[]]
call[name[vlayout].addWidget, parameter[name[settings_group]]]
call[name[vlayout].addWidget, parameter[name[sourcecode_group]]]
call[name[vlayout].addStretch, parameter[constant[1]]]
call[name[self].setLayout, parameter[name[vlayout]]]
|
keyword[def] identifier[setup_page] ( identifier[self] ):
literal[string]
identifier[settings_group] = identifier[QGroupBox] ( identifier[_] ( literal[string] ))
identifier[hist_spin] = identifier[self] . identifier[create_spinbox] (
identifier[_] ( literal[string] ), identifier[_] ( literal[string] ),
literal[string] , identifier[min_] = literal[int] , identifier[max_] = literal[int] , identifier[step] = literal[int] ,
identifier[tip] = identifier[_] ( literal[string] ))
identifier[sourcecode_group] = identifier[QGroupBox] ( identifier[_] ( literal[string] ))
identifier[wrap_mode_box] = identifier[self] . identifier[create_checkbox] ( identifier[_] ( literal[string] ), literal[string] )
identifier[linenumbers_mode_box] = identifier[self] . identifier[create_checkbox] ( identifier[_] ( literal[string] ),
literal[string] )
identifier[go_to_eof_box] = identifier[self] . identifier[create_checkbox] (
identifier[_] ( literal[string] ), literal[string] )
identifier[settings_layout] = identifier[QVBoxLayout] ()
identifier[settings_layout] . identifier[addWidget] ( identifier[hist_spin] )
identifier[settings_group] . identifier[setLayout] ( identifier[settings_layout] )
identifier[sourcecode_layout] = identifier[QVBoxLayout] ()
identifier[sourcecode_layout] . identifier[addWidget] ( identifier[wrap_mode_box] )
identifier[sourcecode_layout] . identifier[addWidget] ( identifier[linenumbers_mode_box] )
identifier[sourcecode_layout] . identifier[addWidget] ( identifier[go_to_eof_box] )
identifier[sourcecode_group] . identifier[setLayout] ( identifier[sourcecode_layout] )
identifier[vlayout] = identifier[QVBoxLayout] ()
identifier[vlayout] . identifier[addWidget] ( identifier[settings_group] )
identifier[vlayout] . identifier[addWidget] ( identifier[sourcecode_group] )
identifier[vlayout] . identifier[addStretch] ( literal[int] )
identifier[self] . identifier[setLayout] ( identifier[vlayout] )
|
def setup_page(self):
"""Setup config page widgets and options."""
settings_group = QGroupBox(_('Settings'))
hist_spin = self.create_spinbox(_('History depth: '), _(' entries'), 'max_entries', min_=10, max_=10000, step=10, tip=_('Set maximum line count'))
sourcecode_group = QGroupBox(_('Source code'))
wrap_mode_box = self.create_checkbox(_('Wrap lines'), 'wrap')
linenumbers_mode_box = self.create_checkbox(_('Show line numbers'), 'line_numbers')
go_to_eof_box = self.create_checkbox(_('Scroll automatically to last entry'), 'go_to_eof')
settings_layout = QVBoxLayout()
settings_layout.addWidget(hist_spin)
settings_group.setLayout(settings_layout)
sourcecode_layout = QVBoxLayout()
sourcecode_layout.addWidget(wrap_mode_box)
sourcecode_layout.addWidget(linenumbers_mode_box)
sourcecode_layout.addWidget(go_to_eof_box)
sourcecode_group.setLayout(sourcecode_layout)
vlayout = QVBoxLayout()
vlayout.addWidget(settings_group)
vlayout.addWidget(sourcecode_group)
vlayout.addStretch(1)
self.setLayout(vlayout)
|
def create_review_node(self, review):
"""
Return a review node.
"""
review_node = BNode()
type_triple = (review_node, RDF.type, self.spdx_namespace.Review)
self.graph.add(type_triple)
reviewer_node = Literal(review.reviewer.to_value())
self.graph.add((review_node, self.spdx_namespace.reviewer, reviewer_node))
reviewed_date_node = Literal(review.review_date_iso_format)
reviewed_triple = (review_node, self.spdx_namespace.reviewDate, reviewed_date_node)
self.graph.add(reviewed_triple)
if review.has_comment:
comment_node = Literal(review.comment)
comment_triple = (review_node, RDFS.comment, comment_node)
self.graph.add(comment_triple)
return review_node
|
def function[create_review_node, parameter[self, review]]:
constant[
Return a review node.
]
variable[review_node] assign[=] call[name[BNode], parameter[]]
variable[type_triple] assign[=] tuple[[<ast.Name object at 0x7da18ede7c10>, <ast.Attribute object at 0x7da18ede7be0>, <ast.Attribute object at 0x7da18ede4040>]]
call[name[self].graph.add, parameter[name[type_triple]]]
variable[reviewer_node] assign[=] call[name[Literal], parameter[call[name[review].reviewer.to_value, parameter[]]]]
call[name[self].graph.add, parameter[tuple[[<ast.Name object at 0x7da1b020d240>, <ast.Attribute object at 0x7da1b020c850>, <ast.Name object at 0x7da1b020c190>]]]]
variable[reviewed_date_node] assign[=] call[name[Literal], parameter[name[review].review_date_iso_format]]
variable[reviewed_triple] assign[=] tuple[[<ast.Name object at 0x7da1b01fe2c0>, <ast.Attribute object at 0x7da1b01ff4f0>, <ast.Name object at 0x7da1b01fdd80>]]
call[name[self].graph.add, parameter[name[reviewed_triple]]]
if name[review].has_comment begin[:]
variable[comment_node] assign[=] call[name[Literal], parameter[name[review].comment]]
variable[comment_triple] assign[=] tuple[[<ast.Name object at 0x7da1b01fd570>, <ast.Attribute object at 0x7da1b01ff160>, <ast.Name object at 0x7da1b01ff610>]]
call[name[self].graph.add, parameter[name[comment_triple]]]
return[name[review_node]]
|
keyword[def] identifier[create_review_node] ( identifier[self] , identifier[review] ):
literal[string]
identifier[review_node] = identifier[BNode] ()
identifier[type_triple] =( identifier[review_node] , identifier[RDF] . identifier[type] , identifier[self] . identifier[spdx_namespace] . identifier[Review] )
identifier[self] . identifier[graph] . identifier[add] ( identifier[type_triple] )
identifier[reviewer_node] = identifier[Literal] ( identifier[review] . identifier[reviewer] . identifier[to_value] ())
identifier[self] . identifier[graph] . identifier[add] (( identifier[review_node] , identifier[self] . identifier[spdx_namespace] . identifier[reviewer] , identifier[reviewer_node] ))
identifier[reviewed_date_node] = identifier[Literal] ( identifier[review] . identifier[review_date_iso_format] )
identifier[reviewed_triple] =( identifier[review_node] , identifier[self] . identifier[spdx_namespace] . identifier[reviewDate] , identifier[reviewed_date_node] )
identifier[self] . identifier[graph] . identifier[add] ( identifier[reviewed_triple] )
keyword[if] identifier[review] . identifier[has_comment] :
identifier[comment_node] = identifier[Literal] ( identifier[review] . identifier[comment] )
identifier[comment_triple] =( identifier[review_node] , identifier[RDFS] . identifier[comment] , identifier[comment_node] )
identifier[self] . identifier[graph] . identifier[add] ( identifier[comment_triple] )
keyword[return] identifier[review_node]
|
def create_review_node(self, review):
"""
Return a review node.
"""
review_node = BNode()
type_triple = (review_node, RDF.type, self.spdx_namespace.Review)
self.graph.add(type_triple)
reviewer_node = Literal(review.reviewer.to_value())
self.graph.add((review_node, self.spdx_namespace.reviewer, reviewer_node))
reviewed_date_node = Literal(review.review_date_iso_format)
reviewed_triple = (review_node, self.spdx_namespace.reviewDate, reviewed_date_node)
self.graph.add(reviewed_triple)
if review.has_comment:
comment_node = Literal(review.comment)
comment_triple = (review_node, RDFS.comment, comment_node)
self.graph.add(comment_triple) # depends on [control=['if'], data=[]]
return review_node
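# A minimal rdflib sketch of the triple pattern built by create_review_node,
# using a plain Graph and an assumed SPDX namespace URI in place of the
# writer object and its parsed Review instance.
from rdflib import BNode, Graph, Literal, Namespace, RDF, RDFS

spdx = Namespace('http://spdx.org/rdf/terms#')
graph = Graph()
review_node = BNode()
graph.add((review_node, RDF.type, spdx.Review))
graph.add((review_node, spdx.reviewer, Literal('Person: Jane Doe')))
graph.add((review_node, spdx.reviewDate, Literal('2019-01-01T00:00:00Z')))
graph.add((review_node, RDFS.comment, Literal('Looks good.')))  # only when a comment exists
print(graph.serialize(format='turtle'))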
|
def initialize_hashes(self):
"""Create new hashlib objects for each hash we are going to calculate."""
if ('md5' in self.hashes):
self.md5_calc = hashlib.md5()
if ('sha-1' in self.hashes):
self.sha1_calc = hashlib.sha1()
if ('sha-256' in self.hashes):
self.sha256_calc = hashlib.sha256()
|
def function[initialize_hashes, parameter[self]]:
constant[Create new hashlib objects for each hash we are going to calculate.]
if compare[constant[md5] in name[self].hashes] begin[:]
name[self].md5_calc assign[=] call[name[hashlib].md5, parameter[]]
if compare[constant[sha-1] in name[self].hashes] begin[:]
name[self].sha1_calc assign[=] call[name[hashlib].sha1, parameter[]]
if compare[constant[sha-256] in name[self].hashes] begin[:]
name[self].sha256_calc assign[=] call[name[hashlib].sha256, parameter[]]
|
keyword[def] identifier[initialize_hashes] ( identifier[self] ):
literal[string]
keyword[if] ( literal[string] keyword[in] identifier[self] . identifier[hashes] ):
identifier[self] . identifier[md5_calc] = identifier[hashlib] . identifier[md5] ()
keyword[if] ( literal[string] keyword[in] identifier[self] . identifier[hashes] ):
identifier[self] . identifier[sha1_calc] = identifier[hashlib] . identifier[sha1] ()
keyword[if] ( literal[string] keyword[in] identifier[self] . identifier[hashes] ):
identifier[self] . identifier[sha256_calc] = identifier[hashlib] . identifier[sha256] ()
|
def initialize_hashes(self):
"""Create new hashlib objects for each hash we are going to calculate."""
if 'md5' in self.hashes:
self.md5_calc = hashlib.md5() # depends on [control=['if'], data=[]]
if 'sha-1' in self.hashes:
self.sha1_calc = hashlib.sha1() # depends on [control=['if'], data=[]]
if 'sha-256' in self.hashes:
self.sha256_calc = hashlib.sha256() # depends on [control=['if'], data=[]]
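# A minimal sketch of the conditional-digest pattern above: only the hashes
# named in the set are instantiated, then each one is fed the same data.
import hashlib

wanted = {'md5', 'sha-256'}
calcs = {}
if 'md5' in wanted:
    calcs['md5'] = hashlib.md5()
if 'sha-1' in wanted:
    calcs['sha-1'] = hashlib.sha1()
if 'sha-256' in wanted:
    calcs['sha-256'] = hashlib.sha256()
for calc in calcs.values():
    calc.update(b'example data')
print({name: calc.hexdigest() for name, calc in calcs.items()})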
|
def on_connect(self, connection):
"Re-subscribe to any channels and patterns previously subscribed to"
# NOTE: for python3, we can't pass bytestrings as keyword arguments
# so we need to decode channel/pattern names back to unicode strings
# before passing them to [p]subscribe.
self.pending_unsubscribe_channels.clear()
self.pending_unsubscribe_patterns.clear()
if self.channels:
channels = {}
for k, v in iteritems(self.channels):
channels[self.encoder.decode(k, force=True)] = v
self.subscribe(**channels)
if self.patterns:
patterns = {}
for k, v in iteritems(self.patterns):
patterns[self.encoder.decode(k, force=True)] = v
self.psubscribe(**patterns)
|
def function[on_connect, parameter[self, connection]]:
constant[Re-subscribe to any channels and patterns previously subscribed to]
call[name[self].pending_unsubscribe_channels.clear, parameter[]]
call[name[self].pending_unsubscribe_patterns.clear, parameter[]]
if name[self].channels begin[:]
variable[channels] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da2041d8400>, <ast.Name object at 0x7da2041d8880>]]] in starred[call[name[iteritems], parameter[name[self].channels]]] begin[:]
call[name[channels]][call[name[self].encoder.decode, parameter[name[k]]]] assign[=] name[v]
call[name[self].subscribe, parameter[]]
if name[self].patterns begin[:]
variable[patterns] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da18f00cc10>, <ast.Name object at 0x7da18f00e0e0>]]] in starred[call[name[iteritems], parameter[name[self].patterns]]] begin[:]
call[name[patterns]][call[name[self].encoder.decode, parameter[name[k]]]] assign[=] name[v]
call[name[self].psubscribe, parameter[]]
|
keyword[def] identifier[on_connect] ( identifier[self] , identifier[connection] ):
literal[string]
identifier[self] . identifier[pending_unsubscribe_channels] . identifier[clear] ()
identifier[self] . identifier[pending_unsubscribe_patterns] . identifier[clear] ()
keyword[if] identifier[self] . identifier[channels] :
identifier[channels] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[self] . identifier[channels] ):
identifier[channels] [ identifier[self] . identifier[encoder] . identifier[decode] ( identifier[k] , identifier[force] = keyword[True] )]= identifier[v]
identifier[self] . identifier[subscribe] (** identifier[channels] )
keyword[if] identifier[self] . identifier[patterns] :
identifier[patterns] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[self] . identifier[patterns] ):
identifier[patterns] [ identifier[self] . identifier[encoder] . identifier[decode] ( identifier[k] , identifier[force] = keyword[True] )]= identifier[v]
identifier[self] . identifier[psubscribe] (** identifier[patterns] )
|
def on_connect(self, connection):
"""Re-subscribe to any channels and patterns previously subscribed to"""
# NOTE: for python3, we can't pass bytestrings as keyword arguments
# so we need to decode channel/pattern names back to unicode strings
# before passing them to [p]subscribe.
self.pending_unsubscribe_channels.clear()
self.pending_unsubscribe_patterns.clear()
if self.channels:
channels = {}
for (k, v) in iteritems(self.channels):
channels[self.encoder.decode(k, force=True)] = v # depends on [control=['for'], data=[]]
self.subscribe(**channels) # depends on [control=['if'], data=[]]
if self.patterns:
patterns = {}
for (k, v) in iteritems(self.patterns):
patterns[self.encoder.decode(k, force=True)] = v # depends on [control=['for'], data=[]]
self.psubscribe(**patterns) # depends on [control=['if'], data=[]]
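# A minimal sketch of the decode step above: byte channel names are turned
# back into str keys before being splatted as keyword arguments, since
# Python 3 rejects bytes as keyword-argument names. The handlers here are
# placeholder callables.
channels = {b'news': print, b'alerts': print}
decoded = {name.decode('utf-8'): handler for name, handler in channels.items()}
# pubsub.subscribe(**decoded) would then re-register each handler
print(sorted(decoded))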
|
def updateMetadata(self, new):
"""
Update the metadata stored for this broker.
Future connections made to the broker will use the host and port
defined in the new metadata. Any existing connection is not dropped,
however.
:param new:
        :class:`afkak.common.BrokerMetadata` with the same node ID as the
current metadata.
"""
if self.node_id != new.node_id:
raise ValueError("Broker metadata {!r} doesn't match node_id={}".format(new, self.node_id))
self.node_id = new.node_id
self.host = new.host
self.port = new.port
|
def function[updateMetadata, parameter[self, new]]:
constant[
Update the metadata stored for this broker.
Future connections made to the broker will use the host and port
defined in the new metadata. Any existing connection is not dropped,
however.
:param new:
        :class:`afkak.common.BrokerMetadata` with the same node ID as the
current metadata.
]
if compare[name[self].node_id not_equal[!=] name[new].node_id] begin[:]
<ast.Raise object at 0x7da1b04f9e10>
name[self].node_id assign[=] name[new].node_id
name[self].host assign[=] name[new].host
name[self].port assign[=] name[new].port
|
keyword[def] identifier[updateMetadata] ( identifier[self] , identifier[new] ):
literal[string]
keyword[if] identifier[self] . identifier[node_id] != identifier[new] . identifier[node_id] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[new] , identifier[self] . identifier[node_id] ))
identifier[self] . identifier[node_id] = identifier[new] . identifier[node_id]
identifier[self] . identifier[host] = identifier[new] . identifier[host]
identifier[self] . identifier[port] = identifier[new] . identifier[port]
|
def updateMetadata(self, new):
"""
Update the metadata stored for this broker.
Future connections made to the broker will use the host and port
defined in the new metadata. Any existing connection is not dropped,
however.
:param new:
        :class:`afkak.common.BrokerMetadata` with the same node ID as the
current metadata.
"""
if self.node_id != new.node_id:
raise ValueError("Broker metadata {!r} doesn't match node_id={}".format(new, self.node_id)) # depends on [control=['if'], data=[]]
self.node_id = new.node_id
self.host = new.host
self.port = new.port
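# A minimal sketch of the node-id guard above, using a namedtuple as a
# stand-in for afkak.common.BrokerMetadata.
from collections import namedtuple

BrokerMetadata = namedtuple('BrokerMetadata', 'node_id host port')
current = BrokerMetadata(node_id=1, host='kafka-a', port=9092)
stale = BrokerMetadata(node_id=2, host='kafka-b', port=9092)
try:
    if current.node_id != stale.node_id:
        raise ValueError("Broker metadata {!r} doesn't match node_id={}"
                         .format(stale, current.node_id))
except ValueError as exc:
    print(exc)  # metadata for a different node ID is rejected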
|
def is_valid_image(self, raw_data):
"""
Checks if the supplied raw data is valid image data.
:param str raw_data: A string representation of the image data.
:rtype: bool
:returns: ``True`` if ``raw_data`` is valid, ``False`` if not.
"""
buf = StringIO(raw_data)
try:
trial_image = Image.open(buf)
trial_image.verify()
except Exception:
# TODO: Get more specific with this exception handling.
return False
return True
|
def function[is_valid_image, parameter[self, raw_data]]:
constant[
Checks if the supplied raw data is valid image data.
:param str raw_data: A string representation of the image data.
:rtype: bool
:returns: ``True`` if ``raw_data`` is valid, ``False`` if not.
]
variable[buf] assign[=] call[name[StringIO], parameter[name[raw_data]]]
<ast.Try object at 0x7da18f810550>
return[constant[True]]
|
keyword[def] identifier[is_valid_image] ( identifier[self] , identifier[raw_data] ):
literal[string]
identifier[buf] = identifier[StringIO] ( identifier[raw_data] )
keyword[try] :
identifier[trial_image] = identifier[Image] . identifier[open] ( identifier[buf] )
identifier[trial_image] . identifier[verify] ()
keyword[except] identifier[Exception] :
keyword[return] keyword[False]
keyword[return] keyword[True]
|
def is_valid_image(self, raw_data):
"""
Checks if the supplied raw data is valid image data.
:param str raw_data: A string representation of the image data.
:rtype: bool
:returns: ``True`` if ``raw_data`` is valid, ``False`` if not.
"""
buf = StringIO(raw_data)
try:
trial_image = Image.open(buf)
trial_image.verify() # depends on [control=['try'], data=[]]
except Exception:
# TODO: Get more specific with this exception handling.
return False # depends on [control=['except'], data=[]]
return True
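# A minimal sketch of the Pillow verify() pattern above. io.BytesIO stands
# in for StringIO here because image data is bytes on Python 3; the record's
# StringIO usage is a Python 2 idiom.
import io
from PIL import Image

def looks_like_image(raw_data):
    try:
        Image.open(io.BytesIO(raw_data)).verify()
    except Exception:
        return False
    return True

print(looks_like_image(b'definitely not an image'))  # False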
|
def get_vowel(syll):
    '''Return the first vowel in 'syll'.'''
return re.search(r'([ieaouäöy]{1})', syll, flags=FLAGS).group(1).upper()
|
def function[get_vowel, parameter[syll]]:
    constant[Return the first vowel in 'syll'.]
return[call[call[call[name[re].search, parameter[constant[([ieaouäöy]{1})], name[syll]]].group, parameter[constant[1]]].upper, parameter[]]]
|
keyword[def] identifier[get_vowel] ( identifier[syll] ):
literal[string]
keyword[return] identifier[re] . identifier[search] ( literal[string] , identifier[syll] , identifier[flags] = identifier[FLAGS] ). identifier[group] ( literal[int] ). identifier[upper] ()
|
def get_vowel(syll):
"""Return the firstmost vowel in 'syll'."""
return re.search('([ieaouäöy]{1})', syll, flags=FLAGS).group(1).upper()
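# A minimal usage sketch for get_vowel. FLAGS is not shown in this record,
# so re.IGNORECASE is an assumed stand-in.
import re

FLAGS = re.IGNORECASE
print(re.search(r'([ieaouäöy]{1})', 'kau', flags=FLAGS).group(1).upper())  # 'A'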
|
def RemoveClass(self, class_name):
"""Removes an entry from the list of known classes.
Args:
class_name: A string with the class name that is to be removed.
Raises:
NonexistentMapping if there is no class with the specified class_name.
"""
if class_name not in self._class_mapping:
raise problems.NonexistentMapping(class_name)
del self._class_mapping[class_name]
|
def function[RemoveClass, parameter[self, class_name]]:
constant[Removes an entry from the list of known classes.
Args:
class_name: A string with the class name that is to be removed.
Raises:
NonexistentMapping if there is no class with the specified class_name.
]
if compare[name[class_name] <ast.NotIn object at 0x7da2590d7190> name[self]._class_mapping] begin[:]
<ast.Raise object at 0x7da18bcc9db0>
<ast.Delete object at 0x7da18bcc9ff0>
|
keyword[def] identifier[RemoveClass] ( identifier[self] , identifier[class_name] ):
literal[string]
keyword[if] identifier[class_name] keyword[not] keyword[in] identifier[self] . identifier[_class_mapping] :
keyword[raise] identifier[problems] . identifier[NonexistentMapping] ( identifier[class_name] )
keyword[del] identifier[self] . identifier[_class_mapping] [ identifier[class_name] ]
|
def RemoveClass(self, class_name):
"""Removes an entry from the list of known classes.
Args:
class_name: A string with the class name that is to be removed.
Raises:
NonexistentMapping if there is no class with the specified class_name.
"""
if class_name not in self._class_mapping:
raise problems.NonexistentMapping(class_name) # depends on [control=['if'], data=['class_name']]
del self._class_mapping[class_name]
|
def url_request(target_url, output_file):
"""
Use urllib to download the requested file from the target URL. Use the click progress bar to print download
progress
:param target_url: URL from which the file is to be downloaded
:param output_file: Name and path of local copy of file
"""
# Create the request
request = urllib.request.urlopen(target_url)
# Open the destination file to write
with open(output_file, 'wb') as targets:
# Calculate the total file size - will be used by the progress bar
total_length = int(request.headers.get('content-length'))
# Create a click progress bar using the total length calculated above
with click.progressbar(length=total_length,
label='Downloading files') as bar:
while True:
# Break up the download into chunks of 4096 bytes
data = request.read(4096)
# Break the loop when the download finishes/errors
if not data:
break
# Write the chunk to file
targets.write(data)
# Update the progress bar
bar.update(len(data))
|
def function[url_request, parameter[target_url, output_file]]:
constant[
Use urllib to download the requested file from the target URL. Use the click progress bar to print download
progress
:param target_url: URL from which the file is to be downloaded
:param output_file: Name and path of local copy of file
]
variable[request] assign[=] call[name[urllib].request.urlopen, parameter[name[target_url]]]
with call[name[open], parameter[name[output_file], constant[wb]]] begin[:]
variable[total_length] assign[=] call[name[int], parameter[call[name[request].headers.get, parameter[constant[content-length]]]]]
with call[name[click].progressbar, parameter[]] begin[:]
while constant[True] begin[:]
variable[data] assign[=] call[name[request].read, parameter[constant[4096]]]
if <ast.UnaryOp object at 0x7da2047e8250> begin[:]
break
call[name[targets].write, parameter[name[data]]]
call[name[bar].update, parameter[call[name[len], parameter[name[data]]]]]
|
keyword[def] identifier[url_request] ( identifier[target_url] , identifier[output_file] ):
literal[string]
identifier[request] = identifier[urllib] . identifier[request] . identifier[urlopen] ( identifier[target_url] )
keyword[with] identifier[open] ( identifier[output_file] , literal[string] ) keyword[as] identifier[targets] :
identifier[total_length] = identifier[int] ( identifier[request] . identifier[headers] . identifier[get] ( literal[string] ))
keyword[with] identifier[click] . identifier[progressbar] ( identifier[length] = identifier[total_length] ,
identifier[label] = literal[string] ) keyword[as] identifier[bar] :
keyword[while] keyword[True] :
identifier[data] = identifier[request] . identifier[read] ( literal[int] )
keyword[if] keyword[not] identifier[data] :
keyword[break]
identifier[targets] . identifier[write] ( identifier[data] )
identifier[bar] . identifier[update] ( identifier[len] ( identifier[data] ))
|
def url_request(target_url, output_file):
"""
Use urllib to download the requested file from the target URL. Use the click progress bar to print download
progress
:param target_url: URL from which the file is to be downloaded
:param output_file: Name and path of local copy of file
"""
# Create the request
request = urllib.request.urlopen(target_url)
# Open the destination file to write
with open(output_file, 'wb') as targets:
# Calculate the total file size - will be used by the progress bar
total_length = int(request.headers.get('content-length'))
# Create a click progress bar using the total length calculated above
with click.progressbar(length=total_length, label='Downloading files') as bar:
while True:
# Break up the download into chunks of 4096 bytes
data = request.read(4096)
# Break the loop when the download finishes/errors
if not data:
break # depends on [control=['if'], data=[]]
# Write the chunk to file
targets.write(data)
# Update the progress bar
bar.update(len(data)) # depends on [control=['while'], data=[]] # depends on [control=['with'], data=['bar']] # depends on [control=['with'], data=['targets']]
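# A minimal usage sketch for url_request. The URL is a placeholder; the
# server must report a Content-Length header, since the progress bar's total
# is taken from it before the 4096-byte chunk loop starts.
url_request('https://example.com/data/archive.tar.gz', 'archive.tar.gz')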
|
def weld_cast_array(array, weld_type, to_weld_type):
"""Cast array to a different type.
Parameters
----------
array : numpy.ndarray or WeldObject
Input data.
weld_type : WeldType
Type of each element in the input array.
to_weld_type : WeldType
Desired type.
Returns
-------
WeldObject
Representation of this computation.
"""
if not is_numeric(weld_type) or not is_numeric(to_weld_type):
raise TypeError('Cannot cast array of type={} to type={}'.format(weld_type, to_weld_type))
obj_id, weld_obj = create_weld_object(array)
weld_template = """map(
{array},
|e: {type}|
{to}(e)
)"""
weld_obj.weld_code = weld_template.format(array=obj_id,
type=weld_type,
to=to_weld_type)
return weld_obj
|
def function[weld_cast_array, parameter[array, weld_type, to_weld_type]]:
constant[Cast array to a different type.
Parameters
----------
array : numpy.ndarray or WeldObject
Input data.
weld_type : WeldType
Type of each element in the input array.
to_weld_type : WeldType
Desired type.
Returns
-------
WeldObject
Representation of this computation.
]
if <ast.BoolOp object at 0x7da1b0ad90c0> begin[:]
<ast.Raise object at 0x7da1b0ad8190>
<ast.Tuple object at 0x7da1b0ad8970> assign[=] call[name[create_weld_object], parameter[name[array]]]
variable[weld_template] assign[=] constant[map(
{array},
|e: {type}|
{to}(e)
)]
name[weld_obj].weld_code assign[=] call[name[weld_template].format, parameter[]]
return[name[weld_obj]]
|
keyword[def] identifier[weld_cast_array] ( identifier[array] , identifier[weld_type] , identifier[to_weld_type] ):
literal[string]
keyword[if] keyword[not] identifier[is_numeric] ( identifier[weld_type] ) keyword[or] keyword[not] identifier[is_numeric] ( identifier[to_weld_type] ):
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[weld_type] , identifier[to_weld_type] ))
identifier[obj_id] , identifier[weld_obj] = identifier[create_weld_object] ( identifier[array] )
identifier[weld_template] = literal[string]
identifier[weld_obj] . identifier[weld_code] = identifier[weld_template] . identifier[format] ( identifier[array] = identifier[obj_id] ,
identifier[type] = identifier[weld_type] ,
identifier[to] = identifier[to_weld_type] )
keyword[return] identifier[weld_obj]
|
def weld_cast_array(array, weld_type, to_weld_type):
"""Cast array to a different type.
Parameters
----------
array : numpy.ndarray or WeldObject
Input data.
weld_type : WeldType
Type of each element in the input array.
to_weld_type : WeldType
Desired type.
Returns
-------
WeldObject
Representation of this computation.
"""
if not is_numeric(weld_type) or not is_numeric(to_weld_type):
raise TypeError('Cannot cast array of type={} to type={}'.format(weld_type, to_weld_type)) # depends on [control=['if'], data=[]]
(obj_id, weld_obj) = create_weld_object(array)
weld_template = 'map(\n {array},\n |e: {type}|\n {to}(e)\n)'
weld_obj.weld_code = weld_template.format(array=obj_id, type=weld_type, to=to_weld_type)
return weld_obj
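# A minimal sketch of the template substitution above, shown without running
# Weld: the generated IR simply maps a cast over the input vector. The object
# id '_inp0' is a placeholder for whatever create_weld_object returns.
weld_template = 'map(\n    {array},\n    |e: {type}|\n        {to}(e)\n)'
print(weld_template.format(array='_inp0', type='i32', to='f64'))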
|
def replace(self, pat, rep):
"""Replace first occurrence of pat with rep in each element.
Parameters
----------
pat : str
rep : str
Returns
-------
Series
"""
check_type(pat, str)
check_type(rep, str)
return _series_str_result(self, weld_str_replace, pat=pat, rep=rep)
|
def function[replace, parameter[self, pat, rep]]:
constant[Replace first occurrence of pat with rep in each element.
Parameters
----------
pat : str
rep : str
Returns
-------
Series
]
call[name[check_type], parameter[name[pat], name[str]]]
call[name[check_type], parameter[name[rep], name[str]]]
return[call[name[_series_str_result], parameter[name[self], name[weld_str_replace]]]]
|
keyword[def] identifier[replace] ( identifier[self] , identifier[pat] , identifier[rep] ):
literal[string]
identifier[check_type] ( identifier[pat] , identifier[str] )
identifier[check_type] ( identifier[rep] , identifier[str] )
keyword[return] identifier[_series_str_result] ( identifier[self] , identifier[weld_str_replace] , identifier[pat] = identifier[pat] , identifier[rep] = identifier[rep] )
|
def replace(self, pat, rep):
"""Replace first occurrence of pat with rep in each element.
Parameters
----------
pat : str
rep : str
Returns
-------
Series
"""
check_type(pat, str)
check_type(rep, str)
return _series_str_result(self, weld_str_replace, pat=pat, rep=rep)
|
def dynamic_part_name(raml_resource, route_name, pk_field):
""" Generate a dynamic part for a resource :raml_resource:.
A dynamic part is generated using 2 parts: :route_name: of the
    resource and the dynamic part of the first dynamic child resource. If
    :raml_resource: has no dynamic child resources, 'id' is used as the
    2nd part.
    E.g. if your dynamic part on route 'stories' is named 'superId' then
    the dynamic part will be 'stories_superId'.
:param raml_resource: Instance of ramlfications.raml.ResourceNode for
which dynamic part name is being generated.
:param route_name: Cleaned name of :raml_resource:
:param pk_field: Model Primary Key field name.
"""
subresources = get_resource_children(raml_resource)
dynamic_uris = [res.path for res in subresources
if is_dynamic_uri(res.path)]
if dynamic_uris:
dynamic_part = extract_dynamic_part(dynamic_uris[0])
else:
dynamic_part = pk_field
return '_'.join([route_name, dynamic_part])
|
def function[dynamic_part_name, parameter[raml_resource, route_name, pk_field]]:
constant[ Generate a dynamic part for a resource :raml_resource:.
A dynamic part is generated using 2 parts: :route_name: of the
    resource and the dynamic part of the first dynamic child resource. If
    :raml_resource: has no dynamic child resources, 'id' is used as the
    2nd part.
    E.g. if your dynamic part on route 'stories' is named 'superId' then
    the dynamic part will be 'stories_superId'.
:param raml_resource: Instance of ramlfications.raml.ResourceNode for
which dynamic part name is being generated.
:param route_name: Cleaned name of :raml_resource:
:param pk_field: Model Primary Key field name.
]
variable[subresources] assign[=] call[name[get_resource_children], parameter[name[raml_resource]]]
variable[dynamic_uris] assign[=] <ast.ListComp object at 0x7da2054a4ee0>
if name[dynamic_uris] begin[:]
variable[dynamic_part] assign[=] call[name[extract_dynamic_part], parameter[call[name[dynamic_uris]][constant[0]]]]
return[call[constant[_].join, parameter[list[[<ast.Name object at 0x7da2054a5ed0>, <ast.Name object at 0x7da2054a5810>]]]]]
|
keyword[def] identifier[dynamic_part_name] ( identifier[raml_resource] , identifier[route_name] , identifier[pk_field] ):
literal[string]
identifier[subresources] = identifier[get_resource_children] ( identifier[raml_resource] )
identifier[dynamic_uris] =[ identifier[res] . identifier[path] keyword[for] identifier[res] keyword[in] identifier[subresources]
keyword[if] identifier[is_dynamic_uri] ( identifier[res] . identifier[path] )]
keyword[if] identifier[dynamic_uris] :
identifier[dynamic_part] = identifier[extract_dynamic_part] ( identifier[dynamic_uris] [ literal[int] ])
keyword[else] :
identifier[dynamic_part] = identifier[pk_field]
keyword[return] literal[string] . identifier[join] ([ identifier[route_name] , identifier[dynamic_part] ])
|
def dynamic_part_name(raml_resource, route_name, pk_field):
""" Generate a dynamic part for a resource :raml_resource:.
A dynamic part is generated using 2 parts: :route_name: of the
    resource and the dynamic part of the first dynamic child resource. If
    :raml_resource: has no dynamic child resources, 'id' is used as the
    2nd part.
    E.g. if your dynamic part on route 'stories' is named 'superId' then
    the dynamic part will be 'stories_superId'.
:param raml_resource: Instance of ramlfications.raml.ResourceNode for
which dynamic part name is being generated.
:param route_name: Cleaned name of :raml_resource:
:param pk_field: Model Primary Key field name.
"""
subresources = get_resource_children(raml_resource)
dynamic_uris = [res.path for res in subresources if is_dynamic_uri(res.path)]
if dynamic_uris:
dynamic_part = extract_dynamic_part(dynamic_uris[0]) # depends on [control=['if'], data=[]]
else:
dynamic_part = pk_field
return '_'.join([route_name, dynamic_part])
|
def _process_diff(self, yes_work, maybe_work, work_dir, ym_results_path,
yn_results_path, stats):
"""Returns statistics on the difference between the intersection of
`yes_work` and `maybe_work` and the intersection of `yes_work`
and "no" works.
:param yes_work: name of work for which stats are collected
:type yes_work: `str`
:param maybe_work: name of work being compared with `yes_work`
:type maybe_work: `str`
:param work_dir: directory where generated files are saved
:type work_dir: `str`
:param ym_results_path: path to results intersecting
`yes_work` with `maybe_work`
        :type ym_results_path: `str`
:param yn_results_path: path to results intersecting
`yes_work` with "no" works
:type yn_results_path: `str`
:param stats: data structure to hold the statistical data
:type stats: `dict`
:rtype: `dict`
"""
distinct_results_path = os.path.join(
work_dir, 'distinct_{}.csv'.format(maybe_work))
results = [yn_results_path, ym_results_path]
labels = [self._no_label, self._maybe_label]
self._run_query(distinct_results_path, self._store.diff_supplied,
[results, labels, self._tokenizer])
return self._update_stats('diff', work_dir, distinct_results_path,
yes_work, maybe_work, stats, SHARED, COMMON)
|
def function[_process_diff, parameter[self, yes_work, maybe_work, work_dir, ym_results_path, yn_results_path, stats]]:
constant[Returns statistics on the difference between the intersection of
`yes_work` and `maybe_work` and the intersection of `yes_work`
and "no" works.
:param yes_work: name of work for which stats are collected
:type yes_work: `str`
:param maybe_work: name of work being compared with `yes_work`
:type maybe_work: `str`
:param work_dir: directory where generated files are saved
:type work_dir: `str`
:param ym_results_path: path to results intersecting
`yes_work` with `maybe_work`
    :type ym_results_path: `str`
:param yn_results_path: path to results intersecting
`yes_work` with "no" works
:type yn_results_path: `str`
:param stats: data structure to hold the statistical data
:type stats: `dict`
:rtype: `dict`
]
variable[distinct_results_path] assign[=] call[name[os].path.join, parameter[name[work_dir], call[constant[distinct_{}.csv].format, parameter[name[maybe_work]]]]]
variable[results] assign[=] list[[<ast.Name object at 0x7da1b198d360>, <ast.Name object at 0x7da1b198c7f0>]]
variable[labels] assign[=] list[[<ast.Attribute object at 0x7da1b198c9d0>, <ast.Attribute object at 0x7da1b198dff0>]]
call[name[self]._run_query, parameter[name[distinct_results_path], name[self]._store.diff_supplied, list[[<ast.Name object at 0x7da1b198c580>, <ast.Name object at 0x7da1b198c940>, <ast.Attribute object at 0x7da1b198f5b0>]]]]
return[call[name[self]._update_stats, parameter[constant[diff], name[work_dir], name[distinct_results_path], name[yes_work], name[maybe_work], name[stats], name[SHARED], name[COMMON]]]]
|
keyword[def] identifier[_process_diff] ( identifier[self] , identifier[yes_work] , identifier[maybe_work] , identifier[work_dir] , identifier[ym_results_path] ,
identifier[yn_results_path] , identifier[stats] ):
literal[string]
identifier[distinct_results_path] = identifier[os] . identifier[path] . identifier[join] (
identifier[work_dir] , literal[string] . identifier[format] ( identifier[maybe_work] ))
identifier[results] =[ identifier[yn_results_path] , identifier[ym_results_path] ]
identifier[labels] =[ identifier[self] . identifier[_no_label] , identifier[self] . identifier[_maybe_label] ]
identifier[self] . identifier[_run_query] ( identifier[distinct_results_path] , identifier[self] . identifier[_store] . identifier[diff_supplied] ,
[ identifier[results] , identifier[labels] , identifier[self] . identifier[_tokenizer] ])
keyword[return] identifier[self] . identifier[_update_stats] ( literal[string] , identifier[work_dir] , identifier[distinct_results_path] ,
identifier[yes_work] , identifier[maybe_work] , identifier[stats] , identifier[SHARED] , identifier[COMMON] )
|
def _process_diff(self, yes_work, maybe_work, work_dir, ym_results_path, yn_results_path, stats):
"""Returns statistics on the difference between the intersection of
`yes_work` and `maybe_work` and the intersection of `yes_work`
and "no" works.
:param yes_work: name of work for which stats are collected
:type yes_work: `str`
:param maybe_work: name of work being compared with `yes_work`
:type maybe_work: `str`
:param work_dir: directory where generated files are saved
:type work_dir: `str`
:param ym_results_path: path to results intersecting
`yes_work` with `maybe_work`
    :type ym_results_path: `str`
:param yn_results_path: path to results intersecting
`yes_work` with "no" works
:type yn_results_path: `str`
:param stats: data structure to hold the statistical data
:type stats: `dict`
:rtype: `dict`
"""
distinct_results_path = os.path.join(work_dir, 'distinct_{}.csv'.format(maybe_work))
results = [yn_results_path, ym_results_path]
labels = [self._no_label, self._maybe_label]
self._run_query(distinct_results_path, self._store.diff_supplied, [results, labels, self._tokenizer])
return self._update_stats('diff', work_dir, distinct_results_path, yes_work, maybe_work, stats, SHARED, COMMON)
|
def fetch_items(self, category, **kwargs):
"""Fetch the bugs
:param category: the category of items to fetch
:param kwargs: backend arguments
:returns: a generator of items
"""
from_date = kwargs['from_date']
logger.info("Looking for bugs: '%s' updated from '%s'",
self.url, str(from_date))
nbugs = 0
for bug in self.__fetch_and_parse_bugs(from_date):
nbugs += 1
yield bug
logger.info("Fetch process completed: %s bugs fetched", nbugs)
|
def function[fetch_items, parameter[self, category]]:
constant[Fetch the bugs
:param category: the category of items to fetch
:param kwargs: backend arguments
:returns: a generator of items
]
variable[from_date] assign[=] call[name[kwargs]][constant[from_date]]
call[name[logger].info, parameter[constant[Looking for bugs: '%s' updated from '%s'], name[self].url, call[name[str], parameter[name[from_date]]]]]
variable[nbugs] assign[=] constant[0]
for taget[name[bug]] in starred[call[name[self].__fetch_and_parse_bugs, parameter[name[from_date]]]] begin[:]
<ast.AugAssign object at 0x7da1b020d630>
<ast.Yield object at 0x7da1b020cd30>
call[name[logger].info, parameter[constant[Fetch process completed: %s bugs fetched], name[nbugs]]]
|
keyword[def] identifier[fetch_items] ( identifier[self] , identifier[category] ,** identifier[kwargs] ):
literal[string]
identifier[from_date] = identifier[kwargs] [ literal[string] ]
identifier[logger] . identifier[info] ( literal[string] ,
identifier[self] . identifier[url] , identifier[str] ( identifier[from_date] ))
identifier[nbugs] = literal[int]
keyword[for] identifier[bug] keyword[in] identifier[self] . identifier[__fetch_and_parse_bugs] ( identifier[from_date] ):
identifier[nbugs] += literal[int]
keyword[yield] identifier[bug]
identifier[logger] . identifier[info] ( literal[string] , identifier[nbugs] )
|
def fetch_items(self, category, **kwargs):
"""Fetch the bugs
:param category: the category of items to fetch
:param kwargs: backend arguments
:returns: a generator of items
"""
from_date = kwargs['from_date']
logger.info("Looking for bugs: '%s' updated from '%s'", self.url, str(from_date))
nbugs = 0
for bug in self.__fetch_and_parse_bugs(from_date):
nbugs += 1
yield bug # depends on [control=['for'], data=['bug']]
logger.info('Fetch process completed: %s bugs fetched', nbugs)
|
def pl2nvp(plane):
"""
Return a unit normal vector and point that define a specified plane.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pl2nvp_c.html
:param plane: A SPICE plane.
:type plane: supporttypes.Plane
:return: A unit normal vector and point that define plane.
:rtype: tuple
"""
assert (isinstance(plane, stypes.Plane))
normal = stypes.emptyDoubleVector(3)
point = stypes.emptyDoubleVector(3)
libspice.pl2nvp_c(ctypes.byref(plane), normal, point)
return stypes.cVectorToPython(normal), stypes.cVectorToPython(point)
|
def function[pl2nvp, parameter[plane]]:
constant[
Return a unit normal vector and point that define a specified plane.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pl2nvp_c.html
:param plane: A SPICE plane.
:type plane: supporttypes.Plane
:return: A unit normal vector and point that define plane.
:rtype: tuple
]
assert[call[name[isinstance], parameter[name[plane], name[stypes].Plane]]]
variable[normal] assign[=] call[name[stypes].emptyDoubleVector, parameter[constant[3]]]
variable[point] assign[=] call[name[stypes].emptyDoubleVector, parameter[constant[3]]]
call[name[libspice].pl2nvp_c, parameter[call[name[ctypes].byref, parameter[name[plane]]], name[normal], name[point]]]
return[tuple[[<ast.Call object at 0x7da18f09d7b0>, <ast.Call object at 0x7da18f09cdf0>]]]
|
keyword[def] identifier[pl2nvp] ( identifier[plane] ):
literal[string]
keyword[assert] ( identifier[isinstance] ( identifier[plane] , identifier[stypes] . identifier[Plane] ))
identifier[normal] = identifier[stypes] . identifier[emptyDoubleVector] ( literal[int] )
identifier[point] = identifier[stypes] . identifier[emptyDoubleVector] ( literal[int] )
identifier[libspice] . identifier[pl2nvp_c] ( identifier[ctypes] . identifier[byref] ( identifier[plane] ), identifier[normal] , identifier[point] )
keyword[return] identifier[stypes] . identifier[cVectorToPython] ( identifier[normal] ), identifier[stypes] . identifier[cVectorToPython] ( identifier[point] )
|
def pl2nvp(plane):
"""
Return a unit normal vector and point that define a specified plane.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pl2nvp_c.html
:param plane: A SPICE plane.
:type plane: supporttypes.Plane
:return: A unit normal vector and point that define plane.
:rtype: tuple
"""
assert isinstance(plane, stypes.Plane)
normal = stypes.emptyDoubleVector(3)
point = stypes.emptyDoubleVector(3)
libspice.pl2nvp_c(ctypes.byref(plane), normal, point)
return (stypes.cVectorToPython(normal), stypes.cVectorToPython(point))
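# A minimal round-trip sketch with spiceypy: build a plane from a normal
# vector and a point with nvp2pl, then recover them with pl2nvp. Plane
# geometry needs no SPICE kernels to be loaded.
import spiceypy as spice

plane = spice.nvp2pl([0.0, 0.0, 1.0], [0.0, 0.0, 0.0])
normal, point = spice.pl2nvp(plane)
print(normal, point)  # unit normal and the plane's closest point to the origin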
|
def _set_preferred_lifetime(self, v, load=False):
"""
Setter method for preferred_lifetime, mapped from YANG variable /interface/fortygigabitethernet/ipv6/ipv6_nd_ra/ipv6_intf_cmds/nd/prefix/lifetime/preferred/preferred_lifetime (common-def:time-interval-sec)
If this variable is read-only (config: false) in the
source YANG file, then _set_preferred_lifetime is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_preferred_lifetime() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..4294967295']}), is_leaf=True, yang_name="preferred-lifetime", rest_name="preferred-lifetime", parent=self, choice=(u'ch-preferred-type', u'ca-preferred-lifetime'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configures preferred lifetime', u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-nd-ra', defining_module='brocade-ipv6-nd-ra', yang_type='common-def:time-interval-sec', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """preferred_lifetime must be of a type compatible with common-def:time-interval-sec""",
'defined-type': "common-def:time-interval-sec",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..4294967295']}), is_leaf=True, yang_name="preferred-lifetime", rest_name="preferred-lifetime", parent=self, choice=(u'ch-preferred-type', u'ca-preferred-lifetime'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configures preferred lifetime', u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-nd-ra', defining_module='brocade-ipv6-nd-ra', yang_type='common-def:time-interval-sec', is_config=True)""",
})
self.__preferred_lifetime = t
if hasattr(self, '_set'):
self._set()
|
def function[_set_preferred_lifetime, parameter[self, v, load]]:
constant[
Setter method for preferred_lifetime, mapped from YANG variable /interface/fortygigabitethernet/ipv6/ipv6_nd_ra/ipv6_intf_cmds/nd/prefix/lifetime/preferred/preferred_lifetime (common-def:time-interval-sec)
If this variable is read-only (config: false) in the
source YANG file, then _set_preferred_lifetime is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_preferred_lifetime() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18f00ffa0>
name[self].__preferred_lifetime assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]]
|
keyword[def] identifier[_set_preferred_lifetime] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[long] , identifier[restriction_dict] ={ literal[string] :[ literal[string] ]}, identifier[int_size] = literal[int] ), identifier[restriction_dict] ={ literal[string] :[ literal[string] ]}), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[choice] =( literal[string] , literal[string] ), identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__preferred_lifetime] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] ()
|
def _set_preferred_lifetime(self, v, load=False):
"""
Setter method for preferred_lifetime, mapped from YANG variable /interface/fortygigabitethernet/ipv6/ipv6_nd_ra/ipv6_intf_cmds/nd/prefix/lifetime/preferred/preferred_lifetime (common-def:time-interval-sec)
If this variable is read-only (config: false) in the
source YANG file, then _set_preferred_lifetime is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_preferred_lifetime() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..4294967295']}), is_leaf=True, yang_name='preferred-lifetime', rest_name='preferred-lifetime', parent=self, choice=(u'ch-preferred-type', u'ca-preferred-lifetime'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configures preferred lifetime', u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-ipv6-nd-ra', defining_module='brocade-ipv6-nd-ra', yang_type='common-def:time-interval-sec', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'preferred_lifetime must be of a type compatible with common-def:time-interval-sec', 'defined-type': 'common-def:time-interval-sec', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={\'range\': [\'0..4294967295\']}, int_size=32), restriction_dict={\'range\': [u\'0..4294967295\']}), is_leaf=True, yang_name="preferred-lifetime", rest_name="preferred-lifetime", parent=self, choice=(u\'ch-preferred-type\', u\'ca-preferred-lifetime\'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Configures preferred lifetime\', u\'cli-drop-node-name\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-ipv6-nd-ra\', defining_module=\'brocade-ipv6-nd-ra\', yang_type=\'common-def:time-interval-sec\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__preferred_lifetime = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]]
|
def offset_overlays(self, text, offset=0, run_deps=True, **kw):
"""
The heavy lifting is done by fit_overlays. Override just that for
    an alternative implementation.
"""
if run_deps and self.dependencies:
text.overlay(self.dependencies)
for ovlf in self.matchers[0].offset_overlays(text,
goffset=offset,
**kw):
for ovll in self._fit_overlay_lists(text, ovlf.end,
self.matchers[1:]):
yield self._merge_ovls([ovlf] + ovll)
|
def function[offset_overlays, parameter[self, text, offset, run_deps]]:
constant[
The heavy lifting is done by fit_overlays. Override just that for
    an alternative implementation.
]
if <ast.BoolOp object at 0x7da2045673a0> begin[:]
call[name[text].overlay, parameter[name[self].dependencies]]
for taget[name[ovlf]] in starred[call[call[name[self].matchers][constant[0]].offset_overlays, parameter[name[text]]]] begin[:]
for taget[name[ovll]] in starred[call[name[self]._fit_overlay_lists, parameter[name[text], name[ovlf].end, call[name[self].matchers][<ast.Slice object at 0x7da204564910>]]]] begin[:]
<ast.Yield object at 0x7da2045659f0>
|
keyword[def] identifier[offset_overlays] ( identifier[self] , identifier[text] , identifier[offset] = literal[int] , identifier[run_deps] = keyword[True] ,** identifier[kw] ):
literal[string]
keyword[if] identifier[run_deps] keyword[and] identifier[self] . identifier[dependencies] :
identifier[text] . identifier[overlay] ( identifier[self] . identifier[dependencies] )
keyword[for] identifier[ovlf] keyword[in] identifier[self] . identifier[matchers] [ literal[int] ]. identifier[offset_overlays] ( identifier[text] ,
identifier[goffset] = identifier[offset] ,
** identifier[kw] ):
keyword[for] identifier[ovll] keyword[in] identifier[self] . identifier[_fit_overlay_lists] ( identifier[text] , identifier[ovlf] . identifier[end] ,
identifier[self] . identifier[matchers] [ literal[int] :]):
keyword[yield] identifier[self] . identifier[_merge_ovls] ([ identifier[ovlf] ]+ identifier[ovll] )
|
def offset_overlays(self, text, offset=0, run_deps=True, **kw):
"""
    The heavy lifting is done by fit_overlays. Override just that for
    an alternative implementation.
"""
if run_deps and self.dependencies:
text.overlay(self.dependencies) # depends on [control=['if'], data=[]]
for ovlf in self.matchers[0].offset_overlays(text, goffset=offset, **kw):
for ovll in self._fit_overlay_lists(text, ovlf.end, self.matchers[1:]):
yield self._merge_ovls([ovlf] + ovll) # depends on [control=['for'], data=['ovll']] # depends on [control=['for'], data=['ovlf']]
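
A toy reconstruction of the chaining pattern above, with stand-in matchers that yield bare (start, end) spans; the recursive helper below plays the role of _fit_overlay_lists, and real overlays carry more state than tuples:

def fit_lists(spans_by_matcher, start=0):
    # Pick one span per matcher so each span begins where the previous ended.
    if not spans_by_matcher:
        yield []
        return
    for span in spans_by_matcher[0]:
        if span[0] == start:
            for rest in fit_lists(spans_by_matcher[1:], span[1]):
                yield [span] + rest

matchers = [[(0, 2), (1, 3)], [(2, 5), (3, 4)]]
print(list(fit_lists(matchers)))  # [[(0, 2), (2, 5)]]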
|
def checkout(lancet, force, issue):
"""
Checkout the branch for the given issue.
    It is an error if the branch does not exist yet.
"""
issue = get_issue(lancet, issue)
# Get the working branch
branch = get_branch(lancet, issue, create=force)
with taskstatus("Checking out working branch") as ts:
if not branch:
ts.abort("Working branch not found")
lancet.repo.checkout(branch.name)
ts.ok('Checked out "{}"', branch.name)
|
def function[checkout, parameter[lancet, force, issue]]:
constant[
Checkout the branch for the given issue.
    It is an error if the branch does not exist yet.
]
variable[issue] assign[=] call[name[get_issue], parameter[name[lancet], name[issue]]]
variable[branch] assign[=] call[name[get_branch], parameter[name[lancet], name[issue]]]
with call[name[taskstatus], parameter[constant[Checking out working branch]]] begin[:]
if <ast.UnaryOp object at 0x7da1b10224a0> begin[:]
call[name[ts].abort, parameter[constant[Working branch not found]]]
call[name[lancet].repo.checkout, parameter[name[branch].name]]
call[name[ts].ok, parameter[constant[Checked out "{}"], name[branch].name]]
|
keyword[def] identifier[checkout] ( identifier[lancet] , identifier[force] , identifier[issue] ):
literal[string]
identifier[issue] = identifier[get_issue] ( identifier[lancet] , identifier[issue] )
identifier[branch] = identifier[get_branch] ( identifier[lancet] , identifier[issue] , identifier[create] = identifier[force] )
keyword[with] identifier[taskstatus] ( literal[string] ) keyword[as] identifier[ts] :
keyword[if] keyword[not] identifier[branch] :
identifier[ts] . identifier[abort] ( literal[string] )
identifier[lancet] . identifier[repo] . identifier[checkout] ( identifier[branch] . identifier[name] )
identifier[ts] . identifier[ok] ( literal[string] , identifier[branch] . identifier[name] )
|
def checkout(lancet, force, issue):
"""
Checkout the branch for the given issue.
    It is an error if the branch does not exist yet.
"""
issue = get_issue(lancet, issue)
# Get the working branch
branch = get_branch(lancet, issue, create=force)
with taskstatus('Checking out working branch') as ts:
if not branch:
ts.abort('Working branch not found') # depends on [control=['if'], data=[]]
lancet.repo.checkout(branch.name)
ts.ok('Checked out "{}"', branch.name) # depends on [control=['with'], data=['ts']]
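
taskstatus is an external lancet helper; the stand-in below only sketches the contract the command relies on (a context manager exposing ok() and abort()), not the real implementation:

from contextlib import contextmanager

@contextmanager
def taskstatus(message):
    class _Status:
        def ok(self, fmt, *args):
            print('OK: ' + fmt.format(*args))
        def abort(self, msg):
            # Aborting unwinds the with-block.
            raise SystemExit(msg)
    print(message + '...')
    yield _Status()

with taskstatus('Checking out working branch') as ts:
    ts.ok('Checked out "{}"', 'feature/PROJ-42')  # branch name is a placeholder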
|
def __get_handle_record_if_necessary(self, handle, handlerecord_json):
'''
Returns the handle record if it is None or if its handle is not the
same as the specified handle.
'''
if handlerecord_json is None:
handlerecord_json = self.retrieve_handle_record_json(handle)
else:
if handle != handlerecord_json['handle']:
handlerecord_json = self.retrieve_handle_record_json(handle)
return handlerecord_json
|
def function[__get_handle_record_if_necessary, parameter[self, handle, handlerecord_json]]:
constant[
Returns the handle record if it is None or if its handle is not the
same as the specified handle.
]
if compare[name[handlerecord_json] is constant[None]] begin[:]
variable[handlerecord_json] assign[=] call[name[self].retrieve_handle_record_json, parameter[name[handle]]]
return[name[handlerecord_json]]
|
keyword[def] identifier[__get_handle_record_if_necessary] ( identifier[self] , identifier[handle] , identifier[handlerecord_json] ):
literal[string]
keyword[if] identifier[handlerecord_json] keyword[is] keyword[None] :
identifier[handlerecord_json] = identifier[self] . identifier[retrieve_handle_record_json] ( identifier[handle] )
keyword[else] :
keyword[if] identifier[handle] != identifier[handlerecord_json] [ literal[string] ]:
identifier[handlerecord_json] = identifier[self] . identifier[retrieve_handle_record_json] ( identifier[handle] )
keyword[return] identifier[handlerecord_json]
|
def __get_handle_record_if_necessary(self, handle, handlerecord_json):
"""
Returns the handle record if it is None or if its handle is not the
same as the specified handle.
"""
if handlerecord_json is None:
handlerecord_json = self.retrieve_handle_record_json(handle) # depends on [control=['if'], data=['handlerecord_json']]
elif handle != handlerecord_json['handle']:
handlerecord_json = self.retrieve_handle_record_json(handle) # depends on [control=['if'], data=['handle']]
return handlerecord_json
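
The guard only re-fetches when the cached record is missing or belongs to a different handle; the same idea in isolation, with placeholder handles:

def record_if_necessary(handle, cached, fetch):
    # Reuse `cached` only when it exists and matches the requested handle.
    if cached is None or handle != cached['handle']:
        return fetch(handle)
    return cached

fetch = lambda h: {'handle': h, 'values': []}
cached = {'handle': '21.T1/OLD', 'values': []}
print(record_if_necessary('21.T1/NEW', cached, fetch))  # triggers a re-fetch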
|
def get_attachment(self, scope_identifier, hub_name, plan_id, timeline_id, record_id, type, name):
"""GetAttachment.
[Preview API]
:param str scope_identifier: The project GUID to scope the request
:param str hub_name: The name of the server hub: "build" for the Build server or "rm" for the Release Management server
:param str plan_id:
:param str timeline_id:
:param str record_id:
:param str type:
:param str name:
:rtype: :class:`<TaskAttachment> <azure.devops.v5_0.task.models.TaskAttachment>`
"""
route_values = {}
if scope_identifier is not None:
route_values['scopeIdentifier'] = self._serialize.url('scope_identifier', scope_identifier, 'str')
if hub_name is not None:
route_values['hubName'] = self._serialize.url('hub_name', hub_name, 'str')
if plan_id is not None:
route_values['planId'] = self._serialize.url('plan_id', plan_id, 'str')
if timeline_id is not None:
route_values['timelineId'] = self._serialize.url('timeline_id', timeline_id, 'str')
if record_id is not None:
route_values['recordId'] = self._serialize.url('record_id', record_id, 'str')
if type is not None:
route_values['type'] = self._serialize.url('type', type, 'str')
if name is not None:
route_values['name'] = self._serialize.url('name', name, 'str')
response = self._send(http_method='GET',
location_id='7898f959-9cdf-4096-b29e-7f293031629e',
version='5.0-preview.1',
route_values=route_values)
return self._deserialize('TaskAttachment', response)
|
def function[get_attachment, parameter[self, scope_identifier, hub_name, plan_id, timeline_id, record_id, type, name]]:
constant[GetAttachment.
[Preview API]
:param str scope_identifier: The project GUID to scope the request
:param str hub_name: The name of the server hub: "build" for the Build server or "rm" for the Release Management server
:param str plan_id:
:param str timeline_id:
:param str record_id:
:param str type:
:param str name:
:rtype: :class:`<TaskAttachment> <azure.devops.v5_0.task.models.TaskAttachment>`
]
variable[route_values] assign[=] dictionary[[], []]
if compare[name[scope_identifier] is_not constant[None]] begin[:]
call[name[route_values]][constant[scopeIdentifier]] assign[=] call[name[self]._serialize.url, parameter[constant[scope_identifier], name[scope_identifier], constant[str]]]
if compare[name[hub_name] is_not constant[None]] begin[:]
call[name[route_values]][constant[hubName]] assign[=] call[name[self]._serialize.url, parameter[constant[hub_name], name[hub_name], constant[str]]]
if compare[name[plan_id] is_not constant[None]] begin[:]
call[name[route_values]][constant[planId]] assign[=] call[name[self]._serialize.url, parameter[constant[plan_id], name[plan_id], constant[str]]]
if compare[name[timeline_id] is_not constant[None]] begin[:]
call[name[route_values]][constant[timelineId]] assign[=] call[name[self]._serialize.url, parameter[constant[timeline_id], name[timeline_id], constant[str]]]
if compare[name[record_id] is_not constant[None]] begin[:]
call[name[route_values]][constant[recordId]] assign[=] call[name[self]._serialize.url, parameter[constant[record_id], name[record_id], constant[str]]]
if compare[name[type] is_not constant[None]] begin[:]
call[name[route_values]][constant[type]] assign[=] call[name[self]._serialize.url, parameter[constant[type], name[type], constant[str]]]
if compare[name[name] is_not constant[None]] begin[:]
call[name[route_values]][constant[name]] assign[=] call[name[self]._serialize.url, parameter[constant[name], name[name], constant[str]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
return[call[name[self]._deserialize, parameter[constant[TaskAttachment], name[response]]]]
|
keyword[def] identifier[get_attachment] ( identifier[self] , identifier[scope_identifier] , identifier[hub_name] , identifier[plan_id] , identifier[timeline_id] , identifier[record_id] , identifier[type] , identifier[name] ):
literal[string]
identifier[route_values] ={}
keyword[if] identifier[scope_identifier] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[scope_identifier] , literal[string] )
keyword[if] identifier[hub_name] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[hub_name] , literal[string] )
keyword[if] identifier[plan_id] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[plan_id] , literal[string] )
keyword[if] identifier[timeline_id] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[timeline_id] , literal[string] )
keyword[if] identifier[record_id] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[record_id] , literal[string] )
keyword[if] identifier[type] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[type] , literal[string] )
keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[name] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[route_values] = identifier[route_values] )
keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] )
|
def get_attachment(self, scope_identifier, hub_name, plan_id, timeline_id, record_id, type, name):
"""GetAttachment.
[Preview API]
:param str scope_identifier: The project GUID to scope the request
:param str hub_name: The name of the server hub: "build" for the Build server or "rm" for the Release Management server
:param str plan_id:
:param str timeline_id:
:param str record_id:
:param str type:
:param str name:
:rtype: :class:`<TaskAttachment> <azure.devops.v5_0.task.models.TaskAttachment>`
"""
route_values = {}
if scope_identifier is not None:
route_values['scopeIdentifier'] = self._serialize.url('scope_identifier', scope_identifier, 'str') # depends on [control=['if'], data=['scope_identifier']]
if hub_name is not None:
route_values['hubName'] = self._serialize.url('hub_name', hub_name, 'str') # depends on [control=['if'], data=['hub_name']]
if plan_id is not None:
route_values['planId'] = self._serialize.url('plan_id', plan_id, 'str') # depends on [control=['if'], data=['plan_id']]
if timeline_id is not None:
route_values['timelineId'] = self._serialize.url('timeline_id', timeline_id, 'str') # depends on [control=['if'], data=['timeline_id']]
if record_id is not None:
route_values['recordId'] = self._serialize.url('record_id', record_id, 'str') # depends on [control=['if'], data=['record_id']]
if type is not None:
route_values['type'] = self._serialize.url('type', type, 'str') # depends on [control=['if'], data=['type']]
if name is not None:
route_values['name'] = self._serialize.url('name', name, 'str') # depends on [control=['if'], data=['name']]
response = self._send(http_method='GET', location_id='7898f959-9cdf-4096-b29e-7f293031629e', version='5.0-preview.1', route_values=route_values)
return self._deserialize('TaskAttachment', response)
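
Most of the body is URL templating: every non-None argument is serialized into route_values. The filtering step on its own, as a plain-dict sketch:

def build_route_values(**parts):
    # Keep only the parts the caller supplied, mirroring the None-checks above.
    return {key: str(value) for key, value in parts.items() if value is not None}

print(build_route_values(scopeIdentifier='proj-guid', hubName='build', planId=None))
# {'scopeIdentifier': 'proj-guid', 'hubName': 'build'}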
|
def views_show_many(self, ids=None, **kwargs):
"https://developer.zendesk.com/rest_api/docs/core/views#list-views-by-id"
api_path = "/api/v2/views/show_many.json"
api_query = {}
if "query" in kwargs.keys():
api_query.update(kwargs["query"])
del kwargs["query"]
if ids:
api_query.update({
"ids": ids,
})
return self.call(api_path, query=api_query, **kwargs)
|
def function[views_show_many, parameter[self, ids]]:
constant[https://developer.zendesk.com/rest_api/docs/core/views#list-views-by-id]
variable[api_path] assign[=] constant[/api/v2/views/show_many.json]
variable[api_query] assign[=] dictionary[[], []]
if compare[constant[query] in call[name[kwargs].keys, parameter[]]] begin[:]
call[name[api_query].update, parameter[call[name[kwargs]][constant[query]]]]
<ast.Delete object at 0x7da1b0ebd690>
if name[ids] begin[:]
call[name[api_query].update, parameter[dictionary[[<ast.Constant object at 0x7da1b0ebc100>], [<ast.Name object at 0x7da1b0ebea70>]]]]
return[call[name[self].call, parameter[name[api_path]]]]
|
keyword[def] identifier[views_show_many] ( identifier[self] , identifier[ids] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[api_path] = literal[string]
identifier[api_query] ={}
keyword[if] literal[string] keyword[in] identifier[kwargs] . identifier[keys] ():
identifier[api_query] . identifier[update] ( identifier[kwargs] [ literal[string] ])
keyword[del] identifier[kwargs] [ literal[string] ]
keyword[if] identifier[ids] :
identifier[api_query] . identifier[update] ({
literal[string] : identifier[ids] ,
})
keyword[return] identifier[self] . identifier[call] ( identifier[api_path] , identifier[query] = identifier[api_query] ,** identifier[kwargs] )
|
def views_show_many(self, ids=None, **kwargs):
"""https://developer.zendesk.com/rest_api/docs/core/views#list-views-by-id"""
api_path = '/api/v2/views/show_many.json'
api_query = {}
if 'query' in kwargs.keys():
api_query.update(kwargs['query'])
del kwargs['query'] # depends on [control=['if'], data=[]]
if ids:
api_query.update({'ids': ids}) # depends on [control=['if'], data=[]]
return self.call(api_path, query=api_query, **kwargs)
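
The query assembly in isolation: an optional query kwarg is folded in first, then ids is added on top. A minimal sketch without the HTTP call:

def build_query(ids=None, **kwargs):
    query = {}
    if 'query' in kwargs:
        query.update(kwargs.pop('query'))
    if ids:
        query['ids'] = ids
    return query

print(build_query(ids='25,78', query={'active': 'true'}))
# {'active': 'true', 'ids': '25,78'}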
|
def update(self, callback=None, errback=None, **kwargs):
"""
Update record configuration. Pass list of keywords and their values to
update. For the list of keywords available for zone configuration, see
:attr:`ns1.rest.records.Records.INT_FIELDS`,
:attr:`ns1.rest.records.Records.PASSTHRU_FIELDS`,
:attr:`ns1.rest.records.Records.BOOL_FIELDS`
"""
if not self.data:
raise RecordException('record not loaded')
def success(result, *args):
self._parseModel(result)
if callback:
return callback(self)
else:
return self
return self._rest.update(self.parentZone.zone, self.domain, self.type,
callback=success, errback=errback, **kwargs)
|
def function[update, parameter[self, callback, errback]]:
constant[
Update record configuration. Pass list of keywords and their values to
update. For the list of keywords available for zone configuration, see
:attr:`ns1.rest.records.Records.INT_FIELDS`,
:attr:`ns1.rest.records.Records.PASSTHRU_FIELDS`,
:attr:`ns1.rest.records.Records.BOOL_FIELDS`
]
if <ast.UnaryOp object at 0x7da1b068a2c0> begin[:]
<ast.Raise object at 0x7da1b0689690>
def function[success, parameter[result]]:
call[name[self]._parseModel, parameter[name[result]]]
if name[callback] begin[:]
return[call[name[callback], parameter[name[self]]]]
return[call[name[self]._rest.update, parameter[name[self].parentZone.zone, name[self].domain, name[self].type]]]
|
keyword[def] identifier[update] ( identifier[self] , identifier[callback] = keyword[None] , identifier[errback] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[data] :
keyword[raise] identifier[RecordException] ( literal[string] )
keyword[def] identifier[success] ( identifier[result] ,* identifier[args] ):
identifier[self] . identifier[_parseModel] ( identifier[result] )
keyword[if] identifier[callback] :
keyword[return] identifier[callback] ( identifier[self] )
keyword[else] :
keyword[return] identifier[self]
keyword[return] identifier[self] . identifier[_rest] . identifier[update] ( identifier[self] . identifier[parentZone] . identifier[zone] , identifier[self] . identifier[domain] , identifier[self] . identifier[type] ,
identifier[callback] = identifier[success] , identifier[errback] = identifier[errback] ,** identifier[kwargs] )
|
def update(self, callback=None, errback=None, **kwargs):
"""
Update record configuration. Pass list of keywords and their values to
update. For the list of keywords available for zone configuration, see
:attr:`ns1.rest.records.Records.INT_FIELDS`,
:attr:`ns1.rest.records.Records.PASSTHRU_FIELDS`,
:attr:`ns1.rest.records.Records.BOOL_FIELDS`
"""
if not self.data:
raise RecordException('record not loaded') # depends on [control=['if'], data=[]]
def success(result, *args):
self._parseModel(result)
if callback:
return callback(self) # depends on [control=['if'], data=[]]
else:
return self
return self._rest.update(self.parentZone.zone, self.domain, self.type, callback=success, errback=errback, **kwargs)
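
The callback/errback shape (async style with an optional synchronous return) can be exercised without the NS1 REST layer; a hypothetical stand-in transport:

def rest_update(payload, callback=None, errback=None):
    # Stand-in: succeed immediately and route through the success path.
    result = {'updated': payload}
    return callback(result) if callback else result

def success(result, *args):
    print('parsed', result)
    return result

rest_update({'ttl': 300}, callback=success)  # prints: parsed {'updated': {'ttl': 300}}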
|
def euc_to_unicode(hexstr):
"""
Return EUC-CN (GB2312) hex to a Python unicode.
Parameters
----------
hexstr : bytes
Returns
-------
unicode :
Python unicode e.g. ``u'\\u4e00'`` / '一'.
Examples
--------
>>> u'\u4e00'.encode('gb2312').decode('utf-8')
u'\u04bb'
>>> (b'\\x' + b'd2' + b'\\x' + b'bb').replace('\\x', '') \\
... .decode('hex').decode('utf-8')
u'\u04bb'
Note: bytes don't have a ``.replace``:
>>> gb_enc = gb_enc.replace('\\x', '').decode('hex')
>>> gb_enc.decode('string_escape') # Won't work with Python 3.x.
"""
hi = hexstr[0:2]
lo = hexstr[2:4]
# hi and lo are only 2 characters long, no risk with eval-ing them
gb_enc = b'\\x' + hi + b'\\x' + lo
assert isinstance(gb_enc, bytes)
# Requires coercing back to text_type in 2.7
gb_enc = gb_enc.decode('unicode_escape')
gb_enc = gb_enc.encode('latin1')
gb_enc = gb_enc.decode('gb2312')
assert isinstance(gb_enc, text_type)
return gb_enc
|
def function[euc_to_unicode, parameter[hexstr]]:
constant[
Return EUC-CN (GB2312) hex to a Python unicode.
Parameters
----------
hexstr : bytes
Returns
-------
unicode :
Python unicode e.g. ``u'\u4e00'`` / '一'.
Examples
--------
>>> u'一'.encode('gb2312').decode('utf-8')
u'һ'
>>> (b'\x' + b'd2' + b'\x' + b'bb').replace('\x', '') \
... .decode('hex').decode('utf-8')
u'һ'
Note: bytes don't have a ``.replace``:
>>> gb_enc = gb_enc.replace('\x', '').decode('hex')
>>> gb_enc.decode('string_escape') # Won't work with Python 3.x.
]
variable[hi] assign[=] call[name[hexstr]][<ast.Slice object at 0x7da1b1971b70>]
variable[lo] assign[=] call[name[hexstr]][<ast.Slice object at 0x7da1b1972230>]
variable[gb_enc] assign[=] binary_operation[binary_operation[binary_operation[constant[b'\\x'] + name[hi]] + constant[b'\\x']] + name[lo]]
assert[call[name[isinstance], parameter[name[gb_enc], name[bytes]]]]
variable[gb_enc] assign[=] call[name[gb_enc].decode, parameter[constant[unicode_escape]]]
variable[gb_enc] assign[=] call[name[gb_enc].encode, parameter[constant[latin1]]]
variable[gb_enc] assign[=] call[name[gb_enc].decode, parameter[constant[gb2312]]]
assert[call[name[isinstance], parameter[name[gb_enc], name[text_type]]]]
return[name[gb_enc]]
|
keyword[def] identifier[euc_to_unicode] ( identifier[hexstr] ):
literal[string]
identifier[hi] = identifier[hexstr] [ literal[int] : literal[int] ]
identifier[lo] = identifier[hexstr] [ literal[int] : literal[int] ]
identifier[gb_enc] = literal[string] + identifier[hi] + literal[string] + identifier[lo]
keyword[assert] identifier[isinstance] ( identifier[gb_enc] , identifier[bytes] )
identifier[gb_enc] = identifier[gb_enc] . identifier[decode] ( literal[string] )
identifier[gb_enc] = identifier[gb_enc] . identifier[encode] ( literal[string] )
identifier[gb_enc] = identifier[gb_enc] . identifier[decode] ( literal[string] )
keyword[assert] identifier[isinstance] ( identifier[gb_enc] , identifier[text_type] )
keyword[return] identifier[gb_enc]
|
def euc_to_unicode(hexstr):
"""
Return EUC-CN (GB2312) hex to a Python unicode.
Parameters
----------
hexstr : bytes
Returns
-------
unicode :
Python unicode e.g. ``u'\\u4e00'`` / '一'.
Examples
--------
>>> u'一'.encode('gb2312').decode('utf-8')
u'һ'
>>> (b'\\x' + b'd2' + b'\\x' + b'bb').replace('\\x', '') \\
... .decode('hex').decode('utf-8')
u'һ'
Note: bytes don't have a ``.replace``:
>>> gb_enc = gb_enc.replace('\\x', '').decode('hex')
>>> gb_enc.decode('string_escape') # Won't work with Python 3.x.
"""
hi = hexstr[0:2]
lo = hexstr[2:4]
# hi and lo are only 2 characters long, no risk with eval-ing them
gb_enc = b'\\x' + hi + b'\\x' + lo
assert isinstance(gb_enc, bytes)
# Requires coercing back to text_type in 2.7
gb_enc = gb_enc.decode('unicode_escape')
gb_enc = gb_enc.encode('latin1')
gb_enc = gb_enc.decode('gb2312')
assert isinstance(gb_enc, text_type)
return gb_enc
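
A standalone replay of the conversion on a known code point, assuming Python 3 (where text_type is str); 0xD2BB is the GB2312/EUC-CN encoding of U+4E00:

raw = b'\\x' + b'd2' + b'\\x' + b'bb'                    # the escaped text b'\\xd2\\xbb'
decoded = raw.decode('unicode_escape').encode('latin1')  # the real bytes b'\xd2\xbb'
print(decoded.decode('gb2312'))                          # 一 (U+4E00)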
|
def _t_update_b(self):
r"""
A method to update 'b' array at each time step according to
't_scheme' and the source term value
"""
network = self.project.network
phase = self.project.phases()[self.settings['phase']]
Vi = network['pore.volume']
dt = self.settings['t_step']
s = self.settings['t_scheme']
if (s == 'implicit'):
f1, f2, f3 = 1, 1, 0
elif (s == 'cranknicolson'):
f1, f2, f3 = 0.5, 1, 0
elif (s == 'steady'):
f1, f2, f3 = 1, 0, 1
x_old = self[self.settings['quantity']]
b = (f2*(1-f1)*(-self._A_steady)*x_old +
f2*(Vi/dt)*x_old +
f3*np.zeros(shape=(self.Np, ), dtype=float))
self._update_physics()
for item in self.settings['sources']:
Ps = self.pores(item)
# Update b
b[Ps] = b[Ps] - f2*(1-f1)*(phase[item+'.'+'rate'][Ps])
self._b = b
return b
|
def function[_t_update_b, parameter[self]]:
constant[
A method to update 'b' array at each time step according to
't_scheme' and the source term value
]
variable[network] assign[=] name[self].project.network
variable[phase] assign[=] call[call[name[self].project.phases, parameter[]]][call[name[self].settings][constant[phase]]]
variable[Vi] assign[=] call[name[network]][constant[pore.volume]]
variable[dt] assign[=] call[name[self].settings][constant[t_step]]
variable[s] assign[=] call[name[self].settings][constant[t_scheme]]
if compare[name[s] equal[==] constant[implicit]] begin[:]
<ast.Tuple object at 0x7da18dc06530> assign[=] tuple[[<ast.Constant object at 0x7da18dc06c80>, <ast.Constant object at 0x7da18dc05ba0>, <ast.Constant object at 0x7da18dc04700>]]
variable[x_old] assign[=] call[name[self]][call[name[self].settings][constant[quantity]]]
variable[b] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[f2] * binary_operation[constant[1] - name[f1]]] * <ast.UnaryOp object at 0x7da18dc07af0>] * name[x_old]] + binary_operation[binary_operation[name[f2] * binary_operation[name[Vi] / name[dt]]] * name[x_old]]] + binary_operation[name[f3] * call[name[np].zeros, parameter[]]]]
call[name[self]._update_physics, parameter[]]
for taget[name[item]] in starred[call[name[self].settings][constant[sources]]] begin[:]
variable[Ps] assign[=] call[name[self].pores, parameter[name[item]]]
call[name[b]][name[Ps]] assign[=] binary_operation[call[name[b]][name[Ps]] - binary_operation[binary_operation[name[f2] * binary_operation[constant[1] - name[f1]]] * call[call[name[phase]][binary_operation[binary_operation[name[item] + constant[.]] + constant[rate]]]][name[Ps]]]]
name[self]._b assign[=] name[b]
return[name[b]]
|
keyword[def] identifier[_t_update_b] ( identifier[self] ):
literal[string]
identifier[network] = identifier[self] . identifier[project] . identifier[network]
identifier[phase] = identifier[self] . identifier[project] . identifier[phases] ()[ identifier[self] . identifier[settings] [ literal[string] ]]
identifier[Vi] = identifier[network] [ literal[string] ]
identifier[dt] = identifier[self] . identifier[settings] [ literal[string] ]
identifier[s] = identifier[self] . identifier[settings] [ literal[string] ]
keyword[if] ( identifier[s] == literal[string] ):
identifier[f1] , identifier[f2] , identifier[f3] = literal[int] , literal[int] , literal[int]
keyword[elif] ( identifier[s] == literal[string] ):
identifier[f1] , identifier[f2] , identifier[f3] = literal[int] , literal[int] , literal[int]
keyword[elif] ( identifier[s] == literal[string] ):
identifier[f1] , identifier[f2] , identifier[f3] = literal[int] , literal[int] , literal[int]
identifier[x_old] = identifier[self] [ identifier[self] . identifier[settings] [ literal[string] ]]
identifier[b] =( identifier[f2] *( literal[int] - identifier[f1] )*(- identifier[self] . identifier[_A_steady] )* identifier[x_old] +
identifier[f2] *( identifier[Vi] / identifier[dt] )* identifier[x_old] +
identifier[f3] * identifier[np] . identifier[zeros] ( identifier[shape] =( identifier[self] . identifier[Np] ,), identifier[dtype] = identifier[float] ))
identifier[self] . identifier[_update_physics] ()
keyword[for] identifier[item] keyword[in] identifier[self] . identifier[settings] [ literal[string] ]:
identifier[Ps] = identifier[self] . identifier[pores] ( identifier[item] )
identifier[b] [ identifier[Ps] ]= identifier[b] [ identifier[Ps] ]- identifier[f2] *( literal[int] - identifier[f1] )*( identifier[phase] [ identifier[item] + literal[string] + literal[string] ][ identifier[Ps] ])
identifier[self] . identifier[_b] = identifier[b]
keyword[return] identifier[b]
|
def _t_update_b(self):
"""
A method to update 'b' array at each time step according to
't_scheme' and the source term value
"""
network = self.project.network
phase = self.project.phases()[self.settings['phase']]
Vi = network['pore.volume']
dt = self.settings['t_step']
s = self.settings['t_scheme']
if s == 'implicit':
(f1, f2, f3) = (1, 1, 0) # depends on [control=['if'], data=[]]
elif s == 'cranknicolson':
(f1, f2, f3) = (0.5, 1, 0) # depends on [control=['if'], data=[]]
elif s == 'steady':
(f1, f2, f3) = (1, 0, 1) # depends on [control=['if'], data=[]]
x_old = self[self.settings['quantity']]
b = f2 * (1 - f1) * -self._A_steady * x_old + f2 * (Vi / dt) * x_old + f3 * np.zeros(shape=(self.Np,), dtype=float)
self._update_physics()
for item in self.settings['sources']:
Ps = self.pores(item)
# Update b
b[Ps] = b[Ps] - f2 * (1 - f1) * phase[item + '.' + 'rate'][Ps] # depends on [control=['for'], data=['item']]
self._b = b
return b
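
The f1/f2/f3 triplet selects the time discretization (implicit, Crank-Nicolson, or steady). A one-pore numeric check of the b assembly, with made-up A, volume, and step:

import numpy as np

A = np.array([[2.0]])        # stand-in for the steady-state coefficient matrix
x_old = np.array([1.0])
Vi, dt = 1.0, 0.1
f1, f2, f3 = 0.5, 1, 0       # Crank-Nicolson factors, as in the method
b = f2 * (1 - f1) * (-A) @ x_old + f2 * (Vi / dt) * x_old + f3 * np.zeros(1)
print(b)                     # [9.]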
|
def list_users(self, limit=20):
"""
List the public users in the system.
:param limit: (optional) The number of users to fetch.
:type limit: int | long
:returns: The list of users.
:rtype: list[dict]
"""
parameters = dict()
parameters['limit'] = limit
response = self.request('midas.user.list', parameters)
return response
|
def function[list_users, parameter[self, limit]]:
constant[
List the public users in the system.
:param limit: (optional) The number of users to fetch.
:type limit: int | long
:returns: The list of users.
:rtype: list[dict]
]
variable[parameters] assign[=] call[name[dict], parameter[]]
call[name[parameters]][constant[limit]] assign[=] name[limit]
variable[response] assign[=] call[name[self].request, parameter[constant[midas.user.list], name[parameters]]]
return[name[response]]
|
keyword[def] identifier[list_users] ( identifier[self] , identifier[limit] = literal[int] ):
literal[string]
identifier[parameters] = identifier[dict] ()
identifier[parameters] [ literal[string] ]= identifier[limit]
identifier[response] = identifier[self] . identifier[request] ( literal[string] , identifier[parameters] )
keyword[return] identifier[response]
|
def list_users(self, limit=20):
"""
List the public users in the system.
:param limit: (optional) The number of users to fetch.
:type limit: int | long
:returns: The list of users.
:rtype: list[dict]
"""
parameters = dict()
parameters['limit'] = limit
response = self.request('midas.user.list', parameters)
return response
|
def ParseRow(self, parser_mediator, query, row, **unused_kwargs):
"""Parses a row from the database.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
query (str): query that created the row.
row (sqlite3.Row): row.
"""
query_hash = hash(query)
event_data = AndroidWebViewCacheEventData()
event_data.content_length = self._GetRowValue(
query_hash, row, 'contentlength')
event_data.query = query
event_data.url = self._GetRowValue(query_hash, row, 'url')
timestamp = self._GetRowValue(query_hash, row, 'expires')
if timestamp is not None:
date_time = dfdatetime_java_time.JavaTime(timestamp=timestamp)
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
parser_mediator.ProduceEventWithEventData(event, event_data)
timestamp = self._GetRowValue(query_hash, row, 'lastmodify')
if timestamp is not None:
date_time = dfdatetime_java_time.JavaTime(timestamp=timestamp)
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
parser_mediator.ProduceEventWithEventData(event, event_data)
|
def function[ParseRow, parameter[self, parser_mediator, query, row]]:
constant[Parses a row from the database.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
query (str): query that created the row.
row (sqlite3.Row): row.
]
variable[query_hash] assign[=] call[name[hash], parameter[name[query]]]
variable[event_data] assign[=] call[name[AndroidWebViewCacheEventData], parameter[]]
name[event_data].content_length assign[=] call[name[self]._GetRowValue, parameter[name[query_hash], name[row], constant[contentlength]]]
name[event_data].query assign[=] name[query]
name[event_data].url assign[=] call[name[self]._GetRowValue, parameter[name[query_hash], name[row], constant[url]]]
variable[timestamp] assign[=] call[name[self]._GetRowValue, parameter[name[query_hash], name[row], constant[expires]]]
if compare[name[timestamp] is_not constant[None]] begin[:]
variable[date_time] assign[=] call[name[dfdatetime_java_time].JavaTime, parameter[]]
variable[event] assign[=] call[name[time_events].DateTimeValuesEvent, parameter[name[date_time], name[definitions].TIME_DESCRIPTION_EXPIRATION]]
call[name[parser_mediator].ProduceEventWithEventData, parameter[name[event], name[event_data]]]
variable[timestamp] assign[=] call[name[self]._GetRowValue, parameter[name[query_hash], name[row], constant[lastmodify]]]
if compare[name[timestamp] is_not constant[None]] begin[:]
variable[date_time] assign[=] call[name[dfdatetime_java_time].JavaTime, parameter[]]
variable[event] assign[=] call[name[time_events].DateTimeValuesEvent, parameter[name[date_time], name[definitions].TIME_DESCRIPTION_MODIFICATION]]
call[name[parser_mediator].ProduceEventWithEventData, parameter[name[event], name[event_data]]]
|
keyword[def] identifier[ParseRow] ( identifier[self] , identifier[parser_mediator] , identifier[query] , identifier[row] ,** identifier[unused_kwargs] ):
literal[string]
identifier[query_hash] = identifier[hash] ( identifier[query] )
identifier[event_data] = identifier[AndroidWebViewCacheEventData] ()
identifier[event_data] . identifier[content_length] = identifier[self] . identifier[_GetRowValue] (
identifier[query_hash] , identifier[row] , literal[string] )
identifier[event_data] . identifier[query] = identifier[query]
identifier[event_data] . identifier[url] = identifier[self] . identifier[_GetRowValue] ( identifier[query_hash] , identifier[row] , literal[string] )
identifier[timestamp] = identifier[self] . identifier[_GetRowValue] ( identifier[query_hash] , identifier[row] , literal[string] )
keyword[if] identifier[timestamp] keyword[is] keyword[not] keyword[None] :
identifier[date_time] = identifier[dfdatetime_java_time] . identifier[JavaTime] ( identifier[timestamp] = identifier[timestamp] )
identifier[event] = identifier[time_events] . identifier[DateTimeValuesEvent] (
identifier[date_time] , identifier[definitions] . identifier[TIME_DESCRIPTION_EXPIRATION] )
identifier[parser_mediator] . identifier[ProduceEventWithEventData] ( identifier[event] , identifier[event_data] )
identifier[timestamp] = identifier[self] . identifier[_GetRowValue] ( identifier[query_hash] , identifier[row] , literal[string] )
keyword[if] identifier[timestamp] keyword[is] keyword[not] keyword[None] :
identifier[date_time] = identifier[dfdatetime_java_time] . identifier[JavaTime] ( identifier[timestamp] = identifier[timestamp] )
identifier[event] = identifier[time_events] . identifier[DateTimeValuesEvent] (
identifier[date_time] , identifier[definitions] . identifier[TIME_DESCRIPTION_MODIFICATION] )
identifier[parser_mediator] . identifier[ProduceEventWithEventData] ( identifier[event] , identifier[event_data] )
|
def ParseRow(self, parser_mediator, query, row, **unused_kwargs):
"""Parses a row from the database.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
query (str): query that created the row.
row (sqlite3.Row): row.
"""
query_hash = hash(query)
event_data = AndroidWebViewCacheEventData()
event_data.content_length = self._GetRowValue(query_hash, row, 'contentlength')
event_data.query = query
event_data.url = self._GetRowValue(query_hash, row, 'url')
timestamp = self._GetRowValue(query_hash, row, 'expires')
if timestamp is not None:
date_time = dfdatetime_java_time.JavaTime(timestamp=timestamp)
event = time_events.DateTimeValuesEvent(date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
parser_mediator.ProduceEventWithEventData(event, event_data) # depends on [control=['if'], data=['timestamp']]
timestamp = self._GetRowValue(query_hash, row, 'lastmodify')
if timestamp is not None:
date_time = dfdatetime_java_time.JavaTime(timestamp=timestamp)
event = time_events.DateTimeValuesEvent(date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
parser_mediator.ProduceEventWithEventData(event, event_data) # depends on [control=['if'], data=['timestamp']]
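
JavaTime values are milliseconds since the Unix epoch; a quick standalone conversion of a sample 'expires' value, without dfdatetime:

from datetime import datetime, timezone

java_ms = 1609459200000  # sample value only
print(datetime.fromtimestamp(java_ms / 1000, tz=timezone.utc))
# 2021-01-01 00:00:00+00:00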
|
def mBank_set_iph_id(transactions, tag, tag_dict, *args):
"""
    mBank Collect uses ID IPH to distinguish between virtual accounts;
adding iph_id may be helpful in further processing
"""
matches = iph_id_re.search(tag_dict[tag.slug])
if matches: # pragma no branch
tag_dict['iph_id'] = matches.groupdict()['iph_id']
return tag_dict
|
def function[mBank_set_iph_id, parameter[transactions, tag, tag_dict]]:
constant[
    mBank Collect uses ID IPH to distinguish between virtual accounts;
adding iph_id may be helpful in further processing
]
variable[matches] assign[=] call[name[iph_id_re].search, parameter[call[name[tag_dict]][name[tag].slug]]]
if name[matches] begin[:]
call[name[tag_dict]][constant[iph_id]] assign[=] call[call[name[matches].groupdict, parameter[]]][constant[iph_id]]
return[name[tag_dict]]
|
keyword[def] identifier[mBank_set_iph_id] ( identifier[transactions] , identifier[tag] , identifier[tag_dict] ,* identifier[args] ):
literal[string]
identifier[matches] = identifier[iph_id_re] . identifier[search] ( identifier[tag_dict] [ identifier[tag] . identifier[slug] ])
keyword[if] identifier[matches] :
identifier[tag_dict] [ literal[string] ]= identifier[matches] . identifier[groupdict] ()[ literal[string] ]
keyword[return] identifier[tag_dict]
|
def mBank_set_iph_id(transactions, tag, tag_dict, *args):
"""
    mBank Collect uses ID IPH to distinguish between virtual accounts;
adding iph_id may be helpful in further processing
"""
matches = iph_id_re.search(tag_dict[tag.slug])
if matches: # pragma no branch
tag_dict['iph_id'] = matches.groupdict()['iph_id'] # depends on [control=['if'], data=[]]
return tag_dict
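
iph_id_re is defined elsewhere in the module; the pattern below is a plausible stand-in (not the library's actual regex) that captures a numeric ID IPH from a transaction detail:

import re

iph_id_re = re.compile(r'ID IPH:\s*(?P<iph_id>\d+)')  # hypothetical pattern
match = iph_id_re.search('Przelew przychodzacy ID IPH: 123456')
print(match.groupdict()['iph_id'])  # 123456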
|
def parse_cmd_arguments():
"""Parse command line arguments."""
parser = argparse.ArgumentParser()
parser.add_argument('file', type=str,
help='Filename to be staticfied')
parser.add_argument('--static-endpoint',
help='Static endpoint which is "static" by default')
parser.add_argument('--add-tags', type=str,
help='Additional tags to staticfy')
parser.add_argument('--exc-tags', type=str, help='tags to exclude')
parser.add_argument('--framework', type=str,
help='Web Framework: Defaults to Flask')
parser.add_argument('--namespace', type=str,
help='String to prefix url with')
parser.add_argument('-o', type=str, help='Specify output file')
parser.add_argument('--output', type=str, help='Specify output file')
args = parser.parse_args()
return args
|
def function[parse_cmd_arguments, parameter[]]:
constant[Parse command line arguments.]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[file]]]
call[name[parser].add_argument, parameter[constant[--static-endpoint]]]
call[name[parser].add_argument, parameter[constant[--add-tags]]]
call[name[parser].add_argument, parameter[constant[--exc-tags]]]
call[name[parser].add_argument, parameter[constant[--framework]]]
call[name[parser].add_argument, parameter[constant[--namespace]]]
call[name[parser].add_argument, parameter[constant[-o]]]
call[name[parser].add_argument, parameter[constant[--output]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[]]
return[name[args]]
|
keyword[def] identifier[parse_cmd_arguments] ():
literal[string]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ()
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] , identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] , identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] , identifier[help] = literal[string] )
identifier[args] = identifier[parser] . identifier[parse_args] ()
keyword[return] identifier[args]
|
def parse_cmd_arguments():
"""Parse command line arguments."""
parser = argparse.ArgumentParser()
parser.add_argument('file', type=str, help='Filename to be staticfied')
parser.add_argument('--static-endpoint', help='Static endpoint which is "static" by default')
parser.add_argument('--add-tags', type=str, help='Additional tags to staticfy')
parser.add_argument('--exc-tags', type=str, help='tags to exclude')
parser.add_argument('--framework', type=str, help='Web Framework: Defaults to Flask')
parser.add_argument('--namespace', type=str, help='String to prefix url with')
parser.add_argument('-o', type=str, help='Specify output file')
parser.add_argument('--output', type=str, help='Specify output file')
args = parser.parse_args()
return args
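
The parser can be exercised without a shell by passing argv explicitly; a self-contained replay of two of the flags:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('file', type=str)
parser.add_argument('--framework', type=str)
args = parser.parse_args(['index.html', '--framework', 'django'])
print(args.file, args.framework)  # index.html django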
|
def role_settings(self):
""" Filter out unwanted to show groups """
result = super(SharingView, self).role_settings()
uid = self.context.UID()
filter_func = lambda x: not any((
x["id"].endswith(uid),
x["id"] == "AuthenticatedUsers",
x["id"] == INTRANET_USERS_GROUP_ID,
))
return filter(filter_func, result)
|
def function[role_settings, parameter[self]]:
constant[ Filter out unwanted to show groups ]
variable[result] assign[=] call[call[name[super], parameter[name[SharingView], name[self]]].role_settings, parameter[]]
variable[uid] assign[=] call[name[self].context.UID, parameter[]]
variable[filter_func] assign[=] <ast.Lambda object at 0x7da1b14c4370>
return[call[name[filter], parameter[name[filter_func], name[result]]]]
|
keyword[def] identifier[role_settings] ( identifier[self] ):
literal[string]
identifier[result] = identifier[super] ( identifier[SharingView] , identifier[self] ). identifier[role_settings] ()
identifier[uid] = identifier[self] . identifier[context] . identifier[UID] ()
identifier[filter_func] = keyword[lambda] identifier[x] : keyword[not] identifier[any] ((
identifier[x] [ literal[string] ]. identifier[endswith] ( identifier[uid] ),
identifier[x] [ literal[string] ]== literal[string] ,
identifier[x] [ literal[string] ]== identifier[INTRANET_USERS_GROUP_ID] ,
))
keyword[return] identifier[filter] ( identifier[filter_func] , identifier[result] )
|
def role_settings(self):
""" Filter out unwanted to show groups """
result = super(SharingView, self).role_settings()
uid = self.context.UID()
filter_func = lambda x: not any((x['id'].endswith(uid), x['id'] == 'AuthenticatedUsers', x['id'] == INTRANET_USERS_GROUP_ID))
return filter(filter_func, result)
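
The same exclusion predicate applied to plain dictionaries, outside the Plone sharing view (the intranet-group clause is omitted for brevity):

uid = 'abc123'
entries = [{'id': 'group_abc123'},
           {'id': 'AuthenticatedUsers'},
           {'id': 'editors'}]
keep = [e for e in entries
        if not (e['id'].endswith(uid) or e['id'] == 'AuthenticatedUsers')]
print(keep)  # [{'id': 'editors'}]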
|
def _addsub_offset_array(self, other, op):
"""
Add or subtract array-like of DateOffset objects
Parameters
----------
other : Index, np.ndarray
object-dtype containing pd.DateOffset objects
op : {operator.add, operator.sub}
Returns
-------
result : same class as self
"""
assert op in [operator.add, operator.sub]
if len(other) == 1:
return op(self, other[0])
warnings.warn("Adding/subtracting array of DateOffsets to "
"{cls} not vectorized"
.format(cls=type(self).__name__), PerformanceWarning)
# For EA self.astype('O') returns a numpy array, not an Index
left = lib.values_from_object(self.astype('O'))
res_values = op(left, np.array(other))
kwargs = {}
if not is_period_dtype(self):
kwargs['freq'] = 'infer'
return self._from_sequence(res_values, **kwargs)
|
def function[_addsub_offset_array, parameter[self, other, op]]:
constant[
Add or subtract array-like of DateOffset objects
Parameters
----------
other : Index, np.ndarray
object-dtype containing pd.DateOffset objects
op : {operator.add, operator.sub}
Returns
-------
result : same class as self
]
assert[compare[name[op] in list[[<ast.Attribute object at 0x7da18ede7f10>, <ast.Attribute object at 0x7da18ede51e0>]]]]
if compare[call[name[len], parameter[name[other]]] equal[==] constant[1]] begin[:]
return[call[name[op], parameter[name[self], call[name[other]][constant[0]]]]]
call[name[warnings].warn, parameter[call[constant[Adding/subtracting array of DateOffsets to {cls} not vectorized].format, parameter[]], name[PerformanceWarning]]]
variable[left] assign[=] call[name[lib].values_from_object, parameter[call[name[self].astype, parameter[constant[O]]]]]
variable[res_values] assign[=] call[name[op], parameter[name[left], call[name[np].array, parameter[name[other]]]]]
variable[kwargs] assign[=] dictionary[[], []]
if <ast.UnaryOp object at 0x7da18ede46d0> begin[:]
call[name[kwargs]][constant[freq]] assign[=] constant[infer]
return[call[name[self]._from_sequence, parameter[name[res_values]]]]
|
keyword[def] identifier[_addsub_offset_array] ( identifier[self] , identifier[other] , identifier[op] ):
literal[string]
keyword[assert] identifier[op] keyword[in] [ identifier[operator] . identifier[add] , identifier[operator] . identifier[sub] ]
keyword[if] identifier[len] ( identifier[other] )== literal[int] :
keyword[return] identifier[op] ( identifier[self] , identifier[other] [ literal[int] ])
identifier[warnings] . identifier[warn] ( literal[string]
literal[string]
. identifier[format] ( identifier[cls] = identifier[type] ( identifier[self] ). identifier[__name__] ), identifier[PerformanceWarning] )
identifier[left] = identifier[lib] . identifier[values_from_object] ( identifier[self] . identifier[astype] ( literal[string] ))
identifier[res_values] = identifier[op] ( identifier[left] , identifier[np] . identifier[array] ( identifier[other] ))
identifier[kwargs] ={}
keyword[if] keyword[not] identifier[is_period_dtype] ( identifier[self] ):
identifier[kwargs] [ literal[string] ]= literal[string]
keyword[return] identifier[self] . identifier[_from_sequence] ( identifier[res_values] ,** identifier[kwargs] )
|
def _addsub_offset_array(self, other, op):
"""
Add or subtract array-like of DateOffset objects
Parameters
----------
other : Index, np.ndarray
object-dtype containing pd.DateOffset objects
op : {operator.add, operator.sub}
Returns
-------
result : same class as self
"""
assert op in [operator.add, operator.sub]
if len(other) == 1:
return op(self, other[0]) # depends on [control=['if'], data=[]]
warnings.warn('Adding/subtracting array of DateOffsets to {cls} not vectorized'.format(cls=type(self).__name__), PerformanceWarning)
# For EA self.astype('O') returns a numpy array, not an Index
left = lib.values_from_object(self.astype('O'))
res_values = op(left, np.array(other))
kwargs = {}
if not is_period_dtype(self):
kwargs['freq'] = 'infer' # depends on [control=['if'], data=[]]
return self._from_sequence(res_values, **kwargs)
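
What the object-dtype fallback computes, triggered directly; pandas emits a PerformanceWarning for this non-vectorized path:

import pandas as pd

idx = pd.DatetimeIndex(['2021-01-01', '2021-06-01'])
offsets = pd.Index([pd.DateOffset(months=1), pd.DateOffset(days=10)])
print(idx + offsets)
# DatetimeIndex(['2021-02-01', '2021-06-11'], dtype='datetime64[ns]', freq=None)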
|
def apply(self, manifest, db, force=False):
"""
Run the given patch to update the datamodel
:return: the list of post-scriptum returned by the fixes
"""
fixes_pss = []
if not force:
self.can_be_applied(manifest, db)
for fix in self.fixes:
print('\t%s...' % fix.__name__, flush=True, end='')
ps = fix(db)
if ps:
fixes_pss.append("%s: %s" % (fix.__name__, ps))
print(' Done !')
manifest.update(self.target_version,
reason='Upgrade from %s' % self.base_version)
return fixes_pss
|
def function[apply, parameter[self, manifest, db, force]]:
constant[
Run the given patch to update the datamodel
:return: the list of post-scriptum returned by the fixes
]
variable[fixes_pss] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da1b095f9a0> begin[:]
call[name[self].can_be_applied, parameter[name[manifest], name[db]]]
for taget[name[fix]] in starred[name[self].fixes] begin[:]
call[name[print], parameter[binary_operation[constant[ %s...] <ast.Mod object at 0x7da2590d6920> name[fix].__name__]]]
variable[ps] assign[=] call[name[fix], parameter[name[db]]]
if name[ps] begin[:]
call[name[fixes_pss].append, parameter[binary_operation[constant[%s: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b095ca30>, <ast.Name object at 0x7da1b095c730>]]]]]
call[name[print], parameter[constant[ Done !]]]
call[name[manifest].update, parameter[name[self].target_version]]
return[name[fixes_pss]]
|
keyword[def] identifier[apply] ( identifier[self] , identifier[manifest] , identifier[db] , identifier[force] = keyword[False] ):
literal[string]
identifier[fixes_pss] =[]
keyword[if] keyword[not] identifier[force] :
identifier[self] . identifier[can_be_applied] ( identifier[manifest] , identifier[db] )
keyword[for] identifier[fix] keyword[in] identifier[self] . identifier[fixes] :
identifier[print] ( literal[string] % identifier[fix] . identifier[__name__] , identifier[flush] = keyword[True] , identifier[end] = literal[string] )
identifier[ps] = identifier[fix] ( identifier[db] )
keyword[if] identifier[ps] :
identifier[fixes_pss] . identifier[append] ( literal[string] %( identifier[fix] . identifier[__name__] , identifier[ps] ))
identifier[print] ( literal[string] )
identifier[manifest] . identifier[update] ( identifier[self] . identifier[target_version] ,
identifier[reason] = literal[string] % identifier[self] . identifier[base_version] )
keyword[return] identifier[fixes_pss]
|
def apply(self, manifest, db, force=False):
"""
Run the given patch to update the datamodel
:return: the list of post-scriptum returned by the fixes
"""
fixes_pss = []
if not force:
self.can_be_applied(manifest, db) # depends on [control=['if'], data=[]]
for fix in self.fixes:
print('\t%s...' % fix.__name__, flush=True, end='')
ps = fix(db)
if ps:
fixes_pss.append('%s: %s' % (fix.__name__, ps)) # depends on [control=['if'], data=[]]
print(' Done !') # depends on [control=['for'], data=['fix']]
manifest.update(self.target_version, reason='Upgrade from %s' % self.base_version)
return fixes_pss
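
A stripped-down run of the fix loop with stand-in fixes; the manifest and version bookkeeping are left out:

def add_index(db):
    db['indexed'] = True
    return 'index added'

def noop(db):
    db.setdefault('touched', True)  # returns None, so no post-scriptum

db, notes = {}, []
for fix in (add_index, noop):
    ps = fix(db)
    if ps:
        notes.append('%s: %s' % (fix.__name__, ps))
print(notes)  # ['add_index: index added']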
|
def as_pil(ndarray, min_val=None, max_val=None):
"""
Converts an ndarray to a PIL image.
:param ndarray: The numpy ndarray to convert
:param min_val: The minimum pixel value in the image format
  :param max_val: The maximum pixel value in the image format
If min_val and max_val are not specified, attempts to
infer whether the image is in any of the common ranges:
[0, 1], [-1, 1], [0, 255]
This can be ambiguous, so it is better to specify if known.
"""
assert isinstance(ndarray, np.ndarray)
# rows x cols for grayscale image
# rows x cols x channels for color
assert ndarray.ndim in [2, 3]
if ndarray.ndim == 3:
channels = ndarray.shape[2]
# grayscale or RGB
assert channels in [1, 3]
actual_min = ndarray.min()
actual_max = ndarray.max()
if min_val is not None:
assert actual_min >= min_val
assert actual_max <= max_val
if np.issubdtype(ndarray.dtype, np.floating):
if min_val is None:
if actual_min < -1.:
raise ValueError("Unrecognized range")
if actual_min < 0:
min_val = -1.
else:
min_val = 0.
if max_val is None:
if actual_max > 255.:
raise ValueError("Unrecognized range")
if actual_max > 1.:
max_val = 255.
else:
max_val = 1.
ndarray = (ndarray - min_val)
value_range = max_val - min_val
ndarray *= (255. / value_range)
ndarray = np.cast['uint8'](ndarray)
elif 'int' in str(ndarray.dtype):
if min_val is not None:
assert min_val == 0
else:
assert actual_min >= 0.
if max_val is not None:
assert max_val == 255
else:
assert actual_max <= 255.
else:
raise ValueError("Unrecognized dtype")
out = Image.fromarray(ndarray)
return out
|
def function[as_pil, parameter[ndarray, min_val, max_val]]:
constant[
Converts an ndarray to a PIL image.
:param ndarray: The numpy ndarray to convert
:param min_val: The minimum pixel value in the image format
    :param max_val: The maximum pixel value in the image format
If min_val and max_val are not specified, attempts to
infer whether the image is in any of the common ranges:
[0, 1], [-1, 1], [0, 255]
This can be ambiguous, so it is better to specify if known.
]
assert[call[name[isinstance], parameter[name[ndarray], name[np].ndarray]]]
assert[compare[name[ndarray].ndim in list[[<ast.Constant object at 0x7da207f02770>, <ast.Constant object at 0x7da207f022c0>]]]]
if compare[name[ndarray].ndim equal[==] constant[3]] begin[:]
variable[channels] assign[=] call[name[ndarray].shape][constant[2]]
assert[compare[name[channels] in list[[<ast.Constant object at 0x7da207f03bb0>, <ast.Constant object at 0x7da207f00b50>]]]]
variable[actual_min] assign[=] call[name[ndarray].min, parameter[]]
variable[actual_max] assign[=] call[name[ndarray].max, parameter[]]
if compare[name[min_val] is_not constant[None]] begin[:]
assert[compare[name[actual_min] greater_or_equal[>=] name[min_val]]]
assert[compare[name[actual_max] less_or_equal[<=] name[max_val]]]
if call[name[np].issubdtype, parameter[name[ndarray].dtype, name[np].floating]] begin[:]
if compare[name[min_val] is constant[None]] begin[:]
if compare[name[actual_min] less[<] <ast.UnaryOp object at 0x7da207f028f0>] begin[:]
<ast.Raise object at 0x7da207f010c0>
if compare[name[actual_min] less[<] constant[0]] begin[:]
variable[min_val] assign[=] <ast.UnaryOp object at 0x7da207f01f90>
if compare[name[max_val] is constant[None]] begin[:]
if compare[name[actual_max] greater[>] constant[255.0]] begin[:]
<ast.Raise object at 0x7da207f00f70>
if compare[name[actual_max] greater[>] constant[1.0]] begin[:]
variable[max_val] assign[=] constant[255.0]
variable[ndarray] assign[=] binary_operation[name[ndarray] - name[min_val]]
variable[value_range] assign[=] binary_operation[name[max_val] - name[min_val]]
<ast.AugAssign object at 0x7da20e957f40>
variable[ndarray] assign[=] call[call[name[np].cast][constant[uint8]], parameter[name[ndarray]]]
variable[out] assign[=] call[name[Image].fromarray, parameter[name[ndarray]]]
return[name[out]]
|
keyword[def] identifier[as_pil] ( identifier[ndarray] , identifier[min_val] = keyword[None] , identifier[max_val] = keyword[None] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[ndarray] , identifier[np] . identifier[ndarray] )
keyword[assert] identifier[ndarray] . identifier[ndim] keyword[in] [ literal[int] , literal[int] ]
keyword[if] identifier[ndarray] . identifier[ndim] == literal[int] :
identifier[channels] = identifier[ndarray] . identifier[shape] [ literal[int] ]
keyword[assert] identifier[channels] keyword[in] [ literal[int] , literal[int] ]
identifier[actual_min] = identifier[ndarray] . identifier[min] ()
identifier[actual_max] = identifier[ndarray] . identifier[max] ()
keyword[if] identifier[min_val] keyword[is] keyword[not] keyword[None] :
keyword[assert] identifier[actual_min] >= identifier[min_val]
keyword[assert] identifier[actual_max] <= identifier[max_val]
keyword[if] identifier[np] . identifier[issubdtype] ( identifier[ndarray] . identifier[dtype] , identifier[np] . identifier[floating] ):
keyword[if] identifier[min_val] keyword[is] keyword[None] :
keyword[if] identifier[actual_min] <- literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[actual_min] < literal[int] :
identifier[min_val] =- literal[int]
keyword[else] :
identifier[min_val] = literal[int]
keyword[if] identifier[max_val] keyword[is] keyword[None] :
keyword[if] identifier[actual_max] > literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[actual_max] > literal[int] :
identifier[max_val] = literal[int]
keyword[else] :
identifier[max_val] = literal[int]
identifier[ndarray] =( identifier[ndarray] - identifier[min_val] )
identifier[value_range] = identifier[max_val] - identifier[min_val]
identifier[ndarray] *=( literal[int] / identifier[value_range] )
identifier[ndarray] = identifier[np] . identifier[cast] [ literal[string] ]( identifier[ndarray] )
keyword[elif] literal[string] keyword[in] identifier[str] ( identifier[ndarray] . identifier[dtype] ):
keyword[if] identifier[min_val] keyword[is] keyword[not] keyword[None] :
keyword[assert] identifier[min_val] == literal[int]
keyword[else] :
keyword[assert] identifier[actual_min] >= literal[int]
keyword[if] identifier[max_val] keyword[is] keyword[not] keyword[None] :
keyword[assert] identifier[max_val] == literal[int]
keyword[else] :
keyword[assert] identifier[actual_max] <= literal[int]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[out] = identifier[Image] . identifier[fromarray] ( identifier[ndarray] )
keyword[return] identifier[out]
|
def as_pil(ndarray, min_val=None, max_val=None):
"""
Converts an ndarray to a PIL image.
:param ndarray: The numpy ndarray to convert
:param min_val: The minimum pixel value in the image format
  :param max_val: The maximum pixel value in the image format
If min_val and max_val are not specified, attempts to
infer whether the image is in any of the common ranges:
[0, 1], [-1, 1], [0, 255]
This can be ambiguous, so it is better to specify if known.
"""
assert isinstance(ndarray, np.ndarray)
# rows x cols for grayscale image
# rows x cols x channels for color
assert ndarray.ndim in [2, 3]
if ndarray.ndim == 3:
channels = ndarray.shape[2]
# grayscale or RGB
assert channels in [1, 3] # depends on [control=['if'], data=[]]
actual_min = ndarray.min()
actual_max = ndarray.max()
if min_val is not None:
assert actual_min >= min_val
assert actual_max <= max_val # depends on [control=['if'], data=['min_val']]
if np.issubdtype(ndarray.dtype, np.floating):
if min_val is None:
if actual_min < -1.0:
raise ValueError('Unrecognized range') # depends on [control=['if'], data=[]]
if actual_min < 0:
min_val = -1.0 # depends on [control=['if'], data=[]]
else:
min_val = 0.0 # depends on [control=['if'], data=['min_val']]
if max_val is None:
if actual_max > 255.0:
raise ValueError('Unrecognized range') # depends on [control=['if'], data=[]]
if actual_max > 1.0:
max_val = 255.0 # depends on [control=['if'], data=[]]
else:
max_val = 1.0 # depends on [control=['if'], data=['max_val']]
ndarray = ndarray - min_val
value_range = max_val - min_val
ndarray *= 255.0 / value_range
ndarray = np.cast['uint8'](ndarray) # depends on [control=['if'], data=[]]
elif 'int' in str(ndarray.dtype):
if min_val is not None:
assert min_val == 0 # depends on [control=['if'], data=['min_val']]
else:
assert actual_min >= 0.0
if max_val is not None:
assert max_val == 255 # depends on [control=['if'], data=['max_val']]
else:
assert actual_max <= 255.0 # depends on [control=['if'], data=[]]
else:
raise ValueError('Unrecognized dtype')
out = Image.fromarray(ndarray)
return out
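
A minimal usage sketch for as_pil, assuming numpy and PIL are available and as_pil is in scope; the value ranges follow the inference rules in the docstring above:

import numpy as np

# Float data in [0, 1): the range is inferred as (0.0, 1.0) and scaled to uint8.
gray = np.random.uniform(0.0, 1.0, size=(32, 32))
img = as_pil(gray)
assert img.size == (32, 32)

# Being explicit avoids the ambiguity the docstring warns about,
# e.g. float data that happens to stay inside [0, 1] but means [0, 255].
img2 = as_pil(gray, min_val=0.0, max_val=1.0)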
|
def _execute_backend_on_spec(self):
"""Renders a source file into its final form."""
api_no_aliases_cache = None
for attr_key in dir(self.backend_module):
attr_value = getattr(self.backend_module, attr_key)
if (inspect.isclass(attr_value) and
issubclass(attr_value, Backend) and
not inspect.isabstract(attr_value)):
self._logger.info('Running backend: %s', attr_value.__name__)
backend = attr_value(self.build_path, self.backend_args)
if backend.preserve_aliases:
api = self.api
else:
if not api_no_aliases_cache:
api_no_aliases_cache = remove_aliases_from_api(self.api)
api = api_no_aliases_cache
try:
backend.generate(api)
except Exception:
# Wrap this exception so that it isn't thought of as a bug
# in the stone parser, but rather a bug in the backend.
# Remove the last char of the traceback b/c it's a newline.
raise BackendException(
attr_value.__name__, traceback.format_exc()[:-1])
|
def function[_execute_backend_on_spec, parameter[self]]:
constant[Renders a source file into its final form.]
variable[api_no_aliases_cache] assign[=] constant[None]
for taget[name[attr_key]] in starred[call[name[dir], parameter[name[self].backend_module]]] begin[:]
variable[attr_value] assign[=] call[name[getattr], parameter[name[self].backend_module, name[attr_key]]]
if <ast.BoolOp object at 0x7da18f58d540> begin[:]
call[name[self]._logger.info, parameter[constant[Running backend: %s], name[attr_value].__name__]]
variable[backend] assign[=] call[name[attr_value], parameter[name[self].build_path, name[self].backend_args]]
if name[backend].preserve_aliases begin[:]
variable[api] assign[=] name[self].api
<ast.Try object at 0x7da18f58eaa0>
|
keyword[def] identifier[_execute_backend_on_spec] ( identifier[self] ):
literal[string]
identifier[api_no_aliases_cache] = keyword[None]
keyword[for] identifier[attr_key] keyword[in] identifier[dir] ( identifier[self] . identifier[backend_module] ):
identifier[attr_value] = identifier[getattr] ( identifier[self] . identifier[backend_module] , identifier[attr_key] )
keyword[if] ( identifier[inspect] . identifier[isclass] ( identifier[attr_value] ) keyword[and]
identifier[issubclass] ( identifier[attr_value] , identifier[Backend] ) keyword[and]
keyword[not] identifier[inspect] . identifier[isabstract] ( identifier[attr_value] )):
identifier[self] . identifier[_logger] . identifier[info] ( literal[string] , identifier[attr_value] . identifier[__name__] )
identifier[backend] = identifier[attr_value] ( identifier[self] . identifier[build_path] , identifier[self] . identifier[backend_args] )
keyword[if] identifier[backend] . identifier[preserve_aliases] :
identifier[api] = identifier[self] . identifier[api]
keyword[else] :
keyword[if] keyword[not] identifier[api_no_aliases_cache] :
identifier[api_no_aliases_cache] = identifier[remove_aliases_from_api] ( identifier[self] . identifier[api] )
identifier[api] = identifier[api_no_aliases_cache]
keyword[try] :
identifier[backend] . identifier[generate] ( identifier[api] )
keyword[except] identifier[Exception] :
keyword[raise] identifier[BackendException] (
identifier[attr_value] . identifier[__name__] , identifier[traceback] . identifier[format_exc] ()[:- literal[int] ])
|
def _execute_backend_on_spec(self):
"""Renders a source file into its final form."""
api_no_aliases_cache = None
for attr_key in dir(self.backend_module):
attr_value = getattr(self.backend_module, attr_key)
if inspect.isclass(attr_value) and issubclass(attr_value, Backend) and (not inspect.isabstract(attr_value)):
self._logger.info('Running backend: %s', attr_value.__name__)
backend = attr_value(self.build_path, self.backend_args)
if backend.preserve_aliases:
api = self.api # depends on [control=['if'], data=[]]
else:
if not api_no_aliases_cache:
api_no_aliases_cache = remove_aliases_from_api(self.api) # depends on [control=['if'], data=[]]
api = api_no_aliases_cache
try:
backend.generate(api) # depends on [control=['try'], data=[]]
except Exception:
# Wrap this exception so that it isn't thought of as a bug
# in the stone parser, but rather a bug in the backend.
# Remove the last char of the traceback b/c it's a newline.
raise BackendException(attr_value.__name__, traceback.format_exc()[:-1]) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['attr_key']]
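
A standalone sketch of the reflection pattern used above: scan a module for concrete Backend subclasses and run them. The module and class here are illustrative, not part of the stone codebase:

import inspect
import sys

class Backend:
    """Stand-in base class; the real one is abstract."""

class PrintBackend(Backend):
    def generate(self, api):
        print('generating docs for', api)

module = sys.modules[__name__]   # stands in for self.backend_module
for attr_key in dir(module):
    attr_value = getattr(module, attr_key)
    # the stand-in base is not abstract, so exclude it explicitly
    if (inspect.isclass(attr_value) and issubclass(attr_value, Backend)
            and not inspect.isabstract(attr_value) and attr_value is not Backend):
        attr_value().generate('example.stone')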
|
def do_disable():
"""
Comment any lines that start with import in the .pth file
"""
from vext import vext_pth
try:
_lines = []
with open(vext_pth, mode='r') as f:
for line in f.readlines():
if not line.startswith('#') and line.startswith('import '):
_lines.append('# %s' % line)
else:
_lines.append(line)
try:
os.unlink('%s.tmp' % vext_pth)
except:
pass
with open('%s.tmp' % vext_pth, mode='w+') as f:
f.writelines(_lines)
try:
os.unlink('%s~' % vext_pth)
except:
pass
os.rename(vext_pth, '%s~' % vext_pth)
os.rename('%s.tmp' % vext_pth, vext_pth)
except IOError as e:
if e.errno == 2: # file didn't exist == disabled
return
|
def function[do_disable, parameter[]]:
constant[
Comment any lines that start with import in the .pth file
]
from relative_module[vext] import module[vext_pth]
<ast.Try object at 0x7da20c7c8a30>
|
keyword[def] identifier[do_disable] ():
literal[string]
keyword[from] identifier[vext] keyword[import] identifier[vext_pth]
keyword[try] :
identifier[_lines] =[]
keyword[with] identifier[open] ( identifier[vext_pth] , identifier[mode] = literal[string] ) keyword[as] identifier[f] :
keyword[for] identifier[line] keyword[in] identifier[f] . identifier[readlines] ():
keyword[if] keyword[not] identifier[line] . identifier[startswith] ( literal[string] ) keyword[and] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[_lines] . identifier[append] ( literal[string] % identifier[line] )
keyword[else] :
identifier[_lines] . identifier[append] ( identifier[line] )
keyword[try] :
identifier[os] . identifier[unlink] ( literal[string] % identifier[vext_pth] )
keyword[except] :
keyword[pass]
keyword[with] identifier[open] ( literal[string] % identifier[vext_pth] , identifier[mode] = literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[writelines] ( identifier[_lines] )
keyword[try] :
identifier[os] . identifier[unlink] ( literal[string] % identifier[vext_pth] )
keyword[except] :
keyword[pass]
identifier[os] . identifier[rename] ( identifier[vext_pth] , literal[string] % identifier[vext_pth] )
identifier[os] . identifier[rename] ( literal[string] % identifier[vext_pth] , identifier[vext_pth] )
keyword[except] identifier[IOError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[errno] == literal[int] :
keyword[return]
|
def do_disable():
"""
Comment any lines that start with import in the .pth file
"""
from vext import vext_pth
try:
_lines = []
with open(vext_pth, mode='r') as f:
for line in f.readlines():
if not line.startswith('#') and line.startswith('import '):
_lines.append('# %s' % line) # depends on [control=['if'], data=[]]
else:
_lines.append(line) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['f']]
try:
os.unlink('%s.tmp' % vext_pth) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
with open('%s.tmp' % vext_pth, mode='w+') as f:
f.writelines(_lines) # depends on [control=['with'], data=['f']]
try:
os.unlink('%s~' % vext_pth) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
os.rename(vext_pth, '%s~' % vext_pth)
os.rename('%s.tmp' % vext_pth, vext_pth) # depends on [control=['try'], data=[]]
except IOError as e:
if e.errno == 2: # file didn't exist == disabled
return # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']]
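
The heart of do_disable is the line transform that comments out active import lines. A self-contained sketch of just that transform on sample .pth content:

lines = [
    'import vext.gatekeeper  # activate vext',
    '# import already_disabled',
    '/some/site-packages/path',
]
out = []
for line in lines:
    # comment out active import lines, exactly as the loop above does
    if not line.startswith('#') and line.startswith('import '):
        out.append('# %s' % line)
    else:
        out.append(line)
assert out == ['# import vext.gatekeeper  # activate vext',
               '# import already_disabled',
               '/some/site-packages/path']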
|
def determine_hooks(self, controller=None):
'''
        Determines the hooks to be run and the order in which they run.
:param controller: If specified, includes hooks for a specific
controller.
'''
controller_hooks = []
if controller:
controller_hooks = _cfg(controller).get('hooks', [])
if controller_hooks:
return list(
sorted(
chain(controller_hooks, self.hooks),
key=operator.attrgetter('priority')
)
)
return self.hooks
|
def function[determine_hooks, parameter[self, controller]]:
constant[
    Determines the hooks to be run and the order in which they run.
:param controller: If specified, includes hooks for a specific
controller.
]
variable[controller_hooks] assign[=] list[[]]
if name[controller] begin[:]
variable[controller_hooks] assign[=] call[call[name[_cfg], parameter[name[controller]]].get, parameter[constant[hooks], list[[]]]]
if name[controller_hooks] begin[:]
return[call[name[list], parameter[call[name[sorted], parameter[call[name[chain], parameter[name[controller_hooks], name[self].hooks]]]]]]]
return[name[self].hooks]
|
keyword[def] identifier[determine_hooks] ( identifier[self] , identifier[controller] = keyword[None] ):
literal[string]
identifier[controller_hooks] =[]
keyword[if] identifier[controller] :
identifier[controller_hooks] = identifier[_cfg] ( identifier[controller] ). identifier[get] ( literal[string] ,[])
keyword[if] identifier[controller_hooks] :
keyword[return] identifier[list] (
identifier[sorted] (
identifier[chain] ( identifier[controller_hooks] , identifier[self] . identifier[hooks] ),
identifier[key] = identifier[operator] . identifier[attrgetter] ( literal[string] )
)
)
keyword[return] identifier[self] . identifier[hooks]
|
def determine_hooks(self, controller=None):
"""
    Determines the hooks to be run and the order in which they run.
:param controller: If specified, includes hooks for a specific
controller.
"""
controller_hooks = []
if controller:
controller_hooks = _cfg(controller).get('hooks', [])
if controller_hooks:
return list(sorted(chain(controller_hooks, self.hooks), key=operator.attrgetter('priority'))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return self.hooks
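
The merge in determine_hooks is a priority-ordered sort over two hook sources. A minimal sketch with a stand-in hook type (the real hooks come from the web framework's config):

import operator
from collections import namedtuple
from itertools import chain

Hook = namedtuple('Hook', 'name priority')
controller_hooks = [Hook('auth', 1), Hook('audit', 9)]
app_hooks = [Hook('tx', 5)]

merged = list(sorted(chain(controller_hooks, app_hooks),
                     key=operator.attrgetter('priority')))
assert [h.name for h in merged] == ['auth', 'tx', 'audit']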
|
def _unquote_or_none(s: Optional[str]) -> Optional[bytes]: # noqa: F811
"""None-safe wrapper around url_unescape to handle unmatched optional
groups correctly.
Note that args are passed as bytes so the handler can decide what
encoding to use.
"""
if s is None:
return s
return url_unescape(s, encoding=None, plus=False)
|
def function[_unquote_or_none, parameter[s]]:
constant[None-safe wrapper around url_unescape to handle unmatched optional
groups correctly.
Note that args are passed as bytes so the handler can decide what
encoding to use.
]
if compare[name[s] is constant[None]] begin[:]
return[name[s]]
return[call[name[url_unescape], parameter[name[s]]]]
|
keyword[def] identifier[_unquote_or_none] ( identifier[s] : identifier[Optional] [ identifier[str] ])-> identifier[Optional] [ identifier[bytes] ]:
literal[string]
keyword[if] identifier[s] keyword[is] keyword[None] :
keyword[return] identifier[s]
keyword[return] identifier[url_unescape] ( identifier[s] , identifier[encoding] = keyword[None] , identifier[plus] = keyword[False] )
|
def _unquote_or_none(s: Optional[str]) -> Optional[bytes]: # noqa: F811
'None-safe wrapper around url_unescape to handle unmatched optional\n groups correctly.\n\n Note that args are passed as bytes so the handler can decide what\n encoding to use.\n '
if s is None:
return s # depends on [control=['if'], data=['s']]
return url_unescape(s, encoding=None, plus=False)
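
With encoding=None, Tornado's url_unescape returns bytes rather than str, which is why the wrapper above is typed Optional[bytes]. A quick demonstration, assuming tornado is installed and _unquote_or_none is in scope:

from tornado.escape import url_unescape

assert url_unescape('a%2Fb', encoding=None, plus=False) == b'a/b'
# The None passthrough exists for unmatched optional regex groups:
assert _unquote_or_none(None) is None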
|
def create_value(self, label, vtype, lang=None, description=None, unit=None):
"""Create a value on this Point. Values are descriptions in semantic metadata of the individual data items
you are sharing (or expecting to receive, if this Point is a control). This will help others to search for
your feed or control. If a value with the given label (and language) already exists, its fields are updated
with the provided ones (or unset, if None).
Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
containing the error if the infrastructure detects a problem
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure
`label` (mandatory) (string) the label for this value e.g. "Temperature". The label must be unique for this
Point. E.g. You can't have two data values called "Volts" but you can have "volts1" and "volts2".
`lang` (optional) (string) The two-character ISO 639-1 language code to use for the description. None means use
the default language for your agent.
See [Config](./Config.m.html#IoticAgent.IOT.Config.Config.__init__)
`vtype` (mandatory) (xsd:datatype) the datatype of the data you are describing, e.g. dateTime
            We recommend you use an Iotic Labs-defined constant from
[Datatypes](../Datatypes.m.html#IoticAgent.Datatypes.Datatypes) such as:
[DECIMAL](../Datatypes.m.html#IoticAgent.Datatypes.DECIMAL)
`description` (optional) (string) The longer descriptive text for this value.
`unit` (optional) (ontology url) The url of the ontological description of the unit of your value
We recommend you use a constant from [Units](../Units.m.html#IoticAgent.Units.Units), such as:
[CELSIUS](../Units.m.html#IoticAgent.Units.Units.CELSIUS)
#!python
# example with no units as time is unit-less
my_feed.create_value("timestamp",
Datatypes.DATETIME,
"en",
"time of reading")
# example with a unit from the Units class
my_feed.create_value("temperature",
Datatypes.DECIMAL,
"en",
"Fish-tank temperature in celsius",
Units.CELSIUS)
"""
evt = self._client._request_point_value_create(self.__lid, self.__pid, self._type, label, vtype, lang,
description, unit)
self._client._wait_and_except_if_failed(evt)
|
def function[create_value, parameter[self, label, vtype, lang, description, unit]]:
constant[Create a value on this Point. Values are descriptions in semantic metadata of the individual data items
you are sharing (or expecting to receive, if this Point is a control). This will help others to search for
your feed or control. If a value with the given label (and language) already exists, its fields are updated
with the provided ones (or unset, if None).
Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
containing the error if the infrastructure detects a problem
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure
`label` (mandatory) (string) the label for this value e.g. "Temperature". The label must be unique for this
Point. E.g. You can't have two data values called "Volts" but you can have "volts1" and "volts2".
`lang` (optional) (string) The two-character ISO 639-1 language code to use for the description. None means use
the default language for your agent.
See [Config](./Config.m.html#IoticAgent.IOT.Config.Config.__init__)
`vtype` (mandatory) (xsd:datatype) the datatype of the data you are describing, e.g. dateTime
        We recommend you use an Iotic Labs-defined constant from
[Datatypes](../Datatypes.m.html#IoticAgent.Datatypes.Datatypes) such as:
[DECIMAL](../Datatypes.m.html#IoticAgent.Datatypes.DECIMAL)
`description` (optional) (string) The longer descriptive text for this value.
`unit` (optional) (ontology url) The url of the ontological description of the unit of your value
We recommend you use a constant from [Units](../Units.m.html#IoticAgent.Units.Units), such as:
[CELSIUS](../Units.m.html#IoticAgent.Units.Units.CELSIUS)
#!python
# example with no units as time is unit-less
my_feed.create_value("timestamp",
Datatypes.DATETIME,
"en",
"time of reading")
# example with a unit from the Units class
my_feed.create_value("temperature",
Datatypes.DECIMAL,
"en",
"Fish-tank temperature in celsius",
Units.CELSIUS)
]
variable[evt] assign[=] call[name[self]._client._request_point_value_create, parameter[name[self].__lid, name[self].__pid, name[self]._type, name[label], name[vtype], name[lang], name[description], name[unit]]]
call[name[self]._client._wait_and_except_if_failed, parameter[name[evt]]]
|
keyword[def] identifier[create_value] ( identifier[self] , identifier[label] , identifier[vtype] , identifier[lang] = keyword[None] , identifier[description] = keyword[None] , identifier[unit] = keyword[None] ):
literal[string]
identifier[evt] = identifier[self] . identifier[_client] . identifier[_request_point_value_create] ( identifier[self] . identifier[__lid] , identifier[self] . identifier[__pid] , identifier[self] . identifier[_type] , identifier[label] , identifier[vtype] , identifier[lang] ,
identifier[description] , identifier[unit] )
identifier[self] . identifier[_client] . identifier[_wait_and_except_if_failed] ( identifier[evt] )
|
def create_value(self, label, vtype, lang=None, description=None, unit=None):
"""Create a value on this Point. Values are descriptions in semantic metadata of the individual data items
you are sharing (or expecting to receive, if this Point is a control). This will help others to search for
your feed or control. If a value with the given label (and language) already exists, its fields are updated
with the provided ones (or unset, if None).
Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
containing the error if the infrastructure detects a problem
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure
`label` (mandatory) (string) the label for this value e.g. "Temperature". The label must be unique for this
Point. E.g. You can't have two data values called "Volts" but you can have "volts1" and "volts2".
`lang` (optional) (string) The two-character ISO 639-1 language code to use for the description. None means use
the default language for your agent.
See [Config](./Config.m.html#IoticAgent.IOT.Config.Config.__init__)
`vtype` (mandatory) (xsd:datatype) the datatype of the data you are describing, e.g. dateTime
        We recommend you use an Iotic Labs-defined constant from
[Datatypes](../Datatypes.m.html#IoticAgent.Datatypes.Datatypes) such as:
[DECIMAL](../Datatypes.m.html#IoticAgent.Datatypes.DECIMAL)
`description` (optional) (string) The longer descriptive text for this value.
`unit` (optional) (ontology url) The url of the ontological description of the unit of your value
We recommend you use a constant from [Units](../Units.m.html#IoticAgent.Units.Units), such as:
[CELSIUS](../Units.m.html#IoticAgent.Units.Units.CELSIUS)
#!python
# example with no units as time is unit-less
my_feed.create_value("timestamp",
Datatypes.DATETIME,
"en",
"time of reading")
# example with a unit from the Units class
my_feed.create_value("temperature",
Datatypes.DECIMAL,
"en",
"Fish-tank temperature in celsius",
Units.CELSIUS)
"""
evt = self._client._request_point_value_create(self.__lid, self.__pid, self._type, label, vtype, lang, description, unit)
self._client._wait_and_except_if_failed(evt)
|
def update_state(self,
slots: Union[List[Tuple[str, Any]], Dict[str, Any]]) -> 'Tracker':
"""
Updates dialogue state with new ``slots``, calculates features.
Returns:
Tracker: ."""
pass
|
def function[update_state, parameter[self, slots]]:
constant[
Updates dialogue state with new ``slots``, calculates features.
Returns:
Tracker: .]
pass
|
keyword[def] identifier[update_state] ( identifier[self] ,
identifier[slots] : identifier[Union] [ identifier[List] [ identifier[Tuple] [ identifier[str] , identifier[Any] ]], identifier[Dict] [ identifier[str] , identifier[Any] ]])-> literal[string] :
literal[string]
keyword[pass]
|
def update_state(self, slots: Union[List[Tuple[str, Any]], Dict[str, Any]]) -> 'Tracker':
"""
Updates dialogue state with new ``slots``, calculates features.
Returns:
Tracker: ."""
pass
|
def _handshake(self, conn, addr):
'''
Ensures that the client receives the AES key.
'''
# waiting for the magic request message
msg = conn.recv(len(MAGIC_REQ))
log.debug('Received message %s from %s', msg, addr)
if msg != MAGIC_REQ:
log.warning('%s is not a valid REQ message from %s', msg, addr)
return
log.debug('Sending the private key')
conn.send(self.__key)
# wait for explicit ACK
log.debug('Waiting for the client to confirm')
msg = conn.recv(len(MAGIC_ACK))
if msg != MAGIC_ACK:
return
log.debug('Sending the signature key')
conn.send(self.__sgn)
# wait for explicit ACK
log.debug('Waiting for the client to confirm')
msg = conn.recv(len(MAGIC_ACK))
if msg != MAGIC_ACK:
return
log.info('%s is now authenticated', addr)
self.keep_alive(conn)
|
def function[_handshake, parameter[self, conn, addr]]:
constant[
Ensures that the client receives the AES key.
]
variable[msg] assign[=] call[name[conn].recv, parameter[call[name[len], parameter[name[MAGIC_REQ]]]]]
call[name[log].debug, parameter[constant[Received message %s from %s], name[msg], name[addr]]]
if compare[name[msg] not_equal[!=] name[MAGIC_REQ]] begin[:]
call[name[log].warning, parameter[constant[%s is not a valid REQ message from %s], name[msg], name[addr]]]
return[None]
call[name[log].debug, parameter[constant[Sending the private key]]]
call[name[conn].send, parameter[name[self].__key]]
call[name[log].debug, parameter[constant[Waiting for the client to confirm]]]
variable[msg] assign[=] call[name[conn].recv, parameter[call[name[len], parameter[name[MAGIC_ACK]]]]]
if compare[name[msg] not_equal[!=] name[MAGIC_ACK]] begin[:]
return[None]
call[name[log].debug, parameter[constant[Sending the signature key]]]
call[name[conn].send, parameter[name[self].__sgn]]
call[name[log].debug, parameter[constant[Waiting for the client to confirm]]]
variable[msg] assign[=] call[name[conn].recv, parameter[call[name[len], parameter[name[MAGIC_ACK]]]]]
if compare[name[msg] not_equal[!=] name[MAGIC_ACK]] begin[:]
return[None]
call[name[log].info, parameter[constant[%s is now authenticated], name[addr]]]
call[name[self].keep_alive, parameter[name[conn]]]
|
keyword[def] identifier[_handshake] ( identifier[self] , identifier[conn] , identifier[addr] ):
literal[string]
identifier[msg] = identifier[conn] . identifier[recv] ( identifier[len] ( identifier[MAGIC_REQ] ))
identifier[log] . identifier[debug] ( literal[string] , identifier[msg] , identifier[addr] )
keyword[if] identifier[msg] != identifier[MAGIC_REQ] :
identifier[log] . identifier[warning] ( literal[string] , identifier[msg] , identifier[addr] )
keyword[return]
identifier[log] . identifier[debug] ( literal[string] )
identifier[conn] . identifier[send] ( identifier[self] . identifier[__key] )
identifier[log] . identifier[debug] ( literal[string] )
identifier[msg] = identifier[conn] . identifier[recv] ( identifier[len] ( identifier[MAGIC_ACK] ))
keyword[if] identifier[msg] != identifier[MAGIC_ACK] :
keyword[return]
identifier[log] . identifier[debug] ( literal[string] )
identifier[conn] . identifier[send] ( identifier[self] . identifier[__sgn] )
identifier[log] . identifier[debug] ( literal[string] )
identifier[msg] = identifier[conn] . identifier[recv] ( identifier[len] ( identifier[MAGIC_ACK] ))
keyword[if] identifier[msg] != identifier[MAGIC_ACK] :
keyword[return]
identifier[log] . identifier[info] ( literal[string] , identifier[addr] )
identifier[self] . identifier[keep_alive] ( identifier[conn] )
|
def _handshake(self, conn, addr):
"""
Ensures that the client receives the AES key.
"""
# waiting for the magic request message
msg = conn.recv(len(MAGIC_REQ))
log.debug('Received message %s from %s', msg, addr)
if msg != MAGIC_REQ:
log.warning('%s is not a valid REQ message from %s', msg, addr)
return # depends on [control=['if'], data=['msg']]
log.debug('Sending the private key')
conn.send(self.__key)
# wait for explicit ACK
log.debug('Waiting for the client to confirm')
msg = conn.recv(len(MAGIC_ACK))
if msg != MAGIC_ACK:
return # depends on [control=['if'], data=[]]
log.debug('Sending the signature key')
conn.send(self.__sgn)
# wait for explicit ACK
log.debug('Waiting for the client to confirm')
msg = conn.recv(len(MAGIC_ACK))
if msg != MAGIC_ACK:
return # depends on [control=['if'], data=[]]
log.info('%s is now authenticated', addr)
self.keep_alive(conn)
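
A sketch of the client side of the handshake above. MAGIC_REQ, MAGIC_ACK and the key lengths are assumptions for the sketch; the real constants live elsewhere in the package:

import socket

MAGIC_REQ = b'REQ'   # assumed wire values
MAGIC_ACK = b'ACK'
KEY_LEN = 32         # assumed AES key length
SGN_LEN = 32         # assumed signature key length

def client_handshake(host, port):
    sock = socket.create_connection((host, port))
    sock.sendall(MAGIC_REQ)      # ask the server for the keys
    key = sock.recv(KEY_LEN)     # AES key (messages are tiny, one recv suffices here)
    sock.sendall(MAGIC_ACK)      # explicit ACK, as the server expects
    sgn = sock.recv(SGN_LEN)     # signature key
    sock.sendall(MAGIC_ACK)      # second ACK; the server then keeps the connection alive
    return sock, key, sgn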
|
def device_from_request(request):
"""
    Determines the device name from the request by first looking for an
    overriding cookie and, if not found, then matching the user agent.
Used at both the template level for choosing the template to load and
also at the cache level as a cache key prefix.
"""
from yacms.conf import settings
try:
# If a device was set via cookie, match available devices.
for (device, _) in settings.DEVICE_USER_AGENTS:
if device == request.COOKIES["yacms-device"]:
return device
except KeyError:
# If a device wasn't set via cookie, match user agent.
try:
user_agent = request.META["HTTP_USER_AGENT"].lower()
except KeyError:
pass
else:
try:
user_agent = user_agent.decode("utf-8")
for (device, ua_strings) in settings.DEVICE_USER_AGENTS:
for ua_string in ua_strings:
if ua_string.lower() in user_agent:
return device
except (AttributeError, UnicodeDecodeError, UnicodeEncodeError):
pass
return ""
|
def function[device_from_request, parameter[request]]:
constant[
    Determines the device name from the request by first looking for an
    overriding cookie and, if not found, then matching the user agent.
Used at both the template level for choosing the template to load and
also at the cache level as a cache key prefix.
]
from relative_module[yacms.conf] import module[settings]
<ast.Try object at 0x7da1b13ce740>
return[constant[]]
|
keyword[def] identifier[device_from_request] ( identifier[request] ):
literal[string]
keyword[from] identifier[yacms] . identifier[conf] keyword[import] identifier[settings]
keyword[try] :
keyword[for] ( identifier[device] , identifier[_] ) keyword[in] identifier[settings] . identifier[DEVICE_USER_AGENTS] :
keyword[if] identifier[device] == identifier[request] . identifier[COOKIES] [ literal[string] ]:
keyword[return] identifier[device]
keyword[except] identifier[KeyError] :
keyword[try] :
identifier[user_agent] = identifier[request] . identifier[META] [ literal[string] ]. identifier[lower] ()
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[else] :
keyword[try] :
identifier[user_agent] = identifier[user_agent] . identifier[decode] ( literal[string] )
keyword[for] ( identifier[device] , identifier[ua_strings] ) keyword[in] identifier[settings] . identifier[DEVICE_USER_AGENTS] :
keyword[for] identifier[ua_string] keyword[in] identifier[ua_strings] :
keyword[if] identifier[ua_string] . identifier[lower] () keyword[in] identifier[user_agent] :
keyword[return] identifier[device]
keyword[except] ( identifier[AttributeError] , identifier[UnicodeDecodeError] , identifier[UnicodeEncodeError] ):
keyword[pass]
keyword[return] literal[string]
|
def device_from_request(request):
"""
    Determines the device name from the request by first looking for an
    overriding cookie and, if not found, then matching the user agent.
Used at both the template level for choosing the template to load and
also at the cache level as a cache key prefix.
"""
from yacms.conf import settings
try:
# If a device was set via cookie, match available devices.
for (device, _) in settings.DEVICE_USER_AGENTS:
if device == request.COOKIES['yacms-device']:
return device # depends on [control=['if'], data=['device']] # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]]
except KeyError:
# If a device wasn't set via cookie, match user agent.
try:
user_agent = request.META['HTTP_USER_AGENT'].lower() # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
else:
try:
user_agent = user_agent.decode('utf-8')
for (device, ua_strings) in settings.DEVICE_USER_AGENTS:
for ua_string in ua_strings:
if ua_string.lower() in user_agent:
return device # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ua_string']] # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]]
except (AttributeError, UnicodeDecodeError, UnicodeEncodeError):
pass # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
return ''
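
The settings consulted above pair a device name with user-agent substrings. A plausible shape for DEVICE_USER_AGENTS and the matching it drives (the concrete default values belong to yacms' settings, so these are illustrative):

DEVICE_USER_AGENTS = (
    ('mobile', ('android', 'iphone', 'blackberry')),
    ('tablet', ('ipad',)),
)

user_agent = 'Mozilla/5.0 (iPhone; CPU iPhone OS 15_0 like Mac OS X)'.lower()
device = ''
for name, ua_strings in DEVICE_USER_AGENTS:
    if any(s in user_agent for s in ua_strings):
        device = name
        break
assert device == 'mobile'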
|
def avg(self, vars_list: List[str]) -> 'TensorFluent':
'''Returns the TensorFluent for the avg aggregation function.
Args:
vars_list: The list of variables to be aggregated over.
Returns:
A TensorFluent wrapping the avg aggregation function.
'''
operand = self
if operand.dtype == tf.bool:
operand = operand.cast(tf.float32)
return self._aggregation_op(tf.reduce_mean, operand, vars_list)
|
def function[avg, parameter[self, vars_list]]:
constant[Returns the TensorFluent for the avg aggregation function.
Args:
vars_list: The list of variables to be aggregated over.
Returns:
A TensorFluent wrapping the avg aggregation function.
]
variable[operand] assign[=] name[self]
if compare[name[operand].dtype equal[==] name[tf].bool] begin[:]
variable[operand] assign[=] call[name[operand].cast, parameter[name[tf].float32]]
return[call[name[self]._aggregation_op, parameter[name[tf].reduce_mean, name[operand], name[vars_list]]]]
|
keyword[def] identifier[avg] ( identifier[self] , identifier[vars_list] : identifier[List] [ identifier[str] ])-> literal[string] :
literal[string]
identifier[operand] = identifier[self]
keyword[if] identifier[operand] . identifier[dtype] == identifier[tf] . identifier[bool] :
identifier[operand] = identifier[operand] . identifier[cast] ( identifier[tf] . identifier[float32] )
keyword[return] identifier[self] . identifier[_aggregation_op] ( identifier[tf] . identifier[reduce_mean] , identifier[operand] , identifier[vars_list] )
|
def avg(self, vars_list: List[str]) -> 'TensorFluent':
"""Returns the TensorFluent for the avg aggregation function.
Args:
vars_list: The list of variables to be aggregated over.
Returns:
A TensorFluent wrapping the avg aggregation function.
"""
operand = self
if operand.dtype == tf.bool:
operand = operand.cast(tf.float32) # depends on [control=['if'], data=[]]
return self._aggregation_op(tf.reduce_mean, operand, vars_list)
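
The cast in avg exists because tf.reduce_mean is not defined for boolean tensors. A standalone TensorFlow sketch of the same aggregation, using plain tf ops rather than the TensorFluent wrapper:

import tensorflow as tf

flags = tf.constant([True, False, True, True])
# tf.reduce_mean(flags) would be rejected for dtype bool,
# so cast to float32 first, exactly as avg() does.
mean = tf.reduce_mean(tf.cast(flags, tf.float32))  # 0.75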
|
def add_inputs(self, es):
"""
returns the list of state pairs (stateF, stateB) obtained by adding
inputs to both forward (stateF) and backward (stateB) RNNs.
@param es: a list of Expression
see also transduce(xs)
.transduce(xs) is different from .add_inputs(xs) in the following way:
.add_inputs(xs) returns a list of RNNState pairs. RNNState objects can be
queried in various ways. In particular, they allow access to the previous
state, as well as to the state-vectors (h() and s() )
.transduce(xs) returns a list of Expression. These are just the output
expressions. For many cases, this suffices.
transduce is much more memory efficient than add_inputs.
"""
for e in es:
ensure_freshness(e)
for (fb,bb) in self.builder_layers[:-1]:
fs = fb.initial_state().transduce(es)
bs = bb.initial_state().transduce(reversed(es))
es = [concatenate([f,b]) for f,b in zip(fs, reversed(bs))]
(fb,bb) = self.builder_layers[-1]
fs = fb.initial_state().add_inputs(es)
bs = bb.initial_state().add_inputs(reversed(es))
return [(f,b) for f,b in zip(fs, reversed(bs))]
|
def function[add_inputs, parameter[self, es]]:
constant[
returns the list of state pairs (stateF, stateB) obtained by adding
inputs to both forward (stateF) and backward (stateB) RNNs.
@param es: a list of Expression
see also transduce(xs)
.transduce(xs) is different from .add_inputs(xs) in the following way:
.add_inputs(xs) returns a list of RNNState pairs. RNNState objects can be
queried in various ways. In particular, they allow access to the previous
state, as well as to the state-vectors (h() and s() )
.transduce(xs) returns a list of Expression. These are just the output
expressions. For many cases, this suffices.
transduce is much more memory efficient than add_inputs.
]
for taget[name[e]] in starred[name[es]] begin[:]
call[name[ensure_freshness], parameter[name[e]]]
for taget[tuple[[<ast.Name object at 0x7da18ede44f0>, <ast.Name object at 0x7da18ede7100>]]] in starred[call[name[self].builder_layers][<ast.Slice object at 0x7da18ede6c20>]] begin[:]
variable[fs] assign[=] call[call[name[fb].initial_state, parameter[]].transduce, parameter[name[es]]]
variable[bs] assign[=] call[call[name[bb].initial_state, parameter[]].transduce, parameter[call[name[reversed], parameter[name[es]]]]]
variable[es] assign[=] <ast.ListComp object at 0x7da18ede5fc0>
<ast.Tuple object at 0x7da18ede7010> assign[=] call[name[self].builder_layers][<ast.UnaryOp object at 0x7da18ede68c0>]
variable[fs] assign[=] call[call[name[fb].initial_state, parameter[]].add_inputs, parameter[name[es]]]
variable[bs] assign[=] call[call[name[bb].initial_state, parameter[]].add_inputs, parameter[call[name[reversed], parameter[name[es]]]]]
return[<ast.ListComp object at 0x7da18ede4280>]
|
keyword[def] identifier[add_inputs] ( identifier[self] , identifier[es] ):
literal[string]
keyword[for] identifier[e] keyword[in] identifier[es] :
identifier[ensure_freshness] ( identifier[e] )
keyword[for] ( identifier[fb] , identifier[bb] ) keyword[in] identifier[self] . identifier[builder_layers] [:- literal[int] ]:
identifier[fs] = identifier[fb] . identifier[initial_state] (). identifier[transduce] ( identifier[es] )
identifier[bs] = identifier[bb] . identifier[initial_state] (). identifier[transduce] ( identifier[reversed] ( identifier[es] ))
identifier[es] =[ identifier[concatenate] ([ identifier[f] , identifier[b] ]) keyword[for] identifier[f] , identifier[b] keyword[in] identifier[zip] ( identifier[fs] , identifier[reversed] ( identifier[bs] ))]
( identifier[fb] , identifier[bb] )= identifier[self] . identifier[builder_layers] [- literal[int] ]
identifier[fs] = identifier[fb] . identifier[initial_state] (). identifier[add_inputs] ( identifier[es] )
identifier[bs] = identifier[bb] . identifier[initial_state] (). identifier[add_inputs] ( identifier[reversed] ( identifier[es] ))
keyword[return] [( identifier[f] , identifier[b] ) keyword[for] identifier[f] , identifier[b] keyword[in] identifier[zip] ( identifier[fs] , identifier[reversed] ( identifier[bs] ))]
|
def add_inputs(self, es):
"""
returns the list of state pairs (stateF, stateB) obtained by adding
inputs to both forward (stateF) and backward (stateB) RNNs.
@param es: a list of Expression
see also transduce(xs)
.transduce(xs) is different from .add_inputs(xs) in the following way:
.add_inputs(xs) returns a list of RNNState pairs. RNNState objects can be
queried in various ways. In particular, they allow access to the previous
state, as well as to the state-vectors (h() and s() )
.transduce(xs) returns a list of Expression. These are just the output
expressions. For many cases, this suffices.
transduce is much more memory efficient than add_inputs.
"""
for e in es:
ensure_freshness(e) # depends on [control=['for'], data=['e']]
for (fb, bb) in self.builder_layers[:-1]:
fs = fb.initial_state().transduce(es)
bs = bb.initial_state().transduce(reversed(es))
es = [concatenate([f, b]) for (f, b) in zip(fs, reversed(bs))] # depends on [control=['for'], data=[]]
(fb, bb) = self.builder_layers[-1]
fs = fb.initial_state().add_inputs(es)
bs = bb.initial_state().add_inputs(reversed(es))
return [(f, b) for (f, b) in zip(fs, reversed(bs))]
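
A usage sketch for the builder above, assuming the DyNet API this class appears to mirror (dimensions and builder choice are illustrative):

import dynet as dy

model = dy.ParameterCollection()
builder = dy.BiRNNBuilder(1, 10, 20, model, dy.LSTMBuilder)

dy.renew_cg()
xs = [dy.inputVector([0.0] * 10) for _ in range(5)]
states = builder.add_inputs(xs)   # list of (forward, backward) state pairs
outputs = builder.transduce(xs)   # only the output expressions; cheaper on memory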
|
def url(self, host):
"""Generate url for coap client."""
path = '/'.join(str(v) for v in self._path)
return 'coaps://{}:5684/{}'.format(host, path)
|
def function[url, parameter[self, host]]:
constant[Generate url for coap client.]
variable[path] assign[=] call[constant[/].join, parameter[<ast.GeneratorExp object at 0x7da207f00610>]]
return[call[constant[coaps://{}:5684/{}].format, parameter[name[host], name[path]]]]
|
keyword[def] identifier[url] ( identifier[self] , identifier[host] ):
literal[string]
identifier[path] = literal[string] . identifier[join] ( identifier[str] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[self] . identifier[_path] )
keyword[return] literal[string] . identifier[format] ( identifier[host] , identifier[path] )
|
def url(self, host):
"""Generate url for coap client."""
path = '/'.join((str(v) for v in self._path))
return 'coaps://{}:5684/{}'.format(host, path)
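
The url method just joins the path segments under the DTLS CoAP scheme on port 5684. With an assumed path of [15001, 65537]:

path = [15001, 65537]
host = '192.168.1.2'
url = 'coaps://{}:5684/{}'.format(host, '/'.join(str(v) for v in path))
assert url == 'coaps://192.168.1.2:5684/15001/65537'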
|
def tag(self, text):
"""Retrieves list of regex_matches in text.
Parameters
----------
text: Text
The estnltk text object to search for events.
Returns
-------
list of matches
"""
matches = self._match(text.text)
matches = self._resolve_conflicts(matches)
if self.return_layer:
return matches
else:
text[self.layer_name] = matches
|
def function[tag, parameter[self, text]]:
constant[Retrieves list of regex_matches in text.
Parameters
----------
text: Text
The estnltk text object to search for events.
Returns
-------
list of matches
]
variable[matches] assign[=] call[name[self]._match, parameter[name[text].text]]
variable[matches] assign[=] call[name[self]._resolve_conflicts, parameter[name[matches]]]
if name[self].return_layer begin[:]
return[name[matches]]
|
keyword[def] identifier[tag] ( identifier[self] , identifier[text] ):
literal[string]
identifier[matches] = identifier[self] . identifier[_match] ( identifier[text] . identifier[text] )
identifier[matches] = identifier[self] . identifier[_resolve_conflicts] ( identifier[matches] )
keyword[if] identifier[self] . identifier[return_layer] :
keyword[return] identifier[matches]
keyword[else] :
identifier[text] [ identifier[self] . identifier[layer_name] ]= identifier[matches]
|
def tag(self, text):
"""Retrieves list of regex_matches in text.
Parameters
----------
text: Text
The estnltk text object to search for events.
Returns
-------
list of matches
"""
matches = self._match(text.text)
matches = self._resolve_conflicts(matches)
if self.return_layer:
return matches # depends on [control=['if'], data=[]]
else:
text[self.layer_name] = matches
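
A toy stand-in that mirrors tag()'s return_layer branching, with a plain dict in place of an estnltk Text object (names here are illustrative):

import re

class SimpleTagger:
    """Attaches matches to the text object, or returns them directly."""
    def __init__(self, pattern, layer_name, return_layer):
        self.pattern = re.compile(pattern)
        self.layer_name = layer_name
        self.return_layer = return_layer
    def tag(self, text_obj):
        matches = [m.span() for m in self.pattern.finditer(text_obj['text'])]
        if self.return_layer:
            return matches
        text_obj[self.layer_name] = matches

doc = {'text': 'call 112 or 1247'}
SimpleTagger(r'\d+', 'numbers', return_layer=False).tag(doc)
assert doc['numbers'] == [(5, 8), (12, 16)]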
|
def handle_error(self, error, download_request):
"""
        Checks what error occurred and looks for an appropriate solution.
        Args:
            error: Exception
                The error that has occurred.
download_request:
The request which resulted in the error.
"""
if hasattr(error, "errno") and error.errno == errno.EACCES:
self.handle_certificate_problem(str(error))
else:
self.handle_general_download_error(str(error), download_request)
|
def function[handle_error, parameter[self, error, download_request]]:
constant[
    Checks what error occurred and looks for an appropriate solution.
    Args:
        error: Exception
            The error that has occurred.
download_request:
The request which resulted in the error.
]
if <ast.BoolOp object at 0x7da1b1b0f430> begin[:]
call[name[self].handle_certificate_problem, parameter[call[name[str], parameter[name[error]]]]]
|
keyword[def] identifier[handle_error] ( identifier[self] , identifier[error] , identifier[download_request] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[error] , literal[string] ) keyword[and] identifier[error] . identifier[errno] == identifier[errno] . identifier[EACCES] :
identifier[self] . identifier[handle_certificate_problem] ( identifier[str] ( identifier[error] ))
keyword[else] :
identifier[self] . identifier[handle_general_download_error] ( identifier[str] ( identifier[error] ), identifier[download_request] )
|
def handle_error(self, error, download_request):
"""
    Checks what error occurred and looks for an appropriate solution.
    Args:
        error: Exception
            The error that has occurred.
download_request:
The request which resulted in the error.
"""
if hasattr(error, 'errno') and error.errno == errno.EACCES:
self.handle_certificate_problem(str(error)) # depends on [control=['if'], data=[]]
else:
self.handle_general_download_error(str(error), download_request)
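
The dispatch above keys purely on errno. A small demonstration that an EACCES OSError carries that attribute (the handler routing in the comment follows the code above):

import errno

err = PermissionError(errno.EACCES, 'Permission denied')
assert hasattr(err, 'errno') and err.errno == errno.EACCES
# handle_error would route this to handle_certificate_problem(str(err));
# any other exception falls through to handle_general_download_error.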
|
def clear_messages(self):
"""
Clears all messages.
"""
while len(self._messages):
msg = self._messages.pop(0)
usd = msg.block.userData()
if usd and hasattr(usd, 'messages'):
usd.messages[:] = []
if msg.decoration:
self.editor.decorations.remove(msg.decoration)
|
def function[clear_messages, parameter[self]]:
constant[
Clears all messages.
]
while call[name[len], parameter[name[self]._messages]] begin[:]
variable[msg] assign[=] call[name[self]._messages.pop, parameter[constant[0]]]
variable[usd] assign[=] call[name[msg].block.userData, parameter[]]
if <ast.BoolOp object at 0x7da20c6c63e0> begin[:]
call[name[usd].messages][<ast.Slice object at 0x7da20c6c4100>] assign[=] list[[]]
if name[msg].decoration begin[:]
call[name[self].editor.decorations.remove, parameter[name[msg].decoration]]
|
keyword[def] identifier[clear_messages] ( identifier[self] ):
literal[string]
keyword[while] identifier[len] ( identifier[self] . identifier[_messages] ):
identifier[msg] = identifier[self] . identifier[_messages] . identifier[pop] ( literal[int] )
identifier[usd] = identifier[msg] . identifier[block] . identifier[userData] ()
keyword[if] identifier[usd] keyword[and] identifier[hasattr] ( identifier[usd] , literal[string] ):
identifier[usd] . identifier[messages] [:]=[]
keyword[if] identifier[msg] . identifier[decoration] :
identifier[self] . identifier[editor] . identifier[decorations] . identifier[remove] ( identifier[msg] . identifier[decoration] )
|
def clear_messages(self):
"""
Clears all messages.
"""
while len(self._messages):
msg = self._messages.pop(0)
usd = msg.block.userData()
if usd and hasattr(usd, 'messages'):
usd.messages[:] = [] # depends on [control=['if'], data=[]]
if msg.decoration:
self.editor.decorations.remove(msg.decoration) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
|
def add_file(self, *args):
"""
Add single file or list of files to bundle
:type: file_path: str|unicode
"""
for file_path in args:
self.files.append(FilePath(file_path, self))
|
def function[add_file, parameter[self]]:
constant[
Add single file or list of files to bundle
:type: file_path: str|unicode
]
for taget[name[file_path]] in starred[name[args]] begin[:]
call[name[self].files.append, parameter[call[name[FilePath], parameter[name[file_path], name[self]]]]]
|
keyword[def] identifier[add_file] ( identifier[self] ,* identifier[args] ):
literal[string]
keyword[for] identifier[file_path] keyword[in] identifier[args] :
identifier[self] . identifier[files] . identifier[append] ( identifier[FilePath] ( identifier[file_path] , identifier[self] ))
|
def add_file(self, *args):
"""
Add single file or list of files to bundle
:type: file_path: str|unicode
"""
for file_path in args:
self.files.append(FilePath(file_path, self)) # depends on [control=['for'], data=['file_path']]
|
def setupUI(self):
'''Create graphical objects for menus.'''
labelSizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
labelSizePolicy.setHorizontalStretch(0)
labelSizePolicy.setVerticalStretch(0)
menuSizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
menuSizePolicy.setHorizontalStretch(0)
menuSizePolicy.setVerticalStretch(0)
logTypeLayout = QHBoxLayout()
logTypeLayout.setSpacing(0)
typeLabel = QLabel("Log Type:")
typeLabel.setMinimumSize(QSize(65, 0))
typeLabel.setMaximumSize(QSize(65, 16777215))
typeLabel.setSizePolicy(labelSizePolicy)
logTypeLayout.addWidget(typeLabel)
self.logType = QComboBox(self)
self.logType.setMinimumSize(QSize(100, 0))
self.logType.setMaximumSize(QSize(150, 16777215))
menuSizePolicy.setHeightForWidth(self.logType.sizePolicy().hasHeightForWidth())
self.logType.setSizePolicy(menuSizePolicy)
logTypeLayout.addWidget(self.logType)
logTypeLayout.setStretch(1, 6)
programLayout = QHBoxLayout()
programLayout.setSpacing(0)
programLabel = QLabel("Program:")
programLabel.setMinimumSize(QSize(60, 0))
programLabel.setMaximumSize(QSize(60, 16777215))
programLabel.setSizePolicy(labelSizePolicy)
programLayout.addWidget(programLabel)
self.programName = QComboBox(self)
self.programName.setMinimumSize(QSize(100, 0))
self.programName.setMaximumSize(QSize(150, 16777215))
menuSizePolicy.setHeightForWidth(self.programName.sizePolicy().hasHeightForWidth())
self.programName.setSizePolicy(menuSizePolicy)
programLayout.addWidget(self.programName)
programLayout.setStretch(1, 6)
        # The initial instance allows adding additional menus; all following menus can only remove themselves.
if self.initialInstance:
self.logButton = QPushButton("+", self)
self.logButton.setToolTip("Add logbook")
else:
self.logButton = QPushButton("-")
self.logButton.setToolTip("Remove logbook")
self.logButton.setMinimumSize(QSize(16, 16)) # 24x24
self.logButton.setMaximumSize(QSize(16, 16)) # 24x24
self.logButton.setObjectName("roundButton")
# self.logButton.setAutoFillBackground(True)
# region = QRegion(QRect(self.logButton.x()+15, self.logButton.y()+14, 20, 20), QRegion.Ellipse)
# self.logButton.setMask(region)
self.logButton.setStyleSheet("QPushButton {border-radius: 8px;}")
self._logSelectLayout = QHBoxLayout()
self._logSelectLayout.setSpacing(6)
self._logSelectLayout.addLayout(logTypeLayout)
self._logSelectLayout.addLayout(programLayout)
self._logSelectLayout.addWidget(self.logButton)
self._logSelectLayout.setStretch(0, 6)
self._logSelectLayout.setStretch(1, 6)
|
def function[setupUI, parameter[self]]:
constant[Create graphical objects for menus.]
variable[labelSizePolicy] assign[=] call[name[QSizePolicy], parameter[name[QSizePolicy].Fixed, name[QSizePolicy].Fixed]]
call[name[labelSizePolicy].setHorizontalStretch, parameter[constant[0]]]
call[name[labelSizePolicy].setVerticalStretch, parameter[constant[0]]]
variable[menuSizePolicy] assign[=] call[name[QSizePolicy], parameter[name[QSizePolicy].Expanding, name[QSizePolicy].Fixed]]
call[name[menuSizePolicy].setHorizontalStretch, parameter[constant[0]]]
call[name[menuSizePolicy].setVerticalStretch, parameter[constant[0]]]
variable[logTypeLayout] assign[=] call[name[QHBoxLayout], parameter[]]
call[name[logTypeLayout].setSpacing, parameter[constant[0]]]
variable[typeLabel] assign[=] call[name[QLabel], parameter[constant[Log Type:]]]
call[name[typeLabel].setMinimumSize, parameter[call[name[QSize], parameter[constant[65], constant[0]]]]]
call[name[typeLabel].setMaximumSize, parameter[call[name[QSize], parameter[constant[65], constant[16777215]]]]]
call[name[typeLabel].setSizePolicy, parameter[name[labelSizePolicy]]]
call[name[logTypeLayout].addWidget, parameter[name[typeLabel]]]
name[self].logType assign[=] call[name[QComboBox], parameter[name[self]]]
call[name[self].logType.setMinimumSize, parameter[call[name[QSize], parameter[constant[100], constant[0]]]]]
call[name[self].logType.setMaximumSize, parameter[call[name[QSize], parameter[constant[150], constant[16777215]]]]]
call[name[menuSizePolicy].setHeightForWidth, parameter[call[call[name[self].logType.sizePolicy, parameter[]].hasHeightForWidth, parameter[]]]]
call[name[self].logType.setSizePolicy, parameter[name[menuSizePolicy]]]
call[name[logTypeLayout].addWidget, parameter[name[self].logType]]
call[name[logTypeLayout].setStretch, parameter[constant[1], constant[6]]]
variable[programLayout] assign[=] call[name[QHBoxLayout], parameter[]]
call[name[programLayout].setSpacing, parameter[constant[0]]]
variable[programLabel] assign[=] call[name[QLabel], parameter[constant[Program:]]]
call[name[programLabel].setMinimumSize, parameter[call[name[QSize], parameter[constant[60], constant[0]]]]]
call[name[programLabel].setMaximumSize, parameter[call[name[QSize], parameter[constant[60], constant[16777215]]]]]
call[name[programLabel].setSizePolicy, parameter[name[labelSizePolicy]]]
call[name[programLayout].addWidget, parameter[name[programLabel]]]
name[self].programName assign[=] call[name[QComboBox], parameter[name[self]]]
call[name[self].programName.setMinimumSize, parameter[call[name[QSize], parameter[constant[100], constant[0]]]]]
call[name[self].programName.setMaximumSize, parameter[call[name[QSize], parameter[constant[150], constant[16777215]]]]]
call[name[menuSizePolicy].setHeightForWidth, parameter[call[call[name[self].programName.sizePolicy, parameter[]].hasHeightForWidth, parameter[]]]]
call[name[self].programName.setSizePolicy, parameter[name[menuSizePolicy]]]
call[name[programLayout].addWidget, parameter[name[self].programName]]
call[name[programLayout].setStretch, parameter[constant[1], constant[6]]]
if name[self].initialInstance begin[:]
name[self].logButton assign[=] call[name[QPushButton], parameter[constant[+], name[self]]]
call[name[self].logButton.setToolTip, parameter[constant[Add logbook]]]
call[name[self].logButton.setMinimumSize, parameter[call[name[QSize], parameter[constant[16], constant[16]]]]]
call[name[self].logButton.setMaximumSize, parameter[call[name[QSize], parameter[constant[16], constant[16]]]]]
call[name[self].logButton.setObjectName, parameter[constant[roundButton]]]
call[name[self].logButton.setStyleSheet, parameter[constant[QPushButton {border-radius: 8px;}]]]
name[self]._logSelectLayout assign[=] call[name[QHBoxLayout], parameter[]]
call[name[self]._logSelectLayout.setSpacing, parameter[constant[6]]]
call[name[self]._logSelectLayout.addLayout, parameter[name[logTypeLayout]]]
call[name[self]._logSelectLayout.addLayout, parameter[name[programLayout]]]
call[name[self]._logSelectLayout.addWidget, parameter[name[self].logButton]]
call[name[self]._logSelectLayout.setStretch, parameter[constant[0], constant[6]]]
call[name[self]._logSelectLayout.setStretch, parameter[constant[1], constant[6]]]
|
keyword[def] identifier[setupUI] ( identifier[self] ):
literal[string]
identifier[labelSizePolicy] = identifier[QSizePolicy] ( identifier[QSizePolicy] . identifier[Fixed] , identifier[QSizePolicy] . identifier[Fixed] )
identifier[labelSizePolicy] . identifier[setHorizontalStretch] ( literal[int] )
identifier[labelSizePolicy] . identifier[setVerticalStretch] ( literal[int] )
identifier[menuSizePolicy] = identifier[QSizePolicy] ( identifier[QSizePolicy] . identifier[Expanding] , identifier[QSizePolicy] . identifier[Fixed] )
identifier[menuSizePolicy] . identifier[setHorizontalStretch] ( literal[int] )
identifier[menuSizePolicy] . identifier[setVerticalStretch] ( literal[int] )
identifier[logTypeLayout] = identifier[QHBoxLayout] ()
identifier[logTypeLayout] . identifier[setSpacing] ( literal[int] )
identifier[typeLabel] = identifier[QLabel] ( literal[string] )
identifier[typeLabel] . identifier[setMinimumSize] ( identifier[QSize] ( literal[int] , literal[int] ))
identifier[typeLabel] . identifier[setMaximumSize] ( identifier[QSize] ( literal[int] , literal[int] ))
identifier[typeLabel] . identifier[setSizePolicy] ( identifier[labelSizePolicy] )
identifier[logTypeLayout] . identifier[addWidget] ( identifier[typeLabel] )
identifier[self] . identifier[logType] = identifier[QComboBox] ( identifier[self] )
identifier[self] . identifier[logType] . identifier[setMinimumSize] ( identifier[QSize] ( literal[int] , literal[int] ))
identifier[self] . identifier[logType] . identifier[setMaximumSize] ( identifier[QSize] ( literal[int] , literal[int] ))
identifier[menuSizePolicy] . identifier[setHeightForWidth] ( identifier[self] . identifier[logType] . identifier[sizePolicy] (). identifier[hasHeightForWidth] ())
identifier[self] . identifier[logType] . identifier[setSizePolicy] ( identifier[menuSizePolicy] )
identifier[logTypeLayout] . identifier[addWidget] ( identifier[self] . identifier[logType] )
identifier[logTypeLayout] . identifier[setStretch] ( literal[int] , literal[int] )
identifier[programLayout] = identifier[QHBoxLayout] ()
identifier[programLayout] . identifier[setSpacing] ( literal[int] )
identifier[programLabel] = identifier[QLabel] ( literal[string] )
identifier[programLabel] . identifier[setMinimumSize] ( identifier[QSize] ( literal[int] , literal[int] ))
identifier[programLabel] . identifier[setMaximumSize] ( identifier[QSize] ( literal[int] , literal[int] ))
identifier[programLabel] . identifier[setSizePolicy] ( identifier[labelSizePolicy] )
identifier[programLayout] . identifier[addWidget] ( identifier[programLabel] )
identifier[self] . identifier[programName] = identifier[QComboBox] ( identifier[self] )
identifier[self] . identifier[programName] . identifier[setMinimumSize] ( identifier[QSize] ( literal[int] , literal[int] ))
identifier[self] . identifier[programName] . identifier[setMaximumSize] ( identifier[QSize] ( literal[int] , literal[int] ))
identifier[menuSizePolicy] . identifier[setHeightForWidth] ( identifier[self] . identifier[programName] . identifier[sizePolicy] (). identifier[hasHeightForWidth] ())
identifier[self] . identifier[programName] . identifier[setSizePolicy] ( identifier[menuSizePolicy] )
identifier[programLayout] . identifier[addWidget] ( identifier[self] . identifier[programName] )
identifier[programLayout] . identifier[setStretch] ( literal[int] , literal[int] )
keyword[if] identifier[self] . identifier[initialInstance] :
identifier[self] . identifier[logButton] = identifier[QPushButton] ( literal[string] , identifier[self] )
identifier[self] . identifier[logButton] . identifier[setToolTip] ( literal[string] )
keyword[else] :
identifier[self] . identifier[logButton] = identifier[QPushButton] ( literal[string] )
identifier[self] . identifier[logButton] . identifier[setToolTip] ( literal[string] )
identifier[self] . identifier[logButton] . identifier[setMinimumSize] ( identifier[QSize] ( literal[int] , literal[int] ))
identifier[self] . identifier[logButton] . identifier[setMaximumSize] ( identifier[QSize] ( literal[int] , literal[int] ))
identifier[self] . identifier[logButton] . identifier[setObjectName] ( literal[string] )
identifier[self] . identifier[logButton] . identifier[setStyleSheet] ( literal[string] )
identifier[self] . identifier[_logSelectLayout] = identifier[QHBoxLayout] ()
identifier[self] . identifier[_logSelectLayout] . identifier[setSpacing] ( literal[int] )
identifier[self] . identifier[_logSelectLayout] . identifier[addLayout] ( identifier[logTypeLayout] )
identifier[self] . identifier[_logSelectLayout] . identifier[addLayout] ( identifier[programLayout] )
identifier[self] . identifier[_logSelectLayout] . identifier[addWidget] ( identifier[self] . identifier[logButton] )
identifier[self] . identifier[_logSelectLayout] . identifier[setStretch] ( literal[int] , literal[int] )
identifier[self] . identifier[_logSelectLayout] . identifier[setStretch] ( literal[int] , literal[int] )
|
def setupUI(self):
"""Create graphical objects for menus."""
labelSizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
labelSizePolicy.setHorizontalStretch(0)
labelSizePolicy.setVerticalStretch(0)
menuSizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
menuSizePolicy.setHorizontalStretch(0)
menuSizePolicy.setVerticalStretch(0)
logTypeLayout = QHBoxLayout()
logTypeLayout.setSpacing(0)
typeLabel = QLabel('Log Type:')
typeLabel.setMinimumSize(QSize(65, 0))
typeLabel.setMaximumSize(QSize(65, 16777215))
typeLabel.setSizePolicy(labelSizePolicy)
logTypeLayout.addWidget(typeLabel)
self.logType = QComboBox(self)
self.logType.setMinimumSize(QSize(100, 0))
self.logType.setMaximumSize(QSize(150, 16777215))
menuSizePolicy.setHeightForWidth(self.logType.sizePolicy().hasHeightForWidth())
self.logType.setSizePolicy(menuSizePolicy)
logTypeLayout.addWidget(self.logType)
logTypeLayout.setStretch(1, 6)
programLayout = QHBoxLayout()
programLayout.setSpacing(0)
programLabel = QLabel('Program:')
programLabel.setMinimumSize(QSize(60, 0))
programLabel.setMaximumSize(QSize(60, 16777215))
programLabel.setSizePolicy(labelSizePolicy)
programLayout.addWidget(programLabel)
self.programName = QComboBox(self)
self.programName.setMinimumSize(QSize(100, 0))
self.programName.setMaximumSize(QSize(150, 16777215))
menuSizePolicy.setHeightForWidth(self.programName.sizePolicy().hasHeightForWidth())
self.programName.setSizePolicy(menuSizePolicy)
programLayout.addWidget(self.programName)
programLayout.setStretch(1, 6)
    # The initial instance allows adding additional menus; all following menus can only remove themselves.
if self.initialInstance:
self.logButton = QPushButton('+', self)
self.logButton.setToolTip('Add logbook') # depends on [control=['if'], data=[]]
else:
self.logButton = QPushButton('-')
self.logButton.setToolTip('Remove logbook')
self.logButton.setMinimumSize(QSize(16, 16)) # 24x24
self.logButton.setMaximumSize(QSize(16, 16)) # 24x24
self.logButton.setObjectName('roundButton')
# self.logButton.setAutoFillBackground(True)
# region = QRegion(QRect(self.logButton.x()+15, self.logButton.y()+14, 20, 20), QRegion.Ellipse)
# self.logButton.setMask(region)
self.logButton.setStyleSheet('QPushButton {border-radius: 8px;}')
self._logSelectLayout = QHBoxLayout()
self._logSelectLayout.setSpacing(6)
self._logSelectLayout.addLayout(logTypeLayout)
self._logSelectLayout.addLayout(programLayout)
self._logSelectLayout.addWidget(self.logButton)
self._logSelectLayout.setStretch(0, 6)
self._logSelectLayout.setStretch(1, 6)
|
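The row-building pattern above (a fixed-width label next to a combo box that absorbs the extra width via setStretch) can be reproduced standalone. A minimal sketch, assuming a PyQt5 environment; the original class may use a different Qt binding, and the widget contents here are illustrative:

import sys
from PyQt5.QtCore import QSize
from PyQt5.QtWidgets import (QApplication, QComboBox, QHBoxLayout,
                             QLabel, QWidget)

app = QApplication(sys.argv)
panel = QWidget()
row = QHBoxLayout(panel)
row.setSpacing(0)
label = QLabel('Log Type:')
label.setMinimumSize(QSize(65, 0))   # pin the label width, like the original
row.addWidget(label)
combo = QComboBox()
combo.addItems(['stdout', 'stderr'])
row.addWidget(combo)
row.setStretch(1, 6)                 # index 1 (the combo) takes the slack
panel.show()
# app.exec_()  # uncomment to enter the event loop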
def load(path=None, root=None, db=None, load_user=True):
"Load all of the config files. "
config = load_config(path, load_user=load_user)
remotes = load_remotes(path, load_user=load_user)
# The external file overwrites the main config
if remotes:
if not 'remotes' in config:
config.remotes = AttrDict()
for k, v in remotes.remotes.items():
config.remotes[k] = v
accounts = load_accounts(path, load_user=load_user)
# The external file overwrites the main config
if accounts:
if not 'accounts' in config:
config.accounts = AttrDict()
for k, v in accounts.accounts.items():
config.accounts[k] = v
update_config(config)
if root:
config.library.filesystem_root = root
if db:
config.library.database = db
return config
|
def function[load, parameter[path, root, db, load_user]]:
constant[Load all of the config files. ]
variable[config] assign[=] call[name[load_config], parameter[name[path]]]
variable[remotes] assign[=] call[name[load_remotes], parameter[name[path]]]
if name[remotes] begin[:]
if <ast.UnaryOp object at 0x7da20c6a9ed0> begin[:]
name[config].remotes assign[=] call[name[AttrDict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18c4cdbd0>, <ast.Name object at 0x7da18c4cd720>]]] in starred[call[name[remotes].remotes.items, parameter[]]] begin[:]
call[name[config].remotes][name[k]] assign[=] name[v]
variable[accounts] assign[=] call[name[load_accounts], parameter[name[path]]]
if name[accounts] begin[:]
if <ast.UnaryOp object at 0x7da18c4ce770> begin[:]
name[config].accounts assign[=] call[name[AttrDict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18c4cf280>, <ast.Name object at 0x7da18c4cd4e0>]]] in starred[call[name[accounts].accounts.items, parameter[]]] begin[:]
call[name[config].accounts][name[k]] assign[=] name[v]
call[name[update_config], parameter[name[config]]]
if name[root] begin[:]
name[config].library.filesystem_root assign[=] name[root]
if name[db] begin[:]
name[config].library.database assign[=] name[db]
return[name[config]]
|
keyword[def] identifier[load] ( identifier[path] = keyword[None] , identifier[root] = keyword[None] , identifier[db] = keyword[None] , identifier[load_user] = keyword[True] ):
literal[string]
identifier[config] = identifier[load_config] ( identifier[path] , identifier[load_user] = identifier[load_user] )
identifier[remotes] = identifier[load_remotes] ( identifier[path] , identifier[load_user] = identifier[load_user] )
keyword[if] identifier[remotes] :
keyword[if] keyword[not] literal[string] keyword[in] identifier[config] :
identifier[config] . identifier[remotes] = identifier[AttrDict] ()
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[remotes] . identifier[remotes] . identifier[items] ():
identifier[config] . identifier[remotes] [ identifier[k] ]= identifier[v]
identifier[accounts] = identifier[load_accounts] ( identifier[path] , identifier[load_user] = identifier[load_user] )
keyword[if] identifier[accounts] :
keyword[if] keyword[not] literal[string] keyword[in] identifier[config] :
identifier[config] . identifier[accounts] = identifier[AttrDict] ()
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[accounts] . identifier[accounts] . identifier[items] ():
identifier[config] . identifier[accounts] [ identifier[k] ]= identifier[v]
identifier[update_config] ( identifier[config] )
keyword[if] identifier[root] :
identifier[config] . identifier[library] . identifier[filesystem_root] = identifier[root]
keyword[if] identifier[db] :
identifier[config] . identifier[library] . identifier[database] = identifier[db]
keyword[return] identifier[config]
|
def load(path=None, root=None, db=None, load_user=True):
"""Load all of the config files. """
config = load_config(path, load_user=load_user)
remotes = load_remotes(path, load_user=load_user)
# The external file overwrites the main config
if remotes:
if not 'remotes' in config:
config.remotes = AttrDict() # depends on [control=['if'], data=[]]
for (k, v) in remotes.remotes.items():
config.remotes[k] = v # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
accounts = load_accounts(path, load_user=load_user)
# The external file overwrites the main config
if accounts:
if not 'accounts' in config:
config.accounts = AttrDict() # depends on [control=['if'], data=[]]
for (k, v) in accounts.accounts.items():
config.accounts[k] = v # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
update_config(config)
if root:
config.library.filesystem_root = root # depends on [control=['if'], data=[]]
if db:
config.library.database = db # depends on [control=['if'], data=[]]
return config
|
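The overwrite rule in load() (entries from the external remotes/accounts files win over the main config) can be demonstrated with plain dicts; AttrDict is assumed to be a dict-like class from the project's own utilities:

config = {'remotes': {'origin': 'http://a.example'}}
remotes = {'remotes': {'origin': 'http://b.example',
                       'mirror': 'http://c.example'}}
for k, v in remotes['remotes'].items():
    config['remotes'][k] = v   # the external file wins on conflict
assert config['remotes'] == {'origin': 'http://b.example',
                             'mirror': 'http://c.example'}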
def intf_up(self, interface):
'''
Can be called when an interface is put in service.
FIXME: not currently used; more needs to be done to
correctly put a new intf into service.
'''
if interface.name not in self._devinfo:
self._devinfo[interface.name] = interface
if self._devupdown_callback:
self._devupdown_callback(interface, 'up')
else:
raise ValueError("Interface already registered")
|
def function[intf_up, parameter[self, interface]]:
constant[
Can be called when an interface is put in service.
FIXME: not currently used; more needs to be done to
correctly put a new intf into service.
]
if compare[name[interface].name <ast.NotIn object at 0x7da2590d7190> name[self]._devinfo] begin[:]
call[name[self]._devinfo][name[interface].name] assign[=] name[interface]
if name[self]._devupdown_callback begin[:]
call[name[self]._devupdown_callback, parameter[name[interface], constant[up]]]
|
keyword[def] identifier[intf_up] ( identifier[self] , identifier[interface] ):
literal[string]
keyword[if] identifier[interface] . identifier[name] keyword[not] keyword[in] identifier[self] . identifier[_devinfo] :
identifier[self] . identifier[_devinfo] [ identifier[interface] . identifier[name] ]= identifier[interface]
keyword[if] identifier[self] . identifier[_devupdown_callback] :
identifier[self] . identifier[_devupdown_callback] ( identifier[interface] , literal[string] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
|
def intf_up(self, interface):
"""
Can be called when an interface is put in service.
FIXME: not currently used; more needs to be done to
correctly put a new intf into service.
"""
if interface.name not in self._devinfo:
self._devinfo[interface.name] = interface
if self._devupdown_callback:
self._devupdown_callback(interface, 'up') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
raise ValueError('Interface already registered')
|
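A toy reproduction of the register-then-notify pattern in intf_up; the stub class stands in for the real interface type, which is not shown in the source:

class StubInterface:
    name = 'eth0'

devinfo = {}
events = []

def devupdown_callback(intf, state):
    events.append((intf.name, state))   # mirrors self._devupdown_callback

intf = StubInterface()
if intf.name not in devinfo:
    devinfo[intf.name] = intf
    devupdown_callback(intf, 'up')
else:
    raise ValueError('Interface already registered')

assert events == [('eth0', 'up')]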
def prob_classify(self, text):
"""Return the label probability distribution for classifying a string
of text.
Example:
::
>>> classifier = NaiveBayesClassifier(train_data)
>>> prob_dist = classifier.prob_classify("I feel happy this morning.")
>>> prob_dist.max()
'positive'
>>> prob_dist.prob("positive")
0.7
:rtype: nltk.probability.DictionaryProbDist
"""
text_features = self.extract_features(text)
return self.classifier.prob_classify(text_features)
|
def function[prob_classify, parameter[self, text]]:
constant[Return the label probability distribution for classifying a string
of text.
Example:
::
>>> classifier = NaiveBayesClassifier(train_data)
>>> prob_dist = classifier.prob_classify("I feel happy this morning.")
>>> prob_dist.max()
'positive'
>>> prob_dist.prob("positive")
0.7
:rtype: nltk.probability.DictionaryProbDist
]
variable[text_features] assign[=] call[name[self].extract_features, parameter[name[text]]]
return[call[name[self].classifier.prob_classify, parameter[name[text_features]]]]
|
keyword[def] identifier[prob_classify] ( identifier[self] , identifier[text] ):
literal[string]
identifier[text_features] = identifier[self] . identifier[extract_features] ( identifier[text] )
keyword[return] identifier[self] . identifier[classifier] . identifier[prob_classify] ( identifier[text_features] )
|
def prob_classify(self, text):
"""Return the label probability distribution for classifying a string
of text.
Example:
::
>>> classifier = NaiveBayesClassifier(train_data)
>>> prob_dist = classifier.prob_classify("I feel happy this morning.")
>>> prob_dist.max()
'positive'
>>> prob_dist.prob("positive")
0.7
:rtype: nltk.probability.DictionaryProbDist
"""
text_features = self.extract_features(text)
return self.classifier.prob_classify(text_features)
|
def _merge_defaults(self, config):
"""The config object loads its values from two sources, with the
following precedence:
1. data/default_config.yaml
2. The config file itself, passed in to this object in the
constructor as `path`.
in case of conflict, the config file dominates.
"""
fn = resource_filename('osprey', join('data', 'default_config.yaml'))
with open(fn) as f:
default = parse(f)
return reduce(dict_merge, [default, config])
|
def function[_merge_defaults, parameter[self, config]]:
constant[The config object loads its values from two sources, with the
following precedence:
1. data/default_config.yaml
2. The config file itself, passed in to this object in the
constructor as `path`.
in case of conflict, the config file dominates.
]
variable[fn] assign[=] call[name[resource_filename], parameter[constant[osprey], call[name[join], parameter[constant[data], constant[default_config.yaml]]]]]
with call[name[open], parameter[name[fn]]] begin[:]
variable[default] assign[=] call[name[parse], parameter[name[f]]]
return[call[name[reduce], parameter[name[dict_merge], list[[<ast.Name object at 0x7da1aff56500>, <ast.Name object at 0x7da1aff547c0>]]]]]
|
keyword[def] identifier[_merge_defaults] ( identifier[self] , identifier[config] ):
literal[string]
identifier[fn] = identifier[resource_filename] ( literal[string] , identifier[join] ( literal[string] , literal[string] ))
keyword[with] identifier[open] ( identifier[fn] ) keyword[as] identifier[f] :
identifier[default] = identifier[parse] ( identifier[f] )
keyword[return] identifier[reduce] ( identifier[dict_merge] ,[ identifier[default] , identifier[config] ])
|
def _merge_defaults(self, config):
"""The config object loads its values from two sources, with the
following precedence:
1. data/default_config.yaml
2. The config file itself, passed in to this object in the
constructor as `path`.
in case of conflict, the config file dominates.
"""
fn = resource_filename('osprey', join('data', 'default_config.yaml'))
with open(fn) as f:
default = parse(f) # depends on [control=['with'], data=['f']]
return reduce(dict_merge, [default, config])
|
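dict_merge itself is not shown in the source; a plausible recursive implementation (later dicts win on conflicts, nested dicts merged key by key) makes the precedence rule concrete. On Python 3, reduce must come from functools:

from functools import reduce

def dict_merge(base, override):
    # Assumed implementation -- not the project's actual dict_merge.
    merged = dict(base)
    for key, value in override.items():
        if isinstance(merged.get(key), dict) and isinstance(value, dict):
            merged[key] = dict_merge(merged[key], value)   # merge nested dicts
        else:
            merged[key] = value                            # override wins
    return merged

default = {'estimator': {'name': 'svc', 'params': {'C': 1.0}}}
user = {'estimator': {'params': {'C': 10.0}}}
merged = reduce(dict_merge, [default, user])
assert merged['estimator'] == {'name': 'svc', 'params': {'C': 10.0}}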
def parse_index_parameter(self, node):
"""
Parses <IndexParameter>
@param node: Node containing the <IndexParameter> element
@type node: xml.etree.Element
@raise ParseError: Raised when the IndexParameter does not have a name.
"""
if self.current_component_type == None:
self.raise_error('IndexParameters can only be defined in ' +
'a component type')
try:
name = node.lattrib['name']
except:
self.raise_error('<IndexParameter> must specify a name')
index_parameter = IndexParameter(name)
self.current_component_type.add_index_parameter(index_parameter)
|
def function[parse_index_parameter, parameter[self, node]]:
constant[
Parses <IndexParameter>
@param node: Node containing the <IndexParameter> element
@type node: xml.etree.Element
@raise ParseError: Raised when the IndexParameter does not have a name.
]
if compare[name[self].current_component_type equal[==] constant[None]] begin[:]
call[name[self].raise_error, parameter[binary_operation[constant[IndexParameters can only be defined in ] + constant[a component type]]]]
<ast.Try object at 0x7da1b24cb3d0>
variable[index_parameter] assign[=] call[name[IndexParameter], parameter[name[name]]]
call[name[self].current_component_type.add_index_parameter, parameter[name[index_parameter]]]
|
keyword[def] identifier[parse_index_parameter] ( identifier[self] , identifier[node] ):
literal[string]
keyword[if] identifier[self] . identifier[current_component_type] == keyword[None] :
identifier[self] . identifier[raise_error] ( literal[string] +
literal[string] )
keyword[try] :
identifier[name] = identifier[node] . identifier[lattrib] [ literal[string] ]
keyword[except] :
identifier[self] . identifier[raise_error] ( literal[string] )
identifier[index_parameter] = identifier[IndexParameter] ( identifier[name] )
identifier[self] . identifier[current_component_type] . identifier[add_index_parameter] ( identifier[index_parameter] )
|
def parse_index_parameter(self, node):
"""
Parses <IndexParameter>
@param node: Node containing the <IndexParameter> element
@type node: xml.etree.Element
@raise ParseError: Raised when the IndexParameter does not have a name.
"""
if self.current_component_type == None:
self.raise_error('IndexParameters can only be defined in ' + 'a component type') # depends on [control=['if'], data=[]]
try:
name = node.lattrib['name'] # depends on [control=['try'], data=[]]
except:
self.raise_error('<IndexParameter> must specify a name') # depends on [control=['except'], data=[]]
index_parameter = IndexParameter(name)
self.current_component_type.add_index_parameter(index_parameter)
|
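lattrib appears to be a lower-cased attribute map on the parser's node wrapper. For illustration only, a plain ElementTree equivalent of the name lookup (a missing name raises KeyError, which the original converts into a parse error):

import xml.etree.ElementTree as ET

node = ET.fromstring('<IndexParameter name="i"/>')
lattrib = {k.lower(): v for k, v in node.attrib.items()}
assert lattrib['name'] == 'i'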
def _growSynapses(cls, connections, random, segment, nDesiredNewSynapes,
prevWinnerCells, initialPermanence, maxSynapsesPerSegment):
"""
Creates nDesiredNewSynapes synapses on the segment passed in if
possible, choosing random cells from the previous winner cells that are
not already on the segment.
:param connections: (Object) Connections instance for the tm
:param random: (Object) TM object used to generate random
numbers
:param segment: (int) Segment to grow synapses on.
:param nDesiredNewSynapes: (int) Desired number of synapses to grow
:param prevWinnerCells: (list) Winner cells in `t-1`
:param initialPermanence: (float) Initial permanence of a new synapse.
"""
candidates = list(prevWinnerCells)
for synapse in connections.synapsesForSegment(segment):
i = binSearch(candidates, synapse.presynapticCell)
if i != -1:
del candidates[i]
nActual = min(nDesiredNewSynapes, len(candidates))
# Check if we're going to surpass the maximum number of synapses.
overrun = connections.numSynapses(segment) + nActual - maxSynapsesPerSegment
if overrun > 0:
cls._destroyMinPermanenceSynapses(connections, random, segment, overrun,
prevWinnerCells)
# Recalculate in case we weren't able to destroy as many synapses as needed.
nActual = min(nActual,
maxSynapsesPerSegment - connections.numSynapses(segment))
for _ in range(nActual):
i = random.getUInt32(len(candidates))
connections.createSynapse(segment, candidates[i], initialPermanence)
del candidates[i]
|
def function[_growSynapses, parameter[cls, connections, random, segment, nDesiredNewSynapes, prevWinnerCells, initialPermanence, maxSynapsesPerSegment]]:
constant[
Creates nDesiredNewSynapes synapses on the segment passed in if
possible, choosing random cells from the previous winner cells that are
not already on the segment.
:param connections: (Object) Connections instance for the tm
:param random: (Object) TM object used to generate random
numbers
:param segment: (int) Segment to grow synapses on.
:param nDesiredNewSynapes: (int) Desired number of synapses to grow
:param prevWinnerCells: (list) Winner cells in `t-1`
:param initialPermanence: (float) Initial permanence of a new synapse.
]
variable[candidates] assign[=] call[name[list], parameter[name[prevWinnerCells]]]
for taget[name[synapse]] in starred[call[name[connections].synapsesForSegment, parameter[name[segment]]]] begin[:]
variable[i] assign[=] call[name[binSearch], parameter[name[candidates], name[synapse].presynapticCell]]
if compare[name[i] not_equal[!=] <ast.UnaryOp object at 0x7da18dc057e0>] begin[:]
<ast.Delete object at 0x7da18dc05120>
variable[nActual] assign[=] call[name[min], parameter[name[nDesiredNewSynapes], call[name[len], parameter[name[candidates]]]]]
variable[overrun] assign[=] binary_operation[binary_operation[call[name[connections].numSynapses, parameter[name[segment]]] + name[nActual]] - name[maxSynapsesPerSegment]]
if compare[name[overrun] greater[>] constant[0]] begin[:]
call[name[cls]._destroyMinPermanenceSynapses, parameter[name[connections], name[random], name[segment], name[overrun], name[prevWinnerCells]]]
variable[nActual] assign[=] call[name[min], parameter[name[nActual], binary_operation[name[maxSynapsesPerSegment] - call[name[connections].numSynapses, parameter[name[segment]]]]]]
for taget[name[_]] in starred[call[name[range], parameter[name[nActual]]]] begin[:]
variable[i] assign[=] call[name[random].getUInt32, parameter[call[name[len], parameter[name[candidates]]]]]
call[name[connections].createSynapse, parameter[name[segment], call[name[candidates]][name[i]], name[initialPermanence]]]
<ast.Delete object at 0x7da18dc99090>
|
keyword[def] identifier[_growSynapses] ( identifier[cls] , identifier[connections] , identifier[random] , identifier[segment] , identifier[nDesiredNewSynapes] ,
identifier[prevWinnerCells] , identifier[initialPermanence] , identifier[maxSynapsesPerSegment] ):
literal[string]
identifier[candidates] = identifier[list] ( identifier[prevWinnerCells] )
keyword[for] identifier[synapse] keyword[in] identifier[connections] . identifier[synapsesForSegment] ( identifier[segment] ):
identifier[i] = identifier[binSearch] ( identifier[candidates] , identifier[synapse] . identifier[presynapticCell] )
keyword[if] identifier[i] !=- literal[int] :
keyword[del] identifier[candidates] [ identifier[i] ]
identifier[nActual] = identifier[min] ( identifier[nDesiredNewSynapes] , identifier[len] ( identifier[candidates] ))
identifier[overrun] = identifier[connections] . identifier[numSynapses] ( identifier[segment] )+ identifier[nActual] - identifier[maxSynapsesPerSegment]
keyword[if] identifier[overrun] > literal[int] :
identifier[cls] . identifier[_destroyMinPermanenceSynapses] ( identifier[connections] , identifier[random] , identifier[segment] , identifier[overrun] ,
identifier[prevWinnerCells] )
identifier[nActual] = identifier[min] ( identifier[nActual] ,
identifier[maxSynapsesPerSegment] - identifier[connections] . identifier[numSynapses] ( identifier[segment] ))
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[nActual] ):
identifier[i] = identifier[random] . identifier[getUInt32] ( identifier[len] ( identifier[candidates] ))
identifier[connections] . identifier[createSynapse] ( identifier[segment] , identifier[candidates] [ identifier[i] ], identifier[initialPermanence] )
keyword[del] identifier[candidates] [ identifier[i] ]
|
def _growSynapses(cls, connections, random, segment, nDesiredNewSynapes, prevWinnerCells, initialPermanence, maxSynapsesPerSegment):
"""
Creates nDesiredNewSynapes synapses on the segment passed in if
possible, choosing random cells from the previous winner cells that are
not already on the segment.
:param connections: (Object) Connections instance for the tm
:param random: (Object) TM object used to generate random
numbers
:param segment: (int) Segment to grow synapses on.
:param nDesiredNewSynapes: (int) Desired number of synapses to grow
:param prevWinnerCells: (list) Winner cells in `t-1`
:param initialPermanence: (float) Initial permanence of a new synapse.
"""
candidates = list(prevWinnerCells)
for synapse in connections.synapsesForSegment(segment):
i = binSearch(candidates, synapse.presynapticCell)
if i != -1:
del candidates[i] # depends on [control=['if'], data=['i']] # depends on [control=['for'], data=['synapse']]
nActual = min(nDesiredNewSynapes, len(candidates))
# Check if we're going to surpass the maximum number of synapses.
overrun = connections.numSynapses(segment) + nActual - maxSynapsesPerSegment
if overrun > 0:
cls._destroyMinPermanenceSynapses(connections, random, segment, overrun, prevWinnerCells) # depends on [control=['if'], data=['overrun']]
# Recalculate in case we weren't able to destroy as many synapses as needed.
nActual = min(nActual, maxSynapsesPerSegment - connections.numSynapses(segment))
for _ in range(nActual):
i = random.getUInt32(len(candidates))
connections.createSynapse(segment, candidates[i], initialPermanence)
del candidates[i] # depends on [control=['for'], data=[]]
|
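The selection loop draws winner cells without replacement: pick a random index, create the synapse, then delete the candidate so it cannot be drawn again. A self-contained sketch of the same pattern using the standard-library RNG in place of NuPIC's Random.getUInt32:

import random

candidates = [3, 7, 11, 19]   # stand-ins for previous winner cells
n_actual = 2
picked = []
for _ in range(n_actual):
    i = random.randrange(len(candidates))
    picked.append(candidates.pop(i))   # removal prevents duplicates

assert len(picked) == 2 and len(set(picked)) == 2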
def threatlist(self, ip: str):
"""Return threatlist information we have for the given IPv{4,6} address with history of changes
"""
url_path = "/api/threatlist/{ip}".format(ip=ip)
return self._request(path=url_path)
|
def function[threatlist, parameter[self, ip]]:
constant[Return threatlist information we have for the given IPv{4,6} address with history of changes
]
variable[url_path] assign[=] call[constant[/api/threatlist/{ip}].format, parameter[]]
return[call[name[self]._request, parameter[]]]
|
keyword[def] identifier[threatlist] ( identifier[self] , identifier[ip] : identifier[str] ):
literal[string]
identifier[url_path] = literal[string] . identifier[format] ( identifier[ip] = identifier[ip] )
keyword[return] identifier[self] . identifier[_request] ( identifier[path] = identifier[url_path] )
|
def threatlist(self, ip: str):
"""Return threatlist information we have for the given IPv{4,6} address with history of changes
"""
url_path = '/api/threatlist/{ip}'.format(ip=ip)
return self._request(path=url_path)
|
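Only the path-building half of threatlist() can be shown standalone; the host, authentication and _request() are internal to the client and not shown here. The address below is from the TEST-NET-3 documentation range:

url_path = '/api/threatlist/{ip}'.format(ip='203.0.113.7')
assert url_path == '/api/threatlist/203.0.113.7'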
def handleTrailingStops(self, tickerId):
""" software-based trailing stop """
# existing?
if tickerId not in self.trailingStops.keys():
return None
# continue
trailingStop = self.trailingStops[tickerId]
price = self.marketData[tickerId]['last'][0]
symbol = self.tickerSymbol(tickerId)
# contract = self.contracts[tickerId]
# contractString = self.contractString(contract)
# filled / no positions?
if (self._positions[symbol] == 0) | \
(self.orders[trailingStop['orderId']]['status'] == "FILLED"):
del self.trailingStops[tickerId]
return None
# continue...
newStop = trailingStop['lastPrice']
ticksize = trailingStop['ticksize']
# long
if (trailingStop['quantity'] < 0) & (trailingStop['lastPrice'] < price):
if abs(trailingStop['trailAmount']) >= 0:
newStop = price - abs(trailingStop['trailAmount'])
elif trailingStop['trailPercent'] >= 0:
newStop = price - (price * (abs(trailingStop['trailPercent']) / 100))
# short
elif (trailingStop['quantity'] > 0) & (trailingStop['lastPrice'] > price):
if abs(trailingStop['trailAmount']) >= 0:
newStop = price + abs(trailingStop['trailAmount'])
elif trailingStop['trailPercent'] >= 0:
newStop = price + (price * (abs(trailingStop['trailPercent']) / 100))
# valid newStop
newStop = self.roundClosestValid(newStop, ticksize)
# print("\n\n", trailingStop['lastPrice'], newStop, price, "\n\n")
# no change?
if newStop == trailingStop['lastPrice']:
return None
# submit order
trailingStopOrderId = self.modifyStopOrder(
orderId = trailingStop['orderId'],
parentId = trailingStop['parentId'],
newStop = newStop,
quantity = trailingStop['quantity']
)
if trailingStopOrderId:
self.trailingStops[tickerId]['lastPrice'] = price
return trailingStopOrderId
|
def function[handleTrailingStops, parameter[self, tickerId]]:
constant[ software-based trailing stop ]
if compare[name[tickerId] <ast.NotIn object at 0x7da2590d7190> call[name[self].trailingStops.keys, parameter[]]] begin[:]
return[constant[None]]
variable[trailingStop] assign[=] call[name[self].trailingStops][name[tickerId]]
variable[price] assign[=] call[call[call[name[self].marketData][name[tickerId]]][constant[last]]][constant[0]]
variable[symbol] assign[=] call[name[self].tickerSymbol, parameter[name[tickerId]]]
if binary_operation[compare[call[name[self]._positions][name[symbol]] equal[==] constant[0]] <ast.BitOr object at 0x7da2590d6aa0> compare[call[call[name[self].orders][call[name[trailingStop]][constant[orderId]]]][constant[status]] equal[==] constant[FILLED]]] begin[:]
<ast.Delete object at 0x7da1b18e72e0>
return[constant[None]]
variable[newStop] assign[=] call[name[trailingStop]][constant[lastPrice]]
variable[ticksize] assign[=] call[name[trailingStop]][constant[ticksize]]
if binary_operation[compare[call[name[trailingStop]][constant[quantity]] less[<] constant[0]] <ast.BitAnd object at 0x7da2590d6b60> compare[call[name[trailingStop]][constant[lastPrice]] less[<] name[price]]] begin[:]
if compare[call[name[abs], parameter[call[name[trailingStop]][constant[trailAmount]]]] greater_or_equal[>=] constant[0]] begin[:]
variable[newStop] assign[=] binary_operation[name[price] - call[name[abs], parameter[call[name[trailingStop]][constant[trailAmount]]]]]
variable[newStop] assign[=] call[name[self].roundClosestValid, parameter[name[newStop], name[ticksize]]]
if compare[name[newStop] equal[==] call[name[trailingStop]][constant[lastPrice]]] begin[:]
return[constant[None]]
variable[trailingStopOrderId] assign[=] call[name[self].modifyStopOrder, parameter[]]
if name[trailingStopOrderId] begin[:]
call[call[name[self].trailingStops][name[tickerId]]][constant[lastPrice]] assign[=] name[price]
return[name[trailingStopOrderId]]
|
keyword[def] identifier[handleTrailingStops] ( identifier[self] , identifier[tickerId] ):
literal[string]
keyword[if] identifier[tickerId] keyword[not] keyword[in] identifier[self] . identifier[trailingStops] . identifier[keys] ():
keyword[return] keyword[None]
identifier[trailingStop] = identifier[self] . identifier[trailingStops] [ identifier[tickerId] ]
identifier[price] = identifier[self] . identifier[marketData] [ identifier[tickerId] ][ literal[string] ][ literal[int] ]
identifier[symbol] = identifier[self] . identifier[tickerSymbol] ( identifier[tickerId] )
keyword[if] ( identifier[self] . identifier[_positions] [ identifier[symbol] ]== literal[int] )|( identifier[self] . identifier[orders] [ identifier[trailingStop] [ literal[string] ]][ literal[string] ]== literal[string] ):
keyword[del] identifier[self] . identifier[trailingStops] [ identifier[tickerId] ]
keyword[return] keyword[None]
identifier[newStop] = identifier[trailingStop] [ literal[string] ]
identifier[ticksize] = identifier[trailingStop] [ literal[string] ]
keyword[if] ( identifier[trailingStop] [ literal[string] ]< literal[int] )&( identifier[trailingStop] [ literal[string] ]< identifier[price] ):
keyword[if] identifier[abs] ( identifier[trailingStop] [ literal[string] ])>= literal[int] :
identifier[newStop] = identifier[price] - identifier[abs] ( identifier[trailingStop] [ literal[string] ])
keyword[elif] identifier[trailingStop] [ literal[string] ]>= literal[int] :
identifier[newStop] = identifier[price] -( identifier[price] *( identifier[abs] ( identifier[trailingStop] [ literal[string] ])/ literal[int] ))
keyword[elif] ( identifier[trailingStop] [ literal[string] ]> literal[int] )&( identifier[trailingStop] [ literal[string] ]> identifier[price] ):
keyword[if] identifier[abs] ( identifier[trailingStop] [ literal[string] ])>= literal[int] :
identifier[newStop] = identifier[price] + identifier[abs] ( identifier[trailingStop] [ literal[string] ])
keyword[elif] identifier[trailingStop] [ literal[string] ]>= literal[int] :
identifier[newStop] = identifier[price] +( identifier[price] *( identifier[abs] ( identifier[trailingStop] [ literal[string] ])/ literal[int] ))
identifier[newStop] = identifier[self] . identifier[roundClosestValid] ( identifier[newStop] , identifier[ticksize] )
keyword[if] identifier[newStop] == identifier[trailingStop] [ literal[string] ]:
keyword[return] keyword[None]
identifier[trailingStopOrderId] = identifier[self] . identifier[modifyStopOrder] (
identifier[orderId] = identifier[trailingStop] [ literal[string] ],
identifier[parentId] = identifier[trailingStop] [ literal[string] ],
identifier[newStop] = identifier[newStop] ,
identifier[quantity] = identifier[trailingStop] [ literal[string] ]
)
keyword[if] identifier[trailingStopOrderId] :
identifier[self] . identifier[trailingStops] [ identifier[tickerId] ][ literal[string] ]= identifier[price]
keyword[return] identifier[trailingStopOrderId]
|
def handleTrailingStops(self, tickerId):
""" software-based trailing stop """
# existing?
if tickerId not in self.trailingStops.keys():
return None # depends on [control=['if'], data=[]]
# continue
trailingStop = self.trailingStops[tickerId]
price = self.marketData[tickerId]['last'][0]
symbol = self.tickerSymbol(tickerId)
# contract = self.contracts[tickerId]
# contractString = self.contractString(contract)
# filled / no positions?
if (self._positions[symbol] == 0) | (self.orders[trailingStop['orderId']]['status'] == 'FILLED'):
del self.trailingStops[tickerId]
return None # depends on [control=['if'], data=[]]
# continue...
newStop = trailingStop['lastPrice']
ticksize = trailingStop['ticksize']
# long
if (trailingStop['quantity'] < 0) & (trailingStop['lastPrice'] < price):
if abs(trailingStop['trailAmount']) >= 0:
newStop = price - abs(trailingStop['trailAmount']) # depends on [control=['if'], data=[]]
elif trailingStop['trailPercent'] >= 0:
newStop = price - price * (abs(trailingStop['trailPercent']) / 100) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# short
elif (trailingStop['quantity'] > 0) & (trailingStop['lastPrice'] > price):
if abs(trailingStop['trailAmount']) >= 0:
newStop = price + abs(trailingStop['trailAmount']) # depends on [control=['if'], data=[]]
elif trailingStop['trailPercent'] >= 0:
newStop = price + price * (abs(trailingStop['trailPercent']) / 100) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# valid newStop
newStop = self.roundClosestValid(newStop, ticksize)
# print("\n\n", trailingStop['lastPrice'], newStop, price, "\n\n")
# no change?
if newStop == trailingStop['lastPrice']:
return None # depends on [control=['if'], data=[]]
# submit order
trailingStopOrderId = self.modifyStopOrder(orderId=trailingStop['orderId'], parentId=trailingStop['parentId'], newStop=newStop, quantity=trailingStop['quantity'])
if trailingStopOrderId:
self.trailingStops[tickerId]['lastPrice'] = price # depends on [control=['if'], data=[]]
return trailingStopOrderId
|
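Worked numbers for the short side (a positive stop quantity, i.e. a buy-stop covering a short): when the price falls below the last reference price, the stop is re-anchored at price plus the trail, so with trailPercent = 1 and price = 100.0 the new stop is 101.0. Also note that abs(trailingStop['trailAmount']) >= 0 holds for any number, so the elif trailPercent branches above look unreachable unless trailAmount is non-numeric; this appears to be a latent bug in the original.

price = 100.0
trail_percent = 1.0
new_stop = price + price * (abs(trail_percent) / 100)   # short side
assert new_stop == 101.0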
def exceptAll(self, other):
"""Return a new :class:`DataFrame` containing rows in this :class:`DataFrame` but
not in another :class:`DataFrame` while preserving duplicates.
This is equivalent to `EXCEPT ALL` in SQL.
>>> df1 = spark.createDataFrame(
... [("a", 1), ("a", 1), ("a", 1), ("a", 2), ("b", 3), ("c", 4)], ["C1", "C2"])
>>> df2 = spark.createDataFrame([("a", 1), ("b", 3)], ["C1", "C2"])
>>> df1.exceptAll(df2).show()
+---+---+
| C1| C2|
+---+---+
| a| 1|
| a| 1|
| a| 2|
| c| 4|
+---+---+
Also as standard in SQL, this function resolves columns by position (not by name).
"""
return DataFrame(self._jdf.exceptAll(other._jdf), self.sql_ctx)
|
def function[exceptAll, parameter[self, other]]:
constant[Return a new :class:`DataFrame` containing rows in this :class:`DataFrame` but
not in another :class:`DataFrame` while preserving duplicates.
This is equivalent to `EXCEPT ALL` in SQL.
>>> df1 = spark.createDataFrame(
... [("a", 1), ("a", 1), ("a", 1), ("a", 2), ("b", 3), ("c", 4)], ["C1", "C2"])
>>> df2 = spark.createDataFrame([("a", 1), ("b", 3)], ["C1", "C2"])
>>> df1.exceptAll(df2).show()
+---+---+
| C1| C2|
+---+---+
| a| 1|
| a| 1|
| a| 2|
| c| 4|
+---+---+
Also as standard in SQL, this function resolves columns by position (not by name).
]
return[call[name[DataFrame], parameter[call[name[self]._jdf.exceptAll, parameter[name[other]._jdf]], name[self].sql_ctx]]]
|
keyword[def] identifier[exceptAll] ( identifier[self] , identifier[other] ):
literal[string]
keyword[return] identifier[DataFrame] ( identifier[self] . identifier[_jdf] . identifier[exceptAll] ( identifier[other] . identifier[_jdf] ), identifier[self] . identifier[sql_ctx] )
|
def exceptAll(self, other):
"""Return a new :class:`DataFrame` containing rows in this :class:`DataFrame` but
not in another :class:`DataFrame` while preserving duplicates.
This is equivalent to `EXCEPT ALL` in SQL.
>>> df1 = spark.createDataFrame(
... [("a", 1), ("a", 1), ("a", 1), ("a", 2), ("b", 3), ("c", 4)], ["C1", "C2"])
>>> df2 = spark.createDataFrame([("a", 1), ("b", 3)], ["C1", "C2"])
>>> df1.exceptAll(df2).show()
+---+---+
| C1| C2|
+---+---+
| a| 1|
| a| 1|
| a| 2|
| c| 4|
+---+---+
Also as standard in SQL, this function resolves columns by position (not by name).
"""
return DataFrame(self._jdf.exceptAll(other._jdf), self.sql_ctx)
|
def get_bounds_x0_eps(tuning_options):
"""compute bounds, x0 (the initial guess), and eps"""
values = tuning_options.tune_params.values()
if tuning_options.scaling:
#bounds = [(0, 1) for _ in values]
#x0 = [0.5 for _ in bounds]
eps = numpy.amin([1.0/len(v) for v in values])
#reducing interval from [0, 1] to [0, eps*len(v)]
bounds = [(0, eps*len(v)) for v in values]
x0 = [0.5*eps*len(v) for v in values]
else:
bounds = get_bounds(tuning_options.tune_params)
x0 = [(min_v+max_v)/2.0 for (min_v, max_v) in bounds]
eps = 1e9
for v_list in values:
vals = numpy.sort(v_list)
eps = min(eps, numpy.amin(numpy.gradient(vals)))
tuning_options["eps"] = eps
logging.debug('get_bounds_x0_eps called')
logging.debug('bounds ' + str(bounds))
logging.debug('x0 ' + str(x0))
logging.debug('eps ' + str(eps))
return bounds, x0, eps
|
def function[get_bounds_x0_eps, parameter[tuning_options]]:
constant[compute bounds, x0 (the initial guess), and eps]
variable[values] assign[=] call[name[tuning_options].tune_params.values, parameter[]]
if name[tuning_options].scaling begin[:]
variable[eps] assign[=] call[name[numpy].amin, parameter[<ast.ListComp object at 0x7da1b04f9c30>]]
variable[bounds] assign[=] <ast.ListComp object at 0x7da1b04ec910>
variable[x0] assign[=] <ast.ListComp object at 0x7da1b04effd0>
call[name[tuning_options]][constant[eps]] assign[=] name[eps]
call[name[logging].debug, parameter[constant[get_bounds_x0_eps called]]]
call[name[logging].debug, parameter[binary_operation[constant[bounds ] + call[name[str], parameter[name[bounds]]]]]]
call[name[logging].debug, parameter[binary_operation[constant[x0 ] + call[name[str], parameter[name[x0]]]]]]
call[name[logging].debug, parameter[binary_operation[constant[eps ] + call[name[str], parameter[name[eps]]]]]]
return[tuple[[<ast.Name object at 0x7da1b0405b70>, <ast.Name object at 0x7da1b0404220>, <ast.Name object at 0x7da1b0405ed0>]]]
|
keyword[def] identifier[get_bounds_x0_eps] ( identifier[tuning_options] ):
literal[string]
identifier[values] = identifier[tuning_options] . identifier[tune_params] . identifier[values] ()
keyword[if] identifier[tuning_options] . identifier[scaling] :
identifier[eps] = identifier[numpy] . identifier[amin] ([ literal[int] / identifier[len] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[values] ])
identifier[bounds] =[( literal[int] , identifier[eps] * identifier[len] ( identifier[v] )) keyword[for] identifier[v] keyword[in] identifier[values] ]
identifier[x0] =[ literal[int] * identifier[eps] * identifier[len] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[values] ]
keyword[else] :
identifier[bounds] = identifier[get_bounds] ( identifier[tuning_options] . identifier[tune_params] )
identifier[x0] =[( identifier[min_v] + identifier[max_v] )/ literal[int] keyword[for] ( identifier[min_v] , identifier[max_v] ) keyword[in] identifier[bounds] ]
identifier[eps] = literal[int]
keyword[for] identifier[v_list] keyword[in] identifier[values] :
identifier[vals] = identifier[numpy] . identifier[sort] ( identifier[v_list] )
identifier[eps] = identifier[min] ( identifier[eps] , identifier[numpy] . identifier[amin] ( identifier[numpy] . identifier[gradient] ( identifier[vals] )))
identifier[tuning_options] [ literal[string] ]= identifier[eps]
identifier[logging] . identifier[debug] ( literal[string] )
identifier[logging] . identifier[debug] ( literal[string] + identifier[str] ( identifier[bounds] ))
identifier[logging] . identifier[debug] ( literal[string] + identifier[str] ( identifier[x0] ))
identifier[logging] . identifier[debug] ( literal[string] + identifier[str] ( identifier[eps] ))
keyword[return] identifier[bounds] , identifier[x0] , identifier[eps]
|
def get_bounds_x0_eps(tuning_options):
"""compute bounds, x0 (the initial guess), and eps"""
values = tuning_options.tune_params.values()
if tuning_options.scaling:
#bounds = [(0, 1) for _ in values]
#x0 = [0.5 for _ in bounds]
eps = numpy.amin([1.0 / len(v) for v in values])
#reducing interval from [0, 1] to [0, eps*len(v)]
bounds = [(0, eps * len(v)) for v in values]
x0 = [0.5 * eps * len(v) for v in values] # depends on [control=['if'], data=[]]
else:
bounds = get_bounds(tuning_options.tune_params)
x0 = [(min_v + max_v) / 2.0 for (min_v, max_v) in bounds]
eps = 1000000000.0
for v_list in values:
vals = numpy.sort(v_list)
eps = min(eps, numpy.amin(numpy.gradient(vals))) # depends on [control=['for'], data=['v_list']]
tuning_options['eps'] = eps
logging.debug('get_bounds_x0_eps called')
logging.debug('bounds ' + str(bounds))
logging.debug('x0 ' + str(x0))
logging.debug('eps ' + str(eps))
return (bounds, x0, eps)
|
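Worked numbers for the scaled branch: with two tunable parameters holding 4 and 8 values, eps = min(1/4, 1/8) = 0.125, the reduced intervals become [0, 0.5] and [0, 1.0], and x0 starts at their midpoints:

import numpy

values = [[1, 2, 3, 4], list(range(8))]
eps = numpy.amin([1.0 / len(v) for v in values])
bounds = [(0, eps * len(v)) for v in values]
x0 = [0.5 * eps * len(v) for v in values]
assert eps == 0.125
assert bounds == [(0, 0.5), (0, 1.0)]
assert x0 == [0.25, 0.5]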
def send_media(self, media_id, user_ids, text='', thread_id=None):
"""
:param media_id:
:param self: bot
:param text: text of message
    :param user_ids: list of user_ids for creating a group, or one user_id to send to one person
:param thread_id: thread_id
"""
user_ids = _get_user_ids(self, user_ids)
if not isinstance(text, str) and not isinstance(user_ids, (list, str)):
        self.logger.error('Text must be a string, user_ids must be a list or string')
return False
if self.reached_limit('messages'):
self.logger.info("Out of messages for today.")
return False
media = self.get_media_info(media_id)
media = media[0] if isinstance(media, list) else media
self.delay('message')
if self.api.send_direct_item(
'media_share',
user_ids,
text=text,
thread=thread_id,
media_type=media.get('media_type'),
media_id=media.get('id')
):
self.total['messages'] += 1
return True
self.logger.info("Message to {user_ids} wasn't sent".format(user_ids=user_ids))
return False
|
def function[send_media, parameter[self, media_id, user_ids, text, thread_id]]:
constant[
:param media_id:
:param self: bot
:param text: text of message
    :param user_ids: list of user_ids for creating a group, or one user_id to send to one person
:param thread_id: thread_id
]
variable[user_ids] assign[=] call[name[_get_user_ids], parameter[name[self], name[user_ids]]]
if <ast.BoolOp object at 0x7da1b26ae8f0> begin[:]
call[name[self].logger.error, parameter[constant[Text must be a string, user_ids must be a list or string]]]
return[constant[False]]
if call[name[self].reached_limit, parameter[constant[messages]]] begin[:]
call[name[self].logger.info, parameter[constant[Out of messages for today.]]]
return[constant[False]]
variable[media] assign[=] call[name[self].get_media_info, parameter[name[media_id]]]
variable[media] assign[=] <ast.IfExp object at 0x7da18c4ce2f0>
call[name[self].delay, parameter[constant[message]]]
if call[name[self].api.send_direct_item, parameter[constant[media_share], name[user_ids]]] begin[:]
<ast.AugAssign object at 0x7da207f03250>
return[constant[True]]
call[name[self].logger.info, parameter[call[constant[Message to {user_ids} wasn't sent].format, parameter[]]]]
return[constant[False]]
|
keyword[def] identifier[send_media] ( identifier[self] , identifier[media_id] , identifier[user_ids] , identifier[text] = literal[string] , identifier[thread_id] = keyword[None] ):
literal[string]
identifier[user_ids] = identifier[_get_user_ids] ( identifier[self] , identifier[user_ids] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[text] , identifier[str] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[user_ids] ,( identifier[list] , identifier[str] )):
identifier[self] . identifier[logger] . identifier[error] ( literal[string] )
keyword[return] keyword[False]
keyword[if] identifier[self] . identifier[reached_limit] ( literal[string] ):
identifier[self] . identifier[logger] . identifier[info] ( literal[string] )
keyword[return] keyword[False]
identifier[media] = identifier[self] . identifier[get_media_info] ( identifier[media_id] )
identifier[media] = identifier[media] [ literal[int] ] keyword[if] identifier[isinstance] ( identifier[media] , identifier[list] ) keyword[else] identifier[media]
identifier[self] . identifier[delay] ( literal[string] )
keyword[if] identifier[self] . identifier[api] . identifier[send_direct_item] (
literal[string] ,
identifier[user_ids] ,
identifier[text] = identifier[text] ,
identifier[thread] = identifier[thread_id] ,
identifier[media_type] = identifier[media] . identifier[get] ( literal[string] ),
identifier[media_id] = identifier[media] . identifier[get] ( literal[string] )
):
identifier[self] . identifier[total] [ literal[string] ]+= literal[int]
keyword[return] keyword[True]
identifier[self] . identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[user_ids] = identifier[user_ids] ))
keyword[return] keyword[False]
|
def send_media(self, media_id, user_ids, text='', thread_id=None):
"""
:param media_id:
:param self: bot
:param text: text of message
    :param user_ids: list of user_ids for creating a group, or one user_id to send to one person
:param thread_id: thread_id
"""
user_ids = _get_user_ids(self, user_ids)
if not isinstance(text, str) and (not isinstance(user_ids, (list, str))):
        self.logger.error('Text must be a string, user_ids must be a list or string')
return False # depends on [control=['if'], data=[]]
if self.reached_limit('messages'):
self.logger.info('Out of messages for today.')
return False # depends on [control=['if'], data=[]]
media = self.get_media_info(media_id)
media = media[0] if isinstance(media, list) else media
self.delay('message')
if self.api.send_direct_item('media_share', user_ids, text=text, thread=thread_id, media_type=media.get('media_type'), media_id=media.get('id')):
self.total['messages'] += 1
return True # depends on [control=['if'], data=[]]
self.logger.info("Message to {user_ids} wasn't sent".format(user_ids=user_ids))
return False
|
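A minimal sketch of the guard-then-count pattern around the daily message limit; reached_limit() is stubbed here because the bot's real bookkeeping is not shown:

total = {'messages': 0}
LIMIT = 2

def reached_limit(key):
    return total[key] >= LIMIT

for _ in range(5):
    if reached_limit('messages'):
        break                       # out of messages for today
    total['messages'] += 1          # counted only after a successful send

assert total['messages'] == 2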
def match(self, opr, path):
""" 检查操作员是否匹配资源
"""
_url = urlparse.urlparse(path)
if not _url.path or not opr:
return False
if _url.path in self.free_routes:
return True
if _url.path not in self.routes:
return False
return opr in self.routes[_url.path]['oprs']
|
def function[match, parameter[self, opr, path]]:
constant[ Check whether the operator matches the resource
]
variable[_url] assign[=] call[name[urlparse].urlparse, parameter[name[path]]]
if <ast.BoolOp object at 0x7da1b0810760> begin[:]
return[constant[False]]
if compare[name[_url].path in name[self].free_routes] begin[:]
return[constant[True]]
if compare[name[_url].path <ast.NotIn object at 0x7da2590d7190> name[self].routes] begin[:]
return[constant[False]]
return[compare[name[opr] in call[call[name[self].routes][name[_url].path]][constant[oprs]]]]
|
keyword[def] identifier[match] ( identifier[self] , identifier[opr] , identifier[path] ):
literal[string]
identifier[_url] = identifier[urlparse] . identifier[urlparse] ( identifier[path] )
keyword[if] keyword[not] identifier[_url] . identifier[path] keyword[or] keyword[not] identifier[opr] :
keyword[return] keyword[False]
keyword[if] identifier[_url] . identifier[path] keyword[in] identifier[self] . identifier[free_routes] :
keyword[return] keyword[True]
keyword[if] identifier[_url] . identifier[path] keyword[not] keyword[in] identifier[self] . identifier[routes] :
keyword[return] keyword[False]
keyword[return] identifier[opr] keyword[in] identifier[self] . identifier[routes] [ identifier[_url] . identifier[path] ][ literal[string] ]
|
def match(self, opr, path):
""" 检查操作员是否匹配资源
"""
_url = urlparse.urlparse(path)
if not _url.path or not opr:
return False # depends on [control=['if'], data=[]]
if _url.path in self.free_routes:
return True # depends on [control=['if'], data=[]]
if _url.path not in self.routes:
return False # depends on [control=['if'], data=[]]
return opr in self.routes[_url.path]['oprs']
|
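A standalone sketch of the same route check; the original imports urlparse in the Python 2 style, so the import is guarded here to also run on Python 3:

try:
    import urlparse                   # Python 2
except ImportError:
    import urllib.parse as urlparse   # Python 3

free_routes = {'/ping'}
routes = {'/admin': {'oprs': {'alice'}}}

def match(opr, path):
    url = urlparse.urlparse(path)
    if not url.path or not opr:
        return False
    if url.path in free_routes:
        return True                   # unrestricted route
    if url.path not in routes:
        return False
    return opr in routes[url.path]['oprs']

assert match('anyone', '/ping?x=1')
assert not match('bob', '/admin')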
def _set_exception(self):
"""Called by a Job object to tell that an exception occured
during the processing of the function. The object will become
ready but not successful. The collector's notify_ready()
method will be called, but NOT the callback method"""
assert not self.ready()
self._data = sys.exc_info()
self._success = False
self._event.set()
if self._collector is not None:
self._collector.notify_ready(self)
|
def function[_set_exception, parameter[self]]:
constant[Called by a Job object to tell that an exception occurred
during the processing of the function. The object will become
ready but not successful. The collector's notify_ready()
method will be called, but NOT the callback method]
assert[<ast.UnaryOp object at 0x7da20e955300>]
name[self]._data assign[=] call[name[sys].exc_info, parameter[]]
name[self]._success assign[=] constant[False]
call[name[self]._event.set, parameter[]]
if compare[name[self]._collector is_not constant[None]] begin[:]
call[name[self]._collector.notify_ready, parameter[name[self]]]
|
keyword[def] identifier[_set_exception] ( identifier[self] ):
literal[string]
keyword[assert] keyword[not] identifier[self] . identifier[ready] ()
identifier[self] . identifier[_data] = identifier[sys] . identifier[exc_info] ()
identifier[self] . identifier[_success] = keyword[False]
identifier[self] . identifier[_event] . identifier[set] ()
keyword[if] identifier[self] . identifier[_collector] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_collector] . identifier[notify_ready] ( identifier[self] )
|
def _set_exception(self):
"""Called by a Job object to tell that an exception occured
during the processing of the function. The object will become
ready but not successful. The collector's notify_ready()
method will be called, but NOT the callback method"""
assert not self.ready()
self._data = sys.exc_info()
self._success = False
self._event.set()
if self._collector is not None:
self._collector.notify_ready(self) # depends on [control=['if'], data=[]]
|
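The capture-and-signal pattern of _set_exception (stash sys.exc_info(), leave the success flag False, set the event so waiters wake) shown in isolation:

import sys
import threading

event = threading.Event()
success = True
try:
    1 / 0
except ZeroDivisionError:
    data = sys.exc_info()   # (type, value, traceback)
    success = False
    event.set()             # waiters wake; the flag tells them it failed

assert event.is_set() and not success and data[0] is ZeroDivisionError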
def get_handlers(self, component_context, instance):
"""
Sets up service providers for the given component
:param component_context: The ComponentContext bean
:param instance: The component instance
:return: The list of handlers associated to the given component
"""
# Extract information from the context
configs = component_context.get_handler(
ipopo_constants.HANDLER_TEMPORAL
)
requires_filters = component_context.properties.get(
ipopo_constants.IPOPO_REQUIRES_FILTERS, None
)
temporal_timeouts = component_context.properties.get(
ipopo_constants.IPOPO_TEMPORAL_TIMEOUTS, None
)
# Prepare requirements
new_configs = self._prepare_configs(
configs, requires_filters, temporal_timeouts
)
# Return handlers
return [
TemporalDependency(field, requirement, timeout)
for field, (requirement, timeout) in new_configs.items()
]
|
def function[get_handlers, parameter[self, component_context, instance]]:
constant[
Sets up service providers for the given component
:param component_context: The ComponentContext bean
:param instance: The component instance
:return: The list of handlers associated to the given component
]
variable[configs] assign[=] call[name[component_context].get_handler, parameter[name[ipopo_constants].HANDLER_TEMPORAL]]
variable[requires_filters] assign[=] call[name[component_context].properties.get, parameter[name[ipopo_constants].IPOPO_REQUIRES_FILTERS, constant[None]]]
variable[temporal_timeouts] assign[=] call[name[component_context].properties.get, parameter[name[ipopo_constants].IPOPO_TEMPORAL_TIMEOUTS, constant[None]]]
variable[new_configs] assign[=] call[name[self]._prepare_configs, parameter[name[configs], name[requires_filters], name[temporal_timeouts]]]
return[<ast.ListComp object at 0x7da20c992740>]
|
keyword[def] identifier[get_handlers] ( identifier[self] , identifier[component_context] , identifier[instance] ):
literal[string]
identifier[configs] = identifier[component_context] . identifier[get_handler] (
identifier[ipopo_constants] . identifier[HANDLER_TEMPORAL]
)
identifier[requires_filters] = identifier[component_context] . identifier[properties] . identifier[get] (
identifier[ipopo_constants] . identifier[IPOPO_REQUIRES_FILTERS] , keyword[None]
)
identifier[temporal_timeouts] = identifier[component_context] . identifier[properties] . identifier[get] (
identifier[ipopo_constants] . identifier[IPOPO_TEMPORAL_TIMEOUTS] , keyword[None]
)
identifier[new_configs] = identifier[self] . identifier[_prepare_configs] (
identifier[configs] , identifier[requires_filters] , identifier[temporal_timeouts]
)
keyword[return] [
identifier[TemporalDependency] ( identifier[field] , identifier[requirement] , identifier[timeout] )
keyword[for] identifier[field] ,( identifier[requirement] , identifier[timeout] ) keyword[in] identifier[new_configs] . identifier[items] ()
]
|
def get_handlers(self, component_context, instance):
"""
Sets up service providers for the given component
:param component_context: The ComponentContext bean
:param instance: The component instance
:return: The list of handlers associated to the given component
"""
# Extract information from the context
configs = component_context.get_handler(ipopo_constants.HANDLER_TEMPORAL)
requires_filters = component_context.properties.get(ipopo_constants.IPOPO_REQUIRES_FILTERS, None)
temporal_timeouts = component_context.properties.get(ipopo_constants.IPOPO_TEMPORAL_TIMEOUTS, None)
# Prepare requirements
new_configs = self._prepare_configs(configs, requires_filters, temporal_timeouts)
# Return handlers
return [TemporalDependency(field, requirement, timeout) for (field, (requirement, timeout)) in new_configs.items()]
|
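The closing comprehension builds one handler per injected field from a {field: (requirement, timeout)} mapping; a shape-only sketch with plain tuples standing in for TemporalDependency:

new_configs = {'_svc': ('spec.Service', 30)}
handlers = [(field, requirement, timeout)
            for field, (requirement, timeout) in new_configs.items()]
assert handlers == [('_svc', 'spec.Service', 30)]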
def is_in_data_type_range(self, raise_exception=True):
"""Check if collection values are in physically possible ranges for the data_type.
If this method returns False, the Data Collection's data is
physically or mathematically impossible for the data_type."""
return self._header.data_type.is_in_range(
self._values, self._header.unit, raise_exception)
|
def function[is_in_data_type_range, parameter[self, raise_exception]]:
constant[Check if collection values are in physically possible ranges for the data_type.
If this method returns False, the Data Collection's data is
physically or mathematically impossible for the data_type.]
return[call[name[self]._header.data_type.is_in_range, parameter[name[self]._values, name[self]._header.unit, name[raise_exception]]]]
|
keyword[def] identifier[is_in_data_type_range] ( identifier[self] , identifier[raise_exception] = keyword[True] ):
literal[string]
keyword[return] identifier[self] . identifier[_header] . identifier[data_type] . identifier[is_in_range] (
identifier[self] . identifier[_values] , identifier[self] . identifier[_header] . identifier[unit] , identifier[raise_exception] )
|
def is_in_data_type_range(self, raise_exception=True):
"""Check if collection values are in physically possible ranges for the data_type.
If this method returns False, the Data Collection's data is
physically or mathematically impossible for the data_type."""
return self._header.data_type.is_in_range(self._values, self._header.unit, raise_exception)
|
def change_owners(self, group_id, owner_id):
"""Change the owner of a group.
.. note:: you must be the owner to change owners
:param str group_id: the group_id of a group
:param str owner_id: the ID of the new owner
:return: the result
:rtype: :class:`~groupy.api.groups.ChangeOwnersResult`
"""
url = utils.urljoin(self.url, 'change_owners')
payload = {
'requests': [{
'group_id': group_id,
'owner_id': owner_id,
}],
}
response = self.session.post(url, json=payload)
result, = response.data['results'] # should be exactly one
return ChangeOwnersResult(**result)
|
def function[change_owners, parameter[self, group_id, owner_id]]:
constant[Change the owner of a group.
.. note:: you must be the owner to change owners
:param str group_id: the group_id of a group
:param str owner_id: the ID of the new owner
:return: the result
:rtype: :class:`~groupy.api.groups.ChangeOwnersResult`
]
variable[url] assign[=] call[name[utils].urljoin, parameter[name[self].url, constant[change_owners]]]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da18fe932b0>], [<ast.List object at 0x7da18fe929e0>]]
variable[response] assign[=] call[name[self].session.post, parameter[name[url]]]
<ast.Tuple object at 0x7da1b1104790> assign[=] call[name[response].data][constant[results]]
return[call[name[ChangeOwnersResult], parameter[]]]
|
keyword[def] identifier[change_owners] ( identifier[self] , identifier[group_id] , identifier[owner_id] ):
literal[string]
identifier[url] = identifier[utils] . identifier[urljoin] ( identifier[self] . identifier[url] , literal[string] )
identifier[payload] ={
literal[string] :[{
literal[string] : identifier[group_id] ,
literal[string] : identifier[owner_id] ,
}],
}
identifier[response] = identifier[self] . identifier[session] . identifier[post] ( identifier[url] , identifier[json] = identifier[payload] )
identifier[result] ,= identifier[response] . identifier[data] [ literal[string] ]
keyword[return] identifier[ChangeOwnersResult] (** identifier[result] )
|
def change_owners(self, group_id, owner_id):
"""Change the owner of a group.
.. note:: you must be the owner to change owners
:param str group_id: the group_id of a group
:param str owner_id: the ID of the new owner
:return: the result
:rtype: :class:`~groupy.api.groups.ChangeOwnersResult`
"""
url = utils.urljoin(self.url, 'change_owners')
payload = {'requests': [{'group_id': group_id, 'owner_id': owner_id}]}
response = self.session.post(url, json=payload)
(result,) = response.data['results'] # should be exactly one
return ChangeOwnersResult(**result)
|
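The single-element unpacking in change_owners enforces the exactly-one-result expectation at runtime: with zero or several entries it raises ValueError. A tiny illustration with a stubbed payload:

response_data = {'results': [{'group_id': 'g1', 'owner_id': 'u2'}]}
result, = response_data['results']   # exactly one element, or ValueError
assert result['owner_id'] == 'u2'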
def dispatch(self, test=False): # pylint: disable=too-many-branches
"""
Send configuration to satellites
:return: None
"""
if not self.new_to_dispatch:
raise DispatcherError("Dispatcher cannot dispatch, "
"because no configuration is prepared!")
if self.first_dispatch_done:
raise DispatcherError("Dispatcher cannot dispatch, "
"because the configuration is still dispatched!")
if self.dispatch_ok:
logger.info("Dispatching is already done and ok...")
return
logger.info("Trying to send configuration to the satellites...")
self.dispatch_ok = True
        # todo: the 3 loops below may be factorized
for link in self.arbiters:
# If not me and a spare arbiter...
if link == self.arbiter_link:
# I exclude myself from the dispatching, I have my configuration ;)
continue
if not link.active:
# I exclude the daemons that are not active
continue
if not link.spare:
# Do not dispatch to a master arbiter!
continue
if link.configuration_sent:
logger.debug("Arbiter %s already sent!", link.name)
continue
if not link.reachable:
logger.debug("Arbiter %s is not reachable to receive its configuration",
link.name)
continue
logger.info("Sending configuration to the arbiter %s", link.name)
logger.debug("- %s", link.cfg)
link.put_conf(link.cfg, test=test)
link.configuration_sent = True
logger.info("- sent")
# Now that the spare arbiter has a configuration, tell him it must not run,
# because I'm not dead ;)
link.do_not_run()
for link in self.schedulers:
if link.configuration_sent:
logger.debug("Scheduler %s already sent!", link.name)
continue
if not link.active:
# I exclude the daemons that are not active
continue
if not link.reachable:
logger.debug("Scheduler %s is not reachable to receive its configuration",
link.name)
continue
logger.info("Sending configuration to the scheduler %s", link.name)
logger.debug("- %s", link.cfg)
link.put_conf(link.cfg, test=test)
link.configuration_sent = True
logger.info("- sent")
for link in self.satellites:
if link.configuration_sent:
logger.debug("%s %s already sent!", link.type, link.name)
continue
if not link.active:
# I exclude the daemons that are not active
continue
if not link.reachable:
logger.warning("%s %s is not reachable to receive its configuration",
link.type, link.name)
continue
logger.info("Sending configuration to the %s %s", link.type, link.name)
logger.debug("- %s", link.cfg)
link.put_conf(link.cfg, test=test)
link.configuration_sent = True
logger.info("- sent")
if self.dispatch_ok:
# Newly prepared configuration got dispatched correctly
self.new_to_dispatch = False
self.first_dispatch_done = True
|
def function[dispatch, parameter[self, test]]:
constant[
Send configuration to satellites
:return: None
]
if <ast.UnaryOp object at 0x7da1b26af310> begin[:]
<ast.Raise object at 0x7da1b26af670>
if name[self].first_dispatch_done begin[:]
<ast.Raise object at 0x7da1b26ad9c0>
if name[self].dispatch_ok begin[:]
call[name[logger].info, parameter[constant[Dispatching is already done and ok...]]]
return[None]
call[name[logger].info, parameter[constant[Trying to send configuration to the satellites...]]]
name[self].dispatch_ok assign[=] constant[True]
for taget[name[link]] in starred[name[self].arbiters] begin[:]
if compare[name[link] equal[==] name[self].arbiter_link] begin[:]
continue
if <ast.UnaryOp object at 0x7da1b26aded0> begin[:]
continue
if <ast.UnaryOp object at 0x7da1b26afc10> begin[:]
continue
if name[link].configuration_sent begin[:]
call[name[logger].debug, parameter[constant[Arbiter %s already sent!], name[link].name]]
continue
if <ast.UnaryOp object at 0x7da1b26adc60> begin[:]
call[name[logger].debug, parameter[constant[Arbiter %s is not reachable to receive its configuration], name[link].name]]
continue
call[name[logger].info, parameter[constant[Sending configuration to the arbiter %s], name[link].name]]
call[name[logger].debug, parameter[constant[- %s], name[link].cfg]]
call[name[link].put_conf, parameter[name[link].cfg]]
name[link].configuration_sent assign[=] constant[True]
call[name[logger].info, parameter[constant[- sent]]]
call[name[link].do_not_run, parameter[]]
for taget[name[link]] in starred[name[self].schedulers] begin[:]
if name[link].configuration_sent begin[:]
call[name[logger].debug, parameter[constant[Scheduler %s already sent!], name[link].name]]
continue
if <ast.UnaryOp object at 0x7da1b26afc40> begin[:]
continue
if <ast.UnaryOp object at 0x7da1b26ae590> begin[:]
call[name[logger].debug, parameter[constant[Scheduler %s is not reachable to receive its configuration], name[link].name]]
continue
call[name[logger].info, parameter[constant[Sending configuration to the scheduler %s], name[link].name]]
call[name[logger].debug, parameter[constant[- %s], name[link].cfg]]
call[name[link].put_conf, parameter[name[link].cfg]]
name[link].configuration_sent assign[=] constant[True]
call[name[logger].info, parameter[constant[- sent]]]
for taget[name[link]] in starred[name[self].satellites] begin[:]
if name[link].configuration_sent begin[:]
call[name[logger].debug, parameter[constant[%s %s already sent!], name[link].type, name[link].name]]
continue
if <ast.UnaryOp object at 0x7da2041d92d0> begin[:]
continue
if <ast.UnaryOp object at 0x7da2041d9600> begin[:]
call[name[logger].warning, parameter[constant[%s %s is not reachable to receive its configuration], name[link].type, name[link].name]]
continue
call[name[logger].info, parameter[constant[Sending configuration to the %s %s], name[link].type, name[link].name]]
call[name[logger].debug, parameter[constant[- %s], name[link].cfg]]
call[name[link].put_conf, parameter[name[link].cfg]]
name[link].configuration_sent assign[=] constant[True]
call[name[logger].info, parameter[constant[- sent]]]
if name[self].dispatch_ok begin[:]
name[self].new_to_dispatch assign[=] constant[False]
name[self].first_dispatch_done assign[=] constant[True]
|
keyword[def] identifier[dispatch] ( identifier[self] , identifier[test] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[new_to_dispatch] :
keyword[raise] identifier[DispatcherError] ( literal[string]
literal[string] )
keyword[if] identifier[self] . identifier[first_dispatch_done] :
keyword[raise] identifier[DispatcherError] ( literal[string]
literal[string] )
keyword[if] identifier[self] . identifier[dispatch_ok] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[return]
identifier[logger] . identifier[info] ( literal[string] )
identifier[self] . identifier[dispatch_ok] = keyword[True]
keyword[for] identifier[link] keyword[in] identifier[self] . identifier[arbiters] :
keyword[if] identifier[link] == identifier[self] . identifier[arbiter_link] :
keyword[continue]
keyword[if] keyword[not] identifier[link] . identifier[active] :
keyword[continue]
keyword[if] keyword[not] identifier[link] . identifier[spare] :
keyword[continue]
keyword[if] identifier[link] . identifier[configuration_sent] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[link] . identifier[name] )
keyword[continue]
keyword[if] keyword[not] identifier[link] . identifier[reachable] :
identifier[logger] . identifier[debug] ( literal[string] ,
identifier[link] . identifier[name] )
keyword[continue]
identifier[logger] . identifier[info] ( literal[string] , identifier[link] . identifier[name] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[link] . identifier[cfg] )
identifier[link] . identifier[put_conf] ( identifier[link] . identifier[cfg] , identifier[test] = identifier[test] )
identifier[link] . identifier[configuration_sent] = keyword[True]
identifier[logger] . identifier[info] ( literal[string] )
identifier[link] . identifier[do_not_run] ()
keyword[for] identifier[link] keyword[in] identifier[self] . identifier[schedulers] :
keyword[if] identifier[link] . identifier[configuration_sent] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[link] . identifier[name] )
keyword[continue]
keyword[if] keyword[not] identifier[link] . identifier[active] :
keyword[continue]
keyword[if] keyword[not] identifier[link] . identifier[reachable] :
identifier[logger] . identifier[debug] ( literal[string] ,
identifier[link] . identifier[name] )
keyword[continue]
identifier[logger] . identifier[info] ( literal[string] , identifier[link] . identifier[name] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[link] . identifier[cfg] )
identifier[link] . identifier[put_conf] ( identifier[link] . identifier[cfg] , identifier[test] = identifier[test] )
identifier[link] . identifier[configuration_sent] = keyword[True]
identifier[logger] . identifier[info] ( literal[string] )
keyword[for] identifier[link] keyword[in] identifier[self] . identifier[satellites] :
keyword[if] identifier[link] . identifier[configuration_sent] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[link] . identifier[type] , identifier[link] . identifier[name] )
keyword[continue]
keyword[if] keyword[not] identifier[link] . identifier[active] :
keyword[continue]
keyword[if] keyword[not] identifier[link] . identifier[reachable] :
identifier[logger] . identifier[warning] ( literal[string] ,
identifier[link] . identifier[type] , identifier[link] . identifier[name] )
keyword[continue]
identifier[logger] . identifier[info] ( literal[string] , identifier[link] . identifier[type] , identifier[link] . identifier[name] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[link] . identifier[cfg] )
identifier[link] . identifier[put_conf] ( identifier[link] . identifier[cfg] , identifier[test] = identifier[test] )
identifier[link] . identifier[configuration_sent] = keyword[True]
identifier[logger] . identifier[info] ( literal[string] )
keyword[if] identifier[self] . identifier[dispatch_ok] :
identifier[self] . identifier[new_to_dispatch] = keyword[False]
identifier[self] . identifier[first_dispatch_done] = keyword[True]
|
def dispatch(self, test=False): # pylint: disable=too-many-branches
'\n Send configuration to satellites\n\n :return: None\n '
if not self.new_to_dispatch:
raise DispatcherError('Dispatcher cannot dispatch, because no configuration is prepared!') # depends on [control=['if'], data=[]]
if self.first_dispatch_done:
raise DispatcherError('Dispatcher cannot dispatch, because the configuration is still dispatched!') # depends on [control=['if'], data=[]]
if self.dispatch_ok:
logger.info('Dispatching is already done and ok...')
return # depends on [control=['if'], data=[]]
logger.info('Trying to send configuration to the satellites...')
self.dispatch_ok = True
# todo: the 3 loops hereunder may be factorized
for link in self.arbiters:
# If not me and a spare arbiter...
if link == self.arbiter_link:
# I exclude myself from the dispatching, I have my configuration ;)
continue # depends on [control=['if'], data=[]]
if not link.active:
# I exclude the daemons that are not active
continue # depends on [control=['if'], data=[]]
if not link.spare:
# Do not dispatch to a master arbiter!
continue # depends on [control=['if'], data=[]]
if link.configuration_sent:
logger.debug('Arbiter %s already sent!', link.name)
continue # depends on [control=['if'], data=[]]
if not link.reachable:
logger.debug('Arbiter %s is not reachable to receive its configuration', link.name)
continue # depends on [control=['if'], data=[]]
logger.info('Sending configuration to the arbiter %s', link.name)
logger.debug('- %s', link.cfg)
link.put_conf(link.cfg, test=test)
link.configuration_sent = True
logger.info('- sent')
# Now that the spare arbiter has a configuration, tell him it must not run,
# because I'm not dead ;)
link.do_not_run() # depends on [control=['for'], data=['link']]
for link in self.schedulers:
if link.configuration_sent:
logger.debug('Scheduler %s already sent!', link.name)
continue # depends on [control=['if'], data=[]]
if not link.active:
# I exclude the daemons that are not active
continue # depends on [control=['if'], data=[]]
if not link.reachable:
logger.debug('Scheduler %s is not reachable to receive its configuration', link.name)
continue # depends on [control=['if'], data=[]]
logger.info('Sending configuration to the scheduler %s', link.name)
logger.debug('- %s', link.cfg)
link.put_conf(link.cfg, test=test)
link.configuration_sent = True
logger.info('- sent') # depends on [control=['for'], data=['link']]
for link in self.satellites:
if link.configuration_sent:
logger.debug('%s %s already sent!', link.type, link.name)
continue # depends on [control=['if'], data=[]]
if not link.active:
# I exclude the daemons that are not active
continue # depends on [control=['if'], data=[]]
if not link.reachable:
logger.warning('%s %s is not reachable to receive its configuration', link.type, link.name)
continue # depends on [control=['if'], data=[]]
logger.info('Sending configuration to the %s %s', link.type, link.name)
logger.debug('- %s', link.cfg)
link.put_conf(link.cfg, test=test)
link.configuration_sent = True
logger.info('- sent') # depends on [control=['for'], data=['link']]
if self.dispatch_ok:
# Newly prepared configuration got dispatched correctly
self.new_to_dispatch = False
self.first_dispatch_done = True # depends on [control=['if'], data=[]]
|
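
The `# todo` note in `dispatch` above observes that the three per-link loops repeat the same guard clauses. A minimal sketch of one possible factorization follows; `Link` and `send_configuration` are illustrative stand-ins under that assumption, not Alignak's actual classes:

# A factorization sketch. `Link` is a simplified stand-in for Alignak's
# satellite link objects; the real ones carry more state (spare flags,
# daemon-specific payloads, retry bookkeeping, ...).
import logging
from dataclasses import dataclass, field

logger = logging.getLogger(__name__)

@dataclass
class Link:
    name: str
    type: str = 'scheduler'
    active: bool = True
    reachable: bool = True
    configuration_sent: bool = False
    cfg: dict = field(default_factory=dict)

    def put_conf(self, cfg, test=False):
        # Placeholder for the HTTP push the real daemon performs.
        pass

def send_configuration(link, test=False):
    """Apply the shared guard clauses, then push the configuration."""
    if link.configuration_sent:
        logger.debug("%s %s already sent!", link.type, link.name)
        return
    if not link.active:
        return  # skip daemons that are not active
    if not link.reachable:
        logger.warning("%s %s is not reachable to receive its configuration",
                       link.type, link.name)
        return
    logger.info("Sending configuration to the %s %s", link.type, link.name)
    link.put_conf(link.cfg, test=test)
    link.configuration_sent = True
    logger.info("- sent")

The arbiter loop would still need its extra self/spare checks and the trailing `do_not_run()` call, so a full factorization would take a small per-type hook on top of this shared helper.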
def generate_lines(text, ext=['.txt', '.md', '.rst', '.asciidoc', '.asc']):
r""" Yield text one line at a time from from a single file path, files in a directory, or a text string
>>> list(generate_lines('Hello crazy\r\nMS/Apple world\rof EOLS.\n'))
['Hello crazy\r\n', 'MS/Apple world\r', 'of EOLS.\n']
"""
if isinstance(text, basestring):
if len(text) <= 256:
if os.path.isfile(text) and os.path.splitext(text)[-1].lower() in ext:
return open(text)
elif os.path.isdir(text):
return chain.from_iterable(generate_lines(stat['path']) for stat in find_files(text, ext=ext))
else:
return (line for line in Split(text=text))
else:
return Split(text=text)
return chain.from_iterable(generate_lines(obj) for obj in text)
|
def function[generate_lines, parameter[text, ext]]:
constant[ Yield text one line at a time from a single file path, files in a directory, or a text string
>>> list(generate_lines('Hello crazy\r\nMS/Apple world\rof EOLS.\n'))
['Hello crazy\r\n', 'MS/Apple world\r', 'of EOLS.\n']
]
if call[name[isinstance], parameter[name[text], name[basestring]]] begin[:]
if compare[call[name[len], parameter[name[text]]] less_or_equal[<=] constant[256]] begin[:]
if <ast.BoolOp object at 0x7da18fe92500> begin[:]
return[call[name[open], parameter[name[text]]]]
return[call[name[chain].from_iterable, parameter[<ast.GeneratorExp object at 0x7da18fe91b70>]]]
|
keyword[def] identifier[generate_lines] ( identifier[text] , identifier[ext] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]):
literal[string]
keyword[if] identifier[isinstance] ( identifier[text] , identifier[basestring] ):
keyword[if] identifier[len] ( identifier[text] )<= literal[int] :
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[text] ) keyword[and] identifier[os] . identifier[path] . identifier[splitext] ( identifier[text] )[- literal[int] ]. identifier[lower] () keyword[in] identifier[ext] :
keyword[return] identifier[open] ( identifier[text] )
keyword[elif] identifier[os] . identifier[path] . identifier[isdir] ( identifier[text] ):
keyword[return] identifier[chain] . identifier[from_iterable] ( identifier[generate_lines] ( identifier[stat] [ literal[string] ]) keyword[for] identifier[stat] keyword[in] identifier[find_files] ( identifier[text] , identifier[ext] = identifier[ext] ))
keyword[else] :
keyword[return] ( identifier[line] keyword[for] identifier[line] keyword[in] identifier[Split] ( identifier[text] = identifier[text] ))
keyword[else] :
keyword[return] identifier[Split] ( identifier[text] = identifier[text] )
keyword[return] identifier[chain] . identifier[from_iterable] ( identifier[generate_lines] ( identifier[obj] ) keyword[for] identifier[obj] keyword[in] identifier[text] )
|
def generate_lines(text, ext=['.txt', '.md', '.rst', '.asciidoc', '.asc']):
""" Yield text one line at a time from from a single file path, files in a directory, or a text string
>>> list(generate_lines('Hello crazy\\r\\nMS/Apple world\\rof EOLS.\\n'))
['Hello crazy\\r\\n', 'MS/Apple world\\r', 'of EOLS.\\n']
"""
if isinstance(text, basestring):
if len(text) <= 256:
if os.path.isfile(text) and os.path.splitext(text)[-1].lower() in ext:
return open(text) # depends on [control=['if'], data=[]]
elif os.path.isdir(text):
return chain.from_iterable((generate_lines(stat['path']) for stat in find_files(text, ext=ext))) # depends on [control=['if'], data=[]]
else:
return (line for line in Split(text=text)) # depends on [control=['if'], data=[]]
else:
return Split(text=text) # depends on [control=['if'], data=[]]
return chain.from_iterable((generate_lines(obj) for obj in text))
|
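
A short usage sketch for `generate_lines`, assuming it runs inside its original module (the raw-text branch relies on the module's `Split` helper, which is not defined in the excerpt); the paths and the `process` handler are placeholders:

# Placeholders throughout: 'notes.txt' and 'docs/' must exist on disk,
# and `process` stands in for whatever consumes each line.
for line in generate_lines('alpha\nbeta\n'):       # raw text (Split branch)
    print(repr(line))                              # 'alpha\n', then 'beta\n'

for line in generate_lines('notes.txt'):           # a single matching file
    process(line)

for line in generate_lines('docs/', ext=['.md']):  # every .md under docs/
    process(line)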