code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def from_pickle(cls, filename):
"""Loads and Returns a Mesh from a pickle file, given a filename."""
with open(filename, 'rb') as f:
mesh = pickle.load(f).copy()
return mesh
|
def function[from_pickle, parameter[cls, filename]]:
constant[Loads and Returns a Mesh from a pickle file, given a filename.]
with call[name[open], parameter[name[filename], constant[rb]]] begin[:]
variable[mesh] assign[=] call[call[name[pickle].load, parameter[name[f]]].copy, parameter[]]
return[name[mesh]]
|
keyword[def] identifier[from_pickle] ( identifier[cls] , identifier[filename] ):
literal[string]
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] :
identifier[mesh] = identifier[pickle] . identifier[load] ( identifier[f] ). identifier[copy] ()
keyword[return] identifier[mesh]
|
def from_pickle(cls, filename):
"""Loads and Returns a Mesh from a pickle file, given a filename."""
with open(filename, 'rb') as f:
mesh = pickle.load(f).copy() # depends on [control=['with'], data=['f']]
return mesh
|
def status_count(self, project):
'''
return a dict
'''
result = dict()
if project not in self.projects:
self._list_project()
if project not in self.projects:
return result
tablename = self._tablename(project)
for status, count in self._execute("SELECT `status`, count(1) FROM %s GROUP BY `status`" %
self.escape(tablename)):
result[status] = count
return result
|
def function[status_count, parameter[self, project]]:
constant[
return a dict
]
variable[result] assign[=] call[name[dict], parameter[]]
if compare[name[project] <ast.NotIn object at 0x7da2590d7190> name[self].projects] begin[:]
call[name[self]._list_project, parameter[]]
if compare[name[project] <ast.NotIn object at 0x7da2590d7190> name[self].projects] begin[:]
return[name[result]]
variable[tablename] assign[=] call[name[self]._tablename, parameter[name[project]]]
for taget[tuple[[<ast.Name object at 0x7da2044c1750>, <ast.Name object at 0x7da2044c3b80>]]] in starred[call[name[self]._execute, parameter[binary_operation[constant[SELECT `status`, count(1) FROM %s GROUP BY `status`] <ast.Mod object at 0x7da2590d6920> call[name[self].escape, parameter[name[tablename]]]]]]] begin[:]
call[name[result]][name[status]] assign[=] name[count]
return[name[result]]
|
keyword[def] identifier[status_count] ( identifier[self] , identifier[project] ):
literal[string]
identifier[result] = identifier[dict] ()
keyword[if] identifier[project] keyword[not] keyword[in] identifier[self] . identifier[projects] :
identifier[self] . identifier[_list_project] ()
keyword[if] identifier[project] keyword[not] keyword[in] identifier[self] . identifier[projects] :
keyword[return] identifier[result]
identifier[tablename] = identifier[self] . identifier[_tablename] ( identifier[project] )
keyword[for] identifier[status] , identifier[count] keyword[in] identifier[self] . identifier[_execute] ( literal[string] %
identifier[self] . identifier[escape] ( identifier[tablename] )):
identifier[result] [ identifier[status] ]= identifier[count]
keyword[return] identifier[result]
|
def status_count(self, project):
"""
return a dict
"""
result = dict()
if project not in self.projects:
self._list_project() # depends on [control=['if'], data=[]]
if project not in self.projects:
return result # depends on [control=['if'], data=[]]
tablename = self._tablename(project)
for (status, count) in self._execute('SELECT `status`, count(1) FROM %s GROUP BY `status`' % self.escape(tablename)):
result[status] = count # depends on [control=['for'], data=[]]
return result
|
def evaluate(self, dataset, metric='auto'):
"""
Evaluate the model by making predictions of target values and comparing
these to actual values.
Parameters
----------
dataset : SFrame
Dataset of new observations. Must include columns with the same
names as the session_id, target and features used for model training.
Additional columns are ignored.
metric : str, optional
Name of the evaluation metric. Possible values are:
- 'auto' : Returns all available metrics.
- 'accuracy' : Classification accuracy (micro average).
- 'auc' : Area under the ROC curve (macro average)
- 'precision' : Precision score (macro average)
- 'recall' : Recall score (macro average)
- 'f1_score' : F1 score (macro average)
- 'log_loss' : Log loss
- 'confusion_matrix' : An SFrame with counts of possible
prediction/true label combinations.
- 'roc_curve' : An SFrame containing information needed for an
ROC curve
Returns
-------
out : dict
Dictionary of evaluation results where the key is the name of the
evaluation metric (e.g. `accuracy`) and the value is the evaluation
score.
See Also
----------
create, predict
Examples
----------
.. sourcecode:: python
>>> results = model.evaluate(data)
>>> print results['accuracy']
"""
avail_metrics = ['accuracy', 'auc', 'precision', 'recall',
'f1_score', 'log_loss', 'confusion_matrix', 'roc_curve']
_tkutl._check_categorical_option_type(
'metric', metric, avail_metrics + ['auto'])
if metric == 'auto':
metrics = avail_metrics
else:
metrics = [metric]
probs = self.predict(dataset, output_type='probability_vector')
classes = self.predict(dataset, output_type='class')
ret = {}
if 'accuracy' in metrics:
ret['accuracy'] = _evaluation.accuracy(dataset[self.target], classes)
if 'auc' in metrics:
ret['auc'] = _evaluation.auc(dataset[self.target], probs, index_map=self._target_id_map)
if 'precision' in metrics:
ret['precision'] = _evaluation.precision(dataset[self.target], classes)
if 'recall' in metrics:
ret['recall'] = _evaluation.recall(dataset[self.target], classes)
if 'f1_score' in metrics:
ret['f1_score'] = _evaluation.f1_score(dataset[self.target], classes)
if 'log_loss' in metrics:
ret['log_loss'] = _evaluation.log_loss(dataset[self.target], probs, index_map=self._target_id_map)
if 'confusion_matrix' in metrics:
ret['confusion_matrix'] = _evaluation.confusion_matrix(dataset[self.target], classes)
if 'roc_curve' in metrics:
ret['roc_curve'] = _evaluation.roc_curve(dataset[self.target], probs, index_map=self._target_id_map)
return ret
|
def function[evaluate, parameter[self, dataset, metric]]:
constant[
Evaluate the model by making predictions of target values and comparing
these to actual values.
Parameters
----------
dataset : SFrame
Dataset of new observations. Must include columns with the same
names as the session_id, target and features used for model training.
Additional columns are ignored.
metric : str, optional
Name of the evaluation metric. Possible values are:
- 'auto' : Returns all available metrics.
- 'accuracy' : Classification accuracy (micro average).
- 'auc' : Area under the ROC curve (macro average)
- 'precision' : Precision score (macro average)
- 'recall' : Recall score (macro average)
- 'f1_score' : F1 score (macro average)
- 'log_loss' : Log loss
- 'confusion_matrix' : An SFrame with counts of possible
prediction/true label combinations.
- 'roc_curve' : An SFrame containing information needed for an
ROC curve
Returns
-------
out : dict
Dictionary of evaluation results where the key is the name of the
evaluation metric (e.g. `accuracy`) and the value is the evaluation
score.
See Also
----------
create, predict
Examples
----------
.. sourcecode:: python
>>> results = model.evaluate(data)
>>> print results['accuracy']
]
variable[avail_metrics] assign[=] list[[<ast.Constant object at 0x7da20c991ba0>, <ast.Constant object at 0x7da20c993460>, <ast.Constant object at 0x7da20c991210>, <ast.Constant object at 0x7da20c991ea0>, <ast.Constant object at 0x7da20c9932b0>, <ast.Constant object at 0x7da20c9926e0>, <ast.Constant object at 0x7da20c993fd0>, <ast.Constant object at 0x7da20c990070>]]
call[name[_tkutl]._check_categorical_option_type, parameter[constant[metric], name[metric], binary_operation[name[avail_metrics] + list[[<ast.Constant object at 0x7da20c993f10>]]]]]
if compare[name[metric] equal[==] constant[auto]] begin[:]
variable[metrics] assign[=] name[avail_metrics]
variable[probs] assign[=] call[name[self].predict, parameter[name[dataset]]]
variable[classes] assign[=] call[name[self].predict, parameter[name[dataset]]]
variable[ret] assign[=] dictionary[[], []]
if compare[constant[accuracy] in name[metrics]] begin[:]
call[name[ret]][constant[accuracy]] assign[=] call[name[_evaluation].accuracy, parameter[call[name[dataset]][name[self].target], name[classes]]]
if compare[constant[auc] in name[metrics]] begin[:]
call[name[ret]][constant[auc]] assign[=] call[name[_evaluation].auc, parameter[call[name[dataset]][name[self].target], name[probs]]]
if compare[constant[precision] in name[metrics]] begin[:]
call[name[ret]][constant[precision]] assign[=] call[name[_evaluation].precision, parameter[call[name[dataset]][name[self].target], name[classes]]]
if compare[constant[recall] in name[metrics]] begin[:]
call[name[ret]][constant[recall]] assign[=] call[name[_evaluation].recall, parameter[call[name[dataset]][name[self].target], name[classes]]]
if compare[constant[f1_score] in name[metrics]] begin[:]
call[name[ret]][constant[f1_score]] assign[=] call[name[_evaluation].f1_score, parameter[call[name[dataset]][name[self].target], name[classes]]]
if compare[constant[log_loss] in name[metrics]] begin[:]
call[name[ret]][constant[log_loss]] assign[=] call[name[_evaluation].log_loss, parameter[call[name[dataset]][name[self].target], name[probs]]]
if compare[constant[confusion_matrix] in name[metrics]] begin[:]
call[name[ret]][constant[confusion_matrix]] assign[=] call[name[_evaluation].confusion_matrix, parameter[call[name[dataset]][name[self].target], name[classes]]]
if compare[constant[roc_curve] in name[metrics]] begin[:]
call[name[ret]][constant[roc_curve]] assign[=] call[name[_evaluation].roc_curve, parameter[call[name[dataset]][name[self].target], name[probs]]]
return[name[ret]]
|
keyword[def] identifier[evaluate] ( identifier[self] , identifier[dataset] , identifier[metric] = literal[string] ):
literal[string]
identifier[avail_metrics] =[ literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[_tkutl] . identifier[_check_categorical_option_type] (
literal[string] , identifier[metric] , identifier[avail_metrics] +[ literal[string] ])
keyword[if] identifier[metric] == literal[string] :
identifier[metrics] = identifier[avail_metrics]
keyword[else] :
identifier[metrics] =[ identifier[metric] ]
identifier[probs] = identifier[self] . identifier[predict] ( identifier[dataset] , identifier[output_type] = literal[string] )
identifier[classes] = identifier[self] . identifier[predict] ( identifier[dataset] , identifier[output_type] = literal[string] )
identifier[ret] ={}
keyword[if] literal[string] keyword[in] identifier[metrics] :
identifier[ret] [ literal[string] ]= identifier[_evaluation] . identifier[accuracy] ( identifier[dataset] [ identifier[self] . identifier[target] ], identifier[classes] )
keyword[if] literal[string] keyword[in] identifier[metrics] :
identifier[ret] [ literal[string] ]= identifier[_evaluation] . identifier[auc] ( identifier[dataset] [ identifier[self] . identifier[target] ], identifier[probs] , identifier[index_map] = identifier[self] . identifier[_target_id_map] )
keyword[if] literal[string] keyword[in] identifier[metrics] :
identifier[ret] [ literal[string] ]= identifier[_evaluation] . identifier[precision] ( identifier[dataset] [ identifier[self] . identifier[target] ], identifier[classes] )
keyword[if] literal[string] keyword[in] identifier[metrics] :
identifier[ret] [ literal[string] ]= identifier[_evaluation] . identifier[recall] ( identifier[dataset] [ identifier[self] . identifier[target] ], identifier[classes] )
keyword[if] literal[string] keyword[in] identifier[metrics] :
identifier[ret] [ literal[string] ]= identifier[_evaluation] . identifier[f1_score] ( identifier[dataset] [ identifier[self] . identifier[target] ], identifier[classes] )
keyword[if] literal[string] keyword[in] identifier[metrics] :
identifier[ret] [ literal[string] ]= identifier[_evaluation] . identifier[log_loss] ( identifier[dataset] [ identifier[self] . identifier[target] ], identifier[probs] , identifier[index_map] = identifier[self] . identifier[_target_id_map] )
keyword[if] literal[string] keyword[in] identifier[metrics] :
identifier[ret] [ literal[string] ]= identifier[_evaluation] . identifier[confusion_matrix] ( identifier[dataset] [ identifier[self] . identifier[target] ], identifier[classes] )
keyword[if] literal[string] keyword[in] identifier[metrics] :
identifier[ret] [ literal[string] ]= identifier[_evaluation] . identifier[roc_curve] ( identifier[dataset] [ identifier[self] . identifier[target] ], identifier[probs] , identifier[index_map] = identifier[self] . identifier[_target_id_map] )
keyword[return] identifier[ret]
|
def evaluate(self, dataset, metric='auto'):
"""
Evaluate the model by making predictions of target values and comparing
these to actual values.
Parameters
----------
dataset : SFrame
Dataset of new observations. Must include columns with the same
names as the session_id, target and features used for model training.
Additional columns are ignored.
metric : str, optional
Name of the evaluation metric. Possible values are:
- 'auto' : Returns all available metrics.
- 'accuracy' : Classification accuracy (micro average).
- 'auc' : Area under the ROC curve (macro average)
- 'precision' : Precision score (macro average)
- 'recall' : Recall score (macro average)
- 'f1_score' : F1 score (macro average)
- 'log_loss' : Log loss
- 'confusion_matrix' : An SFrame with counts of possible
prediction/true label combinations.
- 'roc_curve' : An SFrame containing information needed for an
ROC curve
Returns
-------
out : dict
Dictionary of evaluation results where the key is the name of the
evaluation metric (e.g. `accuracy`) and the value is the evaluation
score.
See Also
----------
create, predict
Examples
----------
.. sourcecode:: python
>>> results = model.evaluate(data)
>>> print results['accuracy']
"""
avail_metrics = ['accuracy', 'auc', 'precision', 'recall', 'f1_score', 'log_loss', 'confusion_matrix', 'roc_curve']
_tkutl._check_categorical_option_type('metric', metric, avail_metrics + ['auto'])
if metric == 'auto':
metrics = avail_metrics # depends on [control=['if'], data=[]]
else:
metrics = [metric]
probs = self.predict(dataset, output_type='probability_vector')
classes = self.predict(dataset, output_type='class')
ret = {}
if 'accuracy' in metrics:
ret['accuracy'] = _evaluation.accuracy(dataset[self.target], classes) # depends on [control=['if'], data=[]]
if 'auc' in metrics:
ret['auc'] = _evaluation.auc(dataset[self.target], probs, index_map=self._target_id_map) # depends on [control=['if'], data=[]]
if 'precision' in metrics:
ret['precision'] = _evaluation.precision(dataset[self.target], classes) # depends on [control=['if'], data=[]]
if 'recall' in metrics:
ret['recall'] = _evaluation.recall(dataset[self.target], classes) # depends on [control=['if'], data=[]]
if 'f1_score' in metrics:
ret['f1_score'] = _evaluation.f1_score(dataset[self.target], classes) # depends on [control=['if'], data=[]]
if 'log_loss' in metrics:
ret['log_loss'] = _evaluation.log_loss(dataset[self.target], probs, index_map=self._target_id_map) # depends on [control=['if'], data=[]]
if 'confusion_matrix' in metrics:
ret['confusion_matrix'] = _evaluation.confusion_matrix(dataset[self.target], classes) # depends on [control=['if'], data=[]]
if 'roc_curve' in metrics:
ret['roc_curve'] = _evaluation.roc_curve(dataset[self.target], probs, index_map=self._target_id_map) # depends on [control=['if'], data=[]]
return ret
|
def list_permissions(vhost, runas=None):
'''
Lists permissions for vhost via rabbitmqctl list_permissions
CLI Example:
.. code-block:: bash
salt '*' rabbitmq.list_permissions /myvhost
'''
if runas is None and not salt.utils.platform.is_windows():
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'list_permissions', '-q', '-p', vhost],
reset_system_locale=False,
runas=runas,
python_shell=False)
return _output_to_dict(res)
|
def function[list_permissions, parameter[vhost, runas]]:
constant[
Lists permissions for vhost via rabbitmqctl list_permissions
CLI Example:
.. code-block:: bash
salt '*' rabbitmq.list_permissions /myvhost
]
if <ast.BoolOp object at 0x7da2047eb820> begin[:]
variable[runas] assign[=] call[name[salt].utils.user.get_user, parameter[]]
variable[res] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[list[[<ast.Name object at 0x7da2047eac50>, <ast.Constant object at 0x7da2047e8be0>, <ast.Constant object at 0x7da2047eba30>, <ast.Constant object at 0x7da2047ebb50>, <ast.Name object at 0x7da2047eac80>]]]]
return[call[name[_output_to_dict], parameter[name[res]]]]
|
keyword[def] identifier[list_permissions] ( identifier[vhost] , identifier[runas] = keyword[None] ):
literal[string]
keyword[if] identifier[runas] keyword[is] keyword[None] keyword[and] keyword[not] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_windows] ():
identifier[runas] = identifier[salt] . identifier[utils] . identifier[user] . identifier[get_user] ()
identifier[res] = identifier[__salt__] [ literal[string] ](
[ identifier[RABBITMQCTL] , literal[string] , literal[string] , literal[string] , identifier[vhost] ],
identifier[reset_system_locale] = keyword[False] ,
identifier[runas] = identifier[runas] ,
identifier[python_shell] = keyword[False] )
keyword[return] identifier[_output_to_dict] ( identifier[res] )
|
def list_permissions(vhost, runas=None):
"""
Lists permissions for vhost via rabbitmqctl list_permissions
CLI Example:
.. code-block:: bash
salt '*' rabbitmq.list_permissions /myvhost
"""
if runas is None and (not salt.utils.platform.is_windows()):
runas = salt.utils.user.get_user() # depends on [control=['if'], data=[]]
res = __salt__['cmd.run_all']([RABBITMQCTL, 'list_permissions', '-q', '-p', vhost], reset_system_locale=False, runas=runas, python_shell=False)
return _output_to_dict(res)
|
def token_perplexity_micro(eval_data, predictions, scores, learner='ignored'):
'''
Return the micro-averaged per-token perplexity `exp(-score / num_tokens)`
computed over the entire corpus, as a length-1 list of floats.
The log scores in `scores` should be base e (`exp`, `log`).
>>> refs = [Instance(None, ''),
... Instance(None, ''),
... Instance(None, '2')]
>>> scores = [np.log(1.0), np.log(0.25), np.log(1 / 64.)]
>>> perplexity = token_perplexity_micro(refs, None, scores)
>>> [round(p) for p in perplexity]
... # sequence perplexities: [1, 4, 64]
... # per-token perplexities: [1, 4, 8]
... # micro-average: gmean([1, 4, 8, 8])
[4.0]
'''
lens = np.array([len(_maybe_tokenize(inst.output)) + 1 for inst in eval_data])
return [np.exp(np.average(-np.array(scores) / lens, weights=lens))]
|
def function[token_perplexity_micro, parameter[eval_data, predictions, scores, learner]]:
constant[
Return the micro-averaged per-token perplexity `exp(-score / num_tokens)`
computed over the entire corpus, as a length-1 list of floats.
The log scores in `scores` should be base e (`exp`, `log`).
>>> refs = [Instance(None, ''),
... Instance(None, ''),
... Instance(None, '2')]
>>> scores = [np.log(1.0), np.log(0.25), np.log(1 / 64.)]
>>> perplexity = token_perplexity_micro(refs, None, scores)
>>> [round(p) for p in perplexity]
... # sequence perplexities: [1, 4, 64]
... # per-token perplexities: [1, 4, 8]
... # micro-average: gmean([1, 4, 8, 8])
[4.0]
]
variable[lens] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b1029f30>]]
return[list[[<ast.Call object at 0x7da1b1029a20>]]]
|
keyword[def] identifier[token_perplexity_micro] ( identifier[eval_data] , identifier[predictions] , identifier[scores] , identifier[learner] = literal[string] ):
literal[string]
identifier[lens] = identifier[np] . identifier[array] ([ identifier[len] ( identifier[_maybe_tokenize] ( identifier[inst] . identifier[output] ))+ literal[int] keyword[for] identifier[inst] keyword[in] identifier[eval_data] ])
keyword[return] [ identifier[np] . identifier[exp] ( identifier[np] . identifier[average] (- identifier[np] . identifier[array] ( identifier[scores] )/ identifier[lens] , identifier[weights] = identifier[lens] ))]
|
def token_perplexity_micro(eval_data, predictions, scores, learner='ignored'):
"""
Return the micro-averaged per-token perplexity `exp(-score / num_tokens)`
computed over the entire corpus, as a length-1 list of floats.
The log scores in `scores` should be base e (`exp`, `log`).
>>> refs = [Instance(None, ''),
... Instance(None, ''),
... Instance(None, '2')]
>>> scores = [np.log(1.0), np.log(0.25), np.log(1 / 64.)]
>>> perplexity = token_perplexity_micro(refs, None, scores)
>>> [round(p) for p in perplexity]
... # sequence perplexities: [1, 4, 64]
... # per-token perplexities: [1, 4, 8]
... # micro-average: gmean([1, 4, 8, 8])
[4.0]
"""
lens = np.array([len(_maybe_tokenize(inst.output)) + 1 for inst in eval_data])
return [np.exp(np.average(-np.array(scores) / lens, weights=lens))]
|
def asDictionary(self):
""" returns the object as a dictionary """
template = {
"type" : "esriPMS",
"url" : self._url,
"imageData" : self._imageDate,
"contentType" : self._contentType,
"width" : self._width,
"height" : self._height,
"angle" : self._angle,
"xoffset" : self._xoffset,
"yoffset" : self._yoffset,
"xscale" : self._xscale,
"yscale" : self._yscale,
"outline" : self._outline
}
return template
|
def function[asDictionary, parameter[self]]:
constant[ returns the object as a dictionary ]
variable[template] assign[=] dictionary[[<ast.Constant object at 0x7da2041dbe50>, <ast.Constant object at 0x7da2041dafb0>, <ast.Constant object at 0x7da2041dae90>, <ast.Constant object at 0x7da2041da890>, <ast.Constant object at 0x7da2041d8af0>, <ast.Constant object at 0x7da2041d98a0>, <ast.Constant object at 0x7da2041dabf0>, <ast.Constant object at 0x7da2041d8430>, <ast.Constant object at 0x7da2041d8400>, <ast.Constant object at 0x7da2041da9e0>, <ast.Constant object at 0x7da2041d8bb0>, <ast.Constant object at 0x7da2041db1c0>], [<ast.Constant object at 0x7da2041dbc70>, <ast.Attribute object at 0x7da2041da590>, <ast.Attribute object at 0x7da2041d8970>, <ast.Attribute object at 0x7da2041d9060>, <ast.Attribute object at 0x7da2041da4d0>, <ast.Attribute object at 0x7da2041dada0>, <ast.Attribute object at 0x7da2041d9f90>, <ast.Attribute object at 0x7da2041d9ff0>, <ast.Attribute object at 0x7da2041d8580>, <ast.Attribute object at 0x7da2041d9450>, <ast.Attribute object at 0x7da18f09ebf0>, <ast.Attribute object at 0x7da18f09cfd0>]]
return[name[template]]
|
keyword[def] identifier[asDictionary] ( identifier[self] ):
literal[string]
identifier[template] ={
literal[string] : literal[string] ,
literal[string] : identifier[self] . identifier[_url] ,
literal[string] : identifier[self] . identifier[_imageDate] ,
literal[string] : identifier[self] . identifier[_contentType] ,
literal[string] : identifier[self] . identifier[_width] ,
literal[string] : identifier[self] . identifier[_height] ,
literal[string] : identifier[self] . identifier[_angle] ,
literal[string] : identifier[self] . identifier[_xoffset] ,
literal[string] : identifier[self] . identifier[_yoffset] ,
literal[string] : identifier[self] . identifier[_xscale] ,
literal[string] : identifier[self] . identifier[_yscale] ,
literal[string] : identifier[self] . identifier[_outline]
}
keyword[return] identifier[template]
|
def asDictionary(self):
""" returns the object as a dictionary """
template = {'type': 'esriPMS', 'url': self._url, 'imageData': self._imageDate, 'contentType': self._contentType, 'width': self._width, 'height': self._height, 'angle': self._angle, 'xoffset': self._xoffset, 'yoffset': self._yoffset, 'xscale': self._xscale, 'yscale': self._yscale, 'outline': self._outline}
return template
|
def get_df(self, data_file):
"""
读取历史财务数据文件,并返回pandas结果 , 类似gpcw20171231.zip格式,具体字段含义参考
https://github.com/rainx/pytdx/issues/133
:param data_file: 数据文件地址, 数据文件类型可以为 .zip 文件,也可以为解压后的 .dat
:return: pandas DataFrame格式的历史财务数据
"""
crawler = QAHistoryFinancialCrawler()
with open(data_file, 'rb') as df:
data = crawler.parse(download_file=df)
return crawler.to_df(data)
|
def function[get_df, parameter[self, data_file]]:
constant[
读取历史财务数据文件,并返回pandas结果 , 类似gpcw20171231.zip格式,具体字段含义参考
https://github.com/rainx/pytdx/issues/133
:param data_file: 数据文件地址, 数据文件类型可以为 .zip 文件,也可以为解压后的 .dat
:return: pandas DataFrame格式的历史财务数据
]
variable[crawler] assign[=] call[name[QAHistoryFinancialCrawler], parameter[]]
with call[name[open], parameter[name[data_file], constant[rb]]] begin[:]
variable[data] assign[=] call[name[crawler].parse, parameter[]]
return[call[name[crawler].to_df, parameter[name[data]]]]
|
keyword[def] identifier[get_df] ( identifier[self] , identifier[data_file] ):
literal[string]
identifier[crawler] = identifier[QAHistoryFinancialCrawler] ()
keyword[with] identifier[open] ( identifier[data_file] , literal[string] ) keyword[as] identifier[df] :
identifier[data] = identifier[crawler] . identifier[parse] ( identifier[download_file] = identifier[df] )
keyword[return] identifier[crawler] . identifier[to_df] ( identifier[data] )
|
def get_df(self, data_file):
"""
读取历史财务数据文件,并返回pandas结果 , 类似gpcw20171231.zip格式,具体字段含义参考
https://github.com/rainx/pytdx/issues/133
:param data_file: 数据文件地址, 数据文件类型可以为 .zip 文件,也可以为解压后的 .dat
:return: pandas DataFrame格式的历史财务数据
"""
crawler = QAHistoryFinancialCrawler()
with open(data_file, 'rb') as df:
data = crawler.parse(download_file=df) # depends on [control=['with'], data=['df']]
return crawler.to_df(data)
|
def _set_dscp_mutation(self, v, load=False):
"""
Setter method for dscp_mutation, mapped from YANG variable /qos/map/dscp_mutation (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_dscp_mutation is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_dscp_mutation() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("dscp_mutation_map_name",dscp_mutation.dscp_mutation, yang_name="dscp-mutation", rest_name="dscp-mutation", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-mutation-map-name', extensions={u'tailf-common': {u'info': u'Configure Dscp-to-Dscp mutation map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_mutation', u'cli-mode-name': u'dscp-mutation-$(dscp-mutation-map-name)'}}), is_container='list', yang_name="dscp-mutation", rest_name="dscp-mutation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Dscp-to-Dscp mutation map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_mutation', u'cli-mode-name': u'dscp-mutation-$(dscp-mutation-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """dscp_mutation must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("dscp_mutation_map_name",dscp_mutation.dscp_mutation, yang_name="dscp-mutation", rest_name="dscp-mutation", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-mutation-map-name', extensions={u'tailf-common': {u'info': u'Configure Dscp-to-Dscp mutation map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_mutation', u'cli-mode-name': u'dscp-mutation-$(dscp-mutation-map-name)'}}), is_container='list', yang_name="dscp-mutation", rest_name="dscp-mutation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Dscp-to-Dscp mutation map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_mutation', u'cli-mode-name': u'dscp-mutation-$(dscp-mutation-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='list', is_config=True)""",
})
self.__dscp_mutation = t
if hasattr(self, '_set'):
self._set()
|
def function[_set_dscp_mutation, parameter[self, v, load]]:
constant[
Setter method for dscp_mutation, mapped from YANG variable /qos/map/dscp_mutation (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_dscp_mutation is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_dscp_mutation() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da1b25882e0>
name[self].__dscp_mutation assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]]
|
keyword[def] identifier[_set_dscp_mutation] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[dscp_mutation] . identifier[dscp_mutation] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__dscp_mutation] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] ()
|
def _set_dscp_mutation(self, v, load=False):
"""
Setter method for dscp_mutation, mapped from YANG variable /qos/map/dscp_mutation (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_dscp_mutation is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_dscp_mutation() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=YANGListType('dscp_mutation_map_name', dscp_mutation.dscp_mutation, yang_name='dscp-mutation', rest_name='dscp-mutation', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='dscp-mutation-map-name', extensions={u'tailf-common': {u'info': u'Configure Dscp-to-Dscp mutation map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_mutation', u'cli-mode-name': u'dscp-mutation-$(dscp-mutation-map-name)'}}), is_container='list', yang_name='dscp-mutation', rest_name='dscp-mutation', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Dscp-to-Dscp mutation map', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'dscp_mutation', u'cli-mode-name': u'dscp-mutation-$(dscp-mutation-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='list', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'dscp_mutation must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("dscp_mutation_map_name",dscp_mutation.dscp_mutation, yang_name="dscp-mutation", rest_name="dscp-mutation", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'dscp-mutation-map-name\', extensions={u\'tailf-common\': {u\'info\': u\'Configure Dscp-to-Dscp mutation map\', u\'cli-no-key-completion\': None, u\'cli-suppress-list-no\': None, u\'cli-suppress-key-abbreviation\': None, u\'cli-full-command\': None, u\'callpoint\': u\'dscp_mutation\', u\'cli-mode-name\': u\'dscp-mutation-$(dscp-mutation-map-name)\'}}), is_container=\'list\', yang_name="dscp-mutation", rest_name="dscp-mutation", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Configure Dscp-to-Dscp mutation map\', u\'cli-no-key-completion\': None, u\'cli-suppress-list-no\': None, u\'cli-suppress-key-abbreviation\': None, u\'cli-full-command\': None, u\'callpoint\': u\'dscp_mutation\', u\'cli-mode-name\': u\'dscp-mutation-$(dscp-mutation-map-name)\'}}, namespace=\'urn:brocade.com:mgmt:brocade-qos\', defining_module=\'brocade-qos\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__dscp_mutation = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]]
|
def pull_isotime(voevent, index=0):
"""
Deprecated alias of :func:`.get_event_time_as_utc`
"""
import warnings
warnings.warn(
"""
The function `pull_isotime` has been renamed to
`get_event_time_as_utc`. This alias is preserved for backwards
compatibility, and may be removed in a future release.
""",
FutureWarning)
return get_event_time_as_utc(voevent, index)
|
def function[pull_isotime, parameter[voevent, index]]:
constant[
Deprecated alias of :func:`.get_event_time_as_utc`
]
import module[warnings]
call[name[warnings].warn, parameter[constant[
The function `pull_isotime` has been renamed to
`get_event_time_as_utc`. This alias is preserved for backwards
compatibility, and may be removed in a future release.
], name[FutureWarning]]]
return[call[name[get_event_time_as_utc], parameter[name[voevent], name[index]]]]
|
keyword[def] identifier[pull_isotime] ( identifier[voevent] , identifier[index] = literal[int] ):
literal[string]
keyword[import] identifier[warnings]
identifier[warnings] . identifier[warn] (
literal[string] ,
identifier[FutureWarning] )
keyword[return] identifier[get_event_time_as_utc] ( identifier[voevent] , identifier[index] )
|
def pull_isotime(voevent, index=0):
"""
Deprecated alias of :func:`.get_event_time_as_utc`
"""
import warnings
warnings.warn('\n The function `pull_isotime` has been renamed to\n `get_event_time_as_utc`. This alias is preserved for backwards\n compatibility, and may be removed in a future release.\n ', FutureWarning)
return get_event_time_as_utc(voevent, index)
|
def prompt(question, choices=None):
"""echo a prompt to the user and wait for an answer
question -- string -- the prompt for the user
choices -- list -- if given, only exit when prompt matches one of the choices
return -- string -- the answer that was given by the user
"""
if not re.match("\s$", question):
question = "{}: ".format(question)
while True:
if sys.version_info[0] > 2:
answer = input(question)
else:
answer = raw_input(question)
if not choices or answer in choices:
break
return answer
|
def function[prompt, parameter[question, choices]]:
constant[echo a prompt to the user and wait for an answer
question -- string -- the prompt for the user
choices -- list -- if given, only exit when prompt matches one of the choices
return -- string -- the answer that was given by the user
]
if <ast.UnaryOp object at 0x7da207f00d00> begin[:]
variable[question] assign[=] call[constant[{}: ].format, parameter[name[question]]]
while constant[True] begin[:]
if compare[call[name[sys].version_info][constant[0]] greater[>] constant[2]] begin[:]
variable[answer] assign[=] call[name[input], parameter[name[question]]]
if <ast.BoolOp object at 0x7da207f02530> begin[:]
break
return[name[answer]]
|
keyword[def] identifier[prompt] ( identifier[question] , identifier[choices] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[re] . identifier[match] ( literal[string] , identifier[question] ):
identifier[question] = literal[string] . identifier[format] ( identifier[question] )
keyword[while] keyword[True] :
keyword[if] identifier[sys] . identifier[version_info] [ literal[int] ]> literal[int] :
identifier[answer] = identifier[input] ( identifier[question] )
keyword[else] :
identifier[answer] = identifier[raw_input] ( identifier[question] )
keyword[if] keyword[not] identifier[choices] keyword[or] identifier[answer] keyword[in] identifier[choices] :
keyword[break]
keyword[return] identifier[answer]
|
def prompt(question, choices=None):
"""echo a prompt to the user and wait for an answer
question -- string -- the prompt for the user
choices -- list -- if given, only exit when prompt matches one of the choices
return -- string -- the answer that was given by the user
"""
if not re.match('\\s$', question):
question = '{}: '.format(question) # depends on [control=['if'], data=[]]
while True:
if sys.version_info[0] > 2:
answer = input(question) # depends on [control=['if'], data=[]]
else:
answer = raw_input(question)
if not choices or answer in choices:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
return answer
|
def xVal_xml(self):
"""
Return the ``<c:xVal>`` element for this series as unicode text. This
element contains the X values for this series.
"""
return self._xVal_tmpl.format(**{
'nsdecls': '',
'numRef_xml': self.numRef_xml(
self._series.x_values_ref, self._series.number_format,
self._series.x_values
),
})
|
def function[xVal_xml, parameter[self]]:
constant[
Return the ``<c:xVal>`` element for this series as unicode text. This
element contains the X values for this series.
]
return[call[name[self]._xVal_tmpl.format, parameter[]]]
|
keyword[def] identifier[xVal_xml] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[_xVal_tmpl] . identifier[format] (**{
literal[string] : literal[string] ,
literal[string] : identifier[self] . identifier[numRef_xml] (
identifier[self] . identifier[_series] . identifier[x_values_ref] , identifier[self] . identifier[_series] . identifier[number_format] ,
identifier[self] . identifier[_series] . identifier[x_values]
),
})
|
def xVal_xml(self):
"""
Return the ``<c:xVal>`` element for this series as unicode text. This
element contains the X values for this series.
"""
return self._xVal_tmpl.format(**{'nsdecls': '', 'numRef_xml': self.numRef_xml(self._series.x_values_ref, self._series.number_format, self._series.x_values)})
|
def _categorize(self, category):
"""Remove torrents with unwanted category from self.torrents"""
self.torrents = [result for result in self.torrents
if result.category == category]
|
def function[_categorize, parameter[self, category]]:
constant[Remove torrents with unwanted category from self.torrents]
name[self].torrents assign[=] <ast.ListComp object at 0x7da1b1fbadd0>
|
keyword[def] identifier[_categorize] ( identifier[self] , identifier[category] ):
literal[string]
identifier[self] . identifier[torrents] =[ identifier[result] keyword[for] identifier[result] keyword[in] identifier[self] . identifier[torrents]
keyword[if] identifier[result] . identifier[category] == identifier[category] ]
|
def _categorize(self, category):
"""Remove torrents with unwanted category from self.torrents"""
self.torrents = [result for result in self.torrents if result.category == category]
|
def base_string_uri(uri, host=None):
"""**Base String URI**
Per `section 3.4.1.2`_ of RFC 5849.
For example, the HTTP request::
GET /r%20v/X?id=123 HTTP/1.1
Host: EXAMPLE.COM:80
is represented by the base string URI: "http://example.com/r%20v/X".
In another example, the HTTPS request::
GET /?q=1 HTTP/1.1
Host: www.example.net:8080
is represented by the base string URI: "https://www.example.net:8080/".
.. _`section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2
The host argument overrides the netloc part of the uri argument.
"""
if not isinstance(uri, unicode_type):
raise ValueError('uri must be a unicode object.')
# FIXME: urlparse does not support unicode
scheme, netloc, path, params, query, fragment = urlparse.urlparse(uri)
# The scheme, authority, and path of the request resource URI `RFC3986`
# are included by constructing an "http" or "https" URI representing
# the request resource (without the query or fragment) as follows:
#
# .. _`RFC3986`: https://tools.ietf.org/html/rfc3986
if not scheme or not netloc:
raise ValueError('uri must include a scheme and netloc')
# Per `RFC 2616 section 5.1.2`_:
#
# Note that the absolute path cannot be empty; if none is present in
# the original URI, it MUST be given as "/" (the server root).
#
# .. _`RFC 2616 section 5.1.2`: https://tools.ietf.org/html/rfc2616#section-5.1.2
if not path:
path = '/'
# 1. The scheme and host MUST be in lowercase.
scheme = scheme.lower()
netloc = netloc.lower()
# 2. The host and port values MUST match the content of the HTTP
# request "Host" header field.
if host is not None:
netloc = host.lower()
# 3. The port MUST be included if it is not the default port for the
# scheme, and MUST be excluded if it is the default. Specifically,
# the port MUST be excluded when making an HTTP request `RFC2616`_
# to port 80 or when making an HTTPS request `RFC2818`_ to port 443.
# All other non-default port numbers MUST be included.
#
# .. _`RFC2616`: https://tools.ietf.org/html/rfc2616
# .. _`RFC2818`: https://tools.ietf.org/html/rfc2818
default_ports = (
('http', '80'),
('https', '443'),
)
if ':' in netloc:
host, port = netloc.split(':', 1)
if (scheme, port) in default_ports:
netloc = host
v = urlparse.urlunparse((scheme, netloc, path, params, '', ''))
# RFC 5849 does not specify which characters are encoded in the
# "base string URI", nor how they are encoded - which is very bad, since
# the signatures won't match if there are any differences. Fortunately,
# most URIs only use characters that are clearly not encoded (e.g. digits
# and A-Z, a-z), so have avoided any differences between implementations.
#
# The example from its section 3.4.1.2 illustrates that spaces in
# the path are percent encoded. But it provides no guidance as to what other
# characters (if any) must be encoded (nor how); nor if characters in the
# other components are to be encoded or not.
#
# This implementation **assumes** that **only** the space is percent-encoded
# and it is done to the entire value (not just to spaces in the path).
#
# This code may need to be changed if it is discovered that other characters
# are expected to be encoded.
#
# Note: the "base string URI" returned by this function will be encoded
# again before being concatenated into the "signature base string". So any
# spaces in the URI will actually appear in the "signature base string"
# as "%2520" (the "%20" further encoded according to section 3.6).
return v.replace(' ', '%20')
|
def function[base_string_uri, parameter[uri, host]]:
constant[**Base String URI**
Per `section 3.4.1.2`_ of RFC 5849.
For example, the HTTP request::
GET /r%20v/X?id=123 HTTP/1.1
Host: EXAMPLE.COM:80
is represented by the base string URI: "http://example.com/r%20v/X".
In another example, the HTTPS request::
GET /?q=1 HTTP/1.1
Host: www.example.net:8080
is represented by the base string URI: "https://www.example.net:8080/".
.. _`section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2
The host argument overrides the netloc part of the uri argument.
]
if <ast.UnaryOp object at 0x7da1b17f97e0> begin[:]
<ast.Raise object at 0x7da1b17f8dc0>
<ast.Tuple object at 0x7da1b17f9cf0> assign[=] call[name[urlparse].urlparse, parameter[name[uri]]]
if <ast.BoolOp object at 0x7da1b17fb550> begin[:]
<ast.Raise object at 0x7da1b17f8b50>
if <ast.UnaryOp object at 0x7da1b17f9f90> begin[:]
variable[path] assign[=] constant[/]
variable[scheme] assign[=] call[name[scheme].lower, parameter[]]
variable[netloc] assign[=] call[name[netloc].lower, parameter[]]
if compare[name[host] is_not constant[None]] begin[:]
variable[netloc] assign[=] call[name[host].lower, parameter[]]
variable[default_ports] assign[=] tuple[[<ast.Tuple object at 0x7da1b180db10>, <ast.Tuple object at 0x7da1b180c9a0>]]
if compare[constant[:] in name[netloc]] begin[:]
<ast.Tuple object at 0x7da1b180d150> assign[=] call[name[netloc].split, parameter[constant[:], constant[1]]]
if compare[tuple[[<ast.Name object at 0x7da1b180cca0>, <ast.Name object at 0x7da1b180c280>]] in name[default_ports]] begin[:]
variable[netloc] assign[=] name[host]
variable[v] assign[=] call[name[urlparse].urlunparse, parameter[tuple[[<ast.Name object at 0x7da1b180d7e0>, <ast.Name object at 0x7da1b175e9b0>, <ast.Name object at 0x7da1b175ded0>, <ast.Name object at 0x7da1b175e350>, <ast.Constant object at 0x7da1b175c370>, <ast.Constant object at 0x7da1b175e9e0>]]]]
return[call[name[v].replace, parameter[constant[ ], constant[%20]]]]
|
keyword[def] identifier[base_string_uri] ( identifier[uri] , identifier[host] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[uri] , identifier[unicode_type] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[scheme] , identifier[netloc] , identifier[path] , identifier[params] , identifier[query] , identifier[fragment] = identifier[urlparse] . identifier[urlparse] ( identifier[uri] )
keyword[if] keyword[not] identifier[scheme] keyword[or] keyword[not] identifier[netloc] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[path] :
identifier[path] = literal[string]
identifier[scheme] = identifier[scheme] . identifier[lower] ()
identifier[netloc] = identifier[netloc] . identifier[lower] ()
keyword[if] identifier[host] keyword[is] keyword[not] keyword[None] :
identifier[netloc] = identifier[host] . identifier[lower] ()
identifier[default_ports] =(
( literal[string] , literal[string] ),
( literal[string] , literal[string] ),
)
keyword[if] literal[string] keyword[in] identifier[netloc] :
identifier[host] , identifier[port] = identifier[netloc] . identifier[split] ( literal[string] , literal[int] )
keyword[if] ( identifier[scheme] , identifier[port] ) keyword[in] identifier[default_ports] :
identifier[netloc] = identifier[host]
identifier[v] = identifier[urlparse] . identifier[urlunparse] (( identifier[scheme] , identifier[netloc] , identifier[path] , identifier[params] , literal[string] , literal[string] ))
keyword[return] identifier[v] . identifier[replace] ( literal[string] , literal[string] )
|
def base_string_uri(uri, host=None):
"""**Base String URI**
Per `section 3.4.1.2`_ of RFC 5849.
For example, the HTTP request::
GET /r%20v/X?id=123 HTTP/1.1
Host: EXAMPLE.COM:80
is represented by the base string URI: "http://example.com/r%20v/X".
In another example, the HTTPS request::
GET /?q=1 HTTP/1.1
Host: www.example.net:8080
is represented by the base string URI: "https://www.example.net:8080/".
.. _`section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2
The host argument overrides the netloc part of the uri argument.
"""
if not isinstance(uri, unicode_type):
raise ValueError('uri must be a unicode object.') # depends on [control=['if'], data=[]]
# FIXME: urlparse does not support unicode
(scheme, netloc, path, params, query, fragment) = urlparse.urlparse(uri)
# The scheme, authority, and path of the request resource URI `RFC3986`
# are included by constructing an "http" or "https" URI representing
# the request resource (without the query or fragment) as follows:
#
# .. _`RFC3986`: https://tools.ietf.org/html/rfc3986
if not scheme or not netloc:
raise ValueError('uri must include a scheme and netloc') # depends on [control=['if'], data=[]]
# Per `RFC 2616 section 5.1.2`_:
#
# Note that the absolute path cannot be empty; if none is present in
# the original URI, it MUST be given as "/" (the server root).
#
# .. _`RFC 2616 section 5.1.2`: https://tools.ietf.org/html/rfc2616#section-5.1.2
if not path:
path = '/' # depends on [control=['if'], data=[]]
# 1. The scheme and host MUST be in lowercase.
scheme = scheme.lower()
netloc = netloc.lower()
# 2. The host and port values MUST match the content of the HTTP
# request "Host" header field.
if host is not None:
netloc = host.lower() # depends on [control=['if'], data=['host']]
# 3. The port MUST be included if it is not the default port for the
# scheme, and MUST be excluded if it is the default. Specifically,
# the port MUST be excluded when making an HTTP request `RFC2616`_
# to port 80 or when making an HTTPS request `RFC2818`_ to port 443.
# All other non-default port numbers MUST be included.
#
# .. _`RFC2616`: https://tools.ietf.org/html/rfc2616
# .. _`RFC2818`: https://tools.ietf.org/html/rfc2818
default_ports = (('http', '80'), ('https', '443'))
if ':' in netloc:
(host, port) = netloc.split(':', 1)
if (scheme, port) in default_ports:
netloc = host # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['netloc']]
v = urlparse.urlunparse((scheme, netloc, path, params, '', ''))
# RFC 5849 does not specify which characters are encoded in the
# "base string URI", nor how they are encoded - which is very bad, since
# the signatures won't match if there are any differences. Fortunately,
# most URIs only use characters that are clearly not encoded (e.g. digits
# and A-Z, a-z), so have avoided any differences between implementations.
#
# The example from its section 3.4.1.2 illustrates that spaces in
# the path are percent encoded. But it provides no guidance as to what other
# characters (if any) must be encoded (nor how); nor if characters in the
# other components are to be encoded or not.
#
# This implementation **assumes** that **only** the space is percent-encoded
# and it is done to the entire value (not just to spaces in the path).
#
# This code may need to be changed if it is discovered that other characters
# are expected to be encoded.
#
# Note: the "base string URI" returned by this function will be encoded
# again before being concatenated into the "signature base string". So any
# spaces in the URI will actually appear in the "signature base string"
# as "%2520" (the "%20" further encoded according to section 3.6).
return v.replace(' ', '%20')
|
def Campbell_Thodos(T, Tb, Tc, Pc, M, dipole=None, hydroxyl=False):
r'''Calculate saturation liquid density using the Campbell-Thodos [1]_
CSP method.
An old and uncommon estimation method.
.. math::
V_s = \frac{RT_c}{P_c}{Z_{RA}}^{[1+(1-T_r)^{2/7}]}
Z_{RA} = \alpha + \beta(1-T_r)
\alpha = 0.3883-0.0179s
s = T_{br} \frac{\ln P_c}{(1-T_{br})}
\beta = 0.00318s-0.0211+0.625\Lambda^{1.35}
\Lambda = \frac{P_c^{1/3}} { M^{1/2} T_c^{5/6}}
For polar compounds:
.. math::
\theta = P_c \mu^2/T_c^2
\alpha = 0.3883 - 0.0179s - 130540\theta^{2.41}
\beta = 0.00318s - 0.0211 + 0.625\Lambda^{1.35} + 9.74\times
10^6 \theta^{3.38}
Polar Combounds with hydroxyl groups (water, alcohols)
.. math::
\alpha = \left[0.690T_{br} -0.3342 + \frac{5.79\times 10^{-10}}
{T_{br}^{32.75}}\right] P_c^{0.145}
\beta = 0.00318s - 0.0211 + 0.625 \Lambda^{1.35} + 5.90\Theta^{0.835}
Parameters
----------
T : float
Temperature of fluid [K]
Tb : float
Boiling temperature of the fluid [K]
Tc : float
Critical temperature of fluid [K]
Pc : float
Critical pressure of fluid [Pa]
M : float
Molecular weight of the fluid [g/mol]
dipole : float, optional
Dipole moment of the fluid [debye]
hydroxyl : bool, optional
Swith to use the hydroxyl variant for polar fluids
Returns
-------
Vs : float
Saturation liquid volume
Notes
-----
If a dipole is provided, the polar chemical method is used.
The paper is an excellent read.
Pc is internally converted to atm.
Examples
--------
Ammonia, from [1]_.
>>> Campbell_Thodos(T=405.45, Tb=239.82, Tc=405.45, Pc=111.7*101325, M=17.03, dipole=1.47)
7.347363635885525e-05
References
----------
.. [1] Campbell, Scott W., and George Thodos. "Prediction of Saturated
Liquid Densities and Critical Volumes for Polar and Nonpolar
Substances." Journal of Chemical & Engineering Data 30, no. 1
(January 1, 1985): 102-11. doi:10.1021/je00039a032.
'''
Tr = T/Tc
Tbr = Tb/Tc
Pc = Pc/101325.
s = Tbr * log(Pc)/(1-Tbr)
Lambda = Pc**(1/3.)/(M**0.5*Tc**(5/6.))
alpha = 0.3883 - 0.0179*s
beta = 0.00318*s - 0.0211 + 0.625*Lambda**(1.35)
if dipole:
theta = Pc*dipole**2/Tc**2
alpha -= 130540 * theta**2.41
beta += 9.74E6 * theta**3.38
if hydroxyl:
beta = 0.00318*s - 0.0211 + 0.625*Lambda**(1.35) + 5.90*theta**0.835
alpha = (0.69*Tbr - 0.3342 + 5.79E-10/Tbr**32.75)*Pc**0.145
Zra = alpha + beta*(1-Tr)
Vs = R*Tc/(Pc*101325)*Zra**(1+(1-Tr)**(2/7.))
return Vs
|
def function[Campbell_Thodos, parameter[T, Tb, Tc, Pc, M, dipole, hydroxyl]]:
constant[Calculate saturation liquid density using the Campbell-Thodos [1]_
CSP method.
An old and uncommon estimation method.
.. math::
V_s = \frac{RT_c}{P_c}{Z_{RA}}^{[1+(1-T_r)^{2/7}]}
Z_{RA} = \alpha + \beta(1-T_r)
\alpha = 0.3883-0.0179s
s = T_{br} \frac{\ln P_c}{(1-T_{br})}
\beta = 0.00318s-0.0211+0.625\Lambda^{1.35}
\Lambda = \frac{P_c^{1/3}} { M^{1/2} T_c^{5/6}}
For polar compounds:
.. math::
\theta = P_c \mu^2/T_c^2
\alpha = 0.3883 - 0.0179s - 130540\theta^{2.41}
\beta = 0.00318s - 0.0211 + 0.625\Lambda^{1.35} + 9.74\times
10^6 \theta^{3.38}
Polar Combounds with hydroxyl groups (water, alcohols)
.. math::
\alpha = \left[0.690T_{br} -0.3342 + \frac{5.79\times 10^{-10}}
{T_{br}^{32.75}}\right] P_c^{0.145}
\beta = 0.00318s - 0.0211 + 0.625 \Lambda^{1.35} + 5.90\Theta^{0.835}
Parameters
----------
T : float
Temperature of fluid [K]
Tb : float
Boiling temperature of the fluid [K]
Tc : float
Critical temperature of fluid [K]
Pc : float
Critical pressure of fluid [Pa]
M : float
Molecular weight of the fluid [g/mol]
dipole : float, optional
Dipole moment of the fluid [debye]
hydroxyl : bool, optional
Swith to use the hydroxyl variant for polar fluids
Returns
-------
Vs : float
Saturation liquid volume
Notes
-----
If a dipole is provided, the polar chemical method is used.
The paper is an excellent read.
Pc is internally converted to atm.
Examples
--------
Ammonia, from [1]_.
>>> Campbell_Thodos(T=405.45, Tb=239.82, Tc=405.45, Pc=111.7*101325, M=17.03, dipole=1.47)
7.347363635885525e-05
References
----------
.. [1] Campbell, Scott W., and George Thodos. "Prediction of Saturated
Liquid Densities and Critical Volumes for Polar and Nonpolar
Substances." Journal of Chemical & Engineering Data 30, no. 1
(January 1, 1985): 102-11. doi:10.1021/je00039a032.
]
variable[Tr] assign[=] binary_operation[name[T] / name[Tc]]
variable[Tbr] assign[=] binary_operation[name[Tb] / name[Tc]]
variable[Pc] assign[=] binary_operation[name[Pc] / constant[101325.0]]
variable[s] assign[=] binary_operation[binary_operation[name[Tbr] * call[name[log], parameter[name[Pc]]]] / binary_operation[constant[1] - name[Tbr]]]
variable[Lambda] assign[=] binary_operation[binary_operation[name[Pc] ** binary_operation[constant[1] / constant[3.0]]] / binary_operation[binary_operation[name[M] ** constant[0.5]] * binary_operation[name[Tc] ** binary_operation[constant[5] / constant[6.0]]]]]
variable[alpha] assign[=] binary_operation[constant[0.3883] - binary_operation[constant[0.0179] * name[s]]]
variable[beta] assign[=] binary_operation[binary_operation[binary_operation[constant[0.00318] * name[s]] - constant[0.0211]] + binary_operation[constant[0.625] * binary_operation[name[Lambda] ** constant[1.35]]]]
if name[dipole] begin[:]
variable[theta] assign[=] binary_operation[binary_operation[name[Pc] * binary_operation[name[dipole] ** constant[2]]] / binary_operation[name[Tc] ** constant[2]]]
<ast.AugAssign object at 0x7da2046223b0>
<ast.AugAssign object at 0x7da204621d20>
if name[hydroxyl] begin[:]
variable[beta] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[0.00318] * name[s]] - constant[0.0211]] + binary_operation[constant[0.625] * binary_operation[name[Lambda] ** constant[1.35]]]] + binary_operation[constant[5.9] * binary_operation[name[theta] ** constant[0.835]]]]
variable[alpha] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[0.69] * name[Tbr]] - constant[0.3342]] + binary_operation[constant[5.79e-10] / binary_operation[name[Tbr] ** constant[32.75]]]] * binary_operation[name[Pc] ** constant[0.145]]]
variable[Zra] assign[=] binary_operation[name[alpha] + binary_operation[name[beta] * binary_operation[constant[1] - name[Tr]]]]
variable[Vs] assign[=] binary_operation[binary_operation[binary_operation[name[R] * name[Tc]] / binary_operation[name[Pc] * constant[101325]]] * binary_operation[name[Zra] ** binary_operation[constant[1] + binary_operation[binary_operation[constant[1] - name[Tr]] ** binary_operation[constant[2] / constant[7.0]]]]]]
return[name[Vs]]
|
keyword[def] identifier[Campbell_Thodos] ( identifier[T] , identifier[Tb] , identifier[Tc] , identifier[Pc] , identifier[M] , identifier[dipole] = keyword[None] , identifier[hydroxyl] = keyword[False] ):
literal[string]
identifier[Tr] = identifier[T] / identifier[Tc]
identifier[Tbr] = identifier[Tb] / identifier[Tc]
identifier[Pc] = identifier[Pc] / literal[int]
identifier[s] = identifier[Tbr] * identifier[log] ( identifier[Pc] )/( literal[int] - identifier[Tbr] )
identifier[Lambda] = identifier[Pc] **( literal[int] / literal[int] )/( identifier[M] ** literal[int] * identifier[Tc] **( literal[int] / literal[int] ))
identifier[alpha] = literal[int] - literal[int] * identifier[s]
identifier[beta] = literal[int] * identifier[s] - literal[int] + literal[int] * identifier[Lambda] **( literal[int] )
keyword[if] identifier[dipole] :
identifier[theta] = identifier[Pc] * identifier[dipole] ** literal[int] / identifier[Tc] ** literal[int]
identifier[alpha] -= literal[int] * identifier[theta] ** literal[int]
identifier[beta] += literal[int] * identifier[theta] ** literal[int]
keyword[if] identifier[hydroxyl] :
identifier[beta] = literal[int] * identifier[s] - literal[int] + literal[int] * identifier[Lambda] **( literal[int] )+ literal[int] * identifier[theta] ** literal[int]
identifier[alpha] =( literal[int] * identifier[Tbr] - literal[int] + literal[int] / identifier[Tbr] ** literal[int] )* identifier[Pc] ** literal[int]
identifier[Zra] = identifier[alpha] + identifier[beta] *( literal[int] - identifier[Tr] )
identifier[Vs] = identifier[R] * identifier[Tc] /( identifier[Pc] * literal[int] )* identifier[Zra] **( literal[int] +( literal[int] - identifier[Tr] )**( literal[int] / literal[int] ))
keyword[return] identifier[Vs]
|
def Campbell_Thodos(T, Tb, Tc, Pc, M, dipole=None, hydroxyl=False):
"""Calculate saturation liquid density using the Campbell-Thodos [1]_
CSP method.
An old and uncommon estimation method.
.. math::
V_s = \\frac{RT_c}{P_c}{Z_{RA}}^{[1+(1-T_r)^{2/7}]}
Z_{RA} = \\alpha + \\beta(1-T_r)
\\alpha = 0.3883-0.0179s
s = T_{br} \\frac{\\ln P_c}{(1-T_{br})}
\\beta = 0.00318s-0.0211+0.625\\Lambda^{1.35}
\\Lambda = \\frac{P_c^{1/3}} { M^{1/2} T_c^{5/6}}
For polar compounds:
.. math::
\\theta = P_c \\mu^2/T_c^2
\\alpha = 0.3883 - 0.0179s - 130540\\theta^{2.41}
\\beta = 0.00318s - 0.0211 + 0.625\\Lambda^{1.35} + 9.74\\times
10^6 \\theta^{3.38}
Polar Combounds with hydroxyl groups (water, alcohols)
.. math::
\\alpha = \\left[0.690T_{br} -0.3342 + \\frac{5.79\\times 10^{-10}}
{T_{br}^{32.75}}\\right] P_c^{0.145}
\\beta = 0.00318s - 0.0211 + 0.625 \\Lambda^{1.35} + 5.90\\Theta^{0.835}
Parameters
----------
T : float
Temperature of fluid [K]
Tb : float
Boiling temperature of the fluid [K]
Tc : float
Critical temperature of fluid [K]
Pc : float
Critical pressure of fluid [Pa]
M : float
Molecular weight of the fluid [g/mol]
dipole : float, optional
Dipole moment of the fluid [debye]
hydroxyl : bool, optional
Swith to use the hydroxyl variant for polar fluids
Returns
-------
Vs : float
Saturation liquid volume
Notes
-----
If a dipole is provided, the polar chemical method is used.
The paper is an excellent read.
Pc is internally converted to atm.
Examples
--------
Ammonia, from [1]_.
>>> Campbell_Thodos(T=405.45, Tb=239.82, Tc=405.45, Pc=111.7*101325, M=17.03, dipole=1.47)
7.347363635885525e-05
References
----------
.. [1] Campbell, Scott W., and George Thodos. "Prediction of Saturated
Liquid Densities and Critical Volumes for Polar and Nonpolar
Substances." Journal of Chemical & Engineering Data 30, no. 1
(January 1, 1985): 102-11. doi:10.1021/je00039a032.
"""
Tr = T / Tc
Tbr = Tb / Tc
Pc = Pc / 101325.0
s = Tbr * log(Pc) / (1 - Tbr)
Lambda = Pc ** (1 / 3.0) / (M ** 0.5 * Tc ** (5 / 6.0))
alpha = 0.3883 - 0.0179 * s
beta = 0.00318 * s - 0.0211 + 0.625 * Lambda ** 1.35
if dipole:
theta = Pc * dipole ** 2 / Tc ** 2
alpha -= 130540 * theta ** 2.41
beta += 9740000.0 * theta ** 3.38 # depends on [control=['if'], data=[]]
if hydroxyl:
beta = 0.00318 * s - 0.0211 + 0.625 * Lambda ** 1.35 + 5.9 * theta ** 0.835
alpha = (0.69 * Tbr - 0.3342 + 5.79e-10 / Tbr ** 32.75) * Pc ** 0.145 # depends on [control=['if'], data=[]]
Zra = alpha + beta * (1 - Tr)
Vs = R * Tc / (Pc * 101325) * Zra ** (1 + (1 - Tr) ** (2 / 7.0))
return Vs
|
def write_pgpass(self, name=None, site=None, use_sudo=0, root=0):
"""
Write the file used to store login credentials for PostgreSQL.
"""
r = self.database_renderer(name=name, site=site)
root = int(root)
use_sudo = int(use_sudo)
r.run('touch {pgpass_path}')
if '~' in r.env.pgpass_path:
r.run('chmod {pgpass_chmod} {pgpass_path}')
else:
r.sudo('chmod {pgpass_chmod} {pgpass_path}')
if root:
r.env.shell_username = r.env.get('db_root_username', 'postgres')
r.env.shell_password = r.env.get('db_root_password', 'password')
else:
r.env.shell_username = r.env.db_user
r.env.shell_password = r.env.db_password
r.append(
'{db_host}:{port}:*:{shell_username}:{shell_password}',
r.env.pgpass_path,
use_sudo=use_sudo)
|
def function[write_pgpass, parameter[self, name, site, use_sudo, root]]:
constant[
Write the file used to store login credentials for PostgreSQL.
]
variable[r] assign[=] call[name[self].database_renderer, parameter[]]
variable[root] assign[=] call[name[int], parameter[name[root]]]
variable[use_sudo] assign[=] call[name[int], parameter[name[use_sudo]]]
call[name[r].run, parameter[constant[touch {pgpass_path}]]]
if compare[constant[~] in name[r].env.pgpass_path] begin[:]
call[name[r].run, parameter[constant[chmod {pgpass_chmod} {pgpass_path}]]]
if name[root] begin[:]
name[r].env.shell_username assign[=] call[name[r].env.get, parameter[constant[db_root_username], constant[postgres]]]
name[r].env.shell_password assign[=] call[name[r].env.get, parameter[constant[db_root_password], constant[password]]]
call[name[r].append, parameter[constant[{db_host}:{port}:*:{shell_username}:{shell_password}], name[r].env.pgpass_path]]
|
keyword[def] identifier[write_pgpass] ( identifier[self] , identifier[name] = keyword[None] , identifier[site] = keyword[None] , identifier[use_sudo] = literal[int] , identifier[root] = literal[int] ):
literal[string]
identifier[r] = identifier[self] . identifier[database_renderer] ( identifier[name] = identifier[name] , identifier[site] = identifier[site] )
identifier[root] = identifier[int] ( identifier[root] )
identifier[use_sudo] = identifier[int] ( identifier[use_sudo] )
identifier[r] . identifier[run] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[r] . identifier[env] . identifier[pgpass_path] :
identifier[r] . identifier[run] ( literal[string] )
keyword[else] :
identifier[r] . identifier[sudo] ( literal[string] )
keyword[if] identifier[root] :
identifier[r] . identifier[env] . identifier[shell_username] = identifier[r] . identifier[env] . identifier[get] ( literal[string] , literal[string] )
identifier[r] . identifier[env] . identifier[shell_password] = identifier[r] . identifier[env] . identifier[get] ( literal[string] , literal[string] )
keyword[else] :
identifier[r] . identifier[env] . identifier[shell_username] = identifier[r] . identifier[env] . identifier[db_user]
identifier[r] . identifier[env] . identifier[shell_password] = identifier[r] . identifier[env] . identifier[db_password]
identifier[r] . identifier[append] (
literal[string] ,
identifier[r] . identifier[env] . identifier[pgpass_path] ,
identifier[use_sudo] = identifier[use_sudo] )
|
def write_pgpass(self, name=None, site=None, use_sudo=0, root=0):
"""
Write the file used to store login credentials for PostgreSQL.
"""
r = self.database_renderer(name=name, site=site)
root = int(root)
use_sudo = int(use_sudo)
r.run('touch {pgpass_path}')
if '~' in r.env.pgpass_path:
r.run('chmod {pgpass_chmod} {pgpass_path}') # depends on [control=['if'], data=[]]
else:
r.sudo('chmod {pgpass_chmod} {pgpass_path}')
if root:
r.env.shell_username = r.env.get('db_root_username', 'postgres')
r.env.shell_password = r.env.get('db_root_password', 'password') # depends on [control=['if'], data=[]]
else:
r.env.shell_username = r.env.db_user
r.env.shell_password = r.env.db_password
r.append('{db_host}:{port}:*:{shell_username}:{shell_password}', r.env.pgpass_path, use_sudo=use_sudo)
|
def map_coords(func, obj):
"""
Returns the mapped coordinates from a Geometry after applying the provided
function to each dimension in tuples list (ie, linear scaling).
:param func: Function to apply to individual coordinate values
independently
:type func: function
:param obj: A geometry or feature to extract the coordinates from.
:type obj: Point, LineString, MultiPoint, MultiLineString, Polygon,
MultiPolygon
:return: The result of applying the function to each dimension in the
array.
:rtype: list
:raises ValueError: if the provided object is not GeoJSON.
"""
def tuple_func(coord):
return (func(coord[0]), func(coord[1]))
return map_tuples(tuple_func, obj)
|
def function[map_coords, parameter[func, obj]]:
constant[
Returns the mapped coordinates from a Geometry after applying the provided
function to each dimension in tuples list (ie, linear scaling).
:param func: Function to apply to individual coordinate values
independently
:type func: function
:param obj: A geometry or feature to extract the coordinates from.
:type obj: Point, LineString, MultiPoint, MultiLineString, Polygon,
MultiPolygon
:return: The result of applying the function to each dimension in the
array.
:rtype: list
:raises ValueError: if the provided object is not GeoJSON.
]
def function[tuple_func, parameter[coord]]:
return[tuple[[<ast.Call object at 0x7da1b1574bb0>, <ast.Call object at 0x7da1b155de70>]]]
return[call[name[map_tuples], parameter[name[tuple_func], name[obj]]]]
|
keyword[def] identifier[map_coords] ( identifier[func] , identifier[obj] ):
literal[string]
keyword[def] identifier[tuple_func] ( identifier[coord] ):
keyword[return] ( identifier[func] ( identifier[coord] [ literal[int] ]), identifier[func] ( identifier[coord] [ literal[int] ]))
keyword[return] identifier[map_tuples] ( identifier[tuple_func] , identifier[obj] )
|
def map_coords(func, obj):
"""
Returns the mapped coordinates from a Geometry after applying the provided
function to each dimension in tuples list (ie, linear scaling).
:param func: Function to apply to individual coordinate values
independently
:type func: function
:param obj: A geometry or feature to extract the coordinates from.
:type obj: Point, LineString, MultiPoint, MultiLineString, Polygon,
MultiPolygon
:return: The result of applying the function to each dimension in the
array.
:rtype: list
:raises ValueError: if the provided object is not GeoJSON.
"""
def tuple_func(coord):
return (func(coord[0]), func(coord[1]))
return map_tuples(tuple_func, obj)
|
def _value_from_label(self, label):
'''Convert a label into a kvl value.
'''
unser_val = (label.rel_strength.value, label.meta)
return cbor.dumps(unser_val)
|
def function[_value_from_label, parameter[self, label]]:
constant[Convert a label into a kvl value.
]
variable[unser_val] assign[=] tuple[[<ast.Attribute object at 0x7da1b14343d0>, <ast.Attribute object at 0x7da1b14351b0>]]
return[call[name[cbor].dumps, parameter[name[unser_val]]]]
|
keyword[def] identifier[_value_from_label] ( identifier[self] , identifier[label] ):
literal[string]
identifier[unser_val] =( identifier[label] . identifier[rel_strength] . identifier[value] , identifier[label] . identifier[meta] )
keyword[return] identifier[cbor] . identifier[dumps] ( identifier[unser_val] )
|
def _value_from_label(self, label):
"""Convert a label into a kvl value.
"""
unser_val = (label.rel_strength.value, label.meta)
return cbor.dumps(unser_val)
|
def get_sub_node(dsp, path, node_attr='auto', solution=NONE, _level=0,
_dsp_name=NONE):
"""
Returns a sub node of a dispatcher.
:param dsp:
A dispatcher object or a sub dispatch function.
:type dsp: schedula.Dispatcher | SubDispatch
:param path:
A sequence of node ids or a single node id. Each id identifies a
sub-level node.
:type path: tuple, str
:param node_attr:
Output node attr.
If the searched node does not have this attribute, all its attributes
are returned.
When 'auto', returns the "default" attributes of the searched node,
which are:
- for data node: its output, and if not exists, all its attributes.
- for function and sub-dispatcher nodes: the 'function' attribute.
:type node_attr: str | None
:param solution:
Parent Solution.
:type solution: schedula.utils.Solution
:param _level:
Path level.
:type _level: int
:param _dsp_name:
dsp name to show when the function raise a value error.
:type _dsp_name: str
:return:
A sub node of a dispatcher and its path.
:rtype: dict | object, tuple[str]
**Example**:
.. dispatcher:: o
:opt: graph_attr={'ratio': '1'}, depth=-1
:code:
>>> from schedula import Dispatcher
>>> s_dsp = Dispatcher(name='Sub-dispatcher')
>>> def fun(a, b):
... return a + b
...
>>> s_dsp.add_function('a + b', fun, ['a', 'b'], ['c'])
'a + b'
>>> dispatch = SubDispatch(s_dsp, ['c'], output_type='dict')
>>> dsp = Dispatcher(name='Dispatcher')
>>> dsp.add_function('Sub-dispatcher', dispatch, ['a'], ['b'])
'Sub-dispatcher'
>>> o = dsp.dispatch(inputs={'a': {'a': 3, 'b': 1}})
...
Get the sub node 'c' output or type::
>>> get_sub_node(dsp, ('Sub-dispatcher', 'c'))
(4, ('Sub-dispatcher', 'c'))
>>> get_sub_node(dsp, ('Sub-dispatcher', 'c'), node_attr='type')
('data', ('Sub-dispatcher', 'c'))
Get the sub-dispatcher output:
.. dispatcher:: sol
:opt: graph_attr={'ratio': '1'}, depth=-1
:code:
>>> sol, p = get_sub_node(dsp, ('Sub-dispatcher',), node_attr='output')
>>> sol, p
(Solution([('a', 3), ('b', 1), ('c', 4)]), ('Sub-dispatcher',))
"""
path = list(path)
if isinstance(dsp, SubDispatch): # Take the dispatcher obj.
dsp = dsp.dsp
if _dsp_name is NONE: # Set origin dispatcher name for warning purpose.
_dsp_name = dsp.name
if solution is NONE: # Set origin dispatcher name for warning purpose.
solution = dsp.solution
node_id = path[_level] # Node id at given level.
try:
node_id, node = _get_node(dsp.nodes, node_id) # Get dispatcher node.
path[_level] = node_id
except KeyError:
if _level == len(path) - 1 and node_attr in ('auto', 'output') \
and solution is not EMPTY:
try:
# Get dispatcher node.
node_id, node = _get_node(solution, node_id, False)
path[_level] = node_id
return node, tuple(path)
except KeyError:
pass
msg = 'Path %s does not exist in %s dispatcher.' % (path, _dsp_name)
raise ValueError(msg)
_level += 1 # Next level.
if _level < len(path): # Is not path leaf?.
try:
if node['type'] in ('function', 'dispatcher'):
try:
solution = solution.workflow.node[node_id]['solution']
except (KeyError, AttributeError):
solution = EMPTY
dsp = parent_func(node['function']) # Get parent function.
else:
raise KeyError
except KeyError:
msg = 'Node of path %s at level %i is not a function or ' \
'sub-dispatcher node of %s ' \
'dispatcher.' % (path, _level, _dsp_name)
raise ValueError(msg)
# Continue the node search.
return get_sub_node(dsp, path, node_attr, solution, _level, _dsp_name)
else:
data, sol = EMPTY, solution
# Return the sub node.
if node_attr == 'auto' and node['type'] != 'data': # Auto: function.
node_attr = 'function'
elif node_attr == 'auto' and sol is not EMPTY and node_id in sol:
data = sol[node_id] # Auto: data output.
elif node_attr == 'output' and node['type'] != 'data':
data = sol.workflow.nodes[node_id]['solution']
elif node_attr == 'output' and node['type'] == 'data':
data = sol[node_id]
elif node_attr == 'description': # Search and return node description.
data = dsp.search_node_description(node_id)[0]
elif node_attr == 'value_type' and node['type'] == 'data':
# Search and return data node value's type.
data = dsp.search_node_description(node_id, node_attr)[0]
elif node_attr == 'default_value':
data = dsp.default_values[node_id]
elif node_attr == 'dsp':
data = dsp
elif node_attr == 'sol':
data = sol
if data is EMPTY:
data = node.get(node_attr, node)
return data, tuple(path)
|
def function[get_sub_node, parameter[dsp, path, node_attr, solution, _level, _dsp_name]]:
constant[
Returns a sub node of a dispatcher.
:param dsp:
A dispatcher object or a sub dispatch function.
:type dsp: schedula.Dispatcher | SubDispatch
:param path:
A sequence of node ids or a single node id. Each id identifies a
sub-level node.
:type path: tuple, str
:param node_attr:
Output node attr.
If the searched node does not have this attribute, all its attributes
are returned.
When 'auto', returns the "default" attributes of the searched node,
which are:
- for data node: its output, and if not exists, all its attributes.
- for function and sub-dispatcher nodes: the 'function' attribute.
:type node_attr: str | None
:param solution:
Parent Solution.
:type solution: schedula.utils.Solution
:param _level:
Path level.
:type _level: int
:param _dsp_name:
dsp name to show when the function raise a value error.
:type _dsp_name: str
:return:
A sub node of a dispatcher and its path.
:rtype: dict | object, tuple[str]
**Example**:
.. dispatcher:: o
:opt: graph_attr={'ratio': '1'}, depth=-1
:code:
>>> from schedula import Dispatcher
>>> s_dsp = Dispatcher(name='Sub-dispatcher')
>>> def fun(a, b):
... return a + b
...
>>> s_dsp.add_function('a + b', fun, ['a', 'b'], ['c'])
'a + b'
>>> dispatch = SubDispatch(s_dsp, ['c'], output_type='dict')
>>> dsp = Dispatcher(name='Dispatcher')
>>> dsp.add_function('Sub-dispatcher', dispatch, ['a'], ['b'])
'Sub-dispatcher'
>>> o = dsp.dispatch(inputs={'a': {'a': 3, 'b': 1}})
...
Get the sub node 'c' output or type::
>>> get_sub_node(dsp, ('Sub-dispatcher', 'c'))
(4, ('Sub-dispatcher', 'c'))
>>> get_sub_node(dsp, ('Sub-dispatcher', 'c'), node_attr='type')
('data', ('Sub-dispatcher', 'c'))
Get the sub-dispatcher output:
.. dispatcher:: sol
:opt: graph_attr={'ratio': '1'}, depth=-1
:code:
>>> sol, p = get_sub_node(dsp, ('Sub-dispatcher',), node_attr='output')
>>> sol, p
(Solution([('a', 3), ('b', 1), ('c', 4)]), ('Sub-dispatcher',))
]
variable[path] assign[=] call[name[list], parameter[name[path]]]
if call[name[isinstance], parameter[name[dsp], name[SubDispatch]]] begin[:]
variable[dsp] assign[=] name[dsp].dsp
if compare[name[_dsp_name] is name[NONE]] begin[:]
variable[_dsp_name] assign[=] name[dsp].name
if compare[name[solution] is name[NONE]] begin[:]
variable[solution] assign[=] name[dsp].solution
variable[node_id] assign[=] call[name[path]][name[_level]]
<ast.Try object at 0x7da20c992fe0>
<ast.AugAssign object at 0x7da20c990280>
if compare[name[_level] less[<] call[name[len], parameter[name[path]]]] begin[:]
<ast.Try object at 0x7da20c9929b0>
return[call[name[get_sub_node], parameter[name[dsp], name[path], name[node_attr], name[solution], name[_level], name[_dsp_name]]]]
|
keyword[def] identifier[get_sub_node] ( identifier[dsp] , identifier[path] , identifier[node_attr] = literal[string] , identifier[solution] = identifier[NONE] , identifier[_level] = literal[int] ,
identifier[_dsp_name] = identifier[NONE] ):
literal[string]
identifier[path] = identifier[list] ( identifier[path] )
keyword[if] identifier[isinstance] ( identifier[dsp] , identifier[SubDispatch] ):
identifier[dsp] = identifier[dsp] . identifier[dsp]
keyword[if] identifier[_dsp_name] keyword[is] identifier[NONE] :
identifier[_dsp_name] = identifier[dsp] . identifier[name]
keyword[if] identifier[solution] keyword[is] identifier[NONE] :
identifier[solution] = identifier[dsp] . identifier[solution]
identifier[node_id] = identifier[path] [ identifier[_level] ]
keyword[try] :
identifier[node_id] , identifier[node] = identifier[_get_node] ( identifier[dsp] . identifier[nodes] , identifier[node_id] )
identifier[path] [ identifier[_level] ]= identifier[node_id]
keyword[except] identifier[KeyError] :
keyword[if] identifier[_level] == identifier[len] ( identifier[path] )- literal[int] keyword[and] identifier[node_attr] keyword[in] ( literal[string] , literal[string] ) keyword[and] identifier[solution] keyword[is] keyword[not] identifier[EMPTY] :
keyword[try] :
identifier[node_id] , identifier[node] = identifier[_get_node] ( identifier[solution] , identifier[node_id] , keyword[False] )
identifier[path] [ identifier[_level] ]= identifier[node_id]
keyword[return] identifier[node] , identifier[tuple] ( identifier[path] )
keyword[except] identifier[KeyError] :
keyword[pass]
identifier[msg] = literal[string] %( identifier[path] , identifier[_dsp_name] )
keyword[raise] identifier[ValueError] ( identifier[msg] )
identifier[_level] += literal[int]
keyword[if] identifier[_level] < identifier[len] ( identifier[path] ):
keyword[try] :
keyword[if] identifier[node] [ literal[string] ] keyword[in] ( literal[string] , literal[string] ):
keyword[try] :
identifier[solution] = identifier[solution] . identifier[workflow] . identifier[node] [ identifier[node_id] ][ literal[string] ]
keyword[except] ( identifier[KeyError] , identifier[AttributeError] ):
identifier[solution] = identifier[EMPTY]
identifier[dsp] = identifier[parent_func] ( identifier[node] [ literal[string] ])
keyword[else] :
keyword[raise] identifier[KeyError]
keyword[except] identifier[KeyError] :
identifier[msg] = literal[string] literal[string] literal[string] %( identifier[path] , identifier[_level] , identifier[_dsp_name] )
keyword[raise] identifier[ValueError] ( identifier[msg] )
keyword[return] identifier[get_sub_node] ( identifier[dsp] , identifier[path] , identifier[node_attr] , identifier[solution] , identifier[_level] , identifier[_dsp_name] )
keyword[else] :
identifier[data] , identifier[sol] = identifier[EMPTY] , identifier[solution]
keyword[if] identifier[node_attr] == literal[string] keyword[and] identifier[node] [ literal[string] ]!= literal[string] :
identifier[node_attr] = literal[string]
keyword[elif] identifier[node_attr] == literal[string] keyword[and] identifier[sol] keyword[is] keyword[not] identifier[EMPTY] keyword[and] identifier[node_id] keyword[in] identifier[sol] :
identifier[data] = identifier[sol] [ identifier[node_id] ]
keyword[elif] identifier[node_attr] == literal[string] keyword[and] identifier[node] [ literal[string] ]!= literal[string] :
identifier[data] = identifier[sol] . identifier[workflow] . identifier[nodes] [ identifier[node_id] ][ literal[string] ]
keyword[elif] identifier[node_attr] == literal[string] keyword[and] identifier[node] [ literal[string] ]== literal[string] :
identifier[data] = identifier[sol] [ identifier[node_id] ]
keyword[elif] identifier[node_attr] == literal[string] :
identifier[data] = identifier[dsp] . identifier[search_node_description] ( identifier[node_id] )[ literal[int] ]
keyword[elif] identifier[node_attr] == literal[string] keyword[and] identifier[node] [ literal[string] ]== literal[string] :
identifier[data] = identifier[dsp] . identifier[search_node_description] ( identifier[node_id] , identifier[node_attr] )[ literal[int] ]
keyword[elif] identifier[node_attr] == literal[string] :
identifier[data] = identifier[dsp] . identifier[default_values] [ identifier[node_id] ]
keyword[elif] identifier[node_attr] == literal[string] :
identifier[data] = identifier[dsp]
keyword[elif] identifier[node_attr] == literal[string] :
identifier[data] = identifier[sol]
keyword[if] identifier[data] keyword[is] identifier[EMPTY] :
identifier[data] = identifier[node] . identifier[get] ( identifier[node_attr] , identifier[node] )
keyword[return] identifier[data] , identifier[tuple] ( identifier[path] )
|
def get_sub_node(dsp, path, node_attr='auto', solution=NONE, _level=0, _dsp_name=NONE):
"""
Returns a sub node of a dispatcher.
:param dsp:
A dispatcher object or a sub dispatch function.
:type dsp: schedula.Dispatcher | SubDispatch
:param path:
A sequence of node ids or a single node id. Each id identifies a
sub-level node.
:type path: tuple, str
:param node_attr:
Output node attr.
If the searched node does not have this attribute, all its attributes
are returned.
When 'auto', returns the "default" attributes of the searched node,
which are:
- for data node: its output, and if not exists, all its attributes.
- for function and sub-dispatcher nodes: the 'function' attribute.
:type node_attr: str | None
:param solution:
Parent Solution.
:type solution: schedula.utils.Solution
:param _level:
Path level.
:type _level: int
:param _dsp_name:
dsp name to show when the function raise a value error.
:type _dsp_name: str
:return:
A sub node of a dispatcher and its path.
:rtype: dict | object, tuple[str]
**Example**:
.. dispatcher:: o
:opt: graph_attr={'ratio': '1'}, depth=-1
:code:
>>> from schedula import Dispatcher
>>> s_dsp = Dispatcher(name='Sub-dispatcher')
>>> def fun(a, b):
... return a + b
...
>>> s_dsp.add_function('a + b', fun, ['a', 'b'], ['c'])
'a + b'
>>> dispatch = SubDispatch(s_dsp, ['c'], output_type='dict')
>>> dsp = Dispatcher(name='Dispatcher')
>>> dsp.add_function('Sub-dispatcher', dispatch, ['a'], ['b'])
'Sub-dispatcher'
>>> o = dsp.dispatch(inputs={'a': {'a': 3, 'b': 1}})
...
Get the sub node 'c' output or type::
>>> get_sub_node(dsp, ('Sub-dispatcher', 'c'))
(4, ('Sub-dispatcher', 'c'))
>>> get_sub_node(dsp, ('Sub-dispatcher', 'c'), node_attr='type')
('data', ('Sub-dispatcher', 'c'))
Get the sub-dispatcher output:
.. dispatcher:: sol
:opt: graph_attr={'ratio': '1'}, depth=-1
:code:
>>> sol, p = get_sub_node(dsp, ('Sub-dispatcher',), node_attr='output')
>>> sol, p
(Solution([('a', 3), ('b', 1), ('c', 4)]), ('Sub-dispatcher',))
"""
path = list(path)
if isinstance(dsp, SubDispatch): # Take the dispatcher obj.
dsp = dsp.dsp # depends on [control=['if'], data=[]]
if _dsp_name is NONE: # Set origin dispatcher name for warning purpose.
_dsp_name = dsp.name # depends on [control=['if'], data=['_dsp_name']]
if solution is NONE: # Set origin dispatcher name for warning purpose.
solution = dsp.solution # depends on [control=['if'], data=['solution']]
node_id = path[_level] # Node id at given level.
try:
(node_id, node) = _get_node(dsp.nodes, node_id) # Get dispatcher node.
path[_level] = node_id # depends on [control=['try'], data=[]]
except KeyError:
if _level == len(path) - 1 and node_attr in ('auto', 'output') and (solution is not EMPTY):
try:
# Get dispatcher node.
(node_id, node) = _get_node(solution, node_id, False)
path[_level] = node_id
return (node, tuple(path)) # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
msg = 'Path %s does not exist in %s dispatcher.' % (path, _dsp_name)
raise ValueError(msg) # depends on [control=['except'], data=[]]
_level += 1 # Next level.
if _level < len(path): # Is not path leaf?.
try:
if node['type'] in ('function', 'dispatcher'):
try:
solution = solution.workflow.node[node_id]['solution'] # depends on [control=['try'], data=[]]
except (KeyError, AttributeError):
solution = EMPTY # depends on [control=['except'], data=[]]
dsp = parent_func(node['function']) # Get parent function. # depends on [control=['if'], data=[]]
else:
raise KeyError # depends on [control=['try'], data=[]]
except KeyError:
msg = 'Node of path %s at level %i is not a function or sub-dispatcher node of %s dispatcher.' % (path, _level, _dsp_name)
raise ValueError(msg) # depends on [control=['except'], data=[]]
# Continue the node search.
return get_sub_node(dsp, path, node_attr, solution, _level, _dsp_name) # depends on [control=['if'], data=['_level']]
else:
(data, sol) = (EMPTY, solution)
# Return the sub node.
if node_attr == 'auto' and node['type'] != 'data': # Auto: function.
node_attr = 'function' # depends on [control=['if'], data=[]]
elif node_attr == 'auto' and sol is not EMPTY and (node_id in sol):
data = sol[node_id] # Auto: data output. # depends on [control=['if'], data=[]]
elif node_attr == 'output' and node['type'] != 'data':
data = sol.workflow.nodes[node_id]['solution'] # depends on [control=['if'], data=[]]
elif node_attr == 'output' and node['type'] == 'data':
data = sol[node_id] # depends on [control=['if'], data=[]]
elif node_attr == 'description': # Search and return node description.
data = dsp.search_node_description(node_id)[0] # depends on [control=['if'], data=[]]
elif node_attr == 'value_type' and node['type'] == 'data':
# Search and return data node value's type.
data = dsp.search_node_description(node_id, node_attr)[0] # depends on [control=['if'], data=[]]
elif node_attr == 'default_value':
data = dsp.default_values[node_id] # depends on [control=['if'], data=[]]
elif node_attr == 'dsp':
data = dsp # depends on [control=['if'], data=[]]
elif node_attr == 'sol':
data = sol # depends on [control=['if'], data=[]]
if data is EMPTY:
data = node.get(node_attr, node) # depends on [control=['if'], data=['data']]
return (data, tuple(path))
|
def create_initializer_configuration(self, body, **kwargs): # noqa: E501
"""create_initializer_configuration # noqa: E501
create an InitializerConfiguration # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_initializer_configuration(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1alpha1InitializerConfiguration body: (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1alpha1InitializerConfiguration
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_initializer_configuration_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.create_initializer_configuration_with_http_info(body, **kwargs) # noqa: E501
return data
|
def function[create_initializer_configuration, parameter[self, body]]:
constant[create_initializer_configuration # noqa: E501
create an InitializerConfiguration # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_initializer_configuration(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1alpha1InitializerConfiguration body: (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1alpha1InitializerConfiguration
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].create_initializer_configuration_with_http_info, parameter[name[body]]]]
|
keyword[def] identifier[create_initializer_configuration] ( identifier[self] , identifier[body] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[create_initializer_configuration_with_http_info] ( identifier[body] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[create_initializer_configuration_with_http_info] ( identifier[body] ,** identifier[kwargs] )
keyword[return] identifier[data]
|
def create_initializer_configuration(self, body, **kwargs): # noqa: E501
"create_initializer_configuration # noqa: E501\n\n create an InitializerConfiguration # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.create_initializer_configuration(body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param V1alpha1InitializerConfiguration body: (required)\n :param bool include_uninitialized: If true, partially initialized resources are included in the response.\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1alpha1InitializerConfiguration\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_initializer_configuration_with_http_info(body, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.create_initializer_configuration_with_http_info(body, **kwargs) # noqa: E501
return data
|
def create_current_pb(self, ):
"""Create a push button and place it in the corner of the tabwidget
:returns: the created button
:rtype: :class:`QtGui.QPushButton`
:raises: None
"""
pb = QtGui.QPushButton("Select current")
self.selection_tabw.setCornerWidget(pb)
return pb
|
def function[create_current_pb, parameter[self]]:
constant[Create a push button and place it in the corner of the tabwidget
:returns: the created button
:rtype: :class:`QtGui.QPushButton`
:raises: None
]
variable[pb] assign[=] call[name[QtGui].QPushButton, parameter[constant[Select current]]]
call[name[self].selection_tabw.setCornerWidget, parameter[name[pb]]]
return[name[pb]]
|
keyword[def] identifier[create_current_pb] ( identifier[self] ,):
literal[string]
identifier[pb] = identifier[QtGui] . identifier[QPushButton] ( literal[string] )
identifier[self] . identifier[selection_tabw] . identifier[setCornerWidget] ( identifier[pb] )
keyword[return] identifier[pb]
|
def create_current_pb(self):
"""Create a push button and place it in the corner of the tabwidget
:returns: the created button
:rtype: :class:`QtGui.QPushButton`
:raises: None
"""
pb = QtGui.QPushButton('Select current')
self.selection_tabw.setCornerWidget(pb)
return pb
|
def on_created(self, event):
"""Function called everytime a new file is created.
Args:
event: Event to process.
"""
self._logger.debug('Detected create event on watched path: %s', event.src_path)
self._process_event(event)
|
def function[on_created, parameter[self, event]]:
constant[Function called everytime a new file is created.
Args:
event: Event to process.
]
call[name[self]._logger.debug, parameter[constant[Detected create event on watched path: %s], name[event].src_path]]
call[name[self]._process_event, parameter[name[event]]]
|
keyword[def] identifier[on_created] ( identifier[self] , identifier[event] ):
literal[string]
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] , identifier[event] . identifier[src_path] )
identifier[self] . identifier[_process_event] ( identifier[event] )
|
def on_created(self, event):
"""Function called everytime a new file is created.
Args:
event: Event to process.
"""
self._logger.debug('Detected create event on watched path: %s', event.src_path)
self._process_event(event)
|
def get_paginated_catalogs(self, querystring=None):
"""
Return a paginated list of course catalogs, including name and ID.
Returns:
dict: Paginated response containing catalogs available for the user.
"""
return self._load_data(
self.CATALOGS_ENDPOINT,
default=[],
querystring=querystring,
traverse_pagination=False,
many=False
)
|
def function[get_paginated_catalogs, parameter[self, querystring]]:
constant[
Return a paginated list of course catalogs, including name and ID.
Returns:
dict: Paginated response containing catalogs available for the user.
]
return[call[name[self]._load_data, parameter[name[self].CATALOGS_ENDPOINT]]]
|
keyword[def] identifier[get_paginated_catalogs] ( identifier[self] , identifier[querystring] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[_load_data] (
identifier[self] . identifier[CATALOGS_ENDPOINT] ,
identifier[default] =[],
identifier[querystring] = identifier[querystring] ,
identifier[traverse_pagination] = keyword[False] ,
identifier[many] = keyword[False]
)
|
def get_paginated_catalogs(self, querystring=None):
"""
Return a paginated list of course catalogs, including name and ID.
Returns:
dict: Paginated response containing catalogs available for the user.
"""
return self._load_data(self.CATALOGS_ENDPOINT, default=[], querystring=querystring, traverse_pagination=False, many=False)
|
def streamserver_handle(cls, socket, address):
'''Translate this class for use in a StreamServer'''
request = cls.dummy_request()
request._sock = socket
server = None
cls(request, address, server)
|
def function[streamserver_handle, parameter[cls, socket, address]]:
constant[Translate this class for use in a StreamServer]
variable[request] assign[=] call[name[cls].dummy_request, parameter[]]
name[request]._sock assign[=] name[socket]
variable[server] assign[=] constant[None]
call[name[cls], parameter[name[request], name[address], name[server]]]
|
keyword[def] identifier[streamserver_handle] ( identifier[cls] , identifier[socket] , identifier[address] ):
literal[string]
identifier[request] = identifier[cls] . identifier[dummy_request] ()
identifier[request] . identifier[_sock] = identifier[socket]
identifier[server] = keyword[None]
identifier[cls] ( identifier[request] , identifier[address] , identifier[server] )
|
def streamserver_handle(cls, socket, address):
"""Translate this class for use in a StreamServer"""
request = cls.dummy_request()
request._sock = socket
server = None
cls(request, address, server)
|
def clr(args):
"""
%prog blastfile fastafiles
Calculate the vector clear range file based BLAST to the vectors.
"""
p = OptionParser(clr.__doc__)
opts, args = p.parse_args(args)
if len(args) < 2:
sys.exit(not p.print_help())
blastfile = args[0]
fastafiles = args[1:]
sizes = {}
for fa in fastafiles:
f = Fasta(fa)
sizes.update(f.itersizes())
b = Blast(blastfile)
for query, hits in b.iter_hits():
qsize = sizes[query]
vectors = list((x.qstart, x.qstop) for x in hits)
vmin, vmax = range_minmax(vectors)
left_size = vmin - 1
right_size = qsize - vmax
if left_size > right_size:
clr_start, clr_end = 0, vmin
else:
clr_start, clr_end = vmax, qsize
print("\t".join(str(x) for x in (query, clr_start, clr_end)))
del sizes[query]
for q, size in sorted(sizes.items()):
print("\t".join(str(x) for x in (q, 0, size)))
|
def function[clr, parameter[args]]:
constant[
%prog blastfile fastafiles
Calculate the vector clear range file based BLAST to the vectors.
]
variable[p] assign[=] call[name[OptionParser], parameter[name[clr].__doc__]]
<ast.Tuple object at 0x7da18f723730> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] less[<] constant[2]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da18f7230a0>]]
variable[blastfile] assign[=] call[name[args]][constant[0]]
variable[fastafiles] assign[=] call[name[args]][<ast.Slice object at 0x7da18f723cd0>]
variable[sizes] assign[=] dictionary[[], []]
for taget[name[fa]] in starred[name[fastafiles]] begin[:]
variable[f] assign[=] call[name[Fasta], parameter[name[fa]]]
call[name[sizes].update, parameter[call[name[f].itersizes, parameter[]]]]
variable[b] assign[=] call[name[Blast], parameter[name[blastfile]]]
for taget[tuple[[<ast.Name object at 0x7da18f721e70>, <ast.Name object at 0x7da18f722530>]]] in starred[call[name[b].iter_hits, parameter[]]] begin[:]
variable[qsize] assign[=] call[name[sizes]][name[query]]
variable[vectors] assign[=] call[name[list], parameter[<ast.GeneratorExp object at 0x7da18f721030>]]
<ast.Tuple object at 0x7da18f722410> assign[=] call[name[range_minmax], parameter[name[vectors]]]
variable[left_size] assign[=] binary_operation[name[vmin] - constant[1]]
variable[right_size] assign[=] binary_operation[name[qsize] - name[vmax]]
if compare[name[left_size] greater[>] name[right_size]] begin[:]
<ast.Tuple object at 0x7da20c6e4e50> assign[=] tuple[[<ast.Constant object at 0x7da20c6e52a0>, <ast.Name object at 0x7da20c6e56c0>]]
call[name[print], parameter[call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da20c6e5420>]]]]
<ast.Delete object at 0x7da20c6e53f0>
for taget[tuple[[<ast.Name object at 0x7da20c6e61a0>, <ast.Name object at 0x7da20c6e7e50>]]] in starred[call[name[sorted], parameter[call[name[sizes].items, parameter[]]]]] begin[:]
call[name[print], parameter[call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da20c6e5d20>]]]]
|
keyword[def] identifier[clr] ( identifier[args] ):
literal[string]
identifier[p] = identifier[OptionParser] ( identifier[clr] . identifier[__doc__] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )< literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[blastfile] = identifier[args] [ literal[int] ]
identifier[fastafiles] = identifier[args] [ literal[int] :]
identifier[sizes] ={}
keyword[for] identifier[fa] keyword[in] identifier[fastafiles] :
identifier[f] = identifier[Fasta] ( identifier[fa] )
identifier[sizes] . identifier[update] ( identifier[f] . identifier[itersizes] ())
identifier[b] = identifier[Blast] ( identifier[blastfile] )
keyword[for] identifier[query] , identifier[hits] keyword[in] identifier[b] . identifier[iter_hits] ():
identifier[qsize] = identifier[sizes] [ identifier[query] ]
identifier[vectors] = identifier[list] (( identifier[x] . identifier[qstart] , identifier[x] . identifier[qstop] ) keyword[for] identifier[x] keyword[in] identifier[hits] )
identifier[vmin] , identifier[vmax] = identifier[range_minmax] ( identifier[vectors] )
identifier[left_size] = identifier[vmin] - literal[int]
identifier[right_size] = identifier[qsize] - identifier[vmax]
keyword[if] identifier[left_size] > identifier[right_size] :
identifier[clr_start] , identifier[clr_end] = literal[int] , identifier[vmin]
keyword[else] :
identifier[clr_start] , identifier[clr_end] = identifier[vmax] , identifier[qsize]
identifier[print] ( literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] ( identifier[query] , identifier[clr_start] , identifier[clr_end] )))
keyword[del] identifier[sizes] [ identifier[query] ]
keyword[for] identifier[q] , identifier[size] keyword[in] identifier[sorted] ( identifier[sizes] . identifier[items] ()):
identifier[print] ( literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] ( identifier[q] , literal[int] , identifier[size] )))
|
def clr(args):
"""
%prog blastfile fastafiles
Calculate the vector clear range file based BLAST to the vectors.
"""
p = OptionParser(clr.__doc__)
(opts, args) = p.parse_args(args)
if len(args) < 2:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
blastfile = args[0]
fastafiles = args[1:]
sizes = {}
for fa in fastafiles:
f = Fasta(fa)
sizes.update(f.itersizes()) # depends on [control=['for'], data=['fa']]
b = Blast(blastfile)
for (query, hits) in b.iter_hits():
qsize = sizes[query]
vectors = list(((x.qstart, x.qstop) for x in hits))
(vmin, vmax) = range_minmax(vectors)
left_size = vmin - 1
right_size = qsize - vmax
if left_size > right_size:
(clr_start, clr_end) = (0, vmin) # depends on [control=['if'], data=[]]
else:
(clr_start, clr_end) = (vmax, qsize)
print('\t'.join((str(x) for x in (query, clr_start, clr_end))))
del sizes[query] # depends on [control=['for'], data=[]]
for (q, size) in sorted(sizes.items()):
print('\t'.join((str(x) for x in (q, 0, size)))) # depends on [control=['for'], data=[]]
|
def last(self):
"""Gets item with lowest priority. Performance: O(1)"""
with self.lock:
try:
return self.data[-1][0]
except IndexError as ex:
ex.args = ('DEPQ is empty',)
raise
|
def function[last, parameter[self]]:
constant[Gets item with lowest priority. Performance: O(1)]
with name[self].lock begin[:]
<ast.Try object at 0x7da207f02d70>
|
keyword[def] identifier[last] ( identifier[self] ):
literal[string]
keyword[with] identifier[self] . identifier[lock] :
keyword[try] :
keyword[return] identifier[self] . identifier[data] [- literal[int] ][ literal[int] ]
keyword[except] identifier[IndexError] keyword[as] identifier[ex] :
identifier[ex] . identifier[args] =( literal[string] ,)
keyword[raise]
|
def last(self):
"""Gets item with lowest priority. Performance: O(1)"""
with self.lock:
try:
return self.data[-1][0] # depends on [control=['try'], data=[]]
except IndexError as ex:
ex.args = ('DEPQ is empty',)
raise # depends on [control=['except'], data=['ex']] # depends on [control=['with'], data=[]]
|
def copyPort(port, targetLNode, reverseDir, topPortName=None):
"""
Create identical port on targetNode
"""
newP = _copyPort(port, targetLNode, reverseDir)
if topPortName is not None:
newP.name = topPortName
return newP
|
def function[copyPort, parameter[port, targetLNode, reverseDir, topPortName]]:
constant[
Create identical port on targetNode
]
variable[newP] assign[=] call[name[_copyPort], parameter[name[port], name[targetLNode], name[reverseDir]]]
if compare[name[topPortName] is_not constant[None]] begin[:]
name[newP].name assign[=] name[topPortName]
return[name[newP]]
|
keyword[def] identifier[copyPort] ( identifier[port] , identifier[targetLNode] , identifier[reverseDir] , identifier[topPortName] = keyword[None] ):
literal[string]
identifier[newP] = identifier[_copyPort] ( identifier[port] , identifier[targetLNode] , identifier[reverseDir] )
keyword[if] identifier[topPortName] keyword[is] keyword[not] keyword[None] :
identifier[newP] . identifier[name] = identifier[topPortName]
keyword[return] identifier[newP]
|
def copyPort(port, targetLNode, reverseDir, topPortName=None):
"""
Create identical port on targetNode
"""
newP = _copyPort(port, targetLNode, reverseDir)
if topPortName is not None:
newP.name = topPortName # depends on [control=['if'], data=['topPortName']]
return newP
|
def get_dict_of_all_args(self):
"""Generates a dictionary from a handler paths query string and returns it
:returns: Dictionary of all key/values in arguments list
:rtype: dict
"""
dictionary = {}
for arg in [arg for arg in self.request.arguments if arg not in self.settings.get("reserved_query_string_params", [])]:
val = self.get_argument(arg, default=None)
if val:
dictionary[arg] = val
return dictionary
|
def function[get_dict_of_all_args, parameter[self]]:
constant[Generates a dictionary from a handler paths query string and returns it
:returns: Dictionary of all key/values in arguments list
:rtype: dict
]
variable[dictionary] assign[=] dictionary[[], []]
for taget[name[arg]] in starred[<ast.ListComp object at 0x7da1b14e63b0>] begin[:]
variable[val] assign[=] call[name[self].get_argument, parameter[name[arg]]]
if name[val] begin[:]
call[name[dictionary]][name[arg]] assign[=] name[val]
return[name[dictionary]]
|
keyword[def] identifier[get_dict_of_all_args] ( identifier[self] ):
literal[string]
identifier[dictionary] ={}
keyword[for] identifier[arg] keyword[in] [ identifier[arg] keyword[for] identifier[arg] keyword[in] identifier[self] . identifier[request] . identifier[arguments] keyword[if] identifier[arg] keyword[not] keyword[in] identifier[self] . identifier[settings] . identifier[get] ( literal[string] ,[])]:
identifier[val] = identifier[self] . identifier[get_argument] ( identifier[arg] , identifier[default] = keyword[None] )
keyword[if] identifier[val] :
identifier[dictionary] [ identifier[arg] ]= identifier[val]
keyword[return] identifier[dictionary]
|
def get_dict_of_all_args(self):
"""Generates a dictionary from a handler paths query string and returns it
:returns: Dictionary of all key/values in arguments list
:rtype: dict
"""
dictionary = {}
for arg in [arg for arg in self.request.arguments if arg not in self.settings.get('reserved_query_string_params', [])]:
val = self.get_argument(arg, default=None)
if val:
dictionary[arg] = val # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['arg']]
return dictionary
|
def clear_system_configuration(self):
"""Clear the BIOS/UEFI configuration
"""
biosinfo = self._do_web_request(self._biosurl)
rb = biosinfo.get('Actions', {}).get('#Bios.ResetBios', {})
rb = rb.get('target', '')
if not rb:
raise Exception('BIOS reset not detected on this system')
self._do_web_request(rb, {'Action': 'Bios.ResetBios'})
|
def function[clear_system_configuration, parameter[self]]:
constant[Clear the BIOS/UEFI configuration
]
variable[biosinfo] assign[=] call[name[self]._do_web_request, parameter[name[self]._biosurl]]
variable[rb] assign[=] call[call[name[biosinfo].get, parameter[constant[Actions], dictionary[[], []]]].get, parameter[constant[#Bios.ResetBios], dictionary[[], []]]]
variable[rb] assign[=] call[name[rb].get, parameter[constant[target], constant[]]]
if <ast.UnaryOp object at 0x7da20e9571c0> begin[:]
<ast.Raise object at 0x7da20e955120>
call[name[self]._do_web_request, parameter[name[rb], dictionary[[<ast.Constant object at 0x7da20e957c40>], [<ast.Constant object at 0x7da20e955030>]]]]
|
keyword[def] identifier[clear_system_configuration] ( identifier[self] ):
literal[string]
identifier[biosinfo] = identifier[self] . identifier[_do_web_request] ( identifier[self] . identifier[_biosurl] )
identifier[rb] = identifier[biosinfo] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ,{})
identifier[rb] = identifier[rb] . identifier[get] ( literal[string] , literal[string] )
keyword[if] keyword[not] identifier[rb] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[self] . identifier[_do_web_request] ( identifier[rb] ,{ literal[string] : literal[string] })
|
def clear_system_configuration(self):
"""Clear the BIOS/UEFI configuration
"""
biosinfo = self._do_web_request(self._biosurl)
rb = biosinfo.get('Actions', {}).get('#Bios.ResetBios', {})
rb = rb.get('target', '')
if not rb:
raise Exception('BIOS reset not detected on this system') # depends on [control=['if'], data=[]]
self._do_web_request(rb, {'Action': 'Bios.ResetBios'})
|
def pseudo_partial_waves(self):
"""Dictionary with the pseudo partial waves indexed by state."""
pseudo_partial_waves = OrderedDict()
for (mesh, values, attrib) in self._parse_all_radfuncs("pseudo_partial_wave"):
state = attrib["state"]
#val_state = self.valence_states[state]
pseudo_partial_waves[state] = RadialFunction(mesh, values)
return pseudo_partial_waves
|
def function[pseudo_partial_waves, parameter[self]]:
constant[Dictionary with the pseudo partial waves indexed by state.]
variable[pseudo_partial_waves] assign[=] call[name[OrderedDict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18eb54430>, <ast.Name object at 0x7da18eb54a90>, <ast.Name object at 0x7da18eb560b0>]]] in starred[call[name[self]._parse_all_radfuncs, parameter[constant[pseudo_partial_wave]]]] begin[:]
variable[state] assign[=] call[name[attrib]][constant[state]]
call[name[pseudo_partial_waves]][name[state]] assign[=] call[name[RadialFunction], parameter[name[mesh], name[values]]]
return[name[pseudo_partial_waves]]
|
keyword[def] identifier[pseudo_partial_waves] ( identifier[self] ):
literal[string]
identifier[pseudo_partial_waves] = identifier[OrderedDict] ()
keyword[for] ( identifier[mesh] , identifier[values] , identifier[attrib] ) keyword[in] identifier[self] . identifier[_parse_all_radfuncs] ( literal[string] ):
identifier[state] = identifier[attrib] [ literal[string] ]
identifier[pseudo_partial_waves] [ identifier[state] ]= identifier[RadialFunction] ( identifier[mesh] , identifier[values] )
keyword[return] identifier[pseudo_partial_waves]
|
def pseudo_partial_waves(self):
"""Dictionary with the pseudo partial waves indexed by state."""
pseudo_partial_waves = OrderedDict()
for (mesh, values, attrib) in self._parse_all_radfuncs('pseudo_partial_wave'):
state = attrib['state']
#val_state = self.valence_states[state]
pseudo_partial_waves[state] = RadialFunction(mesh, values) # depends on [control=['for'], data=[]]
return pseudo_partial_waves
|
def _get_sof_terms(self, C, rake):
"""
Returns the style-of-faulting scaling parameters
"""
if rake >= 45.0 and rake <= 135.0:
# Reverse faulting
return C["b0R"], C["b1R"]
elif rake <= -45. and rake >= -135.0:
# Normal faulting
return C["b0N"], C["b1N"]
else:
# Strike slip
return C["b0SS"], C["b1SS"]
|
def function[_get_sof_terms, parameter[self, C, rake]]:
constant[
Returns the style-of-faulting scaling parameters
]
if <ast.BoolOp object at 0x7da18f00d4e0> begin[:]
return[tuple[[<ast.Subscript object at 0x7da18f00ee60>, <ast.Subscript object at 0x7da18f00d660>]]]
|
keyword[def] identifier[_get_sof_terms] ( identifier[self] , identifier[C] , identifier[rake] ):
literal[string]
keyword[if] identifier[rake] >= literal[int] keyword[and] identifier[rake] <= literal[int] :
keyword[return] identifier[C] [ literal[string] ], identifier[C] [ literal[string] ]
keyword[elif] identifier[rake] <=- literal[int] keyword[and] identifier[rake] >=- literal[int] :
keyword[return] identifier[C] [ literal[string] ], identifier[C] [ literal[string] ]
keyword[else] :
keyword[return] identifier[C] [ literal[string] ], identifier[C] [ literal[string] ]
|
def _get_sof_terms(self, C, rake):
"""
Returns the style-of-faulting scaling parameters
"""
if rake >= 45.0 and rake <= 135.0:
# Reverse faulting
return (C['b0R'], C['b1R']) # depends on [control=['if'], data=[]]
elif rake <= -45.0 and rake >= -135.0:
# Normal faulting
return (C['b0N'], C['b1N']) # depends on [control=['if'], data=[]]
else:
# Strike slip
return (C['b0SS'], C['b1SS'])
|
def screen_text(
self, text_content_type, text_content, language=None, autocorrect=False, pii=False, list_id=None, classify=False, custom_headers=None, raw=False, callback=None, **operation_config):
"""Detect profanity and match against custom and shared blacklists.
Detects profanity in more than 100 languages and match against custom
and shared blacklists.
:param text_content_type: The content type. Possible values include:
'text/plain', 'text/html', 'text/xml', 'text/markdown'
:type text_content_type: str
:param text_content: Content to screen.
:type text_content: Generator
:param language: Language of the text.
:type language: str
:param autocorrect: Autocorrect text.
:type autocorrect: bool
:param pii: Detect personal identifiable information.
:type pii: bool
:param list_id: The list Id.
:type list_id: str
:param classify: Classify input.
:type classify: bool
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param callback: When specified, will be called with each chunk of
data that is streamed. The callback should take two arguments, the
bytes of the current chunk of data and the response object. If the
data is uploading, response will be None.
:type callback: Callable[Bytes, response=None]
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: Screen or ClientRawResponse if raw=true
:rtype: ~azure.cognitiveservices.vision.contentmoderator.models.Screen
or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`APIErrorException<azure.cognitiveservices.vision.contentmoderator.models.APIErrorException>`
"""
# Construct URL
url = self.screen_text.metadata['url']
path_format_arguments = {
'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if language is not None:
query_parameters['language'] = self._serialize.query("language", language, 'str')
if autocorrect is not None:
query_parameters['autocorrect'] = self._serialize.query("autocorrect", autocorrect, 'bool')
if pii is not None:
query_parameters['PII'] = self._serialize.query("pii", pii, 'bool')
if list_id is not None:
query_parameters['listId'] = self._serialize.query("list_id", list_id, 'str')
if classify is not None:
query_parameters['classify'] = self._serialize.query("classify", classify, 'bool')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'text/plain'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['Content-Type'] = self._serialize.header("text_content_type", text_content_type, 'str')
# Construct body
body_content = self._client.stream_upload(text_content, callback)
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.APIErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Screen', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
|
def function[screen_text, parameter[self, text_content_type, text_content, language, autocorrect, pii, list_id, classify, custom_headers, raw, callback]]:
constant[Detect profanity and match against custom and shared blacklists.
Detects profanity in more than 100 languages and match against custom
and shared blacklists.
:param text_content_type: The content type. Possible values include:
'text/plain', 'text/html', 'text/xml', 'text/markdown'
:type text_content_type: str
:param text_content: Content to screen.
:type text_content: Generator
:param language: Language of the text.
:type language: str
:param autocorrect: Autocorrect text.
:type autocorrect: bool
:param pii: Detect personal identifiable information.
:type pii: bool
:param list_id: The list Id.
:type list_id: str
:param classify: Classify input.
:type classify: bool
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param callback: When specified, will be called with each chunk of
data that is streamed. The callback should take two arguments, the
bytes of the current chunk of data and the response object. If the
data is uploading, response will be None.
:type callback: Callable[Bytes, response=None]
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: Screen or ClientRawResponse if raw=true
:rtype: ~azure.cognitiveservices.vision.contentmoderator.models.Screen
or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`APIErrorException<azure.cognitiveservices.vision.contentmoderator.models.APIErrorException>`
]
variable[url] assign[=] call[name[self].screen_text.metadata][constant[url]]
variable[path_format_arguments] assign[=] dictionary[[<ast.Constant object at 0x7da2041dba30>], [<ast.Call object at 0x7da2041dbca0>]]
variable[url] assign[=] call[name[self]._client.format_url, parameter[name[url]]]
variable[query_parameters] assign[=] dictionary[[], []]
if compare[name[language] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[language]] assign[=] call[name[self]._serialize.query, parameter[constant[language], name[language], constant[str]]]
if compare[name[autocorrect] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[autocorrect]] assign[=] call[name[self]._serialize.query, parameter[constant[autocorrect], name[autocorrect], constant[bool]]]
if compare[name[pii] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[PII]] assign[=] call[name[self]._serialize.query, parameter[constant[pii], name[pii], constant[bool]]]
if compare[name[list_id] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[listId]] assign[=] call[name[self]._serialize.query, parameter[constant[list_id], name[list_id], constant[str]]]
if compare[name[classify] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[classify]] assign[=] call[name[self]._serialize.query, parameter[constant[classify], name[classify], constant[bool]]]
variable[header_parameters] assign[=] dictionary[[], []]
call[name[header_parameters]][constant[Accept]] assign[=] constant[application/json]
call[name[header_parameters]][constant[Content-Type]] assign[=] constant[text/plain]
if name[custom_headers] begin[:]
call[name[header_parameters].update, parameter[name[custom_headers]]]
call[name[header_parameters]][constant[Content-Type]] assign[=] call[name[self]._serialize.header, parameter[constant[text_content_type], name[text_content_type], constant[str]]]
variable[body_content] assign[=] call[name[self]._client.stream_upload, parameter[name[text_content], name[callback]]]
variable[request] assign[=] call[name[self]._client.post, parameter[name[url], name[query_parameters], name[header_parameters], name[body_content]]]
variable[response] assign[=] call[name[self]._client.send, parameter[name[request]]]
if compare[name[response].status_code <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da2054a4d90>]]] begin[:]
<ast.Raise object at 0x7da2054a4970>
variable[deserialized] assign[=] constant[None]
if compare[name[response].status_code equal[==] constant[200]] begin[:]
variable[deserialized] assign[=] call[name[self]._deserialize, parameter[constant[Screen], name[response]]]
if name[raw] begin[:]
variable[client_raw_response] assign[=] call[name[ClientRawResponse], parameter[name[deserialized], name[response]]]
return[name[client_raw_response]]
return[name[deserialized]]
|
keyword[def] identifier[screen_text] (
identifier[self] , identifier[text_content_type] , identifier[text_content] , identifier[language] = keyword[None] , identifier[autocorrect] = keyword[False] , identifier[pii] = keyword[False] , identifier[list_id] = keyword[None] , identifier[classify] = keyword[False] , identifier[custom_headers] = keyword[None] , identifier[raw] = keyword[False] , identifier[callback] = keyword[None] ,** identifier[operation_config] ):
literal[string]
identifier[url] = identifier[self] . identifier[screen_text] . identifier[metadata] [ literal[string] ]
identifier[path_format_arguments] ={
literal[string] : identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[self] . identifier[config] . identifier[endpoint] , literal[string] , identifier[skip_quote] = keyword[True] )
}
identifier[url] = identifier[self] . identifier[_client] . identifier[format_url] ( identifier[url] ,** identifier[path_format_arguments] )
identifier[query_parameters] ={}
keyword[if] identifier[language] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[language] , literal[string] )
keyword[if] identifier[autocorrect] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[autocorrect] , literal[string] )
keyword[if] identifier[pii] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[pii] , literal[string] )
keyword[if] identifier[list_id] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[list_id] , literal[string] )
keyword[if] identifier[classify] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[classify] , literal[string] )
identifier[header_parameters] ={}
identifier[header_parameters] [ literal[string] ]= literal[string]
identifier[header_parameters] [ literal[string] ]= literal[string]
keyword[if] identifier[custom_headers] :
identifier[header_parameters] . identifier[update] ( identifier[custom_headers] )
identifier[header_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[header] ( literal[string] , identifier[text_content_type] , literal[string] )
identifier[body_content] = identifier[self] . identifier[_client] . identifier[stream_upload] ( identifier[text_content] , identifier[callback] )
identifier[request] = identifier[self] . identifier[_client] . identifier[post] ( identifier[url] , identifier[query_parameters] , identifier[header_parameters] , identifier[body_content] )
identifier[response] = identifier[self] . identifier[_client] . identifier[send] ( identifier[request] , identifier[stream] = keyword[False] ,** identifier[operation_config] )
keyword[if] identifier[response] . identifier[status_code] keyword[not] keyword[in] [ literal[int] ]:
keyword[raise] identifier[models] . identifier[APIErrorException] ( identifier[self] . identifier[_deserialize] , identifier[response] )
identifier[deserialized] = keyword[None]
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
identifier[deserialized] = identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] )
keyword[if] identifier[raw] :
identifier[client_raw_response] = identifier[ClientRawResponse] ( identifier[deserialized] , identifier[response] )
keyword[return] identifier[client_raw_response]
keyword[return] identifier[deserialized]
|
def screen_text(self, text_content_type, text_content, language=None, autocorrect=False, pii=False, list_id=None, classify=False, custom_headers=None, raw=False, callback=None, **operation_config):
"""Detect profanity and match against custom and shared blacklists.
Detects profanity in more than 100 languages and match against custom
and shared blacklists.
:param text_content_type: The content type. Possible values include:
'text/plain', 'text/html', 'text/xml', 'text/markdown'
:type text_content_type: str
:param text_content: Content to screen.
:type text_content: Generator
:param language: Language of the text.
:type language: str
:param autocorrect: Autocorrect text.
:type autocorrect: bool
:param pii: Detect personal identifiable information.
:type pii: bool
:param list_id: The list Id.
:type list_id: str
:param classify: Classify input.
:type classify: bool
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param callback: When specified, will be called with each chunk of
data that is streamed. The callback should take two arguments, the
bytes of the current chunk of data and the response object. If the
data is uploading, response will be None.
:type callback: Callable[Bytes, response=None]
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: Screen or ClientRawResponse if raw=true
:rtype: ~azure.cognitiveservices.vision.contentmoderator.models.Screen
or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`APIErrorException<azure.cognitiveservices.vision.contentmoderator.models.APIErrorException>`
"""
# Construct URL
url = self.screen_text.metadata['url']
path_format_arguments = {'Endpoint': self._serialize.url('self.config.endpoint', self.config.endpoint, 'str', skip_quote=True)}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if language is not None:
query_parameters['language'] = self._serialize.query('language', language, 'str') # depends on [control=['if'], data=['language']]
if autocorrect is not None:
query_parameters['autocorrect'] = self._serialize.query('autocorrect', autocorrect, 'bool') # depends on [control=['if'], data=['autocorrect']]
if pii is not None:
query_parameters['PII'] = self._serialize.query('pii', pii, 'bool') # depends on [control=['if'], data=['pii']]
if list_id is not None:
query_parameters['listId'] = self._serialize.query('list_id', list_id, 'str') # depends on [control=['if'], data=['list_id']]
if classify is not None:
query_parameters['classify'] = self._serialize.query('classify', classify, 'bool') # depends on [control=['if'], data=['classify']]
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'text/plain'
if custom_headers:
header_parameters.update(custom_headers) # depends on [control=['if'], data=[]]
header_parameters['Content-Type'] = self._serialize.header('text_content_type', text_content_type, 'str')
# Construct body
body_content = self._client.stream_upload(text_content, callback)
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.APIErrorException(self._deserialize, response) # depends on [control=['if'], data=[]]
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Screen', response) # depends on [control=['if'], data=[]]
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response # depends on [control=['if'], data=[]]
return deserialized
|
def trigger_event(self, event, *args):
"""Dispatch an event to the proper handler method.
In the most common usage, this method is not overloaded by subclasses,
as it performs the routing of events to methods. However, this
method can be overriden if special dispatching rules are needed, or if
having a single method that catches all events is desired.
"""
handler_name = 'on_' + event
if hasattr(self, handler_name):
return getattr(self, handler_name)(*args)
|
def function[trigger_event, parameter[self, event]]:
constant[Dispatch an event to the proper handler method.
In the most common usage, this method is not overloaded by subclasses,
as it performs the routing of events to methods. However, this
method can be overriden if special dispatching rules are needed, or if
having a single method that catches all events is desired.
]
variable[handler_name] assign[=] binary_operation[constant[on_] + name[event]]
if call[name[hasattr], parameter[name[self], name[handler_name]]] begin[:]
return[call[call[name[getattr], parameter[name[self], name[handler_name]]], parameter[<ast.Starred object at 0x7da18ede7be0>]]]
|
keyword[def] identifier[trigger_event] ( identifier[self] , identifier[event] ,* identifier[args] ):
literal[string]
identifier[handler_name] = literal[string] + identifier[event]
keyword[if] identifier[hasattr] ( identifier[self] , identifier[handler_name] ):
keyword[return] identifier[getattr] ( identifier[self] , identifier[handler_name] )(* identifier[args] )
|
def trigger_event(self, event, *args):
"""Dispatch an event to the proper handler method.
In the most common usage, this method is not overloaded by subclasses,
as it performs the routing of events to methods. However, this
method can be overriden if special dispatching rules are needed, or if
having a single method that catches all events is desired.
"""
handler_name = 'on_' + event
if hasattr(self, handler_name):
return getattr(self, handler_name)(*args) # depends on [control=['if'], data=[]]
|
def unescape_utf8(msg):
''' convert escaped unicode web entities to unicode '''
def sub(m):
text = m.group(0)
if text[:3] == "&#x": return unichr(int(text[3:-1], 16))
else: return unichr(int(text[2:-1]))
return re.sub("&#?\w+;", sub, urllib.unquote(msg))
|
def function[unescape_utf8, parameter[msg]]:
constant[ convert escaped unicode web entities to unicode ]
def function[sub, parameter[m]]:
variable[text] assign[=] call[name[m].group, parameter[constant[0]]]
if compare[call[name[text]][<ast.Slice object at 0x7da1b2362170>] equal[==] constant[&#x]] begin[:]
return[call[name[unichr], parameter[call[name[int], parameter[call[name[text]][<ast.Slice object at 0x7da1b23d7370>], constant[16]]]]]]
return[call[name[re].sub, parameter[constant[&#?\w+;], name[sub], call[name[urllib].unquote, parameter[name[msg]]]]]]
|
keyword[def] identifier[unescape_utf8] ( identifier[msg] ):
literal[string]
keyword[def] identifier[sub] ( identifier[m] ):
identifier[text] = identifier[m] . identifier[group] ( literal[int] )
keyword[if] identifier[text] [: literal[int] ]== literal[string] : keyword[return] identifier[unichr] ( identifier[int] ( identifier[text] [ literal[int] :- literal[int] ], literal[int] ))
keyword[else] : keyword[return] identifier[unichr] ( identifier[int] ( identifier[text] [ literal[int] :- literal[int] ]))
keyword[return] identifier[re] . identifier[sub] ( literal[string] , identifier[sub] , identifier[urllib] . identifier[unquote] ( identifier[msg] ))
|
def unescape_utf8(msg):
""" convert escaped unicode web entities to unicode """
def sub(m):
text = m.group(0)
if text[:3] == '&#x':
return unichr(int(text[3:-1], 16)) # depends on [control=['if'], data=[]]
else:
return unichr(int(text[2:-1]))
return re.sub('&#?\\w+;', sub, urllib.unquote(msg))
|
def _handle_event(self, conv_event):
"""Handle updating and scrolling when a new event is added.
Automatically scroll down to show the new text if the bottom is
showing. This allows the user to scroll up to read previous messages
while new messages are arriving.
"""
if not self._is_scrolling:
self.set_focus(conv_event.id_)
else:
self._modified()
|
def function[_handle_event, parameter[self, conv_event]]:
constant[Handle updating and scrolling when a new event is added.
Automatically scroll down to show the new text if the bottom is
showing. This allows the user to scroll up to read previous messages
while new messages are arriving.
]
if <ast.UnaryOp object at 0x7da20c6c49d0> begin[:]
call[name[self].set_focus, parameter[name[conv_event].id_]]
|
keyword[def] identifier[_handle_event] ( identifier[self] , identifier[conv_event] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_is_scrolling] :
identifier[self] . identifier[set_focus] ( identifier[conv_event] . identifier[id_] )
keyword[else] :
identifier[self] . identifier[_modified] ()
|
def _handle_event(self, conv_event):
"""Handle updating and scrolling when a new event is added.
Automatically scroll down to show the new text if the bottom is
showing. This allows the user to scroll up to read previous messages
while new messages are arriving.
"""
if not self._is_scrolling:
self.set_focus(conv_event.id_) # depends on [control=['if'], data=[]]
else:
self._modified()
|
def label_correcting_get_cycle(self, j, pred):
'''
API:
label_correcting_get_cycle(self, labelled, pred)
Description:
In label correcting check cycle it is decided pred has a cycle and
nodes in the cycle are labelled. We will create a list of nodes
in the cycle using labelled and pred inputs.
Pre:
This method should be called from label_correcting_check_cycle(),
unless you are sure about what you are doing.
Input:
j: Node that predecessor is recently updated. We know that it is
in the cycle
pred: Predecessor dictionary that contains a cycle
Post:
Returns a list of nodes that represents cycle. It is in
[n_1, n_2, ..., n_k] form where the cycle has k nodes.
'''
cycle = []
cycle.append(j)
current = pred[j]
while current!=j:
cycle.append(current)
current = pred[current]
cycle.reverse()
return cycle
|
def function[label_correcting_get_cycle, parameter[self, j, pred]]:
constant[
API:
label_correcting_get_cycle(self, labelled, pred)
Description:
In label correcting check cycle it is decided pred has a cycle and
nodes in the cycle are labelled. We will create a list of nodes
in the cycle using labelled and pred inputs.
Pre:
This method should be called from label_correcting_check_cycle(),
unless you are sure about what you are doing.
Input:
j: Node that predecessor is recently updated. We know that it is
in the cycle
pred: Predecessor dictionary that contains a cycle
Post:
Returns a list of nodes that represents cycle. It is in
[n_1, n_2, ..., n_k] form where the cycle has k nodes.
]
variable[cycle] assign[=] list[[]]
call[name[cycle].append, parameter[name[j]]]
variable[current] assign[=] call[name[pred]][name[j]]
while compare[name[current] not_equal[!=] name[j]] begin[:]
call[name[cycle].append, parameter[name[current]]]
variable[current] assign[=] call[name[pred]][name[current]]
call[name[cycle].reverse, parameter[]]
return[name[cycle]]
|
keyword[def] identifier[label_correcting_get_cycle] ( identifier[self] , identifier[j] , identifier[pred] ):
literal[string]
identifier[cycle] =[]
identifier[cycle] . identifier[append] ( identifier[j] )
identifier[current] = identifier[pred] [ identifier[j] ]
keyword[while] identifier[current] != identifier[j] :
identifier[cycle] . identifier[append] ( identifier[current] )
identifier[current] = identifier[pred] [ identifier[current] ]
identifier[cycle] . identifier[reverse] ()
keyword[return] identifier[cycle]
|
def label_correcting_get_cycle(self, j, pred):
"""
API:
label_correcting_get_cycle(self, labelled, pred)
Description:
In label correcting check cycle it is decided pred has a cycle and
nodes in the cycle are labelled. We will create a list of nodes
in the cycle using labelled and pred inputs.
Pre:
This method should be called from label_correcting_check_cycle(),
unless you are sure about what you are doing.
Input:
j: Node that predecessor is recently updated. We know that it is
in the cycle
pred: Predecessor dictionary that contains a cycle
Post:
Returns a list of nodes that represents cycle. It is in
[n_1, n_2, ..., n_k] form where the cycle has k nodes.
"""
cycle = []
cycle.append(j)
current = pred[j]
while current != j:
cycle.append(current)
current = pred[current] # depends on [control=['while'], data=['current']]
cycle.reverse()
return cycle
|
def wiki_pages(self, extra_params=None):
"""
All Wiki Pages with access to this Space
"""
return self.api._get_json(
WikiPage,
space=self,
rel_path=self._build_rel_path('wiki_pages'),
extra_params=extra_params,
)
|
def function[wiki_pages, parameter[self, extra_params]]:
constant[
All Wiki Pages with access to this Space
]
return[call[name[self].api._get_json, parameter[name[WikiPage]]]]
|
keyword[def] identifier[wiki_pages] ( identifier[self] , identifier[extra_params] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[api] . identifier[_get_json] (
identifier[WikiPage] ,
identifier[space] = identifier[self] ,
identifier[rel_path] = identifier[self] . identifier[_build_rel_path] ( literal[string] ),
identifier[extra_params] = identifier[extra_params] ,
)
|
def wiki_pages(self, extra_params=None):
"""
All Wiki Pages with access to this Space
"""
return self.api._get_json(WikiPage, space=self, rel_path=self._build_rel_path('wiki_pages'), extra_params=extra_params)
|
def osu_run1(data_set='osu_run1', sample_every=4):
"""Ohio State University's Run1 motion capture data set."""
path = os.path.join(data_path, data_set)
if not data_available(data_set):
import zipfile
download_data(data_set)
zip = zipfile.ZipFile(os.path.join(data_path, data_set, 'run1TXT.ZIP'), 'r')
for name in zip.namelist():
zip.extract(name, path)
from . import mocap
Y, connect = mocap.load_text_data('Aug210106', path)
Y = Y[0:-1:sample_every, :]
return data_details_return({'Y': Y, 'connect' : connect}, data_set)
|
def function[osu_run1, parameter[data_set, sample_every]]:
constant[Ohio State University's Run1 motion capture data set.]
variable[path] assign[=] call[name[os].path.join, parameter[name[data_path], name[data_set]]]
if <ast.UnaryOp object at 0x7da1b0fb1090> begin[:]
import module[zipfile]
call[name[download_data], parameter[name[data_set]]]
variable[zip] assign[=] call[name[zipfile].ZipFile, parameter[call[name[os].path.join, parameter[name[data_path], name[data_set], constant[run1TXT.ZIP]]], constant[r]]]
for taget[name[name]] in starred[call[name[zip].namelist, parameter[]]] begin[:]
call[name[zip].extract, parameter[name[name], name[path]]]
from relative_module[None] import module[mocap]
<ast.Tuple object at 0x7da1b0fb24a0> assign[=] call[name[mocap].load_text_data, parameter[constant[Aug210106], name[path]]]
variable[Y] assign[=] call[name[Y]][tuple[[<ast.Slice object at 0x7da1b0fb1510>, <ast.Slice object at 0x7da1b0fb2b00>]]]
return[call[name[data_details_return], parameter[dictionary[[<ast.Constant object at 0x7da1b0fb2e90>, <ast.Constant object at 0x7da1b0fb2b90>], [<ast.Name object at 0x7da1b0fb2bc0>, <ast.Name object at 0x7da1b0fb1f00>]], name[data_set]]]]
|
keyword[def] identifier[osu_run1] ( identifier[data_set] = literal[string] , identifier[sample_every] = literal[int] ):
literal[string]
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[data_path] , identifier[data_set] )
keyword[if] keyword[not] identifier[data_available] ( identifier[data_set] ):
keyword[import] identifier[zipfile]
identifier[download_data] ( identifier[data_set] )
identifier[zip] = identifier[zipfile] . identifier[ZipFile] ( identifier[os] . identifier[path] . identifier[join] ( identifier[data_path] , identifier[data_set] , literal[string] ), literal[string] )
keyword[for] identifier[name] keyword[in] identifier[zip] . identifier[namelist] ():
identifier[zip] . identifier[extract] ( identifier[name] , identifier[path] )
keyword[from] . keyword[import] identifier[mocap]
identifier[Y] , identifier[connect] = identifier[mocap] . identifier[load_text_data] ( literal[string] , identifier[path] )
identifier[Y] = identifier[Y] [ literal[int] :- literal[int] : identifier[sample_every] ,:]
keyword[return] identifier[data_details_return] ({ literal[string] : identifier[Y] , literal[string] : identifier[connect] }, identifier[data_set] )
|
def osu_run1(data_set='osu_run1', sample_every=4):
"""Ohio State University's Run1 motion capture data set."""
path = os.path.join(data_path, data_set)
if not data_available(data_set):
import zipfile
download_data(data_set)
zip = zipfile.ZipFile(os.path.join(data_path, data_set, 'run1TXT.ZIP'), 'r')
for name in zip.namelist():
zip.extract(name, path) # depends on [control=['for'], data=['name']] # depends on [control=['if'], data=[]]
from . import mocap
(Y, connect) = mocap.load_text_data('Aug210106', path)
Y = Y[0:-1:sample_every, :]
return data_details_return({'Y': Y, 'connect': connect}, data_set)
|
def p(name="", **kwargs):
'''
really quick and dirty profiling
you start a profile by passing in name, you stop the top profiling by not
passing in a name. You can also call this method using a with statement
This is for when you just want to get a really back of envelope view of
how your fast your code is, super handy, not super accurate
since -- 2013-5-9
example --
p("starting profile")
time.sleep(1)
p() # stop the "starting profile" session
# you can go N levels deep
p("one")
p("two")
time.sleep(0.5)
p() # stop profiling of "two"
time.sleep(0.5)
p() # stop profiling of "one"
with pout.p("three"):
time.sleep(0.5)
name -- string -- pass this in to start a profiling session
return -- context manager
'''
with Reflect.context(**kwargs) as r:
if name:
instance = P_CLASS(r, stream, name, **kwargs)
else:
instance = P_CLASS.pop(r)
instance()
return instance
|
def function[p, parameter[name]]:
constant[
really quick and dirty profiling
you start a profile by passing in name, you stop the top profiling by not
passing in a name. You can also call this method using a with statement
This is for when you just want to get a really back of envelope view of
how your fast your code is, super handy, not super accurate
since -- 2013-5-9
example --
p("starting profile")
time.sleep(1)
p() # stop the "starting profile" session
# you can go N levels deep
p("one")
p("two")
time.sleep(0.5)
p() # stop profiling of "two"
time.sleep(0.5)
p() # stop profiling of "one"
with pout.p("three"):
time.sleep(0.5)
name -- string -- pass this in to start a profiling session
return -- context manager
]
with call[name[Reflect].context, parameter[]] begin[:]
if name[name] begin[:]
variable[instance] assign[=] call[name[P_CLASS], parameter[name[r], name[stream], name[name]]]
return[name[instance]]
|
keyword[def] identifier[p] ( identifier[name] = literal[string] ,** identifier[kwargs] ):
literal[string]
keyword[with] identifier[Reflect] . identifier[context] (** identifier[kwargs] ) keyword[as] identifier[r] :
keyword[if] identifier[name] :
identifier[instance] = identifier[P_CLASS] ( identifier[r] , identifier[stream] , identifier[name] ,** identifier[kwargs] )
keyword[else] :
identifier[instance] = identifier[P_CLASS] . identifier[pop] ( identifier[r] )
identifier[instance] ()
keyword[return] identifier[instance]
|
def p(name='', **kwargs):
"""
really quick and dirty profiling
you start a profile by passing in name, you stop the top profiling by not
passing in a name. You can also call this method using a with statement
This is for when you just want to get a really back of envelope view of
how your fast your code is, super handy, not super accurate
since -- 2013-5-9
example --
p("starting profile")
time.sleep(1)
p() # stop the "starting profile" session
# you can go N levels deep
p("one")
p("two")
time.sleep(0.5)
p() # stop profiling of "two"
time.sleep(0.5)
p() # stop profiling of "one"
with pout.p("three"):
time.sleep(0.5)
name -- string -- pass this in to start a profiling session
return -- context manager
"""
with Reflect.context(**kwargs) as r:
if name:
instance = P_CLASS(r, stream, name, **kwargs) # depends on [control=['if'], data=[]]
else:
instance = P_CLASS.pop(r)
instance() # depends on [control=['with'], data=['r']]
return instance
|
def from_timestamp(
timestamp, tz=UTC # type: Union[int, float] # type: Union[str, _Timezone]
): # type: (...) -> DateTime
"""
Create a DateTime instance from a timestamp.
"""
dt = _datetime.datetime.utcfromtimestamp(timestamp)
dt = datetime(
dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.microsecond
)
if tz is not UTC or tz != "UTC":
dt = dt.in_timezone(tz)
return dt
|
def function[from_timestamp, parameter[timestamp, tz]]:
constant[
Create a DateTime instance from a timestamp.
]
variable[dt] assign[=] call[name[_datetime].datetime.utcfromtimestamp, parameter[name[timestamp]]]
variable[dt] assign[=] call[name[datetime], parameter[name[dt].year, name[dt].month, name[dt].day, name[dt].hour, name[dt].minute, name[dt].second, name[dt].microsecond]]
if <ast.BoolOp object at 0x7da18fe92410> begin[:]
variable[dt] assign[=] call[name[dt].in_timezone, parameter[name[tz]]]
return[name[dt]]
|
keyword[def] identifier[from_timestamp] (
identifier[timestamp] , identifier[tz] = identifier[UTC]
):
literal[string]
identifier[dt] = identifier[_datetime] . identifier[datetime] . identifier[utcfromtimestamp] ( identifier[timestamp] )
identifier[dt] = identifier[datetime] (
identifier[dt] . identifier[year] , identifier[dt] . identifier[month] , identifier[dt] . identifier[day] , identifier[dt] . identifier[hour] , identifier[dt] . identifier[minute] , identifier[dt] . identifier[second] , identifier[dt] . identifier[microsecond]
)
keyword[if] identifier[tz] keyword[is] keyword[not] identifier[UTC] keyword[or] identifier[tz] != literal[string] :
identifier[dt] = identifier[dt] . identifier[in_timezone] ( identifier[tz] )
keyword[return] identifier[dt]
|
def from_timestamp(timestamp, tz=UTC): # type: Union[int, float] # type: Union[str, _Timezone]
# type: (...) -> DateTime
'\n Create a DateTime instance from a timestamp.\n '
dt = _datetime.datetime.utcfromtimestamp(timestamp)
dt = datetime(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.microsecond)
if tz is not UTC or tz != 'UTC':
dt = dt.in_timezone(tz) # depends on [control=['if'], data=[]]
return dt
|
def complete_contexts(self):
'''
Returns a list of context interfaces that yield a complete context.
'''
interfaces = []
[interfaces.extend(i.complete_contexts())
for i in six.itervalues(self.templates)]
return interfaces
|
def function[complete_contexts, parameter[self]]:
constant[
Returns a list of context interfaces that yield a complete context.
]
variable[interfaces] assign[=] list[[]]
<ast.ListComp object at 0x7da18bc70f10>
return[name[interfaces]]
|
keyword[def] identifier[complete_contexts] ( identifier[self] ):
literal[string]
identifier[interfaces] =[]
[ identifier[interfaces] . identifier[extend] ( identifier[i] . identifier[complete_contexts] ())
keyword[for] identifier[i] keyword[in] identifier[six] . identifier[itervalues] ( identifier[self] . identifier[templates] )]
keyword[return] identifier[interfaces]
|
def complete_contexts(self):
"""
Returns a list of context interfaces that yield a complete context.
"""
interfaces = []
[interfaces.extend(i.complete_contexts()) for i in six.itervalues(self.templates)]
return interfaces
|
def third_property(CASRN=None, T=False, P=False, V=False):
r'''Function for calculating a critical property of a substance from its
other two critical properties, but retrieving the actual other critical
values for convenient calculation.
Calls functions Ihmels, Meissner, and
Grigoras, each of which use a general 'Critical surface' type of equation.
Limited accuracy is expected due to very limited theoretical backing.
Parameters
----------
CASRN : string
The CAS number of the desired chemical
T : bool
Estimate critical temperature
P : bool
Estimate critical pressure
V : bool
Estimate critical volume
Returns
-------
Tc, Pc or Vc : float
Critical property of fluid [K], [Pa], or [m^3/mol]
Notes
-----
Avoids recursion only by eliminating the None and critical surface options
for calculating each critical property. So long as it never calls itself.
Note that when used by Tc, Pc or Vc, this function results in said function
calling the other functions (to determine methods) and (with method specified)
Examples
--------
>>> # Decamethyltetrasiloxane [141-62-8]
>>> third_property('141-62-8', V=True)
0.0010920041152263375
>>> # Succinic acid 110-15-6
>>> third_property('110-15-6', P=True)
6095016.233766234
'''
Third = None
if V:
Tc_methods = Tc(CASRN, AvailableMethods=True)[0:-2]
Pc_methods = Pc(CASRN, AvailableMethods=True)[0:-2]
if Tc_methods and Pc_methods:
_Tc = Tc(CASRN=CASRN, Method=Tc_methods[0])
_Pc = Pc(CASRN=CASRN, Method=Pc_methods[0])
Third = critical_surface(Tc=_Tc, Pc=_Pc, Vc=None)
elif P:
Tc_methods = Tc(CASRN, AvailableMethods=True)[0:-2]
Vc_methods = Vc(CASRN, AvailableMethods=True)[0:-2]
if Tc_methods and Vc_methods:
_Tc = Tc(CASRN=CASRN, Method=Tc_methods[0])
_Vc = Vc(CASRN=CASRN, Method=Vc_methods[0])
Third = critical_surface(Tc=_Tc, Vc=_Vc, Pc=None)
elif T:
Pc_methods = Pc(CASRN, AvailableMethods=True)[0:-2]
Vc_methods = Vc(CASRN, AvailableMethods=True)[0:-2]
if Pc_methods and Vc_methods:
_Pc = Pc(CASRN=CASRN, Method=Pc_methods[0])
_Vc = Vc(CASRN=CASRN, Method=Vc_methods[0])
Third = critical_surface(Pc=_Pc, Vc=_Vc, Tc=None)
else:
raise Exception('Error in function')
if not Third:
return None
return Third
|
def function[third_property, parameter[CASRN, T, P, V]]:
constant[Function for calculating a critical property of a substance from its
other two critical properties, but retrieving the actual other critical
values for convenient calculation.
Calls functions Ihmels, Meissner, and
Grigoras, each of which use a general 'Critical surface' type of equation.
Limited accuracy is expected due to very limited theoretical backing.
Parameters
----------
CASRN : string
The CAS number of the desired chemical
T : bool
Estimate critical temperature
P : bool
Estimate critical pressure
V : bool
Estimate critical volume
Returns
-------
Tc, Pc or Vc : float
Critical property of fluid [K], [Pa], or [m^3/mol]
Notes
-----
Avoids recursion only by eliminating the None and critical surface options
for calculating each critical property. So long as it never calls itself.
Note that when used by Tc, Pc or Vc, this function results in said function
calling the other functions (to determine methods) and (with method specified)
Examples
--------
>>> # Decamethyltetrasiloxane [141-62-8]
>>> third_property('141-62-8', V=True)
0.0010920041152263375
>>> # Succinic acid 110-15-6
>>> third_property('110-15-6', P=True)
6095016.233766234
]
variable[Third] assign[=] constant[None]
if name[V] begin[:]
variable[Tc_methods] assign[=] call[call[name[Tc], parameter[name[CASRN]]]][<ast.Slice object at 0x7da18f00d120>]
variable[Pc_methods] assign[=] call[call[name[Pc], parameter[name[CASRN]]]][<ast.Slice object at 0x7da18f00c5b0>]
if <ast.BoolOp object at 0x7da18f00c2b0> begin[:]
variable[_Tc] assign[=] call[name[Tc], parameter[]]
variable[_Pc] assign[=] call[name[Pc], parameter[]]
variable[Third] assign[=] call[name[critical_surface], parameter[]]
if <ast.UnaryOp object at 0x7da18ede7f70> begin[:]
return[constant[None]]
return[name[Third]]
|
keyword[def] identifier[third_property] ( identifier[CASRN] = keyword[None] , identifier[T] = keyword[False] , identifier[P] = keyword[False] , identifier[V] = keyword[False] ):
literal[string]
identifier[Third] = keyword[None]
keyword[if] identifier[V] :
identifier[Tc_methods] = identifier[Tc] ( identifier[CASRN] , identifier[AvailableMethods] = keyword[True] )[ literal[int] :- literal[int] ]
identifier[Pc_methods] = identifier[Pc] ( identifier[CASRN] , identifier[AvailableMethods] = keyword[True] )[ literal[int] :- literal[int] ]
keyword[if] identifier[Tc_methods] keyword[and] identifier[Pc_methods] :
identifier[_Tc] = identifier[Tc] ( identifier[CASRN] = identifier[CASRN] , identifier[Method] = identifier[Tc_methods] [ literal[int] ])
identifier[_Pc] = identifier[Pc] ( identifier[CASRN] = identifier[CASRN] , identifier[Method] = identifier[Pc_methods] [ literal[int] ])
identifier[Third] = identifier[critical_surface] ( identifier[Tc] = identifier[_Tc] , identifier[Pc] = identifier[_Pc] , identifier[Vc] = keyword[None] )
keyword[elif] identifier[P] :
identifier[Tc_methods] = identifier[Tc] ( identifier[CASRN] , identifier[AvailableMethods] = keyword[True] )[ literal[int] :- literal[int] ]
identifier[Vc_methods] = identifier[Vc] ( identifier[CASRN] , identifier[AvailableMethods] = keyword[True] )[ literal[int] :- literal[int] ]
keyword[if] identifier[Tc_methods] keyword[and] identifier[Vc_methods] :
identifier[_Tc] = identifier[Tc] ( identifier[CASRN] = identifier[CASRN] , identifier[Method] = identifier[Tc_methods] [ literal[int] ])
identifier[_Vc] = identifier[Vc] ( identifier[CASRN] = identifier[CASRN] , identifier[Method] = identifier[Vc_methods] [ literal[int] ])
identifier[Third] = identifier[critical_surface] ( identifier[Tc] = identifier[_Tc] , identifier[Vc] = identifier[_Vc] , identifier[Pc] = keyword[None] )
keyword[elif] identifier[T] :
identifier[Pc_methods] = identifier[Pc] ( identifier[CASRN] , identifier[AvailableMethods] = keyword[True] )[ literal[int] :- literal[int] ]
identifier[Vc_methods] = identifier[Vc] ( identifier[CASRN] , identifier[AvailableMethods] = keyword[True] )[ literal[int] :- literal[int] ]
keyword[if] identifier[Pc_methods] keyword[and] identifier[Vc_methods] :
identifier[_Pc] = identifier[Pc] ( identifier[CASRN] = identifier[CASRN] , identifier[Method] = identifier[Pc_methods] [ literal[int] ])
identifier[_Vc] = identifier[Vc] ( identifier[CASRN] = identifier[CASRN] , identifier[Method] = identifier[Vc_methods] [ literal[int] ])
identifier[Third] = identifier[critical_surface] ( identifier[Pc] = identifier[_Pc] , identifier[Vc] = identifier[_Vc] , identifier[Tc] = keyword[None] )
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[if] keyword[not] identifier[Third] :
keyword[return] keyword[None]
keyword[return] identifier[Third]
|
def third_property(CASRN=None, T=False, P=False, V=False):
"""Function for calculating a critical property of a substance from its
other two critical properties, but retrieving the actual other critical
values for convenient calculation.
Calls functions Ihmels, Meissner, and
Grigoras, each of which use a general 'Critical surface' type of equation.
Limited accuracy is expected due to very limited theoretical backing.
Parameters
----------
CASRN : string
The CAS number of the desired chemical
T : bool
Estimate critical temperature
P : bool
Estimate critical pressure
V : bool
Estimate critical volume
Returns
-------
Tc, Pc or Vc : float
Critical property of fluid [K], [Pa], or [m^3/mol]
Notes
-----
Avoids recursion only by eliminating the None and critical surface options
for calculating each critical property. So long as it never calls itself.
Note that when used by Tc, Pc or Vc, this function results in said function
calling the other functions (to determine methods) and (with method specified)
Examples
--------
>>> # Decamethyltetrasiloxane [141-62-8]
>>> third_property('141-62-8', V=True)
0.0010920041152263375
>>> # Succinic acid 110-15-6
>>> third_property('110-15-6', P=True)
6095016.233766234
"""
Third = None
if V:
Tc_methods = Tc(CASRN, AvailableMethods=True)[0:-2]
Pc_methods = Pc(CASRN, AvailableMethods=True)[0:-2]
if Tc_methods and Pc_methods:
_Tc = Tc(CASRN=CASRN, Method=Tc_methods[0])
_Pc = Pc(CASRN=CASRN, Method=Pc_methods[0])
Third = critical_surface(Tc=_Tc, Pc=_Pc, Vc=None) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif P:
Tc_methods = Tc(CASRN, AvailableMethods=True)[0:-2]
Vc_methods = Vc(CASRN, AvailableMethods=True)[0:-2]
if Tc_methods and Vc_methods:
_Tc = Tc(CASRN=CASRN, Method=Tc_methods[0])
_Vc = Vc(CASRN=CASRN, Method=Vc_methods[0])
Third = critical_surface(Tc=_Tc, Vc=_Vc, Pc=None) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif T:
Pc_methods = Pc(CASRN, AvailableMethods=True)[0:-2]
Vc_methods = Vc(CASRN, AvailableMethods=True)[0:-2]
if Pc_methods and Vc_methods:
_Pc = Pc(CASRN=CASRN, Method=Pc_methods[0])
_Vc = Vc(CASRN=CASRN, Method=Vc_methods[0])
Third = critical_surface(Pc=_Pc, Vc=_Vc, Tc=None) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
raise Exception('Error in function')
if not Third:
return None # depends on [control=['if'], data=[]]
return Third
|
def restore_descriptor(self, table_name, columns, constraints, autoincrement_column=None):
"""Restore descriptor from SQL
"""
# Fields
fields = []
for column in columns:
if column.name == autoincrement_column:
continue
field_type = self.restore_type(column.type)
field = {'name': column.name, 'type': field_type}
if not column.nullable:
field['constraints'] = {'required': True}
fields.append(field)
# Primary key
pk = []
for constraint in constraints:
if isinstance(constraint, sa.PrimaryKeyConstraint):
for column in constraint.columns:
if column.name == autoincrement_column:
continue
pk.append(column.name)
# Foreign keys
fks = []
if self.__dialect == 'postgresql':
for constraint in constraints:
if isinstance(constraint, sa.ForeignKeyConstraint):
resource = ''
own_fields = []
foreign_fields = []
for element in constraint.elements:
own_fields.append(element.parent.name)
if element.column.table.name != table_name:
resource = self.restore_bucket(element.column.table.name)
foreign_fields.append(element.column.name)
if len(own_fields) == len(foreign_fields) == 1:
own_fields = own_fields.pop()
foreign_fields = foreign_fields.pop()
fks.append({
'fields': own_fields,
'reference': {'resource': resource, 'fields': foreign_fields},
})
# Desscriptor
descriptor = {}
descriptor['fields'] = fields
if len(pk) > 0:
if len(pk) == 1:
pk = pk.pop()
descriptor['primaryKey'] = pk
if len(fks) > 0:
descriptor['foreignKeys'] = fks
return descriptor
|
def function[restore_descriptor, parameter[self, table_name, columns, constraints, autoincrement_column]]:
constant[Restore descriptor from SQL
]
variable[fields] assign[=] list[[]]
for taget[name[column]] in starred[name[columns]] begin[:]
if compare[name[column].name equal[==] name[autoincrement_column]] begin[:]
continue
variable[field_type] assign[=] call[name[self].restore_type, parameter[name[column].type]]
variable[field] assign[=] dictionary[[<ast.Constant object at 0x7da1b100e410>, <ast.Constant object at 0x7da1b100f4f0>], [<ast.Attribute object at 0x7da1b100ea10>, <ast.Name object at 0x7da1b100d000>]]
if <ast.UnaryOp object at 0x7da1b100dc60> begin[:]
call[name[field]][constant[constraints]] assign[=] dictionary[[<ast.Constant object at 0x7da1b100fbb0>], [<ast.Constant object at 0x7da1b100c490>]]
call[name[fields].append, parameter[name[field]]]
variable[pk] assign[=] list[[]]
for taget[name[constraint]] in starred[name[constraints]] begin[:]
if call[name[isinstance], parameter[name[constraint], name[sa].PrimaryKeyConstraint]] begin[:]
for taget[name[column]] in starred[name[constraint].columns] begin[:]
if compare[name[column].name equal[==] name[autoincrement_column]] begin[:]
continue
call[name[pk].append, parameter[name[column].name]]
variable[fks] assign[=] list[[]]
if compare[name[self].__dialect equal[==] constant[postgresql]] begin[:]
for taget[name[constraint]] in starred[name[constraints]] begin[:]
if call[name[isinstance], parameter[name[constraint], name[sa].ForeignKeyConstraint]] begin[:]
variable[resource] assign[=] constant[]
variable[own_fields] assign[=] list[[]]
variable[foreign_fields] assign[=] list[[]]
for taget[name[element]] in starred[name[constraint].elements] begin[:]
call[name[own_fields].append, parameter[name[element].parent.name]]
if compare[name[element].column.table.name not_equal[!=] name[table_name]] begin[:]
variable[resource] assign[=] call[name[self].restore_bucket, parameter[name[element].column.table.name]]
call[name[foreign_fields].append, parameter[name[element].column.name]]
if compare[call[name[len], parameter[name[own_fields]]] equal[==] call[name[len], parameter[name[foreign_fields]]]] begin[:]
variable[own_fields] assign[=] call[name[own_fields].pop, parameter[]]
variable[foreign_fields] assign[=] call[name[foreign_fields].pop, parameter[]]
call[name[fks].append, parameter[dictionary[[<ast.Constant object at 0x7da2045670a0>, <ast.Constant object at 0x7da2045653c0>], [<ast.Name object at 0x7da204566e30>, <ast.Dict object at 0x7da204565b70>]]]]
variable[descriptor] assign[=] dictionary[[], []]
call[name[descriptor]][constant[fields]] assign[=] name[fields]
if compare[call[name[len], parameter[name[pk]]] greater[>] constant[0]] begin[:]
if compare[call[name[len], parameter[name[pk]]] equal[==] constant[1]] begin[:]
variable[pk] assign[=] call[name[pk].pop, parameter[]]
call[name[descriptor]][constant[primaryKey]] assign[=] name[pk]
if compare[call[name[len], parameter[name[fks]]] greater[>] constant[0]] begin[:]
call[name[descriptor]][constant[foreignKeys]] assign[=] name[fks]
return[name[descriptor]]
|
keyword[def] identifier[restore_descriptor] ( identifier[self] , identifier[table_name] , identifier[columns] , identifier[constraints] , identifier[autoincrement_column] = keyword[None] ):
literal[string]
identifier[fields] =[]
keyword[for] identifier[column] keyword[in] identifier[columns] :
keyword[if] identifier[column] . identifier[name] == identifier[autoincrement_column] :
keyword[continue]
identifier[field_type] = identifier[self] . identifier[restore_type] ( identifier[column] . identifier[type] )
identifier[field] ={ literal[string] : identifier[column] . identifier[name] , literal[string] : identifier[field_type] }
keyword[if] keyword[not] identifier[column] . identifier[nullable] :
identifier[field] [ literal[string] ]={ literal[string] : keyword[True] }
identifier[fields] . identifier[append] ( identifier[field] )
identifier[pk] =[]
keyword[for] identifier[constraint] keyword[in] identifier[constraints] :
keyword[if] identifier[isinstance] ( identifier[constraint] , identifier[sa] . identifier[PrimaryKeyConstraint] ):
keyword[for] identifier[column] keyword[in] identifier[constraint] . identifier[columns] :
keyword[if] identifier[column] . identifier[name] == identifier[autoincrement_column] :
keyword[continue]
identifier[pk] . identifier[append] ( identifier[column] . identifier[name] )
identifier[fks] =[]
keyword[if] identifier[self] . identifier[__dialect] == literal[string] :
keyword[for] identifier[constraint] keyword[in] identifier[constraints] :
keyword[if] identifier[isinstance] ( identifier[constraint] , identifier[sa] . identifier[ForeignKeyConstraint] ):
identifier[resource] = literal[string]
identifier[own_fields] =[]
identifier[foreign_fields] =[]
keyword[for] identifier[element] keyword[in] identifier[constraint] . identifier[elements] :
identifier[own_fields] . identifier[append] ( identifier[element] . identifier[parent] . identifier[name] )
keyword[if] identifier[element] . identifier[column] . identifier[table] . identifier[name] != identifier[table_name] :
identifier[resource] = identifier[self] . identifier[restore_bucket] ( identifier[element] . identifier[column] . identifier[table] . identifier[name] )
identifier[foreign_fields] . identifier[append] ( identifier[element] . identifier[column] . identifier[name] )
keyword[if] identifier[len] ( identifier[own_fields] )== identifier[len] ( identifier[foreign_fields] )== literal[int] :
identifier[own_fields] = identifier[own_fields] . identifier[pop] ()
identifier[foreign_fields] = identifier[foreign_fields] . identifier[pop] ()
identifier[fks] . identifier[append] ({
literal[string] : identifier[own_fields] ,
literal[string] :{ literal[string] : identifier[resource] , literal[string] : identifier[foreign_fields] },
})
identifier[descriptor] ={}
identifier[descriptor] [ literal[string] ]= identifier[fields]
keyword[if] identifier[len] ( identifier[pk] )> literal[int] :
keyword[if] identifier[len] ( identifier[pk] )== literal[int] :
identifier[pk] = identifier[pk] . identifier[pop] ()
identifier[descriptor] [ literal[string] ]= identifier[pk]
keyword[if] identifier[len] ( identifier[fks] )> literal[int] :
identifier[descriptor] [ literal[string] ]= identifier[fks]
keyword[return] identifier[descriptor]
|
def restore_descriptor(self, table_name, columns, constraints, autoincrement_column=None):
"""Restore descriptor from SQL
"""
# Fields
fields = []
for column in columns:
if column.name == autoincrement_column:
continue # depends on [control=['if'], data=[]]
field_type = self.restore_type(column.type)
field = {'name': column.name, 'type': field_type}
if not column.nullable:
field['constraints'] = {'required': True} # depends on [control=['if'], data=[]]
fields.append(field) # depends on [control=['for'], data=['column']]
# Primary key
pk = []
for constraint in constraints:
if isinstance(constraint, sa.PrimaryKeyConstraint):
for column in constraint.columns:
if column.name == autoincrement_column:
continue # depends on [control=['if'], data=[]]
pk.append(column.name) # depends on [control=['for'], data=['column']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['constraint']]
# Foreign keys
fks = []
if self.__dialect == 'postgresql':
for constraint in constraints:
if isinstance(constraint, sa.ForeignKeyConstraint):
resource = ''
own_fields = []
foreign_fields = []
for element in constraint.elements:
own_fields.append(element.parent.name)
if element.column.table.name != table_name:
resource = self.restore_bucket(element.column.table.name) # depends on [control=['if'], data=[]]
foreign_fields.append(element.column.name) # depends on [control=['for'], data=['element']]
if len(own_fields) == len(foreign_fields) == 1:
own_fields = own_fields.pop()
foreign_fields = foreign_fields.pop() # depends on [control=['if'], data=[]]
fks.append({'fields': own_fields, 'reference': {'resource': resource, 'fields': foreign_fields}}) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['constraint']] # depends on [control=['if'], data=[]]
# Desscriptor
descriptor = {}
descriptor['fields'] = fields
if len(pk) > 0:
if len(pk) == 1:
pk = pk.pop() # depends on [control=['if'], data=[]]
descriptor['primaryKey'] = pk # depends on [control=['if'], data=[]]
if len(fks) > 0:
descriptor['foreignKeys'] = fks # depends on [control=['if'], data=[]]
return descriptor
|
def _read_mac_addr(self):
"""Read MAC address."""
_byte = self._read_fileng(6)
_addr = '-'.join(textwrap.wrap(_byte.hex(), 2))
return _addr
|
def function[_read_mac_addr, parameter[self]]:
constant[Read MAC address.]
variable[_byte] assign[=] call[name[self]._read_fileng, parameter[constant[6]]]
variable[_addr] assign[=] call[constant[-].join, parameter[call[name[textwrap].wrap, parameter[call[name[_byte].hex, parameter[]], constant[2]]]]]
return[name[_addr]]
|
keyword[def] identifier[_read_mac_addr] ( identifier[self] ):
literal[string]
identifier[_byte] = identifier[self] . identifier[_read_fileng] ( literal[int] )
identifier[_addr] = literal[string] . identifier[join] ( identifier[textwrap] . identifier[wrap] ( identifier[_byte] . identifier[hex] (), literal[int] ))
keyword[return] identifier[_addr]
|
def _read_mac_addr(self):
"""Read MAC address."""
_byte = self._read_fileng(6)
_addr = '-'.join(textwrap.wrap(_byte.hex(), 2))
return _addr
|
def _spot_check_that_elements_produced_by_this_generator_have_attribute(self, name):
"""
Helper function to spot-check that the items produces by this generator have the attribute `name`.
"""
g_tmp = self.values_gen.spawn()
sample_element = next(g_tmp)[0]
try:
getattr(sample_element, name)
except AttributeError:
raise AttributeError(f"Items produced by {self} do not have the attribute '{name}'")
|
def function[_spot_check_that_elements_produced_by_this_generator_have_attribute, parameter[self, name]]:
constant[
Helper function to spot-check that the items produces by this generator have the attribute `name`.
]
variable[g_tmp] assign[=] call[name[self].values_gen.spawn, parameter[]]
variable[sample_element] assign[=] call[call[name[next], parameter[name[g_tmp]]]][constant[0]]
<ast.Try object at 0x7da1b10c3520>
|
keyword[def] identifier[_spot_check_that_elements_produced_by_this_generator_have_attribute] ( identifier[self] , identifier[name] ):
literal[string]
identifier[g_tmp] = identifier[self] . identifier[values_gen] . identifier[spawn] ()
identifier[sample_element] = identifier[next] ( identifier[g_tmp] )[ literal[int] ]
keyword[try] :
identifier[getattr] ( identifier[sample_element] , identifier[name] )
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[AttributeError] ( literal[string] )
|
def _spot_check_that_elements_produced_by_this_generator_have_attribute(self, name):
"""
Helper function to spot-check that the items produces by this generator have the attribute `name`.
"""
g_tmp = self.values_gen.spawn()
sample_element = next(g_tmp)[0]
try:
getattr(sample_element, name) # depends on [control=['try'], data=[]]
except AttributeError:
raise AttributeError(f"Items produced by {self} do not have the attribute '{name}'") # depends on [control=['except'], data=[]]
|
def attendee_data(request, form, user_id=None):
''' Lists attendees for a given product/category selection along with
profile data.'''
status_display = {
commerce.Cart.STATUS_ACTIVE: "Unpaid",
commerce.Cart.STATUS_PAID: "Paid",
commerce.Cart.STATUS_RELEASED: "Refunded",
}
output = []
by_category = (
form.cleaned_data["group_by"] == forms.GroupByForm.GROUP_BY_CATEGORY)
products = form.cleaned_data["product"]
categories = form.cleaned_data["category"]
fields = form.cleaned_data["fields"]
name_field = AttendeeProfile.name_field()
items = commerce.ProductItem.objects.filter(
Q(product__in=products) | Q(product__category__in=categories),
).exclude(
cart__status=commerce.Cart.STATUS_RELEASED
).select_related(
"cart", "cart__user", "product", "product__category",
).order_by("cart__status")
# Add invoice nag link
links = []
invoice_mailout = reverse(views.invoice_mailout, args=[])
invoice_mailout += "?" + request.META["QUERY_STRING"]
links += [
(invoice_mailout + "&status=1", "Send invoice reminders",),
(invoice_mailout + "&status=2", "Send mail for paid invoices",),
]
if items.count() > 0:
output.append(Links("Actions", links))
# Make sure we select all of the related fields
related_fields = set(
field for field in fields
if isinstance(AttendeeProfile._meta.get_field(field), RelatedField)
)
# Get all of the relevant attendee profiles in one hit.
profiles = AttendeeProfile.objects.filter(
attendee__user__cart__productitem__in=items
).select_related("attendee__user").prefetch_related(*related_fields)
by_user = {}
for profile in profiles:
by_user[profile.attendee.user] = profile
cart = "attendee__user__cart"
cart_status = cart + "__status" # noqa
product = cart + "__productitem__product"
product_name = product + "__name"
category = product + "__category"
category_name = category + "__name"
if by_category:
grouping_fields = (category, category_name)
order_by = (category, )
first_column = "Category"
group_name = lambda i: "%s" % (i[category_name], ) # noqa
else:
grouping_fields = (product, product_name, category_name)
order_by = (category, )
first_column = "Product"
group_name = lambda i: "%s - %s" % (i[category_name], i[product_name]) # noqa
# Group the responses per-field.
for field in fields:
concrete_field = AttendeeProfile._meta.get_field(field)
field_verbose = concrete_field.verbose_name
# Render the correct values for related fields
if field in related_fields:
# Get all of the IDs that will appear
all_ids = profiles.order_by(field).values(field)
all_ids = [i[field] for i in all_ids if i[field] is not None]
# Get all of the concrete objects for those IDs
model = concrete_field.related_model
all_objects = model.objects.filter(id__in=all_ids)
all_objects_by_id = dict((i.id, i) for i in all_objects)
# Define a function to render those IDs.
def display_field(value):
if value in all_objects_by_id:
return all_objects_by_id[value]
else:
return None
else:
def display_field(value):
return value
status_count = lambda status: Case(When( # noqa
attendee__user__cart__status=status,
then=Value(1),
),
default=Value(0),
output_field=models.fields.IntegerField(),
)
paid_count = status_count(commerce.Cart.STATUS_PAID)
unpaid_count = status_count(commerce.Cart.STATUS_ACTIVE)
groups = profiles.order_by(
*(order_by + (field, ))
).values(
*(grouping_fields + (field, ))
).annotate(
paid_count=Sum(paid_count),
unpaid_count=Sum(unpaid_count),
)
output.append(ListReport(
"Grouped by %s" % field_verbose,
[first_column, field_verbose, "paid", "unpaid"],
[
(
group_name(group),
display_field(group[field]),
group["paid_count"] or 0,
group["unpaid_count"] or 0,
)
for group in groups
],
))
# DO the report for individual attendees
field_names = [
AttendeeProfile._meta.get_field(field).verbose_name for field in fields
]
def display_field(profile, field):
field_type = AttendeeProfile._meta.get_field(field)
attr = getattr(profile, field)
if isinstance(field_type, models.ManyToManyField):
return [str(i) for i in attr.all()] or ""
else:
return attr
headings = ["User ID", "Name", "Email", "Product", "Item Status"]
headings.extend(field_names)
data = []
for item in items:
profile = by_user[item.cart.user]
line = [
item.cart.user.id,
getattr(profile, name_field),
profile.attendee.user.email,
item.product,
status_display[item.cart.status],
] + [
display_field(profile, field) for field in fields
]
data.append(line)
output.append(AttendeeListReport(
"Attendees by item with profile data", headings, data,
link_view=attendee
))
return output
|
def function[attendee_data, parameter[request, form, user_id]]:
constant[ Lists attendees for a given product/category selection along with
profile data.]
variable[status_display] assign[=] dictionary[[<ast.Attribute object at 0x7da1b26afb20>, <ast.Attribute object at 0x7da1b26afbb0>, <ast.Attribute object at 0x7da1b26af400>], [<ast.Constant object at 0x7da1b26aec20>, <ast.Constant object at 0x7da1b26ad1e0>, <ast.Constant object at 0x7da1b26acf40>]]
variable[output] assign[=] list[[]]
variable[by_category] assign[=] compare[call[name[form].cleaned_data][constant[group_by]] equal[==] name[forms].GroupByForm.GROUP_BY_CATEGORY]
variable[products] assign[=] call[name[form].cleaned_data][constant[product]]
variable[categories] assign[=] call[name[form].cleaned_data][constant[category]]
variable[fields] assign[=] call[name[form].cleaned_data][constant[fields]]
variable[name_field] assign[=] call[name[AttendeeProfile].name_field, parameter[]]
variable[items] assign[=] call[call[call[call[name[commerce].ProductItem.objects.filter, parameter[binary_operation[call[name[Q], parameter[]] <ast.BitOr object at 0x7da2590d6aa0> call[name[Q], parameter[]]]]].exclude, parameter[]].select_related, parameter[constant[cart], constant[cart__user], constant[product], constant[product__category]]].order_by, parameter[constant[cart__status]]]
variable[links] assign[=] list[[]]
variable[invoice_mailout] assign[=] call[name[reverse], parameter[name[views].invoice_mailout]]
<ast.AugAssign object at 0x7da1b26ad660>
<ast.AugAssign object at 0x7da1b26af4f0>
if compare[call[name[items].count, parameter[]] greater[>] constant[0]] begin[:]
call[name[output].append, parameter[call[name[Links], parameter[constant[Actions], name[links]]]]]
variable[related_fields] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da1b26ad600>]]
variable[profiles] assign[=] call[call[call[name[AttendeeProfile].objects.filter, parameter[]].select_related, parameter[constant[attendee__user]]].prefetch_related, parameter[<ast.Starred object at 0x7da1b26af9a0>]]
variable[by_user] assign[=] dictionary[[], []]
for taget[name[profile]] in starred[name[profiles]] begin[:]
call[name[by_user]][name[profile].attendee.user] assign[=] name[profile]
variable[cart] assign[=] constant[attendee__user__cart]
variable[cart_status] assign[=] binary_operation[name[cart] + constant[__status]]
variable[product] assign[=] binary_operation[name[cart] + constant[__productitem__product]]
variable[product_name] assign[=] binary_operation[name[product] + constant[__name]]
variable[category] assign[=] binary_operation[name[product] + constant[__category]]
variable[category_name] assign[=] binary_operation[name[category] + constant[__name]]
if name[by_category] begin[:]
variable[grouping_fields] assign[=] tuple[[<ast.Name object at 0x7da1b26adf30>, <ast.Name object at 0x7da1b26adb40>]]
variable[order_by] assign[=] tuple[[<ast.Name object at 0x7da1b26ac4c0>]]
variable[first_column] assign[=] constant[Category]
variable[group_name] assign[=] <ast.Lambda object at 0x7da1b26aeb00>
for taget[name[field]] in starred[name[fields]] begin[:]
variable[concrete_field] assign[=] call[name[AttendeeProfile]._meta.get_field, parameter[name[field]]]
variable[field_verbose] assign[=] name[concrete_field].verbose_name
if compare[name[field] in name[related_fields]] begin[:]
variable[all_ids] assign[=] call[call[name[profiles].order_by, parameter[name[field]]].values, parameter[name[field]]]
variable[all_ids] assign[=] <ast.ListComp object at 0x7da1b26acee0>
variable[model] assign[=] name[concrete_field].related_model
variable[all_objects] assign[=] call[name[model].objects.filter, parameter[]]
variable[all_objects_by_id] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b26ae620>]]
def function[display_field, parameter[value]]:
if compare[name[value] in name[all_objects_by_id]] begin[:]
return[call[name[all_objects_by_id]][name[value]]]
variable[status_count] assign[=] <ast.Lambda object at 0x7da1b01b81c0>
variable[paid_count] assign[=] call[name[status_count], parameter[name[commerce].Cart.STATUS_PAID]]
variable[unpaid_count] assign[=] call[name[status_count], parameter[name[commerce].Cart.STATUS_ACTIVE]]
variable[groups] assign[=] call[call[call[name[profiles].order_by, parameter[<ast.Starred object at 0x7da1b01ba500>]].values, parameter[<ast.Starred object at 0x7da1b01b8940>]].annotate, parameter[]]
call[name[output].append, parameter[call[name[ListReport], parameter[binary_operation[constant[Grouped by %s] <ast.Mod object at 0x7da2590d6920> name[field_verbose]], list[[<ast.Name object at 0x7da1b01b9e70>, <ast.Name object at 0x7da1b01b8070>, <ast.Constant object at 0x7da1b01b98a0>, <ast.Constant object at 0x7da1b01ba8f0>]], <ast.ListComp object at 0x7da1b01bb730>]]]]
variable[field_names] assign[=] <ast.ListComp object at 0x7da20c794cd0>
def function[display_field, parameter[profile, field]]:
variable[field_type] assign[=] call[name[AttendeeProfile]._meta.get_field, parameter[name[field]]]
variable[attr] assign[=] call[name[getattr], parameter[name[profile], name[field]]]
if call[name[isinstance], parameter[name[field_type], name[models].ManyToManyField]] begin[:]
return[<ast.BoolOp object at 0x7da2044c3fa0>]
variable[headings] assign[=] list[[<ast.Constant object at 0x7da2044c3b50>, <ast.Constant object at 0x7da2044c1780>, <ast.Constant object at 0x7da2044c0550>, <ast.Constant object at 0x7da2044c2c80>, <ast.Constant object at 0x7da2044c1390>]]
call[name[headings].extend, parameter[name[field_names]]]
variable[data] assign[=] list[[]]
for taget[name[item]] in starred[name[items]] begin[:]
variable[profile] assign[=] call[name[by_user]][name[item].cart.user]
variable[line] assign[=] binary_operation[list[[<ast.Attribute object at 0x7da2044c0370>, <ast.Call object at 0x7da2044c1810>, <ast.Attribute object at 0x7da2044c2950>, <ast.Attribute object at 0x7da2044c38e0>, <ast.Subscript object at 0x7da2044c24d0>]] + <ast.ListComp object at 0x7da2044c1480>]
call[name[data].append, parameter[name[line]]]
call[name[output].append, parameter[call[name[AttendeeListReport], parameter[constant[Attendees by item with profile data], name[headings], name[data]]]]]
return[name[output]]
|
keyword[def] identifier[attendee_data] ( identifier[request] , identifier[form] , identifier[user_id] = keyword[None] ):
literal[string]
identifier[status_display] ={
identifier[commerce] . identifier[Cart] . identifier[STATUS_ACTIVE] : literal[string] ,
identifier[commerce] . identifier[Cart] . identifier[STATUS_PAID] : literal[string] ,
identifier[commerce] . identifier[Cart] . identifier[STATUS_RELEASED] : literal[string] ,
}
identifier[output] =[]
identifier[by_category] =(
identifier[form] . identifier[cleaned_data] [ literal[string] ]== identifier[forms] . identifier[GroupByForm] . identifier[GROUP_BY_CATEGORY] )
identifier[products] = identifier[form] . identifier[cleaned_data] [ literal[string] ]
identifier[categories] = identifier[form] . identifier[cleaned_data] [ literal[string] ]
identifier[fields] = identifier[form] . identifier[cleaned_data] [ literal[string] ]
identifier[name_field] = identifier[AttendeeProfile] . identifier[name_field] ()
identifier[items] = identifier[commerce] . identifier[ProductItem] . identifier[objects] . identifier[filter] (
identifier[Q] ( identifier[product__in] = identifier[products] )| identifier[Q] ( identifier[product__category__in] = identifier[categories] ),
). identifier[exclude] (
identifier[cart__status] = identifier[commerce] . identifier[Cart] . identifier[STATUS_RELEASED]
). identifier[select_related] (
literal[string] , literal[string] , literal[string] , literal[string] ,
). identifier[order_by] ( literal[string] )
identifier[links] =[]
identifier[invoice_mailout] = identifier[reverse] ( identifier[views] . identifier[invoice_mailout] , identifier[args] =[])
identifier[invoice_mailout] += literal[string] + identifier[request] . identifier[META] [ literal[string] ]
identifier[links] +=[
( identifier[invoice_mailout] + literal[string] , literal[string] ,),
( identifier[invoice_mailout] + literal[string] , literal[string] ,),
]
keyword[if] identifier[items] . identifier[count] ()> literal[int] :
identifier[output] . identifier[append] ( identifier[Links] ( literal[string] , identifier[links] ))
identifier[related_fields] = identifier[set] (
identifier[field] keyword[for] identifier[field] keyword[in] identifier[fields]
keyword[if] identifier[isinstance] ( identifier[AttendeeProfile] . identifier[_meta] . identifier[get_field] ( identifier[field] ), identifier[RelatedField] )
)
identifier[profiles] = identifier[AttendeeProfile] . identifier[objects] . identifier[filter] (
identifier[attendee__user__cart__productitem__in] = identifier[items]
). identifier[select_related] ( literal[string] ). identifier[prefetch_related] (* identifier[related_fields] )
identifier[by_user] ={}
keyword[for] identifier[profile] keyword[in] identifier[profiles] :
identifier[by_user] [ identifier[profile] . identifier[attendee] . identifier[user] ]= identifier[profile]
identifier[cart] = literal[string]
identifier[cart_status] = identifier[cart] + literal[string]
identifier[product] = identifier[cart] + literal[string]
identifier[product_name] = identifier[product] + literal[string]
identifier[category] = identifier[product] + literal[string]
identifier[category_name] = identifier[category] + literal[string]
keyword[if] identifier[by_category] :
identifier[grouping_fields] =( identifier[category] , identifier[category_name] )
identifier[order_by] =( identifier[category] ,)
identifier[first_column] = literal[string]
identifier[group_name] = keyword[lambda] identifier[i] : literal[string] %( identifier[i] [ identifier[category_name] ],)
keyword[else] :
identifier[grouping_fields] =( identifier[product] , identifier[product_name] , identifier[category_name] )
identifier[order_by] =( identifier[category] ,)
identifier[first_column] = literal[string]
identifier[group_name] = keyword[lambda] identifier[i] : literal[string] %( identifier[i] [ identifier[category_name] ], identifier[i] [ identifier[product_name] ])
keyword[for] identifier[field] keyword[in] identifier[fields] :
identifier[concrete_field] = identifier[AttendeeProfile] . identifier[_meta] . identifier[get_field] ( identifier[field] )
identifier[field_verbose] = identifier[concrete_field] . identifier[verbose_name]
keyword[if] identifier[field] keyword[in] identifier[related_fields] :
identifier[all_ids] = identifier[profiles] . identifier[order_by] ( identifier[field] ). identifier[values] ( identifier[field] )
identifier[all_ids] =[ identifier[i] [ identifier[field] ] keyword[for] identifier[i] keyword[in] identifier[all_ids] keyword[if] identifier[i] [ identifier[field] ] keyword[is] keyword[not] keyword[None] ]
identifier[model] = identifier[concrete_field] . identifier[related_model]
identifier[all_objects] = identifier[model] . identifier[objects] . identifier[filter] ( identifier[id__in] = identifier[all_ids] )
identifier[all_objects_by_id] = identifier[dict] (( identifier[i] . identifier[id] , identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[all_objects] )
keyword[def] identifier[display_field] ( identifier[value] ):
keyword[if] identifier[value] keyword[in] identifier[all_objects_by_id] :
keyword[return] identifier[all_objects_by_id] [ identifier[value] ]
keyword[else] :
keyword[return] keyword[None]
keyword[else] :
keyword[def] identifier[display_field] ( identifier[value] ):
keyword[return] identifier[value]
identifier[status_count] = keyword[lambda] identifier[status] : identifier[Case] ( identifier[When] (
identifier[attendee__user__cart__status] = identifier[status] ,
identifier[then] = identifier[Value] ( literal[int] ),
),
identifier[default] = identifier[Value] ( literal[int] ),
identifier[output_field] = identifier[models] . identifier[fields] . identifier[IntegerField] (),
)
identifier[paid_count] = identifier[status_count] ( identifier[commerce] . identifier[Cart] . identifier[STATUS_PAID] )
identifier[unpaid_count] = identifier[status_count] ( identifier[commerce] . identifier[Cart] . identifier[STATUS_ACTIVE] )
identifier[groups] = identifier[profiles] . identifier[order_by] (
*( identifier[order_by] +( identifier[field] ,))
). identifier[values] (
*( identifier[grouping_fields] +( identifier[field] ,))
). identifier[annotate] (
identifier[paid_count] = identifier[Sum] ( identifier[paid_count] ),
identifier[unpaid_count] = identifier[Sum] ( identifier[unpaid_count] ),
)
identifier[output] . identifier[append] ( identifier[ListReport] (
literal[string] % identifier[field_verbose] ,
[ identifier[first_column] , identifier[field_verbose] , literal[string] , literal[string] ],
[
(
identifier[group_name] ( identifier[group] ),
identifier[display_field] ( identifier[group] [ identifier[field] ]),
identifier[group] [ literal[string] ] keyword[or] literal[int] ,
identifier[group] [ literal[string] ] keyword[or] literal[int] ,
)
keyword[for] identifier[group] keyword[in] identifier[groups]
],
))
identifier[field_names] =[
identifier[AttendeeProfile] . identifier[_meta] . identifier[get_field] ( identifier[field] ). identifier[verbose_name] keyword[for] identifier[field] keyword[in] identifier[fields]
]
keyword[def] identifier[display_field] ( identifier[profile] , identifier[field] ):
identifier[field_type] = identifier[AttendeeProfile] . identifier[_meta] . identifier[get_field] ( identifier[field] )
identifier[attr] = identifier[getattr] ( identifier[profile] , identifier[field] )
keyword[if] identifier[isinstance] ( identifier[field_type] , identifier[models] . identifier[ManyToManyField] ):
keyword[return] [ identifier[str] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[attr] . identifier[all] ()] keyword[or] literal[string]
keyword[else] :
keyword[return] identifier[attr]
identifier[headings] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[headings] . identifier[extend] ( identifier[field_names] )
identifier[data] =[]
keyword[for] identifier[item] keyword[in] identifier[items] :
identifier[profile] = identifier[by_user] [ identifier[item] . identifier[cart] . identifier[user] ]
identifier[line] =[
identifier[item] . identifier[cart] . identifier[user] . identifier[id] ,
identifier[getattr] ( identifier[profile] , identifier[name_field] ),
identifier[profile] . identifier[attendee] . identifier[user] . identifier[email] ,
identifier[item] . identifier[product] ,
identifier[status_display] [ identifier[item] . identifier[cart] . identifier[status] ],
]+[
identifier[display_field] ( identifier[profile] , identifier[field] ) keyword[for] identifier[field] keyword[in] identifier[fields]
]
identifier[data] . identifier[append] ( identifier[line] )
identifier[output] . identifier[append] ( identifier[AttendeeListReport] (
literal[string] , identifier[headings] , identifier[data] ,
identifier[link_view] = identifier[attendee]
))
keyword[return] identifier[output]
|
def attendee_data(request, form, user_id=None):
""" Lists attendees for a given product/category selection along with
profile data."""
status_display = {commerce.Cart.STATUS_ACTIVE: 'Unpaid', commerce.Cart.STATUS_PAID: 'Paid', commerce.Cart.STATUS_RELEASED: 'Refunded'}
output = []
by_category = form.cleaned_data['group_by'] == forms.GroupByForm.GROUP_BY_CATEGORY
products = form.cleaned_data['product']
categories = form.cleaned_data['category']
fields = form.cleaned_data['fields']
name_field = AttendeeProfile.name_field()
items = commerce.ProductItem.objects.filter(Q(product__in=products) | Q(product__category__in=categories)).exclude(cart__status=commerce.Cart.STATUS_RELEASED).select_related('cart', 'cart__user', 'product', 'product__category').order_by('cart__status')
# Add invoice nag link
links = []
invoice_mailout = reverse(views.invoice_mailout, args=[])
invoice_mailout += '?' + request.META['QUERY_STRING']
links += [(invoice_mailout + '&status=1', 'Send invoice reminders'), (invoice_mailout + '&status=2', 'Send mail for paid invoices')]
if items.count() > 0:
output.append(Links('Actions', links)) # depends on [control=['if'], data=[]]
# Make sure we select all of the related fields
related_fields = set((field for field in fields if isinstance(AttendeeProfile._meta.get_field(field), RelatedField)))
# Get all of the relevant attendee profiles in one hit.
profiles = AttendeeProfile.objects.filter(attendee__user__cart__productitem__in=items).select_related('attendee__user').prefetch_related(*related_fields)
by_user = {}
for profile in profiles:
by_user[profile.attendee.user] = profile # depends on [control=['for'], data=['profile']]
cart = 'attendee__user__cart'
cart_status = cart + '__status' # noqa
product = cart + '__productitem__product'
product_name = product + '__name'
category = product + '__category'
category_name = category + '__name'
if by_category:
grouping_fields = (category, category_name)
order_by = (category,)
first_column = 'Category'
group_name = lambda i: '%s' % (i[category_name],) # noqa # depends on [control=['if'], data=[]]
else:
grouping_fields = (product, product_name, category_name)
order_by = (category,)
first_column = 'Product'
group_name = lambda i: '%s - %s' % (i[category_name], i[product_name]) # noqa
# Group the responses per-field.
for field in fields:
concrete_field = AttendeeProfile._meta.get_field(field)
field_verbose = concrete_field.verbose_name
# Render the correct values for related fields
if field in related_fields:
# Get all of the IDs that will appear
all_ids = profiles.order_by(field).values(field)
all_ids = [i[field] for i in all_ids if i[field] is not None]
# Get all of the concrete objects for those IDs
model = concrete_field.related_model
all_objects = model.objects.filter(id__in=all_ids)
all_objects_by_id = dict(((i.id, i) for i in all_objects))
# Define a function to render those IDs.
def display_field(value):
if value in all_objects_by_id:
return all_objects_by_id[value] # depends on [control=['if'], data=['value', 'all_objects_by_id']]
else:
return None # depends on [control=['if'], data=['field']]
else:
def display_field(value):
return value # noqa
status_count = lambda status: Case(When(attendee__user__cart__status=status, then=Value(1)), default=Value(0), output_field=models.fields.IntegerField())
paid_count = status_count(commerce.Cart.STATUS_PAID)
unpaid_count = status_count(commerce.Cart.STATUS_ACTIVE)
groups = profiles.order_by(*order_by + (field,)).values(*grouping_fields + (field,)).annotate(paid_count=Sum(paid_count), unpaid_count=Sum(unpaid_count))
output.append(ListReport('Grouped by %s' % field_verbose, [first_column, field_verbose, 'paid', 'unpaid'], [(group_name(group), display_field(group[field]), group['paid_count'] or 0, group['unpaid_count'] or 0) for group in groups])) # depends on [control=['for'], data=['field']]
# DO the report for individual attendees
field_names = [AttendeeProfile._meta.get_field(field).verbose_name for field in fields]
def display_field(profile, field):
field_type = AttendeeProfile._meta.get_field(field)
attr = getattr(profile, field)
if isinstance(field_type, models.ManyToManyField):
return [str(i) for i in attr.all()] or '' # depends on [control=['if'], data=[]]
else:
return attr
headings = ['User ID', 'Name', 'Email', 'Product', 'Item Status']
headings.extend(field_names)
data = []
for item in items:
profile = by_user[item.cart.user]
line = [item.cart.user.id, getattr(profile, name_field), profile.attendee.user.email, item.product, status_display[item.cart.status]] + [display_field(profile, field) for field in fields]
data.append(line) # depends on [control=['for'], data=['item']]
output.append(AttendeeListReport('Attendees by item with profile data', headings, data, link_view=attendee))
return output
|
def replace_widgets(self, widgets, team_context, dashboard_id, eTag=None):
"""ReplaceWidgets.
[Preview API] Replace the widgets on specified dashboard with the supplied widgets.
:param [Widget] widgets: Revised state of widgets to store for the dashboard.
:param :class:`<TeamContext> <azure.devops.v5_0.dashboard.models.TeamContext>` team_context: The team context for the operation
:param str dashboard_id: ID of the Dashboard to modify.
:param String eTag: Dashboard Widgets Version
:rtype: :class:`<WidgetsVersionedList> <azure.devops.v5_0.dashboard.models.WidgetsVersionedList>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if dashboard_id is not None:
route_values['dashboardId'] = self._serialize.url('dashboard_id', dashboard_id, 'str')
content = self._serialize.body(widgets, '[Widget]')
response = self._send(http_method='PUT',
location_id='bdcff53a-8355-4172-a00a-40497ea23afc',
version='5.0-preview.2',
route_values=route_values,
content=content)
response_object = models.WidgetsVersionedList()
response_object.widgets = self._deserialize('[Widget]', self._unwrap_collection(response))
response_object.eTag = response.headers.get('ETag')
return response_object
|
def function[replace_widgets, parameter[self, widgets, team_context, dashboard_id, eTag]]:
constant[ReplaceWidgets.
[Preview API] Replace the widgets on specified dashboard with the supplied widgets.
:param [Widget] widgets: Revised state of widgets to store for the dashboard.
:param :class:`<TeamContext> <azure.devops.v5_0.dashboard.models.TeamContext>` team_context: The team context for the operation
:param str dashboard_id: ID of the Dashboard to modify.
:param String eTag: Dashboard Widgets Version
:rtype: :class:`<WidgetsVersionedList> <azure.devops.v5_0.dashboard.models.WidgetsVersionedList>`
]
variable[project] assign[=] constant[None]
variable[team] assign[=] constant[None]
if compare[name[team_context] is_not constant[None]] begin[:]
if name[team_context].project_id begin[:]
variable[project] assign[=] name[team_context].project_id
if name[team_context].team_id begin[:]
variable[team] assign[=] name[team_context].team_id
variable[route_values] assign[=] dictionary[[], []]
if compare[name[project] is_not constant[None]] begin[:]
call[name[route_values]][constant[project]] assign[=] call[name[self]._serialize.url, parameter[constant[project], name[project], constant[string]]]
if compare[name[team] is_not constant[None]] begin[:]
call[name[route_values]][constant[team]] assign[=] call[name[self]._serialize.url, parameter[constant[team], name[team], constant[string]]]
if compare[name[dashboard_id] is_not constant[None]] begin[:]
call[name[route_values]][constant[dashboardId]] assign[=] call[name[self]._serialize.url, parameter[constant[dashboard_id], name[dashboard_id], constant[str]]]
variable[content] assign[=] call[name[self]._serialize.body, parameter[name[widgets], constant[[Widget]]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
variable[response_object] assign[=] call[name[models].WidgetsVersionedList, parameter[]]
name[response_object].widgets assign[=] call[name[self]._deserialize, parameter[constant[[Widget]], call[name[self]._unwrap_collection, parameter[name[response]]]]]
name[response_object].eTag assign[=] call[name[response].headers.get, parameter[constant[ETag]]]
return[name[response_object]]
|
keyword[def] identifier[replace_widgets] ( identifier[self] , identifier[widgets] , identifier[team_context] , identifier[dashboard_id] , identifier[eTag] = keyword[None] ):
literal[string]
identifier[project] = keyword[None]
identifier[team] = keyword[None]
keyword[if] identifier[team_context] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[team_context] . identifier[project_id] :
identifier[project] = identifier[team_context] . identifier[project_id]
keyword[else] :
identifier[project] = identifier[team_context] . identifier[project]
keyword[if] identifier[team_context] . identifier[team_id] :
identifier[team] = identifier[team_context] . identifier[team_id]
keyword[else] :
identifier[team] = identifier[team_context] . identifier[team]
identifier[route_values] ={}
keyword[if] identifier[project] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[project] , literal[string] )
keyword[if] identifier[team] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[team] , literal[string] )
keyword[if] identifier[dashboard_id] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[dashboard_id] , literal[string] )
identifier[content] = identifier[self] . identifier[_serialize] . identifier[body] ( identifier[widgets] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[route_values] = identifier[route_values] ,
identifier[content] = identifier[content] )
identifier[response_object] = identifier[models] . identifier[WidgetsVersionedList] ()
identifier[response_object] . identifier[widgets] = identifier[self] . identifier[_deserialize] ( literal[string] , identifier[self] . identifier[_unwrap_collection] ( identifier[response] ))
identifier[response_object] . identifier[eTag] = identifier[response] . identifier[headers] . identifier[get] ( literal[string] )
keyword[return] identifier[response_object]
|
def replace_widgets(self, widgets, team_context, dashboard_id, eTag=None):
"""ReplaceWidgets.
[Preview API] Replace the widgets on specified dashboard with the supplied widgets.
:param [Widget] widgets: Revised state of widgets to store for the dashboard.
:param :class:`<TeamContext> <azure.devops.v5_0.dashboard.models.TeamContext>` team_context: The team context for the operation
:param str dashboard_id: ID of the Dashboard to modify.
:param String eTag: Dashboard Widgets Version
:rtype: :class:`<WidgetsVersionedList> <azure.devops.v5_0.dashboard.models.WidgetsVersionedList>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id # depends on [control=['if'], data=[]]
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id # depends on [control=['if'], data=[]]
else:
team = team_context.team # depends on [control=['if'], data=['team_context']]
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string') # depends on [control=['if'], data=['project']]
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string') # depends on [control=['if'], data=['team']]
if dashboard_id is not None:
route_values['dashboardId'] = self._serialize.url('dashboard_id', dashboard_id, 'str') # depends on [control=['if'], data=['dashboard_id']]
content = self._serialize.body(widgets, '[Widget]')
response = self._send(http_method='PUT', location_id='bdcff53a-8355-4172-a00a-40497ea23afc', version='5.0-preview.2', route_values=route_values, content=content)
response_object = models.WidgetsVersionedList()
response_object.widgets = self._deserialize('[Widget]', self._unwrap_collection(response))
response_object.eTag = response.headers.get('ETag')
return response_object
|
def _get_related_fields(self, model):
"Returns the names of all related fields for model class."
reverse_fk = self._get_all_related_objects(model)
reverse_m2m = self._get_all_related_many_to_many_objects(model)
fields = tuple(reverse_fk + reverse_m2m)
names = tuple([x.get_accessor_name() for x in fields])
return {
':related': dict(list(zip(names, fields))),
}
|
def function[_get_related_fields, parameter[self, model]]:
constant[Returns the names of all related fields for model class.]
variable[reverse_fk] assign[=] call[name[self]._get_all_related_objects, parameter[name[model]]]
variable[reverse_m2m] assign[=] call[name[self]._get_all_related_many_to_many_objects, parameter[name[model]]]
variable[fields] assign[=] call[name[tuple], parameter[binary_operation[name[reverse_fk] + name[reverse_m2m]]]]
variable[names] assign[=] call[name[tuple], parameter[<ast.ListComp object at 0x7da1b25895a0>]]
return[dictionary[[<ast.Constant object at 0x7da1b258a1d0>], [<ast.Call object at 0x7da1b2589ae0>]]]
|
keyword[def] identifier[_get_related_fields] ( identifier[self] , identifier[model] ):
literal[string]
identifier[reverse_fk] = identifier[self] . identifier[_get_all_related_objects] ( identifier[model] )
identifier[reverse_m2m] = identifier[self] . identifier[_get_all_related_many_to_many_objects] ( identifier[model] )
identifier[fields] = identifier[tuple] ( identifier[reverse_fk] + identifier[reverse_m2m] )
identifier[names] = identifier[tuple] ([ identifier[x] . identifier[get_accessor_name] () keyword[for] identifier[x] keyword[in] identifier[fields] ])
keyword[return] {
literal[string] : identifier[dict] ( identifier[list] ( identifier[zip] ( identifier[names] , identifier[fields] ))),
}
|
def _get_related_fields(self, model):
"""Returns the names of all related fields for model class."""
reverse_fk = self._get_all_related_objects(model)
reverse_m2m = self._get_all_related_many_to_many_objects(model)
fields = tuple(reverse_fk + reverse_m2m)
names = tuple([x.get_accessor_name() for x in fields])
return {':related': dict(list(zip(names, fields)))}
|
def remove_highdepth_regions(in_file, items):
"""Remove high depth regions from a BED file for analyzing a set of calls.
Tries to avoid spurious errors and slow run times in collapsed repeat regions.
Also adds ENCODE blacklist regions which capture additional collapsed repeats
around centromeres.
"""
encode_bed = tz.get_in(["genome_resources", "variation", "encode_blacklist"], items[0])
if encode_bed and os.path.exists(encode_bed):
return _remove_regions(in_file, [encode_bed], "glimit", items[0])
else:
return in_file
|
def function[remove_highdepth_regions, parameter[in_file, items]]:
constant[Remove high depth regions from a BED file for analyzing a set of calls.
Tries to avoid spurious errors and slow run times in collapsed repeat regions.
Also adds ENCODE blacklist regions which capture additional collapsed repeats
around centromeres.
]
variable[encode_bed] assign[=] call[name[tz].get_in, parameter[list[[<ast.Constant object at 0x7da1b17aae60>, <ast.Constant object at 0x7da1b17aaec0>, <ast.Constant object at 0x7da1b17aafb0>]], call[name[items]][constant[0]]]]
if <ast.BoolOp object at 0x7da1b17ab1c0> begin[:]
return[call[name[_remove_regions], parameter[name[in_file], list[[<ast.Name object at 0x7da1b17aa080>]], constant[glimit], call[name[items]][constant[0]]]]]
|
keyword[def] identifier[remove_highdepth_regions] ( identifier[in_file] , identifier[items] ):
literal[string]
identifier[encode_bed] = identifier[tz] . identifier[get_in] ([ literal[string] , literal[string] , literal[string] ], identifier[items] [ literal[int] ])
keyword[if] identifier[encode_bed] keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[encode_bed] ):
keyword[return] identifier[_remove_regions] ( identifier[in_file] ,[ identifier[encode_bed] ], literal[string] , identifier[items] [ literal[int] ])
keyword[else] :
keyword[return] identifier[in_file]
|
def remove_highdepth_regions(in_file, items):
"""Remove high depth regions from a BED file for analyzing a set of calls.
Tries to avoid spurious errors and slow run times in collapsed repeat regions.
Also adds ENCODE blacklist regions which capture additional collapsed repeats
around centromeres.
"""
encode_bed = tz.get_in(['genome_resources', 'variation', 'encode_blacklist'], items[0])
if encode_bed and os.path.exists(encode_bed):
return _remove_regions(in_file, [encode_bed], 'glimit', items[0]) # depends on [control=['if'], data=[]]
else:
return in_file
|
def grab_key(conn, wid, modifiers, key):
"""
Grabs a key for a particular window and a modifiers/key value.
If the grab was successful, return True. Otherwise, return False.
If your client is grabbing keys, it is useful to notify the user if a
key wasn't grabbed. Keyboard shortcuts not responding is disorienting!
Also, this function will grab several keys based on varying modifiers.
Namely, this accounts for all of the "trivial" modifiers that may have
an effect on X events, but probably shouldn't effect key grabbing. (i.e.,
whether num lock or caps lock is on.)
N.B. You should probably be using 'bind_key' or 'bind_global_key' instead.
:param wid: A window identifier.
:type wid: int
:param modifiers: A modifier mask.
:type modifiers: int
:param key: A keycode.
:type key: int
:rtype: bool
"""
try:
for mod in TRIVIAL_MODS:
conn.core.GrabKeyChecked(True, wid, modifiers | mod, key, GM.Async,
GM.Async).check()
return True
except xproto.BadAccess:
return False
|
def function[grab_key, parameter[conn, wid, modifiers, key]]:
constant[
Grabs a key for a particular window and a modifiers/key value.
If the grab was successful, return True. Otherwise, return False.
If your client is grabbing keys, it is useful to notify the user if a
key wasn't grabbed. Keyboard shortcuts not responding is disorienting!
Also, this function will grab several keys based on varying modifiers.
Namely, this accounts for all of the "trivial" modifiers that may have
an effect on X events, but probably shouldn't effect key grabbing. (i.e.,
whether num lock or caps lock is on.)
N.B. You should probably be using 'bind_key' or 'bind_global_key' instead.
:param wid: A window identifier.
:type wid: int
:param modifiers: A modifier mask.
:type modifiers: int
:param key: A keycode.
:type key: int
:rtype: bool
]
<ast.Try object at 0x7da1b0eb93f0>
|
keyword[def] identifier[grab_key] ( identifier[conn] , identifier[wid] , identifier[modifiers] , identifier[key] ):
literal[string]
keyword[try] :
keyword[for] identifier[mod] keyword[in] identifier[TRIVIAL_MODS] :
identifier[conn] . identifier[core] . identifier[GrabKeyChecked] ( keyword[True] , identifier[wid] , identifier[modifiers] | identifier[mod] , identifier[key] , identifier[GM] . identifier[Async] ,
identifier[GM] . identifier[Async] ). identifier[check] ()
keyword[return] keyword[True]
keyword[except] identifier[xproto] . identifier[BadAccess] :
keyword[return] keyword[False]
|
def grab_key(conn, wid, modifiers, key):
"""
Grabs a key for a particular window and a modifiers/key value.
If the grab was successful, return True. Otherwise, return False.
If your client is grabbing keys, it is useful to notify the user if a
key wasn't grabbed. Keyboard shortcuts not responding is disorienting!
Also, this function will grab several keys based on varying modifiers.
Namely, this accounts for all of the "trivial" modifiers that may have
an effect on X events, but probably shouldn't effect key grabbing. (i.e.,
whether num lock or caps lock is on.)
N.B. You should probably be using 'bind_key' or 'bind_global_key' instead.
:param wid: A window identifier.
:type wid: int
:param modifiers: A modifier mask.
:type modifiers: int
:param key: A keycode.
:type key: int
:rtype: bool
"""
try:
for mod in TRIVIAL_MODS:
conn.core.GrabKeyChecked(True, wid, modifiers | mod, key, GM.Async, GM.Async).check() # depends on [control=['for'], data=['mod']]
return True # depends on [control=['try'], data=[]]
except xproto.BadAccess:
return False # depends on [control=['except'], data=[]]
|
def supported_languages(self, task=None):
"""Languages that are covered by a specific task.
Args:
task (string): Task name.
"""
if task:
collection = self.get_collection(task=task)
return [isoLangs[x.id.split('.')[1]]["name"]
for x in collection.packages]
else:
return [x.name.split()[0] for x in self.collections()
if Downloader.LANG_PREFIX in x.id]
|
def function[supported_languages, parameter[self, task]]:
constant[Languages that are covered by a specific task.
Args:
task (string): Task name.
]
if name[task] begin[:]
variable[collection] assign[=] call[name[self].get_collection, parameter[]]
return[<ast.ListComp object at 0x7da20c6aad10>]
|
keyword[def] identifier[supported_languages] ( identifier[self] , identifier[task] = keyword[None] ):
literal[string]
keyword[if] identifier[task] :
identifier[collection] = identifier[self] . identifier[get_collection] ( identifier[task] = identifier[task] )
keyword[return] [ identifier[isoLangs] [ identifier[x] . identifier[id] . identifier[split] ( literal[string] )[ literal[int] ]][ literal[string] ]
keyword[for] identifier[x] keyword[in] identifier[collection] . identifier[packages] ]
keyword[else] :
keyword[return] [ identifier[x] . identifier[name] . identifier[split] ()[ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[collections] ()
keyword[if] identifier[Downloader] . identifier[LANG_PREFIX] keyword[in] identifier[x] . identifier[id] ]
|
def supported_languages(self, task=None):
"""Languages that are covered by a specific task.
Args:
task (string): Task name.
"""
if task:
collection = self.get_collection(task=task)
return [isoLangs[x.id.split('.')[1]]['name'] for x in collection.packages] # depends on [control=['if'], data=[]]
else:
return [x.name.split()[0] for x in self.collections() if Downloader.LANG_PREFIX in x.id]
|
def sql_solid(name, select_statement, materialization_strategy, table_name=None, inputs=None):
'''Return a new solid that executes and materializes a SQL select statement.
Args:
name (str): The name of the new solid.
select_statement (str): The select statement to execute.
materialization_strategy (str): Must be 'table', the only currently supported
materialization strategy. If 'table', the kwarg `table_name` must also be passed.
Kwargs:
table_name (str): THe name of the new table to create, if the materialization strategy
is 'table'. Default: None.
inputs (list[InputDefinition]): Inputs, if any, for the new solid. Default: None.
Returns:
function:
The new SQL solid.
'''
inputs = check.opt_list_param(inputs, 'inputs', InputDefinition)
materialization_strategy_output_types = { # pylint:disable=C0103
'table': SqlTableName,
# 'view': String,
# 'query': SqlAlchemyQueryType,
# 'subquery': SqlAlchemySubqueryType,
# 'result_proxy': SqlAlchemyResultProxyType,
# could also materialize as a Pandas table, as a Spark table, as an intermediate file, etc.
}
if materialization_strategy not in materialization_strategy_output_types:
raise Exception(
'Invalid materialization strategy {materialization_strategy}, must '
'be one of {materialization_strategies}'.format(
materialization_strategy=materialization_strategy,
materialization_strategies=str(list(materialization_strategy_output_types.keys())),
)
)
if materialization_strategy == 'table':
if table_name is None:
raise Exception('Missing table_name: required for materialization strategy \'table\'')
output_description = (
'The string name of the new table created by the solid'
if materialization_strategy == 'table'
else 'The materialized SQL statement. If the materialization_strategy is '
'\'table\', this is the string name of the new table created by the solid.'
)
description = '''This solid executes the following SQL statement:
{select_statement}'''.format(
select_statement=select_statement
)
# n.b., we will eventually want to make this resources key configurable
sql_statement = (
'drop table if exists {table_name};\n' 'create table {table_name} as {select_statement};'
).format(table_name=table_name, select_statement=select_statement)
def transform_fn(context, _inputs):
'''Inner function defining the new solid.
Args:
context (TransformExecutionContext): Must expose a `db` resource with an `execute` method,
like a SQLAlchemy engine, that can execute raw SQL against a database.
Returns:
str:
The table name of the newly materialized SQL select statement.
'''
context.log.info(
'Executing sql statement:\n{sql_statement}'.format(sql_statement=sql_statement)
)
context.resources.db_info.engine.execute(text(sql_statement))
yield Result(value=table_name, output_name='result')
return SolidDefinition(
name=name,
inputs=inputs,
outputs=[
OutputDefinition(
materialization_strategy_output_types[materialization_strategy],
description=output_description,
)
],
transform_fn=transform_fn,
description=description,
metadata={'kind': 'sql', 'sql': sql_statement},
)
|
def function[sql_solid, parameter[name, select_statement, materialization_strategy, table_name, inputs]]:
constant[Return a new solid that executes and materializes a SQL select statement.
Args:
name (str): The name of the new solid.
select_statement (str): The select statement to execute.
materialization_strategy (str): Must be 'table', the only currently supported
materialization strategy. If 'table', the kwarg `table_name` must also be passed.
Kwargs:
table_name (str): THe name of the new table to create, if the materialization strategy
is 'table'. Default: None.
inputs (list[InputDefinition]): Inputs, if any, for the new solid. Default: None.
Returns:
function:
The new SQL solid.
]
variable[inputs] assign[=] call[name[check].opt_list_param, parameter[name[inputs], constant[inputs], name[InputDefinition]]]
variable[materialization_strategy_output_types] assign[=] dictionary[[<ast.Constant object at 0x7da1b0381990>], [<ast.Name object at 0x7da1b0381180>]]
if compare[name[materialization_strategy] <ast.NotIn object at 0x7da2590d7190> name[materialization_strategy_output_types]] begin[:]
<ast.Raise object at 0x7da1b03809a0>
if compare[name[materialization_strategy] equal[==] constant[table]] begin[:]
if compare[name[table_name] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0383700>
variable[output_description] assign[=] <ast.IfExp object at 0x7da1b0381420>
variable[description] assign[=] call[constant[This solid executes the following SQL statement:
{select_statement}].format, parameter[]]
variable[sql_statement] assign[=] call[constant[drop table if exists {table_name};
create table {table_name} as {select_statement};].format, parameter[]]
def function[transform_fn, parameter[context, _inputs]]:
constant[Inner function defining the new solid.
Args:
context (TransformExecutionContext): Must expose a `db` resource with an `execute` method,
like a SQLAlchemy engine, that can execute raw SQL against a database.
Returns:
str:
The table name of the newly materialized SQL select statement.
]
call[name[context].log.info, parameter[call[constant[Executing sql statement:
{sql_statement}].format, parameter[]]]]
call[name[context].resources.db_info.engine.execute, parameter[call[name[text], parameter[name[sql_statement]]]]]
<ast.Yield object at 0x7da1b059ef50>
return[call[name[SolidDefinition], parameter[]]]
|
keyword[def] identifier[sql_solid] ( identifier[name] , identifier[select_statement] , identifier[materialization_strategy] , identifier[table_name] = keyword[None] , identifier[inputs] = keyword[None] ):
literal[string]
identifier[inputs] = identifier[check] . identifier[opt_list_param] ( identifier[inputs] , literal[string] , identifier[InputDefinition] )
identifier[materialization_strategy_output_types] ={
literal[string] : identifier[SqlTableName] ,
}
keyword[if] identifier[materialization_strategy] keyword[not] keyword[in] identifier[materialization_strategy_output_types] :
keyword[raise] identifier[Exception] (
literal[string]
literal[string] . identifier[format] (
identifier[materialization_strategy] = identifier[materialization_strategy] ,
identifier[materialization_strategies] = identifier[str] ( identifier[list] ( identifier[materialization_strategy_output_types] . identifier[keys] ())),
)
)
keyword[if] identifier[materialization_strategy] == literal[string] :
keyword[if] identifier[table_name] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[output_description] =(
literal[string]
keyword[if] identifier[materialization_strategy] == literal[string]
keyword[else] literal[string]
literal[string]
)
identifier[description] = literal[string] . identifier[format] (
identifier[select_statement] = identifier[select_statement]
)
identifier[sql_statement] =(
literal[string] literal[string]
). identifier[format] ( identifier[table_name] = identifier[table_name] , identifier[select_statement] = identifier[select_statement] )
keyword[def] identifier[transform_fn] ( identifier[context] , identifier[_inputs] ):
literal[string]
identifier[context] . identifier[log] . identifier[info] (
literal[string] . identifier[format] ( identifier[sql_statement] = identifier[sql_statement] )
)
identifier[context] . identifier[resources] . identifier[db_info] . identifier[engine] . identifier[execute] ( identifier[text] ( identifier[sql_statement] ))
keyword[yield] identifier[Result] ( identifier[value] = identifier[table_name] , identifier[output_name] = literal[string] )
keyword[return] identifier[SolidDefinition] (
identifier[name] = identifier[name] ,
identifier[inputs] = identifier[inputs] ,
identifier[outputs] =[
identifier[OutputDefinition] (
identifier[materialization_strategy_output_types] [ identifier[materialization_strategy] ],
identifier[description] = identifier[output_description] ,
)
],
identifier[transform_fn] = identifier[transform_fn] ,
identifier[description] = identifier[description] ,
identifier[metadata] ={ literal[string] : literal[string] , literal[string] : identifier[sql_statement] },
)
|
def sql_solid(name, select_statement, materialization_strategy, table_name=None, inputs=None):
"""Return a new solid that executes and materializes a SQL select statement.
Args:
name (str): The name of the new solid.
select_statement (str): The select statement to execute.
materialization_strategy (str): Must be 'table', the only currently supported
materialization strategy. If 'table', the kwarg `table_name` must also be passed.
Kwargs:
table_name (str): THe name of the new table to create, if the materialization strategy
is 'table'. Default: None.
inputs (list[InputDefinition]): Inputs, if any, for the new solid. Default: None.
Returns:
function:
The new SQL solid.
"""
inputs = check.opt_list_param(inputs, 'inputs', InputDefinition) # pylint:disable=C0103
# 'view': String,
# 'query': SqlAlchemyQueryType,
# 'subquery': SqlAlchemySubqueryType,
# 'result_proxy': SqlAlchemyResultProxyType,
# could also materialize as a Pandas table, as a Spark table, as an intermediate file, etc.
materialization_strategy_output_types = {'table': SqlTableName}
if materialization_strategy not in materialization_strategy_output_types:
raise Exception('Invalid materialization strategy {materialization_strategy}, must be one of {materialization_strategies}'.format(materialization_strategy=materialization_strategy, materialization_strategies=str(list(materialization_strategy_output_types.keys())))) # depends on [control=['if'], data=['materialization_strategy', 'materialization_strategy_output_types']]
if materialization_strategy == 'table':
if table_name is None:
raise Exception("Missing table_name: required for materialization strategy 'table'") # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
output_description = 'The string name of the new table created by the solid' if materialization_strategy == 'table' else "The materialized SQL statement. If the materialization_strategy is 'table', this is the string name of the new table created by the solid."
description = 'This solid executes the following SQL statement:\n {select_statement}'.format(select_statement=select_statement)
# n.b., we will eventually want to make this resources key configurable
sql_statement = 'drop table if exists {table_name};\ncreate table {table_name} as {select_statement};'.format(table_name=table_name, select_statement=select_statement)
def transform_fn(context, _inputs):
"""Inner function defining the new solid.
Args:
context (TransformExecutionContext): Must expose a `db` resource with an `execute` method,
like a SQLAlchemy engine, that can execute raw SQL against a database.
Returns:
str:
The table name of the newly materialized SQL select statement.
"""
context.log.info('Executing sql statement:\n{sql_statement}'.format(sql_statement=sql_statement))
context.resources.db_info.engine.execute(text(sql_statement))
yield Result(value=table_name, output_name='result')
return SolidDefinition(name=name, inputs=inputs, outputs=[OutputDefinition(materialization_strategy_output_types[materialization_strategy], description=output_description)], transform_fn=transform_fn, description=description, metadata={'kind': 'sql', 'sql': sql_statement})
|
def elasticsearch(delete, index_name):
"""Setup Elasticsearch namespace indexes
This will by default only create the indexes and run the namespace index mapping
if the indexes don't exist. The --delete option will force removal of the
index if it exists.
The index_name should be aliased to the index 'terms' when it's ready"""
if delete:
bel.db.elasticsearch.get_client(delete=True)
else:
bel.db.elasticsearch.get_client()
|
def function[elasticsearch, parameter[delete, index_name]]:
constant[Setup Elasticsearch namespace indexes
This will by default only create the indexes and run the namespace index mapping
if the indexes don't exist. The --delete option will force removal of the
index if it exists.
The index_name should be aliased to the index 'terms' when it's ready]
if name[delete] begin[:]
call[name[bel].db.elasticsearch.get_client, parameter[]]
|
keyword[def] identifier[elasticsearch] ( identifier[delete] , identifier[index_name] ):
literal[string]
keyword[if] identifier[delete] :
identifier[bel] . identifier[db] . identifier[elasticsearch] . identifier[get_client] ( identifier[delete] = keyword[True] )
keyword[else] :
identifier[bel] . identifier[db] . identifier[elasticsearch] . identifier[get_client] ()
|
def elasticsearch(delete, index_name):
"""Setup Elasticsearch namespace indexes
This will by default only create the indexes and run the namespace index mapping
if the indexes don't exist. The --delete option will force removal of the
index if it exists.
The index_name should be aliased to the index 'terms' when it's ready"""
if delete:
bel.db.elasticsearch.get_client(delete=True) # depends on [control=['if'], data=[]]
else:
bel.db.elasticsearch.get_client()
|
def rank_reference_paragraphs(wiki_title, references_content, normalize=True):
"""Rank and return reference paragraphs by tf-idf score on title tokens."""
normalized_title = _normalize_text(wiki_title)
title_tokens = _tokens_to_score(
set(tokenizer.encode(text_encoder.native_to_unicode(normalized_title))))
ref_paragraph_info = []
doc_counts = collections.defaultdict(int)
for ref in references_content:
for paragraph in ref.split("\n"):
normalized_paragraph = _normalize_text(paragraph)
if cc_utils.filter_paragraph(normalized_paragraph):
# Skip paragraph
continue
counts = _token_counts(normalized_paragraph, title_tokens)
for token in title_tokens:
if counts[token]:
doc_counts[token] += 1
content = normalized_paragraph if normalize else paragraph
info = {"content": content, "counts": counts}
ref_paragraph_info.append(info)
for info in ref_paragraph_info:
score = 0.
for token in title_tokens:
term_frequency = info["counts"][token]
inv_doc_frequency = (
float(len(ref_paragraph_info)) / max(doc_counts[token], 1))
score += term_frequency * math.log(inv_doc_frequency)
info["score"] = score
ref_paragraph_info.sort(key=lambda el: el["score"], reverse=True)
return [info["content"] for info in ref_paragraph_info]
|
def function[rank_reference_paragraphs, parameter[wiki_title, references_content, normalize]]:
constant[Rank and return reference paragraphs by tf-idf score on title tokens.]
variable[normalized_title] assign[=] call[name[_normalize_text], parameter[name[wiki_title]]]
variable[title_tokens] assign[=] call[name[_tokens_to_score], parameter[call[name[set], parameter[call[name[tokenizer].encode, parameter[call[name[text_encoder].native_to_unicode, parameter[name[normalized_title]]]]]]]]]
variable[ref_paragraph_info] assign[=] list[[]]
variable[doc_counts] assign[=] call[name[collections].defaultdict, parameter[name[int]]]
for taget[name[ref]] in starred[name[references_content]] begin[:]
for taget[name[paragraph]] in starred[call[name[ref].split, parameter[constant[
]]]] begin[:]
variable[normalized_paragraph] assign[=] call[name[_normalize_text], parameter[name[paragraph]]]
if call[name[cc_utils].filter_paragraph, parameter[name[normalized_paragraph]]] begin[:]
continue
variable[counts] assign[=] call[name[_token_counts], parameter[name[normalized_paragraph], name[title_tokens]]]
for taget[name[token]] in starred[name[title_tokens]] begin[:]
if call[name[counts]][name[token]] begin[:]
<ast.AugAssign object at 0x7da1b2059090>
variable[content] assign[=] <ast.IfExp object at 0x7da1b205b6d0>
variable[info] assign[=] dictionary[[<ast.Constant object at 0x7da1b205bbb0>, <ast.Constant object at 0x7da1b205bac0>], [<ast.Name object at 0x7da1b2058f40>, <ast.Name object at 0x7da1b205b010>]]
call[name[ref_paragraph_info].append, parameter[name[info]]]
for taget[name[info]] in starred[name[ref_paragraph_info]] begin[:]
variable[score] assign[=] constant[0.0]
for taget[name[token]] in starred[name[title_tokens]] begin[:]
variable[term_frequency] assign[=] call[call[name[info]][constant[counts]]][name[token]]
variable[inv_doc_frequency] assign[=] binary_operation[call[name[float], parameter[call[name[len], parameter[name[ref_paragraph_info]]]]] / call[name[max], parameter[call[name[doc_counts]][name[token]], constant[1]]]]
<ast.AugAssign object at 0x7da1b2058cd0>
call[name[info]][constant[score]] assign[=] name[score]
call[name[ref_paragraph_info].sort, parameter[]]
return[<ast.ListComp object at 0x7da1b2059d20>]
|
keyword[def] identifier[rank_reference_paragraphs] ( identifier[wiki_title] , identifier[references_content] , identifier[normalize] = keyword[True] ):
literal[string]
identifier[normalized_title] = identifier[_normalize_text] ( identifier[wiki_title] )
identifier[title_tokens] = identifier[_tokens_to_score] (
identifier[set] ( identifier[tokenizer] . identifier[encode] ( identifier[text_encoder] . identifier[native_to_unicode] ( identifier[normalized_title] ))))
identifier[ref_paragraph_info] =[]
identifier[doc_counts] = identifier[collections] . identifier[defaultdict] ( identifier[int] )
keyword[for] identifier[ref] keyword[in] identifier[references_content] :
keyword[for] identifier[paragraph] keyword[in] identifier[ref] . identifier[split] ( literal[string] ):
identifier[normalized_paragraph] = identifier[_normalize_text] ( identifier[paragraph] )
keyword[if] identifier[cc_utils] . identifier[filter_paragraph] ( identifier[normalized_paragraph] ):
keyword[continue]
identifier[counts] = identifier[_token_counts] ( identifier[normalized_paragraph] , identifier[title_tokens] )
keyword[for] identifier[token] keyword[in] identifier[title_tokens] :
keyword[if] identifier[counts] [ identifier[token] ]:
identifier[doc_counts] [ identifier[token] ]+= literal[int]
identifier[content] = identifier[normalized_paragraph] keyword[if] identifier[normalize] keyword[else] identifier[paragraph]
identifier[info] ={ literal[string] : identifier[content] , literal[string] : identifier[counts] }
identifier[ref_paragraph_info] . identifier[append] ( identifier[info] )
keyword[for] identifier[info] keyword[in] identifier[ref_paragraph_info] :
identifier[score] = literal[int]
keyword[for] identifier[token] keyword[in] identifier[title_tokens] :
identifier[term_frequency] = identifier[info] [ literal[string] ][ identifier[token] ]
identifier[inv_doc_frequency] =(
identifier[float] ( identifier[len] ( identifier[ref_paragraph_info] ))/ identifier[max] ( identifier[doc_counts] [ identifier[token] ], literal[int] ))
identifier[score] += identifier[term_frequency] * identifier[math] . identifier[log] ( identifier[inv_doc_frequency] )
identifier[info] [ literal[string] ]= identifier[score]
identifier[ref_paragraph_info] . identifier[sort] ( identifier[key] = keyword[lambda] identifier[el] : identifier[el] [ literal[string] ], identifier[reverse] = keyword[True] )
keyword[return] [ identifier[info] [ literal[string] ] keyword[for] identifier[info] keyword[in] identifier[ref_paragraph_info] ]
|
def rank_reference_paragraphs(wiki_title, references_content, normalize=True):
"""Rank and return reference paragraphs by tf-idf score on title tokens."""
normalized_title = _normalize_text(wiki_title)
title_tokens = _tokens_to_score(set(tokenizer.encode(text_encoder.native_to_unicode(normalized_title))))
ref_paragraph_info = []
doc_counts = collections.defaultdict(int)
for ref in references_content:
for paragraph in ref.split('\n'):
normalized_paragraph = _normalize_text(paragraph)
if cc_utils.filter_paragraph(normalized_paragraph):
# Skip paragraph
continue # depends on [control=['if'], data=[]]
counts = _token_counts(normalized_paragraph, title_tokens)
for token in title_tokens:
if counts[token]:
doc_counts[token] += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['token']]
content = normalized_paragraph if normalize else paragraph
info = {'content': content, 'counts': counts}
ref_paragraph_info.append(info) # depends on [control=['for'], data=['paragraph']] # depends on [control=['for'], data=['ref']]
for info in ref_paragraph_info:
score = 0.0
for token in title_tokens:
term_frequency = info['counts'][token]
inv_doc_frequency = float(len(ref_paragraph_info)) / max(doc_counts[token], 1)
score += term_frequency * math.log(inv_doc_frequency) # depends on [control=['for'], data=['token']]
info['score'] = score # depends on [control=['for'], data=['info']]
ref_paragraph_info.sort(key=lambda el: el['score'], reverse=True)
return [info['content'] for info in ref_paragraph_info]
|
def filter(self, value=None, model=None, context=None):
"""
Sequentially applies all the filters to provided value
:param value: a value to filter
:param model: parent entity
:param context: filtering context, usually parent entity
:return: filtered value
"""
if value is None:
return value
for filter_obj in self.filters:
value = filter_obj.filter(
value=value,
model=model,
context=context if self.use_context else None
)
return value
|
def function[filter, parameter[self, value, model, context]]:
constant[
Sequentially applies all the filters to provided value
:param value: a value to filter
:param model: parent entity
:param context: filtering context, usually parent entity
:return: filtered value
]
if compare[name[value] is constant[None]] begin[:]
return[name[value]]
for taget[name[filter_obj]] in starred[name[self].filters] begin[:]
variable[value] assign[=] call[name[filter_obj].filter, parameter[]]
return[name[value]]
|
keyword[def] identifier[filter] ( identifier[self] , identifier[value] = keyword[None] , identifier[model] = keyword[None] , identifier[context] = keyword[None] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[return] identifier[value]
keyword[for] identifier[filter_obj] keyword[in] identifier[self] . identifier[filters] :
identifier[value] = identifier[filter_obj] . identifier[filter] (
identifier[value] = identifier[value] ,
identifier[model] = identifier[model] ,
identifier[context] = identifier[context] keyword[if] identifier[self] . identifier[use_context] keyword[else] keyword[None]
)
keyword[return] identifier[value]
|
def filter(self, value=None, model=None, context=None):
"""
Sequentially applies all the filters to provided value
:param value: a value to filter
:param model: parent entity
:param context: filtering context, usually parent entity
:return: filtered value
"""
if value is None:
return value # depends on [control=['if'], data=['value']]
for filter_obj in self.filters:
value = filter_obj.filter(value=value, model=model, context=context if self.use_context else None) # depends on [control=['for'], data=['filter_obj']]
return value
|
def decode(self, hashid):
"""Restore a tuple of numbers from the passed `hashid`.
:param hashid The hashid to decode
>>> hashids = Hashids('arbitrary salt', 16, 'abcdefghijkl0123456')
>>> hashids.decode('1d6216i30h53elk3')
(1, 23, 456)
"""
if not hashid or not _is_str(hashid):
return ()
try:
numbers = tuple(_decode(hashid, self._salt, self._alphabet,
self._separators, self._guards))
return numbers if hashid == self.encode(*numbers) else ()
except ValueError:
return ()
|
def function[decode, parameter[self, hashid]]:
constant[Restore a tuple of numbers from the passed `hashid`.
:param hashid The hashid to decode
>>> hashids = Hashids('arbitrary salt', 16, 'abcdefghijkl0123456')
>>> hashids.decode('1d6216i30h53elk3')
(1, 23, 456)
]
if <ast.BoolOp object at 0x7da20c796260> begin[:]
return[tuple[[]]]
<ast.Try object at 0x7da20c795c60>
|
keyword[def] identifier[decode] ( identifier[self] , identifier[hashid] ):
literal[string]
keyword[if] keyword[not] identifier[hashid] keyword[or] keyword[not] identifier[_is_str] ( identifier[hashid] ):
keyword[return] ()
keyword[try] :
identifier[numbers] = identifier[tuple] ( identifier[_decode] ( identifier[hashid] , identifier[self] . identifier[_salt] , identifier[self] . identifier[_alphabet] ,
identifier[self] . identifier[_separators] , identifier[self] . identifier[_guards] ))
keyword[return] identifier[numbers] keyword[if] identifier[hashid] == identifier[self] . identifier[encode] (* identifier[numbers] ) keyword[else] ()
keyword[except] identifier[ValueError] :
keyword[return] ()
|
def decode(self, hashid):
"""Restore a tuple of numbers from the passed `hashid`.
:param hashid The hashid to decode
>>> hashids = Hashids('arbitrary salt', 16, 'abcdefghijkl0123456')
>>> hashids.decode('1d6216i30h53elk3')
(1, 23, 456)
"""
if not hashid or not _is_str(hashid):
return () # depends on [control=['if'], data=[]]
try:
numbers = tuple(_decode(hashid, self._salt, self._alphabet, self._separators, self._guards))
return numbers if hashid == self.encode(*numbers) else () # depends on [control=['try'], data=[]]
except ValueError:
return () # depends on [control=['except'], data=[]]
|
def getJsonFromApi(view, request):
"""Return json from querying Web Api
Args:
view: django view function.
request: http request object got from django.
Returns: json format dictionary
"""
jsonText = view(request)
jsonText = json.loads(jsonText.content.decode('utf-8'))
return jsonText
|
def function[getJsonFromApi, parameter[view, request]]:
constant[Return json from querying Web Api
Args:
view: django view function.
request: http request object got from django.
Returns: json format dictionary
]
variable[jsonText] assign[=] call[name[view], parameter[name[request]]]
variable[jsonText] assign[=] call[name[json].loads, parameter[call[name[jsonText].content.decode, parameter[constant[utf-8]]]]]
return[name[jsonText]]
|
keyword[def] identifier[getJsonFromApi] ( identifier[view] , identifier[request] ):
literal[string]
identifier[jsonText] = identifier[view] ( identifier[request] )
identifier[jsonText] = identifier[json] . identifier[loads] ( identifier[jsonText] . identifier[content] . identifier[decode] ( literal[string] ))
keyword[return] identifier[jsonText]
|
def getJsonFromApi(view, request):
"""Return json from querying Web Api
Args:
view: django view function.
request: http request object got from django.
Returns: json format dictionary
"""
jsonText = view(request)
jsonText = json.loads(jsonText.content.decode('utf-8'))
return jsonText
|
def list_all_available(self, id_vlan):
"""
List all environment vips availables
:return: Following dictionary:
::
{'environment_vip': [{'id': <id>,
'finalidade_txt': <finalidade_txt>,
'cliente_txt': <cliente_txt>,
'ambiente_p44_txt': <ambiente_p44_txt> }
{... other environments vip ...}]}
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response.
"""
url = 'environmentvip/search/' + str(id_vlan)
code, xml = self.submit(None, 'GET', url)
key = 'environment_vip'
return get_list_map(self.response(code, xml, [key]), key)
|
def function[list_all_available, parameter[self, id_vlan]]:
constant[
List all environment vips availables
:return: Following dictionary:
::
{'environment_vip': [{'id': <id>,
'finalidade_txt': <finalidade_txt>,
'cliente_txt': <cliente_txt>,
'ambiente_p44_txt': <ambiente_p44_txt> }
{... other environments vip ...}]}
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response.
]
variable[url] assign[=] binary_operation[constant[environmentvip/search/] + call[name[str], parameter[name[id_vlan]]]]
<ast.Tuple object at 0x7da1b2344190> assign[=] call[name[self].submit, parameter[constant[None], constant[GET], name[url]]]
variable[key] assign[=] constant[environment_vip]
return[call[name[get_list_map], parameter[call[name[self].response, parameter[name[code], name[xml], list[[<ast.Name object at 0x7da1b2344160>]]]], name[key]]]]
|
keyword[def] identifier[list_all_available] ( identifier[self] , identifier[id_vlan] ):
literal[string]
identifier[url] = literal[string] + identifier[str] ( identifier[id_vlan] )
identifier[code] , identifier[xml] = identifier[self] . identifier[submit] ( keyword[None] , literal[string] , identifier[url] )
identifier[key] = literal[string]
keyword[return] identifier[get_list_map] ( identifier[self] . identifier[response] ( identifier[code] , identifier[xml] ,[ identifier[key] ]), identifier[key] )
|
def list_all_available(self, id_vlan):
"""
List all environment vips availables
:return: Following dictionary:
::
{'environment_vip': [{'id': <id>,
'finalidade_txt': <finalidade_txt>,
'cliente_txt': <cliente_txt>,
'ambiente_p44_txt': <ambiente_p44_txt> }
{... other environments vip ...}]}
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response.
"""
url = 'environmentvip/search/' + str(id_vlan)
(code, xml) = self.submit(None, 'GET', url)
key = 'environment_vip'
return get_list_map(self.response(code, xml, [key]), key)
|
def find_closed_date_by_commit(self, issue):
"""
Fill "actual_date" parameter of specified issue by closed date of
the commit, if it was closed by commit.
:param dict issue: issue to edit
"""
if not issue.get('events'):
return
# if it's PR -> then find "merged event", in case
# of usual issue -> find closed date
compare_string = "merged" if 'merged_at' in issue else "closed"
# reverse! - to find latest closed event. (event goes in date order)
# if it were reopened and closed again.
issue['events'].reverse()
found_date = False
for event in issue['events']:
if event["event"] == compare_string:
self.set_date_from_event(event, issue)
found_date = True
break
if not found_date:
# TODO: assert issues, that remain without
# 'actual_date' hash for some reason.
print("\nWARNING: Issue without 'actual_date':"
" #{0} {1}".format(issue["number"], issue["title"]))
|
def function[find_closed_date_by_commit, parameter[self, issue]]:
constant[
Fill "actual_date" parameter of specified issue by closed date of
the commit, if it was closed by commit.
:param dict issue: issue to edit
]
if <ast.UnaryOp object at 0x7da18f00f5b0> begin[:]
return[None]
variable[compare_string] assign[=] <ast.IfExp object at 0x7da18f00e740>
call[call[name[issue]][constant[events]].reverse, parameter[]]
variable[found_date] assign[=] constant[False]
for taget[name[event]] in starred[call[name[issue]][constant[events]]] begin[:]
if compare[call[name[event]][constant[event]] equal[==] name[compare_string]] begin[:]
call[name[self].set_date_from_event, parameter[name[event], name[issue]]]
variable[found_date] assign[=] constant[True]
break
if <ast.UnaryOp object at 0x7da1b00d8e20> begin[:]
call[name[print], parameter[call[constant[
WARNING: Issue without 'actual_date': #{0} {1}].format, parameter[call[name[issue]][constant[number]], call[name[issue]][constant[title]]]]]]
|
keyword[def] identifier[find_closed_date_by_commit] ( identifier[self] , identifier[issue] ):
literal[string]
keyword[if] keyword[not] identifier[issue] . identifier[get] ( literal[string] ):
keyword[return]
identifier[compare_string] = literal[string] keyword[if] literal[string] keyword[in] identifier[issue] keyword[else] literal[string]
identifier[issue] [ literal[string] ]. identifier[reverse] ()
identifier[found_date] = keyword[False]
keyword[for] identifier[event] keyword[in] identifier[issue] [ literal[string] ]:
keyword[if] identifier[event] [ literal[string] ]== identifier[compare_string] :
identifier[self] . identifier[set_date_from_event] ( identifier[event] , identifier[issue] )
identifier[found_date] = keyword[True]
keyword[break]
keyword[if] keyword[not] identifier[found_date] :
identifier[print] ( literal[string]
literal[string] . identifier[format] ( identifier[issue] [ literal[string] ], identifier[issue] [ literal[string] ]))
|
def find_closed_date_by_commit(self, issue):
"""
Fill "actual_date" parameter of specified issue by closed date of
the commit, if it was closed by commit.
:param dict issue: issue to edit
"""
if not issue.get('events'):
return # depends on [control=['if'], data=[]]
# if it's PR -> then find "merged event", in case
# of usual issue -> find closed date
compare_string = 'merged' if 'merged_at' in issue else 'closed'
# reverse! - to find latest closed event. (event goes in date order)
# if it were reopened and closed again.
issue['events'].reverse()
found_date = False
for event in issue['events']:
if event['event'] == compare_string:
self.set_date_from_event(event, issue)
found_date = True
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['event']]
if not found_date:
# TODO: assert issues, that remain without
# 'actual_date' hash for some reason.
print("\nWARNING: Issue without 'actual_date': #{0} {1}".format(issue['number'], issue['title'])) # depends on [control=['if'], data=[]]
|
def deflections_from_grid(self, grid):
"""
Calculate the deflection angles at a given set of arc-second gridded coordinates.
For coordinates (0.0, 0.0) the analytic calculation of the deflection angle gives a NaN. Therefore, \
coordinates at (0.0, 0.0) are shifted slightly to (1.0e-8, 1.0e-8).
Parameters
----------
grid : grids.RegularGrid
The grid of (y,x) arc-second coordinates the deflection angles are computed on.
"""
factor = 2.0 * self.einstein_radius_rescaled * self.axis_ratio / np.sqrt(1 - self.axis_ratio ** 2)
psi = np.sqrt(np.add(np.multiply(self.axis_ratio ** 2, np.square(grid[:, 1])), np.square(grid[:, 0])))
deflection_y = np.arctanh(np.divide(np.multiply(np.sqrt(1 - self.axis_ratio ** 2), grid[:, 0]), psi))
deflection_x = np.arctan(np.divide(np.multiply(np.sqrt(1 - self.axis_ratio ** 2), grid[:, 1]), psi))
return self.rotate_grid_from_profile(np.multiply(factor, np.vstack((deflection_y, deflection_x)).T))
|
def function[deflections_from_grid, parameter[self, grid]]:
constant[
Calculate the deflection angles at a given set of arc-second gridded coordinates.
For coordinates (0.0, 0.0) the analytic calculation of the deflection angle gives a NaN. Therefore, coordinates at (0.0, 0.0) are shifted slightly to (1.0e-8, 1.0e-8).
Parameters
----------
grid : grids.RegularGrid
The grid of (y,x) arc-second coordinates the deflection angles are computed on.
]
variable[factor] assign[=] binary_operation[binary_operation[binary_operation[constant[2.0] * name[self].einstein_radius_rescaled] * name[self].axis_ratio] / call[name[np].sqrt, parameter[binary_operation[constant[1] - binary_operation[name[self].axis_ratio ** constant[2]]]]]]
variable[psi] assign[=] call[name[np].sqrt, parameter[call[name[np].add, parameter[call[name[np].multiply, parameter[binary_operation[name[self].axis_ratio ** constant[2]], call[name[np].square, parameter[call[name[grid]][tuple[[<ast.Slice object at 0x7da20c76ff10>, <ast.Constant object at 0x7da20c76f0a0>]]]]]]], call[name[np].square, parameter[call[name[grid]][tuple[[<ast.Slice object at 0x7da20c76fdc0>, <ast.Constant object at 0x7da20c76f070>]]]]]]]]]
variable[deflection_y] assign[=] call[name[np].arctanh, parameter[call[name[np].divide, parameter[call[name[np].multiply, parameter[call[name[np].sqrt, parameter[binary_operation[constant[1] - binary_operation[name[self].axis_ratio ** constant[2]]]]], call[name[grid]][tuple[[<ast.Slice object at 0x7da20c76fa60>, <ast.Constant object at 0x7da20c76d1b0>]]]]], name[psi]]]]]
variable[deflection_x] assign[=] call[name[np].arctan, parameter[call[name[np].divide, parameter[call[name[np].multiply, parameter[call[name[np].sqrt, parameter[binary_operation[constant[1] - binary_operation[name[self].axis_ratio ** constant[2]]]]], call[name[grid]][tuple[[<ast.Slice object at 0x7da20c76f730>, <ast.Constant object at 0x7da20c76f880>]]]]], name[psi]]]]]
return[call[name[self].rotate_grid_from_profile, parameter[call[name[np].multiply, parameter[name[factor], call[name[np].vstack, parameter[tuple[[<ast.Name object at 0x7da204622830>, <ast.Name object at 0x7da204622b30>]]]].T]]]]]
|
keyword[def] identifier[deflections_from_grid] ( identifier[self] , identifier[grid] ):
literal[string]
identifier[factor] = literal[int] * identifier[self] . identifier[einstein_radius_rescaled] * identifier[self] . identifier[axis_ratio] / identifier[np] . identifier[sqrt] ( literal[int] - identifier[self] . identifier[axis_ratio] ** literal[int] )
identifier[psi] = identifier[np] . identifier[sqrt] ( identifier[np] . identifier[add] ( identifier[np] . identifier[multiply] ( identifier[self] . identifier[axis_ratio] ** literal[int] , identifier[np] . identifier[square] ( identifier[grid] [:, literal[int] ])), identifier[np] . identifier[square] ( identifier[grid] [:, literal[int] ])))
identifier[deflection_y] = identifier[np] . identifier[arctanh] ( identifier[np] . identifier[divide] ( identifier[np] . identifier[multiply] ( identifier[np] . identifier[sqrt] ( literal[int] - identifier[self] . identifier[axis_ratio] ** literal[int] ), identifier[grid] [:, literal[int] ]), identifier[psi] ))
identifier[deflection_x] = identifier[np] . identifier[arctan] ( identifier[np] . identifier[divide] ( identifier[np] . identifier[multiply] ( identifier[np] . identifier[sqrt] ( literal[int] - identifier[self] . identifier[axis_ratio] ** literal[int] ), identifier[grid] [:, literal[int] ]), identifier[psi] ))
keyword[return] identifier[self] . identifier[rotate_grid_from_profile] ( identifier[np] . identifier[multiply] ( identifier[factor] , identifier[np] . identifier[vstack] (( identifier[deflection_y] , identifier[deflection_x] )). identifier[T] ))
|
def deflections_from_grid(self, grid):
"""
Calculate the deflection angles at a given set of arc-second gridded coordinates.
For coordinates (0.0, 0.0) the analytic calculation of the deflection angle gives a NaN. Therefore, coordinates at (0.0, 0.0) are shifted slightly to (1.0e-8, 1.0e-8).
Parameters
----------
grid : grids.RegularGrid
The grid of (y,x) arc-second coordinates the deflection angles are computed on.
"""
factor = 2.0 * self.einstein_radius_rescaled * self.axis_ratio / np.sqrt(1 - self.axis_ratio ** 2)
psi = np.sqrt(np.add(np.multiply(self.axis_ratio ** 2, np.square(grid[:, 1])), np.square(grid[:, 0])))
deflection_y = np.arctanh(np.divide(np.multiply(np.sqrt(1 - self.axis_ratio ** 2), grid[:, 0]), psi))
deflection_x = np.arctan(np.divide(np.multiply(np.sqrt(1 - self.axis_ratio ** 2), grid[:, 1]), psi))
return self.rotate_grid_from_profile(np.multiply(factor, np.vstack((deflection_y, deflection_x)).T))
|
def get_or_default(func=None, default=None):
"""
Wrapper around Django's ORM `get` functionality.
Wrap anything that raises ObjectDoesNotExist exception
and provide the default value if necessary.
`default` by default is None. `default` can be any callable,
if it is callable it will be called when ObjectDoesNotExist
exception will be raised.
"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except ObjectDoesNotExist:
if callable(default):
return default()
else:
return default
return wrapper
if func is None:
return decorator
else:
return decorator(func)
|
def function[get_or_default, parameter[func, default]]:
constant[
Wrapper around Django's ORM `get` functionality.
Wrap anything that raises ObjectDoesNotExist exception
and provide the default value if necessary.
`default` by default is None. `default` can be any callable,
if it is callable it will be called when ObjectDoesNotExist
exception will be raised.
]
def function[decorator, parameter[func]]:
def function[wrapper, parameter[]]:
<ast.Try object at 0x7da1aff6d6f0>
return[name[wrapper]]
if compare[name[func] is constant[None]] begin[:]
return[name[decorator]]
|
keyword[def] identifier[get_or_default] ( identifier[func] = keyword[None] , identifier[default] = keyword[None] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[func] ):
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
keyword[try] :
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[ObjectDoesNotExist] :
keyword[if] identifier[callable] ( identifier[default] ):
keyword[return] identifier[default] ()
keyword[else] :
keyword[return] identifier[default]
keyword[return] identifier[wrapper]
keyword[if] identifier[func] keyword[is] keyword[None] :
keyword[return] identifier[decorator]
keyword[else] :
keyword[return] identifier[decorator] ( identifier[func] )
|
def get_or_default(func=None, default=None):
"""
Wrapper around Django's ORM `get` functionality.
Wrap anything that raises ObjectDoesNotExist exception
and provide the default value if necessary.
`default` by default is None. `default` can be any callable,
if it is callable it will be called when ObjectDoesNotExist
exception will be raised.
"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs) # depends on [control=['try'], data=[]]
except ObjectDoesNotExist:
if callable(default):
return default() # depends on [control=['if'], data=[]]
else:
return default # depends on [control=['except'], data=[]]
return wrapper
if func is None:
return decorator # depends on [control=['if'], data=[]]
else:
return decorator(func)
|
async def logs(
self,
service_id: str,
*,
details: bool = False,
follow: bool = False,
stdout: bool = False,
stderr: bool = False,
since: int = 0,
timestamps: bool = False,
is_tty: bool = False,
tail: str = "all"
) -> Union[str, AsyncIterator[str]]:
"""
Retrieve logs of the given service
Args:
details: show service context and extra details provided to logs
follow: return the logs as a stream.
stdout: return logs from stdout
stderr: return logs from stderr
since: return logs since this time, as a UNIX timestamp
timestamps: add timestamps to every log line
is_tty: the service has a pseudo-TTY allocated
tail: only return this number of log lines
from the end of the logs, specify as an integer
or `all` to output all log lines.
"""
if stdout is False and stderr is False:
raise TypeError("Need one of stdout or stderr")
params = {
"details": details,
"follow": follow,
"stdout": stdout,
"stderr": stderr,
"since": since,
"timestamps": timestamps,
"tail": tail,
}
response = await self.docker._query(
"services/{service_id}/logs".format(service_id=service_id),
method="GET",
params=params,
)
return await multiplexed_result(response, follow, is_tty=is_tty)
|
<ast.AsyncFunctionDef object at 0x7da1b08b3250>
|
keyword[async] keyword[def] identifier[logs] (
identifier[self] ,
identifier[service_id] : identifier[str] ,
*,
identifier[details] : identifier[bool] = keyword[False] ,
identifier[follow] : identifier[bool] = keyword[False] ,
identifier[stdout] : identifier[bool] = keyword[False] ,
identifier[stderr] : identifier[bool] = keyword[False] ,
identifier[since] : identifier[int] = literal[int] ,
identifier[timestamps] : identifier[bool] = keyword[False] ,
identifier[is_tty] : identifier[bool] = keyword[False] ,
identifier[tail] : identifier[str] = literal[string]
)-> identifier[Union] [ identifier[str] , identifier[AsyncIterator] [ identifier[str] ]]:
literal[string]
keyword[if] identifier[stdout] keyword[is] keyword[False] keyword[and] identifier[stderr] keyword[is] keyword[False] :
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[params] ={
literal[string] : identifier[details] ,
literal[string] : identifier[follow] ,
literal[string] : identifier[stdout] ,
literal[string] : identifier[stderr] ,
literal[string] : identifier[since] ,
literal[string] : identifier[timestamps] ,
literal[string] : identifier[tail] ,
}
identifier[response] = keyword[await] identifier[self] . identifier[docker] . identifier[_query] (
literal[string] . identifier[format] ( identifier[service_id] = identifier[service_id] ),
identifier[method] = literal[string] ,
identifier[params] = identifier[params] ,
)
keyword[return] keyword[await] identifier[multiplexed_result] ( identifier[response] , identifier[follow] , identifier[is_tty] = identifier[is_tty] )
|
async def logs(self, service_id: str, *, details: bool=False, follow: bool=False, stdout: bool=False, stderr: bool=False, since: int=0, timestamps: bool=False, is_tty: bool=False, tail: str='all') -> Union[str, AsyncIterator[str]]:
"""
Retrieve logs of the given service
Args:
details: show service context and extra details provided to logs
follow: return the logs as a stream.
stdout: return logs from stdout
stderr: return logs from stderr
since: return logs since this time, as a UNIX timestamp
timestamps: add timestamps to every log line
is_tty: the service has a pseudo-TTY allocated
tail: only return this number of log lines
from the end of the logs, specify as an integer
or `all` to output all log lines.
"""
if stdout is False and stderr is False:
raise TypeError('Need one of stdout or stderr') # depends on [control=['if'], data=[]]
params = {'details': details, 'follow': follow, 'stdout': stdout, 'stderr': stderr, 'since': since, 'timestamps': timestamps, 'tail': tail}
response = await self.docker._query('services/{service_id}/logs'.format(service_id=service_id), method='GET', params=params)
return await multiplexed_result(response, follow, is_tty=is_tty)
|
def load_by_name(name):
"""
Load a spec from either a file path or a fully qualified name.
"""
if os.path.exists(name):
load_from_path(name)
else:
__import__(name)
|
def function[load_by_name, parameter[name]]:
constant[
Load a spec from either a file path or a fully qualified name.
]
if call[name[os].path.exists, parameter[name[name]]] begin[:]
call[name[load_from_path], parameter[name[name]]]
|
keyword[def] identifier[load_by_name] ( identifier[name] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[name] ):
identifier[load_from_path] ( identifier[name] )
keyword[else] :
identifier[__import__] ( identifier[name] )
|
def load_by_name(name):
"""
Load a spec from either a file path or a fully qualified name.
"""
if os.path.exists(name):
load_from_path(name) # depends on [control=['if'], data=[]]
else:
__import__(name)
|
def _send(self, **req_kwargs):
"""Send an authenticated request to a Google API.
Args:
**req_kwargs: Arbitrary keyword arguments to pass to Requests.
Return:
requests.Response: The raw response.
Raises:
LoginException: If :py:meth:`login` has not been called.
"""
auth_token = self._auth.getAuthToken()
if auth_token is None:
raise exception.LoginException('Not logged in')
req_kwargs.setdefault('headers', {
'Authorization': 'OAuth ' + auth_token
})
return self._session.request(**req_kwargs)
|
def function[_send, parameter[self]]:
constant[Send an authenticated request to a Google API.
Args:
**req_kwargs: Arbitrary keyword arguments to pass to Requests.
Return:
requests.Response: The raw response.
Raises:
LoginException: If :py:meth:`login` has not been called.
]
variable[auth_token] assign[=] call[name[self]._auth.getAuthToken, parameter[]]
if compare[name[auth_token] is constant[None]] begin[:]
<ast.Raise object at 0x7da18f00d8a0>
call[name[req_kwargs].setdefault, parameter[constant[headers], dictionary[[<ast.Constant object at 0x7da18f00eaa0>], [<ast.BinOp object at 0x7da18f00ca90>]]]]
return[call[name[self]._session.request, parameter[]]]
|
keyword[def] identifier[_send] ( identifier[self] ,** identifier[req_kwargs] ):
literal[string]
identifier[auth_token] = identifier[self] . identifier[_auth] . identifier[getAuthToken] ()
keyword[if] identifier[auth_token] keyword[is] keyword[None] :
keyword[raise] identifier[exception] . identifier[LoginException] ( literal[string] )
identifier[req_kwargs] . identifier[setdefault] ( literal[string] ,{
literal[string] : literal[string] + identifier[auth_token]
})
keyword[return] identifier[self] . identifier[_session] . identifier[request] (** identifier[req_kwargs] )
|
def _send(self, **req_kwargs):
"""Send an authenticated request to a Google API.
Args:
**req_kwargs: Arbitrary keyword arguments to pass to Requests.
Return:
requests.Response: The raw response.
Raises:
LoginException: If :py:meth:`login` has not been called.
"""
auth_token = self._auth.getAuthToken()
if auth_token is None:
raise exception.LoginException('Not logged in') # depends on [control=['if'], data=[]]
req_kwargs.setdefault('headers', {'Authorization': 'OAuth ' + auth_token})
return self._session.request(**req_kwargs)
|
def truncateGraph(graph, root_nodes):
"""Create a set of all nodes containg the root_nodes and
all nodes reacheable from them
"""
subgraph = Graph()
for node in root_nodes:
subgraph = GraphUtils.joinGraphs(subgraph, GraphUtils.getReacheableSubgraph(graph, node))
return subgraph
|
def function[truncateGraph, parameter[graph, root_nodes]]:
constant[Create a set of all nodes containg the root_nodes and
all nodes reacheable from them
]
variable[subgraph] assign[=] call[name[Graph], parameter[]]
for taget[name[node]] in starred[name[root_nodes]] begin[:]
variable[subgraph] assign[=] call[name[GraphUtils].joinGraphs, parameter[name[subgraph], call[name[GraphUtils].getReacheableSubgraph, parameter[name[graph], name[node]]]]]
return[name[subgraph]]
|
keyword[def] identifier[truncateGraph] ( identifier[graph] , identifier[root_nodes] ):
literal[string]
identifier[subgraph] = identifier[Graph] ()
keyword[for] identifier[node] keyword[in] identifier[root_nodes] :
identifier[subgraph] = identifier[GraphUtils] . identifier[joinGraphs] ( identifier[subgraph] , identifier[GraphUtils] . identifier[getReacheableSubgraph] ( identifier[graph] , identifier[node] ))
keyword[return] identifier[subgraph]
|
def truncateGraph(graph, root_nodes):
"""Create a set of all nodes containg the root_nodes and
all nodes reacheable from them
"""
subgraph = Graph()
for node in root_nodes:
subgraph = GraphUtils.joinGraphs(subgraph, GraphUtils.getReacheableSubgraph(graph, node)) # depends on [control=['for'], data=['node']]
return subgraph
|
def version(self):
""" Return kernel and btrfs version. """
return dict(
buttersink=theVersion,
btrfs=self.butterStore.butter.btrfsVersion,
linux=platform.platform(),
)
|
def function[version, parameter[self]]:
constant[ Return kernel and btrfs version. ]
return[call[name[dict], parameter[]]]
|
keyword[def] identifier[version] ( identifier[self] ):
literal[string]
keyword[return] identifier[dict] (
identifier[buttersink] = identifier[theVersion] ,
identifier[btrfs] = identifier[self] . identifier[butterStore] . identifier[butter] . identifier[btrfsVersion] ,
identifier[linux] = identifier[platform] . identifier[platform] (),
)
|
def version(self):
""" Return kernel and btrfs version. """
return dict(buttersink=theVersion, btrfs=self.butterStore.butter.btrfsVersion, linux=platform.platform())
|
def get_next_valid_time_from_t(self, timestamp):
"""Get next valid time for time range
:param timestamp: time we compute from
:type timestamp: int
:return: timestamp of the next valid time (LOCAL TIME)
:rtype: int | None
"""
if self.is_time_valid(timestamp):
return timestamp
# First we search for the day of t
t_day = self.get_next_valid_day(timestamp)
if t_day is None:
return t_day
# We search for the min of all tr.start > sec_from_morning
# if it's the next day, use a start of the day search for timerange
if timestamp < t_day:
sec_from_morning = self.get_next_future_timerange_valid(t_day)
else: # it is in this day, so look from t (can be in the evening or so)
sec_from_morning = self.get_next_future_timerange_valid(timestamp)
if sec_from_morning is not None:
if t_day is not None and sec_from_morning is not None:
return t_day + sec_from_morning
# Then we search for the next day of t
# The sec will be the min of the day
timestamp = get_day(timestamp) + 86400
t_day2 = self.get_next_valid_day(timestamp)
sec_from_morning = self.get_next_future_timerange_valid(t_day2)
if t_day2 is not None and sec_from_morning is not None:
return t_day2 + sec_from_morning
# I did not found any valid time
return None
|
def function[get_next_valid_time_from_t, parameter[self, timestamp]]:
constant[Get next valid time for time range
:param timestamp: time we compute from
:type timestamp: int
:return: timestamp of the next valid time (LOCAL TIME)
:rtype: int | None
]
if call[name[self].is_time_valid, parameter[name[timestamp]]] begin[:]
return[name[timestamp]]
variable[t_day] assign[=] call[name[self].get_next_valid_day, parameter[name[timestamp]]]
if compare[name[t_day] is constant[None]] begin[:]
return[name[t_day]]
if compare[name[timestamp] less[<] name[t_day]] begin[:]
variable[sec_from_morning] assign[=] call[name[self].get_next_future_timerange_valid, parameter[name[t_day]]]
if compare[name[sec_from_morning] is_not constant[None]] begin[:]
if <ast.BoolOp object at 0x7da207f03910> begin[:]
return[binary_operation[name[t_day] + name[sec_from_morning]]]
variable[timestamp] assign[=] binary_operation[call[name[get_day], parameter[name[timestamp]]] + constant[86400]]
variable[t_day2] assign[=] call[name[self].get_next_valid_day, parameter[name[timestamp]]]
variable[sec_from_morning] assign[=] call[name[self].get_next_future_timerange_valid, parameter[name[t_day2]]]
if <ast.BoolOp object at 0x7da207f00550> begin[:]
return[binary_operation[name[t_day2] + name[sec_from_morning]]]
return[constant[None]]
|
keyword[def] identifier[get_next_valid_time_from_t] ( identifier[self] , identifier[timestamp] ):
literal[string]
keyword[if] identifier[self] . identifier[is_time_valid] ( identifier[timestamp] ):
keyword[return] identifier[timestamp]
identifier[t_day] = identifier[self] . identifier[get_next_valid_day] ( identifier[timestamp] )
keyword[if] identifier[t_day] keyword[is] keyword[None] :
keyword[return] identifier[t_day]
keyword[if] identifier[timestamp] < identifier[t_day] :
identifier[sec_from_morning] = identifier[self] . identifier[get_next_future_timerange_valid] ( identifier[t_day] )
keyword[else] :
identifier[sec_from_morning] = identifier[self] . identifier[get_next_future_timerange_valid] ( identifier[timestamp] )
keyword[if] identifier[sec_from_morning] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[t_day] keyword[is] keyword[not] keyword[None] keyword[and] identifier[sec_from_morning] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[t_day] + identifier[sec_from_morning]
identifier[timestamp] = identifier[get_day] ( identifier[timestamp] )+ literal[int]
identifier[t_day2] = identifier[self] . identifier[get_next_valid_day] ( identifier[timestamp] )
identifier[sec_from_morning] = identifier[self] . identifier[get_next_future_timerange_valid] ( identifier[t_day2] )
keyword[if] identifier[t_day2] keyword[is] keyword[not] keyword[None] keyword[and] identifier[sec_from_morning] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[t_day2] + identifier[sec_from_morning]
keyword[return] keyword[None]
|
def get_next_valid_time_from_t(self, timestamp):
"""Get next valid time for time range
:param timestamp: time we compute from
:type timestamp: int
:return: timestamp of the next valid time (LOCAL TIME)
:rtype: int | None
"""
if self.is_time_valid(timestamp):
return timestamp # depends on [control=['if'], data=[]]
# First we search for the day of t
t_day = self.get_next_valid_day(timestamp)
if t_day is None:
return t_day # depends on [control=['if'], data=['t_day']]
# We search for the min of all tr.start > sec_from_morning
# if it's the next day, use a start of the day search for timerange
if timestamp < t_day:
sec_from_morning = self.get_next_future_timerange_valid(t_day) # depends on [control=['if'], data=['t_day']]
else: # it is in this day, so look from t (can be in the evening or so)
sec_from_morning = self.get_next_future_timerange_valid(timestamp)
if sec_from_morning is not None:
if t_day is not None and sec_from_morning is not None:
return t_day + sec_from_morning # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['sec_from_morning']]
# Then we search for the next day of t
# The sec will be the min of the day
timestamp = get_day(timestamp) + 86400
t_day2 = self.get_next_valid_day(timestamp)
sec_from_morning = self.get_next_future_timerange_valid(t_day2)
if t_day2 is not None and sec_from_morning is not None:
return t_day2 + sec_from_morning # depends on [control=['if'], data=[]]
# I did not found any valid time
return None
|
def __float_window(window_spec):
'''Decorator function for windows with fractional input.
This function guarantees that for fractional `x`, the following hold:
1. `__float_window(window_function)(x)` has length `np.ceil(x)`
2. all values from `np.floor(x)` are set to 0.
For integer-valued `x`, there should be no change in behavior.
'''
def _wrap(n, *args, **kwargs):
'''The wrapped window'''
n_min, n_max = int(np.floor(n)), int(np.ceil(n))
window = get_window(window_spec, n_min)
if len(window) < n_max:
window = np.pad(window, [(0, n_max - len(window))],
mode='constant')
window[n_min:] = 0.0
return window
return _wrap
|
def function[__float_window, parameter[window_spec]]:
constant[Decorator function for windows with fractional input.
This function guarantees that for fractional `x`, the following hold:
1. `__float_window(window_function)(x)` has length `np.ceil(x)`
2. all values from `np.floor(x)` are set to 0.
For integer-valued `x`, there should be no change in behavior.
]
def function[_wrap, parameter[n]]:
constant[The wrapped window]
<ast.Tuple object at 0x7da1b05f8a90> assign[=] tuple[[<ast.Call object at 0x7da1b05f9ff0>, <ast.Call object at 0x7da1b05faa40>]]
variable[window] assign[=] call[name[get_window], parameter[name[window_spec], name[n_min]]]
if compare[call[name[len], parameter[name[window]]] less[<] name[n_max]] begin[:]
variable[window] assign[=] call[name[np].pad, parameter[name[window], list[[<ast.Tuple object at 0x7da1b05fbca0>]]]]
call[name[window]][<ast.Slice object at 0x7da1b05f8f70>] assign[=] constant[0.0]
return[name[window]]
return[name[_wrap]]
|
keyword[def] identifier[__float_window] ( identifier[window_spec] ):
literal[string]
keyword[def] identifier[_wrap] ( identifier[n] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[n_min] , identifier[n_max] = identifier[int] ( identifier[np] . identifier[floor] ( identifier[n] )), identifier[int] ( identifier[np] . identifier[ceil] ( identifier[n] ))
identifier[window] = identifier[get_window] ( identifier[window_spec] , identifier[n_min] )
keyword[if] identifier[len] ( identifier[window] )< identifier[n_max] :
identifier[window] = identifier[np] . identifier[pad] ( identifier[window] ,[( literal[int] , identifier[n_max] - identifier[len] ( identifier[window] ))],
identifier[mode] = literal[string] )
identifier[window] [ identifier[n_min] :]= literal[int]
keyword[return] identifier[window]
keyword[return] identifier[_wrap]
|
def __float_window(window_spec):
"""Decorator function for windows with fractional input.
This function guarantees that for fractional `x`, the following hold:
1. `__float_window(window_function)(x)` has length `np.ceil(x)`
2. all values from `np.floor(x)` are set to 0.
For integer-valued `x`, there should be no change in behavior.
"""
def _wrap(n, *args, **kwargs):
"""The wrapped window"""
(n_min, n_max) = (int(np.floor(n)), int(np.ceil(n)))
window = get_window(window_spec, n_min)
if len(window) < n_max:
window = np.pad(window, [(0, n_max - len(window))], mode='constant') # depends on [control=['if'], data=['n_max']]
window[n_min:] = 0.0
return window
return _wrap
|
def riak_http_search_query(self, solr_core, solr_params, count_deleted=False):
"""
This method is for advanced SOLR queries. Riak HTTP search query endpoint,
sends solr_params and query string as a proxy and returns solr reponse.
Args:
solr_core (str): solr core on which query will be executed
solr_params (str): solr specific query params, such as rows, start, fl, df, wt etc..
count_deleted (bool): ignore deleted records or not
Returns:
(dict): dict of solr response
"""
# append current _solr_query params
sq = ["%s%%3A%s" % (q[0], q[1]) for q in self._solr_query]
if not count_deleted:
sq.append("-deleted%3ATrue")
search_host = "http://%s:%s/search/query/%s?wt=json&q=%s&%s" % (
settings.RIAK_SERVER,
settings.RIAK_HTTP_PORT,
solr_core,
"+AND+".join(sq),
solr_params
)
return json.loads(bytes_to_str(urlopen(search_host).read()))
|
def function[riak_http_search_query, parameter[self, solr_core, solr_params, count_deleted]]:
constant[
This method is for advanced SOLR queries. Riak HTTP search query endpoint,
sends solr_params and query string as a proxy and returns solr reponse.
Args:
solr_core (str): solr core on which query will be executed
solr_params (str): solr specific query params, such as rows, start, fl, df, wt etc..
count_deleted (bool): ignore deleted records or not
Returns:
(dict): dict of solr response
]
variable[sq] assign[=] <ast.ListComp object at 0x7da18f00f220>
if <ast.UnaryOp object at 0x7da18f00e920> begin[:]
call[name[sq].append, parameter[constant[-deleted%3ATrue]]]
variable[search_host] assign[=] binary_operation[constant[http://%s:%s/search/query/%s?wt=json&q=%s&%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18f00cfd0>, <ast.Attribute object at 0x7da18f00cd60>, <ast.Name object at 0x7da18f00d810>, <ast.Call object at 0x7da18f00c850>, <ast.Name object at 0x7da18f00fc70>]]]
return[call[name[json].loads, parameter[call[name[bytes_to_str], parameter[call[call[name[urlopen], parameter[name[search_host]]].read, parameter[]]]]]]]
|
keyword[def] identifier[riak_http_search_query] ( identifier[self] , identifier[solr_core] , identifier[solr_params] , identifier[count_deleted] = keyword[False] ):
literal[string]
identifier[sq] =[ literal[string] %( identifier[q] [ literal[int] ], identifier[q] [ literal[int] ]) keyword[for] identifier[q] keyword[in] identifier[self] . identifier[_solr_query] ]
keyword[if] keyword[not] identifier[count_deleted] :
identifier[sq] . identifier[append] ( literal[string] )
identifier[search_host] = literal[string] %(
identifier[settings] . identifier[RIAK_SERVER] ,
identifier[settings] . identifier[RIAK_HTTP_PORT] ,
identifier[solr_core] ,
literal[string] . identifier[join] ( identifier[sq] ),
identifier[solr_params]
)
keyword[return] identifier[json] . identifier[loads] ( identifier[bytes_to_str] ( identifier[urlopen] ( identifier[search_host] ). identifier[read] ()))
|
def riak_http_search_query(self, solr_core, solr_params, count_deleted=False):
"""
This method is for advanced SOLR queries. Riak HTTP search query endpoint,
sends solr_params and query string as a proxy and returns solr reponse.
Args:
solr_core (str): solr core on which query will be executed
solr_params (str): solr specific query params, such as rows, start, fl, df, wt etc..
count_deleted (bool): ignore deleted records or not
Returns:
(dict): dict of solr response
"""
# append current _solr_query params
sq = ['%s%%3A%s' % (q[0], q[1]) for q in self._solr_query]
if not count_deleted:
sq.append('-deleted%3ATrue') # depends on [control=['if'], data=[]]
search_host = 'http://%s:%s/search/query/%s?wt=json&q=%s&%s' % (settings.RIAK_SERVER, settings.RIAK_HTTP_PORT, solr_core, '+AND+'.join(sq), solr_params)
return json.loads(bytes_to_str(urlopen(search_host).read()))
|
def nonpresent_module_filename():
"""Return module name that doesn't already exist"""
while True:
module_name = get_random_name()
loader = pkgutil.find_loader(module_name)
if loader is not None:
continue
importlib.invalidate_caches()
return "{}.py".format(module_name)
|
def function[nonpresent_module_filename, parameter[]]:
constant[Return module name that doesn't already exist]
while constant[True] begin[:]
variable[module_name] assign[=] call[name[get_random_name], parameter[]]
variable[loader] assign[=] call[name[pkgutil].find_loader, parameter[name[module_name]]]
if compare[name[loader] is_not constant[None]] begin[:]
continue
call[name[importlib].invalidate_caches, parameter[]]
return[call[constant[{}.py].format, parameter[name[module_name]]]]
|
keyword[def] identifier[nonpresent_module_filename] ():
literal[string]
keyword[while] keyword[True] :
identifier[module_name] = identifier[get_random_name] ()
identifier[loader] = identifier[pkgutil] . identifier[find_loader] ( identifier[module_name] )
keyword[if] identifier[loader] keyword[is] keyword[not] keyword[None] :
keyword[continue]
identifier[importlib] . identifier[invalidate_caches] ()
keyword[return] literal[string] . identifier[format] ( identifier[module_name] )
|
def nonpresent_module_filename():
"""Return module name that doesn't already exist"""
while True:
module_name = get_random_name()
loader = pkgutil.find_loader(module_name)
if loader is not None:
continue # depends on [control=['if'], data=[]]
importlib.invalidate_caches()
return '{}.py'.format(module_name) # depends on [control=['while'], data=[]]
|
def _validate_cert_path(name):
'''
Ensure that the certificate path, as determind from user input, is valid.
'''
cmd = r"Test-Path -Path '{0}'".format(name)
if not ast.literal_eval(_cmd_run(cmd=cmd)):
raise SaltInvocationError(r"Invalid path specified: {0}".format(name))
|
def function[_validate_cert_path, parameter[name]]:
constant[
Ensure that the certificate path, as determind from user input, is valid.
]
variable[cmd] assign[=] call[constant[Test-Path -Path '{0}'].format, parameter[name[name]]]
if <ast.UnaryOp object at 0x7da204622560> begin[:]
<ast.Raise object at 0x7da204620a90>
|
keyword[def] identifier[_validate_cert_path] ( identifier[name] ):
literal[string]
identifier[cmd] = literal[string] . identifier[format] ( identifier[name] )
keyword[if] keyword[not] identifier[ast] . identifier[literal_eval] ( identifier[_cmd_run] ( identifier[cmd] = identifier[cmd] )):
keyword[raise] identifier[SaltInvocationError] ( literal[string] . identifier[format] ( identifier[name] ))
|
def _validate_cert_path(name):
"""
Ensure that the certificate path, as determind from user input, is valid.
"""
cmd = "Test-Path -Path '{0}'".format(name)
if not ast.literal_eval(_cmd_run(cmd=cmd)):
raise SaltInvocationError('Invalid path specified: {0}'.format(name)) # depends on [control=['if'], data=[]]
|
def append(self, transitions, rows=None):
"""Append a batch of transitions to rows of the memory.
Args:
transitions: Tuple of transition quantities with batch dimension.
rows: Episodes to append to, defaults to all.
Returns:
Operation.
"""
rows = tf.range(self._capacity) if rows is None else rows
assert rows.shape.ndims == 1
assert_capacity = tf.assert_less(
rows, self._capacity,
message='capacity exceeded')
with tf.control_dependencies([assert_capacity]):
assert_max_length = tf.assert_less(
tf.gather(self._length, rows), self._max_length,
message='max length exceeded')
with tf.control_dependencies([assert_max_length]):
timestep = tf.gather(self._length, rows)
indices = tf.stack([rows, timestep], 1)
append_ops = tools.nested.map(
lambda var, val: tf.scatter_nd_update(var, indices, val),
self._buffers, transitions, flatten=True)
with tf.control_dependencies(append_ops):
episode_mask = tf.reduce_sum(tf.one_hot(
rows, self._capacity, dtype=tf.int32), 0)
return self._length.assign_add(episode_mask)
|
def function[append, parameter[self, transitions, rows]]:
constant[Append a batch of transitions to rows of the memory.
Args:
transitions: Tuple of transition quantities with batch dimension.
rows: Episodes to append to, defaults to all.
Returns:
Operation.
]
variable[rows] assign[=] <ast.IfExp object at 0x7da1b12c7ac0>
assert[compare[name[rows].shape.ndims equal[==] constant[1]]]
variable[assert_capacity] assign[=] call[name[tf].assert_less, parameter[name[rows], name[self]._capacity]]
with call[name[tf].control_dependencies, parameter[list[[<ast.Name object at 0x7da1b12c6d10>]]]] begin[:]
variable[assert_max_length] assign[=] call[name[tf].assert_less, parameter[call[name[tf].gather, parameter[name[self]._length, name[rows]]], name[self]._max_length]]
with call[name[tf].control_dependencies, parameter[list[[<ast.Name object at 0x7da18f723490>]]]] begin[:]
variable[timestep] assign[=] call[name[tf].gather, parameter[name[self]._length, name[rows]]]
variable[indices] assign[=] call[name[tf].stack, parameter[list[[<ast.Name object at 0x7da18f722290>, <ast.Name object at 0x7da18f721780>]], constant[1]]]
variable[append_ops] assign[=] call[name[tools].nested.map, parameter[<ast.Lambda object at 0x7da18f722ad0>, name[self]._buffers, name[transitions]]]
with call[name[tf].control_dependencies, parameter[name[append_ops]]] begin[:]
variable[episode_mask] assign[=] call[name[tf].reduce_sum, parameter[call[name[tf].one_hot, parameter[name[rows], name[self]._capacity]], constant[0]]]
return[call[name[self]._length.assign_add, parameter[name[episode_mask]]]]
|
keyword[def] identifier[append] ( identifier[self] , identifier[transitions] , identifier[rows] = keyword[None] ):
literal[string]
identifier[rows] = identifier[tf] . identifier[range] ( identifier[self] . identifier[_capacity] ) keyword[if] identifier[rows] keyword[is] keyword[None] keyword[else] identifier[rows]
keyword[assert] identifier[rows] . identifier[shape] . identifier[ndims] == literal[int]
identifier[assert_capacity] = identifier[tf] . identifier[assert_less] (
identifier[rows] , identifier[self] . identifier[_capacity] ,
identifier[message] = literal[string] )
keyword[with] identifier[tf] . identifier[control_dependencies] ([ identifier[assert_capacity] ]):
identifier[assert_max_length] = identifier[tf] . identifier[assert_less] (
identifier[tf] . identifier[gather] ( identifier[self] . identifier[_length] , identifier[rows] ), identifier[self] . identifier[_max_length] ,
identifier[message] = literal[string] )
keyword[with] identifier[tf] . identifier[control_dependencies] ([ identifier[assert_max_length] ]):
identifier[timestep] = identifier[tf] . identifier[gather] ( identifier[self] . identifier[_length] , identifier[rows] )
identifier[indices] = identifier[tf] . identifier[stack] ([ identifier[rows] , identifier[timestep] ], literal[int] )
identifier[append_ops] = identifier[tools] . identifier[nested] . identifier[map] (
keyword[lambda] identifier[var] , identifier[val] : identifier[tf] . identifier[scatter_nd_update] ( identifier[var] , identifier[indices] , identifier[val] ),
identifier[self] . identifier[_buffers] , identifier[transitions] , identifier[flatten] = keyword[True] )
keyword[with] identifier[tf] . identifier[control_dependencies] ( identifier[append_ops] ):
identifier[episode_mask] = identifier[tf] . identifier[reduce_sum] ( identifier[tf] . identifier[one_hot] (
identifier[rows] , identifier[self] . identifier[_capacity] , identifier[dtype] = identifier[tf] . identifier[int32] ), literal[int] )
keyword[return] identifier[self] . identifier[_length] . identifier[assign_add] ( identifier[episode_mask] )
|
def append(self, transitions, rows=None):
"""Append a batch of transitions to rows of the memory.
Args:
transitions: Tuple of transition quantities with batch dimension.
rows: Episodes to append to, defaults to all.
Returns:
Operation.
"""
rows = tf.range(self._capacity) if rows is None else rows
assert rows.shape.ndims == 1
assert_capacity = tf.assert_less(rows, self._capacity, message='capacity exceeded')
with tf.control_dependencies([assert_capacity]):
assert_max_length = tf.assert_less(tf.gather(self._length, rows), self._max_length, message='max length exceeded') # depends on [control=['with'], data=[]]
with tf.control_dependencies([assert_max_length]):
timestep = tf.gather(self._length, rows)
indices = tf.stack([rows, timestep], 1)
append_ops = tools.nested.map(lambda var, val: tf.scatter_nd_update(var, indices, val), self._buffers, transitions, flatten=True) # depends on [control=['with'], data=[]]
with tf.control_dependencies(append_ops):
episode_mask = tf.reduce_sum(tf.one_hot(rows, self._capacity, dtype=tf.int32), 0)
return self._length.assign_add(episode_mask) # depends on [control=['with'], data=[]]
|
def add_chunk(self,start,end,trig_start=0,trig_end=0):
"""
Add an AnalysisChunk to the list associated with this ScienceSegment.
@param start: GPS start time of chunk.
@param end: GPS end time of chunk.
@param trig_start: GPS start time for triggers from chunk
"""
self.__chunks.append(AnalysisChunk(start,end,trig_start,trig_end))
|
def function[add_chunk, parameter[self, start, end, trig_start, trig_end]]:
constant[
Add an AnalysisChunk to the list associated with this ScienceSegment.
@param start: GPS start time of chunk.
@param end: GPS end time of chunk.
@param trig_start: GPS start time for triggers from chunk
]
call[name[self].__chunks.append, parameter[call[name[AnalysisChunk], parameter[name[start], name[end], name[trig_start], name[trig_end]]]]]
|
keyword[def] identifier[add_chunk] ( identifier[self] , identifier[start] , identifier[end] , identifier[trig_start] = literal[int] , identifier[trig_end] = literal[int] ):
literal[string]
identifier[self] . identifier[__chunks] . identifier[append] ( identifier[AnalysisChunk] ( identifier[start] , identifier[end] , identifier[trig_start] , identifier[trig_end] ))
|
def add_chunk(self, start, end, trig_start=0, trig_end=0):
"""
Add an AnalysisChunk to the list associated with this ScienceSegment.
@param start: GPS start time of chunk.
@param end: GPS end time of chunk.
@param trig_start: GPS start time for triggers from chunk
"""
self.__chunks.append(AnalysisChunk(start, end, trig_start, trig_end))
|
def scan(subtitles):
"""Remove advertising from subtitles."""
from importlib.util import find_spec
try:
import subnuker
except ImportError:
fatal('Unable to scan subtitles. Please install subnuker.')
# check whether aeidon is available
aeidon = find_spec('aeidon') is not None
if sys.stdin.isatty():
# launch subnuker from the existing terminal
args = (['--aeidon'] if aeidon else []) + \
['--gui', '--regex'] + subtitles
subnuker.main(args)
else:
# launch subnuker from a new terminal
args = (['--aeidon'] if aeidon else []) + \
['--gui', '--regex']
execute(Config.TERMINAL,
'--execute',
'subnuker',
*args + subtitles)
|
def function[scan, parameter[subtitles]]:
constant[Remove advertising from subtitles.]
from relative_module[importlib.util] import module[find_spec]
<ast.Try object at 0x7da1b13514b0>
variable[aeidon] assign[=] compare[call[name[find_spec], parameter[constant[aeidon]]] is_not constant[None]]
if call[name[sys].stdin.isatty, parameter[]] begin[:]
variable[args] assign[=] binary_operation[binary_operation[<ast.IfExp object at 0x7da1b1350250> + list[[<ast.Constant object at 0x7da1b13503a0>, <ast.Constant object at 0x7da1b1353670>]]] + name[subtitles]]
call[name[subnuker].main, parameter[name[args]]]
|
keyword[def] identifier[scan] ( identifier[subtitles] ):
literal[string]
keyword[from] identifier[importlib] . identifier[util] keyword[import] identifier[find_spec]
keyword[try] :
keyword[import] identifier[subnuker]
keyword[except] identifier[ImportError] :
identifier[fatal] ( literal[string] )
identifier[aeidon] = identifier[find_spec] ( literal[string] ) keyword[is] keyword[not] keyword[None]
keyword[if] identifier[sys] . identifier[stdin] . identifier[isatty] ():
identifier[args] =([ literal[string] ] keyword[if] identifier[aeidon] keyword[else] [])+[ literal[string] , literal[string] ]+ identifier[subtitles]
identifier[subnuker] . identifier[main] ( identifier[args] )
keyword[else] :
identifier[args] =([ literal[string] ] keyword[if] identifier[aeidon] keyword[else] [])+[ literal[string] , literal[string] ]
identifier[execute] ( identifier[Config] . identifier[TERMINAL] ,
literal[string] ,
literal[string] ,
* identifier[args] + identifier[subtitles] )
|
def scan(subtitles):
"""Remove advertising from subtitles."""
from importlib.util import find_spec
try:
import subnuker # depends on [control=['try'], data=[]]
except ImportError:
fatal('Unable to scan subtitles. Please install subnuker.') # depends on [control=['except'], data=[]]
# check whether aeidon is available
aeidon = find_spec('aeidon') is not None
if sys.stdin.isatty():
# launch subnuker from the existing terminal
args = (['--aeidon'] if aeidon else []) + ['--gui', '--regex'] + subtitles
subnuker.main(args) # depends on [control=['if'], data=[]]
else:
# launch subnuker from a new terminal
args = (['--aeidon'] if aeidon else []) + ['--gui', '--regex']
execute(Config.TERMINAL, '--execute', 'subnuker', *args + subtitles)
|
def _generate_overview_note(pass_count, only_warning_count, error_count, total_count):
""" Generates and returns the HTML note that provides a summary of validation status. """
note_html = ['<div class="progress">']
pbars = [
[ float(error_count), 'danger', 'had errors' ],
[ float(only_warning_count), 'warning', 'had warnings' ],
[ float(pass_count), 'success', 'passed' ]
]
for b in pbars:
if b[0]:
note_html.append(
'<div class="progress-bar progress-bar-{pbcol}" style="width: {pct}%" data-toggle="tooltip" title="{count} {sample} {txt}">{count}</div>'. \
format(
pbcol = b[1],
count = int(b[0]),
pct = (b[0]/float(total_count))*100.0,
txt = b[2],
sample = 'samples' if b[0] > 1 else 'sample'
)
)
note_html.append('</div>')
return "\n".join(note_html)
|
def function[_generate_overview_note, parameter[pass_count, only_warning_count, error_count, total_count]]:
constant[ Generates and returns the HTML note that provides a summary of validation status. ]
variable[note_html] assign[=] list[[<ast.Constant object at 0x7da18ede4520>]]
variable[pbars] assign[=] list[[<ast.List object at 0x7da18ede5cf0>, <ast.List object at 0x7da18ede7c40>, <ast.List object at 0x7da20e956260>]]
for taget[name[b]] in starred[name[pbars]] begin[:]
if call[name[b]][constant[0]] begin[:]
call[name[note_html].append, parameter[call[constant[<div class="progress-bar progress-bar-{pbcol}" style="width: {pct}%" data-toggle="tooltip" title="{count} {sample} {txt}">{count}</div>].format, parameter[]]]]
call[name[note_html].append, parameter[constant[</div>]]]
return[call[constant[
].join, parameter[name[note_html]]]]
|
keyword[def] identifier[_generate_overview_note] ( identifier[pass_count] , identifier[only_warning_count] , identifier[error_count] , identifier[total_count] ):
literal[string]
identifier[note_html] =[ literal[string] ]
identifier[pbars] =[
[ identifier[float] ( identifier[error_count] ), literal[string] , literal[string] ],
[ identifier[float] ( identifier[only_warning_count] ), literal[string] , literal[string] ],
[ identifier[float] ( identifier[pass_count] ), literal[string] , literal[string] ]
]
keyword[for] identifier[b] keyword[in] identifier[pbars] :
keyword[if] identifier[b] [ literal[int] ]:
identifier[note_html] . identifier[append] (
literal[string] . identifier[format] (
identifier[pbcol] = identifier[b] [ literal[int] ],
identifier[count] = identifier[int] ( identifier[b] [ literal[int] ]),
identifier[pct] =( identifier[b] [ literal[int] ]/ identifier[float] ( identifier[total_count] ))* literal[int] ,
identifier[txt] = identifier[b] [ literal[int] ],
identifier[sample] = literal[string] keyword[if] identifier[b] [ literal[int] ]> literal[int] keyword[else] literal[string]
)
)
identifier[note_html] . identifier[append] ( literal[string] )
keyword[return] literal[string] . identifier[join] ( identifier[note_html] )
|
def _generate_overview_note(pass_count, only_warning_count, error_count, total_count):
""" Generates and returns the HTML note that provides a summary of validation status. """
note_html = ['<div class="progress">']
pbars = [[float(error_count), 'danger', 'had errors'], [float(only_warning_count), 'warning', 'had warnings'], [float(pass_count), 'success', 'passed']]
for b in pbars:
if b[0]:
note_html.append('<div class="progress-bar progress-bar-{pbcol}" style="width: {pct}%" data-toggle="tooltip" title="{count} {sample} {txt}">{count}</div>'.format(pbcol=b[1], count=int(b[0]), pct=b[0] / float(total_count) * 100.0, txt=b[2], sample='samples' if b[0] > 1 else 'sample')) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['b']]
note_html.append('</div>')
return '\n'.join(note_html)
|
def setup_cmd_parser(cls):
"""Returns the RSS argument parser."""
parser = BackendCommandArgumentParser(cls.BACKEND.CATEGORIES,
archive=True)
# Required arguments
parser.parser.add_argument('url',
help="URL of the RSS feed")
return parser
|
def function[setup_cmd_parser, parameter[cls]]:
constant[Returns the RSS argument parser.]
variable[parser] assign[=] call[name[BackendCommandArgumentParser], parameter[name[cls].BACKEND.CATEGORIES]]
call[name[parser].parser.add_argument, parameter[constant[url]]]
return[name[parser]]
|
keyword[def] identifier[setup_cmd_parser] ( identifier[cls] ):
literal[string]
identifier[parser] = identifier[BackendCommandArgumentParser] ( identifier[cls] . identifier[BACKEND] . identifier[CATEGORIES] ,
identifier[archive] = keyword[True] )
identifier[parser] . identifier[parser] . identifier[add_argument] ( literal[string] ,
identifier[help] = literal[string] )
keyword[return] identifier[parser]
|
def setup_cmd_parser(cls):
"""Returns the RSS argument parser."""
parser = BackendCommandArgumentParser(cls.BACKEND.CATEGORIES, archive=True)
# Required arguments
parser.parser.add_argument('url', help='URL of the RSS feed')
return parser
|
async def _deploy(self, charm_url, application, series, config,
constraints, endpoint_bindings, resources, storage,
channel=None, num_units=None, placement=None,
devices=None):
"""Logic shared between `Model.deploy` and `BundleHandler.deploy`.
"""
log.info('Deploying %s', charm_url)
# stringify all config values for API, and convert to YAML
config = {k: str(v) for k, v in config.items()}
config = yaml.dump({application: config},
default_flow_style=False)
app_facade = client.ApplicationFacade.from_connection(
self.connection())
app = client.ApplicationDeploy(
charm_url=charm_url,
application=application,
series=series,
channel=channel,
config_yaml=config,
constraints=parse_constraints(constraints),
endpoint_bindings=endpoint_bindings,
num_units=num_units,
resources=resources,
storage=storage,
placement=placement,
devices=devices,
)
result = await app_facade.Deploy([app])
errors = [r.error.message for r in result.results if r.error]
if errors:
raise JujuError('\n'.join(errors))
return await self._wait_for_new('application', application)
|
<ast.AsyncFunctionDef object at 0x7da20e954610>
|
keyword[async] keyword[def] identifier[_deploy] ( identifier[self] , identifier[charm_url] , identifier[application] , identifier[series] , identifier[config] ,
identifier[constraints] , identifier[endpoint_bindings] , identifier[resources] , identifier[storage] ,
identifier[channel] = keyword[None] , identifier[num_units] = keyword[None] , identifier[placement] = keyword[None] ,
identifier[devices] = keyword[None] ):
literal[string]
identifier[log] . identifier[info] ( literal[string] , identifier[charm_url] )
identifier[config] ={ identifier[k] : identifier[str] ( identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[config] . identifier[items] ()}
identifier[config] = identifier[yaml] . identifier[dump] ({ identifier[application] : identifier[config] },
identifier[default_flow_style] = keyword[False] )
identifier[app_facade] = identifier[client] . identifier[ApplicationFacade] . identifier[from_connection] (
identifier[self] . identifier[connection] ())
identifier[app] = identifier[client] . identifier[ApplicationDeploy] (
identifier[charm_url] = identifier[charm_url] ,
identifier[application] = identifier[application] ,
identifier[series] = identifier[series] ,
identifier[channel] = identifier[channel] ,
identifier[config_yaml] = identifier[config] ,
identifier[constraints] = identifier[parse_constraints] ( identifier[constraints] ),
identifier[endpoint_bindings] = identifier[endpoint_bindings] ,
identifier[num_units] = identifier[num_units] ,
identifier[resources] = identifier[resources] ,
identifier[storage] = identifier[storage] ,
identifier[placement] = identifier[placement] ,
identifier[devices] = identifier[devices] ,
)
identifier[result] = keyword[await] identifier[app_facade] . identifier[Deploy] ([ identifier[app] ])
identifier[errors] =[ identifier[r] . identifier[error] . identifier[message] keyword[for] identifier[r] keyword[in] identifier[result] . identifier[results] keyword[if] identifier[r] . identifier[error] ]
keyword[if] identifier[errors] :
keyword[raise] identifier[JujuError] ( literal[string] . identifier[join] ( identifier[errors] ))
keyword[return] keyword[await] identifier[self] . identifier[_wait_for_new] ( literal[string] , identifier[application] )
|
async def _deploy(self, charm_url, application, series, config, constraints, endpoint_bindings, resources, storage, channel=None, num_units=None, placement=None, devices=None):
"""Logic shared between `Model.deploy` and `BundleHandler.deploy`.
"""
log.info('Deploying %s', charm_url)
# stringify all config values for API, and convert to YAML
config = {k: str(v) for (k, v) in config.items()}
config = yaml.dump({application: config}, default_flow_style=False)
app_facade = client.ApplicationFacade.from_connection(self.connection())
app = client.ApplicationDeploy(charm_url=charm_url, application=application, series=series, channel=channel, config_yaml=config, constraints=parse_constraints(constraints), endpoint_bindings=endpoint_bindings, num_units=num_units, resources=resources, storage=storage, placement=placement, devices=devices)
result = await app_facade.Deploy([app])
errors = [r.error.message for r in result.results if r.error]
if errors:
raise JujuError('\n'.join(errors)) # depends on [control=['if'], data=[]]
return await self._wait_for_new('application', application)
|
def yaml_dump_hook(cfg, text: bool=False):
"""
Dumps all the data into a YAML file.
"""
data = cfg.config.dump()
if not text:
yaml.dump(data, cfg.fd, Dumper=cfg.dumper, default_flow_style=False)
else:
return yaml.dump(data, Dumper=cfg.dumper, default_flow_style=False)
|
def function[yaml_dump_hook, parameter[cfg, text]]:
constant[
Dumps all the data into a YAML file.
]
variable[data] assign[=] call[name[cfg].config.dump, parameter[]]
if <ast.UnaryOp object at 0x7da20e957fd0> begin[:]
call[name[yaml].dump, parameter[name[data], name[cfg].fd]]
|
keyword[def] identifier[yaml_dump_hook] ( identifier[cfg] , identifier[text] : identifier[bool] = keyword[False] ):
literal[string]
identifier[data] = identifier[cfg] . identifier[config] . identifier[dump] ()
keyword[if] keyword[not] identifier[text] :
identifier[yaml] . identifier[dump] ( identifier[data] , identifier[cfg] . identifier[fd] , identifier[Dumper] = identifier[cfg] . identifier[dumper] , identifier[default_flow_style] = keyword[False] )
keyword[else] :
keyword[return] identifier[yaml] . identifier[dump] ( identifier[data] , identifier[Dumper] = identifier[cfg] . identifier[dumper] , identifier[default_flow_style] = keyword[False] )
|
def yaml_dump_hook(cfg, text: bool=False):
"""
Dumps all the data into a YAML file.
"""
data = cfg.config.dump()
if not text:
yaml.dump(data, cfg.fd, Dumper=cfg.dumper, default_flow_style=False) # depends on [control=['if'], data=[]]
else:
return yaml.dump(data, Dumper=cfg.dumper, default_flow_style=False)
|
def load_yaml(filename):
"""
Loads a YAML-formatted file.
"""
with open(filename) as f:
ydoc = yaml.safe_load(f.read())
return (ydoc, serialize_tojson(ydoc))
|
def function[load_yaml, parameter[filename]]:
constant[
Loads a YAML-formatted file.
]
with call[name[open], parameter[name[filename]]] begin[:]
variable[ydoc] assign[=] call[name[yaml].safe_load, parameter[call[name[f].read, parameter[]]]]
return[tuple[[<ast.Name object at 0x7da1b26ae860>, <ast.Call object at 0x7da1b26ac3d0>]]]
|
keyword[def] identifier[load_yaml] ( identifier[filename] ):
literal[string]
keyword[with] identifier[open] ( identifier[filename] ) keyword[as] identifier[f] :
identifier[ydoc] = identifier[yaml] . identifier[safe_load] ( identifier[f] . identifier[read] ())
keyword[return] ( identifier[ydoc] , identifier[serialize_tojson] ( identifier[ydoc] ))
|
def load_yaml(filename):
"""
Loads a YAML-formatted file.
"""
with open(filename) as f:
ydoc = yaml.safe_load(f.read()) # depends on [control=['with'], data=['f']]
return (ydoc, serialize_tojson(ydoc))
|
def findalliter(string, sub, regex=False, case_sensitive=False,
whole_word=False):
"""
Generator that finds all occurrences of ``sub`` in ``string``
:param string: string to parse
:param sub: string to search
:param regex: True to search using regex
:param case_sensitive: True to match case, False to ignore case
:param whole_word: True to returns only whole words
:return:
"""
if not sub:
return
if regex:
flags = re.MULTILINE
if not case_sensitive:
flags |= re.IGNORECASE
for val in re.finditer(sub, string, flags):
yield val.span()
else:
if not case_sensitive:
string = string.lower()
sub = sub.lower()
for val in finditer_noregex(string, sub, whole_word):
yield val, val + len(sub)
|
def function[findalliter, parameter[string, sub, regex, case_sensitive, whole_word]]:
constant[
Generator that finds all occurrences of ``sub`` in ``string``
:param string: string to parse
:param sub: string to search
:param regex: True to search using regex
:param case_sensitive: True to match case, False to ignore case
:param whole_word: True to returns only whole words
:return:
]
if <ast.UnaryOp object at 0x7da20c6c5270> begin[:]
return[None]
if name[regex] begin[:]
variable[flags] assign[=] name[re].MULTILINE
if <ast.UnaryOp object at 0x7da20c6c5f30> begin[:]
<ast.AugAssign object at 0x7da20c6c5210>
for taget[name[val]] in starred[call[name[re].finditer, parameter[name[sub], name[string], name[flags]]]] begin[:]
<ast.Yield object at 0x7da20c6c7e20>
|
keyword[def] identifier[findalliter] ( identifier[string] , identifier[sub] , identifier[regex] = keyword[False] , identifier[case_sensitive] = keyword[False] ,
identifier[whole_word] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[sub] :
keyword[return]
keyword[if] identifier[regex] :
identifier[flags] = identifier[re] . identifier[MULTILINE]
keyword[if] keyword[not] identifier[case_sensitive] :
identifier[flags] |= identifier[re] . identifier[IGNORECASE]
keyword[for] identifier[val] keyword[in] identifier[re] . identifier[finditer] ( identifier[sub] , identifier[string] , identifier[flags] ):
keyword[yield] identifier[val] . identifier[span] ()
keyword[else] :
keyword[if] keyword[not] identifier[case_sensitive] :
identifier[string] = identifier[string] . identifier[lower] ()
identifier[sub] = identifier[sub] . identifier[lower] ()
keyword[for] identifier[val] keyword[in] identifier[finditer_noregex] ( identifier[string] , identifier[sub] , identifier[whole_word] ):
keyword[yield] identifier[val] , identifier[val] + identifier[len] ( identifier[sub] )
|
def findalliter(string, sub, regex=False, case_sensitive=False, whole_word=False):
"""
Generator that finds all occurrences of ``sub`` in ``string``
:param string: string to parse
:param sub: string to search
:param regex: True to search using regex
:param case_sensitive: True to match case, False to ignore case
:param whole_word: True to returns only whole words
:return:
"""
if not sub:
return # depends on [control=['if'], data=[]]
if regex:
flags = re.MULTILINE
if not case_sensitive:
flags |= re.IGNORECASE # depends on [control=['if'], data=[]]
for val in re.finditer(sub, string, flags):
yield val.span() # depends on [control=['for'], data=['val']] # depends on [control=['if'], data=[]]
else:
if not case_sensitive:
string = string.lower()
sub = sub.lower() # depends on [control=['if'], data=[]]
for val in finditer_noregex(string, sub, whole_word):
yield (val, val + len(sub)) # depends on [control=['for'], data=['val']]
|
def qteConnectHook(self, hookName: str,
slot: (types.FunctionType, types.MethodType)):
"""
Connect the method or function ``slot`` to ``hookName``.
|Args|
* ``hookName`` (**str**): name of the hook.
* ``slot`` (**function**, **method**): the routine to execute
when the hook triggers.
|Returns|
* **None**
|Raises|
* **QtmacsArgumentError** if at least one argument has an invalid type.
"""
# Shorthand.
reg = self._qteRegistryHooks
if hookName in reg:
reg[hookName].append(slot)
else:
reg[hookName] = [slot]
|
def function[qteConnectHook, parameter[self, hookName, slot]]:
constant[
Connect the method or function ``slot`` to ``hookName``.
|Args|
* ``hookName`` (**str**): name of the hook.
* ``slot`` (**function**, **method**): the routine to execute
when the hook triggers.
|Returns|
* **None**
|Raises|
* **QtmacsArgumentError** if at least one argument has an invalid type.
]
variable[reg] assign[=] name[self]._qteRegistryHooks
if compare[name[hookName] in name[reg]] begin[:]
call[call[name[reg]][name[hookName]].append, parameter[name[slot]]]
|
keyword[def] identifier[qteConnectHook] ( identifier[self] , identifier[hookName] : identifier[str] ,
identifier[slot] :( identifier[types] . identifier[FunctionType] , identifier[types] . identifier[MethodType] )):
literal[string]
identifier[reg] = identifier[self] . identifier[_qteRegistryHooks]
keyword[if] identifier[hookName] keyword[in] identifier[reg] :
identifier[reg] [ identifier[hookName] ]. identifier[append] ( identifier[slot] )
keyword[else] :
identifier[reg] [ identifier[hookName] ]=[ identifier[slot] ]
|
def qteConnectHook(self, hookName: str, slot: (types.FunctionType, types.MethodType)):
"""
Connect the method or function ``slot`` to ``hookName``.
|Args|
* ``hookName`` (**str**): name of the hook.
* ``slot`` (**function**, **method**): the routine to execute
when the hook triggers.
|Returns|
* **None**
|Raises|
* **QtmacsArgumentError** if at least one argument has an invalid type.
"""
# Shorthand.
reg = self._qteRegistryHooks
if hookName in reg:
reg[hookName].append(slot) # depends on [control=['if'], data=['hookName', 'reg']]
else:
reg[hookName] = [slot]
|
def _onIncomingMessageReceived(self, conn, message):
"""
Callback for initial messages on incoming connections. Handles encryption, utility messages, and association of the connection with a Node.
Once this initial setup is done, the relevant connected callback is executed, and further messages are deferred to the onMessageReceived callback.
:param conn: connection object
:type conn: TcpConnection
:param message: received message
:type message: any
"""
if self._syncObj.encryptor and not conn.sendRandKey:
conn.sendRandKey = message
conn.recvRandKey = os.urandom(32)
conn.send(conn.recvRandKey)
return
# Utility messages
if isinstance(message, list):
done = False
try:
if message[0] == 'status':
conn.send(self._syncObj.getStatus())
done = True
elif message[0] == 'add':
self._syncObj.addNodeToCluster(message[1], callback = functools.partial(self._utilityCallback, conn = conn, cmd = 'ADD', arg = message[1]))
done = True
elif message[0] == 'remove':
if message[1] == self._selfNode.address:
conn.send('FAIL REMOVE ' + message[1])
else:
self._syncObj.removeNodeFromCluster(message[1], callback = functools.partial(self._utilityCallback, conn = conn, cmd = 'REMOVE', arg = message[1]))
done = True
elif message[0] == 'set_version':
self._syncObj.setCodeVersion(message[1], callback = functools.partial(self._utilityCallback, conn = conn, cmd = 'SET_VERSION', arg = str(message[1])))
done = True
except Exception as e:
conn.send(str(e))
done = True
if done:
return
# At this point, message should be either a node ID (i.e. address) or 'readonly'
node = self._nodeAddrToNode[message] if message in self._nodeAddrToNode else None
if node is None and message != 'readonly':
conn.disconnect()
self._unknownConnections.discard(conn)
return
readonly = node is None
if readonly:
nodeId = str(self._readonlyNodesCounter)
node = Node(nodeId)
self._readonlyNodes.add(node)
self._readonlyNodesCounter += 1
self._unknownConnections.discard(conn)
self._connections[node] = conn
conn.setOnMessageReceivedCallback(functools.partial(self._onMessageReceived, node))
if not readonly:
self._onNodeConnected(node)
else:
self._onReadonlyNodeConnected(node)
|
def function[_onIncomingMessageReceived, parameter[self, conn, message]]:
constant[
Callback for initial messages on incoming connections. Handles encryption, utility messages, and association of the connection with a Node.
Once this initial setup is done, the relevant connected callback is executed, and further messages are deferred to the onMessageReceived callback.
:param conn: connection object
:type conn: TcpConnection
:param message: received message
:type message: any
]
if <ast.BoolOp object at 0x7da18ede55a0> begin[:]
name[conn].sendRandKey assign[=] name[message]
name[conn].recvRandKey assign[=] call[name[os].urandom, parameter[constant[32]]]
call[name[conn].send, parameter[name[conn].recvRandKey]]
return[None]
if call[name[isinstance], parameter[name[message], name[list]]] begin[:]
variable[done] assign[=] constant[False]
<ast.Try object at 0x7da18ede7e20>
if name[done] begin[:]
return[None]
variable[node] assign[=] <ast.IfExp object at 0x7da18ede4190>
if <ast.BoolOp object at 0x7da18ede7bb0> begin[:]
call[name[conn].disconnect, parameter[]]
call[name[self]._unknownConnections.discard, parameter[name[conn]]]
return[None]
variable[readonly] assign[=] compare[name[node] is constant[None]]
if name[readonly] begin[:]
variable[nodeId] assign[=] call[name[str], parameter[name[self]._readonlyNodesCounter]]
variable[node] assign[=] call[name[Node], parameter[name[nodeId]]]
call[name[self]._readonlyNodes.add, parameter[name[node]]]
<ast.AugAssign object at 0x7da18ede7820>
call[name[self]._unknownConnections.discard, parameter[name[conn]]]
call[name[self]._connections][name[node]] assign[=] name[conn]
call[name[conn].setOnMessageReceivedCallback, parameter[call[name[functools].partial, parameter[name[self]._onMessageReceived, name[node]]]]]
if <ast.UnaryOp object at 0x7da18ede6da0> begin[:]
call[name[self]._onNodeConnected, parameter[name[node]]]
|
keyword[def] identifier[_onIncomingMessageReceived] ( identifier[self] , identifier[conn] , identifier[message] ):
literal[string]
keyword[if] identifier[self] . identifier[_syncObj] . identifier[encryptor] keyword[and] keyword[not] identifier[conn] . identifier[sendRandKey] :
identifier[conn] . identifier[sendRandKey] = identifier[message]
identifier[conn] . identifier[recvRandKey] = identifier[os] . identifier[urandom] ( literal[int] )
identifier[conn] . identifier[send] ( identifier[conn] . identifier[recvRandKey] )
keyword[return]
keyword[if] identifier[isinstance] ( identifier[message] , identifier[list] ):
identifier[done] = keyword[False]
keyword[try] :
keyword[if] identifier[message] [ literal[int] ]== literal[string] :
identifier[conn] . identifier[send] ( identifier[self] . identifier[_syncObj] . identifier[getStatus] ())
identifier[done] = keyword[True]
keyword[elif] identifier[message] [ literal[int] ]== literal[string] :
identifier[self] . identifier[_syncObj] . identifier[addNodeToCluster] ( identifier[message] [ literal[int] ], identifier[callback] = identifier[functools] . identifier[partial] ( identifier[self] . identifier[_utilityCallback] , identifier[conn] = identifier[conn] , identifier[cmd] = literal[string] , identifier[arg] = identifier[message] [ literal[int] ]))
identifier[done] = keyword[True]
keyword[elif] identifier[message] [ literal[int] ]== literal[string] :
keyword[if] identifier[message] [ literal[int] ]== identifier[self] . identifier[_selfNode] . identifier[address] :
identifier[conn] . identifier[send] ( literal[string] + identifier[message] [ literal[int] ])
keyword[else] :
identifier[self] . identifier[_syncObj] . identifier[removeNodeFromCluster] ( identifier[message] [ literal[int] ], identifier[callback] = identifier[functools] . identifier[partial] ( identifier[self] . identifier[_utilityCallback] , identifier[conn] = identifier[conn] , identifier[cmd] = literal[string] , identifier[arg] = identifier[message] [ literal[int] ]))
identifier[done] = keyword[True]
keyword[elif] identifier[message] [ literal[int] ]== literal[string] :
identifier[self] . identifier[_syncObj] . identifier[setCodeVersion] ( identifier[message] [ literal[int] ], identifier[callback] = identifier[functools] . identifier[partial] ( identifier[self] . identifier[_utilityCallback] , identifier[conn] = identifier[conn] , identifier[cmd] = literal[string] , identifier[arg] = identifier[str] ( identifier[message] [ literal[int] ])))
identifier[done] = keyword[True]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[conn] . identifier[send] ( identifier[str] ( identifier[e] ))
identifier[done] = keyword[True]
keyword[if] identifier[done] :
keyword[return]
identifier[node] = identifier[self] . identifier[_nodeAddrToNode] [ identifier[message] ] keyword[if] identifier[message] keyword[in] identifier[self] . identifier[_nodeAddrToNode] keyword[else] keyword[None]
keyword[if] identifier[node] keyword[is] keyword[None] keyword[and] identifier[message] != literal[string] :
identifier[conn] . identifier[disconnect] ()
identifier[self] . identifier[_unknownConnections] . identifier[discard] ( identifier[conn] )
keyword[return]
identifier[readonly] = identifier[node] keyword[is] keyword[None]
keyword[if] identifier[readonly] :
identifier[nodeId] = identifier[str] ( identifier[self] . identifier[_readonlyNodesCounter] )
identifier[node] = identifier[Node] ( identifier[nodeId] )
identifier[self] . identifier[_readonlyNodes] . identifier[add] ( identifier[node] )
identifier[self] . identifier[_readonlyNodesCounter] += literal[int]
identifier[self] . identifier[_unknownConnections] . identifier[discard] ( identifier[conn] )
identifier[self] . identifier[_connections] [ identifier[node] ]= identifier[conn]
identifier[conn] . identifier[setOnMessageReceivedCallback] ( identifier[functools] . identifier[partial] ( identifier[self] . identifier[_onMessageReceived] , identifier[node] ))
keyword[if] keyword[not] identifier[readonly] :
identifier[self] . identifier[_onNodeConnected] ( identifier[node] )
keyword[else] :
identifier[self] . identifier[_onReadonlyNodeConnected] ( identifier[node] )
|
def _onIncomingMessageReceived(self, conn, message):
"""
Callback for initial messages on incoming connections. Handles encryption, utility messages, and association of the connection with a Node.
Once this initial setup is done, the relevant connected callback is executed, and further messages are deferred to the onMessageReceived callback.
:param conn: connection object
:type conn: TcpConnection
:param message: received message
:type message: any
"""
if self._syncObj.encryptor and (not conn.sendRandKey):
conn.sendRandKey = message
conn.recvRandKey = os.urandom(32)
conn.send(conn.recvRandKey)
return # depends on [control=['if'], data=[]]
# Utility messages
if isinstance(message, list):
done = False
try:
if message[0] == 'status':
conn.send(self._syncObj.getStatus())
done = True # depends on [control=['if'], data=[]]
elif message[0] == 'add':
self._syncObj.addNodeToCluster(message[1], callback=functools.partial(self._utilityCallback, conn=conn, cmd='ADD', arg=message[1]))
done = True # depends on [control=['if'], data=[]]
elif message[0] == 'remove':
if message[1] == self._selfNode.address:
conn.send('FAIL REMOVE ' + message[1]) # depends on [control=['if'], data=[]]
else:
self._syncObj.removeNodeFromCluster(message[1], callback=functools.partial(self._utilityCallback, conn=conn, cmd='REMOVE', arg=message[1]))
done = True # depends on [control=['if'], data=[]]
elif message[0] == 'set_version':
self._syncObj.setCodeVersion(message[1], callback=functools.partial(self._utilityCallback, conn=conn, cmd='SET_VERSION', arg=str(message[1])))
done = True # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except Exception as e:
conn.send(str(e))
done = True # depends on [control=['except'], data=['e']]
if done:
return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# At this point, message should be either a node ID (i.e. address) or 'readonly'
node = self._nodeAddrToNode[message] if message in self._nodeAddrToNode else None
if node is None and message != 'readonly':
conn.disconnect()
self._unknownConnections.discard(conn)
return # depends on [control=['if'], data=[]]
readonly = node is None
if readonly:
nodeId = str(self._readonlyNodesCounter)
node = Node(nodeId)
self._readonlyNodes.add(node)
self._readonlyNodesCounter += 1 # depends on [control=['if'], data=[]]
self._unknownConnections.discard(conn)
self._connections[node] = conn
conn.setOnMessageReceivedCallback(functools.partial(self._onMessageReceived, node))
if not readonly:
self._onNodeConnected(node) # depends on [control=['if'], data=[]]
else:
self._onReadonlyNodeConnected(node)
|
def scale_images_0to1(slice1, slice2):
"""Scale the two images to [0, 1] based on min/max from both."""
min_value = max(slice1.min(), slice2.min())
max_value = max(slice1.max(), slice2.max())
slice1 = (slice1 - min_value) / max_value
slice2 = (slice2 - min_value) / max_value
return slice1, slice2
|
def function[scale_images_0to1, parameter[slice1, slice2]]:
constant[Scale the two images to [0, 1] based on min/max from both.]
variable[min_value] assign[=] call[name[max], parameter[call[name[slice1].min, parameter[]], call[name[slice2].min, parameter[]]]]
variable[max_value] assign[=] call[name[max], parameter[call[name[slice1].max, parameter[]], call[name[slice2].max, parameter[]]]]
variable[slice1] assign[=] binary_operation[binary_operation[name[slice1] - name[min_value]] / name[max_value]]
variable[slice2] assign[=] binary_operation[binary_operation[name[slice2] - name[min_value]] / name[max_value]]
return[tuple[[<ast.Name object at 0x7da1b26ac910>, <ast.Name object at 0x7da1b26ac2b0>]]]
|
keyword[def] identifier[scale_images_0to1] ( identifier[slice1] , identifier[slice2] ):
literal[string]
identifier[min_value] = identifier[max] ( identifier[slice1] . identifier[min] (), identifier[slice2] . identifier[min] ())
identifier[max_value] = identifier[max] ( identifier[slice1] . identifier[max] (), identifier[slice2] . identifier[max] ())
identifier[slice1] =( identifier[slice1] - identifier[min_value] )/ identifier[max_value]
identifier[slice2] =( identifier[slice2] - identifier[min_value] )/ identifier[max_value]
keyword[return] identifier[slice1] , identifier[slice2]
|
def scale_images_0to1(slice1, slice2):
"""Scale the two images to [0, 1] based on min/max from both."""
min_value = max(slice1.min(), slice2.min())
max_value = max(slice1.max(), slice2.max())
slice1 = (slice1 - min_value) / max_value
slice2 = (slice2 - min_value) / max_value
return (slice1, slice2)
|
def _get_data_from_list_of_lists(source, fields='*', first_row=0, count=-1, schema=None):
""" Helper function for _get_data that handles lists of lists. """
if schema is None:
schema = google.datalab.bigquery.Schema.from_data(source)
fields = get_field_list(fields, schema)
gen = source[first_row:first_row + count] if count >= 0 else source
cols = [schema.find(name) for name in fields]
rows = [{'c': [{'v': row[i]} for i in cols]} for row in gen]
return {'cols': _get_cols(fields, schema), 'rows': rows}, len(source)
|
def function[_get_data_from_list_of_lists, parameter[source, fields, first_row, count, schema]]:
constant[ Helper function for _get_data that handles lists of lists. ]
if compare[name[schema] is constant[None]] begin[:]
variable[schema] assign[=] call[name[google].datalab.bigquery.Schema.from_data, parameter[name[source]]]
variable[fields] assign[=] call[name[get_field_list], parameter[name[fields], name[schema]]]
variable[gen] assign[=] <ast.IfExp object at 0x7da204621690>
variable[cols] assign[=] <ast.ListComp object at 0x7da20e9575e0>
variable[rows] assign[=] <ast.ListComp object at 0x7da2044c2b00>
return[tuple[[<ast.Dict object at 0x7da20c990a90>, <ast.Call object at 0x7da20c991750>]]]
|
keyword[def] identifier[_get_data_from_list_of_lists] ( identifier[source] , identifier[fields] = literal[string] , identifier[first_row] = literal[int] , identifier[count] =- literal[int] , identifier[schema] = keyword[None] ):
literal[string]
keyword[if] identifier[schema] keyword[is] keyword[None] :
identifier[schema] = identifier[google] . identifier[datalab] . identifier[bigquery] . identifier[Schema] . identifier[from_data] ( identifier[source] )
identifier[fields] = identifier[get_field_list] ( identifier[fields] , identifier[schema] )
identifier[gen] = identifier[source] [ identifier[first_row] : identifier[first_row] + identifier[count] ] keyword[if] identifier[count] >= literal[int] keyword[else] identifier[source]
identifier[cols] =[ identifier[schema] . identifier[find] ( identifier[name] ) keyword[for] identifier[name] keyword[in] identifier[fields] ]
identifier[rows] =[{ literal[string] :[{ literal[string] : identifier[row] [ identifier[i] ]} keyword[for] identifier[i] keyword[in] identifier[cols] ]} keyword[for] identifier[row] keyword[in] identifier[gen] ]
keyword[return] { literal[string] : identifier[_get_cols] ( identifier[fields] , identifier[schema] ), literal[string] : identifier[rows] }, identifier[len] ( identifier[source] )
|
def _get_data_from_list_of_lists(source, fields='*', first_row=0, count=-1, schema=None):
""" Helper function for _get_data that handles lists of lists. """
if schema is None:
schema = google.datalab.bigquery.Schema.from_data(source) # depends on [control=['if'], data=['schema']]
fields = get_field_list(fields, schema)
gen = source[first_row:first_row + count] if count >= 0 else source
cols = [schema.find(name) for name in fields]
rows = [{'c': [{'v': row[i]} for i in cols]} for row in gen]
return ({'cols': _get_cols(fields, schema), 'rows': rows}, len(source))
|
def cache_control(max_age=None, private=False, public=False, s_maxage=None,
must_revalidate=False, proxy_revalidate=False, no_cache=False,
no_store=False):
"""Generate the value for a Cache-Control header.
Example:
>>> from rhino.http import cache_control as cc
>>> from datetime import timedelta
>>> cc(public=1, max_age=3600)
'public, max-age=3600'
>>> cc(public=1, max_age=timedelta(hours=1))
'public, max-age=3600'
>>> cc(private=True, no_cache=True, no_store=True)
'private, no-cache, no-store'
"""
if all([private, public]):
raise ValueError("'private' and 'public' are mutually exclusive")
if isinstance(max_age, timedelta):
max_age = int(total_seconds(max_age))
if isinstance(s_maxage, timedelta):
s_maxage = int(total_seconds(s_maxage))
directives = []
if public: directives.append('public')
if private: directives.append('private')
if max_age is not None: directives.append('max-age=%d' % max_age)
if s_maxage is not None: directives.append('s-maxage=%d' % s_maxage)
if no_cache: directives.append('no-cache')
if no_store: directives.append('no-store')
if must_revalidate: directives.append('must-revalidate')
if proxy_revalidate: directives.append('proxy-revalidate')
return ', '.join(directives)
|
def function[cache_control, parameter[max_age, private, public, s_maxage, must_revalidate, proxy_revalidate, no_cache, no_store]]:
constant[Generate the value for a Cache-Control header.
Example:
>>> from rhino.http import cache_control as cc
>>> from datetime import timedelta
>>> cc(public=1, max_age=3600)
'public, max-age=3600'
>>> cc(public=1, max_age=timedelta(hours=1))
'public, max-age=3600'
>>> cc(private=True, no_cache=True, no_store=True)
'private, no-cache, no-store'
]
if call[name[all], parameter[list[[<ast.Name object at 0x7da18f00d9c0>, <ast.Name object at 0x7da18f00e650>]]]] begin[:]
<ast.Raise object at 0x7da18f00e290>
if call[name[isinstance], parameter[name[max_age], name[timedelta]]] begin[:]
variable[max_age] assign[=] call[name[int], parameter[call[name[total_seconds], parameter[name[max_age]]]]]
if call[name[isinstance], parameter[name[s_maxage], name[timedelta]]] begin[:]
variable[s_maxage] assign[=] call[name[int], parameter[call[name[total_seconds], parameter[name[s_maxage]]]]]
variable[directives] assign[=] list[[]]
if name[public] begin[:]
call[name[directives].append, parameter[constant[public]]]
if name[private] begin[:]
call[name[directives].append, parameter[constant[private]]]
if compare[name[max_age] is_not constant[None]] begin[:]
call[name[directives].append, parameter[binary_operation[constant[max-age=%d] <ast.Mod object at 0x7da2590d6920> name[max_age]]]]
if compare[name[s_maxage] is_not constant[None]] begin[:]
call[name[directives].append, parameter[binary_operation[constant[s-maxage=%d] <ast.Mod object at 0x7da2590d6920> name[s_maxage]]]]
if name[no_cache] begin[:]
call[name[directives].append, parameter[constant[no-cache]]]
if name[no_store] begin[:]
call[name[directives].append, parameter[constant[no-store]]]
if name[must_revalidate] begin[:]
call[name[directives].append, parameter[constant[must-revalidate]]]
if name[proxy_revalidate] begin[:]
call[name[directives].append, parameter[constant[proxy-revalidate]]]
return[call[constant[, ].join, parameter[name[directives]]]]
|
keyword[def] identifier[cache_control] ( identifier[max_age] = keyword[None] , identifier[private] = keyword[False] , identifier[public] = keyword[False] , identifier[s_maxage] = keyword[None] ,
identifier[must_revalidate] = keyword[False] , identifier[proxy_revalidate] = keyword[False] , identifier[no_cache] = keyword[False] ,
identifier[no_store] = keyword[False] ):
literal[string]
keyword[if] identifier[all] ([ identifier[private] , identifier[public] ]):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[max_age] , identifier[timedelta] ):
identifier[max_age] = identifier[int] ( identifier[total_seconds] ( identifier[max_age] ))
keyword[if] identifier[isinstance] ( identifier[s_maxage] , identifier[timedelta] ):
identifier[s_maxage] = identifier[int] ( identifier[total_seconds] ( identifier[s_maxage] ))
identifier[directives] =[]
keyword[if] identifier[public] : identifier[directives] . identifier[append] ( literal[string] )
keyword[if] identifier[private] : identifier[directives] . identifier[append] ( literal[string] )
keyword[if] identifier[max_age] keyword[is] keyword[not] keyword[None] : identifier[directives] . identifier[append] ( literal[string] % identifier[max_age] )
keyword[if] identifier[s_maxage] keyword[is] keyword[not] keyword[None] : identifier[directives] . identifier[append] ( literal[string] % identifier[s_maxage] )
keyword[if] identifier[no_cache] : identifier[directives] . identifier[append] ( literal[string] )
keyword[if] identifier[no_store] : identifier[directives] . identifier[append] ( literal[string] )
keyword[if] identifier[must_revalidate] : identifier[directives] . identifier[append] ( literal[string] )
keyword[if] identifier[proxy_revalidate] : identifier[directives] . identifier[append] ( literal[string] )
keyword[return] literal[string] . identifier[join] ( identifier[directives] )
|
def cache_control(max_age=None, private=False, public=False, s_maxage=None, must_revalidate=False, proxy_revalidate=False, no_cache=False, no_store=False):
"""Generate the value for a Cache-Control header.
Example:
>>> from rhino.http import cache_control as cc
>>> from datetime import timedelta
>>> cc(public=1, max_age=3600)
'public, max-age=3600'
>>> cc(public=1, max_age=timedelta(hours=1))
'public, max-age=3600'
>>> cc(private=True, no_cache=True, no_store=True)
'private, no-cache, no-store'
"""
if all([private, public]):
raise ValueError("'private' and 'public' are mutually exclusive") # depends on [control=['if'], data=[]]
if isinstance(max_age, timedelta):
max_age = int(total_seconds(max_age)) # depends on [control=['if'], data=[]]
if isinstance(s_maxage, timedelta):
s_maxage = int(total_seconds(s_maxage)) # depends on [control=['if'], data=[]]
directives = []
if public:
directives.append('public') # depends on [control=['if'], data=[]]
if private:
directives.append('private') # depends on [control=['if'], data=[]]
if max_age is not None:
directives.append('max-age=%d' % max_age) # depends on [control=['if'], data=['max_age']]
if s_maxage is not None:
directives.append('s-maxage=%d' % s_maxage) # depends on [control=['if'], data=['s_maxage']]
if no_cache:
directives.append('no-cache') # depends on [control=['if'], data=[]]
if no_store:
directives.append('no-store') # depends on [control=['if'], data=[]]
if must_revalidate:
directives.append('must-revalidate') # depends on [control=['if'], data=[]]
if proxy_revalidate:
directives.append('proxy-revalidate') # depends on [control=['if'], data=[]]
return ', '.join(directives)
|
def spawn(self, parameters=None, arguments=None, stderr=None, timeout=None, short_option_prefix="-", long_option_prefix="--"):
"""
Spawn the process defined in `cmd`
parameters is converted to options the short and long option prefixes
if a list is given as the value, the parameter is repeated with each
value
If timeout is set the spawn will block until the process returns or
the timeout expires.
:param parameters: optional parameters
:param arguments: positional arguments
:param stderr: where to redirect stderr to
:param timeout: timeout for short lived process
:param long_option_prefix: option prefix, default -
:param short_option_prefix: long option prefix, default --
:return: spawned process
"""
stderr = stderr or self.stderr
cmd = self.bake(self._check_cmd(), parameters, arguments, short_option_prefix, long_option_prefix)
log.debug("Spawning command: {0}", subprocess.list2cmdline(cmd))
try:
process = subprocess.Popen(cmd, stderr=stderr, stdout=subprocess.PIPE)
except (OSError, IOError) as err:
raise StreamError("Failed to start process: {0} ({1})".format(self._check_cmd(), str(err)))
if timeout:
elapsed = 0
while elapsed < timeout and not process.poll():
time.sleep(0.25)
elapsed += 0.25
# kill after the timeout has expired and the process still hasn't ended
if not process.poll():
try:
log.debug("Process timeout expired ({0}s), killing process".format(timeout))
process.kill()
except Exception:
pass
process.wait()
return process
|
def function[spawn, parameter[self, parameters, arguments, stderr, timeout, short_option_prefix, long_option_prefix]]:
constant[
Spawn the process defined in `cmd`
parameters is converted to options the short and long option prefixes
if a list is given as the value, the parameter is repeated with each
value
If timeout is set the spawn will block until the process returns or
the timeout expires.
:param parameters: optional parameters
:param arguments: positional arguments
:param stderr: where to redirect stderr to
:param timeout: timeout for short lived process
:param long_option_prefix: option prefix, default -
:param short_option_prefix: long option prefix, default --
:return: spawned process
]
variable[stderr] assign[=] <ast.BoolOp object at 0x7da2046223e0>
variable[cmd] assign[=] call[name[self].bake, parameter[call[name[self]._check_cmd, parameter[]], name[parameters], name[arguments], name[short_option_prefix], name[long_option_prefix]]]
call[name[log].debug, parameter[constant[Spawning command: {0}], call[name[subprocess].list2cmdline, parameter[name[cmd]]]]]
<ast.Try object at 0x7da204620af0>
if name[timeout] begin[:]
variable[elapsed] assign[=] constant[0]
while <ast.BoolOp object at 0x7da207f9bb50> begin[:]
call[name[time].sleep, parameter[constant[0.25]]]
<ast.AugAssign object at 0x7da1b23462c0>
if <ast.UnaryOp object at 0x7da204567a60> begin[:]
<ast.Try object at 0x7da204566da0>
call[name[process].wait, parameter[]]
return[name[process]]
|
keyword[def] identifier[spawn] ( identifier[self] , identifier[parameters] = keyword[None] , identifier[arguments] = keyword[None] , identifier[stderr] = keyword[None] , identifier[timeout] = keyword[None] , identifier[short_option_prefix] = literal[string] , identifier[long_option_prefix] = literal[string] ):
literal[string]
identifier[stderr] = identifier[stderr] keyword[or] identifier[self] . identifier[stderr]
identifier[cmd] = identifier[self] . identifier[bake] ( identifier[self] . identifier[_check_cmd] (), identifier[parameters] , identifier[arguments] , identifier[short_option_prefix] , identifier[long_option_prefix] )
identifier[log] . identifier[debug] ( literal[string] , identifier[subprocess] . identifier[list2cmdline] ( identifier[cmd] ))
keyword[try] :
identifier[process] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] , identifier[stderr] = identifier[stderr] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] )
keyword[except] ( identifier[OSError] , identifier[IOError] ) keyword[as] identifier[err] :
keyword[raise] identifier[StreamError] ( literal[string] . identifier[format] ( identifier[self] . identifier[_check_cmd] (), identifier[str] ( identifier[err] )))
keyword[if] identifier[timeout] :
identifier[elapsed] = literal[int]
keyword[while] identifier[elapsed] < identifier[timeout] keyword[and] keyword[not] identifier[process] . identifier[poll] ():
identifier[time] . identifier[sleep] ( literal[int] )
identifier[elapsed] += literal[int]
keyword[if] keyword[not] identifier[process] . identifier[poll] ():
keyword[try] :
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[timeout] ))
identifier[process] . identifier[kill] ()
keyword[except] identifier[Exception] :
keyword[pass]
identifier[process] . identifier[wait] ()
keyword[return] identifier[process]
|
def spawn(self, parameters=None, arguments=None, stderr=None, timeout=None, short_option_prefix='-', long_option_prefix='--'):
"""
Spawn the process defined in `cmd`
parameters is converted to options the short and long option prefixes
if a list is given as the value, the parameter is repeated with each
value
If timeout is set the spawn will block until the process returns or
the timeout expires.
:param parameters: optional parameters
:param arguments: positional arguments
:param stderr: where to redirect stderr to
:param timeout: timeout for short lived process
:param long_option_prefix: option prefix, default -
:param short_option_prefix: long option prefix, default --
:return: spawned process
"""
stderr = stderr or self.stderr
cmd = self.bake(self._check_cmd(), parameters, arguments, short_option_prefix, long_option_prefix)
log.debug('Spawning command: {0}', subprocess.list2cmdline(cmd))
try:
process = subprocess.Popen(cmd, stderr=stderr, stdout=subprocess.PIPE) # depends on [control=['try'], data=[]]
except (OSError, IOError) as err:
raise StreamError('Failed to start process: {0} ({1})'.format(self._check_cmd(), str(err))) # depends on [control=['except'], data=['err']]
if timeout:
elapsed = 0
while elapsed < timeout and (not process.poll()):
time.sleep(0.25)
elapsed += 0.25 # depends on [control=['while'], data=[]]
# kill after the timeout has expired and the process still hasn't ended
if not process.poll():
try:
log.debug('Process timeout expired ({0}s), killing process'.format(timeout))
process.kill() # depends on [control=['try'], data=[]]
except Exception:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
process.wait() # depends on [control=['if'], data=[]]
return process
|
def disconnect(self):
"""Disconnect from the card."""
# when __del__() is invoked in response to a module being deleted,
# e.g., when execution of the program is done, other globals referenced
# by the __del__() method may already have been deleted.
# this causes CardConnection.disconnect to except with a TypeError
try:
CardConnection.disconnect(self)
except TypeError:
pass
if None != self.hcard:
hresult = SCardDisconnect(self.hcard, self.disposition)
if hresult != 0:
raise CardConnectionException(
'Failed to disconnect: ' + \
SCardGetErrorMessage(hresult))
self.hcard = None
|
def function[disconnect, parameter[self]]:
constant[Disconnect from the card.]
<ast.Try object at 0x7da20c76fa30>
if compare[constant[None] not_equal[!=] name[self].hcard] begin[:]
variable[hresult] assign[=] call[name[SCardDisconnect], parameter[name[self].hcard, name[self].disposition]]
if compare[name[hresult] not_equal[!=] constant[0]] begin[:]
<ast.Raise object at 0x7da1b1c38b80>
name[self].hcard assign[=] constant[None]
|
keyword[def] identifier[disconnect] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[CardConnection] . identifier[disconnect] ( identifier[self] )
keyword[except] identifier[TypeError] :
keyword[pass]
keyword[if] keyword[None] != identifier[self] . identifier[hcard] :
identifier[hresult] = identifier[SCardDisconnect] ( identifier[self] . identifier[hcard] , identifier[self] . identifier[disposition] )
keyword[if] identifier[hresult] != literal[int] :
keyword[raise] identifier[CardConnectionException] (
literal[string] + identifier[SCardGetErrorMessage] ( identifier[hresult] ))
identifier[self] . identifier[hcard] = keyword[None]
|
def disconnect(self):
"""Disconnect from the card."""
# when __del__() is invoked in response to a module being deleted,
# e.g., when execution of the program is done, other globals referenced
# by the __del__() method may already have been deleted.
# this causes CardConnection.disconnect to except with a TypeError
try:
CardConnection.disconnect(self) # depends on [control=['try'], data=[]]
except TypeError:
pass # depends on [control=['except'], data=[]]
if None != self.hcard:
hresult = SCardDisconnect(self.hcard, self.disposition)
if hresult != 0:
raise CardConnectionException('Failed to disconnect: ' + SCardGetErrorMessage(hresult)) # depends on [control=['if'], data=['hresult']]
self.hcard = None # depends on [control=['if'], data=[]]
|
def image_summary(predictions, targets, hparams):
"""Reshapes predictions and passes it to tensorboard.
Args:
predictions : The predicted image (logits).
targets : The ground truth.
hparams: model hparams.
Returns:
summary_proto: containing the summary images.
weights: A Tensor of zeros of the same shape as predictions.
"""
del hparams
results = tf.cast(tf.argmax(predictions, axis=-1), tf.uint8)
gold = tf.cast(targets, tf.uint8)
summary1 = tf.summary.image("prediction", results, max_outputs=2)
summary2 = tf.summary.image("data", gold, max_outputs=2)
summary = tf.summary.merge([summary1, summary2])
return summary, tf.zeros_like(predictions)
|
def function[image_summary, parameter[predictions, targets, hparams]]:
constant[Reshapes predictions and passes it to tensorboard.
Args:
predictions : The predicted image (logits).
targets : The ground truth.
hparams: model hparams.
Returns:
summary_proto: containing the summary images.
weights: A Tensor of zeros of the same shape as predictions.
]
<ast.Delete object at 0x7da20c6e64d0>
variable[results] assign[=] call[name[tf].cast, parameter[call[name[tf].argmax, parameter[name[predictions]]], name[tf].uint8]]
variable[gold] assign[=] call[name[tf].cast, parameter[name[targets], name[tf].uint8]]
variable[summary1] assign[=] call[name[tf].summary.image, parameter[constant[prediction], name[results]]]
variable[summary2] assign[=] call[name[tf].summary.image, parameter[constant[data], name[gold]]]
variable[summary] assign[=] call[name[tf].summary.merge, parameter[list[[<ast.Name object at 0x7da20c6e4670>, <ast.Name object at 0x7da20c6e4fd0>]]]]
return[tuple[[<ast.Name object at 0x7da20c6e5990>, <ast.Call object at 0x7da20c6e45e0>]]]
|
keyword[def] identifier[image_summary] ( identifier[predictions] , identifier[targets] , identifier[hparams] ):
literal[string]
keyword[del] identifier[hparams]
identifier[results] = identifier[tf] . identifier[cast] ( identifier[tf] . identifier[argmax] ( identifier[predictions] , identifier[axis] =- literal[int] ), identifier[tf] . identifier[uint8] )
identifier[gold] = identifier[tf] . identifier[cast] ( identifier[targets] , identifier[tf] . identifier[uint8] )
identifier[summary1] = identifier[tf] . identifier[summary] . identifier[image] ( literal[string] , identifier[results] , identifier[max_outputs] = literal[int] )
identifier[summary2] = identifier[tf] . identifier[summary] . identifier[image] ( literal[string] , identifier[gold] , identifier[max_outputs] = literal[int] )
identifier[summary] = identifier[tf] . identifier[summary] . identifier[merge] ([ identifier[summary1] , identifier[summary2] ])
keyword[return] identifier[summary] , identifier[tf] . identifier[zeros_like] ( identifier[predictions] )
|
def image_summary(predictions, targets, hparams):
"""Reshapes predictions and passes it to tensorboard.
Args:
predictions : The predicted image (logits).
targets : The ground truth.
hparams: model hparams.
Returns:
summary_proto: containing the summary images.
weights: A Tensor of zeros of the same shape as predictions.
"""
del hparams
results = tf.cast(tf.argmax(predictions, axis=-1), tf.uint8)
gold = tf.cast(targets, tf.uint8)
summary1 = tf.summary.image('prediction', results, max_outputs=2)
summary2 = tf.summary.image('data', gold, max_outputs=2)
summary = tf.summary.merge([summary1, summary2])
return (summary, tf.zeros_like(predictions))
|
def bind_and_listen_on_posix_socket(socket_name, accept_callback):
"""
:param accept_callback: Called with `PosixSocketConnection` when a new
connection is established.
"""
assert socket_name is None or isinstance(socket_name, six.text_type)
assert callable(accept_callback)
# Py2 uses 0027 and Py3 uses 0o027, but both know
# how to create the right value from the string '0027'.
old_umask = os.umask(int('0027', 8))
# Bind socket.
socket_name, socket = _bind_posix_socket(socket_name)
_ = os.umask(old_umask)
# Listen on socket.
socket.listen(0)
def _accept_cb():
connection, client_address = socket.accept()
# Note: We don't have to put this socket in non blocking mode.
# This can cause crashes when sending big packets on OS X.
posix_connection = PosixSocketConnection(connection)
accept_callback(posix_connection)
get_event_loop().add_reader(socket.fileno(), _accept_cb)
logger.info('Listening on %r.' % socket_name)
return socket_name
|
def function[bind_and_listen_on_posix_socket, parameter[socket_name, accept_callback]]:
constant[
:param accept_callback: Called with `PosixSocketConnection` when a new
connection is established.
]
assert[<ast.BoolOp object at 0x7da20e957d60>]
assert[call[name[callable], parameter[name[accept_callback]]]]
variable[old_umask] assign[=] call[name[os].umask, parameter[call[name[int], parameter[constant[0027], constant[8]]]]]
<ast.Tuple object at 0x7da20e955600> assign[=] call[name[_bind_posix_socket], parameter[name[socket_name]]]
variable[_] assign[=] call[name[os].umask, parameter[name[old_umask]]]
call[name[socket].listen, parameter[constant[0]]]
def function[_accept_cb, parameter[]]:
<ast.Tuple object at 0x7da20e962cb0> assign[=] call[name[socket].accept, parameter[]]
variable[posix_connection] assign[=] call[name[PosixSocketConnection], parameter[name[connection]]]
call[name[accept_callback], parameter[name[posix_connection]]]
call[call[name[get_event_loop], parameter[]].add_reader, parameter[call[name[socket].fileno, parameter[]], name[_accept_cb]]]
call[name[logger].info, parameter[binary_operation[constant[Listening on %r.] <ast.Mod object at 0x7da2590d6920> name[socket_name]]]]
return[name[socket_name]]
|
keyword[def] identifier[bind_and_listen_on_posix_socket] ( identifier[socket_name] , identifier[accept_callback] ):
literal[string]
keyword[assert] identifier[socket_name] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[socket_name] , identifier[six] . identifier[text_type] )
keyword[assert] identifier[callable] ( identifier[accept_callback] )
identifier[old_umask] = identifier[os] . identifier[umask] ( identifier[int] ( literal[string] , literal[int] ))
identifier[socket_name] , identifier[socket] = identifier[_bind_posix_socket] ( identifier[socket_name] )
identifier[_] = identifier[os] . identifier[umask] ( identifier[old_umask] )
identifier[socket] . identifier[listen] ( literal[int] )
keyword[def] identifier[_accept_cb] ():
identifier[connection] , identifier[client_address] = identifier[socket] . identifier[accept] ()
identifier[posix_connection] = identifier[PosixSocketConnection] ( identifier[connection] )
identifier[accept_callback] ( identifier[posix_connection] )
identifier[get_event_loop] (). identifier[add_reader] ( identifier[socket] . identifier[fileno] (), identifier[_accept_cb] )
identifier[logger] . identifier[info] ( literal[string] % identifier[socket_name] )
keyword[return] identifier[socket_name]
|
def bind_and_listen_on_posix_socket(socket_name, accept_callback):
"""
:param accept_callback: Called with `PosixSocketConnection` when a new
connection is established.
"""
assert socket_name is None or isinstance(socket_name, six.text_type)
assert callable(accept_callback)
# Py2 uses 0027 and Py3 uses 0o027, but both know
# how to create the right value from the string '0027'.
old_umask = os.umask(int('0027', 8))
# Bind socket.
(socket_name, socket) = _bind_posix_socket(socket_name)
_ = os.umask(old_umask)
# Listen on socket.
socket.listen(0)
def _accept_cb():
(connection, client_address) = socket.accept()
# Note: We don't have to put this socket in non blocking mode.
# This can cause crashes when sending big packets on OS X.
posix_connection = PosixSocketConnection(connection)
accept_callback(posix_connection)
get_event_loop().add_reader(socket.fileno(), _accept_cb)
logger.info('Listening on %r.' % socket_name)
return socket_name
|
def preview(self, when=timezone.now(), **kwargs):
"""Preview transactions, but don't actually save changes to list."""
return self.operate_on(when=when, apply=False, **kwargs)
|
def function[preview, parameter[self, when]]:
constant[Preview transactions, but don't actually save changes to list.]
return[call[name[self].operate_on, parameter[]]]
|
keyword[def] identifier[preview] ( identifier[self] , identifier[when] = identifier[timezone] . identifier[now] (),** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[operate_on] ( identifier[when] = identifier[when] , identifier[apply] = keyword[False] ,** identifier[kwargs] )
|
def preview(self, when=timezone.now(), **kwargs):
"""Preview transactions, but don't actually save changes to list."""
return self.operate_on(when=when, apply=False, **kwargs)
|
def delete(self, request, customer_uuid):
"""
Handle DELETE request - handle unlinking learner.
Arguments:
request (django.http.request.HttpRequest): Request instance
customer_uuid (str): Enterprise Customer UUID
Returns:
django.http.response.HttpResponse: HttpResponse
"""
# TODO: pylint acts stupid - find a way around it without suppressing
enterprise_customer = EnterpriseCustomer.objects.get(uuid=customer_uuid) # pylint: disable=no-member
email_to_unlink = request.GET["unlink_email"]
try:
EnterpriseCustomerUser.objects.unlink_user(
enterprise_customer=enterprise_customer, user_email=email_to_unlink
)
except (EnterpriseCustomerUser.DoesNotExist, PendingEnterpriseCustomerUser.DoesNotExist):
message = _("Email {email} is not associated with Enterprise "
"Customer {ec_name}").format(
email=email_to_unlink, ec_name=enterprise_customer.name)
return HttpResponse(message, content_type="application/json", status=404)
return HttpResponse(
json.dumps({}),
content_type="application/json"
)
|
def function[delete, parameter[self, request, customer_uuid]]:
constant[
Handle DELETE request - handle unlinking learner.
Arguments:
request (django.http.request.HttpRequest): Request instance
customer_uuid (str): Enterprise Customer UUID
Returns:
django.http.response.HttpResponse: HttpResponse
]
variable[enterprise_customer] assign[=] call[name[EnterpriseCustomer].objects.get, parameter[]]
variable[email_to_unlink] assign[=] call[name[request].GET][constant[unlink_email]]
<ast.Try object at 0x7da1b0051630>
return[call[name[HttpResponse], parameter[call[name[json].dumps, parameter[dictionary[[], []]]]]]]
|
keyword[def] identifier[delete] ( identifier[self] , identifier[request] , identifier[customer_uuid] ):
literal[string]
identifier[enterprise_customer] = identifier[EnterpriseCustomer] . identifier[objects] . identifier[get] ( identifier[uuid] = identifier[customer_uuid] )
identifier[email_to_unlink] = identifier[request] . identifier[GET] [ literal[string] ]
keyword[try] :
identifier[EnterpriseCustomerUser] . identifier[objects] . identifier[unlink_user] (
identifier[enterprise_customer] = identifier[enterprise_customer] , identifier[user_email] = identifier[email_to_unlink]
)
keyword[except] ( identifier[EnterpriseCustomerUser] . identifier[DoesNotExist] , identifier[PendingEnterpriseCustomerUser] . identifier[DoesNotExist] ):
identifier[message] = identifier[_] ( literal[string]
literal[string] ). identifier[format] (
identifier[email] = identifier[email_to_unlink] , identifier[ec_name] = identifier[enterprise_customer] . identifier[name] )
keyword[return] identifier[HttpResponse] ( identifier[message] , identifier[content_type] = literal[string] , identifier[status] = literal[int] )
keyword[return] identifier[HttpResponse] (
identifier[json] . identifier[dumps] ({}),
identifier[content_type] = literal[string]
)
|
def delete(self, request, customer_uuid):
"""
Handle DELETE request - handle unlinking learner.
Arguments:
request (django.http.request.HttpRequest): Request instance
customer_uuid (str): Enterprise Customer UUID
Returns:
django.http.response.HttpResponse: HttpResponse
"""
# TODO: pylint acts stupid - find a way around it without suppressing
enterprise_customer = EnterpriseCustomer.objects.get(uuid=customer_uuid) # pylint: disable=no-member
email_to_unlink = request.GET['unlink_email']
try:
EnterpriseCustomerUser.objects.unlink_user(enterprise_customer=enterprise_customer, user_email=email_to_unlink) # depends on [control=['try'], data=[]]
except (EnterpriseCustomerUser.DoesNotExist, PendingEnterpriseCustomerUser.DoesNotExist):
message = _('Email {email} is not associated with Enterprise Customer {ec_name}').format(email=email_to_unlink, ec_name=enterprise_customer.name)
return HttpResponse(message, content_type='application/json', status=404) # depends on [control=['except'], data=[]]
return HttpResponse(json.dumps({}), content_type='application/json')
|
def get_sstable_data_files(self, ks, table):
"""
Read sstable data files by using sstableutil, so we ignore temporary files
"""
p = self.get_sstable_data_files_process(ks=ks, table=table)
out, _, _ = handle_external_tool_process(p, ["sstableutil", '--type', 'final', ks, table])
return sorted(filter(lambda s: s.endswith('-Data.db'), out.splitlines()))
|
def function[get_sstable_data_files, parameter[self, ks, table]]:
constant[
Read sstable data files by using sstableutil, so we ignore temporary files
]
variable[p] assign[=] call[name[self].get_sstable_data_files_process, parameter[]]
<ast.Tuple object at 0x7da1b12a8fd0> assign[=] call[name[handle_external_tool_process], parameter[name[p], list[[<ast.Constant object at 0x7da1b12a8730>, <ast.Constant object at 0x7da1b12aa770>, <ast.Constant object at 0x7da1b12abbb0>, <ast.Name object at 0x7da1b12a9d80>, <ast.Name object at 0x7da1b12abe50>]]]]
return[call[name[sorted], parameter[call[name[filter], parameter[<ast.Lambda object at 0x7da1b12ab070>, call[name[out].splitlines, parameter[]]]]]]]
|
keyword[def] identifier[get_sstable_data_files] ( identifier[self] , identifier[ks] , identifier[table] ):
literal[string]
identifier[p] = identifier[self] . identifier[get_sstable_data_files_process] ( identifier[ks] = identifier[ks] , identifier[table] = identifier[table] )
identifier[out] , identifier[_] , identifier[_] = identifier[handle_external_tool_process] ( identifier[p] ,[ literal[string] , literal[string] , literal[string] , identifier[ks] , identifier[table] ])
keyword[return] identifier[sorted] ( identifier[filter] ( keyword[lambda] identifier[s] : identifier[s] . identifier[endswith] ( literal[string] ), identifier[out] . identifier[splitlines] ()))
|
def get_sstable_data_files(self, ks, table):
"""
Read sstable data files by using sstableutil, so we ignore temporary files
"""
p = self.get_sstable_data_files_process(ks=ks, table=table)
(out, _, _) = handle_external_tool_process(p, ['sstableutil', '--type', 'final', ks, table])
return sorted(filter(lambda s: s.endswith('-Data.db'), out.splitlines()))
|
def select_elements(source):
'''
Yields all the elements from the source
source - if an element, yields all child elements in order; if any other iterator yields the elements from that iterator
'''
if isinstance(source, element):
source = source.xml_children
return filter(lambda x: isinstance(x, element), source)
|
def function[select_elements, parameter[source]]:
constant[
Yields all the elements from the source
source - if an element, yields all child elements in order; if any other iterator yields the elements from that iterator
]
if call[name[isinstance], parameter[name[source], name[element]]] begin[:]
variable[source] assign[=] name[source].xml_children
return[call[name[filter], parameter[<ast.Lambda object at 0x7da2054a71f0>, name[source]]]]
|
keyword[def] identifier[select_elements] ( identifier[source] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[source] , identifier[element] ):
identifier[source] = identifier[source] . identifier[xml_children]
keyword[return] identifier[filter] ( keyword[lambda] identifier[x] : identifier[isinstance] ( identifier[x] , identifier[element] ), identifier[source] )
|
def select_elements(source):
"""
Yields all the elements from the source
source - if an element, yields all child elements in order; if any other iterator yields the elements from that iterator
"""
if isinstance(source, element):
source = source.xml_children # depends on [control=['if'], data=[]]
return filter(lambda x: isinstance(x, element), source)
|
def checkReference(self, reference):
"""
Check the reference for security. Tries to avoid any characters
necessary for doing a script injection.
"""
pattern = re.compile(r'[\s,;"\'&\\]')
if pattern.findall(reference.strip()):
return False
return True
|
def function[checkReference, parameter[self, reference]]:
constant[
Check the reference for security. Tries to avoid any characters
necessary for doing a script injection.
]
variable[pattern] assign[=] call[name[re].compile, parameter[constant[[\s,;"\'&\\]]]]
if call[name[pattern].findall, parameter[call[name[reference].strip, parameter[]]]] begin[:]
return[constant[False]]
return[constant[True]]
|
keyword[def] identifier[checkReference] ( identifier[self] , identifier[reference] ):
literal[string]
identifier[pattern] = identifier[re] . identifier[compile] ( literal[string] )
keyword[if] identifier[pattern] . identifier[findall] ( identifier[reference] . identifier[strip] ()):
keyword[return] keyword[False]
keyword[return] keyword[True]
|
def checkReference(self, reference):
"""
Check the reference for security. Tries to avoid any characters
necessary for doing a script injection.
"""
pattern = re.compile('[\\s,;"\\\'&\\\\]')
if pattern.findall(reference.strip()):
return False # depends on [control=['if'], data=[]]
return True
|
def set_session(self, headers=None):
"""Init session with default or custom headers
Args:
headers: A dict of headers (default None, thus using the default
header to init the session)
"""
if headers is None:
headers = {
'User-Agent':
('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_3)'
' AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/48.0.2564.116 Safari/537.36')
}
elif not isinstance(headers, dict):
raise TypeError('"headers" must be a dict object')
self.session = Session(self.proxy_pool)
self.session.headers.update(headers)
|
def function[set_session, parameter[self, headers]]:
constant[Init session with default or custom headers
Args:
headers: A dict of headers (default None, thus using the default
header to init the session)
]
if compare[name[headers] is constant[None]] begin[:]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b19ef160>], [<ast.Constant object at 0x7da1b19eeda0>]]
name[self].session assign[=] call[name[Session], parameter[name[self].proxy_pool]]
call[name[self].session.headers.update, parameter[name[headers]]]
|
keyword[def] identifier[set_session] ( identifier[self] , identifier[headers] = keyword[None] ):
literal[string]
keyword[if] identifier[headers] keyword[is] keyword[None] :
identifier[headers] ={
literal[string] :
( literal[string]
literal[string]
literal[string] )
}
keyword[elif] keyword[not] identifier[isinstance] ( identifier[headers] , identifier[dict] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[self] . identifier[session] = identifier[Session] ( identifier[self] . identifier[proxy_pool] )
identifier[self] . identifier[session] . identifier[headers] . identifier[update] ( identifier[headers] )
|
def set_session(self, headers=None):
"""Init session with default or custom headers
Args:
headers: A dict of headers (default None, thus using the default
header to init the session)
"""
if headers is None:
headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.116 Safari/537.36'} # depends on [control=['if'], data=['headers']]
elif not isinstance(headers, dict):
raise TypeError('"headers" must be a dict object') # depends on [control=['if'], data=[]]
self.session = Session(self.proxy_pool)
self.session.headers.update(headers)
|
def create_diamond_db(self):
'''Create a diamond database from the unaligned sequences in this package.
Returns
-------
path to the created diamond db e.g. 'my_sequences.dmnd'
'''
base = self.unaligned_sequence_database_path()
cmd = "diamond makedb --in '%s' -d '%s'" % (self.unaligned_sequence_database_path(), base)
extern.run(cmd)
diamondb = '%s.dmnd' % base
# Mostly this moves a file to it's current location because Create
# follows this same logic, but there's a specially crafted
# test/data/mcrA.gpkg which is slightly different.
os.rename(diamondb, self.diamond_database_path())
return diamondb
|
def function[create_diamond_db, parameter[self]]:
constant[Create a diamond database from the unaligned sequences in this package.
Returns
-------
path to the created diamond db e.g. 'my_sequences.dmnd'
]
variable[base] assign[=] call[name[self].unaligned_sequence_database_path, parameter[]]
variable[cmd] assign[=] binary_operation[constant[diamond makedb --in '%s' -d '%s'] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18bcc8ee0>, <ast.Name object at 0x7da18bcc84c0>]]]
call[name[extern].run, parameter[name[cmd]]]
variable[diamondb] assign[=] binary_operation[constant[%s.dmnd] <ast.Mod object at 0x7da2590d6920> name[base]]
call[name[os].rename, parameter[name[diamondb], call[name[self].diamond_database_path, parameter[]]]]
return[name[diamondb]]
|
keyword[def] identifier[create_diamond_db] ( identifier[self] ):
literal[string]
identifier[base] = identifier[self] . identifier[unaligned_sequence_database_path] ()
identifier[cmd] = literal[string] %( identifier[self] . identifier[unaligned_sequence_database_path] (), identifier[base] )
identifier[extern] . identifier[run] ( identifier[cmd] )
identifier[diamondb] = literal[string] % identifier[base]
identifier[os] . identifier[rename] ( identifier[diamondb] , identifier[self] . identifier[diamond_database_path] ())
keyword[return] identifier[diamondb]
|
def create_diamond_db(self):
"""Create a diamond database from the unaligned sequences in this package.
Returns
-------
path to the created diamond db e.g. 'my_sequences.dmnd'
"""
base = self.unaligned_sequence_database_path()
cmd = "diamond makedb --in '%s' -d '%s'" % (self.unaligned_sequence_database_path(), base)
extern.run(cmd)
diamondb = '%s.dmnd' % base
# Mostly this moves a file to it's current location because Create
# follows this same logic, but there's a specially crafted
# test/data/mcrA.gpkg which is slightly different.
os.rename(diamondb, self.diamond_database_path())
return diamondb
|
def _local_times_from_hours_since_midnight(times, hours):
"""
converts hours since midnight from an array of floats to localized times
"""
tz_info = times.tz # pytz timezone info
naive_times = times.tz_localize(None) # naive but still localized
# normalize local, naive times to previous midnight and add the hours until
# sunrise, sunset, and transit
return pd.DatetimeIndex(
(naive_times.normalize().astype(np.int64) +
(hours * NS_PER_HR).astype(np.int64)).astype('datetime64[ns]'),
tz=tz_info)
|
def function[_local_times_from_hours_since_midnight, parameter[times, hours]]:
constant[
converts hours since midnight from an array of floats to localized times
]
variable[tz_info] assign[=] name[times].tz
variable[naive_times] assign[=] call[name[times].tz_localize, parameter[constant[None]]]
return[call[name[pd].DatetimeIndex, parameter[call[binary_operation[call[call[name[naive_times].normalize, parameter[]].astype, parameter[name[np].int64]] + call[binary_operation[name[hours] * name[NS_PER_HR]].astype, parameter[name[np].int64]]].astype, parameter[constant[datetime64[ns]]]]]]]
|
keyword[def] identifier[_local_times_from_hours_since_midnight] ( identifier[times] , identifier[hours] ):
literal[string]
identifier[tz_info] = identifier[times] . identifier[tz]
identifier[naive_times] = identifier[times] . identifier[tz_localize] ( keyword[None] )
keyword[return] identifier[pd] . identifier[DatetimeIndex] (
( identifier[naive_times] . identifier[normalize] (). identifier[astype] ( identifier[np] . identifier[int64] )+
( identifier[hours] * identifier[NS_PER_HR] ). identifier[astype] ( identifier[np] . identifier[int64] )). identifier[astype] ( literal[string] ),
identifier[tz] = identifier[tz_info] )
|
def _local_times_from_hours_since_midnight(times, hours):
"""
converts hours since midnight from an array of floats to localized times
"""
tz_info = times.tz # pytz timezone info
naive_times = times.tz_localize(None) # naive but still localized
# normalize local, naive times to previous midnight and add the hours until
# sunrise, sunset, and transit
return pd.DatetimeIndex((naive_times.normalize().astype(np.int64) + (hours * NS_PER_HR).astype(np.int64)).astype('datetime64[ns]'), tz=tz_info)
|
def fetch(self, url, payload=None):
"""Return the textual content associated to the Response object"""
response = super().fetch(url, payload=payload)
return response.text
|
def function[fetch, parameter[self, url, payload]]:
constant[Return the textual content associated to the Response object]
variable[response] assign[=] call[call[name[super], parameter[]].fetch, parameter[name[url]]]
return[name[response].text]
|
keyword[def] identifier[fetch] ( identifier[self] , identifier[url] , identifier[payload] = keyword[None] ):
literal[string]
identifier[response] = identifier[super] (). identifier[fetch] ( identifier[url] , identifier[payload] = identifier[payload] )
keyword[return] identifier[response] . identifier[text]
|
def fetch(self, url, payload=None):
"""Return the textual content associated to the Response object"""
response = super().fetch(url, payload=payload)
return response.text
|
def find_in_tree(tree, key, perfect=False):
"""
Helper to perform find in dictionary tree.
"""
if len(key) == 0:
if tree['item'] is not None:
return tree['item'], ()
else:
for i in range(len(tree['subtrees'])):
if not perfect and tree['subtrees'][i][0] == '*':
item, trace = find_in_tree(tree['subtrees'][i][1],
(), perfect)
return item, (i,) + trace
raise KeyError(key)
else:
head, tail = key[0], key[1:]
for i in range(len(tree['subtrees'])):
if tree['subtrees'][i][0] == head or \
not perfect and tree['subtrees'][i][0] == '*':
try:
item, trace = find_in_tree(tree['subtrees'][i][1],
tail, perfect)
return item, (i,) + trace
except KeyError:
pass
raise KeyError(key)
|
def function[find_in_tree, parameter[tree, key, perfect]]:
constant[
Helper to perform find in dictionary tree.
]
if compare[call[name[len], parameter[name[key]]] equal[==] constant[0]] begin[:]
if compare[call[name[tree]][constant[item]] is_not constant[None]] begin[:]
return[tuple[[<ast.Subscript object at 0x7da1b28ac0d0>, <ast.Tuple object at 0x7da1b28acf40>]]]
|
keyword[def] identifier[find_in_tree] ( identifier[tree] , identifier[key] , identifier[perfect] = keyword[False] ):
literal[string]
keyword[if] identifier[len] ( identifier[key] )== literal[int] :
keyword[if] identifier[tree] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[tree] [ literal[string] ],()
keyword[else] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[tree] [ literal[string] ])):
keyword[if] keyword[not] identifier[perfect] keyword[and] identifier[tree] [ literal[string] ][ identifier[i] ][ literal[int] ]== literal[string] :
identifier[item] , identifier[trace] = identifier[find_in_tree] ( identifier[tree] [ literal[string] ][ identifier[i] ][ literal[int] ],
(), identifier[perfect] )
keyword[return] identifier[item] ,( identifier[i] ,)+ identifier[trace]
keyword[raise] identifier[KeyError] ( identifier[key] )
keyword[else] :
identifier[head] , identifier[tail] = identifier[key] [ literal[int] ], identifier[key] [ literal[int] :]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[tree] [ literal[string] ])):
keyword[if] identifier[tree] [ literal[string] ][ identifier[i] ][ literal[int] ]== identifier[head] keyword[or] keyword[not] identifier[perfect] keyword[and] identifier[tree] [ literal[string] ][ identifier[i] ][ literal[int] ]== literal[string] :
keyword[try] :
identifier[item] , identifier[trace] = identifier[find_in_tree] ( identifier[tree] [ literal[string] ][ identifier[i] ][ literal[int] ],
identifier[tail] , identifier[perfect] )
keyword[return] identifier[item] ,( identifier[i] ,)+ identifier[trace]
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[raise] identifier[KeyError] ( identifier[key] )
|
def find_in_tree(tree, key, perfect=False):
"""
Helper to perform find in dictionary tree.
"""
if len(key) == 0:
if tree['item'] is not None:
return (tree['item'], ()) # depends on [control=['if'], data=[]]
else:
for i in range(len(tree['subtrees'])):
if not perfect and tree['subtrees'][i][0] == '*':
(item, trace) = find_in_tree(tree['subtrees'][i][1], (), perfect)
return (item, (i,) + trace) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
raise KeyError(key) # depends on [control=['if'], data=[]]
else:
(head, tail) = (key[0], key[1:])
for i in range(len(tree['subtrees'])):
if tree['subtrees'][i][0] == head or (not perfect and tree['subtrees'][i][0] == '*'):
try:
(item, trace) = find_in_tree(tree['subtrees'][i][1], tail, perfect)
return (item, (i,) + trace) # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
raise KeyError(key)
|
def _state_delete(self):
'''Try to delete the state.yml file and the folder .blockade'''
try:
os.remove(self._state_file)
except OSError as err:
if err.errno not in (errno.EPERM, errno.ENOENT):
raise
try:
os.rmdir(self._state_dir)
except OSError as err:
if err.errno not in (errno.ENOTEMPTY, errno.ENOENT):
raise
|
def function[_state_delete, parameter[self]]:
constant[Try to delete the state.yml file and the folder .blockade]
<ast.Try object at 0x7da1b01c0430>
<ast.Try object at 0x7da1b01c0520>
|
keyword[def] identifier[_state_delete] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[os] . identifier[remove] ( identifier[self] . identifier[_state_file] )
keyword[except] identifier[OSError] keyword[as] identifier[err] :
keyword[if] identifier[err] . identifier[errno] keyword[not] keyword[in] ( identifier[errno] . identifier[EPERM] , identifier[errno] . identifier[ENOENT] ):
keyword[raise]
keyword[try] :
identifier[os] . identifier[rmdir] ( identifier[self] . identifier[_state_dir] )
keyword[except] identifier[OSError] keyword[as] identifier[err] :
keyword[if] identifier[err] . identifier[errno] keyword[not] keyword[in] ( identifier[errno] . identifier[ENOTEMPTY] , identifier[errno] . identifier[ENOENT] ):
keyword[raise]
|
def _state_delete(self):
"""Try to delete the state.yml file and the folder .blockade"""
try:
os.remove(self._state_file) # depends on [control=['try'], data=[]]
except OSError as err:
if err.errno not in (errno.EPERM, errno.ENOENT):
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['err']]
try:
os.rmdir(self._state_dir) # depends on [control=['try'], data=[]]
except OSError as err:
if err.errno not in (errno.ENOTEMPTY, errno.ENOENT):
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['err']]
|
def find_Note(data, freq, bits):
"""Get the frequencies, feed them to find_notes and the return the Note
with the highest amplitude."""
data = find_frequencies(data, freq, bits)
return sorted(find_notes(data), key=operator.itemgetter(1))[-1][0]
|
def function[find_Note, parameter[data, freq, bits]]:
constant[Get the frequencies, feed them to find_notes and the return the Note
with the highest amplitude.]
variable[data] assign[=] call[name[find_frequencies], parameter[name[data], name[freq], name[bits]]]
return[call[call[call[name[sorted], parameter[call[name[find_notes], parameter[name[data]]]]]][<ast.UnaryOp object at 0x7da18eb55ea0>]][constant[0]]]
|
keyword[def] identifier[find_Note] ( identifier[data] , identifier[freq] , identifier[bits] ):
literal[string]
identifier[data] = identifier[find_frequencies] ( identifier[data] , identifier[freq] , identifier[bits] )
keyword[return] identifier[sorted] ( identifier[find_notes] ( identifier[data] ), identifier[key] = identifier[operator] . identifier[itemgetter] ( literal[int] ))[- literal[int] ][ literal[int] ]
|
def find_Note(data, freq, bits):
"""Get the frequencies, feed them to find_notes and the return the Note
with the highest amplitude."""
data = find_frequencies(data, freq, bits)
return sorted(find_notes(data), key=operator.itemgetter(1))[-1][0]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.