Dataset columns (name, type, value-length range):

column              type            min      max
nwo                 stringlengths   5        106
sha                 stringlengths   40       40
path                stringlengths   4        174
language            stringclasses   1 value
identifier          stringlengths   1        140
parameters          stringlengths   0        87.7k
argument_list       stringclasses   1 value
return_statement    stringlengths   0        426k
docstring           stringlengths   0        64.3k
docstring_summary   stringlengths   0        26.3k
docstring_tokens    list
function            stringlengths   18       4.83M
function_tokens     list
url                 stringlengths   83       304
mcordts/cityscapesScripts
aeb7b82531f86185ce287705be28f452ba3ddbb8
cityscapesscripts/helpers/annotation.py
python
CsBbox2d.bbox_modal
(self)
return [ self.bbox_modal_xywh[0], self.bbox_modal_xywh[1], self.bbox_modal_xywh[0] + self.bbox_modal_xywh[2], self.bbox_modal_xywh[1] + self.bbox_modal_xywh[3] ]
Returns the 2d box as [xmin, ymin, xmax, ymax]
Returns the 2d box as [xmin, ymin, xmax, ymax]
[ "Returns", "the", "2d", "box", "as", "[", "xmin", "ymin", "xmax", "ymax", "]" ]
def bbox_modal(self):
    """Returns the 2d box as [xmin, ymin, xmax, ymax]"""
    return [
        self.bbox_modal_xywh[0],
        self.bbox_modal_xywh[1],
        self.bbox_modal_xywh[0] + self.bbox_modal_xywh[2],
        self.bbox_modal_xywh[1] + self.bbox_modal_xywh[3]
    ]
[ "def", "bbox_modal", "(", "self", ")", ":", "return", "[", "self", ".", "bbox_modal_xywh", "[", "0", "]", ",", "self", ".", "bbox_modal_xywh", "[", "1", "]", ",", "self", ".", "bbox_modal_xywh", "[", "0", "]", "+", "self", ".", "bbox_modal_xywh", "[", ...
https://github.com/mcordts/cityscapesScripts/blob/aeb7b82531f86185ce287705be28f452ba3ddbb8/cityscapesscripts/helpers/annotation.py#L199-L206
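A minimal standalone sketch of the xywh-to-xyxy conversion that bbox_modal performs; the helper name xywh_to_xyxy is hypothetical, not part of cityscapesScripts:

def xywh_to_xyxy(box):
    # [x, y, w, h] -> [xmin, ymin, xmax, ymax]
    x, y, w, h = box
    return [x, y, x + w, y + h]

assert xywh_to_xyxy([10, 20, 30, 40]) == [10, 20, 40, 60]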
wucng/TensorExpand
4ea58f64f5c5082b278229b799c9f679536510b7
TensorExpand/Object detection/SSD/balancap-SSD-Tensorflow/nets/ssd_vgg_512.py
python
ssd_arg_scope_caffe
(caffe_scope)
Caffe scope definition.
Args:
    caffe_scope: Caffe scope object with loaded weights.
Returns:
    An arg_scope.
Caffe scope definition.
[ "Caffe", "scope", "definition", "." ]
def ssd_arg_scope_caffe(caffe_scope):
    """Caffe scope definition.
    Args:
      caffe_scope: Caffe scope object with loaded weights.
    Returns:
      An arg_scope.
    """
    # Default network arg scope.
    with slim.arg_scope([slim.conv2d],
                        activation_fn=tf.nn.relu,
                        weights_initializer=caffe_scope.conv_weights_init(),
                        biases_initializer=caffe_scope.conv_biases_init()):
        with slim.arg_scope([slim.fully_connected],
                            activation_fn=tf.nn.relu):
            with slim.arg_scope([custom_layers.l2_normalization],
                                scale_initializer=caffe_scope.l2_norm_scale_init()):
                with slim.arg_scope([slim.conv2d, slim.max_pool2d],
                                    padding='SAME') as sc:
                    return sc
[ "def", "ssd_arg_scope_caffe", "(", "caffe_scope", ")", ":", "# Default network arg scope.", "with", "slim", ".", "arg_scope", "(", "[", "slim", ".", "conv2d", "]", ",", "activation_fn", "=", "tf", ".", "nn", ".", "relu", ",", "weights_initializer", "=", "caffe...
https://github.com/wucng/TensorExpand/blob/4ea58f64f5c5082b278229b799c9f679536510b7/TensorExpand/Object detection/SSD/balancap-SSD-Tensorflow/nets/ssd_vgg_512.py#L490-L510
nedbat/coveragepy
d004b18a1ad59ec89b89c96c03a789a55cc51693
coverage/misc.py
python
human_sorted_items
(items, reverse=False)
return sorted(items, key=lambda pair: (human_key(pair[0]), pair[1]), reverse=reverse)
Sort the (string, value) items the way humans expect. Returns the sorted list of items.
Sort the (string, value) items the way humans expect.
[ "Sort", "the", "(", "string", "value", ")", "items", "the", "way", "humans", "expect", "." ]
def human_sorted_items(items, reverse=False):
    """Sort the (string, value) items the way humans expect.
    Returns the sorted list of items.
    """
    return sorted(items, key=lambda pair: (human_key(pair[0]), pair[1]), reverse=reverse)
[ "def", "human_sorted_items", "(", "items", ",", "reverse", "=", "False", ")", ":", "return", "sorted", "(", "items", ",", "key", "=", "lambda", "pair", ":", "(", "human_key", "(", "pair", "[", "0", "]", ")", ",", "pair", "[", "1", "]", ")", ",", ...
https://github.com/nedbat/coveragepy/blob/d004b18a1ad59ec89b89c96c03a789a55cc51693/coverage/misc.py#L390-L395
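For context, a rough sketch of what a natural-sort key like coverage's human_key could look like; this stand-in is illustrative, not the actual coverage.misc implementation:

import re

def human_key(s):
    # Split runs of digits out of the string so "file2" sorts before "file10".
    return [int(part) if part.isdigit() else part for part in re.split(r"(\d+)", s)]

items = [("file10", 1), ("file2", 2)]
print(sorted(items, key=lambda pair: (human_key(pair[0]), pair[1])))
# [('file2', 2), ('file10', 1)]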
microsoft/TextWorld
c419bb63a92c7f6960aa004a367fb18894043e7f
textworld/core.py
python
GameState.copy
(self)
return state
Returns a deepcopy of this game state.
Returns a deepcopy of this game state.
[ "Returns", "a", "deepcopy", "of", "this", "game", "state", "." ]
def copy(self) -> "GameState":
    """ Returns a deepcopy of this game state. """
    state = GameState(self)
    for key in self:
        state[key] = deepcopy(self[key])
    return state
[ "def", "copy", "(", "self", ")", "->", "\"GameState\"", ":", "state", "=", "GameState", "(", "self", ")", "for", "key", "in", "self", ":", "state", "[", "key", "]", "=", "deepcopy", "(", "self", "[", "key", "]", ")", "return", "state" ]
https://github.com/microsoft/TextWorld/blob/c419bb63a92c7f6960aa004a367fb18894043e7f/textworld/core.py#L133-L139
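The deepcopy call is what makes the copied state independent of the original; a quick illustration of the difference:

from copy import deepcopy

state = {"inventory": ["lamp"]}
shallow = dict(state)
deep = deepcopy(state)

state["inventory"].append("key")
print(shallow["inventory"])  # ['lamp', 'key'] -- the inner list is shared
print(deep["inventory"])     # ['lamp']        -- fully independent copy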
AutodeskRoboticsLab/Mimic
85447f0d346be66988303a6a054473d92f1ed6f4
mimic/scripts/extern/pyqtgraph_0_11_0_dev0/pyqtgraph/widgets/SpinBox.py
python
SpinBox.selectNumber
(self)
Select the numerical portion of the text to allow quick editing by the user.
Select the numerical portion of the text to allow quick editing by the user.
[ "Select", "the", "numerical", "portion", "of", "the", "text", "to", "allow", "quick", "editing", "by", "the", "user", "." ]
def selectNumber(self):
    """
    Select the numerical portion of the text to allow quick editing by the user.
    """
    le = self.lineEdit()
    text = asUnicode(le.text())
    m = self.opts['regex'].match(text)
    if m is None:
        return
    s, e = m.start('number'), m.end('number')
    le.setSelection(s, e - s)
[ "def", "selectNumber", "(", "self", ")", ":", "le", "=", "self", ".", "lineEdit", "(", ")", "text", "=", "asUnicode", "(", "le", ".", "text", "(", ")", ")", "m", "=", "self", ".", "opts", "[", "'regex'", "]", ".", "match", "(", "text", ")", "if...
https://github.com/AutodeskRoboticsLab/Mimic/blob/85447f0d346be66988303a6a054473d92f1ed6f4/mimic/scripts/extern/pyqtgraph_0_11_0_dev0/pyqtgraph/widgets/SpinBox.py#L293-L303
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/pandas-0.24.2-py3.7-macosx-10.9-x86_64.egg/pandas/core/generic.py
python
NDFrame.clip_upper
(self, threshold, axis=None, inplace=False)
return self._clip_with_one_bound(threshold, method=self.le, axis=axis, inplace=inplace)
Trim values above a given threshold.
.. deprecated:: 0.24.0
    Use clip(upper=threshold) instead.
Elements above the `threshold` will be changed to match the `threshold`
value(s). Threshold can be a single value or an array, in the latter case
it performs the truncation element-wise.
Parameters
----------
threshold : numeric or array-like
    Maximum value allowed. All values above threshold will be set to this value.
    * float : every value is compared to `threshold`.
    * array-like : The shape of `threshold` should match the object it's
      compared to. When `self` is a Series, `threshold` should be the length.
      When `self` is a DataFrame, `threshold` should 2-D and the same shape as
      `self` for ``axis=None``, or 1-D and the same length as the axis being compared.
axis : {0 or 'index', 1 or 'columns'}, default 0
    Align object with `threshold` along the given axis.
inplace : boolean, default False
    Whether to perform the operation in place on the data.
    .. versionadded:: 0.21.0
Returns
-------
Series or DataFrame
    Original data with values trimmed.
See Also
--------
Series.clip : General purpose method to trim Series values to given threshold(s).
DataFrame.clip : General purpose method to trim DataFrame values to given threshold(s).
Examples
--------
>>> s = pd.Series([1, 2, 3, 4, 5])
>>> s
0    1
1    2
2    3
3    4
4    5
dtype: int64
>>> s.clip(upper=3)
0    1
1    2
2    3
3    3
4    3
dtype: int64
>>> elemwise_thresholds = [5, 4, 3, 2, 1]
>>> elemwise_thresholds
[5, 4, 3, 2, 1]
>>> s.clip(upper=elemwise_thresholds)
0    1
1    2
2    3
3    2
4    1
dtype: int64
Trim values above a given threshold.
[ "Trim", "values", "above", "a", "given", "threshold", "." ]
def clip_upper(self, threshold, axis=None, inplace=False):
    """
    Trim values above a given threshold.
    .. deprecated:: 0.24.0
        Use clip(upper=threshold) instead.
    Elements above the `threshold` will be changed to match the `threshold`
    value(s). Threshold can be a single value or an array, in the latter case
    it performs the truncation element-wise.
    Parameters
    ----------
    threshold : numeric or array-like
        Maximum value allowed. All values above threshold will be set to this value.
        * float : every value is compared to `threshold`.
        * array-like : The shape of `threshold` should match the object it's
          compared to. When `self` is a Series, `threshold` should be the length.
          When `self` is a DataFrame, `threshold` should 2-D and the same shape as
          `self` for ``axis=None``, or 1-D and the same length as the axis being compared.
    axis : {0 or 'index', 1 or 'columns'}, default 0
        Align object with `threshold` along the given axis.
    inplace : boolean, default False
        Whether to perform the operation in place on the data.
        .. versionadded:: 0.21.0
    Returns
    -------
    Series or DataFrame
        Original data with values trimmed.
    See Also
    --------
    Series.clip : General purpose method to trim Series values to given threshold(s).
    DataFrame.clip : General purpose method to trim DataFrame values to given threshold(s).
    Examples
    --------
    >>> s = pd.Series([1, 2, 3, 4, 5])
    >>> s
    0    1
    1    2
    2    3
    3    4
    4    5
    dtype: int64
    >>> s.clip(upper=3)
    0    1
    1    2
    2    3
    3    3
    4    3
    dtype: int64
    >>> elemwise_thresholds = [5, 4, 3, 2, 1]
    >>> elemwise_thresholds
    [5, 4, 3, 2, 1]
    >>> s.clip(upper=elemwise_thresholds)
    0    1
    1    2
    2    3
    3    2
    4    1
    dtype: int64
    """
    warnings.warn('clip_upper(threshold) is deprecated, '
                  'use clip(upper=threshold) instead',
                  FutureWarning, stacklevel=2)
    return self._clip_with_one_bound(threshold, method=self.le,
                                     axis=axis, inplace=inplace)
[ "def", "clip_upper", "(", "self", ",", "threshold", ",", "axis", "=", "None", ",", "inplace", "=", "False", ")", ":", "warnings", ".", "warn", "(", "'clip_upper(threshold) is deprecated, '", "'use clip(upper=threshold) instead'", ",", "FutureWarning", ",", "stacklev...
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/pandas-0.24.2-py3.7-macosx-10.9-x86_64.egg/pandas/core/generic.py#L7318-L7396
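Since clip_upper is deprecated in favor of clip(upper=...), the modern equivalent of the docstring examples above is:

import pandas as pd

s = pd.Series([1, 2, 3, 4, 5])
print(s.clip(upper=3).tolist())                # [1, 2, 3, 3, 3]
print(s.clip(upper=[5, 4, 3, 2, 1]).tolist())  # [1, 2, 3, 2, 1]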
aws/Trusted-Advisor-Tools
09c353c78810f534ee22bd28254d56f0ab7166c9
ExposedAccessKeys/lambda_functions/notify_security.py
python
publish_msg
(subject, message)
Publishes message to SNS topic.
Args:
    subject (string): Subject of message to be published to topic.
    message (string): Content of message to be published to topic.
Returns:
    (None)
Publishes message to SNS topic.
[ "Publishes", "message", "to", "SNS", "topic", "." ]
def publish_msg(subject, message):
    """
    Publishes message to SNS topic.
    Args:
        subject (string): Subject of message to be published to topic.
        message (string): Content of message to be published to topic.
    Returns:
        (None)
    """
    try:
        sns.publish(
            TopicArn=TOPIC_ARN,
            Message=message,
            Subject=subject,
            MessageStructure='string'
        )
    except Exception as e:
        print(e)
        print('Could not publish message to SNS topic "{}"'.format(TOPIC_ARN))
        raise e
[ "def", "publish_msg", "(", "subject", ",", "message", ")", ":", "try", ":", "sns", ".", "publish", "(", "TopicArn", "=", "TOPIC_ARN", ",", "Message", "=", "message", ",", "Subject", "=", "subject", ",", "MessageStructure", "=", "'string'", ")", "except", ...
https://github.com/aws/Trusted-Advisor-Tools/blob/09c353c78810f534ee22bd28254d56f0ab7166c9/ExposedAccessKeys/lambda_functions/notify_security.py#L64-L85
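A minimal usage sketch showing the module-level names publish_msg relies on; the topic ARN below is a placeholder, and sns/TOPIC_ARN are assumed to be defined elsewhere in the Lambda module:

import boto3

TOPIC_ARN = "arn:aws:sns:us-east-1:123456789012:security-notify"  # placeholder ARN
sns = boto3.client("sns")

publish_msg("Exposed access key", "An exposed AWS access key was reported.")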
dipy/dipy
be956a529465b28085f8fc435a756947ddee1c89
dipy/reconst/csdeconv.py
python
AxSymShResponse.basis
(self, sphere)
return real_sh_descoteaux_from_index(self.m, self.n, theta, phi)
A basis that maps the response coefficients onto a sphere.
A basis that maps the response coefficients onto a sphere.
[ "A", "basis", "that", "maps", "the", "response", "coefficients", "onto", "a", "sphere", "." ]
def basis(self, sphere):
    """A basis that maps the response coefficients onto a sphere."""
    theta = sphere.theta[:, None]
    phi = sphere.phi[:, None]
    return real_sh_descoteaux_from_index(self.m, self.n, theta, phi)
[ "def", "basis", "(", "self", ",", "sphere", ")", ":", "theta", "=", "sphere", ".", "theta", "[", ":", ",", "None", "]", "phi", "=", "sphere", ".", "phi", "[", ":", ",", "None", "]", "return", "real_sh_descoteaux_from_index", "(", "self", ".", "m", ...
https://github.com/dipy/dipy/blob/be956a529465b28085f8fc435a756947ddee1c89/dipy/reconst/csdeconv.py#L157-L161
pypa/pipenv
b21baade71a86ab3ee1429f71fbc14d4f95fb75d
pipenv/patched/notpip/_vendor/pkg_resources/__init__.py
python
EntryPoint.parse_map
(cls, data, dist=None)
return maps
Parse a map of entry point groups
Parse a map of entry point groups
[ "Parse", "a", "map", "of", "entry", "point", "groups" ]
def parse_map(cls, data, dist=None):
    """Parse a map of entry point groups"""
    if isinstance(data, dict):
        data = data.items()
    else:
        data = split_sections(data)
    maps = {}
    for group, lines in data:
        if group is None:
            if not lines:
                continue
            raise ValueError("Entry points must be listed in groups")
        group = group.strip()
        if group in maps:
            raise ValueError("Duplicate group name", group)
        maps[group] = cls.parse_group(group, lines, dist)
    return maps
[ "def", "parse_map", "(", "cls", ",", "data", ",", "dist", "=", "None", ")", ":", "if", "isinstance", "(", "data", ",", "dict", ")", ":", "data", "=", "data", ".", "items", "(", ")", "else", ":", "data", "=", "split_sections", "(", "data", ")", "m...
https://github.com/pypa/pipenv/blob/b21baade71a86ab3ee1429f71fbc14d4f95fb75d/pipenv/patched/notpip/_vendor/pkg_resources/__init__.py#L2520-L2536
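A small sketch of the two input shapes parse_map accepts, a dict of groups or INI-style text; the entry point syntax is standard setuptools, while the package names here are made up:

from pkg_resources import EntryPoint

# dict form: {group: [entry point spec, ...]}
maps = EntryPoint.parse_map({"console_scripts": ["mytool = mypkg.cli:main"]})
print(maps["console_scripts"]["mytool"].module_name)  # mypkg.cli

# string form: INI-style sections, split via split_sections()
maps = EntryPoint.parse_map("[console_scripts]\nmytool = mypkg.cli:main")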
python/cpython
e13cdca0f5224ec4e23bdd04bb3120506964bc8b
Lib/pathlib.py
python
Path.is_dir
(self)
Whether this path is a directory.
Whether this path is a directory.
[ "Whether", "this", "path", "is", "a", "directory", "." ]
def is_dir(self):
    """
    Whether this path is a directory.
    """
    try:
        return S_ISDIR(self.stat().st_mode)
    except OSError as e:
        if not _ignore_error(e):
            raise
        # Path doesn't exist or is a broken symlink
        # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ )
        return False
    except ValueError:
        # Non-encodable path
        return False
[ "def", "is_dir", "(", "self", ")", ":", "try", ":", "return", "S_ISDIR", "(", "self", ".", "stat", "(", ")", ".", "st_mode", ")", "except", "OSError", "as", "e", ":", "if", "not", "_ignore_error", "(", "e", ")", ":", "raise", "# Path doesn't exist or i...
https://github.com/python/cpython/blob/e13cdca0f5224ec4e23bdd04bb3120506964bc8b/Lib/pathlib.py#L1298-L1312
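Simple behavior check: per the OSError handling above, is_dir returns False for nonexistent paths instead of raising:

from pathlib import Path

print(Path("/tmp").is_dir())           # True on typical Unix systems
print(Path("/no/such/path").is_dir())  # False, no exception raised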
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/lib_vendored_deps/filter_plugins/openshift_master.py
python
OpenIDIdentityProvider.validate
(self)
validate this idp instance
validate this idp instance
[ "validate", "this", "idp", "instance" ]
def validate(self):
    ''' validate this idp instance '''
    if not isinstance(self.provider['claims'], dict):
        raise errors.AnsibleFilterError("|failed claims for provider {0} "
                                        "must be a dictionary".format(self.__class__.__name__))
    for var, var_type in (('extraScopes', list), ('extraAuthorizeParameters', dict)):
        if var in self.provider and not isinstance(self.provider[var], var_type):
            raise errors.AnsibleFilterError("|failed {1} for provider "
                                            "{0} must be a {2}".format(self.__class__.__name__,
                                                                       var,
                                                                       var_type.__class__.__name__))
    required_claims = ['id']
    optional_claims = ['email', 'name', 'preferredUsername']
    all_claims = required_claims + optional_claims
    for claim in required_claims:
        if claim in required_claims and claim not in self.provider['claims']:
            raise errors.AnsibleFilterError("|failed {0} claim missing "
                                            "for provider {1}".format(claim, self.__class__.__name__))
    for claim in all_claims:
        if claim in self.provider['claims'] and not isinstance(self.provider['claims'][claim], list):
            raise errors.AnsibleFilterError("|failed {0} claims for "
                                            "provider {1} must be a list".format(claim, self.__class__.__name__))
    unknown_claims = set(self.provider['claims'].keys()) - set(all_claims)
    if len(unknown_claims) > 0:
        raise errors.AnsibleFilterError("|failed provider {0} has unknown "
                                        "claims: {1}".format(self.__class__.__name__, ', '.join(unknown_claims)))
    if not isinstance(self.provider['urls'], dict):
        raise errors.AnsibleFilterError("|failed urls for provider {0} "
                                        "must be a dictionary".format(self.__class__.__name__))
    required_urls = ['authorize', 'token']
    optional_urls = ['userInfo']
    all_urls = required_urls + optional_urls
    for url in required_urls:
        if url not in self.provider['urls']:
            raise errors.AnsibleFilterError("|failed {0} url missing for "
                                            "provider {1}".format(url, self.__class__.__name__))
    unknown_urls = set(self.provider['urls'].keys()) - set(all_urls)
    if len(unknown_urls) > 0:
        raise errors.AnsibleFilterError("|failed provider {0} has unknown "
                                        "urls: {1}".format(self.__class__.__name__, ', '.join(unknown_urls)))
[ "def", "validate", "(", "self", ")", ":", "if", "not", "isinstance", "(", "self", ".", "provider", "[", "'claims'", "]", ",", "dict", ")", ":", "raise", "errors", ".", "AnsibleFilterError", "(", "\"|failed claims for provider {0} \"", "\"must be a dictionary\"", ...
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/lib_vendored_deps/filter_plugins/openshift_master.py#L361-L409
Trusted-AI/AIX360
36459f2a585d0e2a2e8582562bf226d4402b57d6
aix360/algorithms/lime/lime_wrapper.py
python
LimeTabularExplainer.__init__
(self, *argv, **kwargs)
Initialize lime Tabular Explainer object
Initialize lime Tabular Explainer object
[ "Initialize", "lime", "Tabular", "Explainer", "object" ]
def __init__(self, *argv, **kwargs):
    """
    Initialize lime Tabular Explainer object
    """
    super(LimeTabularExplainer, self).__init__(*argv, **kwargs)
    self.explainer = lime_tabular.LimeTabularExplainer(*argv, **kwargs)
[ "def", "__init__", "(", "self", ",", "*", "argv", ",", "*", "*", "kwargs", ")", ":", "super", "(", "LimeTabularExplainer", ",", "self", ")", ".", "__init__", "(", "*", "argv", ",", "*", "*", "kwargs", ")", "self", ".", "explainer", "=", "lime_tabular...
https://github.com/Trusted-AI/AIX360/blob/36459f2a585d0e2a2e8582562bf226d4402b57d6/aix360/algorithms/lime/lime_wrapper.py#L77-L83
oilshell/oil
94388e7d44a9ad879b12615f6203b38596b5a2d3
Python-2.7.13/Lib/symtable.py
python
SymbolTable.get_type
(self)
[]
def get_type(self):
    if self._table.type == _symtable.TYPE_MODULE:
        return "module"
    if self._table.type == _symtable.TYPE_FUNCTION:
        return "function"
    if self._table.type == _symtable.TYPE_CLASS:
        return "class"
    assert self._table.type in (1, 2, 3), \
        "unexpected type: {0}".format(self._table.type)
[ "def", "get_type", "(", "self", ")", ":", "if", "self", ".", "_table", ".", "type", "==", "_symtable", ".", "TYPE_MODULE", ":", "return", "\"module\"", "if", "self", ".", "_table", ".", "type", "==", "_symtable", ".", "TYPE_FUNCTION", ":", "return", "\"f...
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/Python-2.7.13/Lib/symtable.py#L57-L65
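Usage sketch with the stdlib symtable module, which exercises the 'module' and 'function' branches above:

import symtable

table = symtable.symtable("def f(): pass", "<string>", "exec")
print(table.get_type())                    # 'module'
print(table.get_children()[0].get_type())  # 'function'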
accel-brain/accel-brain-code
86f489dc9be001a3bae6d053f48d6b57c0bedb95
Accel-Brain-Base/accelbrainbase/iteratabledata/_mxnet/unlabeled_t_hot_txt_iterator.py
python
UnlabeledTHotTXTIterator.generate_learned_samples
(self)
Draw and generate data.
Returns:
    `Tuple` data. The shape is ...
    - `mxnet.ndarray` of observed data points in training.
    - `mxnet.ndarray` of supervised data in training.
    - `mxnet.ndarray` of observed data points in test.
    - `mxnet.ndarray` of supervised data in test.
Draw and generate data.
[ "Draw", "and", "generate", "data", "." ]
def generate_learned_samples(self):
    '''
    Draw and generate data.
    Returns:
        `Tuple` data. The shape is ...
        - `mxnet.ndarray` of observed data points in training.
        - `mxnet.ndarray` of supervised data in training.
        - `mxnet.ndarray` of observed data points in test.
        - `mxnet.ndarray` of supervised data in test.
    '''
    for _ in range(self.iter_n):
        training_batch_arr = np.zeros(
            (
                self.batch_size,
                1,
                self.seq_len,
                len(self.__token_list)
            ),
        )
        test_batch_arr = np.zeros(
            (
                self.batch_size,
                1,
                self.seq_len,
                len(self.__token_list)
            ),
        )
        for i in range(self.batch_size):
            file_key = np.random.randint(low=0, high=len(self.__train_txt_path_list))
            with open(self.__train_txt_path_list[file_key]) as f:
                train_txt = f.read()
            test_file_key = np.random.randint(low=0, high=len(self.__test_txt_path_list))
            with open(self.__test_txt_path_list[test_file_key]) as f:
                test_txt = f.read()
            if self.__pre_txt_arr is None:
                start_row = np.random.randint(low=0, high=len(train_txt) - self.seq_len)
                test_start_row = np.random.randint(low=0, high=len(test_txt) - self.seq_len)
                train_txt = train_txt[start_row:start_row+self.seq_len]
                test_txt = test_txt[test_start_row:test_start_row+self.seq_len]
            else:
                if train_txt.index(self.__pre_txt_arr[i]) + (self.seq_len * 2) < len(train_txt):
                    start_row = train_txt.index(self.__pre_txt_arr[i]) + self.seq_len
                else:
                    start_row = np.random.randint(low=0, high=len(train_txt) - self.seq_len)
                train_txt = train_txt[start_row:start_row+self.seq_len]
                test_start_row = np.random.randint(low=0, high=len(test_txt) - self.seq_len)
                test_txt = test_txt[test_start_row:test_start_row+self.seq_len]
            for seq in range(self.seq_len):
                training_batch_arr[i, 0, seq, self.__token_list.index(train_txt[seq])] = 1.0
                test_batch_arr[i, 0, seq, self.__token_list.index(test_txt[seq])] = 1.0
        training_batch_arr = nd.ndarray.array(training_batch_arr, ctx=self.__ctx)
        test_batch_arr = nd.ndarray.array(test_batch_arr, ctx=self.__ctx)
        training_batch_arr = self.pre_normalize(training_batch_arr)
        test_batch_arr = self.pre_normalize(test_batch_arr)
        if self.__noiseable_data is not None:
            training_batch_arr = self.__noiseable_data.noise(training_batch_arr)
        yield training_batch_arr, training_batch_arr, test_batch_arr, test_batch_arr
[ "def", "generate_learned_samples", "(", "self", ")", ":", "for", "_", "in", "range", "(", "self", ".", "iter_n", ")", ":", "training_batch_arr", "=", "np", ".", "zeros", "(", "(", "self", ".", "batch_size", ",", "1", ",", "self", ".", "seq_len", ",", ...
https://github.com/accel-brain/accel-brain-code/blob/86f489dc9be001a3bae6d053f48d6b57c0bedb95/Accel-Brain-Base/accelbrainbase/iteratabledata/_mxnet/unlabeled_t_hot_txt_iterator.py#L115-L183
pyqt/examples
843bb982917cecb2350b5f6d7f42c9b7fb142ec1
src/pyqt-official/designer/plugins/widgets/datetimeedit.py
python
PyDateEdit.mousePressEvent
(self, event)
[]
def mousePressEvent(self, event):
    super(PyDateEdit, self).mousePressEvent(event)
    if not self.__cw:
        self.__cw = self.findChild(QCalendarWidget)
        if self.__cw:
            self.__cw.setFirstDayOfWeek(self.__firstDayOfWeek)
            self.__cw.setGridVisible(self.__gridVisible)
            self.__cw.setHorizontalHeaderFormat(self.__horizontalHeaderFormat)
            self.__cw.setVerticalHeaderFormat(self.__verticalHeaderFormat)
            self.__cw.setNavigationBarVisible(self.__navigationBarVisible)
[ "def", "mousePressEvent", "(", "self", ",", "event", ")", ":", "super", "(", "PyDateEdit", ",", "self", ")", ".", "mousePressEvent", "(", "event", ")", "if", "not", "self", ".", "__cw", ":", "self", ".", "__cw", "=", "self", ".", "findChild", "(", "Q...
https://github.com/pyqt/examples/blob/843bb982917cecb2350b5f6d7f42c9b7fb142ec1/src/pyqt-official/designer/plugins/widgets/datetimeedit.py#L57-L67
uqfoundation/multiprocess
028cc73f02655e6451d92e5147d19d8c10aebe50
pypy3.7/multiprocess/pool.py
python
Pool._map_async
(self, func, iterable, mapper, chunksize=None, callback=None, error_callback=None)
return result
Helper function to implement map, starmap and their async counterparts.
Helper function to implement map, starmap and their async counterparts.
[ "Helper", "function", "to", "implement", "map", "starmap", "and", "their", "async", "counterparts", "." ]
def _map_async(self, func, iterable, mapper, chunksize=None, callback=None,
        error_callback=None):
    '''
    Helper function to implement map, starmap and their async counterparts.
    '''
    if self._state != RUN:
        raise ValueError("Pool not running")
    if not hasattr(iterable, '__len__'):
        iterable = list(iterable)
    if chunksize is None:
        chunksize, extra = divmod(len(iterable), len(self._pool) * 4)
        if extra:
            chunksize += 1
    if len(iterable) == 0:
        chunksize = 0
    task_batches = Pool._get_tasks(func, iterable, chunksize)
    result = MapResult(self._cache, chunksize, len(iterable), callback,
                       error_callback=error_callback)
    self._taskqueue.put(
        (
            self._guarded_task_generation(result._job,
                                          mapper,
                                          task_batches),
            None
        )
    )
    return result
[ "def", "_map_async", "(", "self", ",", "func", ",", "iterable", ",", "mapper", ",", "chunksize", "=", "None", ",", "callback", "=", "None", ",", "error_callback", "=", "None", ")", ":", "if", "self", ".", "_state", "!=", "RUN", ":", "raise", "ValueErro...
https://github.com/uqfoundation/multiprocess/blob/028cc73f02655e6451d92e5147d19d8c10aebe50/pypy3.7/multiprocess/pool.py#L375-L403
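The chunksize heuristic above, pulled out as a standalone sketch; the helper name default_chunksize is hypothetical:

def default_chunksize(n_items, n_workers):
    # Aim for roughly 4 chunks per worker, rounding up on a remainder.
    chunksize, extra = divmod(n_items, n_workers * 4)
    if extra:
        chunksize += 1
    return chunksize if n_items else 0

print(default_chunksize(100, 4))  # 7  (100 items / 16 -> 6 remainder 4 -> round up)
print(default_chunksize(0, 4))    # 0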
SALib/SALib
b6b6b5cab3388f3b80590c98d66aca7dc784d894
src/SALib/analyze/fast.py
python
compute_orders
(outputs, N, M, omega)
return (D1 / V), (1.0 - Dt / V)
[]
def compute_orders(outputs, N, M, omega):
    f = np.fft.fft(outputs)
    Sp = np.power(np.absolute(f[np.arange(1, int((N + 1) / 2))]) / N, 2)
    V = 2.0 * np.sum(Sp)
    # Calculate first and total order
    D1 = 2.0 * np.sum(Sp[np.arange(1, M + 1) * int(omega) - 1])
    Dt = 2.0 * np.sum(Sp[np.arange(int(omega / 2.0))])
    return (D1 / V), (1.0 - Dt / V)
[ "def", "compute_orders", "(", "outputs", ",", "N", ",", "M", ",", "omega", ")", ":", "f", "=", "np", ".", "fft", ".", "fft", "(", "outputs", ")", "Sp", "=", "np", ".", "power", "(", "np", ".", "absolute", "(", "f", "[", "np", ".", "arange", "...
https://github.com/SALib/SALib/blob/b6b6b5cab3388f3b80590c98d66aca7dc784d894/src/SALib/analyze/fast.py#L98-L107
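A toy sanity check, assuming compute_orders is imported: a signal that is a pure cosine at the driving frequency omega puts nearly all spectral variance at omega and its harmonics, so both indices come out close to 1.

import numpy as np

N, M, omega = 1001, 4, 16
s = np.arange(N)
outputs = np.cos(2 * np.pi * omega * s / N)  # exactly 16 cycles over N samples

S1, ST = compute_orders(outputs, N, M, omega)
print(round(S1, 3), round(ST, 3))  # both close to 1.0 for this toy signal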
huggingface/transformers
623b4f7c63f60cce917677ee704d6c93ee960b4b
src/transformers/models/marian/convert_marian_to_pytorch.py
python
unzip
(zip_path: str, dest_dir: str)
[]
def unzip(zip_path: str, dest_dir: str) -> None:
    with ZipFile(zip_path, "r") as zipObj:
        zipObj.extractall(dest_dir)
[ "def", "unzip", "(", "zip_path", ":", "str", ",", "dest_dir", ":", "str", ")", "->", "None", ":", "with", "ZipFile", "(", "zip_path", ",", "\"r\"", ")", "as", "zipObj", ":", "zipObj", ".", "extractall", "(", "dest_dir", ")" ]
https://github.com/huggingface/transformers/blob/623b4f7c63f60cce917677ee704d6c93ee960b4b/src/transformers/models/marian/convert_marian_to_pytorch.py#L629-L631
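Trivial round-trip sketch; the file names are illustrative:

from zipfile import ZipFile

with ZipFile("demo.zip", "w") as z:
    z.writestr("hello.txt", "hi")

unzip("demo.zip", "demo_out")  # demo_out/hello.txt now exists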
weinbe58/QuSpin
5bbc3204dbf5c227a87a44f0dacf39509cba580c
quspin/basis/photon.py
python
photon_basis.__init__
(self,basis_constructor,*constructor_args,**blocks)
Initialises the `photon_basis` object.
Parameters
-----------
basis_constructor: :obj:`basis`
    `basis` constructor for the lattice part of the `photon_basis`.
constructor_args: obj
    Required arguments required by the specific `basis_constructor`.
blocks: obj
    Optional keyword arguments for `basis_constructor` which include (but are not limited to):
        **Nph** (*int*) - specify the dimension of photon (HO) Hilbert space.
        **Ntot** (*int*) - specify total number of particles (photons + lattice).
        **anyblock** (*int*) - specify any lattice symmetry blocks
Initialises the `photon_basis` object.
[ "Initialises", "the", "photon_basis", "object", "." ]
def __init__(self, basis_constructor, *constructor_args, **blocks):
    """Initialises the `photon_basis` object.
    Parameters
    -----------
    basis_constructor: :obj:`basis`
        `basis` constructor for the lattice part of the `photon_basis`.
    constructor_args: obj
        Required arguments required by the specific `basis_constructor`.
    blocks: obj
        Optional keyword arguments for `basis_constructor` which include (but are not limited to):
            **Nph** (*int*) - specify the dimension of photon (HO) Hilbert space.
            **Ntot** (*int*) - specify total number of particles (photons + lattice).
            **anyblock** (*int*) - specify any lattice symmetry blocks
    """
    Ntot = blocks.get("Ntot")
    Nph = blocks.get("Nph")
    self._Nph = Nph
    self._Ntot = Ntot
    if Ntot is not None:
        blocks.pop("Ntot")
    if Nph is not None:
        blocks.pop("Nph")
    if Ntot is None:
        if Nph is None:
            raise TypeError("If Ntot not specified, Nph must specify the cutoff on the number of photon states.")
        if type(Nph) is not int:
            raise TypeError("Nph must be integer")
        if Nph < 0:
            raise ValueError("Nph must be an integer >= 0.")
        self._check_pcon = False
        b1 = basis_constructor(*constructor_args, _Np=-1, **blocks)
        b2 = ho_basis(Nph)
        tensor_basis.__init__(self, b1, b2)
    else:
        if type(Ntot) is not int:
            raise TypeError("Ntot must be integer")
        if Ntot < 0:
            raise ValueError("Ntot must be an integer >= 0.")
        self._check_pcon = True
        self._basis_left = basis_constructor(*constructor_args, _Np=Ntot, **blocks)
        if isinstance(self._basis_left, tensor_basis):
            raise TypeError("Can only create photon basis with non-tensor type basis")
        if not isinstance(self._basis_left, basis):
            raise TypeError("Can only create photon basis with basis type")
        self._basis_right = ho_basis(Ntot)
        self._n = self._basis_left._Np_list
        self._n -= Ntot
        self._n *= -1
        self._blocks = self._basis_left._blocks
        self._Ns = self._basis_left._Ns
        self._unique_me = self._basis_left._unique_me
        self._operators = self._basis_left._operators + "\n" + self._basis_right._operators
        self._sps = self._basis_left.sps
[ "def", "__init__", "(", "self", ",", "basis_constructor", ",", "*", "constructor_args", ",", "*", "*", "blocks", ")", ":", "Ntot", "=", "blocks", ".", "get", "(", "\"Ntot\"", ")", "Nph", "=", "blocks", ".", "get", "(", "\"Nph\"", ")", "self", ".", "_...
https://github.com/weinbe58/QuSpin/blob/5bbc3204dbf5c227a87a44f0dacf39509cba580c/quspin/basis/photon.py#L120-L173
google-research/morph-net
be4d79dea816c473007c5967d45ab4036306c21c
examples/keras/model.py
python
MorphNetModel.initialize_model
(self, input_tensor=None)
Initialize the model.
Args:
    input_tensor: Input tensor to the model. tf.placeholder variable or
        tf.Keras.Input instance. If not provided, use the default inputs
        from the base model.
Initialize the model.
Args:
    input_tensor: Input tensor to the model. tf.placeholder variable or
        tf.Keras.Input instance. If not provided, use the default inputs
        from the base model.
[ "Initialize", "the", "model", ".", "Args", ":", "input_tensor", ":", "Input", "tensor", "to", "the", "model", ".", "tf", ".", "placeholder", "variable", "or", "tf", ".", "Keras", ".", "Input", "instance", ".", "If", "not", "provided", "use", "the", "defa...
def initialize_model(self, input_tensor=None):
    """
    Initialize the model.
    Args:
        input_tensor: Input tensor to the model. tf.placeholder variable or
            tf.Keras.Input instance. If not provided, use the default inputs
            from the base model.
    """
    with tf.device(self.main_train_device):
        base_model = self.base_model(weights=None,
                                     include_top=False,
                                     input_tensor=input_tensor)
        x = base_model.output
        # Add a global spatial average pooling layer since MorphNet does not
        # support Flatten/Reshape OPs.
        x = tf.keras.layers.GlobalAveragePooling2D()(x)
        x = tf.keras.layers.Dense(1024, activation="relu")(x)
        logits = tf.keras.layers.Dense(self.num_classes)(x)
        self.model = tf.keras.Model(inputs=base_model.input, outputs=logits)
        self.inputs = self.model.input
        self.labels = tf.placeholder(tf.float32, [None, self.num_classes])
        self.morphnet_regularization_strength_placeholder = tf.placeholder(
            tf.float32, shape=[])
[ "def", "initialize_model", "(", "self", ",", "input_tensor", "=", "None", ")", ":", "with", "tf", ".", "device", "(", "self", ".", "main_train_device", ")", ":", "base_model", "=", "self", ".", "base_model", "(", "weights", "=", "None", ",", "include_top",...
https://github.com/google-research/morph-net/blob/be4d79dea816c473007c5967d45ab4036306c21c/examples/keras/model.py#L82-L105
cleverhans-lab/cleverhans
e5d00e537ce7ad6119ed5a8db1f0e9736d1f6e1d
cleverhans_v3.1.0/cleverhans/model_zoo/madry_lab_challenges/cifar10_model.py
python
Input.fprop
(self, x)
[]
def fprop(self, x):
    with tf.variable_scope("input", reuse=tf.AUTO_REUSE):
        input_standardized = tf.map_fn(
            lambda img: tf.image.per_image_standardization(img), x
        )
        return _conv("init_conv", input_standardized, 3, 3, 16, _stride_arr(1))
[ "def", "fprop", "(", "self", ",", "x", ")", ":", "with", "tf", ".", "variable_scope", "(", "\"input\"", ",", "reuse", "=", "tf", ".", "AUTO_REUSE", ")", ":", "input_standardized", "=", "tf", ".", "map_fn", "(", "lambda", "img", ":", "tf", ".", "image...
https://github.com/cleverhans-lab/cleverhans/blob/e5d00e537ce7ad6119ed5a8db1f0e9736d1f6e1d/cleverhans_v3.1.0/cleverhans/model_zoo/madry_lab_challenges/cifar10_model.py#L128-L133
buke/GreenOdoo
3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df
source/addons/fetchmail/fetchmail.py
python
fetchmail_server.fetch_mail
(self, cr, uid, ids, context=None)
return True
WARNING: meant for cron usage only - will commit() after each email!
WARNING: meant for cron usage only - will commit() after each email!
[ "WARNING", ":", "meant", "for", "cron", "usage", "only", "-", "will", "commit", "()", "after", "each", "email!" ]
def fetch_mail(self, cr, uid, ids, context=None):
    """WARNING: meant for cron usage only - will commit() after each email!"""
    context = dict(context or {})
    context['fetchmail_cron_running'] = True
    mail_thread = self.pool.get('mail.thread')
    action_pool = self.pool.get('ir.actions.server')
    for server in self.browse(cr, uid, ids, context=context):
        _logger.info('start checking for new emails on %s server %s', server.type, server.name)
        context.update({'fetchmail_server_id': server.id, 'server_type': server.type})
        count, failed = 0, 0
        imap_server = False
        pop_server = False
        if server.type == 'imap':
            try:
                imap_server = server.connect()
                imap_server.select()
                result, data = imap_server.search(None, '(UNSEEN)')
                for num in data[0].split():
                    res_id = None
                    result, data = imap_server.fetch(num, '(RFC822)')
                    imap_server.store(num, '-FLAGS', '\\Seen')
                    try:
                        res_id = mail_thread.message_process(cr, uid, server.object_id.model,
                                                             data[0][1],
                                                             save_original=server.original,
                                                             strip_attachments=(not server.attach),
                                                             context=context)
                    except Exception:
                        _logger.exception('Failed to process mail from %s server %s.', server.type, server.name)
                        failed += 1
                    if res_id and server.action_id:
                        action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids': [res_id], 'active_model': context.get("thread_model", server.object_id.model)})
                    imap_server.store(num, '+FLAGS', '\\Seen')
                    cr.commit()
                    count += 1
                _logger.info("Fetched %d email(s) on %s server %s; %d succeeded, %d failed.", count, server.type, server.name, (count - failed), failed)
            except Exception:
                _logger.exception("General failure when trying to fetch mail from %s server %s.", server.type, server.name)
            finally:
                if imap_server:
                    imap_server.close()
                    imap_server.logout()
        elif server.type == 'pop':
            try:
                while True:
                    pop_server = server.connect()
                    (numMsgs, totalSize) = pop_server.stat()
                    pop_server.list()
                    for num in range(1, min(MAX_POP_MESSAGES, numMsgs) + 1):
                        (header, msges, octets) = pop_server.retr(num)
                        msg = '\n'.join(msges)
                        res_id = None
                        try:
                            res_id = mail_thread.message_process(cr, uid, server.object_id.model,
                                                                 msg,
                                                                 save_original=server.original,
                                                                 strip_attachments=(not server.attach),
                                                                 context=context)
                            pop_server.dele(num)
                        except Exception:
                            _logger.exception('Failed to process mail from %s server %s.', server.type, server.name)
                            failed += 1
                        if res_id and server.action_id:
                            action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids': [res_id], 'active_model': context.get("thread_model", server.object_id.model)})
                        cr.commit()
                    if numMsgs < MAX_POP_MESSAGES:
                        break
                    pop_server.quit()
                _logger.info("Fetched %d email(s) on %s server %s; %d succeeded, %d failed.", numMsgs, server.type, server.name, (numMsgs - failed), failed)
            except Exception:
                _logger.exception("General failure when trying to fetch mail from %s server %s.", server.type, server.name)
            finally:
                if pop_server:
                    pop_server.quit()
        server.write({'date': time.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT)})
    return True
[ "def", "fetch_mail", "(", "self", ",", "cr", ",", "uid", ",", "ids", ",", "context", "=", "None", ")", ":", "context", "=", "dict", "(", "context", "or", "{", "}", ")", "context", "[", "'fetchmail_cron_running'", "]", "=", "True", "mail_thread", "=", ...
https://github.com/buke/GreenOdoo/blob/3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df/source/addons/fetchmail/fetchmail.py#L189-L264
Mailu/Mailu
1e53530164e9eaf77a89c322e34bff447ace5a28
core/admin/mailu/utils.py
python
handle_needs_login
()
return flask.redirect( flask.url_for('sso.login') )
redirect unauthorized requests to login page
redirect unauthorized requests to login page
[ "redirect", "unauthorized", "requests", "to", "login", "page" ]
def handle_needs_login():
    """ redirect unauthorized requests to login page """
    return flask.redirect(
        flask.url_for('sso.login')
    )
[ "def", "handle_needs_login", "(", ")", ":", "return", "flask", ".", "redirect", "(", "flask", ".", "url_for", "(", "'sso.login'", ")", ")" ]
https://github.com/Mailu/Mailu/blob/1e53530164e9eaf77a89c322e34bff447ace5a28/core/admin/mailu/utils.py#L42-L46
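A plausible wiring sketch: with Flask-Login, a handler like this would be registered so that @login_required redirects anonymous users; the login_manager instance here is an assumption, not taken from the Mailu source:

import flask_login

login_manager = flask_login.LoginManager()
login_manager.unauthorized_handler(handle_needs_login)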
linxid/Machine_Learning_Study_Path
558e82d13237114bbb8152483977806fc0c222af
Machine Learning In Action/Chapter5-LogisticRegression/venv/Lib/site-packages/setuptools/package_index.py
python
PackageIndex.download
(self, spec, tmpdir)
return getattr(self.fetch_distribution(spec, tmpdir), 'location', None)
Locate and/or download `spec` to `tmpdir`, returning a local path

`spec` may be a ``Requirement`` object, or a string containing a URL, an
existing local filename, or a project/version requirement spec (i.e. the
string form of a ``Requirement`` object). If it is the URL of a .py file
with an unambiguous ``#egg=name-version`` tag (i.e., one that escapes
``-`` as ``_`` throughout), a trivial ``setup.py`` is automatically
created alongside the downloaded file.

If `spec` is a ``Requirement`` object or a string containing a
project/version requirement spec, this method returns the location of a
matching distribution (possibly after downloading it to `tmpdir`). If
`spec` is a locally existing file or directory name, it is simply
returned unchanged. If `spec` is a URL, it is downloaded to a subpath of
`tmpdir`, and the local filename is returned. Various errors may be
raised if a problem occurs during downloading.
Locate and/or download `spec` to `tmpdir`, returning a local path
[ "Locate", "and", "/", "or", "download", "spec", "to", "tmpdir", "returning", "a", "local", "path" ]
def download(self, spec, tmpdir):
    """Locate and/or download `spec` to `tmpdir`, returning a local path

    `spec` may be a ``Requirement`` object, or a string containing a URL,
    an existing local filename, or a project/version requirement spec
    (i.e. the string form of a ``Requirement`` object). If it is the URL
    of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
    that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
    automatically created alongside the downloaded file.

    If `spec` is a ``Requirement`` object or a string containing a
    project/version requirement spec, this method returns the location of
    a matching distribution (possibly after downloading it to `tmpdir`).
    If `spec` is a locally existing file or directory name, it is simply
    returned unchanged. If `spec` is a URL, it is downloaded to a subpath
    of `tmpdir`, and the local filename is returned. Various errors may be
    raised if a problem occurs during downloading.
    """
    if not isinstance(spec, Requirement):
        scheme = URL_SCHEME(spec)
        if scheme:
            # It's a url, download it to tmpdir
            found = self._download_url(scheme.group(1), spec, tmpdir)
            base, fragment = egg_info_for_url(spec)
            if base.endswith('.py'):
                found = self.gen_setup(found, fragment, tmpdir)
            return found
        elif os.path.exists(spec):
            # Existing file or directory, just return it
            return spec
        else:
            spec = parse_requirement_arg(spec)
    return getattr(self.fetch_distribution(spec, tmpdir), 'location', None)
[ "def", "download", "(", "self", ",", "spec", ",", "tmpdir", ")", ":", "if", "not", "isinstance", "(", "spec", ",", "Requirement", ")", ":", "scheme", "=", "URL_SCHEME", "(", "spec", ")", "if", "scheme", ":", "# It's a url, download it to tmpdir", "found", ...
https://github.com/linxid/Machine_Learning_Study_Path/blob/558e82d13237114bbb8152483977806fc0c222af/Machine Learning In Action/Chapter5-LogisticRegression/venv/Lib/site-packages/setuptools/package_index.py#L559-L591
samuelclay/NewsBlur
2c45209df01a1566ea105e04d499367f32ac9ad2
apps/social/views.py
python
mute_story
(request, secret_token, shared_story_id)
return {}
[]
def mute_story(request, secret_token, shared_story_id):
    user_profile = Profile.objects.get(secret_token=secret_token)
    shared_story = MSharedStory.objects.get(id=shared_story_id)
    shared_story.mute_for_user(user_profile.user_id)
    return {}
[ "def", "mute_story", "(", "request", ",", "secret_token", ",", "shared_story_id", ")", ":", "user_profile", "=", "Profile", ".", "objects", ".", "get", "(", "secret_token", "=", "secret_token", ")", "shared_story", "=", "MSharedStory", ".", "objects", ".", "ge...
https://github.com/samuelclay/NewsBlur/blob/2c45209df01a1566ea105e04d499367f32ac9ad2/apps/social/views.py#L899-L904
TencentCloud/tencentcloud-sdk-python
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
tencentcloud/iai/v20200303/models.py
python
Hair.__init__
(self)
:param Length: Hair length information. The corresponding AttributeItem Type is: 0 bald, 1 short hair, 2 medium hair, 3 long hair, 4 tied hair.
:type Length: :class:`tencentcloud.iai.v20200303.models.AttributeItem`
:param Bang: Bangs information. The corresponding AttributeItem Type is: 0 no bangs, 1 has bangs.
:type Bang: :class:`tencentcloud.iai.v20200303.models.AttributeItem`
:param Color: Hair color information. The corresponding AttributeItem Type is: 0 black, 1 blond, 2 brown, 3 gray or white.
:type Color: :class:`tencentcloud.iai.v20200303.models.AttributeItem`
:param Length: Hair length information. The corresponding AttributeItem Type is: 0 bald, 1 short hair, 2 medium hair, 3 long hair, 4 tied hair.
:type Length: :class:`tencentcloud.iai.v20200303.models.AttributeItem`
:param Bang: Bangs information. The corresponding AttributeItem Type is: 0 no bangs, 1 has bangs.
:type Bang: :class:`tencentcloud.iai.v20200303.models.AttributeItem`
:param Color: Hair color information. The corresponding AttributeItem Type is: 0 black, 1 blond, 2 brown, 3 gray or white.
:type Color: :class:`tencentcloud.iai.v20200303.models.AttributeItem`
[ "r", ":", "param", "Length", ":", "头发长度信息。", "AttributeItem对应的Type为", "——", "0:光头,1:短发,2:中发,3:长发,4:绑发。", ":", "type", "Length", ":", ":", "class", ":", "tencentcloud", ".", "iai", ".", "v20200303", ".", "models", ".", "AttributeItem", ":", "param", "Bang", ":"...
def __init__(self):
    r"""
    :param Length: Hair length information. The corresponding AttributeItem Type is: 0 bald, 1 short hair, 2 medium hair, 3 long hair, 4 tied hair.
    :type Length: :class:`tencentcloud.iai.v20200303.models.AttributeItem`
    :param Bang: Bangs information. The corresponding AttributeItem Type is: 0 no bangs, 1 has bangs.
    :type Bang: :class:`tencentcloud.iai.v20200303.models.AttributeItem`
    :param Color: Hair color information. The corresponding AttributeItem Type is: 0 black, 1 blond, 2 brown, 3 gray or white.
    :type Color: :class:`tencentcloud.iai.v20200303.models.AttributeItem`
    """
    self.Length = None
    self.Bang = None
    self.Color = None
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "Length", "=", "None", "self", ".", "Bang", "=", "None", "self", ".", "Color", "=", "None" ]
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/iai/v20200303/models.py#L3063-L3077
pysmt/pysmt
ade4dc2a825727615033a96d31c71e9f53ce4764
pysmt/smtlib/printers.py
python
SmtPrinter._walk_quantifier
(self, operator, formula)
[]
def _walk_quantifier(self, operator, formula):
    assert len(formula.quantifier_vars()) > 0
    self.write("(%s (" % operator)
    for s in formula.quantifier_vars():
        self.write("(")
        yield s
        self.write(" %s)" % s.symbol_type().as_smtlib(False))
    self.write(") ")
    yield formula.arg(0)
    self.write(")")
[ "def", "_walk_quantifier", "(", "self", ",", "operator", ",", "formula", ")", ":", "assert", "len", "(", "formula", ".", "quantifier_vars", "(", ")", ")", ">", "0", "self", ".", "write", "(", "\"(%s (\"", "%", "operator", ")", "for", "s", "in", "formul...
https://github.com/pysmt/pysmt/blob/ade4dc2a825727615033a96d31c71e9f53ce4764/pysmt/smtlib/printers.py#L134-L145
edisonlz/fastor
342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3
base/site-packages/django/contrib/auth/models.py
python
AbstractUser.email_user
(self, subject, message, from_email=None)
Sends an email to this User.
Sends an email to this User.
[ "Sends", "an", "email", "to", "this", "User", "." ]
def email_user(self, subject, message, from_email=None):
    """
    Sends an email to this User.
    """
    send_mail(subject, message, from_email, [self.email])
[ "def", "email_user", "(", "self", ",", "subject", ",", "message", ",", "from_email", "=", "None", ")", ":", "send_mail", "(", "subject", ",", "message", ",", "from_email", ",", "[", "self", ".", "email", "]", ")" ]
https://github.com/edisonlz/fastor/blob/342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3/base/site-packages/django/contrib/auth/models.py#L409-L413
Veil-Framework/Veil
c825577bbc97db04be5c47e004369038491f6b7a
tools/evasion/evasion_common/encryption.py
python
buildAryaLauncher
(raw)
return payload_code
Takes a raw set of bytes and builds a launcher shell to b64decode/decrypt a string rep of the bytes, and then use reflection to invoke the original .exe
Takes a raw set of bytes and builds a launcher shell to b64decode/decrypt a string rep of the bytes, and then use reflection to invoke the original .exe
[ "Takes", "a", "raw", "set", "of", "bytes", "and", "builds", "a", "launcher", "shell", "to", "b64decode", "/", "decrypt", "a", "string", "rep", "of", "the", "bytes", "and", "then", "use", "reflection", "to", "invoke", "the", "original", ".", "exe" ]
def buildAryaLauncher(raw):
    """
    Takes a raw set of bytes and builds a launcher shell to
    b64decode/decrypt a string rep of the bytes, and then
    use reflection to invoke the original .exe
    """
    # the 'key' is a randomized alpha lookup table [a-zA-Z] used for substitution
    key = ''.join(sorted(list(string.ascii_letters), key=lambda *args: random.random()))
    base64payload = b64sub(raw, key)
    payload_code = "using System; using System.Collections.Generic; using System.Text;"
    payload_code += "using System.IO; using System.Reflection; using System.Linq;\n"
    decodeFuncName = evasion_helpers.randomString()
    baseStringName = evasion_helpers.randomString()
    targetStringName = evasion_helpers.randomString()
    dictionaryName = evasion_helpers.randomString()
    # build out the letter sub decrypt function
    payload_code += "namespace %s { class %s { private static string %s(string t, string k) {\n" % (evasion_helpers.randomString(), evasion_helpers.randomString(), decodeFuncName)
    payload_code += "string %s = \"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ\";\n" % (baseStringName)
    payload_code += "string %s = \"\"; Dictionary<char, char> %s = new Dictionary<char, char>();\n" % (targetStringName, dictionaryName)
    payload_code += "for (int i = 0; i < %s.Length; ++i){ %s.Add(k[i], %s[i]); }\n" % (baseStringName, dictionaryName, baseStringName)
    payload_code += "for (int i = 0; i < t.Length; ++i){ if ((t[i] >= 'A' && t[i] <= 'Z') || (t[i] >= 'a' && t[i] <= 'z')) { %s += %s[t[i]];}\n" % (targetStringName, dictionaryName)
    payload_code += "else { %s += t[i]; }} return %s; }\n" % (targetStringName, targetStringName)
    base64PayloadName = evasion_helpers.randomString()
    assemblyName = evasion_helpers.randomString()
    # build out Main()
    assemblyName = evasion_helpers.randomString()
    methodInfoName = evasion_helpers.randomString()
    keyName = evasion_helpers.randomString()
    payload_code += "static void Main() {\n"
    payload_code += "string %s = \"%s\";\n" % (base64PayloadName, base64payload)
    payload_code += "string %s = \"%s\";\n" % (keyName, key)
    # load up the assembly of the decoded binary
    payload_code += "Assembly %s = Assembly.Load(Convert.FromBase64String(%s(%s, %s)));\n" % (assemblyName, decodeFuncName, base64PayloadName, keyName)
    payload_code += "MethodInfo %s = %s.EntryPoint;\n" % (methodInfoName, assemblyName)
    # use reflection to jump to its entry point
    payload_code += "%s.Invoke(%s.CreateInstance(%s.Name), null);\n" % (methodInfoName, assemblyName, methodInfoName)
    payload_code += "}}}\n"
    return payload_code
[ "def", "buildAryaLauncher", "(", "raw", ")", ":", "# the 'key' is a randomized alpha lookup table [a-zA-Z] used for substitution", "key", "=", "''", ".", "join", "(", "sorted", "(", "list", "(", "string", ".", "ascii_letters", ")", ",", "key", "=", "lambda", "*", ...
https://github.com/Veil-Framework/Veil/blob/c825577bbc97db04be5c47e004369038491f6b7a/tools/evasion/evasion_common/encryption.py#L69-L113
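The randomized substitution key built on the first line of the function, shown in isolation: shuffling the 52 ASCII letters by sorting with a random key.

import random
import string

key = ''.join(sorted(list(string.ascii_letters), key=lambda *args: random.random()))
print(len(key))  # 52, every letter exactly once
print(key[:8])   # e.g. 'qTbWzAeM', a different shuffle each run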
donnemartin/gitsome
d7c57abc7cb66e9c910a844f15d4536866da3310
xonsh/ast.py
python
load_attribute_chain
(name, lineno=None, col=None)
return node
Creates an AST that loads variable name that may (or may not) have attribute chains. For example, "a.b.c"
Creates an AST that loads variable name that may (or may not) have attribute chains. For example, "a.b.c"
[ "Creates", "an", "AST", "that", "loads", "variable", "name", "that", "may", "(", "or", "may", "not", ")", "have", "attribute", "chains", ".", "For", "example", "a", ".", "b", ".", "c" ]
def load_attribute_chain(name, lineno=None, col=None):
    """Creates an AST that loads variable name that may (or may not)
    have attribute chains. For example, "a.b.c"
    """
    names = name.split(".")
    node = Name(id=names.pop(0), ctx=Load(), lineno=lineno, col_offset=col)
    for attr in names:
        node = Attribute(
            value=node, attr=attr, ctx=Load(), lineno=lineno, col_offset=col
        )
    return node
[ "def", "load_attribute_chain", "(", "name", ",", "lineno", "=", "None", ",", "col", "=", "None", ")", ":", "names", "=", "name", ".", "split", "(", "\".\"", ")", "node", "=", "Name", "(", "id", "=", "names", ".", "pop", "(", "0", ")", ",", "ctx",...
https://github.com/donnemartin/gitsome/blob/d7c57abc7cb66e9c910a844f15d4536866da3310/xonsh/ast.py#L271-L281
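Quick check of the resulting AST shape; output abbreviated, and Name/Attribute/Load here come from the standard ast module (xonsh re-exports them):

import ast
from ast import Name, Attribute, Load

node = load_attribute_chain("a.b.c", lineno=1, col=0)
print(ast.dump(node))
# Attribute(value=Attribute(value=Name(id='a', ...), attr='b', ...), attr='c', ...)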
Fallen-Breath/MCDReforged
fdb1d2520b35f916123f265dbd94603981bb2b0c
mcdreforged/permission/permission_manager.py
python
PermissionManager._pre_save
(self, data)
[]
def _pre_save(self, data):
    # Deduplicate the permission data
    for key, value in data.items():
        if key in PermissionLevel.NAMES and isinstance(value, list):
            data[key] = misc_util.unique_list(data[key])
    # Change empty list to None for nicer look in the .yml file
    for key, value in data.items():
        if key in PermissionLevel.NAMES and value is not None and len(value) == 0:
            data[key] = None
[ "def", "_pre_save", "(", "self", ",", "data", ")", ":", "# Deduplicate the permission data=", "for", "key", ",", "value", "in", "data", ".", "items", "(", ")", ":", "if", "key", "in", "PermissionLevel", ".", "NAMES", "and", "isinstance", "(", "value", ",",...
https://github.com/Fallen-Breath/MCDReforged/blob/fdb1d2520b35f916123f265dbd94603981bb2b0c/mcdreforged/permission/permission_manager.py#L32-L40
wbond/packagecontrol.io
9f5eb7e3392e6bc2ad979ad32d3dd27ef9c00b20
app/lib/package_control/deps/asn1crypto/x509.py
python
IPAddress.parse
(self, spec=None, spec_params=None)
This method is not applicable to IP addresses
This method is not applicable to IP addresses
[ "This", "method", "is", "not", "applicable", "to", "IP", "addresses" ]
def parse(self, spec=None, spec_params=None):
    """
    This method is not applicable to IP addresses
    """
    raise ValueError(unwrap(
        '''
        IP address values can not be parsed
        '''
    ))
[ "def", "parse", "(", "self", ",", "spec", "=", "None", ",", "spec_params", "=", "None", ")", ":", "raise", "ValueError", "(", "unwrap", "(", "'''\n IP address values can not be parsed\n '''", ")", ")" ]
https://github.com/wbond/packagecontrol.io/blob/9f5eb7e3392e6bc2ad979ad32d3dd27ef9c00b20/app/lib/package_control/deps/asn1crypto/x509.py#L295-L304
demisto/content
5c664a65b992ac8ca90ac3f11b1b2cdf11ee9b07
Packs/epo/Integrations/epoV2/epoV2.py
python
Client.get_system_group_path
(self, group_id: int)
return ''
Return the system group path for a given group_id.
Args:
    group_id (str): the groupID to find
Returns (str): returns the system group path for a given group id
Return the system group path for a given group_id.
Args:
    group_id (str): the groupID to find
Returns (str): returns the system group path for a given group id
[ "return", "the", "system", "group", "path", "for", "giving", "group_id", "Args", ":", "group_id", "(", "str", ")", ":", "the", "groupID", "to", "find", "Returns", "(", "str", ")", ":", "returns", "the", "system", "group", "path", "for", "a", "giving", ...
def get_system_group_path(self, group_id: int) -> str:
    """
    Return the system group path for a given group_id.
    Args:
        group_id (str): the groupID to find
    Returns (str): returns the system group path for a given group id
    """
    response_json, response = self.get_system_tree_groups(search_text='')
    if response is None:
        return ''
    for entry in response_json:
        if group_id == entry['groupId']:
            return entry['groupPath']
    return ''
[ "def", "get_system_group_path", "(", "self", ",", "group_id", ":", "int", ")", "->", "str", ":", "response_json", ",", "response", "=", "self", ".", "get_system_tree_groups", "(", "search_text", "=", "''", ")", "if", "response", "is", "None", ":", "return", ...
https://github.com/demisto/content/blob/5c664a65b992ac8ca90ac3f11b1b2cdf11ee9b07/Packs/epo/Integrations/epoV2/epoV2.py#L279-L293
ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework
cb692f527e4e819b6c228187c5702d990a180043
external/Scripting Engine/Xenotix Python Scripting Engine/Lib/wsgiref/util.py
python
request_uri
(environ, include_query=1)
return url
Return the full request URI, optionally including the query string
Return the full request URI, optionally including the query string
[ "Return", "the", "full", "request", "URI", "optionally", "including", "the", "query", "string" ]
def request_uri(environ, include_query=1):
    """Return the full request URI, optionally including the query string"""
    url = application_uri(environ)
    from urllib import quote
    path_info = quote(environ.get('PATH_INFO',''), safe='/;=,')
    if not environ.get('SCRIPT_NAME'):
        url += path_info[1:]
    else:
        url += path_info
    if include_query and environ.get('QUERY_STRING'):
        url += '?' + environ['QUERY_STRING']
    return url
[ "def", "request_uri", "(", "environ", ",", "include_query", "=", "1", ")", ":", "url", "=", "application_uri", "(", "environ", ")", "from", "urllib", "import", "quote", "path_info", "=", "quote", "(", "environ", ".", "get", "(", "'PATH_INFO'", ",", "''", ...
https://github.com/ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework/blob/cb692f527e4e819b6c228187c5702d990a180043/external/Scripting Engine/Xenotix Python Scripting Engine/Lib/wsgiref/util.py#L63-L74
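Worked example with a hand-built WSGI environ; the vendored copy above is Python 2, and the same behavior is available in Python 3 as wsgiref.util.request_uri:

environ = {
    "wsgi.url_scheme": "http",
    "HTTP_HOST": "example.com",
    "SCRIPT_NAME": "/app",
    "PATH_INFO": "/items",
    "QUERY_STRING": "q=1",
}
print(request_uri(environ))  # http://example.com/app/items?q=1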
ericgazoni/openpyxl
c55988e4904d4337ce4c35ab8b7dc305bca9de23
openpyxl/writer/workbook.py
python
write_workbook_rels
(workbook)
return get_document_content(root)
Write the workbook relationships xml.
Write the workbook relationships xml.
[ "Write", "the", "workbook", "relationships", "xml", "." ]
def write_workbook_rels(workbook):
    """Write the workbook relationships xml."""
    root = Element('{%s}Relationships' % PKG_REL_NS)
    for i in range(1, len(workbook.worksheets) + 1):
        SubElement(root, '{%s}Relationship' % PKG_REL_NS,
                   {'Id': 'rId%d' % i,
                    'Target': 'worksheets/sheet%s.xml' % i,
                    'Type': '%s/worksheet' % REL_NS})
    rid = len(workbook.worksheets) + 1
    SubElement(root, '{%s}Relationship' % PKG_REL_NS,
               {'Id': 'rId%d' % rid,
                'Target': 'sharedStrings.xml',
                'Type': '%s/sharedStrings' % REL_NS})
    SubElement(root, '{%s}Relationship' % PKG_REL_NS,
               {'Id': 'rId%d' % (rid + 1),
                'Target': 'styles.xml',
                'Type': '%s/styles' % REL_NS})
    SubElement(root, '{%s}Relationship' % PKG_REL_NS,
               {'Id': 'rId%d' % (rid + 2),
                'Target': 'theme/theme1.xml',
                'Type': '%s/theme' % REL_NS})
    if workbook.vba_archive:
        SubElement(root, '{%s}Relationship' % PKG_REL_NS,
                   {'Id': 'rId%d' % (rid + 3),
                    'Target': 'vbaProject.bin',
                    'Type': 'http://schemas.microsoft.com/office/2006/relationships/vbaProject'})
    return get_document_content(root)
[ "def", "write_workbook_rels", "(", "workbook", ")", ":", "root", "=", "Element", "(", "'{%s}Relationships'", "%", "PKG_REL_NS", ")", "for", "i", "in", "range", "(", "1", ",", "len", "(", "workbook", ".", "worksheets", ")", "+", "1", ")", ":", "SubElement...
https://github.com/ericgazoni/openpyxl/blob/c55988e4904d4337ce4c35ab8b7dc305bca9de23/openpyxl/writer/workbook.py#L287-L308
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/lib_openshift/library/oc_adm_policy_user.py
python
RoleBinding.add_subject
(self, inc_subject)
return True
add a subject
add a subject
[ "add", "a", "subject" ]
def add_subject(self, inc_subject):
    ''' add a subject '''
    if self.subjects:
        # pylint: disable=no-member
        self.subjects.append(inc_subject)
    else:
        self.put(RoleBinding.subjects_path, [inc_subject])
    return True
[ "def", "add_subject", "(", "self", ",", "inc_subject", ")", ":", "if", "self", ".", "subjects", ":", "# pylint: disable=no-member", "self", ".", "subjects", ".", "append", "(", "inc_subject", ")", "else", ":", "self", ".", "put", "(", "RoleBinding", ".", "...
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/lib_openshift/library/oc_adm_policy_user.py#L1601-L1609
ENCODE-DCC/atac-seq-pipeline
d777eb707d481156fcc21089fe9c3e6f2aec933e
src/encode_lib_genomic.py
python
get_samtools_res_param
(subcmd, nth=1, mem_gb=None)
return res_param
Make resource parameters (-@, -m) for samtools. -@: This means number of total/additional threads. -m: This means memory per thread (for samtools sort only). It's clipped between 1 and DEFAULT_SAMTOOLS_MAX_MEM_MB_PER_THREAD MBs. Tested with samtools 1.9. Lower versions of samtools work a bit differently. For such lower versions, -@ is number of threads. Run `samtools view --help` with your version and check if it is based on total or additional number of threads. Args: nth: Number of threads. - index: total threads. - all other sub-commands: additional threads. mem_gb: Total memory in GBs.
Make resource parameters (-@, -m) for samtools. -@: This means number of total/additional threads. -m: This means memory per thread (for samtools sort only). It's clipped between 1 and DEFAULT_SAMTOOLS_MAX_MEM_MB_PER_THREAD MBs.
[ "Make", "resource", "parameters", "(", "-", "@", "-", "m", ")", "for", "samtools", ".", "-", "@", ":", "This", "means", "number", "of", "total", "/", "additional", "threads", ".", "-", "m", ":", "This", "means", "memory", "per", "thread", "(", "for",...
def get_samtools_res_param(subcmd, nth=1, mem_gb=None): """Make resource parameters (-@, -m) for samtools. -@: This means number of total/additional threads. -m: This means memory per thread (for samtools sort only). It's clipped between 1 and DEFAULT_SAMTOOLS_MAX_MEM_MB_PER_THREAD MBs. Tested with samtools 1.9. Lower versions of samtools work a bit differently. For such lower versions, -@ is number of threads. Run `samtools view --help` with your version and check if it is based on total or additional number of threads. Args: nth: Number of threads. - index: total threads. - all other sub-commands: additional threads. mem_gb: Total memory in GBs. """ res_param = '' if subcmd == 'index': res_param += '-@ {num_total_threads} '.format( num_total_threads=nth, ) else: res_param += '-@ {num_additional_threads} '.format( num_additional_threads=nth - 1, ) if subcmd == 'sort': if nth and mem_gb: mem_mb_per_thread = min( math.floor(mem_gb * 1024.0 / nth), DEFAULT_SAMTOOLS_MAX_MEM_MB_PER_THREAD ) res_param += '-m {mem}M '.format(mem=mem_mb_per_thread) return res_param
[ "def", "get_samtools_res_param", "(", "subcmd", ",", "nth", "=", "1", ",", "mem_gb", "=", "None", ")", ":", "res_param", "=", "''", "if", "subcmd", "==", "'index'", ":", "res_param", "+=", "'-@ {num_total_threads} '", ".", "format", "(", "num_total_threads", ...
https://github.com/ENCODE-DCC/atac-seq-pipeline/blob/d777eb707d481156fcc21089fe9c3e6f2aec933e/src/encode_lib_genomic.py#L96-L136
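The thread and memory arithmetic is easy to check by hand. A minimal sketch of what a `sort` invocation ends up with, assuming a hypothetical cap of 4096 MB for DEFAULT_SAMTOOLS_MAX_MEM_MB_PER_THREAD (the real constant is defined elsewhere in the module):

import math

DEFAULT_SAMTOOLS_MAX_MEM_MB_PER_THREAD = 4096  # assumed cap, for illustration only
nth, mem_gb = 4, 8
mem_mb_per_thread = min(math.floor(mem_gb * 1024.0 / nth),
                        DEFAULT_SAMTOOLS_MAX_MEM_MB_PER_THREAD)
# sort gets additional threads (-@ nth-1) plus per-thread memory (-m)
print('samtools sort -@ {} -m {}M in.bam'.format(nth - 1, int(mem_mb_per_thread)))
# samtools sort -@ 3 -m 2048M in.bam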
rowliny/DiffHelper
ab3a96f58f9579d0023aed9ebd785f4edf26f8af
Tool/SitePackages/nltk/grammar.py
python
ProbabilisticDependencyGrammar.__str__
(self)
return str
Return a verbose string representation of the ``ProbabilisticDependencyGrammar`` :rtype: str
Return a verbose string representation of the ``ProbabilisticDependencyGrammar``
[ "Return", "a", "verbose", "string", "representation", "of", "the", "ProbabilisticDependencyGrammar" ]
def __str__(self): """ Return a verbose string representation of the ``ProbabilisticDependencyGrammar`` :rtype: str """ str = "Statistical dependency grammar with %d productions" % len( self._productions ) for production in self._productions: str += "\n %s" % production str += "\nEvents:" for event in self._events: str += "\n %d:%s" % (self._events[event], event) str += "\nTags:" for tag_word in self._tags: str += f"\n {tag_word}:\t({self._tags[tag_word]})" return str
[ "def", "__str__", "(", "self", ")", ":", "str", "=", "\"Statistical dependency grammar with %d productions\"", "%", "len", "(", "self", ".", "_productions", ")", "for", "production", "in", "self", ".", "_productions", ":", "str", "+=", "\"\\n %s\"", "%", "produ...
https://github.com/rowliny/DiffHelper/blob/ab3a96f58f9579d0023aed9ebd785f4edf26f8af/Tool/SitePackages/nltk/grammar.py#L1158-L1175
samgranger/EQGRP
fa0a1e1460767b8312839dc4be922f26ecdd250b
Firewall/EXPLOITS/EXBA/scapy/utils.py
python
RawPcapReader.next
(self)
return pkt
implement the iterator protocol on a set of packets in a pcap file
implement the iterator protocol on a set of packets in a pcap file
[ "implement", "the", "iterator", "protocol", "on", "a", "set", "of", "packets", "in", "a", "pcap", "file" ]
def next(self): """implement the iterator protocol on a set of packets in a pcap file""" pkt = self.read_packet() if pkt == None: raise StopIteration return pkt
[ "def", "next", "(", "self", ")", ":", "pkt", "=", "self", ".", "read_packet", "(", ")", "if", "pkt", "==", "None", ":", "raise", "StopIteration", "return", "pkt" ]
https://github.com/samgranger/EQGRP/blob/fa0a1e1460767b8312839dc4be922f26ecdd250b/Firewall/EXPLOITS/EXBA/scapy/utils.py#L505-L510
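Because next() raises StopIteration once read_packet() returns None, the reader supports plain iteration. A sketch, assuming a capture exists at the made-up path; whether each iteration yields raw bytes or bytes plus timestamp metadata depends on the scapy vintage:

from scapy.utils import RawPcapReader

for pkt in RawPcapReader("capture.pcap"):  # hypothetical pcap file
    print(repr(pkt)[:60])  # one entry per packet record in the file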
sagemath/sage
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
src/sage/lfunctions/dokchitser.py
python
Dokchitser.num_coeffs
(self, T=1)
return Integer(self._gp_call_inst('cflength', T))
Return number of coefficients `a_n` that are needed in order to perform most relevant `L`-function computations to the desired precision. EXAMPLES:: sage: E = EllipticCurve('11a') sage: L = E.lseries().dokchitser(algorithm='gp') sage: L.num_coeffs() 26 sage: E = EllipticCurve('5077a') sage: L = E.lseries().dokchitser(algorithm='gp') sage: L.num_coeffs() 568 sage: L = Dokchitser(conductor=1, gammaV=[0], weight=1, eps=1, poles=[1], residues=[-1], init='1') sage: L.num_coeffs() 4 Verify that ``num_coeffs`` works with non-real spectral parameters, e.g. for the L-function of the level 10 Maass form with eigenvalue 2.7341055592527126:: sage: ev = 2.7341055592527126 sage: L = Dokchitser(conductor=10, gammaV=[ev*i, -ev*i],weight=2,eps=1) sage: L.num_coeffs() 26
Return number of coefficients `a_n` that are needed in order to perform most relevant `L`-function computations to the desired precision.
[ "Return", "number", "of", "coefficients", "a_n", "that", "are", "needed", "in", "order", "to", "perform", "most", "relevant", "L", "-", "function", "computations", "to", "the", "desired", "precision", "." ]
def num_coeffs(self, T=1): """ Return number of coefficients `a_n` that are needed in order to perform most relevant `L`-function computations to the desired precision. EXAMPLES:: sage: E = EllipticCurve('11a') sage: L = E.lseries().dokchitser(algorithm='gp') sage: L.num_coeffs() 26 sage: E = EllipticCurve('5077a') sage: L = E.lseries().dokchitser(algorithm='gp') sage: L.num_coeffs() 568 sage: L = Dokchitser(conductor=1, gammaV=[0], weight=1, eps=1, poles=[1], residues=[-1], init='1') sage: L.num_coeffs() 4 Verify that ``num_coeffs`` works with non-real spectral parameters, e.g. for the L-function of the level 10 Maass form with eigenvalue 2.7341055592527126:: sage: ev = 2.7341055592527126 sage: L = Dokchitser(conductor=10, gammaV=[ev*i, -ev*i],weight=2,eps=1) sage: L.num_coeffs() 26 """ return Integer(self._gp_call_inst('cflength', T))
[ "def", "num_coeffs", "(", "self", ",", "T", "=", "1", ")", ":", "return", "Integer", "(", "self", ".", "_gp_call_inst", "(", "'cflength'", ",", "T", ")", ")" ]
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/lfunctions/dokchitser.py#L347-L376
python-diamond/Diamond
7000e16cfdf4508ed9291fc4b3800592557b2431
src/collectors/squid/squid.py
python
SquidCollector.get_default_config_help
(self)
return config_help
[]
def get_default_config_help(self): config_help = super(SquidCollector, self).get_default_config_help() config_help.update({ 'hosts': 'List of hosts to collect from. Format is ' + '[nickname@]host[:port], [nickname@]host[:port], etc', }) return config_help
[ "def", "get_default_config_help", "(", "self", ")", ":", "config_help", "=", "super", "(", "SquidCollector", ",", "self", ")", ".", "get_default_config_help", "(", ")", "config_help", ".", "update", "(", "{", "'hosts'", ":", "'List of hosts to collect from. Format i...
https://github.com/python-diamond/Diamond/blob/7000e16cfdf4508ed9291fc4b3800592557b2431/src/collectors/squid/squid.py#L45-L51
graalvm/mx
29c0debab406352df3af246be2f8973be5db69ae
mx.py
python
MavenRepo.getSnapshotUrl
(self, groupId, artifactId, version)
return "{0}/{1}/{2}/{3}/maven-metadata.xml".format(self.repourl, groupId.replace('.', '/'), artifactId, version)
[]
def getSnapshotUrl(self, groupId, artifactId, version): return "{0}/{1}/{2}/{3}/maven-metadata.xml".format(self.repourl, groupId.replace('.', '/'), artifactId, version)
[ "def", "getSnapshotUrl", "(", "self", ",", "groupId", ",", "artifactId", ",", "version", ")", ":", "return", "\"{0}/{1}/{2}/{3}/maven-metadata.xml\"", ".", "format", "(", "self", ".", "repourl", ",", "groupId", ".", "replace", "(", "'.'", ",", "'/'", ")", ",...
https://github.com/graalvm/mx/blob/29c0debab406352df3af246be2f8973be5db69ae/mx.py#L10891-L10892
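Since the method is pure string formatting, the resulting URL is easy to preview. A sketch with made-up Maven coordinates:

repourl = "https://repo.example.org/snapshots"  # hypothetical repository URL
groupId, artifactId, version = "com.oracle", "mx", "5.0-SNAPSHOT"
url = "{0}/{1}/{2}/{3}/maven-metadata.xml".format(
    repourl, groupId.replace('.', '/'), artifactId, version)
print(url)
# https://repo.example.org/snapshots/com/oracle/mx/5.0-SNAPSHOT/maven-metadata.xml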
wistbean/learn_python3_spider
73c873f4845f4385f097e5057407d03dd37a117b
stackoverflow/venv/lib/python3.6/site-packages/pip-19.0.3-py3.6.egg/pip/_vendor/pkg_resources/__init__.py
python
_ReqExtras.markers_pass
(self, req, extras=None)
return not req.marker or any(extra_evals)
Evaluate markers for req against each extra that demanded it. Return False if the req has a marker and fails evaluation. Otherwise, return True.
Evaluate markers for req against each extra that demanded it.
[ "Evaluate", "markers", "for", "req", "against", "each", "extra", "that", "demanded", "it", "." ]
def markers_pass(self, req, extras=None): """ Evaluate markers for req against each extra that demanded it. Return False if the req has a marker and fails evaluation. Otherwise, return True. """ extra_evals = ( req.marker.evaluate({'extra': extra}) for extra in self.get(req, ()) + (extras or (None,)) ) return not req.marker or any(extra_evals)
[ "def", "markers_pass", "(", "self", ",", "req", ",", "extras", "=", "None", ")", ":", "extra_evals", "=", "(", "req", ".", "marker", ".", "evaluate", "(", "{", "'extra'", ":", "extra", "}", ")", "for", "extra", "in", "self", ".", "get", "(", "req",...
https://github.com/wistbean/learn_python3_spider/blob/73c873f4845f4385f097e5057407d03dd37a117b/stackoverflow/venv/lib/python3.6/site-packages/pip-19.0.3-py3.6.egg/pip/_vendor/pkg_resources/__init__.py#L942-L954
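The work happens in Marker.evaluate, called once per extra with the 'extra' key overridden in the environment. A standalone sketch using the packaging distribution directly (here it ships vendored under pip._vendor):

from packaging.markers import Marker

m = Marker('extra == "test"')
print(m.evaluate({'extra': 'test'}))  # True: the requirement applies
print(m.evaluate({'extra': 'docs'}))  # False: demanded by a different extra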
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/hassio/__init__.py
python
async_setup
(hass: HomeAssistant, config: ConfigType)
return True
Set up the Hass.io component.
Set up the Hass.io component.
[ "Set", "up", "the", "Hass", ".", "io", "component", "." ]
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: C901 """Set up the Hass.io component.""" # Check local setup for env in ("HASSIO", "HASSIO_TOKEN"): if os.environ.get(env): continue _LOGGER.error("Missing %s environment variable", env) if config_entries := hass.config_entries.async_entries(DOMAIN): hass.async_create_task( hass.config_entries.async_remove(config_entries[0].entry_id) ) return False async_load_websocket_api(hass) host = os.environ["HASSIO"] websession = async_get_clientsession(hass) hass.data[DOMAIN] = hassio = HassIO(hass.loop, websession, host) if not await hassio.is_connected(): _LOGGER.warning("Not connected with the supervisor / system too busy!") store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY) if (data := await store.async_load()) is None: data = {} refresh_token = None if "hassio_user" in data: user = await hass.auth.async_get_user(data["hassio_user"]) if user and user.refresh_tokens: refresh_token = list(user.refresh_tokens.values())[0] # Migrate old Hass.io users to be admin. if not user.is_admin: await hass.auth.async_update_user(user, group_ids=[GROUP_ID_ADMIN]) # Migrate old name if user.name == "Hass.io": await hass.auth.async_update_user(user, name=HASSIO_USER_NAME) if refresh_token is None: user = await hass.auth.async_create_system_user( HASSIO_USER_NAME, group_ids=[GROUP_ID_ADMIN] ) refresh_token = await hass.auth.async_create_refresh_token(user) data["hassio_user"] = user.id await store.async_save(data) # This overrides the normal API call that would be forwarded development_repo = config.get(DOMAIN, {}).get(CONF_FRONTEND_REPO) if development_repo is not None: hass.http.register_static_path( "/api/hassio/app", os.path.join(development_repo, "hassio/build"), False ) hass.http.register_view(HassIOView(host, websession)) await hass.components.panel_custom.async_register_panel( frontend_url_path="hassio", webcomponent_name="hassio-main", js_url="/api/hassio/app/entrypoint.js", embed_iframe=True, require_admin=True, ) await hassio.update_hass_api(config.get("http", {}), refresh_token) last_timezone = None async def push_config(_): """Push core config to Hass.io.""" nonlocal last_timezone new_timezone = str(hass.config.time_zone) if new_timezone == last_timezone: return last_timezone = new_timezone await hassio.update_hass_timezone(new_timezone) hass.bus.async_listen(EVENT_CORE_CONFIG_UPDATE, push_config) await push_config(None) async def async_service_handler(service: ServiceCall) -> None: """Handle service calls for Hass.io.""" api_endpoint = MAP_SERVICE_API[service.service] data = service.data.copy() addon = data.pop(ATTR_ADDON, None) slug = data.pop(ATTR_SLUG, None) payload = None # Pass data to Hass.io API if service.service == SERVICE_ADDON_STDIN: payload = data[ATTR_INPUT] elif api_endpoint.pass_data: payload = data # Call API try: await hassio.send_command( api_endpoint.command.format(addon=addon, slug=slug), payload=payload, timeout=api_endpoint.timeout, ) except HassioAPIError: # The exceptions are logged properly in hassio.send_command pass for service, settings in MAP_SERVICE_API.items(): hass.services.async_register( DOMAIN, service, async_service_handler, schema=settings.schema ) async def update_addon_stats(slug): """Update single addon stats.""" stats = await hassio.get_addon_stats(slug) return (slug, stats) async def update_info_data(now): """Update last available supervisor information.""" try: ( hass.data[DATA_INFO], hass.data[DATA_HOST_INFO], hass.data[DATA_STORE], hass.data[DATA_CORE_INFO], 
hass.data[DATA_SUPERVISOR_INFO], hass.data[DATA_OS_INFO], ) = await asyncio.gather( hassio.get_info(), hassio.get_host_info(), hassio.get_store(), hassio.get_core_info(), hassio.get_supervisor_info(), hassio.get_os_info(), ) addons = [ addon for addon in hass.data[DATA_SUPERVISOR_INFO].get("addons", []) if addon[ATTR_STATE] == ATTR_STARTED ] stats_data = await asyncio.gather( *[update_addon_stats(addon[ATTR_SLUG]) for addon in addons] ) hass.data[DATA_ADDONS_STATS] = dict(stats_data) if ADDONS_COORDINATOR in hass.data: await hass.data[ADDONS_COORDINATOR].async_refresh() except HassioAPIError as err: _LOGGER.warning("Can't read Supervisor data: %s", err) hass.helpers.event.async_track_point_in_utc_time( update_info_data, utcnow() + HASSIO_UPDATE_INTERVAL ) # Fetch data await update_info_data(None) async def async_handle_core_service(call: ServiceCall) -> None: """Service handler for handling core services.""" if call.service in SHUTDOWN_SERVICES and recorder.async_migration_in_progress( hass ): _LOGGER.error( "The system cannot %s while a database upgrade is in progress", call.service, ) raise HomeAssistantError( f"The system cannot {call.service} " "while a database upgrade is in progress." ) if call.service == SERVICE_HOMEASSISTANT_STOP: await hassio.stop_homeassistant() return errors = await conf_util.async_check_ha_config_file(hass) if errors: _LOGGER.error( "The system cannot %s because the configuration is not valid: %s", call.service, errors, ) persistent_notification.async_create( hass, "Config error. See [the logs](/config/logs) for details.", "Config validating", f"{HASS_DOMAIN}.check_config", ) raise HomeAssistantError( f"The system cannot {call.service} " f"because the configuration is not valid: {errors}" ) if call.service == SERVICE_HOMEASSISTANT_RESTART: await hassio.restart_homeassistant() # Mock core services for service in ( SERVICE_HOMEASSISTANT_STOP, SERVICE_HOMEASSISTANT_RESTART, SERVICE_CHECK_CONFIG, ): hass.services.async_register(HASS_DOMAIN, service, async_handle_core_service) # Init discovery Hass.io feature async_setup_discovery_view(hass, hassio) # Init auth Hass.io feature async_setup_auth_view(hass, user) # Init ingress Hass.io feature async_setup_ingress_view(hass, host) # Init add-on ingress panels await async_setup_addon_panel(hass, hassio) hass.async_create_task( hass.config_entries.flow.async_init(DOMAIN, context={"source": "system"}) ) return True
[ "async", "def", "async_setup", "(", "hass", ":", "HomeAssistant", ",", "config", ":", "ConfigType", ")", "->", "bool", ":", "# noqa: C901", "# Check local setup", "for", "env", "in", "(", "\"HASSIO\"", ",", "\"HASSIO_TOKEN\"", ")", ":", "if", "os", ".", "env...
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/hassio/__init__.py#L413-L640
pyqt/examples
843bb982917cecb2350b5f6d7f42c9b7fb142ec1
src/pyqt-official/desktop/systray/systray_rc.py
python
qInitResources
()
[]
def qInitResources(): QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
[ "def", "qInitResources", "(", ")", ":", "QtCore", ".", "qRegisterResourceData", "(", "0x01", ",", "qt_resource_struct", ",", "qt_resource_name", ",", "qt_resource_data", ")" ]
https://github.com/pyqt/examples/blob/843bb982917cecb2350b5f6d7f42c9b7fb142ec1/src/pyqt-official/desktop/systray/systray_rc.py#L2574-L2575
ray-project/ray
703c1610348615dcb8c2d141a0c46675084660f5
python/ray/_private/services.py
python
_find_gcs_address_or_die
()
return gcs_addresses.pop()
Find one GCS address unambiguously, or raise an error. Callers outside of this module should use get_ray_address_to_use_or_die()
Find one GCS address unambiguously, or raise an error.
[ "Find", "one", "GCS", "address", "unambiguously", "or", "raise", "an", "error", "." ]
def _find_gcs_address_or_die(): """Find one GCS address unambiguously, or raise an error. Callers outside of this module should use get_ray_address_to_use_or_die() """ gcs_addresses = _find_address_from_flag("--gcs-address") if len(gcs_addresses) > 1: raise ConnectionError( f"Found multiple active Ray instances: {gcs_addresses}. " "Please specify the one to connect to by setting `--address` flag " "or `RAY_ADDRESS` environment variable.") sys.exit(1) elif not gcs_addresses: raise ConnectionError( "Could not find any running Ray instance. " "Please specify the one to connect to by setting `--address` flag " "or `RAY_ADDRESS` environment variable.") return gcs_addresses.pop()
[ "def", "_find_gcs_address_or_die", "(", ")", ":", "gcs_addresses", "=", "_find_address_from_flag", "(", "\"--gcs-address\"", ")", "if", "len", "(", "gcs_addresses", ")", ">", "1", ":", "raise", "ConnectionError", "(", "f\"Found multiple active Ray instances: {gcs_addresse...
https://github.com/ray-project/ray/blob/703c1610348615dcb8c2d141a0c46675084660f5/python/ray/_private/services.py#L318-L335
llSourcell/AI_Artist
3038c06c2e389b9c919c881c9a169efe2fd7810e
lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.py
python
Specifier.prereleases
(self, value)
[]
def prereleases(self, value): self._prereleases = value
[ "def", "prereleases", "(", "self", ",", "value", ")", ":", "self", ".", "_prereleases", "=", "value" ]
https://github.com/llSourcell/AI_Artist/blob/3038c06c2e389b9c919c881c9a169efe2fd7810e/lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.py#L544-L545
TencentCloud/tencentcloud-sdk-python
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
tencentcloud/ft/v20200304/models.py
python
QueryFaceMorphJobResponse.__init__
(self)
r""" :param JobStatus: 当前任务状态:排队中、处理中、处理失败或者处理完成 :type JobStatus: str :param FaceMorphOutput: 人像渐变输出的结果信息 注意:此字段可能返回 null,表示取不到有效值。 :type FaceMorphOutput: :class:`tencentcloud.ft.v20200304.models.FaceMorphOutput` :param JobStatusCode: 当前任务状态码:1:排队中、3: 处理中、5: 处理失败、7:处理完成 注意:此字段可能返回 null,表示取不到有效值。 :type JobStatusCode: int :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str
r""" :param JobStatus: 当前任务状态:排队中、处理中、处理失败或者处理完成 :type JobStatus: str :param FaceMorphOutput: 人像渐变输出的结果信息 注意:此字段可能返回 null,表示取不到有效值。 :type FaceMorphOutput: :class:`tencentcloud.ft.v20200304.models.FaceMorphOutput` :param JobStatusCode: 当前任务状态码:1:排队中、3: 处理中、5: 处理失败、7:处理完成 注意:此字段可能返回 null,表示取不到有效值。 :type JobStatusCode: int :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str
[ "r", ":", "param", "JobStatus", ":", "当前任务状态:排队中、处理中、处理失败或者处理完成", ":", "type", "JobStatus", ":", "str", ":", "param", "FaceMorphOutput", ":", "人像渐变输出的结果信息", "注意:此字段可能返回", "null,表示取不到有效值。", ":", "type", "FaceMorphOutput", ":", ":", "class", ":", "tencentcloud", "."...
def __init__(self): r""" :param JobStatus: 当前任务状态:排队中、处理中、处理失败或者处理完成 :type JobStatus: str :param FaceMorphOutput: 人像渐变输出的结果信息 注意:此字段可能返回 null,表示取不到有效值。 :type FaceMorphOutput: :class:`tencentcloud.ft.v20200304.models.FaceMorphOutput` :param JobStatusCode: 当前任务状态码:1:排队中、3: 处理中、5: 处理失败、7:处理完成 注意:此字段可能返回 null,表示取不到有效值。 :type JobStatusCode: int :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.JobStatus = None self.FaceMorphOutput = None self.JobStatusCode = None self.RequestId = None
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "JobStatus", "=", "None", "self", ".", "FaceMorphOutput", "=", "None", "self", ".", "JobStatusCode", "=", "None", "self", ".", "RequestId", "=", "None" ]
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/ft/v20200304/models.py#L477-L493
buke/GreenOdoo
3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df
runtime/python/lib/python2.7/imputil.py
python
ImportManager._reload_hook
(self, module)
Python calls this hook to reload a module.
Python calls this hook to reload a module.
[ "Python", "calls", "this", "hook", "to", "reload", "a", "module", "." ]
def _reload_hook(self, module): "Python calls this hook to reload a module." # reloading of a module may or may not be possible (depending on the # importer), but at least we can validate that it's ours to reload importer = module.__dict__.get('__importer__') if not importer: ### oops. now what... pass # okay. it is using the imputil system, and we must delegate it, but # we don't know what to do (yet) ### we should blast the module dict and do another get_code(). need to ### flesh this out and add proper docco... raise SystemError, "reload not yet implemented"
[ "def", "_reload_hook", "(", "self", ",", "module", ")", ":", "# reloading of a module may or may not be possible (depending on the", "# importer), but at least we can validate that it's ours to reload", "importer", "=", "module", ".", "__dict__", ".", "get", "(", "'__importer__'"...
https://github.com/buke/GreenOdoo/blob/3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df/runtime/python/lib/python2.7/imputil.py#L200-L214
ckan/ckan
b3b01218ad88ed3fb914b51018abe8b07b07bff3
ckanext/datastore/backend/postgres.py
python
_programming_error_summary
(pe)
return message.split(u') ', 1)[-1]
u''' return the text description of a sqlalchemy DatabaseError without the actual SQL included, for raising as a ValidationError to send back to API users
u''' return the text description of a sqlalchemy DatabaseError without the actual SQL included, for raising as a ValidationError to send back to API users
[ "u", "return", "the", "text", "description", "of", "a", "sqlalchemy", "DatabaseError", "without", "the", "actual", "SQL", "included", "for", "raising", "as", "a", "ValidationError", "to", "send", "back", "to", "API", "users" ]
def _programming_error_summary(pe): u''' return the text description of a sqlalchemy DatabaseError without the actual SQL included, for raising as a ValidationError to send back to API users ''' # first line only, after the '(ProgrammingError)' text message = six.ensure_text(pe.args[0].split('\n')[0]) return message.split(u') ', 1)[-1]
[ "def", "_programming_error_summary", "(", "pe", ")", ":", "# first line only, after the '(ProgrammingError)' text", "message", "=", "six", ".", "ensure_text", "(", "pe", ".", "args", "[", "0", "]", ".", "split", "(", "'\\n'", ")", "[", "0", "]", ")", "return",...
https://github.com/ckan/ckan/blob/b3b01218ad88ed3fb914b51018abe8b07b07bff3/ckanext/datastore/backend/postgres.py#L2257-L2265
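The parsing can be checked without a live database by faking the exception's args (the error text below is made up):

import six

class FakeProgrammingError(Exception):
    pass

pe = FakeProgrammingError(
    u'(ProgrammingError) column "foo" does not exist\nLINE 1: SELECT foo ...')
message = six.ensure_text(pe.args[0].split('\n')[0])  # first line only
print(message.split(u') ', 1)[-1])  # column "foo" does not exist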
n1nj4sec/pupy
a5d766ea81fdfe3bc2c38c9bdaf10e9b75af3b39
pupy/library_patches/Crypto/Util/_raw_api.py
python
load_pycryptodome_raw_lib
(name, cdecl)
Load a shared library and return a handle to it. @name, the name of the library expressed as a PyCryptodome module, for instance Crypto.Cipher._raw_cbc. @cdecl, the C function declarations.
Load a shared library and return a handle to it.
[ "Load", "a", "shared", "library", "and", "return", "a", "handle", "to", "it", "." ]
def load_pycryptodome_raw_lib(name, cdecl): """Load a shared library and return a handle to it. @name, the name of the library expressed as a PyCryptodome module, for instance Crypto.Cipher._raw_cbc. @cdecl, the C function declarations. """ attempts = [] basename = '/'.join(name.split('.')) for ext in extension_suffixes: try: filename = basename + ext return CDLL(filename) except OSError, exp: attempts.append("Trying '%s': %s" % (filename, str(exp))) raise OSError("Crypto: Cannot load native module '%s': %s (%s)" % (name, ", ".join(attempts), exp))
[ "def", "load_pycryptodome_raw_lib", "(", "name", ",", "cdecl", ")", ":", "attempts", "=", "[", "]", "basename", "=", "'/'", ".", "join", "(", "name", ".", "split", "(", "'.'", ")", ")", "for", "ext", "in", "extension_suffixes", ":", "try", ":", "filena...
https://github.com/n1nj4sec/pupy/blob/a5d766ea81fdfe3bc2c38c9bdaf10e9b75af3b39/pupy/library_patches/Crypto/Util/_raw_api.py#L176-L194
wrobstory/sticky
0cc9ea8ce433eecdf6292d5610dd1e4cba4874ca
sticky/c3.py
python
C3.width
(self, width)
return self
Chart width: integer
Chart width: integer
[ "Chart", "width", ":", "integer" ]
def width(self, width): """Chart width: integer""" self.model_width = width return self
[ "def", "width", "(", "self", ",", "width", ")", ":", "self", ".", "model_width", "=", "width", "return", "self" ]
https://github.com/wrobstory/sticky/blob/0cc9ea8ce433eecdf6292d5610dd1e4cba4874ca/sticky/c3.py#L117-L120
hakril/PythonForWindows
61e027a678d5b87aa64fcf8a37a6661a86236589
windows/winobject/event_log.py
python
EvtEvent.time_created
(self)
return self.value("Event/System/TimeCreated/@SystemTime")
The creation time of the Event
The creation time of the Event
[ "The", "creation", "time", "of", "the", "Event" ]
def time_created(self): """The creation time of the Event""" return self.value("Event/System/TimeCreated/@SystemTime")
[ "def", "time_created", "(", "self", ")", ":", "return", "self", ".", "value", "(", "\"Event/System/TimeCreated/@SystemTime\"", ")" ]
https://github.com/hakril/PythonForWindows/blob/61e027a678d5b87aa64fcf8a37a6661a86236589/windows/winobject/event_log.py#L249-L251
cloudera/hue
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
desktop/core/ext-py/pycryptodomex-3.9.7/lib/Cryptodome/Cipher/AES.py
python
new
(key, mode, *args, **kwargs)
return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs)
Create a new AES cipher. :param key: The secret key to use in the symmetric cipher. It must be 16, 24 or 32 bytes long (respectively for *AES-128*, *AES-192* or *AES-256*). For ``MODE_SIV`` only, it doubles to 32, 48, or 64 bytes. :type key: bytes/bytearray/memoryview :param mode: The chaining mode to use for encryption or decryption. If in doubt, use ``MODE_EAX``. :type mode: One of the supported ``MODE_*`` constants :Keyword Arguments: * **iv** (*bytes*, *bytearray*, *memoryview*) -- (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, and ``MODE_OPENPGP`` modes). The initialization vector to use for encryption or decryption. For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 16 bytes long. For ``MODE_OPENPGP`` mode only, it must be 16 bytes long for encryption and 18 bytes for decryption (in the latter case, it is actually the *encrypted* IV which was prefixed to the ciphertext). If not provided, a random byte string is generated (you must then read its value with the :attr:`iv` attribute). * **nonce** (*bytes*, *bytearray*, *memoryview*) -- (Only applicable for ``MODE_CCM``, ``MODE_EAX``, ``MODE_GCM``, ``MODE_SIV``, ``MODE_OCB``, and ``MODE_CTR``). A value that must never be reused for any other encryption done with this key (except possibly for ``MODE_SIV``, see below). For ``MODE_EAX``, ``MODE_GCM`` and ``MODE_SIV`` there are no restrictions on its length (recommended: **16** bytes). For ``MODE_CCM``, its length must be in the range **[7..13]**. Bear in mind that with CCM there is a trade-off between nonce length and maximum message size. Recommendation: **11** bytes. For ``MODE_OCB``, its length must be in the range **[1..15]** (recommended: **15**). For ``MODE_CTR``, its length must be in the range **[0..15]** (recommended: **8**). For ``MODE_SIV``, the nonce is optional; if it is not specified, then no nonce is being used, which renders the encryption deterministic. If not provided, for modes other than ``MODE_SIV``, a random byte string of the recommended length is used (you must then read its value with the :attr:`nonce` attribute). * **segment_size** (*integer*) -- (Only ``MODE_CFB``). The number of **bits** the plaintext and ciphertext are segmented in. It must be a multiple of 8. If not specified, it will be assumed to be 8. * **mac_len** : (*integer*) -- (Only ``MODE_EAX``, ``MODE_GCM``, ``MODE_OCB``, ``MODE_CCM``) Length of the authentication tag, in bytes. It must be even and in the range **[4..16]**. The recommended value (and the default, if not specified) is **16**. * **msg_len** : (*integer*) -- (Only ``MODE_CCM``). Length of the message to (de)cipher. If not specified, ``encrypt`` must be called with the entire message. Similarly, ``decrypt`` can only be called once. * **assoc_len** : (*integer*) -- (Only ``MODE_CCM``). Length of the associated data. If not specified, all associated data is buffered internally, which may represent a problem for very large messages. * **initial_value** : (*integer* or *bytes/bytearray/memoryview*) -- (Only ``MODE_CTR``). The initial value for the counter. If not present, the cipher will start counting from 0. The value is incremented by one for each block. The counter number is encoded in big endian mode. * **counter** : (*object*) -- Instance of ``Cryptodome.Util.Counter``, which allows full customization of the counter block. This parameter is incompatible with both ``nonce`` and ``initial_value``. * **use_aesni** : (*boolean*) -- Use Intel AES-NI hardware extensions (default: use if available). 
:Return: an AES object, of the applicable mode.
Create a new AES cipher.
[ "Create", "a", "new", "AES", "cipher", "." ]
def new(key, mode, *args, **kwargs): """Create a new AES cipher. :param key: The secret key to use in the symmetric cipher. It must be 16, 24 or 32 bytes long (respectively for *AES-128*, *AES-192* or *AES-256*). For ``MODE_SIV`` only, it doubles to 32, 48, or 64 bytes. :type key: bytes/bytearray/memoryview :param mode: The chaining mode to use for encryption or decryption. If in doubt, use ``MODE_EAX``. :type mode: One of the supported ``MODE_*`` constants :Keyword Arguments: * **iv** (*bytes*, *bytearray*, *memoryview*) -- (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, and ``MODE_OPENPGP`` modes). The initialization vector to use for encryption or decryption. For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 16 bytes long. For ``MODE_OPENPGP`` mode only, it must be 16 bytes long for encryption and 18 bytes for decryption (in the latter case, it is actually the *encrypted* IV which was prefixed to the ciphertext). If not provided, a random byte string is generated (you must then read its value with the :attr:`iv` attribute). * **nonce** (*bytes*, *bytearray*, *memoryview*) -- (Only applicable for ``MODE_CCM``, ``MODE_EAX``, ``MODE_GCM``, ``MODE_SIV``, ``MODE_OCB``, and ``MODE_CTR``). A value that must never be reused for any other encryption done with this key (except possibly for ``MODE_SIV``, see below). For ``MODE_EAX``, ``MODE_GCM`` and ``MODE_SIV`` there are no restrictions on its length (recommended: **16** bytes). For ``MODE_CCM``, its length must be in the range **[7..13]**. Bear in mind that with CCM there is a trade-off between nonce length and maximum message size. Recommendation: **11** bytes. For ``MODE_OCB``, its length must be in the range **[1..15]** (recommended: **15**). For ``MODE_CTR``, its length must be in the range **[0..15]** (recommended: **8**). For ``MODE_SIV``, the nonce is optional; if it is not specified, then no nonce is being used, which renders the encryption deterministic. If not provided, for modes other than ``MODE_SIV``, a random byte string of the recommended length is used (you must then read its value with the :attr:`nonce` attribute). * **segment_size** (*integer*) -- (Only ``MODE_CFB``). The number of **bits** the plaintext and ciphertext are segmented in. It must be a multiple of 8. If not specified, it will be assumed to be 8. * **mac_len** : (*integer*) -- (Only ``MODE_EAX``, ``MODE_GCM``, ``MODE_OCB``, ``MODE_CCM``) Length of the authentication tag, in bytes. It must be even and in the range **[4..16]**. The recommended value (and the default, if not specified) is **16**. * **msg_len** : (*integer*) -- (Only ``MODE_CCM``). Length of the message to (de)cipher. If not specified, ``encrypt`` must be called with the entire message. Similarly, ``decrypt`` can only be called once. * **assoc_len** : (*integer*) -- (Only ``MODE_CCM``). Length of the associated data. If not specified, all associated data is buffered internally, which may represent a problem for very large messages. * **initial_value** : (*integer* or *bytes/bytearray/memoryview*) -- (Only ``MODE_CTR``). The initial value for the counter. If not present, the cipher will start counting from 0. The value is incremented by one for each block. The counter number is encoded in big endian mode. * **counter** : (*object*) -- Instance of ``Cryptodome.Util.Counter``, which allows full customization of the counter block. This parameter is incompatible with both ``nonce`` and ``initial_value``. 
* **use_aesni** : (*boolean*) -- Use Intel AES-NI hardware extensions (default: use if available). :Return: an AES object, of the applicable mode. """ kwargs["add_aes_modes"] = True return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs)
[ "def", "new", "(", "key", ",", "mode", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "\"add_aes_modes\"", "]", "=", "True", "return", "_create_cipher", "(", "sys", ".", "modules", "[", "__name__", "]", ",", "key", ",", "mode", ...
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/pycryptodomex-3.9.7/lib/Cryptodome/Cipher/AES.py#L130-L232
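The docstring recommends MODE_EAX when in doubt; the standard authenticated round trip looks like this (key and message are demo placeholders):

from Cryptodome.Cipher import AES

key = b'0123456789abcdef'  # 16 bytes -> AES-128; demo key only
cipher = AES.new(key, AES.MODE_EAX)
ciphertext, tag = cipher.encrypt_and_digest(b'attack at dawn')

# the nonce must travel with the ciphertext so the receiver can verify it
decipher = AES.new(key, AES.MODE_EAX, nonce=cipher.nonce)
assert decipher.decrypt_and_verify(ciphertext, tag) == b'attack at dawn'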
microsoft/nni
31f11f51249660930824e888af0d4e022823285c
nni/algorithms/hpo/networkmorphism_tuner/graph.py
python
Graph.extract_descriptor
(self)
return ret
Extract the description of the Graph as an instance of NetworkDescriptor.
Extract the description of the Graph as an instance of NetworkDescriptor.
[ "Extract", "the", "description", "of", "the", "Graph", "as", "an", "instance", "of", "NetworkDescriptor", "." ]
def extract_descriptor(self): """Extract the description of the Graph as an instance of NetworkDescriptor.""" main_chain = self.get_main_chain() index_in_main_chain = {} for index, u in enumerate(main_chain): index_in_main_chain[u] = index ret = NetworkDescriptor() for u in main_chain: for v, layer_id in self.adj_list[u]: if v not in index_in_main_chain: continue layer = self.layer_list[layer_id] copied_layer = copy(layer) copied_layer.weights = None ret.add_layer(deepcopy(copied_layer)) for u in index_in_main_chain: for v, layer_id in self.adj_list[u]: if v not in index_in_main_chain: temp_u = u temp_v = v temp_layer_id = layer_id skip_type = None while not ( temp_v in index_in_main_chain and temp_u in index_in_main_chain): if is_layer( self.layer_list[temp_layer_id], "Concatenate"): skip_type = NetworkDescriptor.CONCAT_CONNECT if is_layer(self.layer_list[temp_layer_id], "Add"): skip_type = NetworkDescriptor.ADD_CONNECT temp_u = temp_v temp_v, temp_layer_id = self.adj_list[temp_v][0] ret.add_skip_connection( index_in_main_chain[u], index_in_main_chain[temp_u], skip_type ) elif index_in_main_chain[v] - index_in_main_chain[u] != 1: skip_type = None if is_layer(self.layer_list[layer_id], "Concatenate"): skip_type = NetworkDescriptor.CONCAT_CONNECT if is_layer(self.layer_list[layer_id], "Add"): skip_type = NetworkDescriptor.ADD_CONNECT ret.add_skip_connection( index_in_main_chain[u], index_in_main_chain[v], skip_type ) return ret
[ "def", "extract_descriptor", "(", "self", ")", ":", "main_chain", "=", "self", ".", "get_main_chain", "(", ")", "index_in_main_chain", "=", "{", "}", "for", "index", ",", "u", "in", "enumerate", "(", "main_chain", ")", ":", "index_in_main_chain", "[", "u", ...
https://github.com/microsoft/nni/blob/31f11f51249660930824e888af0d4e022823285c/nni/algorithms/hpo/networkmorphism_tuner/graph.py#L581-L628
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/astropy-4.0-py3.7-macosx-10.9-x86_64.egg/astropy/io/votable/tree.py
python
Table.format
(self)
return self._format
[*required*] The serialization format of the table. Must be one of: 'tabledata' (TABLEDATA_), 'binary' (BINARY_), 'binary2' (BINARY2_) 'fits' (FITS_). Note that the 'fits' format, since it requires an external file, can not be written out. Any file read in with 'fits' format will be read out, by default, in 'tabledata' format. See :ref:`votable-serialization`.
[*required*] The serialization format of the table. Must be one of:
[ "[", "*", "required", "*", "]", "The", "serialization", "format", "of", "the", "table", ".", "Must", "be", "one", "of", ":" ]
def format(self): """ [*required*] The serialization format of the table. Must be one of: 'tabledata' (TABLEDATA_), 'binary' (BINARY_), 'binary2' (BINARY2_) 'fits' (FITS_). Note that the 'fits' format, since it requires an external file, can not be written out. Any file read in with 'fits' format will be read out, by default, in 'tabledata' format. See :ref:`votable-serialization`. """ return self._format
[ "def", "format", "(", "self", ")", ":", "return", "self", ".", "_format" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/astropy-4.0-py3.7-macosx-10.9-x86_64.egg/astropy/io/votable/tree.py#L2227-L2241
khanhnamle1994/natural-language-processing
01d450d5ac002b0156ef4cf93a07cb508c1bcdc5
assignment1/.env/lib/python2.7/site-packages/scipy/signal/filter_design.py
python
band_stop_obj
(wp, ind, passb, stopb, gpass, gstop, type)
return n
Band Stop Objective Function for order minimization. Returns the non-integer order for an analog band stop filter. Parameters ---------- wp : scalar Edge of passband `passb`. ind : int, {0, 1} Index specifying which `passb` edge to vary (0 or 1). passb : ndarray Two element sequence of fixed passband edges. stopb : ndarray Two element sequence of fixed stopband edges. gstop : float Amount of attenuation in stopband in dB. gpass : float Amount of ripple in the passband in dB. type : {'butter', 'cheby', 'ellip'} Type of filter. Returns ------- n : scalar Filter order (possibly non-integer).
Band Stop Objective Function for order minimization.
[ "Band", "Stop", "Objective", "Function", "for", "order", "minimization", "." ]
def band_stop_obj(wp, ind, passb, stopb, gpass, gstop, type): """ Band Stop Objective Function for order minimization. Returns the non-integer order for an analog band stop filter. Parameters ---------- wp : scalar Edge of passband `passb`. ind : int, {0, 1} Index specifying which `passb` edge to vary (0 or 1). passb : ndarray Two element sequence of fixed passband edges. stopb : ndarray Two element sequence of fixed stopband edges. gstop : float Amount of attenuation in stopband in dB. gpass : float Amount of ripple in the passband in dB. type : {'butter', 'cheby', 'ellip'} Type of filter. Returns ------- n : scalar Filter order (possibly non-integer). """ passbC = passb.copy() passbC[ind] = wp nat = (stopb * (passbC[0] - passbC[1]) / (stopb ** 2 - passbC[0] * passbC[1])) nat = min(abs(nat)) if type == 'butter': GSTOP = 10 ** (0.1 * abs(gstop)) GPASS = 10 ** (0.1 * abs(gpass)) n = (log10((GSTOP - 1.0) / (GPASS - 1.0)) / (2 * log10(nat))) elif type == 'cheby': GSTOP = 10 ** (0.1 * abs(gstop)) GPASS = 10 ** (0.1 * abs(gpass)) n = arccosh(sqrt((GSTOP - 1.0) / (GPASS - 1.0))) / arccosh(nat) elif type == 'ellip': GSTOP = 10 ** (0.1 * gstop) GPASS = 10 ** (0.1 * gpass) arg1 = sqrt((GPASS - 1.0) / (GSTOP - 1.0)) arg0 = 1.0 / nat d0 = special.ellipk([arg0 ** 2, 1 - arg0 ** 2]) d1 = special.ellipk([arg1 ** 2, 1 - arg1 ** 2]) n = (d0[0] * d1[1] / (d0[1] * d1[0])) else: raise ValueError("Incorrect type: %s" % type) return n
[ "def", "band_stop_obj", "(", "wp", ",", "ind", ",", "passb", ",", "stopb", ",", "gpass", ",", "gstop", ",", "type", ")", ":", "passbC", "=", "passb", ".", "copy", "(", ")", "passbC", "[", "ind", "]", "=", "wp", "nat", "=", "(", "stopb", "*", "(...
https://github.com/khanhnamle1994/natural-language-processing/blob/01d450d5ac002b0156ef4cf93a07cb508c1bcdc5/assignment1/.env/lib/python2.7/site-packages/scipy/signal/filter_design.py#L1558-L1611
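band_stop_obj itself is internal; the public route into this order search is scipy.signal.buttord (or cheb1ord/ellipord), which minimizes the order over the passband edges for band-stop specs. A sketch with Nyquist-normalized frequencies:

from scipy.signal import buttord

# pass below 0.1 and above 0.6, reject 0.2-0.5 (Nyquist-normalized)
N, Wn = buttord(wp=[0.1, 0.6], ws=[0.2, 0.5], gpass=3, gstop=40)
print(N, Wn)  # minimum Butterworth order and the natural frequencies to use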
PySimpleGUI/PySimpleGUI
6c0d1fb54f493d45e90180b322fbbe70f7a5af3c
DemoPrograms/Demo_Matplotlib_Image_Elem_Spetrogram_Animated.py
python
draw_figure
(element, figure)
Draws the previously created "figure" in the supplied Image Element :param element: an Image Element :param figure: a Matplotlib figure :return: The figure canvas
Draws the previously created "figure" in the supplied Image Element
[ "Draws", "the", "previously", "created", "figure", "in", "the", "supplied", "Image", "Element" ]
def draw_figure(element, figure): """ Draws the previously created "figure" in the supplied Image Element :param element: an Image Element :param figure: a Matplotlib figure :return: The figure canvas """ plt.close('all') # erases previously drawn plots canv = FigureCanvasAgg(figure) buf = io.BytesIO() canv.print_figure(buf, format='png') if buf is not None: buf.seek(0) element.update(data=buf.read()) return canv else: return None
[ "def", "draw_figure", "(", "element", ",", "figure", ")", ":", "plt", ".", "close", "(", "'all'", ")", "# erases previously drawn plots", "canv", "=", "FigureCanvasAgg", "(", "figure", ")", "buf", "=", "io", ".", "BytesIO", "(", ")", "canv", ".", "print_fi...
https://github.com/PySimpleGUI/PySimpleGUI/blob/6c0d1fb54f493d45e90180b322fbbe70f7a5af3c/DemoPrograms/Demo_Matplotlib_Image_Elem_Spetrogram_Animated.py#L90-L108
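In the demo the target is an sg.Image element; draw_figure renders the Matplotlib figure to PNG bytes and pushes them into it. A trimmed sketch, assuming the draw_figure above is in scope (the window layout and key are made up):

import PySimpleGUI as sg
from matplotlib.figure import Figure

fig = Figure(figsize=(4, 3))
fig.add_subplot(111).plot([0, 1, 2], [0, 1, 4])

window = sg.Window('Plot', [[sg.Image(key='-IMAGE-')]], finalize=True)
draw_figure(window['-IMAGE-'], fig)  # the figure appears in the Image element
window.read()  # keep the window open until the user interacts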
JacquesLucke/animation_nodes
b1e3ace8dcb0a771fd882fc3ac4e490b009fa0d1
animation_nodes/nodes/text/text_file_reader.py
python
TextFileReaderNode.create
(self)
[]
def create(self): self.newInput("Text", "Path", "path", showFileChooser = True) self.newInput("Text", "Encoding", "encoding", value = "ascii") self.newOutput("Text", "Text", "text")
[ "def", "create", "(", "self", ")", ":", "self", ".", "newInput", "(", "\"Text\"", ",", "\"Path\"", ",", "\"path\"", ",", "showFileChooser", "=", "True", ")", "self", ".", "newInput", "(", "\"Text\"", ",", "\"Encoding\"", ",", "\"encoding\"", ",", "value", ...
https://github.com/JacquesLucke/animation_nodes/blob/b1e3ace8dcb0a771fd882fc3ac4e490b009fa0d1/animation_nodes/nodes/text/text_file_reader.py#L17-L20
facebookresearch/demucs
7317db81a34349e028ec943b199c9b9cdda47a12
demucs/wav.py
python
build_metadata
(path, sources, normalize=True, ext=EXT)
return meta
Build the metadata for `Wavset`. Args: path (str or Path): path to dataset. sources (list[str]): list of sources to look for. normalize (bool): if True, loads full track and store normalization values based on the mixture file. ext (str): extension of audio files (default is .wav).
Build the metadata for `Wavset`.
[ "Build", "the", "metadata", "for", "Wavset", "." ]
def build_metadata(path, sources, normalize=True, ext=EXT): """ Build the metadata for `Wavset`. Args: path (str or Path): path to dataset. sources (list[str]): list of sources to look for. normalize (bool): if True, loads full track and store normalization values based on the mixture file. ext (str): extension of audio files (default is .wav). """ meta = {} path = Path(path) pendings = [] from concurrent.futures import ThreadPoolExecutor with ThreadPoolExecutor(8) as pool: for root, folders, files in os.walk(path, followlinks=True): root = Path(root) if root.name.startswith('.') or folders or root == path: continue name = str(root.relative_to(path)) pendings.append((name, pool.submit(_track_metadata, root, sources, normalize, ext))) # meta[name] = _track_metadata(root, sources, normalize, ext) for name, pending in tqdm.tqdm(pendings, ncols=120): meta[name] = pending.result() return meta
[ "def", "build_metadata", "(", "path", ",", "sources", ",", "normalize", "=", "True", ",", "ext", "=", "EXT", ")", ":", "meta", "=", "{", "}", "path", "=", "Path", "(", "path", ")", "pendings", "=", "[", "]", "from", "concurrent", ".", "futures", "i...
https://github.com/facebookresearch/demucs/blob/7317db81a34349e028ec943b199c9b9cdda47a12/demucs/wav.py#L67-L93
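A sketch of typical usage, assuming a musdb-style layout (one directory per track containing one {source}.wav per stem) at a made-up path:

from demucs.wav import build_metadata

sources = ["drums", "bass", "other", "vocals"]  # the conventional four stems
meta = build_metadata("data/musdb/train", sources)  # hypothetical dataset root
print(len(meta), "tracks indexed")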
apple/ccs-calendarserver
13c706b985fb728b9aab42dc0fef85aae21921c3
calendarserver/tools/dashview.py
python
safeDivision
(value, total, factor=1)
return value * factor / total if total else 0
[]
def safeDivision(value, total, factor=1): return value * factor / total if total else 0
[ "def", "safeDivision", "(", "value", ",", "total", ",", "factor", "=", "1", ")", ":", "return", "value", "*", "factor", "/", "total", "if", "total", "else", "0" ]
https://github.com/apple/ccs-calendarserver/blob/13c706b985fb728b9aab42dc0fef85aae21921c3/calendarserver/tools/dashview.py#L113-L114
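The factor argument makes percentage calculations convenient, and a zero denominator returns 0 instead of raising ZeroDivisionError:

print(safeDivision(50, 200, 100))  # 25, i.e. a percentage
print(safeDivision(50, 0, 100))    # 0, no ZeroDivisionError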
TencentCloud/tencentcloud-sdk-python
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
tencentcloud/scf/v20180416/scf_client.py
python
ScfClient.GetAsyncEventStatus
(self, request)
Get the status of an asynchronous function invocation event. Event status is retained for 3 * 24 hours (counted from event completion). :param request: Request instance for GetAsyncEventStatus. :type request: :class:`tencentcloud.scf.v20180416.models.GetAsyncEventStatusRequest` :rtype: :class:`tencentcloud.scf.v20180416.models.GetAsyncEventStatusResponse`
Get the status of an asynchronous function invocation event. Event status is retained for 3 * 24 hours (counted from event completion).
[ "Get", "the", "status", "of", "an", "asynchronous", "function", "invocation", "event.", "Event", "status", "is", "retained", "for", "3", "*", "24", "hours", "(counted", "from", "event", "completion)." ]
def GetAsyncEventStatus(self, request): """Get the status of an asynchronous function invocation event. Event status is retained for 3 * 24 hours (counted from event completion). :param request: Request instance for GetAsyncEventStatus. :type request: :class:`tencentcloud.scf.v20180416.models.GetAsyncEventStatusRequest` :rtype: :class:`tencentcloud.scf.v20180416.models.GetAsyncEventStatusResponse` """ try: params = request._serialize() body = self.call("GetAsyncEventStatus", params) response = json.loads(body) if "Error" not in response["Response"]: model = models.GetAsyncEventStatusResponse() model._deserialize(response["Response"]) return model else: code = response["Response"]["Error"]["Code"] message = response["Response"]["Error"]["Message"] reqid = response["Response"]["RequestId"] raise TencentCloudSDKException(code, message, reqid) except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(e.message, e.message)
[ "def", "GetAsyncEventStatus", "(", "self", ",", "request", ")", ":", "try", ":", "params", "=", "request", ".", "_serialize", "(", ")", "body", "=", "self", ".", "call", "(", "\"GetAsyncEventStatus\"", ",", "params", ")", "response", "=", "json", ".", "l...
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/scf/v20180416/scf_client.py#L428-L453
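Invocation follows the standard Tencent Cloud SDK pattern. A sketch; the credentials are placeholders and the request attribute name (InvokeRequestId) is my assumption about this particular API:

from tencentcloud.common import credential
from tencentcloud.scf.v20180416 import scf_client, models

cred = credential.Credential("SECRET_ID", "SECRET_KEY")  # placeholder keys
client = scf_client.ScfClient(cred, "ap-guangzhou")

req = models.GetAsyncEventStatusRequest()
req.InvokeRequestId = "xxxx-xxxx"  # assumed field: the async invocation's id
resp = client.GetAsyncEventStatus(req)
print(resp.to_json_string())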
pyansys/pymapdl
c07291fc062b359abf0e92b95a92d753a95ef3d7
ansys/mapdl/core/_commands/preproc/status.py
python
Status.areas
(self, **kwargs)
return self.run(command, **kwargs)
Specifies "Areas" as the subsequent status topic. APDL Command: AREAS Notes ----- This is a status [STAT] topic command. Status topic commands are generated by the GUI and will appear in the log file (Jobname.LOG) if status is requested for some items under Utility Menu> List> Status. This command will be immediately followed by a STAT command, which will report the status for the specified topic. If entered directly into the program, the STAT command should immediately follow this command.
Specifies "Areas" as the subsequent status topic.
[ "Specifies", "Areas", "as", "the", "subsequent", "status", "topic", "." ]
def areas(self, **kwargs): """Specifies "Areas" as the subsequent status topic. APDL Command: AREAS Notes ----- This is a status [STAT] topic command. Status topic commands are generated by the GUI and will appear in the log file (Jobname.LOG) if status is requested for some items under Utility Menu> List> Status. This command will be immediately followed by a STAT command, which will report the status for the specified topic. If entered directly into the program, the STAT command should immediately follow this command. """ command = f"AREAS," return self.run(command, **kwargs)
[ "def", "areas", "(", "self", ",", "*", "*", "kwargs", ")", ":", "command", "=", "f\"AREAS,\"", "return", "self", ".", "run", "(", "command", ",", "*", "*", "kwargs", ")" ]
https://github.com/pyansys/pymapdl/blob/c07291fc062b359abf0e92b95a92d753a95ef3d7/ansys/mapdl/core/_commands/preproc/status.py#L5-L22
oilshell/oil
94388e7d44a9ad879b12615f6203b38596b5a2d3
Python-2.7.13/Lib/lib-tk/ttk.py
python
Widget.identify
(self, x, y)
return self.tk.call(self._w, "identify", x, y)
Returns the name of the element at position x, y, or the empty string if the point does not lie within any element. x and y are pixel coordinates relative to the widget.
Returns the name of the element at position x, y, or the empty string if the point does not lie within any element.
[ "Returns", "the", "name", "of", "the", "element", "at", "position", "x", "y", "or", "the", "empty", "string", "if", "the", "point", "does", "not", "lie", "within", "any", "element", "." ]
def identify(self, x, y): """Returns the name of the element at position x, y, or the empty string if the point does not lie within any element. x and y are pixel coordinates relative to the widget.""" return self.tk.call(self._w, "identify", x, y)
[ "def", "identify", "(", "self", ",", "x", ",", "y", ")", ":", "return", "self", ".", "tk", ".", "call", "(", "self", ".", "_w", ",", "\"identify\"", ",", "x", ",", "y", ")" ]
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/Python-2.7.13/Lib/lib-tk/ttk.py#L558-L563
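identify is handy for hit-testing in event handlers; coordinates are relative to the widget. A Python 3 sketch (the record itself comes from the Python 2 lib-tk copy of ttk):

import tkinter as tk
from tkinter import ttk

root = tk.Tk()
btn = ttk.Button(root, text="hover me")
btn.pack(padx=20, pady=20)

def on_motion(event):
    # '' outside any element, e.g. 'label' over the button text
    print(btn.identify(event.x, event.y))

btn.bind("<Motion>", on_motion)
root.mainloop()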
ptone/jiffylab
c98e4ed260efe680bce2d671d92d4652dfdecd88
webapp/app.py
python
slugify
(text, delim=u'-')
return unicode(delim.join(result))
Generates a slightly worse ASCII-only slug.
Generates a slightly worse ASCII-only slug.
[ "Generates", "a", "slightly", "worse", "ASCII", "-", "only", "slug", "." ]
def slugify(text, delim=u'-'): """Generates a slightly worse ASCII-only slug.""" result = [] for word in _punct_re.split(text.lower()): word = normalize('NFKD', word).encode('ascii', 'ignore') if word: result.append(word) return unicode(delim.join(result))
[ "def", "slugify", "(", "text", ",", "delim", "=", "u'-'", ")", ":", "result", "=", "[", "]", "for", "word", "in", "_punct_re", ".", "split", "(", "text", ".", "lower", "(", ")", ")", ":", "word", "=", "normalize", "(", "'NFKD'", ",", "word", ")",...
https://github.com/ptone/jiffylab/blob/c98e4ed260efe680bce2d671d92d4652dfdecd88/webapp/app.py#L72-L79
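slugify leans on a module-level _punct_re defined elsewhere in app.py; the pattern below is an assumption (the one commonly paired with this snippet), and the sketch is Python 2 to match the unicode() call:

# -*- coding: utf-8 -*-
import re
from unicodedata import normalize

_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')  # assumed pattern

def slugify(text, delim=u'-'):
    """Generates a slightly worse ASCII-only slug."""
    result = []
    for word in _punct_re.split(text.lower()):
        word = normalize('NFKD', word).encode('ascii', 'ignore')
        if word:
            result.append(word)
    return unicode(delim.join(result))

print(slugify(u'Héllo, Wörld!'))  # hello-world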
pyrocko/pyrocko
b6baefb7540fb7fce6ed9b856ec0c413961a4320
src/trace.py
python
Trace.mult
(self, other, interpolate=True)
Multiply with values of other trace ``(self *= other)``. Multiply values of ``other`` trace to the values of ``self``, where it intersects with ``other``. This method does not change the extent of ``self``. If ``interpolate`` is ``True`` (the default), the values of ``other`` to be multiplied are interpolated at sampling instants of ``self``. Linear interpolation is performed. In this case the sampling rate of ``other`` must be equal to or lower than that of ``self``. If ``interpolate`` is ``False``, the sampling rates of the two traces must match.
Multiply with values of other trace ``(self *= other)``.
[ "Multiply", "with", "values", "of", "other", "trace", "(", "self", "*", "=", "other", ")", "." ]
def mult(self, other, interpolate=True): ''' Multiply with values of other trace ``(self *= other)``. Multiply values of ``other`` trace to the values of ``self``, where it intersects with ``other``. This method does not change the extent of ``self``. If ``interpolate`` is ``True`` (the default), the values of ``other`` to be multiplied are interpolated at sampling instants of ``self``. Linear interpolation is performed. In this case the sampling rate of ``other`` must be equal to or lower than that of ``self``. If ``interpolate`` is ``False``, the sampling rates of the two traces must match. ''' if interpolate: assert self.deltat <= other.deltat or \ same_sampling_rate(self, other) other_xdata = other.get_xdata() xdata = self.get_xdata() self.ydata *= num.interp( xdata, other_xdata, other.ydata, left=0., right=0.) else: assert self.deltat == other.deltat ibeg1 = int(round((other.tmin-self.tmin)/self.deltat)) ibeg2 = int(round((self.tmin-other.tmin)/self.deltat)) iend1 = int(round((other.tmax-self.tmin)/self.deltat))+1 iend2 = int(round((self.tmax-other.tmin)/self.deltat))+1 ibeg1 = self.index_clip(ibeg1) iend1 = self.index_clip(iend1) ibeg2 = self.index_clip(ibeg2) iend2 = self.index_clip(iend2) self.ydata[ibeg1:iend1] *= other.ydata[ibeg2:iend2]
[ "def", "mult", "(", "self", ",", "other", ",", "interpolate", "=", "True", ")", ":", "if", "interpolate", ":", "assert", "self", ".", "deltat", "<=", "other", ".", "deltat", "or", "same_sampling_rate", "(", "self", ",", "other", ")", "other_xdata", "=", ...
https://github.com/pyrocko/pyrocko/blob/b6baefb7540fb7fce6ed9b856ec0c413961a4320/src/trace.py#L289-L323
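With interpolate=True the fill values left=0., right=0. mean that samples of self outside other's extent get multiplied by zero, so only the overlap survives. A sketch with two synthetic traces (amplitudes chosen arbitrarily):

import numpy as num
from pyrocko import trace

t1 = trace.Trace(tmin=0.0, deltat=0.5, ydata=num.ones(10))
t2 = trace.Trace(tmin=1.0, deltat=0.5, ydata=num.full(4, 2.0))

t1.mult(t2)      # in place: overlap doubled, samples outside t2 zeroed
print(t1.ydata)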
hzy46/Deep-Learning-21-Examples
15c2d9edccad090cd67b033f24a43c544e5cba3e
chapter_5/research/object_detection/core/preprocessor.py
python
random_adjust_contrast
(image, min_delta=0.8, max_delta=1.25)
Randomly adjusts contrast. Makes sure the output image is still between 0 and 1. Args: image: rank 3 float32 tensor contains 1 image -> [height, width, channels] with pixel values varying between [0, 1]. min_delta: see max_delta. max_delta: how much to change the contrast. Contrast will change with a value between min_delta and max_delta. This value will be multiplied to the current contrast of the image. Returns: image: image which is the same shape as input image.
Randomly adjusts contrast.
[ "Randomly", "adjusts", "contrast", "." ]
def random_adjust_contrast(image, min_delta=0.8, max_delta=1.25): """Randomly adjusts contrast. Makes sure the output image is still between 0 and 1. Args: image: rank 3 float32 tensor contains 1 image -> [height, width, channels] with pixel values varying between [0, 1]. min_delta: see max_delta. max_delta: how much to change the contrast. Contrast will change with a value between min_delta and max_delta. This value will be multiplied to the current contrast of the image. Returns: image: image which is the same shape as input image. """ with tf.name_scope('RandomAdjustContrast', values=[image]): image = tf.image.random_contrast(image, min_delta, max_delta) image = tf.clip_by_value(image, clip_value_min=0.0, clip_value_max=1.0) return image
[ "def", "random_adjust_contrast", "(", "image", ",", "min_delta", "=", "0.8", ",", "max_delta", "=", "1.25", ")", ":", "with", "tf", ".", "name_scope", "(", "'RandomAdjustContrast'", ",", "values", "=", "[", "image", "]", ")", ":", "image", "=", "tf", "."...
https://github.com/hzy46/Deep-Learning-21-Examples/blob/15c2d9edccad090cd67b033f24a43c544e5cba3e/chapter_5/research/object_detection/core/preprocessor.py#L451-L470
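A TF1-style usage sketch matching the graph-mode code above (shape and deltas are arbitrary; tf.placeholder assumes a 1.x runtime, like the object_detection code itself):

import tensorflow as tf
from object_detection.core import preprocessor

image = tf.placeholder(tf.float32, shape=[None, None, 3])  # pixel values in [0, 1]
augmented = preprocessor.random_adjust_contrast(image, min_delta=0.8, max_delta=1.25)
# `augmented` keeps the input shape, with contrast scaled and clipped to [0, 1]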
DinoTools/dionaea
4e459f1b672a5b4c1e8335c0bff1b93738019215
modules/python/dionaea/__init__.py
python
SubTimer.cancel
(self)
Stop the timer if it hasn't finished yet.
Stop the timer if it hasn't finished yet.
[ "Stop", "the", "timer", "if", "it", "hasn", "t", "finished", "yet", "." ]
def cancel(self): """Stop the timer if it hasn't finished yet.""" self.finished.set()
[ "def", "cancel", "(", "self", ")", ":", "self", ".", "finished", ".", "set", "(", ")" ]
https://github.com/DinoTools/dionaea/blob/4e459f1b672a5b4c1e8335c0bff1b93738019215/modules/python/dionaea/__init__.py#L80-L82
thu-ml/tianshou
a2d76d1276bef334bba537a355a5ea12f4279410
tianshou/exploration/random.py
python
BaseNoise.__call__
(self, size: Sequence[int])
Generate new noise.
Generate new noise.
[ "Generate", "new", "noise", "." ]
def __call__(self, size: Sequence[int]) -> np.ndarray: """Generate new noise.""" raise NotImplementedError
[ "def", "__call__", "(", "self", ",", "size", ":", "Sequence", "[", "int", "]", ")", "->", "np", ".", "ndarray", ":", "raise", "NotImplementedError" ]
https://github.com/thu-ml/tianshou/blob/a2d76d1276bef334bba537a355a5ea12f4279410/tianshou/exploration/random.py#L18-L20
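A hedged sketch of a concrete subclass of BaseNoise; the class name here is hypothetical, for illustration only.

import numpy as np
from typing import Sequence

class SimpleGaussianNoise(BaseNoise):
    def __init__(self, mu: float = 0.0, sigma: float = 1.0) -> None:
        super().__init__()
        self._mu, self._sigma = mu, sigma

    def __call__(self, size: Sequence[int]) -> np.ndarray:
        # draw i.i.d. Gaussian noise of the requested shape
        return np.random.normal(self._mu, self._sigma, size)

noise = SimpleGaussianNoise(sigma=0.1)
sample = noise((2, 3))  # 2x3 array, mean 0.0, std 0.1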
tensorflow/tfx
b4a6b83269815ed12ba9df9e9154c7376fef2ea0
tfx/types/artifact.py
python
Artifact.__getattr__
(self, name: str)
Custom __getattr__ to allow access to artifact properties.
Custom __getattr__ to allow access to artifact properties.
[ "Custom", "__getattr__", "to", "allow", "access", "to", "artifact", "properties", "." ]
def __getattr__(self, name: str) -> Any: """Custom __getattr__ to allow access to artifact properties.""" if name == '_artifact_type': # Prevent infinite recursion when used with copy.deepcopy(). raise AttributeError() if name not in self._artifact_type.properties: raise AttributeError('Artifact has no property %r.' % name) property_mlmd_type = self._artifact_type.properties[name] if property_mlmd_type == metadata_store_pb2.STRING: if name not in self._artifact.properties: # Avoid populating empty property protobuf with the [] operator. return '' return self._artifact.properties[name].string_value elif property_mlmd_type == metadata_store_pb2.INT: if name not in self._artifact.properties: # Avoid populating empty property protobuf with the [] operator. return 0 return self._artifact.properties[name].int_value elif property_mlmd_type == metadata_store_pb2.DOUBLE: if name not in self._artifact.properties: # Avoid populating empty property protobuf with the [] operator. return 0.0 return self._artifact.properties[name].double_value elif property_mlmd_type == metadata_store_pb2.STRUCT: if name not in self._artifact.properties: # Avoid populating empty property protobuf with the [] operator. return None if name in self._cached_json_value_properties: return self._cached_json_value_properties[name] value = _decode_struct_value(self._artifact.properties[name].struct_value) # We must cache the decoded lists or dictionaries returned here so that # if their recursive contents are modified, the Metadata proto message # can be updated to reflect this. if isinstance(value, (dict, list)): self._cached_json_value_properties[name] = value return value else: raise Exception('Unknown MLMD type %r for property %r.' % (property_mlmd_type, name))
[ "def", "__getattr__", "(", "self", ",", "name", ":", "str", ")", "->", "Any", ":", "if", "name", "==", "'_artifact_type'", ":", "# Prevent infinite recursion when used with copy.deepcopy().", "raise", "AttributeError", "(", ")", "if", "name", "not", "in", "self", ...
https://github.com/tensorflow/tfx/blob/b4a6b83269815ed12ba9df9e9154c7376fef2ea0/tfx/types/artifact.py#L265-L303
mrJean1/PyGeodesy
7da5ca71aa3edb7bc49e219e0b8190686e1a7965
pygeodesy/latlonBase.py
python
LatLonBase.lat
(self, lat)
Set the latitude. @arg lat: New latitude (C{str[N|S]} or C{degrees}). @raise ValueError: Invalid B{C{lat}}.
Set the latitude.
[ "Set", "the", "latitude", "." ]
def lat(self, lat): '''Set the latitude. @arg lat: New latitude (C{str[N|S]} or C{degrees}). @raise ValueError: Invalid B{C{lat}}. ''' lat = Lat(lat) # parseDMS(lat, suffix=_NS_, clip=90) self._update(lat != self._lat) self._lat = lat
[ "def", "lat", "(", "self", ",", "lat", ")", ":", "lat", "=", "Lat", "(", "lat", ")", "# parseDMS(lat, suffix=_NS_, clip=90)", "self", ".", "_update", "(", "lat", "!=", "self", ".", "_lat", ")", "self", ".", "_lat", "=", "lat" ]
https://github.com/mrJean1/PyGeodesy/blob/7da5ca71aa3edb7bc49e219e0b8190686e1a7965/pygeodesy/latlonBase.py#L759-L768
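A hedged usage sketch for the lat setter, assuming pygeodesy's concrete ellipsoidalVincenty.LatLon subclass:

from pygeodesy.ellipsoidalVincenty import LatLon

p = LatLon(50.0, 8.0)
p.lat = 51.5      # plain degrees
p.lat = '51.5N'   # string with N/S suffix is parsed; invalid input raises ValueError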
RasaHQ/rasa
54823b68c1297849ba7ae841a4246193cd1223a1
rasa/nlu/featurizers/featurizer.py
python
Featurizer.get_default_config
()
return {FEATURIZER_CLASS_ALIAS: None}
Returns the component's default config.
Returns the component's default config.
[ "Returns", "the", "component", "s", "default", "config", "." ]
def get_default_config() -> Dict[Text, Any]: """Returns the component's default config.""" return {FEATURIZER_CLASS_ALIAS: None}
[ "def", "get_default_config", "(", ")", "->", "Dict", "[", "Text", ",", "Any", "]", ":", "return", "{", "FEATURIZER_CLASS_ALIAS", ":", "None", "}" ]
https://github.com/RasaHQ/rasa/blob/54823b68c1297849ba7ae841a4246193cd1223a1/rasa/nlu/featurizers/featurizer.py#L19-L21
openhatch/oh-mainline
ce29352a034e1223141dcc2f317030bbc3359a51
vendor/packages/Jinja2/jinja2/ext.py
python
InternationalizationExtension.parse
(self, parser)
Parse a translatable tag.
Parse a translatable tag.
[ "Parse", "a", "translatable", "tag", "." ]
def parse(self, parser): """Parse a translatable tag.""" lineno = next(parser.stream).lineno num_called_num = False # find all the variables referenced. Additionally a variable can be # defined in the body of the trans block too, but this is checked at # a later state. plural_expr = None plural_expr_assignment = None variables = {} while parser.stream.current.type != 'block_end': if variables: parser.stream.expect('comma') # skip colon for python compatibility if parser.stream.skip_if('colon'): break name = parser.stream.expect('name') if name.value in variables: parser.fail('translatable variable %r defined twice.' % name.value, name.lineno, exc=TemplateAssertionError) # expressions if parser.stream.current.type == 'assign': next(parser.stream) variables[name.value] = var = parser.parse_expression() else: variables[name.value] = var = nodes.Name(name.value, 'load') if plural_expr is None: if isinstance(var, nodes.Call): plural_expr = nodes.Name('_trans', 'load') variables[name.value] = plural_expr plural_expr_assignment = nodes.Assign( nodes.Name('_trans', 'store'), var) else: plural_expr = var num_called_num = name.value == 'num' parser.stream.expect('block_end') plural = plural_names = None have_plural = False referenced = set() # now parse until endtrans or pluralize singular_names, singular = self._parse_block(parser, True) if singular_names: referenced.update(singular_names) if plural_expr is None: plural_expr = nodes.Name(singular_names[0], 'load') num_called_num = singular_names[0] == 'num' # if we have a pluralize block, we parse that too if parser.stream.current.test('name:pluralize'): have_plural = True next(parser.stream) if parser.stream.current.type != 'block_end': name = parser.stream.expect('name') if name.value not in variables: parser.fail('unknown variable %r for pluralization' % name.value, name.lineno, exc=TemplateAssertionError) plural_expr = variables[name.value] num_called_num = name.value == 'num' parser.stream.expect('block_end') plural_names, plural = self._parse_block(parser, False) next(parser.stream) referenced.update(plural_names) else: next(parser.stream) # register free names as simple name expressions for var in referenced: if var not in variables: variables[var] = nodes.Name(var, 'load') if not have_plural: plural_expr = None elif plural_expr is None: parser.fail('pluralize without variables', lineno) node = self._make_node(singular, plural, variables, plural_expr, bool(referenced), num_called_num and have_plural) node.set_lineno(lineno) if plural_expr_assignment is not None: return [plural_expr_assignment, node] else: return node
[ "def", "parse", "(", "self", ",", "parser", ")", ":", "lineno", "=", "next", "(", "parser", ".", "stream", ")", ".", "lineno", "num_called_num", "=", "False", "# find all the variables referenced. Additionally a variable can be", "# defined in the body of the trans block...
https://github.com/openhatch/oh-mainline/blob/ce29352a034e1223141dcc2f317030bbc3359a51/vendor/packages/Jinja2/jinja2/ext.py#L215-L307
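A hedged sketch of a template that exercises this parser, including the pluralize branch; install_null_translations provides gettext stubs so no message catalog is needed.

from jinja2 import Environment

env = Environment(extensions=['jinja2.ext.i18n'])
env.install_null_translations()

tmpl = env.from_string(
    '{% trans count=n %}{{ count }} item'
    '{% pluralize %}{{ count }} items{% endtrans %}'
)
print(tmpl.render(n=1))  # 1 item
print(tmpl.render(n=3))  # 3 items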
fedora-infra/anitya
cc01878ac023790646a76eb4cbef45d639e2372c
anitya/db/models.py
python
Run.last_entry
(cls, session)
return query.first()
Return the last log about the cron run.
Return the last log about the cron run.
[ "Return", "the", "last", "log", "about", "the", "cron", "run", "." ]
def last_entry(cls, session): """Return the last log about the cron run.""" query = session.query(cls).order_by(cls.created_on.desc()) return query.first()
[ "def", "last_entry", "(", "cls", ",", "session", ")", ":", "query", "=", "session", ".", "query", "(", "cls", ")", ".", "order_by", "(", "cls", ".", "created_on", ".", "desc", "(", ")", ")", "return", "query", ".", "first", "(", ")" ]
https://github.com/fedora-infra/anitya/blob/cc01878ac023790646a76eb4cbef45d639e2372c/anitya/db/models.py#L809-L813
huggingface/transformers
623b4f7c63f60cce917677ee704d6c93ee960b4b
src/transformers/generation_utils.py
python
GenerationMixin._update_model_kwargs_for_generation
( outputs: ModelOutput, model_kwargs: Dict[str, Any], is_encoder_decoder: bool = False )
return model_kwargs
[]
def _update_model_kwargs_for_generation( outputs: ModelOutput, model_kwargs: Dict[str, Any], is_encoder_decoder: bool = False ) -> Dict[str, Any]: # update past if "past_key_values" in outputs: model_kwargs["past"] = outputs.past_key_values elif "mems" in outputs: model_kwargs["past"] = outputs.mems elif "past_buckets_states" in outputs: model_kwargs["past"] = outputs.past_buckets_states else: model_kwargs["past"] = None # update token_type_ids with last value if "token_type_ids" in model_kwargs: token_type_ids = model_kwargs["token_type_ids"] model_kwargs["token_type_ids"] = torch.cat([token_type_ids, token_type_ids[:, -1].unsqueeze(-1)], dim=-1) # update attention mask if not is_encoder_decoder: if "attention_mask" in model_kwargs: attention_mask = model_kwargs["attention_mask"] model_kwargs["attention_mask"] = torch.cat( [attention_mask, attention_mask.new_ones((attention_mask.shape[0], 1))], dim=-1 ) return model_kwargs
[ "def", "_update_model_kwargs_for_generation", "(", "outputs", ":", "ModelOutput", ",", "model_kwargs", ":", "Dict", "[", "str", ",", "Any", "]", ",", "is_encoder_decoder", ":", "bool", "=", "False", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "# u...
https://github.com/huggingface/transformers/blob/623b4f7c63f60cce917677ee704d6c93ee960b4b/src/transformers/generation_utils.py#L572-L598
oilshell/oil
94388e7d44a9ad879b12615f6203b38596b5a2d3
Python-2.7.13/Lib/bdb.py
python
Bdb.get_file_breaks
(self, filename)
[]
def get_file_breaks(self, filename): filename = self.canonic(filename) if filename in self.breaks: return self.breaks[filename] else: return []
[ "def", "get_file_breaks", "(", "self", ",", "filename", ")", ":", "filename", "=", "self", ".", "canonic", "(", "filename", ")", "if", "filename", "in", "self", ".", "breaks", ":", "return", "self", ".", "breaks", "[", "filename", "]", "else", ":", "re...
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/Python-2.7.13/Lib/bdb.py#L328-L333
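A hedged usage sketch; set_break only records a breakpoint if the target line exists on disk, hence the temporary file.

import bdb
import os
import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.py', delete=False) as f:
    f.write('x = 1\ny = 2\n')
    path = f.name

db = bdb.Bdb()
db.set_break(path, 1)
db.set_break(path, 2)
print(db.get_file_breaks(path))        # [1, 2]
print(db.get_file_breaks('other.py'))  # []
os.unlink(path)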
django/django
0a17666045de6739ae1c2ac695041823d5f827f7
django/contrib/admin/widgets.py
python
AutocompleteMixin.build_attrs
(self, base_attrs, extra_attrs=None)
return attrs
Set select2's AJAX attributes. Attributes can be set using the html5 data attribute. Nested attributes require a double dash as per https://select2.org/configuration/data-attributes#nested-subkey-options
Set select2's AJAX attributes.
[ "Set", "select2", "s", "AJAX", "attributes", "." ]
def build_attrs(self, base_attrs, extra_attrs=None): """ Set select2's AJAX attributes. Attributes can be set using the html5 data attribute. Nested attributes require a double dash as per https://select2.org/configuration/data-attributes#nested-subkey-options """ attrs = super().build_attrs(base_attrs, extra_attrs=extra_attrs) attrs.setdefault('class', '') attrs.update({ 'data-ajax--cache': 'true', 'data-ajax--delay': 250, 'data-ajax--type': 'GET', 'data-ajax--url': self.get_url(), 'data-app-label': self.field.model._meta.app_label, 'data-model-name': self.field.model._meta.model_name, 'data-field-name': self.field.name, 'data-theme': 'admin-autocomplete', 'data-allow-clear': json.dumps(not self.is_required), 'data-placeholder': '', # Allows clearing of the input. 'lang': self.i18n_name, 'class': attrs['class'] + (' ' if attrs['class'] else '') + 'admin-autocomplete', }) return attrs
[ "def", "build_attrs", "(", "self", ",", "base_attrs", ",", "extra_attrs", "=", "None", ")", ":", "attrs", "=", "super", "(", ")", ".", "build_attrs", "(", "base_attrs", ",", "extra_attrs", "=", "extra_attrs", ")", "attrs", ".", "setdefault", "(", "'class'"...
https://github.com/django/django/blob/0a17666045de6739ae1c2ac695041823d5f827f7/django/contrib/admin/widgets.py#L399-L423
odlgroup/odl
0b088df8dc4621c68b9414c3deff9127f4c4f11d
odl/operator/pspace_ops.py
python
DiagonalOperator.derivative
(self, point)
return DiagonalOperator(*derivs, domain=self.domain, range=self.range)
Derivative of this operator. For example, if A and B are operators [[A, 0], [0, B]] The derivative is given by: [[A', 0], [0, B']] This is only well defined if each sub-operator has a derivative Parameters ---------- point : `element-like` in ``domain`` The point in which the derivative should be taken. Returns ------- derivative : `DiagonalOperator` The derivative operator See Also -------- ProductSpaceOperator.derivative
Derivative of this operator.
[ "Derivative", "of", "this", "operator", "." ]
def derivative(self, point): """Derivative of this operator. For example, if A and B are operators [[A, 0], [0, B]] The derivative is given by: [[A', 0], [0, B']] This is only well defined if each sub-operator has a derivative Parameters ---------- point : `element-like` in ``domain`` The point in which the derivative should be taken. Returns ------- derivative : `DiagonalOperator` The derivative operator See Also -------- ProductSpaceOperator.derivative """ point = self.domain.element(point) derivs = [op.derivative(p) for op, p in zip(self.operators, point)] return DiagonalOperator(*derivs, domain=self.domain, range=self.range)
[ "def", "derivative", "(", "self", ",", "point", ")", ":", "point", "=", "self", ".", "domain", ".", "element", "(", "point", ")", "derivs", "=", "[", "op", ".", "derivative", "(", "p", ")", "for", "op", ",", "p", "in", "zip", "(", "self", ".", ...
https://github.com/odlgroup/odl/blob/0b088df8dc4621c68b9414c3deff9127f4c4f11d/odl/operator/pspace_ops.py#L1171-L1204
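A hedged sketch with odl; the operator names are assumed from odl's default operators and may differ by version.

import odl

space = odl.rn(3)
A = odl.ScalingOperator(space, 2.0)    # linear, so its derivative is itself
B = odl.ScalingOperator(space, -1.0)
D = odl.DiagonalOperator(A, B)
D_prime = D.derivative(D.domain.zero())  # diagonal of the sub-derivatives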
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_vendored_deps/library/oc_adm_router.py
python
DeploymentConfig.get_replicas
(self)
return self.get(DeploymentConfig.replicas_path)
return replicas setting
return replicas setting
[ "return", "replicas", "setting" ]
def get_replicas(self): ''' return replicas setting ''' return self.get(DeploymentConfig.replicas_path)
[ "def", "get_replicas", "(", "self", ")", ":", "return", "self", ".", "get", "(", "DeploymentConfig", ".", "replicas_path", ")" ]
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_vendored_deps/library/oc_adm_router.py#L1999-L2001
tristandeleu/pytorch-meta
d55d89ebd47f340180267106bde3e4b723f23762
torchmeta/datasets/utils.py
python
download_file_from_google_drive
(file_id, root, filename=None, md5=None)
Download a Google Drive file and place it in root. Args: file_id (str): id of file to be downloaded root (str): Directory to place downloaded file in filename (str, optional): Name to save the file under. If None, use the id of the file. md5 (str, optional): MD5 checksum of the download. If None, do not check
Download a Google Drive file and place it in root.
[ "Download", "a", "Google", "Drive", "file", "and", "place", "it", "in", "root", "." ]
def download_file_from_google_drive(file_id, root, filename=None, md5=None): """Download a Google Drive file and place it in root. Args: file_id (str): id of file to be downloaded root (str): Directory to place downloaded file in filename (str, optional): Name to save the file under. If None, use the id of the file. md5 (str, optional): MD5 checksum of the download. If None, do not check """ # Based on https://stackoverflow.com/questions/38511444/python-download-files-from-google-drive-using-url import requests url = "https://docs.google.com/uc?export=download" root = os.path.expanduser(root) if not filename: filename = file_id fpath = os.path.join(root, filename) os.makedirs(root, exist_ok=True) if os.path.isfile(fpath) and check_integrity(fpath, md5): print('Using downloaded and verified file: ' + fpath) else: session = requests.Session() response = session.get(url, params={'id': file_id}, stream=True) token = _get_confirm_token(response) if token: params = {'id': file_id, 'confirm': token} response = session.get(url, params=params, stream=True) if _quota_exceeded(response): msg = ( f"The daily quota of the file {filename} is exceeded and it " f"can't be downloaded. This is a limitation of Google Drive " f"and can only be overcome by trying again later." ) raise RuntimeError(msg) _save_response_content(response, fpath)
[ "def", "download_file_from_google_drive", "(", "file_id", ",", "root", ",", "filename", "=", "None", ",", "md5", "=", "None", ")", ":", "# Based on https://stackoverflow.com/questions/38511444/python-download-files-from-google-drive-using-url", "import", "requests", "url", "=...
https://github.com/tristandeleu/pytorch-meta/blob/d55d89ebd47f340180267106bde3e4b723f23762/torchmeta/datasets/utils.py#L47-L87
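A hedged usage sketch; the file id below is a placeholder, not a real Drive id.

download_file_from_google_drive(
    file_id='0B0cbyE2lrVI7XXXXXXX',  # hypothetical id
    root='~/data',
    filename='dataset.tar.gz',
    md5=None,                        # skip the integrity check
)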
pypr/pysph
9cb9a859934939307c65a25cbf73e4ecc83fea4a
pysph/tools/ipy_viewer.py
python
Viewer1D._configure_plot
(self)
Set attributes for plotting.
Set attributes for plotting.
[ "Set", "attributes", "for", "plotting", "." ]
def _configure_plot(self): ''' Set attributes for plotting. ''' self.figure, temp = plt.subplots() self.add_axes = False self._scatters_ax = {'host': temp} self._scatters = {} self._solver_time_ax = {} self.figure.show()
[ "def", "_configure_plot", "(", "self", ")", ":", "self", ".", "figure", ",", "temp", "=", "plt", ".", "subplots", "(", ")", "self", ".", "add_axes", "=", "False", "self", ".", "_scatters_ax", "=", "{", "'host'", ":", "temp", "}", "self", ".", "_scatt...
https://github.com/pypr/pysph/blob/9cb9a859934939307c65a25cbf73e4ecc83fea4a/pysph/tools/ipy_viewer.py#L799-L811
onnx/onnx-coreml
141fc33d7217674ea8bda36494fa8089a543a3f3
onnx_coreml/_operators_nd.py
python
_convert_acos
(builder, node, graph, err)
convert to CoreML Acos Layer: https://github.com/apple/coremltools/blob/655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492/mlmodel/format/NeuralNetwork.proto#L3793
convert to CoreML Acos Layer: https://github.com/apple/coremltools/blob/655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492/mlmodel/format/NeuralNetwork.proto#L3793
[ "convert", "to", "CoreML", "Acos", "Layer", ":", "https", ":", "//", "github", ".", "com", "/", "apple", "/", "coremltools", "/", "blob", "/", "655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492", "/", "mlmodel", "/", "format", "/", "NeuralNetwork", ".", "proto#L3793" ]
def _convert_acos(builder, node, graph, err): ''' convert to CoreML Acos Layer: https://github.com/apple/coremltools/blob/655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492/mlmodel/format/NeuralNetwork.proto#L3793 ''' load_input_constants(builder, node, graph, err) builder.add_acos( name=node.name, input_name=node.inputs[0], output_name=node.outputs[0] )
[ "def", "_convert_acos", "(", "builder", ",", "node", ",", "graph", ",", "err", ")", ":", "load_input_constants", "(", "builder", ",", "node", ",", "graph", ",", "err", ")", "builder", ".", "add_acos", "(", "name", "=", "node", ".", "name", ",", "input_...
https://github.com/onnx/onnx-coreml/blob/141fc33d7217674ea8bda36494fa8089a543a3f3/onnx_coreml/_operators_nd.py#L181-L191
dongrixinyu/JioNLP
2c5b11439915891f0f24955b7de4f637f38a4b44
jionlp/rule/extractor.py
python
Extractor.extract_email
(self, text, detail=False)
Extract E-mail addresses from the text Args: text(str): input text string detail(bool): whether to include offsets (positions of the E-mails in the text) Returns: list: list of emails
Extract E-mail addresses from the text
[ "Extract", "E", "-", "mail", "addresses", "from", "the", "text" ]
def extract_email(self, text, detail=False): """ Extract E-mail addresses from the text Args: text(str): input text string detail(bool): whether to include offsets (positions of the E-mails in the text) Returns: list: list of emails """ if self.email_pattern is None: self.email_pattern = re.compile(EMAIL_PATTERN) text = ''.join(['#', text, '#']) results = self._extract_base(self.email_pattern, text, with_offset=detail) if not detail: return results else: if self.email_domain_pattern is None: self.email_domain_pattern = re.compile(EMAIL_DOMAIN_PATTERN) detail_results = list() for item in results: domain_name = self.email_domain_pattern.search( item['text']).group(1) item.update({'domain_name': domain_name}) detail_results.append(item) return detail_results
[ "def", "extract_email", "(", "self", ",", "text", ",", "detail", "=", "False", ")", ":", "if", "self", ".", "email_pattern", "is", "None", ":", "self", ".", "email_pattern", "=", "re", ".", "compile", "(", "EMAIL_PATTERN", ")", "text", "=", "''", ".", ...
https://github.com/dongrixinyu/JioNLP/blob/2c5b11439915891f0f24955b7de4f637f38a4b44/jionlp/rule/extractor.py#L145-L174
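A hedged usage sketch; the import path is assumed from this file's location in the repository.

from jionlp.rule.extractor import Extractor

ex = Extractor()
text = 'Contact us at [email protected] for details.'
print(ex.extract_email(text))               # ['[email protected]']
print(ex.extract_email(text, detail=True))  # offsets plus the parsed domain_name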
orendv/learning_to_sample
99e977e1c53ec0fa8b2b8a5151a56d0d088f6f78
reconstruction/src/in_out.py
python
PointCloudDataSet.__init__
(self, point_clouds, noise=None, labels=None, copy=True, init_shuffle=True)
Construct a DataSet. Args: init_shuffle: shuffle the data before the first epoch is reached. Output: original_pclouds, labels, (None or Feed) # TODO Rename
Construct a DataSet. Args: init_shuffle: shuffle the data before the first epoch is reached. Output: original_pclouds, labels, (None or Feed) # TODO Rename
[ "Construct", "a", "DataSet", ".", "Args", ":", "init_shuffle", ":", "shuffle", "the", "data", "before", "the", "first", "epoch", "is", "reached", ".", "Output", ":", "original_pclouds", "labels", "(", "None", "or", "Feed", ")", "#", "TODO", "Rename" ]
def __init__(self, point_clouds, noise=None, labels=None, copy=True, init_shuffle=True): '''Construct a DataSet. Args: init_shuffle: shuffle the data before the first epoch is reached. Output: original_pclouds, labels, (None or Feed) # TODO Rename ''' self.num_examples = point_clouds.shape[0] self.n_points = point_clouds.shape[1] if labels is not None: assert point_clouds.shape[0] == labels.shape[0], ('points.shape: %s labels.shape: %s' % (point_clouds.shape, labels.shape)) if copy: self.labels = labels.copy() else: self.labels = labels else: self.labels = np.ones(self.num_examples, dtype=np.int8) if noise is not None: assert (type(noise) is np.ndarray) if copy: self.noisy_point_clouds = noise.copy() else: self.noisy_point_clouds = noise else: self.noisy_point_clouds = None if copy: self.point_clouds = point_clouds.copy() else: self.point_clouds = point_clouds self.epochs_completed = 0 self._index_in_epoch = 0 if init_shuffle: self.shuffle_data()
[ "def", "__init__", "(", "self", ",", "point_clouds", ",", "noise", "=", "None", ",", "labels", "=", "None", ",", "copy", "=", "True", ",", "init_shuffle", "=", "True", ")", ":", "self", ".", "num_examples", "=", "point_clouds", ".", "shape", "[", "0", ...
https://github.com/orendv/learning_to_sample/blob/99e977e1c53ec0fa8b2b8a5151a56d0d088f6f78/reconstruction/src/in_out.py#L189-L227
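A hedged usage sketch: 100 synthetic clouds of 2048 points each, shuffled on construction.

import numpy as np

clouds = np.random.rand(100, 2048, 3).astype(np.float32)
ds = PointCloudDataSet(clouds, labels=None, copy=True, init_shuffle=True)
print(ds.num_examples, ds.n_points)  # 100 2048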
IBM/pytorchpipe
9cb17271666061cb19fe24197ecd5e4c8d32c5da
ptp/configuration/config_registry.py
python
ConfigRegistry.del_config_params
(self, keypath: list)
Removes an entry from the `config` parameter dict of the current :py:class:`ConfigRegistry`, \ and update the resulting parameters dict. The entry can either be a subtree or a leaf of the `config` parameter dict. :param keypath: list of keys to subtree / leaf in the `config` parameter dict. :type keypath: list
Removes an entry from the `config` parameter dict of the current :py:class:`ConfigRegistry`, \ and update the resulting parameters dict.
[ "Removes", "an", "entry", "from", "the", "config", "parameter", "dict", "of", "the", "current", ":", "py", ":", "class", ":", "ConfigRegistry", "\\", "and", "update", "the", "resulting", "parameters", "dict", "." ]
def del_config_params(self, keypath: list): """ Removes an entry from the `config` parameter dict of the current :py:class:`ConfigRegistry`, \ and update the resulting parameters dict. The entry can either be a subtree or a leaf of the `config` parameter dict. :param keypath: list of keys to subtree / leaf in the `config` parameter dict. :type keypath: list """ self.delete_subtree(self._superseding_config_params, keypath) self._update_params()
[ "def", "del_config_params", "(", "self", ",", "keypath", ":", "list", ")", ":", "self", ".", "delete_subtree", "(", "self", ".", "_superseding_config_params", ",", "keypath", ")", "self", ".", "_update_params", "(", ")" ]
https://github.com/IBM/pytorchpipe/blob/9cb17271666061cb19fe24197ecd5e4c8d32c5da/ptp/configuration/config_registry.py#L148-L160
python-babel/flask-babel
ec7ae9ed2e22c7aebd4e732c1c3dc6d45fe8db76
flask_babel/__init__.py
python
format_timedelta
(datetime_or_timedelta, granularity='second', add_direction=False, threshold=0.85)
return dates.format_timedelta( datetime_or_timedelta, granularity, threshold=threshold, add_direction=add_direction, locale=get_locale() )
Format the elapsed time from the given date to now or the given timedelta. This function is also available in the template context as filter named `timedeltaformat`.
Format the elapsed time from the given date to now or the given timedelta.
[ "Format", "the", "elapsed", "time", "from", "the", "given", "date", "to", "now", "or", "the", "given", "timedelta", "." ]
def format_timedelta(datetime_or_timedelta, granularity='second', add_direction=False, threshold=0.85): """Format the elapsed time from the given date to now or the given timedelta. This function is also available in the template context as filter named `timedeltaformat`. """ if isinstance(datetime_or_timedelta, datetime): datetime_or_timedelta = datetime.utcnow() - datetime_or_timedelta return dates.format_timedelta( datetime_or_timedelta, granularity, threshold=threshold, add_direction=add_direction, locale=get_locale() )
[ "def", "format_timedelta", "(", "datetime_or_timedelta", ",", "granularity", "=", "'second'", ",", "add_direction", "=", "False", ",", "threshold", "=", "0.85", ")", ":", "if", "isinstance", "(", "datetime_or_timedelta", ",", "datetime", ")", ":", "datetime_or_tim...
https://github.com/python-babel/flask-babel/blob/ec7ae9ed2e22c7aebd4e732c1c3dc6d45fe8db76/flask_babel/__init__.py#L411-L427
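A hedged usage sketch; get_locale needs an application (or request) context, hence the explicit app_context below.

from datetime import timedelta
from flask import Flask
from flask_babel import Babel, format_timedelta

app = Flask(__name__)
babel = Babel(app)

with app.app_context():
    print(format_timedelta(timedelta(hours=2)))                     # e.g. '2 hours'
    print(format_timedelta(timedelta(days=1), add_direction=True))  # e.g. 'in 1 day'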
devbisme/KiPart
3e5d9ae927ea27fb29998ee8d613fdc26a019243
kipart/kipart.py
python
do_bundling
(pin_data, bundle, fuzzy_match)
Handle bundling for power pins. Unbundle everything else.
Handle bundling for power pins. Unbundle everything else.
[ "Handle", "bundling", "for", "power", "pins", ".", "Unbundle", "everything", "else", "." ]
def do_bundling(pin_data, bundle, fuzzy_match): """Handle bundling for power pins. Unbundle everything else.""" for unit in list(pin_data.values()): for side in list(unit.values()): for name, pins in list(side.items()): if len(pins) > 1: for index, p in enumerate(pins): if is_pwr(p, fuzzy_match) and bundle: side[p.name + "_pwr"].append(p) else: side[p.name + "_" + str(index)].append(p) del side[name]
[ "def", "do_bundling", "(", "pin_data", ",", "bundle", ",", "fuzzy_match", ")", ":", "for", "unit", "in", "list", "(", "pin_data", ".", "values", "(", ")", ")", ":", "for", "side", "in", "list", "(", "unit", ".", "values", "(", ")", ")", ":", "for",...
https://github.com/devbisme/KiPart/blob/3e5d9ae927ea27fb29998ee8d613fdc26a019243/kipart/kipart.py#L767-L778
gnuradio/pybombs
17044241bf835b93571026b112f179f2db7448a4
pybombs/packagers/port.py
python
ExternalPort.update
(self, pkgname)
update package with 'port upgrade'
update package with 'port upgrade'
[ "update", "package", "with", "port", "upgrade" ]
def update(self, pkgname): """ update package with 'port upgrade' """ try: subproc.monitor_process(["port", "upgrade", pkgname], elevate=True, throw=True) return True except Exception as ex: self.log.error("Running port upgrade failed.") self.log.trace(str(ex))
[ "def", "update", "(", "self", ",", "pkgname", ")", ":", "try", ":", "subproc", ".", "monitor_process", "(", "[", "\"port\"", ",", "\"upgrade\"", ",", "pkgname", "]", ",", "elevate", "=", "True", ",", "throw", "=", "True", ")", "return", "True", "except...
https://github.com/gnuradio/pybombs/blob/17044241bf835b93571026b112f179f2db7448a4/pybombs/packagers/port.py#L83-L92
therne/dmn-tensorflow
c107d6c08e29cfad219bc1b9421f1bac56b72cc3
utils/nn.py
python
bias
(name, dim, initial_value=0.0)
return tf.get_variable(name, dims, initializer=tf.constant_initializer(initial_value))
Initializes bias parameter. :param name: Variable name :param dim: Tensor size (list or int) :param initial_value: Initial bias term :return: Variable
Initializes bias parameter. :param name: Variable name :param dim: Tensor size (list or int) :param initial_value: Initial bias term :return: Variable
[ "Initializes", "bias", "parameter", ".", ":", "param", "name", ":", "Variable", "name", ":", "param", "dim", ":", "Tensor", "size", "(", "list", "or", "int", ")", ":", "param", "initial_value", ":", "Initial", "bias", "term", ":", "return", ":", "Variabl...
def bias(name, dim, initial_value=0.0): """ Initializes bias parameter. :param name: Variable name :param dim: Tensor size (list or int) :param initial_value: Initial bias term :return: Variable """ dims = dim if isinstance(dim, list) else [dim] return tf.get_variable(name, dims, initializer=tf.constant_initializer(initial_value))
[ "def", "bias", "(", "name", ",", "dim", ",", "initial_value", "=", "0.0", ")", ":", "dims", "=", "dim", "if", "isinstance", "(", "dim", ",", "list", ")", "else", "[", "dim", "]", "return", "tf", ".", "get_variable", "(", "name", ",", "dims", ",", ...
https://github.com/therne/dmn-tensorflow/blob/c107d6c08e29cfad219bc1b9421f1bac56b72cc3/utils/nn.py#L44-L52
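A hedged usage sketch (TF 1.x variable scoping, matching this codebase):

import tensorflow as tf

with tf.variable_scope('layer1'):
    b = bias('b', 128)              # shape [128], initialized to 0.0
    b2 = bias('b2', [64, 32], 0.1)  # list dims are also accepted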
reviewboard/reviewboard
7395902e4c181bcd1d633f61105012ffb1d18e1b
reviewboard/datagrids/sidebar.py
python
Sidebar.__init__
(self, item_classes, default_view_id=None, css_classes=[])
Initialize the sidebar. Args: item_classes (list of type): The list of :py:class:`BaseSidebarItem` subclasses to include by default in the sidebar. default_view_id (unicode, optional): The default "view" of the datagrid to display. This corresponds to a registered :py:attr:`BaseSidebarItem.view_id`. css_classes (list of unicode): The list of additional CSS classes to apply to the sidebar.
Initialize the sidebar.
[ "Initialize", "the", "sidebar", "." ]
def __init__(self, item_classes, default_view_id=None, css_classes=[]): """Initialize the sidebar. Args: item_classes (list of type): The list of :py:class:`BaseSidebarItem` subclasses to include by default in the sidebar. default_view_id (unicode, optional): The default "view" of the datagrid to display. This corresponds to a registered :py:attr:`BaseSidebarItem.view_id`. css_classes (list of unicode): The list of additional CSS classes to apply to the sidebar. """ self._item_classes = [] self.css_classes = css_classes self.default_view_id = default_view_id for item_cls in item_classes: self.add_item(item_cls)
[ "def", "__init__", "(", "self", ",", "item_classes", ",", "default_view_id", "=", "None", ",", "css_classes", "=", "[", "]", ")", ":", "self", ".", "_item_classes", "=", "[", "]", "self", ".", "css_classes", "=", "css_classes", "self", ".", "default_view_i...
https://github.com/reviewboard/reviewboard/blob/7395902e4c181bcd1d633f61105012ffb1d18e1b/reviewboard/datagrids/sidebar.py#L349-L369
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/networkx/algorithms/operators/all.py
python
intersection_all
(graphs)
return R
Returns a new graph that contains only the edges that exist in all graphs. All supplied graphs must have the same node set. Parameters ---------- graphs : list List of NetworkX graphs Returns ------- R : A new graph with the same type as the first graph in list Raises ------ ValueError If `graphs` is an empty list. Notes ----- Attributes from the graph, nodes, and edges are not copied to the new graph.
Returns a new graph that contains only the edges that exist in all graphs.
[ "Returns", "a", "new", "graph", "that", "contains", "only", "the", "edges", "that", "exist", "in", "all", "graphs", "." ]
def intersection_all(graphs): """Returns a new graph that contains only the edges that exist in all graphs. All supplied graphs must have the same node set. Parameters ---------- graphs : list List of NetworkX graphs Returns ------- R : A new graph with the same type as the first graph in list Raises ------ ValueError If `graphs` is an empty list. Notes ----- Attributes from the graph, nodes, and edges are not copied to the new graph. """ if not graphs: raise ValueError('cannot apply intersection_all to an empty list') graphs = iter(graphs) R = next(graphs) for H in graphs: R = nx.intersection(R, H) return R
[ "def", "intersection_all", "(", "graphs", ")", ":", "if", "not", "graphs", ":", "raise", "ValueError", "(", "'cannot apply intersection_all to an empty list'", ")", "graphs", "=", "iter", "(", "graphs", ")", "R", "=", "next", "(", "graphs", ")", "for", "H", ...
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/networkx/algorithms/operators/all.py#L145-L176
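A hedged usage sketch; both graphs share the node set {1, 2, 3}, as the function requires.

import networkx as nx

G = nx.Graph([(1, 2), (2, 3)])
H = nx.Graph([(2, 3), (1, 3)])
R = nx.intersection_all([G, H])
print(sorted(R.edges()))  # [(2, 3)]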
jkkummerfeld/text2sql-data
2905ab815b4893d99ea061a20fb55860ecb1f92e
systems/sequence-to-sequence/bin/tools/profile.py
python
param_analysis_options
(output_dir)
return "scope", options
Options for model parameter analysis
Options for model parameter analysis
[ "Options", "for", "model", "parameter", "analysis" ]
def param_analysis_options(output_dir): """Options for model parameter analysis """ options = model_analyzer.TRAINABLE_VARS_PARAMS_STAT_OPTIONS.copy() options["select"] = ["params", "bytes"] options["order_by"] = "params" options["account_type_regexes"] = ["Variable"] if output_dir: options["dump_to_file"] = os.path.join(output_dir, "params.txt") return "scope", options
[ "def", "param_analysis_options", "(", "output_dir", ")", ":", "options", "=", "model_analyzer", ".", "TRAINABLE_VARS_PARAMS_STAT_OPTIONS", ".", "copy", "(", ")", "options", "[", "\"select\"", "]", "=", "[", "\"params\"", ",", "\"bytes\"", "]", "options", "[", "\...
https://github.com/jkkummerfeld/text2sql-data/blob/2905ab815b4893d99ea061a20fb55860ecb1f92e/systems/sequence-to-sequence/bin/tools/profile.py#L124-L133
sagemath/sage
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
src/sage/modular/modform/element.py
python
Newform._atkin_lehner_eigenvalue_from_qexp
(self, Q)
return l
Return the arithmetically-normalized `W_Q`-pseudoeigenvalue of ``self``, using a formula based on `q`-expansions (Theorem 2.1 of [AL1978]_). INPUT: - ``self`` -- a newform `f` - ``Q`` -- an integer exactly dividing the level of ``self`` .. NOTE:: This method assumes that the `Q`-th coefficient in the `q`-expansion of ``self`` is non-zero. TESTS:: sage: f = Newforms(Gamma0(18), 4)[0]; f q + 2*q^2 + 4*q^4 - 6*q^5 + O(q^6) sage: f._atkin_lehner_eigenvalue_from_qexp(2) -2 sage: f._atkin_lehner_eigenvalue_from_qexp(9) Traceback (most recent call last): ... ValueError: a_Q must be nonzero An example with odd weight:: sage: f = Newforms(Gamma1(15), 3, names='a')[2]; f q + a2*q^2 + (-a2 - 2)*q^3 - q^4 - a2*q^5 + O(q^6) sage: f._atkin_lehner_eigenvalue_from_qexp(5) a2
Return the arithmetically-normalized `W_Q`-pseudoeigenvalue of ``self``, using a formula based on `q`-expansions (Theorem 2.1 of [AL1978]_).
[ "Return", "the", "arithmetically", "-", "normalized", "W_Q", "-", "pseudoeigenvalue", "of", "self", "using", "a", "formula", "based", "on", "q", "-", "expansions", "(", "Theorem", "2", ".", "1", "of", "[", "AL1978", "]", "_", ")", "." ]
def _atkin_lehner_eigenvalue_from_qexp(self, Q): """ Return the arithmetically-normalized `W_Q`-pseudoeigenvalue of ``self``, using a formula based on `q`-expansions (Theorem 2.1 of [AL1978]_). INPUT: - ``self`` -- a newform `f` - ``Q`` -- an integer exactly dividing the level of ``self`` .. NOTE:: This method assumes that the `Q`-th coefficient in the `q`-expansion of ``self`` is non-zero. TESTS:: sage: f = Newforms(Gamma0(18), 4)[0]; f q + 2*q^2 + 4*q^4 - 6*q^5 + O(q^6) sage: f._atkin_lehner_eigenvalue_from_qexp(2) -2 sage: f._atkin_lehner_eigenvalue_from_qexp(9) Traceback (most recent call last): ... ValueError: a_Q must be nonzero An example with odd weight:: sage: f = Newforms(Gamma1(15), 3, names='a')[2]; f q + a2*q^2 + (-a2 - 2)*q^3 - q^4 - a2*q^5 + O(q^6) sage: f._atkin_lehner_eigenvalue_from_qexp(5) a2 """ if Q == 1: return ZZ(1) a_Q = self[Q] if not a_Q: raise ValueError("a_Q must be nonzero") l = ZZ(1) M = self.character().conductor() for p, e in Q.factor(): if p.divides(M): # principal series at p l *= (p**(self.weight() - 2) / self[p])**e else: # special at p l *= -self[p] return l
[ "def", "_atkin_lehner_eigenvalue_from_qexp", "(", "self", ",", "Q", ")", ":", "if", "Q", "==", "1", ":", "return", "ZZ", "(", "1", ")", "a_Q", "=", "self", "[", "Q", "]", "if", "not", "a_Q", ":", "raise", "ValueError", "(", "\"a_Q must be nonzero\"", "...
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/modular/modform/element.py#L1740-L1788
indygreg/python-build-standalone
1dfe9d4186e37bd57c544b09402690e968a225be
pythonbuild/utils.py
python
add_licenses_to_extension_entry
(entry)
Add licenses keys to a ``extensions`` entry for JSON distribution info.
Add licenses keys to a ``extensions`` entry for JSON distribution info.
[ "Add", "licenses", "keys", "to", "a", "extensions", "entry", "for", "JSON", "distribution", "info", "." ]
def add_licenses_to_extension_entry(entry): """Add licenses keys to a ``extensions`` entry for JSON distribution info.""" have_licenses = False licenses = set() license_paths = set() license_public_domain = None have_local_link = False for link in entry["links"]: name = link["name"] if "path_static" in link or "path_dynamic" in link: have_local_link = True for key, value in DOWNLOADS.items(): if name not in value.get("library_names", []): continue # Don't add licenses annotations if they aren't defined. This leaves # things as "unknown" to consumers. if "licenses" not in value: continue have_licenses = True licenses |= set(value["licenses"]) license_paths.add("licenses/%s" % value["license_file"]) license_public_domain = value.get("license_public_domain", False) if have_local_link and not have_licenses: raise Exception( "missing license for local library for extension entry: %s" % entry ) if not have_licenses: return entry["licenses"] = sorted(licenses) entry["license_paths"] = sorted(license_paths) entry["license_public_domain"] = license_public_domain
[ "def", "add_licenses_to_extension_entry", "(", "entry", ")", ":", "have_licenses", "=", "False", "licenses", "=", "set", "(", ")", "license_paths", "=", "set", "(", ")", "license_public_domain", "=", "None", "have_local_link", "=", "False", "for", "link", "in", ...
https://github.com/indygreg/python-build-standalone/blob/1dfe9d4186e37bd57c544b09402690e968a225be/pythonbuild/utils.py#L435-L474
oilshell/oil
94388e7d44a9ad879b12615f6203b38596b5a2d3
Python-2.7.13/Lib/macpath.py
python
abspath
(path)
return normpath(path)
Return an absolute path.
Return an absolute path.
[ "Return", "an", "absolute", "path", "." ]
def abspath(path): """Return an absolute path.""" if not isabs(path): if isinstance(path, _unicode): cwd = os.getcwdu() else: cwd = os.getcwd() path = join(cwd, path) return normpath(path)
[ "def", "abspath", "(", "path", ")", ":", "if", "not", "isabs", "(", "path", ")", ":", "if", "isinstance", "(", "path", ",", "_unicode", ")", ":", "cwd", "=", "os", ".", "getcwdu", "(", ")", "else", ":", "cwd", "=", "os", ".", "getcwd", "(", ")"...
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/Python-2.7.13/Lib/macpath.py#L187-L195
GoogleCloudPlatform/gsutil
5be882803e76608e2fd29cf8c504ccd1fe0a7746
gslib/commands/autoclass.py
python
AutoclassCommand.RunCommand
(self)
Command entry point for the autoclass command.
Command entry point for the autoclass command.
[ "Command", "entry", "point", "for", "the", "autoclass", "command", "." ]
def RunCommand(self): """Command entry point for the autoclass command.""" action_subcommand = self.args[0] self.ParseSubOpts(check_args=True) if action_subcommand == 'get' or action_subcommand == 'set': metrics.LogCommandParams(sub_opts=self.sub_opts) metrics.LogCommandParams(subcommands=[action_subcommand]) return self._autoclass() else: raise CommandException('Invalid subcommand "%s", use get|set instead.' % action_subcommand)
[ "def", "RunCommand", "(", "self", ")", ":", "action_subcommand", "=", "self", ".", "args", "[", "0", "]", "self", ".", "ParseSubOpts", "(", "check_args", "=", "True", ")", "if", "action_subcommand", "==", "'get'", "or", "action_subcommand", "==", "'set'", ...
https://github.com/GoogleCloudPlatform/gsutil/blob/5be882803e76608e2fd29cf8c504ccd1fe0a7746/gslib/commands/autoclass.py#L186-L197
mesonbuild/meson
a22d0f9a0a787df70ce79b05d0c45de90a970048
mesonbuild/environment.py
python
Environment.get_static_lib_dir
(self)
return self.get_libdir()
Install dir for the static library
Install dir for the static library
[ "Install", "dir", "for", "the", "static", "library" ]
def get_static_lib_dir(self) -> str: "Install dir for the static library" return self.get_libdir()
[ "def", "get_static_lib_dir", "(", "self", ")", "->", "str", ":", "return", "self", ".", "get_libdir", "(", ")" ]
https://github.com/mesonbuild/meson/blob/a22d0f9a0a787df70ce79b05d0c45de90a970048/mesonbuild/environment.py#L817-L819
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
ansible/roles/lib_statuspageio/library/statuspage_incident.py
python
StatusPageIncident.__init__
(self, api_key, page_id, name=None, scheduled=None, unresolved=None, org_id=None, incident_type='realtime', status='investigating', update_twitter=False, message=None, components=None, scheduled_for=None, scheduled_until=None, scheduled_remind_prior=False, scheduled_auto_in_progress=False, scheduled_auto_completed=False, verbose=False)
Constructor for StatusPageIncident
Constructor for StatusPageIncident
[ "Constructor", "for", "StatusPageIncident" ]
def __init__(self, api_key, page_id, name=None, scheduled=None, unresolved=None, org_id=None, incident_type='realtime', status='investigating', update_twitter=False, message=None, components=None, scheduled_for=None, scheduled_until=None, scheduled_remind_prior=False, scheduled_auto_in_progress=False, scheduled_auto_completed=False, verbose=False): ''' Constructor for StatusPageIncident ''' super(StatusPageIncident, self).__init__(api_key, page_id, org_id) self.name = name self.api_key = api_key self.page_id = page_id self.org_id = org_id self.scheduled = scheduled self.unresolved = unresolved self.verbose = verbose self.incidents = None self.incident_type = incident_type self.status = status self.update_twitter = update_twitter self.message = message self.components = components self.scheduled_for = scheduled_for self.scheduled_until = scheduled_until self.scheduled_remind_prior = scheduled_remind_prior self.scheduled_auto_in_progress = scheduled_auto_in_progress self.scheduled_auto_completed = scheduled_auto_completed if self.components is None: self.components = {} self._params = None self._incidents = None
[ "def", "__init__", "(", "self", ",", "api_key", ",", "page_id", ",", "name", "=", "None", ",", "scheduled", "=", "None", ",", "unresolved", "=", "None", ",", "org_id", "=", "None", ",", "incident_type", "=", "'realtime'", ",", "status", "=", "'investigat...
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/ansible/roles/lib_statuspageio/library/statuspage_incident.py#L330-L371
eth-brownie/brownie
754bda9f0a294b2beb86453d5eca4ff769a877c8
brownie/network/web3.py
python
Web3.genesis_hash
(self)
return self._genesis_hash
The genesis hash of the currently active network.
The genesis hash of the currently active network.
[ "The", "genesis", "hash", "of", "the", "currently", "active", "network", "." ]
def genesis_hash(self) -> str: """The genesis hash of the currently active network.""" if self.provider is None: raise ConnectionError("web3 is not currently connected") if self._genesis_hash is None: self._genesis_hash = self.eth.get_block(0)["hash"].hex()[2:] return self._genesis_hash
[ "def", "genesis_hash", "(", "self", ")", "->", "str", ":", "if", "self", ".", "provider", "is", "None", ":", "raise", "ConnectionError", "(", "\"web3 is not currently connected\"", ")", "if", "self", ".", "_genesis_hash", "is", "None", ":", "self", ".", "_ge...
https://github.com/eth-brownie/brownie/blob/754bda9f0a294b2beb86453d5eca4ff769a877c8/brownie/network/web3.py#L147-L153
buke/GreenOdoo
3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df
runtime/python/lib/python2.7/mailbox.py
python
Mailbox.flush
(self)
Write any pending changes to the disk.
Write any pending changes to the disk.
[ "Write", "any", "pending", "changes", "to", "the", "disk", "." ]
def flush(self): """Write any pending changes to the disk.""" raise NotImplementedError('Method must be implemented by subclass')
[ "def", "flush", "(", "self", ")", ":", "raise", "NotImplementedError", "(", "'Method must be implemented by subclass'", ")" ]
https://github.com/buke/GreenOdoo/blob/3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df/runtime/python/lib/python2.7/mailbox.py#L184-L186
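The base class defers flushing to subclasses; a hedged sketch with the concrete mailbox.mbox implementation from the same module:

import mailbox

mb = mailbox.mbox('/tmp/example.mbox')
mb.lock()
try:
    mb.add('From: [email protected]\n\nhello\n')
    mb.flush()   # write pending changes to disk
finally:
    mb.unlock()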