nwo stringlengths 5 86 | sha stringlengths 40 40 | path stringlengths 4 189 | language stringclasses 1 value | identifier stringlengths 1 94 | parameters stringlengths 2 4.03k | argument_list stringclasses 1 value | return_statement stringlengths 0 11.5k | docstring stringlengths 1 33.2k | docstring_summary stringlengths 0 5.15k | docstring_tokens list | function stringlengths 34 151k | function_tokens list | url stringlengths 90 278 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/third_party/gsutil/third_party/boto/boto/file/bucket.py | python | Bucket.get_all_keys | (self, headers=None, **params) | return SimpleResultSet([key]) | This method returns the single key around which this anonymous Bucket
was instantiated.
:rtype: SimpleResultSet
:return: The result from file system listing the keys requested | This method returns the single key around which this anonymous Bucket
was instantiated. | [
"This",
"method",
"returns",
"the",
"single",
"key",
"around",
"which",
"this",
"anonymous",
"Bucket",
"was",
"instantiated",
"."
] | def get_all_keys(self, headers=None, **params):
"""
This method returns the single key around which this anonymous Bucket
was instantiated.
:rtype: SimpleResultSet
:return: The result from file system listing the keys requested
"""
key = Key(self.name, self.contained_key)
return SimpleResultSet([key]) | [
"def",
"get_all_keys",
"(",
"self",
",",
"headers",
"=",
"None",
",",
"*",
"*",
"params",
")",
":",
"key",
"=",
"Key",
"(",
"self",
".",
"name",
",",
"self",
".",
"contained_key",
")",
"return",
"SimpleResultSet",
"(",
"[",
"key",
"]",
")"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/file/bucket.py#L59-L69 | |
natanielruiz/android-yolo | 1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f | jni-build/jni/include/tensorflow/examples/tutorials/mnist/fully_connected_feed.py | python | run_training | () | Train MNIST for a number of steps. | Train MNIST for a number of steps. | [
"Train",
"MNIST",
"for",
"a",
"number",
"of",
"steps",
"."
] | def run_training():
"""Train MNIST for a number of steps."""
# Get the sets of images and labels for training, validation, and
# test on MNIST.
data_sets = input_data.read_data_sets(FLAGS.train_dir, FLAGS.fake_data)
# Tell TensorFlow that the model will be built into the default Graph.
with tf.Graph().as_default():
# Generate placeholders for the images and labels.
images_placeholder, labels_placeholder = placeholder_inputs(
FLAGS.batch_size)
# Build a Graph that computes predictions from the inference model.
logits = mnist.inference(images_placeholder,
FLAGS.hidden1,
FLAGS.hidden2)
# Add to the Graph the Ops for loss calculation.
loss = mnist.loss(logits, labels_placeholder)
# Add to the Graph the Ops that calculate and apply gradients.
train_op = mnist.training(loss, FLAGS.learning_rate)
# Add the Op to compare the logits to the labels during evaluation.
eval_correct = mnist.evaluation(logits, labels_placeholder)
# Build the summary operation based on the TF collection of Summaries.
summary_op = tf.merge_all_summaries()
# Add the variable initializer Op.
init = tf.initialize_all_variables()
# Create a saver for writing training checkpoints.
saver = tf.train.Saver()
# Create a session for running Ops on the Graph.
sess = tf.Session()
# Instantiate a SummaryWriter to output summaries and the Graph.
summary_writer = tf.train.SummaryWriter(FLAGS.train_dir, sess.graph)
# And then after everything is built:
# Run the Op to initialize the variables.
sess.run(init)
# Start the training loop.
for step in xrange(FLAGS.max_steps):
start_time = time.time()
# Fill a feed dictionary with the actual set of images and labels
# for this particular training step.
feed_dict = fill_feed_dict(data_sets.train,
images_placeholder,
labels_placeholder)
# Run one step of the model. The return values are the activations
# from the `train_op` (which is discarded) and the `loss` Op. To
# inspect the values of your Ops or variables, you may include them
# in the list passed to sess.run() and the value tensors will be
# returned in the tuple from the call.
_, loss_value = sess.run([train_op, loss],
feed_dict=feed_dict)
duration = time.time() - start_time
# Write the summaries and print an overview fairly often.
if step % 100 == 0:
# Print status to stdout.
print('Step %d: loss = %.2f (%.3f sec)' % (step, loss_value, duration))
# Update the events file.
summary_str = sess.run(summary_op, feed_dict=feed_dict)
summary_writer.add_summary(summary_str, step)
summary_writer.flush()
# Save a checkpoint and evaluate the model periodically.
if (step + 1) % 1000 == 0 or (step + 1) == FLAGS.max_steps:
checkpoint_file = os.path.join(FLAGS.train_dir, 'checkpoint')
saver.save(sess, checkpoint_file, global_step=step)
# Evaluate against the training set.
print('Training Data Eval:')
do_eval(sess,
eval_correct,
images_placeholder,
labels_placeholder,
data_sets.train)
# Evaluate against the validation set.
print('Validation Data Eval:')
do_eval(sess,
eval_correct,
images_placeholder,
labels_placeholder,
data_sets.validation)
# Evaluate against the test set.
print('Test Data Eval:')
do_eval(sess,
eval_correct,
images_placeholder,
labels_placeholder,
data_sets.test) | [
"def",
"run_training",
"(",
")",
":",
"# Get the sets of images and labels for training, validation, and",
"# test on MNIST.",
"data_sets",
"=",
"input_data",
".",
"read_data_sets",
"(",
"FLAGS",
".",
"train_dir",
",",
"FLAGS",
".",
"fake_data",
")",
"# Tell TensorFlow that... | https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/examples/tutorials/mnist/fully_connected_feed.py#L125-L224 | ||
PaddlePaddle/Paddle | 1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c | python/paddle/fluid/dataset.py | python | InMemoryDataset.get_shuffle_data_size | (self, fleet=None) | return local_data_size[0] | Get shuffle data size, user can call this function to know the num
of ins in all workers after local/global shuffle.
Note:
This function may cause bad performance to local shuffle,
because it has barrier. It does not affect global shuffle.
Args:
fleet(Fleet): Fleet Object.
Returns:
The size of shuffle data.
Examples:
.. code-block:: python
# required: skiptest
import paddle.fluid as fluid
from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet
dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset")
filelist = ["a.txt", "b.txt"]
dataset.set_filelist(filelist)
dataset.load_into_memory()
dataset.global_shuffle(fleet)
print dataset.get_shuffle_data_size(fleet) | Get shuffle data size, user can call this function to know the num
of ins in all workers after local/global shuffle. | [
"Get",
"shuffle",
"data",
"size",
"user",
"can",
"call",
"this",
"function",
"to",
"know",
"the",
"num",
"of",
"ins",
"in",
"all",
"workers",
"after",
"local",
"/",
"global",
"shuffle",
"."
] | def get_shuffle_data_size(self, fleet=None):
"""
Get shuffle data size, user can call this function to know the num
of ins in all workers after local/global shuffle.
Note:
This function may cause bad performance to local shuffle,
because it has barrier. It does not affect global shuffle.
Args:
fleet(Fleet): Fleet Object.
Returns:
The size of shuffle data.
Examples:
.. code-block:: python
# required: skiptest
import paddle.fluid as fluid
from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet
dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset")
filelist = ["a.txt", "b.txt"]
dataset.set_filelist(filelist)
dataset.load_into_memory()
dataset.global_shuffle(fleet)
print dataset.get_shuffle_data_size(fleet)
"""
import numpy as np
local_data_size = self.dataset.get_shuffle_data_size()
local_data_size = np.array([local_data_size])
print('global shuffle local_data_size: ', local_data_size)
if fleet is not None:
from paddle.fluid.incubate.fleet.parameter_server.pslib import PSLib
global_data_size = local_data_size * 0
if not isinstance(fleet, PSLib):
global_data_size = fleet.util.all_reduce(local_data_size)
else:
fleet._role_maker.all_reduce_worker(local_data_size,
global_data_size)
return global_data_size[0]
return local_data_size[0] | [
"def",
"get_shuffle_data_size",
"(",
"self",
",",
"fleet",
"=",
"None",
")",
":",
"import",
"numpy",
"as",
"np",
"local_data_size",
"=",
"self",
".",
"dataset",
".",
"get_shuffle_data_size",
"(",
")",
"local_data_size",
"=",
"np",
".",
"array",
"(",
"[",
"... | https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/fluid/dataset.py#L995-L1037 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/html/parser.py | python | HTMLParser.feed | (self, data) | r"""Feed data to the parser.
Call this as often as you want, with as little or as much text
as you want (may include '\n'). | r"""Feed data to the parser. | [
"r",
"Feed",
"data",
"to",
"the",
"parser",
"."
] | def feed(self, data):
r"""Feed data to the parser.
Call this as often as you want, with as little or as much text
as you want (may include '\n').
"""
self.rawdata = self.rawdata + data
self.goahead(0) | [
"def",
"feed",
"(",
"self",
",",
"data",
")",
":",
"self",
".",
"rawdata",
"=",
"self",
".",
"rawdata",
"+",
"data",
"self",
".",
"goahead",
"(",
"0",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/html/parser.py#L104-L111 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/xml/sax/saxutils.py | python | unescape | (data, entities={}) | return data.replace("&", "&") | Unescape &, <, and > in a string of data.
You can unescape other strings of data by passing a dictionary as
the optional entities parameter. The keys and values must all be
strings; each key will be replaced with its corresponding value. | Unescape &, <, and > in a string of data. | [
"Unescape",
"&",
";",
"<",
";",
"and",
">",
";",
"in",
"a",
"string",
"of",
"data",
"."
] | def unescape(data, entities={}):
"""Unescape &, <, and > in a string of data.
You can unescape other strings of data by passing a dictionary as
the optional entities parameter. The keys and values must all be
strings; each key will be replaced with its corresponding value.
"""
data = data.replace("<", "<")
data = data.replace(">", ">")
if entities:
data = __dict_replace(data, entities)
# must do ampersand last
return data.replace("&", "&") | [
"def",
"unescape",
"(",
"data",
",",
"entities",
"=",
"{",
"}",
")",
":",
"data",
"=",
"data",
".",
"replace",
"(",
"\"<\"",
",",
"\"<\"",
")",
"data",
"=",
"data",
".",
"replace",
"(",
"\">\"",
",",
"\">\"",
")",
"if",
"entities",
":",
"data... | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/xml/sax/saxutils.py#L34-L46 | |
SFTtech/openage | d6a08c53c48dc1e157807471df92197f6ca9e04d | openage/convert/processor/conversion/aoc/modifier_subprocessor.py | python | AoCModifierSubprocessor.elevation_attack_modifiers | (converter_obj_group) | return modifiers | Adds the pregenerated elevation damage multipliers to a line or civ group.
:param converter_obj_group: ConverterObjectGroup that gets the modifier.
:type converter_obj_group: ...dataformat.converter_object.ConverterObjectGroup
:returns: The forward references for the modifier.
:rtype: list | Adds the pregenerated elevation damage multipliers to a line or civ group. | [
"Adds",
"the",
"pregenerated",
"elevation",
"damage",
"multipliers",
"to",
"a",
"line",
"or",
"civ",
"group",
"."
] | def elevation_attack_modifiers(converter_obj_group):
"""
Adds the pregenerated elevation damage multipliers to a line or civ group.
:param converter_obj_group: ConverterObjectGroup that gets the modifier.
:type converter_obj_group: ...dataformat.converter_object.ConverterObjectGroup
:returns: The forward references for the modifier.
:rtype: list
"""
dataset = converter_obj_group.data
modifiers = [
dataset.pregen_nyan_objects[
"util.modifier.elevation_difference.AttackHigh"
].get_nyan_object(),
dataset.pregen_nyan_objects[
"util.modifier.elevation_difference.AttackLow"
].get_nyan_object()
]
return modifiers | [
"def",
"elevation_attack_modifiers",
"(",
"converter_obj_group",
")",
":",
"dataset",
"=",
"converter_obj_group",
".",
"data",
"modifiers",
"=",
"[",
"dataset",
".",
"pregen_nyan_objects",
"[",
"\"util.modifier.elevation_difference.AttackHigh\"",
"]",
".",
"get_nyan_object"... | https://github.com/SFTtech/openage/blob/d6a08c53c48dc1e157807471df92197f6ca9e04d/openage/convert/processor/conversion/aoc/modifier_subprocessor.py#L23-L42 | |
Yijunmaverick/GenerativeFaceCompletion | f72dea0fa27c779fef7b65d2f01e82bcc23a0eb2 | python/caffe/pycaffe.py | python | _Net_forward_all | (self, blobs=None, **kwargs) | return all_outs | Run net forward in batches.
Parameters
----------
blobs : list of blobs to extract as in forward()
kwargs : Keys are input blob names and values are blob ndarrays.
Refer to forward().
Returns
-------
all_outs : {blob name: list of blobs} dict. | Run net forward in batches. | [
"Run",
"net",
"forward",
"in",
"batches",
"."
] | def _Net_forward_all(self, blobs=None, **kwargs):
"""
Run net forward in batches.
Parameters
----------
blobs : list of blobs to extract as in forward()
kwargs : Keys are input blob names and values are blob ndarrays.
Refer to forward().
Returns
-------
all_outs : {blob name: list of blobs} dict.
"""
# Collect outputs from batches
all_outs = {out: [] for out in set(self.outputs + (blobs or []))}
for batch in self._batch(kwargs):
outs = self.forward(blobs=blobs, **batch)
for out, out_blob in outs.iteritems():
all_outs[out].extend(out_blob.copy())
# Package in ndarray.
for out in all_outs:
all_outs[out] = np.asarray(all_outs[out])
# Discard padding.
pad = len(all_outs.itervalues().next()) - len(kwargs.itervalues().next())
if pad:
for out in all_outs:
all_outs[out] = all_outs[out][:-pad]
return all_outs | [
"def",
"_Net_forward_all",
"(",
"self",
",",
"blobs",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"# Collect outputs from batches",
"all_outs",
"=",
"{",
"out",
":",
"[",
"]",
"for",
"out",
"in",
"set",
"(",
"self",
".",
"outputs",
"+",
"(",
"blobs... | https://github.com/Yijunmaverick/GenerativeFaceCompletion/blob/f72dea0fa27c779fef7b65d2f01e82bcc23a0eb2/python/caffe/pycaffe.py#L161-L189 | |
apple/swift-lldb | d74be846ef3e62de946df343e8c234bde93a8912 | scripts/Python/static-binding/lldb.py | python | SBValueList.FindValueObjectByUID | (self, uid) | return _lldb.SBValueList_FindValueObjectByUID(self, uid) | FindValueObjectByUID(SBValueList self, lldb::user_id_t uid) -> SBValue | FindValueObjectByUID(SBValueList self, lldb::user_id_t uid) -> SBValue | [
"FindValueObjectByUID",
"(",
"SBValueList",
"self",
"lldb",
"::",
"user_id_t",
"uid",
")",
"-",
">",
"SBValue"
] | def FindValueObjectByUID(self, uid):
"""FindValueObjectByUID(SBValueList self, lldb::user_id_t uid) -> SBValue"""
return _lldb.SBValueList_FindValueObjectByUID(self, uid) | [
"def",
"FindValueObjectByUID",
"(",
"self",
",",
"uid",
")",
":",
"return",
"_lldb",
".",
"SBValueList_FindValueObjectByUID",
"(",
"self",
",",
"uid",
")"
] | https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L14959-L14961 | |
kevinlin311tw/caffe-cvprw15 | 45c2a1bf0368569c54e0be4edf8d34285cf79e70 | scripts/cpp_lint.py | python | ParseNolintSuppressions | (filename, raw_line, linenum, error) | Updates the global list of error-suppressions.
Parses any NOLINT comments on the current line, updating the global
error_suppressions store. Reports an error if the NOLINT comment
was malformed.
Args:
filename: str, the name of the input file.
raw_line: str, the line of input text, with comments.
linenum: int, the number of the current line.
error: function, an error handler. | Updates the global list of error-suppressions. | [
"Updates",
"the",
"global",
"list",
"of",
"error",
"-",
"suppressions",
"."
] | def ParseNolintSuppressions(filename, raw_line, linenum, error):
"""Updates the global list of error-suppressions.
Parses any NOLINT comments on the current line, updating the global
error_suppressions store. Reports an error if the NOLINT comment
was malformed.
Args:
filename: str, the name of the input file.
raw_line: str, the line of input text, with comments.
linenum: int, the number of the current line.
error: function, an error handler.
"""
# FIXME(adonovan): "NOLINT(" is misparsed as NOLINT(*).
matched = _RE_SUPPRESSION.search(raw_line)
if matched:
if matched.group(1) == '_NEXT_LINE':
linenum += 1
category = matched.group(2)
if category in (None, '(*)'): # => "suppress all"
_error_suppressions.setdefault(None, set()).add(linenum)
else:
if category.startswith('(') and category.endswith(')'):
category = category[1:-1]
if category in _ERROR_CATEGORIES:
_error_suppressions.setdefault(category, set()).add(linenum)
else:
error(filename, linenum, 'readability/nolint', 5,
'Unknown NOLINT error category: %s' % category) | [
"def",
"ParseNolintSuppressions",
"(",
"filename",
",",
"raw_line",
",",
"linenum",
",",
"error",
")",
":",
"# FIXME(adonovan): \"NOLINT(\" is misparsed as NOLINT(*).",
"matched",
"=",
"_RE_SUPPRESSION",
".",
"search",
"(",
"raw_line",
")",
"if",
"matched",
":",
"if",... | https://github.com/kevinlin311tw/caffe-cvprw15/blob/45c2a1bf0368569c54e0be4edf8d34285cf79e70/scripts/cpp_lint.py#L464-L492 | ||
Kronuz/Xapiand | a71570859dcfc9f48090d845053f359b07f4f78c | contrib/python/xapiand-py/xapiand/exceptions.py | python | TransportError.info | (self) | return self.args[2] | Dict of returned error info from ES, where available, underlying
exception when not. | Dict of returned error info from ES, where available, underlying
exception when not. | [
"Dict",
"of",
"returned",
"error",
"info",
"from",
"ES",
"where",
"available",
"underlying",
"exception",
"when",
"not",
"."
] | def info(self):
"""
Dict of returned error info from ES, where available, underlying
exception when not.
"""
return self.args[2] | [
"def",
"info",
"(",
"self",
")",
":",
"return",
"self",
".",
"args",
"[",
"2",
"]"
] | https://github.com/Kronuz/Xapiand/blob/a71570859dcfc9f48090d845053f359b07f4f78c/contrib/python/xapiand-py/xapiand/exceptions.py#L66-L71 | |
Ewenwan/MVision | 97b394dfa48cb21c82cd003b1a952745e413a17f | deepLearning/07_gbrbm.py | python | GBRBM.propdown | (self, h) | return tf.matmul(h, tf.transpose(self.W)) + self.vbias | Compute the mean for visible units given hidden units | Compute the mean for visible units given hidden units | [
"Compute",
"the",
"mean",
"for",
"visible",
"units",
"given",
"hidden",
"units"
] | def propdown(self, h):
"""Compute the mean for visible units given hidden units"""
return tf.matmul(h, tf.transpose(self.W)) + self.vbias | [
"def",
"propdown",
"(",
"self",
",",
"h",
")",
":",
"return",
"tf",
".",
"matmul",
"(",
"h",
",",
"tf",
".",
"transpose",
"(",
"self",
".",
"W",
")",
")",
"+",
"self",
".",
"vbias"
] | https://github.com/Ewenwan/MVision/blob/97b394dfa48cb21c82cd003b1a952745e413a17f/deepLearning/07_gbrbm.py#L41-L43 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/_gdi.py | python | DC.CalcBoundingBox | (*args, **kwargs) | return _gdi_.DC_CalcBoundingBox(*args, **kwargs) | CalcBoundingBox(self, int x, int y)
Adds the specified point to the bounding box which can be retrieved
with `MinX`, `MaxX` and `MinY`, `MaxY` or `GetBoundingBox` functions. | CalcBoundingBox(self, int x, int y) | [
"CalcBoundingBox",
"(",
"self",
"int",
"x",
"int",
"y",
")"
] | def CalcBoundingBox(*args, **kwargs):
"""
CalcBoundingBox(self, int x, int y)
Adds the specified point to the bounding box which can be retrieved
with `MinX`, `MaxX` and `MinY`, `MaxY` or `GetBoundingBox` functions.
"""
return _gdi_.DC_CalcBoundingBox(*args, **kwargs) | [
"def",
"CalcBoundingBox",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_gdi_",
".",
"DC_CalcBoundingBox",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_gdi.py#L4544-L4551 | |
hughperkins/tf-coriander | 970d3df6c11400ad68405f22b0c42a52374e94ca | tensorflow/python/ops/math_grad.py | python | _MaximumGrad | (op, grad) | return _MaximumMinimumGrad(op, grad, math_ops.greater_equal) | Returns grad*(x > y, x <= y) with type of grad. | Returns grad*(x > y, x <= y) with type of grad. | [
"Returns",
"grad",
"*",
"(",
"x",
">",
"y",
"x",
"<",
"=",
"y",
")",
"with",
"type",
"of",
"grad",
"."
] | def _MaximumGrad(op, grad):
"""Returns grad*(x > y, x <= y) with type of grad."""
return _MaximumMinimumGrad(op, grad, math_ops.greater_equal) | [
"def",
"_MaximumGrad",
"(",
"op",
",",
"grad",
")",
":",
"return",
"_MaximumMinimumGrad",
"(",
"op",
",",
"grad",
",",
"math_ops",
".",
"greater_equal",
")"
] | https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/python/ops/math_grad.py#L646-L648 | |
Polidea/SiriusObfuscator | b0e590d8130e97856afe578869b83a209e2b19be | SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py | python | SBDebugger.CreateCategory | (self, *args) | return _lldb.SBDebugger_CreateCategory(self, *args) | CreateCategory(self, str category_name) -> SBTypeCategory | CreateCategory(self, str category_name) -> SBTypeCategory | [
"CreateCategory",
"(",
"self",
"str",
"category_name",
")",
"-",
">",
"SBTypeCategory"
] | def CreateCategory(self, *args):
"""CreateCategory(self, str category_name) -> SBTypeCategory"""
return _lldb.SBDebugger_CreateCategory(self, *args) | [
"def",
"CreateCategory",
"(",
"self",
",",
"*",
"args",
")",
":",
"return",
"_lldb",
".",
"SBDebugger_CreateCategory",
"(",
"self",
",",
"*",
"args",
")"
] | https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py#L3487-L3489 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/lib/floatcanvas/Utilities/BBox.py | python | InfBBox | () | return N.ndarray.__new__(BBox, shape=arr.shape, dtype=arr.dtype, buffer=arr) | Returns a BBox object with all -inf and inf entries | Returns a BBox object with all -inf and inf entries | [
"Returns",
"a",
"BBox",
"object",
"with",
"all",
"-",
"inf",
"and",
"inf",
"entries"
] | def InfBBox():
"""
Returns a BBox object with all -inf and inf entries
"""
arr = N.array(((-N.inf, -N.inf),(N.inf, N.inf)), N.float)
return N.ndarray.__new__(BBox, shape=arr.shape, dtype=arr.dtype, buffer=arr) | [
"def",
"InfBBox",
"(",
")",
":",
"arr",
"=",
"N",
".",
"array",
"(",
"(",
"(",
"-",
"N",
".",
"inf",
",",
"-",
"N",
".",
"inf",
")",
",",
"(",
"N",
".",
"inf",
",",
"N",
".",
"inf",
")",
")",
",",
"N",
".",
"float",
")",
"return",
"N",
... | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/floatcanvas/Utilities/BBox.py#L249-L256 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/tkinter/__init__.py | python | Misc.winfo_pathname | (self, id, displayof=0) | return self.tk.call(args) | Return the pathname of the widget given by ID. | Return the pathname of the widget given by ID. | [
"Return",
"the",
"pathname",
"of",
"the",
"widget",
"given",
"by",
"ID",
"."
] | def winfo_pathname(self, id, displayof=0):
"""Return the pathname of the widget given by ID."""
args = ('winfo', 'pathname') \
+ self._displayof(displayof) + (id,)
return self.tk.call(args) | [
"def",
"winfo_pathname",
"(",
"self",
",",
"id",
",",
"displayof",
"=",
"0",
")",
":",
"args",
"=",
"(",
"'winfo'",
",",
"'pathname'",
")",
"+",
"self",
".",
"_displayof",
"(",
"displayof",
")",
"+",
"(",
"id",
",",
")",
"return",
"self",
".",
"tk"... | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/tkinter/__init__.py#L1021-L1025 | |
bundy-dns/bundy | 3d41934996b82b0cd2fe22dd74d2abc1daba835d | src/lib/python/bundy/ddns/session.py | python | UpdateSession.__prereq_name_not_in_use | (self, rrset) | return not self.__prereq_name_in_use(rrset) | Check whether the name of the given RRset is not in use (i.e. does
not exist at all, or is an empty nonterminal.
RFC2136 Section 2.4.5.
Returns True if the prerequisite is satisfied, False otherwise. | Check whether the name of the given RRset is not in use (i.e. does
not exist at all, or is an empty nonterminal.
RFC2136 Section 2.4.5.
Returns True if the prerequisite is satisfied, False otherwise. | [
"Check",
"whether",
"the",
"name",
"of",
"the",
"given",
"RRset",
"is",
"not",
"in",
"use",
"(",
"i",
".",
"e",
".",
"does",
"not",
"exist",
"at",
"all",
"or",
"is",
"an",
"empty",
"nonterminal",
".",
"RFC2136",
"Section",
"2",
".",
"4",
".",
"5",
... | def __prereq_name_not_in_use(self, rrset):
'''Check whether the name of the given RRset is not in use (i.e. does
not exist at all, or is an empty nonterminal.
RFC2136 Section 2.4.5.
Returns True if the prerequisite is satisfied, False otherwise.
'''
return not self.__prereq_name_in_use(rrset) | [
"def",
"__prereq_name_not_in_use",
"(",
"self",
",",
"rrset",
")",
":",
"return",
"not",
"self",
".",
"__prereq_name_in_use",
"(",
"rrset",
")"
] | https://github.com/bundy-dns/bundy/blob/3d41934996b82b0cd2fe22dd74d2abc1daba835d/src/lib/python/bundy/ddns/session.py#L448-L454 | |
esa/pagmo | 80281d549c8f1b470e1489a5d37c8f06b2e429c0 | PyGMO/algorithm/__init__.py | python | _cstrs_immune_system_ctor | (
self,
algorithm=None,
algorithm_immune=None,
gen=1,
select_method=cstrs_immune_system.select_method.BEST_ANTIBODY,
inject_method=cstrs_immune_system.inject_method.CHAMPION,
distance_method=cstrs_immune_system.distance_method.EUCLIDEAN,
phi=0.5,
gamma=0.5,
sigma=1. / 3.,
f_tol=1e-15,
x_tol=1e-15) | Constructs an immune system algorithm for constrained optimization.
USAGE: algorithm._cstrs_immune_system(algorithm = _algorithm.jde(), algorithm_immune = _algorithm.jde(), gen = 1, select_method = cstrs_immune_system.select_method.BEST_ANTIBODY, inject_method = cstrs_immune_system.inject_method.CHAMPION, distance_method = cstrs_immune_system.distance_method.EUCLIDEAN, phi = 0.5, gamma = 0.5, sigma = 1./3., ftol = 1e-15, xtol = 1e-15):
* algorithm: optimizer to use as 'original' optimization method. Its number of generations should be set to 1.
* algorithm_2: optimizer to use as 'original' optimization method for the evolution of the immune system.
* gen: number of generations.
* select_method: cstrs_immune_system.select_method.BEST_ANTIBODY by default, the method used for selecting the antibodies.
* inject_method: cstrs_immune_system.inject_method.CHAMPION by default, the method used for reinjecting the antibodies.
* distance_method: cstrs_immune_system.distance_method.EUCLIDEAN by default, the method used for computing the distance to the antigenes population.
* Two possibilities are available: CHAMPION, and BEST25.
* phi: 0.5 by default. The feasible fraction selection to compute the mean value.
* gamma: 0.5 by default. The number of antigens selected / number of total antigens.
* sigma: 1/3 by default. The number of antibodies / number of antigens.
* ftol: 1e-15 by default. The stopping criteria on the x tolerance.
* xtol: 1e-15 by default. The stopping criteria on the f tolerance. | Constructs an immune system algorithm for constrained optimization. | [
"Constructs",
"an",
"immune",
"system",
"algorithm",
"for",
"constrained",
"optimization",
"."
] | def _cstrs_immune_system_ctor(
self,
algorithm=None,
algorithm_immune=None,
gen=1,
select_method=cstrs_immune_system.select_method.BEST_ANTIBODY,
inject_method=cstrs_immune_system.inject_method.CHAMPION,
distance_method=cstrs_immune_system.distance_method.EUCLIDEAN,
phi=0.5,
gamma=0.5,
sigma=1. / 3.,
f_tol=1e-15,
x_tol=1e-15):
"""
Constructs an immune system algorithm for constrained optimization.
USAGE: algorithm._cstrs_immune_system(algorithm = _algorithm.jde(), algorithm_immune = _algorithm.jde(), gen = 1, select_method = cstrs_immune_system.select_method.BEST_ANTIBODY, inject_method = cstrs_immune_system.inject_method.CHAMPION, distance_method = cstrs_immune_system.distance_method.EUCLIDEAN, phi = 0.5, gamma = 0.5, sigma = 1./3., ftol = 1e-15, xtol = 1e-15):
* algorithm: optimizer to use as 'original' optimization method. Its number of generations should be set to 1.
* algorithm_2: optimizer to use as 'original' optimization method for the evolution of the immune system.
* gen: number of generations.
* select_method: cstrs_immune_system.select_method.BEST_ANTIBODY by default, the method used for selecting the antibodies.
* inject_method: cstrs_immune_system.inject_method.CHAMPION by default, the method used for reinjecting the antibodies.
* distance_method: cstrs_immune_system.distance_method.EUCLIDEAN by default, the method used for computing the distance to the antigenes population.
* Two possibilities are available: CHAMPION, and BEST25.
* phi: 0.5 by default. The feasible fraction selection to compute the mean value.
* gamma: 0.5 by default. The number of antigens selected / number of total antigens.
* sigma: 1/3 by default. The number of antibodies / number of antigens.
* ftol: 1e-15 by default. The stopping criteria on the x tolerance.
* xtol: 1e-15 by default. The stopping criteria on the f tolerance.
"""
arg_list = []
if algorithm is None:
algorithm = _algorithm.jde()
if algorithm_immune is None:
algorithm_immune = _algorithm.jde()
arg_list.append(algorithm)
arg_list.append(algorithm_immune)
arg_list.append(gen)
arg_list.append(select_method)
arg_list.append(inject_method)
arg_list.append(distance_method)
arg_list.append(phi)
arg_list.append(gamma)
arg_list.append(sigma)
arg_list.append(f_tol)
arg_list.append(x_tol)
self._orig_init(*arg_list) | [
"def",
"_cstrs_immune_system_ctor",
"(",
"self",
",",
"algorithm",
"=",
"None",
",",
"algorithm_immune",
"=",
"None",
",",
"gen",
"=",
"1",
",",
"select_method",
"=",
"cstrs_immune_system",
".",
"select_method",
".",
"BEST_ANTIBODY",
",",
"inject_method",
"=",
"... | https://github.com/esa/pagmo/blob/80281d549c8f1b470e1489a5d37c8f06b2e429c0/PyGMO/algorithm/__init__.py#L1048-L1096 | ||
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/contrib/nccl/python/ops/nccl_ops.py | python | all_prod | (tensors) | return _apply_all_reduce('prod', tensors) | Returns a list of tensors with the all-reduce product across `tensors`.
The computation is done with an all-reduce operation, so if only some of the
returned tensors are evaluated then the computation will hang.
Args:
tensors: The input tensors across which to multiply; must be assigned
to GPU devices.
Returns:
List of tensors, each with the product of the input tensors, where tensor i
has the same device as `tensors[i]`. | Returns a list of tensors with the all-reduce product across `tensors`. | [
"Returns",
"a",
"list",
"of",
"tensors",
"with",
"the",
"all",
"-",
"reduce",
"product",
"across",
"tensors",
"."
] | def all_prod(tensors):
"""Returns a list of tensors with the all-reduce product across `tensors`.
The computation is done with an all-reduce operation, so if only some of the
returned tensors are evaluated then the computation will hang.
Args:
tensors: The input tensors across which to multiply; must be assigned
to GPU devices.
Returns:
List of tensors, each with the product of the input tensors, where tensor i
has the same device as `tensors[i]`.
"""
return _apply_all_reduce('prod', tensors) | [
"def",
"all_prod",
"(",
"tensors",
")",
":",
"return",
"_apply_all_reduce",
"(",
"'prod'",
",",
"tensors",
")"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/nccl/python/ops/nccl_ops.py#L51-L65 | |
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/util/dispatch.py | python | _add_dispatch_for_binary_elementwise_api | (api, x_type, y_type,
elementwise_api_handler) | Registers a binary elementwise handler as a dispatcher for a given API. | Registers a binary elementwise handler as a dispatcher for a given API. | [
"Registers",
"a",
"binary",
"elementwise",
"handler",
"as",
"a",
"dispatcher",
"for",
"a",
"given",
"API",
"."
] | def _add_dispatch_for_binary_elementwise_api(api, x_type, y_type,
elementwise_api_handler):
"""Registers a binary elementwise handler as a dispatcher for a given API."""
api_signature = tf_inspect.signature(api)
x_name, y_name = list(api_signature.parameters)[:2]
name_index = _find_name_index(api_signature)
need_to_bind_api_args = (len(api_signature.parameters) > 3 or
"name" not in api_signature.parameters)
@dispatch_for_api(api, {x_name: x_type, y_name: y_type})
def dispatch_target(*args, **kwargs):
args, kwargs, name = _extract_name_arg(args, kwargs, name_index)
if len(args) > 1:
x, y, args = args[0], args[1], args[2:]
elif args:
x, args = args[0], args[1:]
y = kwargs.pop(y_name, None)
else:
x = kwargs.pop(x_name, None)
y = kwargs.pop(y_name, None)
if need_to_bind_api_args:
tensor_api = lambda v1, v2: api(v1, v2, *args, **kwargs)
else:
tensor_api = api
if name is None:
return elementwise_api_handler(tensor_api, x, y)
else:
with ops.name_scope(name, None, [x, y]):
return elementwise_api_handler(tensor_api, x, y)
dispatch_target.__name__ = "elementwise_dispatch_target_for_" + api.__name__
dispatch_target.__qualname__ = dispatch_target.__name__
# Keep track of what targets we've registered (so we can unregister them).
target_list = _ELEMENTWISE_API_TARGETS.setdefault((x_type, y_type), [])
target_list.append((api, dispatch_target)) | [
"def",
"_add_dispatch_for_binary_elementwise_api",
"(",
"api",
",",
"x_type",
",",
"y_type",
",",
"elementwise_api_handler",
")",
":",
"api_signature",
"=",
"tf_inspect",
".",
"signature",
"(",
"api",
")",
"x_name",
",",
"y_name",
"=",
"list",
"(",
"api_signature"... | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/util/dispatch.py#L914-L951 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/AzCodeGenerator/bin/windows/az_code_gen/clang_cpp.py | python | is_simple_token | (string_data, delimiters) | return valid | Returns true if the given string doesn't contain any delimiters or
parenthesis that aren't contained inside a quotation. This is the
terminal case when parsing a JSON string.
@param string_data - The string we're going to check
@param delimiters - A string containing all the delimiters that could
separate valid tokens.
@return True if string_data is a simple token, False otherwise | Returns true if the given string doesn't contain any delimiters or
parenthesis that aren't contained inside a quotation. This is the
terminal case when parsing a JSON string. | [
"Returns",
"true",
"if",
"the",
"given",
"string",
"doesn",
"t",
"contain",
"any",
"delimiters",
"or",
"parenthesis",
"that",
"aren",
"t",
"contained",
"inside",
"a",
"quotation",
".",
"This",
"is",
"the",
"terminal",
"case",
"when",
"parsing",
"a",
"JSON",
... | def is_simple_token(string_data, delimiters):
"""Returns true if the given string doesn't contain any delimiters or
parenthesis that aren't contained inside a quotation. This is the
terminal case when parsing a JSON string.
@param string_data - The string we're going to check
@param delimiters - A string containing all the delimiters that could
separate valid tokens.
@return True if string_data is a simple token, False otherwise
"""
valid = True
index = 0
string_checker = string_detector(string_data)
for c in string_data:
in_string = next(string_checker)
# Handle Parenthesis
if not in_string and c == '(':
# Any opening parenthesis outside of strings invalid tokens
valid = False
# Find Separators
if not in_string:
if delimiters.find(c) != -1:
# Major Delimiters Invalidate Tokens
valid = False
# increment our index
index += 1
if not valid:
break
return valid | [
"def",
"is_simple_token",
"(",
"string_data",
",",
"delimiters",
")",
":",
"valid",
"=",
"True",
"index",
"=",
"0",
"string_checker",
"=",
"string_detector",
"(",
"string_data",
")",
"for",
"c",
"in",
"string_data",
":",
"in_string",
"=",
"next",
"(",
"strin... | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AzCodeGenerator/bin/windows/az_code_gen/clang_cpp.py#L373-L407 | |
weolar/miniblink49 | 1c4678db0594a4abde23d3ebbcc7cd13c3170777 | third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/BeautifulSoup.py | python | BeautifulStoneSoup.isSelfClosingTag | (self, name) | return self.SELF_CLOSING_TAGS.has_key(name) \
or self.instanceSelfClosingTags.has_key(name) | Returns true iff the given string is the name of a
self-closing tag according to this parser. | Returns true iff the given string is the name of a
self-closing tag according to this parser. | [
"Returns",
"true",
"iff",
"the",
"given",
"string",
"is",
"the",
"name",
"of",
"a",
"self",
"-",
"closing",
"tag",
"according",
"to",
"this",
"parser",
"."
] | def isSelfClosingTag(self, name):
"""Returns true iff the given string is the name of a
self-closing tag according to this parser."""
return self.SELF_CLOSING_TAGS.has_key(name) \
or self.instanceSelfClosingTags.has_key(name) | [
"def",
"isSelfClosingTag",
"(",
"self",
",",
"name",
")",
":",
"return",
"self",
".",
"SELF_CLOSING_TAGS",
".",
"has_key",
"(",
"name",
")",
"or",
"self",
".",
"instanceSelfClosingTags",
".",
"has_key",
"(",
"name",
")"
] | https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/BeautifulSoup.py#L1205-L1209 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/targets/linalg.py | python | _compute_singular_values_impl | (a) | return sv_function | Returns a function to compute singular values of `a` | Returns a function to compute singular values of `a` | [
"Returns",
"a",
"function",
"to",
"compute",
"singular",
"values",
"of",
"a"
] | def _compute_singular_values_impl(a):
"""
Returns a function to compute singular values of `a`
"""
numba_ez_gesdd = _LAPACK().numba_ez_gesdd(a.dtype)
kind = ord(get_blas_kind(a.dtype, "svd"))
# Flag for "only compute `S`" to give to xgesdd
JOBZ_N = ord('N')
nb_ret_type = getattr(a.dtype, "underlying_float", a.dtype)
np_ret_type = np_support.as_dtype(nb_ret_type)
np_dtype = np_support.as_dtype(a.dtype)
# These are not referenced in the computation but must be set
# for MKL.
u = np.empty((1, 1), dtype=np_dtype)
vt = np.empty((1, 1), dtype=np_dtype)
F_layout = a.layout == 'F'
def sv_function(a):
"""
Computes singular values.
"""
# Don't use the np.linalg.svd impl instead
# call LAPACK to shortcut doing the "reconstruct
# singular vectors from reflectors" step and just
# get back the singular values.
n = a.shape[-1]
m = a.shape[-2]
if m == 0 or n == 0:
raise np.linalg.LinAlgError('Arrays cannot be empty')
_check_finite_matrix(a)
ldu = m
minmn = min(m, n)
# need to be >=1 but aren't referenced
ucol = 1
ldvt = 1
if F_layout:
acpy = np.copy(a)
else:
acpy = np.asfortranarray(a)
# u and vt are not referenced however need to be
# allocated (as done above) for MKL as it
# checks for ref is nullptr.
s = np.empty(minmn, dtype=np_ret_type)
r = numba_ez_gesdd(
kind, # kind
JOBZ_N, # jobz
m, # m
n, # n
acpy.ctypes, # a
m, # lda
s.ctypes, # s
u.ctypes, # u
ldu, # ldu
vt.ctypes, # vt
ldvt # ldvt
)
_handle_err_maybe_convergence_problem(r)
# help liveness analysis
_dummy_liveness_func([acpy.size, vt.size, u.size, s.size])
return s
return sv_function | [
"def",
"_compute_singular_values_impl",
"(",
"a",
")",
":",
"numba_ez_gesdd",
"=",
"_LAPACK",
"(",
")",
".",
"numba_ez_gesdd",
"(",
"a",
".",
"dtype",
")",
"kind",
"=",
"ord",
"(",
"get_blas_kind",
"(",
"a",
".",
"dtype",
",",
"\"svd\"",
")",
")",
"# Fla... | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/targets/linalg.py#L2048-L2120 | |
Komnomnomnom/swigibpy | cfd307fdbfaffabc69a2dc037538d7e34a8b8daf | swigibpy.py | python | EPosixClientSocket.__init__ | (self, ewrapper, poll_auto=True, reconnect_auto=False) | Create an EPosixClientSocket to comunicate with Interactive Brokers.
Parameters
----------
ewrapper : EWrapper subclass to which responses will be dispatched.
poll_auto : boolean, if True automatically poll for messages with a
background thread. Default True
reconnect_auto : boolean, if True automatically reconnect to TWS if
the connection is lost. Default False | Create an EPosixClientSocket to comunicate with Interactive Brokers. | [
"Create",
"an",
"EPosixClientSocket",
"to",
"comunicate",
"with",
"Interactive",
"Brokers",
"."
] | def __init__(self, ewrapper, poll_auto=True, reconnect_auto=False):
'''Create an EPosixClientSocket to comunicate with Interactive Brokers.
Parameters
----------
ewrapper : EWrapper subclass to which responses will be dispatched.
poll_auto : boolean, if True automatically poll for messages with a
background thread. Default True
reconnect_auto : boolean, if True automatically reconnect to TWS if
the connection is lost. Default False
'''
_swigibpy.EPosixClientSocket_swiginit(self, _swigibpy.new_EPosixClientSocket(ewrapper))
# store a reference to EWrapper on the Python side (C++ member is protected so inaccessible from Python).
self._ewrapper = ewrapper
self._connect_lock = threading.Lock()
self.poller = None
self._poll_auto = poll_auto
self.reconnect_auto = reconnect_auto
self._connect_args = None | [
"def",
"__init__",
"(",
"self",
",",
"ewrapper",
",",
"poll_auto",
"=",
"True",
",",
"reconnect_auto",
"=",
"False",
")",
":",
"_swigibpy",
".",
"EPosixClientSocket_swiginit",
"(",
"self",
",",
"_swigibpy",
".",
"new_EPosixClientSocket",
"(",
"ewrapper",
")",
... | https://github.com/Komnomnomnom/swigibpy/blob/cfd307fdbfaffabc69a2dc037538d7e34a8b8daf/swigibpy.py#L2113-L2133 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/site-packages/urllib3/connectionpool.py | python | HTTPSConnectionPool._validate_conn | (self, conn) | Called right before a request is made, after the socket is created. | Called right before a request is made, after the socket is created. | [
"Called",
"right",
"before",
"a",
"request",
"is",
"made",
"after",
"the",
"socket",
"is",
"created",
"."
] | def _validate_conn(self, conn):
"""
Called right before a request is made, after the socket is created.
"""
super(HTTPSConnectionPool, self)._validate_conn(conn)
# Force connect early to allow us to validate the connection.
if not getattr(conn, "sock", None): # AppEngine might not have `.sock`
conn.connect()
if not conn.is_verified:
warnings.warn(
(
"Unverified HTTPS request is being made to host '%s'. "
"Adding certificate verification is strongly advised. See: "
"https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
"#ssl-warnings" % conn.host
),
InsecureRequestWarning,
) | [
"def",
"_validate_conn",
"(",
"self",
",",
"conn",
")",
":",
"super",
"(",
"HTTPSConnectionPool",
",",
"self",
")",
".",
"_validate_conn",
"(",
"conn",
")",
"# Force connect early to allow us to validate the connection.",
"if",
"not",
"getattr",
"(",
"conn",
",",
... | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/urllib3/connectionpool.py#L986-L1005 | ||
miyosuda/TensorFlowAndroidDemo | 35903e0221aa5f109ea2dbef27f20b52e317f42d | jni-build/jni/include/tensorflow/python/platform/gfile.py | python | _GFileBase.read | (self, n=-1) | return self._fp.read(n) | Read n bytes from the underlying file handle.
Args:
n: Number of bytes to read (if negative, read to end of file handle.)
Returns:
A string of the bytes read, up to the end of file. | Read n bytes from the underlying file handle. | [
"Read",
"n",
"bytes",
"from",
"the",
"underlying",
"file",
"handle",
"."
] | def read(self, n=-1):
"""Read n bytes from the underlying file handle.
Args:
n: Number of bytes to read (if negative, read to end of file handle.)
Returns:
A string of the bytes read, up to the end of file.
"""
return self._fp.read(n) | [
"def",
"read",
"(",
"self",
",",
"n",
"=",
"-",
"1",
")",
":",
"return",
"self",
".",
"_fp",
".",
"read",
"(",
"n",
")"
] | https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/python/platform/gfile.py#L190-L199 | |
lmb-freiburg/ogn | 974f72ef4bf840d6f6693d22d1843a79223e77ce | scripts/cpp_lint.py | python | CheckAltTokens | (filename, clean_lines, linenum, error) | Check alternative keywords being used in boolean expressions.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found. | Check alternative keywords being used in boolean expressions. | [
"Check",
"alternative",
"keywords",
"being",
"used",
"in",
"boolean",
"expressions",
"."
] | def CheckAltTokens(filename, clean_lines, linenum, error):
"""Check alternative keywords being used in boolean expressions.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
line = clean_lines.elided[linenum]
# Avoid preprocessor lines
if Match(r'^\s*#', line):
return
# Last ditch effort to avoid multi-line comments. This will not help
# if the comment started before the current line or ended after the
# current line, but it catches most of the false positives. At least,
# it provides a way to workaround this warning for people who use
# multi-line comments in preprocessor macros.
#
# TODO(unknown): remove this once cpplint has better support for
# multi-line comments.
if line.find('/*') >= 0 or line.find('*/') >= 0:
return
for match in _ALT_TOKEN_REPLACEMENT_PATTERN.finditer(line):
error(filename, linenum, 'readability/alt_tokens', 2,
'Use operator %s instead of %s' % (
_ALT_TOKEN_REPLACEMENT[match.group(1)], match.group(1))) | [
"def",
"CheckAltTokens",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
":",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"# Avoid preprocessor lines",
"if",
"Match",
"(",
"r'^\\s*#'",
",",
"line",
")",
":",
"retur... | https://github.com/lmb-freiburg/ogn/blob/974f72ef4bf840d6f6693d22d1843a79223e77ce/scripts/cpp_lint.py#L3405-L3434 | ||
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/decimal.py | python | Context.is_subnormal | (self, a) | return a.is_subnormal(context=self) | Return True if the operand is subnormal; otherwise return False.
>>> c = ExtendedContext.copy()
>>> c.Emin = -999
>>> c.Emax = 999
>>> c.is_subnormal(Decimal('2.50'))
False
>>> c.is_subnormal(Decimal('0.1E-999'))
True
>>> c.is_subnormal(Decimal('0.00'))
False
>>> c.is_subnormal(Decimal('-Inf'))
False
>>> c.is_subnormal(Decimal('NaN'))
False
>>> c.is_subnormal(1)
False | Return True if the operand is subnormal; otherwise return False. | [
"Return",
"True",
"if",
"the",
"operand",
"is",
"subnormal",
";",
"otherwise",
"return",
"False",
"."
] | def is_subnormal(self, a):
"""Return True if the operand is subnormal; otherwise return False.
>>> c = ExtendedContext.copy()
>>> c.Emin = -999
>>> c.Emax = 999
>>> c.is_subnormal(Decimal('2.50'))
False
>>> c.is_subnormal(Decimal('0.1E-999'))
True
>>> c.is_subnormal(Decimal('0.00'))
False
>>> c.is_subnormal(Decimal('-Inf'))
False
>>> c.is_subnormal(Decimal('NaN'))
False
>>> c.is_subnormal(1)
False
"""
a = _convert_other(a, raiseit=True)
return a.is_subnormal(context=self) | [
"def",
"is_subnormal",
"(",
"self",
",",
"a",
")",
":",
"a",
"=",
"_convert_other",
"(",
"a",
",",
"raiseit",
"=",
"True",
")",
"return",
"a",
".",
"is_subnormal",
"(",
"context",
"=",
"self",
")"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/decimal.py#L4447-L4467 | |
apple/swift-lldb | d74be846ef3e62de946df343e8c234bde93a8912 | scripts/Python/static-binding/lldb.py | python | SBBreakpoint.GetDescription | (self, *args) | return _lldb.SBBreakpoint_GetDescription(self, *args) | GetDescription(SBBreakpoint self, SBStream description) -> bool
GetDescription(SBBreakpoint self, SBStream description, bool include_locations) -> bool | GetDescription(SBBreakpoint self, SBStream description) -> bool
GetDescription(SBBreakpoint self, SBStream description, bool include_locations) -> bool | [
"GetDescription",
"(",
"SBBreakpoint",
"self",
"SBStream",
"description",
")",
"-",
">",
"bool",
"GetDescription",
"(",
"SBBreakpoint",
"self",
"SBStream",
"description",
"bool",
"include_locations",
")",
"-",
">",
"bool"
] | def GetDescription(self, *args):
"""
GetDescription(SBBreakpoint self, SBStream description) -> bool
GetDescription(SBBreakpoint self, SBStream description, bool include_locations) -> bool
"""
return _lldb.SBBreakpoint_GetDescription(self, *args) | [
"def",
"GetDescription",
"(",
"self",
",",
"*",
"args",
")",
":",
"return",
"_lldb",
".",
"SBBreakpoint_GetDescription",
"(",
"self",
",",
"*",
"args",
")"
] | https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L1743-L1748 | |
mindspore-ai/mindspore | fb8fd3338605bb34fa5cea054e535a8b1d753fab | mindspore/python/mindspore/profiler/common/util.py | python | calculate_percent | (partial, total) | return f'{percent}%' | Calculate percent value. | Calculate percent value. | [
"Calculate",
"percent",
"value",
"."
] | def calculate_percent(partial, total):
"""Calculate percent value."""
if total:
percent = round(partial / total * 100, 2)
else:
percent = 0
return f'{percent}%' | [
"def",
"calculate_percent",
"(",
"partial",
",",
"total",
")",
":",
"if",
"total",
":",
"percent",
"=",
"round",
"(",
"partial",
"/",
"total",
"*",
"100",
",",
"2",
")",
"else",
":",
"percent",
"=",
"0",
"return",
"f'{percent}%'"
] | https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/profiler/common/util.py#L269-L275 | |
ChromiumWebApps/chromium | c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7 | tools/deep_memory_profiler/lib/symbol.py | python | SymbolDataSources.path | (self) | return self._prepared_symbol_data_sources_path | Returns the path of the prepared symbol data sources if possible. | Returns the path of the prepared symbol data sources if possible. | [
"Returns",
"the",
"path",
"of",
"the",
"prepared",
"symbol",
"data",
"sources",
"if",
"possible",
"."
] | def path(self):
"""Returns the path of the prepared symbol data sources if possible."""
if not self._prepared_symbol_data_sources_path and not self.prepare():
return None
return self._prepared_symbol_data_sources_path | [
"def",
"path",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_prepared_symbol_data_sources_path",
"and",
"not",
"self",
".",
"prepare",
"(",
")",
":",
"return",
"None",
"return",
"self",
".",
"_prepared_symbol_data_sources_path"
] | https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/deep_memory_profiler/lib/symbol.py#L94-L98 | |
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | scripts/pythonTSV.py | python | loadFromTSV | (TSV, key, value) | Will return the stored data from a TSV
with from the line called key and with
a data type of value | Will return the stored data from a TSV
with from the line called key and with
a data type of value | [
"Will",
"return",
"the",
"stored",
"data",
"from",
"a",
"TSV",
"with",
"from",
"the",
"line",
"called",
"key",
"and",
"with",
"a",
"data",
"type",
"of",
"value"
] | def loadFromTSV(TSV, key, value):
"""
Will return the stored data from a TSV
with from the line called key and with
a data type of value
"""
safeKey = makeLineNameSafe(key)
TSV.selectLine(safeKey)
if isinstance(value, int):
return TSV.readInt()
elif isinstance(value, float):
return TSV.readDouble()
elif isinstance(value, bool):
return TSV.readBool()
elif isinstance(value, str):
return TSV.readString()
else:
raise TypeError("Value is not recognised by TSVSerialiser") | [
"def",
"loadFromTSV",
"(",
"TSV",
",",
"key",
",",
"value",
")",
":",
"safeKey",
"=",
"makeLineNameSafe",
"(",
"key",
")",
"TSV",
".",
"selectLine",
"(",
"safeKey",
")",
"if",
"isinstance",
"(",
"value",
",",
"int",
")",
":",
"return",
"TSV",
".",
"r... | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/scripts/pythonTSV.py#L30-L47 | ||
H-uru/Plasma | c2140ea046e82e9c199e257a7f2e7edb42602871 | Scripts/Python/tldnVaporScope.py | python | tldnVaporScope.__del__ | (self) | unload the dialog that we loaded | unload the dialog that we loaded | [
"unload",
"the",
"dialog",
"that",
"we",
"loaded"
] | def __del__(self):
"unload the dialog that we loaded"
if Vignette.value:
PtUnloadDialog(Vignette.value) | [
"def",
"__del__",
"(",
"self",
")",
":",
"if",
"Vignette",
".",
"value",
":",
"PtUnloadDialog",
"(",
"Vignette",
".",
"value",
")"
] | https://github.com/H-uru/Plasma/blob/c2140ea046e82e9c199e257a7f2e7edb42602871/Scripts/Python/tldnVaporScope.py#L164-L167 | ||
facebook/bistro | db9eff7e92f5cedcc917a440d5c88064c7980e40 | build/fbcode_builder/getdeps/cargo.py | python | CargoBuilder._resolve_crate_to_path | (crate, git_conf) | Tries to find <crate> in git_conf["inst_dir"] by searching a [package]
keyword followed by name = "<crate>". | Tries to find <crate> in git_conf["inst_dir"] by searching a [package]
keyword followed by name = "<crate>". | [
"Tries",
"to",
"find",
"<crate",
">",
"in",
"git_conf",
"[",
"inst_dir",
"]",
"by",
"searching",
"a",
"[",
"package",
"]",
"keyword",
"followed",
"by",
"name",
"=",
"<crate",
">",
"."
] | def _resolve_crate_to_path(crate, git_conf):
"""
Tries to find <crate> in git_conf["inst_dir"] by searching a [package]
keyword followed by name = "<crate>".
"""
source_dir = git_conf["source_dir"]
search_pattern = '[package]\nname = "{}"'.format(crate)
for root, _, files in os.walk(source_dir):
for fname in files:
if fname == "Cargo.toml":
with open(os.path.join(root, fname), "r") as f:
if search_pattern in f.read():
return root
raise Exception("Failed to found crate {} in path {}".format(crate, source_dir)) | [
"def",
"_resolve_crate_to_path",
"(",
"crate",
",",
"git_conf",
")",
":",
"source_dir",
"=",
"git_conf",
"[",
"\"source_dir\"",
"]",
"search_pattern",
"=",
"'[package]\\nname = \"{}\"'",
".",
"format",
"(",
"crate",
")",
"for",
"root",
",",
"_",
",",
"files",
... | https://github.com/facebook/bistro/blob/db9eff7e92f5cedcc917a440d5c88064c7980e40/build/fbcode_builder/getdeps/cargo.py#L300-L315 | ||
glotzerlab/hoomd-blue | f7f97abfa3fcc2522fa8d458d65d0aeca7ba781a | hoomd/state.py | python | State.types | (self) | return dict(particle_types=self.particle_types,
bond_types=self.bond_types,
angle_types=self.angle_types,
dihedral_types=self.dihedral_types,
improper_types=self.improper_types,
special_pair_types=self.special_pair_types) | dict[str, list[str]]: dictionary of all types in the state.
Combines the data from `State.particle_types`, `State.bond_types`,
`State.angle_types`, `State.dihedral_types`, `State.improper_types`, and
`State.special_pair_types` into a dictionary with keys matching the
property names. | dict[str, list[str]]: dictionary of all types in the state. | [
"dict",
"[",
"str",
"list",
"[",
"str",
"]]",
":",
"dictionary",
"of",
"all",
"types",
"in",
"the",
"state",
"."
] | def types(self):
"""dict[str, list[str]]: dictionary of all types in the state.
Combines the data from `State.particle_types`, `State.bond_types`,
`State.angle_types`, `State.dihedral_types`, `State.improper_types`, and
`State.special_pair_types` into a dictionary with keys matching the
property names.
"""
return dict(particle_types=self.particle_types,
bond_types=self.bond_types,
angle_types=self.angle_types,
dihedral_types=self.dihedral_types,
improper_types=self.improper_types,
special_pair_types=self.special_pair_types) | [
"def",
"types",
"(",
"self",
")",
":",
"return",
"dict",
"(",
"particle_types",
"=",
"self",
".",
"particle_types",
",",
"bond_types",
"=",
"self",
".",
"bond_types",
",",
"angle_types",
"=",
"self",
".",
"angle_types",
",",
"dihedral_types",
"=",
"self",
... | https://github.com/glotzerlab/hoomd-blue/blob/f7f97abfa3fcc2522fa8d458d65d0aeca7ba781a/hoomd/state.py#L357-L370 | |
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/idlelib/PyShell.py | python | ModifiedInterpreter.open_remote_stack_viewer | (self) | return | Initiate the remote stack viewer from a separate thread.
This method is called from the subprocess, and by returning from this
method we allow the subprocess to unblock. After a bit the shell
requests the subprocess to open the remote stack viewer which returns a
static object looking at the last exception. It is queried through
the RPC mechanism. | Initiate the remote stack viewer from a separate thread. | [
"Initiate",
"the",
"remote",
"stack",
"viewer",
"from",
"a",
"separate",
"thread",
"."
] | def open_remote_stack_viewer(self):
"""Initiate the remote stack viewer from a separate thread.
This method is called from the subprocess, and by returning from this
method we allow the subprocess to unblock. After a bit the shell
requests the subprocess to open the remote stack viewer which returns a
static object looking at the last exception. It is queried through
the RPC mechanism.
"""
self.tkconsole.text.after(300, self.remote_stack_viewer)
return | [
"def",
"open_remote_stack_viewer",
"(",
"self",
")",
":",
"self",
".",
"tkconsole",
".",
"text",
".",
"after",
"(",
"300",
",",
"self",
".",
"remote_stack_viewer",
")",
"return"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/idlelib/PyShell.py#L583-L594 | |
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/x86/toolchain/lib/python2.7/random.py | python | Random.uniform | (self, a, b) | return a + (b-a) * self.random() | Get a random number in the range [a, b) or [a, b] depending on rounding. | Get a random number in the range [a, b) or [a, b] depending on rounding. | [
"Get",
"a",
"random",
"number",
"in",
"the",
"range",
"[",
"a",
"b",
")",
"or",
"[",
"a",
"b",
"]",
"depending",
"on",
"rounding",
"."
] | def uniform(self, a, b):
"Get a random number in the range [a, b) or [a, b] depending on rounding."
return a + (b-a) * self.random() | [
"def",
"uniform",
"(",
"self",
",",
"a",
",",
"b",
")",
":",
"return",
"a",
"+",
"(",
"b",
"-",
"a",
")",
"*",
"self",
".",
"random",
"(",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/random.py#L355-L357 | |
sdhash/sdhash | b9eff63e4e5867e910f41fd69032bbb1c94a2a5e | sdhash-ui/thrift/server/TNonblockingServer.py | python | TNonblockingServer.serve | (self) | Serve forever. | Serve forever. | [
"Serve",
"forever",
"."
] | def serve(self):
"""Serve forever."""
self.prepare()
while True:
self.handle() | [
"def",
"serve",
"(",
"self",
")",
":",
"self",
".",
"prepare",
"(",
")",
"while",
"True",
":",
"self",
".",
"handle",
"(",
")"
] | https://github.com/sdhash/sdhash/blob/b9eff63e4e5867e910f41fd69032bbb1c94a2a5e/sdhash-ui/thrift/server/TNonblockingServer.py#L306-L310 | ||
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | src/python/turicreate/data_structures/sgraph.py | python | SGraph.save | (self, filename, format="auto") | Save the SGraph to disk. If the graph is saved in binary format, the
graph can be re-loaded using the :py:func:`load_sgraph` method.
Alternatively, the SGraph can be saved in JSON format for a
human-readable and portable representation.
Parameters
----------
filename : string
Filename to use when saving the file. It can be either a local or
remote url.
format : {'auto', 'binary', 'json'}, optional
File format. If not specified, the format is detected automatically
based on the filename. Note that JSON format graphs cannot be
re-loaded with :py:func:`load_sgraph`.
See Also
--------
load_sgraph
Examples
--------
>>> g = turicreate.SGraph()
>>> g = g.add_vertices([turicreate.Vertex(i) for i in range(5)])
Save and load in binary format.
>>> g.save('mygraph')
>>> g2 = turicreate.load_sgraph('mygraph')
Save in JSON format.
>>> g.save('mygraph.json', format='json') | Save the SGraph to disk. If the graph is saved in binary format, the
graph can be re-loaded using the :py:func:`load_sgraph` method.
Alternatively, the SGraph can be saved in JSON format for a
human-readable and portable representation. | [
"Save",
"the",
"SGraph",
"to",
"disk",
".",
"If",
"the",
"graph",
"is",
"saved",
"in",
"binary",
"format",
"the",
"graph",
"can",
"be",
"re",
"-",
"loaded",
"using",
"the",
":",
"py",
":",
"func",
":",
"load_sgraph",
"method",
".",
"Alternatively",
"th... | def save(self, filename, format="auto"):
"""
Save the SGraph to disk. If the graph is saved in binary format, the
graph can be re-loaded using the :py:func:`load_sgraph` method.
Alternatively, the SGraph can be saved in JSON format for a
human-readable and portable representation.
Parameters
----------
filename : string
Filename to use when saving the file. It can be either a local or
remote url.
format : {'auto', 'binary', 'json'}, optional
File format. If not specified, the format is detected automatically
based on the filename. Note that JSON format graphs cannot be
re-loaded with :py:func:`load_sgraph`.
See Also
--------
load_sgraph
Examples
--------
>>> g = turicreate.SGraph()
>>> g = g.add_vertices([turicreate.Vertex(i) for i in range(5)])
Save and load in binary format.
>>> g.save('mygraph')
>>> g2 = turicreate.load_sgraph('mygraph')
Save in JSON format.
>>> g.save('mygraph.json', format='json')
"""
if format == "auto":
if filename.endswith((".json", ".json.gz")):
format = "json"
else:
format = "binary"
if format not in ["binary", "json", "csv"]:
raise ValueError(
"Invalid format: %s. Supported formats are: %s"
% (format, ["binary", "json", "csv"])
)
with cython_context():
self.__proxy__.save_graph(_make_internal_url(filename), format) | [
"def",
"save",
"(",
"self",
",",
"filename",
",",
"format",
"=",
"\"auto\"",
")",
":",
"if",
"format",
"==",
"\"auto\"",
":",
"if",
"filename",
".",
"endswith",
"(",
"(",
"\".json\"",
",",
"\".json.gz\"",
")",
")",
":",
"format",
"=",
"\"json\"",
"else... | https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/python/turicreate/data_structures/sgraph.py#L1059-L1108 | ||
krishauser/Klampt | 972cc83ea5befac3f653c1ba20f80155768ad519 | Python/python2_version/klampt/vis/camera.py | python | orientation_matrix | (axis1,axis2,axis3) | return so3.from_matrix([axis1,axis2,axis3]) | Returns the matrix that maps the camera's identity coordinate system (right,down,forward)
to world axes 1,2,3 (assuming no camera translation).
Each axis can be either a 3-tuple or any element of
['x','y','z','-x','-y','-z'] | Returns the matrix that maps the camera's identity coordinate system (right,down,forward)
to world axes 1,2,3 (assuming no camera translation).
Each axis can be either a 3-tuple or any element of
['x','y','z','-x','-y','-z'] | [
"Returns",
"the",
"matrix",
"that",
"maps",
"the",
"camera",
"s",
"identity",
"coordinate",
"system",
"(",
"right",
"down",
"forward",
")",
"to",
"world",
"axes",
"1",
"2",
"3",
"(",
"assuming",
"no",
"camera",
"translation",
")",
".",
"Each",
"axis",
"c... | def orientation_matrix(axis1,axis2,axis3):
"""Returns the matrix that maps the camera's identity coordinate system (right,down,forward)
to world axes 1,2,3 (assuming no camera translation).
Each axis can be either a 3-tuple or any element of
['x','y','z','-x','-y','-z']"""
if isinstance(axis1,str):
axis1 = basis_vectors[axis1]
if isinstance(axis2,str):
axis2 = basis_vectors[axis2]
if isinstance(axis3,str):
axis3 = basis_vectors[axis3]
return so3.from_matrix([axis1,axis2,axis3]) | [
"def",
"orientation_matrix",
"(",
"axis1",
",",
"axis2",
",",
"axis3",
")",
":",
"if",
"isinstance",
"(",
"axis1",
",",
"str",
")",
":",
"axis1",
"=",
"basis_vectors",
"[",
"axis1",
"]",
"if",
"isinstance",
"(",
"axis2",
",",
"str",
")",
":",
"axis2",
... | https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/python2_version/klampt/vis/camera.py#L13-L25 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/_controls.py | python | ColourPickerEvent.GetColour | (*args, **kwargs) | return _controls_.ColourPickerEvent_GetColour(*args, **kwargs) | GetColour(self) -> Colour | GetColour(self) -> Colour | [
"GetColour",
"(",
"self",
")",
"-",
">",
"Colour"
] | def GetColour(*args, **kwargs):
"""GetColour(self) -> Colour"""
return _controls_.ColourPickerEvent_GetColour(*args, **kwargs) | [
"def",
"GetColour",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_controls_",
".",
"ColourPickerEvent_GetColour",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_controls.py#L7066-L7068 | |
rampageX/firmware-mod-kit | c94cd6aeee50d92ec5280a6dba6d74828fd3606b | src/binwalk-2.1.1/src/binwalk/core/C.py | python | FunctionHandler.run | (self, *args) | return retval | Executes the library function, handling Python 2/3 compatibility and properly converting the return type.
@*args - Library function arguments.
Returns the return value from the libraray function. | Executes the library function, handling Python 2/3 compatibility and properly converting the return type. | [
"Executes",
"the",
"library",
"function",
"handling",
"Python",
"2",
"/",
"3",
"compatibility",
"and",
"properly",
"converting",
"the",
"return",
"type",
"."
] | def run(self, *args):
'''
Executes the library function, handling Python 2/3 compatibility and properly converting the return type.
@*args - Library function arguments.
Returns the return value from the libraray function.
'''
args = list(args)
# Python3 expects a bytes object for char *'s, not a str.
# This allows us to pass either, regardless of the Python version.
for i in range(0, len(args)):
if isinstance(args[i], str):
args[i] = str2bytes(args[i])
retval = self.function(*args)
if self.retval_converter is not None:
retval = self.retval_converter(retval)
return retval | [
"def",
"run",
"(",
"self",
",",
"*",
"args",
")",
":",
"args",
"=",
"list",
"(",
"args",
")",
"# Python3 expects a bytes object for char *'s, not a str. ",
"# This allows us to pass either, regardless of the Python version.",
"for",
"i",
"in",
"range",
"(",
"0",
",",
... | https://github.com/rampageX/firmware-mod-kit/blob/c94cd6aeee50d92ec5280a6dba6d74828fd3606b/src/binwalk-2.1.1/src/binwalk/core/C.py#L63-L83 | |
Dovyski/cvui | d1b40267bdee34fcc193a375911415222f1409b3 | cvui.py | python | button | (theIdle, theOver, theDown) | Display a button whose graphics are images (np.ndarray).
IMPORTANT: this function can only be used within a `begin*()/end*()` block, otherwise it does nothing.
The button accepts three images to describe its states,
which are idle (no mouse interaction), over (mouse is over the button) and down (mouse clicked the button).
The button size will be defined by the width and height of the images.
Parameters
----------
theIdle: np.ndarray
image that will be rendered when the button is not interacting with the mouse cursor.
theOver: np.ndarray
image that will be rendered when the mouse cursor is over the button.
theDown: np.ndarray
image that will be rendered when the mouse cursor clicked the button (or is clicking).
Returns
----------
`true` everytime the user clicks the button.
See Also
----------
button()
image()
iarea()
beginColumn()
beginRow()
endRow()
endColumn() | Display a button whose graphics are images (np.ndarray). | [
"Display",
"a",
"button",
"whose",
"graphics",
"are",
"images",
"(",
"np",
".",
"ndarray",
")",
"."
] | def button(theIdle, theOver, theDown):
"""
Display a button whose graphics are images (np.ndarray).
IMPORTANT: this function can only be used within a `begin*()/end*()` block, otherwise it does nothing.
The button accepts three images to describe its states,
which are idle (no mouse interaction), over (mouse is over the button) and down (mouse clicked the button).
The button size will be defined by the width and height of the images.
Parameters
----------
theIdle: np.ndarray
image that will be rendered when the button is not interacting with the mouse cursor.
theOver: np.ndarray
image that will be rendered when the mouse cursor is over the button.
theDown: np.ndarray
image that will be rendered when the mouse cursor clicked the button (or is clicking).
Returns
----------
`true` everytime the user clicks the button.
See Also
----------
button()
image()
iarea()
beginColumn()
beginRow()
endRow()
endColumn()
"""
print('This is wrapper function to help code autocompletion.') | [
"def",
"button",
"(",
"theIdle",
",",
"theOver",
",",
"theDown",
")",
":",
"print",
"(",
"'This is wrapper function to help code autocompletion.'",
")"
] | https://github.com/Dovyski/cvui/blob/d1b40267bdee34fcc193a375911415222f1409b3/cvui.py#L2079-L2112 | ||
rapidsai/cudf | d5b2448fc69f17509304d594f029d0df56984962 | python/cudf/cudf/core/udf/typing.py | python | NAType.unify | (self, context, other) | Masked <-> NA is deferred to MaskedType.unify()
Literal <-> NA -> Masked | Masked <-> NA is deferred to MaskedType.unify()
Literal <-> NA -> Masked | [
"Masked",
"<",
"-",
">",
"NA",
"is",
"deferred",
"to",
"MaskedType",
".",
"unify",
"()",
"Literal",
"<",
"-",
">",
"NA",
"-",
">",
"Masked"
] | def unify(self, context, other):
"""
Masked <-> NA is deferred to MaskedType.unify()
Literal <-> NA -> Masked
"""
if isinstance(other, MaskedType):
# bounce to MaskedType.unify
return None
elif isinstance(other, NAType):
# unify {NA, NA} -> NA
return self
else:
return MaskedType(other) | [
"def",
"unify",
"(",
"self",
",",
"context",
",",
"other",
")",
":",
"if",
"isinstance",
"(",
"other",
",",
"MaskedType",
")",
":",
"# bounce to MaskedType.unify",
"return",
"None",
"elif",
"isinstance",
"(",
"other",
",",
"NAType",
")",
":",
"# unify {NA, N... | https://github.com/rapidsai/cudf/blob/d5b2448fc69f17509304d594f029d0df56984962/python/cudf/cudf/core/udf/typing.py#L197-L209 | ||
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/contrib/keras/python/keras/engine/topology.py | python | Layer.get_output_shape_at | (self, node_index) | return self._get_node_attribute_at_index(node_index, 'output_shapes',
'output shape') | Retrieves the output shape(s) of a layer at a given node.
Arguments:
node_index: Integer, index of the node
from which to retrieve the attribute.
E.g. `node_index=0` will correspond to the
first time the layer was called.
Returns:
A shape tuple
(or list of shape tuples if the layer has multiple outputs). | Retrieves the output shape(s) of a layer at a given node. | [
"Retrieves",
"the",
"output",
"shape",
"(",
"s",
")",
"of",
"a",
"layer",
"at",
"a",
"given",
"node",
"."
] | def get_output_shape_at(self, node_index):
"""Retrieves the output shape(s) of a layer at a given node.
Arguments:
node_index: Integer, index of the node
from which to retrieve the attribute.
E.g. `node_index=0` will correspond to the
first time the layer was called.
Returns:
A shape tuple
(or list of shape tuples if the layer has multiple outputs).
"""
return self._get_node_attribute_at_index(node_index, 'output_shapes',
'output shape') | [
"def",
"get_output_shape_at",
"(",
"self",
",",
"node_index",
")",
":",
"return",
"self",
".",
"_get_node_attribute_at_index",
"(",
"node_index",
",",
"'output_shapes'",
",",
"'output shape'",
")"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/keras/python/keras/engine/topology.py#L600-L614 | |
snap-stanford/snap-python | d53c51b0a26aa7e3e7400b014cdf728948fde80a | setup/snap.py | python | TIntH.__eq__ | (self, *args) | return _snap.TIntH___eq__(self, *args) | __eq__(TIntH self, TIntH Hash) -> bool
Parameters:
Hash: THash< TInt,TInt > const & | __eq__(TIntH self, TIntH Hash) -> bool | [
"__eq__",
"(",
"TIntH",
"self",
"TIntH",
"Hash",
")",
"-",
">",
"bool"
] | def __eq__(self, *args):
"""
__eq__(TIntH self, TIntH Hash) -> bool
Parameters:
Hash: THash< TInt,TInt > const &
"""
return _snap.TIntH___eq__(self, *args) | [
"def",
"__eq__",
"(",
"self",
",",
"*",
"args",
")",
":",
"return",
"_snap",
".",
"TIntH___eq__",
"(",
"self",
",",
"*",
"args",
")"
] | https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L18356-L18364 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/psutil/_pssunos.py | python | Process._assert_alive | (self) | Raise NSP if the process disappeared on us. | Raise NSP if the process disappeared on us. | [
"Raise",
"NSP",
"if",
"the",
"process",
"disappeared",
"on",
"us",
"."
] | def _assert_alive(self):
"""Raise NSP if the process disappeared on us."""
# For those C function who do not raise NSP, possibly returning
# incorrect or incomplete result.
os.stat('%s/%s' % (self._procfs_path, self.pid)) | [
"def",
"_assert_alive",
"(",
"self",
")",
":",
"# For those C function who do not raise NSP, possibly returning",
"# incorrect or incomplete result.",
"os",
".",
"stat",
"(",
"'%s/%s'",
"%",
"(",
"self",
".",
"_procfs_path",
",",
"self",
".",
"pid",
")",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/psutil/_pssunos.py#L381-L385 | ||
miyosuda/TensorFlowAndroidMNIST | 7b5a4603d2780a8a2834575706e9001977524007 | jni-build/jni/include/tensorflow/contrib/distributions/python/ops/student_t.py | python | StudentT.mean | (self, name="mean") | Mean of the distribution.
The mean of Student's T equals `mu` if `df > 1`, otherwise it is `NaN`. If
`self.allow_nan_stats=False`, then an exception will be raised rather than
returning `NaN`.
Args:
name: A name to give this op.
Returns:
The mean for every batch member, a `Tensor` with same `dtype` as self. | Mean of the distribution. | [
"Mean",
"of",
"the",
"distribution",
"."
] | def mean(self, name="mean"):
"""Mean of the distribution.
The mean of Student's T equals `mu` if `df > 1`, otherwise it is `NaN`. If
`self.allow_nan_stats=False`, then an exception will be raised rather than
returning `NaN`.
Args:
name: A name to give this op.
Returns:
The mean for every batch member, a `Tensor` with same `dtype` as self.
"""
with ops.name_scope(self.name):
with ops.op_scope([self._mu], name):
result_if_defined = self._mu * self._ones()
if self.allow_nan_stats:
df_gt_1 = self._df > self._ones()
nan = np.nan + self._zeros()
return math_ops.select(df_gt_1, result_if_defined, nan)
else:
one = constant_op.constant(1.0, dtype=self.dtype)
return control_flow_ops.with_dependencies(
[check_ops.assert_less(
one, self._df,
message="mean not defined for components of df <= 1"
)], result_if_defined) | [
"def",
"mean",
"(",
"self",
",",
"name",
"=",
"\"mean\"",
")",
":",
"with",
"ops",
".",
"name_scope",
"(",
"self",
".",
"name",
")",
":",
"with",
"ops",
".",
"op_scope",
"(",
"[",
"self",
".",
"_mu",
"]",
",",
"name",
")",
":",
"result_if_defined",... | https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/contrib/distributions/python/ops/student_t.py#L168-L194 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/_windows.py | python | MessageDialog.SetExtendedMessage | (*args, **kwargs) | return _windows_.MessageDialog_SetExtendedMessage(*args, **kwargs) | SetExtendedMessage(self, String extendedMessage) | SetExtendedMessage(self, String extendedMessage) | [
"SetExtendedMessage",
"(",
"self",
"String",
"extendedMessage",
")"
] | def SetExtendedMessage(*args, **kwargs):
"""SetExtendedMessage(self, String extendedMessage)"""
return _windows_.MessageDialog_SetExtendedMessage(*args, **kwargs) | [
"def",
"SetExtendedMessage",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"MessageDialog_SetExtendedMessage",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_windows.py#L3662-L3664 | |
y123456yz/reading-and-annotate-mongodb-3.6 | 93280293672ca7586dc24af18132aa61e4ed7fcf | mongo/buildscripts/linter/pylint.py | python | PyLintLinter.get_lint_version_cmd_args | (self) | return ["--version"] | Get the command to run a linter version check. | Get the command to run a linter version check. | [
"Get",
"the",
"command",
"to",
"run",
"a",
"linter",
"version",
"check",
"."
] | def get_lint_version_cmd_args(self):
# type: () -> List[str]
"""Get the command to run a linter version check."""
return ["--version"] | [
"def",
"get_lint_version_cmd_args",
"(",
"self",
")",
":",
"# type: () -> List[str]",
"return",
"[",
"\"--version\"",
"]"
] | https://github.com/y123456yz/reading-and-annotate-mongodb-3.6/blob/93280293672ca7586dc24af18132aa61e4ed7fcf/mongo/buildscripts/linter/pylint.py#L22-L25 | |
yifita/3PU | 9ca4c3dfe4e3ead08c72e98a62e4cf181d5c70e0 | code/curriculumn_record_provider.py | python | jitter_perturbation_point_cloud | (batch_data, sigma=0.005, clip=0.02) | return jittered_data | Randomly jitter points. jittering is per point.
Input:
BxNx3 array, original batch of point clouds
Return:
BxNx3 array, jittered batch of point clouds | Randomly jitter points. jittering is per point.
Input:
BxNx3 array, original batch of point clouds
Return:
BxNx3 array, jittered batch of point clouds | [
"Randomly",
"jitter",
"points",
".",
"jittering",
"is",
"per",
"point",
".",
"Input",
":",
"BxNx3",
"array",
"original",
"batch",
"of",
"point",
"clouds",
"Return",
":",
"BxNx3",
"array",
"jittered",
"batch",
"of",
"point",
"clouds"
] | def jitter_perturbation_point_cloud(batch_data, sigma=0.005, clip=0.02):
""" Randomly jitter points. jittering is per point.
Input:
BxNx3 array, original batch of point clouds
Return:
BxNx3 array, jittered batch of point clouds
"""
assert(clip > 0)
jittered_data = tf.clip_by_value(sigma * tf.random_normal(tf.shape(batch_data)), -1 * clip, clip)
if is_2D:
chn = 2
else:
chn = 3
jittered_data = tf.concat([batch_data[:, :, :chn] + jittered_data[:, :, :chn], batch_data[:, :, chn:]], axis=-1)
return jittered_data | [
"def",
"jitter_perturbation_point_cloud",
"(",
"batch_data",
",",
"sigma",
"=",
"0.005",
",",
"clip",
"=",
"0.02",
")",
":",
"assert",
"(",
"clip",
">",
"0",
")",
"jittered_data",
"=",
"tf",
".",
"clip_by_value",
"(",
"sigma",
"*",
"tf",
".",
"random_norma... | https://github.com/yifita/3PU/blob/9ca4c3dfe4e3ead08c72e98a62e4cf181d5c70e0/code/curriculumn_record_provider.py#L77-L91 | |
PyMesh/PyMesh | 384ba882b7558ba6e8653ed263c419226c22bddf | python/pymesh/wires/WireNetwork.py | python | WireNetwork.offset | (self, offset_vector) | Offset vertices by per-vertex ``offset_vector``.
Args:
offset_vector (:py:class:`numpy.ndarray`): A :math:`N \times dim`
matrix representing per-vertex offset vectors. | Offset vertices by per-vertex ``offset_vector``. | [
"Offset",
"vertices",
"by",
"per",
"-",
"vertex",
"offset_vector",
"."
] | def offset(self, offset_vector):
""" Offset vertices by per-vertex ``offset_vector``.
Args:
offset_vector (:py:class:`numpy.ndarray`): A :math:`N \times dim`
matrix representing per-vertex offset vectors.
"""
vertices = self.vertices + offset_vector
self.vertices = vertices | [
"def",
"offset",
"(",
"self",
",",
"offset_vector",
")",
":",
"vertices",
"=",
"self",
".",
"vertices",
"+",
"offset_vector",
"self",
".",
"vertices",
"=",
"vertices"
] | https://github.com/PyMesh/PyMesh/blob/384ba882b7558ba6e8653ed263c419226c22bddf/python/pymesh/wires/WireNetwork.py#L145-L153 | ||
danxuhk/ContinuousCRF-CNN | 2b6dcaf179620f118b225ed12c890414ca828e21 | scripts/cpp_lint.py | python | IsErrorSuppressedByNolint | (category, linenum) | return (linenum in _error_suppressions.get(category, set()) or
linenum in _error_suppressions.get(None, set())) | Returns true if the specified error category is suppressed on this line.
Consults the global error_suppressions map populated by
ParseNolintSuppressions/ResetNolintSuppressions.
Args:
category: str, the category of the error.
linenum: int, the current line number.
Returns:
bool, True iff the error should be suppressed due to a NOLINT comment. | Returns true if the specified error category is suppressed on this line. | [
"Returns",
"true",
"if",
"the",
"specified",
"error",
"category",
"is",
"suppressed",
"on",
"this",
"line",
"."
] | def IsErrorSuppressedByNolint(category, linenum):
"""Returns true if the specified error category is suppressed on this line.
Consults the global error_suppressions map populated by
ParseNolintSuppressions/ResetNolintSuppressions.
Args:
category: str, the category of the error.
linenum: int, the current line number.
Returns:
bool, True iff the error should be suppressed due to a NOLINT comment.
"""
return (linenum in _error_suppressions.get(category, set()) or
linenum in _error_suppressions.get(None, set())) | [
"def",
"IsErrorSuppressedByNolint",
"(",
"category",
",",
"linenum",
")",
":",
"return",
"(",
"linenum",
"in",
"_error_suppressions",
".",
"get",
"(",
"category",
",",
"set",
"(",
")",
")",
"or",
"linenum",
"in",
"_error_suppressions",
".",
"get",
"(",
"None... | https://github.com/danxuhk/ContinuousCRF-CNN/blob/2b6dcaf179620f118b225ed12c890414ca828e21/scripts/cpp_lint.py#L504-L517 | |
Tencent/CMONGO | c40380caa14e05509f46993aa8b8da966b09b0b5 | src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Tool/GettextCommon.py | python | _read_linguas_from_files | (env, linguas_files = None) | return linguas | Parse `LINGUAS` file and return list of extracted languages | Parse `LINGUAS` file and return list of extracted languages | [
"Parse",
"LINGUAS",
"file",
"and",
"return",
"list",
"of",
"extracted",
"languages"
] | def _read_linguas_from_files(env, linguas_files = None):
""" Parse `LINGUAS` file and return list of extracted languages """
import SCons.Util
import SCons.Environment
global _re_comment
global _re_lang
if not SCons.Util.is_List(linguas_files) \
and not SCons.Util.is_String(linguas_files) \
and not isinstance(linguas_files, SCons.Node.FS.Base) \
and linguas_files:
# If, linguas_files==True or such, then read 'LINGUAS' file.
linguas_files = [ 'LINGUAS' ]
if linguas_files is None:
return []
fnodes = env.arg2nodes(linguas_files)
linguas = []
for fnode in fnodes:
contents = _re_comment.sub("", fnode.get_text_contents())
ls = [ l for l in _re_lang.findall(contents) if l ]
linguas.extend(ls)
return linguas | [
"def",
"_read_linguas_from_files",
"(",
"env",
",",
"linguas_files",
"=",
"None",
")",
":",
"import",
"SCons",
".",
"Util",
"import",
"SCons",
".",
"Environment",
"global",
"_re_comment",
"global",
"_re_lang",
"if",
"not",
"SCons",
".",
"Util",
".",
"is_List",... | https://github.com/Tencent/CMONGO/blob/c40380caa14e05509f46993aa8b8da966b09b0b5/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Tool/GettextCommon.py#L107-L127 | |
gnuradio/gnuradio | 09c3c4fa4bfb1a02caac74cb5334dfe065391e3b | grc/gui/canvas/port.py | python | Port.move | (self, delta_coor) | Move the parent rather than self. | Move the parent rather than self. | [
"Move",
"the",
"parent",
"rather",
"than",
"self",
"."
] | def move(self, delta_coor):
"""Move the parent rather than self."""
self.parent_block.move(delta_coor) | [
"def",
"move",
"(",
"self",
",",
"delta_coor",
")",
":",
"self",
".",
"parent_block",
".",
"move",
"(",
"delta_coor",
")"
] | https://github.com/gnuradio/gnuradio/blob/09c3c4fa4bfb1a02caac74cb5334dfe065391e3b/grc/gui/canvas/port.py#L183-L185 | ||
neopenx/Dragon | 0e639a7319035ddc81918bd3df059230436ee0a1 | Dragon/python/dragon/vm/caffe/solver.py | python | Solver.InitTrainNet | (self) | Initialize the train net.
Returns
-------
None
References
----------
The implementation of `InitTrainNet(solver.cpp, L63)`_. | Initialize the train net. | [
"Initialize",
"the",
"train",
"net",
"."
] | def InitTrainNet(self):
"""Initialize the train net.
Returns
-------
None
References
----------
The implementation of `InitTrainNet(solver.cpp, L63)`_.
"""
if self._param.HasField('net'):
self._net = Net(self._param.net, "TRAIN")
if self._param.HasField('train_net'):
if self._net is not None:
raise RuntimeError('net or train_net can not be specfied both.')
self._net = Net(self._param.train_net, "TRAIN") | [
"def",
"InitTrainNet",
"(",
"self",
")",
":",
"if",
"self",
".",
"_param",
".",
"HasField",
"(",
"'net'",
")",
":",
"self",
".",
"_net",
"=",
"Net",
"(",
"self",
".",
"_param",
".",
"net",
",",
"\"TRAIN\"",
")",
"if",
"self",
".",
"_param",
".",
... | https://github.com/neopenx/Dragon/blob/0e639a7319035ddc81918bd3df059230436ee0a1/Dragon/python/dragon/vm/caffe/solver.py#L60-L78 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_internal/req/req_install.py | python | InstallRequirement.check_if_exists | (self, use_user_site) | Find an installed distribution that satisfies or conflicts
with this requirement, and set self.satisfied_by or
self.should_reinstall appropriately. | Find an installed distribution that satisfies or conflicts | [
"Find",
"an",
"installed",
"distribution",
"that",
"satisfies",
"or",
"conflicts"
] | def check_if_exists(self, use_user_site):
# type: (bool) -> None
"""Find an installed distribution that satisfies or conflicts
with this requirement, and set self.satisfied_by or
self.should_reinstall appropriately.
"""
if self.req is None:
return
existing_dist = get_distribution(self.req.name)
if not existing_dist:
return
existing_version = existing_dist.parsed_version
if not self.req.specifier.contains(existing_version, prereleases=True):
self.satisfied_by = None
if use_user_site:
if dist_in_usersite(existing_dist):
self.should_reinstall = True
elif (running_under_virtualenv() and
dist_in_site_packages(existing_dist)):
raise InstallationError(
"Will not install to the user site because it will "
"lack sys.path precedence to {} in {}".format(
existing_dist.project_name, existing_dist.location)
)
else:
self.should_reinstall = True
else:
if self.editable:
self.should_reinstall = True
# when installing editables, nothing pre-existing should ever
# satisfy
self.satisfied_by = None
else:
self.satisfied_by = existing_dist | [
"def",
"check_if_exists",
"(",
"self",
",",
"use_user_site",
")",
":",
"# type: (bool) -> None",
"if",
"self",
".",
"req",
"is",
"None",
":",
"return",
"existing_dist",
"=",
"get_distribution",
"(",
"self",
".",
"req",
".",
"name",
")",
"if",
"not",
"existin... | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_internal/req/req_install.py#L845-L913 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/_gdi.py | python | Icon.SetWidth | (*args, **kwargs) | return _gdi_.Icon_SetWidth(*args, **kwargs) | SetWidth(self, int w) | SetWidth(self, int w) | [
"SetWidth",
"(",
"self",
"int",
"w",
")"
] | def SetWidth(*args, **kwargs):
"""SetWidth(self, int w)"""
return _gdi_.Icon_SetWidth(*args, **kwargs) | [
"def",
"SetWidth",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_gdi_",
".",
"Icon_SetWidth",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_gdi.py#L1341-L1343 | |
hughperkins/tf-coriander | 970d3df6c11400ad68405f22b0c42a52374e94ca | tensorflow/python/ops/nn_ops.py | python | _calc_bias_add_weight_params | (graph, node) | return ops.OpStats("weight_parameters", bias_count) | Calculates the on-disk weight parameters for BiasAdd. | Calculates the on-disk weight parameters for BiasAdd. | [
"Calculates",
"the",
"on",
"-",
"disk",
"weight",
"parameters",
"for",
"BiasAdd",
"."
] | def _calc_bias_add_weight_params(graph, node):
"""Calculates the on-disk weight parameters for BiasAdd."""
bias_shape = graph_util.tensor_shape_from_node_def_name(graph, node.input[1])
bias_shape.assert_is_fully_defined()
bias_count = np.prod(bias_shape.as_list())
return ops.OpStats("weight_parameters", bias_count) | [
"def",
"_calc_bias_add_weight_params",
"(",
"graph",
",",
"node",
")",
":",
"bias_shape",
"=",
"graph_util",
".",
"tensor_shape_from_node_def_name",
"(",
"graph",
",",
"node",
".",
"input",
"[",
"1",
"]",
")",
"bias_shape",
".",
"assert_is_fully_defined",
"(",
"... | https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/python/ops/nn_ops.py#L1649-L1654 | |
IfcOpenShell/IfcOpenShell | 2c2954b11a9c9d581bef03240836d4567e69ad0b | src/ifcopenshell-python/ifcopenshell/ids.py | python | entity.__call__ | (self, inst, logger) | Validate an ifc instance against that entity facet.
:param inst: IFC entity element
:type inst: IFC entity
:param logger: Logging object
:type logger: logging
:return: result of the validation as bool and message
:rtype: facet_evaluation(bool, str) | Validate an ifc instance against that entity facet. | [
"Validate",
"an",
"ifc",
"instance",
"against",
"that",
"entity",
"facet",
"."
] | def __call__(self, inst, logger):
"""Validate an ifc instance against that entity facet.
:param inst: IFC entity element
:type inst: IFC entity
:param logger: Logging object
:type logger: logging
:return: result of the validation as bool and message
:rtype: facet_evaluation(bool, str)
"""
# @nb with inheritance
if self.predefinedtype and hasattr(inst, "PredefinedType"):
self.message = "an entity name '%(name)s' of predefined type '%(predefinedtype)s'"
return facet_evaluation(
inst.is_a(self.name) and inst.PredefinedType == self.predefinedtype,
self.message % {"name": inst.is_a(), "predefinedtype": inst.PredefinedType},
)
else:
self.message = "an entity name '%(name)s'"
return facet_evaluation(inst.is_a(self.name), self.message % {"name": inst.is_a()}) | [
"def",
"__call__",
"(",
"self",
",",
"inst",
",",
"logger",
")",
":",
"# @nb with inheritance",
"if",
"self",
".",
"predefinedtype",
"and",
"hasattr",
"(",
"inst",
",",
"\"PredefinedType\"",
")",
":",
"self",
".",
"message",
"=",
"\"an entity name '%(name)s' of ... | https://github.com/IfcOpenShell/IfcOpenShell/blob/2c2954b11a9c9d581bef03240836d4567e69ad0b/src/ifcopenshell-python/ifcopenshell/ids.py#L509-L529 | ||
seiing/SoftCon | 7856692e6ce2776070a4b578aacdfff2b003a06d | learn/pposgd_simple.py | python | add_vtarg_and_adv | (seg, gamma, lam) | Compute target value using TD(lambda) estimator, and advantage with GAE(lambda) | Compute target value using TD(lambda) estimator, and advantage with GAE(lambda) | [
"Compute",
"target",
"value",
"using",
"TD",
"(",
"lambda",
")",
"estimator",
"and",
"advantage",
"with",
"GAE",
"(",
"lambda",
")"
] | def add_vtarg_and_adv(seg, gamma, lam):
"""
Compute target value using TD(lambda) estimator, and advantage with GAE(lambda)
"""
new = np.append(seg["new"], 0) # last element is only used for last vtarg, but we already zeroed it if last new = 1
vpred = np.append(seg["vpred"], seg["nextvpred"])
T = len(seg["rew"])
seg["adv"] = gaelam = np.empty(T, 'float32')
rew = seg["rew"]
lastgaelam = 0
for t in reversed(range(T)):
nonterminal = 1-new[t+1]
delta = rew[t] + gamma * vpred[t+1] * nonterminal - vpred[t]
gaelam[t] = lastgaelam = delta + gamma * lam * nonterminal * lastgaelam
seg["tdlamret"] = seg["adv"] + seg["vpred"] | [
"def",
"add_vtarg_and_adv",
"(",
"seg",
",",
"gamma",
",",
"lam",
")",
":",
"new",
"=",
"np",
".",
"append",
"(",
"seg",
"[",
"\"new\"",
"]",
",",
"0",
")",
"# last element is only used for last vtarg, but we already zeroed it if last new = 1",
"vpred",
"=",
"np",... | https://github.com/seiing/SoftCon/blob/7856692e6ce2776070a4b578aacdfff2b003a06d/learn/pposgd_simple.py#L138-L152 | ||
cocos-creator/engine-native | 984c4c9f5838253313b44ccd429bd8fac4ec8a6a | tools/bindings-generator/generator.py | python | generate_namespace_list | (cursor, namespaces=[]) | return namespaces | build the full namespace for a specific cursor | build the full namespace for a specific cursor | [
"build",
"the",
"full",
"namespace",
"for",
"a",
"specific",
"cursor"
] | def generate_namespace_list(cursor, namespaces=[]):
'''
build the full namespace for a specific cursor
'''
if cursor:
parent = cursor.semantic_parent
if parent:
if parent.kind == cindex.CursorKind.NAMESPACE or parent.kind == cindex.CursorKind.CLASS_DECL:
if parent.kind == cindex.CursorKind.NAMESPACE:
namespaces.append(parent.displayname)
generate_namespace_list(parent, namespaces)
return namespaces | [
"def",
"generate_namespace_list",
"(",
"cursor",
",",
"namespaces",
"=",
"[",
"]",
")",
":",
"if",
"cursor",
":",
"parent",
"=",
"cursor",
".",
"semantic_parent",
"if",
"parent",
":",
"if",
"parent",
".",
"kind",
"==",
"cindex",
".",
"CursorKind",
".",
"... | https://github.com/cocos-creator/engine-native/blob/984c4c9f5838253313b44ccd429bd8fac4ec8a6a/tools/bindings-generator/generator.py#L375-L386 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/_misc.py | python | WindowDisabler.__init__ | (self, *args) | __init__(self, bool disable=True) -> WindowDisabler
__init__(self, Window winToSkip) -> WindowDisabler | __init__(self, bool disable=True) -> WindowDisabler
__init__(self, Window winToSkip) -> WindowDisabler | [
"__init__",
"(",
"self",
"bool",
"disable",
"=",
"True",
")",
"-",
">",
"WindowDisabler",
"__init__",
"(",
"self",
"Window",
"winToSkip",
")",
"-",
">",
"WindowDisabler"
] | def __init__(self, *args):
"""
__init__(self, bool disable=True) -> WindowDisabler
__init__(self, Window winToSkip) -> WindowDisabler
"""
_misc_.WindowDisabler_swiginit(self,_misc_.new_WindowDisabler(*args)) | [
"def",
"__init__",
"(",
"self",
",",
"*",
"args",
")",
":",
"_misc_",
".",
"WindowDisabler_swiginit",
"(",
"self",
",",
"_misc_",
".",
"new_WindowDisabler",
"(",
"*",
"args",
")",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_misc.py#L842-L847 | ||
BlzFans/wke | b0fa21158312e40c5fbd84682d643022b6c34a93 | cygwin/lib/python2.6/mhlib.py | python | Folder.error | (self, *args) | Error message handler. | Error message handler. | [
"Error",
"message",
"handler",
"."
] | def error(self, *args):
"""Error message handler."""
self.mh.error(*args) | [
"def",
"error",
"(",
"self",
",",
"*",
"args",
")",
":",
"self",
".",
"mh",
".",
"error",
"(",
"*",
"args",
")"
] | https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/mhlib.py#L256-L258 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python3/src/Lib/_pydecimal.py | python | Context.is_zero | (self, a) | return a.is_zero() | Return True if the operand is a zero; otherwise return False.
>>> ExtendedContext.is_zero(Decimal('0'))
True
>>> ExtendedContext.is_zero(Decimal('2.50'))
False
>>> ExtendedContext.is_zero(Decimal('-0E+2'))
True
>>> ExtendedContext.is_zero(1)
False
>>> ExtendedContext.is_zero(0)
True | Return True if the operand is a zero; otherwise return False. | [
"Return",
"True",
"if",
"the",
"operand",
"is",
"a",
"zero",
";",
"otherwise",
"return",
"False",
"."
] | def is_zero(self, a):
"""Return True if the operand is a zero; otherwise return False.
>>> ExtendedContext.is_zero(Decimal('0'))
True
>>> ExtendedContext.is_zero(Decimal('2.50'))
False
>>> ExtendedContext.is_zero(Decimal('-0E+2'))
True
>>> ExtendedContext.is_zero(1)
False
>>> ExtendedContext.is_zero(0)
True
"""
a = _convert_other(a, raiseit=True)
return a.is_zero() | [
"def",
"is_zero",
"(",
"self",
",",
"a",
")",
":",
"a",
"=",
"_convert_other",
"(",
"a",
",",
"raiseit",
"=",
"True",
")",
"return",
"a",
".",
"is_zero",
"(",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/_pydecimal.py#L4645-L4660 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/aui.py | python | AuiTabArt.GetTabSize | (*args, **kwargs) | return _aui.AuiTabArt_GetTabSize(*args, **kwargs) | GetTabSize(self, DC dc, Window wnd, String caption, Bitmap bitmap, bool active,
int close_button_state) -> PyObject | GetTabSize(self, DC dc, Window wnd, String caption, Bitmap bitmap, bool active,
int close_button_state) -> PyObject | [
"GetTabSize",
"(",
"self",
"DC",
"dc",
"Window",
"wnd",
"String",
"caption",
"Bitmap",
"bitmap",
"bool",
"active",
"int",
"close_button_state",
")",
"-",
">",
"PyObject"
def GetTabSize(*args, **kwargs):
    """
    GetTabSize(self, DC dc, Window wnd, String caption, Bitmap bitmap, bool active,
        int close_button_state) -> PyObject
    """
    # Thin SWIG wrapper: forward everything to the native implementation.
    result = _aui.AuiTabArt_GetTabSize(*args, **kwargs)
    return result
"def",
"GetTabSize",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_aui",
".",
"AuiTabArt_GetTabSize",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/aui.py#L2356-L2361 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/py3/scipy/stats/_multivariate.py | python | multivariate_normal_gen.__call__ | (self, mean=None, cov=1, allow_singular=False, seed=None) | return multivariate_normal_frozen(mean, cov,
allow_singular=allow_singular,
seed=seed) | Create a frozen multivariate normal distribution.
See `multivariate_normal_frozen` for more information. | Create a frozen multivariate normal distribution. | [
"Create",
"a",
"frozen",
"multivariate",
"normal",
"distribution",
"."
def __call__(self, mean=None, cov=1, allow_singular=False, seed=None):
    """Create a frozen multivariate normal distribution.

    See `multivariate_normal_frozen` for more information.
    """
    frozen = multivariate_normal_frozen(
        mean, cov, allow_singular=allow_singular, seed=seed)
    return frozen
"def",
"__call__",
"(",
"self",
",",
"mean",
"=",
"None",
",",
"cov",
"=",
"1",
",",
"allow_singular",
"=",
"False",
",",
"seed",
"=",
"None",
")",
":",
"return",
"multivariate_normal_frozen",
"(",
"mean",
",",
"cov",
",",
"allow_singular",
"=",
"allow_s... | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/stats/_multivariate.py#L353-L362 | |
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/python/mantidqtinterfaces/mantidqtinterfaces/HFIR_4Circle_Reduction/reduce4circleGUI.py | python | MainWindow.update_adding_peaks_status | (self, exp_number, scan_number, progress) | Update the status for adding peak to UB matrix calculating
:param exp_number:
:param scan_number:
:param progress:
:return: | Update the status for adding peak to UB matrix calculating
:param exp_number:
:param scan_number:
:param progress:
:return: | [
"Update",
"the",
"status",
"for",
"adding",
"peak",
"to",
"UB",
"matrix",
"calculating",
":",
"param",
"exp_number",
":",
":",
"param",
"scan_number",
":",
":",
"param",
"progress",
":",
":",
"return",
":"
def update_adding_peaks_status(self, exp_number, scan_number, progress):
    """Update the status for adding peak to UB matrix calculating.

    :param exp_number: experiment number being processed
    :param scan_number: scan number; a negative value signals completion
    :param progress: integer value pushed to the progress bar
    :return:
    """
    # A negative scan number means peak processing is finished.
    if scan_number >= 0:
        message = 'Processing experiment %d scan %d starting from %s.' % (
            exp_number, scan_number, str(datetime.datetime.now()))
    else:
        message = 'Peak processing finished'
    # Show the message in the status bar, then advance the progress bar.
    self.ui.statusbar.showMessage(message)
    self.ui.progressBar_add_ub_peaks.setValue(progress)
"def",
"update_adding_peaks_status",
"(",
"self",
",",
"exp_number",
",",
"scan_number",
",",
"progress",
")",
":",
"# show message to bar",
"if",
"scan_number",
"<",
"0",
":",
"message",
"=",
"'Peak processing finished'",
"else",
":",
"message",
"=",
"'Processing e... | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/HFIR_4Circle_Reduction/reduce4circleGUI.py#L4007-L4024 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Lib/plat-mac/EasyDialogs.py | python | AskString | (prompt, default = "", id=261, ok=None, cancel=None) | Display a PROMPT string and a text entry field with a DEFAULT string.
Return the contents of the text entry field when the user clicks the
OK button or presses Return.
Return None when the user clicks the Cancel button.
If omitted, DEFAULT is empty.
The PROMPT and DEFAULT strings, as well as the return value,
can be at most 255 characters long. | Display a PROMPT string and a text entry field with a DEFAULT string. | [
"Display",
"a",
"PROMPT",
"string",
"and",
"a",
"text",
"entry",
"field",
"with",
"a",
"DEFAULT",
"string",
"."
def AskString(prompt, default = "", id=261, ok=None, cancel=None):
    """Display a PROMPT string and a text entry field with a DEFAULT string.
    Return the contents of the text entry field when the user clicks the
    OK button or presses Return.
    Return None when the user clicks the Cancel button.
    If omitted, DEFAULT is empty.
    The PROMPT and DEFAULT strings, as well as the return value,
    can be at most 255 characters long.
    """
    # NOTE(review): legacy Python 2 / Mac Carbon dialog code (print statement
    # below); only runnable on classic MacPython.
    _initialize()
    _interact()
    # Load the dialog template from the resource file (-1 = frontmost).
    d = GetNewDialog(id, -1)
    if not d:
        print "EasyDialogs: Can't get DLOG resource with id =", id, " (missing resource file?)"
        return
    # Dialog item 3 is the prompt text, item 4 the editable text field.
    h = d.GetDialogItemAsControl(3)
    SetDialogItemText(h, lf2cr(prompt))
    h = d.GetDialogItemAsControl(4)
    SetDialogItemText(h, lf2cr(default))
    # Pre-select the default text so the user's typing replaces it.
    d.SelectDialogItemText(4, 0, 999)
    # d.SetDialogItem(4, 0, 255)
    # Optional custom button titles: item 1 = OK, item 2 = Cancel.
    if ok is not None:
        h = d.GetDialogItemAsControl(1)
        h.SetControlTitle(ok)
    if cancel is not None:
        h = d.GetDialogItemAsControl(2)
        h.SetControlTitle(cancel)
    d.SetDialogDefaultItem(1)
    d.SetDialogCancelItem(2)
    d.AutoSizeDialog()
    d.GetDialogWindow().ShowWindow()
    # Modal event loop: item 1 (OK) returns the field text, item 2 (Cancel)
    # returns None; cr2lf/lf2cr translate Mac CR line endings.
    while 1:
        n = ModalDialog(None)
        if n == 1:
            h = d.GetDialogItemAsControl(4)
            return cr2lf(GetDialogItemText(h))
        if n == 2: return None
"def",
"AskString",
"(",
"prompt",
",",
"default",
"=",
"\"\"",
",",
"id",
"=",
"261",
",",
"ok",
"=",
"None",
",",
"cancel",
"=",
"None",
")",
":",
"_initialize",
"(",
")",
"_interact",
"(",
")",
"d",
"=",
"GetNewDialog",
"(",
"id",
",",
"-",
"1... | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/plat-mac/EasyDialogs.py#L97-L137 | ||
ApolloAuto/apollo-platform | 86d9dc6743b496ead18d597748ebabd34a513289 | ros/geometry/tf/src/tf/transformations.py | python | inverse_matrix | (matrix) | return numpy.linalg.inv(matrix) | Return inverse of square transformation matrix.
>>> M0 = random_rotation_matrix()
>>> M1 = inverse_matrix(M0.T)
>>> numpy.allclose(M1, numpy.linalg.inv(M0.T))
True
>>> for size in range(1, 7):
... M0 = numpy.random.rand(size, size)
... M1 = inverse_matrix(M0)
... if not numpy.allclose(M1, numpy.linalg.inv(M0)): print size | Return inverse of square transformation matrix. | [
"Return",
"inverse",
"of",
"square",
"transformation",
"matrix",
"."
def inverse_matrix(matrix):
    """Return the inverse of a square transformation matrix.

    >>> M0 = random_rotation_matrix()
    >>> M1 = inverse_matrix(M0.T)
    >>> numpy.allclose(M1, numpy.linalg.inv(M0.T))
    True
    """
    # Delegate to numpy's LAPACK-backed matrix inverse.
    return numpy.linalg.inv(matrix)
"def",
"inverse_matrix",
"(",
"matrix",
")",
":",
"return",
"numpy",
".",
"linalg",
".",
"inv",
"(",
"matrix",
")"
] | https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/geometry/tf/src/tf/transformations.py#L1633-L1646 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python3/src/Lib/cgi.py | python | FieldStorage.getvalue | (self, key, default=None) | Dictionary style get() method, including 'value' lookup. | Dictionary style get() method, including 'value' lookup. | [
"Dictionary",
"style",
"get",
"()",
"method",
"including",
"value",
"lookup",
"."
def getvalue(self, key, default=None):
    """Dictionary style get() method, including 'value' lookup."""
    # Missing keys fall back to the supplied default.
    if key not in self:
        return default
    found = self[key]
    # A repeated field is stored as a list of items; unwrap each one.
    if isinstance(found, list):
        return [item.value for item in found]
    return found.value
"def",
"getvalue",
"(",
"self",
",",
"key",
",",
"default",
"=",
"None",
")",
":",
"if",
"key",
"in",
"self",
":",
"value",
"=",
"self",
"[",
"key",
"]",
"if",
"isinstance",
"(",
"value",
",",
"list",
")",
":",
"return",
"[",
"x",
".",
"value",
... | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/cgi.py#L531-L540 | ||
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/third_party/gsutil/third_party/boto/boto/iam/connection.py | python | IAMConnection.put_user_policy | (self, user_name, policy_name, policy_json) | return self.get_response('PutUserPolicy', params, verb='POST') | Adds or updates the specified policy document for the specified user.
:type user_name: string
:param user_name: The name of the user the policy is associated with.
:type policy_name: string
:param policy_name: The policy document to get.
:type policy_json: string
:param policy_json: The policy document. | Adds or updates the specified policy document for the specified user. | [
"Adds",
"or",
"updates",
"the",
"specified",
"policy",
"document",
"for",
"the",
"specified",
"user",
"."
def put_user_policy(self, user_name, policy_name, policy_json):
    """
    Adds or updates the specified policy document for the specified user.

    :type user_name: string
    :param user_name: The name of the user the policy is associated with.

    :type policy_name: string
    :param policy_name: The name of the policy document.

    :type policy_json: string
    :param policy_json: The policy document.
    """
    request_params = {
        'UserName': user_name,
        'PolicyName': policy_name,
        'PolicyDocument': policy_json,
    }
    # POST because the policy document can exceed GET query-string limits.
    return self.get_response('PutUserPolicy', request_params, verb='POST')
"def",
"put_user_policy",
"(",
"self",
",",
"user_name",
",",
"policy_name",
",",
"policy_json",
")",
":",
"params",
"=",
"{",
"'UserName'",
":",
"user_name",
",",
"'PolicyName'",
":",
"policy_name",
",",
"'PolicyDocument'",
":",
"policy_json",
"}",
"return",
... | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/iam/connection.py#L437-L454 | |
PaddlePaddle/Paddle | 1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c | python/paddle/fluid/incubate/fleet/utils/fleet_util.py | python | FleetUtil.save_cache_base_model | (self, output_path, day, **kwargs) | return key_num | save cache model
Args:
output_path(str): output path
day(str|int): training day
pass_id(str|int): training pass id
kwargs(dict): user defined properties
table_id(int): table id to save cache
Returns:
key_num(int): cache key num
Examples:
.. code-block:: python
from paddle.fluid.incubate.fleet.utils.fleet_util import FleetUtil
fleet_util = FleetUtil()
fleet_util.save_cache_base_model("hdfs:/my/path", 20190722) | save cache model | [
"save",
"cache",
"model"
def save_cache_base_model(self, output_path, day, **kwargs):
    """
    save cache model

    Args:
        output_path(str): output path
        day(str|int): training day
        kwargs(dict): user defined properties
            table_id(int): table id to save cache

    Returns:
        key_num(int): cache key num

    Examples:
        .. code-block:: python

          from paddle.fluid.incubate.fleet.utils.fleet_util import FleetUtil
          fleet_util = FleetUtil()
          fleet_util.save_cache_base_model("hdfs:/my/path", 20190722)
    """
    table_id = kwargs.get("table_id", 0)
    # Base models live under <output_path>/<day>/base.
    model_path = "%s/%s/base" % (output_path.rstrip("/"), str(day))
    self.rank0_print("going to save_cache_base_model %s" % model_path)
    # mode=2 saves the cache table; presumably the mode constant matches
    # the base-model convention — confirm against fleet.save_cache_model.
    key_num = fleet.save_cache_model(
        None, model_path, mode=2, table_id=table_id)
    self.rank0_print("save_cache_base_model done")
    return key_num
"def",
"save_cache_base_model",
"(",
"self",
",",
"output_path",
",",
"day",
",",
"*",
"*",
"kwargs",
")",
":",
"day",
"=",
"str",
"(",
"day",
")",
"table_id",
"=",
"kwargs",
".",
"get",
"(",
"\"table_id\"",
",",
"0",
")",
"suffix_name",
"=",
"\"/%s/ba... | https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/fluid/incubate/fleet/utils/fleet_util.py#L785-L815 | |
tpfister/caffe-heatmap | 4db69ef53e6b8a0b3b4ebb29328b0ab3dbf67c4e | scripts/cpp_lint.py | python | CheckForBadCharacters | (filename, lines, error) | Logs an error for each line containing bad characters.
Two kinds of bad characters:
1. Unicode replacement characters: These indicate that either the file
contained invalid UTF-8 (likely) or Unicode replacement characters (which
it shouldn't). Note that it's possible for this to throw off line
numbering if the invalid UTF-8 occurred adjacent to a newline.
2. NUL bytes. These are problematic for some tools.
Args:
filename: The name of the current file.
lines: An array of strings, each representing a line of the file.
error: The function to call with any errors found. | Logs an error for each line containing bad characters. | [
"Logs",
"an",
"error",
"for",
"each",
"line",
"containing",
"bad",
"characters",
"."
def CheckForBadCharacters(filename, lines, error):
    """Logs an error for each line containing bad characters.

    Two kinds of bad characters:

    1. Unicode replacement characters: These indicate that either the file
       contained invalid UTF-8 (likely) or Unicode replacement characters
       (which it shouldn't).  Note that it's possible for this to throw off
       line numbering if the invalid UTF-8 occurred adjacent to a newline.

    2. NUL bytes.  These are problematic for some tools.

    Args:
      filename: The name of the current file.
      lines: An array of strings, each representing a line of the file.
      error: The function to call with any errors found.
    """
    line_number = 0
    for text in lines:
        # U+FFFD marks bytes the decoder could not interpret as UTF-8.
        if u'\ufffd' in text:
            error(filename, line_number, 'readability/utf8', 5,
                  'Line contains invalid UTF-8 (or Unicode replacement character).')
        if '\0' in text:
            error(filename, line_number, 'readability/nul', 5,
                  'Line contains NUL byte.')
        line_number += 1
"def",
"CheckForBadCharacters",
"(",
"filename",
",",
"lines",
",",
"error",
")",
":",
"for",
"linenum",
",",
"line",
"in",
"enumerate",
"(",
"lines",
")",
":",
"if",
"u'\\ufffd'",
"in",
"line",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'reada... | https://github.com/tpfister/caffe-heatmap/blob/4db69ef53e6b8a0b3b4ebb29328b0ab3dbf67c4e/scripts/cpp_lint.py#L1483-L1505 | ||
microsoft/Azure-Kinect-Sensor-SDK | d87ef578676c05b9a5d23c097502942753bf3777 | examples/calibration_registration/camera_tools.py | python | estimate_pose | (img: np.array,
template: str,
opencv_calfile: str) | return retval, rvec, tvec | Estimate camera pose using board.
Args:
img (np.array): Board image.
template (str): fullpath of the board json_file.
opencv_calfile (str): fullpath of the opencv cal file.
Raises:
ValueError: Throw an error if the calibration file fails to load.
Returns:
Tuple[bool, np.ndarray, np.ndarray]: Returns success of calibration and
extrinsics.
retval: Return True of the optimizer converged.
rvec: rotation array
tvec: translation array 1*3 | Estimate camera pose using board. | [
"Estimate",
"camera",
"pose",
"using",
"board",
"."
def estimate_pose(img: np.array,
                  template: str,
                  opencv_calfile: str) -> Tuple[bool,
                                                np.ndarray,
                                                np.ndarray]:
    """Estimate camera pose using board.

    Args:
        img (np.array): Board image.
        template (str): fullpath of the board json_file.
        opencv_calfile (str): fullpath of the opencv cal file.

    Raises:
        ValueError: Throw an error if the calibration file fails to load.

    Returns:
        Tuple[bool, np.ndarray, np.ndarray]: Returns success of calibration and
            extrinsics.
            retval: Return True of the optimizer converged.
            rvec: rotation array
            tvec: translation array 1*3
    """
    # Intrinsic matrix and distortion coefficients from the OpenCV cal file.
    k_matrix, dist, _ = read_opencv_calfile(opencv_calfile)
    # Small non-zero initial guesses for the extrinsics solver.
    rvec = np.full((1, 3), 0.01)
    tvec = np.full((1, 3), 0.01)
    # Detect ChArUco corners/ids on the board image.
    charuco_corners, charuco_ids, board = detect_markers(img, template)
    if len(charuco_corners) > 0:
        retval, rvec, tvec = aruco.estimatePoseCharucoBoard(charuco_corners,
                                                            charuco_ids,
                                                            board,
                                                            k_matrix,
                                                            dist,
                                                            rvec,
                                                            tvec)
    else:
        # No markers detected: report failure with empty extrinsics.
        retval = False
        rvec = []
        tvec = []
    return retval, rvec, tvec
"def",
"estimate_pose",
"(",
"img",
":",
"np",
".",
"array",
",",
"template",
":",
"str",
",",
"opencv_calfile",
":",
"str",
")",
"->",
"Tuple",
"[",
"bool",
",",
"np",
".",
"ndarray",
",",
"np",
".",
"ndarray",
"]",
":",
"k_matrix",
",",
"dist",
"... | https://github.com/microsoft/Azure-Kinect-Sensor-SDK/blob/d87ef578676c05b9a5d23c097502942753bf3777/examples/calibration_registration/camera_tools.py#L332-L373 | |
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/xml/dom/minidom.py | python | parseString | (string, parser=None) | Parse a file into a DOM from a string. | Parse a file into a DOM from a string. | [
"Parse",
"a",
"file",
"into",
"a",
"DOM",
"from",
"a",
"string",
"."
def parseString(string, parser=None):
    """Parse a file into a DOM from a string."""
    if parser is not None:
        # A custom parser is routed through the pulldom-based builder.
        from xml.dom import pulldom
        return _do_pulldom_parse(pulldom.parseString, (string,),
                                 {'parser': parser})
    # Default path: the fast expat-based builder.
    from xml.dom import expatbuilder
    return expatbuilder.parseString(string)
"def",
"parseString",
"(",
"string",
",",
"parser",
"=",
"None",
")",
":",
"if",
"parser",
"is",
"None",
":",
"from",
"xml",
".",
"dom",
"import",
"expatbuilder",
"return",
"expatbuilder",
".",
"parseString",
"(",
"string",
")",
"else",
":",
"from",
"xml... | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/xml/dom/minidom.py#L1927-L1935 | ||
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | tools/android/loading/user_satisfied_lens.py | python | _UserSatisfiedLens.CriticalFingerprints | (self) | return set(rq.fingerprint for rq in self.CriticalRequests()) | Fingerprints of critical requests. | Fingerprints of critical requests. | [
"Fingerprints",
"of",
"critical",
"requests",
"."
def CriticalFingerprints(self):
    """Fingerprints of critical requests."""
    # Set comprehension de-duplicates identical fingerprints.
    return {request.fingerprint for request in self.CriticalRequests()}
"def",
"CriticalFingerprints",
"(",
"self",
")",
":",
"return",
"set",
"(",
"rq",
".",
"fingerprint",
"for",
"rq",
"in",
"self",
".",
"CriticalRequests",
"(",
")",
")"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/android/loading/user_satisfied_lens.py#L44-L46 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scikit-learn/py2/sklearn/random_projection.py | python | johnson_lindenstrauss_min_dim | (n_samples, eps=0.1) | return (4 * np.log(n_samples) / denominator).astype(np.int) | Find a 'safe' number of components to randomly project to
The distortion introduced by a random projection `p` only changes the
distance between two points by a factor (1 +- eps) in an euclidean space
with good probability. The projection `p` is an eps-embedding as defined
by:
(1 - eps) ||u - v||^2 < ||p(u) - p(v)||^2 < (1 + eps) ||u - v||^2
Where u and v are any rows taken from a dataset of shape [n_samples,
n_features], eps is in ]0, 1[ and p is a projection by a random Gaussian
N(0, 1) matrix with shape [n_components, n_features] (or a sparse
Achlioptas matrix).
The minimum number of components to guarantee the eps-embedding is
given by:
n_components >= 4 log(n_samples) / (eps^2 / 2 - eps^3 / 3)
Note that the number of dimensions is independent of the original
number of features but instead depends on the size of the dataset:
the larger the dataset, the higher is the minimal dimensionality of
an eps-embedding.
Read more in the :ref:`User Guide <johnson_lindenstrauss>`.
Parameters
----------
n_samples : int or numpy array of int greater than 0,
Number of samples. If an array is given, it will compute
a safe number of components array-wise.
eps : float or numpy array of float in ]0,1[, optional (default=0.1)
Maximum distortion rate as defined by the Johnson-Lindenstrauss lemma.
If an array is given, it will compute a safe number of components
array-wise.
Returns
-------
n_components : int or numpy array of int,
The minimal number of components to guarantee with good probability
an eps-embedding with n_samples.
Examples
--------
>>> johnson_lindenstrauss_min_dim(1e6, eps=0.5)
663
>>> johnson_lindenstrauss_min_dim(1e6, eps=[0.5, 0.1, 0.01])
array([ 663, 11841, 1112658])
>>> johnson_lindenstrauss_min_dim([1e4, 1e5, 1e6], eps=0.1)
array([ 7894, 9868, 11841])
References
----------
.. [1] https://en.wikipedia.org/wiki/Johnson%E2%80%93Lindenstrauss_lemma
.. [2] Sanjoy Dasgupta and Anupam Gupta, 1999,
"An elementary proof of the Johnson-Lindenstrauss Lemma."
http://citeseer.ist.psu.edu/viewdoc/summary?doi=10.1.1.45.3654 | Find a 'safe' number of components to randomly project to | [
"Find",
"a",
"safe",
"number",
"of",
"components",
"to",
"randomly",
"project",
"to"
def johnson_lindenstrauss_min_dim(n_samples, eps=0.1):
    """Find a 'safe' number of components to randomly project to.

    The distortion introduced by a random projection `p` only changes the
    distance between two points by a factor (1 +- eps) in an euclidean space
    with good probability. The projection `p` is an eps-embedding as defined
    by:

      (1 - eps) ||u - v||^2 < ||p(u) - p(v)||^2 < (1 + eps) ||u - v||^2

    where u and v are any rows taken from a dataset of shape [n_samples,
    n_features], eps is in ]0, 1[ and p is a projection by a random Gaussian
    N(0, 1) matrix with shape [n_components, n_features] (or a sparse
    Achlioptas matrix).

    The minimum number of components to guarantee the eps-embedding is
    given by:

      n_components >= 4 log(n_samples) / (eps^2 / 2 - eps^3 / 3)

    Note that the number of dimensions is independent of the original
    number of features but instead depends on the size of the dataset:
    the larger the dataset, the higher is the minimal dimensionality of
    an eps-embedding.

    Read more in the :ref:`User Guide <johnson_lindenstrauss>`.

    Parameters
    ----------
    n_samples : int or numpy array of int greater than 0,
        Number of samples. If an array is given, it will compute
        a safe number of components array-wise.

    eps : float or numpy array of float in ]0,1[, optional (default=0.1)
        Maximum distortion rate as defined by the Johnson-Lindenstrauss lemma.
        If an array is given, it will compute a safe number of components
        array-wise.

    Returns
    -------
    n_components : int or numpy array of int,
        The minimal number of components to guarantee with good probability
        an eps-embedding with n_samples.

    Examples
    --------
    >>> johnson_lindenstrauss_min_dim(1e6, eps=0.5)
    663

    >>> johnson_lindenstrauss_min_dim(1e6, eps=[0.5, 0.1, 0.01])
    array([    663,   11841, 1112658])

    >>> johnson_lindenstrauss_min_dim([1e4, 1e5, 1e6], eps=0.1)
    array([ 7894,  9868, 11841])

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Johnson%E2%80%93Lindenstrauss_lemma

    .. [2] Sanjoy Dasgupta and Anupam Gupta, 1999,
           "An elementary proof of the Johnson-Lindenstrauss Lemma."
           http://citeseer.ist.psu.edu/viewdoc/summary?doi=10.1.1.45.3654
    """
    eps = np.asarray(eps)
    n_samples = np.asarray(n_samples)

    if np.any(eps <= 0.0) or np.any(eps >= 1):
        raise ValueError(
            "The JL bound is defined for eps in ]0, 1[, got %r" % eps)

    # BUG FIX: the original tested ``np.any(n_samples) <= 0``, which compares
    # a single boolean to zero and therefore never rejects *negative* sample
    # counts (np.log would then silently produce NaN). The element-wise test
    # below rejects all non-positive entries.
    if np.any(n_samples <= 0):
        raise ValueError(
            "The JL bound is defined for n_samples greater than zero, got %r"
            % n_samples)

    denominator = (eps ** 2 / 2) - (eps ** 3 / 3)
    # ``np.int`` was a deprecated alias for builtin int (removed in
    # NumPy 1.24); ``astype(int)`` is the behavior-identical replacement.
    return (4 * np.log(n_samples) / denominator).astype(int)
"def",
"johnson_lindenstrauss_min_dim",
"(",
"n_samples",
",",
"eps",
"=",
"0.1",
")",
":",
"eps",
"=",
"np",
".",
"asarray",
"(",
"eps",
")",
"n_samples",
"=",
"np",
".",
"asarray",
"(",
"n_samples",
")",
"if",
"np",
".",
"any",
"(",
"eps",
"<=",
"0... | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py2/sklearn/random_projection.py#L54-L133 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/bdb.py | python | Bdb.dispatch_return | (self, frame, arg) | return self.trace_dispatch | Invoke user function and return trace function for return event.
If the debugger stops on this function return, invoke
self.user_return(). Raise BdbQuit if self.quitting is set.
Return self.trace_dispatch to continue tracing in this scope. | Invoke user function and return trace function for return event. | [
"Invoke",
"user",
"function",
"and",
"return",
"trace",
"function",
"for",
"return",
"event",
"."
def dispatch_return(self, frame, arg):
    """Invoke user function and return trace function for return event.

    If the debugger stops on this function return, invoke
    self.user_return(). Raise BdbQuit if self.quitting is set.
    Return self.trace_dispatch to continue tracing in this scope.
    """
    if self.stop_here(frame) or frame == self.returnframe:
        # Ignore return events in generator except when stepping.
        if self.stopframe and frame.f_code.co_flags & GENERATOR_AND_COROUTINE_FLAGS:
            return self.trace_dispatch
        try:
            # Remember which frame is returning while user_return() runs;
            # presumably other bdb machinery inspects frame_returning to
            # special-case a 'step' issued at this point — confirm in bdb.
            self.frame_returning = frame
            self.user_return(frame, arg)
        finally:
            # Always clear the marker, even if user_return() raised.
            self.frame_returning = None
        if self.quitting: raise BdbQuit
        # The user issued a 'next' or 'until' command.
        if self.stopframe is frame and self.stoplineno != -1:
            self._set_stopinfo(None, None)
    return self.trace_dispatch
"def",
"dispatch_return",
"(",
"self",
",",
"frame",
",",
"arg",
")",
":",
"if",
"self",
".",
"stop_here",
"(",
"frame",
")",
"or",
"frame",
"==",
"self",
".",
"returnframe",
":",
"# Ignore return events in generator except when stepping.",
"if",
"self",
".",
... | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/bdb.py#L138-L158 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/py3/scipy/interpolate/fitpack2.py | python | _BivariateSplineBase.get_knots | (self) | return self.tck[:2] | Return a tuple (tx,ty) where tx,ty contain knots positions
of the spline with respect to x-, y-variable, respectively.
The position of interior and additional knots are given as
t[k+1:-k-1] and t[:k+1]=b, t[-k-1:]=e, respectively. | Return a tuple (tx,ty) where tx,ty contain knots positions
of the spline with respect to x-, y-variable, respectively.
The position of interior and additional knots are given as
t[k+1:-k-1] and t[:k+1]=b, t[-k-1:]=e, respectively. | [
"Return",
"a",
"tuple",
"(",
"tx",
"ty",
")",
"where",
"tx",
"ty",
"contain",
"knots",
"positions",
"of",
"the",
"spline",
"with",
"respect",
"to",
"x",
"-",
"y",
"-",
"variable",
"respectively",
".",
"The",
"position",
"of",
"interior",
"and",
def get_knots(self):
    """Return a tuple (tx,ty) where tx,ty contain knots positions
    of the spline with respect to x-, y-variable, respectively.
    The position of interior and additional knots are given as
    t[k+1:-k-1] and t[:k+1]=b, t[-k-1:]=e, respectively.
    """
    # tck stores (tx, ty, coefficients); expose just the knot vectors.
    knots = self.tck
    return knots[:2]
"def",
"get_knots",
"(",
"self",
")",
":",
"return",
"self",
".",
"tck",
"[",
":",
"2",
"]"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/interpolate/fitpack2.py#L788-L794 | |
apache/incubator-mxnet | f03fb23f1d103fec9541b5ae59ee06b1734a51d9 | python/mxnet/ndarray/numpy/_op.py | python | logical_or | (x1, x2, out=None) | return _api_internal.logical_or(x1, x2, out) | Compute the truth value of x1 OR x2 element-wise.
Parameters
----------
x1, x2 : array_like
Logical OR is applied to the elements of `x1` and `x2`.
If ``x1.shape != x2.shape``, they must be broadcastable to a common
shape (which becomes the shape of the output).
out : ndarray, None, or tuple of ndarray and None, optional
A location into which the result is stored. If provided, it must have
a shape that the inputs broadcast to. If not provided or `None`,
a freshly-allocated array is returned. A tuple (possible only as a
keyword argument) must have length equal to the number of outputs.
Returns
-------
y : ndarray or bool
Boolean result of the logical OR operation applied to the elements
of `x1` and `x2`; the shape is determined by broadcasting.
This is a scalar if both `x1` and `x2` are scalars.
See Also
--------
logical_and, logical_not, logical_xor, bitwise_or
Examples
--------
>>> np.logical_or(True, False)
True
>>> np.logical_or(np.array([True, True], dtype='bool'), np.array([False, True], dtype='bool'))
array([True, True]) | Compute the truth value of x1 OR x2 element-wise.
Parameters
----------
x1, x2 : array_like
Logical OR is applied to the elements of `x1` and `x2`.
If ``x1.shape != x2.shape``, they must be broadcastable to a common
shape (which becomes the shape of the output).
out : ndarray, None, or tuple of ndarray and None, optional
A location into which the result is stored. If provided, it must have
a shape that the inputs broadcast to. If not provided or `None`,
a freshly-allocated array is returned. A tuple (possible only as a
keyword argument) must have length equal to the number of outputs.
Returns
-------
y : ndarray or bool
Boolean result of the logical OR operation applied to the elements
of `x1` and `x2`; the shape is determined by broadcasting.
This is a scalar if both `x1` and `x2` are scalars.
See Also
--------
logical_and, logical_not, logical_xor, bitwise_or
Examples
--------
>>> np.logical_or(True, False)
True
>>> np.logical_or(np.array([True, True], dtype='bool'), np.array([False, True], dtype='bool'))
array([True, True]) | [
"Compute",
"the",
"truth",
"value",
"of",
"x1",
"OR",
"x2",
"element",
"-",
"wise",
".",
"Parameters",
"----------",
"x1",
"x2",
":",
"array_like",
"Logical",
"OR",
"is",
"applied",
"to",
"the",
"elements",
"of",
"x1",
"and",
"x2",
".",
"If",
"x1",
def logical_or(x1, x2, out=None):
    """
    Compute the truth value of x1 OR x2 element-wise.

    Parameters
    ----------
    x1, x2 : array_like
        Logical OR is applied to the elements of `x1` and `x2`.
        If ``x1.shape != x2.shape``, they must be broadcastable to a common
        shape (which becomes the shape of the output).
    out : ndarray, None, or tuple of ndarray and None, optional
        A location into which the result is stored. If provided, it must have
        a shape that the inputs broadcast to. If not provided or `None`,
        a freshly-allocated array is returned.

    Returns
    -------
    y : ndarray or bool
        Boolean result of the logical OR operation applied to the elements
        of `x1` and `x2`; the shape is determined by broadcasting.
        This is a scalar if both `x1` and `x2` are scalars.

    See Also
    --------
    logical_and, logical_not, logical_xor, bitwise_or

    Examples
    --------
    >>> np.logical_or(True, False)
    True
    >>> np.logical_or(np.array([True, True], dtype='bool'), np.array([False, True], dtype='bool'))
    array([True, True])
    """
    # Two plain Python scalars take the official NumPy path; anything
    # array-like is dispatched to the MXNet backend operator.
    both_scalars = (isinstance(x1, numeric_types)
                    and isinstance(x2, numeric_types))
    if both_scalars:
        return _np.logical_or(x1, x2, out=out)
    return _api_internal.logical_or(x1, x2, out)
"def",
"logical_or",
"(",
"x1",
",",
"x2",
",",
"out",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"x1",
",",
"numeric_types",
")",
"and",
"isinstance",
"(",
"x2",
",",
"numeric_types",
")",
":",
"return",
"_np",
".",
"logical_or",
"(",
"x1",
","... | https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/ndarray/numpy/_op.py#L7663-L7695 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Lib/mhlib.py | python | Folder.getcurrent | (self) | Return the current message. Raise Error when there is none. | Return the current message. Raise Error when there is none. | [
"Return",
"the",
"current",
"message",
".",
"Raise",
"Error",
"when",
"there",
"is",
"none",
"."
def getcurrent(self):
    """Return the current message. Raise Error when there is none."""
    # getsequences() maps sequence names to lists of message numbers;
    # 'cur' tracks the current message.  max() raises ValueError on an
    # empty list, and a missing 'cur' entry raises KeyError — both mean
    # "no current message".
    seqs = self.getsequences()
    try:
        return max(seqs['cur'])
    except (ValueError, KeyError):
        # Legacy Python 2 raise syntax (mhlib module).
        raise Error, "no cur message"
"def",
"getcurrent",
"(",
"self",
")",
":",
"seqs",
"=",
"self",
".",
"getsequences",
"(",
")",
"try",
":",
"return",
"max",
"(",
"seqs",
"[",
"'cur'",
"]",
")",
"except",
"(",
"ValueError",
",",
"KeyError",
")",
":",
"raise",
"Error",
",",
"\"no cur... | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/mhlib.py#L334-L340 | ||
raymondlu/super-animation-samples | 04234269112ff0dc32447f27a761dbbb00b8ba17 | samples/cocos2d-x-3.1/CocosLuaGame2/frameworks/cocos2d-x/tools/bindings-generator/backup/clang-llvm-3.3-pybinding/cindex.py | python | CursorKind.is_attribute | (self) | return conf.lib.clang_isAttribute(self) | Test if this is an attribute kind. | Test if this is an attribute kind. | [
"Test",
"if",
"this",
"is",
"an",
"attribute",
"kind",
"."
] | def is_attribute(self):
"""Test if this is an attribute kind."""
return conf.lib.clang_isAttribute(self) | [
"def",
"is_attribute",
"(",
"self",
")",
":",
"return",
"conf",
".",
"lib",
".",
"clang_isAttribute",
"(",
"self",
")"
] | https://github.com/raymondlu/super-animation-samples/blob/04234269112ff0dc32447f27a761dbbb00b8ba17/samples/cocos2d-x-3.1/CocosLuaGame2/frameworks/cocos2d-x/tools/bindings-generator/backup/clang-llvm-3.3-pybinding/cindex.py#L535-L537 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/xrc.py | python | XmlNode.RemoveChild | (*args, **kwargs) | return _xrc.XmlNode_RemoveChild(*args, **kwargs) | RemoveChild(self, XmlNode child) -> bool | RemoveChild(self, XmlNode child) -> bool | [
"RemoveChild",
"(",
"self",
"XmlNode",
"child",
")",
"-",
">",
"bool"
] | def RemoveChild(*args, **kwargs):
"""RemoveChild(self, XmlNode child) -> bool"""
return _xrc.XmlNode_RemoveChild(*args, **kwargs) | [
"def",
"RemoveChild",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_xrc",
".",
"XmlNode_RemoveChild",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/xrc.py#L370-L372 | |
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/tools/docs/parser.py | python | _ClassPageInfo.for_module | (self) | return False | Returns true if this object documents a module. | Returns true if this object documents a module. | [
"Returns",
"true",
"if",
"this",
"object",
"documents",
"a",
"module",
"."
] | def for_module(self):
"""Returns true if this object documents a module."""
return False | [
"def",
"for_module",
"(",
"self",
")",
":",
"return",
"False"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/tools/docs/parser.py#L989-L991 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Tools/pybench/pybench.py | python | Benchmark.compatible | (self, other) | return 1 | Return 1/0 depending on whether the benchmark is
compatible with the other Benchmark instance or not. | Return 1/0 depending on whether the benchmark is
compatible with the other Benchmark instance or not. | [
"Return",
"1",
"/",
"0",
"depending",
"on",
"whether",
"the",
"benchmark",
"is",
"compatible",
"with",
"the",
"other",
"Benchmark",
"instance",
"or",
"not",
"."
] | def compatible(self, other):
""" Return 1/0 depending on whether the benchmark is
compatible with the other Benchmark instance or not.
"""
if self.version != other.version:
return 0
if (self.machine_details == other.machine_details and
self.timer != other.timer):
return 0
if (self.calibration_runs == 0 and
other.calibration_runs != 0):
return 0
if (self.calibration_runs != 0 and
other.calibration_runs == 0):
return 0
return 1 | [
"def",
"compatible",
"(",
"self",
",",
"other",
")",
":",
"if",
"self",
".",
"version",
"!=",
"other",
".",
"version",
":",
"return",
"0",
"if",
"(",
"self",
".",
"machine_details",
"==",
"other",
".",
"machine_details",
"and",
"self",
".",
"timer",
"!... | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Tools/pybench/pybench.py#L454-L471 | |
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/client/session.py | python | BaseSession.partial_run | (self, handle, fetches, feed_dict=None) | return self._run(handle, fetches, feed_dict, None, None) | Continues the execution with more feeds and fetches.
This is EXPERIMENTAL and subject to change.
To use partial execution, a user first calls `partial_run_setup()` and
then a sequence of `partial_run()`. `partial_run_setup` specifies the
list of feeds and fetches that will be used in the subsequent
`partial_run` calls.
The optional `feed_dict` argument allows the caller to override
the value of tensors in the graph. See run() for more information.
Below is a simple example:
```python
a = array_ops.placeholder(dtypes.float32, shape=[])
b = array_ops.placeholder(dtypes.float32, shape=[])
c = array_ops.placeholder(dtypes.float32, shape=[])
r1 = math_ops.add(a, b)
r2 = math_ops.multiply(r1, c)
h = sess.partial_run_setup([r1, r2], [a, b, c])
res = sess.partial_run(h, r1, feed_dict={a: 1, b: 2})
res = sess.partial_run(h, r2, feed_dict={c: res})
```
Args:
handle: A handle for a sequence of partial runs.
fetches: A single graph element, a list of graph elements, or a dictionary
whose values are graph elements or lists of graph elements (see
documentation for `run`).
feed_dict: A dictionary that maps graph elements to values (described
above).
Returns:
Either a single value if `fetches` is a single graph element, or
a list of values if `fetches` is a list, or a dictionary with the
same keys as `fetches` if that is a dictionary
(see documentation for `run`).
Raises:
tf.errors.OpError: Or one of its subclasses on error. | Continues the execution with more feeds and fetches. | [
"Continues",
"the",
"execution",
"with",
"more",
"feeds",
"and",
"fetches",
"."
] | def partial_run(self, handle, fetches, feed_dict=None):
"""Continues the execution with more feeds and fetches.
This is EXPERIMENTAL and subject to change.
To use partial execution, a user first calls `partial_run_setup()` and
then a sequence of `partial_run()`. `partial_run_setup` specifies the
list of feeds and fetches that will be used in the subsequent
`partial_run` calls.
The optional `feed_dict` argument allows the caller to override
the value of tensors in the graph. See run() for more information.
Below is a simple example:
```python
a = array_ops.placeholder(dtypes.float32, shape=[])
b = array_ops.placeholder(dtypes.float32, shape=[])
c = array_ops.placeholder(dtypes.float32, shape=[])
r1 = math_ops.add(a, b)
r2 = math_ops.multiply(r1, c)
h = sess.partial_run_setup([r1, r2], [a, b, c])
res = sess.partial_run(h, r1, feed_dict={a: 1, b: 2})
res = sess.partial_run(h, r2, feed_dict={c: res})
```
Args:
handle: A handle for a sequence of partial runs.
fetches: A single graph element, a list of graph elements, or a dictionary
whose values are graph elements or lists of graph elements (see
documentation for `run`).
feed_dict: A dictionary that maps graph elements to values (described
above).
Returns:
Either a single value if `fetches` is a single graph element, or
a list of values if `fetches` is a list, or a dictionary with the
same keys as `fetches` if that is a dictionary
(see documentation for `run`).
Raises:
tf.errors.OpError: Or one of its subclasses on error.
"""
# TODO(touts): Support feeding and fetching the same tensor.
return self._run(handle, fetches, feed_dict, None, None) | [
"def",
"partial_run",
"(",
"self",
",",
"handle",
",",
"fetches",
",",
"feed_dict",
"=",
"None",
")",
":",
"# TODO(touts): Support feeding and fetching the same tensor.",
"return",
"self",
".",
"_run",
"(",
"handle",
",",
"fetches",
",",
"feed_dict",
",",
"None",
... | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/client/session.py#L979-L1024 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/AWSPythonSDK/1.5.8/boto3/session.py | python | Session.get_credentials | (self) | return self._session.get_credentials() | Return the :class:`botocore.credential.Credential` object
associated with this session. If the credentials have not
yet been loaded, this will attempt to load them. If they
have already been loaded, this will return the cached
credentials. | Return the :class:`botocore.credential.Credential` object
associated with this session. If the credentials have not
yet been loaded, this will attempt to load them. If they
have already been loaded, this will return the cached
credentials. | [
"Return",
"the",
":",
"class",
":",
"botocore",
".",
"credential",
".",
"Credential",
"object",
"associated",
"with",
"this",
"session",
".",
"If",
"the",
"credentials",
"have",
"not",
"yet",
"been",
"loaded",
"this",
"will",
"attempt",
"to",
"load",
"them",... | def get_credentials(self):
"""
Return the :class:`botocore.credential.Credential` object
associated with this session. If the credentials have not
yet been loaded, this will attempt to load them. If they
have already been loaded, this will return the cached
credentials.
"""
return self._session.get_credentials() | [
"def",
"get_credentials",
"(",
"self",
")",
":",
"return",
"self",
".",
"_session",
".",
"get_credentials",
"(",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/boto3/session.py#L175-L183 | |
apache/incubator-mxnet | f03fb23f1d103fec9541b5ae59ee06b1734a51d9 | python/mxnet/contrib/text/embedding.py | python | create | (embedding_name, **kwargs) | return create_text_embedding(embedding_name, **kwargs) | Creates an instance of token embedding.
Creates a token embedding instance by loading embedding vectors from an externally hosted
pre-trained token embedding file, such as those of GloVe and FastText. To get all the valid
`embedding_name` and `pretrained_file_name`, use
`mxnet.contrib.text.embedding.get_pretrained_file_names()`.
Parameters
----------
embedding_name : str
The token embedding name (case-insensitive).
Returns
-------
An instance of `mxnet.contrib.text.glossary._TokenEmbedding`:
A token embedding instance that loads embedding vectors from an externally hosted
pre-trained token embedding file. | Creates an instance of token embedding. | [
"Creates",
"an",
"instance",
"of",
"token",
"embedding",
"."
] | def create(embedding_name, **kwargs):
"""Creates an instance of token embedding.
Creates a token embedding instance by loading embedding vectors from an externally hosted
pre-trained token embedding file, such as those of GloVe and FastText. To get all the valid
`embedding_name` and `pretrained_file_name`, use
`mxnet.contrib.text.embedding.get_pretrained_file_names()`.
Parameters
----------
embedding_name : str
The token embedding name (case-insensitive).
Returns
-------
An instance of `mxnet.contrib.text.glossary._TokenEmbedding`:
A token embedding instance that loads embedding vectors from an externally hosted
pre-trained token embedding file.
"""
create_text_embedding = registry.get_create_func(_TokenEmbedding, 'token embedding')
return create_text_embedding(embedding_name, **kwargs) | [
"def",
"create",
"(",
"embedding_name",
",",
"*",
"*",
"kwargs",
")",
":",
"create_text_embedding",
"=",
"registry",
".",
"get_create_func",
"(",
"_TokenEmbedding",
",",
"'token embedding'",
")",
"return",
"create_text_embedding",
"(",
"embedding_name",
",",
"*",
... | https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/contrib/text/embedding.py#L63-L87 | |
IntelligentSoftwareSystems/Galois | 07514b288f708082430304f3d8b934fb3e2a821f | scripts/sparse-matrices/diff_edgelists.py | python | main | (argv) | return 0 if differ.report() else 1 | Main entry point when run as a program. | Main entry point when run as a program. | [
"Main",
"entry",
"point",
"when",
"run",
"as",
"a",
"program",
"."
] | def main(argv):
"""Main entry point when run as a program."""
import argparse
parser = argparse.ArgumentParser(
description="Display differences between edge list files"
)
parser.add_argument('--asymmetric', dest='symmetric', action='store_false',
help="Do not assume a symmetric matrix")
parser.add_argument('--quick', action='store_true',
help="Stop after first error")
parser.add_argument('--quiet', '-q', action='count', default=0,
help="Show less output")
parser.add_argument('--verbose', '-v', action='count', default=1,
help="Show more output")
parser.add_argument('files', type=argparse.FileType('r'), nargs='*',
help="Files to compare", metavar='file')
parser.set_defaults(
files=[sys.stdin],
)
args = parser.parse_args(argv)
args.verbose -= args.quiet
differ = DiffEdgelists(symmetric=args.symmetric, verbose=args.verbose,
quick=args.quick)
#import statprof
#statprof.start()
differ.diff(args.files)
for filehandle in args.files:
filehandle.close()
#statprof.stop()
#statprof.display()
return 0 if differ.report() else 1 | [
"def",
"main",
"(",
"argv",
")",
":",
"import",
"argparse",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"\"Display differences between edge list files\"",
")",
"parser",
".",
"add_argument",
"(",
"'--asymmetric'",
",",
"dest",
"=",
"... | https://github.com/IntelligentSoftwareSystems/Galois/blob/07514b288f708082430304f3d8b934fb3e2a821f/scripts/sparse-matrices/diff_edgelists.py#L196-L226 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | catboost/python-package/catboost/core.py | python | _get_features_indices | (features, feature_names) | return features | Parameters
----------
features :
must be a sequence of either integers or strings
if it contains strings 'feature_names' parameter must be defined and string ids from 'features'
must represent a subset of in 'feature_names'
feature_names :
A sequence of string ids for features or None.
Used to get feature indices for string ids in 'features' parameter | Parameters
----------
features :
must be a sequence of either integers or strings
if it contains strings 'feature_names' parameter must be defined and string ids from 'features'
must represent a subset of in 'feature_names' | [
"Parameters",
"----------",
"features",
":",
"must",
"be",
"a",
"sequence",
"of",
"either",
"integers",
"or",
"strings",
"if",
"it",
"contains",
"strings",
"feature_names",
"parameter",
"must",
"be",
"defined",
"and",
"string",
"ids",
"from",
"features",
"must",... | def _get_features_indices(features, feature_names):
"""
Parameters
----------
features :
must be a sequence of either integers or strings
if it contains strings 'feature_names' parameter must be defined and string ids from 'features'
must represent a subset of in 'feature_names'
feature_names :
A sequence of string ids for features or None.
Used to get feature indices for string ids in 'features' parameter
"""
if (not isinstance(features, (Sequence, np.ndarray))) or isinstance(features, (str, bytes, bytearray)):
raise CatBoostError("feature names should be a sequence, but got " + repr(features))
if feature_names is not None:
return [
feature_names.index(f) if isinstance(f, STRING_TYPES) else f
for f in features
]
else:
for f in features:
if isinstance(f, STRING_TYPES):
raise CatBoostError("features parameter contains string value '{}' but feature names "
"for a dataset are not specified".format(f))
return features | [
"def",
"_get_features_indices",
"(",
"features",
",",
"feature_names",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"features",
",",
"(",
"Sequence",
",",
"np",
".",
"ndarray",
")",
")",
")",
"or",
"isinstance",
"(",
"features",
",",
"(",
"str",
",",
... | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/catboost/python-package/catboost/core.py#L237-L262 | |
sfzhang15/FaceBoxes | b52cc92f9362d3adc08d54666aeb9ebb62fdb7da | scripts/cpp_lint.py | python | CheckForNewlineAtEOF | (filename, lines, error) | Logs an error if there is no newline char at the end of the file.
Args:
filename: The name of the current file.
lines: An array of strings, each representing a line of the file.
error: The function to call with any errors found. | Logs an error if there is no newline char at the end of the file. | [
"Logs",
"an",
"error",
"if",
"there",
"is",
"no",
"newline",
"char",
"at",
"the",
"end",
"of",
"the",
"file",
"."
] | def CheckForNewlineAtEOF(filename, lines, error):
"""Logs an error if there is no newline char at the end of the file.
Args:
filename: The name of the current file.
lines: An array of strings, each representing a line of the file.
error: The function to call with any errors found.
"""
# The array lines() was created by adding two newlines to the
# original file (go figure), then splitting on \n.
# To verify that the file ends in \n, we just have to make sure the
# last-but-two element of lines() exists and is empty.
if len(lines) < 3 or lines[-2]:
error(filename, len(lines) - 2, 'whitespace/ending_newline', 5,
'Could not find a newline character at the end of the file.') | [
"def",
"CheckForNewlineAtEOF",
"(",
"filename",
",",
"lines",
",",
"error",
")",
":",
"# The array lines() was created by adding two newlines to the",
"# original file (go figure), then splitting on \\n.",
"# To verify that the file ends in \\n, we just have to make sure the",
"# last-but-... | https://github.com/sfzhang15/FaceBoxes/blob/b52cc92f9362d3adc08d54666aeb9ebb62fdb7da/scripts/cpp_lint.py#L1508-L1523 | ||
mindspore-ai/mindspore | fb8fd3338605bb34fa5cea054e535a8b1d753fab | mindspore/python/mindspore/_extends/graph_kernel/model/model_builder.py | python | GraphBuilder.op | (self, prim, output, inputs, attrs=None) | Insert an operator into graph | Insert an operator into graph | [
"Insert",
"an",
"operator",
"into",
"graph"
] | def op(self, prim, output, inputs, attrs=None):
"""Insert an operator into graph"""
if attrs is None:
attrs = {}
if isinstance(inputs, Tensor):
inputs = [inputs]
tensor_inputs = [t for t in inputs if isinstance(t, Tensor)]
node = Operator(prim, tensor_inputs, output, attrs)
node.all_inputs = inputs
self.current.graph.add(node) | [
"def",
"op",
"(",
"self",
",",
"prim",
",",
"output",
",",
"inputs",
",",
"attrs",
"=",
"None",
")",
":",
"if",
"attrs",
"is",
"None",
":",
"attrs",
"=",
"{",
"}",
"if",
"isinstance",
"(",
"inputs",
",",
"Tensor",
")",
":",
"inputs",
"=",
"[",
... | https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/_extends/graph_kernel/model/model_builder.py#L87-L96 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_internal/index/collector.py | python | parse_links | (page) | Parse an HTML document, and yield its anchor elements as Link objects. | [] | def parse_links(page):
# type: (HTMLPage) -> Iterable[Link]
"""
Parse an HTML document, and yield its anchor elements as Link objects.
"""
document = html5lib.parse(
page.content,
transport_encoding=page.encoding,
namespaceHTMLElements=False,
)
url = page.url
base_url = _determine_base_url(document, url)
for anchor in document.findall(".//a"):
link = _create_link_from_element(
anchor,
page_url=url,
base_url=base_url,
)
if link is None:
continue
yield link | [
"def",
"parse_links",
"(",
"page",
")",
":",
"# type: (HTMLPage) -> Iterable[Link]",
"document",
"=",
"html5lib",
".",
"parse",
"(",
"page",
".",
"content",
",",
"transport_encoding",
"=",
"page",
".",
"encoding",
",",
"namespaceHTMLElements",
"=",
"False",
",",
... | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_internal/index/collector.py#L657-L699 | |||
cvmfs/cvmfs | 4637bdb5153178eadf885c1acf37bdc5c685bf8a | cpplint.py | python | NestingState.SeenOpenBrace | (self) | return (not self.stack) or self.stack[-1].seen_open_brace | Check if we have seen the opening brace for the innermost block.
Returns:
True if we have seen the opening brace, False if the innermost
block is still expecting an opening brace. | Check if we have seen the opening brace for the innermost block. | [
"Check",
"if",
"we",
"have",
"seen",
"the",
"opening",
"brace",
"for",
"the",
"innermost",
"block",
"."
] | def SeenOpenBrace(self):
"""Check if we have seen the opening brace for the innermost block.
Returns:
True if we have seen the opening brace, False if the innermost
block is still expecting an opening brace.
"""
return (not self.stack) or self.stack[-1].seen_open_brace | [
"def",
"SeenOpenBrace",
"(",
"self",
")",
":",
"return",
"(",
"not",
"self",
".",
"stack",
")",
"or",
"self",
".",
"stack",
"[",
"-",
"1",
"]",
".",
"seen_open_brace"
] | https://github.com/cvmfs/cvmfs/blob/4637bdb5153178eadf885c1acf37bdc5c685bf8a/cpplint.py#L2225-L2232 | |
wujixiu/helmet-detection | 8eff5c59ddfba5a29e0b76aeb48babcb49246178 | hardhat-wearing-detection/SSD-RPA/scripts/cpp_lint.py | python | ReplaceAll | (pattern, rep, s) | return _regexp_compile_cache[pattern].sub(rep, s) | Replaces instances of pattern in a string with a replacement.
The compiled regex is kept in a cache shared by Match and Search.
Args:
pattern: regex pattern
rep: replacement text
s: search string
Returns:
string with replacements made (or original string if no replacements) | Replaces instances of pattern in a string with a replacement. | [
"Replaces",
"instances",
"of",
"pattern",
"in",
"a",
"string",
"with",
"a",
"replacement",
"."
] | def ReplaceAll(pattern, rep, s):
"""Replaces instances of pattern in a string with a replacement.
The compiled regex is kept in a cache shared by Match and Search.
Args:
pattern: regex pattern
rep: replacement text
s: search string
Returns:
string with replacements made (or original string if no replacements)
"""
if pattern not in _regexp_compile_cache:
_regexp_compile_cache[pattern] = sre_compile.compile(pattern)
return _regexp_compile_cache[pattern].sub(rep, s) | [
"def",
"ReplaceAll",
"(",
"pattern",
",",
"rep",
",",
"s",
")",
":",
"if",
"pattern",
"not",
"in",
"_regexp_compile_cache",
":",
"_regexp_compile_cache",
"[",
"pattern",
"]",
"=",
"sre_compile",
".",
"compile",
"(",
"pattern",
")",
"return",
"_regexp_compile_c... | https://github.com/wujixiu/helmet-detection/blob/8eff5c59ddfba5a29e0b76aeb48babcb49246178/hardhat-wearing-detection/SSD-RPA/scripts/cpp_lint.py#L525-L540 | |
mongodb/mongo | d8ff665343ad29cf286ee2cf4a1960d29371937b | buildscripts/blackduck_hub.py | python | BlackDuckConfig.__init__ | (self) | Init Black Duck config from disk. | Init Black Duck config from disk. | [
"Init",
"Black",
"Duck",
"config",
"from",
"disk",
"."
] | def __init__(self):
"""Init Black Duck config from disk."""
if not os.path.exists(BLACKDUCK_RESTCONFIG):
raise ValueError("Cannot find %s for blackduck configuration" % (BLACKDUCK_RESTCONFIG))
with open(BLACKDUCK_RESTCONFIG, "r") as rfh:
rc = json.loads(rfh.read())
self.url = rc["baseurl"]
self.username = rc["username"]
self.password = rc["password"] | [
"def",
"__init__",
"(",
"self",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"BLACKDUCK_RESTCONFIG",
")",
":",
"raise",
"ValueError",
"(",
"\"Cannot find %s for blackduck configuration\"",
"%",
"(",
"BLACKDUCK_RESTCONFIG",
")",
")",
"with",
"ope... | https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/buildscripts/blackduck_hub.py#L550-L560 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/lib/agw/flatnotebook.py | python | PageContainer.OnDropTarget | (self, x, y, nTabPage, wnd_oldContainer) | return wx.DragMove | Handles the drop action from a drag and drop operation.
:param `x`: the x position of the drop action;
:param `y`: the y position of the drop action;
:param `nTabPage`: the index of the tab being dropped;
:param `wnd_oldContainer`: the :class:`FlatNotebook` to which the dropped tab previously
belonged to. | Handles the drop action from a drag and drop operation. | [
"Handles",
"the",
"drop",
"action",
"from",
"a",
"drag",
"and",
"drop",
"operation",
"."
] | def OnDropTarget(self, x, y, nTabPage, wnd_oldContainer):
"""
Handles the drop action from a drag and drop operation.
:param `x`: the x position of the drop action;
:param `y`: the y position of the drop action;
:param `nTabPage`: the index of the tab being dropped;
:param `wnd_oldContainer`: the :class:`FlatNotebook` to which the dropped tab previously
belonged to.
"""
# Disable drag'n'drop for disabled tab
if len(wnd_oldContainer._pagesInfoVec) > nTabPage and \
not wnd_oldContainer._pagesInfoVec[nTabPage].GetEnabled():
return wx.DragCancel
self._isdragging = True
oldContainer = wnd_oldContainer
nIndex = -1
where, nIndex = self.HitTest(wx.Point(x, y))
oldNotebook = oldContainer.GetParent()
newNotebook = self.GetParent()
if oldNotebook == newNotebook:
if nTabPage >= 0:
if where == FNB_TAB:
self.MoveTabPage(nTabPage, nIndex)
event = FlatNotebookEvent(wxEVT_FLATNOTEBOOK_PAGE_DROPPED, self.GetParent().GetId())
event.SetSelection(nIndex)
event.SetOldSelection(nTabPage)
event.SetEventObject(self.GetParent())
self.GetParent().GetEventHandler().ProcessEvent(event)
elif self.GetParent().GetAGWWindowStyleFlag() & FNB_ALLOW_FOREIGN_DND:
if wx.Platform in ["__WXMSW__", "__WXGTK__", "__WXMAC__"]:
if nTabPage >= 0:
window = oldNotebook.GetPage(nTabPage)
if window:
where, nIndex = newNotebook._pages.HitTest(wx.Point(x, y))
caption = oldContainer.GetPageText(nTabPage)
imageindex = oldContainer.GetPageImage(nTabPage)
oldNotebook.RemovePage(nTabPage)
window.Reparent(newNotebook)
if imageindex >= 0:
bmp = oldNotebook.GetImageList().GetBitmap(imageindex)
newImageList = newNotebook.GetImageList()
if not newImageList:
xbmp, ybmp = bmp.GetWidth(), bmp.GetHeight()
newImageList = wx.ImageList(xbmp, ybmp)
imageindex = 0
else:
imageindex = newImageList.GetImageCount()
newImageList.Add(bmp)
newNotebook.SetImageList(newImageList)
newNotebook.InsertPage(nIndex, window, caption, True, imageindex)
event = FlatNotebookDragEvent(wxEVT_FLATNOTEBOOK_PAGE_DROPPED_FOREIGN, self.GetParent().GetId())
event.SetSelection(nIndex)
event.SetOldSelection(nTabPage)
event.SetNotebook(newNotebook)
event.SetOldNotebook(oldNotebook)
event.SetEventObject(self.GetParent())
self.GetParent().GetEventHandler().ProcessEvent(event)
self._isdragging = False
return wx.DragMove | [
"def",
"OnDropTarget",
"(",
"self",
",",
"x",
",",
"y",
",",
"nTabPage",
",",
"wnd_oldContainer",
")",
":",
"# Disable drag'n'drop for disabled tab",
"if",
"len",
"(",
"wnd_oldContainer",
".",
"_pagesInfoVec",
")",
">",
"nTabPage",
"and",
"not",
"wnd_oldContainer"... | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/flatnotebook.py#L6075-L6154 | |
facebookresearch/ELF | 1f790173095cd910976d9f651b80beb872ec5d12 | vendor/pybind11/tools/clang/cindex.py | python | TranslationUnit.reparse | (self, unsaved_files=None, options=0) | Reparse an already parsed translation unit.
In-memory contents for files can be provided by passing a list of pairs
as unsaved_files, the first items should be the filenames to be mapped
and the second should be the contents to be substituted for the
file. The contents may be passed as strings or file objects. | Reparse an already parsed translation unit. | [
"Reparse",
"an",
"already",
"parsed",
"translation",
"unit",
"."
] | def reparse(self, unsaved_files=None, options=0):
"""
Reparse an already parsed translation unit.
In-memory contents for files can be provided by passing a list of pairs
as unsaved_files, the first items should be the filenames to be mapped
and the second should be the contents to be substituted for the
file. The contents may be passed as strings or file objects.
"""
if unsaved_files is None:
unsaved_files = []
unsaved_files_array = 0
if len(unsaved_files):
unsaved_files_array = (_CXUnsavedFile * len(unsaved_files))()
for i,(name,value) in enumerate(unsaved_files):
if not isinstance(value, str):
# FIXME: It would be great to support an efficient version
# of this, one day.
value = value.read()
print(value)
if not isinstance(value, str):
raise TypeError('Unexpected unsaved file contents.')
unsaved_files_array[i].name = name
unsaved_files_array[i].contents = value
unsaved_files_array[i].length = len(value)
ptr = conf.lib.clang_reparseTranslationUnit(self, len(unsaved_files),
unsaved_files_array, options) | [
"def",
"reparse",
"(",
"self",
",",
"unsaved_files",
"=",
"None",
",",
"options",
"=",
"0",
")",
":",
"if",
"unsaved_files",
"is",
"None",
":",
"unsaved_files",
"=",
"[",
"]",
"unsaved_files_array",
"=",
"0",
"if",
"len",
"(",
"unsaved_files",
")",
":",
... | https://github.com/facebookresearch/ELF/blob/1f790173095cd910976d9f651b80beb872ec5d12/vendor/pybind11/tools/clang/cindex.py#L2686-L2713 | ||
klzgrad/naiveproxy | ed2c513637c77b18721fe428d7ed395b4d284c83 | src/tools/grit/grit/gather/txt.py | python | TxtFile.GetCliques | (self) | return [self.clique_] | Returns the MessageClique objects for all translateable portions. | Returns the MessageClique objects for all translateable portions. | [
"Returns",
"the",
"MessageClique",
"objects",
"for",
"all",
"translateable",
"portions",
"."
] | def GetCliques(self):
'''Returns the MessageClique objects for all translateable portions.'''
return [self.clique_] | [
"def",
"GetCliques",
"(",
"self",
")",
":",
"return",
"[",
"self",
".",
"clique_",
"]"
] | https://github.com/klzgrad/naiveproxy/blob/ed2c513637c77b18721fe428d7ed395b4d284c83/src/tools/grit/grit/gather/txt.py#L30-L32 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.