nwo
stringlengths
5
86
sha
stringlengths
40
40
path
stringlengths
4
189
language
stringclasses
1 value
identifier
stringlengths
1
94
parameters
stringlengths
2
4.03k
argument_list
stringclasses
1 value
return_statement
stringlengths
0
11.5k
docstring
stringlengths
1
33.2k
docstring_summary
stringlengths
0
5.15k
docstring_tokens
list
function
stringlengths
34
151k
function_tokens
list
url
stringlengths
90
278
Tencent/CMONGO
c40380caa14e05509f46993aa8b8da966b09b0b5
src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Node/__init__.py
python
Node.get_contents
(self)
return _get_contents_map[self._func_get_contents](self)
Fetch the contents of the entry.
Fetch the contents of the entry.
[ "Fetch", "the", "contents", "of", "the", "entry", "." ]
def get_contents(self): """Fetch the contents of the entry.""" return _get_contents_map[self._func_get_contents](self)
[ "def", "get_contents", "(", "self", ")", ":", "return", "_get_contents_map", "[", "self", ".", "_func_get_contents", "]", "(", "self", ")" ]
https://github.com/Tencent/CMONGO/blob/c40380caa14e05509f46993aa8b8da966b09b0b5/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Node/__init__.py#L1223-L1225
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/re.py
python
findall
(pattern, string, flags=0)
return _compile(pattern, flags).findall(string)
Return a list of all non-overlapping matches in the string. If one or more groups are present in the pattern, return a list of groups; this will be a list of tuples if the pattern has more than one group. Empty matches are included in the result.
Return a list of all non-overlapping matches in the string.
[ "Return", "a", "list", "of", "all", "non", "-", "overlapping", "matches", "in", "the", "string", "." ]
def findall(pattern, string, flags=0): """Return a list of all non-overlapping matches in the string. If one or more groups are present in the pattern, return a list of groups; this will be a list of tuples if the pattern has more than one group. Empty matches are included in the result.""" return _compile(pattern, flags).findall(string)
[ "def", "findall", "(", "pattern", ",", "string", ",", "flags", "=", "0", ")", ":", "return", "_compile", "(", "pattern", ",", "flags", ")", ".", "findall", "(", "string", ")" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/re.py#L169-L177
BlzFans/wke
b0fa21158312e40c5fbd84682d643022b6c34a93
cygwin/lib/python2.6/sets.py
python
Set.__isub__
(self, other)
return self
Remove all elements of another set from this set.
Remove all elements of another set from this set.
[ "Remove", "all", "elements", "of", "another", "set", "from", "this", "set", "." ]
def __isub__(self, other): """Remove all elements of another set from this set.""" self._binary_sanity_check(other) self.difference_update(other) return self
[ "def", "__isub__", "(", "self", ",", "other", ")", ":", "self", ".", "_binary_sanity_check", "(", "other", ")", "self", ".", "difference_update", "(", "other", ")", "return", "self" ]
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/sets.py#L493-L497
benoitsteiner/tensorflow-opencl
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
tensorflow/contrib/boosted_trees/python/utils/losses.py
python
per_example_maxent_loss
(labels, weights, logits, num_classes, eps=1e-15)
Maximum entropy loss for multiclass problems. Maximum entropy is a generalization of logistic loss for the case when more than 2 classes are present. Args: labels: Rank 2 (N, 1) or Rank 1 (N) tensor of per-example labels. weights: Rank 2 (N, 1) tensor of per-example weights. logits: Rank 2 (N, K) tensor of per-example predictions, K - num of classes. num_classes: number of classes in classification task. Used to expand label indices into one-hot encodings. eps: tolerance, used as a minimum possible value. Returns: loss: A Rank 2 (N, 1) tensor of per-example maxent loss update_op: An update operation to update the loss's internal state.
Maximum entropy loss for multiclass problems.
[ "Maximum", "entropy", "loss", "for", "multiclass", "problems", "." ]
def per_example_maxent_loss(labels, weights, logits, num_classes, eps=1e-15): """Maximum entropy loss for multiclass problems. Maximum entropy is a generalization of logistic loss for the case when more than 2 classes are present. Args: labels: Rank 2 (N, 1) or Rank 1 (N) tensor of per-example labels. weights: Rank 2 (N, 1) tensor of per-example weights. logits: Rank 2 (N, K) tensor of per-example predictions, K - num of classes. num_classes: number of classes in classification task. Used to expand label indices into one-hot encodings. eps: tolerance, used as a minimum possible value. Returns: loss: A Rank 2 (N, 1) tensor of per-example maxent loss update_op: An update operation to update the loss's internal state. """ labels = math_ops.to_int64(labels) # If labels are of rank 1, make them rank 2. labels_shape = labels.get_shape() if len(labels_shape) != 2: labels = array_ops.expand_dims(labels, 1) # Labels are indices of classes, convert them to one hot encodings. target_one_hot = array_ops.one_hot(indices=labels, depth=num_classes) labels = math_ops.reduce_sum( input_tensor=target_one_hot, reduction_indices=[1]) labels = math_ops.to_float(labels) # Calculate softmax probabilities for each class. unnormalized_probs = math_ops.exp(logits) normalizers = math_ops.reduce_sum(unnormalized_probs, 1, keep_dims=True) softmax_predictions = math_ops.divide(unnormalized_probs, math_ops.add(normalizers, eps)) # Pull out the probabilities for real label. probs_for_real_class = math_ops.reduce_sum(labels * softmax_predictions, 1) # Add handling for values near 0 and 1. 
zeros = array_ops.zeros_like(probs_for_real_class, dtype=logits.dtype) + eps one_minus_eps = array_ops.ones_like( probs_for_real_class, dtype=logits.dtype) - eps # Take maximum(eps, pred) cond = (probs_for_real_class >= eps) probs_for_real_class = array_ops.where(cond, probs_for_real_class, zeros) # Take minimum(1-eps, pred) cond = (probs_for_real_class <= 1 - eps) probs_for_real_class = array_ops.where(cond, probs_for_real_class, one_minus_eps) unweighted_loss = array_ops.expand_dims(-math_ops.log(probs_for_real_class), 1) if weights is None: return unweighted_loss, control_flow_ops.no_op() else: return unweighted_loss * weights, control_flow_ops.no_op()
[ "def", "per_example_maxent_loss", "(", "labels", ",", "weights", ",", "logits", ",", "num_classes", ",", "eps", "=", "1e-15", ")", ":", "labels", "=", "math_ops", ".", "to_int64", "(", "labels", ")", "# If labels are of rank 1, make them rank 2.", "labels_shape", ...
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/contrib/boosted_trees/python/utils/losses.py#L49-L107
macchina-io/macchina.io
ef24ba0e18379c3dd48fb84e6dbf991101cb8db0
platform/JS/V8/v8/gypfiles/vs_toolchain.py
python
Update
(force=False)
return 0
Requests an update of the toolchain to the specific hashes we have at this revision. The update outputs a .json of the various configuration information required to pass to gyp which we use in |GetToolchainDir()|.
Requests an update of the toolchain to the specific hashes we have at this revision. The update outputs a .json of the various configuration information required to pass to gyp which we use in |GetToolchainDir()|.
[ "Requests", "an", "update", "of", "the", "toolchain", "to", "the", "specific", "hashes", "we", "have", "at", "this", "revision", ".", "The", "update", "outputs", "a", ".", "json", "of", "the", "various", "configuration", "information", "required", "to", "pas...
def Update(force=False): """Requests an update of the toolchain to the specific hashes we have at this revision. The update outputs a .json of the various configuration information required to pass to gyp which we use in |GetToolchainDir()|. """ if force != False and force != '--force': print >>sys.stderr, 'Unknown parameter "%s"' % force return 1 if force == '--force' or os.path.exists(json_data_file): force = True depot_tools_win_toolchain = \ bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1'))) if ((sys.platform in ('win32', 'cygwin') or force) and depot_tools_win_toolchain): import find_depot_tools depot_tools_path = find_depot_tools.add_depot_tools_to_path() # Necessary so that get_toolchain_if_necessary.py will put the VS toolkit # in the correct directory. os.environ['GYP_MSVS_VERSION'] = GetVisualStudioVersion() get_toolchain_args = [ sys.executable, os.path.join(depot_tools_path, 'win_toolchain', 'get_toolchain_if_necessary.py'), '--output-json', json_data_file, ] + _GetDesiredVsToolchainHashes() if force: get_toolchain_args.append('--force') subprocess.check_call(get_toolchain_args) return 0
[ "def", "Update", "(", "force", "=", "False", ")", ":", "if", "force", "!=", "False", "and", "force", "!=", "'--force'", ":", "print", ">>", "sys", ".", "stderr", ",", "'Unknown parameter \"%s\"'", "%", "force", "return", "1", "if", "force", "==", "'--for...
https://github.com/macchina-io/macchina.io/blob/ef24ba0e18379c3dd48fb84e6dbf991101cb8db0/platform/JS/V8/v8/gypfiles/vs_toolchain.py#L294-L325
pmq20/node-packer
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
lts/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
python
ExpandEnvVars
(string, expansions)
return string
Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the expansions list. If the variable expands to something that references another variable, this variable is expanded as well if it's in env -- until no variables present in env are left.
Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the expansions list. If the variable expands to something that references another variable, this variable is expanded as well if it's in env -- until no variables present in env are left.
[ "Expands", "$", "{", "VARIABLES", "}", "$", "(", "VARIABLES", ")", "and", "$VARIABLES", "in", "string", "per", "the", "expansions", "list", ".", "If", "the", "variable", "expands", "to", "something", "that", "references", "another", "variable", "this", "vari...
def ExpandEnvVars(string, expansions): """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the expansions list. If the variable expands to something that references another variable, this variable is expanded as well if it's in env -- until no variables present in env are left.""" for k, v in reversed(expansions): string = string.replace('${' + k + '}', v) string = string.replace('$(' + k + ')', v) string = string.replace('$' + k, v) return string
[ "def", "ExpandEnvVars", "(", "string", ",", "expansions", ")", ":", "for", "k", ",", "v", "in", "reversed", "(", "expansions", ")", ":", "string", "=", "string", ".", "replace", "(", "'${'", "+", "k", "+", "'}'", ",", "v", ")", "string", "=", "stri...
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py#L1566-L1575
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/platform.py
python
_syscmd_file
(target,default='')
Interface to the system's file command. The function uses the -b option of the file command to have it ommit the filename in its output and if possible the -L option to have the command follow symlinks. It returns default in case the command should fail.
Interface to the system's file command.
[ "Interface", "to", "the", "system", "s", "file", "command", "." ]
def _syscmd_file(target,default=''): """ Interface to the system's file command. The function uses the -b option of the file command to have it ommit the filename in its output and if possible the -L option to have the command follow symlinks. It returns default in case the command should fail. """ # We do the import here to avoid a bootstrap issue. # See c73b90b6dadd changeset. # # [..] # ranlib libpython2.7.a # gcc -o python \ # Modules/python.o \ # libpython2.7.a -lsocket -lnsl -ldl -lm # Traceback (most recent call last): # File "./setup.py", line 8, in <module> # from platform import machine as platform_machine # File "[..]/build/Lib/platform.py", line 116, in <module> # import sys,string,os,re,subprocess # File "[..]/build/Lib/subprocess.py", line 429, in <module> # import select # ImportError: No module named select import subprocess if sys.platform in ('dos','win32','win16','os2'): # XXX Others too ? return default target = _follow_symlinks(target) try: proc = subprocess.Popen(['file', target], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) except (AttributeError,os.error): return default output = proc.communicate()[0] rc = proc.wait() if not output or rc: return default else: return output
[ "def", "_syscmd_file", "(", "target", ",", "default", "=", "''", ")", ":", "# We do the import here to avoid a bootstrap issue.", "# See c73b90b6dadd changeset.", "#", "# [..]", "# ranlib libpython2.7.a", "# gcc -o python \\", "# Modules/python.o \\", "# libpython2....
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/platform.py#L1018-L1063
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
qt/applications/workbench/workbench/widgets/plotselector/view.py
python
PlotSelectorView.set_active_font
(self, plot_number, is_active)
Makes the active plot number bold, and makes a previously active bold plot number normal :param plot_number: The unique number in GlobalFigureManager :param is_active: True if plot is the active one or false to make the plot number not bold
Makes the active plot number bold, and makes a previously active bold plot number normal :param plot_number: The unique number in GlobalFigureManager :param is_active: True if plot is the active one or false to make the plot number not bold
[ "Makes", "the", "active", "plot", "number", "bold", "and", "makes", "a", "previously", "active", "bold", "plot", "number", "normal", ":", "param", "plot_number", ":", "The", "unique", "number", "in", "GlobalFigureManager", ":", "param", "is_active", ":", "True...
def set_active_font(self, plot_number, is_active): """ Makes the active plot number bold, and makes a previously active bold plot number normal :param plot_number: The unique number in GlobalFigureManager :param is_active: True if plot is the active one or false to make the plot number not bold """ with QMutexLocker(self.mutex): row, widget = self._get_row_and_widget_from_plot_number(plot_number) if row is None or widget is None: raise ValueError(f'Unable to find row and/or widget from plot_number {plot_number}') font = self.table_widget.item(row, Column.Number).font() font.setBold(is_active) self.table_widget.item(row, Column.Number).setFont(font) self.table_widget.cellWidget(row, Column.Name).line_edit.setFont(font)
[ "def", "set_active_font", "(", "self", ",", "plot_number", ",", "is_active", ")", ":", "with", "QMutexLocker", "(", "self", ".", "mutex", ")", ":", "row", ",", "widget", "=", "self", ".", "_get_row_and_widget_from_plot_number", "(", "plot_number", ")", "if", ...
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/applications/workbench/workbench/widgets/plotselector/view.py#L294-L310
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/bsddb/dbshelve.py
python
DBShelf.__getattr__
(self, name)
return getattr(self.db, name)
Many methods we can just pass through to the DB object. (See below)
Many methods we can just pass through to the DB object. (See below)
[ "Many", "methods", "we", "can", "just", "pass", "through", "to", "the", "DB", "object", ".", "(", "See", "below", ")" ]
def __getattr__(self, name): """Many methods we can just pass through to the DB object. (See below) """ return getattr(self.db, name)
[ "def", "__getattr__", "(", "self", ",", "name", ")", ":", "return", "getattr", "(", "self", ".", "db", ",", "name", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/bsddb/dbshelve.py#L136-L140
psi4/psi4
be533f7f426b6ccc263904e55122899b16663395
psi4/driver/p4util/solvers.py
python
SolverEngine.vector_axpy
(a, X, Y)
Compute scaled `vector` addition operation `a*X + Y` Parameters ---------- a : float The scale factor applied to `X` X : singlet `vector` The `vector` which will be scaled and added to `Y` Y : single `vector` The `vector` which the result of `a*X` is added to Returns ------- Y : single `vector` The solver assumes that Y is updated, and returned. So it is safe to avoid a copy of Y if possible
Compute scaled `vector` addition operation `a*X + Y`
[ "Compute", "scaled", "vector", "addition", "operation", "a", "*", "X", "+", "Y" ]
def vector_axpy(a, X, Y): """Compute scaled `vector` addition operation `a*X + Y` Parameters ---------- a : float The scale factor applied to `X` X : singlet `vector` The `vector` which will be scaled and added to `Y` Y : single `vector` The `vector` which the result of `a*X` is added to Returns ------- Y : single `vector` The solver assumes that Y is updated, and returned. So it is safe to avoid a copy of Y if possible """ pass
[ "def", "vector_axpy", "(", "a", ",", "X", ",", "Y", ")", ":", "pass" ]
https://github.com/psi4/psi4/blob/be533f7f426b6ccc263904e55122899b16663395/psi4/driver/p4util/solvers.py#L577-L595
bigartm/bigartm
47e37f982de87aa67bfd475ff1f39da696b181b3
3rdparty/protobuf-3.0.0/python/google/protobuf/internal/encoder.py
python
TagBytes
(field_number, wire_type)
return _VarintBytes(wire_format.PackTag(field_number, wire_type))
Encode the given tag and return the bytes. Only called at startup.
Encode the given tag and return the bytes. Only called at startup.
[ "Encode", "the", "given", "tag", "and", "return", "the", "bytes", ".", "Only", "called", "at", "startup", "." ]
def TagBytes(field_number, wire_type): """Encode the given tag and return the bytes. Only called at startup.""" return _VarintBytes(wire_format.PackTag(field_number, wire_type))
[ "def", "TagBytes", "(", "field_number", ",", "wire_type", ")", ":", "return", "_VarintBytes", "(", "wire_format", ".", "PackTag", "(", "field_number", ",", "wire_type", ")", ")" ]
https://github.com/bigartm/bigartm/blob/47e37f982de87aa67bfd475ff1f39da696b181b3/3rdparty/protobuf-3.0.0/python/google/protobuf/internal/encoder.py#L415-L418
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/stencil.py
python
stencil_dummy_lower
(context, builder, sig, args)
return lir.Constant(lir.IntType(types.intp.bitwidth), 0)
lowering for dummy stencil calls
lowering for dummy stencil calls
[ "lowering", "for", "dummy", "stencil", "calls" ]
def stencil_dummy_lower(context, builder, sig, args): "lowering for dummy stencil calls" return lir.Constant(lir.IntType(types.intp.bitwidth), 0)
[ "def", "stencil_dummy_lower", "(", "context", ",", "builder", ",", "sig", ",", "args", ")", ":", "return", "lir", ".", "Constant", "(", "lir", ".", "IntType", "(", "types", ".", "intp", ".", "bitwidth", ")", ",", "0", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/stencil.py#L798-L800
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/email/mime/audio.py
python
MIMEAudio.__init__
(self, _audiodata, _subtype=None, _encoder=encoders.encode_base64, *, policy=None, **_params)
Create an audio/* type MIME document. _audiodata is a string containing the raw audio data. If this data can be decoded by the standard Python `sndhdr' module, then the subtype will be automatically included in the Content-Type header. Otherwise, you can specify the specific audio subtype via the _subtype parameter. If _subtype is not given, and no subtype can be guessed, a TypeError is raised. _encoder is a function which will perform the actual encoding for transport of the image data. It takes one argument, which is this Image instance. It should use get_payload() and set_payload() to change the payload to the encoded form. It should also add any Content-Transfer-Encoding or other headers to the message as necessary. The default encoding is Base64. Any additional keyword arguments are passed to the base class constructor, which turns them into parameters on the Content-Type header.
Create an audio/* type MIME document.
[ "Create", "an", "audio", "/", "*", "type", "MIME", "document", "." ]
def __init__(self, _audiodata, _subtype=None, _encoder=encoders.encode_base64, *, policy=None, **_params): """Create an audio/* type MIME document. _audiodata is a string containing the raw audio data. If this data can be decoded by the standard Python `sndhdr' module, then the subtype will be automatically included in the Content-Type header. Otherwise, you can specify the specific audio subtype via the _subtype parameter. If _subtype is not given, and no subtype can be guessed, a TypeError is raised. _encoder is a function which will perform the actual encoding for transport of the image data. It takes one argument, which is this Image instance. It should use get_payload() and set_payload() to change the payload to the encoded form. It should also add any Content-Transfer-Encoding or other headers to the message as necessary. The default encoding is Base64. Any additional keyword arguments are passed to the base class constructor, which turns them into parameters on the Content-Type header. """ if _subtype is None: _subtype = _whatsnd(_audiodata) if _subtype is None: raise TypeError('Could not find audio MIME subtype') MIMENonMultipart.__init__(self, 'audio', _subtype, policy=policy, **_params) self.set_payload(_audiodata) _encoder(self)
[ "def", "__init__", "(", "self", ",", "_audiodata", ",", "_subtype", "=", "None", ",", "_encoder", "=", "encoders", ".", "encode_base64", ",", "*", ",", "policy", "=", "None", ",", "*", "*", "_params", ")", ":", "if", "_subtype", "is", "None", ":", "_...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/email/mime/audio.py#L45-L74
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/telemetry/third_party/web-page-replay/third_party/dns/reversename.py
python
from_address
(text)
return dns.name.from_text('.'.join(parts), origin=origin)
Convert an IPv4 or IPv6 address in textual form into a Name object whose value is the reverse-map domain name of the address. @param text: an IPv4 or IPv6 address in textual form (e.g. '127.0.0.1', '::1') @type text: str @rtype: dns.name.Name object
Convert an IPv4 or IPv6 address in textual form into a Name object whose value is the reverse-map domain name of the address.
[ "Convert", "an", "IPv4", "or", "IPv6", "address", "in", "textual", "form", "into", "a", "Name", "object", "whose", "value", "is", "the", "reverse", "-", "map", "domain", "name", "of", "the", "address", "." ]
def from_address(text): """Convert an IPv4 or IPv6 address in textual form into a Name object whose value is the reverse-map domain name of the address. @param text: an IPv4 or IPv6 address in textual form (e.g. '127.0.0.1', '::1') @type text: str @rtype: dns.name.Name object """ try: parts = list(dns.ipv6.inet_aton(text).encode('hex_codec')) origin = ipv6_reverse_domain except: parts = ['%d' % ord(byte) for byte in dns.ipv4.inet_aton(text)] origin = ipv4_reverse_domain parts.reverse() return dns.name.from_text('.'.join(parts), origin=origin)
[ "def", "from_address", "(", "text", ")", ":", "try", ":", "parts", "=", "list", "(", "dns", ".", "ipv6", ".", "inet_aton", "(", "text", ")", ".", "encode", "(", "'hex_codec'", ")", ")", "origin", "=", "ipv6_reverse_domain", "except", ":", "parts", "=",...
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/telemetry/third_party/web-page-replay/third_party/dns/reversename.py#L31-L46
pmq20/node-packer
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
current/deps/v8/third_party/jinja2/lexer.py
python
get_lexer
(environment)
return lexer
Return a lexer which is probably cached.
Return a lexer which is probably cached.
[ "Return", "a", "lexer", "which", "is", "probably", "cached", "." ]
def get_lexer(environment): """Return a lexer which is probably cached.""" key = (environment.block_start_string, environment.block_end_string, environment.variable_start_string, environment.variable_end_string, environment.comment_start_string, environment.comment_end_string, environment.line_statement_prefix, environment.line_comment_prefix, environment.trim_blocks, environment.lstrip_blocks, environment.newline_sequence, environment.keep_trailing_newline) lexer = _lexer_cache.get(key) if lexer is None: lexer = Lexer(environment) _lexer_cache[key] = lexer return lexer
[ "def", "get_lexer", "(", "environment", ")", ":", "key", "=", "(", "environment", ".", "block_start_string", ",", "environment", ".", "block_end_string", ",", "environment", ".", "variable_start_string", ",", "environment", ".", "variable_end_string", ",", "environm...
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/current/deps/v8/third_party/jinja2/lexer.py#L391-L409
FreeCAD/FreeCAD
ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/Draft/drafttaskpanels/task_circulararray.py
python
TaskPanelCircularArray.print_messages
(self)
Print messages about the operation.
Print messages about the operation.
[ "Print", "messages", "about", "the", "operation", "." ]
def print_messages(self): """Print messages about the operation.""" if len(self.selection) == 1: sel_obj = self.selection[0] else: # TODO: this should handle multiple objects. # For example, it could take the shapes of all objects, # make a compound and then use it as input for the array function. sel_obj = self.selection[0] _msg(translate("draft","Object:") + " {}".format(sel_obj.Label)) _msg(translate("draft","Radial distance:") + " {}".format(self.r_distance)) _msg(translate("draft","Tangential distance:") + " {}".format(self.tan_distance)) _msg(translate("draft","Number of circular layers:") + " {}".format(self.number)) _msg(translate("draft","Symmetry parameter:") + " {}".format(self.symmetry)) _msg(translate("draft","Center of rotation:") + " ({0}, {1}, {2})".format(self.center.x, self.center.y, self.center.z)) self.print_fuse_state(self.fuse) self.print_link_state(self.use_link)
[ "def", "print_messages", "(", "self", ")", ":", "if", "len", "(", "self", ".", "selection", ")", "==", "1", ":", "sel_obj", "=", "self", ".", "selection", "[", "0", "]", "else", ":", "# TODO: this should handle multiple objects.", "# For example, it could take t...
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/drafttaskpanels/task_circulararray.py#L373-L392
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/os.py
python
execvpe
(file, args, env)
execvpe(file, args, env) Execute the executable file (which is searched for along $PATH) with argument list args and environment env, replacing the current process. args may be a list or tuple of strings.
execvpe(file, args, env)
[ "execvpe", "(", "file", "args", "env", ")" ]
def execvpe(file, args, env): """execvpe(file, args, env) Execute the executable file (which is searched for along $PATH) with argument list args and environment env, replacing the current process. args may be a list or tuple of strings. """ _execvpe(file, args, env)
[ "def", "execvpe", "(", "file", ",", "args", ",", "env", ")", ":", "_execvpe", "(", "file", ",", "args", ",", "env", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/os.py#L576-L583
Bitcoin-ABC/bitcoin-abc
aff7e41f00bef9d52786c6cffb49faca5c84d32e
contrib/devtools/update-translations.py
python
find_format_specifiers
(s)
return specifiers
Find all format specifiers in a string.
Find all format specifiers in a string.
[ "Find", "all", "format", "specifiers", "in", "a", "string", "." ]
def find_format_specifiers(s): '''Find all format specifiers in a string.''' pos = 0 specifiers = [] while True: percent = s.find('%', pos) if percent < 0: break specifiers.append(s[percent + 1]) pos = percent + 2 return specifiers
[ "def", "find_format_specifiers", "(", "s", ")", ":", "pos", "=", "0", "specifiers", "=", "[", "]", "while", "True", ":", "percent", "=", "s", ".", "find", "(", "'%'", ",", "pos", ")", "if", "percent", "<", "0", ":", "break", "specifiers", ".", "app...
https://github.com/Bitcoin-ABC/bitcoin-abc/blob/aff7e41f00bef9d52786c6cffb49faca5c84d32e/contrib/devtools/update-translations.py#L50-L60
esa/pykep
b410363653623730b577de257c04b0e0289f2014
pykep/planet/__init__.py
python
_j2_ctor
(self, *args)
pykep.planet.j2(when,orbital_elements, mu_central_body, mu_self,radius, safe_radius, J2RG2 [, name = 'unknown']) pykep.planet.j2(when,r,v, mu_central_body, mu_self,radius, safe_radius, J2RG2 [, name = 'unknown']) - when: a :py:class:`pykep.epoch` indicating the orbital elements reference epoch - orbital_elements: a sequence of six containing a,e,i,W,w,M (SI units, i.e. meters and radiants) - r,v: position and velocity of an object at when (SI units) - mu_central_body: gravity parameter of the central body (SI units, i.e. m^2/s^3) - mu_self: gravity parameter of the planet (SI units, i.e. m^2/s^3) - radius: body radius (SI units, i.e. meters) - safe_radius: mimimual radius that is safe during a fly-by of the planet (SI units, i.e. m) - J2RG2: the product of J2 and the mean radius of the oblate primary squared (SI units, i.e. m^2) - name: body name Example:: deb1 = planet.j2(epoch(54000,"mjd"),(7000000, 1.67e-02, 78.23 * DEG2RAD, 175. * DEG2RAD, 287. * DEG2RAD, 257 * DEG2RAD), MU_EARTH, 1, 1, 1, EARTH_J2*EARTH_RADIUS**2, 'deb1')
pykep.planet.j2(when,orbital_elements, mu_central_body, mu_self,radius, safe_radius, J2RG2 [, name = 'unknown'])
[ "pykep", ".", "planet", ".", "j2", "(", "when", "orbital_elements", "mu_central_body", "mu_self", "radius", "safe_radius", "J2RG2", "[", "name", "=", "unknown", "]", ")" ]
def _j2_ctor(self, *args): """ pykep.planet.j2(when,orbital_elements, mu_central_body, mu_self,radius, safe_radius, J2RG2 [, name = 'unknown']) pykep.planet.j2(when,r,v, mu_central_body, mu_self,radius, safe_radius, J2RG2 [, name = 'unknown']) - when: a :py:class:`pykep.epoch` indicating the orbital elements reference epoch - orbital_elements: a sequence of six containing a,e,i,W,w,M (SI units, i.e. meters and radiants) - r,v: position and velocity of an object at when (SI units) - mu_central_body: gravity parameter of the central body (SI units, i.e. m^2/s^3) - mu_self: gravity parameter of the planet (SI units, i.e. m^2/s^3) - radius: body radius (SI units, i.e. meters) - safe_radius: mimimual radius that is safe during a fly-by of the planet (SI units, i.e. m) - J2RG2: the product of J2 and the mean radius of the oblate primary squared (SI units, i.e. m^2) - name: body name Example:: deb1 = planet.j2(epoch(54000,"mjd"),(7000000, 1.67e-02, 78.23 * DEG2RAD, 175. * DEG2RAD, 287. * DEG2RAD, 257 * DEG2RAD), MU_EARTH, 1, 1, 1, EARTH_J2*EARTH_RADIUS**2, 'deb1') """ self._orig_init(*args)
[ "def", "_j2_ctor", "(", "self", ",", "*", "args", ")", ":", "self", ".", "_orig_init", "(", "*", "args", ")" ]
https://github.com/esa/pykep/blob/b410363653623730b577de257c04b0e0289f2014/pykep/planet/__init__.py#L40-L60
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/Tix.py
python
Grid.info_exists
(self, x, y)
return self._getboolean(self.tk.call(self, 'info', 'exists', x, y))
Return True if display item exists at (x,y)
Return True if display item exists at (x,y)
[ "Return", "True", "if", "display", "item", "exists", "at", "(", "x", "y", ")" ]
def info_exists(self, x, y): "Return True if display item exists at (x,y)" return self._getboolean(self.tk.call(self, 'info', 'exists', x, y))
[ "def", "info_exists", "(", "self", ",", "x", ",", "y", ")", ":", "return", "self", ".", "_getboolean", "(", "self", ".", "tk", ".", "call", "(", "self", ",", "'info'", ",", "'exists'", ",", "x", ",", "y", ")", ")" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/Tix.py#L1868-L1870
miyosuda/TensorFlowAndroidMNIST
7b5a4603d2780a8a2834575706e9001977524007
jni-build/jni/include/tensorflow/contrib/distributions/python/ops/gamma.py
python
Gamma.cdf
(self, x, name="cdf")
CDF of observations `x` under these Gamma distribution(s). Args: x: tensor of dtype `dtype`, must be broadcastable with `alpha` and `beta`. name: The name to give this op. Returns: cdf: tensor of dtype `dtype`, the CDFs of `x`.
CDF of observations `x` under these Gamma distribution(s).
[ "CDF", "of", "observations", "x", "under", "these", "Gamma", "distribution", "(", "s", ")", "." ]
def cdf(self, x, name="cdf"): """CDF of observations `x` under these Gamma distribution(s). Args: x: tensor of dtype `dtype`, must be broadcastable with `alpha` and `beta`. name: The name to give this op. Returns: cdf: tensor of dtype `dtype`, the CDFs of `x`. """ with ops.name_scope(self.name): with ops.op_scope([self._alpha, self._beta, x], name): return math_ops.igamma(self._alpha, self._beta * x)
[ "def", "cdf", "(", "self", ",", "x", ",", "name", "=", "\"cdf\"", ")", ":", "with", "ops", ".", "name_scope", "(", "self", ".", "name", ")", ":", "with", "ops", ".", "op_scope", "(", "[", "self", ".", "_alpha", ",", "self", ".", "_beta", ",", "...
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/contrib/distributions/python/ops/gamma.py#L297-L309
smilehao/xlua-framework
a03801538be2b0e92d39332d445b22caca1ef61f
ConfigData/trunk/tools/protobuf-2.5.0/protobuf-2.5.0/python/mox.py
python
MockMethod.__call__
(self, *params, **named_params)
return expected_method._return_value
Log parameters and return the specified return value. If the Mock(Anything/Object) associated with this call is in record mode, this MockMethod will be pushed onto the expected call queue. If the mock is in replay mode, this will pop a MockMethod off the top of the queue and verify this call is equal to the expected call. Raises: UnexpectedMethodCall if this call is supposed to match an expected method call and it does not.
Log parameters and return the specified return value.
[ "Log", "parameters", "and", "return", "the", "specified", "return", "value", "." ]
def __call__(self, *params, **named_params): """Log parameters and return the specified return value. If the Mock(Anything/Object) associated with this call is in record mode, this MockMethod will be pushed onto the expected call queue. If the mock is in replay mode, this will pop a MockMethod off the top of the queue and verify this call is equal to the expected call. Raises: UnexpectedMethodCall if this call is supposed to match an expected method call and it does not. """ self._params = params self._named_params = named_params if not self._replay_mode: self._call_queue.append(self) return self expected_method = self._VerifyMethodCall() if expected_method._side_effects: expected_method._side_effects(*params, **named_params) if expected_method._exception: raise expected_method._exception return expected_method._return_value
[ "def", "__call__", "(", "self", ",", "*", "params", ",", "*", "*", "named_params", ")", ":", "self", ".", "_params", "=", "params", "self", ".", "_named_params", "=", "named_params", "if", "not", "self", ".", "_replay_mode", ":", "self", ".", "_call_queu...
https://github.com/smilehao/xlua-framework/blob/a03801538be2b0e92d39332d445b22caca1ef61f/ConfigData/trunk/tools/protobuf-2.5.0/protobuf-2.5.0/python/mox.py#L545-L573
devsisters/libquic
8954789a056d8e7d5fcb6452fd1572ca57eb5c4e
src/third_party/protobuf/python/google/protobuf/descriptor.py
python
_ToCamelCase
(name)
return ''.join(result)
Converts name to camel-case and returns it.
Converts name to camel-case and returns it.
[ "Converts", "name", "to", "camel", "-", "case", "and", "returns", "it", "." ]
def _ToCamelCase(name): """Converts name to camel-case and returns it.""" capitalize_next = False result = [] for c in name: if c == '_': if result: capitalize_next = True elif capitalize_next: result.append(c.upper()) capitalize_next = False else: result += c # Lower-case the first letter. if result and result[0].isupper(): result[0] = result[0].lower() return ''.join(result)
[ "def", "_ToCamelCase", "(", "name", ")", ":", "capitalize_next", "=", "False", "result", "=", "[", "]", "for", "c", "in", "name", ":", "if", "c", "==", "'_'", ":", "if", "result", ":", "capitalize_next", "=", "True", "elif", "capitalize_next", ":", "re...
https://github.com/devsisters/libquic/blob/8954789a056d8e7d5fcb6452fd1572ca57eb5c4e/src/third_party/protobuf/python/google/protobuf/descriptor.py#L854-L872
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemTextToSpeech/AWS/common-code/PollyCommon/tts.py
python
write_data_to_cache
(stream, key, file_name_template, from_cgp)
Write data from stream to object with the given key in ttscache bucket
Write data from stream to object with the given key in ttscache bucket
[ "Write", "data", "from", "stream", "to", "object", "with", "the", "given", "key", "in", "ttscache", "bucket" ]
def write_data_to_cache(stream, key, file_name_template, from_cgp): ''' Write data from stream to object with the given key in ttscache bucket ''' data = stream.read() tts_bucket = get_bucket(TTSCACHE) tts_object = tts_bucket.Object(file_name_template.format(key)) if not from_cgp and not cache_runtime_generated_files(): tts_object.put(Body=data, Tagging="letexpire=true") else: tts_object.put(Body=data)
[ "def", "write_data_to_cache", "(", "stream", ",", "key", ",", "file_name_template", ",", "from_cgp", ")", ":", "data", "=", "stream", ".", "read", "(", ")", "tts_bucket", "=", "get_bucket", "(", "TTSCACHE", ")", "tts_object", "=", "tts_bucket", ".", "Object"...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemTextToSpeech/AWS/common-code/PollyCommon/tts.py#L190-L200
WeitaoVan/L-GM-loss
598582f0631bac876b3eeb8d6c4cd1d780269e03
scripts/cpp_lint.py
python
_FunctionState.End
(self)
Stop analyzing function body.
Stop analyzing function body.
[ "Stop", "analyzing", "function", "body", "." ]
def End(self): """Stop analyzing function body.""" self.in_a_function = False
[ "def", "End", "(", "self", ")", ":", "self", ".", "in_a_function", "=", "False" ]
https://github.com/WeitaoVan/L-GM-loss/blob/598582f0631bac876b3eeb8d6c4cd1d780269e03/scripts/cpp_lint.py#L861-L863
numworks/epsilon
8952d2f8b1de1c3f064eec8ffcea804c5594ba4c
build/device/usb/util.py
python
build_request_type
(direction, type, recipient)
return recipient | type | direction
r"""Build a bmRequestType field for control requests. These is a conventional function to build a bmRequestType for a control request. The direction parameter can be CTRL_OUT or CTRL_IN. The type parameter can be CTRL_TYPE_STANDARD, CTRL_TYPE_CLASS, CTRL_TYPE_VENDOR or CTRL_TYPE_RESERVED values. The recipient can be CTRL_RECIPIENT_DEVICE, CTRL_RECIPIENT_INTERFACE, CTRL_RECIPIENT_ENDPOINT or CTRL_RECIPIENT_OTHER. Return the bmRequestType value.
r"""Build a bmRequestType field for control requests.
[ "r", "Build", "a", "bmRequestType", "field", "for", "control", "requests", "." ]
def build_request_type(direction, type, recipient): r"""Build a bmRequestType field for control requests. These is a conventional function to build a bmRequestType for a control request. The direction parameter can be CTRL_OUT or CTRL_IN. The type parameter can be CTRL_TYPE_STANDARD, CTRL_TYPE_CLASS, CTRL_TYPE_VENDOR or CTRL_TYPE_RESERVED values. The recipient can be CTRL_RECIPIENT_DEVICE, CTRL_RECIPIENT_INTERFACE, CTRL_RECIPIENT_ENDPOINT or CTRL_RECIPIENT_OTHER. Return the bmRequestType value. """ return recipient | type | direction
[ "def", "build_request_type", "(", "direction", ",", "type", ",", "recipient", ")", ":", "return", "recipient", "|", "type", "|", "direction" ]
https://github.com/numworks/epsilon/blob/8952d2f8b1de1c3f064eec8ffcea804c5594ba4c/build/device/usb/util.py#L139-L153
sunpinyin/sunpinyin
8a2c96e51ca7020398c26feab0af2afdfbbee8a6
wrapper/ibus/setup/main.py
python
MultiCheckDialog.get_setup_name
(self)
return '_'.join(['dlg', self.ui_name, 'setup'])
assuming the name of dialog looks like 'dlg_fuzzy_setup'
assuming the name of dialog looks like 'dlg_fuzzy_setup'
[ "assuming", "the", "name", "of", "dialog", "looks", "like", "dlg_fuzzy_setup" ]
def get_setup_name(self): """assuming the name of dialog looks like 'dlg_fuzzy_setup' """ return '_'.join(['dlg', self.ui_name, 'setup'])
[ "def", "get_setup_name", "(", "self", ")", ":", "return", "'_'", ".", "join", "(", "[", "'dlg'", ",", "self", ".", "ui_name", ",", "'setup'", "]", ")" ]
https://github.com/sunpinyin/sunpinyin/blob/8a2c96e51ca7020398c26feab0af2afdfbbee8a6/wrapper/ibus/setup/main.py#L293-L296
thalium/icebox
99d147d5b9269222225443ce171b4fd46d8985d4
third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2.py
python
uCSIsSupplementalArrowsB
(code)
return ret
Check whether the character is part of SupplementalArrows-B UCS Block
Check whether the character is part of SupplementalArrows-B UCS Block
[ "Check", "whether", "the", "character", "is", "part", "of", "SupplementalArrows", "-", "B", "UCS", "Block" ]
def uCSIsSupplementalArrowsB(code): """Check whether the character is part of SupplementalArrows-B UCS Block """ ret = libxml2mod.xmlUCSIsSupplementalArrowsB(code) return ret
[ "def", "uCSIsSupplementalArrowsB", "(", "code", ")", ":", "ret", "=", "libxml2mod", ".", "xmlUCSIsSupplementalArrowsB", "(", "code", ")", "return", "ret" ]
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2.py#L2878-L2882
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/tkinter/__init__.py
python
YView.yview
(self, *args)
Query and change the vertical position of the view.
Query and change the vertical position of the view.
[ "Query", "and", "change", "the", "vertical", "position", "of", "the", "view", "." ]
def yview(self, *args): """Query and change the vertical position of the view.""" res = self.tk.call(self._w, 'yview', *args) if not args: return self._getdoubles(res)
[ "def", "yview", "(", "self", ",", "*", "args", ")", ":", "res", "=", "self", ".", "tk", ".", "call", "(", "self", ".", "_w", ",", "'yview'", ",", "*", "args", ")", "if", "not", "args", ":", "return", "self", ".", "_getdoubles", "(", "res", ")" ...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/tkinter/__init__.py#L1737-L1741
CRYTEK/CRYENGINE
232227c59a220cbbd311576f0fbeba7bb53b2a8c
Editor/Python/windows/Lib/site-packages/setuptools/depends.py
python
Require.full_name
(self)
return self.name
Return full package/distribution name, w/version
Return full package/distribution name, w/version
[ "Return", "full", "package", "/", "distribution", "name", "w", "/", "version" ]
def full_name(self): """Return full package/distribution name, w/version""" if self.requested_version is not None: return '%s-%s' % (self.name, self.requested_version) return self.name
[ "def", "full_name", "(", "self", ")", ":", "if", "self", ".", "requested_version", "is", "not", "None", ":", "return", "'%s-%s'", "%", "(", "self", ".", "name", ",", "self", ".", "requested_version", ")", "return", "self", ".", "name" ]
https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Editor/Python/windows/Lib/site-packages/setuptools/depends.py#L32-L36
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py3/scipy/integrate/_ivp/common.py
python
warn_extraneous
(extraneous)
Display a warning for extraneous keyword arguments. The initializer of each solver class is expected to collect keyword arguments that it doesn't understand and warn about them. This function prints a warning for each key in the supplied dictionary. Parameters ---------- extraneous : dict Extraneous keyword arguments
Display a warning for extraneous keyword arguments.
[ "Display", "a", "warning", "for", "extraneous", "keyword", "arguments", "." ]
def warn_extraneous(extraneous): """Display a warning for extraneous keyword arguments. The initializer of each solver class is expected to collect keyword arguments that it doesn't understand and warn about them. This function prints a warning for each key in the supplied dictionary. Parameters ---------- extraneous : dict Extraneous keyword arguments """ if extraneous: warn("The following arguments have no effect for a chosen solver: {}." .format(", ".join("`{}`".format(x) for x in extraneous)))
[ "def", "warn_extraneous", "(", "extraneous", ")", ":", "if", "extraneous", ":", "warn", "(", "\"The following arguments have no effect for a chosen solver: {}.\"", ".", "format", "(", "\", \"", ".", "join", "(", "\"`{}`\"", ".", "format", "(", "x", ")", "for", "x"...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/integrate/_ivp/common.py#L27-L41
networkit/networkit
695b7a786a894a303fa8587597d5ef916e797729
benchmark/Benchmark.py
python
averageRuns
(df, groupby=["graph"])
return df
Average running time, modularity, edges per second and number of clusters over multiple runs
Average running time, modularity, edges per second and number of clusters over multiple runs
[ "Average", "running", "time", "modularity", "edges", "per", "second", "and", "number", "of", "clusters", "over", "multiple", "runs" ]
def averageRuns(df, groupby=["graph"]): """ Average running time, modularity, edges per second and number of clusters over multiple runs""" df = df.groupby(groupby, as_index=False).mean() df = df.sort("m", ascending=True) # sort by graph size return df
[ "def", "averageRuns", "(", "df", ",", "groupby", "=", "[", "\"graph\"", "]", ")", ":", "df", "=", "df", ".", "groupby", "(", "groupby", ",", "as_index", "=", "False", ")", ".", "mean", "(", ")", "df", "=", "df", ".", "sort", "(", "\"m\"", ",", ...
https://github.com/networkit/networkit/blob/695b7a786a894a303fa8587597d5ef916e797729/benchmark/Benchmark.py#L71-L75
UDST/pandana
3e3d35ca2d57428714b89ed8fc7020bc55067e1d
pandana/network.py
python
Network.shortest_path
(self, node_a, node_b, imp_name=None)
return self.node_ids.values[path]
Return the shortest path between two node IDs in the network. Must provide an impedance name if more than one is available. Parameters ---------- node_a : int Source node ID node_b : int Destination node ID imp_name : string, optional The impedance name to use for the shortest path Returns ------- path : np.ndarray Nodes that are traversed in the shortest path
Return the shortest path between two node IDs in the network. Must provide an impedance name if more than one is available.
[ "Return", "the", "shortest", "path", "between", "two", "node", "IDs", "in", "the", "network", ".", "Must", "provide", "an", "impedance", "name", "if", "more", "than", "one", "is", "available", "." ]
def shortest_path(self, node_a, node_b, imp_name=None): """ Return the shortest path between two node IDs in the network. Must provide an impedance name if more than one is available. Parameters ---------- node_a : int Source node ID node_b : int Destination node ID imp_name : string, optional The impedance name to use for the shortest path Returns ------- path : np.ndarray Nodes that are traversed in the shortest path """ # map to internal node indexes node_idx = self._node_indexes(pd.Series([node_a, node_b])) node_a = node_idx.iloc[0] node_b = node_idx.iloc[1] imp_num = self._imp_name_to_num(imp_name) path = self.net.shortest_path(node_a, node_b, imp_num) # map back to external node IDs return self.node_ids.values[path]
[ "def", "shortest_path", "(", "self", ",", "node_a", ",", "node_b", ",", "imp_name", "=", "None", ")", ":", "# map to internal node indexes", "node_idx", "=", "self", ".", "_node_indexes", "(", "pd", ".", "Series", "(", "[", "node_a", ",", "node_b", "]", ")...
https://github.com/UDST/pandana/blob/3e3d35ca2d57428714b89ed8fc7020bc55067e1d/pandana/network.py#L170-L200
qt/qt
0a2f2382541424726168804be2c90b91381608c6
src/3rdparty/freetype/src/tools/docmaker/sources.py
python
SourceProcessor.process_normal_line
( self, line )
process a normal line and check whether it is the start of a new block
process a normal line and check whether it is the start of a new block
[ "process", "a", "normal", "line", "and", "check", "whether", "it", "is", "the", "start", "of", "a", "new", "block" ]
def process_normal_line( self, line ): """process a normal line and check whether it is the start of a new block""" for f in re_source_block_formats: if f.start.match( line ): self.add_block_lines() self.format = f self.lineno = fileinput.filelineno() self.lines.append( line )
[ "def", "process_normal_line", "(", "self", ",", "line", ")", ":", "for", "f", "in", "re_source_block_formats", ":", "if", "f", ".", "start", ".", "match", "(", "line", ")", ":", "self", ".", "add_block_lines", "(", ")", "self", ".", "format", "=", "f",...
https://github.com/qt/qt/blob/0a2f2382541424726168804be2c90b91381608c6/src/3rdparty/freetype/src/tools/docmaker/sources.py#L322-L330
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_core.py
python
Rect2D.MoveTopTo
(*args, **kwargs)
return _core_.Rect2D_MoveTopTo(*args, **kwargs)
MoveTopTo(self, Double n)
MoveTopTo(self, Double n)
[ "MoveTopTo", "(", "self", "Double", "n", ")" ]
def MoveTopTo(*args, **kwargs): """MoveTopTo(self, Double n)""" return _core_.Rect2D_MoveTopTo(*args, **kwargs)
[ "def", "MoveTopTo", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "Rect2D_MoveTopTo", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_core.py#L1875-L1877
husixu1/HUST-Homeworks
fbf6ed749eacab6e14bffea83703aadaf9324828
DataMining/src/randomCF.py
python
recommend
(table)
return result
recommend items for user :param items: the items this user already scored :param table: original data table (training set) :param k: k nearest neighbor :return: estimated items and score, in a dictionary of item: estimate-score
recommend items for user :param items: the items this user already scored :param table: original data table (training set) :param k: k nearest neighbor :return: estimated items and score, in a dictionary of item: estimate-score
[ "recommend", "items", "for", "user", ":", "param", "items", ":", "the", "items", "this", "user", "already", "scored", ":", "param", "table", ":", "original", "data", "table", "(", "training", "set", ")", ":", "param", "k", ":", "k", "nearest", "neighbor"...
def recommend(table): """recommend items for user :param items: the items this user already scored :param table: original data table (training set) :param k: k nearest neighbor :return: estimated items and score, in a dictionary of item: estimate-score """ result = {} for i in range(table.shape[1]): result[i] = random.randint(-10,10) return result
[ "def", "recommend", "(", "table", ")", ":", "result", "=", "{", "}", "for", "i", "in", "range", "(", "table", ".", "shape", "[", "1", "]", ")", ":", "result", "[", "i", "]", "=", "random", ".", "randint", "(", "-", "10", ",", "10", ")", "retu...
https://github.com/husixu1/HUST-Homeworks/blob/fbf6ed749eacab6e14bffea83703aadaf9324828/DataMining/src/randomCF.py#L46-L56
pmq20/node-packer
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
current/tools/gyp/pylib/gyp/generator/analyzer.py
python
TargetCalculator.find_matching_test_target_names
(self)
return matching_target_names
Returns the set of output test targets.
Returns the set of output test targets.
[ "Returns", "the", "set", "of", "output", "test", "targets", "." ]
def find_matching_test_target_names(self): """Returns the set of output test targets.""" assert self.is_build_impacted() # Find the test targets first. 'all' is special cased to mean all the # root targets. To deal with all the supplied |test_targets| are expanded # to include the root targets during lookup. If any of the root targets # match, we remove it and replace it with 'all'. test_target_names_no_all = set(self._test_target_names) test_target_names_no_all.discard('all') test_targets_no_all = _LookupTargets(test_target_names_no_all, self._unqualified_mapping) test_target_names_contains_all = 'all' in self._test_target_names if test_target_names_contains_all: test_targets = [x for x in (set(test_targets_no_all) | set(self._root_targets))] else: test_targets = [x for x in test_targets_no_all] print('supplied test_targets') for target_name in self._test_target_names: print('\t', target_name) print('found test_targets') for target in test_targets: print('\t', target.name) print('searching for matching test targets') matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets) matching_test_targets_contains_all = (test_target_names_contains_all and set(matching_test_targets) & set(self._root_targets)) if matching_test_targets_contains_all: # Remove any of the targets for all that were not explicitly supplied, # 'all' is subsequentely added to the matching names below. matching_test_targets = [x for x in (set(matching_test_targets) & set(test_targets_no_all))] print('matched test_targets') for target in matching_test_targets: print('\t', target.name) matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1] for target in matching_test_targets] if matching_test_targets_contains_all: matching_target_names.append('all') print('\tall') return matching_target_names
[ "def", "find_matching_test_target_names", "(", "self", ")", ":", "assert", "self", ".", "is_build_impacted", "(", ")", "# Find the test targets first. 'all' is special cased to mean all the", "# root targets. To deal with all the supplied |test_targets| are expanded", "# to include the r...
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/current/tools/gyp/pylib/gyp/generator/analyzer.py#L626-L667
apache/impala
8ddac48f3428c86f2cbd037ced89cfb903298b12
shell/pkg_resources.py
python
IMetadataProvider.metadata_listdir
(name)
List of metadata names in the directory (like ``os.listdir()``)
List of metadata names in the directory (like ``os.listdir()``)
[ "List", "of", "metadata", "names", "in", "the", "directory", "(", "like", "os", ".", "listdir", "()", ")" ]
def metadata_listdir(name): """List of metadata names in the directory (like ``os.listdir()``)"""
[ "def", "metadata_listdir", "(", "name", ")", ":" ]
https://github.com/apache/impala/blob/8ddac48f3428c86f2cbd037ced89cfb903298b12/shell/pkg_resources.py#L340-L341
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/feature_column/feature_column.py
python
_add_to_collections
(var, weight_collections)
Adds a var to the list of weight_collections provided. Handles the case for partitioned and non-partitioned variables. Args: var: A variable or Partitioned Variable. weight_collections: List of collections to add variable to.
Adds a var to the list of weight_collections provided.
[ "Adds", "a", "var", "to", "the", "list", "of", "weight_collections", "provided", "." ]
def _add_to_collections(var, weight_collections): """Adds a var to the list of weight_collections provided. Handles the case for partitioned and non-partitioned variables. Args: var: A variable or Partitioned Variable. weight_collections: List of collections to add variable to. """ for weight_collection in weight_collections: # The layer self.add_variable call already adds it to GLOBAL_VARIABLES. if weight_collection == ops.GraphKeys.GLOBAL_VARIABLES: continue # TODO(rohanj): Explore adding a _get_variable_list method on `Variable` # so that we don't have to do this check. if isinstance(var, variables.PartitionedVariable): for constituent_var in list(var): ops.add_to_collection(weight_collection, constituent_var) else: ops.add_to_collection(weight_collection, var)
[ "def", "_add_to_collections", "(", "var", ",", "weight_collections", ")", ":", "for", "weight_collection", "in", "weight_collections", ":", "# The layer self.add_variable call already adds it to GLOBAL_VARIABLES.", "if", "weight_collection", "==", "ops", ".", "GraphKeys", "."...
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/feature_column/feature_column.py#L507-L526
natanielruiz/android-yolo
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
jni-build/jni/include/tensorflow/python/summary/event_accumulator.py
python
EventAccumulator._ProcessAudio
(self, tag, wall_time, step, audio)
Processes a audio by adding it to accumulated state.
Processes a audio by adding it to accumulated state.
[ "Processes", "a", "audio", "by", "adding", "it", "to", "accumulated", "state", "." ]
def _ProcessAudio(self, tag, wall_time, step, audio): """Processes a audio by adding it to accumulated state.""" event = AudioEvent(wall_time=wall_time, step=step, encoded_audio_string=audio.encoded_audio_string, content_type=audio.content_type, sample_rate=audio.sample_rate, length_frames=audio.length_frames) self._audio.AddItem(tag, event)
[ "def", "_ProcessAudio", "(", "self", ",", "tag", ",", "wall_time", ",", "step", ",", "audio", ")", ":", "event", "=", "AudioEvent", "(", "wall_time", "=", "wall_time", ",", "step", "=", "step", ",", "encoded_audio_string", "=", "audio", ".", "encoded_audio...
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/python/summary/event_accumulator.py#L555-L563
raymondlu/super-animation-samples
04234269112ff0dc32447f27a761dbbb00b8ba17
samples/cocos2d-x-3.1/CocosLuaGame2/frameworks/cocos2d-x/tools/bindings-generator/clang/cindex.py
python
Cursor.get_children_array
(self)
return children
Return an iterator for accessing the children of this cursor.
Return an iterator for accessing the children of this cursor.
[ "Return", "an", "iterator", "for", "accessing", "the", "children", "of", "this", "cursor", "." ]
def get_children_array(self): """Return an iterator for accessing the children of this cursor.""" # FIXME: Expose iteration from CIndex, PR6125. def visitor(child, parent, children): # FIXME: Document this assertion in API. # FIXME: There should just be an isNull method. assert child != conf.lib.clang_getNullCursor() # Create reference to TU so it isn't GC'd before Cursor. child._tu = self._tu children.append(child) return 1 # continue children = [] conf.lib.clang_visitChildren(self, callbacks['cursor_visit'](visitor), children) return children
[ "def", "get_children_array", "(", "self", ")", ":", "# FIXME: Expose iteration from CIndex, PR6125.", "def", "visitor", "(", "child", ",", "parent", ",", "children", ")", ":", "# FIXME: Document this assertion in API.", "# FIXME: There should just be an isNull method.", "assert...
https://github.com/raymondlu/super-animation-samples/blob/04234269112ff0dc32447f27a761dbbb00b8ba17/samples/cocos2d-x-3.1/CocosLuaGame2/frameworks/cocos2d-x/tools/bindings-generator/clang/cindex.py#L1457-L1473
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/nn_grad.py
python
_SoftmaxGrad
(op, grad_softmax)
return (grad_softmax - sum_channels) * softmax
The derivative of the softmax nonlinearity. We assume that probs is of shape [batch_size * dim] The formula for dsoftmax / dx = (diag(softmax) - softmax * softmax'). This matrix is diagonal minus a rank one matrix, so it is easy to implement as follows: grad_x = grad_softmax * softmax - sum(grad_softmax * softmax) * softmax Args: op: the Softmax op. grad_softmax: the tensor representing the gradient w.r.t. the softmax output. Returns: gradient w.r.t the input to the softmax
The derivative of the softmax nonlinearity.
[ "The", "derivative", "of", "the", "softmax", "nonlinearity", "." ]
def _SoftmaxGrad(op, grad_softmax): """The derivative of the softmax nonlinearity. We assume that probs is of shape [batch_size * dim] The formula for dsoftmax / dx = (diag(softmax) - softmax * softmax'). This matrix is diagonal minus a rank one matrix, so it is easy to implement as follows: grad_x = grad_softmax * softmax - sum(grad_softmax * softmax) * softmax Args: op: the Softmax op. grad_softmax: the tensor representing the gradient w.r.t. the softmax output. Returns: gradient w.r.t the input to the softmax """ softmax = op.outputs[0] sum_channels = math_ops.reduce_sum(grad_softmax * softmax, -1, keepdims=True) return (grad_softmax - sum_channels) * softmax
[ "def", "_SoftmaxGrad", "(", "op", ",", "grad_softmax", ")", ":", "softmax", "=", "op", ".", "outputs", "[", "0", "]", "sum_channels", "=", "math_ops", ".", "reduce_sum", "(", "grad_softmax", "*", "softmax", ",", "-", "1", ",", "keepdims", "=", "True", ...
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/nn_grad.py#L283-L304
redpony/cdec
f7c4899b174d86bc70b40b1cae68dcad364615cb
python/cdec/configobj.py
python
Section.merge
(self, indict)
A recursive update - useful for merging config files. >>> a = '''[section1] ... option1 = True ... [[subsection]] ... more_options = False ... # end of file'''.splitlines() >>> b = '''# File is user.ini ... [section1] ... option1 = False ... # end of file'''.splitlines() >>> c1 = ConfigObj(b) >>> c2 = ConfigObj(a) >>> c2.merge(c1) >>> c2 ConfigObj({'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}})
A recursive update - useful for merging config files. >>> a = '''[section1] ... option1 = True ... [[subsection]] ... more_options = False ... # end of file'''.splitlines() >>> b = '''# File is user.ini ... [section1] ... option1 = False ... # end of file'''.splitlines() >>> c1 = ConfigObj(b) >>> c2 = ConfigObj(a) >>> c2.merge(c1) >>> c2 ConfigObj({'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}})
[ "A", "recursive", "update", "-", "useful", "for", "merging", "config", "files", ".", ">>>", "a", "=", "[", "section1", "]", "...", "option1", "=", "True", "...", "[[", "subsection", "]]", "...", "more_options", "=", "False", "...", "#", "end", "of", "f...
def merge(self, indict): """ A recursive update - useful for merging config files. >>> a = '''[section1] ... option1 = True ... [[subsection]] ... more_options = False ... # end of file'''.splitlines() >>> b = '''# File is user.ini ... [section1] ... option1 = False ... # end of file'''.splitlines() >>> c1 = ConfigObj(b) >>> c2 = ConfigObj(a) >>> c2.merge(c1) >>> c2 ConfigObj({'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}}) """ for key, val in indict.items(): if (key in self and isinstance(self[key], dict) and isinstance(val, dict)): self[key].merge(val) else: self[key] = val
[ "def", "merge", "(", "self", ",", "indict", ")", ":", "for", "key", ",", "val", "in", "indict", ".", "items", "(", ")", ":", "if", "(", "key", "in", "self", "and", "isinstance", "(", "self", "[", "key", "]", ",", "dict", ")", "and", "isinstance",...
https://github.com/redpony/cdec/blob/f7c4899b174d86bc70b40b1cae68dcad364615cb/python/cdec/configobj.py#L798-L822
krishauser/Klampt
972cc83ea5befac3f653c1ba20f80155768ad519
Python/python2_version/klampt/src/doxy2swig.py
python
Doxy2SWIG.parse_Element
(self, node)
Parse an `ELEMENT_NODE`. This calls specific `do_<tagName>` handers for different elements. If no handler is available the `subnode_parse` method is called. All tagNames specified in `self.ignores` are simply ignored.
Parse an `ELEMENT_NODE`. This calls specific `do_<tagName>` handers for different elements. If no handler is available the `subnode_parse` method is called. All tagNames specified in `self.ignores` are simply ignored.
[ "Parse", "an", "ELEMENT_NODE", ".", "This", "calls", "specific", "do_<tagName", ">", "handers", "for", "different", "elements", ".", "If", "no", "handler", "is", "available", "the", "subnode_parse", "method", "is", "called", ".", "All", "tagNames", "specified", ...
def parse_Element(self, node): """Parse an `ELEMENT_NODE`. This calls specific `do_<tagName>` handers for different elements. If no handler is available the `subnode_parse` method is called. All tagNames specified in `self.ignores` are simply ignored. """ name = node.tagName ignores = self.ignores if name in ignores: return attr = "do_%s" % name if hasattr(self, attr): handlerMethod = getattr(self, attr) handlerMethod(node) else: self.subnode_parse(node)
[ "def", "parse_Element", "(", "self", ",", "node", ")", ":", "name", "=", "node", ".", "tagName", "ignores", "=", "self", ".", "ignores", "if", "name", "in", "ignores", ":", "return", "attr", "=", "\"do_%s\"", "%", "name", "if", "hasattr", "(", "self", ...
https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/python2_version/klampt/src/doxy2swig.py#L202-L218
DGA-MI-SSI/YaCo
9b85e6ca1809114c4df1382c11255f7e38408912
deps/flatbuffers-1.8.0/python/flatbuffers/builder.py
python
Builder.CreateString
(self, s, encoding='utf-8', errors='strict')
return self.EndVector(len(x))
CreateString writes a null-terminated byte string as a vector.
CreateString writes a null-terminated byte string as a vector.
[ "CreateString", "writes", "a", "null", "-", "terminated", "byte", "string", "as", "a", "vector", "." ]
def CreateString(self, s, encoding='utf-8', errors='strict'): """CreateString writes a null-terminated byte string as a vector.""" self.assertNotNested() ## @cond FLATBUFFERS_INTERNAL self.nested = True ## @endcond if isinstance(s, compat.string_types): x = s.encode(encoding, errors) elif isinstance(s, compat.binary_types): x = s else: raise TypeError("non-string passed to CreateString") self.Prep(N.UOffsetTFlags.bytewidth, (len(x)+1)*N.Uint8Flags.bytewidth) self.Place(0, N.Uint8Flags) l = UOffsetTFlags.py_type(len(s)) ## @cond FLATBUFFERS_INTERNAL self.head = UOffsetTFlags.py_type(self.Head() - l) ## @endcond self.Bytes[self.Head():self.Head()+l] = x return self.EndVector(len(x))
[ "def", "CreateString", "(", "self", ",", "s", ",", "encoding", "=", "'utf-8'", ",", "errors", "=", "'strict'", ")", ":", "self", ".", "assertNotNested", "(", ")", "## @cond FLATBUFFERS_INTERNAL", "self", ".", "nested", "=", "True", "## @endcond", "if", "isin...
https://github.com/DGA-MI-SSI/YaCo/blob/9b85e6ca1809114c4df1382c11255f7e38408912/deps/flatbuffers-1.8.0/python/flatbuffers/builder.py#L398-L422
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
build/android/pylib/android_commands.py
python
GetAttachedDevices
(hardware=True, emulator=True, offline=False)
return devices
Returns a list of attached, android devices and emulators. If a preferred device has been set with ANDROID_SERIAL, it will be first in the returned list. The arguments specify what devices to include in the list. Example output: * daemon not running. starting it now on port 5037 * * daemon started successfully * List of devices attached 027c10494100b4d7 device emulator-5554 offline Args: hardware: Include attached actual devices that are online. emulator: Include emulators (i.e. AVD's) currently on host. offline: Include devices and emulators that are offline. Returns: List of devices.
Returns a list of attached, android devices and emulators.
[ "Returns", "a", "list", "of", "attached", "android", "devices", "and", "emulators", "." ]
def GetAttachedDevices(hardware=True, emulator=True, offline=False): """Returns a list of attached, android devices and emulators. If a preferred device has been set with ANDROID_SERIAL, it will be first in the returned list. The arguments specify what devices to include in the list. Example output: * daemon not running. starting it now on port 5037 * * daemon started successfully * List of devices attached 027c10494100b4d7 device emulator-5554 offline Args: hardware: Include attached actual devices that are online. emulator: Include emulators (i.e. AVD's) currently on host. offline: Include devices and emulators that are offline. Returns: List of devices. """ adb_devices_output = cmd_helper.GetCmdOutput([constants.GetAdbPath(), 'devices']) re_device = re.compile('^([a-zA-Z0-9_:.-]+)\tdevice$', re.MULTILINE) online_devices = re_device.findall(adb_devices_output) re_device = re.compile('^(emulator-[0-9]+)\tdevice', re.MULTILINE) emulator_devices = re_device.findall(adb_devices_output) re_device = re.compile('^([a-zA-Z0-9_:.-]+)\toffline$', re.MULTILINE) offline_devices = re_device.findall(adb_devices_output) devices = [] # First determine list of online devices (e.g. hardware and/or emulator). if hardware and emulator: devices = online_devices elif hardware: devices = [device for device in online_devices if device not in emulator_devices] elif emulator: devices = emulator_devices # Now add offline devices if offline is true if offline: devices = devices + offline_devices preferred_device = os.environ.get('ANDROID_SERIAL') if preferred_device in devices: devices.remove(preferred_device) devices.insert(0, preferred_device) return devices
[ "def", "GetAttachedDevices", "(", "hardware", "=", "True", ",", "emulator", "=", "True", ",", "offline", "=", "False", ")", ":", "adb_devices_output", "=", "cmd_helper", ".", "GetCmdOutput", "(", "[", "constants", ".", "GetAdbPath", "(", ")", ",", "'devices'...
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/build/android/pylib/android_commands.py#L80-L131
apple/swift-clang
d7403439fc6641751840b723e7165fb02f52db95
bindings/python/clang/cindex.py
python
Diagnostic.format
(self, options=None)
return conf.lib.clang_formatDiagnostic(self, options)
Format this diagnostic for display. The options argument takes Diagnostic.Display* flags, which can be combined using bitwise OR. If the options argument is not provided, the default display options will be used.
Format this diagnostic for display. The options argument takes Diagnostic.Display* flags, which can be combined using bitwise OR. If the options argument is not provided, the default display options will be used.
[ "Format", "this", "diagnostic", "for", "display", ".", "The", "options", "argument", "takes", "Diagnostic", ".", "Display", "*", "flags", "which", "can", "be", "combined", "using", "bitwise", "OR", ".", "If", "the", "options", "argument", "is", "not", "provi...
def format(self, options=None): """ Format this diagnostic for display. The options argument takes Diagnostic.Display* flags, which can be combined using bitwise OR. If the options argument is not provided, the default display options will be used. """ if options is None: options = conf.lib.clang_defaultDiagnosticDisplayOptions() if options & ~Diagnostic._FormatOptionsMask: raise ValueError('Invalid format options') return conf.lib.clang_formatDiagnostic(self, options)
[ "def", "format", "(", "self", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "conf", ".", "lib", ".", "clang_defaultDiagnosticDisplayOptions", "(", ")", "if", "options", "&", "~", "Diagnostic", ".", "_FormatOptio...
https://github.com/apple/swift-clang/blob/d7403439fc6641751840b723e7165fb02f52db95/bindings/python/clang/cindex.py#L481-L492
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/http/cookiejar.py
python
CookiePolicy.domain_return_ok
(self, domain, request)
return True
Return false if cookies should not be returned, given cookie domain.
Return false if cookies should not be returned, given cookie domain.
[ "Return", "false", "if", "cookies", "should", "not", "be", "returned", "given", "cookie", "domain", "." ]
def domain_return_ok(self, domain, request): """Return false if cookies should not be returned, given cookie domain. """ return True
[ "def", "domain_return_ok", "(", "self", ",", "domain", ",", "request", ")", ":", "return", "True" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/http/cookiejar.py#L856-L859
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_controls.py
python
TreeCtrl.GetCount
(*args, **kwargs)
return _controls_.TreeCtrl_GetCount(*args, **kwargs)
GetCount(self) -> unsigned int
GetCount(self) -> unsigned int
[ "GetCount", "(", "self", ")", "-", ">", "unsigned", "int" ]
def GetCount(*args, **kwargs): """GetCount(self) -> unsigned int""" return _controls_.TreeCtrl_GetCount(*args, **kwargs)
[ "def", "GetCount", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_controls_", ".", "TreeCtrl_GetCount", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_controls.py#L5209-L5211
TheLegendAli/DeepLab-Context
fb04e9e2fc2682490ad9f60533b9d6c4c0e0479c
scripts/cpp_lint.py
python
_SetCountingStyle
(level)
Sets the module's counting options.
Sets the module's counting options.
[ "Sets", "the", "module", "s", "counting", "options", "." ]
def _SetCountingStyle(level): """Sets the module's counting options.""" _cpplint_state.SetCountingStyle(level)
[ "def", "_SetCountingStyle", "(", "level", ")", ":", "_cpplint_state", ".", "SetCountingStyle", "(", "level", ")" ]
https://github.com/TheLegendAli/DeepLab-Context/blob/fb04e9e2fc2682490ad9f60533b9d6c4c0e0479c/scripts/cpp_lint.py#L787-L789
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/smtplib.py
python
SMTP.mail
(self, sender, options=())
return self.getreply()
SMTP 'mail' command -- begins mail xfer session. This method may raise the following exceptions: SMTPNotSupportedError The options parameter includes 'SMTPUTF8' but the SMTPUTF8 extension is not supported by the server.
SMTP 'mail' command -- begins mail xfer session.
[ "SMTP", "mail", "command", "--", "begins", "mail", "xfer", "session", "." ]
def mail(self, sender, options=()): """SMTP 'mail' command -- begins mail xfer session. This method may raise the following exceptions: SMTPNotSupportedError The options parameter includes 'SMTPUTF8' but the SMTPUTF8 extension is not supported by the server. """ optionlist = '' if options and self.does_esmtp: if any(x.lower()=='smtputf8' for x in options): if self.has_extn('smtputf8'): self.command_encoding = 'utf-8' else: raise SMTPNotSupportedError( 'SMTPUTF8 not supported by server') optionlist = ' ' + ' '.join(options) self.putcmd("mail", "FROM:%s%s" % (quoteaddr(sender), optionlist)) return self.getreply()
[ "def", "mail", "(", "self", ",", "sender", ",", "options", "=", "(", ")", ")", ":", "optionlist", "=", "''", "if", "options", "and", "self", ".", "does_esmtp", ":", "if", "any", "(", "x", ".", "lower", "(", ")", "==", "'smtputf8'", "for", "x", "i...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/smtplib.py#L516-L535
microsoft/checkedc-clang
a173fefde5d7877b7750e7ce96dd08cf18baebf2
llvm/utils/benchmark/tools/strip_asm.py
python
process_asm
(asm)
return new_contents
Strip the ASM of unwanted directives and lines
Strip the ASM of unwanted directives and lines
[ "Strip", "the", "ASM", "of", "unwanted", "directives", "and", "lines" ]
def process_asm(asm): """ Strip the ASM of unwanted directives and lines """ new_contents = '' asm = transform_labels(asm) # TODO: Add more things we want to remove discard_regexes = [ re.compile("\s+\..*$"), # directive re.compile("\s*#(NO_APP|APP)$"), #inline ASM re.compile("\s*#.*$"), # comment line re.compile("\s*\.globa?l\s*([.a-zA-Z_][a-zA-Z0-9$_.]*)"), #global directive re.compile("\s*\.(string|asciz|ascii|[1248]?byte|short|word|long|quad|value|zero)"), ] keep_regexes = [ ] fn_label_def = re.compile("^[a-zA-Z_][a-zA-Z0-9_.]*:") for l in asm.splitlines(): # Remove Mach-O attribute l = l.replace('@GOTPCREL', '') add_line = True for reg in discard_regexes: if reg.match(l) is not None: add_line = False break for reg in keep_regexes: if reg.match(l) is not None: add_line = True break if add_line: if fn_label_def.match(l) and len(new_contents) != 0: new_contents += '\n' l = process_identifiers(l) new_contents += l new_contents += '\n' return new_contents
[ "def", "process_asm", "(", "asm", ")", ":", "new_contents", "=", "''", "asm", "=", "transform_labels", "(", "asm", ")", "# TODO: Add more things we want to remove", "discard_regexes", "=", "[", "re", ".", "compile", "(", "\"\\s+\\..*$\"", ")", ",", "# directive", ...
https://github.com/microsoft/checkedc-clang/blob/a173fefde5d7877b7750e7ce96dd08cf18baebf2/llvm/utils/benchmark/tools/strip_asm.py#L84-L121
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/framework/error_interpolation.py
python
_compute_colocation_summary_from_dict
(name, colocation_dict, prefix="")
return "\n".join(str_list)
Return a summary of an op's colocation stack. Args: name: The op name. colocation_dict: The op._colocation_dict. prefix: An optional string prefix used before each line of the multi- line string returned by this function. Returns: A multi-line string similar to: Node-device colocations active during op creation: with tf.compat.v1.colocate_with(test_node_1): <test_1.py:27> with tf.compat.v1.colocate_with(test_node_2): <test_2.py:38> The first line will have no padding to its left by default. Subsequent lines will have two spaces of left-padding. Use the prefix argument to increase indentation.
Return a summary of an op's colocation stack.
[ "Return", "a", "summary", "of", "an", "op", "s", "colocation", "stack", "." ]
def _compute_colocation_summary_from_dict(name, colocation_dict, prefix=""): """Return a summary of an op's colocation stack. Args: name: The op name. colocation_dict: The op._colocation_dict. prefix: An optional string prefix used before each line of the multi- line string returned by this function. Returns: A multi-line string similar to: Node-device colocations active during op creation: with tf.compat.v1.colocate_with(test_node_1): <test_1.py:27> with tf.compat.v1.colocate_with(test_node_2): <test_2.py:38> The first line will have no padding to its left by default. Subsequent lines will have two spaces of left-padding. Use the prefix argument to increase indentation. """ if not colocation_dict: message = "No node-device colocations were active during op '%s' creation." message %= name return prefix + message str_list = [] str_list.append("%sNode-device colocations active during op '%s' creation:" % (prefix, name)) for coloc_name, location in colocation_dict.items(): location_summary = "<{file}:{line}>".format( file=location.filename, line=location.lineno) subs = { "prefix": prefix, "indent": " ", "name": coloc_name, "loc": location_summary, } str_list.append( "{prefix}{indent}with tf.colocate_with({name}): {loc}".format(**subs)) return "\n".join(str_list)
[ "def", "_compute_colocation_summary_from_dict", "(", "name", ",", "colocation_dict", ",", "prefix", "=", "\"\"", ")", ":", "if", "not", "colocation_dict", ":", "message", "=", "\"No node-device colocations were active during op '%s' creation.\"", "message", "%=", "name", ...
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/framework/error_interpolation.py#L132-L171
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/examples/learn/wide_n_deep_tutorial.py
python
input_fn
(data_file, num_epochs, shuffle)
return tf.estimator.inputs.pandas_input_fn( x=df_data, y=labels, batch_size=100, num_epochs=num_epochs, shuffle=shuffle, num_threads=5)
Input builder function.
Input builder function.
[ "Input", "builder", "function", "." ]
def input_fn(data_file, num_epochs, shuffle): """Input builder function.""" df_data = pd.read_csv( tf.gfile.Open(data_file), names=CSV_COLUMNS, skipinitialspace=True, engine="python", skiprows=1) # remove NaN elements df_data = df_data.dropna(how="any", axis=0) labels = df_data["income_bracket"].apply(lambda x: ">50K" in x).astype(int) return tf.estimator.inputs.pandas_input_fn( x=df_data, y=labels, batch_size=100, num_epochs=num_epochs, shuffle=shuffle, num_threads=5)
[ "def", "input_fn", "(", "data_file", ",", "num_epochs", ",", "shuffle", ")", ":", "df_data", "=", "pd", ".", "read_csv", "(", "tf", ".", "gfile", ".", "Open", "(", "data_file", ")", ",", "names", "=", "CSV_COLUMNS", ",", "skipinitialspace", "=", "True", ...
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/examples/learn/wide_n_deep_tutorial.py#L155-L172
Polidea/SiriusObfuscator
b0e590d8130e97856afe578869b83a209e2b19be
SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py
python
SBValue.WatchPointee
(self, *args)
return _lldb.SBValue_WatchPointee(self, *args)
WatchPointee(self, bool resolve_location, bool read, bool write, SBError error) -> SBWatchpoint Find and watch the location pointed to by a variable. It returns an SBWatchpoint, which may be invalid.
WatchPointee(self, bool resolve_location, bool read, bool write, SBError error) -> SBWatchpoint
[ "WatchPointee", "(", "self", "bool", "resolve_location", "bool", "read", "bool", "write", "SBError", "error", ")", "-", ">", "SBWatchpoint" ]
def WatchPointee(self, *args): """ WatchPointee(self, bool resolve_location, bool read, bool write, SBError error) -> SBWatchpoint Find and watch the location pointed to by a variable. It returns an SBWatchpoint, which may be invalid. """ return _lldb.SBValue_WatchPointee(self, *args)
[ "def", "WatchPointee", "(", "self", ",", "*", "args", ")", ":", "return", "_lldb", ".", "SBValue_WatchPointee", "(", "self", ",", "*", "args", ")" ]
https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py#L12147-L12154
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
media/tools/constrained_network_server/traffic_control.py
python
CheckRequirements
()
Checks if permissions are available to run traffic control commands. Raises: TrafficControlError: If permissions to run traffic control commands are not available.
Checks if permissions are available to run traffic control commands.
[ "Checks", "if", "permissions", "are", "available", "to", "run", "traffic", "control", "commands", "." ]
def CheckRequirements(): """Checks if permissions are available to run traffic control commands. Raises: TrafficControlError: If permissions to run traffic control commands are not available. """ if os.geteuid() != 0: _Exec(['sudo', '-n', 'tc', '-help'], msg=('Cannot run \'tc\' command. Traffic Control must be run as root ' 'or have password-less sudo access to this command.')) _Exec(['sudo', '-n', 'iptables', '-help'], msg=('Cannot run \'iptables\' command. Traffic Control must be run ' 'as root or have password-less sudo access to this command.'))
[ "def", "CheckRequirements", "(", ")", ":", "if", "os", ".", "geteuid", "(", ")", "!=", "0", ":", "_Exec", "(", "[", "'sudo'", ",", "'-n'", ",", "'tc'", ",", "'-help'", "]", ",", "msg", "=", "(", "'Cannot run \\'tc\\' command. Traffic Control must be run as r...
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/media/tools/constrained_network_server/traffic_control.py#L41-L54
BitMEX/api-connectors
37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812
auto-generated/python/swagger_client/api/order_api.py
python
OrderApi.order_cancel_all_after
(self, timeout, **kwargs)
Automatically cancel all your orders after a specified timeout. # noqa: E501 Useful as a dead-man's switch to ensure your orders are canceled in case of an outage. If called repeatedly, the existing offset will be canceled and a new one will be inserted in its place. Example usage: call this route at 15s intervals with an offset of 60000 (60s). If this route is not called within 60 seconds, all your orders will be automatically canceled. This is also available via [WebSocket](https://www.bitmex.com/app/wsAPI#Dead-Mans-Switch-Auto-Cancel). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.order_cancel_all_after(timeout, async_req=True) >>> result = thread.get() :param async_req bool :param float timeout: Timeout in ms. Set to 0 to cancel this timer. (required) :return: object If the method is called asynchronously, returns the request thread.
Automatically cancel all your orders after a specified timeout. # noqa: E501
[ "Automatically", "cancel", "all", "your", "orders", "after", "a", "specified", "timeout", ".", "#", "noqa", ":", "E501" ]
def order_cancel_all_after(self, timeout, **kwargs): # noqa: E501 """Automatically cancel all your orders after a specified timeout. # noqa: E501 Useful as a dead-man's switch to ensure your orders are canceled in case of an outage. If called repeatedly, the existing offset will be canceled and a new one will be inserted in its place. Example usage: call this route at 15s intervals with an offset of 60000 (60s). If this route is not called within 60 seconds, all your orders will be automatically canceled. This is also available via [WebSocket](https://www.bitmex.com/app/wsAPI#Dead-Mans-Switch-Auto-Cancel). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.order_cancel_all_after(timeout, async_req=True) >>> result = thread.get() :param async_req bool :param float timeout: Timeout in ms. Set to 0 to cancel this timer. (required) :return: object If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.order_cancel_all_after_with_http_info(timeout, **kwargs) # noqa: E501 else: (data) = self.order_cancel_all_after_with_http_info(timeout, **kwargs) # noqa: E501 return data
[ "def", "order_cancel_all_after", "(", "self", ",", "timeout", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async_req'", ")", ":", "return", "self", ".", "...
https://github.com/BitMEX/api-connectors/blob/37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812/auto-generated/python/swagger_client/api/order_api.py#L375-L395
physercoe/starquant
c00cad64d1de2da05081b3dc320ef264c6295e08
source/strategy/strategy_base.py
python
StrategyBase.put_event
(self)
Put an strategy data event for ui update.
Put an strategy data event for ui update.
[ "Put", "an", "strategy", "data", "event", "for", "ui", "update", "." ]
def put_event(self): """ Put an strategy data event for ui update. """ if self.inited: self.strategy_engine.put_strategy_event(self)
[ "def", "put_event", "(", "self", ")", ":", "if", "self", ".", "inited", ":", "self", ".", "strategy_engine", ".", "put_strategy_event", "(", "self", ")" ]
https://github.com/physercoe/starquant/blob/c00cad64d1de2da05081b3dc320ef264c6295e08/source/strategy/strategy_base.py#L1158-L1163
clementine-player/Clementine
111379dfd027802b59125829fcf87e3e1d0ad73b
dist/cpplint.py
python
_AddFilters
(filters)
Adds more filter overrides. Unlike _SetFilters, this function does not reset the current list of filters available. Args: filters: A string of comma-separated filters (eg "whitespace/indent"). Each filter should start with + or -; else we die.
Adds more filter overrides.
[ "Adds", "more", "filter", "overrides", "." ]
def _AddFilters(filters): """Adds more filter overrides. Unlike _SetFilters, this function does not reset the current list of filters available. Args: filters: A string of comma-separated filters (eg "whitespace/indent"). Each filter should start with + or -; else we die. """ _cpplint_state.AddFilters(filters)
[ "def", "_AddFilters", "(", "filters", ")", ":", "_cpplint_state", ".", "AddFilters", "(", "filters", ")" ]
https://github.com/clementine-player/Clementine/blob/111379dfd027802b59125829fcf87e3e1d0ad73b/dist/cpplint.py#L887-L897
hpi-xnor/BMXNet-v2
af2b1859eafc5c721b1397cef02f946aaf2ce20d
python/mxnet/executor.py
python
Executor.__init__
(self, handle, symbol, ctx, grad_req, group2ctx)
Constructor, used Symbol.bind and Symbol.simple_bind instead. Parameters ---------- handle: ExecutorHandle ExecutorHandle generated by calling `bind`. See Also -------- Symbol.bind : to create executor.
Constructor, used Symbol.bind and Symbol.simple_bind instead.
[ "Constructor", "used", "Symbol", ".", "bind", "and", "Symbol", ".", "simple_bind", "instead", "." ]
def __init__(self, handle, symbol, ctx, grad_req, group2ctx): """Constructor, used Symbol.bind and Symbol.simple_bind instead. Parameters ---------- handle: ExecutorHandle ExecutorHandle generated by calling `bind`. See Also -------- Symbol.bind : to create executor. """ if not isinstance(handle, ExecutorHandle): raise TypeError("Handle type error") self.handle = handle self.arg_arrays = [] self.grad_arrays = [] self.aux_arrays = [] self.outputs = self._get_outputs() self._symbol = copy.deepcopy(symbol) self._optimized_symbol = None self._arg_dict = None self._grad_dict = None self._aux_dict = None self._output_dict = None self._monitor_callback = None self._ctx = copy.deepcopy(ctx) self._grad_req = copy.deepcopy(grad_req) self._group2ctx = copy.deepcopy(group2ctx)
[ "def", "__init__", "(", "self", ",", "handle", ",", "symbol", ",", "ctx", ",", "grad_req", ",", "group2ctx", ")", ":", "if", "not", "isinstance", "(", "handle", ",", "ExecutorHandle", ")", ":", "raise", "TypeError", "(", "\"Handle type error\"", ")", "self...
https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/python/mxnet/executor.py#L56-L84
casadi/casadi
8d0f80a4d0fe2054384bfb9748f7a0f6bae540ff
misc/cpplint.py
python
FindNextMatchingAngleBracket
(clean_lines, linenum, init_suffix)
return True
Find the corresponding > to close a template. Args: clean_lines: A CleansedLines instance containing the file. linenum: Current line number. init_suffix: Remainder of the current line after the initial <. Returns: True if a matching bracket exists.
Find the corresponding > to close a template.
[ "Find", "the", "corresponding", ">", "to", "close", "a", "template", "." ]
def FindNextMatchingAngleBracket(clean_lines, linenum, init_suffix): """Find the corresponding > to close a template. Args: clean_lines: A CleansedLines instance containing the file. linenum: Current line number. init_suffix: Remainder of the current line after the initial <. Returns: True if a matching bracket exists. """ line = init_suffix nesting_stack = ['<'] while True: # Find the next operator that can tell us whether < is used as an # opening bracket or as a less-than operator. We only want to # warn on the latter case. # # We could also check all other operators and terminate the search # early, e.g. if we got something like this "a<b+c", the "<" is # most likely a less-than operator, but then we will get false # positives for default arguments and other template expressions. match = Search(r'^[^<>(),;\[\]]*([<>(),;\[\]])(.*)$', line) if match: # Found an operator, update nesting stack operator = match.group(1) line = match.group(2) if nesting_stack[-1] == '<': # Expecting closing angle bracket if operator in ('<', '(', '['): nesting_stack.append(operator) elif operator == '>': nesting_stack.pop() if not nesting_stack: # Found matching angle bracket return True elif operator == ',': # Got a comma after a bracket, this is most likely a template # argument. We have not seen a closing angle bracket yet, but # it's probably a few lines later if we look for it, so just # return early here. return True else: # Got some other operator. return False else: # Expecting closing parenthesis or closing bracket if operator in ('<', '(', '['): nesting_stack.append(operator) elif operator in (')', ']'): # We don't bother checking for matching () or []. If we got # something like (] or [), it would have been a syntax error. nesting_stack.pop() else: # Scan the next line linenum += 1 if linenum >= len(clean_lines.elided): break line = clean_lines.elided[linenum] # Exhausted all remaining lines and still no matching angle bracket. # Most likely the input was incomplete, otherwise we should have # seen a semicolon and returned early. return True
[ "def", "FindNextMatchingAngleBracket", "(", "clean_lines", ",", "linenum", ",", "init_suffix", ")", ":", "line", "=", "init_suffix", "nesting_stack", "=", "[", "'<'", "]", "while", "True", ":", "# Find the next operator that can tell us whether < is used as an", "# openin...
https://github.com/casadi/casadi/blob/8d0f80a4d0fe2054384bfb9748f7a0f6bae540ff/misc/cpplint.py#L2410-L2476
apple/swift-lldb
d74be846ef3e62de946df343e8c234bde93a8912
third_party/Python/module/pexpect-4.6/pexpect/pty_spawn.py
python
spawn.waitnoecho
(self, timeout=-1)
This waits until the terminal ECHO flag is set False. This returns True if the echo mode is off. This returns False if the ECHO flag was not set False before the timeout. This can be used to detect when the child is waiting for a password. Usually a child application will turn off echo mode when it is waiting for the user to enter a password. For example, instead of expecting the "password:" prompt you can wait for the child to set ECHO off:: p = pexpect.spawn('ssh user@example.com') p.waitnoecho() p.sendline(mypassword) If timeout==-1 then this method will use the value in self.timeout. If timeout==None then this method to block until ECHO flag is False.
This waits until the terminal ECHO flag is set False. This returns True if the echo mode is off. This returns False if the ECHO flag was not set False before the timeout. This can be used to detect when the child is waiting for a password. Usually a child application will turn off echo mode when it is waiting for the user to enter a password. For example, instead of expecting the "password:" prompt you can wait for the child to set ECHO off::
[ "This", "waits", "until", "the", "terminal", "ECHO", "flag", "is", "set", "False", ".", "This", "returns", "True", "if", "the", "echo", "mode", "is", "off", ".", "This", "returns", "False", "if", "the", "ECHO", "flag", "was", "not", "set", "False", "be...
def waitnoecho(self, timeout=-1): '''This waits until the terminal ECHO flag is set False. This returns True if the echo mode is off. This returns False if the ECHO flag was not set False before the timeout. This can be used to detect when the child is waiting for a password. Usually a child application will turn off echo mode when it is waiting for the user to enter a password. For example, instead of expecting the "password:" prompt you can wait for the child to set ECHO off:: p = pexpect.spawn('ssh user@example.com') p.waitnoecho() p.sendline(mypassword) If timeout==-1 then this method will use the value in self.timeout. If timeout==None then this method to block until ECHO flag is False. ''' if timeout == -1: timeout = self.timeout if timeout is not None: end_time = time.time() + timeout while True: if not self.getecho(): return True if timeout < 0 and timeout is not None: return False if timeout is not None: timeout = end_time - time.time() time.sleep(0.1)
[ "def", "waitnoecho", "(", "self", ",", "timeout", "=", "-", "1", ")", ":", "if", "timeout", "==", "-", "1", ":", "timeout", "=", "self", ".", "timeout", "if", "timeout", "is", "not", "None", ":", "end_time", "=", "time", ".", "time", "(", ")", "+...
https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/third_party/Python/module/pexpect-4.6/pexpect/pty_spawn.py#L343-L371
PaddlePaddle/Paddle
1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c
python/paddle/fluid/dygraph/dygraph_to_static/program_translator.py
python
StaticFunction.dygraph_function
(self)
return self._dygraph_function
Returns the original decorated function.
Returns the original decorated function.
[ "Returns", "the", "original", "decorated", "function", "." ]
def dygraph_function(self): """ Returns the original decorated function. """ return self._dygraph_function
[ "def", "dygraph_function", "(", "self", ")", ":", "return", "self", ".", "_dygraph_function" ]
https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/fluid/dygraph/dygraph_to_static/program_translator.py#L448-L452
facebookresearch/mvfst-rl
778bc4259ae7277e67c2ead593a493845c93db83
third-party/gala/utils.py
python
update_linear_schedule
(optimizer, epoch, total_num_epochs, initial_lr)
Decreases the learning rate linearly
Decreases the learning rate linearly
[ "Decreases", "the", "learning", "rate", "linearly" ]
def update_linear_schedule(optimizer, epoch, total_num_epochs, initial_lr): """Decreases the learning rate linearly""" lr = initial_lr - (initial_lr * (epoch / float(total_num_epochs))) for param_group in optimizer.param_groups: param_group['lr'] = lr
[ "def", "update_linear_schedule", "(", "optimizer", ",", "epoch", ",", "total_num_epochs", ",", "initial_lr", ")", ":", "lr", "=", "initial_lr", "-", "(", "initial_lr", "*", "(", "epoch", "/", "float", "(", "total_num_epochs", ")", ")", ")", "for", "param_gro...
https://github.com/facebookresearch/mvfst-rl/blob/778bc4259ae7277e67c2ead593a493845c93db83/third-party/gala/utils.py#L75-L79
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/debug/cli/profile_analyzer_cli.py
python
ProfileAnalyzer._get_profile_data_generator
(self)
return profile_data_generator
Get function that generates `ProfileDatum` objects. Returns: A function that generates `ProfileDatum` objects.
Get function that generates `ProfileDatum` objects.
[ "Get", "function", "that", "generates", "ProfileDatum", "objects", "." ]
def _get_profile_data_generator(self): """Get function that generates `ProfileDatum` objects. Returns: A function that generates `ProfileDatum` objects. """ node_to_file_path = {} node_to_line_number = {} node_to_func_name = {} node_to_op_type = {} for op in self._graph.get_operations(): for trace_entry in reversed(op.traceback): file_path = trace_entry[0] line_num = trace_entry[1] func_name = trace_entry[2] if not source_utils.guess_is_tensorflow_py_library(file_path): break node_to_file_path[op.name] = file_path node_to_line_number[op.name] = line_num node_to_func_name[op.name] = func_name node_to_op_type[op.name] = op.type def profile_data_generator(device_step_stats): for node_stats in device_step_stats.node_stats: if node_stats.node_name == "_SOURCE" or node_stats.node_name == "_SINK": continue yield profiling.ProfileDatum( device_step_stats.device, node_stats, node_to_file_path.get(node_stats.node_name, ""), node_to_line_number.get(node_stats.node_name, 0), node_to_func_name.get(node_stats.node_name, ""), node_to_op_type.get(node_stats.node_name, "")) return profile_data_generator
[ "def", "_get_profile_data_generator", "(", "self", ")", ":", "node_to_file_path", "=", "{", "}", "node_to_line_number", "=", "{", "}", "node_to_func_name", "=", "{", "}", "node_to_op_type", "=", "{", "}", "for", "op", "in", "self", ".", "_graph", ".", "get_o...
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/debug/cli/profile_analyzer_cli.py#L441-L474
apiaryio/drafter
4634ebd07f6c6f257cc656598ccd535492fdfb55
tools/gyp/pylib/gyp/MSVSSettings.py
python
_Renamed
(tool, msvs_name, msbuild_name, setting_type)
Defines a setting for which the name has changed. Args: tool: a dictionary that gives the names of the tool for MSVS and MSBuild. msvs_name: the name of the MSVS setting. msbuild_name: the name of the MSBuild setting. setting_type: the type of this setting.
Defines a setting for which the name has changed.
[ "Defines", "a", "setting", "for", "which", "the", "name", "has", "changed", "." ]
def _Renamed(tool, msvs_name, msbuild_name, setting_type): """Defines a setting for which the name has changed. Args: tool: a dictionary that gives the names of the tool for MSVS and MSBuild. msvs_name: the name of the MSVS setting. msbuild_name: the name of the MSBuild setting. setting_type: the type of this setting. """ def _Translate(value, msbuild_settings): msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value) _msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS _msbuild_validators[tool.msbuild_name][msbuild_name] = ( setting_type.ValidateMSBuild) _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
[ "def", "_Renamed", "(", "tool", ",", "msvs_name", ",", "msbuild_name", ",", "setting_type", ")", ":", "def", "_Translate", "(", "value", ",", "msbuild_settings", ")", ":", "msbuild_tool_settings", "=", "_GetMSBuildToolSettings", "(", "msbuild_settings", ",", "tool...
https://github.com/apiaryio/drafter/blob/4634ebd07f6c6f257cc656598ccd535492fdfb55/tools/gyp/pylib/gyp/MSVSSettings.py#L244-L261
cms-sw/cmssw
fd9de012d503d3405420bcbeec0ec879baa57cf2
FWCore/ParameterSet/python/Types.py
python
ESInputTag.value
(self)
return self.configValue()
Return the string rep
Return the string rep
[ "Return", "the", "string", "rep" ]
def value(self): "Return the string rep" return self.configValue()
[ "def", "value", "(", "self", ")", ":", "return", "self", ".", "configValue", "(", ")" ]
https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/FWCore/ParameterSet/python/Types.py#L774-L776
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/profiler/internal/flops_registry.py
python
_unary_op_flops
(graph, node, ops_per_element=1)
return ops.OpStats("flops", in_shape.num_elements() * ops_per_element)
Common code which compute flops for unary operations.
Common code which compute flops for unary operations.
[ "Common", "code", "which", "compute", "flops", "for", "unary", "operations", "." ]
def _unary_op_flops(graph, node, ops_per_element=1): """Common code which compute flops for unary operations.""" in_shape = graph_util.tensor_shape_from_node_def_name(graph, node.input[0]) in_shape.assert_is_fully_defined() return ops.OpStats("flops", in_shape.num_elements() * ops_per_element)
[ "def", "_unary_op_flops", "(", "graph", ",", "node", ",", "ops_per_element", "=", "1", ")", ":", "in_shape", "=", "graph_util", ".", "tensor_shape_from_node_def_name", "(", "graph", ",", "node", ".", "input", "[", "0", "]", ")", "in_shape", ".", "assert_is_f...
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/profiler/internal/flops_registry.py#L64-L68
sfzhang15/FaceBoxes
b52cc92f9362d3adc08d54666aeb9ebb62fdb7da
scripts/cpp_lint.py
python
ResetNolintSuppressions
()
Resets the set of NOLINT suppressions to empty.
Resets the set of NOLINT suppressions to empty.
[ "Resets", "the", "set", "of", "NOLINT", "suppressions", "to", "empty", "." ]
def ResetNolintSuppressions(): "Resets the set of NOLINT suppressions to empty." _error_suppressions.clear()
[ "def", "ResetNolintSuppressions", "(", ")", ":", "_error_suppressions", ".", "clear", "(", ")" ]
https://github.com/sfzhang15/FaceBoxes/blob/b52cc92f9362d3adc08d54666aeb9ebb62fdb7da/scripts/cpp_lint.py#L495-L497
kamyu104/LeetCode-Solutions
77605708a927ea3b85aee5a479db733938c7c211
Python/maximum-depth-of-n-ary-tree.py
python
Solution.maxDepth
(self, root)
return 1+depth
:type root: Node :rtype: int
:type root: Node :rtype: int
[ ":", "type", "root", ":", "Node", ":", "rtype", ":", "int" ]
def maxDepth(self, root): """ :type root: Node :rtype: int """ if not root: return 0 depth = 0 for child in root.children: depth = max(depth, self.maxDepth(child)) return 1+depth
[ "def", "maxDepth", "(", "self", ",", "root", ")", ":", "if", "not", "root", ":", "return", "0", "depth", "=", "0", "for", "child", "in", "root", ".", "children", ":", "depth", "=", "max", "(", "depth", ",", "self", ".", "maxDepth", "(", "child", ...
https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/maximum-depth-of-n-ary-tree.py#L11-L21
google/filament
d21f092645b8e1e312307cbf89f1484891347c63
web/docs/build.py
python
gather_docstrings
(paths)
return result
Given a list of paths to JS and CPP files, builds a JSON tree of type descriptions.
Given a list of paths to JS and CPP files, builds a JSON tree of type descriptions.
[ "Given", "a", "list", "of", "paths", "to", "JS", "and", "CPP", "files", "builds", "a", "JSON", "tree", "of", "type", "descriptions", "." ]
def gather_docstrings(paths): """Given a list of paths to JS and CPP files, builds a JSON tree of type descriptions.""" result = [] stack = [{"tags": ["root"]}] previous = stack[0] docline = re.compile(r' */// (.+)') enumline = re.compile(r' *enum_.*\"(.*)\"') enumvalue = re.compile(r' *\.value\("(.*)\"') tagged = re.compile(r'(\S+)? *::(.+):: *(.*)') lines = [] enumerating = False current_enumeration = None for path in paths: lines += open(path).readlines() for line in lines: match_obj = docline.match(line) if not match_obj: match_obj = enumline.match(line) if match_obj: result.append({ "name": match_obj.groups()[0], "tags": "enum", "brief": "", "detail": None, "children": [], }) current_enumeration = result[-1]["children"] enumerating = True continue match_obj = enumvalue.match(line) if match_obj: val = match_obj.groups()[0] current_enumeration.append(val) continue ln = match_obj.groups()[0] match_obj = tagged.match(ln) if match_obj: name = match_obj.groups()[0] tags = match_obj.groups()[1].split() brief = match_obj.groups()[2] entity = { "name": name, "tags": tags, "brief": brief, "detail": None, "children": [] } # Check if this is continuation of a previous type. if brief == '': for existing_type in result: if existing_type['name'] == name: entity = existing_type result.remove(existing_type) break top = stack[-1]["tags"] if 'root' in top: result.append(entity) stack.append(entity) elif 'class' in tags or 'function' in tags: result.append(entity) stack[-1] = entity elif 'method' in tags and 'class' in top: stack[-1]["children"].append(entity) stack.append(entity) elif 'method' in tags: stack[-2]["children"].append(entity) stack[-1] = entity elif 'retval' in tags or 'argument' in tags: stack[-1]["children"].append(entity) previous = entity else: brief = previous["brief"] detail = previous["detail"] if brief.endswith("\\"): previous["brief"] = brief[:-1] + ln elif not detail: previous["detail"] = ln else: previous["detail"] += "\n" + ln return result
[ "def", "gather_docstrings", "(", "paths", ")", ":", "result", "=", "[", "]", "stack", "=", "[", "{", "\"tags\"", ":", "[", "\"root\"", "]", "}", "]", "previous", "=", "stack", "[", "0", "]", "docline", "=", "re", ".", "compile", "(", "r' */// (.+)'",...
https://github.com/google/filament/blob/d21f092645b8e1e312307cbf89f1484891347c63/web/docs/build.py#L250-L332
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
third_party/python_gflags/gflags.py
python
_RegisterBoundsValidatorIfNeeded
(parser, name, flag_values)
Enforce lower and upper bounds for numeric flags. Args: parser: NumericParser (either FloatParser or IntegerParser). Provides lower and upper bounds, and help text to display. name: string, name of the flag flag_values: FlagValues
Enforce lower and upper bounds for numeric flags.
[ "Enforce", "lower", "and", "upper", "bounds", "for", "numeric", "flags", "." ]
def _RegisterBoundsValidatorIfNeeded(parser, name, flag_values): """Enforce lower and upper bounds for numeric flags. Args: parser: NumericParser (either FloatParser or IntegerParser). Provides lower and upper bounds, and help text to display. name: string, name of the flag flag_values: FlagValues """ if parser.lower_bound is not None or parser.upper_bound is not None: def Checker(value): if value is not None and parser.IsOutsideBounds(value): message = '%s is not %s' % (value, parser.syntactic_help) raise gflags_validators.Error(message) return True RegisterValidator(name, Checker, flag_values=flag_values)
[ "def", "_RegisterBoundsValidatorIfNeeded", "(", "parser", ",", "name", ",", "flag_values", ")", ":", "if", "parser", ".", "lower_bound", "is", "not", "None", "or", "parser", ".", "upper_bound", "is", "not", "None", ":", "def", "Checker", "(", "value", ")", ...
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/third_party/python_gflags/gflags.py#L2129-L2148
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/xmlrpc/server.py
python
SimpleXMLRPCRequestHandler.log_request
(self, code='-', size='-')
Selectively log an accepted request.
Selectively log an accepted request.
[ "Selectively", "log", "an", "accepted", "request", "." ]
def log_request(self, code='-', size='-'): """Selectively log an accepted request.""" if self.server.logRequests: BaseHTTPRequestHandler.log_request(self, code, size)
[ "def", "log_request", "(", "self", ",", "code", "=", "'-'", ",", "size", "=", "'-'", ")", ":", "if", "self", ".", "server", ".", "logRequests", ":", "BaseHTTPRequestHandler", ".", "log_request", "(", "self", ",", "code", ",", "size", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/xmlrpc/server.py#L574-L578
google/earthenterprise
0fe84e29be470cd857e3a0e52e5d0afd5bb8cee9
earth_enterprise/src/google/protobuf-py/google/protobuf/text_format.py
python
_Tokenizer.ConsumeUint64
(self)
return result
Consumes an unsigned 64bit integer number. Returns: The integer parsed. Raises: ParseError: If an unsigned 64bit integer couldn't be consumed.
Consumes an unsigned 64bit integer number.
[ "Consumes", "an", "unsigned", "64bit", "integer", "number", "." ]
def ConsumeUint64(self): """Consumes an unsigned 64bit integer number. Returns: The integer parsed. Raises: ParseError: If an unsigned 64bit integer couldn't be consumed. """ try: result = self._ParseInteger(self.token, is_signed=False, is_long=True) except ValueError, e: raise self._IntegerParseError(e) self.NextToken() return result
[ "def", "ConsumeUint64", "(", "self", ")", ":", "try", ":", "result", "=", "self", ".", "_ParseInteger", "(", "self", ".", "token", ",", "is_signed", "=", "False", ",", "is_long", "=", "True", ")", "except", "ValueError", ",", "e", ":", "raise", "self",...
https://github.com/google/earthenterprise/blob/0fe84e29be470cd857e3a0e52e5d0afd5bb8cee9/earth_enterprise/src/google/protobuf-py/google/protobuf/text_format.py#L470-L484
google/llvm-propeller
45c226984fe8377ebfb2ad7713c680d652ba678d
lldb/examples/python/file_extract.py
python
FileExtract.get_fixed_length_c_string
( self, n, fail_value='', isprint_only_with_space_padding=False)
Extract a single fixed length C string from the binary file at the current file position, returns a single C string
Extract a single fixed length C string from the binary file at the current file position, returns a single C string
[ "Extract", "a", "single", "fixed", "length", "C", "string", "from", "the", "binary", "file", "at", "the", "current", "file", "position", "returns", "a", "single", "C", "string" ]
def get_fixed_length_c_string( self, n, fail_value='', isprint_only_with_space_padding=False): '''Extract a single fixed length C string from the binary file at the current file position, returns a single C string''' s = self.read_size(n) if s: cstr, = struct.unpack(self.byte_order + ("%i" % n) + 's', s) # Strip trialing NULLs cstr = string.strip(cstr, "\0") if isprint_only_with_space_padding: for c in cstr: if c in string.printable or ord(c) == 0: continue return fail_value return cstr else: return fail_value
[ "def", "get_fixed_length_c_string", "(", "self", ",", "n", ",", "fail_value", "=", "''", ",", "isprint_only_with_space_padding", "=", "False", ")", ":", "s", "=", "self", ".", "read_size", "(", "n", ")", "if", "s", ":", "cstr", ",", "=", "struct", ".", ...
https://github.com/google/llvm-propeller/blob/45c226984fe8377ebfb2ad7713c680d652ba678d/lldb/examples/python/file_extract.py#L135-L153
hpi-xnor/BMXNet-v2
af2b1859eafc5c721b1397cef02f946aaf2ce20d
example/rcnn/symdata/bbox.py
python
clip_boxes
(boxes, im_shape)
return boxes
Clip boxes to image boundaries. :param boxes: [N, 4* num_classes] :param im_shape: tuple of 2 :return: [N, 4* num_classes]
Clip boxes to image boundaries. :param boxes: [N, 4* num_classes] :param im_shape: tuple of 2 :return: [N, 4* num_classes]
[ "Clip", "boxes", "to", "image", "boundaries", ".", ":", "param", "boxes", ":", "[", "N", "4", "*", "num_classes", "]", ":", "param", "im_shape", ":", "tuple", "of", "2", ":", "return", ":", "[", "N", "4", "*", "num_classes", "]" ]
def clip_boxes(boxes, im_shape): """ Clip boxes to image boundaries. :param boxes: [N, 4* num_classes] :param im_shape: tuple of 2 :return: [N, 4* num_classes] """ # x1 >= 0 boxes[:, 0::4] = np.maximum(np.minimum(boxes[:, 0::4], im_shape[1] - 1), 0) # y1 >= 0 boxes[:, 1::4] = np.maximum(np.minimum(boxes[:, 1::4], im_shape[0] - 1), 0) # x2 < im_shape[1] boxes[:, 2::4] = np.maximum(np.minimum(boxes[:, 2::4], im_shape[1] - 1), 0) # y2 < im_shape[0] boxes[:, 3::4] = np.maximum(np.minimum(boxes[:, 3::4], im_shape[0] - 1), 0) return boxes
[ "def", "clip_boxes", "(", "boxes", ",", "im_shape", ")", ":", "# x1 >= 0", "boxes", "[", ":", ",", "0", ":", ":", "4", "]", "=", "np", ".", "maximum", "(", "np", ".", "minimum", "(", "boxes", "[", ":", ",", "0", ":", ":", "4", "]", ",", "im_s...
https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/example/rcnn/symdata/bbox.py#L61-L76
benoitsteiner/tensorflow-opencl
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
tensorflow/contrib/distributions/python/ops/vector_exponential_linear_operator.py
python
VectorExponentialLinearOperator.loc
(self)
return self.bijector.shift
The `loc` `Tensor` in `Y = scale @ X + loc`.
The `loc` `Tensor` in `Y = scale
[ "The", "loc", "Tensor", "in", "Y", "=", "scale" ]
def loc(self): """The `loc` `Tensor` in `Y = scale @ X + loc`.""" return self.bijector.shift
[ "def", "loc", "(", "self", ")", ":", "return", "self", ".", "bijector", ".", "shift" ]
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/contrib/distributions/python/ops/vector_exponential_linear_operator.py#L204-L206
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/python/framework/ops.py
python
Graph.building_function
(self)
return self._building_function
Returns True iff this graph represents a function.
Returns True iff this graph represents a function.
[ "Returns", "True", "iff", "this", "graph", "represents", "a", "function", "." ]
def building_function(self): """Returns True iff this graph represents a function.""" return self._building_function
[ "def", "building_function", "(", "self", ")", ":", "return", "self", ".", "_building_function" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/framework/ops.py#L2536-L2538
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/agw/supertooltip.py
python
SuperToolTip.IsEnabled
(self)
return wx.GetApp().__superToolTip
Returns ``True`` when :class:`SuperToolTip` is globally enabled, ``False`` otherwise. .. versionadded:: 0.9.6
Returns ``True`` when :class:`SuperToolTip` is globally enabled, ``False`` otherwise.
[ "Returns", "True", "when", ":", "class", ":", "SuperToolTip", "is", "globally", "enabled", "False", "otherwise", "." ]
def IsEnabled(self): """ Returns ``True`` when :class:`SuperToolTip` is globally enabled, ``False`` otherwise. .. versionadded:: 0.9.6 """ return wx.GetApp().__superToolTip
[ "def", "IsEnabled", "(", "self", ")", ":", "return", "wx", ".", "GetApp", "(", ")", ".", "__superToolTip" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/supertooltip.py#L1433-L1440
Yelp/MOE
5b5a6a2c6c3cf47320126f7f5894e2a83e347f5c
moe/optimal_learning/python/python_version/gaussian_process.py
python
GaussianProcess.compute_grad_cholesky_variance_of_points
(self, points_to_sample, chol_var=None, num_derivatives=-1)
return grad_chol_decomp
r"""Compute the gradient of the cholesky factorization of the variance (matrix) of this GP at each point of ``Xs`` (``points_to_sample``) wrt ``Xs``. .. Warning:: ``points_to_sample`` should not contain duplicate points. This function accounts for the effect on the gradient resulting from cholesky-factoring the variance matrix. See Smith 1995 for algorithm details. Observe that ``grad_chol`` is nominally sized: ``grad_chol[num_to_sample][num_to_sample][num_to_sample][dim]``. Let this be indexed ``grad_chol[k][j][i][d]``, which is read the derivative of ``var[j][i]`` with respect to ``x_{k,d}`` (x = ``points_to_sample``) .. Note:: Comments are copied from :mod:`moe.optimal_learning.python.interfaces.gaussian_process_interface.GaussianProcessInterface.compute_grad_cholesky_variance_of_points` :param points_to_sample: num_to_sample points (in dim dimensions) being sampled from the GP :type points_to_sample: array of float64 with shape (num_to_sample, dim) :param chol_var: the cholesky factorization (L) of the variance matrix; only the lower triangle is accessed :type chol_var: array of float64 with shape (num_to_sample, num_to_sample) :param num_derivatives: return derivatives wrt points_to_sample[0:num_derivatives]; large or negative values are clamped :type num_derivatives: int :return: grad_chol: gradient of the cholesky factorization of the variance matrix of this GP. ``grad_chol[k][j][i][d]`` is actually the gradients of ``var_{j,i}`` with respect to ``x_{k,d}``, the d-th dimension of the k-th entry of ``points_to_sample``, where k = ``var_of_grad`` :rtype: array of float64 with shape (num_derivatives, num_to_sample, num_to_sample, dim)
r"""Compute the gradient of the cholesky factorization of the variance (matrix) of this GP at each point of ``Xs`` (``points_to_sample``) wrt ``Xs``.
[ "r", "Compute", "the", "gradient", "of", "the", "cholesky", "factorization", "of", "the", "variance", "(", "matrix", ")", "of", "this", "GP", "at", "each", "point", "of", "Xs", "(", "points_to_sample", ")", "wrt", "Xs", "." ]
def compute_grad_cholesky_variance_of_points(self, points_to_sample, chol_var=None, num_derivatives=-1): r"""Compute the gradient of the cholesky factorization of the variance (matrix) of this GP at each point of ``Xs`` (``points_to_sample``) wrt ``Xs``. .. Warning:: ``points_to_sample`` should not contain duplicate points. This function accounts for the effect on the gradient resulting from cholesky-factoring the variance matrix. See Smith 1995 for algorithm details. Observe that ``grad_chol`` is nominally sized: ``grad_chol[num_to_sample][num_to_sample][num_to_sample][dim]``. Let this be indexed ``grad_chol[k][j][i][d]``, which is read the derivative of ``var[j][i]`` with respect to ``x_{k,d}`` (x = ``points_to_sample``) .. Note:: Comments are copied from :mod:`moe.optimal_learning.python.interfaces.gaussian_process_interface.GaussianProcessInterface.compute_grad_cholesky_variance_of_points` :param points_to_sample: num_to_sample points (in dim dimensions) being sampled from the GP :type points_to_sample: array of float64 with shape (num_to_sample, dim) :param chol_var: the cholesky factorization (L) of the variance matrix; only the lower triangle is accessed :type chol_var: array of float64 with shape (num_to_sample, num_to_sample) :param num_derivatives: return derivatives wrt points_to_sample[0:num_derivatives]; large or negative values are clamped :type num_derivatives: int :return: grad_chol: gradient of the cholesky factorization of the variance matrix of this GP. ``grad_chol[k][j][i][d]`` is actually the gradients of ``var_{j,i}`` with respect to ``x_{k,d}``, the d-th dimension of the k-th entry of ``points_to_sample``, where k = ``var_of_grad`` :rtype: array of float64 with shape (num_derivatives, num_to_sample, num_to_sample, dim) """ num_derivatives = self._clamp_num_derivatives(points_to_sample.shape[0], num_derivatives) # Users can pass this in directly b/c it has often been computed already. 
if chol_var is None: var_star = self.compute_variance_of_points(points_to_sample) # Note: only access the lower triangle of chol_var; upper triangle is garbage # cho_factor returns a tuple, (factorized_matrix, lower_tri_flag); grab the matrix chol_var = scipy.linalg.cho_factor(var_star, lower=True, overwrite_a=True)[0] grad_chol_decomp = numpy.empty((num_derivatives, points_to_sample.shape[0], points_to_sample.shape[0], self.dim)) for i in xrange(num_derivatives): grad_chol_decomp[i, ...] = self._compute_grad_cholesky_variance_of_points_per_point(points_to_sample, chol_var, i) return grad_chol_decomp
[ "def", "compute_grad_cholesky_variance_of_points", "(", "self", ",", "points_to_sample", ",", "chol_var", "=", "None", ",", "num_derivatives", "=", "-", "1", ")", ":", "num_derivatives", "=", "self", ".", "_clamp_num_derivatives", "(", "points_to_sample", ".", "shap...
https://github.com/Yelp/MOE/blob/5b5a6a2c6c3cf47320126f7f5894e2a83e347f5c/moe/optimal_learning/python/python_version/gaussian_process.py#L357-L399
plaidml/plaidml
f3c6681db21460e5fdc11ae651d6d7b6c27f8262
mlperf/pycoco.py
python
COCO.download
(self, tarDir=None, imgIds=[])
Download COCO images from mscoco.org server. :param tarDir (str): COCO results directory name imgIds (list): images to be downloaded :return:
Download COCO images from mscoco.org server. :param tarDir (str): COCO results directory name imgIds (list): images to be downloaded :return:
[ "Download", "COCO", "images", "from", "mscoco", ".", "org", "server", ".", ":", "param", "tarDir", "(", "str", ")", ":", "COCO", "results", "directory", "name", "imgIds", "(", "list", ")", ":", "images", "to", "be", "downloaded", ":", "return", ":" ]
def download(self, tarDir=None, imgIds=[]): ''' Download COCO images from mscoco.org server. :param tarDir (str): COCO results directory name imgIds (list): images to be downloaded :return: ''' if tarDir is None: print('Please specify target directory') return -1 if len(imgIds) == 0: imgs = self.imgs.values() else: imgs = self.loadImgs(imgIds) N = len(imgs) if not os.path.exists(tarDir): os.makedirs(tarDir) for i, img in enumerate(imgs): tic = time.time() fname = os.path.join(tarDir, img['file_name']) if not os.path.exists(fname): urlretrieve(img['coco_url'], fname) print('downloaded {}/{} images (t={:0.1f}s)'.format(i, N, time.time() - tic))
[ "def", "download", "(", "self", ",", "tarDir", "=", "None", ",", "imgIds", "=", "[", "]", ")", ":", "if", "tarDir", "is", "None", ":", "print", "(", "'Please specify target directory'", ")", "return", "-", "1", "if", "len", "(", "imgIds", ")", "==", ...
https://github.com/plaidml/plaidml/blob/f3c6681db21460e5fdc11ae651d6d7b6c27f8262/mlperf/pycoco.py#L380-L402
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/framework/type_spec.py
python
lookup
(name)
return _NAME_TO_TYPE_SPEC[name]
Returns the TypeSpec that has been registered with name `name`.
Returns the TypeSpec that has been registered with name `name`.
[ "Returns", "the", "TypeSpec", "that", "has", "been", "registered", "with", "name", "name", "." ]
def lookup(name): """Returns the TypeSpec that has been registered with name `name`.""" if not isinstance(name, str): raise TypeError("Expected `name` to be a string; got %r" % (name,)) if name not in _NAME_TO_TYPE_SPEC: raise ValueError("No TypeSpec has been registered with name %r" % (name,)) return _NAME_TO_TYPE_SPEC[name]
[ "def", "lookup", "(", "name", ")", ":", "if", "not", "isinstance", "(", "name", ",", "str", ")", ":", "raise", "TypeError", "(", "\"Expected `name` to be a string; got %r\"", "%", "(", "name", ",", ")", ")", "if", "name", "not", "in", "_NAME_TO_TYPE_SPEC", ...
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/framework/type_spec.py#L1013-L1019
facebookincubator/BOLT
88c70afe9d388ad430cc150cc158641701397f70
clang/tools/scan-build-py/lib/libear/__init__.py
python
build_libear
(compiler, dst_dir)
Returns the full path to the 'libear' library.
Returns the full path to the 'libear' library.
[ "Returns", "the", "full", "path", "to", "the", "libear", "library", "." ]
def build_libear(compiler, dst_dir): """ Returns the full path to the 'libear' library. """ try: src_dir = os.path.dirname(os.path.realpath(__file__)) toolset = make_toolset(src_dir) toolset.set_compiler(compiler) toolset.set_language_standard('c99') toolset.add_definitions(['-D_GNU_SOURCE']) configure = do_configure(toolset) configure.check_function_exists('execve', 'HAVE_EXECVE') configure.check_function_exists('execv', 'HAVE_EXECV') configure.check_function_exists('execvpe', 'HAVE_EXECVPE') configure.check_function_exists('execvp', 'HAVE_EXECVP') configure.check_function_exists('execvP', 'HAVE_EXECVP2') configure.check_function_exists('exect', 'HAVE_EXECT') configure.check_function_exists('execl', 'HAVE_EXECL') configure.check_function_exists('execlp', 'HAVE_EXECLP') configure.check_function_exists('execle', 'HAVE_EXECLE') configure.check_function_exists('posix_spawn', 'HAVE_POSIX_SPAWN') configure.check_function_exists('posix_spawnp', 'HAVE_POSIX_SPAWNP') configure.check_symbol_exists('_NSGetEnviron', 'crt_externs.h', 'HAVE_NSGETENVIRON') configure.write_by_template( os.path.join(src_dir, 'config.h.in'), os.path.join(dst_dir, 'config.h')) target = create_shared_library('ear', toolset) target.add_include(dst_dir) target.add_sources('ear.c') target.link_against(toolset.dl_libraries()) target.link_against(['pthread']) target.build_release(dst_dir) return os.path.join(dst_dir, target.name) except Exception: logging.info("Could not build interception library.", exc_info=True) return None
[ "def", "build_libear", "(", "compiler", ",", "dst_dir", ")", ":", "try", ":", "src_dir", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "realpath", "(", "__file__", ")", ")", "toolset", "=", "make_toolset", "(", "src_dir", ")", ...
https://github.com/facebookincubator/BOLT/blob/88c70afe9d388ad430cc150cc158641701397f70/clang/tools/scan-build-py/lib/libear/__init__.py#L19-L58
gimli-org/gimli
17aa2160de9b15ababd9ef99e89b1bc3277bbb23
pygimli/viewer/mpl/overlayimage.py
python
cacheFileName
(fullname, vendor)
return os.path.join(path, fileName)
Createfilename and path to cache download data.
Createfilename and path to cache download data.
[ "Createfilename", "and", "path", "to", "cache", "download", "data", "." ]
def cacheFileName(fullname, vendor): """Createfilename and path to cache download data.""" (dirName, fileName) = os.path.split(fullname) #os.path.joint(pg.getConfigPath(), fileName) path = os.path.join(pg.getConfigPath(), vendor, dirName) try: os.makedirs(path) except OSError: pass return os.path.join(path, fileName)
[ "def", "cacheFileName", "(", "fullname", ",", "vendor", ")", ":", "(", "dirName", ",", "fileName", ")", "=", "os", ".", "path", ".", "split", "(", "fullname", ")", "#os.path.joint(pg.getConfigPath(), fileName)", "path", "=", "os", ".", "path", ".", "join", ...
https://github.com/gimli-org/gimli/blob/17aa2160de9b15ababd9ef99e89b1bc3277bbb23/pygimli/viewer/mpl/overlayimage.py#L146-L159
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/agw/cubecolourdialog.py
python
CubeColourDialog.SetCodes
(self)
Sets the HTML/MS Access codes (if any) in the text controls.
Sets the HTML/MS Access codes (if any) in the text controls.
[ "Sets", "the", "HTML", "/", "MS", "Access", "codes", "(", "if", "any", ")", "in", "the", "text", "controls", "." ]
def SetCodes(self): """ Sets the HTML/MS Access codes (if any) in the text controls. """ colour = rgb2html(self._colour) self.htmlCode.SetValue(colour) self.htmlCode.Refresh() if colour in HTMLCodes: colourName, access, webSafe = HTMLCodes[colour] self.webSafe.SetValue(webSafe) self.accessCode.SetValue(access) self.htmlName.SetValue(colourName) else: self.webSafe.SetValue("") self.accessCode.SetValue("") self.htmlName.SetValue("")
[ "def", "SetCodes", "(", "self", ")", ":", "colour", "=", "rgb2html", "(", "self", ".", "_colour", ")", "self", ".", "htmlCode", ".", "SetValue", "(", "colour", ")", "self", ".", "htmlCode", ".", "Refresh", "(", ")", "if", "colour", "in", "HTMLCodes", ...
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/cubecolourdialog.py#L3255-L3270
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/prompt-toolkit/py3/prompt_toolkit/output/base.py
python
Output.cursor_goto
(self, row: int = 0, column: int = 0)
Move cursor position.
Move cursor position.
[ "Move", "cursor", "position", "." ]
def cursor_goto(self, row: int = 0, column: int = 0) -> None: "Move cursor position."
[ "def", "cursor_goto", "(", "self", ",", "row", ":", "int", "=", "0", ",", "column", ":", "int", "=", "0", ")", "->", "None", ":" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/prompt-toolkit/py3/prompt_toolkit/output/base.py#L117-L118
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_windows.py
python
SplitterEvent.GetY
(*args, **kwargs)
return _windows_.SplitterEvent_GetY(*args, **kwargs)
GetY(self) -> int Returns the y coordinate of the double-click point in a EVT_SPLITTER_DCLICK event.
GetY(self) -> int
[ "GetY", "(", "self", ")", "-", ">", "int" ]
def GetY(*args, **kwargs): """ GetY(self) -> int Returns the y coordinate of the double-click point in a EVT_SPLITTER_DCLICK event. """ return _windows_.SplitterEvent_GetY(*args, **kwargs)
[ "def", "GetY", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_windows_", ".", "SplitterEvent_GetY", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_windows.py#L1749-L1756
mysql/mysql-workbench
2f35f9034f015cbcd22139a60e1baa2e3e8e795c
plugins/wb.admin/frontend/wb_admin_utils.py
python
weakcb
(object, cbname)
return lambda ref=weakref.ref(object): call(ref, cbname)
Create a callback that holds a weak reference to the object. When passing a callback for mforms, use this to create a ref to it and prevent circular references that are never freed.
Create a callback that holds a weak reference to the object. When passing a callback for mforms, use this to create a ref to it and prevent circular references that are never freed.
[ "Create", "a", "callback", "that", "holds", "a", "weak", "reference", "to", "the", "object", ".", "When", "passing", "a", "callback", "for", "mforms", "use", "this", "to", "create", "a", "ref", "to", "it", "and", "prevent", "circular", "references", "that"...
def weakcb(object, cbname): """Create a callback that holds a weak reference to the object. When passing a callback for mforms, use this to create a ref to it and prevent circular references that are never freed. """ def call(ref, cbname): callback = getattr(ref(), cbname, None) if callback is None: print("Object has no callback %s"%cbname) else: return callback() return lambda ref=weakref.ref(object): call(ref, cbname)
[ "def", "weakcb", "(", "object", ",", "cbname", ")", ":", "def", "call", "(", "ref", ",", "cbname", ")", ":", "callback", "=", "getattr", "(", "ref", "(", ")", ",", "cbname", ",", "None", ")", "if", "callback", "is", "None", ":", "print", "(", "\"...
https://github.com/mysql/mysql-workbench/blob/2f35f9034f015cbcd22139a60e1baa2e3e8e795c/plugins/wb.admin/frontend/wb_admin_utils.py#L31-L42
BlzFans/wke
b0fa21158312e40c5fbd84682d643022b6c34a93
cygwin/lib/python2.6/decimal.py
python
Context.logical_and
(self, a, b)
return a.logical_and(b, context=self)
Applies the logical operation 'and' between each operand's digits. The operands must be both logical numbers. >>> ExtendedContext.logical_and(Decimal('0'), Decimal('0')) Decimal('0') >>> ExtendedContext.logical_and(Decimal('0'), Decimal('1')) Decimal('0') >>> ExtendedContext.logical_and(Decimal('1'), Decimal('0')) Decimal('0') >>> ExtendedContext.logical_and(Decimal('1'), Decimal('1')) Decimal('1') >>> ExtendedContext.logical_and(Decimal('1100'), Decimal('1010')) Decimal('1000') >>> ExtendedContext.logical_and(Decimal('1111'), Decimal('10')) Decimal('10')
Applies the logical operation 'and' between each operand's digits.
[ "Applies", "the", "logical", "operation", "and", "between", "each", "operand", "s", "digits", "." ]
def logical_and(self, a, b): """Applies the logical operation 'and' between each operand's digits. The operands must be both logical numbers. >>> ExtendedContext.logical_and(Decimal('0'), Decimal('0')) Decimal('0') >>> ExtendedContext.logical_and(Decimal('0'), Decimal('1')) Decimal('0') >>> ExtendedContext.logical_and(Decimal('1'), Decimal('0')) Decimal('0') >>> ExtendedContext.logical_and(Decimal('1'), Decimal('1')) Decimal('1') >>> ExtendedContext.logical_and(Decimal('1100'), Decimal('1010')) Decimal('1000') >>> ExtendedContext.logical_and(Decimal('1111'), Decimal('10')) Decimal('10') """ return a.logical_and(b, context=self)
[ "def", "logical_and", "(", "self", ",", "a", ",", "b", ")", ":", "return", "a", ".", "logical_and", "(", "b", ",", "context", "=", "self", ")" ]
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/decimal.py#L4251-L4269
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/tkinter/__init__.py
python
Misc._bind
(self, what, sequence, func, add, needcleanup=1)
Internal function.
Internal function.
[ "Internal", "function", "." ]
def _bind(self, what, sequence, func, add, needcleanup=1): """Internal function.""" if isinstance(func, str): self.tk.call(what + (sequence, func)) elif func: funcid = self._register(func, self._substitute, needcleanup) cmd = ('%sif {"[%s %s]" == "break"} break\n' % (add and '+' or '', funcid, self._subst_format_str)) self.tk.call(what + (sequence, cmd)) return funcid elif sequence: return self.tk.call(what + (sequence,)) else: return self.tk.splitlist(self.tk.call(what))
[ "def", "_bind", "(", "self", ",", "what", ",", "sequence", ",", "func", ",", "add", ",", "needcleanup", "=", "1", ")", ":", "if", "isinstance", "(", "func", ",", "str", ")", ":", "self", ".", "tk", ".", "call", "(", "what", "+", "(", "sequence", ...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/tkinter/__init__.py#L1195-L1211
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scikit-learn/py3/sklearn/manifold/_mds.py
python
MDS.fit_transform
(self, X, y=None, init=None)
return self.embedding_
Fit the data from X, and returns the embedded coordinates Parameters ---------- X : array, shape (n_samples, n_features) or (n_samples, n_samples) Input data. If ``dissimilarity=='precomputed'``, the input should be the dissimilarity matrix. y : Ignored init : ndarray, shape (n_samples,), optional, default: None Starting configuration of the embedding to initialize the SMACOF algorithm. By default, the algorithm is initialized with a randomly chosen array.
Fit the data from X, and returns the embedded coordinates
[ "Fit", "the", "data", "from", "X", "and", "returns", "the", "embedded", "coordinates" ]
def fit_transform(self, X, y=None, init=None): """ Fit the data from X, and returns the embedded coordinates Parameters ---------- X : array, shape (n_samples, n_features) or (n_samples, n_samples) Input data. If ``dissimilarity=='precomputed'``, the input should be the dissimilarity matrix. y : Ignored init : ndarray, shape (n_samples,), optional, default: None Starting configuration of the embedding to initialize the SMACOF algorithm. By default, the algorithm is initialized with a randomly chosen array. """ X = check_array(X) if X.shape[0] == X.shape[1] and self.dissimilarity != "precomputed": warnings.warn("The MDS API has changed. ``fit`` now constructs an" " dissimilarity matrix from data. To use a custom " "dissimilarity matrix, set " "``dissimilarity='precomputed'``.") if self.dissimilarity == "precomputed": self.dissimilarity_matrix_ = X elif self.dissimilarity == "euclidean": self.dissimilarity_matrix_ = euclidean_distances(X) else: raise ValueError("Proximity must be 'precomputed' or 'euclidean'." " Got %s instead" % str(self.dissimilarity)) self.embedding_, self.stress_, self.n_iter_ = smacof( self.dissimilarity_matrix_, metric=self.metric, n_components=self.n_components, init=init, n_init=self.n_init, n_jobs=self.n_jobs, max_iter=self.max_iter, verbose=self.verbose, eps=self.eps, random_state=self.random_state, return_n_iter=True) return self.embedding_
[ "def", "fit_transform", "(", "self", ",", "X", ",", "y", "=", "None", ",", "init", "=", "None", ")", ":", "X", "=", "check_array", "(", "X", ")", "if", "X", ".", "shape", "[", "0", "]", "==", "X", ".", "shape", "[", "1", "]", "and", "self", ...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py3/sklearn/manifold/_mds.py#L400-L439
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/tools/Editra/src/syntax/_editra_ss.py
python
SyntaxData.GetSyntaxSpec
(self)
return SYNTAX_ITEMS
Syntax Specifications
Syntax Specifications
[ "Syntax", "Specifications" ]
def GetSyntaxSpec(self): """Syntax Specifications """ return SYNTAX_ITEMS
[ "def", "GetSyntaxSpec", "(", "self", ")", ":", "return", "SYNTAX_ITEMS" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/syntax/_editra_ss.py#L72-L74
google/llvm-propeller
45c226984fe8377ebfb2ad7713c680d652ba678d
llvm/utils/lit/lit/util.py
python
killProcessAndChildrenIsSupported
()
Returns a tuple (<supported> , <error message>) where `<supported>` is True if `killProcessAndChildren()` is supported on the current host, returns False otherwise. `<error message>` is an empty string if `<supported>` is True, otherwise is contains a string describing why the function is not supported.
Returns a tuple (<supported> , <error message>) where `<supported>` is True if `killProcessAndChildren()` is supported on the current host, returns False otherwise. `<error message>` is an empty string if `<supported>` is True, otherwise is contains a string describing why the function is not supported.
[ "Returns", "a", "tuple", "(", "<supported", ">", "<error", "message", ">", ")", "where", "<supported", ">", "is", "True", "if", "killProcessAndChildren", "()", "is", "supported", "on", "the", "current", "host", "returns", "False", "otherwise", ".", "<error", ...
def killProcessAndChildrenIsSupported(): """ Returns a tuple (<supported> , <error message>) where `<supported>` is True if `killProcessAndChildren()` is supported on the current host, returns False otherwise. `<error message>` is an empty string if `<supported>` is True, otherwise is contains a string describing why the function is not supported. """ if platform.system() == 'AIX': return (True, "") try: import psutil # noqa: F401 return (True, "") except ImportError: return (False, "Requires the Python psutil module but it could" " not be found. Try installing it via pip or via" " your operating system's package manager.")
[ "def", "killProcessAndChildrenIsSupported", "(", ")", ":", "if", "platform", ".", "system", "(", ")", "==", "'AIX'", ":", "return", "(", "True", ",", "\"\"", ")", "try", ":", "import", "psutil", "# noqa: F401", "return", "(", "True", ",", "\"\"", ")", "e...
https://github.com/google/llvm-propeller/blob/45c226984fe8377ebfb2ad7713c680d652ba678d/llvm/utils/lit/lit/util.py#L426-L444
coinapi/coinapi-sdk
854f21e7f69ea8599ae35c5403565cf299d8b795
oeml-sdk/python/openapi_client/model/position_data.py
python
PositionData.additional_properties_type
()
return (bool, date, datetime, dict, float, int, list, str, none_type,)
This must be a method because a model may have properties that are of type self, this must run after the class is loaded
This must be a method because a model may have properties that are of type self, this must run after the class is loaded
[ "This", "must", "be", "a", "method", "because", "a", "model", "may", "have", "properties", "that", "are", "of", "type", "self", "this", "must", "run", "after", "the", "class", "is", "loaded" ]
def additional_properties_type(): """ This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ lazy_import() return (bool, date, datetime, dict, float, int, list, str, none_type,)
[ "def", "additional_properties_type", "(", ")", ":", "lazy_import", "(", ")", "return", "(", "bool", ",", "date", ",", "datetime", ",", "dict", ",", "float", ",", "int", ",", "list", ",", "str", ",", "none_type", ",", ")" ]
https://github.com/coinapi/coinapi-sdk/blob/854f21e7f69ea8599ae35c5403565cf299d8b795/oeml-sdk/python/openapi_client/model/position_data.py#L69-L75
FreeCAD/FreeCAD
ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/Draft/DraftFillet.py
python
_save_properties0_19_to_0_19
(obj)
return old_dict
Save the old property values and remove the old properties. Since we know the structure of the older Proxy class, we can take its old values and store them before we remove the property. We do not need to save the old properties if these can be recalculated from the new data.
Save the old property values and remove the old properties.
[ "Save", "the", "old", "property", "values", "and", "remove", "the", "old", "properties", "." ]
def _save_properties0_19_to_0_19(obj): """Save the old property values and remove the old properties. Since we know the structure of the older Proxy class, we can take its old values and store them before we remove the property. We do not need to save the old properties if these can be recalculated from the new data. """ _wrn("Old property values saved, old properties removed.") old_dict = dict() if hasattr(obj, "Length"): old_dict["Length"] = obj.Length obj.removeProperty("Length") if hasattr(obj, "Start"): old_dict["Start"] = obj.Start obj.removeProperty("Start") if hasattr(obj, "End"): old_dict["End"] = obj.End obj.removeProperty("End") if hasattr(obj, "FilletRadius"): old_dict["FilletRadius"] = obj.FilletRadius obj.removeProperty("FilletRadius") return old_dict
[ "def", "_save_properties0_19_to_0_19", "(", "obj", ")", ":", "_wrn", "(", "\"Old property values saved, old properties removed.\"", ")", "old_dict", "=", "dict", "(", ")", "if", "hasattr", "(", "obj", ",", "\"Length\"", ")", ":", "old_dict", "[", "\"Length\"", "]"...
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/DraftFillet.py#L127-L151
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cuda/cudadrv/driver.py
python
MemoryPointer.free
(self)
Forces the device memory to the trash.
Forces the device memory to the trash.
[ "Forces", "the", "device", "memory", "to", "the", "trash", "." ]
def free(self): """ Forces the device memory to the trash. """ if self.is_managed: if not self._finalizer.alive: raise RuntimeError("Freeing dead memory") self._finalizer() assert not self._finalizer.alive
[ "def", "free", "(", "self", ")", ":", "if", "self", ".", "is_managed", ":", "if", "not", "self", ".", "_finalizer", ".", "alive", ":", "raise", "RuntimeError", "(", "\"Freeing dead memory\"", ")", "self", ".", "_finalizer", "(", ")", "assert", "not", "se...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cuda/cudadrv/driver.py#L1264-L1272
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
tools/auto_bisect/bisect_state.py
python
BisectState.__init__
(self, depot, revisions)
Initializes a new BisectState object with a set of revision states. Args: depot: Name of the depot used for initial set of revision states. revisions: List of revisions used for initial set of revision states.
Initializes a new BisectState object with a set of revision states.
[ "Initializes", "a", "new", "BisectState", "object", "with", "a", "set", "of", "revision", "states", "." ]
def __init__(self, depot, revisions): """Initializes a new BisectState object with a set of revision states. Args: depot: Name of the depot used for initial set of revision states. revisions: List of revisions used for initial set of revision states. """ self.revision_states = [] self.revision_index = {} index = 0 for revision in revisions: new_state = self._InitRevisionState(depot, revision, index) self.revision_states.append(new_state) index += 1
[ "def", "__init__", "(", "self", ",", "depot", ",", "revisions", ")", ":", "self", ".", "revision_states", "=", "[", "]", "self", ".", "revision_index", "=", "{", "}", "index", "=", "0", "for", "revision", "in", "revisions", ":", "new_state", "=", "self...
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/auto_bisect/bisect_state.py#L39-L53
qt-creator/qt-creator
c9cd00f2ce590e9b272ad69214dc910e92bf62eb
share/qtcreator/debugger/gdbtracepoint.py
python
GDBTracepoint._getProcessName
(slef)
Capture for ProcessName
Capture for ProcessName
[ "Capture", "for", "ProcessName" ]
def _getProcessName(slef): """ Capture for ProcessName """ # gdb does not expose process name, neither does (standard) python # You can use for example psutil, but it might not be present. # Default to name of thread with ID 1 inf = gdb.selected_inferior() if psutil is None: try: if inf is None: return '<unknown process name>' threads = filter(lambda t: t.num == 1, list(inf.threads())) if len(threads) < 1: return '<unknown process name>' thread = threads[0] # use thread name return thread.name except Exception as e: return str(e) else: return psutil.Process(inf.pid).name()
[ "def", "_getProcessName", "(", "slef", ")", ":", "# gdb does not expose process name, neither does (standard) python", "# You can use for example psutil, but it might not be present.", "# Default to name of thread with ID 1", "inf", "=", "gdb", ".", "selected_inferior", "(", ")", "if...
https://github.com/qt-creator/qt-creator/blob/c9cd00f2ce590e9b272ad69214dc910e92bf62eb/share/qtcreator/debugger/gdbtracepoint.py#L316-L337
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/quopri.py
python
ishex
(c)
return '0' <= c <= '9' or 'a' <= c <= 'f' or 'A' <= c <= 'F'
Return true if the character 'c' is a hexadecimal digit.
Return true if the character 'c' is a hexadecimal digit.
[ "Return", "true", "if", "the", "character", "c", "is", "a", "hexadecimal", "digit", "." ]
def ishex(c): """Return true if the character 'c' is a hexadecimal digit.""" return '0' <= c <= '9' or 'a' <= c <= 'f' or 'A' <= c <= 'F'
[ "def", "ishex", "(", "c", ")", ":", "return", "'0'", "<=", "c", "<=", "'9'", "or", "'a'", "<=", "c", "<=", "'f'", "or", "'A'", "<=", "c", "<=", "'F'" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/quopri.py#L171-L173
shedskin/shedskin
ae88dbca7b1d9671cd8be448cb0b497122758936
examples/solitaire.py
python
Solitaire._tripleCut
(self)
Swap cards above first joker with cards below second joker.
Swap cards above first joker with cards below second joker.
[ "Swap", "cards", "above", "first", "joker", "with", "cards", "below", "second", "joker", "." ]
def _tripleCut(self): """ Swap cards above first joker with cards below second joker. """ d = self.deck a, b = d.index(53), d.index(54) if a > b: a, b = b, a d[:] = d[b+1:] + d[a:b+1] + d[:a]
[ "def", "_tripleCut", "(", "self", ")", ":", "d", "=", "self", ".", "deck", "a", ",", "b", "=", "d", ".", "index", "(", "53", ")", ",", "d", ".", "index", "(", "54", ")", "if", "a", ">", "b", ":", "a", ",", "b", "=", "b", ",", "a", "d", ...
https://github.com/shedskin/shedskin/blob/ae88dbca7b1d9671cd8be448cb0b497122758936/examples/solitaire.py#L61-L70