nwo
stringlengths
5
86
sha
stringlengths
40
40
path
stringlengths
4
189
language
stringclasses
1 value
identifier
stringlengths
1
94
parameters
stringlengths
2
4.03k
argument_list
stringclasses
1 value
return_statement
stringlengths
0
11.5k
docstring
stringlengths
1
33.2k
docstring_summary
stringlengths
0
5.15k
docstring_tokens
list
function
stringlengths
34
151k
function_tokens
list
url
stringlengths
90
278
perilouswithadollarsign/cstrike15_src
f82112a2388b841d72cb62ca48ab1846dfcc11c8
thirdparty/protobuf-2.5.0/python/google/protobuf/internal/type_checkers.py
python
GetTypeChecker
(cpp_type, field_type)
return _VALUE_CHECKERS[cpp_type]
Returns a type checker for a message field of the specified types. Args: cpp_type: C++ type of the field (see descriptor.py). field_type: Protocol message field type (see descriptor.py). Returns: An instance of TypeChecker which can be used to verify the types of values assigned to a field of the specified type.
Returns a type checker for a message field of the specified types.
[ "Returns", "a", "type", "checker", "for", "a", "message", "field", "of", "the", "specified", "types", "." ]
def GetTypeChecker(cpp_type, field_type): """Returns a type checker for a message field of the specified types. Args: cpp_type: C++ type of the field (see descriptor.py). field_type: Protocol message field type (see descriptor.py). Returns: An instance of TypeChecker which can be used to verify the types of values assigned to a field of the specified type. """ if (cpp_type == _FieldDescriptor.CPPTYPE_STRING and field_type == _FieldDescriptor.TYPE_STRING): return UnicodeValueChecker() return _VALUE_CHECKERS[cpp_type]
[ "def", "GetTypeChecker", "(", "cpp_type", ",", "field_type", ")", ":", "if", "(", "cpp_type", "==", "_FieldDescriptor", ".", "CPPTYPE_STRING", "and", "field_type", "==", "_FieldDescriptor", ".", "TYPE_STRING", ")", ":", "return", "UnicodeValueChecker", "(", ")", ...
https://github.com/perilouswithadollarsign/cstrike15_src/blob/f82112a2388b841d72cb62ca48ab1846dfcc11c8/thirdparty/protobuf-2.5.0/python/google/protobuf/internal/type_checkers.py#L56-L70
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/keras/saving/saved_model/serialized_attributes.py
python
SerializedAttributes.new
(obj)
Returns a new SerializedAttribute object.
Returns a new SerializedAttribute object.
[ "Returns", "a", "new", "SerializedAttribute", "object", "." ]
def new(obj): """Returns a new SerializedAttribute object.""" if isinstance(obj, training_lib.Model): return ModelAttributes() elif isinstance(obj, metrics.Metric): return MetricAttributes() elif isinstance(obj, recurrent.RNN): return RNNAttributes() elif isinstance(obj, base_layer.Layer): return LayerAttributes() else: raise TypeError('Internal error during serialization: Expected Keras ' 'Layer object, got {} of type {}'.format(obj, type(obj)))
[ "def", "new", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "training_lib", ".", "Model", ")", ":", "return", "ModelAttributes", "(", ")", "elif", "isinstance", "(", "obj", ",", "metrics", ".", "Metric", ")", ":", "return", "MetricAttributes...
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/keras/saving/saved_model/serialized_attributes.py#L140-L152
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/pandas/py2/pandas/core/computation/engines.py
python
AbstractEngine._evaluate
(self)
Return an evaluated expression. Parameters ---------- env : Scope The local and global environment in which to evaluate an expression. Notes ----- Must be implemented by subclasses.
Return an evaluated expression.
[ "Return", "an", "evaluated", "expression", "." ]
def _evaluate(self): """Return an evaluated expression. Parameters ---------- env : Scope The local and global environment in which to evaluate an expression. Notes ----- Must be implemented by subclasses. """ pass
[ "def", "_evaluate", "(", "self", ")", ":", "pass" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/computation/engines.py#L85-L98
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/simplejson/encoder.py
python
encode_basestring
(s, _PY3=PY3, _q=u('"'))
return _q + ESCAPE.sub(replace, s) + _q
Return a JSON representation of a Python string
Return a JSON representation of a Python string
[ "Return", "a", "JSON", "representation", "of", "a", "Python", "string" ]
def encode_basestring(s, _PY3=PY3, _q=u('"')): """Return a JSON representation of a Python string """ if _PY3: if isinstance(s, binary_type): s = s.decode('utf-8') else: if isinstance(s, str) and HAS_UTF8.search(s) is not None: s = s.decode('utf-8') def replace(match): return ESCAPE_DCT[match.group(0)] return _q + ESCAPE.sub(replace, s) + _q
[ "def", "encode_basestring", "(", "s", ",", "_PY3", "=", "PY3", ",", "_q", "=", "u", "(", "'\"'", ")", ")", ":", "if", "_PY3", ":", "if", "isinstance", "(", "s", ",", "binary_type", ")", ":", "s", "=", "s", ".", "decode", "(", "'utf-8'", ")", "e...
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/simplejson/encoder.py#L41-L53
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/AWSPythonSDK/1.5.8/docutils/parsers/rst/roles.py
python
role
(role_name, language_module, lineno, reporter)
Locate and return a role function from its language-dependent name, along with a list of system messages. If the role is not found in the current language, check English. Return a 2-tuple: role function (``None`` if the named role cannot be found) and a list of system messages.
Locate and return a role function from its language-dependent name, along with a list of system messages. If the role is not found in the current language, check English. Return a 2-tuple: role function (``None`` if the named role cannot be found) and a list of system messages.
[ "Locate", "and", "return", "a", "role", "function", "from", "its", "language", "-", "dependent", "name", "along", "with", "a", "list", "of", "system", "messages", ".", "If", "the", "role", "is", "not", "found", "in", "the", "current", "language", "check", ...
def role(role_name, language_module, lineno, reporter): """ Locate and return a role function from its language-dependent name, along with a list of system messages. If the role is not found in the current language, check English. Return a 2-tuple: role function (``None`` if the named role cannot be found) and a list of system messages. """ normname = role_name.lower() messages = [] msg_text = [] if normname in _roles: return _roles[normname], messages if role_name: canonicalname = None try: canonicalname = language_module.roles[normname] except AttributeError, error: msg_text.append('Problem retrieving role entry from language ' 'module %r: %s.' % (language_module, error)) except KeyError: msg_text.append('No role entry for "%s" in module "%s".' % (role_name, language_module.__name__)) else: canonicalname = DEFAULT_INTERPRETED_ROLE # If we didn't find it, try English as a fallback. if not canonicalname: try: canonicalname = _fallback_language_module.roles[normname] msg_text.append('Using English fallback for role "%s".' % role_name) except KeyError: msg_text.append('Trying "%s" as canonical role name.' % role_name) # The canonical name should be an English name, but just in case: canonicalname = normname # Collect any messages that we generated. if msg_text: message = reporter.info('\n'.join(msg_text), line=lineno) messages.append(message) # Look the role up in the registry, and return it. if canonicalname in _role_registry: role_fn = _role_registry[canonicalname] register_local_role(normname, role_fn) return role_fn, messages else: return None, messages
[ "def", "role", "(", "role_name", ",", "language_module", ",", "lineno", ",", "reporter", ")", ":", "normname", "=", "role_name", ".", "lower", "(", ")", "messages", "=", "[", "]", "msg_text", "=", "[", "]", "if", "normname", "in", "_roles", ":", "retur...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/docutils/parsers/rst/roles.py#L94-L144
neoml-lib/neoml
a0d370fba05269a1b2258cef126f77bbd2054a3e
NeoML/Python/neoml/Dnn/Loss.py
python
CenterLoss.rate
(self)
return self._internal.get_rate()
Gets the convergence rate multiplier.
Gets the convergence rate multiplier.
[ "Gets", "the", "convergence", "rate", "multiplier", "." ]
def rate(self): """Gets the convergence rate multiplier. """ return self._internal.get_rate()
[ "def", "rate", "(", "self", ")", ":", "return", "self", ".", "_internal", ".", "get_rate", "(", ")" ]
https://github.com/neoml-lib/neoml/blob/a0d370fba05269a1b2258cef126f77bbd2054a3e/NeoML/Python/neoml/Dnn/Loss.py#L647-L650
CanalTP/navitia
cb84ce9859070187e708818b058e6a7e0b7f891b
source/jormungandr/jormungandr/street_network/street_network.py
python
AbstractStreetNetworkService.record_call
(self, status, **kwargs)
status can be in: ok, failure
status can be in: ok, failure
[ "status", "can", "be", "in", ":", "ok", "failure" ]
def record_call(self, status, **kwargs): """ status can be in: ok, failure """ params = {'streetnetwork_id': six.text_type(self.sn_system_id), 'status': status} params.update(kwargs) new_relic.record_custom_event('streetnetwork', params)
[ "def", "record_call", "(", "self", ",", "status", ",", "*", "*", "kwargs", ")", ":", "params", "=", "{", "'streetnetwork_id'", ":", "six", ".", "text_type", "(", "self", ".", "sn_system_id", ")", ",", "'status'", ":", "status", "}", "params", ".", "upd...
https://github.com/CanalTP/navitia/blob/cb84ce9859070187e708818b058e6a7e0b7f891b/source/jormungandr/jormungandr/street_network/street_network.py#L148-L154
papyrussolution/OpenPapyrus
bbfb5ec2ea2109b8e2f125edd838e12eaf7b8b91
Src/OSF/protobuf-3.19.1/python/mox.py
python
MultipleTimesGroup.IsSatisfied
(self)
return False
Return True if all methods in this group are called at least once.
Return True if all methods in this group are called at least once.
[ "Return", "True", "if", "all", "methods", "in", "this", "group", "are", "called", "at", "least", "once", "." ]
def IsSatisfied(self): """Return True if all methods in this group are called at least once.""" # NOTE(psycho): We can't use the simple set difference here because we want # to match different parameters which are considered the same e.g. IsA(str) # and some string. This solution is O(n^2) but n should be small. tmp = self._methods.copy() for called in self._methods_called: for expected in tmp: if called == expected: tmp.remove(expected) if not tmp: return True break return False
[ "def", "IsSatisfied", "(", "self", ")", ":", "# NOTE(psycho): We can't use the simple set difference here because we want", "# to match different parameters which are considered the same e.g. IsA(str)", "# and some string. This solution is O(n^2) but n should be small.", "tmp", "=", "self", ...
https://github.com/papyrussolution/OpenPapyrus/blob/bbfb5ec2ea2109b8e2f125edd838e12eaf7b8b91/Src/OSF/protobuf-3.19.1/python/mox.py#L1318-L1331
UbiquityRobotics/fiducials
1ae8a13d56fafa436aca181da451c1bf808a288c
fiducial_slam/scripts/standard_fit.py
python
function
(x, C, N)
return np.dot(x-Ck, Nk) * -1.0 / Nu + Cu
Calculate an orthogonal projection of the points on the standard Args: x: (n-1) x m dimensional matrix C: n dimensional vector whicn indicate the centroid of the standard N: n dimensional vector which indicate the normal vector of the standard Returns: m dimensional vector which indicate the last attribute value of orthogonal projection
Calculate an orthogonal projection of the points on the standard
[ "Calculate", "an", "orthogonal", "projection", "of", "the", "points", "on", "the", "standard" ]
def function(x, C, N): """ Calculate an orthogonal projection of the points on the standard Args: x: (n-1) x m dimensional matrix C: n dimensional vector whicn indicate the centroid of the standard N: n dimensional vector which indicate the normal vector of the standard Returns: m dimensional vector which indicate the last attribute value of orthogonal projection """ Ck = C[0:-1] # centroid for known parameters Nk = N[0:-1] # normal for known parmeters Cu = C[-1] # centroid for unknown parameter Nu = N[-1] # normal for unknown parameter return np.dot(x-Ck, Nk) * -1.0 / Nu + Cu
[ "def", "function", "(", "x", ",", "C", ",", "N", ")", ":", "Ck", "=", "C", "[", "0", ":", "-", "1", "]", "# centroid for known parameters", "Nk", "=", "N", "[", "0", ":", "-", "1", "]", "# normal for known parmeters", "Cu", "=", "C", "[", "-", "1...
https://github.com/UbiquityRobotics/fiducials/blob/1ae8a13d56fafa436aca181da451c1bf808a288c/fiducial_slam/scripts/standard_fit.py#L89-L106
root-project/root
fcd3583bb14852bf2e8cd2415717cbaac0e75896
bindings/pyroot/cppyy/cppyy-backend/cling/python/cppyy_backend/_cppyy_generator.py
python
CppyyGenerator._get_access_specifier
(self, member)
return False
In principle, we just want member.access_specifier.name.lower(), except that we need to handle: Q_SIGNALS:|signals: which are converted by the preprocessor...so read the original text. :param member: The access_specifier. :return:
In principle, we just want member.access_specifier.name.lower(), except that we need to handle:
[ "In", "principle", "we", "just", "want", "member", ".", "access_specifier", ".", "name", ".", "lower", "()", "except", "that", "we", "need", "to", "handle", ":" ]
def _get_access_specifier(self, member): """ In principle, we just want member.access_specifier.name.lower(), except that we need to handle: Q_SIGNALS:|signals: which are converted by the preprocessor...so read the original text. :param member: The access_specifier. :return: """ access_specifier_text = self.source_processor.unpreprocessed(member.extent) if access_specifier_text in (Q_SIGNALS + ":", "signals:"): return True return False
[ "def", "_get_access_specifier", "(", "self", ",", "member", ")", ":", "access_specifier_text", "=", "self", ".", "source_processor", ".", "unpreprocessed", "(", "member", ".", "extent", ")", "if", "access_specifier_text", "in", "(", "Q_SIGNALS", "+", "\":\"", ",...
https://github.com/root-project/root/blob/fcd3583bb14852bf2e8cd2415717cbaac0e75896/bindings/pyroot/cppyy/cppyy-backend/cling/python/cppyy_backend/_cppyy_generator.py#L393-L407
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/mailbox.py
python
MH.get_string
(self, key)
Return a string representation or raise a KeyError.
Return a string representation or raise a KeyError.
[ "Return", "a", "string", "representation", "or", "raise", "a", "KeyError", "." ]
def get_string(self, key): """Return a string representation or raise a KeyError.""" try: if self._locked: f = open(os.path.join(self._path, str(key)), 'r+') else: f = open(os.path.join(self._path, str(key)), 'r') except IOError, e: if e.errno == errno.ENOENT: raise KeyError('No message with key: %s' % key) else: raise try: if self._locked: _lock_file(f) try: return f.read() finally: if self._locked: _unlock_file(f) finally: f.close()
[ "def", "get_string", "(", "self", ",", "key", ")", ":", "try", ":", "if", "self", ".", "_locked", ":", "f", "=", "open", "(", "os", ".", "path", ".", "join", "(", "self", ".", "_path", ",", "str", "(", "key", ")", ")", ",", "'r+'", ")", "else...
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/mailbox.py#L1021-L1042
root-project/root
fcd3583bb14852bf2e8cd2415717cbaac0e75896
interpreter/llvm/src/tools/clang/utils/check_cfc/check_cfc.py
python
get_output_file
(args)
return None
Return the output file specified by this command or None if not specified.
Return the output file specified by this command or None if not specified.
[ "Return", "the", "output", "file", "specified", "by", "this", "command", "or", "None", "if", "not", "specified", "." ]
def get_output_file(args): """Return the output file specified by this command or None if not specified.""" grabnext = False for arg in args: if grabnext: return arg if arg == '-o': # Specified as a separate arg grabnext = True elif arg.startswith('-o'): # Specified conjoined with -o return arg[2:] assert grabnext == False return None
[ "def", "get_output_file", "(", "args", ")", ":", "grabnext", "=", "False", "for", "arg", "in", "args", ":", "if", "grabnext", ":", "return", "arg", "if", "arg", "==", "'-o'", ":", "# Specified as a separate arg", "grabnext", "=", "True", "elif", "arg", "."...
https://github.com/root-project/root/blob/fcd3583bb14852bf2e8cd2415717cbaac0e75896/interpreter/llvm/src/tools/clang/utils/check_cfc/check_cfc.py#L130-L145
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python/src/Lib/sets.py
python
BaseSet.issuperset
(self, other)
return True
Report whether this set contains another set.
Report whether this set contains another set.
[ "Report", "whether", "this", "set", "contains", "another", "set", "." ]
def issuperset(self, other): """Report whether this set contains another set.""" self._binary_sanity_check(other) if len(self) < len(other): # Fast check for obvious cases return False for elt in ifilterfalse(self._data.__contains__, other): return False return True
[ "def", "issuperset", "(", "self", ",", "other", ")", ":", "self", ".", "_binary_sanity_check", "(", "other", ")", "if", "len", "(", "self", ")", "<", "len", "(", "other", ")", ":", "# Fast check for obvious cases", "return", "False", "for", "elt", "in", ...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/sets.py#L298-L305
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_core.py
python
Sizer.ComputeFittingWindowSize
(*args, **kwargs)
return _core_.Sizer_ComputeFittingWindowSize(*args, **kwargs)
ComputeFittingWindowSize(self, Window window) -> Size Like `ComputeFittingClientSize`, but converts the result into *window* size. The returned value is suitable for passing to `wx.Window.SetSize` or `wx.Window.SetMinSize`.
ComputeFittingWindowSize(self, Window window) -> Size
[ "ComputeFittingWindowSize", "(", "self", "Window", "window", ")", "-", ">", "Size" ]
def ComputeFittingWindowSize(*args, **kwargs): """ ComputeFittingWindowSize(self, Window window) -> Size Like `ComputeFittingClientSize`, but converts the result into *window* size. The returned value is suitable for passing to `wx.Window.SetSize` or `wx.Window.SetMinSize`. """ return _core_.Sizer_ComputeFittingWindowSize(*args, **kwargs)
[ "def", "ComputeFittingWindowSize", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "Sizer_ComputeFittingWindowSize", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_core.py#L14863-L14874
mamedev/mame
02cd26d37ee11191f3e311e19e805d872cb1e3a4
3rdparty/portmidi/pm_python/pyportmidi/midi.py
python
Input.poll
(self)
returns true if there's data, or false if not. Input.poll(): return Bool raises a MidiException on error.
returns true if there's data, or false if not. Input.poll(): return Bool
[ "returns", "true", "if", "there", "s", "data", "or", "false", "if", "not", ".", "Input", ".", "poll", "()", ":", "return", "Bool" ]
def poll(self): """returns true if there's data, or false if not. Input.poll(): return Bool raises a MidiException on error. """ _check_init() self._check_open() r = self._input.Poll() if r == _pypm.TRUE: return True elif r == _pypm.FALSE: return False else: err_text = GetErrorText(r) raise MidiException( (r, err_text) )
[ "def", "poll", "(", "self", ")", ":", "_check_init", "(", ")", "self", ".", "_check_open", "(", ")", "r", "=", "self", ".", "_input", ".", "Poll", "(", ")", "if", "r", "==", "_pypm", ".", "TRUE", ":", "return", "True", "elif", "r", "==", "_pypm",...
https://github.com/mamedev/mame/blob/02cd26d37ee11191f3e311e19e805d872cb1e3a4/3rdparty/portmidi/pm_python/pyportmidi/midi.py#L284-L300
apple/swift-lldb
d74be846ef3e62de946df343e8c234bde93a8912
scripts/Python/static-binding/lldb.py
python
SBCompileUnit.FindLineEntryIndex
(self, *args)
return _lldb.SBCompileUnit_FindLineEntryIndex(self, *args)
FindLineEntryIndex(SBCompileUnit self, uint32_t start_idx, uint32_t line, SBFileSpec inline_file_spec) -> uint32_t FindLineEntryIndex(SBCompileUnit self, uint32_t start_idx, uint32_t line, SBFileSpec inline_file_spec, bool exact) -> uint32_t
FindLineEntryIndex(SBCompileUnit self, uint32_t start_idx, uint32_t line, SBFileSpec inline_file_spec) -> uint32_t FindLineEntryIndex(SBCompileUnit self, uint32_t start_idx, uint32_t line, SBFileSpec inline_file_spec, bool exact) -> uint32_t
[ "FindLineEntryIndex", "(", "SBCompileUnit", "self", "uint32_t", "start_idx", "uint32_t", "line", "SBFileSpec", "inline_file_spec", ")", "-", ">", "uint32_t", "FindLineEntryIndex", "(", "SBCompileUnit", "self", "uint32_t", "start_idx", "uint32_t", "line", "SBFileSpec", "...
def FindLineEntryIndex(self, *args): """ FindLineEntryIndex(SBCompileUnit self, uint32_t start_idx, uint32_t line, SBFileSpec inline_file_spec) -> uint32_t FindLineEntryIndex(SBCompileUnit self, uint32_t start_idx, uint32_t line, SBFileSpec inline_file_spec, bool exact) -> uint32_t """ return _lldb.SBCompileUnit_FindLineEntryIndex(self, *args)
[ "def", "FindLineEntryIndex", "(", "self", ",", "*", "args", ")", ":", "return", "_lldb", ".", "SBCompileUnit_FindLineEntryIndex", "(", "self", ",", "*", "args", ")" ]
https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L3180-L3185
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
tools/resources/list_unused_grit_header.py
python
ShouldScanFile
(filename)
return file_extension in extensions
Return if the filename has one of the extensions below.
Return if the filename has one of the extensions below.
[ "Return", "if", "the", "filename", "has", "one", "of", "the", "extensions", "below", "." ]
def ShouldScanFile(filename): """Return if the filename has one of the extensions below.""" extensions = ['.cc', '.cpp', '.h', '.mm'] file_extension = os.path.splitext(filename)[1] return file_extension in extensions
[ "def", "ShouldScanFile", "(", "filename", ")", ":", "extensions", "=", "[", "'.cc'", ",", "'.cpp'", ",", "'.h'", ",", "'.mm'", "]", "file_extension", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "[", "1", "]", "return", "file_extension"...
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/resources/list_unused_grit_header.py#L150-L154
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/crcmod_osx/crcmod/crcmod.py
python
Crc.digest
(self)
return ''.join(lst)
Return the current CRC value as a string of bytes. The length of this string is specified in the digest_size attribute.
Return the current CRC value as a string of bytes. The length of this string is specified in the digest_size attribute.
[ "Return", "the", "current", "CRC", "value", "as", "a", "string", "of", "bytes", ".", "The", "length", "of", "this", "string", "is", "specified", "in", "the", "digest_size", "attribute", "." ]
def digest(self): '''Return the current CRC value as a string of bytes. The length of this string is specified in the digest_size attribute. ''' n = self.digest_size crc = self.crcValue lst = [] while n > 0: lst.append(chr(crc & 0xFF)) crc = crc >> 8 n -= 1 lst.reverse() return ''.join(lst)
[ "def", "digest", "(", "self", ")", ":", "n", "=", "self", ".", "digest_size", "crc", "=", "self", ".", "crcValue", "lst", "=", "[", "]", "while", "n", ">", "0", ":", "lst", ".", "append", "(", "chr", "(", "crc", "&", "0xFF", ")", ")", "crc", ...
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/crcmod_osx/crcmod/crcmod.py#L153-L165
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/trace.py
python
CoverageResults.update
(self, other)
Merge in the data from another CoverageResults
Merge in the data from another CoverageResults
[ "Merge", "in", "the", "data", "from", "another", "CoverageResults" ]
def update(self, other): """Merge in the data from another CoverageResults""" counts = self.counts calledfuncs = self.calledfuncs callers = self.callers other_counts = other.counts other_calledfuncs = other.calledfuncs other_callers = other.callers for key in other_counts.keys(): counts[key] = counts.get(key, 0) + other_counts[key] for key in other_calledfuncs.keys(): calledfuncs[key] = 1 for key in other_callers.keys(): callers[key] = 1
[ "def", "update", "(", "self", ",", "other", ")", ":", "counts", "=", "self", ".", "counts", "calledfuncs", "=", "self", ".", "calledfuncs", "callers", "=", "self", ".", "callers", "other_counts", "=", "other", ".", "counts", "other_calledfuncs", "=", "othe...
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/trace.py#L248-L264
NervanaSystems/ngraph
f677a119765ca30636cf407009dabd118664951f
python/src/ngraph/ops.py
python
logical_and
( left_node: NodeInput, right_node: NodeInput, auto_broadcast: str = "NUMPY", name: Optional[str] = None, )
return _get_node_factory().create( "LogicalAnd", [left_node, right_node], {"auto_broadcast": auto_broadcast.upper()} )
Return node which perform logical and operation on input nodes element-wise. :param left_node: The first input node providing data. :param right_node: The second input node providing data. :param auto_broadcast: The type of broadcasting that specifies mapping of input tensor axes to output shape axes. Range of values: numpy, explicit. :param name: The optional new name for output node. :return: The node performing logical and operation on input nodes corresponding elements.
Return node which perform logical and operation on input nodes element-wise.
[ "Return", "node", "which", "perform", "logical", "and", "operation", "on", "input", "nodes", "element", "-", "wise", "." ]
def logical_and( left_node: NodeInput, right_node: NodeInput, auto_broadcast: str = "NUMPY", name: Optional[str] = None, ) -> Node: """Return node which perform logical and operation on input nodes element-wise. :param left_node: The first input node providing data. :param right_node: The second input node providing data. :param auto_broadcast: The type of broadcasting that specifies mapping of input tensor axes to output shape axes. Range of values: numpy, explicit. :param name: The optional new name for output node. :return: The node performing logical and operation on input nodes corresponding elements. """ return _get_node_factory().create( "LogicalAnd", [left_node, right_node], {"auto_broadcast": auto_broadcast.upper()} )
[ "def", "logical_and", "(", "left_node", ":", "NodeInput", ",", "right_node", ":", "NodeInput", ",", "auto_broadcast", ":", "str", "=", "\"NUMPY\"", ",", "name", ":", "Optional", "[", "str", "]", "=", "None", ",", ")", "->", "Node", ":", "return", "_get_n...
https://github.com/NervanaSystems/ngraph/blob/f677a119765ca30636cf407009dabd118664951f/python/src/ngraph/ops.py#L1276-L1293
glotzerlab/hoomd-blue
f7f97abfa3fcc2522fa8d458d65d0aeca7ba781a
hoomd/hpmc/pair/user.py
python
CPPPotentialBase._wrap_gpu_code
(self, code)
return cpp_function
Convert the provided code into a device function with the expected \ signature. Args: code (`str`): Body of the C++ function
Convert the provided code into a device function with the expected \ signature.
[ "Convert", "the", "provided", "code", "into", "a", "device", "function", "with", "the", "expected", "\\", "signature", "." ]
def _wrap_gpu_code(self, code): """Convert the provided code into a device function with the expected \ signature. Args: code (`str`): Body of the C++ function """ param_array_suffix = {True: '_isotropic', False: ''}[self._is_union] constituent_str = '__device__ float *param_array_constituent;' constituent_param_array = { True: constituent_str, False: '' }[self._is_union] cpp_function = f""" #include "hoomd/HOOMDMath.h" #include "hoomd/VectorMath.h" #include "hoomd/hpmc/IntegratorHPMCMonoGPUJIT.inc" using namespace hoomd; // param_array (singlet class) or param_array_isotropic // and param_array_constituent (union class) are // allocated by the library __device__ float *param_array{param_array_suffix}; {constituent_param_array} __device__ inline float eval(const vec3<float>& r_ij, unsigned int type_i, const quat<float>& q_i, float d_i, float charge_i, unsigned int type_j, const quat<float>& q_j, float d_j, float charge_j) {{ """ cpp_function += code cpp_function += """ } """ return cpp_function
[ "def", "_wrap_gpu_code", "(", "self", ",", "code", ")", ":", "param_array_suffix", "=", "{", "True", ":", "'_isotropic'", ",", "False", ":", "''", "}", "[", "self", ".", "_is_union", "]", "constituent_str", "=", "'__device__ float *param_array_constituent;'", "c...
https://github.com/glotzerlab/hoomd-blue/blob/f7f97abfa3fcc2522fa8d458d65d0aeca7ba781a/hoomd/hpmc/pair/user.py#L137-L178
mapnik/mapnik
f3da900c355e1d15059c4a91b00203dcc9d9f0ef
scons/scons-time.py
python
SConsTimer.subst_variables
(self, command, dictionary)
return (action, string, args)
Substitutes (via the format operator) the values in the specified dictionary into the specified command. The command can be an (action, string) tuple. In all cases, we perform substitution on strings and don't worry if something isn't a string. (It's probably a Python function to be executed.)
Substitutes (via the format operator) the values in the specified dictionary into the specified command.
[ "Substitutes", "(", "via", "the", "format", "operator", ")", "the", "values", "in", "the", "specified", "dictionary", "into", "the", "specified", "command", "." ]
def subst_variables(self, command, dictionary): """ Substitutes (via the format operator) the values in the specified dictionary into the specified command. The command can be an (action, string) tuple. In all cases, we perform substitution on strings and don't worry if something isn't a string. (It's probably a Python function to be executed.) """ try: command + '' except TypeError: action = command[0] string = command[1] args = command[2:] else: action = command string = action args = (()) action = self.subst(action, dictionary) string = self.subst(string, dictionary) return (action, string, args)
[ "def", "subst_variables", "(", "self", ",", "command", ",", "dictionary", ")", ":", "try", ":", "command", "+", "''", "except", "TypeError", ":", "action", "=", "command", "[", "0", "]", "string", "=", "command", "[", "1", "]", "args", "=", "command", ...
https://github.com/mapnik/mapnik/blob/f3da900c355e1d15059c4a91b00203dcc9d9f0ef/scons/scons-time.py#L374-L395
apiaryio/drafter
4634ebd07f6c6f257cc656598ccd535492fdfb55
tools/gyp/pylib/gyp/msvs_emulation.py
python
MsvsSettings.GetLibFlags
(self, config, gyp_to_build_path)
return libflags
Returns the flags that need to be added to lib commands.
Returns the flags that need to be added to lib commands.
[ "Returns", "the", "flags", "that", "need", "to", "be", "added", "to", "lib", "commands", "." ]
def GetLibFlags(self, config, gyp_to_build_path): """Returns the flags that need to be added to lib commands.""" config = self._TargetConfig(config) libflags = [] lib = self._GetWrapper(self, self.msvs_settings[config], 'VCLibrarianTool', append=libflags) libflags.extend(self._GetAdditionalLibraryDirectories( 'VCLibrarianTool', config, gyp_to_build_path)) lib('LinkTimeCodeGeneration', map={'true': '/LTCG'}) lib('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'}, prefix='/MACHINE:') lib('AdditionalOptions') return libflags
[ "def", "GetLibFlags", "(", "self", ",", "config", ",", "gyp_to_build_path", ")", ":", "config", "=", "self", ".", "_TargetConfig", "(", "config", ")", "libflags", "=", "[", "]", "lib", "=", "self", ".", "_GetWrapper", "(", "self", ",", "self", ".", "ms...
https://github.com/apiaryio/drafter/blob/4634ebd07f6c6f257cc656598ccd535492fdfb55/tools/gyp/pylib/gyp/msvs_emulation.py#L513-L525
google/sandboxed-api
7004d59150c9fbfaa3e5fd1872affffd1ab14fe8
oss-internship-2020/libuv/generator/wrapper_generator.py
python
read_file
(filename: str)
return str(file.read())
Returns contents of filename as a string. Args: filename: The name of the file to read Returns: The contents of the file as a string.
Returns contents of filename as a string.
[ "Returns", "contents", "of", "filename", "as", "a", "string", "." ]
def read_file(filename: str) -> str: """Returns contents of filename as a string. Args: filename: The name of the file to read Returns: The contents of the file as a string. """ file = open(filename, "r") return str(file.read())
[ "def", "read_file", "(", "filename", ":", "str", ")", "->", "str", ":", "file", "=", "open", "(", "filename", ",", "\"r\"", ")", "return", "str", "(", "file", ".", "read", "(", ")", ")" ]
https://github.com/google/sandboxed-api/blob/7004d59150c9fbfaa3e5fd1872affffd1ab14fe8/oss-internship-2020/libuv/generator/wrapper_generator.py#L149-L160
SequoiaDB/SequoiaDB
2894ed7e5bd6fe57330afc900cf76d0ff0df9f64
tools/server/php_linux/libxml2/lib/python2.4/site-packages/libxml2.py
python
xmlDoc.saveFormatFileTo
(self, buf, encoding, format)
return ret
Dump an XML document to an I/O buffer. Warning ! This call xmlOutputBufferClose() on buf which is not available after this call.
Dump an XML document to an I/O buffer. Warning ! This call xmlOutputBufferClose() on buf which is not available after this call.
[ "Dump", "an", "XML", "document", "to", "an", "I", "/", "O", "buffer", ".", "Warning", "!", "This", "call", "xmlOutputBufferClose", "()", "on", "buf", "which", "is", "not", "available", "after", "this", "call", "." ]
def saveFormatFileTo(self, buf, encoding, format): """Dump an XML document to an I/O buffer. Warning ! This call xmlOutputBufferClose() on buf which is not available after this call. """ if buf is None: buf__o = None else: buf__o = buf._o ret = libxml2mod.xmlSaveFormatFileTo(buf__o, self._o, encoding, format) return ret
[ "def", "saveFormatFileTo", "(", "self", ",", "buf", ",", "encoding", ",", "format", ")", ":", "if", "buf", "is", "None", ":", "buf__o", "=", "None", "else", ":", "buf__o", "=", "buf", ".", "_o", "ret", "=", "libxml2mod", ".", "xmlSaveFormatFileTo", "("...
https://github.com/SequoiaDB/SequoiaDB/blob/2894ed7e5bd6fe57330afc900cf76d0ff0df9f64/tools/server/php_linux/libxml2/lib/python2.4/site-packages/libxml2.py#L4454-L4461
wenwei202/caffe
f54a74abaf6951d8485cbdcfa1d74a4c37839466
scripts/cpp_lint.py
python
CheckVlogArguments
(filename, clean_lines, linenum, error)
Checks that VLOG() is only used for defining a logging level. For example, VLOG(2) is correct. VLOG(INFO), VLOG(WARNING), VLOG(ERROR), and VLOG(FATAL) are not. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found.
Checks that VLOG() is only used for defining a logging level.
[ "Checks", "that", "VLOG", "()", "is", "only", "used", "for", "defining", "a", "logging", "level", "." ]
def CheckVlogArguments(filename, clean_lines, linenum, error): """Checks that VLOG() is only used for defining a logging level. For example, VLOG(2) is correct. VLOG(INFO), VLOG(WARNING), VLOG(ERROR), and VLOG(FATAL) are not. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found. """ line = clean_lines.elided[linenum] if Search(r'\bVLOG\((INFO|ERROR|WARNING|DFATAL|FATAL)\)', line): error(filename, linenum, 'runtime/vlog', 5, 'VLOG() should be used with numeric verbosity level. ' 'Use LOG() if you want symbolic severity levels.')
[ "def", "CheckVlogArguments", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "if", "Search", "(", "r'\\bVLOG\\((INFO|ERROR|WARNING|DFATAL|FATAL)\\)'", ",", "line", ")", ...
https://github.com/wenwei202/caffe/blob/f54a74abaf6951d8485cbdcfa1d74a4c37839466/scripts/cpp_lint.py#L1708-L1724
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/mailbox.py
python
Mailbox.pop
(self, key, default=None)
return result
Delete the keyed message and return it, or default.
Delete the keyed message and return it, or default.
[ "Delete", "the", "keyed", "message", "and", "return", "it", "or", "default", "." ]
def pop(self, key, default=None): """Delete the keyed message and return it, or default.""" try: result = self[key] except KeyError: return default self.discard(key) return result
[ "def", "pop", "(", "self", ",", "key", ",", "default", "=", "None", ")", ":", "try", ":", "result", "=", "self", "[", "key", "]", "except", "KeyError", ":", "return", "default", "self", ".", "discard", "(", "key", ")", "return", "result" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/mailbox.py#L151-L158
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/sets.py
python
Set.clear
(self)
Remove all elements from this set.
Remove all elements from this set.
[ "Remove", "all", "elements", "from", "this", "set", "." ]
def clear(self): """Remove all elements from this set.""" self._data.clear()
[ "def", "clear", "(", "self", ")", ":", "self", ".", "_data", ".", "clear", "(", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/sets.py#L493-L495
gabyx/ApproxMVBB
838f3ff7690a938f1e4199a5f41b6feefc32a603
example/kdTreeFiltering/python/Tools/Transformations/Transformations.py
python
random_rotation_matrix
(rand=None)
return quaternion_matrix(random_quaternion(rand))
Return uniform random rotation matrix. rand: array like Three independent random variables that are uniformly distributed between 0 and 1 for each returned quaternion. >>> R = random_rotation_matrix() >>> numpy.allclose(numpy.dot(R.T, R), numpy.identity(4)) True
Return uniform random rotation matrix.
[ "Return", "uniform", "random", "rotation", "matrix", "." ]
def random_rotation_matrix(rand=None): """Return uniform random rotation matrix. rand: array like Three independent random variables that are uniformly distributed between 0 and 1 for each returned quaternion. >>> R = random_rotation_matrix() >>> numpy.allclose(numpy.dot(R.T, R), numpy.identity(4)) True """ return quaternion_matrix(random_quaternion(rand))
[ "def", "random_rotation_matrix", "(", "rand", "=", "None", ")", ":", "return", "quaternion_matrix", "(", "random_quaternion", "(", "rand", ")", ")" ]
https://github.com/gabyx/ApproxMVBB/blob/838f3ff7690a938f1e4199a5f41b6feefc32a603/example/kdTreeFiltering/python/Tools/Transformations/Transformations.py#L1486-L1498
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/ntpath.py
python
isabs
(s)
return s != '' and s[:1] in '/\\'
Test whether a path is absolute
Test whether a path is absolute
[ "Test", "whether", "a", "path", "is", "absolute" ]
def isabs(s): """Test whether a path is absolute""" s = splitdrive(s)[1] return s != '' and s[:1] in '/\\'
[ "def", "isabs", "(", "s", ")", ":", "s", "=", "splitdrive", "(", "s", ")", "[", "1", "]", "return", "s", "!=", "''", "and", "s", "[", ":", "1", "]", "in", "'/\\\\'" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/ntpath.py#L55-L58
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_core.py
python
BookCtrlBase.SetPageSize
(*args, **kwargs)
return _core_.BookCtrlBase_SetPageSize(*args, **kwargs)
SetPageSize(self, Size size)
SetPageSize(self, Size size)
[ "SetPageSize", "(", "self", "Size", "size", ")" ]
def SetPageSize(*args, **kwargs): """SetPageSize(self, Size size)""" return _core_.BookCtrlBase_SetPageSize(*args, **kwargs)
[ "def", "SetPageSize", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "BookCtrlBase_SetPageSize", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_core.py#L13570-L13572
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/thrift/server/TProcessPoolServer.py
python
TProcessPoolServer.serve
(self)
Start workers and put into queue
Start workers and put into queue
[ "Start", "workers", "and", "put", "into", "queue" ]
def serve(self): """Start workers and put into queue""" # this is a shared state that can tell the workers to exit when False self.isRunning.value = True # first bind and listen to the port self.serverTransport.listen() # fork the children for i in range(self.numWorkers): try: w = Process(target=self.workerProcess) w.daemon = True w.start() self.workers.append(w) except Exception as x: logger.exception(x) # wait until the condition is set by stop() while True: self.stopCondition.acquire() try: self.stopCondition.wait() break except (SystemExit, KeyboardInterrupt): break except Exception as x: logger.exception(x) self.isRunning.value = False
[ "def", "serve", "(", "self", ")", ":", "# this is a shared state that can tell the workers to exit when False", "self", ".", "isRunning", ".", "value", "=", "True", "# first bind and listen to the port", "self", ".", "serverTransport", ".", "listen", "(", ")", "# fork the...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/thrift/server/TProcessPoolServer.py#L88-L117
SoarGroup/Soar
a1c5e249499137a27da60533c72969eef3b8ab6b
scons/scons-local-4.1.0/SCons/SConf.py
python
SetCacheMode
(mode)
Set the Configure cache mode. mode must be one of "auto", "force", or "cache".
Set the Configure cache mode. mode must be one of "auto", "force", or "cache".
[ "Set", "the", "Configure", "cache", "mode", ".", "mode", "must", "be", "one", "of", "auto", "force", "or", "cache", "." ]
def SetCacheMode(mode): """Set the Configure cache mode. mode must be one of "auto", "force", or "cache".""" global cache_mode if mode == "auto": cache_mode = AUTO elif mode == "force": cache_mode = FORCE elif mode == "cache": cache_mode = CACHE else: raise ValueError("SCons.SConf.SetCacheMode: Unknown mode " + mode)
[ "def", "SetCacheMode", "(", "mode", ")", ":", "global", "cache_mode", "if", "mode", "==", "\"auto\"", ":", "cache_mode", "=", "AUTO", "elif", "mode", "==", "\"force\"", ":", "cache_mode", "=", "FORCE", "elif", "mode", "==", "\"cache\"", ":", "cache_mode", ...
https://github.com/SoarGroup/Soar/blob/a1c5e249499137a27da60533c72969eef3b8ab6b/scons/scons-local-4.1.0/SCons/SConf.py#L76-L87
CaoWGG/TensorRT-CenterNet
f949252e37b51e60f873808f46d3683f15735e79
onnx-tensorrt/third_party/onnx/third_party/pybind11/tools/clang/cindex.py
python
CursorKind.is_reference
(self)
return conf.lib.clang_isReference(self)
Test if this is a reference kind.
Test if this is a reference kind.
[ "Test", "if", "this", "is", "a", "reference", "kind", "." ]
def is_reference(self): """Test if this is a reference kind.""" return conf.lib.clang_isReference(self)
[ "def", "is_reference", "(", "self", ")", ":", "return", "conf", ".", "lib", ".", "clang_isReference", "(", "self", ")" ]
https://github.com/CaoWGG/TensorRT-CenterNet/blob/f949252e37b51e60f873808f46d3683f15735e79/onnx-tensorrt/third_party/onnx/third_party/pybind11/tools/clang/cindex.py#L580-L582
devsisters/libquic
8954789a056d8e7d5fcb6452fd1572ca57eb5c4e
src/third_party/protobuf/python/google/protobuf/internal/_parameterized.py
python
_IsSingletonList
(testcases)
return len(testcases) == 1 and not isinstance(testcases[0], tuple)
True iff testcases contains only a single non-tuple element.
True iff testcases contains only a single non-tuple element.
[ "True", "iff", "testcases", "contains", "only", "a", "single", "non", "-", "tuple", "element", "." ]
def _IsSingletonList(testcases): """True iff testcases contains only a single non-tuple element.""" return len(testcases) == 1 and not isinstance(testcases[0], tuple)
[ "def", "_IsSingletonList", "(", "testcases", ")", ":", "return", "len", "(", "testcases", ")", "==", "1", "and", "not", "isinstance", "(", "testcases", "[", "0", "]", ",", "tuple", ")" ]
https://github.com/devsisters/libquic/blob/8954789a056d8e7d5fcb6452fd1572ca57eb5c4e/src/third_party/protobuf/python/google/protobuf/internal/_parameterized.py#L256-L258
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/multiprocessing/process.py
python
current_process
()
return _current_process
Return process object representing the current process
Return process object representing the current process
[ "Return", "process", "object", "representing", "the", "current", "process" ]
def current_process(): ''' Return process object representing the current process ''' return _current_process
[ "def", "current_process", "(", ")", ":", "return", "_current_process" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/multiprocessing/process.py#L36-L40
lmb-freiburg/ogn
974f72ef4bf840d6f6693d22d1843a79223e77ce
examples/pycaffe/tools.py
python
SimpleTransformer.preprocess
(self, im)
return im
preprocess() emulate the pre-processing occurring in the vgg16 caffe prototxt.
preprocess() emulate the pre-processing occurring in the vgg16 caffe prototxt.
[ "preprocess", "()", "emulate", "the", "pre", "-", "processing", "occurring", "in", "the", "vgg16", "caffe", "prototxt", "." ]
def preprocess(self, im): """ preprocess() emulate the pre-processing occurring in the vgg16 caffe prototxt. """ im = np.float32(im) im = im[:, :, ::-1] # change to BGR im -= self.mean im *= self.scale im = im.transpose((2, 0, 1)) return im
[ "def", "preprocess", "(", "self", ",", "im", ")", ":", "im", "=", "np", ".", "float32", "(", "im", ")", "im", "=", "im", "[", ":", ",", ":", ",", ":", ":", "-", "1", "]", "# change to BGR", "im", "-=", "self", ".", "mean", "im", "*=", "self",...
https://github.com/lmb-freiburg/ogn/blob/974f72ef4bf840d6f6693d22d1843a79223e77ce/examples/pycaffe/tools.py#L27-L39
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/stc.py
python
StyledTextEvent.SetModifiers
(*args, **kwargs)
return _stc.StyledTextEvent_SetModifiers(*args, **kwargs)
SetModifiers(self, int m)
SetModifiers(self, int m)
[ "SetModifiers", "(", "self", "int", "m", ")" ]
def SetModifiers(*args, **kwargs): """SetModifiers(self, int m)""" return _stc.StyledTextEvent_SetModifiers(*args, **kwargs)
[ "def", "SetModifiers", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_stc", ".", "StyledTextEvent_SetModifiers", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/stc.py#L7034-L7036
sonyxperiadev/WebGL
0299b38196f78c6d5f74bcf6fa312a3daee6de60
Tools/Scripts/webkitpy/style/checkers/cpp.py
python
_FunctionState.check
(self, error, line_number)
Report if too many lines in function body. Args: error: The function to call with any errors found. line_number: The number of the line to check.
Report if too many lines in function body.
[ "Report", "if", "too", "many", "lines", "in", "function", "body", "." ]
def check(self, error, line_number): """Report if too many lines in function body. Args: error: The function to call with any errors found. line_number: The number of the line to check. """ if match(r'T(EST|est)', self.current_function): base_trigger = self._TEST_TRIGGER else: base_trigger = self._NORMAL_TRIGGER trigger = base_trigger * 2 ** self.min_confidence if self.lines_in_function > trigger: error_level = int(math.log(self.lines_in_function / base_trigger, 2)) # 50 => 0, 100 => 1, 200 => 2, 400 => 3, 800 => 4, 1600 => 5, ... if error_level > 5: error_level = 5 error(line_number, 'readability/fn_size', error_level, 'Small and focused functions are preferred:' ' %s has %d non-comment lines' ' (error triggered by exceeding %d lines).' % ( self.current_function, self.lines_in_function, trigger))
[ "def", "check", "(", "self", ",", "error", ",", "line_number", ")", ":", "if", "match", "(", "r'T(EST|est)'", ",", "self", ".", "current_function", ")", ":", "base_trigger", "=", "self", ".", "_TEST_TRIGGER", "else", ":", "base_trigger", "=", "self", ".", ...
https://github.com/sonyxperiadev/WebGL/blob/0299b38196f78c6d5f74bcf6fa312a3daee6de60/Tools/Scripts/webkitpy/style/checkers/cpp.py#L576-L598
LiquidPlayer/LiquidCore
9405979363f2353ac9a71ad8ab59685dd7f919c9
deps/node-10.15.3/tools/gyp/pylib/gyp/MSVSNew.py
python
MSVSSolution.__init__
(self, path, version, entries=None, variants=None, websiteProperties=True)
Initializes the solution. Args: path: Path to solution file. version: Format version to emit. entries: List of entries in solution. May contain Folder or Project objects. May be None, if the folder is empty. variants: List of build variant strings. If none, a default list will be used. websiteProperties: Flag to decide if the website properties section is generated.
Initializes the solution.
[ "Initializes", "the", "solution", "." ]
def __init__(self, path, version, entries=None, variants=None, websiteProperties=True): """Initializes the solution. Args: path: Path to solution file. version: Format version to emit. entries: List of entries in solution. May contain Folder or Project objects. May be None, if the folder is empty. variants: List of build variant strings. If none, a default list will be used. websiteProperties: Flag to decide if the website properties section is generated. """ self.path = path self.websiteProperties = websiteProperties self.version = version # Copy passed lists (or set to empty lists) self.entries = list(entries or []) if variants: # Copy passed list self.variants = variants[:] else: # Use default self.variants = ['Debug|Win32', 'Release|Win32'] # TODO(rspangler): Need to be able to handle a mapping of solution config # to project config. Should we be able to handle variants being a dict, # or add a separate variant_map variable? If it's a dict, we can't # guarantee the order of variants since dict keys aren't ordered. # TODO(rspangler): Automatically write to disk for now; should delay until # node-evaluation time. self.Write()
[ "def", "__init__", "(", "self", ",", "path", ",", "version", ",", "entries", "=", "None", ",", "variants", "=", "None", ",", "websiteProperties", "=", "True", ")", ":", "self", ".", "path", "=", "path", "self", ".", "websiteProperties", "=", "websiteProp...
https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/node-10.15.3/tools/gyp/pylib/gyp/MSVSNew.py#L178-L213
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/numpy/py2/numpy/ma/core.py
python
MaskedArray.__rsub__
(self, other)
return subtract(other, self)
Subtract self from other, and return a new masked array.
Subtract self from other, and return a new masked array.
[ "Subtract", "self", "from", "other", "and", "return", "a", "new", "masked", "array", "." ]
def __rsub__(self, other): """ Subtract self from other, and return a new masked array. """ return subtract(other, self)
[ "def", "__rsub__", "(", "self", ",", "other", ")", ":", "return", "subtract", "(", "other", ",", "self", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py2/numpy/ma/core.py#L4087-L4092
Polidea/SiriusObfuscator
b0e590d8130e97856afe578869b83a209e2b19be
SymbolExtractorAndRenamer/compiler-rt/lib/sanitizer_common/scripts/cpplint.py
python
Search
(pattern, s)
return _regexp_compile_cache[pattern].search(s)
Searches the string for the pattern, caching the compiled regexp.
Searches the string for the pattern, caching the compiled regexp.
[ "Searches", "the", "string", "for", "the", "pattern", "caching", "the", "compiled", "regexp", "." ]
def Search(pattern, s): """Searches the string for the pattern, caching the compiled regexp.""" if not pattern in _regexp_compile_cache: _regexp_compile_cache[pattern] = sre_compile.compile(pattern) return _regexp_compile_cache[pattern].search(s)
[ "def", "Search", "(", "pattern", ",", "s", ")", ":", "if", "not", "pattern", "in", "_regexp_compile_cache", ":", "_regexp_compile_cache", "[", "pattern", "]", "=", "sre_compile", ".", "compile", "(", "pattern", ")", "return", "_regexp_compile_cache", "[", "pat...
https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/compiler-rt/lib/sanitizer_common/scripts/cpplint.py#L419-L423
NASA-SW-VnV/ikos
71325dfb94737332542caa708d7537752021522d
analyzer/python/ikos/stats.py
python
get
(key)
return _statistics.get(key)
Gets a value from statistics table
Gets a value from statistics table
[ "Gets", "a", "value", "from", "statistics", "table" ]
def get(key): ''' Gets a value from statistics table ''' return _statistics.get(key)
[ "def", "get", "(", "key", ")", ":", "return", "_statistics", ".", "get", "(", "key", ")" ]
https://github.com/NASA-SW-VnV/ikos/blob/71325dfb94737332542caa708d7537752021522d/analyzer/python/ikos/stats.py#L99-L101
snap-stanford/snap-python
d53c51b0a26aa7e3e7400b014cdf728948fde80a
setup/snap.py
python
GetInvParticipRat
(*args)
return _snap.GetInvParticipRat(*args)
GetInvParticipRat(PUNGraph const & Graph, int MaxEigVecs, int TimeLimit, TFltPrV & EigValIprV) Parameters: Graph: PUNGraph const & MaxEigVecs: int TimeLimit: int EigValIprV: TFltPrV &
GetInvParticipRat(PUNGraph const & Graph, int MaxEigVecs, int TimeLimit, TFltPrV & EigValIprV)
[ "GetInvParticipRat", "(", "PUNGraph", "const", "&", "Graph", "int", "MaxEigVecs", "int", "TimeLimit", "TFltPrV", "&", "EigValIprV", ")" ]
def GetInvParticipRat(*args): """ GetInvParticipRat(PUNGraph const & Graph, int MaxEigVecs, int TimeLimit, TFltPrV & EigValIprV) Parameters: Graph: PUNGraph const & MaxEigVecs: int TimeLimit: int EigValIprV: TFltPrV & """ return _snap.GetInvParticipRat(*args)
[ "def", "GetInvParticipRat", "(", "*", "args", ")", ":", "return", "_snap", ".", "GetInvParticipRat", "(", "*", "args", ")" ]
https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L5618-L5629
nlohmann/json
eb2182414749825be086c825edb5229e5c28503d
third_party/cpplint/cpplint.py
python
CloseExpression
(clean_lines, linenum, pos)
return (line, clean_lines.NumLines(), -1)
If input points to ( or { or [ or <, finds the position that closes it. If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the linenum/pos that correspond to the closing of the expression. TODO(unknown): cpplint spends a fair bit of time matching parentheses. Ideally we would want to index all opening and closing parentheses once and have CloseExpression be just a simple lookup, but due to preprocessor tricks, this is not so easy. Args: clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. pos: A position on the line. Returns: A tuple (line, linenum, pos) pointer *past* the closing brace, or (line, len(lines), -1) if we never find a close. Note we ignore strings and comments when matching; and the line we return is the 'cleansed' line at linenum.
If input points to ( or { or [ or <, finds the position that closes it.
[ "If", "input", "points", "to", "(", "or", "{", "or", "[", "or", "<", "finds", "the", "position", "that", "closes", "it", "." ]
def CloseExpression(clean_lines, linenum, pos): """If input points to ( or { or [ or <, finds the position that closes it. If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the linenum/pos that correspond to the closing of the expression. TODO(unknown): cpplint spends a fair bit of time matching parentheses. Ideally we would want to index all opening and closing parentheses once and have CloseExpression be just a simple lookup, but due to preprocessor tricks, this is not so easy. Args: clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. pos: A position on the line. Returns: A tuple (line, linenum, pos) pointer *past* the closing brace, or (line, len(lines), -1) if we never find a close. Note we ignore strings and comments when matching; and the line we return is the 'cleansed' line at linenum. """ line = clean_lines.elided[linenum] if (line[pos] not in '({[<') or Match(r'<[<=]', line[pos:]): return (line, clean_lines.NumLines(), -1) # Check first line (end_pos, stack) = FindEndOfExpressionInLine(line, pos, []) if end_pos > -1: return (line, linenum, end_pos) # Continue scanning forward while stack and linenum < clean_lines.NumLines() - 1: linenum += 1 line = clean_lines.elided[linenum] (end_pos, stack) = FindEndOfExpressionInLine(line, 0, stack) if end_pos > -1: return (line, linenum, end_pos) # Did not find end of expression before end of file, give up return (line, clean_lines.NumLines(), -1)
[ "def", "CloseExpression", "(", "clean_lines", ",", "linenum", ",", "pos", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "if", "(", "line", "[", "pos", "]", "not", "in", "'({[<'", ")", "or", "Match", "(", "r'<[<=]'", ",", "...
https://github.com/nlohmann/json/blob/eb2182414749825be086c825edb5229e5c28503d/third_party/cpplint/cpplint.py#L2074-L2115
hpi-xnor/BMXNet-v2
af2b1859eafc5c721b1397cef02f946aaf2ce20d
example/rnn/word_lm/module.py
python
CustomStatefulModule.get_loss
(self)
return self._module.get_outputs(merge_multi_context=False)[-1]
Gets the output loss of the previous forward computation.
Gets the output loss of the previous forward computation.
[ "Gets", "the", "output", "loss", "of", "the", "previous", "forward", "computation", "." ]
def get_loss(self): """Gets the output loss of the previous forward computation. """ return self._module.get_outputs(merge_multi_context=False)[-1]
[ "def", "get_loss", "(", "self", ")", ":", "return", "self", ".", "_module", ".", "get_outputs", "(", "merge_multi_context", "=", "False", ")", "[", "-", "1", "]" ]
https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/example/rnn/word_lm/module.py#L131-L134
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
tools/grit/grit/format/policy_templates/writers/adm_writer.py
python
IndentedStringBuilder.AddLines
(self, other)
Appends the content of another |IndentedStringBuilder| to |self.lines|. Indentation of the added lines will be the sum of |self.indent| and their original indentation. Args: other: The buffer from which lines are copied.
Appends the content of another |IndentedStringBuilder| to |self.lines|. Indentation of the added lines will be the sum of |self.indent| and their original indentation.
[ "Appends", "the", "content", "of", "another", "|IndentedStringBuilder|", "to", "|self", ".", "lines|", ".", "Indentation", "of", "the", "added", "lines", "will", "be", "the", "sum", "of", "|self", ".", "indent|", "and", "their", "original", "indentation", "." ...
def AddLines(self, other): '''Appends the content of another |IndentedStringBuilder| to |self.lines|. Indentation of the added lines will be the sum of |self.indent| and their original indentation. Args: other: The buffer from which lines are copied. ''' for line in other.lines: self.AddLine(line)
[ "def", "AddLines", "(", "self", ",", "other", ")", ":", "for", "line", "in", "other", ".", "lines", ":", "self", ".", "AddLine", "(", "line", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/grit/grit/format/policy_templates/writers/adm_writer.py#L47-L56
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/indexes/range.py
python
RangeIndex.__len__
(self)
return len(self._range)
return the length of the RangeIndex
return the length of the RangeIndex
[ "return", "the", "length", "of", "the", "RangeIndex" ]
def __len__(self) -> int: """ return the length of the RangeIndex """ return len(self._range)
[ "def", "__len__", "(", "self", ")", "->", "int", ":", "return", "len", "(", "self", ".", "_range", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/indexes/range.py#L675-L679
llvm/llvm-project
ffa6262cb4e2a335d26416fad39a581b4f98c5f4
mlir/utils/spirv/gen_spirv_dialect.py
python
extract_td_op_info
(op_def)
return { # Prefix with 'Op' to make it consistent with SPIR-V spec 'opname': 'Op{}'.format(opname), 'inst_category': inst_category, 'category_args': category_args, 'traits': traits, 'description': description, 'arguments': args, 'results': results, 'extras': extras }
Extracts potentially manually specified sections in op's definition. Arguments: - A string containing the op's TableGen definition Returns: - A dict containing potential manually specified sections
Extracts potentially manually specified sections in op's definition.
[ "Extracts", "potentially", "manually", "specified", "sections", "in", "op", "s", "definition", "." ]
def extract_td_op_info(op_def): """Extracts potentially manually specified sections in op's definition. Arguments: - A string containing the op's TableGen definition Returns: - A dict containing potential manually specified sections """ # Get opname opname = [o[8:-2] for o in re.findall('def SPV_\w+Op', op_def)] assert len(opname) == 1, 'more than one ops in the same section!' opname = opname[0] # Get instruction category inst_category = [ o[4:] for o in re.findall('SPV_\w+Op', op_def.split(':', 1)[1]) ] assert len(inst_category) <= 1, 'more than one ops in the same section!' inst_category = inst_category[0] if len(inst_category) == 1 else 'Op' # Get category_args op_tmpl_params, _ = get_string_between_nested(op_def, '<', '>') opstringname, rest = get_string_between(op_tmpl_params, '"', '"') category_args = rest.split('[', 1)[0] # Get traits traits, _ = get_string_between_nested(rest, '[', ']') # Get description description, rest = get_string_between(op_def, 'let description = [{\n', '}];\n') # Get arguments args, rest = get_string_between(rest, ' let arguments = (ins', ');\n') # Get results results, rest = get_string_between(rest, ' let results = (outs', ');\n') extras = rest.strip(' }\n') if extras: extras = '\n {}\n'.format(extras) return { # Prefix with 'Op' to make it consistent with SPIR-V spec 'opname': 'Op{}'.format(opname), 'inst_category': inst_category, 'category_args': category_args, 'traits': traits, 'description': description, 'arguments': args, 'results': results, 'extras': extras }
[ "def", "extract_td_op_info", "(", "op_def", ")", ":", "# Get opname", "opname", "=", "[", "o", "[", "8", ":", "-", "2", "]", "for", "o", "in", "re", ".", "findall", "(", "'def SPV_\\w+Op'", ",", "op_def", ")", "]", "assert", "len", "(", "opname", ")"...
https://github.com/llvm/llvm-project/blob/ffa6262cb4e2a335d26416fad39a581b4f98c5f4/mlir/utils/spirv/gen_spirv_dialect.py#L867-L920
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/ops/math_ops.py
python
reduce_max
(input_tensor, axis=None, keepdims=False, name=None)
return reduce_max_with_dims(input_tensor, axis, keepdims, name, _ReductionDims(input_tensor, axis))
Computes `tf.math.maximum` of elements across dimensions of a tensor. This is the reduction operation for the elementwise `tf.math.maximum` op. Reduces `input_tensor` along the dimensions given in `axis`. Unless `keepdims` is true, the rank of the tensor is reduced by 1 for each of the entries in `axis`, which must be unique. If `keepdims` is true, the reduced dimensions are retained with length 1. If `axis` is None, all dimensions are reduced, and a tensor with a single element is returned. Usage example: >>> x = tf.constant([5, 1, 2, 4]) >>> tf.reduce_max(x) <tf.Tensor: shape=(), dtype=int32, numpy=5> >>> x = tf.constant([-5, -1, -2, -4]) >>> tf.reduce_max(x) <tf.Tensor: shape=(), dtype=int32, numpy=-1> >>> x = tf.constant([4, float('nan')]) >>> tf.reduce_max(x) <tf.Tensor: shape=(), dtype=float32, numpy=nan> >>> x = tf.constant([float('nan'), float('nan')]) >>> tf.reduce_max(x) <tf.Tensor: shape=(), dtype=float32, numpy=nan> >>> x = tf.constant([float('-inf'), float('inf')]) >>> tf.reduce_max(x) <tf.Tensor: shape=(), dtype=float32, numpy=inf> See the numpy docs for `np.amax` and `np.nanmax` behavior. Args: input_tensor: The tensor to reduce. Should have real numeric type. axis: The dimensions to reduce. If `None` (the default), reduces all dimensions. Must be in the range `[-rank(input_tensor), rank(input_tensor))`. keepdims: If true, retains reduced dimensions with length 1. name: A name for the operation (optional). Returns: The reduced tensor.
Computes `tf.math.maximum` of elements across dimensions of a tensor.
[ "Computes", "tf", ".", "math", ".", "maximum", "of", "elements", "across", "dimensions", "of", "a", "tensor", "." ]
def reduce_max(input_tensor, axis=None, keepdims=False, name=None): """Computes `tf.math.maximum` of elements across dimensions of a tensor. This is the reduction operation for the elementwise `tf.math.maximum` op. Reduces `input_tensor` along the dimensions given in `axis`. Unless `keepdims` is true, the rank of the tensor is reduced by 1 for each of the entries in `axis`, which must be unique. If `keepdims` is true, the reduced dimensions are retained with length 1. If `axis` is None, all dimensions are reduced, and a tensor with a single element is returned. Usage example: >>> x = tf.constant([5, 1, 2, 4]) >>> tf.reduce_max(x) <tf.Tensor: shape=(), dtype=int32, numpy=5> >>> x = tf.constant([-5, -1, -2, -4]) >>> tf.reduce_max(x) <tf.Tensor: shape=(), dtype=int32, numpy=-1> >>> x = tf.constant([4, float('nan')]) >>> tf.reduce_max(x) <tf.Tensor: shape=(), dtype=float32, numpy=nan> >>> x = tf.constant([float('nan'), float('nan')]) >>> tf.reduce_max(x) <tf.Tensor: shape=(), dtype=float32, numpy=nan> >>> x = tf.constant([float('-inf'), float('inf')]) >>> tf.reduce_max(x) <tf.Tensor: shape=(), dtype=float32, numpy=inf> See the numpy docs for `np.amax` and `np.nanmax` behavior. Args: input_tensor: The tensor to reduce. Should have real numeric type. axis: The dimensions to reduce. If `None` (the default), reduces all dimensions. Must be in the range `[-rank(input_tensor), rank(input_tensor))`. keepdims: If true, retains reduced dimensions with length 1. name: A name for the operation (optional). Returns: The reduced tensor. """ return reduce_max_with_dims(input_tensor, axis, keepdims, name, _ReductionDims(input_tensor, axis))
[ "def", "reduce_max", "(", "input_tensor", ",", "axis", "=", "None", ",", "keepdims", "=", "False", ",", "name", "=", "None", ")", ":", "return", "reduce_max_with_dims", "(", "input_tensor", ",", "axis", ",", "keepdims", ",", "name", ",", "_ReductionDims", ...
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/math_ops.py#L3060-L3105
openthread/openthread
9fcdbed9c526c70f1556d1ed84099c1535c7cd32
tools/harness-automation/autothreadharness/runner.py
python
list_devices
(names=None, continue_from=None, **kwargs)
List devices in settings file and print versions
List devices in settings file and print versions
[ "List", "devices", "in", "settings", "file", "and", "print", "versions" ]
def list_devices(names=None, continue_from=None, **kwargs): """List devices in settings file and print versions""" if not names: names = [device for device, _type in settings.GOLDEN_DEVICES if _type == 'OpenThread'] if continue_from: continue_from = names.index(continue_from) else: continue_from = 0 for port in names[continue_from:]: try: with OpenThreadController(port) as otc: print('%s: %s' % (port, otc.version)) except BaseException: logger.exception('failed to get version of %s' % port)
[ "def", "list_devices", "(", "names", "=", "None", ",", "continue_from", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "not", "names", ":", "names", "=", "[", "device", "for", "device", ",", "_type", "in", "settings", ".", "GOLDEN_DEVICES", "if"...
https://github.com/openthread/openthread/blob/9fcdbed9c526c70f1556d1ed84099c1535c7cd32/tools/harness-automation/autothreadharness/runner.py#L157-L173
apple/swift-lldb
d74be846ef3e62de946df343e8c234bde93a8912
scripts/Python/static-binding/lldb.py
python
SBQueue.GetNumRunningItems
(self)
return _lldb.SBQueue_GetNumRunningItems(self)
GetNumRunningItems(SBQueue self) -> uint32_t
GetNumRunningItems(SBQueue self) -> uint32_t
[ "GetNumRunningItems", "(", "SBQueue", "self", ")", "-", ">", "uint32_t" ]
def GetNumRunningItems(self): """GetNumRunningItems(SBQueue self) -> uint32_t""" return _lldb.SBQueue_GetNumRunningItems(self)
[ "def", "GetNumRunningItems", "(", "self", ")", ":", "return", "_lldb", ".", "SBQueue_GetNumRunningItems", "(", "self", ")" ]
https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L9126-L9128
root-project/root
fcd3583bb14852bf2e8cd2415717cbaac0e75896
interpreter/llvm/src/tools/clang/tools/scan-build-py/libscanbuild/report.py
python
parse_bug_html
(filename)
Parse out the bug information from HTML output.
Parse out the bug information from HTML output.
[ "Parse", "out", "the", "bug", "information", "from", "HTML", "output", "." ]
def parse_bug_html(filename): """ Parse out the bug information from HTML output. """ patterns = [re.compile(r'<!-- BUGTYPE (?P<bug_type>.*) -->$'), re.compile(r'<!-- BUGFILE (?P<bug_file>.*) -->$'), re.compile(r'<!-- BUGPATHLENGTH (?P<bug_path_length>.*) -->$'), re.compile(r'<!-- BUGLINE (?P<bug_line>.*) -->$'), re.compile(r'<!-- BUGCATEGORY (?P<bug_category>.*) -->$'), re.compile(r'<!-- BUGDESC (?P<bug_description>.*) -->$'), re.compile(r'<!-- FUNCTIONNAME (?P<bug_function>.*) -->$')] endsign = re.compile(r'<!-- BUGMETAEND -->') bug = { 'report_file': filename, 'bug_function': 'n/a', # compatibility with < clang-3.5 'bug_category': 'Other', 'bug_line': 0, 'bug_path_length': 1 } with open(filename) as handler: for line in handler.readlines(): # do not read the file further if endsign.match(line): break # search for the right lines for regex in patterns: match = regex.match(line.strip()) if match: bug.update(match.groupdict()) break encode_value(bug, 'bug_line', int) encode_value(bug, 'bug_path_length', int) yield bug
[ "def", "parse_bug_html", "(", "filename", ")", ":", "patterns", "=", "[", "re", ".", "compile", "(", "r'<!-- BUGTYPE (?P<bug_type>.*) -->$'", ")", ",", "re", ".", "compile", "(", "r'<!-- BUGFILE (?P<bug_file>.*) -->$'", ")", ",", "re", ".", "compile", "(", "r'<!...
https://github.com/root-project/root/blob/fcd3583bb14852bf2e8cd2415717cbaac0e75896/interpreter/llvm/src/tools/clang/tools/scan-build-py/libscanbuild/report.py#L301-L336
brave/brave-core
ceaa3de4735789d355b6fa80c21d4709e2c1d0e8
tools/posix/generate_breakpad_symbols.py
python
mkdir_p
(path)
Simulates mkdir -p.
Simulates mkdir -p.
[ "Simulates", "mkdir", "-", "p", "." ]
def mkdir_p(path): """Simulates mkdir -p.""" try: os.makedirs(path) except OSError as e: if e.errno == errno.EEXIST and os.path.isdir(path): pass else: raise
[ "def", "mkdir_p", "(", "path", ")", ":", "try", ":", "os", ".", "makedirs", "(", "path", ")", "except", "OSError", "as", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "EEXIST", "and", "os", ".", "path", ".", "isdir", "(", "path", ")", ...
https://github.com/brave/brave-core/blob/ceaa3de4735789d355b6fa80c21d4709e2c1d0e8/tools/posix/generate_breakpad_symbols.py#L171-L178
etternagame/etterna
8775f74ac9c353320128609d4b4150672e9a6d04
extern/crashpad/buildtools/checkdeps/graphdeps.py
python
DepsGrapher._DumpDependenciesImpl
(self, deps, out)
Computes nodes' and edges' properties for the dependency graph |deps| and carries out the actual dumping to a file/pipe |out|.
Computes nodes' and edges' properties for the dependency graph |deps| and carries out the actual dumping to a file/pipe |out|.
[ "Computes", "nodes", "and", "edges", "properties", "for", "the", "dependency", "graph", "|deps|", "and", "carries", "out", "the", "actual", "dumping", "to", "a", "file", "/", "pipe", "|out|", "." ]
def _DumpDependenciesImpl(self, deps, out): """Computes nodes' and edges' properties for the dependency graph |deps| and carries out the actual dumping to a file/pipe |out|.""" deps_graph = dict() deps_srcs = set() # Pre-initialize the graph with src->(dst, allow) pairs. for (allow, src, dst) in deps: if allow == Rule.TEMP_ALLOW and self.ignore_temp_rules: continue deps_srcs.add(src) if src not in deps_graph: deps_graph[src] = [] deps_graph[src].append((dst, allow)) # Add all hierarchical parents too, in case some of them don't have their # own DEPS, and therefore are missing from the list of rules. Those will # be recursively populated with their parents' rules in the next block. parent_src = os.path.dirname(src) while parent_src: if parent_src not in deps_graph: deps_graph[parent_src] = [] parent_src = os.path.dirname(parent_src) # For every node, propagate its rules down to all its children. deps_srcs = list(deps_srcs) deps_srcs.sort() for src in deps_srcs: parent_src = os.path.dirname(src) if parent_src: # We presort the list, so parents are guaranteed to precede children. assert parent_src in deps_graph,\ "src: %s, parent_src: %s" % (src, parent_src) for (dst, allow) in deps_graph[parent_src]: # Check that this node does not explicitly override a rule from the # parent that we're about to add. if ((dst, Rule.ALLOW) not in deps_graph[src]) and \ ((dst, Rule.TEMP_ALLOW) not in deps_graph[src]) and \ ((dst, Rule.DISALLOW) not in deps_graph[src]): deps_graph[src].append((dst, allow)) node_props = {} edges = [] # 1) Populate a list of edge specifications in DOT format; # 2) Populate a list of computed raw node attributes to be output as node # specifications in DOT format later on. # Edges and nodes are emphasized with color and line/border weight depending # on how many of incl/excl/hilite_fanins/hilite_fanouts filters they hit, # and in what way. 
for src in deps_graph.keys(): for (dst, allow) in deps_graph[src]: if allow == Rule.DISALLOW and self.hide_disallowed_deps: continue if allow == Rule.ALLOW and src == dst: continue edge_spec = "%s->%s" % (src, dst) if not re.search(self.incl, edge_spec) or \ re.search(self.excl, edge_spec): continue if src not in node_props: node_props[src] = {'hilite': None, 'degree': 0} if dst not in node_props: node_props[dst] = {'hilite': None, 'degree': 0} edge_weight = 1 if self.hilite_fanouts and re.search(self.hilite_fanouts, src): node_props[src]['hilite'] = 'lightgreen' node_props[dst]['hilite'] = 'lightblue' node_props[dst]['degree'] += 1 edge_weight += 1 if self.hilite_fanins and re.search(self.hilite_fanins, dst): node_props[src]['hilite'] = 'lightblue' node_props[dst]['hilite'] = 'lightgreen' node_props[src]['degree'] += 1 edge_weight += 1 if allow == Rule.ALLOW: edge_color = (edge_weight > 1) and 'blue' or 'green' edge_style = 'solid' elif allow == Rule.TEMP_ALLOW: edge_color = (edge_weight > 1) and 'blue' or 'green' edge_style = 'dashed' else: edge_color = 'red' edge_style = 'dashed' edges.append(' "%s" -> "%s" [style=%s,color=%s,penwidth=%d];' % \ (src, dst, edge_style, edge_color, edge_weight)) # Reformat the computed raw node attributes into a final DOT representation. nodes = [] for (node, attrs) in node_props.iteritems(): attr_strs = [] if attrs['hilite']: attr_strs.append('style=filled,fillcolor=%s' % attrs['hilite']) attr_strs.append('penwidth=%d' % (attrs['degree'] or 1)) nodes.append(' "%s" [%s];' % (node, ','.join(attr_strs))) # Output nodes and edges to |out| (can be a file or a pipe). edges.sort() nodes.sort() out.write('digraph DEPS {\n' ' fontsize=8;\n') out.write('\n'.join(nodes)) out.write('\n\n') out.write('\n'.join(edges)) out.write('\n}\n') out.close()
[ "def", "_DumpDependenciesImpl", "(", "self", ",", "deps", ",", "out", ")", ":", "deps_graph", "=", "dict", "(", ")", "deps_srcs", "=", "set", "(", ")", "# Pre-initialize the graph with src->(dst, allow) pairs.", "for", "(", "allow", ",", "src", ",", "dst", ")"...
https://github.com/etternagame/etterna/blob/8775f74ac9c353320128609d4b4150672e9a6d04/extern/crashpad/buildtools/checkdeps/graphdeps.py#L139-L252
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/distutils/dist.py
python
Distribution.get_command_packages
(self)
return pkgs
Return a list of packages from which commands are loaded.
Return a list of packages from which commands are loaded.
[ "Return", "a", "list", "of", "packages", "from", "which", "commands", "are", "loaded", "." ]
def get_command_packages(self): """Return a list of packages from which commands are loaded.""" pkgs = self.command_packages if not isinstance(pkgs, list): if pkgs is None: pkgs = '' pkgs = [pkg.strip() for pkg in pkgs.split(',') if pkg != ''] if "distutils.command" not in pkgs: pkgs.insert(0, "distutils.command") self.command_packages = pkgs return pkgs
[ "def", "get_command_packages", "(", "self", ")", ":", "pkgs", "=", "self", ".", "command_packages", "if", "not", "isinstance", "(", "pkgs", ",", "list", ")", ":", "if", "pkgs", "is", "None", ":", "pkgs", "=", "''", "pkgs", "=", "[", "pkg", ".", "stri...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/distutils/dist.py#L795-L805
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/ops/parallel_for/pfor.py
python
RegisterPFor.__init__
(self, op_type)
Creates an object to register a converter for op with type `op_type`.
Creates an object to register a converter for op with type `op_type`.
[ "Creates", "an", "object", "to", "register", "a", "converter", "for", "op", "with", "type", "op_type", "." ]
def __init__(self, op_type): """Creates an object to register a converter for op with type `op_type`.""" self.op_type = op_type
[ "def", "__init__", "(", "self", ",", "op_type", ")", ":", "self", ".", "op_type", "=", "op_type" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/parallel_for/pfor.py#L987-L989
yun-liu/RCF
91bfb054ad04187dbbe21e539e165ad9bd3ff00b
scripts/cpp_lint.py
python
FileInfo.FullName
(self)
return os.path.abspath(self._filename).replace('\\', '/')
Make Windows paths like Unix.
Make Windows paths like Unix.
[ "Make", "Windows", "paths", "like", "Unix", "." ]
def FullName(self): """Make Windows paths like Unix.""" return os.path.abspath(self._filename).replace('\\', '/')
[ "def", "FullName", "(", "self", ")", ":", "return", "os", ".", "path", ".", "abspath", "(", "self", ".", "_filename", ")", ".", "replace", "(", "'\\\\'", ",", "'/'", ")" ]
https://github.com/yun-liu/RCF/blob/91bfb054ad04187dbbe21e539e165ad9bd3ff00b/scripts/cpp_lint.py#L881-L883
apple/swift-lldb
d74be846ef3e62de946df343e8c234bde93a8912
scripts/Python/static-binding/lldb.py
python
SBValue.GetObjectDescription
(self)
return _lldb.SBValue_GetObjectDescription(self)
GetObjectDescription(SBValue self) -> char const *
GetObjectDescription(SBValue self) -> char const *
[ "GetObjectDescription", "(", "SBValue", "self", ")", "-", ">", "char", "const", "*" ]
def GetObjectDescription(self): """GetObjectDescription(SBValue self) -> char const *""" return _lldb.SBValue_GetObjectDescription(self)
[ "def", "GetObjectDescription", "(", "self", ")", ":", "return", "_lldb", ".", "SBValue_GetObjectDescription", "(", "self", ")" ]
https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L14269-L14271
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/calendar.py
python
CalendarCtrlBase.PySetLowerDateLimit
(self, date)
takes datetime.datetime or datetime.date object
takes datetime.datetime or datetime.date object
[ "takes", "datetime", ".", "datetime", "or", "datetime", ".", "date", "object" ]
def PySetLowerDateLimit(self, date): """takes datetime.datetime or datetime.date object""" self.SetLowerDateLimit(_pydate2wxdate(date))
[ "def", "PySetLowerDateLimit", "(", "self", ",", "date", ")", ":", "self", ".", "SetLowerDateLimit", "(", "_pydate2wxdate", "(", "date", ")", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/calendar.py#L451-L453
amd/OpenCL-caffe
638543108517265366c18ae5821f3096cf5cf34a
python/caffe/net_spec.py
python
Top.to_proto
(self)
return to_proto(self)
Generate a NetParameter that contains all layers needed to compute this top.
Generate a NetParameter that contains all layers needed to compute this top.
[ "Generate", "a", "NetParameter", "that", "contains", "all", "layers", "needed", "to", "compute", "this", "top", "." ]
def to_proto(self): """Generate a NetParameter that contains all layers needed to compute this top.""" return to_proto(self)
[ "def", "to_proto", "(", "self", ")", ":", "return", "to_proto", "(", "self", ")" ]
https://github.com/amd/OpenCL-caffe/blob/638543108517265366c18ae5821f3096cf5cf34a/python/caffe/net_spec.py#L86-L90
baoboa/pyqt5
11d5f43bc6f213d9d60272f3954a0048569cfc7c
pyuic/uic/__init__.py
python
compileUiDir
(dir, recurse=False, map=None, **compileUi_args)
compileUiDir(dir, recurse=False, map=None, **compileUi_args) Creates Python modules from Qt Designer .ui files in a directory or directory tree. dir is the name of the directory to scan for files whose name ends with '.ui'. By default the generated Python module is created in the same directory ending with '.py'. recurse is set if any sub-directories should be scanned. The default is False. map is an optional callable that is passed the name of the directory containing the '.ui' file and the name of the Python module that will be created. The callable should return a tuple of the name of the directory in which the Python module will be created and the (possibly modified) name of the module. The default is None. compileUi_args are any additional keyword arguments that are passed to the compileUi() function that is called to create each Python module.
compileUiDir(dir, recurse=False, map=None, **compileUi_args)
[ "compileUiDir", "(", "dir", "recurse", "=", "False", "map", "=", "None", "**", "compileUi_args", ")" ]
def compileUiDir(dir, recurse=False, map=None, **compileUi_args): """compileUiDir(dir, recurse=False, map=None, **compileUi_args) Creates Python modules from Qt Designer .ui files in a directory or directory tree. dir is the name of the directory to scan for files whose name ends with '.ui'. By default the generated Python module is created in the same directory ending with '.py'. recurse is set if any sub-directories should be scanned. The default is False. map is an optional callable that is passed the name of the directory containing the '.ui' file and the name of the Python module that will be created. The callable should return a tuple of the name of the directory in which the Python module will be created and the (possibly modified) name of the module. The default is None. compileUi_args are any additional keyword arguments that are passed to the compileUi() function that is called to create each Python module. """ import os # Compile a single .ui file. def compile_ui(ui_dir, ui_file): # Ignore if it doesn't seem to be a .ui file. if ui_file.endswith('.ui'): py_dir = ui_dir py_file = ui_file[:-3] + '.py' # Allow the caller to change the name of the .py file or generate # it in a different directory. if map is not None: py_dir, py_file = map(py_dir, py_file) # Make sure the destination directory exists. try: os.makedirs(py_dir) except: pass ui_path = os.path.join(ui_dir, ui_file) py_path = os.path.join(py_dir, py_file) ui_file = open(ui_path, 'r') py_file = open(py_path, 'w') try: compileUi(ui_file, py_file, **compileUi_args) finally: ui_file.close() py_file.close() if recurse: for root, _, files in os.walk(dir): for ui in files: compile_ui(root, ui) else: for ui in os.listdir(dir): if os.path.isfile(os.path.join(dir, ui)): compile_ui(dir, ui)
[ "def", "compileUiDir", "(", "dir", ",", "recurse", "=", "False", ",", "map", "=", "None", ",", "*", "*", "compileUi_args", ")", ":", "import", "os", "# Compile a single .ui file.", "def", "compile_ui", "(", "ui_dir", ",", "ui_file", ")", ":", "# Ignore if it...
https://github.com/baoboa/pyqt5/blob/11d5f43bc6f213d9d60272f3954a0048569cfc7c/pyuic/uic/__init__.py#L69-L128
kamyu104/LeetCode-Solutions
77605708a927ea3b85aee5a479db733938c7c211
Python/confusing-number-ii.py
python
Solution.confusingNumberII
(self, n)
return totalCount(n) - validCountInLessLength(n) - validCountInFullLength(n)
:type n: int :rtype: int
:type n: int :rtype: int
[ ":", "type", "n", ":", "int", ":", "rtype", ":", "int" ]
def confusingNumberII(self, n): """ :type n: int :rtype: int """ lookup = {"0":"0", "1":"1", "6":"9", "8":"8", "9":"6"} centers = {"0":"0", "1":"1", "8":"8"} def totalCount(n): # count all numbers in the pattern of [01689]{1,len(n)} in the range of [1, n] s = str(n) total = 0 p = len(lookup)**(len(s)-1) for i in xrange(len(s)+1): if i == len(s): total += 1 break smaller = sum(int(c < s[i]) for c in lookup.iterkeys()) total += smaller * p if s[i] not in lookup: break p //= len(lookup) return total-1 # exclude 0 def validCountInLessLength(n): # count unconfusing numbers in the pattern of [01689]{1,len(n)-1} in the range of [1, n] s = str(n) valid = 0 total = len(centers) for i in xrange(1, len(s), 2): # count unconfusing numbers for each odd length less than s if i == 1: valid += len({c for c in centers.iterkeys() if c != '0'}) else: valid += total * (len(lookup)-1) total *= len(lookup) total = 1 for i in xrange(2, len(s), 2): # count unconfusing numbers for each even length less than s valid += total * (len(lookup)-1) total *= len(lookup) return valid def validCountInFullLength(n): # count unconfusing numbers in the pattern of [01689]{len(n)} in the range of [1, n] s = str(n) half_s = s[:(len(s)+1)//2] total = 0 choices = centers if (len(s) % 2) else lookup p = int(len(lookup)**(len(half_s)-2) * len(choices)) for i in xrange(len(half_s)): if i == len(half_s)-1: total += sum(int(c < half_s[i]) for c in choices.iterkeys() if i != 0 or c != '0') if half_s[i] not in choices: break tmp = list(half_s)+[lookup[half_s[i]] for i in reversed(xrange(len(half_s)-(len(s) % 2)))] total += 0 < int("".join(tmp)) <= n break smaller = sum(int(c < half_s[i]) for c in lookup.iterkeys() if i != 0 or c != '0') total += smaller * p if half_s[i] not in lookup: break p //= len(lookup) return total return totalCount(n) - validCountInLessLength(n) - validCountInFullLength(n)
[ "def", "confusingNumberII", "(", "self", ",", "n", ")", ":", "lookup", "=", "{", "\"0\"", ":", "\"0\"", ",", "\"1\"", ":", "\"1\"", ",", "\"6\"", ":", "\"9\"", ",", "\"8\"", ":", "\"8\"", ",", "\"9\"", ":", "\"6\"", "}", "centers", "=", "{", "\"0\"...
https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/confusing-number-ii.py#L5-L64
papyrussolution/OpenPapyrus
bbfb5ec2ea2109b8e2f125edd838e12eaf7b8b91
Src/OSF/protobuf-3.19.1/python/mox.py
python
Mox.__init__
(self)
Initialize a new Mox.
Initialize a new Mox.
[ "Initialize", "a", "new", "Mox", "." ]
def __init__(self): """Initialize a new Mox.""" self._mock_objects = [] self.stubs = stubout.StubOutForTesting()
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "_mock_objects", "=", "[", "]", "self", ".", "stubs", "=", "stubout", ".", "StubOutForTesting", "(", ")" ]
https://github.com/papyrussolution/OpenPapyrus/blob/bbfb5ec2ea2109b8e2f125edd838e12eaf7b8b91/Src/OSF/protobuf-3.19.1/python/mox.py#L158-L162
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/urllib3/fields.py
python
guess_content_type
(filename, default="application/octet-stream")
return default
Guess the "Content-Type" of a file. :param filename: The filename to guess the "Content-Type" of using :mod:`mimetypes`. :param default: If no "Content-Type" can be guessed, default to `default`.
Guess the "Content-Type" of a file.
[ "Guess", "the", "Content", "-", "Type", "of", "a", "file", "." ]
def guess_content_type(filename, default="application/octet-stream"): """ Guess the "Content-Type" of a file. :param filename: The filename to guess the "Content-Type" of using :mod:`mimetypes`. :param default: If no "Content-Type" can be guessed, default to `default`. """ if filename: return mimetypes.guess_type(filename)[0] or default return default
[ "def", "guess_content_type", "(", "filename", ",", "default", "=", "\"application/octet-stream\"", ")", ":", "if", "filename", ":", "return", "mimetypes", ".", "guess_type", "(", "filename", ")", "[", "0", "]", "or", "default", "return", "default" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/urllib3/fields.py#L10-L21
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/saved_model/builder_impl.py
python
_add_asset_to_collection
(asset_filename, asset_tensor)
Builds an asset proto and adds it to the asset collection of the graph. Args: asset_filename: The filename of the asset to be added. asset_tensor: The asset tensor used to populate the tensor info of the asset proto.
Builds an asset proto and adds it to the asset collection of the graph.
[ "Builds", "an", "asset", "proto", "and", "adds", "it", "to", "the", "asset", "collection", "of", "the", "graph", "." ]
def _add_asset_to_collection(asset_filename, asset_tensor): """Builds an asset proto and adds it to the asset collection of the graph. Args: asset_filename: The filename of the asset to be added. asset_tensor: The asset tensor used to populate the tensor info of the asset proto. """ asset_proto = meta_graph_pb2.AssetFileDef() asset_proto.filename = asset_filename asset_proto.tensor_info.name = asset_tensor.name asset_any_proto = Any() asset_any_proto.Pack(asset_proto) ops.add_to_collection(constants.ASSETS_KEY, asset_any_proto)
[ "def", "_add_asset_to_collection", "(", "asset_filename", ",", "asset_tensor", ")", ":", "asset_proto", "=", "meta_graph_pb2", ".", "AssetFileDef", "(", ")", "asset_proto", ".", "filename", "=", "asset_filename", "asset_proto", ".", "tensor_info", ".", "name", "=", ...
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/saved_model/builder_impl.py#L777-L791
yyzybb537/libgo
4af17b7c67643c4d54aa354dcc77963ea07847d0
third_party/boost.context/tools/build/src/build/generators.py
python
construct
(project, name, target_type, prop_set, sources, top_level=False)
return result
Attempts to create target of 'target-type' with 'properties' from 'sources'. The 'sources' are treated as a collection of *possible* ingridients -- i.e. it is not required to consume them all. If 'multiple' is true, the rule is allowed to return several targets of 'target-type'. Returns a list of target. When this invocation is first instance of 'construct' in stack, returns only targets of requested 'target-type', otherwise, returns also unused sources and additionally generated targets. If 'top-level' is set, does not suppress generators that are already used in the stack. This may be useful in cases where a generator has to build a metatarget -- for example a target corresponding to built tool.
Attempts to create target of 'target-type' with 'properties' from 'sources'. The 'sources' are treated as a collection of *possible* ingridients -- i.e. it is not required to consume them all. If 'multiple' is true, the rule is allowed to return several targets of 'target-type'.
[ "Attempts", "to", "create", "target", "of", "target", "-", "type", "with", "properties", "from", "sources", ".", "The", "sources", "are", "treated", "as", "a", "collection", "of", "*", "possible", "*", "ingridients", "--", "i", ".", "e", ".", "it", "is",...
def construct (project, name, target_type, prop_set, sources, top_level=False):
    """ Attempts to create target of 'target-type' with 'properties'
        from 'sources'. The 'sources' are treated as a collection of
        *possible* ingredients -- i.e. it is not required to consume
        them all. If 'multiple' is true, the rule is allowed to return
        several targets of 'target-type'.

        Returns a list of target. When this invocation is first instance of
        'construct' in stack, returns only targets of requested 'target-type',
        otherwise, returns also unused sources and additionally generated
        targets.

        If 'top-level' is set, does not suppress generators that are already
        used in the stack. This may be useful in cases where a generator
        has to build a metatarget -- for example a target corresponding to
        built tool.
    """
    if __debug__:
        from .targets import ProjectTarget
        assert isinstance(project, ProjectTarget)
        assert isinstance(name, basestring) or name is None
        assert isinstance(target_type, basestring)
        assert isinstance(prop_set, property_set.PropertySet)
        assert is_iterable_typed(sources, virtual_target.VirtualTarget)
        assert isinstance(top_level, bool)
    global __active_generators
    if top_level:
        # A top-level construction must not be suppressed by generators
        # already active on the stack; save and clear the active set.
        saved_active = __active_generators
        __active_generators = []

    global __construct_stack
    if not __construct_stack:
        # Outermost call: validate the source types once.
        __ensure_type (sources)

    __construct_stack.append (1)

    increase_indent ()

    if project.manager().logger().on():
        dout( "*** construct " + target_type)

        for s in sources:
            dout(" from " + str(s))

        project.manager().logger().log (__name__, "  properties: ", prop_set.raw ())

    result = __construct_really(project, name, target_type, prop_set, sources)

    decrease_indent()

    # Pop our stack frame (the stack only records nesting depth).
    __construct_stack = __construct_stack [1:]

    if top_level:
        # Restore the generator set saved above.
        __active_generators = saved_active

    return result
[ "def", "construct", "(", "project", ",", "name", ",", "target_type", ",", "prop_set", ",", "sources", ",", "top_level", "=", "False", ")", ":", "if", "__debug__", ":", "from", ".", "targets", "import", "ProjectTarget", "assert", "isinstance", "(", "project",...
https://github.com/yyzybb537/libgo/blob/4af17b7c67643c4d54aa354dcc77963ea07847d0/third_party/boost.context/tools/build/src/build/generators.py#L1146-L1201
Yijunmaverick/GenerativeFaceCompletion
f72dea0fa27c779fef7b65d2f01e82bcc23a0eb2
scripts/cpp_lint.py
python
CheckPosixThreading
(filename, clean_lines, linenum, error)
Checks for calls to thread-unsafe functions. Much code has been originally written without consideration of multi-threading. Also, engineers are relying on their old experience; they have learned posix before threading extensions were added. These tests guide the engineers to use thread-safe functions (when using posix directly). Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found.
Checks for calls to thread-unsafe functions.
[ "Checks", "for", "calls", "to", "thread", "-", "unsafe", "functions", "." ]
def CheckPosixThreading(filename, clean_lines, linenum, error):
  """Checks for calls to thread-unsafe functions.

  Much code has been originally written without consideration of
  multi-threading. Also, engineers are relying on their old experience;
  they have learned posix before threading extensions were added. These
  tests guide the engineers to use thread-safe functions (when using
  posix directly).

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    error: The function to call with any errors found.
  """
  line = clean_lines.elided[linenum]
  for unsafe_fn, safe_fn in threading_list:
    pos = line.find(unsafe_fn)
    if pos < 0:
      continue
    # Only flag a match that is not embedded in a longer identifier or a
    # member/qualified access: the preceding character must not be
    # alphanumeric, '_', '.', or '>'.
    at_word_start = (pos == 0 or
                     (not line[pos - 1].isalnum() and
                      line[pos - 1] not in ('_', '.', '>')))
    if at_word_start:
      error(filename, linenum, 'runtime/threadsafe_fn', 2,
            'Consider using ' + safe_fn +
            '...) instead of ' + unsafe_fn +
            '...) for improved thread safety.')
[ "def", "CheckPosixThreading", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "for", "single_thread_function", ",", "multithread_safe_function", "in", "threading_list", ":...
https://github.com/Yijunmaverick/GenerativeFaceCompletion/blob/f72dea0fa27c779fef7b65d2f01e82bcc23a0eb2/scripts/cpp_lint.py#L1681-L1705
LiquidPlayer/LiquidCore
9405979363f2353ac9a71ad8ab59685dd7f919c9
deps/node-10.15.3/tools/cpplint.py
python
_IncludeState.ResetSection
(self, directive)
Reset section checking for preprocessor directive. Args: directive: preprocessor directive (e.g. "if", "else").
Reset section checking for preprocessor directive.
[ "Reset", "section", "checking", "for", "preprocessor", "directive", "." ]
def ResetSection(self, directive):
    """Reset section checking for preprocessor directive.

    Args:
      directive: preprocessor directive (e.g. "if", "else").
    """
    # Forget where we were: back to the initial section, no header seen yet.
    self._section = self._INITIAL_SECTION
    self._last_header = ''

    # Maintain the per-branch include tracking. Note that entries are never
    # popped from the include list.
    opens_branch = directive in ('if', 'ifdef', 'ifndef')
    switches_branch = directive in ('else', 'elif')
    if opens_branch:
        # A new conditional opens a fresh include scope.
        self.include_list.append([])
    elif switches_branch:
        # An alternative branch restarts the current scope.
        self.include_list[-1] = []
[ "def", "ResetSection", "(", "self", ",", "directive", ")", ":", "# The name of the current section.", "self", ".", "_section", "=", "self", ".", "_INITIAL_SECTION", "# The path of last found header.", "self", ".", "_last_header", "=", "''", "# Update list of includes. No...
https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/node-10.15.3/tools/cpplint.py#L869-L885
omnisci/omniscidb
b9c95f1bd602b4ffc8b0edf18bfad61031e08d86
python/omnisci/thrift/OmniSci.py
python
Iface.check_table_consistency
(self, session, table_id)
Parameters: - session - table_id
Parameters: - session - table_id
[ "Parameters", ":", "-", "session", "-", "table_id" ]
def check_table_consistency(self, session, table_id):
    """
    Parameters:
     - session
     - table_id

    """
    # Thrift service interface stub: concrete client/handler classes
    # override this with the real implementation.
    pass
[ "def", "check_table_consistency", "(", "self", ",", "session", ",", "table_id", ")", ":", "pass" ]
https://github.com/omnisci/omniscidb/blob/b9c95f1bd602b4ffc8b0edf18bfad61031e08d86/python/omnisci/thrift/OmniSci.py#L763-L770
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/setuptools/glob.py
python
glob
(pathname, recursive=False)
return list(iglob(pathname, recursive=recursive))
Return a list of paths matching a pathname pattern. The pattern may contain simple shell-style wildcards a la fnmatch. However, unlike fnmatch, filenames starting with a dot are special cases that are not matched by '*' and '?' patterns. If recursive is true, the pattern '**' will match any files and zero or more directories and subdirectories.
Return a list of paths matching a pathname pattern.
[ "Return", "a", "list", "of", "paths", "matching", "a", "pathname", "pattern", "." ]
def glob(pathname, recursive=False):
    """Return a list of paths matching a pathname pattern.

    The pattern may contain simple shell-style wildcards a la
    fnmatch. However, unlike fnmatch, filenames starting with a
    dot are special cases that are not matched by '*' and '?'
    patterns.

    If recursive is true, the pattern '**' will match any files and
    zero or more directories and subdirectories.
    """
    # Eagerly drain the lazy iterator variant into a list.
    matches = iglob(pathname, recursive=recursive)
    return list(matches)
[ "def", "glob", "(", "pathname", ",", "recursive", "=", "False", ")", ":", "return", "list", "(", "iglob", "(", "pathname", ",", "recursive", "=", "recursive", ")", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/setuptools/glob.py#L16-L27
google/llvm-propeller
45c226984fe8377ebfb2ad7713c680d652ba678d
llvm/utils/benchmark/mingw.py
python
main
()
Invoked when the script is run directly by the python interpreter
Invoked when the script is run directly by the python interpreter
[ "Invoked", "when", "the", "script", "is", "run", "directly", "by", "the", "python", "interpreter" ]
def main():
    '''
    Invoked when the script is run directly by the python interpreter

    Parses command-line options, configures logging verbosity, downloads
    the requested MinGW distribution, and prints its bin/ directory.
    '''
    parser = argparse.ArgumentParser(
        description = 'Downloads a specific version of MinGW',
        formatter_class = argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument('--location',
        help = 'the location to download the compiler to',
        default = os.path.join(tempfile.gettempdir(), 'mingw-builds'))
    parser.add_argument('--arch', required = True, choices = ['i686',
        'x86_64'], help = 'the target MinGW architecture string')
    parser.add_argument('--version', type = str2ver,
        help = 'the version of GCC to download')
    parser.add_argument('--threading', choices = ['posix', 'win32'],
        help = 'the threading type of the compiler')
    parser.add_argument('--exceptions', choices = ['sjlj', 'seh', 'dwarf'],
        help = 'the method to throw exceptions')
    parser.add_argument('--revision', type=int,
        help = 'the revision of the MinGW release')
    # -v and -q are mutually exclusive: verbosity is either raised or lowered.
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-v', '--verbose', action='store_true',
        help='increase the script output verbosity')
    group.add_argument('-q', '--quiet', action='store_true',
        help='only print errors and warning')
    args = parser.parse_args()

    # Create the logger
    logger = logging.getLogger('mingw')
    handler = logging.StreamHandler()
    formatter = logging.Formatter('%(message)s')
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)
    if args.quiet:
        logger.setLevel(logging.WARN)
    if args.verbose:
        logger.setLevel(logging.DEBUG)

    # Get MinGW
    root_dir = root(location = args.location,
                    arch = args.arch,
                    version = args.version,
                    threading = args.threading,
                    exceptions = args.exceptions,
                    revision = args.revision,
                    log = logger)

    # Emit only the compiler's bin directory on stdout so callers can
    # capture it (all diagnostics go through the logger on stderr).
    sys.stdout.write('%s\n' % os.path.join(root_dir, 'bin'))
[ "def", "main", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'Downloads a specific version of MinGW'", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "parser", ".", "add_argument", "(", ...
https://github.com/google/llvm-propeller/blob/45c226984fe8377ebfb2ad7713c680d652ba678d/llvm/utils/benchmark/mingw.py#L261-L307
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/default_gradient.py
python
get_zeros_dtype
(t)
return t.dtype
Return the dtype for the default gradient for a Tensor.
Return the dtype for the default gradient for a Tensor.
[ "Return", "the", "dtype", "for", "the", "default", "gradient", "for", "a", "Tensor", "." ]
def get_zeros_dtype(t):
  """Return the dtype for the default gradient for a Tensor."""
  if t.dtype == dtypes.resource:
    # For resource tensors the value dtype lives in the handle metadata;
    # it is only trusted when exactly one shape-and-type entry is set.
    handle_data = resource_variable_ops.get_eager_safe_handle_data(t)
    if (handle_data is None or not handle_data.is_set
        or len(handle_data.shape_and_type) != 1):
      # TODO(srbs): Ideally we should raise an error here but returning float32
      # for backwards compatibility.
      return dtypes.float32
    else:
      return handle_data.shape_and_type[0].dtype
  # Non-resource tensors: the gradient dtype is just the tensor's dtype.
  return t.dtype
[ "def", "get_zeros_dtype", "(", "t", ")", ":", "if", "t", ".", "dtype", "==", "dtypes", ".", "resource", ":", "handle_data", "=", "resource_variable_ops", ".", "get_eager_safe_handle_data", "(", "t", ")", "if", "(", "handle_data", "is", "None", "or", "not", ...
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/default_gradient.py#L26-L37
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/eager/wrap_function.py
python
_filter_returned_ops
(fn)
return wrap_and_filter_returned_ops, returned_ops
Filtering out any ops returned by function. Args: fn: a function Returns: A tuple of ( Wrapped function that returns `None` in place of any ops, dict that maps the index in the flat output structure to the returned op )
Filtering out any ops returned by function.
[ "Filtering", "out", "any", "ops", "returned", "by", "function", "." ]
def _filter_returned_ops(fn):
  """Filtering out any ops returned by function.

  Args:
    fn: a function

  Returns:
    A tuple of (
      Wrapped function that returns `None` in place of any ops,
      dict that maps the index in the flat output structure to the returned op
    )
  """
  returned_ops = {}

  def wrap_and_filter_returned_ops(*args, **kwargs):
    outputs = fn(*args, **kwargs)
    flat_outputs = nest.flatten(outputs)
    # Replace each returned Operation with None, recording it by its flat
    # index so callers can recover it from `returned_ops`.
    # (enumerate instead of range(len(...)): same behavior, idiomatic.)
    for n, output in enumerate(flat_outputs):
      if isinstance(output, ops.Operation):
        returned_ops[n] = output
        flat_outputs[n] = None
    return nest.pack_sequence_as(outputs, flat_outputs)

  return wrap_and_filter_returned_ops, returned_ops
[ "def", "_filter_returned_ops", "(", "fn", ")", ":", "returned_ops", "=", "{", "}", "def", "wrap_and_filter_returned_ops", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "outputs", "=", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "flat_o...
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/eager/wrap_function.py#L364-L388
nvdla/sw
79538ba1b52b040a4a4645f630e457fa01839e90
umd/external/protobuf-2.6/python/google/protobuf/text_format.py
python
_Tokenizer.ConsumeFloat
(self)
return result
Consumes an floating point number. Returns: The number parsed. Raises: ParseError: If a floating point number couldn't be consumed.
Consumes an floating point number.
[ "Consumes", "an", "floating", "point", "number", "." ]
def ConsumeFloat(self):
    """Consumes an floating point number.

    Returns:
      The number parsed.

    Raises:
      ParseError: If a floating point number couldn't be consumed.
    """
    try:
      result = ParseFloat(self.token)
    except ValueError as e:
      # "except X as e" replaces the legacy "except X, e" form: it is valid
      # on Python 2.6+ and is the only form accepted by Python 3, where the
      # comma syntax is a SyntaxError.
      raise self._ParseError(str(e))
    self.NextToken()
    return result
[ "def", "ConsumeFloat", "(", "self", ")", ":", "try", ":", "result", "=", "ParseFloat", "(", "self", ".", "token", ")", "except", "ValueError", ",", "e", ":", "raise", "self", ".", "_ParseError", "(", "str", "(", "e", ")", ")", "self", ".", "NextToken...
https://github.com/nvdla/sw/blob/79538ba1b52b040a4a4645f630e457fa01839e90/umd/external/protobuf-2.6/python/google/protobuf/text_format.py#L625-L639
facebook/fbthrift
fb9c8562aba04c4fd9b17716eb5d970cc88a75bb
thrift/lib/py/util/__init__.py
python
__map_to_dict
(amap, type_args, defaults: bool=False)
return dict(zip(keys, values))
Given a python dictionary, potentially containing Thrift Structs, convert it into a dict :param amap: a map :param defaults: return default values :return: Dict
Given a python dictionary, potentially containing Thrift Structs, convert it into a dict :param amap: a map :param defaults: return default values :return: Dict
[ "Given", "a", "python", "dictionary", "potentially", "containing", "Thrift", "Structs", "convert", "it", "into", "a", "dict", ":", "param", "amap", ":", "a", "map", ":", "param", "defaults", ":", "return", "default", "values", ":", "return", ":", "Dict" ]
def __map_to_dict(amap, type_args, defaults: bool=False):
    """
    Given a python dictionary, potentially containing Thrift Structs,
    convert it into a dict
    :param amap: a map
    :param defaults: return default values
    :return: Dict
    """
    # Empty or None maps pass through untouched.
    if not amap:
        return amap
    # Convert keys and values separately: the first two type args describe
    # the key type, the next two describe the value type.
    raw_keys = tuple(amap.keys())
    raw_values = tuple(amap.values())
    conv_keys = __list_to_dict(raw_keys, type_args[:2], defaults=defaults)
    conv_values = __list_to_dict(raw_values, type_args[2:4], defaults=defaults)
    return dict(zip(conv_keys, conv_values))
[ "def", "__map_to_dict", "(", "amap", ",", "type_args", ",", "defaults", ":", "bool", "=", "False", ")", ":", "if", "not", "amap", ":", "return", "amap", "keys", ",", "values", "=", "zip", "(", "*", "amap", ".", "items", "(", ")", ")", "keys", "=", ...
https://github.com/facebook/fbthrift/blob/fb9c8562aba04c4fd9b17716eb5d970cc88a75bb/thrift/lib/py/util/__init__.py#L177-L193
MegEngine/MegEngine
ce9ad07a27ec909fb8db4dd67943d24ba98fb93a
imperative/python/megengine/functional/elemwise.py
python
cos
(x)
return _elwise(x, mode=Elemwise.Mode.COS)
r"""Element-wise `cosine`. Examples: .. testcode:: import numpy as np from megengine import tensor import megengine.functional as F x = tensor(np.arange(0, 6, dtype=np.float32).reshape(2, 3)) out = F.cos(x) print(out.numpy().round(decimals=4)) Outputs: .. testoutput:: [[ 1. 0.5403 -0.4161] [-0.99 -0.6536 0.2837]]
r"""Element-wise `cosine`.
[ "r", "Element", "-", "wise", "cosine", "." ]
def cos(x):
    r"""Element-wise `cosine`.

    Examples:

    .. testcode::

        import numpy as np
        from megengine import tensor
        import megengine.functional as F

        x = tensor(np.arange(0, 6, dtype=np.float32).reshape(2, 3))
        out = F.cos(x)
        print(out.numpy().round(decimals=4))

    Outputs:

    .. testoutput::

        [[ 1.      0.5403 -0.4161]
         [-0.99   -0.6536  0.2837]]
    """
    # Dispatch to the generic elementwise kernel with the COS mode.
    mode = Elemwise.Mode.COS
    return _elwise(x, mode=mode)
[ "def", "cos", "(", "x", ")", ":", "return", "_elwise", "(", "x", ",", "mode", "=", "Elemwise", ".", "Mode", ".", "COS", ")" ]
https://github.com/MegEngine/MegEngine/blob/ce9ad07a27ec909fb8db4dd67943d24ba98fb93a/imperative/python/megengine/functional/elemwise.py#L315-L337
CGRU/cgru
1881a4128530e3d31ac6c25314c18314fc50c2c7
afanasy/python/af.py
python
Block.setHostsMask
(self, value)
Missing DocString :param value: :return:
Missing DocString
[ "Missing", "DocString" ]
def setHostsMask(self, value):
    """Set the "hosts_mask" entry of the block data.

    :param value: hosts mask pattern; stored only if ``checkRegExp``
        accepts it, otherwise the call is a no-op.
    :return: None
    """
    # Guard clause: silently ignore values that fail the regexp check.
    if not checkRegExp(value):
        return
    self.data["hosts_mask"] = value
[ "def", "setHostsMask", "(", "self", ",", "value", ")", ":", "if", "checkRegExp", "(", "value", ")", ":", "self", ".", "data", "[", "\"hosts_mask\"", "]", "=", "value" ]
https://github.com/CGRU/cgru/blob/1881a4128530e3d31ac6c25314c18314fc50c2c7/afanasy/python/af.py#L451-L458
snap-stanford/snap-python
d53c51b0a26aa7e3e7400b014cdf728948fde80a
setup/snap.py
python
PNEANet.IntAttrValueEI
(self, *args)
return _snap.PNEANet_IntAttrValueEI(self, *args)
IntAttrValueEI(PNEANet self, TInt EId, TIntV Values) Parameters: EId: TInt const & Values: TIntV & IntAttrValueEI(PNEANet self, TInt EId, TStrIntPrH::TIter EdgeHI, TIntV Values) Parameters: EId: TInt const & EdgeHI: TStrIntPrH::TIter Values: TIntV &
IntAttrValueEI(PNEANet self, TInt EId, TIntV Values)
[ "IntAttrValueEI", "(", "PNEANet", "self", "TInt", "EId", "TIntV", "Values", ")" ]
def IntAttrValueEI(self, *args):
    """
    IntAttrValueEI(PNEANet self, TInt EId, TIntV Values)

    Parameters:
      EId: TInt const &
      Values: TIntV &

    IntAttrValueEI(PNEANet self, TInt EId, TStrIntPrH::TIter EdgeHI, TIntV Values)

    Parameters:
      EId: TInt const &
      EdgeHI: TStrIntPrH::TIter
      Values: TIntV &

    """
    # SWIG-generated wrapper: overload resolution happens in the native
    # _snap extension based on the actual argument types.
    return _snap.PNEANet_IntAttrValueEI(self, *args)
[ "def", "IntAttrValueEI", "(", "self", ",", "*", "args", ")", ":", "return", "_snap", ".", "PNEANet_IntAttrValueEI", "(", "self", ",", "*", "args", ")" ]
https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L23524-L23540
google/llvm-propeller
45c226984fe8377ebfb2ad7713c680d652ba678d
clang/bindings/python/clang/cindex.py
python
Token.location
(self)
return conf.lib.clang_getTokenLocation(self._tu, self)
The SourceLocation this Token occurs at.
The SourceLocation this Token occurs at.
[ "The", "SourceLocation", "this", "Token", "occurs", "at", "." ]
def location(self):
    """The SourceLocation this Token occurs at."""
    # Resolved by libclang from the token and its owning translation
    # unit handle (self._tu).
    return conf.lib.clang_getTokenLocation(self._tu, self)
[ "def", "location", "(", "self", ")", ":", "return", "conf", ".", "lib", ".", "clang_getTokenLocation", "(", "self", ".", "_tu", ",", "self", ")" ]
https://github.com/google/llvm-propeller/blob/45c226984fe8377ebfb2ad7713c680d652ba678d/clang/bindings/python/clang/cindex.py#L3300-L3302
BlzFans/wke
b0fa21158312e40c5fbd84682d643022b6c34a93
cygwin/lib/python2.6/decimal.py
python
Decimal.__rmod__
(self, other, context=None)
return other.__mod__(self, context=context)
Swaps self/other and returns __mod__.
Swaps self/other and returns __mod__.
[ "Swaps", "self", "/", "other", "and", "returns", "__mod__", "." ]
def __rmod__(self, other, context=None):
    """Swaps self/other and returns __mod__."""
    # Coerce the left operand to Decimal; propagate NotImplemented so
    # Python can try other fallbacks.
    converted = _convert_other(other)
    if converted is NotImplemented:
        return converted
    return converted.__mod__(self, context=context)
[ "def", "__rmod__", "(", "self", ",", "other", ",", "context", "=", "None", ")", ":", "other", "=", "_convert_other", "(", "other", ")", "if", "other", "is", "NotImplemented", ":", "return", "other", "return", "other", ".", "__mod__", "(", "self", ",", ...
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/decimal.py#L1390-L1395
apache/parquet-cpp
642da055adf009652689b20e68a198cffb857651
build-support/cpplint.py
python
FileInfo.IsSource
(self)
return self.Extension()[1:] in ('c', 'cc', 'cpp', 'cxx')
File has a source file extension.
File has a source file extension.
[ "File", "has", "a", "source", "file", "extension", "." ]
def IsSource(self):
    """File has a source file extension."""
    # Strip the leading dot and compare against the C/C++
    # implementation-file suffixes.
    suffix = self.Extension()[1:]
    return suffix in ('c', 'cc', 'cpp', 'cxx')
[ "def", "IsSource", "(", "self", ")", ":", "return", "self", ".", "Extension", "(", ")", "[", "1", ":", "]", "in", "(", "'c'", ",", "'cc'", ",", "'cpp'", ",", "'cxx'", ")" ]
https://github.com/apache/parquet-cpp/blob/642da055adf009652689b20e68a198cffb857651/build-support/cpplint.py#L1059-L1061
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scikit-learn/py2/sklearn/linear_model/base.py
python
LinearRegression.residues_
(self)
return self._residues
Get the residues of the fitted model.
Get the residues of the fitted model.
[ "Get", "the", "residues", "of", "the", "fitted", "model", "." ]
def residues_(self):
    """Get the residues of the fitted model."""
    # Plain accessor for the internally stored residues value.
    return self._residues
[ "def", "residues_", "(", "self", ")", ":", "return", "self", ".", "_residues" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py2/sklearn/linear_model/base.py#L483-L485
may0324/DeepCompression-caffe
0aff6c1287bda4cfc7f378ed8a16524e1afabd8c
python/caffe/detector.py
python
Detector.configure_crop
(self, context_pad)
Configure crop dimensions and amount of context for cropping. If context is included, make the special input mean for context padding. Parameters ---------- context_pad : amount of context for cropping.
Configure crop dimensions and amount of context for cropping. If context is included, make the special input mean for context padding.
[ "Configure", "crop", "dimensions", "and", "amount", "of", "context", "for", "cropping", ".", "If", "context", "is", "included", "make", "the", "special", "input", "mean", "for", "context", "padding", "." ]
def configure_crop(self, context_pad): """ Configure crop dimensions and amount of context for cropping. If context is included, make the special input mean for context padding. Parameters ---------- context_pad : amount of context for cropping. """ # crop dimensions in_ = self.inputs[0] tpose = self.transformer.transpose[in_] inv_tpose = [tpose[t] for t in tpose] self.crop_dims = np.array(self.blobs[in_].data.shape[1:])[inv_tpose] #.transpose(inv_tpose) # context padding self.context_pad = context_pad if self.context_pad: in_ = self.inputs[0] transpose = self.transformer.transpose.get(in_) channel_order = self.transformer.channel_swap.get(in_) raw_scale = self.transformer.raw_scale.get(in_) # Padding context crops needs the mean in unprocessed input space. mean = self.transformer.mean.get(in_) if mean is not None: inv_transpose = [transpose[t] for t in transpose] crop_mean = mean.copy().transpose(inv_transpose) if channel_order is not None: channel_order_inverse = [channel_order.index(i) for i in range(crop_mean.shape[2])] crop_mean = crop_mean[:, :, channel_order_inverse] if raw_scale is not None: crop_mean /= raw_scale self.crop_mean = crop_mean else: self.crop_mean = np.zeros(self.crop_dims, dtype=np.float32)
[ "def", "configure_crop", "(", "self", ",", "context_pad", ")", ":", "# crop dimensions", "in_", "=", "self", ".", "inputs", "[", "0", "]", "tpose", "=", "self", ".", "transformer", ".", "transpose", "[", "in_", "]", "inv_tpose", "=", "[", "tpose", "[", ...
https://github.com/may0324/DeepCompression-caffe/blob/0aff6c1287bda4cfc7f378ed8a16524e1afabd8c/python/caffe/detector.py#L181-L216
potassco/clingo
e0c91d8f95cc28de1c480a871f9c97c30de83d40
libpyclingo/clingo/ast.py
python
TheoryUnparsedTerm
(location: Location, elements: Sequence[AST])
return AST(p_ast[0])
Construct an AST node of type `ASTType.TheoryUnparsedTerm`.
Construct an AST node of type `ASTType.TheoryUnparsedTerm`.
[ "Construct", "an", "AST", "node", "of", "type", "ASTType", ".", "TheoryUnparsedTerm", "." ]
def TheoryUnparsedTerm(location: Location, elements: Sequence[AST]) -> AST:
    '''
    Construct an AST node of type `ASTType.TheoryUnparsedTerm`.
    '''
    # Output slot the C library fills with the newly built node.
    p_ast = _ffi.new('clingo_ast_t**')
    c_location = _c_location(location)
    # _handle_error raises if the underlying clingo call reports failure.
    _handle_error(_lib.clingo_ast_build(
        _lib.clingo_ast_type_theory_unparsed_term, p_ast,
        c_location[0],
        # Elements are passed as a C array of their underlying handles
        # plus an explicit length.
        _ffi.new('clingo_ast_t*[]', [ x._rep for x in elements ]),
        _ffi.cast('size_t', len(elements))))
    return AST(p_ast[0])
[ "def", "TheoryUnparsedTerm", "(", "location", ":", "Location", ",", "elements", ":", "Sequence", "[", "AST", "]", ")", "->", "AST", ":", "p_ast", "=", "_ffi", ".", "new", "(", "'clingo_ast_t**'", ")", "c_location", "=", "_c_location", "(", "location", ")",...
https://github.com/potassco/clingo/blob/e0c91d8f95cc28de1c480a871f9c97c30de83d40/libpyclingo/clingo/ast.py#L1566-L1577
FreeCAD/FreeCAD
ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/Draft/draftguitools/gui_draft2sketch.py
python
Draft2Sketch.proceed
(self)
Proceed with the command if one object was selected.
Proceed with the command if one object was selected.
[ "Proceed", "with", "the", "command", "if", "one", "object", "was", "selected", "." ]
def proceed(self):
    """Proceed with the command if one object was selected."""
    sel = Gui.Selection.getSelection()
    # Classify the selection: all sketches, all Draft-convertible objects,
    # or a mix (handled object-by-object below).
    allSketches = True
    allDraft = True
    Gui.addModule("Draft")
    for obj in sel:
        if obj.isDerivedFrom("Sketcher::SketchObject"):
            allDraft = False
        elif (obj.isDerivedFrom("Part::Part2DObjectPython")
              or obj.isDerivedFrom("Part::Feature")):
            allSketches = False
        else:
            allDraft = False
            allSketches = False
    if not sel:
        # Nothing selected: nothing to do.
        return
    elif allDraft:
        # Whole selection converts to a single sketch in one command.
        _cmd = "Draft.makeSketch"
        _cmd += "("
        _cmd += "FreeCADGui.Selection.getSelection(), "
        _cmd += "autoconstraints=True"
        _cmd += ")"
        _cmd_list = ['sk = ' + _cmd,
                     'FreeCAD.ActiveDocument.recompute()']
        self.commit(translate("draft", "Convert to Sketch"),
                    _cmd_list)
    elif allSketches:
        # Every selected object is a sketch: draftify each one.
        n = 0
        _cmd_list = list()
        for o in sel:
            _cmd = "Draft.draftify"
            _cmd += "("
            _cmd += "FreeCAD.ActiveDocument." + o.Name + ", "
            _cmd += "delete=False"
            _cmd += ")"
            _cmd_list.append("df" + str(n) + " = " + _cmd)
            n += 1
        _cmd_list.append('FreeCAD.ActiveDocument.recompute()')
        self.commit(translate("draft", "Convert to Draft"),
                    _cmd_list)
    else:
        # Mixed selection: convert each object in the direction that
        # applies to it (sketch -> Draft, Draft/Part -> sketch).
        _cmd_list = list()
        n = 0
        for obj in sel:
            _cmd_df = "Draft.draftify"
            _cmd_df += "("
            _cmd_df += "FreeCAD.ActiveDocument." + obj.Name + ", "
            _cmd_df += "delete=False"
            _cmd_df += ")"
            _cmd_sk = "Draft.makeSketch"
            _cmd_sk += "("
            _cmd_sk += "FreeCAD.ActiveDocument." + obj.Name + ", "
            _cmd_sk += "autoconstraints=True"
            _cmd_sk += ")"
            if obj.isDerivedFrom("Sketcher::SketchObject"):
                _cmd_list.append("obj" + str(n) + " = " + _cmd_df)
            elif (obj.isDerivedFrom("Part::Part2DObjectPython")
                  or obj.isDerivedFrom("Part::Feature")):
                _cmd_list.append("obj" + str(n) + " = " + _cmd_sk)
            #elif obj.isDerivedFrom("Part::Feature"):
            #    # if (len(obj.Shape.Wires) == 1
            #    #     or len(obj.Shape.Edges) == 1):
            #    _cmd_list.append("obj" + str(n) + " = " + _cmd_sk)
            n += 1
        _cmd_list.append('FreeCAD.ActiveDocument.recompute()')
        self.commit(translate("draft", "Convert Draft/Sketch"),
                    _cmd_list)
    self.finish()
[ "def", "proceed", "(", "self", ")", ":", "sel", "=", "Gui", ".", "Selection", ".", "getSelection", "(", ")", "allSketches", "=", "True", "allDraft", "=", "True", "Gui", ".", "addModule", "(", "\"Draft\"", ")", "for", "obj", "in", "sel", ":", "if", "o...
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/draftguitools/gui_draft2sketch.py#L73-L145
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/ipython/py2/IPython/core/debugger.py
python
Tracer.__init__
(self, colors=None)
DEPRECATED Create a local debugger instance. Parameters ---------- colors : str, optional The name of the color scheme to use, it must be one of IPython's valid color schemes. If not given, the function will default to the current IPython scheme when running inside IPython, and to 'NoColor' otherwise. Examples -------- :: from IPython.core.debugger import Tracer; debug_here = Tracer() Later in your code:: debug_here() # -> will open up the debugger at that point. Once the debugger activates, you can use all of its regular commands to step through code, set breakpoints, etc. See the pdb documentation from the Python standard library for usage details.
DEPRECATED
[ "DEPRECATED" ]
def __init__(self, colors=None):
    """
    DEPRECATED

    Create a local debugger instance.

    Parameters
    ----------

    colors : str, optional
        The name of the color scheme to use, it must be one of IPython's
        valid color schemes.  If not given, the function will default to
        the current IPython scheme when running inside IPython, and to
        'NoColor' otherwise.

    Examples
    --------
    ::

        from IPython.core.debugger import Tracer; debug_here = Tracer()

    Later in your code::

        debug_here()  # -> will open up the debugger at that point.

    Once the debugger activates, you can use all of its regular commands to
    step through code, set breakpoints, etc.  See the pdb documentation
    from the Python standard library for usage details.
    """
    warnings.warn("`Tracer` is deprecated since version 5.1, directly use "
                  "`IPython.core.debugger.Pdb.set_trace()`",
                  DeprecationWarning, stacklevel=2)

    ip = get_ipython()
    if ip is None:
        # Outside of ipython, we set our own exception hook manually
        sys.excepthook = functools.partial(BdbQuit_excepthook,
                                           excepthook=sys.excepthook)
        def_colors = 'NoColor'
    else:
        # In ipython, we use its custom exception handler mechanism
        def_colors = ip.colors
        ip.set_custom_exc((bdb.BdbQuit,), BdbQuit_IPython_excepthook)

    if colors is None:
        colors = def_colors

    # The stdlib debugger internally uses a modified repr from the `repr`
    # module, that limits the length of printed strings to a hardcoded
    # limit of 30 characters.  That much trimming is too aggressive, let's
    # at least raise that limit to 80 chars, which should be enough for
    # most interactive uses.
    try:
        try:
            from reprlib import aRepr  # Py 3
        except ImportError:
            from repr import aRepr  # Py 2
        aRepr.maxstring = 80
    except:
        # This is only a user-facing convenience, so any error we encounter
        # here can be warned about but can be otherwise ignored.  These
        # printouts will tell us about problems if this API changes
        import traceback
        traceback.print_exc()

    self.debugger = Pdb(colors)
[ "def", "__init__", "(", "self", ",", "colors", "=", "None", ")", ":", "warnings", ".", "warn", "(", "\"`Tracer` is deprecated since version 5.1, directly use \"", "\"`IPython.core.debugger.Pdb.set_trace()`\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", ...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/ipython/py2/IPython/core/debugger.py#L102-L167
yue/yue
619d62c191b13c51c01be451dc48917c34a5aefc
building/tools/cpplint.py
python
_SetQuiet
(quiet)
return _cpplint_state.SetQuiet(quiet)
Set the module's quiet status, and return previous setting.
Set the module's quiet status, and return previous setting.
[ "Set", "the", "module", "s", "quiet", "status", "and", "return", "previous", "setting", "." ]
def _SetQuiet(quiet): """Set the module's quiet status, and return previous setting.""" return _cpplint_state.SetQuiet(quiet)
[ "def", "_SetQuiet", "(", "quiet", ")", ":", "return", "_cpplint_state", ".", "SetQuiet", "(", "quiet", ")" ]
https://github.com/yue/yue/blob/619d62c191b13c51c01be451dc48917c34a5aefc/building/tools/cpplint.py#L970-L972
facebook/watchman
0917460c71b000b96be9b9575d77f06f2f6053bb
watchman/python/pywatchman/__init__.py
python
Transport.write
(self, buf)
write some data
write some data
[ "write", "some", "data" ]
def write(self, buf): """write some data""" raise NotImplementedError()
[ "def", "write", "(", "self", ",", "buf", ")", ":", "raise", "NotImplementedError", "(", ")" ]
https://github.com/facebook/watchman/blob/0917460c71b000b96be9b9575d77f06f2f6053bb/watchman/python/pywatchman/__init__.py#L220-L222
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_misc.py
python
IsStockID
(*args, **kwargs)
return _misc_.IsStockID(*args, **kwargs)
IsStockID(int id) -> bool
IsStockID(int id) -> bool
[ "IsStockID", "(", "int", "id", ")", "-", ">", "bool" ]
def IsStockID(*args, **kwargs): """IsStockID(int id) -> bool""" return _misc_.IsStockID(*args, **kwargs)
[ "def", "IsStockID", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_misc_", ".", "IsStockID", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_misc.py#L294-L296
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/AWSPythonSDK/1.5.8/dateutil/parser.py
python
parser.parse
(self, timestr, default=None, ignoretz=False, tzinfos=None, **kwargs)
Parse the date/time string into a :class:`datetime.datetime` object. :param timestr: Any date/time string using the supported formats. :param default: The default datetime object, if this is a datetime object and not ``None``, elements specified in ``timestr`` replace elements in the default object. :param ignoretz: If set ``True``, time zones in parsed strings are ignored and a naive :class:`datetime.datetime` object is returned. :param tzinfos: Additional time zone names / aliases which may be present in the string. This argument maps time zone names (and optionally offsets from those time zones) to time zones. This parameter can be a dictionary with timezone aliases mapping time zone names to time zones or a function taking two parameters (``tzname`` and ``tzoffset``) and returning a time zone. The timezones to which the names are mapped can be an integer offset from UTC in minutes or a :class:`tzinfo` object. .. doctest:: :options: +NORMALIZE_WHITESPACE >>> from dateutil.parser import parse >>> from dateutil.tz import gettz >>> tzinfos = {"BRST": -10800, "CST": gettz("America/Chicago")} >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -10800)) >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) This parameter is ignored if ``ignoretz`` is set. :param **kwargs: Keyword arguments as passed to ``_parse()``. :return: Returns a :class:`datetime.datetime` object or, if the ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the first element being a :class:`datetime.datetime` object, the second a tuple containing the fuzzy tokens. :raises ValueError: Raised for invalid or unknown string format, if the provided :class:`tzinfo` is not in a valid format, or if an invalid date would be created. :raises TypeError: Raised for non-string or character stream input. 
:raises OverflowError: Raised if the parsed date exceeds the largest valid C integer on your system.
Parse the date/time string into a :class:`datetime.datetime` object.
[ "Parse", "the", "date", "/", "time", "string", "into", "a", ":", "class", ":", "datetime", ".", "datetime", "object", "." ]
def parse(self, timestr, default=None, ignoretz=False, tzinfos=None, **kwargs): """ Parse the date/time string into a :class:`datetime.datetime` object. :param timestr: Any date/time string using the supported formats. :param default: The default datetime object, if this is a datetime object and not ``None``, elements specified in ``timestr`` replace elements in the default object. :param ignoretz: If set ``True``, time zones in parsed strings are ignored and a naive :class:`datetime.datetime` object is returned. :param tzinfos: Additional time zone names / aliases which may be present in the string. This argument maps time zone names (and optionally offsets from those time zones) to time zones. This parameter can be a dictionary with timezone aliases mapping time zone names to time zones or a function taking two parameters (``tzname`` and ``tzoffset``) and returning a time zone. The timezones to which the names are mapped can be an integer offset from UTC in minutes or a :class:`tzinfo` object. .. doctest:: :options: +NORMALIZE_WHITESPACE >>> from dateutil.parser import parse >>> from dateutil.tz import gettz >>> tzinfos = {"BRST": -10800, "CST": gettz("America/Chicago")} >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -10800)) >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) This parameter is ignored if ``ignoretz`` is set. :param **kwargs: Keyword arguments as passed to ``_parse()``. :return: Returns a :class:`datetime.datetime` object or, if the ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the first element being a :class:`datetime.datetime` object, the second a tuple containing the fuzzy tokens. :raises ValueError: Raised for invalid or unknown string format, if the provided :class:`tzinfo` is not in a valid format, or if an invalid date would be created. 
:raises TypeError: Raised for non-string or character stream input. :raises OverflowError: Raised if the parsed date exceeds the largest valid C integer on your system. """ if default is None: default = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0) res, skipped_tokens = self._parse(timestr, **kwargs) if res is None: raise ValueError("Unknown string format") if len(res) == 0: raise ValueError("String does not contain a date.") repl = {} for attr in ("year", "month", "day", "hour", "minute", "second", "microsecond"): value = getattr(res, attr) if value is not None: repl[attr] = value if 'day' not in repl: # If the default day exceeds the last day of the month, fall back to # the end of the month. cyear = default.year if res.year is None else res.year cmonth = default.month if res.month is None else res.month cday = default.day if res.day is None else res.day if cday > monthrange(cyear, cmonth)[1]: repl['day'] = monthrange(cyear, cmonth)[1] ret = default.replace(**repl) if res.weekday is not None and not res.day: ret = ret+relativedelta.relativedelta(weekday=res.weekday) if not ignoretz: if (isinstance(tzinfos, collections.Callable) or tzinfos and res.tzname in tzinfos): if isinstance(tzinfos, collections.Callable): tzdata = tzinfos(res.tzname, res.tzoffset) else: tzdata = tzinfos.get(res.tzname) if isinstance(tzdata, datetime.tzinfo): tzinfo = tzdata elif isinstance(tzdata, text_type): tzinfo = tz.tzstr(tzdata) elif isinstance(tzdata, integer_types): tzinfo = tz.tzoffset(res.tzname, tzdata) else: raise ValueError("Offset must be tzinfo subclass, " "tz string, or int offset.") ret = ret.replace(tzinfo=tzinfo) elif res.tzname and res.tzname in time.tzname: ret = ret.replace(tzinfo=tz.tzlocal()) elif res.tzoffset == 0: ret = ret.replace(tzinfo=tz.tzutc()) elif res.tzoffset: ret = ret.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset)) if kwargs.get('fuzzy_with_tokens', False): return ret, skipped_tokens else: return ret
[ "def", "parse", "(", "self", ",", "timestr", ",", "default", "=", "None", ",", "ignoretz", "=", "False", ",", "tzinfos", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "default", "is", "None", ":", "default", "=", "datetime", ".", "datetime", ...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/dateutil/parser.py#L489-L615
miyosuda/TensorFlowAndroidMNIST
7b5a4603d2780a8a2834575706e9001977524007
jni-build/jni/include/tensorflow/python/ops/data_flow_ops.py
python
QueueBase.queue_ref
(self)
return self._queue_ref
The underlying queue reference.
The underlying queue reference.
[ "The", "underlying", "queue", "reference", "." ]
def queue_ref(self): """The underlying queue reference.""" return self._queue_ref
[ "def", "queue_ref", "(", "self", ")", ":", "return", "self", ".", "_queue_ref" ]
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/python/ops/data_flow_ops.py#L196-L198
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/Blast/3rdParty/assimp/port/PyAssimp/pyassimp/helper.py
python
vec2tuple
(x)
return (x.x, x.y, x.z)
Converts a VECTOR3D to a Tuple
Converts a VECTOR3D to a Tuple
[ "Converts", "a", "VECTOR3D", "to", "a", "Tuple" ]
def vec2tuple(x): """ Converts a VECTOR3D to a Tuple """ return (x.x, x.y, x.z)
[ "def", "vec2tuple", "(", "x", ")", ":", "return", "(", "x", ".", "x", ",", "x", ".", "y", ",", "x", ".", "z", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/Blast/3rdParty/assimp/port/PyAssimp/pyassimp/helper.py#L55-L57
benoitsteiner/tensorflow-opencl
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
tensorflow/python/eager/execution_callbacks.py
python
inf_nan_callback
(op_type, op_name, attrs, inputs, outputs, check_inf=True, check_nan=True, action=_DEFAULT_CALLBACK_ACTION)
An execution callback that checks for `inf`s and `nan`s in output tensors. This callback can be used with `tfe.add_execute_callback` to check for invalid numeric values. E.g., ```python tfe.add_execute_callback(tfe.inf_nan_callback) ``` Args: op_type: Name of the TFE operation type (e.g., `MatMul`). op_name: Name of the TFE operation. This name is set by client and can be `None` if it unset. attrs: Attributes of the TFE operation, as a tuple of alternating attribute names and attribute values. inputs: The `list` of input tensors to the operation, currently unused by this callback. outputs: The `list` of output tensors from the operation, checked by this callback for `inf` and `nan` values. check_inf: (`bool`) Whether this callback should check for `inf` values in the output tensor values. check_nan: (`bool`) Whether this callback should check for `nan` values in the output tensor values. action: (`str`) Action to be taken by the callback when `inf` or `nan` values are detected. Possible values {"raise", "warn", "print"} `"raise"`: Raise a `InfOrNanError`. `"warn"`: Log a warning using `tf.logging.warn`. `"print"`: Print a message to `sys.stdout`. Raises: InfOrNanError: iff `inf` or `nan` values are seen in any of `outputs` and `action` is `"raise"`. ValueError: iff the value of `action` is invalid.
An execution callback that checks for `inf`s and `nan`s in output tensors.
[ "An", "execution", "callback", "that", "checks", "for", "inf", "s", "and", "nan", "s", "in", "output", "tensors", "." ]
def inf_nan_callback(op_type, op_name, attrs, inputs, outputs, check_inf=True, check_nan=True, action=_DEFAULT_CALLBACK_ACTION): """An execution callback that checks for `inf`s and `nan`s in output tensors. This callback can be used with `tfe.add_execute_callback` to check for invalid numeric values. E.g., ```python tfe.add_execute_callback(tfe.inf_nan_callback) ``` Args: op_type: Name of the TFE operation type (e.g., `MatMul`). op_name: Name of the TFE operation. This name is set by client and can be `None` if it unset. attrs: Attributes of the TFE operation, as a tuple of alternating attribute names and attribute values. inputs: The `list` of input tensors to the operation, currently unused by this callback. outputs: The `list` of output tensors from the operation, checked by this callback for `inf` and `nan` values. check_inf: (`bool`) Whether this callback should check for `inf` values in the output tensor values. check_nan: (`bool`) Whether this callback should check for `nan` values in the output tensor values. action: (`str`) Action to be taken by the callback when `inf` or `nan` values are detected. Possible values {"raise", "warn", "print"} `"raise"`: Raise a `InfOrNanError`. `"warn"`: Log a warning using `tf.logging.warn`. `"print"`: Print a message to `sys.stdout`. Raises: InfOrNanError: iff `inf` or `nan` values are seen in any of `outputs` and `action` is `"raise"`. ValueError: iff the value of `action` is invalid. """ del attrs, inputs # Not used. ctx = context.get_default_context() for index, output in enumerate(outputs): if not output.dtype.is_numpy_compatible: continue numpy_dtype = output.dtype.as_numpy_dtype if (np.issubdtype(numpy_dtype, np.float) or np.issubdtype(numpy_dtype, np.complex) or np.issubdtype(numpy_dtype, np.integer)): try: check_numerics_op_attrs = ( "message", "Eager-mode inf/nan check", "T", outputs[0].dtype.as_datatype_enum) # TODO(cais): Consider moving this into execute.py. 
# pylint: disable=protected-access pywrap_tensorflow.TFE_Py_Execute( ctx._handle, output.device, "CheckNumerics", [output], check_numerics_op_attrs, 1) # pylint: enable=protected-access except core._NotOkStatusException: # pylint: disable=protected-access value = output.numpy() inf_detected = np.any(np.isinf(value)) and check_inf nan_detected = np.any(np.isnan(value)) and check_nan if not inf_detected and not nan_detected: continue error = InfOrNanError(op_type, op_name, index, len(outputs), value) if action == "print": print("Warning: %s" % str(error)) elif action == "warn": logging.warn(str(error)) elif action == "raise": raise error else: raise ValueError( "Invalid action for inf_nan_callback: %s. Valid actions are: " "{print | warn | raise}" % action)
[ "def", "inf_nan_callback", "(", "op_type", ",", "op_name", ",", "attrs", ",", "inputs", ",", "outputs", ",", "check_inf", "=", "True", ",", "check_nan", "=", "True", ",", "action", "=", "_DEFAULT_CALLBACK_ACTION", ")", ":", "del", "attrs", ",", "inputs", "...
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/eager/execution_callbacks.py#L105-L185
ricardoquesada/Spidermonkey
4a75ea2543408bd1b2c515aa95901523eeef7858
media/webrtc/trunk/tools/gyp/pylib/gyp/xcodeproj_file.py
python
PBXGroup.TakeOverOnlyChild
(self, recurse=False)
If this PBXGroup has only one child and it's also a PBXGroup, take it over by making all of its children this object's children. This function will continue to take over only children when those children are groups. If there are three PBXGroups representing a, b, and c, with c inside b and b inside a, and a and b have no other children, this will result in a taking over both b and c, forming a PBXGroup for a/b/c. If recurse is True, this function will recurse into children and ask them to collapse themselves by taking over only children as well. Assuming an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f (d1, d2, and f are files, the rest are groups), recursion will result in a group for a/b/c containing a group for d3/e.
If this PBXGroup has only one child and it's also a PBXGroup, take it over by making all of its children this object's children.
[ "If", "this", "PBXGroup", "has", "only", "one", "child", "and", "it", "s", "also", "a", "PBXGroup", "take", "it", "over", "by", "making", "all", "of", "its", "children", "this", "object", "s", "children", "." ]
def TakeOverOnlyChild(self, recurse=False): """If this PBXGroup has only one child and it's also a PBXGroup, take it over by making all of its children this object's children. This function will continue to take over only children when those children are groups. If there are three PBXGroups representing a, b, and c, with c inside b and b inside a, and a and b have no other children, this will result in a taking over both b and c, forming a PBXGroup for a/b/c. If recurse is True, this function will recurse into children and ask them to collapse themselves by taking over only children as well. Assuming an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f (d1, d2, and f are files, the rest are groups), recursion will result in a group for a/b/c containing a group for d3/e. """ # At this stage, check that child class types are PBXGroup exactly, # instead of using isinstance. The only subclass of PBXGroup, # PBXVariantGroup, should not participate in reparenting in the same way: # reparenting by merging different object types would be wrong. while len(self._properties['children']) == 1 and \ self._properties['children'][0].__class__ == PBXGroup: # Loop to take over the innermost only-child group possible. child = self._properties['children'][0] # Assume the child's properties, including its children. Save a copy # of this object's old properties, because they'll still be needed. # This object retains its existing id and parent attributes. old_properties = self._properties self._properties = child._properties self._children_by_path = child._children_by_path if not 'sourceTree' in self._properties or \ self._properties['sourceTree'] == '<group>': # The child was relative to its parent. Fix up the path. Note that # children with a sourceTree other than "<group>" are not relative to # their parents, so no path fix-up is needed in that case. if 'path' in old_properties: if 'path' in self._properties: # Both the original parent and child have paths set. 
self._properties['path'] = posixpath.join(old_properties['path'], self._properties['path']) else: # Only the original parent has a path, use it. self._properties['path'] = old_properties['path'] if 'sourceTree' in old_properties: # The original parent had a sourceTree set, use it. self._properties['sourceTree'] = old_properties['sourceTree'] # If the original parent had a name set, keep using it. If the original # parent didn't have a name but the child did, let the child's name # live on. If the name attribute seems unnecessary now, get rid of it. if 'name' in old_properties and old_properties['name'] != None and \ old_properties['name'] != self.Name(): self._properties['name'] = old_properties['name'] if 'name' in self._properties and 'path' in self._properties and \ self._properties['name'] == self._properties['path']: del self._properties['name'] # Notify all children of their new parent. for child in self._properties['children']: child.parent = self # If asked to recurse, recurse. if recurse: for child in self._properties['children']: if child.__class__ == PBXGroup: child.TakeOverOnlyChild(recurse)
[ "def", "TakeOverOnlyChild", "(", "self", ",", "recurse", "=", "False", ")", ":", "# At this stage, check that child class types are PBXGroup exactly,", "# instead of using isinstance. The only subclass of PBXGroup,", "# PBXVariantGroup, should not participate in reparenting in the same way:...
https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/media/webrtc/trunk/tools/gyp/pylib/gyp/xcodeproj_file.py#L1334-L1402
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/agw/aui/framemanager.py
python
AuiManager.Snap
(self)
Snaps the main frame to specified position on the screen. :see: :meth:`SnapToScreen`
Snaps the main frame to specified position on the screen.
[ "Snaps", "the", "main", "frame", "to", "specified", "position", "on", "the", "screen", "." ]
def Snap(self): """ Snaps the main frame to specified position on the screen. :see: :meth:`SnapToScreen` """ snap, hAlign, vAlign, monitor = self._is_docked if not snap: return managed_window = self.GetManagedWindow() snap_pos = self.GetSnapPosition() wnd_pos = managed_window.GetPosition() snapX, snapY = self._snap_limits if abs(snap_pos.x - wnd_pos.x) < snapX and abs(snap_pos.y - wnd_pos.y) < snapY: managed_window.SetPosition(snap_pos)
[ "def", "Snap", "(", "self", ")", ":", "snap", ",", "hAlign", ",", "vAlign", ",", "monitor", "=", "self", ".", "_is_docked", "if", "not", "snap", ":", "return", "managed_window", "=", "self", ".", "GetManagedWindow", "(", ")", "snap_pos", "=", "self", "...
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/aui/framemanager.py#L10413-L10430
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/_py_abc.py
python
ABCMeta._dump_registry
(cls, file=None)
Debug helper to print the ABC registry.
Debug helper to print the ABC registry.
[ "Debug", "helper", "to", "print", "the", "ABC", "registry", "." ]
def _dump_registry(cls, file=None): """Debug helper to print the ABC registry.""" print(f"Class: {cls.__module__}.{cls.__qualname__}", file=file) print(f"Inv. counter: {get_cache_token()}", file=file) for name in cls.__dict__: if name.startswith("_abc_"): value = getattr(cls, name) if isinstance(value, WeakSet): value = set(value) print(f"{name}: {value!r}", file=file)
[ "def", "_dump_registry", "(", "cls", ",", "file", "=", "None", ")", ":", "print", "(", "f\"Class: {cls.__module__}.{cls.__qualname__}\"", ",", "file", "=", "file", ")", "print", "(", "f\"Inv. counter: {get_cache_token()}\"", ",", "file", "=", "file", ")", "for", ...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/_py_abc.py#L72-L81
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/targets/codegen.py
python
JitEngine.add_global_mapping
(self, gv, addr)
return self._ee.add_global_mapping(gv, addr)
Override ExecutionEngine.add_global_mapping to keep info about defined symbols.
Override ExecutionEngine.add_global_mapping to keep info about defined symbols.
[ "Override", "ExecutionEngine", ".", "add_global_mapping", "to", "keep", "info", "about", "defined", "symbols", "." ]
def add_global_mapping(self, gv, addr): """Override ExecutionEngine.add_global_mapping to keep info about defined symbols. """ self._defined_symbols.add(gv.name) return self._ee.add_global_mapping(gv, addr)
[ "def", "add_global_mapping", "(", "self", ",", "gv", ",", "addr", ")", ":", "self", ".", "_defined_symbols", ".", "add", "(", "gv", ".", "name", ")", "return", "self", ".", "_ee", ".", "add_global_mapping", "(", "gv", ",", "addr", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/targets/codegen.py#L601-L606
oracle/graaljs
36a56e8e993d45fc40939a3a4d9c0c24990720f1
graal-nodejs/deps/v8/tools/grokdump.py
python
InspectionShell.do_list
(self, smth)
List all available memory regions.
List all available memory regions.
[ "List", "all", "available", "memory", "regions", "." ]
def do_list(self, smth): """ List all available memory regions. """ def print_region(reader, start, size, location): print(" %s - %s (%d bytes)" % (reader.FormatIntPtr(start), reader.FormatIntPtr(start + size), size)) print("Available memory regions:") self.reader.ForEachMemoryRegion(print_region)
[ "def", "do_list", "(", "self", ",", "smth", ")", ":", "def", "print_region", "(", "reader", ",", "start", ",", "size", ",", "location", ")", ":", "print", "(", "\" %s - %s (%d bytes)\"", "%", "(", "reader", ".", "FormatIntPtr", "(", "start", ")", ",", ...
https://github.com/oracle/graaljs/blob/36a56e8e993d45fc40939a3a4d9c0c24990720f1/graal-nodejs/deps/v8/tools/grokdump.py#L3674-L3683
lmb-freiburg/flownet2
b92e198b56b0e52e1ba0a5a98dc0e39fa5ae70cc
scripts/cpp_lint.py
python
Search
(pattern, s)
return _regexp_compile_cache[pattern].search(s)
Searches the string for the pattern, caching the compiled regexp.
Searches the string for the pattern, caching the compiled regexp.
[ "Searches", "the", "string", "for", "the", "pattern", "caching", "the", "compiled", "regexp", "." ]
def Search(pattern, s): """Searches the string for the pattern, caching the compiled regexp.""" if pattern not in _regexp_compile_cache: _regexp_compile_cache[pattern] = sre_compile.compile(pattern) return _regexp_compile_cache[pattern].search(s)
[ "def", "Search", "(", "pattern", ",", "s", ")", ":", "if", "pattern", "not", "in", "_regexp_compile_cache", ":", "_regexp_compile_cache", "[", "pattern", "]", "=", "sre_compile", ".", "compile", "(", "pattern", ")", "return", "_regexp_compile_cache", "[", "pat...
https://github.com/lmb-freiburg/flownet2/blob/b92e198b56b0e52e1ba0a5a98dc0e39fa5ae70cc/scripts/cpp_lint.py#L543-L547