nwo
stringlengths
5
86
sha
stringlengths
40
40
path
stringlengths
4
189
language
stringclasses
1 value
identifier
stringlengths
1
94
parameters
stringlengths
2
4.03k
argument_list
stringclasses
1 value
return_statement
stringlengths
0
11.5k
docstring
stringlengths
1
33.2k
docstring_summary
stringlengths
0
5.15k
docstring_tokens
list
function
stringlengths
34
151k
function_tokens
list
url
stringlengths
90
278
livecode/livecode
4606a10ea10b16d5071d0f9f263ccdd7ede8b31d
gyp/pylib/gyp/generator/ninja.py
python
NinjaWriter.WriteSourcesForArch
(self, ninja_file, config_name, config, sources, predepends, precompiled_header, spec, arch=None)
return outputs
Write build rules to compile all of |sources|.
Write build rules to compile all of |sources|.
[ "Write", "build", "rules", "to", "compile", "all", "of", "|sources|", "." ]
def WriteSourcesForArch(self, ninja_file, config_name, config, sources, predepends, precompiled_header, spec, arch=None): """Write build rules to compile all of |sources|.""" extra_defines = [] if self.flavor == 'mac': cflags = self.xcode_settings.GetCflags(config_name, arch=arch) cflags_c = self.xcode_settings.GetCflagsC(config_name) cflags_cc = self.xcode_settings.GetCflagsCC(config_name) cflags_objc = ['$cflags_c'] + \ self.xcode_settings.GetCflagsObjC(config_name) cflags_objcc = ['$cflags_cc'] + \ self.xcode_settings.GetCflagsObjCC(config_name) elif self.flavor == 'win': asmflags = self.msvs_settings.GetAsmflags(config_name) cflags = self.msvs_settings.GetCflags(config_name) cflags_c = self.msvs_settings.GetCflagsC(config_name) cflags_cc = self.msvs_settings.GetCflagsCC(config_name) extra_defines = self.msvs_settings.GetComputedDefines(config_name) # See comment at cc_command for why there's two .pdb files. pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName( config_name, self.ExpandSpecial) if not pdbpath_c: obj = 'obj' if self.toolset != 'target': obj += '.' + self.toolset pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name)) pdbpath_c = pdbpath + '.c.pdb' pdbpath_cc = pdbpath + '.cc.pdb' self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c]) self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc]) self.WriteVariableList(ninja_file, 'pchprefix', [self.name]) else: cflags = config.get('cflags', []) cflags_c = config.get('cflags_c', []) cflags_cc = config.get('cflags_cc', []) # Respect environment variables related to build, but target-specific # flags can still override them. 
if self.toolset == 'target': cflags_c = (os.environ.get('CPPFLAGS', '').split() + os.environ.get('CFLAGS', '').split() + cflags_c) cflags_cc = (os.environ.get('CPPFLAGS', '').split() + os.environ.get('CXXFLAGS', '').split() + cflags_cc) defines = config.get('defines', []) + extra_defines self.WriteVariableList(ninja_file, 'defines', [Define(d, self.flavor) for d in defines]) if self.flavor == 'win': self.WriteVariableList(ninja_file, 'asmflags', map(self.ExpandSpecial, asmflags)) self.WriteVariableList(ninja_file, 'rcflags', [QuoteShellArgument(self.ExpandSpecial(f), self.flavor) for f in self.msvs_settings.GetRcflags(config_name, self.GypPathToNinja)]) include_dirs = config.get('include_dirs', []) env = self.GetToolchainEnv() if self.flavor == 'win': include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs, config_name) self.WriteVariableList(ninja_file, 'includes', [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor) for i in include_dirs]) if self.flavor == 'win': midl_include_dirs = config.get('midl_include_dirs', []) midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs( midl_include_dirs, config_name) self.WriteVariableList(ninja_file, 'midl_includes', [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor) for i in midl_include_dirs]) pch_commands = precompiled_header.GetPchBuildCommands(arch) if self.flavor == 'mac': # Most targets use no precompiled headers, so only write these if needed. 
for ext, var in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'), ('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]: include = precompiled_header.GetInclude(ext, arch) if include: ninja_file.variable(var, include) arflags = config.get('arflags', []) self.WriteVariableList(ninja_file, 'cflags', map(self.ExpandSpecial, cflags)) self.WriteVariableList(ninja_file, 'cflags_c', map(self.ExpandSpecial, cflags_c)) self.WriteVariableList(ninja_file, 'cflags_cc', map(self.ExpandSpecial, cflags_cc)) if self.flavor == 'mac': self.WriteVariableList(ninja_file, 'cflags_objc', map(self.ExpandSpecial, cflags_objc)) self.WriteVariableList(ninja_file, 'cflags_objcc', map(self.ExpandSpecial, cflags_objcc)) self.WriteVariableList(ninja_file, 'arflags', map(self.ExpandSpecial, arflags)) ninja_file.newline() outputs = [] has_rc_source = False for source in sources: filename, ext = os.path.splitext(source) ext = ext[1:] obj_ext = self.obj_ext if ext in ('cc', 'cpp', 'cxx'): command = 'cxx' self.uses_cpp = True elif ext == 'c' or (ext == 'S' and self.flavor != 'win'): command = 'cc' elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files. command = 'cc_s' elif (self.flavor == 'win' and ext == 'asm' and not self.msvs_settings.HasExplicitAsmRules(spec)): command = 'asm' # Add the _asm suffix as msvs is capable of handling .cc and # .asm files of the same name without collision. obj_ext = '_asm.obj' elif self.flavor == 'mac' and ext == 'm': command = 'objc' elif self.flavor == 'mac' and ext == 'mm': command = 'objcxx' self.uses_cpp = True elif self.flavor == 'win' and ext == 'rc': command = 'rc' obj_ext = '.res' has_rc_source = True else: # Ignore unhandled extensions. 
continue input = self.GypPathToNinja(source) output = self.GypPathToUniqueOutput(filename + obj_ext) if arch is not None: output = AddArch(output, arch) implicit = precompiled_header.GetObjDependencies([input], [output], arch) variables = [] if self.flavor == 'win': variables, output, implicit = precompiled_header.GetFlagsModifications( input, output, implicit, command, cflags_c, cflags_cc, self.ExpandSpecial) ninja_file.build(output, command, input, implicit=[gch for _, _, gch in implicit], order_only=predepends, variables=variables) outputs.append(output) if has_rc_source: resource_include_dirs = config.get('resource_include_dirs', include_dirs) self.WriteVariableList(ninja_file, 'resource_includes', [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor) for i in resource_include_dirs]) self.WritePchTargets(ninja_file, pch_commands) ninja_file.newline() return outputs
[ "def", "WriteSourcesForArch", "(", "self", ",", "ninja_file", ",", "config_name", ",", "config", ",", "sources", ",", "predepends", ",", "precompiled_header", ",", "spec", ",", "arch", "=", "None", ")", ":", "extra_defines", "=", "[", "]", "if", "self", "....
https://github.com/livecode/livecode/blob/4606a10ea10b16d5071d0f9f263ccdd7ede8b31d/gyp/pylib/gyp/generator/ninja.py#L877-L1030
kamyu104/LeetCode-Solutions
77605708a927ea3b85aee5a479db733938c7c211
Python/design-hit-counter.py
python
HitCounter.__init__
(self)
Initialize your data structure here.
Initialize your data structure here.
[ "Initialize", "your", "data", "structure", "here", "." ]
def __init__(self): """ Initialize your data structure here. """ self.__k = 300 self.__dq = deque() self.__count = 0
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "__k", "=", "300", "self", ".", "__dq", "=", "deque", "(", ")", "self", ".", "__count", "=", "0" ]
https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/design-hit-counter.py#L8-L14
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_controls.py
python
RadioBox.GetClassDefaultAttributes
(*args, **kwargs)
return _controls_.RadioBox_GetClassDefaultAttributes(*args, **kwargs)
GetClassDefaultAttributes(int variant=WINDOW_VARIANT_NORMAL) -> VisualAttributes Get the default attributes for this class. This is useful if you want to use the same font or colour in your own control as in a standard control -- which is a much better idea than hard coding specific colours or fonts which might look completely out of place on the user's system, especially if it uses themes. The variant parameter is only relevant under Mac currently and is ignore under other platforms. Under Mac, it will change the size of the returned font. See `wx.Window.SetWindowVariant` for more about this.
GetClassDefaultAttributes(int variant=WINDOW_VARIANT_NORMAL) -> VisualAttributes
[ "GetClassDefaultAttributes", "(", "int", "variant", "=", "WINDOW_VARIANT_NORMAL", ")", "-", ">", "VisualAttributes" ]
def GetClassDefaultAttributes(*args, **kwargs): """ GetClassDefaultAttributes(int variant=WINDOW_VARIANT_NORMAL) -> VisualAttributes Get the default attributes for this class. This is useful if you want to use the same font or colour in your own control as in a standard control -- which is a much better idea than hard coding specific colours or fonts which might look completely out of place on the user's system, especially if it uses themes. The variant parameter is only relevant under Mac currently and is ignore under other platforms. Under Mac, it will change the size of the returned font. See `wx.Window.SetWindowVariant` for more about this. """ return _controls_.RadioBox_GetClassDefaultAttributes(*args, **kwargs)
[ "def", "GetClassDefaultAttributes", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_controls_", ".", "RadioBox_GetClassDefaultAttributes", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_controls.py#L2681-L2696
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/indexes/base.py
python
Index.insert
(self, loc, item)
return self._shallow_copy_with_infer(idx)
Make new Index inserting new item at location. Follows Python list.append semantics for negative values. Parameters ---------- loc : int item : object Returns ------- new_index : Index
Make new Index inserting new item at location.
[ "Make", "new", "Index", "inserting", "new", "item", "at", "location", "." ]
def insert(self, loc, item): """ Make new Index inserting new item at location. Follows Python list.append semantics for negative values. Parameters ---------- loc : int item : object Returns ------- new_index : Index """ _self = np.asarray(self) item = self._coerce_scalar_to_index(item)._ndarray_values idx = np.concatenate((_self[:loc], item, _self[loc:])) return self._shallow_copy_with_infer(idx)
[ "def", "insert", "(", "self", ",", "loc", ",", "item", ")", ":", "_self", "=", "np", ".", "asarray", "(", "self", ")", "item", "=", "self", ".", "_coerce_scalar_to_index", "(", "item", ")", ".", "_ndarray_values", "idx", "=", "np", ".", "concatenate", ...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/indexes/base.py#L4972-L4990
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_misc.py
python
TextDataObject.SetText
(*args, **kwargs)
return _misc_.TextDataObject_SetText(*args, **kwargs)
SetText(self, String text) Sets the text associated with the data object. This method is called when the data object receives the data and, by default, copies the text into the member variable. If you want to process the text on the fly you may wish to override this function (via `wx.PyTextDataObject`.)
SetText(self, String text)
[ "SetText", "(", "self", "String", "text", ")" ]
def SetText(*args, **kwargs): """ SetText(self, String text) Sets the text associated with the data object. This method is called when the data object receives the data and, by default, copies the text into the member variable. If you want to process the text on the fly you may wish to override this function (via `wx.PyTextDataObject`.) """ return _misc_.TextDataObject_SetText(*args, **kwargs)
[ "def", "SetText", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_misc_", ".", "TextDataObject_SetText", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_misc.py#L5209-L5219
natanielruiz/android-yolo
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
jni-build/jni/include/tensorflow/python/training/supervisor.py
python
Supervisor._get_first_op_from_collection
(self, key)
return None
Returns the first `Operation` from a collection. Args: key: A string collection key. Returns: The first Op found in a collection, or `None` if the collection is empty.
Returns the first `Operation` from a collection.
[ "Returns", "the", "first", "Operation", "from", "a", "collection", "." ]
def _get_first_op_from_collection(self, key): """Returns the first `Operation` from a collection. Args: key: A string collection key. Returns: The first Op found in a collection, or `None` if the collection is empty. """ try: op_list = ops.get_collection(key) if len(op_list) > 1: logging.info("Found %d %s operations. Returning the first one.", len(op_list), key) if op_list: return op_list[0] except LookupError: pass return None
[ "def", "_get_first_op_from_collection", "(", "self", ",", "key", ")", ":", "try", ":", "op_list", "=", "ops", ".", "get_collection", "(", "key", ")", "if", "len", "(", "op_list", ")", ">", "1", ":", "logging", ".", "info", "(", "\"Found %d %s operations. R...
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/python/training/supervisor.py#L339-L358
plumonito/dtslam
5994bb9cf7a11981b830370db206bceb654c085d
3rdparty/opencv-git/samples/python2/asift.py
python
affine_skew
(tilt, phi, img, mask=None)
return img, mask, Ai
affine_skew(tilt, phi, img, mask=None) -> skew_img, skew_mask, Ai Ai - is an affine transform matrix from skew_img to img
affine_skew(tilt, phi, img, mask=None) -> skew_img, skew_mask, Ai
[ "affine_skew", "(", "tilt", "phi", "img", "mask", "=", "None", ")", "-", ">", "skew_img", "skew_mask", "Ai" ]
def affine_skew(tilt, phi, img, mask=None): ''' affine_skew(tilt, phi, img, mask=None) -> skew_img, skew_mask, Ai Ai - is an affine transform matrix from skew_img to img ''' h, w = img.shape[:2] if mask is None: mask = np.zeros((h, w), np.uint8) mask[:] = 255 A = np.float32([[1, 0, 0], [0, 1, 0]]) if phi != 0.0: phi = np.deg2rad(phi) s, c = np.sin(phi), np.cos(phi) A = np.float32([[c,-s], [ s, c]]) corners = [[0, 0], [w, 0], [w, h], [0, h]] tcorners = np.int32( np.dot(corners, A.T) ) x, y, w, h = cv2.boundingRect(tcorners.reshape(1,-1,2)) A = np.hstack([A, [[-x], [-y]]]) img = cv2.warpAffine(img, A, (w, h), flags=cv2.INTER_LINEAR, borderMode=cv2.BORDER_REPLICATE) if tilt != 1.0: s = 0.8*np.sqrt(tilt*tilt-1) img = cv2.GaussianBlur(img, (0, 0), sigmaX=s, sigmaY=0.01) img = cv2.resize(img, (0, 0), fx=1.0/tilt, fy=1.0, interpolation=cv2.INTER_NEAREST) A[0] /= tilt if phi != 0.0 or tilt != 1.0: h, w = img.shape[:2] mask = cv2.warpAffine(mask, A, (w, h), flags=cv2.INTER_NEAREST) Ai = cv2.invertAffineTransform(A) return img, mask, Ai
[ "def", "affine_skew", "(", "tilt", ",", "phi", ",", "img", ",", "mask", "=", "None", ")", ":", "h", ",", "w", "=", "img", ".", "shape", "[", ":", "2", "]", "if", "mask", "is", "None", ":", "mask", "=", "np", ".", "zeros", "(", "(", "h", ","...
https://github.com/plumonito/dtslam/blob/5994bb9cf7a11981b830370db206bceb654c085d/3rdparty/opencv-git/samples/python2/asift.py#L34-L63
BlzFans/wke
b0fa21158312e40c5fbd84682d643022b6c34a93
cygwin/lib/python2.6/xml/dom/domreg.py
python
getDOMImplementation
(name = None, features = ())
getDOMImplementation(name = None, features = ()) -> DOM implementation. Return a suitable DOM implementation. The name is either well-known, the module name of a DOM implementation, or None. If it is not None, imports the corresponding module and returns DOMImplementation object if the import succeeds. If name is not given, consider the available implementations to find one with the required feature set. If no implementation can be found, raise an ImportError. The features list must be a sequence of (feature, version) pairs which are passed to hasFeature.
getDOMImplementation(name = None, features = ()) -> DOM implementation.
[ "getDOMImplementation", "(", "name", "=", "None", "features", "=", "()", ")", "-", ">", "DOM", "implementation", "." ]
def getDOMImplementation(name = None, features = ()): """getDOMImplementation(name = None, features = ()) -> DOM implementation. Return a suitable DOM implementation. The name is either well-known, the module name of a DOM implementation, or None. If it is not None, imports the corresponding module and returns DOMImplementation object if the import succeeds. If name is not given, consider the available implementations to find one with the required feature set. If no implementation can be found, raise an ImportError. The features list must be a sequence of (feature, version) pairs which are passed to hasFeature.""" import os creator = None mod = well_known_implementations.get(name) if mod: mod = __import__(mod, {}, {}, ['getDOMImplementation']) return mod.getDOMImplementation() elif name: return registered[name]() elif os.environ.has_key("PYTHON_DOM"): return getDOMImplementation(name = os.environ["PYTHON_DOM"]) # User did not specify a name, try implementations in arbitrary # order, returning the one that has the required features if isinstance(features, StringTypes): features = _parse_feature_string(features) for creator in registered.values(): dom = creator() if _good_enough(dom, features): return dom for creator in well_known_implementations.keys(): try: dom = getDOMImplementation(name = creator) except StandardError: # typically ImportError, or AttributeError continue if _good_enough(dom, features): return dom raise ImportError,"no suitable DOM implementation found"
[ "def", "getDOMImplementation", "(", "name", "=", "None", ",", "features", "=", "(", ")", ")", ":", "import", "os", "creator", "=", "None", "mod", "=", "well_known_implementations", ".", "get", "(", "name", ")", "if", "mod", ":", "mod", "=", "__import__",...
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/xml/dom/domreg.py#L39-L80
neopenx/Dragon
0e639a7319035ddc81918bd3df059230436ee0a1
Dragon/python/dragon/operators/ndarray.py
python
Concat
(inputs, axis=1, **kwargs)
return output
Concatenate the inputs along the given axis. All dimensions except specific ``axis`` should be same. Parameters ---------- inputs : list of Tensor The inputs. axis : int The axis to concatenate. Returns ------- Tensor The output tensor.
Concatenate the inputs along the given axis.
[ "Concatenate", "the", "inputs", "along", "the", "given", "axis", "." ]
def Concat(inputs, axis=1, **kwargs): """Concatenate the inputs along the given axis. All dimensions except specific ``axis`` should be same. Parameters ---------- inputs : list of Tensor The inputs. axis : int The axis to concatenate. Returns ------- Tensor The output tensor. """ CheckInputs(inputs, 1, INT_MAX) arguments = ParseArguments(locals()) arguments['num_input'] = len(inputs) output = Tensor.CreateOperator(nout=1, op_type='Concat', **arguments) if all(input.shape is not None for input in inputs): if all(input.shape[axis] is not None for input in inputs): output.shape = inputs[0].shape[:] for i in xrange(1, int(len(inputs))): output.shape[axis] += inputs[i].shape[axis] return output
[ "def", "Concat", "(", "inputs", ",", "axis", "=", "1", ",", "*", "*", "kwargs", ")", ":", "CheckInputs", "(", "inputs", ",", "1", ",", "INT_MAX", ")", "arguments", "=", "ParseArguments", "(", "locals", "(", ")", ")", "arguments", "[", "'num_input'", ...
https://github.com/neopenx/Dragon/blob/0e639a7319035ddc81918bd3df059230436ee0a1/Dragon/python/dragon/operators/ndarray.py#L216-L246
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/email/_header_value_parser.py
python
get_cfws
(value)
return cfws, value
CFWS = (1*([FWS] comment) [FWS]) / FWS
CFWS = (1*([FWS] comment) [FWS]) / FWS
[ "CFWS", "=", "(", "1", "*", "(", "[", "FWS", "]", "comment", ")", "[", "FWS", "]", ")", "/", "FWS" ]
def get_cfws(value): """CFWS = (1*([FWS] comment) [FWS]) / FWS """ cfws = CFWSList() while value and value[0] in CFWS_LEADER: if value[0] in WSP: token, value = get_fws(value) else: token, value = get_comment(value) cfws.append(token) return cfws, value
[ "def", "get_cfws", "(", "value", ")", ":", "cfws", "=", "CFWSList", "(", ")", "while", "value", "and", "value", "[", "0", "]", "in", "CFWS_LEADER", ":", "if", "value", "[", "0", "]", "in", "WSP", ":", "token", ",", "value", "=", "get_fws", "(", "...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/email/_header_value_parser.py#L1263-L1274
wywu/LAB
4b6debd302ae109fd104d4dd04dccc3418ae7471
examples/finetune_flickr_style/assemble_data.py
python
download_image
(args_tuple)
For use with multiprocessing map. Returns filename on fail.
For use with multiprocessing map. Returns filename on fail.
[ "For", "use", "with", "multiprocessing", "map", ".", "Returns", "filename", "on", "fail", "." ]
def download_image(args_tuple): "For use with multiprocessing map. Returns filename on fail." try: url, filename = args_tuple if not os.path.exists(filename): urllib.urlretrieve(url, filename) with open(filename) as f: assert hashlib.sha1(f.read()).hexdigest() != MISSING_IMAGE_SHA1 test_read_image = io.imread(filename) return True except KeyboardInterrupt: raise Exception() # multiprocessing doesn't catch keyboard exceptions except: return False
[ "def", "download_image", "(", "args_tuple", ")", ":", "try", ":", "url", ",", "filename", "=", "args_tuple", "if", "not", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "urllib", ".", "urlretrieve", "(", "url", ",", "filename", ")", "with...
https://github.com/wywu/LAB/blob/4b6debd302ae109fd104d4dd04dccc3418ae7471/examples/finetune_flickr_style/assemble_data.py#L23-L36
kushview/Element
1cc16380caa2ab79461246ba758b9de1f46db2a5
libs/lv2/lv2specgen/lv2specgen.py
python
extraInfo
(term, m)
return doc
Generate information about misc. properties of a term
Generate information about misc. properties of a term
[ "Generate", "information", "about", "misc", ".", "properties", "of", "a", "term" ]
def extraInfo(term, m): """Generate information about misc. properties of a term""" doc = "" properties = findStatements(m, term, None, None) first = True for p in sorted(properties): if isSpecial(getPredicate(p)): continue doc += '<tr><th>%s</th>\n' % getTermLink(getPredicate(p)) if isResource(getObject(p)): doc += getProperty(getTermLink(getObject(p), term, getPredicate(p)), first) elif isLiteral(getObject(p)): doc += getProperty(linkify(str(getObject(p))), first) elif isBlank(getObject(p)): doc += getProperty(str(blankNodeDesc(getObject(p), m)), first) else: doc += getProperty('?', first) #doc += endProperties(first) return doc
[ "def", "extraInfo", "(", "term", ",", "m", ")", ":", "doc", "=", "\"\"", "properties", "=", "findStatements", "(", "m", ",", "term", ",", "None", ",", "None", ")", "first", "=", "True", "for", "p", "in", "sorted", "(", "properties", ")", ":", "if",...
https://github.com/kushview/Element/blob/1cc16380caa2ab79461246ba758b9de1f46db2a5/libs/lv2/lv2specgen/lv2specgen.py#L581-L601
jsk-ros-pkg/jsk_recognition
be6e319d29797bafb10c589fdff364c3d333a605
posedetectiondb/src/ObjectProjection.py
python
ObjectProjection.CameraPoseDistSqr
(pose,posearray,rotweightsqr=0.3,angleweight=0.0)
return sum((tile(pose[4:7],(len(posearray),1))-posearray[:,4:7])**2,1)+rotweightsqr*ObjectProjection.CameraQuaternionDistSqr(pose[0:4],posearray[:,0:4],angleweight=angleweight)
distance between two poses ignoring left rotation around zaxis of posearray. Squared quaternion distance is scaled by rotweightsqr
distance between two poses ignoring left rotation around zaxis of posearray. Squared quaternion distance is scaled by rotweightsqr
[ "distance", "between", "two", "poses", "ignoring", "left", "rotation", "around", "zaxis", "of", "posearray", ".", "Squared", "quaternion", "distance", "is", "scaled", "by", "rotweightsqr" ]
def CameraPoseDistSqr(pose,posearray,rotweightsqr=0.3,angleweight=0.0): """distance between two poses ignoring left rotation around zaxis of posearray. Squared quaternion distance is scaled by rotweightsqr""" return sum((tile(pose[4:7],(len(posearray),1))-posearray[:,4:7])**2,1)+rotweightsqr*ObjectProjection.CameraQuaternionDistSqr(pose[0:4],posearray[:,0:4],angleweight=angleweight)
[ "def", "CameraPoseDistSqr", "(", "pose", ",", "posearray", ",", "rotweightsqr", "=", "0.3", ",", "angleweight", "=", "0.0", ")", ":", "return", "sum", "(", "(", "tile", "(", "pose", "[", "4", ":", "7", "]", ",", "(", "len", "(", "posearray", ")", "...
https://github.com/jsk-ros-pkg/jsk_recognition/blob/be6e319d29797bafb10c589fdff364c3d333a605/posedetectiondb/src/ObjectProjection.py#L136-L138
microsoft/checkedc-clang
a173fefde5d7877b7750e7ce96dd08cf18baebf2
llvm/bindings/python/llvm/object.py
python
Relocation.cache
(self)
Cache all cacheable properties on this instance.
Cache all cacheable properties on this instance.
[ "Cache", "all", "cacheable", "properties", "on", "this", "instance", "." ]
def cache(self): """Cache all cacheable properties on this instance.""" getattr(self, 'address') getattr(self, 'offset') getattr(self, 'symbol') getattr(self, 'type') getattr(self, 'type_name') getattr(self, 'value_string')
[ "def", "cache", "(", "self", ")", ":", "getattr", "(", "self", ",", "'address'", ")", "getattr", "(", "self", ",", "'offset'", ")", "getattr", "(", "self", ",", "'symbol'", ")", "getattr", "(", "self", ",", "'type'", ")", "getattr", "(", "self", ",",...
https://github.com/microsoft/checkedc-clang/blob/a173fefde5d7877b7750e7ce96dd08cf18baebf2/llvm/bindings/python/llvm/object.py#L417-L424
microsoft/TSS.MSR
0f2516fca2cd9929c31d5450e39301c9bde43688
TSS.Py/src/TpmTypes.py
python
FirmwareReadResponse.fromTpm
(buf)
return buf.createObj(FirmwareReadResponse)
Returns new FirmwareReadResponse object constructed from its marshaled representation in the given TpmBuffer buffer
Returns new FirmwareReadResponse object constructed from its marshaled representation in the given TpmBuffer buffer
[ "Returns", "new", "FirmwareReadResponse", "object", "constructed", "from", "its", "marshaled", "representation", "in", "the", "given", "TpmBuffer", "buffer" ]
def fromTpm(buf): """ Returns new FirmwareReadResponse object constructed from its marshaled representation in the given TpmBuffer buffer """ return buf.createObj(FirmwareReadResponse)
[ "def", "fromTpm", "(", "buf", ")", ":", "return", "buf", ".", "createObj", "(", "FirmwareReadResponse", ")" ]
https://github.com/microsoft/TSS.MSR/blob/0f2516fca2cd9929c31d5450e39301c9bde43688/TSS.Py/src/TpmTypes.py#L16052-L16056
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/boto/boto/mws/connection.py
python
MWSConnection.get_offamazonpayments_service_status
(self, request, response, **kw)
return self._post_request(request, kw, response)
Returns the operational status of the Off-Amazon Payments API section.
Returns the operational status of the Off-Amazon Payments API section.
[ "Returns", "the", "operational", "status", "of", "the", "Off", "-", "Amazon", "Payments", "API", "section", "." ]
def get_offamazonpayments_service_status(self, request, response, **kw): """Returns the operational status of the Off-Amazon Payments API section. """ return self._post_request(request, kw, response)
[ "def", "get_offamazonpayments_service_status", "(", "self", ",", "request", ",", "response", ",", "*", "*", "kw", ")", ":", "return", "self", ".", "_post_request", "(", "request", ",", "kw", ",", "response", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/mws/connection.py#L1164-L1168
generalized-intelligence/GAAS
29ab17d3e8a4ba18edef3a57c36d8db6329fac73
algorithms/src/LocalizationAndMapping/icp_lidar_localization/fast_gicp/thirdparty/Sophus/py/sophus/complex.py
python
Complex.Db_a_mul_b
(a, b)
return sympy.Matrix([[a.real, -a.imag], [a.imag, a.real]])
derivatice of complex muliplication wrt right multiplicand b
derivatice of complex muliplication wrt right multiplicand b
[ "derivatice", "of", "complex", "muliplication", "wrt", "right", "multiplicand", "b" ]
def Db_a_mul_b(a, b): """ derivatice of complex muliplication wrt right multiplicand b """ return sympy.Matrix([[a.real, -a.imag], [a.imag, a.real]])
[ "def", "Db_a_mul_b", "(", "a", ",", "b", ")", ":", "return", "sympy", ".", "Matrix", "(", "[", "[", "a", ".", "real", ",", "-", "a", ".", "imag", "]", ",", "[", "a", ".", "imag", ",", "a", ".", "real", "]", "]", ")" ]
https://github.com/generalized-intelligence/GAAS/blob/29ab17d3e8a4ba18edef3a57c36d8db6329fac73/algorithms/src/LocalizationAndMapping/icp_lidar_localization/fast_gicp/thirdparty/Sophus/py/sophus/complex.py#L78-L81
krishauser/Klampt
972cc83ea5befac3f653c1ba20f80155768ad519
Python/python2_version/klampt/model/create/moving_base_robot.py
python
make
(robotfile,world,tempname="temp.rob",debug=False)
return robot
Converts the given fixed-base robot file into a moving base robot and loads it into the given world. Args: robotfile (str): the name of a fixed-base robot file to load world (WorldModel): a world that will contain the new robot tempname (str, optional): a name of a temporary file containing the moving-base robot debug (bool, optional): if True, the robot file named by ``tempname`` is not removed from disk. Returns: (RobotModel): the loaded robot, stored in ``world``.
Converts the given fixed-base robot file into a moving base robot and loads it into the given world.
[ "Converts", "the", "given", "fixed", "-", "base", "robot", "file", "into", "a", "moving", "base", "robot", "and", "loads", "it", "into", "the", "given", "world", "." ]
def make(robotfile,world,tempname="temp.rob",debug=False): """Converts the given fixed-base robot file into a moving base robot and loads it into the given world. Args: robotfile (str): the name of a fixed-base robot file to load world (WorldModel): a world that will contain the new robot tempname (str, optional): a name of a temporary file containing the moving-base robot debug (bool, optional): if True, the robot file named by ``tempname`` is not removed from disk. Returns: (RobotModel): the loaded robot, stored in ``world``. """ _template_ = """### Boilerplate kinematics of a drivable floating (translating and rotating) cube with a robot hand mounted on it TParent 1 0 0 0 1 0 0 0 1 0 0 0 \ 1 0 0 0 1 0 0 0 1 0 0 0 \ 1 0 0 0 1 0 0 0 1 0 0 0 \ 1 0 0 0 1 0 0 0 1 0 0 0 \ 1 0 0 0 1 0 0 0 1 0 0 0 \ 1 0 0 0 1 0 0 0 1 0 0 0 parents -1 0 1 2 3 4 axis 1 0 0 0 1 0 0 0 1 0 0 1 0 1 0 1 0 0 jointtype p p p r r r qMin -1 -1 -1 -inf -inf -inf qMax 1 1 1 inf inf inf q 0 0 0 0 0 0 links "tx" "ty" "tz" "rz" "ry" "rx" geometry "" "" "" "" "" "data/objects/cube.tri" geomscale 1 1 1 1 1 0.01 mass 0.1 0.1 0.1 0.1 0.1 0.1 com 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 inertia 0.001 0 0 0 0.001 0 0 0 0.001 \ 0.001 0 0 0 0.001 0 0 0 0.001 \ 0.001 0 0 0 0.001 0 0 0 0.001 \ 0.001 0 0 0 0.001 0 0 0 0.001 \ 0.001 0 0 0 0.001 0 0 0 0.001 \ 0.001 0 0 0 0.001 0 0 0 0.001 torqueMax 500 500 500 50 50 50 accMax 4 4 4 4 4 4 4 velMax 2 2 2 3 3 3 joint normal 0 joint normal 1 joint normal 2 joint spin 3 joint spin 4 joint spin 5 driver normal 0 driver normal 1 driver normal 2 driver normal 3 driver normal 4 driver normal 5 servoP 5000 5000 5000 500 500 500 servoI 10 10 10 .5 .5 .5 servoD 100 100 100 10 10 10 viscousFriction 50 50 50 50 50 50 dryFriction 1 1 1 1 1 1 property sensors <sensors><ForceTorqueSensor link="5" hasForce="1 1 1" hasTorque="1 1 1" /></sensors> mount 5 "%s" 1 0 0 0 1 0 0 0 1 0 0 0 as "%s" """ robotname = os.path.splitext(os.path.basename(robotfile))[0] f.close() f2 = open("temp.rob",'w') 
f2.write(_template_ % (robotfile,robotname)) f2.close() world.loadElement("temp.rob") robot = world.robot(world.numRobots()-1) #set torques mass = sum(robot.link(i).getMass().mass for i in range(robot.numLinks())) inertia = 0.0 for i in range(robot.numLinks()): m = robot.link(i).getMass() inertia += (vectorops.normSquared(m.com)*m.mass + max(m.inertia)) tmax = robot.getTorqueMax() tmax[0] = tmax[1] = tmax[2] = mass*9.8*5 tmax[3] = tmax[4] = tmax[5] = inertia*9.8*5 robot.setTorqueMax(tmax) if debug: robot.saveFile("temp.rob") else: os.remove("temp.rob") return robot
[ "def", "make", "(", "robotfile", ",", "world", ",", "tempname", "=", "\"temp.rob\"", ",", "debug", "=", "False", ")", ":", "_template_", "=", "\"\"\"### Boilerplate kinematics of a drivable floating (translating and rotating) cube with a robot hand mounted on it\nTParent 1 0 0 ...
https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/python2_version/klampt/model/create/moving_base_robot.py#L12-L100
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/idlelib/tree.py
python
TreeItem.GetSelectedIconName
(self)
Return name of icon to be displayed when selected.
Return name of icon to be displayed when selected.
[ "Return", "name", "of", "icon", "to", "be", "displayed", "when", "selected", "." ]
def GetSelectedIconName(self): """Return name of icon to be displayed when selected."""
[ "def", "GetSelectedIconName", "(", "self", ")", ":" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/idlelib/tree.py#L381-L382
apiaryio/drafter
4634ebd07f6c6f257cc656598ccd535492fdfb55
tools/gyp/pylib/gyp/generator/make.py
python
MakefileWriter.WriteTarget
(self, spec, configs, deps, link_deps, bundle_deps, extra_outputs, part_of_all)
Write Makefile code to produce the final target of the gyp spec. spec, configs: input from gyp. deps, link_deps: dependency lists; see ComputeDeps() extra_outputs: any extra outputs that our target should depend on part_of_all: flag indicating this target is part of 'all'
Write Makefile code to produce the final target of the gyp spec.
[ "Write", "Makefile", "code", "to", "produce", "the", "final", "target", "of", "the", "gyp", "spec", "." ]
def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, extra_outputs, part_of_all): """Write Makefile code to produce the final target of the gyp spec. spec, configs: input from gyp. deps, link_deps: dependency lists; see ComputeDeps() extra_outputs: any extra outputs that our target should depend on part_of_all: flag indicating this target is part of 'all' """ self.WriteLn('### Rules for final target.') if extra_outputs: self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs) self.WriteMakeRule(extra_outputs, deps, comment=('Preserve order dependency of ' 'special output on deps.'), order_only = True) target_postbuilds = {} if self.type != 'none': for configname in sorted(configs.keys()): config = configs[configname] if self.flavor == 'mac': ldflags = self.xcode_settings.GetLdflags(configname, generator_default_variables['PRODUCT_DIR'], lambda p: Sourceify(self.Absolutify(p))) # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on. gyp_to_build = gyp.common.InvertRelativePath(self.path) target_postbuild = self.xcode_settings.AddImplicitPostbuilds( configname, QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build, self.output))), QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build, self.output_binary)))) if target_postbuild: target_postbuilds[configname] = target_postbuild else: ldflags = config.get('ldflags', []) # Compute an rpath for this output if needed. if any(dep.endswith('.so') or '.so.' in dep for dep in deps): # We want to get the literal string "$ORIGIN" into the link command, # so we need lots of escaping. 
ldflags.append(r'-Wl,-rpath=\$$ORIGIN/lib.%s/' % self.toolset) ldflags.append(r'-Wl,-rpath-link=\$(builddir)/lib.%s/' % self.toolset) library_dirs = config.get('library_dirs', []) ldflags += [('-L%s' % library_dir) for library_dir in library_dirs] self.WriteList(ldflags, 'LDFLAGS_%s' % configname) if self.flavor == 'mac': self.WriteList(self.xcode_settings.GetLibtoolflags(configname), 'LIBTOOLFLAGS_%s' % configname) libraries = spec.get('libraries') if libraries: # Remove duplicate entries libraries = gyp.common.uniquer(libraries) if self.flavor == 'mac': libraries = self.xcode_settings.AdjustLibraries(libraries) self.WriteList(libraries, 'LIBS') self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' % QuoteSpaces(self.output_binary)) self.WriteLn('%s: LIBS := $(LIBS)' % QuoteSpaces(self.output_binary)) if self.flavor == 'mac': self.WriteLn('%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))' % QuoteSpaces(self.output_binary)) # Postbuild actions. Like actions, but implicitly depend on the target's # output. postbuilds = [] if self.flavor == 'mac': if target_postbuilds: postbuilds.append('$(TARGET_POSTBUILDS_$(BUILDTYPE))') postbuilds.extend( gyp.xcode_emulation.GetSpecPostbuildCommands(spec)) if postbuilds: # Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE), # so we must output its definition first, since we declare variables # using ":=". self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv()) for configname in target_postbuilds: self.WriteLn('%s: TARGET_POSTBUILDS_%s := %s' % (QuoteSpaces(self.output), configname, gyp.common.EncodePOSIXShellList(target_postbuilds[configname]))) # Postbuilds expect to be run in the gyp file's directory, so insert an # implicit postbuild to cd to there. 
postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path])) for i in xrange(len(postbuilds)): if not postbuilds[i].startswith('$'): postbuilds[i] = EscapeShellArgument(postbuilds[i]) self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output)) self.WriteLn('%s: POSTBUILDS := %s' % ( QuoteSpaces(self.output), ' '.join(postbuilds))) # A bundle directory depends on its dependencies such as bundle resources # and bundle binary. When all dependencies have been built, the bundle # needs to be packaged. if self.is_mac_bundle: # If the framework doesn't contain a binary, then nothing depends # on the actions -- make the framework depend on them directly too. self.WriteDependencyOnExtraOutputs(self.output, extra_outputs) # Bundle dependencies. Note that the code below adds actions to this # target, so if you move these two lines, move the lines below as well. self.WriteList(map(QuoteSpaces, bundle_deps), 'BUNDLE_DEPS') self.WriteLn('%s: $(BUNDLE_DEPS)' % QuoteSpaces(self.output)) # After the framework is built, package it. Needs to happen before # postbuilds, since postbuilds depend on this. if self.type in ('shared_library', 'loadable_module'): self.WriteLn('\t@$(call do_cmd,mac_package_framework,,,%s)' % self.xcode_settings.GetFrameworkVersion()) # Bundle postbuilds can depend on the whole bundle, so run them after # the bundle is packaged, not already after the bundle binary is done. if postbuilds: self.WriteLn('\t@$(call do_postbuilds)') postbuilds = [] # Don't write postbuilds for target's output. # Needed by test/mac/gyptest-rebuild.py. self.WriteLn('\t@true # No-op, used by tests') # Since this target depends on binary and resources which are in # nested subfolders, the framework directory will be older than # its dependencies usually. To prevent this rule from executing # on every build (expensive, especially with postbuilds), expliclity # update the time on the framework directory. 
self.WriteLn('\t@touch -c %s' % QuoteSpaces(self.output)) if postbuilds: assert not self.is_mac_bundle, ('Postbuilds for bundles should be done ' 'on the bundle, not the binary (target \'%s\')' % self.target) assert 'product_dir' not in spec, ('Postbuilds do not work with ' 'custom product_dir') if self.type == 'executable': self.WriteLn('%s: LD_INPUTS := %s' % ( QuoteSpaces(self.output_binary), ' '.join(map(QuoteSpaces, link_deps)))) if self.toolset == 'host' and self.flavor == 'android': self.WriteDoCmd([self.output_binary], link_deps, 'link_host', part_of_all, postbuilds=postbuilds) else: self.WriteDoCmd([self.output_binary], link_deps, 'link', part_of_all, postbuilds=postbuilds) elif self.type == 'static_library': for link_dep in link_deps: assert ' ' not in link_dep, ( "Spaces in alink input filenames not supported (%s)" % link_dep) if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not self.is_standalone_static_library): self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin', part_of_all, postbuilds=postbuilds) else: self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all, postbuilds=postbuilds) elif self.type == 'shared_library': self.WriteLn('%s: LD_INPUTS := %s' % ( QuoteSpaces(self.output_binary), ' '.join(map(QuoteSpaces, link_deps)))) self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all, postbuilds=postbuilds) elif self.type == 'loadable_module': for link_dep in link_deps: assert ' ' not in link_dep, ( "Spaces in module input filenames not supported (%s)" % link_dep) if self.toolset == 'host' and self.flavor == 'android': self.WriteDoCmd([self.output_binary], link_deps, 'solink_module_host', part_of_all, postbuilds=postbuilds) else: self.WriteDoCmd( [self.output_binary], link_deps, 'solink_module', part_of_all, postbuilds=postbuilds) elif self.type == 'none': # Write a stamp line. 
self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all, postbuilds=postbuilds) else: print "WARNING: no output for", self.type, target # Add an alias for each target (if there are any outputs). # Installable target aliases are created below. if ((self.output and self.output != self.target) and (self.type not in self._INSTALLABLE_TARGETS)): self.WriteMakeRule([self.target], [self.output], comment='Add target alias', phony = True) if part_of_all: self.WriteMakeRule(['all'], [self.target], comment = 'Add target alias to "all" target.', phony = True) # Add special-case rules for our installable targets. # 1) They need to install to the build dir or "product" dir. # 2) They get shortcuts for building (e.g. "make chrome"). # 3) They are part of "make all". if (self.type in self._INSTALLABLE_TARGETS or self.is_standalone_static_library): if self.type == 'shared_library': file_desc = 'shared library' elif self.type == 'static_library': file_desc = 'static library' else: file_desc = 'executable' install_path = self._InstallableTargetInstallPath() installable_deps = [self.output] if (self.flavor == 'mac' and not 'product_dir' in spec and self.toolset == 'target'): # On mac, products are created in install_path immediately. assert install_path == self.output, '%s != %s' % ( install_path, self.output) # Point the target alias to the final binary output. self.WriteMakeRule([self.target], [install_path], comment='Add target alias', phony = True) if install_path != self.output: assert not self.is_mac_bundle # See comment a few lines above. self.WriteDoCmd([install_path], [self.output], 'copy', comment = 'Copy this to the %s output path.' % file_desc, part_of_all=part_of_all) installable_deps.append(install_path) if self.output != self.alias and self.alias != self.target: self.WriteMakeRule([self.alias], installable_deps, comment = 'Short alias for building this %s.' 
% file_desc, phony = True) if part_of_all: self.WriteMakeRule(['all'], [install_path], comment = 'Add %s to "all" target.' % file_desc, phony = True)
[ "def", "WriteTarget", "(", "self", ",", "spec", ",", "configs", ",", "deps", ",", "link_deps", ",", "bundle_deps", ",", "extra_outputs", ",", "part_of_all", ")", ":", "self", ".", "WriteLn", "(", "'### Rules for final target.'", ")", "if", "extra_outputs", ":"...
https://github.com/apiaryio/drafter/blob/4634ebd07f6c6f257cc656598ccd535492fdfb55/tools/gyp/pylib/gyp/generator/make.py#L1427-L1660
google/mediapipe
e6c19885c6d3c6f410c730952aeed2852790d306
mediapipe/python/solutions/drawing_utils.py
python
plot_landmarks
(landmark_list: landmark_pb2.NormalizedLandmarkList, connections: Optional[List[Tuple[int, int]]] = None, landmark_drawing_spec: DrawingSpec = DrawingSpec( color=RED_COLOR, thickness=5), connection_drawing_spec: DrawingSpec = DrawingSpec( color=BLACK_COLOR, thickness=5), elevation: int = 10, azimuth: int = 10)
Plot the landmarks and the connections in matplotlib 3d. Args: landmark_list: A normalized landmark list proto message to be plotted. connections: A list of landmark index tuples that specifies how landmarks to be connected. landmark_drawing_spec: A DrawingSpec object that specifies the landmarks' drawing settings such as color and line thickness. connection_drawing_spec: A DrawingSpec object that specifies the connections' drawing settings such as color and line thickness. elevation: The elevation from which to view the plot. azimuth: the azimuth angle to rotate the plot. Raises: ValueError: If any connetions contain invalid landmark index.
Plot the landmarks and the connections in matplotlib 3d.
[ "Plot", "the", "landmarks", "and", "the", "connections", "in", "matplotlib", "3d", "." ]
def plot_landmarks(landmark_list: landmark_pb2.NormalizedLandmarkList, connections: Optional[List[Tuple[int, int]]] = None, landmark_drawing_spec: DrawingSpec = DrawingSpec( color=RED_COLOR, thickness=5), connection_drawing_spec: DrawingSpec = DrawingSpec( color=BLACK_COLOR, thickness=5), elevation: int = 10, azimuth: int = 10): """Plot the landmarks and the connections in matplotlib 3d. Args: landmark_list: A normalized landmark list proto message to be plotted. connections: A list of landmark index tuples that specifies how landmarks to be connected. landmark_drawing_spec: A DrawingSpec object that specifies the landmarks' drawing settings such as color and line thickness. connection_drawing_spec: A DrawingSpec object that specifies the connections' drawing settings such as color and line thickness. elevation: The elevation from which to view the plot. azimuth: the azimuth angle to rotate the plot. Raises: ValueError: If any connetions contain invalid landmark index. """ if not landmark_list: return plt.figure(figsize=(10, 10)) ax = plt.axes(projection='3d') ax.view_init(elev=elevation, azim=azimuth) plotted_landmarks = {} for idx, landmark in enumerate(landmark_list.landmark): if ((landmark.HasField('visibility') and landmark.visibility < _VISIBILITY_THRESHOLD) or (landmark.HasField('presence') and landmark.presence < _PRESENCE_THRESHOLD)): continue ax.scatter3D( xs=[-landmark.z], ys=[landmark.x], zs=[-landmark.y], color=_normalize_color(landmark_drawing_spec.color[::-1]), linewidth=landmark_drawing_spec.thickness) plotted_landmarks[idx] = (-landmark.z, landmark.x, -landmark.y) if connections: num_landmarks = len(landmark_list.landmark) # Draws the connections if the start and end landmarks are both visible. for connection in connections: start_idx = connection[0] end_idx = connection[1] if not (0 <= start_idx < num_landmarks and 0 <= end_idx < num_landmarks): raise ValueError(f'Landmark index is out of range. 
Invalid connection ' f'from landmark #{start_idx} to landmark #{end_idx}.') if start_idx in plotted_landmarks and end_idx in plotted_landmarks: landmark_pair = [ plotted_landmarks[start_idx], plotted_landmarks[end_idx] ] ax.plot3D( xs=[landmark_pair[0][0], landmark_pair[1][0]], ys=[landmark_pair[0][1], landmark_pair[1][1]], zs=[landmark_pair[0][2], landmark_pair[1][2]], color=_normalize_color(connection_drawing_spec.color[::-1]), linewidth=connection_drawing_spec.thickness) plt.show()
[ "def", "plot_landmarks", "(", "landmark_list", ":", "landmark_pb2", ".", "NormalizedLandmarkList", ",", "connections", ":", "Optional", "[", "List", "[", "Tuple", "[", "int", ",", "int", "]", "]", "]", "=", "None", ",", "landmark_drawing_spec", ":", "DrawingSp...
https://github.com/google/mediapipe/blob/e6c19885c6d3c6f410c730952aeed2852790d306/mediapipe/python/solutions/drawing_utils.py#L257-L318
domino-team/openwrt-cc
8b181297c34d14d3ca521cc9f31430d561dbc688
package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
python
XcodeSettings.GetLdflags
(self, configname, product_dir, gyp_to_build_path, arch=None)
return ldflags
Returns flags that need to be passed to the linker. Args: configname: The name of the configuration to get ld flags for. product_dir: The directory where products such static and dynamic libraries are placed. This is added to the library search path. gyp_to_build_path: A function that converts paths relative to the current gyp file to paths relative to the build direcotry.
Returns flags that need to be passed to the linker.
[ "Returns", "flags", "that", "need", "to", "be", "passed", "to", "the", "linker", "." ]
def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None): """Returns flags that need to be passed to the linker. Args: configname: The name of the configuration to get ld flags for. product_dir: The directory where products such static and dynamic libraries are placed. This is added to the library search path. gyp_to_build_path: A function that converts paths relative to the current gyp file to paths relative to the build direcotry. """ self.configname = configname ldflags = [] # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS # can contain entries that depend on this. Explicitly absolutify these. for ldflag in self._Settings().get('OTHER_LDFLAGS', []): ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path)) if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'): ldflags.append('-Wl,-dead_strip') if self._Test('PREBINDING', 'YES', default='NO'): ldflags.append('-Wl,-prebind') self._Appendf( ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s') self._Appendf( ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s') self._AppendPlatformVersionMinFlags(ldflags) if 'SDKROOT' in self._Settings() and self._SdkPath(): ldflags.append('-isysroot ' + self._SdkPath()) for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []): ldflags.append('-L' + gyp_to_build_path(library_path)) if 'ORDER_FILE' in self._Settings(): ldflags.append('-Wl,-order_file ' + '-Wl,' + gyp_to_build_path( self._Settings()['ORDER_FILE'])) if arch is not None: archs = [arch] else: assert self.configname archs = self.GetActiveArchs(self.configname) if len(archs) != 1: # TODO: Supporting fat binaries will be annoying. self._WarnUnimplemented('ARCHS') archs = ['i386'] ldflags.append('-arch ' + archs[0]) # Xcode adds the product directory by default. 
ldflags.append('-L' + product_dir) install_name = self.GetInstallName() if install_name and self.spec['type'] != 'loadable_module': ldflags.append('-install_name ' + install_name.replace(' ', r'\ ')) for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []): ldflags.append('-Wl,-rpath,' + rpath) sdk_root = self._SdkPath() if not sdk_root: sdk_root = '' config = self.spec['configurations'][self.configname] framework_dirs = config.get('mac_framework_dirs', []) for directory in framework_dirs: ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root)) is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension() if sdk_root and is_extension: # Adds the link flags for extensions. These flags are common for all # extensions and provide loader and main function. # These flags reflect the compilation options used by xcode to compile # extensions. ldflags.append('-lpkstart') if XcodeVersion() < '0900': ldflags.append(sdk_root + '/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit') ldflags.append('-fapplication-extension') ldflags.append('-Xlinker -rpath ' '-Xlinker @executable_path/../../Frameworks') self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s') self.configname = None return ldflags
[ "def", "GetLdflags", "(", "self", ",", "configname", ",", "product_dir", ",", "gyp_to_build_path", ",", "arch", "=", "None", ")", ":", "self", ".", "configname", "=", "configname", "ldflags", "=", "[", "]", "# The xcode build is relative to a gyp file's directory, a...
https://github.com/domino-team/openwrt-cc/blob/8b181297c34d14d3ca521cc9f31430d561dbc688/package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py#L763-L851
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/combo.py
python
OwnerDrawnComboBox.__init__
(self, *args, **kwargs)
__init__(self, Window parent, int id=-1, String value=EmptyString, Point pos=DefaultPosition, Size size=DefaultSize, wxArrayString choices=wxPyEmptyStringArray, long style=0, Validator validator=DefaultValidator, String name=ComboBoxNameStr) -> OwnerDrawnComboBox Standard constructor.
__init__(self, Window parent, int id=-1, String value=EmptyString, Point pos=DefaultPosition, Size size=DefaultSize, wxArrayString choices=wxPyEmptyStringArray, long style=0, Validator validator=DefaultValidator, String name=ComboBoxNameStr) -> OwnerDrawnComboBox
[ "__init__", "(", "self", "Window", "parent", "int", "id", "=", "-", "1", "String", "value", "=", "EmptyString", "Point", "pos", "=", "DefaultPosition", "Size", "size", "=", "DefaultSize", "wxArrayString", "choices", "=", "wxPyEmptyStringArray", "long", "style", ...
def __init__(self, *args, **kwargs): """ __init__(self, Window parent, int id=-1, String value=EmptyString, Point pos=DefaultPosition, Size size=DefaultSize, wxArrayString choices=wxPyEmptyStringArray, long style=0, Validator validator=DefaultValidator, String name=ComboBoxNameStr) -> OwnerDrawnComboBox Standard constructor. """ _combo.OwnerDrawnComboBox_swiginit(self,_combo.new_OwnerDrawnComboBox(*args, **kwargs)) self._setOORInfo(self);OwnerDrawnComboBox._setCallbackInfo(self, self, OwnerDrawnComboBox)
[ "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "_combo", ".", "OwnerDrawnComboBox_swiginit", "(", "self", ",", "_combo", ".", "new_OwnerDrawnComboBox", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")", "self", "...
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/combo.py#L820-L831
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/third_party/lib_x86_64/python2.7/dist-packages/rosdep2/loader.py
python
RosdepLoader.get_rosdeps
(self, resource_name, implicit=True)
:raises: :exc:`rospkg.ResourceNotFound` if *resource_name* cannot be found.
:raises: :exc:`rospkg.ResourceNotFound` if *resource_name* cannot be found.
[ ":", "raises", ":", ":", "exc", ":", "rospkg", ".", "ResourceNotFound", "if", "*", "resource_name", "*", "cannot", "be", "found", "." ]
def get_rosdeps(self, resource_name, implicit=True): """ :raises: :exc:`rospkg.ResourceNotFound` if *resource_name* cannot be found. """ raise NotImplementedError(resource_name, implicit)
[ "def", "get_rosdeps", "(", "self", ",", "resource_name", ",", "implicit", "=", "True", ")", ":", "raise", "NotImplementedError", "(", "resource_name", ",", "implicit", ")" ]
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_x86_64/python2.7/dist-packages/rosdep2/loader.py#L80-L84
eclipse/sumo
7132a9b8b6eea734bdec38479026b4d8c4336d03
tools/traci/_vehicle.py
python
VehicleDomain.getRouteIndex
(self, vehID)
return self._getUniversal(tc.VAR_ROUTE_INDEX, vehID)
getRouteIndex(string) -> int Returns the index of the current edge within the vehicles route or -1 if the vehicle has not yet departed
getRouteIndex(string) -> int
[ "getRouteIndex", "(", "string", ")", "-", ">", "int" ]
def getRouteIndex(self, vehID): """getRouteIndex(string) -> int Returns the index of the current edge within the vehicles route or -1 if the vehicle has not yet departed """ return self._getUniversal(tc.VAR_ROUTE_INDEX, vehID)
[ "def", "getRouteIndex", "(", "self", ",", "vehID", ")", ":", "return", "self", ".", "_getUniversal", "(", "tc", ".", "VAR_ROUTE_INDEX", ",", "vehID", ")" ]
https://github.com/eclipse/sumo/blob/7132a9b8b6eea734bdec38479026b4d8c4336d03/tools/traci/_vehicle.py#L329-L335
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scikit-learn/py3/sklearn/compose/_column_transformer.py
python
ColumnTransformer.fit_transform
(self, X, y=None)
return self._hstack(list(Xs))
Fit all transformers, transform the data and concatenate results. Parameters ---------- X : array-like or DataFrame of shape [n_samples, n_features] Input data, of which specified subsets are used to fit the transformers. y : array-like, shape (n_samples, ...), optional Targets for supervised learning. Returns ------- X_t : array-like or sparse matrix, shape (n_samples, sum_n_components) hstack of results of transformers. sum_n_components is the sum of n_components (output dimension) over transformers. If any result is a sparse matrix, everything will be converted to sparse matrices.
Fit all transformers, transform the data and concatenate results.
[ "Fit", "all", "transformers", "transform", "the", "data", "and", "concatenate", "results", "." ]
def fit_transform(self, X, y=None): """Fit all transformers, transform the data and concatenate results. Parameters ---------- X : array-like or DataFrame of shape [n_samples, n_features] Input data, of which specified subsets are used to fit the transformers. y : array-like, shape (n_samples, ...), optional Targets for supervised learning. Returns ------- X_t : array-like or sparse matrix, shape (n_samples, sum_n_components) hstack of results of transformers. sum_n_components is the sum of n_components (output dimension) over transformers. If any result is a sparse matrix, everything will be converted to sparse matrices. """ # TODO: this should be `feature_names_in_` when we start having it if hasattr(X, "columns"): self._feature_names_in = np.asarray(X.columns) else: self._feature_names_in = None X = _check_X(X) self._validate_transformers() self._validate_column_callables(X) self._validate_remainder(X) result = self._fit_transform(X, y, _fit_transform_one) if not result: self._update_fitted_transformers([]) # All transformers are None return np.zeros((X.shape[0], 0)) Xs, transformers = zip(*result) # determine if concatenated output will be sparse or not if any(sparse.issparse(X) for X in Xs): nnz = sum(X.nnz if sparse.issparse(X) else X.size for X in Xs) total = sum(X.shape[0] * X.shape[1] if sparse.issparse(X) else X.size for X in Xs) density = nnz / total self.sparse_output_ = density < self.sparse_threshold else: self.sparse_output_ = False self._update_fitted_transformers(transformers) self._validate_output(Xs) return self._hstack(list(Xs))
[ "def", "fit_transform", "(", "self", ",", "X", ",", "y", "=", "None", ")", ":", "# TODO: this should be `feature_names_in_` when we start having it", "if", "hasattr", "(", "X", ",", "\"columns\"", ")", ":", "self", ".", "_feature_names_in", "=", "np", ".", "asar...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py3/sklearn/compose/_column_transformer.py#L487-L540
line/stellite
5bd1c1f5f0cdc22a65319068f4f8b2ca7769bfa1
modified_files/tools/vim/chromium.ycm_extra_conf.py
python
GetBuildableSourceFile
(chrome_root, filename)
return filename
Returns a buildable source file corresponding to |filename|. A buildable source file is one which is likely to be passed into clang as a source file during the build. For .h files, returns the closest matching .cc, .cpp or .c file. If no such file is found, returns the same as GetDefaultSourceFile(). Args: chrome_root: (String) Absolute path to the root of Chromium checkout. filename: (String) Absolute path to the target source file. Returns: (String) Absolute path to source file.
Returns a buildable source file corresponding to |filename|.
[ "Returns", "a", "buildable", "source", "file", "corresponding", "to", "|filename|", "." ]
def GetBuildableSourceFile(chrome_root, filename): """Returns a buildable source file corresponding to |filename|. A buildable source file is one which is likely to be passed into clang as a source file during the build. For .h files, returns the closest matching .cc, .cpp or .c file. If no such file is found, returns the same as GetDefaultSourceFile(). Args: chrome_root: (String) Absolute path to the root of Chromium checkout. filename: (String) Absolute path to the target source file. Returns: (String) Absolute path to source file. """ if filename.endswith('.h'): # Header files can't be built. Instead, try to match a header file to its # corresponding source file. alternates = ['.cc', '.cpp', '.c'] for alt_extension in alternates: alt_name = filename[:-2] + alt_extension if os.path.exists(alt_name): return alt_name return GetDefaultSourceFile(chrome_root, filename) return filename
[ "def", "GetBuildableSourceFile", "(", "chrome_root", ",", "filename", ")", ":", "if", "filename", ".", "endswith", "(", "'.h'", ")", ":", "# Header files can't be built. Instead, try to match a header file to its", "# corresponding source file.", "alternates", "=", "[", "'....
https://github.com/line/stellite/blob/5bd1c1f5f0cdc22a65319068f4f8b2ca7769bfa1/modified_files/tools/vim/chromium.ycm_extra_conf.py#L125-L151
hunterlew/mstar_deeplearning_project
3761624dcbd7d44af257200542d13d1444dc634a
classification/caffe/python/caffe/io.py
python
blobproto_to_array
(blob, return_diff=False)
Convert a blob proto to an array. In default, we will just return the data, unless return_diff is True, in which case we will return the diff.
Convert a blob proto to an array. In default, we will just return the data, unless return_diff is True, in which case we will return the diff.
[ "Convert", "a", "blob", "proto", "to", "an", "array", ".", "In", "default", "we", "will", "just", "return", "the", "data", "unless", "return_diff", "is", "True", "in", "which", "case", "we", "will", "return", "the", "diff", "." ]
def blobproto_to_array(blob, return_diff=False):
    """Convert a blob proto to an array.

    In default, we will just return the data, unless return_diff is True,
    in which case we will return the diff.
    """
    source = blob.diff if return_diff else blob.data
    data = np.array(source)
    # Legacy blobs carry explicit num/channels/height/width fields instead
    # of a shape message.
    uses_legacy_shape = (blob.HasField('num') or blob.HasField('channels') or
                         blob.HasField('height') or blob.HasField('width'))
    if uses_legacy_shape:
        return data.reshape(blob.num, blob.channels, blob.height, blob.width)
    return data.reshape(blob.shape.dim)
[ "def", "blobproto_to_array", "(", "blob", ",", "return_diff", "=", "False", ")", ":", "# Read the data into an array", "if", "return_diff", ":", "data", "=", "np", ".", "array", "(", "blob", ".", "diff", ")", "else", ":", "data", "=", "np", ".", "array", ...
https://github.com/hunterlew/mstar_deeplearning_project/blob/3761624dcbd7d44af257200542d13d1444dc634a/classification/caffe/python/caffe/io.py#L18-L34
Harick1/caffe-yolo
eea92bf3ddfe4d0ff6b0b3ba9b15c029a83ed9a3
scripts/cpp_lint.py
python
ParseNolintSuppressions
(filename, raw_line, linenum, error)
Updates the global list of error-suppressions. Parses any NOLINT comments on the current line, updating the global error_suppressions store. Reports an error if the NOLINT comment was malformed. Args: filename: str, the name of the input file. raw_line: str, the line of input text, with comments. linenum: int, the number of the current line. error: function, an error handler.
Updates the global list of error-suppressions.
[ "Updates", "the", "global", "list", "of", "error", "-", "suppressions", "." ]
def ParseNolintSuppressions(filename, raw_line, linenum, error):
    """Updates the global list of error-suppressions.

    Parses any NOLINT comments on the current line, updating the global
    error_suppressions store.  Reports an error if the NOLINT comment
    was malformed.

    Args:
      filename: str, the name of the input file.
      raw_line: str, the line of input text, with comments.
      linenum: int, the number of the current line.
      error: function, an error handler.
    """
    # FIXME(adonovan): "NOLINT(" is misparsed as NOLINT(*).
    matched = _RE_SUPPRESSION.search(raw_line)
    if not matched:
        return
    if matched.group(1) == '_NEXT_LINE':
        # NOLINTNEXTLINE-style comment: the suppression applies to the
        # line after this one.
        linenum += 1
    category = matched.group(2)
    if category in (None, '(*)'):  # => "suppress all"
        _error_suppressions.setdefault(None, set()).add(linenum)
        return
    if category.startswith('(') and category.endswith(')'):
        category = category[1:-1]
        if category in _ERROR_CATEGORIES:
            _error_suppressions.setdefault(category, set()).add(linenum)
        else:
            error(filename, linenum, 'readability/nolint', 5,
                  'Unknown NOLINT error category: %s' % category)
[ "def", "ParseNolintSuppressions", "(", "filename", ",", "raw_line", ",", "linenum", ",", "error", ")", ":", "# FIXME(adonovan): \"NOLINT(\" is misparsed as NOLINT(*).", "matched", "=", "_RE_SUPPRESSION", ".", "search", "(", "raw_line", ")", "if", "matched", ":", "if",...
https://github.com/Harick1/caffe-yolo/blob/eea92bf3ddfe4d0ff6b0b3ba9b15c029a83ed9a3/scripts/cpp_lint.py#L464-L492
thalium/icebox
99d147d5b9269222225443ce171b4fd46d8985d4
third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py
python
inputBuffer.Setup
(self, reader, URL, encoding, options)
return ret
Setup an XML reader with new options
Setup an XML reader with new options
[ "Setup", "an", "XML", "reader", "with", "new", "options" ]
def Setup(self, reader, URL, encoding, options):
    """Setup an XML reader with new options """
    # A None reader maps to a NULL xmlTextReaderPtr on the C side.
    reader__o = None if reader is None else reader._o
    ret = libxml2mod.xmlTextReaderSetup(reader__o, self._o, URL, encoding,
                                        options)
    return ret
[ "def", "Setup", "(", "self", ",", "reader", ",", "URL", ",", "encoding", ",", "options", ")", ":", "if", "reader", "is", "None", ":", "reader__o", "=", "None", "else", ":", "reader__o", "=", "reader", ".", "_o", "ret", "=", "libxml2mod", ".", "xmlTex...
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py#L5383-L5388
weolar/miniblink49
1c4678db0594a4abde23d3ebbcc7cd13c3170777
third_party/WebKit/Tools/Scripts/webkitpy/common/message_pool.py
python
get
(caller, worker_factory, num_workers, host=None)
return _MessagePool(caller, worker_factory, num_workers, host)
Returns an object that exposes a run() method that takes a list of test shards and runs them in parallel.
Returns an object that exposes a run() method that takes a list of test shards and runs them in parallel.
[ "Returns", "an", "object", "that", "exposes", "a", "run", "()", "method", "that", "takes", "a", "list", "of", "test", "shards", "and", "runs", "them", "in", "parallel", "." ]
def get(caller, worker_factory, num_workers, host=None):
    """Returns an object that exposes a run() method that takes a list of test
    shards and runs them in parallel."""
    # Thin factory wrapper around the private pool implementation.
    pool = _MessagePool(caller, worker_factory, num_workers, host)
    return pool
[ "def", "get", "(", "caller", ",", "worker_factory", ",", "num_workers", ",", "host", "=", "None", ")", ":", "return", "_MessagePool", "(", "caller", ",", "worker_factory", ",", "num_workers", ",", "host", ")" ]
https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/third_party/WebKit/Tools/Scripts/webkitpy/common/message_pool.py#L59-L61
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_windows.py
python
StandardDialogLayoutAdapter.ReparentControls
(*args, **kwargs)
return _windows_.StandardDialogLayoutAdapter_ReparentControls(*args, **kwargs)
ReparentControls(self, Window parent, Window reparentTo, Sizer buttonSizer=None)
ReparentControls(self, Window parent, Window reparentTo, Sizer buttonSizer=None)
[ "ReparentControls", "(", "self", "Window", "parent", "Window", "reparentTo", "Sizer", "buttonSizer", "=", "None", ")" ]
def ReparentControls(*args, **kwargs):
    """ReparentControls(self, Window parent, Window reparentTo, Sizer buttonSizer=None)"""
    # Delegate straight through to the SWIG-generated wrapper.
    result = _windows_.StandardDialogLayoutAdapter_ReparentControls(*args, **kwargs)
    return result
[ "def", "ReparentControls", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_windows_", ".", "StandardDialogLayoutAdapter_ReparentControls", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_windows.py#L1005-L1007
genn-team/genn
75e1eb218cafa228bf36ae4613d1ce26e877b12c
pygenn/model_preprocessor.py
python
param_space_to_val_vec
(model, param_space)
return DoubleVector([param_space[pn] for pn in model.get_param_names()])
Convert a param_space dict to a std::vector<double> Args: model -- instance of the model param_space -- dict with parameters Returns: native vector of parameters
Convert a param_space dict to a std::vector<double>
[ "Convert", "a", "param_space", "dict", "to", "a", "std", "::", "vector<double", ">" ]
def param_space_to_val_vec(model, param_space):
    """Convert a param_space dict to a std::vector<double>

    Args:
    model -- instance of the model
    param_space -- dict with parameters

    Returns:
    native vector of parameters
    """
    # Reject non-numeric values up front; the native vector is doubles only.
    for value in itervalues(param_space):
        if not isinstance(value, Number):
            raise ValueError("non-numeric parameters are not supported")
    # Order the values by the model's declared parameter names.
    ordered_values = [param_space[name] for name in model.get_param_names()]
    return DoubleVector(ordered_values)
[ "def", "param_space_to_val_vec", "(", "model", ",", "param_space", ")", ":", "if", "not", "all", "(", "isinstance", "(", "p", ",", "Number", ")", "for", "p", "in", "itervalues", "(", "param_space", ")", ")", ":", "raise", "ValueError", "(", "\"non-numeric ...
https://github.com/genn-team/genn/blob/75e1eb218cafa228bf36ae4613d1ce26e877b12c/pygenn/model_preprocessor.py#L134-L147
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
third_party/jinja2/utils.py
python
clear_caches
()
Jinja2 keeps internal caches for environments and lexers. These are used so that Jinja2 doesn't have to recreate environments and lexers all the time. Normally you don't have to care about that but if you are messuring memory consumption you may want to clean the caches.
Jinja2 keeps internal caches for environments and lexers. These are used so that Jinja2 doesn't have to recreate environments and lexers all the time. Normally you don't have to care about that but if you are messuring memory consumption you may want to clean the caches.
[ "Jinja2", "keeps", "internal", "caches", "for", "environments", "and", "lexers", ".", "These", "are", "used", "so", "that", "Jinja2", "doesn", "t", "have", "to", "recreate", "environments", "and", "lexers", "all", "the", "time", ".", "Normally", "you", "don"...
def clear_caches():
    """Jinja2 keeps internal caches for environments and lexers.  These are
    used so that Jinja2 doesn't have to recreate environments and lexers all
    the time.  Normally you don't have to care about that but if you are
    measuring memory consumption you may want to clean the caches.
    """
    # Imported lazily to avoid circular imports at module load time.
    from jinja2.environment import _spontaneous_environments
    from jinja2.lexer import _lexer_cache

    for cache in (_spontaneous_environments, _lexer_cache):
        cache.clear()
[ "def", "clear_caches", "(", ")", ":", "from", "jinja2", ".", "environment", "import", "_spontaneous_environments", "from", "jinja2", ".", "lexer", "import", "_lexer_cache", "_spontaneous_environments", ".", "clear", "(", ")", "_lexer_cache", ".", "clear", "(", ")"...
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/third_party/jinja2/utils.py#L107-L116
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/boto/boto/gs/bucket.py
python
Bucket.set_xml_acl
(self, acl_str, key_name='', headers=None, version_id=None, query_args='acl', generation=None, if_generation=None, if_metageneration=None)
return self._set_acl_helper(acl_str, key_name=key_name, headers=headers, query_args=query_args, generation=generation, if_generation=if_generation, if_metageneration=if_metageneration)
Sets a bucket's or objects's ACL to an XML string. :type acl_str: string :param acl_str: A string containing the ACL XML. :type key_name: string :param key_name: A key name within the bucket to set the ACL for. If not specified, the ACL for the bucket will be set. :type headers: dict :param headers: Additional headers to set during the request. :type version_id: string :param version_id: Unused in this subclass. :type query_args: str :param query_args: The query parameters to pass with the request. :type generation: int :param generation: If specified, sets the ACL for a specific generation of a versioned object. If not specified, the current version is modified. :type if_generation: int :param if_generation: (optional) If set to a generation number, the acl will only be updated if its current generation number is this value. :type if_metageneration: int :param if_metageneration: (optional) If set to a metageneration number, the acl will only be updated if its current metageneration number is this value.
Sets a bucket's or objects's ACL to an XML string.
[ "Sets", "a", "bucket", "s", "or", "objects", "s", "ACL", "to", "an", "XML", "string", "." ]
def set_xml_acl(self, acl_str, key_name='', headers=None, version_id=None,
                query_args='acl', generation=None, if_generation=None,
                if_metageneration=None):
    """Sets a bucket's or objects's ACL to an XML string.

    :type acl_str: string
    :param acl_str: A string containing the ACL XML.

    :type key_name: string
    :param key_name: A key name within the bucket to set the ACL for. If not
        specified, the ACL for the bucket will be set.

    :type headers: dict
    :param headers: Additional headers to set during the request.

    :type version_id: string
    :param version_id: Unused in this subclass.

    :type query_args: str
    :param query_args: The query parameters to pass with the request.

    :type generation: int
    :param generation: If specified, sets the ACL for a specific generation
        of a versioned object. If not specified, the current version is
        modified.

    :type if_generation: int
    :param if_generation: (optional) If set to a generation number, the acl
        will only be updated if its current generation number is this value.

    :type if_metageneration: int
    :param if_metageneration: (optional) If set to a metageneration number,
        the acl will only be updated if its current metageneration number is
        this value.
    """
    # version_id is accepted for interface compatibility but intentionally
    # not forwarded; GCS versioning uses generation numbers instead.
    return self._set_acl_helper(
        acl_str, key_name=key_name, headers=headers, query_args=query_args,
        generation=generation, if_generation=if_generation,
        if_metageneration=if_metageneration)
[ "def", "set_xml_acl", "(", "self", ",", "acl_str", ",", "key_name", "=", "''", ",", "headers", "=", "None", ",", "version_id", "=", "None", ",", "query_args", "=", "'acl'", ",", "generation", "=", "None", ",", "if_generation", "=", "None", ",", "if_metag...
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/gs/bucket.py#L448-L487
pytorch/pytorch
7176c92687d3cc847cc046bf002269c6949a21c2
torch/sparse/__init__.py
python
mm
(mat1: Tensor, mat2: Tensor)
return torch._sparse_mm(mat1, mat2)
r""" Performs a matrix multiplication of the sparse matrix :attr:`mat1` and the (sparse or strided) matrix :attr:`mat2`. Similar to :func:`torch.mm`, If :attr:`mat1` is a :math:`(n \times m)` tensor, :attr:`mat2` is a :math:`(m \times p)` tensor, out will be a :math:`(n \times p)` tensor. :attr:`mat1` need to have `sparse_dim = 2`. This function also supports backward for both matrices. Note that the gradients of :attr:`mat1` is a coalesced sparse tensor. Args: mat1 (SparseTensor): the first sparse matrix to be multiplied mat2 (Tensor): the second matrix to be multiplied, which could be sparse or dense Shape: The format of the output tensor of this function follows: - sparse x sparse -> sparse - sparse x dense -> dense Example:: >>> a = torch.randn(2, 3).to_sparse().requires_grad_(True) >>> a tensor(indices=tensor([[0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 1, 2]]), values=tensor([ 1.5901, 0.0183, -0.6146, 1.8061, -0.0112, 0.6302]), size=(2, 3), nnz=6, layout=torch.sparse_coo, requires_grad=True) >>> b = torch.randn(3, 2, requires_grad=True) >>> b tensor([[-0.6479, 0.7874], [-1.2056, 0.5641], [-1.1716, -0.9923]], requires_grad=True) >>> y = torch.sparse.mm(a, b) >>> y tensor([[-0.3323, 1.8723], [-1.8951, 0.7904]], grad_fn=<SparseAddmmBackward>) >>> y.sum().backward() >>> a.grad tensor(indices=tensor([[0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 1, 2]]), values=tensor([ 0.1394, -0.6415, -2.1639, 0.1394, -0.6415, -2.1639]), size=(2, 3), nnz=6, layout=torch.sparse_coo)
r""" Performs a matrix multiplication of the sparse matrix :attr:`mat1` and the (sparse or strided) matrix :attr:`mat2`. Similar to :func:`torch.mm`, If :attr:`mat1` is a :math:`(n \times m)` tensor, :attr:`mat2` is a :math:`(m \times p)` tensor, out will be a :math:`(n \times p)` tensor. :attr:`mat1` need to have `sparse_dim = 2`. This function also supports backward for both matrices. Note that the gradients of :attr:`mat1` is a coalesced sparse tensor.
[ "r", "Performs", "a", "matrix", "multiplication", "of", "the", "sparse", "matrix", ":", "attr", ":", "mat1", "and", "the", "(", "sparse", "or", "strided", ")", "matrix", ":", "attr", ":", "mat2", ".", "Similar", "to", ":", "func", ":", "torch", ".", ...
def mm(mat1: Tensor, mat2: Tensor) -> Tensor: r""" Performs a matrix multiplication of the sparse matrix :attr:`mat1` and the (sparse or strided) matrix :attr:`mat2`. Similar to :func:`torch.mm`, If :attr:`mat1` is a :math:`(n \times m)` tensor, :attr:`mat2` is a :math:`(m \times p)` tensor, out will be a :math:`(n \times p)` tensor. :attr:`mat1` need to have `sparse_dim = 2`. This function also supports backward for both matrices. Note that the gradients of :attr:`mat1` is a coalesced sparse tensor. Args: mat1 (SparseTensor): the first sparse matrix to be multiplied mat2 (Tensor): the second matrix to be multiplied, which could be sparse or dense Shape: The format of the output tensor of this function follows: - sparse x sparse -> sparse - sparse x dense -> dense Example:: >>> a = torch.randn(2, 3).to_sparse().requires_grad_(True) >>> a tensor(indices=tensor([[0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 1, 2]]), values=tensor([ 1.5901, 0.0183, -0.6146, 1.8061, -0.0112, 0.6302]), size=(2, 3), nnz=6, layout=torch.sparse_coo, requires_grad=True) >>> b = torch.randn(3, 2, requires_grad=True) >>> b tensor([[-0.6479, 0.7874], [-1.2056, 0.5641], [-1.1716, -0.9923]], requires_grad=True) >>> y = torch.sparse.mm(a, b) >>> y tensor([[-0.3323, 1.8723], [-1.8951, 0.7904]], grad_fn=<SparseAddmmBackward>) >>> y.sum().backward() >>> a.grad tensor(indices=tensor([[0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 1, 2]]), values=tensor([ 0.1394, -0.6415, -2.1639, 0.1394, -0.6415, -2.1639]), size=(2, 3), nnz=6, layout=torch.sparse_coo) """ if mat1.is_sparse and mat2.is_sparse: return torch._sparse_sparse_matmul(mat1, mat2) return torch._sparse_mm(mat1, mat2)
[ "def", "mm", "(", "mat1", ":", "Tensor", ",", "mat2", ":", "Tensor", ")", "->", "Tensor", ":", "if", "mat1", ".", "is_sparse", "and", "mat2", ".", "is_sparse", ":", "return", "torch", ".", "_sparse_sparse_matmul", "(", "mat1", ",", "mat2", ")", "return...
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/torch/sparse/__init__.py#L45-L91
devpack/android-python27
d42dd67565e104cf7b0b50eb473f615db3e69901
python-build-with-qt/sip-4.11.2/siputils.py
python
format
(msg, leftmargin=0, rightmargin=78)
return fmsg
Format a message by inserting line breaks at appropriate places. msg is the text of the message. leftmargin is the position of the left margin. rightmargin is the position of the right margin. Return the formatted message.
Format a message by inserting line breaks at appropriate places.
[ "Format", "a", "message", "by", "inserting", "line", "breaks", "at", "appropriate", "places", "." ]
def format(msg, leftmargin=0, rightmargin=78):
    """Format a message by inserting line breaks at appropriate places.

    msg is the text of the message.
    leftmargin is the position of the left margin.
    rightmargin is the position of the right margin.

    Return the formatted message.
    """
    indent = " " * leftmargin
    column = leftmargin
    formatted = indent
    for word in msg.split():
        width = len(word)
        # Break before the word if it would overflow the right margin,
        # unless we are already at the start of a fresh line.
        if column != leftmargin and column + width > rightmargin:
            formatted += "\n" + indent
            column = leftmargin
        # Separate words with a single space (not at line start).
        if column > leftmargin:
            formatted += " "
            column += 1
        formatted += word
        column += width
    return formatted
[ "def", "format", "(", "msg", ",", "leftmargin", "=", "0", ",", "rightmargin", "=", "78", ")", ":", "curs", "=", "leftmargin", "fmsg", "=", "\" \"", "*", "leftmargin", "for", "w", "in", "msg", ".", "split", "(", ")", ":", "l", "=", "len", "(", "w"...
https://github.com/devpack/android-python27/blob/d42dd67565e104cf7b0b50eb473f615db3e69901/python-build-with-qt/sip-4.11.2/siputils.py#L2131-L2156
hughperkins/tf-coriander
970d3df6c11400ad68405f22b0c42a52374e94ca
tensorflow/contrib/tensor_forest/python/tensor_forest.py
python
ForestStats.__init__
(self, tree_stats, params)
A simple container for stats about a forest.
A simple container for stats about a forest.
[ "A", "simple", "container", "for", "stats", "about", "a", "forest", "." ]
def __init__(self, tree_stats, params):
    """A simple container for stats about a forest."""
    # Plain value holder; no validation or copying is performed.
    self.params = params
    self.tree_stats = tree_stats
[ "def", "__init__", "(", "self", ",", "tree_stats", ",", "params", ")", ":", "self", ".", "tree_stats", "=", "tree_stats", "self", ".", "params", "=", "params" ]
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/contrib/tensor_forest/python/tensor_forest.py#L243-L246
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/scipy/linalg/_procrustes.py
python
orthogonal_procrustes
(A, B, check_finite=True)
return R, scale
Compute the matrix solution of the orthogonal Procrustes problem. Given matrices A and B of equal shape, find an orthogonal matrix R that most closely maps A to B [1]_. Note that unlike higher level Procrustes analyses of spatial data, this function only uses orthogonal transformations like rotations and reflections, and it does not use scaling or translation. Parameters ---------- A : (M, N) array_like Matrix to be mapped. B : (M, N) array_like Target matrix. check_finite : bool, optional Whether to check that the input matrices contain only finite numbers. Disabling may give a performance gain, but may result in problems (crashes, non-termination) if the inputs do contain infinities or NaNs. Returns ------- R : (N, N) ndarray The matrix solution of the orthogonal Procrustes problem. Minimizes the Frobenius norm of dot(A, R) - B, subject to dot(R.T, R) == I. scale : float Sum of the singular values of ``dot(A.T, B)``. Raises ------ ValueError If the input arrays are incompatibly shaped. This may also be raised if matrix A or B contains an inf or nan and check_finite is True, or if the matrix product AB contains an inf or nan. Notes ----- .. versionadded:: 0.15.0 References ---------- .. [1] Peter H. Schonemann, "A generalized solution of the orthogonal Procrustes problem", Psychometrica -- Vol. 31, No. 1, March, 1996.
Compute the matrix solution of the orthogonal Procrustes problem.
[ "Compute", "the", "matrix", "solution", "of", "the", "orthogonal", "Procrustes", "problem", "." ]
def orthogonal_procrustes(A, B, check_finite=True):
    """
    Compute the matrix solution of the orthogonal Procrustes problem.

    Given matrices A and B of equal shape, find an orthogonal matrix R
    that most closely maps A to B [1]_.
    Note that unlike higher level Procrustes analyses of spatial data, this
    function only uses orthogonal transformations like rotations and
    reflections, and it does not use scaling or translation.

    Parameters
    ----------
    A : (M, N) array_like
        Matrix to be mapped.
    B : (M, N) array_like
        Target matrix.
    check_finite : bool, optional
        Whether to check that the input matrices contain only finite numbers.
        Disabling may give a performance gain, but may result in problems
        (crashes, non-termination) if the inputs do contain infinities or NaNs.

    Returns
    -------
    R : (N, N) ndarray
        The matrix solution of the orthogonal Procrustes problem.
        Minimizes the Frobenius norm of dot(A, R) - B, subject to
        dot(R.T, R) == I.
    scale : float
        Sum of the singular values of ``dot(A.T, B)``.

    Raises
    ------
    ValueError
        If the input arrays are incompatibly shaped.
        This may also be raised if matrix A or B contains an inf or nan
        and check_finite is True, or if the matrix product AB contains
        an inf or nan.

    Notes
    -----
    .. versionadded:: 0.15.0

    References
    ----------
    .. [1] Peter H. Schonemann, "A generalized solution of the orthogonal
           Procrustes problem", Psychometrica -- Vol. 31, No. 1, March, 1996.
    """
    convert = np.asarray_chkfinite if check_finite else np.asanyarray
    A = convert(A)
    B = convert(B)
    if A.ndim != 2:
        raise ValueError('expected ndim to be 2, but observed %s' % A.ndim)
    if A.shape != B.shape:
        raise ValueError('the shapes of A and B differ (%s vs %s)' % (
            A.shape, B.shape))
    # Be clever with transposes, with the intention to save memory.
    u, w, vt = svd(B.T.dot(A).T)
    return u.dot(vt), w.sum()
[ "def", "orthogonal_procrustes", "(", "A", ",", "B", ",", "check_finite", "=", "True", ")", ":", "if", "check_finite", ":", "A", "=", "np", ".", "asarray_chkfinite", "(", "A", ")", "B", "=", "np", ".", "asarray_chkfinite", "(", "B", ")", "else", ":", ...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/linalg/_procrustes.py#L14-L77
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemFramework/v1/AWS/common-code/lib/OpenSSL/SSL.py
python
Connection.renegotiate_pending
(self)
return _lib.SSL_renegotiate_pending(self._ssl) == 1
Check if there's a renegotiation in progress, it will return False once a renegotiation is finished. :return: Whether there's a renegotiation in progress :rtype: bool
Check if there's a renegotiation in progress, it will return False once a renegotiation is finished.
[ "Check", "if", "there", "s", "a", "renegotiation", "in", "progress", "it", "will", "return", "False", "once", "a", "renegotiation", "is", "finished", "." ]
def renegotiate_pending(self):
    """
    Check if there's a renegotiation in progress, it will return False once
    a renegotiation is finished.

    :return: Whether there's a renegotiation in progress
    :rtype: bool
    """
    # The C API returns 1 while a renegotiation is pending, 0 otherwise.
    pending = _lib.SSL_renegotiate_pending(self._ssl)
    return pending == 1
[ "def", "renegotiate_pending", "(", "self", ")", ":", "return", "_lib", ".", "SSL_renegotiate_pending", "(", "self", ".", "_ssl", ")", "==", "1" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemFramework/v1/AWS/common-code/lib/OpenSSL/SSL.py#L1936-L1944
kushview/Element
1cc16380caa2ab79461246ba758b9de1f46db2a5
waflib/extras/pgicc.py
python
get_pgi_version
(conf, cc)
return version[0]
Find the version of a pgi compiler.
Find the version of a pgi compiler.
[ "Find", "the", "version", "of", "a", "pgi", "compiler", "." ]
def get_pgi_version(conf, cc):
    """Find the version of a pgi compiler."""
    signature_search = re.compile(r"The Portland Group", re.I).search

    cmd = cc + ['-V', '-E']  # Issue 1078, prevent wrappers from linking
    try:
        out, err = conf.cmd_and_log(cmd, output=0)
    except Errors.WafError:
        conf.fatal('Could not find pgi compiler %r' % cmd)

    # The vendor banner may appear on stdout or stderr.
    match = signature_search(out) if out else signature_search(err)
    if not match:
        conf.fatal('Could not verify PGI signature')

    cmd = cc + ['-help=variable']
    try:
        out, err = conf.cmd_and_log(cmd, output=0)
    except Errors.WafError:
        conf.fatal('Could not find pgi compiler %r' % cmd)

    version = re.findall(r'^COMPVER\s*=(.*)', out, re.M)
    if len(version) != 1:
        conf.fatal('Could not determine the compiler version')
    return version[0]
[ "def", "get_pgi_version", "(", "conf", ",", "cc", ")", ":", "version_re", "=", "re", ".", "compile", "(", "r\"The Portland Group\"", ",", "re", ".", "I", ")", ".", "search", "cmd", "=", "cc", "+", "[", "'-V'", ",", "'-E'", "]", "# Issue 1078, prevent wra...
https://github.com/kushview/Element/blob/1cc16380caa2ab79461246ba758b9de1f46db2a5/waflib/extras/pgicc.py#L39-L66
ValveSoftware/source-sdk-2013
0d8dceea4310fde5706b3ce1c70609d72a38efdf
mp/src/thirdparty/protobuf-2.3.0/python/google/protobuf/internal/containers.py
python
BaseContainer.__getitem__
(self, key)
return self._values[key]
Retrieves item by the specified key.
Retrieves item by the specified key.
[ "Retrieves", "item", "by", "the", "specified", "key", "." ]
def __getitem__(self, key):
    """Retrieves item by the specified key."""
    # Indexing delegates directly to the wrapped value list, so slices and
    # negative indices behave as they do for a plain list.
    value = self._values[key]
    return value
[ "def", "__getitem__", "(", "self", ",", "key", ")", ":", "return", "self", ".", "_values", "[", "key", "]" ]
https://github.com/ValveSoftware/source-sdk-2013/blob/0d8dceea4310fde5706b3ce1c70609d72a38efdf/mp/src/thirdparty/protobuf-2.3.0/python/google/protobuf/internal/containers.py#L62-L64
PaddlePaddle/Paddle
1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c
python/paddle/distributed/collective.py
python
barrier
(group=None)
Barrier among all participators in the group. Args: group (Group): The group instance return by new_group or None for global default group. Returns: None. Examples: .. code-block:: python import paddle from paddle.distributed import init_parallel_env paddle.set_device('gpu:%d'%paddle.distributed.ParallelEnv().dev_id) init_parallel_env() paddle.distributed.barrier()
[]
def barrier(group=None):
    """
    Barrier among all participators in the group.

    Args:
        group (Group): The group instance return by new_group or None for
            global default group.

    Returns:
        None.

    Examples:
        .. code-block:: python

            import paddle
            from paddle.distributed import init_parallel_env

            paddle.set_device('gpu:%d'%paddle.distributed.ParallelEnv().dev_id)
            init_parallel_env()
            paddle.distributed.barrier()
    """
    # Ranks that are not members of the group have nothing to synchronize.
    if group is not None and not group.is_member():
        return
    # ring_id selects the communication ring; 0 is the global default group.
    ring_id = 0 if group is None else group.id

    # A dummy int32 tensor serves as both input and output of the barrier op.
    temp = fill_constant([1], dtype="int32", value="1")
    if in_dygraph_mode():
        # Eager path: dispatch straight to the C++ barrier op.
        return _C_ops.barrier(temp, temp, 'ring_id', ring_id)

    op_type = 'barrier'
    if not isinstance(ring_id, int):
        raise ValueError("The type of 'group' for barrier must be int.")
    # NOTE(review): LayerHelper consumes **locals(); adding or renaming any
    # local above this line would change what gets passed to it.
    helper = LayerHelper(op_type, **locals())
    helper.append_op(
        type=op_type,
        inputs={'X': [temp]},
        outputs={'Out': [temp]},
        attrs={'ring_id': ring_id})
[ "def", "barrier", "(", "group", "=", "None", ")", ":", "if", "group", "is", "not", "None", "and", "not", "group", ".", "is_member", "(", ")", ":", "return", "ring_id", "=", "0", "if", "group", "is", "None", "else", "group", ".", "id", "temp", "=", ...
https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/distributed/collective.py#L167-L206
apache/mesos
97d9a4063332aae3825d78de71611657e05cf5e2
support/cpplint.py
python
CloseExpression
(clean_lines, linenum, pos)
return (line, clean_lines.NumLines(), -1)
If input points to ( or { or [ or <, finds the position that closes it. If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the linenum/pos that correspond to the closing of the expression. TODO(unknown): cpplint spends a fair bit of time matching parentheses. Ideally we would want to index all opening and closing parentheses once and have CloseExpression be just a simple lookup, but due to preprocessor tricks, this is not so easy. Args: clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. pos: A position on the line. Returns: A tuple (line, linenum, pos) pointer *past* the closing brace, or (line, len(lines), -1) if we never find a close. Note we ignore strings and comments when matching; and the line we return is the 'cleansed' line at linenum.
If input points to ( or { or [ or <, finds the position that closes it.
[ "If", "input", "points", "to", "(", "or", "{", "or", "[", "or", "<", "finds", "the", "position", "that", "closes", "it", "." ]
def CloseExpression(clean_lines, linenum, pos):
    """If input points to ( or { or [ or <, finds the position that closes it.

    If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the
    linenum/pos that correspond to the closing of the expression.

    TODO(unknown): cpplint spends a fair bit of time matching parentheses.
    Ideally we would want to index all opening and closing parentheses once
    and have CloseExpression be just a simple lookup, but due to preprocessor
    tricks, this is not so easy.

    Args:
      clean_lines: A CleansedLines instance containing the file.
      linenum: The number of the line to check.
      pos: A position on the line.

    Returns:
      A tuple (line, linenum, pos) pointer *past* the closing brace, or
      (line, len(lines), -1) if we never find a close.  Note we ignore
      strings and comments when matching; and the line we return is the
      'cleansed' line at linenum.
    """
    line = clean_lines.elided[linenum]
    # Shift operators and <= are not open brackets.
    if (line[pos] not in '({[<') or Match(r'<[<=]', line[pos:]):
        return (line, clean_lines.NumLines(), -1)

    # Scan the current line first, then keep scanning forward while the
    # bracket stack remains unbalanced.
    end_pos, stack = FindEndOfExpressionInLine(line, pos, [])
    while end_pos < 0:
        if not stack or linenum >= clean_lines.NumLines() - 1:
            # Ran out of input without finding the close; give up.
            return (line, clean_lines.NumLines(), -1)
        linenum += 1
        line = clean_lines.elided[linenum]
        end_pos, stack = FindEndOfExpressionInLine(line, 0, stack)
    return (line, linenum, end_pos)
[ "def", "CloseExpression", "(", "clean_lines", ",", "linenum", ",", "pos", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "if", "(", "line", "[", "pos", "]", "not", "in", "'({[<'", ")", "or", "Match", "(", "r'<[<=]'", ",", "...
https://github.com/apache/mesos/blob/97d9a4063332aae3825d78de71611657e05cf5e2/support/cpplint.py#L1602-L1643
pmq20/node-packer
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
current/tools/gyp/pylib/gyp/msvs_emulation.py
python
_DoRemapping
(element, map)
return element
If |element| then remap it through |map|. If |element| is iterable then each item will be remapped. Any elements not found will be removed.
If |element| then remap it through |map|. If |element| is iterable then each item will be remapped. Any elements not found will be removed.
[ "If", "|element|", "then", "remap", "it", "through", "|map|", ".", "If", "|element|", "is", "iterable", "then", "each", "item", "will", "be", "remapped", ".", "Any", "elements", "not", "found", "will", "be", "removed", "." ]
def _DoRemapping(element, map): """If |element| then remap it through |map|. If |element| is iterable then each item will be remapped. Any elements not found will be removed.""" if map is not None and element is not None: if not callable(map): map = map.get # Assume it's a dict, otherwise a callable to do the remap. if isinstance(element, list) or isinstance(element, tuple): element = filter(None, [map(elem) for elem in element]) else: element = map(element) return element
[ "def", "_DoRemapping", "(", "element", ",", "map", ")", ":", "if", "map", "is", "not", "None", "and", "element", "is", "not", "None", ":", "if", "not", "callable", "(", "map", ")", ":", "map", "=", "map", ".", "get", "# Assume it's a dict, otherwise a ca...
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/current/tools/gyp/pylib/gyp/msvs_emulation.py#L95-L105
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/third_party/lib_x86_64/python2.7/dist-packages/rosdep2/installers.py
python
PackageManagerInstaller.resolve
(self, rosdep_args)
return packages
See :meth:`Installer.resolve()`
See :meth:`Installer.resolve()`
[ "See", ":", "meth", ":", "Installer", ".", "resolve", "()" ]
def resolve(self, rosdep_args):
    """
    See :meth:`Installer.resolve()`

    Accepts either a dict with a "packages" key (value may be a
    whitespace-separated string or a list), a bare string of
    space-separated package names, or a list of package names.

    :raises: :exc:`InvalidData` if *rosdep_args* is none of the above.
    """
    packages = None
    # isinstance() instead of type() equality: accepts dict/str/list
    # subclasses too, which the exact-type checks rejected.
    if isinstance(rosdep_args, dict):
        packages = rosdep_args.get("packages", [])
        if isinstance(packages, str):
            # Dict form splits on any whitespace.
            packages = packages.split()
    elif isinstance(rosdep_args, str):
        # NOTE: historical behavior — the bare-string form splits on single
        # spaces only, unlike the dict form above.
        packages = rosdep_args.split(' ')
    elif isinstance(rosdep_args, list):
        packages = rosdep_args
    else:
        raise InvalidData("Invalid rosdep args: %s"%(rosdep_args))
    return packages
[ "def", "resolve", "(", "self", ",", "rosdep_args", ")", ":", "packages", "=", "None", "if", "type", "(", "rosdep_args", ")", "==", "dict", ":", "packages", "=", "rosdep_args", ".", "get", "(", "\"packages\"", ",", "[", "]", ")", "if", "type", "(", "p...
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_x86_64/python2.7/dist-packages/rosdep2/installers.py#L332-L347
Netflix/NfWebCrypto
499faf4eb9f9ccf0b21dc728e974970f54bd6c52
plugin/ppapi/ppapi/native_client/src/untrusted/pnacl_support_extension/pnacl_component_crx_gen.py
python
GetWebAccessibleResources
(base_dir)
return resources
Return the default list of web_accessible_resources to allow us to do a CORS request to get extension files.
Return the default list of web_accessible_resources to allow us to do a CORS request to get extension files.
[ "Return", "the", "default", "list", "of", "web_accessible_resources", "to", "allow", "us", "to", "do", "a", "CORS", "request", "to", "get", "extension", "files", "." ]
def GetWebAccessibleResources(base_dir):
  """Return the default list of web_accessible_resources to allow us to do a
  CORS request to get extension files."""
  web_resources = ListDirectoryRecursivelyAsURLs(base_dir)
  # The pnacl.json file must also be reachable from the web.
  web_resources.append(os.path.basename(PnaclPackaging.pnacl_json))
  return web_resources
[ "def", "GetWebAccessibleResources", "(", "base_dir", ")", ":", "resources", "=", "ListDirectoryRecursivelyAsURLs", "(", "base_dir", ")", "# Make sure that the pnacl.json file is accessible.", "resources", ".", "append", "(", "os", ".", "path", ".", "basename", "(", "Pna...
https://github.com/Netflix/NfWebCrypto/blob/499faf4eb9f9ccf0b21dc728e974970f54bd6c52/plugin/ppapi/ppapi/native_client/src/untrusted/pnacl_support_extension/pnacl_component_crx_gen.py#L342-L348
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/lib-tk/ttk.py
python
Scale.configure
(self, cnf=None, **kw)
Modify or query scale options. Setting a value for any of the "from", "from_" or "to" options generates a <<RangeChanged>> event.
Modify or query scale options.
[ "Modify", "or", "query", "scale", "options", "." ]
def configure(self, cnf=None, **kw):
    """Modify or query scale options.

    Setting a value for any of the "from", "from_" or "to" options
    generates a <<RangeChanged>> event."""
    # Merge the positional option dict into kw so both call styles
    # (dict argument and keyword arguments) go through one code path;
    # values from cnf take precedence.
    if cnf:
        kw.update(cnf)
    Widget.configure(self, **kw)
    # Notify listeners that the scale's range may have changed.
    if any(['from' in kw, 'from_' in kw, 'to' in kw]):
        self.event_generate('<<RangeChanged>>')
[ "def", "configure", "(", "self", ",", "cnf", "=", "None", ",", "*", "*", "kw", ")", ":", "if", "cnf", ":", "kw", ".", "update", "(", "cnf", ")", "Widget", ".", "configure", "(", "self", ",", "*", "*", "kw", ")", "if", "any", "(", "[", "'from'...
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/lib-tk/ttk.py#L1077-L1086
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/tornado/tornado-6/tornado/web.py
python
StaticFileHandler.compute_etag
(self)
return '"%s"' % (version_hash,)
Sets the ``Etag`` header based on static url version. This allows efficient ``If-None-Match`` checks against cached versions, and sends the correct ``Etag`` for a partial response (i.e. the same ``Etag`` as the full file). .. versionadded:: 3.1
Sets the ``Etag`` header based on static url version.
[ "Sets", "the", "Etag", "header", "based", "on", "static", "url", "version", "." ]
def compute_etag(self) -> Optional[str]:
    """Sets the ``Etag`` header based on static url version.

    This allows efficient ``If-None-Match`` checks against cached
    versions, and sends the correct ``Etag`` for a partial response
    (i.e. the same ``Etag`` as the full file).

    .. versionadded:: 3.1
    """
    assert self.absolute_path is not None
    cached = self._get_cached_version(self.absolute_path)
    # No cached version hash means we cannot produce an Etag at all.
    return '"%s"' % (cached,) if cached else None
[ "def", "compute_etag", "(", "self", ")", "->", "Optional", "[", "str", "]", ":", "assert", "self", ".", "absolute_path", "is", "not", "None", "version_hash", "=", "self", ".", "_get_cached_version", "(", "self", ".", "absolute_path", ")", "if", "not", "ver...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/tornado/tornado-6/tornado/web.py#L2654-L2667
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
src/python/turicreate/util/_cloudpickle/_cloudpickle.py
python
_is_importable
(obj, name=None)
Dispatcher utility to test the importability of various constructs.
Dispatcher utility to test the importability of various constructs.
[ "Dispatcher", "utility", "to", "test", "the", "importability", "of", "various", "constructs", "." ]
def _is_importable(obj, name=None): """Dispatcher utility to test the importability of various constructs.""" if isinstance(obj, types.FunctionType): return _lookup_module_and_qualname(obj, name=name) is not None elif issubclass(type(obj), type): return _lookup_module_and_qualname(obj, name=name) is not None elif isinstance(obj, types.ModuleType): # We assume that sys.modules is primarily used as a cache mechanism for # the Python import machinery. Checking if a module has been added in # is sys.modules therefore a cheap and simple heuristic to tell us whether # we can assume that a given module could be imported by name in # another Python process. return obj.__name__ in sys.modules else: raise TypeError( "cannot check importability of {} instances".format( type(obj).__name__) )
[ "def", "_is_importable", "(", "obj", ",", "name", "=", "None", ")", ":", "if", "isinstance", "(", "obj", ",", "types", ".", "FunctionType", ")", ":", "return", "_lookup_module_and_qualname", "(", "obj", ",", "name", "=", "name", ")", "is", "not", "None",...
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/python/turicreate/util/_cloudpickle/_cloudpickle.py#L176-L193
RamadhanAmizudin/malware
2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1
GMBot/gmbot/apps/smsg_r/smsapp/commands.py
python
block_phone
(phone, number)
Block specified number at this phone @param phone: the phone object @type phone: PhoneData @param number: Number to block
Block specified number at this phone
[ "Block", "specified", "number", "at", "this", "phone" ]
def block_phone(phone, number):
    """Block the given number at the specified phone.

    @param phone: the phone object
    @type phone: PhoneData
    @param number: Number to block
    """
    logger.debug(
        "Phone {0} sent command to add blocked number {1}".format(phone, number))
    blocked_cmd = "#block_numbers {0}".format(number)
    command_queue.add_command(phone.uniq_id, None, blocked_cmd)
[ "def", "block_phone", "(", "phone", ",", "number", ")", ":", "logger", ".", "debug", "(", "\"Phone {0} sent command to add blocked number {1}\"", ".", "format", "(", "phone", ",", "number", ")", ")", "command_queue", ".", "add_command", "(", "phone", ".", "uniq_...
https://github.com/RamadhanAmizudin/malware/blob/2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1/GMBot/gmbot/apps/smsg_r/smsapp/commands.py#L162-L170
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/propgrid.py
python
PropertyGridInterface.IsPropertyShown
(*args, **kwargs)
return _propgrid.PropertyGridInterface_IsPropertyShown(*args, **kwargs)
IsPropertyShown(self, PGPropArg id) -> bool
IsPropertyShown(self, PGPropArg id) -> bool
[ "IsPropertyShown", "(", "self", "PGPropArg", "id", ")", "-", ">", "bool" ]
def IsPropertyShown(*args, **kwargs):
    """IsPropertyShown(self, PGPropArg id) -> bool"""
    # SWIG-generated thin wrapper: forwards directly to the native
    # extension-module implementation.
    return _propgrid.PropertyGridInterface_IsPropertyShown(*args, **kwargs)
[ "def", "IsPropertyShown", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_propgrid", ".", "PropertyGridInterface_IsPropertyShown", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/propgrid.py#L1325-L1327
schwehr/libais
1e19605942c8e155cd02fde6d1acde75ecd15d75
third_party/gmock/scripts/upload.py
python
RealMain
(argv, data=None)
return issue, patchset
The real main function. Args: argv: Command line arguments. data: Diff contents. If None (default) the diff is generated by the VersionControlSystem implementation returned by GuessVCS(). Returns: A 2-tuple (issue id, patchset id). The patchset id is None if the base files are not uploaded by this script (applies only to SVN checkouts).
The real main function.
[ "The", "real", "main", "function", "." ]
def RealMain(argv, data=None):
  """The real main function.

  Args:
    argv: Command line arguments.
    data: Diff contents. If None (default) the diff is generated by the
      VersionControlSystem implementation returned by GuessVCS().

  Returns:
    A 2-tuple (issue id, patchset id).  The patchset id is None if the base
    files are not uploaded by this script (applies only to SVN checkouts).
  """
  logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
                              "%(lineno)s %(message)s "))
  # Force the C locale so VCS tool output parses predictably.
  os.environ['LC_ALL'] = 'C'
  options, args = parser.parse_args(argv[1:])
  global verbosity
  verbosity = options.verbose
  if verbosity >= 3:
    logging.getLogger().setLevel(logging.DEBUG)
  elif verbosity >= 2:
    logging.getLogger().setLevel(logging.INFO)
  vcs = GuessVCS(options)
  if isinstance(vcs, SubversionVCS):
    # base field is only allowed for Subversion.
    # Note: Fetching base files may become deprecated in future releases.
    base = vcs.GuessBase(options.download_base)
  else:
    base = None
  if not base and options.download_base:
    options.download_base = True
    logging.info("Enabled upload of base file")
  if not options.assume_yes:
    vcs.CheckForUnknownFiles()
  if data is None:
    data = vcs.GenerateDiff(args)
  files = vcs.GetBaseFiles(data)
  if verbosity >= 1:
    print "Upload server:", options.server, "(change with -s/--server)"
  if options.issue:
    prompt = "Message describing this patch set: "
  else:
    prompt = "New issue subject: "
  message = options.message or raw_input(prompt).strip()
  if not message:
    ErrorExit("A non-empty message is required")
  rpc_server = GetRpcServer(options)
  # Build the multipart form fields for the upload request.
  form_fields = [("subject", message)]
  if base:
    form_fields.append(("base", base))
  if options.issue:
    form_fields.append(("issue", str(options.issue)))
  if options.email:
    form_fields.append(("user", options.email))
  if options.reviewers:
    for reviewer in options.reviewers.split(','):
      if "@" in reviewer and not reviewer.split("@")[1].count(".") == 1:
        ErrorExit("Invalid email address: %s" % reviewer)
    form_fields.append(("reviewers", options.reviewers))
  if options.cc:
    for cc in options.cc.split(','):
      if "@" in cc and not cc.split("@")[1].count(".") == 1:
        ErrorExit("Invalid email address: %s" % cc)
    form_fields.append(("cc", options.cc))
  description = options.description
  if options.description_file:
    if options.description:
      ErrorExit("Can't specify description and description_file")
    file = open(options.description_file, 'r')
    description = file.read()
    file.close()
  if description:
    form_fields.append(("description", description))
  # Send a hash of all the base file so the server can determine if a copy
  # already exists in an earlier patchset.
  base_hashes = ""
  for file, info in files.iteritems():
    if not info[0] is None:
      checksum = md5.new(info[0]).hexdigest()
      if base_hashes:
        base_hashes += "|"
      base_hashes += checksum + ":" + file
  form_fields.append(("base_hashes", base_hashes))
  # If we're uploading base files, don't send the email before the uploads, so
  # that it contains the file status.
  if options.send_mail and options.download_base:
    form_fields.append(("send_mail", "1"))
  if not options.download_base:
    form_fields.append(("content_upload", "1"))
  if len(data) > MAX_UPLOAD_SIZE:
    print "Patch is large, so uploading file patches separately."
    uploaded_diff_file = []
    form_fields.append(("separate_patches", "1"))
  else:
    uploaded_diff_file = [("data", "data.diff", data)]
  ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
  response_body = rpc_server.Send("/upload", body, content_type=ctype)
  patchset = None
  # Parse the server response: first line is the status message, second the
  # patchset id, remaining lines list per-file patch ids.
  if not options.download_base or not uploaded_diff_file:
    lines = response_body.splitlines()
    if len(lines) >= 2:
      msg = lines[0]
      patchset = lines[1].strip()
      patches = [x.split(" ", 1) for x in lines[2:]]
    else:
      msg = response_body
  else:
    msg = response_body
  StatusUpdate(msg)
  if not response_body.startswith("Issue created.") and \
     not response_body.startswith("Issue updated."):
    sys.exit(0)
  # The issue id is the last path component of the returned issue URL.
  issue = msg[msg.rfind("/")+1:]
  if not uploaded_diff_file:
    result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
    if not options.download_base:
      patches = result
  if not options.download_base:
    vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
  if options.send_mail:
    rpc_server.Send("/" + issue + "/mail", payload="")
  return issue, patchset
[ "def", "RealMain", "(", "argv", ",", "data", "=", "None", ")", ":", "logging", ".", "basicConfig", "(", "format", "=", "(", "\"%(asctime).19s %(levelname)s %(filename)s:\"", "\"%(lineno)s %(message)s \"", ")", ")", "os", ".", "environ", "[", "'LC_ALL'", "]", "="...
https://github.com/schwehr/libais/blob/1e19605942c8e155cd02fde6d1acde75ecd15d75/third_party/gmock/scripts/upload.py#L1250-L1374
microsoft/checkedc-clang
a173fefde5d7877b7750e7ce96dd08cf18baebf2
libcxx/utils/google-benchmark/tools/gbench/util.py
python
check_input_file
(filename)
return ftype
Classify the file named by 'filename' and return the classification. If the file is classified as 'IT_Invalid' print an error message and exit the program.
Classify the file named by 'filename' and return the classification. If the file is classified as 'IT_Invalid' print an error message and exit the program.
[ "Classify", "the", "file", "named", "by", "filename", "and", "return", "the", "classification", ".", "If", "the", "file", "is", "classified", "as", "IT_Invalid", "print", "an", "error", "message", "and", "exit", "the", "program", "." ]
def check_input_file(filename):
    """
    Classify the file named by 'filename' and return the classification.
    If the file is classified as 'IT_Invalid' print an error message and exit
    the program.
    """
    classification, reason = classify_input_file(filename)
    # Early return on the valid path; the invalid path terminates the process.
    if classification != IT_Invalid:
        return classification
    print("Invalid input file: %s" % reason)
    sys.exit(1)
[ "def", "check_input_file", "(", "filename", ")", ":", "ftype", ",", "msg", "=", "classify_input_file", "(", "filename", ")", "if", "ftype", "==", "IT_Invalid", ":", "print", "(", "\"Invalid input file: %s\"", "%", "msg", ")", "sys", ".", "exit", "(", "1", ...
https://github.com/microsoft/checkedc-clang/blob/a173fefde5d7877b7750e7ce96dd08cf18baebf2/libcxx/utils/google-benchmark/tools/gbench/util.py#L77-L87
Illumina/strelka
d7377443b62319f7c7bd70c241c4b2df3459e29a
src/python/scoringModelTraining/somatic/lib/evs/somatic_rf.py
python
SomaticRF.plots
(self, prefix, featurenames)
Make diagnostic plots
Make diagnostic plots
[ "Make", "diagnostic", "plots" ]
def plots(self, prefix, featurenames):
    """ Make diagnostic plots """
    # NOTE(review): `prefix` is unused in this body — presumably intended
    # for naming plot output files; confirm against callers.
    importances = self.clf.feature_importances_
    # Std-dev of each feature's importance across the individual trees gives
    # an error estimate for the ranking printed below.
    std = np.std([tree.feature_importances_ for tree in self.clf.estimators_],
                 axis=0)
    # Feature indices sorted by decreasing importance.
    indices = np.argsort(importances)[::-1]

    # Print the feature ranking
    print "Feature ranking:"

    for f in xrange(0, len(indices)):
        print "%d. feature %d:%s (%f +- %f)" % (f + 1, indices[f],
                                                featurenames[indices[f]],
                                                importances[indices[f]],
                                                std[indices[f]])
[ "def", "plots", "(", "self", ",", "prefix", ",", "featurenames", ")", ":", "importances", "=", "self", ".", "clf", ".", "feature_importances_", "std", "=", "np", ".", "std", "(", "[", "tree", ".", "feature_importances_", "for", "tree", "in", "self", ".",...
https://github.com/Illumina/strelka/blob/d7377443b62319f7c7bd70c241c4b2df3459e29a/src/python/scoringModelTraining/somatic/lib/evs/somatic_rf.py#L103-L117
intel/caffe
3f494b442ee3f9d17a07b09ecbd5fa2bbda00836
examples/faster-rcnn/lib/roi_data_layer/layer.py
python
RoIDataLayer.reshape
(self, bottom, top)
Reshaping happens during the call to forward.
Reshaping happens during the call to forward.
[ "Reshaping", "happens", "during", "the", "call", "to", "forward", "." ]
def reshape(self, bottom, top):
    """Reshaping happens during the call to forward."""
    # Intentional no-op: per the docstring, reshaping is deferred to forward().
    pass
[ "def", "reshape", "(", "self", ",", "bottom", ",", "top", ")", ":", "pass" ]
https://github.com/intel/caffe/blob/3f494b442ee3f9d17a07b09ecbd5fa2bbda00836/examples/faster-rcnn/lib/roi_data_layer/layer.py#L157-L159
alibaba/MNN
c4d9566171d589c3ded23aa18ffb197016995a12
3rd_party/flatbuffers/conanfile.py
python
FlatbuffersConan.build
(self)
Configure, build and install FlatBuffers using CMake.
Configure, build and install FlatBuffers using CMake.
[ "Configure", "build", "and", "install", "FlatBuffers", "using", "CMake", "." ]
def build(self):
    """Configure, build and install FlatBuffers using CMake."""
    # configure_cmake() returns the configured CMake driver; build it directly.
    self.configure_cmake().build()
[ "def", "build", "(", "self", ")", ":", "cmake", "=", "self", ".", "configure_cmake", "(", ")", "cmake", ".", "build", "(", ")" ]
https://github.com/alibaba/MNN/blob/c4d9566171d589c3ded23aa18ffb197016995a12/3rd_party/flatbuffers/conanfile.py#L48-L52
giuspen/cherrytree
84712f206478fcf9acf30174009ad28c648c6344
pygtk2/modules/exports.py
python
Export2Txt.node_export_to_txt
(self, text_buffer, filepath, sel_range=None, tree_iter_for_node_name=None, check_link_target=False)
return plain_text
Export the Selected Node To Txt
Export the Selected Node To Txt
[ "Export", "the", "Selected", "Node", "To", "Txt" ]
def node_export_to_txt(self, text_buffer, filepath, sel_range=None, tree_iter_for_node_name=None, check_link_target=False):
    """Export the Selected Node To Txt

    Returns the node's plain text; when `filepath` is given, the text is
    also appended to that file followed by two newlines.
    """
    plain_text = ""
    # text_n_objects[0]: plain text slots; text_n_objects[1]: embedded
    # objects (tables/codeboxes) interleaved between the slots.
    text_n_objects = self.plain_get_from_treestore_node(text_buffer, sel_range, check_link_target)
    self.images_count = 0
    for i, plain_slot in enumerate(text_n_objects[0]):
        plain_text += plain_slot
        if i < len(text_n_objects[1]):
            curr_object = text_n_objects[1][i]
            if curr_object[0] == "table":
                plain_text += self.get_table_plain(curr_object[1])
            elif curr_object[0] == "codebox":
                plain_text += self.get_codebox_plain(curr_object[1])
    if tree_iter_for_node_name:
        # Prepend the node name (uppercased) as a heading line.
        node_name = clean_text_to_utf8(self.dad.treestore[tree_iter_for_node_name][1])
        plain_text = node_name.upper() + cons.CHAR_NEWLINE + plain_text
    if filepath:
        # Context manager guarantees the descriptor is closed even if the
        # write raises (the original leaked the handle on error).
        with open(filepath, 'a') as file_descriptor:
            file_descriptor.write(plain_text + 2*cons.CHAR_NEWLINE)
    return plain_text
[ "def", "node_export_to_txt", "(", "self", ",", "text_buffer", ",", "filepath", ",", "sel_range", "=", "None", ",", "tree_iter_for_node_name", "=", "None", ",", "check_link_target", "=", "False", ")", ":", "plain_text", "=", "\"\"", "text_n_objects", "=", "self",...
https://github.com/giuspen/cherrytree/blob/84712f206478fcf9acf30174009ad28c648c6344/pygtk2/modules/exports.py#L408-L426
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py2/scipy/spatial/kdtree.py
python
KDTree.query
(self, x, k=1, eps=0, p=2, distance_upper_bound=np.inf)
Query the kd-tree for nearest neighbors Parameters ---------- x : array_like, last dimension self.m An array of points to query. k : int, optional The number of nearest neighbors to return. eps : nonnegative float, optional Return approximate nearest neighbors; the kth returned value is guaranteed to be no further than (1+eps) times the distance to the real kth nearest neighbor. p : float, 1<=p<=infinity, optional Which Minkowski p-norm to use. 1 is the sum-of-absolute-values "Manhattan" distance 2 is the usual Euclidean distance infinity is the maximum-coordinate-difference distance distance_upper_bound : nonnegative float, optional Return only neighbors within this distance. This is used to prune tree searches, so if you are doing a series of nearest-neighbor queries, it may help to supply the distance to the nearest neighbor of the most recent point. Returns ------- d : float or array of floats The distances to the nearest neighbors. If x has shape tuple+(self.m,), then d has shape tuple if k is one, or tuple+(k,) if k is larger than one. Missing neighbors (e.g. when k > n or distance_upper_bound is given) are indicated with infinite distances. If k is None, then d is an object array of shape tuple, containing lists of distances. In either case the hits are sorted by distance (nearest first). i : integer or array of integers The locations of the neighbors in self.data. i is the same shape as d. Examples -------- >>> from scipy import spatial >>> x, y = np.mgrid[0:5, 2:8] >>> tree = spatial.KDTree(list(zip(x.ravel(), y.ravel()))) >>> tree.data array([[0, 2], [0, 3], [0, 4], [0, 5], [0, 6], [0, 7], [1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 2], [2, 3], [2, 4], [2, 5], [2, 6], [2, 7], [3, 2], [3, 3], [3, 4], [3, 5], [3, 6], [3, 7], [4, 2], [4, 3], [4, 4], [4, 5], [4, 6], [4, 7]]) >>> pts = np.array([[0, 0], [2.1, 2.9]]) >>> tree.query(pts) (array([ 2. , 0.14142136]), array([ 0, 13])) >>> tree.query(pts[0]) (2.0, 0)
Query the kd-tree for nearest neighbors
[ "Query", "the", "kd", "-", "tree", "for", "nearest", "neighbors" ]
def query(self, x, k=1, eps=0, p=2, distance_upper_bound=np.inf):
    """
    Query the kd-tree for nearest neighbors

    Parameters
    ----------
    x : array_like, last dimension self.m
        An array of points to query.
    k : int, optional
        The number of nearest neighbors to return.
    eps : nonnegative float, optional
        Return approximate nearest neighbors; the kth returned value
        is guaranteed to be no further than (1+eps) times the
        distance to the real kth nearest neighbor.
    p : float, 1<=p<=infinity, optional
        Which Minkowski p-norm to use.
        1 is the sum-of-absolute-values "Manhattan" distance
        2 is the usual Euclidean distance
        infinity is the maximum-coordinate-difference distance
    distance_upper_bound : nonnegative float, optional
        Return only neighbors within this distance. This is used to prune
        tree searches, so if you are doing a series of nearest-neighbor
        queries, it may help to supply the distance to the nearest neighbor
        of the most recent point.

    Returns
    -------
    d : float or array of floats
        The distances to the nearest neighbors.
        If x has shape tuple+(self.m,), then d has shape tuple if
        k is one, or tuple+(k,) if k is larger than one. Missing
        neighbors (e.g. when k > n or distance_upper_bound is
        given) are indicated with infinite distances.  If k is None,
        then d is an object array of shape tuple, containing lists
        of distances. In either case the hits are sorted by distance
        (nearest first).
    i : integer or array of integers
        The locations of the neighbors in self.data. i is the same
        shape as d.

    Examples
    --------
    >>> from scipy import spatial
    >>> x, y = np.mgrid[0:5, 2:8]
    >>> tree = spatial.KDTree(list(zip(x.ravel(), y.ravel())))
    >>> tree.data
    array([[0, 2], [0, 3], [0, 4], [0, 5], [0, 6], [0, 7], [1, 2], [1, 3],
           [1, 4], [1, 5], [1, 6], [1, 7], [2, 2], [2, 3], [2, 4], [2, 5],
           [2, 6], [2, 7], [3, 2], [3, 3], [3, 4], [3, 5], [3, 6], [3, 7],
           [4, 2], [4, 3], [4, 4], [4, 5], [4, 6], [4, 7]])
    >>> pts = np.array([[0, 0], [2.1, 2.9]])
    >>> tree.query(pts)
    (array([ 2.        ,  0.14142136]), array([ 0, 13]))
    >>> tree.query(pts[0])
    (2.0, 0)

    """
    x = np.asarray(x)
    # Every query vector must have the tree's dimensionality m.
    if np.shape(x)[-1] != self.m:
        raise ValueError("x must consist of vectors of length %d but has shape %s" % (self.m, np.shape(x)))
    if p < 1:
        raise ValueError("Only p-norms with 1<=p<=infinity permitted")
    # retshape is the batch shape of x (everything except the vector axis).
    retshape = np.shape(x)[:-1]
    if retshape != ():
        # Batched queries: pre-allocate result arrays, filled with the
        # missing-neighbor sentinels (inf distance, index self.n).
        if k is None:
            dd = np.empty(retshape,dtype=object)
            ii = np.empty(retshape,dtype=object)
        elif k > 1:
            dd = np.empty(retshape+(k,),dtype=float)
            dd.fill(np.inf)
            ii = np.empty(retshape+(k,),dtype=int)
            ii.fill(self.n)
        elif k == 1:
            dd = np.empty(retshape,dtype=float)
            dd.fill(np.inf)
            ii = np.empty(retshape,dtype=int)
            ii.fill(self.n)
        else:
            raise ValueError("Requested %s nearest neighbors; acceptable numbers are integers greater than or equal to one, or None")
        # Query each point in the batch individually.
        for c in np.ndindex(retshape):
            hits = self.__query(x[c], k=k, eps=eps, p=p, distance_upper_bound=distance_upper_bound)
            if k is None:
                dd[c] = [d for (d,i) in hits]
                ii[c] = [i for (d,i) in hits]
            elif k > 1:
                for j in range(len(hits)):
                    dd[c+(j,)], ii[c+(j,)] = hits[j]
            elif k == 1:
                if len(hits) > 0:
                    dd[c], ii[c] = hits[0]
                else:
                    dd[c] = np.inf
                    ii[c] = self.n
        return dd, ii
    else:
        # Single query point: return scalars/1-D arrays instead of batches.
        hits = self.__query(x, k=k, eps=eps, p=p, distance_upper_bound=distance_upper_bound)
        if k is None:
            return [d for (d,i) in hits], [i for (d,i) in hits]
        elif k == 1:
            if len(hits) > 0:
                return hits[0]
            else:
                return np.inf, self.n
        elif k > 1:
            dd = np.empty(k,dtype=float)
            dd.fill(np.inf)
            ii = np.empty(k,dtype=int)
            ii.fill(self.n)
            for j in range(len(hits)):
                dd[j], ii[j] = hits[j]
            return dd, ii
        else:
            raise ValueError("Requested %s nearest neighbors; acceptable numbers are integers greater than or equal to one, or None")
[ "def", "query", "(", "self", ",", "x", ",", "k", "=", "1", ",", "eps", "=", "0", ",", "p", "=", "2", ",", "distance_upper_bound", "=", "np", ".", "inf", ")", ":", "x", "=", "np", ".", "asarray", "(", "x", ")", "if", "np", ".", "shape", "(",...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/spatial/kdtree.py#L400-L538
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_controls.py
python
TextAttr.SetFontStyle
(*args, **kwargs)
return _controls_.TextAttr_SetFontStyle(*args, **kwargs)
SetFontStyle(self, int fontStyle)
SetFontStyle(self, int fontStyle)
[ "SetFontStyle", "(", "self", "int", "fontStyle", ")" ]
def SetFontStyle(*args, **kwargs):
    """SetFontStyle(self, int fontStyle)"""
    # SWIG-generated thin wrapper: forwards directly to the native
    # extension-module implementation.
    return _controls_.TextAttr_SetFontStyle(*args, **kwargs)
[ "def", "SetFontStyle", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_controls_", ".", "TextAttr_SetFontStyle", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_controls.py#L1539-L1541
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/Jinja2/py3/jinja2/filters.py
python
do_capitalize
(s: str)
return soft_str(s).capitalize()
Capitalize a value. The first character will be uppercase, all others lowercase.
Capitalize a value. The first character will be uppercase, all others lowercase.
[ "Capitalize", "a", "value", ".", "The", "first", "character", "will", "be", "uppercase", "all", "others", "lowercase", "." ]
def do_capitalize(s: str) -> str:
    """Capitalize a value. The first character will be uppercase, all others
    lowercase.
    """
    # NOTE(review): soft_str presumably coerces string-like values (e.g.
    # Markup) to str before capitalizing — confirm in the module's helpers.
    return soft_str(s).capitalize()
[ "def", "do_capitalize", "(", "s", ":", "str", ")", "->", "str", ":", "return", "soft_str", "(", "s", ")", ".", "capitalize", "(", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/Jinja2/py3/jinja2/filters.py#L315-L319
BlzFans/wke
b0fa21158312e40c5fbd84682d643022b6c34a93
cygwin/lib/python2.6/ctypes/__init__.py
python
string_at
(ptr, size=-1)
return _string_at(ptr, size)
string_at(addr[, size]) -> string Return the string at addr.
string_at(addr[, size]) -> string
[ "string_at", "(", "addr", "[", "size", "]", ")", "-", ">", "string" ]
def string_at(ptr, size=-1):
    """string_at(addr[, size]) -> string

    Return the string at addr."""
    # Thin wrapper around the C-level _string_at helper; the semantics of
    # the size=-1 default are defined there (presumably read to the NUL
    # terminator — confirm in the _ctypes implementation).
    return _string_at(ptr, size)
[ "def", "string_at", "(", "ptr", ",", "size", "=", "-", "1", ")", ":", "return", "_string_at", "(", "ptr", ",", "size", ")" ]
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/ctypes/__init__.py#L492-L496
LiquidPlayer/LiquidCore
9405979363f2353ac9a71ad8ab59685dd7f919c9
deps/node-10.15.3/deps/v8/third_party/jinja2/debug.py
python
ProcessedTraceback.standard_exc_info
(self)
return self.exc_type, self.exc_value, tb
Standard python exc_info for re-raising
Standard python exc_info for re-raising
[ "Standard", "python", "exc_info", "for", "re", "-", "raising" ]
def standard_exc_info(self):
    """Standard python exc_info for re-raising"""
    frame = self.frames[0]
    # On pypy (or any implementation with tproxy support) the frame is
    # already an actual traceback (or transparent proxy); on CPython it is a
    # wrapper whose real traceback lives in .tb.
    tb = frame if type(frame) is TracebackType else frame.tb
    return self.exc_type, self.exc_value, tb
[ "def", "standard_exc_info", "(", "self", ")", ":", "tb", "=", "self", ".", "frames", "[", "0", "]", "# the frame will be an actual traceback (or transparent proxy) if", "# we are on pypy or a python implementation with support for tproxy", "if", "type", "(", "tb", ")", "is"...
https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/node-10.15.3/deps/v8/third_party/jinja2/debug.py#L122-L129
stan-dev/math
5fd79f89933269a4ca4d8dd1fde2a36d53d4768c
lib/tbb_2020.3/python/tbb/pool.py
python
Pool.terminate
(self)
Stops the worker processes immediately without completing outstanding work. When the pool object is garbage collected terminate() will be called immediately.
Stops the worker processes immediately without completing outstanding work. When the pool object is garbage collected terminate() will be called immediately.
[ "Stops", "the", "worker", "processes", "immediately", "without", "completing", "outstanding", "work", ".", "When", "the", "pool", "object", "is", "garbage", "collected", "terminate", "()", "will", "be", "called", "immediately", "." ]
def terminate(self):
    """Stops the worker processes immediately without completing outstanding
    work. When the pool object is garbage collected terminate() will be called
    immediately."""
    # close() stops accepting new work; cancelling the task group then
    # abandons anything still queued.
    self.close()
    self._tasks.cancel()
[ "def", "terminate", "(", "self", ")", ":", "self", ".", "close", "(", ")", "self", ".", "_tasks", ".", "cancel", "(", ")" ]
https://github.com/stan-dev/math/blob/5fd79f89933269a4ca4d8dd1fde2a36d53d4768c/lib/tbb_2020.3/python/tbb/pool.py#L213-L218
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_controls.py
python
PyControl.DoEraseBackground
(*args, **kwargs)
return _controls_.PyControl_DoEraseBackground(*args, **kwargs)
DoEraseBackground(self, DC dc) -> bool
DoEraseBackground(self, DC dc) -> bool
[ "DoEraseBackground", "(", "self", "DC", "dc", ")", "-", ">", "bool" ]
def DoEraseBackground(*args, **kwargs): """DoEraseBackground(self, DC dc) -> bool""" return _controls_.PyControl_DoEraseBackground(*args, **kwargs)
[ "def", "DoEraseBackground", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_controls_", ".", "PyControl_DoEraseBackground", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_controls.py#L5849-L5851
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/plat-mac/lib-scriptpackages/StdSuites/AppleScript_Suite.py
python
AppleScript_Suite_Events.tell
(self, _no_object=None, _attributes={}, **_arguments)
tell: Record or log a \xd4tell\xd5 statement Keyword argument _attributes: AppleEvent attribute dictionary
tell: Record or log a \xd4tell\xd5 statement Keyword argument _attributes: AppleEvent attribute dictionary
[ "tell", ":", "Record", "or", "log", "a", "\\", "xd4tell", "\\", "xd5", "statement", "Keyword", "argument", "_attributes", ":", "AppleEvent", "attribute", "dictionary" ]
def tell(self, _no_object=None, _attributes={}, **_arguments): """tell: Record or log a \xd4tell\xd5 statement Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'ascr' _subcode = 'tell' if _arguments: raise TypeError, 'No optional args expected' if _no_object is not None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----']
[ "def", "tell", "(", "self", ",", "_no_object", "=", "None", ",", "_attributes", "=", "{", "}", ",", "*", "*", "_arguments", ")", ":", "_code", "=", "'ascr'", "_subcode", "=", "'tell'", "if", "_arguments", ":", "raise", "TypeError", ",", "'No optional arg...
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/plat-mac/lib-scriptpackages/StdSuites/AppleScript_Suite.py#L639-L656
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cuda/compiler.py
python
CUDAKernel.device
(self)
return get_current_device()
Get current active context
Get current active context
[ "Get", "current", "active", "context" ]
def device(self): """ Get current active context """ return get_current_device()
[ "def", "device", "(", "self", ")", ":", "return", "get_current_device", "(", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cuda/compiler.py#L558-L562
qboticslabs/mastering_ros
d83e78f30acc45b0f18522c1d5fae3a7f52974b9
chapter_9_codes/chefbot/chefbot_bringup/scripts/bkup_working/arduino.py
python
Arduino._HandleSetDriveGains
(self, request)
return SetDriveControlGainsResponse()
Handle the setting of the drive gains (PID).
Handle the setting of the drive gains (PID).
[ "Handle", "the", "setting", "of", "the", "drive", "gains", "(", "PID", ")", "." ]
def _HandleSetDriveGains(self, request): """ Handle the setting of the drive gains (PID). """ # We persist the new values in the parameter server rospy.set_param("~speedController", {'velocityPParam': request.velocityPParam, 'velocityPParam': request.velocityIParam, 'turnPParam': request.turnPParam, 'turnIParam': request.turnIParam}) commandTimeout = self._GetCommandTimeoutForSpeedController() speedControllerParams = (request.velocityPParam, request.velocityIParam, request.turnPParam, request.turnIParam, commandTimeout) self._WriteSpeedControllerParams(speedControllerParams) return SetDriveControlGainsResponse()
[ "def", "_HandleSetDriveGains", "(", "self", ",", "request", ")", ":", "# We persist the new values in the parameter server", "rospy", ".", "set_param", "(", "\"~speedController\"", ",", "{", "'velocityPParam'", ":", "request", ".", "velocityPParam", ",", "'velocityPParam'...
https://github.com/qboticslabs/mastering_ros/blob/d83e78f30acc45b0f18522c1d5fae3a7f52974b9/chapter_9_codes/chefbot/chefbot_bringup/scripts/bkup_working/arduino.py#L369-L378
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/setuptools/py2/setuptools/config.py
python
ConfigOptionsHandler.parse_section_exclude_package_data
(self, section_options)
Parses `exclude_package_data` configuration file section. :param dict section_options:
Parses `exclude_package_data` configuration file section.
[ "Parses", "exclude_package_data", "configuration", "file", "section", "." ]
def parse_section_exclude_package_data(self, section_options): """Parses `exclude_package_data` configuration file section. :param dict section_options: """ self['exclude_package_data'] = self._parse_package_data( section_options)
[ "def", "parse_section_exclude_package_data", "(", "self", ",", "section_options", ")", ":", "self", "[", "'exclude_package_data'", "]", "=", "self", ".", "_parse_package_data", "(", "section_options", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/setuptools/py2/setuptools/config.py#L636-L642
pmq20/node-packer
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
current/tools/inspector_protocol/jinja2/runtime.py
python
unicode_join
(seq)
return concat(imap(text_type, seq))
Simple args to unicode conversion and concatenation.
Simple args to unicode conversion and concatenation.
[ "Simple", "args", "to", "unicode", "conversion", "and", "concatenation", "." ]
def unicode_join(seq): """Simple args to unicode conversion and concatenation.""" return concat(imap(text_type, seq))
[ "def", "unicode_join", "(", "seq", ")", ":", "return", "concat", "(", "imap", "(", "text_type", ",", "seq", ")", ")" ]
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/current/tools/inspector_protocol/jinja2/runtime.py#L54-L56
timi-liuliang/echo
40a5a24d430eee4118314459ab7e03afcb3b8719
thirdparty/protobuf/python/google/protobuf/descriptor_pool.py
python
DescriptorPool._GetDeps
(self, dependencies)
Recursively finds dependencies for file protos. Args: dependencies: The names of the files being depended on. Yields: Each direct and indirect dependency.
Recursively finds dependencies for file protos.
[ "Recursively", "finds", "dependencies", "for", "file", "protos", "." ]
def _GetDeps(self, dependencies): """Recursively finds dependencies for file protos. Args: dependencies: The names of the files being depended on. Yields: Each direct and indirect dependency. """ for dependency in dependencies: dep_desc = self.FindFileByName(dependency) yield dep_desc for parent_dep in dep_desc.dependencies: yield parent_dep
[ "def", "_GetDeps", "(", "self", ",", "dependencies", ")", ":", "for", "dependency", "in", "dependencies", ":", "dep_desc", "=", "self", ".", "FindFileByName", "(", "dependency", ")", "yield", "dep_desc", "for", "parent_dep", "in", "dep_desc", ".", "dependencie...
https://github.com/timi-liuliang/echo/blob/40a5a24d430eee4118314459ab7e03afcb3b8719/thirdparty/protobuf/python/google/protobuf/descriptor_pool.py#L603-L617
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/telemetry/third_party/web-page-replay/third_party/ipaddr/ipaddr.py
python
_find_address_range
(addresses)
return (first, last)
Find a sequence of addresses. Args: addresses: a list of IPv4 or IPv6 addresses. Returns: A tuple containing the first and last IP addresses in the sequence.
Find a sequence of addresses.
[ "Find", "a", "sequence", "of", "addresses", "." ]
def _find_address_range(addresses): """Find a sequence of addresses. Args: addresses: a list of IPv4 or IPv6 addresses. Returns: A tuple containing the first and last IP addresses in the sequence. """ first = last = addresses[0] for ip in addresses[1:]: if ip._ip == last._ip + 1: last = ip else: break return (first, last)
[ "def", "_find_address_range", "(", "addresses", ")", ":", "first", "=", "last", "=", "addresses", "[", "0", "]", "for", "ip", "in", "addresses", "[", "1", ":", "]", ":", "if", "ip", ".", "_ip", "==", "last", ".", "_ip", "+", "1", ":", "last", "="...
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/telemetry/third_party/web-page-replay/third_party/ipaddr/ipaddr.py#L152-L168
doyubkim/fluid-engine-dev
45b4bdbdb4c6d8c0beebc682180469198203b0ef
scripts/utils.py
python
is_mac
()
return guess_os() == 'macosx'
Returns True if you are using Mac.
Returns True if you are using Mac.
[ "Returns", "True", "if", "you", "are", "using", "Mac", "." ]
def is_mac(): """ Returns True if you are using Mac. """ return guess_os() == 'macosx'
[ "def", "is_mac", "(", ")", ":", "return", "guess_os", "(", ")", "==", "'macosx'" ]
https://github.com/doyubkim/fluid-engine-dev/blob/45b4bdbdb4c6d8c0beebc682180469198203b0ef/scripts/utils.py#L142-L146
mongodb/mongo
d8ff665343ad29cf286ee2cf4a1960d29371937b
buildscripts/resmokelib/core/redirect.py
python
Pipe.__init__
(self, cmd, read_from, write_to)
`read_from` can be `sys.stdout` or an object that implements a `read` method. `write_to` must implement `write` and `flush`.
`read_from` can be `sys.stdout` or an object that implements a `read` method. `write_to` must implement `write` and `flush`.
[ "read_from", "can", "be", "sys", ".", "stdout", "or", "an", "object", "that", "implements", "a", "read", "method", ".", "write_to", "must", "implement", "write", "and", "flush", "." ]
def __init__(self, cmd, read_from, write_to): """`read_from` can be `sys.stdout` or an object that implements a `read` method. `write_to` must implement `write` and `flush`.""" if read_from == sys.__stdout__: # sys.stdout does not implement a `read` method so it cannot be passed as a `stdin` # variable. Use a `StdoutRewrite` object to write the spawned `stdin`. self.proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=write_to) self.rewrite = StdoutRewrite(self.proc.stdin) else: self.proc = subprocess.Popen(cmd, stdin=read_from, stdout=write_to)
[ "def", "__init__", "(", "self", ",", "cmd", ",", "read_from", ",", "write_to", ")", ":", "if", "read_from", "==", "sys", ".", "__stdout__", ":", "# sys.stdout does not implement a `read` method so it cannot be passed as a `stdin`", "# variable. Use a `StdoutRewrite` object to...
https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/buildscripts/resmokelib/core/redirect.py#L49-L58
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/functools.py
python
_c3_merge
(sequences)
Merges MROs in *sequences* to a single MRO using the C3 algorithm. Adapted from http://www.python.org/download/releases/2.3/mro/.
Merges MROs in *sequences* to a single MRO using the C3 algorithm.
[ "Merges", "MROs", "in", "*", "sequences", "*", "to", "a", "single", "MRO", "using", "the", "C3", "algorithm", "." ]
def _c3_merge(sequences): """Merges MROs in *sequences* to a single MRO using the C3 algorithm. Adapted from http://www.python.org/download/releases/2.3/mro/. """ result = [] while True: sequences = [s for s in sequences if s] # purge empty sequences if not sequences: return result for s1 in sequences: # find merge candidates among seq heads candidate = s1[0] for s2 in sequences: if candidate in s2[1:]: candidate = None break # reject the current head, it appears later else: break if candidate is None: raise RuntimeError("Inconsistent hierarchy") result.append(candidate) # remove the chosen candidate for seq in sequences: if seq[0] == candidate: del seq[0]
[ "def", "_c3_merge", "(", "sequences", ")", ":", "result", "=", "[", "]", "while", "True", ":", "sequences", "=", "[", "s", "for", "s", "in", "sequences", "if", "s", "]", "# purge empty sequences", "if", "not", "sequences", ":", "return", "result", "for",...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/functools.py#L624-L649
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python/src/Lib/lib2to3/pytree.py
python
BasePattern.generate_matches
(self, nodes)
Generator yielding all matches for this pattern. Default implementation for non-wildcard patterns.
Generator yielding all matches for this pattern.
[ "Generator", "yielding", "all", "matches", "for", "this", "pattern", "." ]
def generate_matches(self, nodes): """ Generator yielding all matches for this pattern. Default implementation for non-wildcard patterns. """ r = {} if nodes and self.match(nodes[0], r): yield 1, r
[ "def", "generate_matches", "(", "self", ",", "nodes", ")", ":", "r", "=", "{", "}", "if", "nodes", "and", "self", ".", "match", "(", "nodes", "[", "0", "]", ",", "r", ")", ":", "yield", "1", ",", "r" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/lib2to3/pytree.py#L523-L531
emscripten-core/emscripten
0d413d3c5af8b28349682496edc14656f5700c2f
third_party/ply/example/classcalc/calc.py
python
Calc.t_NUMBER
(self, t)
return t
r'\d+
r'\d+
[ "r", "\\", "d", "+" ]
def t_NUMBER(self, t): r'\d+' try: t.value = int(t.value) except ValueError: print("Integer value too large %s" % t.value) t.value = 0 #print "parsed number %s" % repr(t.value) return t
[ "def", "t_NUMBER", "(", "self", ",", "t", ")", ":", "try", ":", "t", ".", "value", "=", "int", "(", "t", ".", "value", ")", "except", "ValueError", ":", "print", "(", "\"Integer value too large %s\"", "%", "t", ".", "value", ")", "t", ".", "value", ...
https://github.com/emscripten-core/emscripten/blob/0d413d3c5af8b28349682496edc14656f5700c2f/third_party/ply/example/classcalc/calc.py#L77-L85
mindspore-ai/mindspore
fb8fd3338605bb34fa5cea054e535a8b1d753fab
mindspore/python/mindspore/train/callback/_checkpoint.py
python
CheckpointConfig.get_checkpoint_policy
(self)
return checkpoint_policy
Get the policy of checkpoint.
Get the policy of checkpoint.
[ "Get", "the", "policy", "of", "checkpoint", "." ]
def get_checkpoint_policy(self): """Get the policy of checkpoint.""" checkpoint_policy = {'save_checkpoint_steps': self.save_checkpoint_steps, 'save_checkpoint_seconds': self.save_checkpoint_seconds, 'keep_checkpoint_max': self.keep_checkpoint_max, 'keep_checkpoint_per_n_minutes': self.keep_checkpoint_per_n_minutes, 'saved_network': self.saved_network} return checkpoint_policy
[ "def", "get_checkpoint_policy", "(", "self", ")", ":", "checkpoint_policy", "=", "{", "'save_checkpoint_steps'", ":", "self", ".", "save_checkpoint_steps", ",", "'save_checkpoint_seconds'", ":", "self", ".", "save_checkpoint_seconds", ",", "'keep_checkpoint_max'", ":", ...
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/train/callback/_checkpoint.py#L249-L257
sccn/lsl_archived
2ff44b7a5172b02fe845b1fc72b9ab5578a489ed
LSL/liblsl-Python/pylsl/pylsl.py
python
StreamInfo.channel_count
(self)
return lib.lsl_get_channel_count(self.obj)
Number of channels of the stream. A stream has at least one channel; the channel count stays constant for all samples.
Number of channels of the stream.
[ "Number", "of", "channels", "of", "the", "stream", "." ]
def channel_count(self): """Number of channels of the stream. A stream has at least one channel; the channel count stays constant for all samples. """ return lib.lsl_get_channel_count(self.obj)
[ "def", "channel_count", "(", "self", ")", ":", "return", "lib", ".", "lsl_get_channel_count", "(", "self", ".", "obj", ")" ]
https://github.com/sccn/lsl_archived/blob/2ff44b7a5172b02fe845b1fc72b9ab5578a489ed/LSL/liblsl-Python/pylsl/pylsl.py#L243-L250
reverbrain/elliptics
4b4f9b8094d7616c1ec50eb8605edb059b9f228e
bindings/python/src/route.py
python
RouteList.__iter__
(self)
return iter(self.routes)
x.__iter__() <==> iter(x)
x.__iter__() <==> iter(x)
[ "x", ".", "__iter__", "()", "<", "==", ">", "iter", "(", "x", ")" ]
def __iter__(self): """x.__iter__() <==> iter(x)""" return iter(self.routes)
[ "def", "__iter__", "(", "self", ")", ":", "return", "iter", "(", "self", ".", "routes", ")" ]
https://github.com/reverbrain/elliptics/blob/4b4f9b8094d7616c1ec50eb8605edb059b9f228e/bindings/python/src/route.py#L418-L420
ideawu/ssdb
f229ba277c7f7d0ca5a441c0c6fb3d1209af68e4
deps/cpy/antlr3/tokens.py
python
Token.setChannel
(self, channel)
@brief Set the channel of the token Using setter/getter methods is deprecated. Use o.channel instead.
@brief Set the channel of the token
[ "@brief", "Set", "the", "channel", "of", "the", "token" ]
def setChannel(self, channel): """@brief Set the channel of the token Using setter/getter methods is deprecated. Use o.channel instead.""" raise NotImplementedError
[ "def", "setChannel", "(", "self", ",", "channel", ")", ":", "raise", "NotImplementedError" ]
https://github.com/ideawu/ssdb/blob/f229ba277c7f7d0ca5a441c0c6fb3d1209af68e4/deps/cpy/antlr3/tokens.py#L115-L120
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/saved_model/revived_types.py
python
register_revived_type
(identifier, predicate, versions)
Register a type for revived objects. Args: identifier: A unique string identifying this class of objects. predicate: A Boolean predicate for this registration. Takes a trackable object as an argument. If True, `type_registration` may be used to save and restore the object. versions: A list of `VersionedTypeRegistration` objects.
Register a type for revived objects.
[ "Register", "a", "type", "for", "revived", "objects", "." ]
def register_revived_type(identifier, predicate, versions): """Register a type for revived objects. Args: identifier: A unique string identifying this class of objects. predicate: A Boolean predicate for this registration. Takes a trackable object as an argument. If True, `type_registration` may be used to save and restore the object. versions: A list of `VersionedTypeRegistration` objects. """ # Keep registrations in order of version. We always use the highest matching # version (respecting the min consumer version and bad consumers). versions.sort(key=lambda reg: reg.version, reverse=True) if not versions: raise AssertionError("Need at least one version of a registered type.") version_numbers = set() for registration in versions: # Copy over the identifier for use in generating protos registration.identifier = identifier if registration.version in version_numbers: raise AssertionError( f"Got multiple registrations with version {registration.version} for " f"type {identifier}.") version_numbers.add(registration.version) if identifier in _REVIVED_TYPE_REGISTRY: raise AssertionError(f"Duplicate registrations for type '{identifier}'") _REVIVED_TYPE_REGISTRY[identifier] = (predicate, versions) _TYPE_IDENTIFIERS.append(identifier)
[ "def", "register_revived_type", "(", "identifier", ",", "predicate", ",", "versions", ")", ":", "# Keep registrations in order of version. We always use the highest matching", "# version (respecting the min consumer version and bad consumers).", "versions", ".", "sort", "(", "key", ...
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/saved_model/revived_types.py#L107-L136
PaddlePaddle/Paddle
1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c
python/paddle/dataset/image.py
python
left_right_flip
(im, is_color=True)
Flip an image along the horizontal direction. Return the flipped image. Example usage: .. code-block:: python im = left_right_flip(im) :param im: input image with HWC layout or HW layout for gray image :type im: ndarray :param is_color: whether input image is color or not :type is_color: bool
Flip an image along the horizontal direction. Return the flipped image.
[ "Flip", "an", "image", "along", "the", "horizontal", "direction", ".", "Return", "the", "flipped", "image", "." ]
def left_right_flip(im, is_color=True): """ Flip an image along the horizontal direction. Return the flipped image. Example usage: .. code-block:: python im = left_right_flip(im) :param im: input image with HWC layout or HW layout for gray image :type im: ndarray :param is_color: whether input image is color or not :type is_color: bool """ if len(im.shape) == 3 and is_color: return im[:, ::-1, :] else: return im[:, ::-1]
[ "def", "left_right_flip", "(", "im", ",", "is_color", "=", "True", ")", ":", "if", "len", "(", "im", ".", "shape", ")", "==", "3", "and", "is_color", ":", "return", "im", "[", ":", ",", ":", ":", "-", "1", ",", ":", "]", "else", ":", "return", ...
https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/dataset/image.py#L307-L326
christinaa/LLVM-VideoCore4
7773c3c9e5d22b785d4b96ed0acea37c8aa9c183
bindings/python/llvm/core.py
python
Module.datalayout
(self, new_data_layout)
new_data_layout is a string.
new_data_layout is a string.
[ "new_data_layout", "is", "a", "string", "." ]
def datalayout(self, new_data_layout): """new_data_layout is a string.""" lib.LLVMSetDataLayout(self, new_data_layout)
[ "def", "datalayout", "(", "self", ",", "new_data_layout", ")", ":", "lib", ".", "LLVMSetDataLayout", "(", "self", ",", "new_data_layout", ")" ]
https://github.com/christinaa/LLVM-VideoCore4/blob/7773c3c9e5d22b785d4b96ed0acea37c8aa9c183/bindings/python/llvm/core.py#L210-L212
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/core/fromnumeric.py
python
searchsorted
(a, v, side='left', sorter=None)
return _wrapfunc(a, 'searchsorted', v, side=side, sorter=sorter)
Find indices where elements should be inserted to maintain order. Find the indices into a sorted array `a` such that, if the corresponding elements in `v` were inserted before the indices, the order of `a` would be preserved. Assuming that `a` is sorted: ====== ============================ `side` returned index `i` satisfies ====== ============================ left ``a[i-1] < v <= a[i]`` right ``a[i-1] <= v < a[i]`` ====== ============================ Parameters ---------- a : 1-D array_like Input array. If `sorter` is None, then it must be sorted in ascending order, otherwise `sorter` must be an array of indices that sort it. v : array_like Values to insert into `a`. side : {'left', 'right'}, optional If 'left', the index of the first suitable location found is given. If 'right', return the last such index. If there is no suitable index, return either 0 or N (where N is the length of `a`). sorter : 1-D array_like, optional Optional array of integer indices that sort array a into ascending order. They are typically the result of argsort. .. versionadded:: 1.7.0 Returns ------- indices : array of ints Array of insertion points with the same shape as `v`. See Also -------- sort : Return a sorted copy of an array. histogram : Produce histogram from 1-D data. Notes ----- Binary search is used to find the required insertion points. As of NumPy 1.4.0 `searchsorted` works with real/complex arrays containing `nan` values. The enhanced sort order is documented in `sort`. This function uses the same algorithm as the builtin python `bisect.bisect_left` (``side='left'``) and `bisect.bisect_right` (``side='right'``) functions, which is also vectorized in the `v` argument. Examples -------- >>> np.searchsorted([1,2,3,4,5], 3) 2 >>> np.searchsorted([1,2,3,4,5], 3, side='right') 3 >>> np.searchsorted([1,2,3,4,5], [-10, 10, 2, 3]) array([0, 5, 1, 2])
Find indices where elements should be inserted to maintain order.
[ "Find", "indices", "where", "elements", "should", "be", "inserted", "to", "maintain", "order", "." ]
def searchsorted(a, v, side='left', sorter=None): """ Find indices where elements should be inserted to maintain order. Find the indices into a sorted array `a` such that, if the corresponding elements in `v` were inserted before the indices, the order of `a` would be preserved. Assuming that `a` is sorted: ====== ============================ `side` returned index `i` satisfies ====== ============================ left ``a[i-1] < v <= a[i]`` right ``a[i-1] <= v < a[i]`` ====== ============================ Parameters ---------- a : 1-D array_like Input array. If `sorter` is None, then it must be sorted in ascending order, otherwise `sorter` must be an array of indices that sort it. v : array_like Values to insert into `a`. side : {'left', 'right'}, optional If 'left', the index of the first suitable location found is given. If 'right', return the last such index. If there is no suitable index, return either 0 or N (where N is the length of `a`). sorter : 1-D array_like, optional Optional array of integer indices that sort array a into ascending order. They are typically the result of argsort. .. versionadded:: 1.7.0 Returns ------- indices : array of ints Array of insertion points with the same shape as `v`. See Also -------- sort : Return a sorted copy of an array. histogram : Produce histogram from 1-D data. Notes ----- Binary search is used to find the required insertion points. As of NumPy 1.4.0 `searchsorted` works with real/complex arrays containing `nan` values. The enhanced sort order is documented in `sort`. This function uses the same algorithm as the builtin python `bisect.bisect_left` (``side='left'``) and `bisect.bisect_right` (``side='right'``) functions, which is also vectorized in the `v` argument. Examples -------- >>> np.searchsorted([1,2,3,4,5], 3) 2 >>> np.searchsorted([1,2,3,4,5], 3, side='right') 3 >>> np.searchsorted([1,2,3,4,5], [-10, 10, 2, 3]) array([0, 5, 1, 2]) """ return _wrapfunc(a, 'searchsorted', v, side=side, sorter=sorter)
[ "def", "searchsorted", "(", "a", ",", "v", ",", "side", "=", "'left'", ",", "sorter", "=", "None", ")", ":", "return", "_wrapfunc", "(", "a", ",", "'searchsorted'", ",", "v", ",", "side", "=", "side", ",", "sorter", "=", "sorter", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/core/fromnumeric.py#L1275-L1341
natanielruiz/android-yolo
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
jni-build/jni/include/tensorflow/contrib/graph_editor/util.py
python
get_unique_graph
(tops, check_types=None, none_if_empty=False)
return g
Return the unique graph used by the all the elements in tops. Args: tops: list of elements to check (usually a list of tf.Operation and/or tf.Tensor). Or a tf.Graph. check_types: check that the element in tops are of given type(s). If None, the types (tf.Operation, tf.Tensor) are used. none_if_empty: don't raise an error if tops is an empty list, just return None. Returns: The unique graph used by all the tops. Raises: TypeError: if tops is not a iterable of tf.Operation. ValueError: if the graph is not unique.
Return the unique graph used by the all the elements in tops.
[ "Return", "the", "unique", "graph", "used", "by", "the", "all", "the", "elements", "in", "tops", "." ]
def get_unique_graph(tops, check_types=None, none_if_empty=False): """Return the unique graph used by the all the elements in tops. Args: tops: list of elements to check (usually a list of tf.Operation and/or tf.Tensor). Or a tf.Graph. check_types: check that the element in tops are of given type(s). If None, the types (tf.Operation, tf.Tensor) are used. none_if_empty: don't raise an error if tops is an empty list, just return None. Returns: The unique graph used by all the tops. Raises: TypeError: if tops is not a iterable of tf.Operation. ValueError: if the graph is not unique. """ if isinstance(tops, tf_ops.Graph): return tops if not is_iterable(tops): raise TypeError("{} is not iterable".format(type(tops))) if check_types is None: check_types = (tf_ops.Operation, tf_ops.Tensor) g = None for op in tops: if not isinstance(op, check_types): raise TypeError("Expected a tf.Operation, got: {}".format(type(op))) if g is None: g = op.graph elif g is not op.graph: raise ValueError("Operation {} does not belong to given graph".format(op)) if g is None and not none_if_empty: raise ValueError("Can't find the unique graph of an empty list") return g
[ "def", "get_unique_graph", "(", "tops", ",", "check_types", "=", "None", ",", "none_if_empty", "=", "False", ")", ":", "if", "isinstance", "(", "tops", ",", "tf_ops", ".", "Graph", ")", ":", "return", "tops", "if", "not", "is_iterable", "(", "tops", ")",...
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/contrib/graph_editor/util.py#L88-L120
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/setuptools/py3/setuptools/dist.py
python
check_nsp
(dist, attr, value)
Verify that namespace packages are valid
Verify that namespace packages are valid
[ "Verify", "that", "namespace", "packages", "are", "valid" ]
def check_nsp(dist, attr, value): """Verify that namespace packages are valid""" ns_packages = value assert_string_list(dist, attr, ns_packages) for nsp in ns_packages: if not dist.has_contents_for(nsp): raise DistutilsSetupError( "Distribution contains no modules or packages for " + "namespace package %r" % nsp ) parent, sep, child = nsp.rpartition('.') if parent and parent not in ns_packages: distutils.log.warn( "WARNING: %r is declared as a package namespace, but %r" " is not: please correct this in setup.py", nsp, parent, )
[ "def", "check_nsp", "(", "dist", ",", "attr", ",", "value", ")", ":", "ns_packages", "=", "value", "assert_string_list", "(", "dist", ",", "attr", ",", "ns_packages", ")", "for", "nsp", "in", "ns_packages", ":", "if", "not", "dist", ".", "has_contents_for"...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/setuptools/py3/setuptools/dist.py#L252-L269
turi-code/SFrame
796b9bdfb2fa1b881d82080754643c7e68629cd2
oss_src/unity/python/sframe/_gl_pickle.py
python
_get_gl_object_from_persistent_id
(type_tag, gl_archive_abs_path)
return obj
Internal util to get a GLC object from a persistent ID in the pickle file. Parameters ---------- type_tag : The name of the glc class as saved in the GLC pickler. gl_archive_abs_path: An absolute path to the GLC archive where the object was saved. Returns ---------- The GLC object.
Internal util to get a GLC object from a persistent ID in the pickle file.
[ "Internal", "util", "to", "get", "a", "GLC", "object", "from", "a", "persistent", "ID", "in", "the", "pickle", "file", "." ]
def _get_gl_object_from_persistent_id(type_tag, gl_archive_abs_path): """ Internal util to get a GLC object from a persistent ID in the pickle file. Parameters ---------- type_tag : The name of the glc class as saved in the GLC pickler. gl_archive_abs_path: An absolute path to the GLC archive where the object was saved. Returns ---------- The GLC object. """ if type_tag == "SFrame": obj = _SFrame(gl_archive_abs_path) elif type_tag == "SGraph": obj = _load_graph(gl_archive_abs_path) elif type_tag == "SArray": obj = _SArray(gl_archive_abs_path) elif type_tag == "Model": from . import load_model as _load_model obj = _load_model(gl_archive_abs_path) else: raise _pickle.UnpicklingError("GraphLab pickling Error: Unspported object." " Only SFrames, SGraphs, SArrays, and Models are supported.") return obj
[ "def", "_get_gl_object_from_persistent_id", "(", "type_tag", ",", "gl_archive_abs_path", ")", ":", "if", "type_tag", "==", "\"SFrame\"", ":", "obj", "=", "_SFrame", "(", "gl_archive_abs_path", ")", "elif", "type_tag", "==", "\"SGraph\"", ":", "obj", "=", "_load_gr...
https://github.com/turi-code/SFrame/blob/796b9bdfb2fa1b881d82080754643c7e68629cd2/oss_src/unity/python/sframe/_gl_pickle.py#L98-L126
priyankchheda/algorithms
c361aa9071573fa9966d5b02d05e524815abcf2b
graph/library/graph.py
python
Graph.is_vertex
(self, node)
return node in self._graph_dict
Returns true if node is vertex of graph, otherwise false
Returns true if node is vertex of graph, otherwise false
[ "Returns", "true", "if", "node", "is", "vertex", "of", "graph", "otherwise", "false" ]
def is_vertex(self, node): """ Returns true if node is vertex of graph, otherwise false """ return node in self._graph_dict
[ "def", "is_vertex", "(", "self", ",", "node", ")", ":", "return", "node", "in", "self", ".", "_graph_dict" ]
https://github.com/priyankchheda/algorithms/blob/c361aa9071573fa9966d5b02d05e524815abcf2b/graph/library/graph.py#L17-L19
llvm/llvm-project
ffa6262cb4e2a335d26416fad39a581b4f98c5f4
polly/lib/External/isl/imath/tools/mkdoc.py
python
LIndex.linecol
(self, pos)
return 1, pos
Returns the (line, col) corresponding to pos. Line numbers are 1-based, columns are 0-based.
Returns the (line, col) corresponding to pos.
[ "Returns", "the", "(", "line", "col", ")", "corresponding", "to", "pos", "." ]
def linecol(self, pos): """Returns the (line, col) corresponding to pos. Line numbers are 1-based, columns are 0-based. """ if pos < 0 or pos > self._len: raise IndexError("position %d out of range" % pos) # Binary search for the largest line number whose end marker is at or # after pos and whose previous line's end is before pos. idx = self._index i, j = 1, len(idx) while i < j: m = (i + j) / 2 if idx[m] < pos: i = m + 1 elif idx[m - 1] < pos: return m, pos - idx[m - 1] else: j = m # This happens if (and only if) the whole file is one line. return 1, pos
[ "def", "linecol", "(", "self", ",", "pos", ")", ":", "if", "pos", "<", "0", "or", "pos", ">", "self", ".", "_len", ":", "raise", "IndexError", "(", "\"position %d out of range\"", "%", "pos", ")", "# Binary search for the largest line number whose end marker is at...
https://github.com/llvm/llvm-project/blob/ffa6262cb4e2a335d26416fad39a581b4f98c5f4/polly/lib/External/isl/imath/tools/mkdoc.py#L82-L104
macchina-io/macchina.io
ef24ba0e18379c3dd48fb84e6dbf991101cb8db0
platform/JS/V8/tools/gyp/pylib/gyp/generator/make.py
python
MakefileWriter.WritePchTargets
(self, pch_commands)
Writes make rules to compile prefix headers.
Writes make rules to compile prefix headers.
[ "Writes", "make", "rules", "to", "compile", "prefix", "headers", "." ]
def WritePchTargets(self, pch_commands): """Writes make rules to compile prefix headers.""" if not pch_commands: return for gch, lang_flag, lang, input in pch_commands: extra_flags = { 'c': '$(CFLAGS_C_$(BUILDTYPE))', 'cc': '$(CFLAGS_CC_$(BUILDTYPE))', 'm': '$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))', 'mm': '$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))', }[lang] var_name = { 'c': 'GYP_PCH_CFLAGS', 'cc': 'GYP_PCH_CXXFLAGS', 'm': 'GYP_PCH_OBJCFLAGS', 'mm': 'GYP_PCH_OBJCXXFLAGS', }[lang] self.WriteLn("%s: %s := %s " % (gch, var_name, lang_flag) + "$(DEFS_$(BUILDTYPE)) " "$(INCS_$(BUILDTYPE)) " "$(CFLAGS_$(BUILDTYPE)) " + extra_flags) self.WriteLn('%s: %s FORCE_DO_CMD' % (gch, input)) self.WriteLn('\t@$(call do_cmd,pch_%s,1)' % lang) self.WriteLn('') assert ' ' not in gch, ( "Spaces in gch filenames not supported (%s)" % gch) self.WriteLn('all_deps += %s' % gch) self.WriteLn('')
[ "def", "WritePchTargets", "(", "self", ",", "pch_commands", ")", ":", "if", "not", "pch_commands", ":", "return", "for", "gch", ",", "lang_flag", ",", "lang", ",", "input", "in", "pch_commands", ":", "extra_flags", "=", "{", "'c'", ":", "'$(CFLAGS_C_$(BUILDT...
https://github.com/macchina-io/macchina.io/blob/ef24ba0e18379c3dd48fb84e6dbf991101cb8db0/platform/JS/V8/tools/gyp/pylib/gyp/generator/make.py#L1298-L1328
weolar/miniblink49
1c4678db0594a4abde23d3ebbcc7cd13c3170777
third_party/jinja2/bccache.py
python
BytecodeCache.get_cache_key
(self, name, filename=None)
return hash.hexdigest()
Returns the unique hash key for this template name.
Returns the unique hash key for this template name.
[ "Returns", "the", "unique", "hash", "key", "for", "this", "template", "name", "." ]
def get_cache_key(self, name, filename=None): """Returns the unique hash key for this template name.""" hash = sha1(name.encode('utf-8')) if filename is not None: filename = '|' + filename if isinstance(filename, text_type): filename = filename.encode('utf-8') hash.update(filename) return hash.hexdigest()
[ "def", "get_cache_key", "(", "self", ",", "name", ",", "filename", "=", "None", ")", ":", "hash", "=", "sha1", "(", "name", ".", "encode", "(", "'utf-8'", ")", ")", "if", "filename", "is", "not", "None", ":", "filename", "=", "'|'", "+", "filename", ...
https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/third_party/jinja2/bccache.py#L158-L166
PaddlePaddle/Anakin
5fd68a6cc4c4620cd1a30794c1bf06eebd3f4730
tools/external_converter_v2/parser/tensorflow/auto_debug.py
python
AutoDebug.run
(self)
run debug mode :return:
run debug mode :return:
[ "run", "debug", "mode", ":", "return", ":" ]
def run(self): ''' run debug mode :return: ''' pass
[ "def", "run", "(", "self", ")", ":", "pass" ]
https://github.com/PaddlePaddle/Anakin/blob/5fd68a6cc4c4620cd1a30794c1bf06eebd3f4730/tools/external_converter_v2/parser/tensorflow/auto_debug.py#L112-L117
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/autograph/operators/logical.py
python
_tf_equal
(a, b)
return gen_math_ops.equal(a, b)
Overload of "equal" for Tensors.
Overload of "equal" for Tensors.
[ "Overload", "of", "equal", "for", "Tensors", "." ]
def _tf_equal(a, b): """Overload of "equal" for Tensors.""" return gen_math_ops.equal(a, b)
[ "def", "_tf_equal", "(", "a", ",", "b", ")", ":", "return", "gen_math_ops", ".", "equal", "(", "a", ",", "b", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/autograph/operators/logical.py#L88-L90
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/session_bundle/gc.py
python
mod_export_version
(n)
return keep
Creates a filter that keeps every export that is a multiple of n. Args: n: step size. Returns: A filter function that keeps paths where export_version % n == 0.
Creates a filter that keeps every export that is a multiple of n.
[ "Creates", "a", "filter", "that", "keeps", "every", "export", "that", "is", "a", "multiple", "of", "n", "." ]
def mod_export_version(n): """Creates a filter that keeps every export that is a multiple of n. Args: n: step size. Returns: A filter function that keeps paths where export_version % n == 0. """ def keep(paths): keepers = [] for p in paths: if p.export_version % n == 0: keepers.append(p) return sorted(keepers) return keep
[ "def", "mod_export_version", "(", "n", ")", ":", "def", "keep", "(", "paths", ")", ":", "keepers", "=", "[", "]", "for", "p", "in", "paths", ":", "if", "p", ".", "export_version", "%", "n", "==", "0", ":", "keepers", ".", "append", "(", "p", ")",...
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/session_bundle/gc.py#L134-L149
larq/compute-engine
a2611f8e33f5cb9b4d09b7c9aff7053620a24305
configure.py
python
setup_python
(environ_cp)
Setup python related env variables.
Setup python related env variables.
[ "Setup", "python", "related", "env", "variables", "." ]
def setup_python(environ_cp): """Setup python related env variables.""" # Get PYTHON_BIN_PATH, default is the current running python. default_python_bin_path = sys.executable ask_python_bin_path = ( "Please specify the location of python. [Default is " "{}]: " ).format(default_python_bin_path) while True: python_bin_path = get_from_env_or_user_or_default( environ_cp, "PYTHON_BIN_PATH", ask_python_bin_path, default_python_bin_path ) # Check if the path is valid if os.path.isfile(python_bin_path) and os.access(python_bin_path, os.X_OK): break elif not os.path.exists(python_bin_path): print("Invalid python path: {} cannot be found.".format(python_bin_path)) else: print( "{} is not executable. Is it the python binary?".format( python_bin_path ) ) environ_cp["PYTHON_BIN_PATH"] = "" # Convert python path to Windows style before checking lib and version if is_windows() or is_cygwin(): python_bin_path = cygpath(python_bin_path) # Get PYTHON_LIB_PATH python_lib_path = environ_cp.get("PYTHON_LIB_PATH") if not python_lib_path: python_lib_paths = get_python_path(environ_cp, python_bin_path) if environ_cp.get("USE_DEFAULT_PYTHON_LIB_PATH") == "1": python_lib_path = python_lib_paths[0] else: print( "Found possible Python library paths:\n %s" % "\n ".join(python_lib_paths) ) default_python_lib_path = python_lib_paths[0] python_lib_path = get_input( "Please input the desired Python library path to use. " "[Default is {}]\n".format(python_lib_paths[0]) ) if not python_lib_path: python_lib_path = default_python_lib_path environ_cp["PYTHON_LIB_PATH"] = python_lib_path python_version = get_python_version(python_bin_path) if int(python_version[0]) < 3: raise ValueError("Python versions prior to 3.x are unsupported.") print( f"Configuring builds with Python {python_version} support. 
To use a different " "Python version, re-run configuration inside a virtual environment or pass " "different binary/lib paths when prompted.\n" ) # Convert python path to Windows style before writing into bazel.rc if is_windows() or is_cygwin(): python_lib_path = cygpath(python_lib_path) # Set-up env variables used by python_configure.bzl write_action_env_to_bazelrc("PYTHON_BIN_PATH", python_bin_path) write_action_env_to_bazelrc("PYTHON_LIB_PATH", python_lib_path) write_to_bazelrc('build --python_path="{}"'.format(python_bin_path)) environ_cp["PYTHON_BIN_PATH"] = python_bin_path # If choosen python_lib_path is from a path specified in the PYTHONPATH # variable, need to tell bazel to include PYTHONPATH if environ_cp.get("PYTHONPATH"): python_paths = environ_cp.get("PYTHONPATH").split(":") if python_lib_path in python_paths: write_action_env_to_bazelrc("PYTHONPATH", environ_cp.get("PYTHONPATH"))
[ "def", "setup_python", "(", "environ_cp", ")", ":", "# Get PYTHON_BIN_PATH, default is the current running python.", "default_python_bin_path", "=", "sys", ".", "executable", "ask_python_bin_path", "=", "(", "\"Please specify the location of python. [Default is \"", "\"{}]: \"", ")...
https://github.com/larq/compute-engine/blob/a2611f8e33f5cb9b4d09b7c9aff7053620a24305/configure.py#L308-L380
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/setuptools/py2/setuptools/archive_util.py
python
default_filter
(src, dst)
return dst
The default progress/filter callback; returns True for all files
The default progress/filter callback; returns True for all files
[ "The", "default", "progress", "/", "filter", "callback", ";", "returns", "True", "for", "all", "files" ]
def default_filter(src, dst): """The default progress/filter callback; returns True for all files""" return dst
[ "def", "default_filter", "(", "src", ",", "dst", ")", ":", "return", "dst" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/setuptools/py2/setuptools/archive_util.py#L23-L25
koth/kcws
88efbd36a7022de4e6e90f5a1fb880cf87cfae9f
third_party/setuptools/pkg_resources.py
python
split_sections
(s)
Split a string or iterable thereof into (section, content) pairs Each ``section`` is a stripped version of the section header ("[section]") and each ``content`` is a list of stripped lines excluding blank lines and comment-only lines. If there are any such lines before the first section header, they're returned in a first ``section`` of ``None``.
Split a string or iterable thereof into (section, content) pairs
[ "Split", "a", "string", "or", "iterable", "thereof", "into", "(", "section", "content", ")", "pairs" ]
def split_sections(s): """Split a string or iterable thereof into (section, content) pairs Each ``section`` is a stripped version of the section header ("[section]") and each ``content`` is a list of stripped lines excluding blank lines and comment-only lines. If there are any such lines before the first section header, they're returned in a first ``section`` of ``None``. """ section = None content = [] for line in yield_lines(s): if line.startswith("["): if line.endswith("]"): if section or content: yield section, content section = line[1:-1].strip() content = [] else: raise ValueError("Invalid section heading", line) else: content.append(line) # wrap up last segment yield section, content
[ "def", "split_sections", "(", "s", ")", ":", "section", "=", "None", "content", "=", "[", "]", "for", "line", "in", "yield_lines", "(", "s", ")", ":", "if", "line", ".", "startswith", "(", "\"[\"", ")", ":", "if", "line", ".", "endswith", "(", "\"]...
https://github.com/koth/kcws/blob/88efbd36a7022de4e6e90f5a1fb880cf87cfae9f/third_party/setuptools/pkg_resources.py#L2790-L2813
facebookresearch/faiss
eb8781557f556505ca93f6f21fff932e17f0d9e0
contrib/exhaustive_search.py
python
threshold_radius_nres
(nres, dis, ids, thresh, keep_max=False)
return new_nres, dis[mask], ids[mask]
select a set of results
select a set of results
[ "select", "a", "set", "of", "results" ]
def threshold_radius_nres(nres, dis, ids, thresh, keep_max=False): """ select a set of results """ if keep_max: mask = dis > thresh else: mask = dis < thresh new_nres = np.zeros_like(nres) o = 0 for i, nr in enumerate(nres): nr = int(nr) # avoid issues with int64 + uint64 new_nres[i] = mask[o:o + nr].sum() o += nr return new_nres, dis[mask], ids[mask]
[ "def", "threshold_radius_nres", "(", "nres", ",", "dis", ",", "ids", ",", "thresh", ",", "keep_max", "=", "False", ")", ":", "if", "keep_max", ":", "mask", "=", "dis", ">", "thresh", "else", ":", "mask", "=", "dis", "<", "thresh", "new_nres", "=", "n...
https://github.com/facebookresearch/faiss/blob/eb8781557f556505ca93f6f21fff932e17f0d9e0/contrib/exhaustive_search.py#L151-L163
Samsung/veles
95ed733c2e49bc011ad98ccf2416ecec23fbf352
veles/txzmq/manager.py
python
ZmqContextManager.__init__
(self)
Constructor. Create ZeroMQ context.
Constructor.
[ "Constructor", "." ]
def __init__(self): """ Constructor. Create ZeroMQ context. """ if not self.initialized: self.initialized = True self.connections = set() self.context = Context(self.ioThreads) reactor.addSystemEventTrigger('during', 'shutdown', self.shutdown)
[ "def", "__init__", "(", "self", ")", ":", "if", "not", "self", ".", "initialized", ":", "self", ".", "initialized", "=", "True", "self", ".", "connections", "=", "set", "(", ")", "self", ".", "context", "=", "Context", "(", "self", ".", "ioThreads", ...
https://github.com/Samsung/veles/blob/95ed733c2e49bc011ad98ccf2416ecec23fbf352/veles/txzmq/manager.py#L72-L82