nwo
stringlengths
5
86
sha
stringlengths
40
40
path
stringlengths
4
189
language
stringclasses
1 value
identifier
stringlengths
1
94
parameters
stringlengths
2
4.03k
argument_list
stringclasses
1 value
return_statement
stringlengths
0
11.5k
docstring
stringlengths
1
33.2k
docstring_summary
stringlengths
0
5.15k
docstring_tokens
list
function
stringlengths
34
151k
function_tokens
list
url
stringlengths
90
278
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/setuptools/py2/setuptools/config.py
python
ConfigHandler._parse_file
(cls, value)
return '\n'.join( cls._read_file(path) for path in filepaths if (cls._assert_local(path) or True) and os.path.isfile(path) )
Represents value as a string, allowing including text from nearest files using `file:` directive. Directive is sandboxed and won't reach anything outside directory with setup.py. Examples: file: README.rst, CHANGELOG.md, src/file.txt :param str value: :rtype: str
Represents value as a string, allowing including text from nearest files using `file:` directive.
[ "Represents", "value", "as", "a", "string", "allowing", "including", "text", "from", "nearest", "files", "using", "file", ":", "directive", "." ]
def _parse_file(cls, value): """Represents value as a string, allowing including text from nearest files using `file:` directive. Directive is sandboxed and won't reach anything outside directory with setup.py. Examples: file: README.rst, CHANGELOG.md, src/file.txt :param str value: :rtype: str """ include_directive = 'file:' if not isinstance(value, string_types): return value if not value.startswith(include_directive): return value spec = value[len(include_directive):] filepaths = (os.path.abspath(path.strip()) for path in spec.split(',')) return '\n'.join( cls._read_file(path) for path in filepaths if (cls._assert_local(path) or True) and os.path.isfile(path) )
[ "def", "_parse_file", "(", "cls", ",", "value", ")", ":", "include_directive", "=", "'file:'", "if", "not", "isinstance", "(", "value", ",", "string_types", ")", ":", "return", "value", "if", "not", "value", ".", "startswith", "(", "include_directive", ")", ...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/setuptools/py2/setuptools/config.py#L271-L299
CRYTEK/CRYENGINE
232227c59a220cbbd311576f0fbeba7bb53b2a8c
Code/Tools/waf-1.7.13/waflib/Node.py
python
Node.bldpath
(self)
return self.path_from(self.ctx.bldnode)
Path seen from the build directory default/src/foo.cpp
Path seen from the build directory default/src/foo.cpp
[ "Path", "seen", "from", "the", "build", "directory", "default", "/", "src", "/", "foo", ".", "cpp" ]
def bldpath(self): "Path seen from the build directory default/src/foo.cpp" return self.path_from(self.ctx.bldnode)
[ "def", "bldpath", "(", "self", ")", ":", "return", "self", ".", "path_from", "(", "self", ".", "ctx", ".", "bldnode", ")" ]
https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Code/Tools/waf-1.7.13/waflib/Node.py#L766-L768
ricardoquesada/Spidermonkey
4a75ea2543408bd1b2c515aa95901523eeef7858
xpcom/idl-parser/xpidl.py
python
IDLParser.p_number_add
(self, p)
number : number '+' number | number '-' number | number '*' number
number : number '+' number | number '-' number | number '*' number
[ "number", ":", "number", "+", "number", "|", "number", "-", "number", "|", "number", "*", "number" ]
def p_number_add(self, p): """number : number '+' number | number '-' number | number '*' number""" n1 = p[1] n2 = p[3] if p[2] == '+': p[0] = lambda i: n1(i) + n2(i) elif p[2] == '-': p[0] = lambda i: n1(i) - n2(i) else: p[0] = lambda i: n1(i) * n2(i)
[ "def", "p_number_add", "(", "self", ",", "p", ")", ":", "n1", "=", "p", "[", "1", "]", "n2", "=", "p", "[", "3", "]", "if", "p", "[", "2", "]", "==", "'+'", ":", "p", "[", "0", "]", "=", "lambda", "i", ":", "n1", "(", "i", ")", "+", "...
https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/xpcom/idl-parser/xpidl.py#L1258-L1269
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_core.py
python
Rect2D.IsEmpty
(*args, **kwargs)
return _core_.Rect2D_IsEmpty(*args, **kwargs)
IsEmpty(self) -> bool
IsEmpty(self) -> bool
[ "IsEmpty", "(", "self", ")", "-", ">", "bool" ]
def IsEmpty(*args, **kwargs): """IsEmpty(self) -> bool""" return _core_.Rect2D_IsEmpty(*args, **kwargs)
[ "def", "IsEmpty", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "Rect2D_IsEmpty", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_core.py#L1975-L1977
Kitware/ParaView
f760af9124ff4634b23ebbeab95a4f56e0261955
ThirdParty/cinema/paraview/tpl/cinema_python/database/store.py
python
Store.iterate
(self, parameters=None, fixedargs=None, progressObject=None)
Run through all combinations of parameter/value pairs without visiting any combinations that do not satisfy dependencies among them. Parameters, if supplied, is a list of parameter names to enforce an ordering. Fixed arguments, if supplied, are parameter/value pairs that we want to hold constant in the exploration.
Run through all combinations of parameter/value pairs without visiting any combinations that do not satisfy dependencies among them. Parameters, if supplied, is a list of parameter names to enforce an ordering. Fixed arguments, if supplied, are parameter/value pairs that we want to hold constant in the exploration.
[ "Run", "through", "all", "combinations", "of", "parameter", "/", "value", "pairs", "without", "visiting", "any", "combinations", "that", "do", "not", "satisfy", "dependencies", "among", "them", ".", "Parameters", "if", "supplied", "is", "a", "list", "of", "par...
def iterate(self, parameters=None, fixedargs=None, progressObject=None): """ Run through all combinations of parameter/value pairs without visiting any combinations that do not satisfy dependencies among them. Parameters, if supplied, is a list of parameter names to enforce an ordering. Fixed arguments, if supplied, are parameter/value pairs that we want to hold constant in the exploration. """ # optimization - cache and reuse to avoid expensive search argstr = json.dumps((parameters, fixedargs), sort_keys=True) if argstr in self.cached_searches: for x in self.cached_searches[argstr]: yield x return # prepare to iterate through all the possibilities, in order if one is # given param_names = parameters if parameters else self.parameter_list.keys() # print ("PARAMETERS", param_names) params = [] values = [] total_elem = 1.0 for name in param_names: vals = self.get_parameter(name)['values'] if fixedargs and name in fixedargs: continue total_elem *= len(vals) params.append(name) values.append(vals) # The algorithm is to iterate through all combinations, and remove # the impossible ones. I use a set to avoid redundant combinations. # In order to use the set I serialize to make something hashable. # Then I insert into a list to preserve the (hopefully optimized) # order. 
ok_descs = set() ordered_descs = [] elem_accum = 0.0 for element in itertools.product(*values): descriptor = dict(zip(params, element)) if progressObject: elem_accum += 1.0 progressObject.UpdateProgress(elem_accum / total_elem) if fixedargs is not None: descriptor.update(fixedargs) ok_desc = {} for param, value in py23iteritems(descriptor): if self.dependencies_satisfied(param, descriptor): ok_desc.update({param: value}) OK = True if fixedargs: for k, v in py23iteritems(fixedargs): if not (k in ok_desc and ok_desc[k] == v): OK = False if OK: strval = "{ " for name in sorted(ok_desc.keys()): strval = strval + '"' + name + '": "' + str( ok_desc[name]) + '", ' strval = strval[0:-2] + "}" # strval = json.dumps(ok_desc, sort_keys=True) # slower if strval not in ok_descs: ok_descs.add(strval) ordered_descs.append(ok_desc) yield ok_desc self.cached_searches[argstr] = ordered_descs
[ "def", "iterate", "(", "self", ",", "parameters", "=", "None", ",", "fixedargs", "=", "None", ",", "progressObject", "=", "None", ")", ":", "# optimization - cache and reuse to avoid expensive search", "argstr", "=", "json", ".", "dumps", "(", "(", "parameters", ...
https://github.com/Kitware/ParaView/blob/f760af9124ff4634b23ebbeab95a4f56e0261955/ThirdParty/cinema/paraview/tpl/cinema_python/database/store.py#L551-L624
deepmind/spiral
5ee538cedf1d9cc827ced93fe86a44f8b8742ac0
spiral/environments/libmypaint.py
python
LibMyPaint.step
(self, action)
return time_step
Performs an environment step.
Performs an environment step.
[ "Performs", "an", "environment", "step", "." ]
def step(self, action): """Performs an environment step.""" # If the environment has just been created or finished an episode # we should reset it (ignoring the action). if self._prev_step_type in {None, environment.StepType.LAST}: return self.reset() for k in action.keys(): self._action_spec[k].validate(action[k]) locations, flag, pressure, log_size, red, green, blue = ( self._process_action(action)) loc_control, loc_end = locations # Perform action. self._surface.BeginAtomic() if flag == 1: # The agent produces a visible stroke. self._action_mask = self._action_masks["paint"] y_c, x_c = loc_control y_e, x_e = loc_end self._bezier_to(y_c, x_c, y_e, x_e, pressure, log_size, red, green, blue) # Update episode statistics. self.stats["total_strokes"] += 1 if not self._prev_brush_params["is_painting"]: self.stats["total_disjoint"] += 1 elif flag == 0: # The agent moves to a new location. self._action_mask = self._action_masks["move"] y_e, x_e = loc_end self._move_to(y_e, x_e) else: raise ValueError("Invalid flag value") self._surface.EndAtomic() # Handle termination of the episode. reward = 0.0 self._episode_step += 1 if self._episode_step == self._episode_length: time_step = environment.termination(reward=reward, observation=self.observation()) else: time_step = environment.transition(reward=reward, observation=self.observation(), discount=self._discount) self._prev_step_type = time_step.step_type return time_step
[ "def", "step", "(", "self", ",", "action", ")", ":", "# If the environment has just been created or finished an episode", "# we should reset it (ignoring the action).", "if", "self", ".", "_prev_step_type", "in", "{", "None", ",", "environment", ".", "StepType", ".", "LAS...
https://github.com/deepmind/spiral/blob/5ee538cedf1d9cc827ced93fe86a44f8b8742ac0/spiral/environments/libmypaint.py#L420-L468
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/AWSPythonSDK/1.5.8/s3transfer/bandwidth.py
python
RequestExceededException.__init__
(self, requested_amt, retry_time)
Error when requested amount exceeds what is allowed The request that raised this error should be retried after waiting the time specified by ``retry_time``. :type requested_amt: int :param requested_amt: The originally requested byte amount :type retry_time: float :param retry_time: The length in time to wait to retry for the requested amount
Error when requested amount exceeds what is allowed
[ "Error", "when", "requested", "amount", "exceeds", "what", "is", "allowed" ]
def __init__(self, requested_amt, retry_time): """Error when requested amount exceeds what is allowed The request that raised this error should be retried after waiting the time specified by ``retry_time``. :type requested_amt: int :param requested_amt: The originally requested byte amount :type retry_time: float :param retry_time: The length in time to wait to retry for the requested amount """ self.requested_amt = requested_amt self.retry_time = retry_time msg = ( 'Request amount %s exceeded the amount available. Retry in %s' % ( requested_amt, retry_time) ) super(RequestExceededException, self).__init__(msg)
[ "def", "__init__", "(", "self", ",", "requested_amt", ",", "retry_time", ")", ":", "self", ".", "requested_amt", "=", "requested_amt", "self", ".", "retry_time", "=", "retry_time", "msg", "=", "(", "'Request amount %s exceeded the amount available. Retry in %s'", "%",...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/s3transfer/bandwidth.py#L18-L37
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/AWSPythonSDK/1.5.8/botocore/vendored/requests/packages/urllib3/packages/six.py
python
iteritems
(d)
return iter(getattr(d, _iteritems)())
Return an iterator over the (key, value) pairs of a dictionary.
Return an iterator over the (key, value) pairs of a dictionary.
[ "Return", "an", "iterator", "over", "the", "(", "key", "value", ")", "pairs", "of", "a", "dictionary", "." ]
def iteritems(d): """Return an iterator over the (key, value) pairs of a dictionary.""" return iter(getattr(d, _iteritems)())
[ "def", "iteritems", "(", "d", ")", ":", "return", "iter", "(", "getattr", "(", "d", ",", "_iteritems", ")", "(", ")", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/botocore/vendored/requests/packages/urllib3/packages/six.py#L271-L273
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/tkinter/__init__.py
python
Spinbox.__init__
(self, master=None, cnf={}, **kw)
Construct a spinbox widget with the parent MASTER. STANDARD OPTIONS activebackground, background, borderwidth, cursor, exportselection, font, foreground, highlightbackground, highlightcolor, highlightthickness, insertbackground, insertborderwidth, insertofftime, insertontime, insertwidth, justify, relief, repeatdelay, repeatinterval, selectbackground, selectborderwidth selectforeground, takefocus, textvariable xscrollcommand. WIDGET-SPECIFIC OPTIONS buttonbackground, buttoncursor, buttondownrelief, buttonuprelief, command, disabledbackground, disabledforeground, format, from, invalidcommand, increment, readonlybackground, state, to, validate, validatecommand values, width, wrap,
Construct a spinbox widget with the parent MASTER.
[ "Construct", "a", "spinbox", "widget", "with", "the", "parent", "MASTER", "." ]
def __init__(self, master=None, cnf={}, **kw): """Construct a spinbox widget with the parent MASTER. STANDARD OPTIONS activebackground, background, borderwidth, cursor, exportselection, font, foreground, highlightbackground, highlightcolor, highlightthickness, insertbackground, insertborderwidth, insertofftime, insertontime, insertwidth, justify, relief, repeatdelay, repeatinterval, selectbackground, selectborderwidth selectforeground, takefocus, textvariable xscrollcommand. WIDGET-SPECIFIC OPTIONS buttonbackground, buttoncursor, buttondownrelief, buttonuprelief, command, disabledbackground, disabledforeground, format, from, invalidcommand, increment, readonlybackground, state, to, validate, validatecommand values, width, wrap, """ Widget.__init__(self, master, 'spinbox', cnf, kw)
[ "def", "__init__", "(", "self", ",", "master", "=", "None", ",", "cnf", "=", "{", "}", ",", "*", "*", "kw", ")", ":", "Widget", ".", "__init__", "(", "self", ",", "master", ",", "'spinbox'", ",", "cnf", ",", "kw", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/tkinter/__init__.py#L3619-L3646
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemDefectReporter/v1/AWS/lambda-code/ServiceLambda/api/service_upload.py
python
__generate_uuid
()
return str(uuid.uuid4())
Generates universally unique identifier using uuid4 to guarantee uniqueness without exposing computer network address.
Generates universally unique identifier using uuid4 to guarantee uniqueness without exposing computer network address.
[ "Generates", "universally", "unique", "identifier", "using", "uuid4", "to", "guarantee", "uniqueness", "without", "exposing", "computer", "network", "address", "." ]
def __generate_uuid(): '''Generates universally unique identifier using uuid4 to guarantee uniqueness without exposing computer network address.''' return str(uuid.uuid4())
[ "def", "__generate_uuid", "(", ")", ":", "return", "str", "(", "uuid", ".", "uuid4", "(", ")", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/lambda-code/ServiceLambda/api/service_upload.py#L208-L211
KhronosGroup/Vulkan-Headers
b32da5329b50e3cb96229aaecba9ded032fe29cc
registry/conventions.py
python
ConventionsBase.should_skip_checking_codes
(self)
return False
Return True if more than the basic validation of return codes should be skipped for a command.
Return True if more than the basic validation of return codes should be skipped for a command.
[ "Return", "True", "if", "more", "than", "the", "basic", "validation", "of", "return", "codes", "should", "be", "skipped", "for", "a", "command", "." ]
def should_skip_checking_codes(self): """Return True if more than the basic validation of return codes should be skipped for a command.""" return False
[ "def", "should_skip_checking_codes", "(", "self", ")", ":", "return", "False" ]
https://github.com/KhronosGroup/Vulkan-Headers/blob/b32da5329b50e3cb96229aaecba9ded032fe29cc/registry/conventions.py#L307-L311
mindspore-ai/mindspore
fb8fd3338605bb34fa5cea054e535a8b1d753fab
mindspore/python/mindspore/ops/_grad/grad_quant_ops.py
python
get_bprop_fakequant_with_minmax_per_layer_update
(self)
return bprop
Generate bprop for MinMaxUpdatePerLayer for Ascend
Generate bprop for MinMaxUpdatePerLayer for Ascend
[ "Generate", "bprop", "for", "MinMaxUpdatePerLayer", "for", "Ascend" ]
def get_bprop_fakequant_with_minmax_per_layer_update(self): """Generate bprop for MinMaxUpdatePerLayer for Ascend""" def bprop(x, x_min, x_max, out, dout): return zeros_like(x), zeros_like(x_min), zeros_like(x_max) return bprop
[ "def", "get_bprop_fakequant_with_minmax_per_layer_update", "(", "self", ")", ":", "def", "bprop", "(", "x", ",", "x_min", ",", "x_max", ",", "out", ",", "dout", ")", ":", "return", "zeros_like", "(", "x", ")", ",", "zeros_like", "(", "x_min", ")", ",", "...
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/ops/_grad/grad_quant_ops.py#L165-L171
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_internal/network/lazy_wheel.py
python
LazyZipOverHTTP._stream_response
(self, start, end, base_headers=HEADERS)
return self._session.get(self._url, headers=headers, stream=True)
Return HTTP response to a range request from start to end.
Return HTTP response to a range request from start to end.
[ "Return", "HTTP", "response", "to", "a", "range", "request", "from", "start", "to", "end", "." ]
def _stream_response(self, start, end, base_headers=HEADERS): # type: (int, int, Dict[str, str]) -> Response """Return HTTP response to a range request from start to end.""" headers = base_headers.copy() headers['Range'] = f'bytes={start}-{end}' # TODO: Get range requests to be correctly cached headers['Cache-Control'] = 'no-cache' return self._session.get(self._url, headers=headers, stream=True)
[ "def", "_stream_response", "(", "self", ",", "start", ",", "end", ",", "base_headers", "=", "HEADERS", ")", ":", "# type: (int, int, Dict[str, str]) -> Response", "headers", "=", "base_headers", ".", "copy", "(", ")", "headers", "[", "'Range'", "]", "=", "f'byte...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_internal/network/lazy_wheel.py#L189-L196
pmq20/node-packer
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
lts/tools/gyp/pylib/gyp/generator/cmake.py
python
CreateCMakeTargetBaseName
(qualified_target)
return StringToCMakeTargetName(cmake_target_base_name)
This is the name we would like the target to have.
This is the name we would like the target to have.
[ "This", "is", "the", "name", "we", "would", "like", "the", "target", "to", "have", "." ]
def CreateCMakeTargetBaseName(qualified_target): """This is the name we would like the target to have.""" _, gyp_target_name, gyp_target_toolset = ( gyp.common.ParseQualifiedTarget(qualified_target)) cmake_target_base_name = gyp_target_name if gyp_target_toolset and gyp_target_toolset != 'target': cmake_target_base_name += '_' + gyp_target_toolset return StringToCMakeTargetName(cmake_target_base_name)
[ "def", "CreateCMakeTargetBaseName", "(", "qualified_target", ")", ":", "_", ",", "gyp_target_name", ",", "gyp_target_toolset", "=", "(", "gyp", ".", "common", ".", "ParseQualifiedTarget", "(", "qualified_target", ")", ")", "cmake_target_base_name", "=", "gyp_target_na...
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/tools/gyp/pylib/gyp/generator/cmake.py#L557-L564
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
deps/src/libxml2-2.9.1/python/libxml2.py
python
xmlNode.docCopyNodeList
(self, doc)
return __tmp
Do a recursive copy of the node list.
Do a recursive copy of the node list.
[ "Do", "a", "recursive", "copy", "of", "the", "node", "list", "." ]
def docCopyNodeList(self, doc): """Do a recursive copy of the node list. """ if doc is None: doc__o = None else: doc__o = doc._o ret = libxml2mod.xmlDocCopyNodeList(doc__o, self._o) if ret is None:raise treeError('xmlDocCopyNodeList() failed') __tmp = xmlNode(_obj=ret) return __tmp
[ "def", "docCopyNodeList", "(", "self", ",", "doc", ")", ":", "if", "doc", "is", "None", ":", "doc__o", "=", "None", "else", ":", "doc__o", "=", "doc", ".", "_o", "ret", "=", "libxml2mod", ".", "xmlDocCopyNodeList", "(", "doc__o", ",", "self", ".", "_...
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2.py#L3195-L3202
runtimejs/runtime
0a6e84c30823d35a4548d6634166784260ae7b74
deps/v8/.ycm_extra_conf.py
python
FlagsForFile
(filename)
return { 'flags': final_flags, 'do_cache': True }
This is the main entry point for YCM. Its interface is fixed. Args: filename: (String) Path to source file being edited. Returns: (Dictionary) 'flags': (List of Strings) Command line flags. 'do_cache': (Boolean) True if the result should be cached.
This is the main entry point for YCM. Its interface is fixed.
[ "This", "is", "the", "main", "entry", "point", "for", "YCM", ".", "Its", "interface", "is", "fixed", "." ]
def FlagsForFile(filename): """This is the main entry point for YCM. Its interface is fixed. Args: filename: (String) Path to source file being edited. Returns: (Dictionary) 'flags': (List of Strings) Command line flags. 'do_cache': (Boolean) True if the result should be cached. """ v8_root = FindV8SrcFromFilename(filename) v8_flags = GetClangCommandFromNinjaForFilename(v8_root, filename) final_flags = flags + v8_flags return { 'flags': final_flags, 'do_cache': True }
[ "def", "FlagsForFile", "(", "filename", ")", ":", "v8_root", "=", "FindV8SrcFromFilename", "(", "filename", ")", "v8_flags", "=", "GetClangCommandFromNinjaForFilename", "(", "v8_root", ",", "filename", ")", "final_flags", "=", "flags", "+", "v8_flags", "return", "...
https://github.com/runtimejs/runtime/blob/0a6e84c30823d35a4548d6634166784260ae7b74/deps/v8/.ycm_extra_conf.py#L176-L193
miyosuda/TensorFlowAndroidMNIST
7b5a4603d2780a8a2834575706e9001977524007
jni-build/jni/include/tensorflow/contrib/layers/python/layers/summaries.py
python
summarize_tensors
(tensors, summarizer=summarize_tensor)
return [summarizer(tensor) for tensor in tensors]
Summarize a set of tensors.
Summarize a set of tensors.
[ "Summarize", "a", "set", "of", "tensors", "." ]
def summarize_tensors(tensors, summarizer=summarize_tensor): """Summarize a set of tensors.""" return [summarizer(tensor) for tensor in tensors]
[ "def", "summarize_tensors", "(", "tensors", ",", "summarizer", "=", "summarize_tensor", ")", ":", "return", "[", "summarizer", "(", "tensor", ")", "for", "tensor", "in", "tensors", "]" ]
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/contrib/layers/python/layers/summaries.py#L150-L152
domino-team/openwrt-cc
8b181297c34d14d3ca521cc9f31430d561dbc688
package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/deps/v8_inspector/third_party/jinja2/jinja2/compiler.py
python
CodeGenerator.outdent
(self, step=1)
Outdent by step.
Outdent by step.
[ "Outdent", "by", "step", "." ]
def outdent(self, step=1): """Outdent by step.""" self._indentation -= step
[ "def", "outdent", "(", "self", ",", "step", "=", "1", ")", ":", "self", ".", "_indentation", "-=", "step" ]
https://github.com/domino-team/openwrt-cc/blob/8b181297c34d14d3ca521cc9f31430d561dbc688/package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/deps/v8_inspector/third_party/jinja2/jinja2/compiler.py#L459-L461
google/llvm-propeller
45c226984fe8377ebfb2ad7713c680d652ba678d
lldb/examples/python/mach_o.py
python
TerminalColors.underline
(self, on=True)
return ''
Enable or disable underline depending on the "on" parameter.
Enable or disable underline depending on the "on" parameter.
[ "Enable", "or", "disable", "underline", "depending", "on", "the", "on", "parameter", "." ]
def underline(self, on=True): '''Enable or disable underline depending on the "on" parameter.''' if self.enabled: if on: return "\x1b[4m" else: return "\x1b[24m" return ''
[ "def", "underline", "(", "self", ",", "on", "=", "True", ")", ":", "if", "self", ".", "enabled", ":", "if", "on", ":", "return", "\"\\x1b[4m\"", "else", ":", "return", "\"\\x1b[24m\"", "return", "''" ]
https://github.com/google/llvm-propeller/blob/45c226984fe8377ebfb2ad7713c680d652ba678d/lldb/examples/python/mach_o.py#L244-L251
miyosuda/TensorFlowAndroidDemo
35903e0221aa5f109ea2dbef27f20b52e317f42d
jni-build/jni/include/tensorflow/contrib/session_bundle/exporter.py
python
generic_signature
(name_tensor_map)
return signature
Creates a generic signature of name to Tensor name. Args: name_tensor_map: Map from logical name to Tensor. Returns: A Signature message.
Creates a generic signature of name to Tensor name.
[ "Creates", "a", "generic", "signature", "of", "name", "to", "Tensor", "name", "." ]
def generic_signature(name_tensor_map): """Creates a generic signature of name to Tensor name. Args: name_tensor_map: Map from logical name to Tensor. Returns: A Signature message. """ signature = manifest_pb2.Signature() for name, tensor in six.iteritems(name_tensor_map): signature.generic_signature.map[name].tensor_name = tensor.name return signature
[ "def", "generic_signature", "(", "name_tensor_map", ")", ":", "signature", "=", "manifest_pb2", ".", "Signature", "(", ")", "for", "name", ",", "tensor", "in", "six", ".", "iteritems", "(", "name_tensor_map", ")", ":", "signature", ".", "generic_signature", "....
https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/contrib/session_bundle/exporter.py#L109-L121
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python/src/Lib/lib-tk/turtle.py
python
TPen.hideturtle
(self)
Makes the turtle invisible. Aliases: hideturtle | ht No argument. It's a good idea to do this while you're in the middle of a complicated drawing, because hiding the turtle speeds up the drawing observably. Example (for a Turtle instance named turtle): >>> turtle.hideturtle()
Makes the turtle invisible.
[ "Makes", "the", "turtle", "invisible", "." ]
def hideturtle(self): """Makes the turtle invisible. Aliases: hideturtle | ht No argument. It's a good idea to do this while you're in the middle of a complicated drawing, because hiding the turtle speeds up the drawing observably. Example (for a Turtle instance named turtle): >>> turtle.hideturtle() """ self.pen(shown=False)
[ "def", "hideturtle", "(", "self", ")", ":", "self", ".", "pen", "(", "shown", "=", "False", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/lib-tk/turtle.py#L2223-L2237
y123456yz/reading-and-annotate-mongodb-3.6
93280293672ca7586dc24af18132aa61e4ed7fcf
mongo/buildscripts/idl/idl/compiler.py
python
_write_dependencies
(spec)
Write a list of dependencies to standard out.
Write a list of dependencies to standard out.
[ "Write", "a", "list", "of", "dependencies", "to", "standard", "out", "." ]
def _write_dependencies(spec): # type: (syntax.IDLSpec) -> None """Write a list of dependencies to standard out.""" if not spec.imports: return dependencies = sorted(spec.imports.dependencies) for resolved_file_name in dependencies: print(resolved_file_name)
[ "def", "_write_dependencies", "(", "spec", ")", ":", "# type: (syntax.IDLSpec) -> None", "if", "not", "spec", ".", "imports", ":", "return", "dependencies", "=", "sorted", "(", "spec", ".", "imports", ".", "dependencies", ")", "for", "resolved_file_name", "in", ...
https://github.com/y123456yz/reading-and-annotate-mongodb-3.6/blob/93280293672ca7586dc24af18132aa61e4ed7fcf/mongo/buildscripts/idl/idl/compiler.py#L99-L107
vmware/concord-bft
ec036a384b4c81be0423d4b429bd37900b13b864
util/pyclient/bft_client.py
python
TcpTlsClient._recv_data
(self, required_replies, dest_replicas, cancel_scope)
Receive reply messages until a quorum is achieved or the enclosing cancel_scope times out.
Receive reply messages until a quorum is achieved or the enclosing cancel_scope times out.
[ "Receive", "reply", "messages", "until", "a", "quorum", "is", "achieved", "or", "the", "enclosing", "cancel_scope", "times", "out", "." ]
async def _recv_data(self, required_replies, dest_replicas, cancel_scope): """ Receive reply messages until a quorum is achieved or the enclosing cancel_scope times out. """ replicas_addr = [(r.ip, r.port) for r in dest_replicas] async with trio.open_nursery() as nursery: for dest_addr in replicas_addr: if dest_addr in self.ssl_streams.keys(): nursery.start_soon(self._receive_from_replica, dest_addr, replicas_addr, required_replies, nursery.cancel_scope) else: self.establish_ssl_stream_parklot[dest_addr].unpark()
[ "async", "def", "_recv_data", "(", "self", ",", "required_replies", ",", "dest_replicas", ",", "cancel_scope", ")", ":", "replicas_addr", "=", "[", "(", "r", ".", "ip", ",", "r", ".", "port", ")", "for", "r", "in", "dest_replicas", "]", "async", "with", ...
https://github.com/vmware/concord-bft/blob/ec036a384b4c81be0423d4b429bd37900b13b864/util/pyclient/bft_client.py#L604-L615
Cisco-Talos/moflow
ed71dfb0540d9e0d7a4c72f0881b58958d573728
BAP-0.7-moflow/libtracewrap/libtrace/protobuf/python/mox.py
python
MockAnything.__getattr__
(self, method_name)
return self._CreateMockMethod(method_name)
Intercept method calls on this object. A new MockMethod is returned that is aware of the MockAnything's state (record or replay). The call will be recorded or replayed by the MockMethod's __call__. Args: # method name: the name of the method being called. method_name: str Returns: A new MockMethod aware of MockAnything's state (record or replay).
Intercept method calls on this object.
[ "Intercept", "method", "calls", "on", "this", "object", "." ]
def __getattr__(self, method_name): """Intercept method calls on this object. A new MockMethod is returned that is aware of the MockAnything's state (record or replay). The call will be recorded or replayed by the MockMethod's __call__. Args: # method name: the name of the method being called. method_name: str Returns: A new MockMethod aware of MockAnything's state (record or replay). """ return self._CreateMockMethod(method_name)
[ "def", "__getattr__", "(", "self", ",", "method_name", ")", ":", "return", "self", ".", "_CreateMockMethod", "(", "method_name", ")" ]
https://github.com/Cisco-Talos/moflow/blob/ed71dfb0540d9e0d7a4c72f0881b58958d573728/BAP-0.7-moflow/libtracewrap/libtrace/protobuf/python/mox.py#L278-L293
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
chrome/common/extensions/docs/server2/path_canonicalizer.py
python
PathCanonicalizer.Canonicalize
(self, path)
return max_prefix
Returns the canonical path for |path|.
Returns the canonical path for |path|.
[ "Returns", "the", "canonical", "path", "for", "|path|", "." ]
def Canonicalize(self, path): '''Returns the canonical path for |path|. ''' canonical_paths, simplified_paths_map = self._LoadCache().Get() # Path may already be the canonical path. if path in canonical_paths: return path # Path not found. Our single heuristic: find |base| in the directory # structure with the longest common prefix of |path|. _, base = SplitParent(path) potential_paths = simplified_paths_map.get(_SimplifyFileName(base)) if not potential_paths: # There is no file with anything close to that name. return path # The most likely canonical file is the one with the longest common prefix # with |path|. This is slightly weaker than it could be; |path| is # compared, not the simplified form of |path|, which may matter. max_prefix = potential_paths[0] max_prefix_length = len(posixpath.commonprefix((max_prefix, path))) for path_for_file in potential_paths[1:]: prefix_length = len(posixpath.commonprefix((path_for_file, path))) if prefix_length > max_prefix_length: max_prefix, max_prefix_length = path_for_file, prefix_length return max_prefix
[ "def", "Canonicalize", "(", "self", ",", "path", ")", ":", "canonical_paths", ",", "simplified_paths_map", "=", "self", ".", "_LoadCache", "(", ")", ".", "Get", "(", ")", "# Path may already be the canonical path.", "if", "path", "in", "canonical_paths", ":", "r...
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/chrome/common/extensions/docs/server2/path_canonicalizer.py#L79-L106
BitcoinUnlimited/BitcoinUnlimited
05de381c02eb4bfca94957733acadfa217527f25
contrib/devtools/optimize-pngs.py
python
file_hash
(filename)
Return hash of raw file contents
Return hash of raw file contents
[ "Return", "hash", "of", "raw", "file", "contents" ]
def file_hash(filename): '''Return hash of raw file contents''' with open(filename, 'rb') as f: return hashlib.sha256(f.read()).hexdigest()
[ "def", "file_hash", "(", "filename", ")", ":", "with", "open", "(", "filename", ",", "'rb'", ")", "as", "f", ":", "return", "hashlib", ".", "sha256", "(", "f", ".", "read", "(", ")", ")", ".", "hexdigest", "(", ")" ]
https://github.com/BitcoinUnlimited/BitcoinUnlimited/blob/05de381c02eb4bfca94957733acadfa217527f25/contrib/devtools/optimize-pngs.py#L12-L15
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/fastparquet/core.py
python
read_col
(column, schema_helper, infile, use_cat=False, grab_dict=False, selfmade=False, assign=None, catdef=None)
Using the given metadata, read one column in one row-group. Parameters ---------- column: thrift structure Details on the column schema_helper: schema.SchemaHelper Based on the schema for this parquet data infile: open file or string If a string, will open; if an open object, will use as-is use_cat: bool (False) If this column is encoded throughout with dict encoding, give back a pandas categorical column; otherwise, decode to values grab_dict: bool (False) Short-cut mode to return the dictionary values only - skips the actual data.
Using the given metadata, read one column in one row-group.
[ "Using", "the", "given", "metadata", "read", "one", "column", "in", "one", "row", "-", "group", "." ]
def read_col(column, schema_helper, infile, use_cat=False, grab_dict=False, selfmade=False, assign=None, catdef=None): """Using the given metadata, read one column in one row-group. Parameters ---------- column: thrift structure Details on the column schema_helper: schema.SchemaHelper Based on the schema for this parquet data infile: open file or string If a string, will open; if an open object, will use as-is use_cat: bool (False) If this column is encoded throughout with dict encoding, give back a pandas categorical column; otherwise, decode to values grab_dict: bool (False) Short-cut mode to return the dictionary values only - skips the actual data. """ cmd = column.meta_data se = schema_helper.schema_element(cmd.path_in_schema) off = min((cmd.dictionary_page_offset or cmd.data_page_offset, cmd.data_page_offset)) infile.seek(off) ph = read_thrift(infile, parquet_thrift.PageHeader) dic = None if ph.type == parquet_thrift.PageType.DICTIONARY_PAGE: dic = np.array(read_dictionary_page(infile, schema_helper, ph, cmd)) ph = read_thrift(infile, parquet_thrift.PageHeader) dic = convert(dic, se) if grab_dict: return dic if use_cat and dic is not None: # fastpath skips the check the number of categories hasn't changed. # In this case, they may change, if the default RangeIndex was used. 
catdef._set_categories(pd.Index(dic), fastpath=True) if np.iinfo(assign.dtype).max < len(dic): raise RuntimeError('Assigned array dtype (%s) cannot accommodate ' 'number of category labels (%i)' % (assign.dtype, len(dic))) rows = cmd.num_values do_convert = True if use_cat: my_nan = -1 do_convert = False else: if assign.dtype.kind in ['f', 'i']: my_nan = np.nan elif assign.dtype.kind in ["M", 'm']: my_nan = -9223372036854775808 # int64 version of NaT else: my_nan = None num = 0 row_idx = 0 while True: if ph.type == parquet_thrift.PageType.DICTIONARY_PAGE: dic2 = np.array(read_dictionary_page(infile, schema_helper, ph, cmd)) dic2 = convert(dic2, se) if use_cat and (dic2 != dic).any(): raise RuntimeError("Attempt to read as categorical a column" "with multiple dictionary pages.") dic = dic2 ph = read_thrift(infile, parquet_thrift.PageHeader) continue if (selfmade and hasattr(cmd, 'statistics') and getattr(cmd.statistics, 'null_count', 1) == 0): skip_nulls = True else: skip_nulls = False defi, rep, val = read_data_page(infile, schema_helper, ph, cmd, skip_nulls, selfmade=selfmade) if rep is not None and assign.dtype.kind != 'O': # pragma: no cover # this should never get called raise ValueError('Column contains repeated value, must use object ' 'type, but has assumed type: %s' % assign.dtype) d = ph.data_page_header.encoding == parquet_thrift.Encoding.PLAIN_DICTIONARY if use_cat and not d: if not hasattr(catdef, '_set_categories'): raise ValueError('Returning category type requires all chunks' ' to use dictionary encoding; column: %s', cmd.path_in_schema) max_defi = schema_helper.max_definition_level(cmd.path_in_schema) if rep is not None: null = not schema_helper.is_required(cmd.path_in_schema[0]) null_val = (se.repetition_type != parquet_thrift.FieldRepetitionType.REQUIRED) row_idx = 1 + encoding._assemble_objects(assign, defi, rep, val, dic, d, null, null_val, max_defi, row_idx) elif defi is not None: max_defi = 
schema_helper.max_definition_level(cmd.path_in_schema) part = assign[num:num+len(defi)] part[defi != max_defi] = my_nan if d and not use_cat: part[defi == max_defi] = dic[val] elif do_convert: part[defi == max_defi] = convert(val, se) else: part[defi == max_defi] = val else: piece = assign[num:num+len(val)] if use_cat and not d: # only possible for multi-index warnings.warn("Non-categorical multi-index is likely brittle") val = convert(val, se) try: i = pd.Categorical(val) except: i = pd.Categorical(val.tolist()) catdef._set_categories(pd.Index(i.categories), fastpath=True) piece[:] = i.codes elif d and not use_cat: piece[:] = dic[val] elif do_convert: piece[:] = convert(val, se) else: piece[:] = val num += len(defi) if defi is not None else len(val) if num >= rows: break ph = read_thrift(infile, parquet_thrift.PageHeader)
[ "def", "read_col", "(", "column", ",", "schema_helper", ",", "infile", ",", "use_cat", "=", "False", ",", "grab_dict", "=", "False", ",", "selfmade", "=", "False", ",", "assign", "=", "None", ",", "catdef", "=", "None", ")", ":", "cmd", "=", "column", ...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/fastparquet/core.py#L170-L296
cms-sw/cmssw
fd9de012d503d3405420bcbeec0ec879baa57cf2
Validation/RecoTrack/python/plotting/ntupleDataFormat.py
python
TrackMatchInfo.__init__
(self, tree, index, trkindex, prefix)
Constructor. Arguments: tree -- TTree object index -- Index of the object (TrackingParticle) matched to track trkindex -- Index of the track match (second index in _trkIdx branch) prefix -- String for prefix of the object (TrackingParticle) matched to track
Constructor.
[ "Constructor", "." ]
def __init__(self, tree, index, trkindex, prefix): """Constructor. Arguments: tree -- TTree object index -- Index of the object (TrackingParticle) matched to track trkindex -- Index of the track match (second index in _trkIdx branch) prefix -- String for prefix of the object (TrackingParticle) matched to track """ super(TrackMatchInfo, self).__init__(tree, index, prefix) self._trkindex = trkindex
[ "def", "__init__", "(", "self", ",", "tree", ",", "index", ",", "trkindex", ",", "prefix", ")", ":", "super", "(", "TrackMatchInfo", ",", "self", ")", ".", "__init__", "(", "tree", ",", "index", ",", "prefix", ")", "self", ".", "_trkindex", "=", "trk...
https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/Validation/RecoTrack/python/plotting/ntupleDataFormat.py#L617-L627
mozilla/DeepSpeech
aa1d28530d531d0d92289bf5f11a49fe516fdc86
training/deepspeech_training/util/audio.py
python
Sample.change_audio_type
(self, new_audio_type, bitrate=None)
In-place conversion of audio data into a different representation. Parameters ---------- new_audio_type : str New audio-type - see `__init__`. bitrate : int Bitrate to use in case of converting to a lossy audio-type.
In-place conversion of audio data into a different representation.
[ "In", "-", "place", "conversion", "of", "audio", "data", "into", "a", "different", "representation", "." ]
def change_audio_type(self, new_audio_type, bitrate=None): """ In-place conversion of audio data into a different representation. Parameters ---------- new_audio_type : str New audio-type - see `__init__`. bitrate : int Bitrate to use in case of converting to a lossy audio-type. """ if self.audio_type == new_audio_type: return if new_audio_type == AUDIO_TYPE_PCM and self.audio_type in SERIALIZABLE_AUDIO_TYPES: self.audio_format, audio = read_audio(self.audio_type, self.audio) self.audio.close() self.audio = audio elif new_audio_type == AUDIO_TYPE_PCM and self.audio_type == AUDIO_TYPE_NP: self.audio = np_to_pcm(self.audio, self.audio_format) elif new_audio_type == AUDIO_TYPE_NP: self.change_audio_type(AUDIO_TYPE_PCM) self.audio = pcm_to_np(self.audio, self.audio_format) elif new_audio_type in SERIALIZABLE_AUDIO_TYPES: self.change_audio_type(AUDIO_TYPE_PCM) audio_bytes = io.BytesIO() write_audio(new_audio_type, audio_bytes, self.audio, audio_format=self.audio_format, bitrate=bitrate) audio_bytes.seek(0) self.audio = audio_bytes else: raise RuntimeError('Changing audio representation type from "{}" to "{}" not supported' .format(self.audio_type, new_audio_type)) self.audio_type = new_audio_type
[ "def", "change_audio_type", "(", "self", ",", "new_audio_type", ",", "bitrate", "=", "None", ")", ":", "if", "self", ".", "audio_type", "==", "new_audio_type", ":", "return", "if", "new_audio_type", "==", "AUDIO_TYPE_PCM", "and", "self", ".", "audio_type", "in...
https://github.com/mozilla/DeepSpeech/blob/aa1d28530d531d0d92289bf5f11a49fe516fdc86/training/deepspeech_training/util/audio.py#L92-L123
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/numpy/py3/numpy/linalg/linalg.py
python
tensorinv
(a, ind=2)
return ia.reshape(*invshape)
Compute the 'inverse' of an N-dimensional array. The result is an inverse for `a` relative to the tensordot operation ``tensordot(a, b, ind)``, i. e., up to floating-point accuracy, ``tensordot(tensorinv(a), a, ind)`` is the "identity" tensor for the tensordot operation. Parameters ---------- a : array_like Tensor to 'invert'. Its shape must be 'square', i. e., ``prod(a.shape[:ind]) == prod(a.shape[ind:])``. ind : int, optional Number of first indices that are involved in the inverse sum. Must be a positive integer, default is 2. Returns ------- b : ndarray `a`'s tensordot inverse, shape ``a.shape[ind:] + a.shape[:ind]``. Raises ------ LinAlgError If `a` is singular or not 'square' (in the above sense). See Also -------- numpy.tensordot, tensorsolve Examples -------- >>> a = np.eye(4*6) >>> a.shape = (4, 6, 8, 3) >>> ainv = np.linalg.tensorinv(a, ind=2) >>> ainv.shape (8, 3, 4, 6) >>> b = np.random.randn(4, 6) >>> np.allclose(np.tensordot(ainv, b), np.linalg.tensorsolve(a, b)) True >>> a = np.eye(4*6) >>> a.shape = (24, 8, 3) >>> ainv = np.linalg.tensorinv(a, ind=1) >>> ainv.shape (8, 3, 24) >>> b = np.random.randn(24) >>> np.allclose(np.tensordot(ainv, b, 1), np.linalg.tensorsolve(a, b)) True
Compute the 'inverse' of an N-dimensional array.
[ "Compute", "the", "inverse", "of", "an", "N", "-", "dimensional", "array", "." ]
def tensorinv(a, ind=2): """ Compute the 'inverse' of an N-dimensional array. The result is an inverse for `a` relative to the tensordot operation ``tensordot(a, b, ind)``, i. e., up to floating-point accuracy, ``tensordot(tensorinv(a), a, ind)`` is the "identity" tensor for the tensordot operation. Parameters ---------- a : array_like Tensor to 'invert'. Its shape must be 'square', i. e., ``prod(a.shape[:ind]) == prod(a.shape[ind:])``. ind : int, optional Number of first indices that are involved in the inverse sum. Must be a positive integer, default is 2. Returns ------- b : ndarray `a`'s tensordot inverse, shape ``a.shape[ind:] + a.shape[:ind]``. Raises ------ LinAlgError If `a` is singular or not 'square' (in the above sense). See Also -------- numpy.tensordot, tensorsolve Examples -------- >>> a = np.eye(4*6) >>> a.shape = (4, 6, 8, 3) >>> ainv = np.linalg.tensorinv(a, ind=2) >>> ainv.shape (8, 3, 4, 6) >>> b = np.random.randn(4, 6) >>> np.allclose(np.tensordot(ainv, b), np.linalg.tensorsolve(a, b)) True >>> a = np.eye(4*6) >>> a.shape = (24, 8, 3) >>> ainv = np.linalg.tensorinv(a, ind=1) >>> ainv.shape (8, 3, 24) >>> b = np.random.randn(24) >>> np.allclose(np.tensordot(ainv, b, 1), np.linalg.tensorsolve(a, b)) True """ a = asarray(a) oldshape = a.shape prod = 1 if ind > 0: invshape = oldshape[ind:] + oldshape[:ind] for k in oldshape[ind:]: prod *= k else: raise ValueError("Invalid ind argument.") a = a.reshape(prod, -1) ia = inv(a) return ia.reshape(*invshape)
[ "def", "tensorinv", "(", "a", ",", "ind", "=", "2", ")", ":", "a", "=", "asarray", "(", "a", ")", "oldshape", "=", "a", ".", "shape", "prod", "=", "1", "if", "ind", ">", "0", ":", "invshape", "=", "oldshape", "[", "ind", ":", "]", "+", "oldsh...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py3/numpy/linalg/linalg.py#L403-L467
nasa/meshNetwork
ff4bd66e0ca6bd424fd8897a97252bb3925d8b3c
python/mesh/generic/tdmaComm.py
python
TDMAComm.sendBlockTxPacket
(self)
return newPacket
Send next block transmit packet.
Send next block transmit packet.
[ "Send", "next", "block", "transmit", "packet", "." ]
def sendBlockTxPacket(self): """Send next block transmit packet.""" # Check for missed packets packetsToRemove = [] repeatPacket = None #for entry in range(len(self.blockTxPacketStatus)): for entry in self.blockTxPacketStatus.keys(): status = self.blockTxPacketStatus[entry] status.framesSinceTx += 1 # Check for responses from all directly connected nodes allResponsesRcvd = True for node in self.neighbors: if (node not in status.responsesRcvd): # response not received from this node allResponsesRcvd = False break # Check packet status #print("Responses received, framesSinceTx:", allResponsesRcvd, status.framesSinceTx) if (allResponsesRcvd == True): # packet successfully sent print("Node", self.nodeParams.config.nodeId, "- All responses received for block tx packet", status.packetNum) packetsToRemove.append(entry) elif (allResponsesRcvd == False and status.framesSinceTx >= self.nodeParams.config.commConfig['blockTxReceiptTimeout']): # resend packet status.framesSinceTx = 0 # reset frame counter status.retries += 1 repeatPacket = status.packet if (status.retries >= self.nodeParams.config.commConfig['blockTxPacketRetry']): # Retry limit met, remove packet from status list packetsToRemove.append(entry) break # Remove entries from packet status list #self.blockTxPacketStatus = [self.blockTxPacketStatus[entry] for entry in range(len(self.blockTxStatus)) if entry not in packetsToRemove] for entry in packetsToRemove: del self.blockTxPacketStatus[entry] # Check for packet to resend if (repeatPacket): # packet to resend print("Node " + str(self.nodeParams.config.nodeId) + " - Resending block transmit packet") return repeatPacket ## Send next increment of block data newPacket = self.getBlockTxPacket() if (newPacket != None): # data to send # Add new packet to status list self.blockTxPacketStatus[self.blockTx.packetNum] = BlockTxPacketStatus(newPacket, self.blockTx.packetNum) elif (len(self.blockTxPacketStatus) == 0): # Check for block transmit completion (all packets sent 
successfully) self.blockTx.complete = True return newPacket
[ "def", "sendBlockTxPacket", "(", "self", ")", ":", "# Check for missed packets", "packetsToRemove", "=", "[", "]", "repeatPacket", "=", "None", "#for entry in range(len(self.blockTxPacketStatus)):", "for", "entry", "in", "self", ".", "blockTxPacketStatus", ".", "keys", ...
https://github.com/nasa/meshNetwork/blob/ff4bd66e0ca6bd424fd8897a97252bb3925d8b3c/python/mesh/generic/tdmaComm.py#L790-L840
snap-stanford/snap-python
d53c51b0a26aa7e3e7400b014cdf728948fde80a
setup/snap.py
python
TIntIntVV.__init__
(self, *args)
__init__(TVec<(TVec<(TInt,int)>,int)> self) -> TIntIntVV __init__(TVec<(TVec<(TInt,int)>,int)> self, TIntIntVV Vec) -> TIntIntVV Parameters: Vec: TVec< TVec< TInt,int >,int > const & __init__(TVec<(TVec<(TInt,int)>,int)> self, int const & _Vals) -> TIntIntVV Parameters: _Vals: int const & __init__(TVec<(TVec<(TInt,int)>,int)> self, int const & _MxVals, int const & _Vals) -> TIntIntVV Parameters: _MxVals: int const & _Vals: int const & __init__(TVec<(TVec<(TInt,int)>,int)> self, TIntV _ValT, int const & _Vals) -> TIntIntVV Parameters: _ValT: TVec< TInt,int > * _Vals: int const & __init__(TVec<(TVec<(TInt,int)>,int)> self, TSIn SIn) -> TIntIntVV Parameters: SIn: TSIn &
__init__(TVec<(TVec<(TInt,int)>,int)> self) -> TIntIntVV __init__(TVec<(TVec<(TInt,int)>,int)> self, TIntIntVV Vec) -> TIntIntVV
[ "__init__", "(", "TVec<", "(", "TVec<", "(", "TInt", "int", ")", ">", "int", ")", ">", "self", ")", "-", ">", "TIntIntVV", "__init__", "(", "TVec<", "(", "TVec<", "(", "TInt", "int", ")", ">", "int", ")", ">", "self", "TIntIntVV", "Vec", ")", "-"...
def __init__(self, *args): """ __init__(TVec<(TVec<(TInt,int)>,int)> self) -> TIntIntVV __init__(TVec<(TVec<(TInt,int)>,int)> self, TIntIntVV Vec) -> TIntIntVV Parameters: Vec: TVec< TVec< TInt,int >,int > const & __init__(TVec<(TVec<(TInt,int)>,int)> self, int const & _Vals) -> TIntIntVV Parameters: _Vals: int const & __init__(TVec<(TVec<(TInt,int)>,int)> self, int const & _MxVals, int const & _Vals) -> TIntIntVV Parameters: _MxVals: int const & _Vals: int const & __init__(TVec<(TVec<(TInt,int)>,int)> self, TIntV _ValT, int const & _Vals) -> TIntIntVV Parameters: _ValT: TVec< TInt,int > * _Vals: int const & __init__(TVec<(TVec<(TInt,int)>,int)> self, TSIn SIn) -> TIntIntVV Parameters: SIn: TSIn & """ _snap.TIntIntVV_swiginit(self,_snap.new_TIntIntVV(*args))
[ "def", "__init__", "(", "self", ",", "*", "args", ")", ":", "_snap", ".", "TIntIntVV_swiginit", "(", "self", ",", "_snap", ".", "new_TIntIntVV", "(", "*", "args", ")", ")" ]
https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L16504-L16535
stepcode/stepcode
2a50010e6f6b8bd4843561e48fdb0fd4e8b87f39
src/exp2python/python/SCL/Part21.py
python
Lexer.t_slurp_PART21_START
(self, t)
return t
r'ISO-10303-21;
r'ISO-10303-21;
[ "r", "ISO", "-", "10303", "-", "21", ";" ]
def t_slurp_PART21_START(self, t): r'ISO-10303-21;' t.lexer.begin('INITIAL') return t
[ "def", "t_slurp_PART21_START", "(", "self", ",", "t", ")", ":", "t", ".", "lexer", ".", "begin", "(", "'INITIAL'", ")", "return", "t" ]
https://github.com/stepcode/stepcode/blob/2a50010e6f6b8bd4843561e48fdb0fd4e8b87f39/src/exp2python/python/SCL/Part21.py#L124-L127
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py3/scipy/sparse/base.py
python
spmatrix.nonzero
(self)
return (A.row[nz_mask], A.col[nz_mask])
nonzero indices Returns a tuple of arrays (row,col) containing the indices of the non-zero elements of the matrix. Examples -------- >>> from scipy.sparse import csr_matrix >>> A = csr_matrix([[1,2,0],[0,0,3],[4,0,5]]) >>> A.nonzero() (array([0, 0, 1, 2, 2]), array([0, 1, 2, 0, 2]))
nonzero indices
[ "nonzero", "indices" ]
def nonzero(self): """nonzero indices Returns a tuple of arrays (row,col) containing the indices of the non-zero elements of the matrix. Examples -------- >>> from scipy.sparse import csr_matrix >>> A = csr_matrix([[1,2,0],[0,0,3],[4,0,5]]) >>> A.nonzero() (array([0, 0, 1, 2, 2]), array([0, 1, 2, 0, 2])) """ # convert to COOrdinate format A = self.tocoo() nz_mask = A.data != 0 return (A.row[nz_mask], A.col[nz_mask])
[ "def", "nonzero", "(", "self", ")", ":", "# convert to COOrdinate format", "A", "=", "self", ".", "tocoo", "(", ")", "nz_mask", "=", "A", ".", "data", "!=", "0", "return", "(", "A", ".", "row", "[", "nz_mask", "]", ",", "A", ".", "col", "[", "nz_ma...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/sparse/base.py#L760-L778
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_controls.py
python
PickerBase.IsPickerCtrlGrowable
(*args, **kwargs)
return _controls_.PickerBase_IsPickerCtrlGrowable(*args, **kwargs)
IsPickerCtrlGrowable(self) -> bool
IsPickerCtrlGrowable(self) -> bool
[ "IsPickerCtrlGrowable", "(", "self", ")", "-", ">", "bool" ]
def IsPickerCtrlGrowable(*args, **kwargs): """IsPickerCtrlGrowable(self) -> bool""" return _controls_.PickerBase_IsPickerCtrlGrowable(*args, **kwargs)
[ "def", "IsPickerCtrlGrowable", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_controls_", ".", "PickerBase_IsPickerCtrlGrowable", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_controls.py#L6798-L6800
lmb-freiburg/ogn
974f72ef4bf840d6f6693d22d1843a79223e77ce
scripts/cpp_lint.py
python
ResetNolintSuppressions
()
Resets the set of NOLINT suppressions to empty.
Resets the set of NOLINT suppressions to empty.
[ "Resets", "the", "set", "of", "NOLINT", "suppressions", "to", "empty", "." ]
def ResetNolintSuppressions(): "Resets the set of NOLINT suppressions to empty." _error_suppressions.clear()
[ "def", "ResetNolintSuppressions", "(", ")", ":", "_error_suppressions", ".", "clear", "(", ")" ]
https://github.com/lmb-freiburg/ogn/blob/974f72ef4bf840d6f6693d22d1843a79223e77ce/scripts/cpp_lint.py#L495-L497
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/pandas/py3/pandas/core/indexes/base.py
python
Index.is_
(self, other)
More flexible, faster check like ``is`` but that works through views. Note: this is *not* the same as ``Index.identical()``, which checks that metadata is also the same. Parameters ---------- other : object Other object to compare against. Returns ------- bool True if both have same underlying data, False otherwise. See Also -------- Index.identical : Works like ``Index.is_`` but also checks metadata.
More flexible, faster check like ``is`` but that works through views.
[ "More", "flexible", "faster", "check", "like", "is", "but", "that", "works", "through", "views", "." ]
def is_(self, other) -> bool: """ More flexible, faster check like ``is`` but that works through views. Note: this is *not* the same as ``Index.identical()``, which checks that metadata is also the same. Parameters ---------- other : object Other object to compare against. Returns ------- bool True if both have same underlying data, False otherwise. See Also -------- Index.identical : Works like ``Index.is_`` but also checks metadata. """ if self is other: return True elif not hasattr(other, "_id"): return False elif self._id is None or other._id is None: return False else: return self._id is other._id
[ "def", "is_", "(", "self", ",", "other", ")", "->", "bool", ":", "if", "self", "is", "other", ":", "return", "True", "elif", "not", "hasattr", "(", "other", ",", "\"_id\"", ")", ":", "return", "False", "elif", "self", ".", "_id", "is", "None", "or"...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py3/pandas/core/indexes/base.py#L726-L754
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemFramework/v1/AWS/resource-manager-code/lib/swagger_spec_validator/validator20.py
python
validate_duplicate_param
(params, deref)
Validate no duplicate parameters are present. Uniqueness is determined by the tuple ('name', 'in'). :param params: list of all the params :param deref: callable that dereferences $refs :raises: :py:class:`swagger_spec_validator.SwaggerValidationError` when a duplicate parameter is found.
Validate no duplicate parameters are present.
[ "Validate", "no", "duplicate", "parameters", "are", "present", "." ]
def validate_duplicate_param(params, deref): """Validate no duplicate parameters are present. Uniqueness is determined by the tuple ('name', 'in'). :param params: list of all the params :param deref: callable that dereferences $refs :raises: :py:class:`swagger_spec_validator.SwaggerValidationError` when a duplicate parameter is found. """ seen = set() msg = "Duplicate param found with (name, in)" for param in params: param = deref(param) param_key = (param['name'], param['in']) if param_key in seen: raise SwaggerValidationError("{}: {}".format(msg, param_key)) seen.add(param_key)
[ "def", "validate_duplicate_param", "(", "params", ",", "deref", ")", ":", "seen", "=", "set", "(", ")", "msg", "=", "\"Duplicate param found with (name, in)\"", "for", "param", "in", "params", ":", "param", "=", "deref", "(", "param", ")", "param_key", "=", ...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemFramework/v1/AWS/resource-manager-code/lib/swagger_spec_validator/validator20.py#L558-L576
timi-liuliang/echo
40a5a24d430eee4118314459ab7e03afcb3b8719
thirdparty/protobuf/python/google/protobuf/internal/decoder.py
python
_DoubleDecoder
()
return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)
Returns a decoder for a double field. This code works around a bug in struct.unpack for not-a-number.
Returns a decoder for a double field.
[ "Returns", "a", "decoder", "for", "a", "double", "field", "." ]
def _DoubleDecoder(): """Returns a decoder for a double field. This code works around a bug in struct.unpack for not-a-number. """ local_unpack = struct.unpack b = (lambda x:x) if _PY2 else lambda x:x.encode('latin1') ##PY25 def InnerDecode(buffer, pos): # We expect a 64-bit value in little-endian byte order. Bit 1 is the sign # bit, bits 2-12 represent the exponent, and bits 13-64 are the significand. new_pos = pos + 8 double_bytes = buffer[pos:new_pos] # If this value has all its exponent bits set and at least one significand # bit set, it's not a number. In Python 2.4, struct.unpack will treat it # as inf or -inf. To avoid that, we treat it specially. ##!PY25 if ((double_bytes[7:8] in b'\x7F\xFF') ##!PY25 and (double_bytes[6:7] >= b'\xF0') ##!PY25 and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')): if ((double_bytes[7:8] in b('\x7F\xFF')) ##PY25 and (double_bytes[6:7] >= b('\xF0')) ##PY25 and (double_bytes[0:7] != b('\x00\x00\x00\x00\x00\x00\xF0'))): ##PY25 return (_NAN, new_pos) # Note that we expect someone up-stack to catch struct.error and convert # it to _DecodeError -- this way we don't have to set up exception- # handling blocks every time we parse one value. result = local_unpack('<d', double_bytes)[0] return (result, new_pos) return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)
[ "def", "_DoubleDecoder", "(", ")", ":", "local_unpack", "=", "struct", ".", "unpack", "b", "=", "(", "lambda", "x", ":", "x", ")", "if", "_PY2", "else", "lambda", "x", ":", "x", ".", "encode", "(", "'latin1'", ")", "##PY25", "def", "InnerDecode", "("...
https://github.com/timi-liuliang/echo/blob/40a5a24d430eee4118314459ab7e03afcb3b8719/thirdparty/protobuf/python/google/protobuf/internal/decoder.py#L337-L368
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/third_party/lib_x86_64/python2.7/dist-packages/numpy/oldnumeric/ma.py
python
ravel
(a)
a as one-dimensional, may share data and mask
a as one-dimensional, may share data and mask
[ "a", "as", "one", "-", "dimensional", "may", "share", "data", "and", "mask" ]
def ravel (a): "a as one-dimensional, may share data and mask" m = getmask(a) d = fromnumeric.ravel(filled(a)) if m is nomask: return masked_array(d) else: return masked_array(d, mask=numeric.ravel(m))
[ "def", "ravel", "(", "a", ")", ":", "m", "=", "getmask", "(", "a", ")", "d", "=", "fromnumeric", ".", "ravel", "(", "filled", "(", "a", ")", ")", "if", "m", "is", "nomask", ":", "return", "masked_array", "(", "d", ")", "else", ":", "return", "m...
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_x86_64/python2.7/dist-packages/numpy/oldnumeric/ma.py#L1846-L1853
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/platform/benchmark.py
python
Benchmark._get_name
(self, overwrite_name=None)
return name
Returns full name of class and method calling report_benchmark.
Returns full name of class and method calling report_benchmark.
[ "Returns", "full", "name", "of", "class", "and", "method", "calling", "report_benchmark", "." ]
def _get_name(self, overwrite_name=None): """Returns full name of class and method calling report_benchmark.""" # Find the caller method (outermost Benchmark class) stack = tf_inspect.stack() calling_class = None name = None for frame in stack[::-1]: f_locals = frame[0].f_locals f_self = f_locals.get("self", None) if isinstance(f_self, Benchmark): calling_class = f_self # Get the outermost stack Benchmark call name = frame[3] # Get the method name break if calling_class is None: raise ValueError("Unable to determine calling Benchmark class.") # Use the method name, or overwrite_name is provided. name = overwrite_name or name # Prefix the name with the class name. class_name = type(calling_class).__name__ name = "%s.%s" % (class_name, name) return name
[ "def", "_get_name", "(", "self", ",", "overwrite_name", "=", "None", ")", ":", "# Find the caller method (outermost Benchmark class)", "stack", "=", "tf_inspect", ".", "stack", "(", ")", "calling_class", "=", "None", "name", "=", "None", "for", "frame", "in", "s...
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/platform/benchmark.py#L235-L257
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/lib2to3/fixer_util.py
python
_is_import_binding
(node, name, package=None)
return None
Will reuturn node if node will import name, or node will import * from package. None is returned otherwise. See test cases for examples.
Will reuturn node if node will import name, or node will import * from package. None is returned otherwise. See test cases for examples.
[ "Will", "reuturn", "node", "if", "node", "will", "import", "name", "or", "node", "will", "import", "*", "from", "package", ".", "None", "is", "returned", "otherwise", ".", "See", "test", "cases", "for", "examples", "." ]
def _is_import_binding(node, name, package=None): """ Will reuturn node if node will import name, or node will import * from package. None is returned otherwise. See test cases for examples. """ if node.type == syms.import_name and not package: imp = node.children[1] if imp.type == syms.dotted_as_names: for child in imp.children: if child.type == syms.dotted_as_name: if child.children[2].value == name: return node elif child.type == token.NAME and child.value == name: return node elif imp.type == syms.dotted_as_name: last = imp.children[-1] if last.type == token.NAME and last.value == name: return node elif imp.type == token.NAME and imp.value == name: return node elif node.type == syms.import_from: # unicode(...) is used to make life easier here, because # from a.b import parses to ['import', ['a', '.', 'b'], ...] if package and unicode(node.children[1]).strip() != package: return None n = node.children[3] if package and _find(u"as", n): # See test_from_import_as for explanation return None elif n.type == syms.import_as_names and _find(name, n): return node elif n.type == syms.import_as_name: child = n.children[2] if child.type == token.NAME and child.value == name: return node elif n.type == token.NAME and n.value == name: return node elif package and n.type == token.STAR: return node return None
[ "def", "_is_import_binding", "(", "node", ",", "name", ",", "package", "=", "None", ")", ":", "if", "node", ".", "type", "==", "syms", ".", "import_name", "and", "not", "package", ":", "imp", "=", "node", ".", "children", "[", "1", "]", "if", "imp", ...
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/lib2to3/fixer_util.py#L393-L432
gromacs/gromacs
7dec3a3f99993cf5687a122de3e12de31c21c399
docs/doxygen/doxygenxml.py
python
DocumentationSet.__init__
(self, xmlroot, reporter)
Initialize the documentation set and read index data.
Initialize the documentation set and read index data.
[ "Initialize", "the", "documentation", "set", "and", "read", "index", "data", "." ]
def __init__(self, xmlroot, reporter): """Initialize the documentation set and read index data.""" self._xmlroot = xmlroot self._reporter = reporter xmlpath = os.path.join(xmlroot, 'index.xml') indextree = ET.parse(xmlpath) self._compounds = dict() self._members = dict() self._files = dict() for compoundelem in indextree.getroot(): name = compoundelem.find('name').text refid = compoundelem.attrib['refid'] kind = compoundelem.attrib['kind'] if kind in ('page', 'example'): # TODO: Model these types as well continue compoundtype = _get_compound_type_from_kind(kind) if compoundtype is None: reporter.xml_assert(xmlpath, "unknown compound kind '{0}'".format(kind)) continue compound = compoundtype(name, refid) compound.set_documentation_set(self) self._compounds[refid] = compound for memberelem in compoundelem.iter('member'): name = memberelem.find('name').text refid = memberelem.attrib['refid'] kind = memberelem.attrib['kind'] if refid in self._members: member = self._members[refid] membertype = _get_member_type_from_kind(kind) if not isinstance(member, membertype): reporter.xml_assert(xmlpath, "id '{0}' used for multiple kinds of members" .format(refid)) continue else: membertype = _get_member_type_from_kind(kind) if membertype is None: reporter.xml_assert(xmlpath, "unknown member kind '{0}'".format(kind)) continue member = membertype(name, refid) member.set_documentation_set(self) self._members[refid] = member member.add_parent_compound(compound) compound.add_member(member)
[ "def", "__init__", "(", "self", ",", "xmlroot", ",", "reporter", ")", ":", "self", ".", "_xmlroot", "=", "xmlroot", "self", ".", "_reporter", "=", "reporter", "xmlpath", "=", "os", ".", "path", ".", "join", "(", "xmlroot", ",", "'index.xml'", ")", "ind...
https://github.com/gromacs/gromacs/blob/7dec3a3f99993cf5687a122de3e12de31c21c399/docs/doxygen/doxygenxml.py#L1088-L1134
OpenNI/OpenNI
1e9524ffd759841789dadb4ca19fb5d4ac5820e7
Externals/PSCommon/Windows/CreateRedist/redist_base.py
python
RedistBase.wix_redist_var_set
(self)
preconsdition: CWD is where wix-variables-file is stored
preconsdition: CWD is where wix-variables-file is stored
[ "preconsdition", ":", "CWD", "is", "where", "wix", "-", "variables", "-", "file", "is", "stored" ]
def wix_redist_var_set(self): """preconsdition: CWD is where wix-variables-file is stored""" temp = self.redist_internal_name.replace('_','') os.system("attrib -r Includes\\%sVariables.wxi"%temp) #print("setting WIX BuildPlatform") #regx_replace("BuildPlatform=(.*)", "BuildPlatform=" + str(vc_build_bits) + "?>", "Includes\\OpenNIVariables.wxi") print("setting WIX BinaryOnlyRedist=True") regx_replace("BinaryOnlyRedist=(.*)", "BinaryOnlyRedist=True?>", "Includes\\%sVariables.wxi"%temp)
[ "def", "wix_redist_var_set", "(", "self", ")", ":", "temp", "=", "self", ".", "redist_internal_name", ".", "replace", "(", "'_'", ",", "''", ")", "os", ".", "system", "(", "\"attrib -r Includes\\\\%sVariables.wxi\"", "%", "temp", ")", "#print(\"setting WIX BuildPl...
https://github.com/OpenNI/OpenNI/blob/1e9524ffd759841789dadb4ca19fb5d4ac5820e7/Externals/PSCommon/Windows/CreateRedist/redist_base.py#L742-L749
microsoft/DirectXShaderCompiler
8348ff8d9e0287610ba05d3a828e10af981a1c05
utils/hct/VerifierHelper.py
python
SourceLocation.ToStringAtLine
(self, line)
return '<' + sloc + '>'
convert to string relative to specified line
convert to string relative to specified line
[ "convert", "to", "string", "relative", "to", "specified", "line" ]
def ToStringAtLine(self, line): "convert to string relative to specified line" if self.Invalid: sloc = self.Invalid else: if self.FromLine and line != self.FromLine: sloc = 'line:%d:%d' % (self.FromLine, self.FromCol) line = self.FromLine else: sloc = 'col:%d' % self.FromCol if self.ToCol: if self.ToLine and line != self.ToLine: sloc += ', line:%d:%d' % (self.ToLine, self.ToCol) else: sloc += ', col:%d' % self.ToCol return '<' + sloc + '>'
[ "def", "ToStringAtLine", "(", "self", ",", "line", ")", ":", "if", "self", ".", "Invalid", ":", "sloc", "=", "self", ".", "Invalid", "else", ":", "if", "self", ".", "FromLine", "and", "line", "!=", "self", ".", "FromLine", ":", "sloc", "=", "'line:%d...
https://github.com/microsoft/DirectXShaderCompiler/blob/8348ff8d9e0287610ba05d3a828e10af981a1c05/utils/hct/VerifierHelper.py#L298-L313
RamadhanAmizudin/malware
2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1
GMBot/gmbot/apps/smsg_r/smsapp/remote_api.py
python
lock_status
(rec, data)
Handles the lock status callback @param rec: Phone data record @type rec: models.PhoneData @param data: Phone data @type data: dict @rtype: None
Handles the lock status callback
[ "Handles", "the", "lock", "status", "callback" ]
def lock_status(rec, data): """ Handles the lock status callback @param rec: Phone data record @type rec: models.PhoneData @param data: Phone data @type data: dict @rtype: None """ rec.locked = (data.get('status') == 'locked') rec.save() msg = { 'info': "Phone {0} is {1}".format(rec, data.get('status')), 'code': rec.uniq_id, 'locked': rec.locked } sys_messages.add_message(rec.uniq_id, msg)
[ "def", "lock_status", "(", "rec", ",", "data", ")", ":", "rec", ".", "locked", "=", "(", "data", ".", "get", "(", "'status'", ")", "==", "'locked'", ")", "rec", ".", "save", "(", ")", "msg", "=", "{", "'info'", ":", "\"Phone {0} is {1}\"", ".", "fo...
https://github.com/RamadhanAmizudin/malware/blob/2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1/GMBot/gmbot/apps/smsg_r/smsapp/remote_api.py#L389-L405
Kitware/ParaView
f760af9124ff4634b23ebbeab95a4f56e0261955
ThirdParty/cinema/paraview/tpl/cinema_python/database/store.py
python
make_parameter
(name, values, **kwargs)
return properties
define a new parameter that will be added to a store. Primarily takes a name and an array of potential values. May also be given a default value from inside the array. May also be given a typechoice to help the UI which is required to be one of 'list', 'range', 'option' or 'hidden'. May also bve given a user friendly label.
define a new parameter that will be added to a store. Primarily takes a name and an array of potential values. May also be given a default value from inside the array. May also be given a typechoice to help the UI which is required to be one of 'list', 'range', 'option' or 'hidden'. May also bve given a user friendly label.
[ "define", "a", "new", "parameter", "that", "will", "be", "added", "to", "a", "store", ".", "Primarily", "takes", "a", "name", "and", "an", "array", "of", "potential", "values", ".", "May", "also", "be", "given", "a", "default", "value", "from", "inside",...
def make_parameter(name, values, **kwargs): """ define a new parameter that will be added to a store. Primarily takes a name and an array of potential values. May also be given a default value from inside the array. May also be given a typechoice to help the UI which is required to be one of 'list', 'range', 'option' or 'hidden'. May also bve given a user friendly label. """ default = kwargs['default'] if 'default' in kwargs else values[0] if default not in values: raise RuntimeError("Invalid default, must be one of %s" % str(values)) typechoice = kwargs['typechoice'] if 'typechoice' in kwargs else 'range' valid_types = ['list', 'range', 'option', 'hidden'] if typechoice not in valid_types: raise RuntimeError( "Invalid typechoice, must be one of %s" % str(valid_types)) label = kwargs['label'] if 'label' in kwargs else name properties = dict() properties['type'] = typechoice properties['label'] = label properties['values'] = values properties['default'] = default return properties
[ "def", "make_parameter", "(", "name", ",", "values", ",", "*", "*", "kwargs", ")", ":", "default", "=", "kwargs", "[", "'default'", "]", "if", "'default'", "in", "kwargs", "else", "values", "[", "0", "]", "if", "default", "not", "in", "values", ":", ...
https://github.com/Kitware/ParaView/blob/f760af9124ff4634b23ebbeab95a4f56e0261955/ThirdParty/cinema/paraview/tpl/cinema_python/database/store.py#L627-L652
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_windows.py
python
PrePopupTransientWindow
(*args, **kwargs)
return val
PrePopupTransientWindow() -> PopupTransientWindow
PrePopupTransientWindow() -> PopupTransientWindow
[ "PrePopupTransientWindow", "()", "-", ">", "PopupTransientWindow" ]
def PrePopupTransientWindow(*args, **kwargs): """PrePopupTransientWindow() -> PopupTransientWindow""" val = _windows_.new_PrePopupTransientWindow(*args, **kwargs) return val
[ "def", "PrePopupTransientWindow", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "val", "=", "_windows_", ".", "new_PrePopupTransientWindow", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "val" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_windows.py#L2166-L2169
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
native_client_sdk/src/build_tools/update_nacl_manifest.py
python
Updater.AddVersionToUpdate
(self, bundle_name, version, channel, archives)
Add a pepper version to update in the uploaded manifest. Args: bundle_name: The name of the pepper bundle, e.g. 'pepper_18' version: The version of the pepper bundle, e.g. '18.0.1025.64' channel: The stability of the pepper bundle, e.g. 'beta' archives: A sequence of archive URLs for this bundle.
Add a pepper version to update in the uploaded manifest.
[ "Add", "a", "pepper", "version", "to", "update", "in", "the", "uploaded", "manifest", "." ]
def AddVersionToUpdate(self, bundle_name, version, channel, archives): """Add a pepper version to update in the uploaded manifest. Args: bundle_name: The name of the pepper bundle, e.g. 'pepper_18' version: The version of the pepper bundle, e.g. '18.0.1025.64' channel: The stability of the pepper bundle, e.g. 'beta' archives: A sequence of archive URLs for this bundle.""" self.versions_to_update.append((bundle_name, version, channel, archives))
[ "def", "AddVersionToUpdate", "(", "self", ",", "bundle_name", ",", "version", ",", "channel", ",", "archives", ")", ":", "self", ".", "versions_to_update", ".", "append", "(", "(", "bundle_name", ",", "version", ",", "channel", ",", "archives", ")", ")" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/native_client_sdk/src/build_tools/update_nacl_manifest.py#L608-L616
microsoft/LightGBM
904b2d5158703c4900b68008617951dd2f9ff21b
docs/conf.py
python
setup
(app: Sphinx)
Add new elements at Sphinx initialization time. Parameters ---------- app : sphinx.application.Sphinx The application object representing the Sphinx process.
Add new elements at Sphinx initialization time.
[ "Add", "new", "elements", "at", "Sphinx", "initialization", "time", "." ]
def setup(app: Sphinx) -> None: """Add new elements at Sphinx initialization time. Parameters ---------- app : sphinx.application.Sphinx The application object representing the Sphinx process. """ first_run = not (CURR_PATH / '_FIRST_RUN.flag').exists() if first_run and RTD: (CURR_PATH / '_FIRST_RUN.flag').touch() if C_API: app.connect("builder-inited", generate_doxygen_xml) else: app.add_directive('doxygenfile', IgnoredDirective) if RTD: # build R docs only on Read the Docs site if first_run: app.connect("builder-inited", generate_r_docs) app.connect("build-finished", lambda app, _: copytree(CURR_PATH.parent / "lightgbm_r" / "docs", Path(app.outdir) / "R")) app.add_transform(InternalRefTransform) add_js_file = getattr(app, 'add_js_file', False) or app.add_javascript add_js_file("js/script.js")
[ "def", "setup", "(", "app", ":", "Sphinx", ")", "->", "None", ":", "first_run", "=", "not", "(", "CURR_PATH", "/", "'_FIRST_RUN.flag'", ")", ".", "exists", "(", ")", "if", "first_run", "and", "RTD", ":", "(", "CURR_PATH", "/", "'_FIRST_RUN.flag'", ")", ...
https://github.com/microsoft/LightGBM/blob/904b2d5158703c4900b68008617951dd2f9ff21b/docs/conf.py#L318-L341
fengbingchun/NN_Test
d6305825d5273e4569ccd1eda9ffa2a9c72e18d2
src/libsvm/python/svmutil.py
python
svm_load_model
(model_file_name)
return model
svm_load_model(model_file_name) -> model Load a LIBSVM model from model_file_name and return.
svm_load_model(model_file_name) -> model
[ "svm_load_model", "(", "model_file_name", ")", "-", ">", "model" ]
def svm_load_model(model_file_name): """ svm_load_model(model_file_name) -> model Load a LIBSVM model from model_file_name and return. """ model = libsvm.svm_load_model(model_file_name.encode()) if not model: print("can't open model file %s" % model_file_name) return None model = toPyModel(model) return model
[ "def", "svm_load_model", "(", "model_file_name", ")", ":", "model", "=", "libsvm", ".", "svm_load_model", "(", "model_file_name", ".", "encode", "(", ")", ")", "if", "not", "model", ":", "print", "(", "\"can't open model file %s\"", "%", "model_file_name", ")", ...
https://github.com/fengbingchun/NN_Test/blob/d6305825d5273e4569ccd1eda9ffa2a9c72e18d2/src/libsvm/python/svmutil.py#L36-L47
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
qt/python/mantidqt/mantidqt/widgets/sliceviewer/peaksviewer/representation/painter.py
python
Painted.__init__
(self, painter: MplPainter, artists, effective_bbox=None)
:param painter: A reference to the painter responsible for drawing the artists. :param artists: A list of drawn artists :param effective_bbox: An optional bounding box for artists that represent something with no real extent
:param painter: A reference to the painter responsible for drawing the artists. :param artists: A list of drawn artists :param effective_bbox: An optional bounding box for artists that represent something with no real extent
[ ":", "param", "painter", ":", "A", "reference", "to", "the", "painter", "responsible", "for", "drawing", "the", "artists", ".", ":", "param", "artists", ":", "A", "list", "of", "drawn", "artists", ":", "param", "effective_bbox", ":", "An", "optional", "bou...
def __init__(self, painter: MplPainter, artists, effective_bbox=None): """ :param painter: A reference to the painter responsible for drawing the artists. :param artists: A list of drawn artists :param effective_bbox: An optional bounding box for artists that represent something with no real extent """ self._painter = painter self._artists = artists self._effective_bbox = effective_bbox
[ "def", "__init__", "(", "self", ",", "painter", ":", "MplPainter", ",", "artists", ",", "effective_bbox", "=", "None", ")", ":", "self", ".", "_painter", "=", "painter", "self", ".", "_artists", "=", "artists", "self", ".", "_effective_bbox", "=", "effecti...
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqt/mantidqt/widgets/sliceviewer/peaksviewer/representation/painter.py#L186-L199
taichi-dev/taichi
973c04d6ba40f34e9e3bd5a28ae0ee0802f136a6
scripts/run_clang_tidy.py
python
get_tidy_invocation
(f, clang_tidy_binary, checks, tmpdir, build_path, header_filter, extra_arg, extra_arg_before, quiet, config)
return start
Gets a command line for clang-tidy.
Gets a command line for clang-tidy.
[ "Gets", "a", "command", "line", "for", "clang", "-", "tidy", "." ]
def get_tidy_invocation(f, clang_tidy_binary, checks, tmpdir, build_path, header_filter, extra_arg, extra_arg_before, quiet, config): """Gets a command line for clang-tidy.""" start = [clang_tidy_binary] start.append('-warnings-as-errors=*') if header_filter is not None: start.append('-header-filter=' + header_filter) if checks: start.append('-checks=' + checks) if tmpdir is not None: start.append('-export-fixes') # Get a temporary file. We immediately close the handle so clang-tidy can # overwrite it. (handle, name) = tempfile.mkstemp(suffix='.yaml', dir=tmpdir) os.close(handle) start.append(name) for arg in extra_arg: start.append('-extra-arg=%s' % arg) for arg in extra_arg_before: start.append('-extra-arg-before=%s' % arg) start.append('-p=' + build_path) if quiet: start.append('-quiet') if config: start.append('-config=' + config) start.append(f) return start
[ "def", "get_tidy_invocation", "(", "f", ",", "clang_tidy_binary", ",", "checks", ",", "tmpdir", ",", "build_path", ",", "header_filter", ",", "extra_arg", ",", "extra_arg_before", ",", "quiet", ",", "config", ")", ":", "start", "=", "[", "clang_tidy_binary", "...
https://github.com/taichi-dev/taichi/blob/973c04d6ba40f34e9e3bd5a28ae0ee0802f136a6/scripts/run_clang_tidy.py#L80-L107
BlzFans/wke
b0fa21158312e40c5fbd84682d643022b6c34a93
cygwin/lib/python2.6/asynchat.py
python
async_chat.writable
(self)
return self.producer_fifo or (not self.connected)
predicate for inclusion in the writable for select()
predicate for inclusion in the writable for select()
[ "predicate", "for", "inclusion", "in", "the", "writable", "for", "select", "()" ]
def writable (self): "predicate for inclusion in the writable for select()" return self.producer_fifo or (not self.connected)
[ "def", "writable", "(", "self", ")", ":", "return", "self", ".", "producer_fifo", "or", "(", "not", "self", ".", "connected", ")" ]
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/asynchat.py#L200-L202
domino-team/openwrt-cc
8b181297c34d14d3ca521cc9f31430d561dbc688
package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/tools/gyp/pylib/gyp/xcodeproj_file.py
python
PBXCopyFilesBuildPhase.SetDestination
(self, path)
Set the dstSubfolderSpec and dstPath properties from path. path may be specified in the same notation used for XCHierarchicalElements, specifically, "$(DIR)/path".
Set the dstSubfolderSpec and dstPath properties from path.
[ "Set", "the", "dstSubfolderSpec", "and", "dstPath", "properties", "from", "path", "." ]
def SetDestination(self, path): """Set the dstSubfolderSpec and dstPath properties from path. path may be specified in the same notation used for XCHierarchicalElements, specifically, "$(DIR)/path". """ path_tree_match = self.path_tree_re.search(path) if path_tree_match: # Everything else needs to be relative to an Xcode variable. path_tree = path_tree_match.group(1) relative_path = path_tree_match.group(3) if path_tree in self.path_tree_to_subfolder: subfolder = self.path_tree_to_subfolder[path_tree] if relative_path is None: relative_path = '' else: # The path starts with an unrecognized Xcode variable # name like $(SRCROOT). Xcode will still handle this # as an "absolute path" that starts with the variable. subfolder = 0 relative_path = path elif path.startswith('/'): # Special case. Absolute paths are in dstSubfolderSpec 0. subfolder = 0 relative_path = path[1:] else: raise ValueError('Can\'t use path %s in a %s' % \ (path, self.__class__.__name__)) self._properties['dstPath'] = relative_path self._properties['dstSubfolderSpec'] = subfolder
[ "def", "SetDestination", "(", "self", ",", "path", ")", ":", "path_tree_match", "=", "self", ".", "path_tree_re", ".", "search", "(", "path", ")", "if", "path_tree_match", ":", "# Everything else needs to be relative to an Xcode variable.", "path_tree", "=", "path_tre...
https://github.com/domino-team/openwrt-cc/blob/8b181297c34d14d3ca521cc9f31430d561dbc688/package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/tools/gyp/pylib/gyp/xcodeproj_file.py#L1976-L2008
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/scipy/linalg/matfuncs.py
python
funm
(A, func, disp=True)
Evaluate a matrix function specified by a callable. Returns the value of matrix-valued function ``f`` at `A`. The function ``f`` is an extension of the scalar-valued function `func` to matrices. Parameters ---------- A : (N, N) array_like Matrix at which to evaluate the function func : callable Callable object that evaluates a scalar function f. Must be vectorized (eg. using vectorize). disp : bool, optional Print warning if error in the result is estimated large instead of returning estimated error. (Default: True) Returns ------- funm : (N, N) ndarray Value of the matrix function specified by func evaluated at `A` errest : float (if disp == False) 1-norm of the estimated error, ||err||_1 / ||A||_1 Examples -------- >>> from scipy.linalg import funm >>> a = np.array([[1.0, 3.0], [1.0, 4.0]]) >>> funm(a, lambda x: x*x) array([[ 4., 15.], [ 5., 19.]]) >>> a.dot(a) array([[ 4., 15.], [ 5., 19.]]) Notes ----- This function implements the general algorithm based on Schur decomposition (Algorithm 9.1.1. in [1]_). If the input matrix is known to be diagonalizable, then relying on the eigendecomposition is likely to be faster. For example, if your matrix is Hermitian, you can do >>> from scipy.linalg import eigh >>> def funm_herm(a, func, check_finite=False): ... w, v = eigh(a, check_finite=check_finite) ... ## if you further know that your matrix is positive semidefinite, ... ## you can optionally guard against precision errors by doing ... # w = np.maximum(w, 0) ... w = func(w) ... return (v * w).dot(v.conj().T) References ---------- .. [1] Gene H. Golub, Charles F. van Loan, Matrix Computations 4th ed.
Evaluate a matrix function specified by a callable.
[ "Evaluate", "a", "matrix", "function", "specified", "by", "a", "callable", "." ]
def funm(A, func, disp=True): """ Evaluate a matrix function specified by a callable. Returns the value of matrix-valued function ``f`` at `A`. The function ``f`` is an extension of the scalar-valued function `func` to matrices. Parameters ---------- A : (N, N) array_like Matrix at which to evaluate the function func : callable Callable object that evaluates a scalar function f. Must be vectorized (eg. using vectorize). disp : bool, optional Print warning if error in the result is estimated large instead of returning estimated error. (Default: True) Returns ------- funm : (N, N) ndarray Value of the matrix function specified by func evaluated at `A` errest : float (if disp == False) 1-norm of the estimated error, ||err||_1 / ||A||_1 Examples -------- >>> from scipy.linalg import funm >>> a = np.array([[1.0, 3.0], [1.0, 4.0]]) >>> funm(a, lambda x: x*x) array([[ 4., 15.], [ 5., 19.]]) >>> a.dot(a) array([[ 4., 15.], [ 5., 19.]]) Notes ----- This function implements the general algorithm based on Schur decomposition (Algorithm 9.1.1. in [1]_). If the input matrix is known to be diagonalizable, then relying on the eigendecomposition is likely to be faster. For example, if your matrix is Hermitian, you can do >>> from scipy.linalg import eigh >>> def funm_herm(a, func, check_finite=False): ... w, v = eigh(a, check_finite=check_finite) ... ## if you further know that your matrix is positive semidefinite, ... ## you can optionally guard against precision errors by doing ... # w = np.maximum(w, 0) ... w = func(w) ... return (v * w).dot(v.conj().T) References ---------- .. [1] Gene H. Golub, Charles F. van Loan, Matrix Computations 4th ed. """ A = _asarray_square(A) # Perform Shur decomposition (lapack ?gees) T, Z = schur(A) T, Z = rsf2csf(T,Z) n,n = T.shape F = diag(func(diag(T))) # apply function to diagonal elements F = F.astype(T.dtype.char) # e.g. 
when F is real but T is complex minden = abs(T[0,0]) # implement Algorithm 11.1.1 from Golub and Van Loan # "matrix Computations." for p in range(1,n): for i in range(1,n-p+1): j = i + p s = T[i-1,j-1] * (F[j-1,j-1] - F[i-1,i-1]) ksl = slice(i,j-1) val = dot(T[i-1,ksl],F[ksl,j-1]) - dot(F[i-1,ksl],T[ksl,j-1]) s = s + val den = T[j-1,j-1] - T[i-1,i-1] if den != 0.0: s = s / den F[i-1,j-1] = s minden = min(minden,abs(den)) F = dot(dot(Z, F), transpose(conjugate(Z))) F = _maybe_real(A, F) tol = {0:feps, 1:eps}[_array_precision[F.dtype.char]] if minden == 0.0: minden = tol err = min(1, max(tol,(tol/minden)*norm(triu(T,1),1))) if product(ravel(logical_not(isfinite(F))),axis=0): err = Inf if disp: if err > 1000*tol: print("funm result may be inaccurate, approximate err =", err) return F else: return F, err
[ "def", "funm", "(", "A", ",", "func", ",", "disp", "=", "True", ")", ":", "A", "=", "_asarray_square", "(", "A", ")", "# Perform Shur decomposition (lapack ?gees)", "T", ",", "Z", "=", "schur", "(", "A", ")", "T", ",", "Z", "=", "rsf2csf", "(", "T", ...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/linalg/matfuncs.py#L559-L660
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/dataview.py
python
DataViewCtrl.EditItem
(*args, **kwargs)
return _dataview.DataViewCtrl_EditItem(*args, **kwargs)
EditItem(self, DataViewItem item, DataViewColumn column)
EditItem(self, DataViewItem item, DataViewColumn column)
[ "EditItem", "(", "self", "DataViewItem", "item", "DataViewColumn", "column", ")" ]
def EditItem(*args, **kwargs): """EditItem(self, DataViewItem item, DataViewColumn column)""" return _dataview.DataViewCtrl_EditItem(*args, **kwargs)
[ "def", "EditItem", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_dataview", ".", "DataViewCtrl_EditItem", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/dataview.py#L1832-L1834
facebook/wangle
2e7e3fbb3a15c4986d6fe0e36c31daeeba614ce3
build/fbcode_builder/getdeps/builder.py
python
CargoBuilder._extract_crates
(cargo_toml_file, dep_to_git)
return deps_to_crates
This functions reads content of provided cargo toml file and extracts crate names per each dependency. The extraction is done by a heuristic so it might be incorrect.
This functions reads content of provided cargo toml file and extracts crate names per each dependency. The extraction is done by a heuristic so it might be incorrect.
[ "This", "functions", "reads", "content", "of", "provided", "cargo", "toml", "file", "and", "extracts", "crate", "names", "per", "each", "dependency", ".", "The", "extraction", "is", "done", "by", "a", "heuristic", "so", "it", "might", "be", "incorrect", "." ...
def _extract_crates(cargo_toml_file, dep_to_git): """ This functions reads content of provided cargo toml file and extracts crate names per each dependency. The extraction is done by a heuristic so it might be incorrect. """ deps_to_crates = {} with open(cargo_toml_file, "r") as f: for line in f.readlines(): if line.startswith("#") or "git = " not in line: continue # filter out commented lines and ones without git deps for name, conf in dep_to_git.items(): if 'git = "{}"'.format(conf["repo_url"]) in line: pkg_template = ' package = "' if pkg_template in line: crate_name, _, _ = line.partition(pkg_template)[ 2 ].partition('"') else: crate_name, _, _ = line.partition("=") deps_to_crates.setdefault(name, set()).add(crate_name.strip()) return deps_to_crates
[ "def", "_extract_crates", "(", "cargo_toml_file", ",", "dep_to_git", ")", ":", "deps_to_crates", "=", "{", "}", "with", "open", "(", "cargo_toml_file", ",", "\"r\"", ")", "as", "f", ":", "for", "line", "in", "f", ".", "readlines", "(", ")", ":", "if", ...
https://github.com/facebook/wangle/blob/2e7e3fbb3a15c4986d6fe0e36c31daeeba614ce3/build/fbcode_builder/getdeps/builder.py#L1419-L1440
LiquidPlayer/LiquidCore
9405979363f2353ac9a71ad8ab59685dd7f919c9
deps/node-10.15.3/tools/jinja2/environment.py
python
Template.make_module_async
(self, vars=None, shared=False, locals=None)
As template module creation can invoke template code for asynchronous exections this method must be used instead of the normal :meth:`make_module` one. Likewise the module attribute becomes unavailable in async mode.
As template module creation can invoke template code for asynchronous exections this method must be used instead of the normal :meth:`make_module` one. Likewise the module attribute becomes unavailable in async mode.
[ "As", "template", "module", "creation", "can", "invoke", "template", "code", "for", "asynchronous", "exections", "this", "method", "must", "be", "used", "instead", "of", "the", "normal", ":", "meth", ":", "make_module", "one", ".", "Likewise", "the", "module",...
def make_module_async(self, vars=None, shared=False, locals=None): """As template module creation can invoke template code for asynchronous exections this method must be used instead of the normal :meth:`make_module` one. Likewise the module attribute becomes unavailable in async mode. """ # see asyncsupport for the actual implementation raise NotImplementedError('This feature is not available for this ' 'version of Python')
[ "def", "make_module_async", "(", "self", ",", "vars", "=", "None", ",", "shared", "=", "False", ",", "locals", "=", "None", ")", ":", "# see asyncsupport for the actual implementation", "raise", "NotImplementedError", "(", "'This feature is not available for this '", "'...
https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/node-10.15.3/tools/jinja2/environment.py#L1075-L1083
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/client/session.py
python
BaseSession.list_devices
(self)
return device_list
Lists available devices in this session. ```python devices = sess.list_devices() for d in devices: print(d.name) ``` Where: Each element in the list has the following properties name: A string with the full name of the device. ex: `/job:worker/replica:0/task:3/device:CPU:0` device_type: The type of the device (e.g. `CPU`, `GPU`, `TPU`.) memory_limit: The maximum amount of memory available on the device. Note: depending on the device, it is possible the usable memory could be substantially less. Raises: tf.errors.OpError: If it encounters an error (e.g. session is in an invalid state, or network errors occur). Returns: A list of devices in the session.
Lists available devices in this session.
[ "Lists", "available", "devices", "in", "this", "session", "." ]
def list_devices(self): """Lists available devices in this session. ```python devices = sess.list_devices() for d in devices: print(d.name) ``` Where: Each element in the list has the following properties name: A string with the full name of the device. ex: `/job:worker/replica:0/task:3/device:CPU:0` device_type: The type of the device (e.g. `CPU`, `GPU`, `TPU`.) memory_limit: The maximum amount of memory available on the device. Note: depending on the device, it is possible the usable memory could be substantially less. Raises: tf.errors.OpError: If it encounters an error (e.g. session is in an invalid state, or network errors occur). Returns: A list of devices in the session. """ raw_device_list = tf_session.TF_SessionListDevices(self._session) device_list = [] size = tf_session.TF_DeviceListCount(raw_device_list) for i in range(size): name = tf_session.TF_DeviceListName(raw_device_list, i) device_type = tf_session.TF_DeviceListType(raw_device_list, i) memory = tf_session.TF_DeviceListMemoryBytes(raw_device_list, i) incarnation = tf_session.TF_DeviceListIncarnation(raw_device_list, i) device_list.append( _DeviceAttributes(name, device_type, memory, incarnation)) tf_session.TF_DeleteDeviceList(raw_device_list) return device_list
[ "def", "list_devices", "(", "self", ")", ":", "raw_device_list", "=", "tf_session", ".", "TF_SessionListDevices", "(", "self", ".", "_session", ")", "device_list", "=", "[", "]", "size", "=", "tf_session", ".", "TF_DeviceListCount", "(", "raw_device_list", ")", ...
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/client/session.py#L716-L752
OpenXRay/xray-15
1390dfb08ed20997d7e8c95147ea8e8cb71f5e86
cs/sdk/3d_sdk/maya/ver-2008/devkit/plug-ins/scripted/torusField.py
python
TorusField.__ownerPosition
(self, block)
return ownerPosArray
If this field has an owner, get the owner's position array or centroid, then assign it to the ownerPosArray. If it does not have owner, get the field position in the world space, and assign it to the given array, ownerPosArray.
If this field has an owner, get the owner's position array or centroid, then assign it to the ownerPosArray. If it does not have owner, get the field position in the world space, and assign it to the given array, ownerPosArray.
[ "If", "this", "field", "has", "an", "owner", "get", "the", "owner", "s", "position", "array", "or", "centroid", "then", "assign", "it", "to", "the", "ownerPosArray", ".", "If", "it", "does", "not", "have", "owner", "get", "the", "field", "position", "in"...
def __ownerPosition(self, block): """ If this field has an owner, get the owner's position array or centroid, then assign it to the ownerPosArray. If it does not have owner, get the field position in the world space, and assign it to the given array, ownerPosArray. """ ownerPosArray = OpenMaya.MVectorArray() if self.__applyPerVertexValue(block): ownerPos = OpenMayaMPx.cvar.MPxFieldNode_mOwnerPosData try: hOwnerPos = block.inputValue(ownerPos) except: # get the field position in the world space # and add it into ownerPosArray. # worldPos = self.__getWorldPosition() ownerPosArray.append(worldPos) else: dOwnerPos = hOwnerPos.data() fnOwnerPos = OpenMaya.MFnVectorArrayData(dOwnerPos) try: posArray = fnOwnerPos.array() except: worldPos = self.__getWorldPosition() ownerPosArray.append(worldPos) else: # assign vectors from block to ownerPosArray. # for i in range(posArray.length()): ownerPosArray.append(posArray[i]) else: try: centroidV = self.__ownerCentroidValue(block) except: # get the field position in the world space. # worldPos = self.__getWorldPosition() ownerPosArray.append(worldPos) else: ownerPosArray.append(centroidV) return ownerPosArray
[ "def", "__ownerPosition", "(", "self", ",", "block", ")", ":", "ownerPosArray", "=", "OpenMaya", ".", "MVectorArray", "(", ")", "if", "self", ".", "__applyPerVertexValue", "(", "block", ")", ":", "ownerPos", "=", "OpenMayaMPx", ".", "cvar", ".", "MPxFieldNod...
https://github.com/OpenXRay/xray-15/blob/1390dfb08ed20997d7e8c95147ea8e8cb71f5e86/cs/sdk/3d_sdk/maya/ver-2008/devkit/plug-ins/scripted/torusField.py#L458-L500
lzhang10/maxent
3560c94b737d4272ed86de529e50d823200e6d8e
example/postagger/postagger.py
python
choose_context
(type = None)
Choose context type, default is for English.
Choose context type, default is for English.
[ "Choose", "context", "type", "default", "is", "for", "English", "." ]
def choose_context(type = None): """Choose context type, default is for English.""" if type: return eval('get_context' + str(type)) else: return get_context_english
[ "def", "choose_context", "(", "type", "=", "None", ")", ":", "if", "type", ":", "return", "eval", "(", "'get_context'", "+", "str", "(", "type", ")", ")", "else", ":", "return", "get_context_english" ]
https://github.com/lzhang10/maxent/blob/3560c94b737d4272ed86de529e50d823200e6d8e/example/postagger/postagger.py#L35-L40
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/agw/aui/framemanager.py
python
AuiManagerEvent.GetDC
(self)
return self.dc
Returns the associated :class:`DC` device context (if any).
Returns the associated :class:`DC` device context (if any).
[ "Returns", "the", "associated", ":", "class", ":", "DC", "device", "context", "(", "if", "any", ")", "." ]
def GetDC(self): """ Returns the associated :class:`DC` device context (if any). """ return self.dc
[ "def", "GetDC", "(", "self", ")", ":", "return", "self", ".", "dc" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/aui/framemanager.py#L418-L421
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
qt/python/mantidqtinterfaces/mantidqtinterfaces/PyChop/ISISFermi.py
python
ISISFermi.getFlux
(self, Ei_in=None, frequency=None)
return flux
Calculates the flux at the sample position in n / cm**2 . uA.s flux = getFlux() flux = getFlux(ei) flux = getFlux(ei, omega) Inputs: ei - incident energy in meV [default: preset energy] omega - chopper frequency in Hz [default: preset frequency] Output: flux - the flux in n / cm**2 . uA.s
Calculates the flux at the sample position in n / cm**2 . uA.s
[ "Calculates", "the", "flux", "at", "the", "sample", "position", "in", "n", "/", "cm", "**", "2", ".", "uA", ".", "s" ]
def getFlux(self, Ei_in=None, frequency=None): """ Calculates the flux at the sample position in n / cm**2 . uA.s flux = getFlux() flux = getFlux(ei) flux = getFlux(ei, omega) Inputs: ei - incident energy in meV [default: preset energy] omega - chopper frequency in Hz [default: preset frequency] Output: flux - the flux in n / cm**2 . uA.s """ Ei = self.Ei if Ei_in is None else Ei_in if Ei is None: raise ValueError('Incident energy has not been specified') if frequency: oldfreq = self.freq self.setFrequency(frequency) moderator_flux = self.getMeasuredFlux(Ei) chop_par = self.__chopperParameters[self.instname][self.choppername]['par'] pslit = chop_par[0] / 1000.00 dslat = (chop_par[0] + chop_par[1]) / 1000.00 radius = chop_par[2] / 1000.00 rho = chop_par[3] / 1000.00 chopper_transmission = Chop.achop(Ei, self.freq[0], dslat, pslit, radius, rho) x0 = self.__Instruments[self.instname][0] x1 = self.__Instruments[self.instname][2] flux = 84403.060 * moderator_flux * (chopper_transmission/dslat) / (x0*(x1+x0)) flux /= self.__chopperParameters[self.instname][self.choppername]['fluxcorr'] if frequency: self.setFrequency(oldfreq) return flux
[ "def", "getFlux", "(", "self", ",", "Ei_in", "=", "None", ",", "frequency", "=", "None", ")", ":", "Ei", "=", "self", ".", "Ei", "if", "Ei_in", "is", "None", "else", "Ei_in", "if", "Ei", "is", "None", ":", "raise", "ValueError", "(", "'Incident energ...
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/PyChop/ISISFermi.py#L378-L412
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/locale.py
python
setlocale
(category, locale=None)
return _setlocale(category, locale)
Set the locale for the given category. The locale can be a string, an iterable of two strings (language code and encoding), or None. Iterables are converted to strings using the locale aliasing engine. Locale strings are passed directly to the C lib. category may be given as one of the LC_* values.
Set the locale for the given category. The locale can be a string, an iterable of two strings (language code and encoding), or None.
[ "Set", "the", "locale", "for", "the", "given", "category", ".", "The", "locale", "can", "be", "a", "string", "an", "iterable", "of", "two", "strings", "(", "language", "code", "and", "encoding", ")", "or", "None", "." ]
def setlocale(category, locale=None): """ Set the locale for the given category. The locale can be a string, an iterable of two strings (language code and encoding), or None. Iterables are converted to strings using the locale aliasing engine. Locale strings are passed directly to the C lib. category may be given as one of the LC_* values. """ if locale and not isinstance(locale, _builtin_str): # convert to string locale = normalize(_build_localename(locale)) return _setlocale(category, locale)
[ "def", "setlocale", "(", "category", ",", "locale", "=", "None", ")", ":", "if", "locale", "and", "not", "isinstance", "(", "locale", ",", "_builtin_str", ")", ":", "# convert to string", "locale", "=", "normalize", "(", "_build_localename", "(", "locale", "...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/locale.py#L595-L610
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/indexes/multi.py
python
MultiIndex.__setstate__
(self, state)
Necessary for making this object picklable
Necessary for making this object picklable
[ "Necessary", "for", "making", "this", "object", "picklable" ]
def __setstate__(self, state): """Necessary for making this object picklable""" if isinstance(state, dict): levels = state.get("levels") codes = state.get("codes") sortorder = state.get("sortorder") names = state.get("names") elif isinstance(state, tuple): nd_state, own_state = state levels, codes, sortorder, names = own_state self._set_levels([Index(x) for x in levels], validate=False) self._set_codes(codes) new_codes = self._verify_integrity() self._set_codes(new_codes) self._set_names(names) self.sortorder = sortorder self._reset_identity()
[ "def", "__setstate__", "(", "self", ",", "state", ")", ":", "if", "isinstance", "(", "state", ",", "dict", ")", ":", "levels", "=", "state", ".", "get", "(", "\"levels\"", ")", "codes", "=", "state", ".", "get", "(", "\"codes\"", ")", "sortorder", "=...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/indexes/multi.py#L1905-L1925
TGAC/KAT
e8870331de2b4bb0a1b3b91c6afb8fb9d59e9216
deps/boost/tools/build/src/build/generators.py
python
Generator.construct_result
(self, consumed, project, name, prop_set)
return result
Constructs the dependency graph that will be returned by this generator. consumed: Already prepared list of consumable targets If generator requires several source files will contain exactly len $(self.source_types_) targets with matching types Otherwise, might contain several targets with the type of self.source_types_ [0] project: name: prop_set: Properties to be used for all actions create here
Constructs the dependency graph that will be returned by this generator. consumed: Already prepared list of consumable targets If generator requires several source files will contain exactly len $(self.source_types_) targets with matching types Otherwise, might contain several targets with the type of self.source_types_ [0] project: name: prop_set: Properties to be used for all actions create here
[ "Constructs", "the", "dependency", "graph", "that", "will", "be", "returned", "by", "this", "generator", ".", "consumed", ":", "Already", "prepared", "list", "of", "consumable", "targets", "If", "generator", "requires", "several", "source", "files", "will", "con...
def construct_result (self, consumed, project, name, prop_set): """ Constructs the dependency graph that will be returned by this generator. consumed: Already prepared list of consumable targets If generator requires several source files will contain exactly len $(self.source_types_) targets with matching types Otherwise, might contain several targets with the type of self.source_types_ [0] project: name: prop_set: Properties to be used for all actions create here """ if __debug__: from .targets import ProjectTarget assert is_iterable_typed(consumed, virtual_target.VirtualTarget) assert isinstance(project, ProjectTarget) assert isinstance(name, basestring) or name is None assert isinstance(prop_set, property_set.PropertySet) result = [] # If this is 1->1 transformation, apply it to all consumed targets in order. if len (self.source_types_) < 2 and not self.composing_: for r in consumed: result.extend(self.generated_targets([r], prop_set, project, name)) elif consumed: result.extend(self.generated_targets(consumed, prop_set, project, name)) return result
[ "def", "construct_result", "(", "self", ",", "consumed", ",", "project", ",", "name", ",", "prop_set", ")", ":", "if", "__debug__", ":", "from", ".", "targets", "import", "ProjectTarget", "assert", "is_iterable_typed", "(", "consumed", ",", "virtual_target", "...
https://github.com/TGAC/KAT/blob/e8870331de2b4bb0a1b3b91c6afb8fb9d59e9216/deps/boost/tools/build/src/build/generators.py#L403-L430
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/base.py
python
Node.cloneNode
(self)
Return a shallow copy of the current node i.e. a node with the same name and attributes but with no parent or child nodes
Return a shallow copy of the current node i.e. a node with the same
[ "Return", "a", "shallow", "copy", "of", "the", "current", "node", "i", ".", "e", ".", "a", "node", "with", "the", "same" ]
def cloneNode(self): """Return a shallow copy of the current node i.e. a node with the same name and attributes but with no parent or child nodes """ raise NotImplementedError
[ "def", "cloneNode", "(", "self", ")", ":", "raise", "NotImplementedError" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/base.py#L219-L227
MegEngine/MegEngine
ce9ad07a27ec909fb8db4dd67943d24ba98fb93a
imperative/python/megengine/dtr/dtr.py
python
disable
()
r"""Stop recording computing path of tensors and performing DTR policy.
r"""Stop recording computing path of tensors and performing DTR policy.
[ "r", "Stop", "recording", "computing", "path", "of", "tensors", "and", "performing", "DTR", "policy", "." ]
def disable(): r"""Stop recording computing path of tensors and performing DTR policy.""" _set_option("enable_dtr_auto_drop", 0) _set_option("enable_drop", 0) _set_option("record_computing_path", 0) _clear_candidates()
[ "def", "disable", "(", ")", ":", "_set_option", "(", "\"enable_dtr_auto_drop\"", ",", "0", ")", "_set_option", "(", "\"enable_drop\"", ",", "0", ")", "_set_option", "(", "\"record_computing_path\"", ",", "0", ")", "_clear_candidates", "(", ")" ]
https://github.com/MegEngine/MegEngine/blob/ce9ad07a27ec909fb8db4dd67943d24ba98fb93a/imperative/python/megengine/dtr/dtr.py#L127-L132
FreeCAD/FreeCAD
ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/Arch/ArchAxisSystem.py
python
makeAxisSystem
(axes,name="Axis System")
return obj
makeAxisSystem(axes): makes a system from the given list of axes
makeAxisSystem(axes): makes a system from the given list of axes
[ "makeAxisSystem", "(", "axes", ")", ":", "makes", "a", "system", "from", "the", "given", "list", "of", "axes" ]
def makeAxisSystem(axes,name="Axis System"): '''makeAxisSystem(axes): makes a system from the given list of axes''' if not isinstance(axes,list): axes = [axes] obj = FreeCAD.ActiveDocument.addObject("App::FeaturePython","AxisSystem") obj.Label = translate("Arch",name) _AxisSystem(obj) obj.Axes = axes if FreeCAD.GuiUp: _ViewProviderAxisSystem(obj.ViewObject) FreeCAD.ActiveDocument.recompute() return obj
[ "def", "makeAxisSystem", "(", "axes", ",", "name", "=", "\"Axis System\"", ")", ":", "if", "not", "isinstance", "(", "axes", ",", "list", ")", ":", "axes", "=", "[", "axes", "]", "obj", "=", "FreeCAD", ".", "ActiveDocument", ".", "addObject", "(", "\"A...
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Arch/ArchAxisSystem.py#L49-L62
H-uru/Plasma
c2140ea046e82e9c199e257a7f2e7edb42602871
Scripts/Python/xAvatarCustomization.py
python
xAvatarCustomization.ISliderToMorph
(self,slider)
return morph
convert slider value (0.0 to 12.0) to morph value (1.0 to 1.0)
convert slider value (0.0 to 12.0) to morph value (1.0 to 1.0)
[ "convert", "slider", "value", "(", "0", ".", "0", "to", "12", ".", "0", ")", "to", "morph", "value", "(", "1", ".", "0", "to", "1", ".", "0", ")" ]
def ISliderToMorph(self,slider): "convert slider value (0.0 to 12.0) to morph value (1.0 to 1.0)" morph = slider / 6.0 # convert to 0 to 2 range morph -= 1.0 # convert to -1 to 1 range morph = -morph # flip the value since it seems to be backwards return morph
[ "def", "ISliderToMorph", "(", "self", ",", "slider", ")", ":", "morph", "=", "slider", "/", "6.0", "# convert to 0 to 2 range", "morph", "-=", "1.0", "# convert to -1 to 1 range", "morph", "=", "-", "morph", "# flip the value since it seems to be backwards", "return", ...
https://github.com/H-uru/Plasma/blob/c2140ea046e82e9c199e257a7f2e7edb42602871/Scripts/Python/xAvatarCustomization.py#L1919-L1924
qgis/QGIS
15a77662d4bb712184f6aa60d0bd663010a76a75
python/plugins/processing/algs/qgis/voronoi.py
python
computeDelaunayTriangulation
(points)
return context.triangles
Takes a list of point objects (which must have x and y fields). Returns a list of 3-tuples: the indices of the points that form a Delaunay triangle.
Takes a list of point objects (which must have x and y fields). Returns a list of 3-tuples: the indices of the points that form a Delaunay triangle.
[ "Takes", "a", "list", "of", "point", "objects", "(", "which", "must", "have", "x", "and", "y", "fields", ")", ".", "Returns", "a", "list", "of", "3", "-", "tuples", ":", "the", "indices", "of", "the", "points", "that", "form", "a", "Delaunay", "trian...
def computeDelaunayTriangulation(points): """ Takes a list of point objects (which must have x and y fields). Returns a list of 3-tuples: the indices of the points that form a Delaunay triangle. """ siteList = SiteList(points) context = Context() context.triangulate = True voronoi(siteList, context) return context.triangles
[ "def", "computeDelaunayTriangulation", "(", "points", ")", ":", "siteList", "=", "SiteList", "(", "points", ")", "context", "=", "Context", "(", ")", "context", ".", "triangulate", "=", "True", "voronoi", "(", "siteList", ",", "context", ")", "return", "cont...
https://github.com/qgis/QGIS/blob/15a77662d4bb712184f6aa60d0bd663010a76a75/python/plugins/processing/algs/qgis/voronoi.py#L836-L845
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/sndhdr.py
python
what
(filename)
return res
Guess the type of a sound file
Guess the type of a sound file
[ "Guess", "the", "type", "of", "a", "sound", "file" ]
def what(filename): """Guess the type of a sound file""" res = whathdr(filename) return res
[ "def", "what", "(", "filename", ")", ":", "res", "=", "whathdr", "(", "filename", ")", "return", "res" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/sndhdr.py#L35-L38
kismetwireless/kismet
a7c0dc270c960fb1f58bd9cec4601c201885fd4e
capture_freaklabs_zigbee/KismetCaptureFreaklabsZigbee/kismetexternal/__init__.py
python
ExternalInterface.kill
(self)
Shutdown the external interface service :return: None
Shutdown the external interface service
[ "Shutdown", "the", "external", "interface", "service" ]
def kill(self): """ Shutdown the external interface service :return: None """ self.bufferlock.acquire() try: self.kill_ioloop = True finally: self.bufferlock.release()
[ "def", "kill", "(", "self", ")", ":", "self", ".", "bufferlock", ".", "acquire", "(", ")", "try", ":", "self", ".", "kill_ioloop", "=", "True", "finally", ":", "self", ".", "bufferlock", ".", "release", "(", ")" ]
https://github.com/kismetwireless/kismet/blob/a7c0dc270c960fb1f58bd9cec4601c201885fd4e/capture_freaklabs_zigbee/KismetCaptureFreaklabsZigbee/kismetexternal/__init__.py#L353-L363
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/third_party/lib_aarch64/python2.7/dist-packages/rospkg/manifest.py
python
_get_text
(nodes)
return "".join([n.data for n in nodes if n.nodeType == n.TEXT_NODE])
DOM utility routine for getting contents of text nodes
DOM utility routine for getting contents of text nodes
[ "DOM", "utility", "routine", "for", "getting", "contents", "of", "text", "nodes" ]
def _get_text(nodes): """ DOM utility routine for getting contents of text nodes """ return "".join([n.data for n in nodes if n.nodeType == n.TEXT_NODE])
[ "def", "_get_text", "(", "nodes", ")", ":", "return", "\"\"", ".", "join", "(", "[", "n", ".", "data", "for", "n", "in", "nodes", "if", "n", ".", "nodeType", "==", "n", ".", "TEXT_NODE", "]", ")" ]
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_aarch64/python2.7/dist-packages/rospkg/manifest.py#L346-L350
yue/yue
619d62c191b13c51c01be451dc48917c34a5aefc
building/tools/cpplint.py
python
CloseExpression
(clean_lines, linenum, pos)
return (line, clean_lines.NumLines(), -1)
If input points to ( or { or [ or <, finds the position that closes it. If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the linenum/pos that correspond to the closing of the expression. TODO(unknown): cpplint spends a fair bit of time matching parentheses. Ideally we would want to index all opening and closing parentheses once and have CloseExpression be just a simple lookup, but due to preprocessor tricks, this is not so easy. Args: clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. pos: A position on the line. Returns: A tuple (line, linenum, pos) pointer *past* the closing brace, or (line, len(lines), -1) if we never find a close. Note we ignore strings and comments when matching; and the line we return is the 'cleansed' line at linenum.
If input points to ( or { or [ or <, finds the position that closes it.
[ "If", "input", "points", "to", "(", "or", "{", "or", "[", "or", "<", "finds", "the", "position", "that", "closes", "it", "." ]
def CloseExpression(clean_lines, linenum, pos): """If input points to ( or { or [ or <, finds the position that closes it. If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the linenum/pos that correspond to the closing of the expression. TODO(unknown): cpplint spends a fair bit of time matching parentheses. Ideally we would want to index all opening and closing parentheses once and have CloseExpression be just a simple lookup, but due to preprocessor tricks, this is not so easy. Args: clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. pos: A position on the line. Returns: A tuple (line, linenum, pos) pointer *past* the closing brace, or (line, len(lines), -1) if we never find a close. Note we ignore strings and comments when matching; and the line we return is the 'cleansed' line at linenum. """ line = clean_lines.elided[linenum] if (line[pos] not in '({[<') or Match(r'<[<=]', line[pos:]): return (line, clean_lines.NumLines(), -1) # Check first line (end_pos, stack) = FindEndOfExpressionInLine(line, pos, []) if end_pos > -1: return (line, linenum, end_pos) # Continue scanning forward while stack and linenum < clean_lines.NumLines() - 1: linenum += 1 line = clean_lines.elided[linenum] (end_pos, stack) = FindEndOfExpressionInLine(line, 0, stack) if end_pos > -1: return (line, linenum, end_pos) # Did not find end of expression before end of file, give up return (line, clean_lines.NumLines(), -1)
[ "def", "CloseExpression", "(", "clean_lines", ",", "linenum", ",", "pos", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "if", "(", "line", "[", "pos", "]", "not", "in", "'({[<'", ")", "or", "Match", "(", "r'<[<=]'", ",", "...
https://github.com/yue/yue/blob/619d62c191b13c51c01be451dc48917c34a5aefc/building/tools/cpplint.py#L1591-L1632
Polidea/SiriusObfuscator
b0e590d8130e97856afe578869b83a209e2b19be
SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py
python
SBTypeCategory.GetTypeNameSpecifierForSyntheticAtIndex
(self, *args)
return _lldb.SBTypeCategory_GetTypeNameSpecifierForSyntheticAtIndex(self, *args)
GetTypeNameSpecifierForSyntheticAtIndex(self, uint32_t arg0) -> SBTypeNameSpecifier
GetTypeNameSpecifierForSyntheticAtIndex(self, uint32_t arg0) -> SBTypeNameSpecifier
[ "GetTypeNameSpecifierForSyntheticAtIndex", "(", "self", "uint32_t", "arg0", ")", "-", ">", "SBTypeNameSpecifier" ]
def GetTypeNameSpecifierForSyntheticAtIndex(self, *args): """GetTypeNameSpecifierForSyntheticAtIndex(self, uint32_t arg0) -> SBTypeNameSpecifier""" return _lldb.SBTypeCategory_GetTypeNameSpecifierForSyntheticAtIndex(self, *args)
[ "def", "GetTypeNameSpecifierForSyntheticAtIndex", "(", "self", ",", "*", "args", ")", ":", "return", "_lldb", ".", "SBTypeCategory_GetTypeNameSpecifierForSyntheticAtIndex", "(", "self", ",", "*", "args", ")" ]
https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py#L10796-L10798
xenia-project/xenia
9b1fdac98665ac091b9660a5d0fbb259ed79e578
third_party/google-styleguide/cpplint/cpplint.py
python
_VerboseLevel
()
return _cpplint_state.verbose_level
Returns the module's verbosity setting.
Returns the module's verbosity setting.
[ "Returns", "the", "module", "s", "verbosity", "setting", "." ]
def _VerboseLevel(): """Returns the module's verbosity setting.""" return _cpplint_state.verbose_level
[ "def", "_VerboseLevel", "(", ")", ":", "return", "_cpplint_state", ".", "verbose_level" ]
https://github.com/xenia-project/xenia/blob/9b1fdac98665ac091b9660a5d0fbb259ed79e578/third_party/google-styleguide/cpplint/cpplint.py#L767-L769
KhronosGroup/SPIRV-LLVM
1eb85593f3fe2c39379b9a9b088d51eda4f42b8b
bindings/python/llvm/object.py
python
Symbol.cache
(self)
Cache all cacheable properties.
Cache all cacheable properties.
[ "Cache", "all", "cacheable", "properties", "." ]
def cache(self): """Cache all cacheable properties.""" getattr(self, 'name') getattr(self, 'address') getattr(self, 'size')
[ "def", "cache", "(", "self", ")", ":", "getattr", "(", "self", ",", "'name'", ")", "getattr", "(", "self", ",", "'address'", ")", "getattr", "(", "self", ",", "'size'", ")" ]
https://github.com/KhronosGroup/SPIRV-LLVM/blob/1eb85593f3fe2c39379b9a9b088d51eda4f42b8b/bindings/python/llvm/object.py#L344-L348
priyankchheda/algorithms
c361aa9071573fa9966d5b02d05e524815abcf2b
sort/quick.py
python
quick
(data, low, high)
QuickSort is a Divide and Conquer algorithm. It picks an element as pivot and partitions the given array around the picked pivot. It uses last element as pivot element :param array: list of elements that needs to be sorted :param low: lower index of sub-array :param high: higher index of sub-array
QuickSort is a Divide and Conquer algorithm. It picks an element as pivot and partitions the given array around the picked pivot. It uses last element as pivot element
[ "QuickSort", "is", "a", "Divide", "and", "Conquer", "algorithm", ".", "It", "picks", "an", "element", "as", "pivot", "and", "partitions", "the", "given", "array", "around", "the", "picked", "pivot", ".", "It", "uses", "last", "element", "as", "pivot", "ele...
def quick(data, low, high): """ QuickSort is a Divide and Conquer algorithm. It picks an element as pivot and partitions the given array around the picked pivot. It uses last element as pivot element :param array: list of elements that needs to be sorted :param low: lower index of sub-array :param high: higher index of sub-array """ if high <= low: return j = partition(data, low, high) quick(data, low, j-1) quick(data, j+1, high)
[ "def", "quick", "(", "data", ",", "low", ",", "high", ")", ":", "if", "high", "<=", "low", ":", "return", "j", "=", "partition", "(", "data", ",", "low", ",", "high", ")", "quick", "(", "data", ",", "low", ",", "j", "-", "1", ")", "quick", "(...
https://github.com/priyankchheda/algorithms/blob/c361aa9071573fa9966d5b02d05e524815abcf2b/sort/quick.py#L24-L37
okex/V3-Open-API-SDK
c5abb0db7e2287718e0055e17e57672ce0ec7fd9
okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/pkg_resources/__init__.py
python
WorkingSet.__contains__
(self, dist)
return self.by_key.get(dist.key) == dist
True if `dist` is the active distribution for its project
True if `dist` is the active distribution for its project
[ "True", "if", "dist", "is", "the", "active", "distribution", "for", "its", "project" ]
def __contains__(self, dist): """True if `dist` is the active distribution for its project""" return self.by_key.get(dist.key) == dist
[ "def", "__contains__", "(", "self", ",", "dist", ")", ":", "return", "self", ".", "by_key", ".", "get", "(", "dist", ".", "key", ")", "==", "dist" ]
https://github.com/okex/V3-Open-API-SDK/blob/c5abb0db7e2287718e0055e17e57672ce0ec7fd9/okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/pkg_resources/__init__.py#L624-L626
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python/src/Lib/lib-tk/Tkinter.py
python
Misc.clipboard_clear
(self, **kw)
Clear the data in the Tk clipboard. A widget specified for the optional displayof keyword argument specifies the target display.
Clear the data in the Tk clipboard.
[ "Clear", "the", "data", "in", "the", "Tk", "clipboard", "." ]
def clipboard_clear(self, **kw): """Clear the data in the Tk clipboard. A widget specified for the optional displayof keyword argument specifies the target display.""" if 'displayof' not in kw: kw['displayof'] = self._w self.tk.call(('clipboard', 'clear') + self._options(kw))
[ "def", "clipboard_clear", "(", "self", ",", "*", "*", "kw", ")", ":", "if", "'displayof'", "not", "in", "kw", ":", "kw", "[", "'displayof'", "]", "=", "self", ".", "_w", "self", ".", "tk", ".", "call", "(", "(", "'clipboard'", ",", "'clear'", ")", ...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/lib-tk/Tkinter.py#L656-L662
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/pyparsing.py
python
matchOnlyAtCol
(n)
return verifyCol
Helper method for defining parse actions that require matching at a specific column in the input text.
Helper method for defining parse actions that require matching at
[ "Helper", "method", "for", "defining", "parse", "actions", "that", "require", "matching", "at" ]
def matchOnlyAtCol(n): """Helper method for defining parse actions that require matching at a specific column in the input text. """ def verifyCol(strg, locn, toks): if col(locn, strg) != n: raise ParseException(strg, locn, "matched token not at column %d" % n) return verifyCol
[ "def", "matchOnlyAtCol", "(", "n", ")", ":", "def", "verifyCol", "(", "strg", ",", "locn", ",", "toks", ")", ":", "if", "col", "(", "locn", ",", "strg", ")", "!=", "n", ":", "raise", "ParseException", "(", "strg", ",", "locn", ",", "\"matched token n...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/pyparsing.py#L11421-L11435
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_core.py
python
PaletteChangedEvent.GetChangedWindow
(*args, **kwargs)
return _core_.PaletteChangedEvent_GetChangedWindow(*args, **kwargs)
GetChangedWindow(self) -> Window
GetChangedWindow(self) -> Window
[ "GetChangedWindow", "(", "self", ")", "-", ">", "Window" ]
def GetChangedWindow(*args, **kwargs): """GetChangedWindow(self) -> Window""" return _core_.PaletteChangedEvent_GetChangedWindow(*args, **kwargs)
[ "def", "GetChangedWindow", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "PaletteChangedEvent_GetChangedWindow", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_core.py#L7174-L7176
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_gdi.py
python
Font.SetDefaultEncoding
(*args, **kwargs)
return _gdi_.Font_SetDefaultEncoding(*args, **kwargs)
SetDefaultEncoding(int encoding) Sets the default font encoding.
SetDefaultEncoding(int encoding)
[ "SetDefaultEncoding", "(", "int", "encoding", ")" ]
def SetDefaultEncoding(*args, **kwargs): """ SetDefaultEncoding(int encoding) Sets the default font encoding. """ return _gdi_.Font_SetDefaultEncoding(*args, **kwargs)
[ "def", "SetDefaultEncoding", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_gdi_", ".", "Font_SetDefaultEncoding", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_gdi.py#L2607-L2613
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/asyncio/tasks.py
python
_wait
(fs, timeout, return_when, loop)
return done, pending
Internal helper for wait(). The fs argument must be a collection of Futures.
Internal helper for wait().
[ "Internal", "helper", "for", "wait", "()", "." ]
async def _wait(fs, timeout, return_when, loop): """Internal helper for wait(). The fs argument must be a collection of Futures. """ assert fs, 'Set of Futures is empty.' waiter = loop.create_future() timeout_handle = None if timeout is not None: timeout_handle = loop.call_later(timeout, _release_waiter, waiter) counter = len(fs) def _on_completion(f): nonlocal counter counter -= 1 if (counter <= 0 or return_when == FIRST_COMPLETED or return_when == FIRST_EXCEPTION and (not f.cancelled() and f.exception() is not None)): if timeout_handle is not None: timeout_handle.cancel() if not waiter.done(): waiter.set_result(None) for f in fs: f.add_done_callback(_on_completion) try: await waiter finally: if timeout_handle is not None: timeout_handle.cancel() for f in fs: f.remove_done_callback(_on_completion) done, pending = set(), set() for f in fs: if f.done(): done.add(f) else: pending.add(f) return done, pending
[ "async", "def", "_wait", "(", "fs", ",", "timeout", ",", "return_when", ",", "loop", ")", ":", "assert", "fs", ",", "'Set of Futures is empty.'", "waiter", "=", "loop", ".", "create_future", "(", ")", "timeout_handle", "=", "None", "if", "timeout", "is", "...
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/asyncio/tasks.py#L497-L538
neoml-lib/neoml
a0d370fba05269a1b2258cef126f77bbd2054a3e
NeoML/Python/neoml/Dnn/Pooling.py
python
MaxOverTimePooling.stride_len
(self)
return self._internal.get_stride_len()
Gets the window stride.
Gets the window stride.
[ "Gets", "the", "window", "stride", "." ]
def stride_len(self): """Gets the window stride. """ return self._internal.get_stride_len()
[ "def", "stride_len", "(", "self", ")", ":", "return", "self", ".", "_internal", ".", "get_stride_len", "(", ")" ]
https://github.com/neoml-lib/neoml/blob/a0d370fba05269a1b2258cef126f77bbd2054a3e/NeoML/Python/neoml/Dnn/Pooling.py#L359-L362
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_controls.py
python
TextCtrl.write
(*args, **kwargs)
return _controls_.TextCtrl_write(*args, **kwargs)
write(self, String text)
write(self, String text)
[ "write", "(", "self", "String", "text", ")" ]
def write(*args, **kwargs): """write(self, String text)""" return _controls_.TextCtrl_write(*args, **kwargs)
[ "def", "write", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_controls_", ".", "TextCtrl_write", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_controls.py#L2051-L2053
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/botocore/docs/sharedexample.py
python
SharedExampleDocumenter.document_shared_example
(self, example, prefix, section, operation_model)
Documents a single shared example based on its definition. :param example: The model of the example :param prefix: The prefix to use in the method example. :param section: The section to write to. :param operation_model: The model of the operation used in the example
Documents a single shared example based on its definition.
[ "Documents", "a", "single", "shared", "example", "based", "on", "its", "definition", "." ]
def document_shared_example(self, example, prefix, section, operation_model): """Documents a single shared example based on its definition. :param example: The model of the example :param prefix: The prefix to use in the method example. :param section: The section to write to. :param operation_model: The model of the operation used in the example """ section.style.new_paragraph() section.write(example.get('description')) section.style.new_line() self.document_input(section, example, prefix, operation_model.input_shape) self.document_output(section, example, operation_model.output_shape)
[ "def", "document_shared_example", "(", "self", ",", "example", ",", "prefix", ",", "section", ",", "operation_model", ")", ":", "section", ".", "style", ".", "new_paragraph", "(", ")", "section", ".", "write", "(", "example", ".", "get", "(", "'description'"...
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/botocore/docs/sharedexample.py#L21-L38
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_windows.py
python
DirDialog.GetMessage
(*args, **kwargs)
return _windows_.DirDialog_GetMessage(*args, **kwargs)
GetMessage(self) -> String Returns the message that will be displayed on the dialog.
GetMessage(self) -> String
[ "GetMessage", "(", "self", ")", "-", ">", "String" ]
def GetMessage(*args, **kwargs): """ GetMessage(self) -> String Returns the message that will be displayed on the dialog. """ return _windows_.DirDialog_GetMessage(*args, **kwargs)
[ "def", "GetMessage", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_windows_", ".", "DirDialog_GetMessage", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_windows.py#L3078-L3084
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/functional_ops.py
python
_LoopBodyCaptureWrapper
(func)
return Wrapper
Returns a wrapper for `func` that handles loop-carried captured inputs.
Returns a wrapper for `func` that handles loop-carried captured inputs.
[ "Returns", "a", "wrapper", "for", "func", "that", "handles", "loop", "-", "carried", "captured", "inputs", "." ]
def _LoopBodyCaptureWrapper(func): """Returns a wrapper for `func` that handles loop-carried captured inputs.""" @function.Defun( *func.declared_input_types, func_name="%s_Wrapper" % func.name) def Wrapper(*args): """A wrapper that handles loop-carried captured inputs.""" result = func(*args) extra_args = tuple(function.get_extra_args()) # Nullary functions return an Operation. Normal functions can't do this # because their return values are converted to Tensors. if isinstance(result, ops.Operation): return extra_args # Unary functions return a single Tensor value. elif not isinstance(result, tuple): return (result,) + extra_args # N-ary functions return a tuple of Tensors. else: return result + extra_args return Wrapper
[ "def", "_LoopBodyCaptureWrapper", "(", "func", ")", ":", "@", "function", ".", "Defun", "(", "*", "func", ".", "declared_input_types", ",", "func_name", "=", "\"%s_Wrapper\"", "%", "func", ".", "name", ")", "def", "Wrapper", "(", "*", "args", ")", ":", "...
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/functional_ops.py#L587-L607
FreeCAD/FreeCAD
ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/Draft/draftobjects/patharray.py
python
PathArray.set_general_properties
(self, obj, properties)
Set general properties only if they don't exist.
Set general properties only if they don't exist.
[ "Set", "general", "properties", "only", "if", "they", "don", "t", "exist", "." ]
def set_general_properties(self, obj, properties): """Set general properties only if they don't exist.""" if "Base" not in properties: _tip = QT_TRANSLATE_NOOP("App::Property","The base object that will be duplicated") obj.addProperty("App::PropertyLinkGlobal", "Base", "Objects", _tip) obj.Base = None if "PathObject" not in properties: _tip = QT_TRANSLATE_NOOP("App::Property","The object along which the copies will be distributed. It must contain 'Edges'.") obj.addProperty("App::PropertyLinkGlobal", "PathObject", "Objects", _tip) obj.PathObject = None # TODO: the 'PathSubelements' property must be changed, # as it does not need to be an 'App::PropertyLinkSubList'. # # In fact, both 'PathObject' and 'PathSubelements' # could be handled with a single 'App::PropertyLinkSub' property, # as this property can be used to select a single object, # or a single object with its subelements. if "PathSubelements" not in properties: _tip = QT_TRANSLATE_NOOP("App::Property","List of connected edges in the 'Path Object'.\nIf these are present, the copies will be created along these subelements only.\nLeave this property empty to create copies along the entire 'Path Object'.") obj.addProperty("App::PropertyLinkSubListGlobal", "PathSubelements", "Objects", _tip) obj.PathSubelements = [] if "Count" not in properties: _tip = QT_TRANSLATE_NOOP("App::Property","Number of copies to create") obj.addProperty("App::PropertyInteger", "Count", "Objects", _tip) obj.Count = 4 if self.use_link and "ExpandArray" not in properties: _tip = QT_TRANSLATE_NOOP("App::Property","Show the individual array elements (only for Link arrays)") obj.addProperty("App::PropertyBool", "ExpandArray", "Objects", _tip) obj.ExpandArray = False obj.setPropertyStatus('Shape', 'Transient')
[ "def", "set_general_properties", "(", "self", ",", "obj", ",", "properties", ")", ":", "if", "\"Base\"", "not", "in", "properties", ":", "_tip", "=", "QT_TRANSLATE_NOOP", "(", "\"App::Property\"", ",", "\"The base object that will be duplicated\"", ")", "obj", ".", ...
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/draftobjects/patharray.py#L165-L213
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/analogclock/analogclock.py
python
AnalogClock.SetTickStyle
(self, style, target=ALL)
Set the tick style, according to the options below. ================= ====================================== TICKS_NONE Don't show tick marks. TICKS_SQUARE Use squares as tick marks. TICKS_CIRCLE Use circles as tick marks. TICKS_POLY Use a polygon as tick marks. A polygon can be passed using SetTickPolygon, otherwise the default polygon will be used. TICKS_DECIMAL Use decimal numbers as tick marks. TICKS_ROMAN Use Roman numbers as tick marks. TICKS_BINARY Use binary numbers as tick marks. TICKS_HEX Use hexadecimal numbers as tick marks. ================= ======================================
Set the tick style, according to the options below.
[ "Set", "the", "tick", "style", "according", "to", "the", "options", "below", "." ]
def SetTickStyle(self, style, target=ALL): """ Set the tick style, according to the options below. ================= ====================================== TICKS_NONE Don't show tick marks. TICKS_SQUARE Use squares as tick marks. TICKS_CIRCLE Use circles as tick marks. TICKS_POLY Use a polygon as tick marks. A polygon can be passed using SetTickPolygon, otherwise the default polygon will be used. TICKS_DECIMAL Use decimal numbers as tick marks. TICKS_ROMAN Use Roman numbers as tick marks. TICKS_BINARY Use binary numbers as tick marks. TICKS_HEX Use hexadecimal numbers as tick marks. ================= ====================================== """ self.Box.SetTickStyle(style, target) self.Reset()
[ "def", "SetTickStyle", "(", "self", ",", "style", ",", "target", "=", "ALL", ")", ":", "self", ".", "Box", ".", "SetTickStyle", "(", "style", ",", "target", ")", "self", ".", "Reset", "(", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/analogclock/analogclock.py#L474-L494
p4lang/PI
38d87e81253feff9fff0660d662c885be78fb719
tools/cpplint.py
python
_SetQuiet
(quiet)
return _cpplint_state.SetQuiet(quiet)
Set the module's quiet status, and return previous setting.
Set the module's quiet status, and return previous setting.
[ "Set", "the", "module", "s", "quiet", "status", "and", "return", "previous", "setting", "." ]
def _SetQuiet(quiet): """Set the module's quiet status, and return previous setting.""" return _cpplint_state.SetQuiet(quiet)
[ "def", "_SetQuiet", "(", "quiet", ")", ":", "return", "_cpplint_state", ".", "SetQuiet", "(", "quiet", ")" ]
https://github.com/p4lang/PI/blob/38d87e81253feff9fff0660d662c885be78fb719/tools/cpplint.py#L1429-L1431
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py
python
_cf_data_from_bytes
(bytestring)
return CoreFoundation.CFDataCreate( CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring) )
Given a bytestring, create a CFData object from it. This CFData object must be CFReleased by the caller.
Given a bytestring, create a CFData object from it. This CFData object must be CFReleased by the caller.
[ "Given", "a", "bytestring", "create", "a", "CFData", "object", "from", "it", ".", "This", "CFData", "object", "must", "be", "CFReleased", "by", "the", "caller", "." ]
def _cf_data_from_bytes(bytestring): """ Given a bytestring, create a CFData object from it. This CFData object must be CFReleased by the caller. """ return CoreFoundation.CFDataCreate( CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring) )
[ "def", "_cf_data_from_bytes", "(", "bytestring", ")", ":", "return", "CoreFoundation", ".", "CFDataCreate", "(", "CoreFoundation", ".", "kCFAllocatorDefault", ",", "bytestring", ",", "len", "(", "bytestring", ")", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py#L27-L34
KratosMultiphysics/Kratos
0000833054ed0503424eb28205d6508d9ca6cbbc
applications/ChimeraApplication/python_scripts/chimera_setup_utils.py
python
SetChimeraInternalPartsFlag
(model, chimera_internal_parts)
This function sets the bool variable CHIMERA_INTERNAL_BOUNDARY to true on the specified modelparts so that they are excluded from the extract surface operation later on.
This function sets the bool variable CHIMERA_INTERNAL_BOUNDARY to true on the specified modelparts so that they are excluded from the extract surface operation later on.
[ "This", "function", "sets", "the", "bool", "variable", "CHIMERA_INTERNAL_BOUNDARY", "to", "true", "on", "the", "specified", "modelparts", "so", "that", "they", "are", "excluded", "from", "the", "extract", "surface", "operation", "later", "on", "." ]
def SetChimeraInternalPartsFlag(model, chimera_internal_parts): ''' This function sets the bool variable CHIMERA_INTERNAL_BOUNDARY to true on the specified modelparts so that they are excluded from the extract surface operation later on. ''' for mp_name in chimera_internal_parts: KratosMultiphysics.VariableUtils().SetNonHistoricalVariable(KratosChimera.CHIMERA_INTERNAL_BOUNDARY, True, model[mp_name].Nodes)
[ "def", "SetChimeraInternalPartsFlag", "(", "model", ",", "chimera_internal_parts", ")", ":", "for", "mp_name", "in", "chimera_internal_parts", ":", "KratosMultiphysics", ".", "VariableUtils", "(", ")", ".", "SetNonHistoricalVariable", "(", "KratosChimera", ".", "CHIMERA...
https://github.com/KratosMultiphysics/Kratos/blob/0000833054ed0503424eb28205d6508d9ca6cbbc/applications/ChimeraApplication/python_scripts/chimera_setup_utils.py#L97-L103
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
Framework/PythonInterface/plugins/algorithms/SaveYDA.py
python
SaveYDA.PyExec
(self)
Main execution body
Main execution body
[ "Main", "execution", "body" ]
def PyExec(self): """ Main execution body """ # Properties ws = self.getProperty("InputWorkspace").value filename = self.getProperty("Filename").value run = ws.getRun() ax = ws.getAxis(1) nHist = ws.getNumberHistograms() # check sample logs exists if len(run.getLogData()) == 0: raise NotImplementedError("No sample log data exist in workspace: " + self.getPropertyValue("InputWorkspace")) # save sample log data in lists, commented sequences an commented maps # commented sequences and maps are used to keep Data in the order they get inserted # if a log does not exist a warning is written on the log and the data is not saved in the file metadata = OrderedDict() metadata["format"] = "yaml/frida 2.0" metadata["type"] = "generic tabular data" hist = [] if run.hasProperty("proposal_number"): propn = "Proposal number " + run.getLogData("proposal_number").value hist.append(propn) else: self.log().warning("no proposal number found") if run.hasProperty("proposal_title"): propt = run.getLogData("proposal_title").value hist.append(propt) else: self.log().warning("no proposal title found") if run.hasProperty("experiment_team"): expt = run.getLogData("experiment_team").value hist.append(expt) else: self.log().warning("no experiment team found") hist.append("data reduced with mantid") rpar = [] if run.hasProperty("temperature"): temperature = float(run.getLogData("temperature").value) temp = OrderedDict() temp["name"] = "T" temp["unit"] = "K" temp["val"] = round(temperature, 14) temp["stdv"] = 0 rpar.append(temp) else: self.log().warning("no temperature found") if run.hasProperty("Ei"): eimeV = float(run.getLogData("Ei").value) ei = OrderedDict() ei["name"] = "Ei" ei["unit"] = "meV" ei["val"] = round(eimeV, 14) ei["stdv"] = 0 rpar.append(ei) else: self.log().warning("no Ei found") coord = OrderedDict() x = FlowOrderedDict() x["name"] = "w" x["unit"] = "meV" coord["x"] = x y = FlowOrderedDict() y["name"] = "S(q,w)" y["unit"] = "meV-1" coord["y"] = y z = FlowOrderedDict() if 
ax.isSpectra(): zname = "2th" zunit = "deg" else: zname = "q" zunit = "A-1" z["name"] = zname z["unit"] = zunit coord["z"] = FlowList() coord["z"].append(z) slices = [] bin = [] # if y axis is SpectrumAxis if ax.isSpectra: samplePos = ws.getInstrument().getSample().getPos() sourcePos = ws.getInstrument().getSource().getPos() beamPos = samplePos - sourcePos for i in range(nHist): detector = ws.getDetector(i) # convert radians to degrees twoTheta = detector.getTwoTheta(samplePos, beamPos)*180/math.pi twoTheta = round(twoTheta, 14) bin.append(twoTheta) elif ax.length() == nHist: # if y axis contains bin centers for i in range(ax.length()): xval = round(ax.getValue(), 14) bin.append(xval) else: # get the bin centers not the bin edges bin = self._get_bin_centers(ax) for i in range(nHist): slicethis = OrderedDict() # add j to slices, j = counts slicethis["j"] = i # save in list and commented Map to keep format val = FlowOrderedDict() val["val"] = bin[i] # z is bin centers of y axis, SpectrumAxis or NumericAxis in q units slicethis["z"] = FlowList() slicethis["z"].append(val) xax = ws.readX(i) # get the bin centers not the bin edges xcenters = self._get_bin_centers(xax) # x axis is NumericAxis in energy transfer units xx = [float(j) for j in xcenters] slicethis["x"] = FlowList(xx) ys = ws.dataY(i) # y is dataY of the workspace yy = [float(round(j, 14)) for j in ys] slicethis["y"] = FlowList(yy) slices.append(slicethis) data = OrderedDict() data["Meta"] = metadata data["History"] = hist data["Coord"] = coord data["RPar"] = rpar data["Slices"] = slices data["Slices"] = slices # create yaml file try: with open(filename, "w") as outfile: yaml.dump(data, outfile, default_flow_style=False, canonical=False, Dumper=MyDumper) outfile.close() except: raise RuntimeError("Can't write in File" + filename)
[ "def", "PyExec", "(", "self", ")", ":", "# Properties", "ws", "=", "self", ".", "getProperty", "(", "\"InputWorkspace\"", ")", ".", "value", "filename", "=", "self", ".", "getProperty", "(", "\"Filename\"", ")", ".", "value", "run", "=", "ws", ".", "getR...
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/Framework/PythonInterface/plugins/algorithms/SaveYDA.py#L74-L247
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_gdi.py
python
DC.DrawBitmap
(*args, **kwargs)
return _gdi_.DC_DrawBitmap(*args, **kwargs)
DrawBitmap(self, Bitmap bmp, int x, int y, bool useMask=False) Draw a bitmap on the device context at the specified point. If *transparent* is true and the bitmap has a transparency mask, (or alpha channel on the platforms that support it) then the bitmap will be drawn transparently.
DrawBitmap(self, Bitmap bmp, int x, int y, bool useMask=False)
[ "DrawBitmap", "(", "self", "Bitmap", "bmp", "int", "x", "int", "y", "bool", "useMask", "=", "False", ")" ]
def DrawBitmap(*args, **kwargs): """ DrawBitmap(self, Bitmap bmp, int x, int y, bool useMask=False) Draw a bitmap on the device context at the specified point. If *transparent* is true and the bitmap has a transparency mask, (or alpha channel on the platforms that support it) then the bitmap will be drawn transparently. """ return _gdi_.DC_DrawBitmap(*args, **kwargs)
[ "def", "DrawBitmap", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_gdi_", ".", "DC_DrawBitmap", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_gdi.py#L3697-L3706
larroy/clearskies_core
3574ddf0edc8555454c7044126e786a6c29444dc
tools/gyp/pylib/gyp/win_tool.py
python
WinTool.ExecRcWrapper
(self, arch, *args)
return popen.returncode
Filter logo banner from invocations of rc.exe. Older versions of RC don't support the /nologo flag.
Filter logo banner from invocations of rc.exe. Older versions of RC don't support the /nologo flag.
[ "Filter", "logo", "banner", "from", "invocations", "of", "rc", ".", "exe", ".", "Older", "versions", "of", "RC", "don", "t", "support", "the", "/", "nologo", "flag", "." ]
def ExecRcWrapper(self, arch, *args): """Filter logo banner from invocations of rc.exe. Older versions of RC don't support the /nologo flag.""" env = self._GetEnv(arch) popen = subprocess.Popen(args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out, _ = popen.communicate() for line in out.splitlines(): if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and not line.startswith('Copyright (C) Microsoft Corporation') and line): print line return popen.returncode
[ "def", "ExecRcWrapper", "(", "self", ",", "arch", ",", "*", "args", ")", ":", "env", "=", "self", ".", "_GetEnv", "(", "arch", ")", "popen", "=", "subprocess", ".", "Popen", "(", "args", ",", "shell", "=", "True", ",", "env", "=", "env", ",", "st...
https://github.com/larroy/clearskies_core/blob/3574ddf0edc8555454c7044126e786a6c29444dc/tools/gyp/pylib/gyp/win_tool.py#L264-L276
rdkit/rdkit
ede860ae316d12d8568daf5ee800921c3389c84e
rdkit/ML/Cluster/Clusters.py
python
Cluster.AddChild
(self, child)
Adds a child to our list **Arguments** - child: a Cluster
Adds a child to our list
[ "Adds", "a", "child", "to", "our", "list" ]
def AddChild(self, child): """Adds a child to our list **Arguments** - child: a Cluster """ self.children.append(child) self._GenPoints() self._UpdateLength()
[ "def", "AddChild", "(", "self", ",", "child", ")", ":", "self", ".", "children", ".", "append", "(", "child", ")", "self", ".", "_GenPoints", "(", ")", "self", ".", "_UpdateLength", "(", ")" ]
https://github.com/rdkit/rdkit/blob/ede860ae316d12d8568daf5ee800921c3389c84e/rdkit/ML/Cluster/Clusters.py#L150-L160