nwo stringlengths 5 86 | sha stringlengths 40 40 | path stringlengths 4 189 | language stringclasses 1 value | identifier stringlengths 1 94 | parameters stringlengths 2 4.03k | argument_list stringclasses 1 value | return_statement stringlengths 0 11.5k | docstring stringlengths 1 33.2k | docstring_summary stringlengths 0 5.15k | docstring_tokens list | function stringlengths 34 151k | function_tokens list | url stringlengths 90 278 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/idlelib/dynOptionMenuWidget.py | python | DynOptionMenu.SetMenu | (self,valueList,value=None) | clear and reload the menu with a new set of options.
valueList - list of new options
value - initial value to set the optionmenu's menubutton to | clear and reload the menu with a new set of options.
valueList - list of new options
value - initial value to set the optionmenu's menubutton to | [
"clear",
"and",
"reload",
"the",
"menu",
"with",
"a",
"new",
"set",
"of",
"options",
".",
"valueList",
"-",
"list",
"of",
"new",
"options",
"value",
"-",
"initial",
"value",
"to",
"set",
"the",
"optionmenu",
"s",
"menubutton",
"to"
] | def SetMenu(self,valueList,value=None):
"""
clear and reload the menu with a new set of options.
valueList - list of new options
value - initial value to set the optionmenu's menubutton to
"""
self['menu'].delete(0,'end')
for item in valueList:
self['menu'].add_command(label=item,
command=_setit(self.variable,item,self.command))
if value:
self.variable.set(value) | [
"def",
"SetMenu",
"(",
"self",
",",
"valueList",
",",
"value",
"=",
"None",
")",
":",
"self",
"[",
"'menu'",
"]",
".",
"delete",
"(",
"0",
",",
"'end'",
")",
"for",
"item",
"in",
"valueList",
":",
"self",
"[",
"'menu'",
"]",
".",
"add_command",
"("... | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/idlelib/dynOptionMenuWidget.py#L24-L35 | ||
plumonito/dtslam | 5994bb9cf7a11981b830370db206bceb654c085d | 3rdparty/opencv-git/3rdparty/jinja2/nodes.py | python | Node.set_ctx | (self, ctx) | return self | Reset the context of a node and all child nodes. Per default the
parser will all generate nodes that have a 'load' context as it's the
most common one. This method is used in the parser to set assignment
targets and other nodes to a store context. | Reset the context of a node and all child nodes. Per default the
parser will all generate nodes that have a 'load' context as it's the
most common one. This method is used in the parser to set assignment
targets and other nodes to a store context. | [
"Reset",
"the",
"context",
"of",
"a",
"node",
"and",
"all",
"child",
"nodes",
".",
"Per",
"default",
"the",
"parser",
"will",
"all",
"generate",
"nodes",
"that",
"have",
"a",
"load",
"context",
"as",
"it",
"s",
"the",
"most",
"common",
"one",
".",
"Thi... | def set_ctx(self, ctx):
"""Reset the context of a node and all child nodes. Per default the
parser will all generate nodes that have a 'load' context as it's the
most common one. This method is used in the parser to set assignment
targets and other nodes to a store context.
"""
todo = deque([self])
while todo:
node = todo.popleft()
if 'ctx' in node.fields:
node.ctx = ctx
todo.extend(node.iter_child_nodes())
return self | [
"def",
"set_ctx",
"(",
"self",
",",
"ctx",
")",
":",
"todo",
"=",
"deque",
"(",
"[",
"self",
"]",
")",
"while",
"todo",
":",
"node",
"=",
"todo",
".",
"popleft",
"(",
")",
"if",
"'ctx'",
"in",
"node",
".",
"fields",
":",
"node",
".",
"ctx",
"="... | https://github.com/plumonito/dtslam/blob/5994bb9cf7a11981b830370db206bceb654c085d/3rdparty/opencv-git/3rdparty/jinja2/nodes.py#L194-L206 | |
thalium/icebox | 99d147d5b9269222225443ce171b4fd46d8985d4 | third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2.py | python | xpathContext.xpathContextSetCache | (self, active, value, options) | return ret | Creates/frees an object cache on the XPath context. If
activates XPath objects (xmlXPathObject) will be cached
internally to be reused. @options: 0: This will set the
XPath object caching: @value: This will set the maximum
number of XPath objects to be cached per slot There are 5
slots for: node-set, string, number, boolean, and misc
objects. Use <0 for the default number (100). Other values
for @options have currently no effect. | Creates/frees an object cache on the XPath context. If
activates XPath objects (xmlXPathObject) will be cached
internally to be reused. | [
"Creates",
"/",
"frees",
"an",
"object",
"cache",
"on",
"the",
"XPath",
"context",
".",
"If",
"activates",
"XPath",
"objects",
"(",
"xmlXPathObject",
")",
"will",
"be",
"cached",
"internally",
"to",
"be",
"reused",
"."
] | def xpathContextSetCache(self, active, value, options):
"""Creates/frees an object cache on the XPath context. If
activates XPath objects (xmlXPathObject) will be cached
internally to be reused. @options: 0: This will set the
XPath object caching: @value: This will set the maximum
number of XPath objects to be cached per slot There are 5
slots for: node-set, string, number, boolean, and misc
objects. Use <0 for the default number (100). Other values
for @options have currently no effect. """
ret = libxml2mod.xmlXPathContextSetCache(self._o, active, value, options)
return ret | [
"def",
"xpathContextSetCache",
"(",
"self",
",",
"active",
",",
"value",
",",
"options",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlXPathContextSetCache",
"(",
"self",
".",
"_o",
",",
"active",
",",
"value",
",",
"options",
")",
"return",
"ret"
] | https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2.py#L7325-L7335 | |
panda3d/panda3d | 833ad89ebad58395d0af0b7ec08538e5e4308265 | direct/src/showbase/ShowBase.py | python | ShowBase.openMainWindow | (self, *args, **kw) | return success | Creates the initial, main window for the application, and sets
up the mouse and render2d structures appropriately for it. If
this method is called a second time, it will close the
previous main window and open a new one, preserving the lens
properties in base.camLens.
:returns: True on success, or False on failure (in which case base.win
may be either None, or the previous, closed window). | Creates the initial, main window for the application, and sets
up the mouse and render2d structures appropriately for it. If
this method is called a second time, it will close the
previous main window and open a new one, preserving the lens
properties in base.camLens. | [
"Creates",
"the",
"initial",
"main",
"window",
"for",
"the",
"application",
"and",
"sets",
"up",
"the",
"mouse",
"and",
"render2d",
"structures",
"appropriately",
"for",
"it",
".",
"If",
"this",
"method",
"is",
"called",
"a",
"second",
"time",
"it",
"will",
... | def openMainWindow(self, *args, **kw):
"""
Creates the initial, main window for the application, and sets
up the mouse and render2d structures appropriately for it. If
this method is called a second time, it will close the
previous main window and open a new one, preserving the lens
properties in base.camLens.
:returns: True on success, or False on failure (in which case base.win
may be either None, or the previous, closed window).
"""
keepCamera = kw.get('keepCamera', False)
success = 1
oldWin = self.win
oldLens = self.camLens
oldClearColorActive = None
if self.win is not None:
# Close the previous window.
oldClearColorActive = self.win.getClearColorActive()
oldClearColor = VBase4(self.win.getClearColor())
oldClearDepthActive = self.win.getClearDepthActive()
oldClearDepth = self.win.getClearDepth()
oldClearStencilActive = self.win.getClearStencilActive()
oldClearStencil = self.win.getClearStencil()
self.closeWindow(self.win, keepCamera = keepCamera)
# Open a new window.
self.openWindow(*args, **kw)
if self.win is None:
self.win = oldWin
self.winList.append(oldWin)
success = 0
if self.win is not None:
if isinstance(self.win, GraphicsWindow):
self.setupMouse(self.win)
self.makeCamera2d(self.win)
if self.wantRender2dp:
self.makeCamera2dp(self.win)
if oldLens is not None:
# Restore the previous lens properties.
self.camNode.setLens(oldLens)
self.camLens = oldLens
if oldClearColorActive is not None:
# Restore the previous clear properties.
self.win.setClearColorActive(oldClearColorActive)
self.win.setClearColor(oldClearColor)
self.win.setClearDepthActive(oldClearDepthActive)
self.win.setClearDepth(oldClearDepth)
self.win.setClearStencilActive(oldClearStencilActive)
self.win.setClearStencil(oldClearStencil)
flag = ConfigVariableBool('show-frame-rate-meter', False)
self.setFrameRateMeter(flag.value)
flag = ConfigVariableBool('show-scene-graph-analyzer-meter', False)
self.setSceneGraphAnalyzerMeter(flag.value)
return success | [
"def",
"openMainWindow",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"keepCamera",
"=",
"kw",
".",
"get",
"(",
"'keepCamera'",
",",
"False",
")",
"success",
"=",
"1",
"oldWin",
"=",
"self",
".",
"win",
"oldLens",
"=",
"self",
".",... | https://github.com/panda3d/panda3d/blob/833ad89ebad58395d0af0b7ec08538e5e4308265/direct/src/showbase/ShowBase.py#L1030-L1090 | |
lawy623/SVS | b7c7ae367c82a4797ff4a896a2ff304f02e7f724 | caffe/scripts/cpp_lint.py | python | _SetVerboseLevel | (level) | return _cpplint_state.SetVerboseLevel(level) | Sets the module's verbosity, and returns the previous setting. | Sets the module's verbosity, and returns the previous setting. | [
"Sets",
"the",
"module",
"s",
"verbosity",
"and",
"returns",
"the",
"previous",
"setting",
"."
] | def _SetVerboseLevel(level):
"""Sets the module's verbosity, and returns the previous setting."""
return _cpplint_state.SetVerboseLevel(level) | [
"def",
"_SetVerboseLevel",
"(",
"level",
")",
":",
"return",
"_cpplint_state",
".",
"SetVerboseLevel",
"(",
"level",
")"
] | https://github.com/lawy623/SVS/blob/b7c7ae367c82a4797ff4a896a2ff304f02e7f724/caffe/scripts/cpp_lint.py#L782-L784 | |
gnina/gnina | b9ae032f52fc7a8153987bde09c0efa3620d8bb6 | caffe/python/caffe/pycaffe.py | python | _Net_forward | (self, blobs=None, start=None, end=None, **kwargs) | return {out: self.blobs[out].data for out in outputs} | Forward pass: prepare inputs and run the net forward.
Parameters
----------
blobs : list of blobs to return in addition to output blobs.
kwargs : Keys are input blob names and values are blob ndarrays.
For formatting inputs for Caffe, see Net.preprocess().
If None, input is taken from data layers.
start : optional name of layer at which to begin the forward pass
end : optional name of layer at which to finish the forward pass
(inclusive)
Returns
-------
outs : {blob name: blob ndarray} dict. | Forward pass: prepare inputs and run the net forward. | [
"Forward",
"pass",
":",
"prepare",
"inputs",
"and",
"run",
"the",
"net",
"forward",
"."
] | def _Net_forward(self, blobs=None, start=None, end=None, **kwargs):
"""
Forward pass: prepare inputs and run the net forward.
Parameters
----------
blobs : list of blobs to return in addition to output blobs.
kwargs : Keys are input blob names and values are blob ndarrays.
For formatting inputs for Caffe, see Net.preprocess().
If None, input is taken from data layers.
start : optional name of layer at which to begin the forward pass
end : optional name of layer at which to finish the forward pass
(inclusive)
Returns
-------
outs : {blob name: blob ndarray} dict.
"""
if blobs is None:
blobs = []
if start is not None:
start_ind = list(self._layer_names).index(start)
else:
start_ind = 0
if end is not None:
end_ind = list(self._layer_names).index(end)
outputs = set(self.top_names[end] + blobs)
else:
end_ind = len(self.layers) - 1
outputs = set(self.outputs + blobs)
if kwargs:
if set(kwargs.keys()) != set(self.inputs):
raise Exception('Input blob arguments do not match net inputs.')
# Set input according to defined shapes and make arrays single and
# C-contiguous as Caffe expects.
for in_, blob in six.iteritems(kwargs):
if blob.shape[0] != self.blobs[in_].shape[0]:
raise Exception('Input is not batch sized')
self.blobs[in_].data[...] = blob
self._forward(start_ind, end_ind)
# Unpack blobs to extract
return {out: self.blobs[out].data for out in outputs} | [
"def",
"_Net_forward",
"(",
"self",
",",
"blobs",
"=",
"None",
",",
"start",
"=",
"None",
",",
"end",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"blobs",
"is",
"None",
":",
"blobs",
"=",
"[",
"]",
"if",
"start",
"is",
"not",
"None",
... | https://github.com/gnina/gnina/blob/b9ae032f52fc7a8153987bde09c0efa3620d8bb6/caffe/python/caffe/pycaffe.py#L88-L134 | |
hfinkel/llvm-project-cxxjit | 91084ef018240bbb8e24235ff5cd8c355a9c1a1e | clang/bindings/python/clang/cindex.py | python | TranslationUnit.from_ast_file | (cls, filename, index=None) | return cls(ptr=ptr, index=index) | Create a TranslationUnit instance from a saved AST file.
A previously-saved AST file (provided with -emit-ast or
TranslationUnit.save()) is loaded from the filename specified.
If the file cannot be loaded, a TranslationUnitLoadError will be
raised.
index is optional and is the Index instance to use. If not provided,
a default Index will be created.
filename can be str or PathLike. | Create a TranslationUnit instance from a saved AST file. | [
"Create",
"a",
"TranslationUnit",
"instance",
"from",
"a",
"saved",
"AST",
"file",
"."
] | def from_ast_file(cls, filename, index=None):
"""Create a TranslationUnit instance from a saved AST file.
A previously-saved AST file (provided with -emit-ast or
TranslationUnit.save()) is loaded from the filename specified.
If the file cannot be loaded, a TranslationUnitLoadError will be
raised.
index is optional and is the Index instance to use. If not provided,
a default Index will be created.
filename can be str or PathLike.
"""
if index is None:
index = Index.create()
ptr = conf.lib.clang_createTranslationUnit(index, fspath(filename))
if not ptr:
raise TranslationUnitLoadError(filename)
return cls(ptr=ptr, index=index) | [
"def",
"from_ast_file",
"(",
"cls",
",",
"filename",
",",
"index",
"=",
"None",
")",
":",
"if",
"index",
"is",
"None",
":",
"index",
"=",
"Index",
".",
"create",
"(",
")",
"ptr",
"=",
"conf",
".",
"lib",
".",
"clang_createTranslationUnit",
"(",
"index"... | https://github.com/hfinkel/llvm-project-cxxjit/blob/91084ef018240bbb8e24235ff5cd8c355a9c1a1e/clang/bindings/python/clang/cindex.py#L2835-L2856 | |
eldar/deepcut-cnn | 928bf2f224fce132f6e4404b4c95fb017297a5e0 | scripts/cpp_lint.py | python | FileInfo.BaseName | (self) | return self.Split()[1] | File base name - text after the final slash, before the final period. | File base name - text after the final slash, before the final period. | [
"File",
"base",
"name",
"-",
"text",
"after",
"the",
"final",
"slash",
"before",
"the",
"final",
"period",
"."
] | def BaseName(self):
"""File base name - text after the final slash, before the final period."""
return self.Split()[1] | [
"def",
"BaseName",
"(",
"self",
")",
":",
"return",
"self",
".",
"Split",
"(",
")",
"[",
"1",
"]"
] | https://github.com/eldar/deepcut-cnn/blob/928bf2f224fce132f6e4404b4c95fb017297a5e0/scripts/cpp_lint.py#L944-L946 | |
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/winpython/py3compat.py | python | get_meth_class | (obj) | Return method class | Return method class | [
"Return",
"method",
"class"
] | def get_meth_class(obj):
"""Return method class"""
if PY2:
# Python 2
return obj.im_class
else:
# Python 3
return obj.__self__.__class__ | [
"def",
"get_meth_class",
"(",
"obj",
")",
":",
"if",
"PY2",
":",
"# Python 2",
"return",
"obj",
".",
"im_class",
"else",
":",
"# Python 3",
"return",
"obj",
".",
"__self__",
".",
"__class__"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/winpython/py3compat.py#L196-L203 | ||
Tencent/TNN | 7acca99f54c55747b415a4c57677403eebc7b706 | third_party/flatbuffers/python/flatbuffers/builder.py | python | Builder.StartVector | (self, elemSize, numElems, alignment) | return self.Offset() | StartVector initializes bookkeeping for writing a new vector.
A vector has the following format:
- <UOffsetT: number of elements in this vector>
- <T: data>+, where T is the type of elements of this vector. | StartVector initializes bookkeeping for writing a new vector. | [
"StartVector",
"initializes",
"bookkeeping",
"for",
"writing",
"a",
"new",
"vector",
"."
] | def StartVector(self, elemSize, numElems, alignment):
"""
StartVector initializes bookkeeping for writing a new vector.
A vector has the following format:
- <UOffsetT: number of elements in this vector>
- <T: data>+, where T is the type of elements of this vector.
"""
self.assertNotNested()
self.nested = True
self.vectorNumElems = numElems
self.Prep(N.Uint32Flags.bytewidth, elemSize*numElems)
self.Prep(alignment, elemSize*numElems) # In case alignment > int.
return self.Offset() | [
"def",
"StartVector",
"(",
"self",
",",
"elemSize",
",",
"numElems",
",",
"alignment",
")",
":",
"self",
".",
"assertNotNested",
"(",
")",
"self",
".",
"nested",
"=",
"True",
"self",
".",
"vectorNumElems",
"=",
"numElems",
"self",
".",
"Prep",
"(",
"N",
... | https://github.com/Tencent/TNN/blob/7acca99f54c55747b415a4c57677403eebc7b706/third_party/flatbuffers/python/flatbuffers/builder.py#L363-L377 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | contrib/gizmos/osx_cocoa/gizmos.py | python | TreeListCtrl.Create | (*args, **kwargs) | return _gizmos.TreeListCtrl_Create(*args, **kwargs) | Create(self, Window parent, int id=-1, Point pos=DefaultPosition,
Size size=DefaultSize, long style=TR_DEFAULT_STYLE,
Validator validator=DefaultValidator,
String name=TreeListCtrlNameStr) -> bool
Do the 2nd phase and create the GUI control. | Create(self, Window parent, int id=-1, Point pos=DefaultPosition,
Size size=DefaultSize, long style=TR_DEFAULT_STYLE,
Validator validator=DefaultValidator,
String name=TreeListCtrlNameStr) -> bool | [
"Create",
"(",
"self",
"Window",
"parent",
"int",
"id",
"=",
"-",
"1",
"Point",
"pos",
"=",
"DefaultPosition",
"Size",
"size",
"=",
"DefaultSize",
"long",
"style",
"=",
"TR_DEFAULT_STYLE",
"Validator",
"validator",
"=",
"DefaultValidator",
"String",
"name",
"=... | def Create(*args, **kwargs):
"""
Create(self, Window parent, int id=-1, Point pos=DefaultPosition,
Size size=DefaultSize, long style=TR_DEFAULT_STYLE,
Validator validator=DefaultValidator,
String name=TreeListCtrlNameStr) -> bool
Do the 2nd phase and create the GUI control.
"""
return _gizmos.TreeListCtrl_Create(*args, **kwargs) | [
"def",
"Create",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_gizmos",
".",
"TreeListCtrl_Create",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/contrib/gizmos/osx_cocoa/gizmos.py#L484-L493 | |
wujian16/Cornell-MOE | df299d1be882d2af9796d7a68b3f9505cac7a53e | moe/optimal_learning/python/data_containers.py | python | HistoricalData.append_historical_data | (self, points_sampled, points_sampled_value, points_sampled_noise_variance, validate=False) | Append lists of points_sampled, their values, and their noise variances to the data members of this class.
This class (see class docstring) stores its data members as numpy arrays; this method provides a way for users
who already have data in this format to append directly instead of creating an intermediate :class:`moe.optimal_learning.python.SamplePoint` list.
:param points_sampled: already-sampled points
:type points_sampled: array of float64 with shape (num_sampled, dim)
:param points_sampled_value: function value measured at each point
:type points_sampled_value: array of float64 with shape (num_sampled)
:param points_sampled_noise_variance: noise variance associated with ``points_sampled_value``
:type points_sampled_noise_variance: array of float64 with shape (num_sampled)
:param validate: whether to sanity-check the input sample_points
:type validate: boolean | Append lists of points_sampled, their values, and their noise variances to the data members of this class. | [
"Append",
"lists",
"of",
"points_sampled",
"their",
"values",
"and",
"their",
"noise",
"variances",
"to",
"the",
"data",
"members",
"of",
"this",
"class",
"."
] | def append_historical_data(self, points_sampled, points_sampled_value, points_sampled_noise_variance, validate=False):
"""Append lists of points_sampled, their values, and their noise variances to the data members of this class.
This class (see class docstring) stores its data members as numpy arrays; this method provides a way for users
who already have data in this format to append directly instead of creating an intermediate :class:`moe.optimal_learning.python.SamplePoint` list.
:param points_sampled: already-sampled points
:type points_sampled: array of float64 with shape (num_sampled, dim)
:param points_sampled_value: function value measured at each point
:type points_sampled_value: array of float64 with shape (num_sampled)
:param points_sampled_noise_variance: noise variance associated with ``points_sampled_value``
:type points_sampled_noise_variance: array of float64 with shape (num_sampled)
:param validate: whether to sanity-check the input sample_points
:type validate: boolean
"""
if points_sampled.size == 0:
return
if validate:
self.validate_historical_data(self.dim, points_sampled, points_sampled_value, points_sampled_noise_variance)
self._points_sampled = numpy.append(self._points_sampled, points_sampled, axis=0)
self._points_sampled_value = numpy.append(self._points_sampled_value, points_sampled_value, axis=0)
self._points_sampled_noise_variance = numpy.append(self._points_sampled_noise_variance, points_sampled_noise_variance) | [
"def",
"append_historical_data",
"(",
"self",
",",
"points_sampled",
",",
"points_sampled_value",
",",
"points_sampled_noise_variance",
",",
"validate",
"=",
"False",
")",
":",
"if",
"points_sampled",
".",
"size",
"==",
"0",
":",
"return",
"if",
"validate",
":",
... | https://github.com/wujian16/Cornell-MOE/blob/df299d1be882d2af9796d7a68b3f9505cac7a53e/moe/optimal_learning/python/data_containers.py#L232-L256 | ||
qgis/QGIS | 15a77662d4bb712184f6aa60d0bd663010a76a75 | python/plugins/grassprovider/Grass7Algorithm.py | python | Grass7Algorithm.loadAttributeTable | (self, name, layer, destName=None) | Creates a dedicated command to load an attribute table
into the temporary GRASS DB.
:param name: name of the input parameter.
:param layer: a layer object to import from.
:param destName: force the name for the table into GRASS DB. | Creates a dedicated command to load an attribute table
into the temporary GRASS DB.
:param name: name of the input parameter.
:param layer: a layer object to import from.
:param destName: force the name for the table into GRASS DB. | [
"Creates",
"a",
"dedicated",
"command",
"to",
"load",
"an",
"attribute",
"table",
"into",
"the",
"temporary",
"GRASS",
"DB",
".",
":",
"param",
"name",
":",
"name",
"of",
"the",
"input",
"parameter",
".",
":",
"param",
"layer",
":",
"a",
"layer",
"object... | def loadAttributeTable(self, name, layer, destName=None):
"""
Creates a dedicated command to load an attribute table
into the temporary GRASS DB.
:param name: name of the input parameter.
:param layer: a layer object to import from.
:param destName: force the name for the table into GRASS DB.
"""
self.inputLayers.append(layer)
if not destName:
destName = 'table_{}'.format(os.path.basename(getTempFilename()))
self.exportedLayers[name] = destName
command = 'db.in.ogr --overwrite input="{0}" output="{1}"'.format(
os.path.normpath(layer.source()), destName)
self.commands.append(command) | [
"def",
"loadAttributeTable",
"(",
"self",
",",
"name",
",",
"layer",
",",
"destName",
"=",
"None",
")",
":",
"self",
".",
"inputLayers",
".",
"append",
"(",
"layer",
")",
"if",
"not",
"destName",
":",
"destName",
"=",
"'table_{}'",
".",
"format",
"(",
... | https://github.com/qgis/QGIS/blob/15a77662d4bb712184f6aa60d0bd663010a76a75/python/plugins/grassprovider/Grass7Algorithm.py#L992-L1006 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/propgrid.py | python | PGProperty.GetAttributesAsList | (*args, **kwargs) | return _propgrid.PGProperty_GetAttributesAsList(*args, **kwargs) | GetAttributesAsList(self) -> wxVariant | GetAttributesAsList(self) -> wxVariant | [
"GetAttributesAsList",
"(",
"self",
")",
"-",
">",
"wxVariant"
] | def GetAttributesAsList(*args, **kwargs):
"""GetAttributesAsList(self) -> wxVariant"""
return _propgrid.PGProperty_GetAttributesAsList(*args, **kwargs) | [
"def",
"GetAttributesAsList",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_propgrid",
".",
"PGProperty_GetAttributesAsList",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/propgrid.py#L548-L550 | |
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/contrib/metrics/python/ops/metric_ops.py | python | streaming_concat | (values,
axis=0,
max_size=None,
metrics_collections=None,
updates_collections=None,
name=None) | Concatenate values along an axis across batches.
The function `streaming_concat` creates two local variables, `array` and
`size`, that are used to store concatenated values. Internally, `array` is
used as storage for a dynamic array (if `maxsize` is `None`), which ensures
that updates can be run in amortized constant time.
For estimation of the metric over a stream of data, the function creates an
`update_op` operation that appends the values of a tensor and returns the
length of the concatenated axis.
This op allows for evaluating metrics that cannot be updated incrementally
using the same framework as other streaming metrics.
Args:
values: `Tensor` to concatenate. Rank and the shape along all axes other
than the axis to concatenate along must be statically known.
axis: optional integer axis to concatenate along.
max_size: optional integer maximum size of `value` along the given axis.
Once the maximum size is reached, further updates are no-ops. By default,
there is no maximum size: the array is resized as necessary.
metrics_collections: An optional list of collections that `value`
should be added to.
updates_collections: An optional list of collections `update_op` should be
added to.
name: An optional variable_scope name.
Returns:
value: A `Tensor` representing the concatenated values.
update_op: An operation that concatenates the next values.
Raises:
ValueError: if `values` does not have a statically known rank, `axis` is
not in the valid range or the size of `values` is not statically known
along any axis other than `axis`. | Concatenate values along an axis across batches. | [
"Concatenate",
"values",
"along",
"an",
"axis",
"across",
"batches",
"."
] | def streaming_concat(values,
axis=0,
max_size=None,
metrics_collections=None,
updates_collections=None,
name=None):
"""Concatenate values along an axis across batches.
The function `streaming_concat` creates two local variables, `array` and
`size`, that are used to store concatenated values. Internally, `array` is
used as storage for a dynamic array (if `maxsize` is `None`), which ensures
that updates can be run in amortized constant time.
For estimation of the metric over a stream of data, the function creates an
`update_op` operation that appends the values of a tensor and returns the
length of the concatenated axis.
This op allows for evaluating metrics that cannot be updated incrementally
using the same framework as other streaming metrics.
Args:
values: `Tensor` to concatenate. Rank and the shape along all axes other
than the axis to concatenate along must be statically known.
axis: optional integer axis to concatenate along.
max_size: optional integer maximum size of `value` along the given axis.
Once the maximum size is reached, further updates are no-ops. By default,
there is no maximum size: the array is resized as necessary.
metrics_collections: An optional list of collections that `value`
should be added to.
updates_collections: An optional list of collections `update_op` should be
added to.
name: An optional variable_scope name.
Returns:
value: A `Tensor` representing the concatenated values.
update_op: An operation that concatenates the next values.
Raises:
ValueError: if `values` does not have a statically known rank, `axis` is
not in the valid range or the size of `values` is not statically known
along any axis other than `axis`.
"""
with variable_scope.variable_scope(name, 'streaming_concat', (values,)):
# pylint: disable=invalid-slice-index
values_shape = values.get_shape()
if values_shape.dims is None:
raise ValueError('`values` must have known statically known rank')
ndim = len(values_shape)
if axis < 0:
axis += ndim
if not 0 <= axis < ndim:
raise ValueError('axis = %r not in [0, %r)' % (axis, ndim))
fixed_shape = [dim.value for n, dim in enumerate(values_shape)
if n != axis]
if any(value is None for value in fixed_shape):
raise ValueError('all dimensions of `values` other than the dimension to '
'concatenate along must have statically known size')
# We move `axis` to the front of the internal array so assign ops can be
# applied to contiguous slices
init_size = 0 if max_size is None else max_size
init_shape = [init_size] + fixed_shape
array = _create_local(
'array', shape=init_shape, validate_shape=False, dtype=values.dtype)
size = _create_local('size', shape=[], dtype=dtypes.int32)
perm = [0 if n == axis else n + 1 if n < axis else n for n in range(ndim)]
valid_array = array[:size]
valid_array.set_shape([None] + fixed_shape)
value = array_ops.transpose(valid_array, perm, name='concat')
values_size = array_ops.shape(values)[axis]
if max_size is None:
batch_size = values_size
else:
batch_size = math_ops.minimum(values_size, max_size - size)
perm = [axis] + [n for n in range(ndim) if n != axis]
batch_values = array_ops.transpose(values, perm)[:batch_size]
def reallocate():
next_size = _next_array_size(new_size)
next_shape = array_ops.stack([next_size] + fixed_shape)
new_value = array_ops.zeros(next_shape, dtype=values.dtype)
old_value = array.value()
assign_op = state_ops.assign(array, new_value, validate_shape=False)
with ops.control_dependencies([assign_op]):
copy_op = array[:size].assign(old_value[:size])
# return value needs to be the same dtype as no_op() for cond
with ops.control_dependencies([copy_op]):
return control_flow_ops.no_op()
new_size = size + batch_size
array_size = array_ops.shape_internal(array, optimize=False)[0]
maybe_reallocate_op = control_flow_ops.cond(
new_size > array_size, reallocate, control_flow_ops.no_op)
with ops.control_dependencies([maybe_reallocate_op]):
append_values_op = array[size:new_size].assign(batch_values)
with ops.control_dependencies([append_values_op]):
update_op = size.assign(new_size)
if metrics_collections:
ops.add_to_collections(metrics_collections, value)
if updates_collections:
ops.add_to_collections(updates_collections, update_op)
return value, update_op | [
"def",
"streaming_concat",
"(",
"values",
",",
"axis",
"=",
"0",
",",
"max_size",
"=",
"None",
",",
"metrics_collections",
"=",
"None",
",",
"updates_collections",
"=",
"None",
",",
"name",
"=",
"None",
")",
":",
"with",
"variable_scope",
".",
"variable_scop... | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/metrics/python/ops/metric_ops.py#L2265-L2374 | ||
thalium/icebox | 99d147d5b9269222225443ce171b4fd46d8985d4 | third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2.py | python | URI.user | (self) | return ret | Get the user part from an URI | Get the user part from an URI | [
"Get",
"the",
"user",
"part",
"from",
"an",
"URI"
] | def user(self):
"""Get the user part from an URI """
ret = libxml2mod.xmlURIGetUser(self._o)
return ret | [
"def",
"user",
"(",
"self",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlURIGetUser",
"(",
"self",
".",
"_o",
")",
"return",
"ret"
] | https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2.py#L7055-L7058 | |
bigartm/bigartm | 47e37f982de87aa67bfd475ff1f39da696b181b3 | utils/cpplint.py | python | _CppLintState.SetFilters | (self, filters) | Sets the error-message filters.
These filters are applied when deciding whether to emit a given
error message.
Args:
filters: A string of comma-separated filters (eg "+whitespace/indent").
Each filter should start with + or -; else we die.
Raises:
ValueError: The comma-separated filters did not all start with '+' or '-'.
E.g. "-,+whitespace,-whitespace/indent,whitespace/badfilter" | Sets the error-message filters. | [
"Sets",
"the",
"error",
"-",
"message",
"filters",
"."
] | def SetFilters(self, filters):
"""Sets the error-message filters.
These filters are applied when deciding whether to emit a given
error message.
Args:
filters: A string of comma-separated filters (eg "+whitespace/indent").
Each filter should start with + or -; else we die.
Raises:
ValueError: The comma-separated filters did not all start with '+' or '-'.
E.g. "-,+whitespace,-whitespace/indent,whitespace/badfilter"
"""
# Default filters always have less priority than the flag ones.
self.filters = _DEFAULT_FILTERS[:]
self.AddFilters(filters) | [
"def",
"SetFilters",
"(",
"self",
",",
"filters",
")",
":",
"# Default filters always have less priority than the flag ones.",
"self",
".",
"filters",
"=",
"_DEFAULT_FILTERS",
"[",
":",
"]",
"self",
".",
"AddFilters",
"(",
"filters",
")"
] | https://github.com/bigartm/bigartm/blob/47e37f982de87aa67bfd475ff1f39da696b181b3/utils/cpplint.py#L789-L805 | ||
cvxpy/cvxpy | 5165b4fb750dfd237de8659383ef24b4b2e33aaf | cvxpy/cvxcore/python/canonInterface.py | python | set_matrix_data | (linC, linPy) | Calls the appropriate cvxcore function to set the matrix data field of
our C++ linOp. | Calls the appropriate cvxcore function to set the matrix data field of
our C++ linOp. | [
"Calls",
"the",
"appropriate",
"cvxcore",
"function",
"to",
"set",
"the",
"matrix",
"data",
"field",
"of",
"our",
"C",
"++",
"linOp",
"."
] | def set_matrix_data(linC, linPy) -> None:
"""Calls the appropriate cvxcore function to set the matrix data field of
our C++ linOp.
"""
if get_type(linPy) == cvxcore.SPARSE_CONST:
coo = format_matrix(linPy.data, format='sparse')
linC.set_sparse_data(coo.data, coo.row.astype(float),
coo.col.astype(float), coo.shape[0],
coo.shape[1])
else:
linC.set_dense_data(format_matrix(linPy.data, shape=linPy.shape))
linC.set_data_ndim(len(linPy.data.shape)) | [
"def",
"set_matrix_data",
"(",
"linC",
",",
"linPy",
")",
"->",
"None",
":",
"if",
"get_type",
"(",
"linPy",
")",
"==",
"cvxcore",
".",
"SPARSE_CONST",
":",
"coo",
"=",
"format_matrix",
"(",
"linPy",
".",
"data",
",",
"format",
"=",
"'sparse'",
")",
"l... | https://github.com/cvxpy/cvxpy/blob/5165b4fb750dfd237de8659383ef24b4b2e33aaf/cvxpy/cvxcore/python/canonInterface.py#L416-L427 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Lib/idlelib/run.py | python | MyHandler.exithook | (self) | override SocketIO method - wait for MainThread to shut us down | override SocketIO method - wait for MainThread to shut us down | [
"override",
"SocketIO",
"method",
"-",
"wait",
"for",
"MainThread",
"to",
"shut",
"us",
"down"
] | def exithook(self):
"override SocketIO method - wait for MainThread to shut us down"
time.sleep(10) | [
"def",
"exithook",
"(",
"self",
")",
":",
"time",
".",
"sleep",
"(",
"10",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/idlelib/run.py#L308-L310 | ||
MegEngine/MegEngine | ce9ad07a27ec909fb8db4dd67943d24ba98fb93a | imperative/python/megengine/traced_module/traced_module.py | python | TracedModule.graph | (self) | return list(self.argdef_graph_map.values())[0] | Return the ``InternalGraph`` of this ``TracedModule``. | Return the ``InternalGraph`` of this ``TracedModule``. | [
"Return",
"the",
"InternalGraph",
"of",
"this",
"TracedModule",
"."
] | def graph(self) -> InternalGraph:
"""Return the ``InternalGraph`` of this ``TracedModule``.
"""
assert len(self.argdef_graph_map) == 1
return list(self.argdef_graph_map.values())[0] | [
"def",
"graph",
"(",
"self",
")",
"->",
"InternalGraph",
":",
"assert",
"len",
"(",
"self",
".",
"argdef_graph_map",
")",
"==",
"1",
"return",
"list",
"(",
"self",
".",
"argdef_graph_map",
".",
"values",
"(",
")",
")",
"[",
"0",
"]"
] | https://github.com/MegEngine/MegEngine/blob/ce9ad07a27ec909fb8db4dd67943d24ba98fb93a/imperative/python/megengine/traced_module/traced_module.py#L2109-L2113 | |
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/Tkinter.py | python | Canvas.move | (self, *args) | Move an item TAGORID given in ARGS. | Move an item TAGORID given in ARGS. | [
"Move",
"an",
"item",
"TAGORID",
"given",
"in",
"ARGS",
"."
] | def move(self, *args):
"""Move an item TAGORID given in ARGS."""
self.tk.call((self._w, 'move') + args) | [
"def",
"move",
"(",
"self",
",",
"*",
"args",
")",
":",
"self",
".",
"tk",
".",
"call",
"(",
"(",
"self",
".",
"_w",
",",
"'move'",
")",
"+",
"args",
")"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/Tkinter.py#L2360-L2362 | ||
google/skia | 82d65d0487bd72f5f7332d002429ec2dc61d2463 | infra/bots/git_utils.py | python | NewGitCheckout.root | (self) | return self.name | Returns the root directory containing the checked-out files. | Returns the root directory containing the checked-out files. | [
"Returns",
"the",
"root",
"directory",
"containing",
"the",
"checked",
"-",
"out",
"files",
"."
] | def root(self):
"""Returns the root directory containing the checked-out files."""
return self.name | [
"def",
"root",
"(",
"self",
")",
":",
"return",
"self",
".",
"name"
] | https://github.com/google/skia/blob/82d65d0487bd72f5f7332d002429ec2dc61d2463/infra/bots/git_utils.py#L138-L140 | |
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | deps/src/libxml2-2.9.1/python/libxml2class.py | python | recoverMemory | (buffer, size) | return xmlDoc(_obj=ret) | parse an XML in-memory block and build a tree. In the case
the document is not Well Formed, an attempt to build a tree
is tried anyway | parse an XML in-memory block and build a tree. In the case
the document is not Well Formed, an attempt to build a tree
is tried anyway | [
"parse",
"an",
"XML",
"in",
"-",
"memory",
"block",
"and",
"build",
"a",
"tree",
".",
"In",
"the",
"case",
"the",
"document",
"is",
"not",
"Well",
"Formed",
"an",
"attempt",
"to",
"build",
"a",
"tree",
"is",
"tried",
"anyway"
] | def recoverMemory(buffer, size):
"""parse an XML in-memory block and build a tree. In the case
the document is not Well Formed, an attempt to build a tree
is tried anyway """
ret = libxml2mod.xmlRecoverMemory(buffer, size)
if ret is None:raise treeError('xmlRecoverMemory() failed')
return xmlDoc(_obj=ret) | [
"def",
"recoverMemory",
"(",
"buffer",
",",
"size",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlRecoverMemory",
"(",
"buffer",
",",
"size",
")",
"if",
"ret",
"is",
"None",
":",
"raise",
"treeError",
"(",
"'xmlRecoverMemory() failed'",
")",
"return",
"xmlDoc... | https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2class.py#L613-L619 | |
Sigil-Ebook/Sigil | 0d145d3a4874b4a26f7aabd68dbd9d18a2402e52 | src/Resource_Files/plugin_launchers/python/sigil_bs4/element.py | python | PageElement.find_all_next | (self, name=None, attrs=OrderedDict(), text=None, limit=None,
**kwargs) | return self._find_all(name, attrs, text, limit, self.next_elements,
**kwargs) | Returns all items that match the given criteria and appear
after this Tag in the document. | Returns all items that match the given criteria and appear
after this Tag in the document. | [
"Returns",
"all",
"items",
"that",
"match",
"the",
"given",
"criteria",
"and",
"appear",
"after",
"this",
"Tag",
"in",
"the",
"document",
"."
] | def find_all_next(self, name=None, attrs=OrderedDict(), text=None, limit=None,
**kwargs):
"""Returns all items that match the given criteria and appear
after this Tag in the document."""
return self._find_all(name, attrs, text, limit, self.next_elements,
**kwargs) | [
"def",
"find_all_next",
"(",
"self",
",",
"name",
"=",
"None",
",",
"attrs",
"=",
"OrderedDict",
"(",
")",
",",
"text",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"_find_all",
"(",
"name",
",",... | https://github.com/Sigil-Ebook/Sigil/blob/0d145d3a4874b4a26f7aabd68dbd9d18a2402e52/src/Resource_Files/plugin_launchers/python/sigil_bs4/element.py#L441-L446 | |
krishauser/Klampt | 972cc83ea5befac3f653c1ba20f80155768ad519 | Python/klampt/src/robotsim.py | python | IKObjective.copy | (self) | return _robotsim.IKObjective_copy(self) | r"""
copy(IKObjective self) -> IKObjective
Copy constructor. | r"""
copy(IKObjective self) -> IKObjective | [
"r",
"copy",
"(",
"IKObjective",
"self",
")",
"-",
">",
"IKObjective"
] | def copy(self) -> "IKObjective":
r"""
copy(IKObjective self) -> IKObjective
Copy constructor.
"""
return _robotsim.IKObjective_copy(self) | [
"def",
"copy",
"(",
"self",
")",
"->",
"\"IKObjective\"",
":",
"return",
"_robotsim",
".",
"IKObjective_copy",
"(",
"self",
")"
] | https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/klampt/src/robotsim.py#L6286-L6294 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/setuptools/config.py | python | ConfigHandler._exclude_files_parser | (cls, key) | return parser | Returns a parser function to make sure field inputs
are not files.
Parses a value after getting the key so error messages are
more informative.
:param key:
:rtype: callable | Returns a parser function to make sure field inputs
are not files. | [
"Returns",
"a",
"parser",
"function",
"to",
"make",
"sure",
"field",
"inputs",
"are",
"not",
"files",
"."
] | def _exclude_files_parser(cls, key):
"""Returns a parser function to make sure field inputs
are not files.
Parses a value after getting the key so error messages are
more informative.
:param key:
:rtype: callable
"""
def parser(value):
exclude_directive = 'file:'
if value.startswith(exclude_directive):
raise ValueError(
'Only strings are accepted for the {0} field, '
'files are not accepted'.format(key))
return value
return parser | [
"def",
"_exclude_files_parser",
"(",
"cls",
",",
"key",
")",
":",
"def",
"parser",
"(",
"value",
")",
":",
"exclude_directive",
"=",
"'file:'",
"if",
"value",
".",
"startswith",
"(",
"exclude_directive",
")",
":",
"raise",
"ValueError",
"(",
"'Only strings are... | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/setuptools/config.py#L291-L308 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/_core.py | python | GridBagSizer.SetItemSpan | (*args) | return _core_.GridBagSizer_SetItemSpan(*args) | SetItemSpan(self, item, GBSpan span) -> bool
Set the row/col spanning of the specified *item* where *item* is
either a window or subsizer that is a member of this sizer, or a
zero-based index of an item. Returns True on success. If the move is
not allowed (because an item is already there) then False is returned. | SetItemSpan(self, item, GBSpan span) -> bool | [
"SetItemSpan",
"(",
"self",
"item",
"GBSpan",
"span",
")",
"-",
">",
"bool"
] | def SetItemSpan(*args):
"""
SetItemSpan(self, item, GBSpan span) -> bool
Set the row/col spanning of the specified *item* where *item* is
either a window or subsizer that is a member of this sizer, or a
zero-based index of an item. Returns True on success. If the move is
not allowed (because an item is already there) then False is returned.
"""
return _core_.GridBagSizer_SetItemSpan(*args) | [
"def",
"SetItemSpan",
"(",
"*",
"args",
")",
":",
"return",
"_core_",
".",
"GridBagSizer_SetItemSpan",
"(",
"*",
"args",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_core.py#L16008-L16017 | |
okex/V3-Open-API-SDK | c5abb0db7e2287718e0055e17e57672ce0ec7fd9 | okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/requests/utils.py | python | extract_zipped_paths | (path) | return extracted_path | Replace nonexistent paths that look like they refer to a member of a zip
archive with the location of an extracted copy of the target, or else
just return the provided path unchanged. | Replace nonexistent paths that look like they refer to a member of a zip
archive with the location of an extracted copy of the target, or else
just return the provided path unchanged. | [
"Replace",
"nonexistent",
"paths",
"that",
"look",
"like",
"they",
"refer",
"to",
"a",
"member",
"of",
"a",
"zip",
"archive",
"with",
"the",
"location",
"of",
"an",
"extracted",
"copy",
"of",
"the",
"target",
"or",
"else",
"just",
"return",
"the",
"provide... | def extract_zipped_paths(path):
"""Replace nonexistent paths that look like they refer to a member of a zip
archive with the location of an extracted copy of the target, or else
just return the provided path unchanged.
"""
if os.path.exists(path):
# this is already a valid path, no need to do anything further
return path
# find the first valid part of the provided path and treat that as a zip archive
# assume the rest of the path is the name of a member in the archive
archive, member = os.path.split(path)
while archive and not os.path.exists(archive):
archive, prefix = os.path.split(archive)
member = '/'.join([prefix, member])
if not zipfile.is_zipfile(archive):
return path
zip_file = zipfile.ZipFile(archive)
if member not in zip_file.namelist():
return path
# we have a valid zip archive and a valid member of that archive
tmp = tempfile.gettempdir()
extracted_path = os.path.join(tmp, *member.split('/'))
if not os.path.exists(extracted_path):
extracted_path = zip_file.extract(member, path=tmp)
return extracted_path | [
"def",
"extract_zipped_paths",
"(",
"path",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"# this is already a valid path, no need to do anything further",
"return",
"path",
"# find the first valid part of the provided path and treat that as a zip arc... | https://github.com/okex/V3-Open-API-SDK/blob/c5abb0db7e2287718e0055e17e57672ce0ec7fd9/okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/requests/utils.py#L227-L256 | |
CRYTEK/CRYENGINE | 232227c59a220cbbd311576f0fbeba7bb53b2a8c | Editor/Python/windows/Lib/site-packages/pip/_vendor/requests/utils.py | python | unquote_unreserved | (uri) | return ''.join(parts) | Un-escape any percent-escape sequences in a URI that are unreserved
characters. This leaves all reserved, illegal and non-ASCII bytes encoded. | Un-escape any percent-escape sequences in a URI that are unreserved
characters. This leaves all reserved, illegal and non-ASCII bytes encoded. | [
"Un",
"-",
"escape",
"any",
"percent",
"-",
"escape",
"sequences",
"in",
"a",
"URI",
"that",
"are",
"unreserved",
"characters",
".",
"This",
"leaves",
"all",
"reserved",
"illegal",
"and",
"non",
"-",
"ASCII",
"bytes",
"encoded",
"."
] | def unquote_unreserved(uri):
"""Un-escape any percent-escape sequences in a URI that are unreserved
characters. This leaves all reserved, illegal and non-ASCII bytes encoded.
"""
parts = uri.split('%')
for i in range(1, len(parts)):
h = parts[i][0:2]
if len(h) == 2 and h.isalnum():
try:
c = chr(int(h, 16))
except ValueError:
raise InvalidURL("Invalid percent-escape sequence: '%s'" % h)
if c in UNRESERVED_SET:
parts[i] = c + parts[i][2:]
else:
parts[i] = '%' + parts[i]
else:
parts[i] = '%' + parts[i]
return ''.join(parts) | [
"def",
"unquote_unreserved",
"(",
"uri",
")",
":",
"parts",
"=",
"uri",
".",
"split",
"(",
"'%'",
")",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"len",
"(",
"parts",
")",
")",
":",
"h",
"=",
"parts",
"[",
"i",
"]",
"[",
"0",
":",
"2",
"]",
... | https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Editor/Python/windows/Lib/site-packages/pip/_vendor/requests/utils.py#L395-L414 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python3/src/Lib/cgitb.py | python | reset | () | return '''<!--: spam
Content-Type: text/html
<body bgcolor="#f0f0f8"><font color="#f0f0f8" size="-5"> -->
<body bgcolor="#f0f0f8"><font color="#f0f0f8" size="-5"> --> -->
</font> </font> </font> </script> </object> </blockquote> </pre>
</table> </table> </table> </table> </table> </font> </font> </font>''' | Return a string that resets the CGI and browser to a known state. | Return a string that resets the CGI and browser to a known state. | [
"Return",
"a",
"string",
"that",
"resets",
"the",
"CGI",
"and",
"browser",
"to",
"a",
"known",
"state",
"."
] | def reset():
"""Return a string that resets the CGI and browser to a known state."""
return '''<!--: spam
Content-Type: text/html
<body bgcolor="#f0f0f8"><font color="#f0f0f8" size="-5"> -->
<body bgcolor="#f0f0f8"><font color="#f0f0f8" size="-5"> --> -->
</font> </font> </font> </script> </object> </blockquote> </pre>
</table> </table> </table> </table> </table> </font> </font> </font>''' | [
"def",
"reset",
"(",
")",
":",
"return",
"'''<!--: spam\nContent-Type: text/html\n\n<body bgcolor=\"#f0f0f8\"><font color=\"#f0f0f8\" size=\"-5\"> -->\n<body bgcolor=\"#f0f0f8\"><font color=\"#f0f0f8\" size=\"-5\"> --> -->\n</font> </font> </font> </script> </object> </blockquote> </pre>\n</table> </tab... | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/cgitb.py#L35-L43 | |
xhzdeng/crpn | a5aef0f80dbe486103123f740c634fb01e6cc9a1 | caffe-fast-rcnn/python/caffe/io.py | python | Transformer.deprocess | (self, in_, data) | return decaf_in | Invert Caffe formatting; see preprocess(). | Invert Caffe formatting; see preprocess(). | [
"Invert",
"Caffe",
"formatting",
";",
"see",
"preprocess",
"()",
"."
] | def deprocess(self, in_, data):
"""
Invert Caffe formatting; see preprocess().
"""
self.__check_input(in_)
decaf_in = data.copy().squeeze()
transpose = self.transpose.get(in_)
channel_swap = self.channel_swap.get(in_)
raw_scale = self.raw_scale.get(in_)
mean = self.mean.get(in_)
input_scale = self.input_scale.get(in_)
if input_scale is not None:
decaf_in /= input_scale
if mean is not None:
decaf_in += mean
if raw_scale is not None:
decaf_in /= raw_scale
if channel_swap is not None:
decaf_in = decaf_in[np.argsort(channel_swap), :, :]
if transpose is not None:
decaf_in = decaf_in.transpose(np.argsort(transpose))
return decaf_in | [
"def",
"deprocess",
"(",
"self",
",",
"in_",
",",
"data",
")",
":",
"self",
".",
"__check_input",
"(",
"in_",
")",
"decaf_in",
"=",
"data",
".",
"copy",
"(",
")",
".",
"squeeze",
"(",
")",
"transpose",
"=",
"self",
".",
"transpose",
".",
"get",
"("... | https://github.com/xhzdeng/crpn/blob/a5aef0f80dbe486103123f740c634fb01e6cc9a1/caffe-fast-rcnn/python/caffe/io.py#L164-L185 | |
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/ops/special_math_ops.py | python | bessel_i1e | (x, name=None) | Computes the Bessel i1e function of `x` element-wise.
Modified Bessel function of order 1.
>>> tf.math.special.bessel_i1e([-1., -0.5, 0.5, 1.]).numpy()
array([-0.20791042, -0.15642083, 0.15642083, 0.20791042], dtype=float32)
Args:
x: A `Tensor` or `SparseTensor`. Must be one of the following types: `half`,
`float32`, `float64`.
name: A name for the operation (optional).
Returns:
A `Tensor` or `SparseTensor`, respectively. Has the same type as `x`.
@compatibility(scipy)
Equivalent to scipy.special.i1e
@end_compatibility | Computes the Bessel i1e function of `x` element-wise. | [
"Computes",
"the",
"Bessel",
"i1e",
"function",
"of",
"x",
"element",
"-",
"wise",
"."
] | def bessel_i1e(x, name=None):
"""Computes the Bessel i1e function of `x` element-wise.
Modified Bessel function of order 1.
>>> tf.math.special.bessel_i1e([-1., -0.5, 0.5, 1.]).numpy()
array([-0.20791042, -0.15642083, 0.15642083, 0.20791042], dtype=float32)
Args:
x: A `Tensor` or `SparseTensor`. Must be one of the following types: `half`,
`float32`, `float64`.
name: A name for the operation (optional).
Returns:
A `Tensor` or `SparseTensor`, respectively. Has the same type as `x`.
@compatibility(scipy)
Equivalent to scipy.special.i1e
@end_compatibility
"""
with ops.name_scope(name, 'bessel_i1e', [x]):
return gen_special_math_ops.bessel_i1e(x) | [
"def",
"bessel_i1e",
"(",
"x",
",",
"name",
"=",
"None",
")",
":",
"with",
"ops",
".",
"name_scope",
"(",
"name",
",",
"'bessel_i1e'",
",",
"[",
"x",
"]",
")",
":",
"return",
"gen_special_math_ops",
".",
"bessel_i1e",
"(",
"x",
")"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/special_math_ops.py#L340-L361 | ||
panda3d/panda3d | 833ad89ebad58395d0af0b7ec08538e5e4308265 | contrib/src/sceneeditor/seTree.py | python | TreeItem.OnSelect | (self) | Called when item selected. | Called when item selected. | [
"Called",
"when",
"item",
"selected",
"."
] | def OnSelect(self):
"""Called when item selected.""" | [
"def",
"OnSelect",
"(",
"self",
")",
":"
] | https://github.com/panda3d/panda3d/blob/833ad89ebad58395d0af0b7ec08538e5e4308265/contrib/src/sceneeditor/seTree.py#L413-L414 | ||
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | Framework/PythonInterface/plugins/algorithms/PelicanReduction.py | python | PelicanReduction._get_minimum_tof | (self) | return min_tof * 1e6 | Converts the maximum energy transfer to neutron to an equivalent
minimum tof. The distance from the sample to the detector is 2.4m (fixed) and
source to sample is 0.695m. The result is the minimum tof from source to detector
and the result is returned in microseconds. | Converts the maximum energy transfer to neutron to an equivalent
minimum tof. The distance from the sample to the detector is 2.4m (fixed) and
source to sample is 0.695m. The result is the minimum tof from source to detector
and the result is returned in microseconds. | [
"Converts",
"the",
"maximum",
"energy",
"transfer",
"to",
"neutron",
"to",
"an",
"equivalent",
"minimum",
"tof",
".",
"The",
"distance",
"from",
"the",
"sample",
"to",
"the",
"detector",
"is",
"2",
".",
"4m",
"(",
"fixed",
")",
"and",
"source",
"to",
"sa... | def _get_minimum_tof(self):
'''
Converts the maximum energy transfer to neutron to an equivalent
minimum tof. The distance from the sample to the detector is 2.4m (fixed) and
source to sample is 0.695m. The result is the minimum tof from source to detector
and the result is returned in microseconds.
'''
nom_velocity = 437.4 * math.sqrt(self._efixed)
max_meV = self._efixed + self._max_energy_gain
max_velocity = 437.4 * math.sqrt(max_meV)
min_tof = 0.695 / nom_velocity + 2.4 / max_velocity
return min_tof * 1e6 | [
"def",
"_get_minimum_tof",
"(",
"self",
")",
":",
"nom_velocity",
"=",
"437.4",
"*",
"math",
".",
"sqrt",
"(",
"self",
".",
"_efixed",
")",
"max_meV",
"=",
"self",
".",
"_efixed",
"+",
"self",
".",
"_max_energy_gain",
"max_velocity",
"=",
"437.4",
"*",
"... | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/Framework/PythonInterface/plugins/algorithms/PelicanReduction.py#L679-L690 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scikit-learn/py2/sklearn/ensemble/gradient_boosting.py | python | BaseGradientBoosting._clear_state | (self) | Clear the state of the gradient boosting model. | Clear the state of the gradient boosting model. | [
"Clear",
"the",
"state",
"of",
"the",
"gradient",
"boosting",
"model",
"."
] | def _clear_state(self):
"""Clear the state of the gradient boosting model. """
if hasattr(self, 'estimators_'):
self.estimators_ = np.empty((0, 0), dtype=np.object)
if hasattr(self, 'train_score_'):
del self.train_score_
if hasattr(self, 'oob_improvement_'):
del self.oob_improvement_
if hasattr(self, 'init_'):
del self.init_ | [
"def",
"_clear_state",
"(",
"self",
")",
":",
"if",
"hasattr",
"(",
"self",
",",
"'estimators_'",
")",
":",
"self",
".",
"estimators_",
"=",
"np",
".",
"empty",
"(",
"(",
"0",
",",
"0",
")",
",",
"dtype",
"=",
"np",
".",
"object",
")",
"if",
"has... | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py2/sklearn/ensemble/gradient_boosting.py#L895-L904 | ||
miyosuda/TensorFlowAndroidDemo | 35903e0221aa5f109ea2dbef27f20b52e317f42d | jni-build/jni/include/tensorflow/python/framework/dtypes.py | python | DType.is_compatible_with | (self, other) | return self._type_enum in (
other.as_datatype_enum, other.base_dtype.as_datatype_enum) | Returns True if the `other` DType will be converted to this DType.
The conversion rules are as follows:
```
DType(T) .is_compatible_with(DType(T)) == True
DType(T) .is_compatible_with(DType(T).as_ref) == True
DType(T).as_ref.is_compatible_with(DType(T)) == False
DType(T).as_ref.is_compatible_with(DType(T).as_ref) == True
```
Args:
other: A `DType` (or object that may be converted to a `DType`).
Returns:
True if a Tensor of the `other` `DType` will be implicitly converted to
this `DType`. | Returns True if the `other` DType will be converted to this DType. | [
"Returns",
"True",
"if",
"the",
"other",
"DType",
"will",
"be",
"converted",
"to",
"this",
"DType",
"."
] | def is_compatible_with(self, other):
"""Returns True if the `other` DType will be converted to this DType.
The conversion rules are as follows:
```
DType(T) .is_compatible_with(DType(T)) == True
DType(T) .is_compatible_with(DType(T).as_ref) == True
DType(T).as_ref.is_compatible_with(DType(T)) == False
DType(T).as_ref.is_compatible_with(DType(T).as_ref) == True
```
Args:
other: A `DType` (or object that may be converted to a `DType`).
Returns:
True if a Tensor of the `other` `DType` will be implicitly converted to
this `DType`.
"""
other = as_dtype(other)
return self._type_enum in (
other.as_datatype_enum, other.base_dtype.as_datatype_enum) | [
"def",
"is_compatible_with",
"(",
"self",
",",
"other",
")",
":",
"other",
"=",
"as_dtype",
"(",
"other",
")",
"return",
"self",
".",
"_type_enum",
"in",
"(",
"other",
".",
"as_datatype_enum",
",",
"other",
".",
"base_dtype",
".",
"as_datatype_enum",
")"
] | https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/python/framework/dtypes.py#L218-L239 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/traceback.py | python | extract_tb | (tb, limit=None) | return StackSummary.extract(walk_tb(tb), limit=limit) | Return a StackSummary object representing a list of
pre-processed entries from traceback.
This is useful for alternate formatting of stack traces. If
'limit' is omitted or None, all entries are extracted. A
pre-processed stack trace entry is a FrameSummary object
containing attributes filename, lineno, name, and line
representing the information that is usually printed for a stack
trace. The line is a string with leading and trailing
whitespace stripped; if the source is not available it is None. | Return a StackSummary object representing a list of
pre-processed entries from traceback. | [
"Return",
"a",
"StackSummary",
"object",
"representing",
"a",
"list",
"of",
"pre",
"-",
"processed",
"entries",
"from",
"traceback",
"."
] | def extract_tb(tb, limit=None):
"""
Return a StackSummary object representing a list of
pre-processed entries from traceback.
This is useful for alternate formatting of stack traces. If
'limit' is omitted or None, all entries are extracted. A
pre-processed stack trace entry is a FrameSummary object
containing attributes filename, lineno, name, and line
representing the information that is usually printed for a stack
trace. The line is a string with leading and trailing
whitespace stripped; if the source is not available it is None.
"""
return StackSummary.extract(walk_tb(tb), limit=limit) | [
"def",
"extract_tb",
"(",
"tb",
",",
"limit",
"=",
"None",
")",
":",
"return",
"StackSummary",
".",
"extract",
"(",
"walk_tb",
"(",
"tb",
")",
",",
"limit",
"=",
"limit",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/traceback.py#L59-L72 | |
google/earthenterprise | 0fe84e29be470cd857e3a0e52e5d0afd5bb8cee9 | earth_enterprise/src/google/protobuf-py/mox.py | python | MockMethod.AndRaise | (self, exception) | Set the exception to raise when this method is called.
Args:
# exception: the exception to raise when this method is called.
exception: Exception | Set the exception to raise when this method is called. | [
"Set",
"the",
"exception",
"to",
"raise",
"when",
"this",
"method",
"is",
"called",
"."
] | def AndRaise(self, exception):
"""Set the exception to raise when this method is called.
Args:
# exception: the exception to raise when this method is called.
exception: Exception
"""
self._exception = exception | [
"def",
"AndRaise",
"(",
"self",
",",
"exception",
")",
":",
"self",
".",
"_exception",
"=",
"exception"
] | https://github.com/google/earthenterprise/blob/0fe84e29be470cd857e3a0e52e5d0afd5bb8cee9/earth_enterprise/src/google/protobuf-py/mox.py#L728-L736 | ||
rapidsai/cudf | d5b2448fc69f17509304d594f029d0df56984962 | python/cudf/cudf/core/window/rolling.py | python | Rolling.apply | (self, func, *args, **kwargs) | return self._apply_agg(func) | Counterpart of `pandas.core.window.Rolling.apply
<https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.core.window.rolling.Rolling.apply.html>`_.
Parameters
----------
func : function
A user defined function that takes an 1D array as input
args : tuple
unsupported.
kwargs
unsupported
See also
--------
cudf.Series.applymap : Apply an elementwise function to
transform the values in the Column.
Notes
-----
See notes of the :meth:`cudf.Series.applymap`
Example
-------
>>> import cudf
>>> def count_if_gt_3(window):
... count = 0
... for i in window:
... if i > 3:
... count += 1
... return count
...
>>> s = cudf.Series([0, 1.1, 5.8, 3.1, 6.2, 2.0, 1.5])
>>> s.rolling(3, min_periods=1).apply(count_if_gt_3)
0 0
1 0
2 1
3 2
4 3
5 2
6 1
dtype: int64 | Counterpart of `pandas.core.window.Rolling.apply
<https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.core.window.rolling.Rolling.apply.html>`_. | [
"Counterpart",
"of",
"pandas",
".",
"core",
".",
"window",
".",
"Rolling",
".",
"apply",
"<https",
":",
"//",
"pandas",
".",
"pydata",
".",
"org",
"/",
"pandas",
"-",
"docs",
"/",
"stable",
"/",
"reference",
"/",
"api",
"/",
"pandas",
".",
"core",
".... | def apply(self, func, *args, **kwargs):
"""
Counterpart of `pandas.core.window.Rolling.apply
<https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.core.window.rolling.Rolling.apply.html>`_.
Parameters
----------
func : function
A user defined function that takes an 1D array as input
args : tuple
unsupported.
kwargs
unsupported
See also
--------
cudf.Series.applymap : Apply an elementwise function to
transform the values in the Column.
Notes
-----
See notes of the :meth:`cudf.Series.applymap`
Example
-------
>>> import cudf
>>> def count_if_gt_3(window):
... count = 0
... for i in window:
... if i > 3:
... count += 1
... return count
...
>>> s = cudf.Series([0, 1.1, 5.8, 3.1, 6.2, 2.0, 1.5])
>>> s.rolling(3, min_periods=1).apply(count_if_gt_3)
0 0
1 0
2 1
3 2
4 3
5 2
6 1
dtype: int64
"""
has_nulls = False
if isinstance(self.obj, cudf.Series):
if self.obj._column.has_nulls():
has_nulls = True
else:
for col in self.obj._data:
if self.obj[col].has_nulls:
has_nulls = True
if has_nulls:
raise NotImplementedError(
"Handling UDF with null values is not yet supported"
)
return self._apply_agg(func) | [
"def",
"apply",
"(",
"self",
",",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"has_nulls",
"=",
"False",
"if",
"isinstance",
"(",
"self",
".",
"obj",
",",
"cudf",
".",
"Series",
")",
":",
"if",
"self",
".",
"obj",
".",
"_column",... | https://github.com/rapidsai/cudf/blob/d5b2448fc69f17509304d594f029d0df56984962/python/cudf/cudf/core/window/rolling.py#L282-L339 | |
wenwei202/caffe | f54a74abaf6951d8485cbdcfa1d74a4c37839466 | scripts/cpp_lint.py | python | FindEndOfExpressionInLine | (line, startpos, depth, startchar, endchar) | return (-1, depth) | Find the position just after the matching endchar.
Args:
line: a CleansedLines line.
startpos: start searching at this position.
depth: nesting level at startpos.
startchar: expression opening character.
endchar: expression closing character.
Returns:
On finding matching endchar: (index just after matching endchar, 0)
Otherwise: (-1, new depth at end of this line) | Find the position just after the matching endchar. | [
"Find",
"the",
"position",
"just",
"after",
"the",
"matching",
"endchar",
"."
] | def FindEndOfExpressionInLine(line, startpos, depth, startchar, endchar):
"""Find the position just after the matching endchar.
Args:
line: a CleansedLines line.
startpos: start searching at this position.
depth: nesting level at startpos.
startchar: expression opening character.
endchar: expression closing character.
Returns:
On finding matching endchar: (index just after matching endchar, 0)
Otherwise: (-1, new depth at end of this line)
"""
for i in xrange(startpos, len(line)):
if line[i] == startchar:
depth += 1
elif line[i] == endchar:
depth -= 1
if depth == 0:
return (i + 1, 0)
return (-1, depth) | [
"def",
"FindEndOfExpressionInLine",
"(",
"line",
",",
"startpos",
",",
"depth",
",",
"startchar",
",",
"endchar",
")",
":",
"for",
"i",
"in",
"xrange",
"(",
"startpos",
",",
"len",
"(",
"line",
")",
")",
":",
"if",
"line",
"[",
"i",
"]",
"==",
"start... | https://github.com/wenwei202/caffe/blob/f54a74abaf6951d8485cbdcfa1d74a4c37839466/scripts/cpp_lint.py#L1230-L1251 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/prompt-toolkit/py3/prompt_toolkit/completion/base.py | python | Completion.display_meta_text | (self) | return fragment_list_to_text(self.display_meta) | The 'meta' field as plain text. | The 'meta' field as plain text. | [
"The",
"meta",
"field",
"as",
"plain",
"text",
"."
] | def display_meta_text(self) -> str:
"The 'meta' field as plain text."
from prompt_toolkit.formatted_text import fragment_list_to_text
return fragment_list_to_text(self.display_meta) | [
"def",
"display_meta_text",
"(",
"self",
")",
"->",
"str",
":",
"from",
"prompt_toolkit",
".",
"formatted_text",
"import",
"fragment_list_to_text",
"return",
"fragment_list_to_text",
"(",
"self",
".",
"display_meta",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/prompt-toolkit/py3/prompt_toolkit/completion/base.py#L109-L113 | |
KratosMultiphysics/Kratos | 0000833054ed0503424eb28205d6508d9ca6cbbc | kratos/python_scripts/kratos_utilities.py | python | DeleteFileIfExisting | (file_name) | This function tries to delete a file
It uses try/except to also work in MPI | This function tries to delete a file
It uses try/except to also work in MPI | [
"This",
"function",
"tries",
"to",
"delete",
"a",
"file",
"It",
"uses",
"try",
"/",
"except",
"to",
"also",
"work",
"in",
"MPI"
] | def DeleteFileIfExisting(file_name):
"""This function tries to delete a file
It uses try/except to also work in MPI
"""
try:
os.remove(file_name)
except:
pass | [
"def",
"DeleteFileIfExisting",
"(",
"file_name",
")",
":",
"try",
":",
"os",
".",
"remove",
"(",
"file_name",
")",
"except",
":",
"pass"
] | https://github.com/KratosMultiphysics/Kratos/blob/0000833054ed0503424eb28205d6508d9ca6cbbc/kratos/python_scripts/kratos_utilities.py#L7-L14 | ||
mindspore-ai/mindspore | fb8fd3338605bb34fa5cea054e535a8b1d753fab | mindspore/python/mindspore/profiler/parser/framework_parser.py | python | FrameworkParser._construct_point_info | (self, task_id_full_op_name_dict, step_point_data) | return point_info | step_point_data is a list[step_data], step data is a dict, key is same as STEP_INFO_STRUCT. | step_point_data is a list[step_data], step data is a dict, key is same as STEP_INFO_STRUCT. | [
"step_point_data",
"is",
"a",
"list",
"[",
"step_data",
"]",
"step",
"data",
"is",
"a",
"dict",
"key",
"is",
"same",
"as",
"STEP_INFO_STRUCT",
"."
] | def _construct_point_info(self, task_id_full_op_name_dict, step_point_data):
"""step_point_data is a list[step_data], step data is a dict, key is same as STEP_INFO_STRUCT."""
point_info = {}
for step_point in step_point_data:
task_id = combine_stream_task_id(step_point['streamId'], step_point['taskId'])
tag = step_point['tag']
full_op_name = task_id_full_op_name_dict[task_id]
point_info[tag] = full_op_name
return point_info | [
"def",
"_construct_point_info",
"(",
"self",
",",
"task_id_full_op_name_dict",
",",
"step_point_data",
")",
":",
"point_info",
"=",
"{",
"}",
"for",
"step_point",
"in",
"step_point_data",
":",
"task_id",
"=",
"combine_stream_task_id",
"(",
"step_point",
"[",
"'strea... | https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/profiler/parser/framework_parser.py#L306-L314 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/tools/Editra/src/extern/flatnotebook.py | python | PageContainer.GetPageImage | (self, page) | return -1 | Returns the image index associated to a page. | Returns the image index associated to a page. | [
"Returns",
"the",
"image",
"index",
"associated",
"to",
"a",
"page",
"."
] | def GetPageImage(self, page):
""" Returns the image index associated to a page. """
if page < len(self._pagesInfoVec):
return self._pagesInfoVec[page].GetImageIndex()
return -1 | [
"def",
"GetPageImage",
"(",
"self",
",",
"page",
")",
":",
"if",
"page",
"<",
"len",
"(",
"self",
".",
"_pagesInfoVec",
")",
":",
"return",
"self",
".",
"_pagesInfoVec",
"[",
"page",
"]",
".",
"GetImageIndex",
"(",
")",
"return",
"-",
"1"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/extern/flatnotebook.py#L4570-L4577 | |
snap-stanford/snap-python | d53c51b0a26aa7e3e7400b014cdf728948fde80a | setup/snap.py | python | TStr.__eq__ | (self, *args) | return _snap.TStr___eq__(self, *args) | __eq__(TStr self, TStr Str) -> bool
Parameters:
Str: TStr const &
__eq__(TStr self, char const * CStr) -> bool
Parameters:
CStr: char const * | __eq__(TStr self, TStr Str) -> bool | [
"__eq__",
"(",
"TStr",
"self",
"TStr",
"Str",
")",
"-",
">",
"bool"
] | def __eq__(self, *args):
"""
__eq__(TStr self, TStr Str) -> bool
Parameters:
Str: TStr const &
__eq__(TStr self, char const * CStr) -> bool
Parameters:
CStr: char const *
"""
return _snap.TStr___eq__(self, *args) | [
"def",
"__eq__",
"(",
"self",
",",
"*",
"args",
")",
":",
"return",
"_snap",
".",
"TStr___eq__",
"(",
"self",
",",
"*",
"args",
")"
] | https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L9586-L9599 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/py2/scipy/optimize/_numdiff.py | python | check_derivative | (fun, jac, x0, bounds=(-np.inf, np.inf), args=(),
kwargs={}) | Check correctness of a function computing derivatives (Jacobian or
gradient) by comparison with a finite difference approximation.
Parameters
----------
fun : callable
Function of which to estimate the derivatives. The argument x
passed to this function is ndarray of shape (n,) (never a scalar
even if n=1). It must return 1-d array_like of shape (m,) or a scalar.
jac : callable
Function which computes Jacobian matrix of `fun`. It must work with
argument x the same way as `fun`. The return value must be array_like
or sparse matrix with an appropriate shape.
x0 : array_like of shape (n,) or float
Point at which to estimate the derivatives. Float will be converted
to 1-d array.
bounds : 2-tuple of array_like, optional
Lower and upper bounds on independent variables. Defaults to no bounds.
Each bound must match the size of `x0` or be a scalar, in the latter
case the bound will be the same for all variables. Use it to limit the
range of function evaluation.
args, kwargs : tuple and dict, optional
Additional arguments passed to `fun` and `jac`. Both empty by default.
The calling signature is ``fun(x, *args, **kwargs)`` and the same
for `jac`.
Returns
-------
accuracy : float
The maximum among all relative errors for elements with absolute values
higher than 1 and absolute errors for elements with absolute values
less or equal than 1. If `accuracy` is on the order of 1e-6 or lower,
then it is likely that your `jac` implementation is correct.
See Also
--------
approx_derivative : Compute finite difference approximation of derivative.
Examples
--------
>>> import numpy as np
>>> from scipy.optimize import check_derivative
>>>
>>>
>>> def f(x, c1, c2):
... return np.array([x[0] * np.sin(c1 * x[1]),
... x[0] * np.cos(c2 * x[1])])
...
>>> def jac(x, c1, c2):
... return np.array([
... [np.sin(c1 * x[1]), c1 * x[0] * np.cos(c1 * x[1])],
... [np.cos(c2 * x[1]), -c2 * x[0] * np.sin(c2 * x[1])]
... ])
...
>>>
>>> x0 = np.array([1.0, 0.5 * np.pi])
>>> check_derivative(f, jac, x0, args=(1, 2))
2.4492935982947064e-16 | Check correctness of a function computing derivatives (Jacobian or
gradient) by comparison with a finite difference approximation. | [
"Check",
"correctness",
"of",
"a",
"function",
"computing",
"derivatives",
"(",
"Jacobian",
"or",
"gradient",
")",
"by",
"comparison",
"with",
"a",
"finite",
"difference",
"approximation",
"."
] | def check_derivative(fun, jac, x0, bounds=(-np.inf, np.inf), args=(),
kwargs={}):
"""Check correctness of a function computing derivatives (Jacobian or
gradient) by comparison with a finite difference approximation.
Parameters
----------
fun : callable
Function of which to estimate the derivatives. The argument x
passed to this function is ndarray of shape (n,) (never a scalar
even if n=1). It must return 1-d array_like of shape (m,) or a scalar.
jac : callable
Function which computes Jacobian matrix of `fun`. It must work with
argument x the same way as `fun`. The return value must be array_like
or sparse matrix with an appropriate shape.
x0 : array_like of shape (n,) or float
Point at which to estimate the derivatives. Float will be converted
to 1-d array.
bounds : 2-tuple of array_like, optional
Lower and upper bounds on independent variables. Defaults to no bounds.
Each bound must match the size of `x0` or be a scalar, in the latter
case the bound will be the same for all variables. Use it to limit the
range of function evaluation.
args, kwargs : tuple and dict, optional
Additional arguments passed to `fun` and `jac`. Both empty by default.
The calling signature is ``fun(x, *args, **kwargs)`` and the same
for `jac`.
Returns
-------
accuracy : float
The maximum among all relative errors for elements with absolute values
higher than 1 and absolute errors for elements with absolute values
less or equal than 1. If `accuracy` is on the order of 1e-6 or lower,
then it is likely that your `jac` implementation is correct.
See Also
--------
approx_derivative : Compute finite difference approximation of derivative.
Examples
--------
>>> import numpy as np
>>> from scipy.optimize import check_derivative
>>>
>>>
>>> def f(x, c1, c2):
... return np.array([x[0] * np.sin(c1 * x[1]),
... x[0] * np.cos(c2 * x[1])])
...
>>> def jac(x, c1, c2):
... return np.array([
... [np.sin(c1 * x[1]), c1 * x[0] * np.cos(c1 * x[1])],
... [np.cos(c2 * x[1]), -c2 * x[0] * np.sin(c2 * x[1])]
... ])
...
>>>
>>> x0 = np.array([1.0, 0.5 * np.pi])
>>> check_derivative(f, jac, x0, args=(1, 2))
2.4492935982947064e-16
"""
J_to_test = jac(x0, *args, **kwargs)
if issparse(J_to_test):
J_diff = approx_derivative(fun, x0, bounds=bounds, sparsity=J_to_test,
args=args, kwargs=kwargs)
J_to_test = csr_matrix(J_to_test)
abs_err = J_to_test - J_diff
i, j, abs_err_data = find(abs_err)
J_diff_data = np.asarray(J_diff[i, j]).ravel()
return np.max(np.abs(abs_err_data) /
np.maximum(1, np.abs(J_diff_data)))
else:
J_diff = approx_derivative(fun, x0, bounds=bounds,
args=args, kwargs=kwargs)
abs_err = np.abs(J_to_test - J_diff)
return np.max(abs_err / np.maximum(1, np.abs(J_diff))) | [
"def",
"check_derivative",
"(",
"fun",
",",
"jac",
",",
"x0",
",",
"bounds",
"=",
"(",
"-",
"np",
".",
"inf",
",",
"np",
".",
"inf",
")",
",",
"args",
"=",
"(",
")",
",",
"kwargs",
"=",
"{",
"}",
")",
":",
"J_to_test",
"=",
"jac",
"(",
"x0",
... | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/optimize/_numdiff.py#L564-L639 | ||
ChromiumWebApps/chromium | c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7 | tools/telemetry/telemetry/core/discover.py | python | DiscoverModules | (start_dir, top_level_dir, pattern='*') | return modules | Discover all modules in |start_dir| which match |pattern|.
Args:
start_dir: The directory to recursively search.
top_level_dir: The top level of the package, for importing.
pattern: Unix shell-style pattern for filtering the filenames to import.
Returns:
list of modules. | Discover all modules in |start_dir| which match |pattern|. | [
"Discover",
"all",
"modules",
"in",
"|start_dir|",
"which",
"match",
"|pattern|",
"."
] | def DiscoverModules(start_dir, top_level_dir, pattern='*'):
"""Discover all modules in |start_dir| which match |pattern|.
Args:
start_dir: The directory to recursively search.
top_level_dir: The top level of the package, for importing.
pattern: Unix shell-style pattern for filtering the filenames to import.
Returns:
list of modules.
"""
modules = []
for dir_path, _, filenames in os.walk(start_dir):
for filename in filenames:
# Filter out unwanted filenames.
if filename.startswith('.') or filename.startswith('_'):
continue
if os.path.splitext(filename)[1] != '.py':
continue
if not fnmatch.fnmatch(filename, pattern):
continue
# Find the module.
module_rel_path = os.path.relpath(os.path.join(dir_path, filename),
top_level_dir)
module_name = re.sub(r'[/\\]', '.', os.path.splitext(module_rel_path)[0])
# Import the module.
module = __import__(module_name, fromlist=[True])
modules.append(module)
return modules | [
"def",
"DiscoverModules",
"(",
"start_dir",
",",
"top_level_dir",
",",
"pattern",
"=",
"'*'",
")",
":",
"modules",
"=",
"[",
"]",
"for",
"dir_path",
",",
"_",
",",
"filenames",
"in",
"os",
".",
"walk",
"(",
"start_dir",
")",
":",
"for",
"filename",
"in... | https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/telemetry/telemetry/core/discover.py#L15-L46 | |
mindspore-ai/mindspore | fb8fd3338605bb34fa5cea054e535a8b1d753fab | mindspore/python/mindspore/dataset/engine/validators.py | python | check_gnn_get_neg_sampled_neighbors | (method) | return new_method | A wrapper that wraps a parameter checker around the GNN `get_neg_sampled_neighbors` function. | A wrapper that wraps a parameter checker around the GNN `get_neg_sampled_neighbors` function. | [
"A",
"wrapper",
"that",
"wraps",
"a",
"parameter",
"checker",
"around",
"the",
"GNN",
"get_neg_sampled_neighbors",
"function",
"."
] | def check_gnn_get_neg_sampled_neighbors(method):
"""A wrapper that wraps a parameter checker around the GNN `get_neg_sampled_neighbors` function."""
@wraps(method)
def new_method(self, *args, **kwargs):
[node_list, neg_neighbor_num, neg_neighbor_type], _ = parse_user_args(method, *args, **kwargs)
check_gnn_list_or_ndarray(node_list, 'node_list')
type_check(neg_neighbor_num, (int,), "neg_neighbor_num")
type_check(neg_neighbor_type, (int,), "neg_neighbor_type")
return method(self, *args, **kwargs)
return new_method | [
"def",
"check_gnn_get_neg_sampled_neighbors",
"(",
"method",
")",
":",
"@",
"wraps",
"(",
"method",
")",
"def",
"new_method",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"[",
"node_list",
",",
"neg_neighbor_num",
",",
"neg_neighbor_type... | https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/dataset/engine/validators.py#L1684-L1697 | |
BlzFans/wke | b0fa21158312e40c5fbd84682d643022b6c34a93 | cygwin/lib/python2.6/decimal.py | python | Context.scaleb | (self, a, b) | return a.scaleb (b, context=self) | Returns the first operand after adding the second value its exp.
>>> ExtendedContext.scaleb(Decimal('7.50'), Decimal('-2'))
Decimal('0.0750')
>>> ExtendedContext.scaleb(Decimal('7.50'), Decimal('0'))
Decimal('7.50')
>>> ExtendedContext.scaleb(Decimal('7.50'), Decimal('3'))
Decimal('7.50E+3') | Returns the first operand after adding the second value its exp. | [
"Returns",
"the",
"first",
"operand",
"after",
"adding",
"the",
"second",
"value",
"its",
"exp",
"."
] | def scaleb (self, a, b):
"""Returns the first operand after adding the second value its exp.
>>> ExtendedContext.scaleb(Decimal('7.50'), Decimal('-2'))
Decimal('0.0750')
>>> ExtendedContext.scaleb(Decimal('7.50'), Decimal('0'))
Decimal('7.50')
>>> ExtendedContext.scaleb(Decimal('7.50'), Decimal('3'))
Decimal('7.50E+3')
"""
return a.scaleb (b, context=self) | [
"def",
"scaleb",
"(",
"self",
",",
"a",
",",
"b",
")",
":",
"return",
"a",
".",
"scaleb",
"(",
"b",
",",
"context",
"=",
"self",
")"
] | https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/decimal.py#L4775-L4785 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/tools/Editra/src/ebmlib/fileutil.py | python | IsLink | (path) | Is the file a link
@return: bool | Is the file a link
@return: bool | [
"Is",
"the",
"file",
"a",
"link",
"@return",
":",
"bool"
] | def IsLink(path):
"""Is the file a link
@return: bool
"""
if WIN:
return path.endswith(".lnk") or os.path.islink(path)
else:
return os.path.islink(path) | [
"def",
"IsLink",
"(",
"path",
")",
":",
"if",
"WIN",
":",
"return",
"path",
".",
"endswith",
"(",
"\".lnk\"",
")",
"or",
"os",
".",
"path",
".",
"islink",
"(",
"path",
")",
"else",
":",
"return",
"os",
".",
"path",
".",
"islink",
"(",
"path",
")"... | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/ebmlib/fileutil.py#L194-L202 | ||
tinyobjloader/tinyobjloader | 8322e00ae685ea623ab6ac5a6cebcfa2d22fbf93 | deps/cpplint.py | python | CheckAccess | (filename, clean_lines, linenum, nesting_state, error) | Checks for improper use of DISALLOW* macros.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
nesting_state: A NestingState instance which maintains information about
the current stack of nested blocks being parsed.
error: The function to call with any errors found. | Checks for improper use of DISALLOW* macros. | [
"Checks",
"for",
"improper",
"use",
"of",
"DISALLOW",
"*",
"macros",
"."
] | def CheckAccess(filename, clean_lines, linenum, nesting_state, error):
"""Checks for improper use of DISALLOW* macros.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
nesting_state: A NestingState instance which maintains information about
the current stack of nested blocks being parsed.
error: The function to call with any errors found.
"""
line = clean_lines.elided[linenum] # get rid of comments and strings
matched = Match((r'\s*(DISALLOW_COPY_AND_ASSIGN|'
r'DISALLOW_IMPLICIT_CONSTRUCTORS)'), line)
if not matched:
return
if nesting_state.stack and isinstance(nesting_state.stack[-1], _ClassInfo):
if nesting_state.stack[-1].access != 'private':
error(filename, linenum, 'readability/constructors', 3,
'%s must be in the private: section' % matched.group(1))
else:
# Found DISALLOW* macro outside a class declaration, or perhaps it
# was used inside a function when it should have been part of the
# class declaration. We could issue a warning here, but it
# probably resulted in a compiler error already.
pass | [
"def",
"CheckAccess",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"nesting_state",
",",
"error",
")",
":",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"# get rid of comments and strings",
"matched",
"=",
"Match",
"(",
"(",
"r'... | https://github.com/tinyobjloader/tinyobjloader/blob/8322e00ae685ea623ab6ac5a6cebcfa2d22fbf93/deps/cpplint.py#L2969-L2996 | ||
ApolloAuto/apollo | 463fb82f9e979d02dcb25044e60931293ab2dba0 | modules/tools/routing/road_show.py | python | draw_boundary | (line_segment) | :param line_segment:
:return: | :param line_segment:
:return: | [
":",
"param",
"line_segment",
":",
":",
"return",
":"
] | def draw_boundary(line_segment):
"""
:param line_segment:
:return:
"""
px, py = proto_utils.flatten(line_segment.point, ['x', 'y'])
px, py = downsample_array(px), downsample_array(py)
plt.gca().plot(px, py, 'k') | [
"def",
"draw_boundary",
"(",
"line_segment",
")",
":",
"px",
",",
"py",
"=",
"proto_utils",
".",
"flatten",
"(",
"line_segment",
".",
"point",
",",
"[",
"'x'",
",",
"'y'",
"]",
")",
"px",
",",
"py",
"=",
"downsample_array",
"(",
"px",
")",
",",
"down... | https://github.com/ApolloAuto/apollo/blob/463fb82f9e979d02dcb25044e60931293ab2dba0/modules/tools/routing/road_show.py#L74-L81 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/dataview.py | python | DataViewIconText.SetIcon | (*args, **kwargs) | return _dataview.DataViewIconText_SetIcon(*args, **kwargs) | SetIcon(self, Icon icon) | SetIcon(self, Icon icon) | [
"SetIcon",
"(",
"self",
"Icon",
"icon",
")"
] | def SetIcon(*args, **kwargs):
"""SetIcon(self, Icon icon)"""
return _dataview.DataViewIconText_SetIcon(*args, **kwargs) | [
"def",
"SetIcon",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_dataview",
".",
"DataViewIconText_SetIcon",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/dataview.py#L1324-L1326 | |
H-uru/Plasma | c2140ea046e82e9c199e257a7f2e7edb42602871 | Scripts/Python/plasma/Plasma.py | python | PtGetDefaultDisplayParams | () | Returns the default resolution and display settings | Returns the default resolution and display settings | [
"Returns",
"the",
"default",
"resolution",
"and",
"display",
"settings"
] | def PtGetDefaultDisplayParams():
"""Returns the default resolution and display settings"""
pass | [
"def",
"PtGetDefaultDisplayParams",
"(",
")",
":",
"pass"
] | https://github.com/H-uru/Plasma/blob/c2140ea046e82e9c199e257a7f2e7edb42602871/Scripts/Python/plasma/Plasma.py#L414-L416 | ||
yrnkrn/zapcc | c6a8aa30006d997eff0d60fd37b0e62b8aa0ea50 | tools/clang/utils/check_cfc/check_cfc.py | python | is_windows | () | return platform.system() == 'Windows' | Returns True if running on Windows. | Returns True if running on Windows. | [
"Returns",
"True",
"if",
"running",
"on",
"Windows",
"."
] | def is_windows():
"""Returns True if running on Windows."""
return platform.system() == 'Windows' | [
"def",
"is_windows",
"(",
")",
":",
"return",
"platform",
".",
"system",
"(",
")",
"==",
"'Windows'"
] | https://github.com/yrnkrn/zapcc/blob/c6a8aa30006d997eff0d60fd37b0e62b8aa0ea50/tools/clang/utils/check_cfc/check_cfc.py#L64-L66 | |
google/earthenterprise | 0fe84e29be470cd857e3a0e52e5d0afd5bb8cee9 | earth_enterprise/src/fusion/portableglobe/servers/stub_search.py | python | StubDatabase.LoadSearchTable | (self, table_name, content) | Load data for search stub. | Load data for search stub. | [
"Load",
"data",
"for",
"search",
"stub",
"."
] | def LoadSearchTable(self, table_name, content):
"""Load data for search stub."""
self.search_tables_.append(table_name) | [
"def",
"LoadSearchTable",
"(",
"self",
",",
"table_name",
",",
"content",
")",
":",
"self",
".",
"search_tables_",
".",
"append",
"(",
"table_name",
")"
] | https://github.com/google/earthenterprise/blob/0fe84e29be470cd857e3a0e52e5d0afd5bb8cee9/earth_enterprise/src/fusion/portableglobe/servers/stub_search.py#L29-L31 | ||
bingwin/MicroChat | 81d9a71a212c1cbca5bba497ec42659a7d25dccf | mars/lint/cpplint.py | python | _SetVerboseLevel | (level) | return _cpplint_state.SetVerboseLevel(level) | Sets the module's verbosity, and returns the previous setting. | Sets the module's verbosity, and returns the previous setting. | [
"Sets",
"the",
"module",
"s",
"verbosity",
"and",
"returns",
"the",
"previous",
"setting",
"."
] | def _SetVerboseLevel(level):
"""Sets the module's verbosity, and returns the previous setting."""
return _cpplint_state.SetVerboseLevel(level) | [
"def",
"_SetVerboseLevel",
"(",
"level",
")",
":",
"return",
"_cpplint_state",
".",
"SetVerboseLevel",
"(",
"level",
")"
] | https://github.com/bingwin/MicroChat/blob/81d9a71a212c1cbca5bba497ec42659a7d25dccf/mars/lint/cpplint.py#L870-L872 | |
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/third_party/pipeline/pipeline/pipeline.py | python | Pipeline.abort | (self, abort_message='') | Mark the entire pipeline up to the root as aborted.
Note this should only be called from *outside* the context of a running
pipeline. Synchronous and generator pipelines should raise the 'Abort'
exception to cause this behavior during execution.
Args:
abort_message: Optional message explaining why the abort happened.
Returns:
True if the abort signal was sent successfully; False if the pipeline
could not be aborted for any reason. | Mark the entire pipeline up to the root as aborted. | [
"Mark",
"the",
"entire",
"pipeline",
"up",
"to",
"the",
"root",
"as",
"aborted",
"."
] | def abort(self, abort_message=''):
"""Mark the entire pipeline up to the root as aborted.
Note this should only be called from *outside* the context of a running
pipeline. Synchronous and generator pipelines should raise the 'Abort'
exception to cause this behavior during execution.
Args:
abort_message: Optional message explaining why the abort happened.
Returns:
True if the abort signal was sent successfully; False if the pipeline
could not be aborted for any reason.
"""
# TODO: Use thread-local variable to enforce that this is not called
# while a pipeline is executing in the current thread.
if (self.async and self._root_pipeline_key == self._pipeline_key and
not self.try_cancel()):
# Handle the special case where the root pipeline is async and thus
# cannot be aborted outright.
return False
else:
return self._context.begin_abort(
self._root_pipeline_key, abort_message=abort_message) | [
"def",
"abort",
"(",
"self",
",",
"abort_message",
"=",
"''",
")",
":",
"# TODO: Use thread-local variable to enforce that this is not called",
"# while a pipeline is executing in the current thread.",
"if",
"(",
"self",
".",
"async",
"and",
"self",
".",
"_root_pipeline_key",... | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/pipeline/pipeline/pipeline.py#L715-L738 | ||
wy1iu/LargeMargin_Softmax_Loss | c3e9f20e4f16e2b4daf7d358a614366b9b39a6ec | tools/extra/extract_seconds.py | python | get_start_time | (line_iterable, year) | return start_datetime | Find start time from group of lines | Find start time from group of lines | [
"Find",
"start",
"time",
"from",
"group",
"of",
"lines"
] | def get_start_time(line_iterable, year):
"""Find start time from group of lines
"""
start_datetime = None
for line in line_iterable:
line = line.strip()
if line.find('Solving') != -1:
start_datetime = extract_datetime_from_line(line, year)
break
return start_datetime | [
"def",
"get_start_time",
"(",
"line_iterable",
",",
"year",
")",
":",
"start_datetime",
"=",
"None",
"for",
"line",
"in",
"line_iterable",
":",
"line",
"=",
"line",
".",
"strip",
"(",
")",
"if",
"line",
".",
"find",
"(",
"'Solving'",
")",
"!=",
"-",
"1... | https://github.com/wy1iu/LargeMargin_Softmax_Loss/blob/c3e9f20e4f16e2b4daf7d358a614366b9b39a6ec/tools/extra/extract_seconds.py#L31-L41 | |
ApolloAuto/apollo-platform | 86d9dc6743b496ead18d597748ebabd34a513289 | ros/genmsg/src/genmsg/msg_loader.py | python | _load_constant_line | (orig_line) | return Constant(field_type, name, val_converted, val.strip()) | :raises: :exc:`InvalidMsgSpec` | :raises: :exc:`InvalidMsgSpec` | [
":",
"raises",
":",
":",
"exc",
":",
"InvalidMsgSpec"
] | def _load_constant_line(orig_line):
"""
:raises: :exc:`InvalidMsgSpec`
"""
clean_line = _strip_comments(orig_line)
line_splits = [s for s in [x.strip() for x in clean_line.split(" ")] if s] #split type/name, filter out empties
field_type = line_splits[0]
if not is_valid_constant_type(field_type):
raise InvalidMsgSpec("%s is not a legal constant type"%field_type)
if field_type == 'string':
# strings contain anything to the right of the equals sign, there are no comments allowed
idx = orig_line.find(CONSTCHAR)
name = orig_line[orig_line.find(' ')+1:idx]
val = orig_line[idx+1:]
else:
line_splits = [x.strip() for x in ' '.join(line_splits[1:]).split(CONSTCHAR)] #resplit on '='
if len(line_splits) != 2:
raise InvalidMsgSpec("Invalid constant declaration: %s"%l)
name = line_splits[0]
val = line_splits[1]
try:
val_converted = convert_constant_value(field_type, val)
except Exception as e:
raise InvalidMsgSpec("Invalid constant value: %s"%e)
return Constant(field_type, name, val_converted, val.strip()) | [
"def",
"_load_constant_line",
"(",
"orig_line",
")",
":",
"clean_line",
"=",
"_strip_comments",
"(",
"orig_line",
")",
"line_splits",
"=",
"[",
"s",
"for",
"s",
"in",
"[",
"x",
".",
"strip",
"(",
")",
"for",
"x",
"in",
"clean_line",
".",
"split",
"(",
... | https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/genmsg/src/genmsg/msg_loader.py#L183-L209 | |
rodeofx/OpenWalter | 6116fbe3f04f1146c854afbfbdbe944feaee647e | walter/common/walterWidgets/walterLayersView.py | python | LayersView.__init__ | (self, parent=None) | Called after the instance has been created. | Called after the instance has been created. | [
"Called",
"after",
"the",
"instance",
"has",
"been",
"created",
"."
def __init__(self, parent=None):
    """Initialize the view: model, context menu, and drag-and-drop."""
    super(LayersView, self).__init__(parent)
    # Back the view with its dedicated layers model.
    self.setModel(LayersModel(self))
    # Context menu shown on right click.
    self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
    self.contextMenu = QtWidgets.QMenu(self)
    self.customContextMenuRequested.connect(self.showContextMenu)
    # Enable full drag-and-drop support with a drop indicator.
    self.setAcceptDrops(True)
    self.setDragEnabled(True)
    self.setDragDropMode(QtWidgets.QAbstractItemView.DragDrop)
    self.setDropIndicatorShown(True)
"def",
"__init__",
"(",
"self",
",",
"parent",
"=",
"None",
")",
":",
"super",
"(",
"LayersView",
",",
"self",
")",
".",
"__init__",
"(",
"parent",
")",
"model",
"=",
"LayersModel",
"(",
"self",
")",
"self",
".",
"setModel",
"(",
"model",
")",
"# Con... | https://github.com/rodeofx/OpenWalter/blob/6116fbe3f04f1146c854afbfbdbe944feaee647e/walter/common/walterWidgets/walterLayersView.py#L19-L35 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/imaplib.py | python | IMAP4.close | (self) | return typ, dat | Close currently selected mailbox.
Deleted messages are removed from writable mailbox.
This is the recommended command before 'LOGOUT'.
(typ, [data]) = <instance>.close() | Close currently selected mailbox. | [
"Close",
"currently",
"selected",
"mailbox",
"."
def close(self):
    """Close currently selected mailbox.

    Deleted messages are removed from writable mailbox.
    This is the recommended command before 'LOGOUT'.

    (typ, [data]) = <instance>.close()
    """
    try:
        result = self._simple_command('CLOSE')
    finally:
        # Whatever the command outcome, the connection drops back to
        # the authenticated (non-selected) state.
        self.state = 'AUTH'
    typ, dat = result
    return typ, dat
"def",
"close",
"(",
"self",
")",
":",
"try",
":",
"typ",
",",
"dat",
"=",
"self",
".",
"_simple_command",
"(",
"'CLOSE'",
")",
"finally",
":",
"self",
".",
"state",
"=",
"'AUTH'",
"return",
"typ",
",",
"dat"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/imaplib.py#L452-L464 | |
CaoWGG/TensorRT-YOLOv4 | 4d7c2edce99e8794a4cb4ea3540d51ce91158a36 | onnx-tensorrt/third_party/onnx/third_party/pybind11/tools/clang/cindex.py | python | Type.get_declaration | (self) | return conf.lib.clang_getTypeDeclaration(self) | Return the cursor for the declaration of the given type. | Return the cursor for the declaration of the given type. | [
"Return",
"the",
"cursor",
"for",
"the",
"declaration",
"of",
"the",
"given",
"type",
"."
def get_declaration(self):
    """Return the cursor for the declaration of the given type."""
    # Delegate to the libclang binding; returns a Cursor object.
    declaration = conf.lib.clang_getTypeDeclaration(self)
    return declaration
"def",
"get_declaration",
"(",
"self",
")",
":",
"return",
"conf",
".",
"lib",
".",
"clang_getTypeDeclaration",
"(",
"self",
")"
] | https://github.com/CaoWGG/TensorRT-YOLOv4/blob/4d7c2edce99e8794a4cb4ea3540d51ce91158a36/onnx-tensorrt/third_party/onnx/third_party/pybind11/tools/clang/cindex.py#L2048-L2052 | |
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/turtle.py | python | RawTurtle.onrelease | (self, fun, btn=1, add=None) | Bind fun to mouse-button-release event on this turtle on canvas.
Arguments:
fun -- a function with two arguments, to which will be assigned
the coordinates of the clicked point on the canvas.
num -- number of the mouse-button defaults to 1 (left mouse button).
Example (for a MyTurtle instance named joe):
>>> class MyTurtle(Turtle):
... def glow(self,x,y):
... self.fillcolor("red")
... def unglow(self,x,y):
... self.fillcolor("")
...
>>> joe = MyTurtle()
>>> joe.onclick(joe.glow)
>>> joe.onrelease(joe.unglow)
Clicking on joe turns fillcolor red, unclicking turns it to
transparent. | Bind fun to mouse-button-release event on this turtle on canvas. | [
"Bind",
"fun",
"to",
"mouse",
"-",
"button",
"-",
"release",
"event",
"on",
"this",
"turtle",
"on",
"canvas",
"."
def onrelease(self, fun, btn=1, add=None):
    """Bind fun to mouse-button-release event on this turtle on canvas.

    Arguments:
    fun -- a function with two arguments, to which will be assigned
           the coordinates of the clicked point on the canvas.
    btn -- number of the mouse-button, defaults to 1 (left mouse button).
    add -- passed through to the underlying screen binding.

    Example (for a MyTurtle instance named joe):
    >>> class MyTurtle(Turtle):
    ...     def glow(self, x, y):
    ...         self.fillcolor("red")
    ...     def unglow(self, x, y):
    ...         self.fillcolor("")
    ...
    >>> joe = MyTurtle()
    >>> joe.onclick(joe.glow)
    >>> joe.onrelease(joe.unglow)

    Clicking on joe turns fillcolor red, unclicking turns it to
    transparent.
    """
    # Register the callback on the canvas item backing this turtle,
    # then refresh the drawing.
    item = self.turtle._item
    self.screen._onrelease(item, fun, btn, add)
    self._update()
"def",
"onrelease",
"(",
"self",
",",
"fun",
",",
"btn",
"=",
"1",
",",
"add",
"=",
"None",
")",
":",
"self",
".",
"screen",
".",
"_onrelease",
"(",
"self",
".",
"turtle",
".",
"_item",
",",
"fun",
",",
"btn",
",",
"add",
")",
"self",
".",
"_up... | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/turtle.py#L3435-L3458 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/random.py | python | Random.shuffle | (self, x, random=None) | Shuffle list x in place, and return None.
Optional argument random is a 0-argument function returning a
random float in [0.0, 1.0); if it is the default None, the
standard random.random will be used. | Shuffle list x in place, and return None. | [
"Shuffle",
"list",
"x",
"in",
"place",
"and",
"return",
"None",
"."
def shuffle(self, x, random=None):
    """Shuffle list x in place, and return None.

    Optional argument random is a 0-argument function returning a
    random float in [0.0, 1.0); if it is the default None, the
    standard random.random will be used.
    """
    # Fisher-Yates: walk from the last index down, swapping each
    # element with one chosen from the prefix that includes it.
    if random is None:
        pick = self._randbelow
        for i in reversed(range(1, len(x))):
            # pick an element in x[:i+1] with which to exchange x[i]
            j = pick(i + 1)
            x[i], x[j] = x[j], x[i]
    else:
        for i in reversed(range(1, len(x))):
            # pick an element in x[:i+1] with which to exchange x[i]
            j = int(random() * (i + 1))
            x[i], x[j] = x[j], x[i]
"def",
"shuffle",
"(",
"self",
",",
"x",
",",
"random",
"=",
"None",
")",
":",
"if",
"random",
"is",
"None",
":",
"randbelow",
"=",
"self",
".",
"_randbelow",
"for",
"i",
"in",
"reversed",
"(",
"range",
"(",
"1",
",",
"len",
"(",
"x",
")",
")",
... | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/random.py#L264-L284 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/llvmlite/binding/ffi.py | python | ObjectRef._dispose | (self) | Dispose of the underlying LLVM resource. Should be overriden
by subclasses. Automatically called by close(), __del__() and
__exit__() (unless the resource has been detached). | Dispose of the underlying LLVM resource. Should be overriden
by subclasses. Automatically called by close(), __del__() and
__exit__() (unless the resource has been detached). | [
"Dispose",
"of",
"the",
"underlying",
"LLVM",
"resource",
".",
"Should",
"be",
"overriden",
"by",
"subclasses",
".",
"Automatically",
"called",
"by",
"close",
"()",
"__del__",
"()",
"and",
"__exit__",
"()",
"(",
"unless",
"the",
"resource",
"has",
"been",
"d... | def _dispose(self):
"""
Dispose of the underlying LLVM resource. Should be overriden
by subclasses. Automatically called by close(), __del__() and
__exit__() (unless the resource has been detached).
""" | [
"def",
"_dispose",
"(",
"self",
")",
":"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/llvmlite/binding/ffi.py#L278-L283 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/botocore/translate.py | python | resolve_references | (config, definitions) | Recursively replace $ref keys.
To cut down on duplication, common definitions can be declared
(and passed in via the ``definitions`` attribute) and then
references as {"$ref": "name"}, when this happens the reference
dict is placed with the value from the ``definition`` dict.
This is recursively done. | Recursively replace $ref keys. | [
"Recursively",
"replace",
"$ref",
"keys",
"."
def resolve_references(config, definitions):
    """Recursively replace $ref keys.

    To cut down on duplication, common definitions can be declared
    (and passed in via the ``definitions`` attribute) and then
    referenced as {"$ref": "name"}; when this happens the reference
    dict is replaced with the value from the ``definitions`` dict.
    This is done recursively.
    """
    for key, value in config.items():
        if not isinstance(value, dict):
            continue
        if len(value) == 1 and next(iter(value)) == '$ref':
            # A single-key {"$ref": name} dict: substitute the definition.
            config[key] = definitions[next(iter(value.values()))]
        else:
            resolve_references(value, definitions)
"def",
"resolve_references",
"(",
"config",
",",
"definitions",
")",
":",
"for",
"key",
",",
"value",
"in",
"config",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"dict",
")",
":",
"if",
"len",
"(",
"value",
")",
"==",
"1",
"... | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/botocore/translate.py#L59-L76 | ||
deepmind/open_spiel | 4ca53bea32bb2875c7385d215424048ae92f78c8 | open_spiel/python/policy.py | python | TabularPolicy.copy_with_noise | (self,
alpha=0.0,
beta=0.0,
random_state=np.random.RandomState()) | return copied_instance | Returns a copy of this policy perturbed with noise.
Generates a new random distribution using a softmax on normal random
variables with temperature beta, and mixes it with the old distribution
using 1-alpha * old_distribution + alpha * random_distribution.
Args:
alpha: Parameter characterizing the mixture amount between new and old
distributions. Between 0 and 1.
alpha = 0: keep old table.
alpha = 1: keep random table.
beta: Temperature of the softmax. Makes for more extreme policies.
random_state: A numpy `RandomState` object. If not provided, a shared
random state will be used.
Returns:
Perturbed copy. | Returns a copy of this policy perturbed with noise. | [
"Returns",
"a",
"copy",
"of",
"this",
"policy",
"perturbed",
"with",
"noise",
"."
def copy_with_noise(self,
                    alpha=0.0,
                    beta=0.0,
                    random_state=np.random.RandomState()):
    """Returns a copy of this policy perturbed with noise.

    A random distribution is drawn by applying a softmax (temperature
    *beta*) to normal noise, masked to legal actions, and then mixed
    with the existing table:
    (1 - alpha) * old_distribution + alpha * random_distribution.

    Args:
      alpha: Mixture weight between old and random distributions,
        in [0, 1]. alpha = 0 keeps the old table; alpha = 1 keeps the
        random table.
      beta: Temperature of the softmax; larger values give more
        extreme policies.
      random_state: A numpy `RandomState` object. If not provided, a
        shared random state will be used.

    Returns:
      Perturbed copy.
    """
    perturbed = self.__copy__(False)
    base_probs = self.action_probability_array
    # Softmax over gaussian noise, restricted to legal actions and
    # renormalized per state (row).
    noise = np.exp(beta * random_state.normal(size=base_probs.shape))
    noise = noise * self.legal_actions_mask
    noise = noise / (np.sum(noise, axis=1).reshape(-1, 1))
    perturbed.action_probability_array = (
        (1 - alpha) * base_probs + alpha * noise)
    return perturbed
"def",
"copy_with_noise",
"(",
"self",
",",
"alpha",
"=",
"0.0",
",",
"beta",
"=",
"0.0",
",",
"random_state",
"=",
"np",
".",
"random",
".",
"RandomState",
"(",
")",
")",
":",
"copied_instance",
"=",
"self",
".",
"__copy__",
"(",
"False",
")",
"probab... | https://github.com/deepmind/open_spiel/blob/4ca53bea32bb2875c7385d215424048ae92f78c8/open_spiel/python/policy.py#L359-L387 | |
sdhash/sdhash | b9eff63e4e5867e910f41fd69032bbb1c94a2a5e | sdhash-ui/cherrypy/process/plugins.py | python | Autoreloader.sysfiles | (self) | return files | Return a Set of sys.modules filenames to monitor. | Return a Set of sys.modules filenames to monitor. | [
"Return",
"a",
"Set",
"of",
"sys",
".",
"modules",
"filenames",
"to",
"monitor",
"."
def sysfiles(self):
    """Return a Set of sys.modules filenames to monitor."""
    monitored = set()
    pattern = self.match
    for name, module in sys.modules.items():
        if not re.match(pattern, name):
            continue
        # Zipped packages expose their archive path via the loader.
        loader = getattr(module, '__loader__', None)
        if loader is not None and hasattr(loader, 'archive'):
            filename = loader.archive
        else:
            filename = getattr(module, '__file__', None)
        if filename is not None and not os.path.isabs(filename):
            # ensure absolute paths so a os.chdir() in the app doesn't break me
            filename = os.path.normpath(
                os.path.join(_module__file__base, filename))
        monitored.add(filename)
    return monitored
"def",
"sysfiles",
"(",
"self",
")",
":",
"files",
"=",
"set",
"(",
")",
"for",
"k",
",",
"m",
"in",
"sys",
".",
"modules",
".",
"items",
"(",
")",
":",
"if",
"re",
".",
"match",
"(",
"self",
".",
"match",
",",
"k",
")",
":",
"if",
"hasattr",... | https://github.com/sdhash/sdhash/blob/b9eff63e4e5867e910f41fd69032bbb1c94a2a5e/sdhash-ui/cherrypy/process/plugins.py#L583-L596 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/urllib3/util/request.py | python | make_headers | (keep_alive=None, accept_encoding=None, user_agent=None,
basic_auth=None, proxy_basic_auth=None, disable_cache=None) | return headers | Shortcuts for generating request headers.
:param keep_alive:
If ``True``, adds 'connection: keep-alive' header.
:param accept_encoding:
Can be a boolean, list, or string.
``True`` translates to 'gzip,deflate'.
List will get joined by comma.
String will be used as provided.
:param user_agent:
String representing the user-agent you want, such as
"python-urllib3/0.6"
:param basic_auth:
Colon-separated username:password string for 'authorization: basic ...'
auth header.
:param proxy_basic_auth:
Colon-separated username:password string for 'proxy-authorization: basic ...'
auth header.
:param disable_cache:
If ``True``, adds 'cache-control: no-cache' header.
Example::
>>> make_headers(keep_alive=True, user_agent="Batman/1.0")
{'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
>>> make_headers(accept_encoding=True)
{'accept-encoding': 'gzip,deflate'} | Shortcuts for generating request headers. | [
"Shortcuts",
"for",
"generating",
"request",
"headers",
"."
def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
                 basic_auth=None, proxy_basic_auth=None, disable_cache=None):
    """
    Shortcuts for generating request headers.

    :param keep_alive:
        If ``True``, adds 'connection: keep-alive' header.

    :param accept_encoding:
        Can be a boolean, list, or string.
        ``True`` translates to 'gzip,deflate'.
        A list is joined with commas; a string is used as provided.

    :param user_agent:
        String representing the user-agent you want, such as
        "python-urllib3/0.6"

    :param basic_auth:
        Colon-separated username:password string for
        'authorization: basic ...' auth header.

    :param proxy_basic_auth:
        Colon-separated username:password string for
        'proxy-authorization: basic ...' auth header.

    :param disable_cache:
        If ``True``, adds 'cache-control: no-cache' header.

    Example::

        >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
        {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
        >>> make_headers(accept_encoding=True)
        {'accept-encoding': 'gzip,deflate'}
    """
    headers = {}

    if accept_encoding:
        # Normalize to a single comma-separated string.
        if isinstance(accept_encoding, list):
            accept_encoding = ','.join(accept_encoding)
        elif not isinstance(accept_encoding, str):
            # Truthy non-string/non-list (e.g. True): use the defaults.
            accept_encoding = ACCEPT_ENCODING
        headers['accept-encoding'] = accept_encoding

    if user_agent:
        headers['user-agent'] = user_agent

    if keep_alive:
        headers['connection'] = 'keep-alive'

    if basic_auth:
        headers['authorization'] = 'Basic ' + \
            b64encode(b(basic_auth)).decode('utf-8')

    if proxy_basic_auth:
        headers['proxy-authorization'] = 'Basic ' + \
            b64encode(b(proxy_basic_auth)).decode('utf-8')

    if disable_cache:
        headers['cache-control'] = 'no-cache'

    return headers
"def",
"make_headers",
"(",
"keep_alive",
"=",
"None",
",",
"accept_encoding",
"=",
"None",
",",
"user_agent",
"=",
"None",
",",
"basic_auth",
"=",
"None",
",",
"proxy_basic_auth",
"=",
"None",
",",
"disable_cache",
"=",
"None",
")",
":",
"headers",
"=",
"... | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/urllib3/util/request.py#L11-L74 | |
forkineye/ESPixelStick | 22926f1c0d1131f1369fc7cad405689a095ae3cb | dist/bin/esptool/serial/serialutil.py | python | SerialBase.stopbits | (self, stopbits) | Change stop bits size. | Change stop bits size. | [
"Change",
"stop",
"bits",
"size",
"."
def stopbits(self, stopbits):
    """Change stop bits size.

    Raises ValueError for sizes not listed in STOPBITS; reconfigures
    the port immediately when it is already open.
    """
    if stopbits in self.STOPBITS:
        self._stopbits = stopbits
        if self.is_open:
            self._reconfigure_port()
    else:
        raise ValueError("Not a valid stop bit size: {!r}".format(stopbits))
"def",
"stopbits",
"(",
"self",
",",
"stopbits",
")",
":",
"if",
"stopbits",
"not",
"in",
"self",
".",
"STOPBITS",
":",
"raise",
"ValueError",
"(",
"\"Not a valid stop bit size: {!r}\"",
".",
"format",
"(",
"stopbits",
")",
")",
"self",
".",
"_stopbits",
"="... | https://github.com/forkineye/ESPixelStick/blob/22926f1c0d1131f1369fc7cad405689a095ae3cb/dist/bin/esptool/serial/serialutil.py#L343-L349 | ||
wesnoth/wesnoth | 6ccac5a5e8ff75303c9190c0da60580925cb32c0 | data/tools/wesnoth/wmldata.py | python | DataSub.remove | (self, child) | Removes a sub-element. | Removes a sub-element. | [
"Removes",
"a",
"sub",
"-",
"element",
"."
def remove(self, child):
    """Remove the given sub-element from this node.

    The child is dropped both from the ordered element list and from
    the per-name lookup dict.
    """
    self.data.remove(child)
    self.dict[child.name].remove(child)
"def",
"remove",
"(",
"self",
",",
"child",
")",
":",
"self",
".",
"data",
".",
"remove",
"(",
"child",
")",
"self",
".",
"dict",
"[",
"child",
".",
"name",
"]",
".",
"remove",
"(",
"child",
")"
] | https://github.com/wesnoth/wesnoth/blob/6ccac5a5e8ff75303c9190c0da60580925cb32c0/data/tools/wesnoth/wmldata.py#L349-L352 | ||
ChromiumWebApps/chromium | c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7 | ui/resources/resource_check/resource_scale_factors.py | python | ResourceScaleFactors.__init__ | (self, input_api, output_api, paths) | Initializes ResourceScaleFactors with paths. | Initializes ResourceScaleFactors with paths. | [
"Initializes",
"ResourceScaleFactors",
"with",
"paths",
"."
def __init__(self, input_api, output_api, paths):
    """Initializes ResourceScaleFactors with paths.

    Keeps the presubmit API handles and the list of paths to check.
    """
    self.input_api = input_api
    self.output_api = output_api
    self.paths = paths
"def",
"__init__",
"(",
"self",
",",
"input_api",
",",
"output_api",
",",
"paths",
")",
":",
"self",
".",
"input_api",
"=",
"input_api",
"self",
".",
"output_api",
"=",
"output_api",
"self",
".",
"paths",
"=",
"paths"
] | https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/ui/resources/resource_check/resource_scale_factors.py#L35-L39 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/dataview.py | python | DataViewTreeStore.SetItemIcon | (*args, **kwargs) | return _dataview.DataViewTreeStore_SetItemIcon(*args, **kwargs) | SetItemIcon(self, DataViewItem item, Icon icon) | SetItemIcon(self, DataViewItem item, Icon icon) | [
"SetItemIcon",
"(",
"self",
"DataViewItem",
"item",
"Icon",
"icon",
")"
def SetItemIcon(*args, **kwargs):
    """SetItemIcon(self, DataViewItem item, Icon icon)

    Delegates to the native wx implementation.
    """
    return _dataview.DataViewTreeStore_SetItemIcon(*args, **kwargs)
"def",
"SetItemIcon",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_dataview",
".",
"DataViewTreeStore_SetItemIcon",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/dataview.py#L2420-L2422 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/stc.py | python | StyledTextEvent.GetText | (*args, **kwargs) | return _stc.StyledTextEvent_GetText(*args, **kwargs) | GetText(self) -> String | GetText(self) -> String | [
"GetText",
"(",
"self",
")",
"-",
">",
"String"
def GetText(*args, **kwargs):
    """GetText(self) -> String

    Delegates to the native wx implementation.
    """
    return _stc.StyledTextEvent_GetText(*args, **kwargs)
"def",
"GetText",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_stc",
".",
"StyledTextEvent_GetText",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/stc.py#L7138-L7140 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python3/src/Lib/logging/config.py | python | BaseConfigurator.convert | (self, value) | return value | Convert values to an appropriate type. dicts, lists and tuples are
replaced by their converting alternatives. Strings are checked to
see if they have a conversion format and are converted if they do. | Convert values to an appropriate type. dicts, lists and tuples are
replaced by their converting alternatives. Strings are checked to
see if they have a conversion format and are converted if they do. | [
"Convert",
"values",
"to",
"an",
"appropriate",
"type",
".",
"dicts",
"lists",
"and",
"tuples",
"are",
"replaced",
"by",
"their",
"converting",
"alternatives",
".",
"Strings",
"are",
"checked",
"to",
"see",
"if",
"they",
"have",
"a",
"conversion",
"format",
def convert(self, value):
    """
    Convert values to an appropriate type. dicts, lists and tuples are
    replaced by their converting alternatives. Strings are checked to
    see if they have a conversion format and are converted if they do.
    """
    if isinstance(value, dict) and not isinstance(value, ConvertingDict):
        value = ConvertingDict(value)
        value.configurator = self
    elif isinstance(value, list) and not isinstance(value, ConvertingList):
        value = ConvertingList(value)
        value.configurator = self
    elif (isinstance(value, tuple)
          and not isinstance(value, ConvertingTuple)
          and not hasattr(value, '_fields')):
        # namedtuples (which define _fields) are left untouched
        value = ConvertingTuple(value)
        value.configurator = self
    elif isinstance(value, str):  # str for py3k
        match = self.CONVERT_PATTERN.match(value)
        if match:
            groups = match.groupdict()
            converter_name = self.value_converters.get(groups['prefix'], None)
            if converter_name:
                converter = getattr(self, converter_name)
                value = converter(groups['suffix'])
    return value
"def",
"convert",
"(",
"self",
",",
"value",
")",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"ConvertingDict",
")",
"and",
"isinstance",
"(",
"value",
",",
"dict",
")",
":",
"value",
"=",
"ConvertingDict",
"(",
"value",
")",
"value",
".",
"conf... | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/logging/config.py#L438-L464 | |
hpi-xnor/BMXNet | ed0b201da6667887222b8e4b5f997c4f6b61943d | python/mxnet/symbol/random.py | python | normal | (loc=0, scale=1, shape=_Null, dtype=_Null, **kwargs) | return _random_helper(_internal._random_normal, _internal._sample_normal,
[loc, scale], shape, dtype, kwargs) | Draw random samples from a normal (Gaussian) distribution.
Samples are distributed according to a normal distribution parametrized
by *loc* (mean) and *scale* (standard deviation).
Parameters
----------
loc : float or Symbol
Mean (centre) of the distribution.
scale : float or Symbol
Standard deviation (spread or width) of the distribution.
shape : int or tuple of ints
The number of samples to draw. If shape is, e.g., `(m, n)` and `loc` and
`scale` are scalars, output shape will be `(m, n)`. If `loc` and `scale`
are Symbols with shape, e.g., `(x, y)`, then output will have shape
`(x, y, m, n)`, where `m*n` samples are drawn for each `[loc, scale)` pair.
dtype : {'float16','float32', 'float64'}
Data type of output samples. Default is 'float32' | Draw random samples from a normal (Gaussian) distribution. | [
"Draw",
"random",
"samples",
"from",
"a",
"normal",
"(",
"Gaussian",
")",
"distribution",
"."
def normal(loc=0, scale=1, shape=_Null, dtype=_Null, **kwargs):
    """Draw random samples from a normal (Gaussian) distribution.

    Samples follow a normal distribution parametrized by *loc* (mean)
    and *scale* (standard deviation).

    Parameters
    ----------
    loc : float or Symbol
        Mean (centre) of the distribution.
    scale : float or Symbol
        Standard deviation (spread or width) of the distribution.
    shape : int or tuple of ints
        The number of samples to draw. If shape is, e.g., `(m, n)` and
        `loc` and `scale` are scalars, output shape will be `(m, n)`.
        If `loc` and `scale` are Symbols with shape, e.g., `(x, y)`,
        then output will have shape `(x, y, m, n)`, where `m*n` samples
        are drawn for each `[loc, scale)` pair.
    dtype : {'float16','float32', 'float64'}
        Data type of output samples. Default is 'float32'
    """
    # The helper selects between the scalar and the Symbol-sampling op.
    params = [loc, scale]
    return _random_helper(_internal._random_normal,
                          _internal._sample_normal,
                          params, shape, dtype, kwargs)
"def",
"normal",
"(",
"loc",
"=",
"0",
",",
"scale",
"=",
"1",
",",
"shape",
"=",
"_Null",
",",
"dtype",
"=",
"_Null",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_random_helper",
"(",
"_internal",
".",
"_random_normal",
",",
"_internal",
".",
"_sa... | https://github.com/hpi-xnor/BMXNet/blob/ed0b201da6667887222b8e4b5f997c4f6b61943d/python/mxnet/symbol/random.py#L74-L96 | |
PaddlePaddle/Anakin | 5fd68a6cc4c4620cd1a30794c1bf06eebd3f4730 | tools/external_converter_v2/utils/net/net_io.py | python | NetProtoIO.clear_graph | (self) | Clear the graph of net proto. | Clear the graph of net proto. | [
"Clear",
"the",
"graph",
"of",
"net",
"proto",
"."
def clear_graph(self):
    """Clear the graph of net proto."""
    graph = self.net_proto.graph
    graph.Clear()
"def",
"clear_graph",
"(",
"self",
")",
":",
"self",
".",
"net_proto",
".",
"graph",
".",
"Clear",
"(",
")"
] | https://github.com/PaddlePaddle/Anakin/blob/5fd68a6cc4c4620cd1a30794c1bf06eebd3f4730/tools/external_converter_v2/utils/net/net_io.py#L94-L98 | ||
ChromiumWebApps/chromium | c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7 | build/util/version.py | python | write_if_changed | (file_name, contents) | Writes the specified contents to the specified file_name
iff the contents are different than the current contents. | Writes the specified contents to the specified file_name
iff the contents are different than the current contents. | [
"Writes",
"the",
"specified",
"contents",
"to",
"the",
"specified",
"file_name",
"iff",
"the",
"contents",
"are",
"different",
"than",
"the",
"current",
"contents",
"."
def write_if_changed(file_name, contents):
    """
    Writes the specified contents to the specified file_name
    iff the contents are different than the current contents.
    """
    try:
        # Context manager closes the handle deterministically; the
        # original relied on refcounting and leaked on other runtimes.
        with open(file_name, 'r') as f:
            old_contents = f.read()
    except EnvironmentError:
        # File missing or unreadable: fall through and (re)write it.
        pass
    else:
        if contents == old_contents:
            return
        os.unlink(file_name)
    with open(file_name, 'w') as f:
        f.write(contents)
"def",
"write_if_changed",
"(",
"file_name",
",",
"contents",
")",
":",
"try",
":",
"old_contents",
"=",
"open",
"(",
"file_name",
",",
"'r'",
")",
".",
"read",
"(",
")",
"except",
"EnvironmentError",
":",
"pass",
"else",
":",
"if",
"contents",
"==",
"ol... | https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/build/util/version.py#L91-L104 | ||
apache/qpid-proton | 6bcdfebb55ea3554bc29b1901422532db331a591 | python/proton/_reactor.py | python | Reactor.get_connection_address | (self, connection: Connection) | return connection.connected_address | *Deprecated* in favor of the property proton.Connection.connected_address.
This may be used to retrieve the remote peer address.
:return: string containing the address in URL format or None if no
address is available. Use the proton.Url class to create a Url object
from the returned value. | *Deprecated* in favor of the property proton.Connection.connected_address.
This may be used to retrieve the remote peer address.
:return: string containing the address in URL format or None if no
address is available. Use the proton.Url class to create a Url object
from the returned value. | [
"*",
"Deprecated",
"*",
"in",
"favor",
"of",
"the",
"property",
"proton",
".",
"Connection",
".",
"connected_address",
".",
"This",
"may",
"be",
"used",
"to",
"retrieve",
"the",
"remote",
"peer",
"address",
".",
":",
"return",
":",
"string",
"containing",
def get_connection_address(self, connection: Connection) -> str:
    """*Deprecated* in favor of the property proton.Connection.connected_address.

    Retrieve the remote peer address of *connection*.

    :return: the address in URL format, or None when no address is
        available. Use the proton.Url class to create a Url object
        from the returned value.
    """
    address = connection.connected_address
    return address
"def",
"get_connection_address",
"(",
"self",
",",
"connection",
":",
"Connection",
")",
"->",
"str",
":",
"return",
"connection",
".",
"connected_address"
] | https://github.com/apache/qpid-proton/blob/6bcdfebb55ea3554bc29b1901422532db331a591/python/proton/_reactor.py#L368-L375 | |
FreeCAD/FreeCAD | ba42231b9c6889b89e064d6d563448ed81e376ec | src/Mod/Draft/draftviewproviders/view_base.py | python | ViewProviderDraft.__getstate__ | (self) | return None | Return a tuple of all serializable objects or None.
When saving the document this view provider object gets stored
using Python's `json` module.
Since we have some un-serializable objects (Coin objects) in here
we must define this method to return a tuple of all serializable
objects or `None`.
Override this method to define the serializable objects to return.
By default it returns `None`.
Returns
-------
None | Return a tuple of all serializable objects or None. | [
"Return",
"a",
"tuple",
"of",
"all",
"serializable",
"objects",
"or",
"None",
"."
def __getstate__(self):
    """Return a tuple of all serializable objects or None.

    When the document is saved this view provider is stored via
    Python's `json` module.  Because some members (Coin objects) are
    not serializable, this method must return only serializable
    objects or `None`.

    Override this method to choose what gets serialized.  The base
    implementation serializes nothing.

    Returns
    -------
    None
    """
    return None
"def",
"__getstate__",
"(",
"self",
")",
":",
"return",
"None"
] | https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/draftviewproviders/view_base.py#L119-L137 | |
apache/kudu | 90895ce76590f10730ad7aac3613b69d89ff5422 | python/kudu/__init__.py | python | timedelta | (seconds=0, millis=0, micros=0, nanos=0) | return TimeDelta.from_nanos(total_ns) | Construct a Kudu TimeDelta to set timeouts, etc. Use this function instead
of interacting with the TimeDelta class yourself.
Returns
-------
delta : kudu.client.TimeDelta | Construct a Kudu TimeDelta to set timeouts, etc. Use this function instead
of interacting with the TimeDelta class yourself. | [
"Construct",
"a",
"Kudu",
"TimeDelta",
"to",
"set",
"timeouts",
"etc",
".",
"Use",
"this",
"function",
"instead",
"of",
"interacting",
"with",
"the",
"TimeDelta",
"class",
"yourself",
"."
def timedelta(seconds=0, millis=0, micros=0, nanos=0):
    """
    Construct a Kudu TimeDelta to set timeouts, etc. Use this function
    instead of interacting with the TimeDelta class yourself.

    Returns
    -------
    delta : kudu.client.TimeDelta
    """
    from kudu.compat import long
    # TimeDelta wraps kudu::MonoDelta; fold every component into
    # nanoseconds using Python-2-safe long arithmetic.
    total_ns = (long(0) + nanos
                + micros * long(1000)
                + millis * long(1000000)
                + seconds * long(1000000000))
    return TimeDelta.from_nanos(total_ns)
"def",
"timedelta",
"(",
"seconds",
"=",
"0",
",",
"millis",
"=",
"0",
",",
"micros",
"=",
"0",
",",
"nanos",
"=",
"0",
")",
":",
"from",
"kudu",
".",
"compat",
"import",
"long",
"# TimeDelta is a wrapper for kudu::MonoDelta",
"total_ns",
"=",
"(",
"long",... | https://github.com/apache/kudu/blob/90895ce76590f10730ad7aac3613b69d89ff5422/python/kudu/__init__.py#L111-L124 | |
google/iree | 1224bbdbe65b0d1fdf40e7324f60f68beeaf7c76 | build_tools/benchmarks/run_benchmarks_on_android.py | python | parse_arguments | () | return args | Parses command-line options. | Parses command-line options. | [
"Parses",
"command",
"-",
"line",
"options",
"."
] | def parse_arguments():
"""Parses command-line options."""
def check_dir_path(path):
if os.path.isdir(path):
return path
else:
raise argparse.ArgumentTypeError(path)
def check_exe_path(path):
if os.access(path, os.X_OK):
return path
else:
raise argparse.ArgumentTypeError(f"'{path}' is not an executable")
parser = argparse.ArgumentParser()
parser.add_argument(
"build_dir",
metavar="<build-dir>",
type=check_dir_path,
help="Path to the build directory containing benchmark suites")
parser.add_argument("--normal_benchmark_tool_dir",
"--normal-benchmark-tool-dir",
type=check_exe_path,
required=True,
help="Path to the normal iree tool directory")
parser.add_argument("--traced_benchmark_tool_dir",
"--traced-benchmark-tool-dir",
type=check_exe_path,
default=None,
help="Path to the tracing-enabled iree tool directory")
parser.add_argument("--trace_capture_tool",
"--trace-capture-tool",
type=check_exe_path,
default=None,
help="Path to the tool for collecting captured traces")
parser.add_argument(
"--driver",
type=str,
default=None,
help="Only run benchmarks for a specific driver, e.g., 'vulkan'")
parser.add_argument("--output",
"-o",
default=None,
help="Path to the ouput file")
parser.add_argument("--capture_tarball",
"--capture-tarball",
default=None,
help="Path to the tarball for captures")
parser.add_argument("--no-clean",
action="store_true",
help="Do not clean up the temporary directory used for "
"benchmarking on the Android device")
parser.add_argument("--verbose",
action="store_true",
help="Print internal information during execution")
parser.add_argument(
"--pin-cpu-freq",
"--pin_cpu_freq",
action="store_true",
help="Pin CPU frequency for all cores to the maximum. Requires root")
parser.add_argument("--pin-gpu-freq",
"--pin_gpu_freq",
action="store_true",
help="Pin GPU frequency to the maximum. Requires root")
parser.add_argument(
"--keep_going",
"--keep-going",
action="store_true",
help="Continue running after a failed benchmark. The overall exit status"
" will still indicate failure and all errors will be reported at the end."
)
parser.add_argument(
"--tmp_dir",
"--tmp-dir",
"--tmpdir",
default="/tmp/iree-benchmarks",
help="Base directory in which to store temporary files. A subdirectory"
" with a name matching the git commit hash will be created.")
parser.add_argument(
"--continue_from_directory",
"--continue-from-directory",
default=None,
help="Path to directory with previous benchmark temporary files. This"
" should be for the specific commit (not the general tmp-dir). Previous"
" benchmark and capture results from here will not be rerun and will be"
" combined with the new runs.")
args = parser.parse_args()
return args | [
"def",
"parse_arguments",
"(",
")",
":",
"def",
"check_dir_path",
"(",
"path",
")",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"return",
"path",
"else",
":",
"raise",
"argparse",
".",
"ArgumentTypeError",
"(",
"path",
")",
"def"... | https://github.com/google/iree/blob/1224bbdbe65b0d1fdf40e7324f60f68beeaf7c76/build_tools/benchmarks/run_benchmarks_on_android.py#L611-L701 | |
idaholab/moose | 9eeebc65e098b4c30f8205fb41591fd5b61eb6ff | python/chigger/base/ChiggerResult.py | python | ChiggerResult.__len__ | (self) | return len(self._sources) | The number of source objects. | The number of source objects. | [
"The",
"number",
"of",
"source",
"objects",
"."
] | def __len__(self):
"""
The number of source objects.
"""
return len(self._sources) | [
"def",
"__len__",
"(",
"self",
")",
":",
"return",
"len",
"(",
"self",
".",
"_sources",
")"
] | https://github.com/idaholab/moose/blob/9eeebc65e098b4c30f8205fb41591fd5b61eb6ff/python/chigger/base/ChiggerResult.py#L167-L171 | |
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/tpu/tpu_system_metadata.py | python | _query_tpu_system_metadata | (master_address, cluster_def=None,
query_topology=False) | return metadata | Automatically detects the TPU system metadata in the system. | Automatically detects the TPU system metadata in the system. | [
"Automatically",
"detects",
"the",
"TPU",
"system",
"metadata",
"in",
"the",
"system",
"."
] | def _query_tpu_system_metadata(master_address, cluster_def=None,
query_topology=False):
"""Automatically detects the TPU system metadata in the system."""
tpu_core_count = 0
devices = []
device_dict = collections.defaultdict(list)
if context.executing_eagerly():
logical_devices = config.list_logical_devices()
# We want the output type to match in both eager and session mode
devices = [session_lib._DeviceAttributes(device_util.canonicalize(d.name), # pylint: disable=protected-access
d.device_type, 0, 0)
for d in logical_devices]
else:
# TODO(b/120564445): Replace with standard library for retries.
retry_count = 1
while True:
logging.info('Querying Tensorflow master (%s) for TPU system metadata.',
master_address)
try:
with ops.Graph().as_default():
with session_lib.Session(
master_address,
config=get_session_config_with_timeout(
_PINGING_MASTER_TIMEOUT_IN_MS,
cluster_def)) as sess:
devices = sess.list_devices()
break
except errors.DeadlineExceededError:
msg = ('Failed to connect to the Tensorflow master. The TPU worker may '
'not be ready (still scheduling) or the Tensorflow master '
'address is incorrect: got (%s).' %
(master_address))
# TODO(xiejw): For local or grpc master we might not need retry logic
# here.
if retry_count <= _RETRY_TIMES:
logging.warning('%s', msg)
logging.warning('Retrying (%d/%d).', retry_count, _RETRY_TIMES)
retry_count += 1
else:
raise ValueError(msg)
for device in devices:
spec = tf_device.DeviceSpec.from_string(device.name)
if spec.device_type == 'TPU':
device_dict[spec.task].append(spec.device_index)
tpu_core_count += 1
num_of_cores_per_host = 0
if tpu_core_count:
num_cores_per_host_set = set(
[len(core_ids) for core_ids in device_dict.values()])
if len(num_cores_per_host_set) != 1:
raise RuntimeError(
'TPU cores on each host is not same. This should not happen!. '
'devices: {}'.format(devices))
num_of_cores_per_host = num_cores_per_host_set.pop()
topology = None
if query_topology:
if not tpu_core_count:
raise RuntimeError(
'Cannot find any TPU cores in the system (master address {}). '
'This usually means the master address is incorrect or the '
'TPU worker has some problems. Available devices: {}'.format(
master_address, devices))
topology = _obtain_topology(master_address, cluster_def)
# We sort the metadata devices so that downstream users get a sorted list
# for creating mirrored variables correctly.
def _sort_key(device):
spec = tf_device.DeviceSpec.from_string(device.name)
return (spec.job, spec.replica, spec.task, spec.device_type,
spec.device_index)
devices = tuple(sorted(devices, key=_sort_key))
metadata = TPUSystemMetadata(
num_cores=tpu_core_count,
num_hosts=len(device_dict),
num_of_cores_per_host=num_of_cores_per_host,
topology=topology,
devices=devices)
if tpu_core_count:
logging.info('Found TPU system:')
logging.info('*** Num TPU Cores: %d', metadata.num_cores)
logging.info('*** Num TPU Workers: %d', metadata.num_hosts)
logging.info('*** Num TPU Cores Per Worker: %d',
metadata.num_of_cores_per_host)
for device in metadata.devices:
logging.info('*** Available Device: %s', device)
else:
logging.info('Failed to find TPU: %s', metadata)
return metadata | [
"def",
"_query_tpu_system_metadata",
"(",
"master_address",
",",
"cluster_def",
"=",
"None",
",",
"query_topology",
"=",
"False",
")",
":",
"tpu_core_count",
"=",
"0",
"devices",
"=",
"[",
"]",
"device_dict",
"=",
"collections",
".",
"defaultdict",
"(",
"list",
... | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/tpu/tpu_system_metadata.py#L68-L164 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python3/src/Lib/pydoc.py | python | TextDoc.docroutine | (self, object, name=None, mod=None, cl=None) | Produce text documentation for a function or method object. | Produce text documentation for a function or method object. | [
"Produce",
"text",
"documentation",
"for",
"a",
"function",
"or",
"method",
"object",
"."
] | def docroutine(self, object, name=None, mod=None, cl=None):
"""Produce text documentation for a function or method object."""
realname = object.__name__
name = name or realname
note = ''
skipdocs = 0
if _is_bound_method(object):
imclass = object.__self__.__class__
if cl:
if imclass is not cl:
note = ' from ' + classname(imclass, mod)
else:
if object.__self__ is not None:
note = ' method of %s instance' % classname(
object.__self__.__class__, mod)
else:
note = ' unbound %s method' % classname(imclass,mod)
if (inspect.iscoroutinefunction(object) or
inspect.isasyncgenfunction(object)):
asyncqualifier = 'async '
else:
asyncqualifier = ''
if name == realname:
title = self.bold(realname)
else:
if cl and inspect.getattr_static(cl, realname, []) is object:
skipdocs = 1
title = self.bold(name) + ' = ' + realname
argspec = None
if inspect.isroutine(object):
try:
signature = inspect.signature(object)
except (ValueError, TypeError):
signature = None
if signature:
argspec = str(signature)
if realname == '<lambda>':
title = self.bold(name) + ' lambda '
# XXX lambda's won't usually have func_annotations['return']
# since the syntax doesn't support but it is possible.
# So removing parentheses isn't truly safe.
argspec = argspec[1:-1] # remove parentheses
if not argspec:
argspec = '(...)'
decl = asyncqualifier + title + argspec + note
if skipdocs:
return decl + '\n'
else:
doc = getdoc(object) or ''
return decl + '\n' + (doc and self.indent(doc).rstrip() + '\n') | [
"def",
"docroutine",
"(",
"self",
",",
"object",
",",
"name",
"=",
"None",
",",
"mod",
"=",
"None",
",",
"cl",
"=",
"None",
")",
":",
"realname",
"=",
"object",
".",
"__name__",
"name",
"=",
"name",
"or",
"realname",
"note",
"=",
"''",
"skipdocs",
... | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/pydoc.py#L1458-L1511 | ||
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/contrib/tensor_forest/client/random_forest.py | python | MultiForestMultiHeadEstimator.__init__ | (self, params_list, device_assigner=None, model_dir=None,
graph_builder_class=tensor_forest.RandomForestGraphs,
config=None, weights_name=None, keys_name=None,
feature_engineering_fn=None,
early_stopping_rounds=100,
num_trainers=1, trainer_id=0,
report_feature_importances=False,
local_eval=False) | Initializes a TensorForestEstimator instance.
Args:
params_list: A list of ForestHParams objects for each head, given in order
of outputs in the label tensor to be trained on.
device_assigner: An `object` instance that controls how trees get
assigned to devices. If `None`, will use
`tensor_forest.RandomForestDeviceAssigner`.
model_dir: Directory to save model parameters, graph, etc. To continue
training a previously saved model, load checkpoints saved to this
directory into an estimator.
graph_builder_class: An `object` instance that defines how TF graphs for
random forest training and inference are built. By default will use
`tensor_forest.RandomForestGraphs`.
config: `RunConfig` object to configure the runtime settings.
weights_name: A string defining feature column name representing
weights. Will be multiplied by the loss of the example. Used to
downweight or boost examples during training.
keys_name: A string naming one of the features to strip out and
pass through into the inference/eval results dict. Useful for
associating specific examples with their prediction.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and returns features and
labels which will be fed into the model.
early_stopping_rounds: Allows training to terminate early if the forest is
no longer growing. 100 by default. Set to a Falsy value to disable
the default training hook.
num_trainers: Number of training jobs, which will partition trees
among them.
trainer_id: Which trainer this instance is.
report_feature_importances: If True, print out feature importances
during evaluation.
local_eval: If True, don't use a device assigner for eval. This is to
support some common setups where eval is done on a single machine, even
though training might be distributed.
Returns:
A `TensorForestEstimator` instance. | Initializes a TensorForestEstimator instance. | [
"Initializes",
"a",
"TensorForestEstimator",
"instance",
"."
] | def __init__(self, params_list, device_assigner=None, model_dir=None,
graph_builder_class=tensor_forest.RandomForestGraphs,
config=None, weights_name=None, keys_name=None,
feature_engineering_fn=None,
early_stopping_rounds=100,
num_trainers=1, trainer_id=0,
report_feature_importances=False,
local_eval=False):
"""Initializes a TensorForestEstimator instance.
Args:
params_list: A list of ForestHParams objects for each head, given in order
of outputs in the label tensor to be trained on.
device_assigner: An `object` instance that controls how trees get
assigned to devices. If `None`, will use
`tensor_forest.RandomForestDeviceAssigner`.
model_dir: Directory to save model parameters, graph, etc. To continue
training a previously saved model, load checkpoints saved to this
directory into an estimator.
graph_builder_class: An `object` instance that defines how TF graphs for
random forest training and inference are built. By default will use
`tensor_forest.RandomForestGraphs`.
config: `RunConfig` object to configure the runtime settings.
weights_name: A string defining feature column name representing
weights. Will be multiplied by the loss of the example. Used to
downweight or boost examples during training.
keys_name: A string naming one of the features to strip out and
pass through into the inference/eval results dict. Useful for
associating specific examples with their prediction.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and returns features and
labels which will be fed into the model.
early_stopping_rounds: Allows training to terminate early if the forest is
no longer growing. 100 by default. Set to a Falsy value to disable
the default training hook.
num_trainers: Number of training jobs, which will partition trees
among them.
trainer_id: Which trainer this instance is.
report_feature_importances: If True, print out feature importances
during evaluation.
local_eval: If True, don't use a device assigner for eval. This is to
support some common setups where eval is done on a single machine, even
though training might be distributed.
Returns:
A `TensorForestEstimator` instance.
"""
model_fns = []
for i in range(len(params_list)):
params = params_list[i].fill()
model_fns.append(
get_model_fn(
params,
graph_builder_class,
device_assigner,
model_head=get_default_head(
params, weights_name, name='head{0}'.format(i)),
weights_name=weights_name,
keys_name=keys_name,
early_stopping_rounds=early_stopping_rounds,
num_trainers=num_trainers,
trainer_id=trainer_id,
report_feature_importances=report_feature_importances,
local_eval=local_eval,
head_scope='output{0}'.format(i)))
super(MultiForestMultiHeadEstimator, self).__init__(
model_fn=get_combined_model_fn(model_fns),
model_dir=model_dir,
config=config,
feature_engineering_fn=feature_engineering_fn) | [
"def",
"__init__",
"(",
"self",
",",
"params_list",
",",
"device_assigner",
"=",
"None",
",",
"model_dir",
"=",
"None",
",",
"graph_builder_class",
"=",
"tensor_forest",
".",
"RandomForestGraphs",
",",
"config",
"=",
"None",
",",
"weights_name",
"=",
"None",
"... | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/tensor_forest/client/random_forest.py#L460-L530 | ||
nasa/fprime | 595cf3682d8365943d86c1a6fe7c78f0a116acf0 | Autocoders/Python/src/fprime_ac/utils/pyparsing.py | python | _makeTags | (tagStr, xml) | return openTag, closeTag | Internal helper to construct opening and closing tag expressions, given a tag name | Internal helper to construct opening and closing tag expressions, given a tag name | [
"Internal",
"helper",
"to",
"construct",
"opening",
"and",
"closing",
"tag",
"expressions",
"given",
"a",
"tag",
"name"
] | def _makeTags(tagStr, xml):
"""Internal helper to construct opening and closing tag expressions, given a tag name"""
if isinstance(tagStr, str):
resname = tagStr
tagStr = Keyword(tagStr, caseless=not xml)
else:
resname = tagStr.name
tagAttrName = Word(alphas, alphanums + "_-")
if xml:
tagAttrValue = dblQuotedString.copy().setParseAction(removeQuotes)
openTag = (
Suppress("<")
+ tagStr
+ Dict(ZeroOrMore(Group(tagAttrName + Suppress("=") + tagAttrValue)))
+ Optional("/", default=[False])
.setResultsName("empty")
.setParseAction(lambda s, l, t: t[0] == "/")
+ Suppress(">")
)
else:
printablesLessRAbrack = "".join([c for c in printables if c not in ">"])
tagAttrValue = quotedString.copy().setParseAction(removeQuotes) | Word(
printablesLessRAbrack
)
openTag = (
Suppress("<")
+ tagStr
+ Dict(
ZeroOrMore(
Group(
tagAttrName.setParseAction(downcaseTokens)
+ Suppress("=")
+ tagAttrValue
)
)
)
+ Optional("/", default=[False])
.setResultsName("empty")
.setParseAction(lambda s, l, t: t[0] == "/")
+ Suppress(">")
)
closeTag = Combine("</" + tagStr + ">")
openTag = openTag.setResultsName(
"start" + "".join(resname.replace(":", " ").title().split())
).setName("<%s>" % tagStr)
closeTag = closeTag.setResultsName(
"end" + "".join(resname.replace(":", " ").title().split())
).setName("</%s>" % tagStr)
return openTag, closeTag | [
"def",
"_makeTags",
"(",
"tagStr",
",",
"xml",
")",
":",
"if",
"isinstance",
"(",
"tagStr",
",",
"str",
")",
":",
"resname",
"=",
"tagStr",
"tagStr",
"=",
"Keyword",
"(",
"tagStr",
",",
"caseless",
"=",
"not",
"xml",
")",
"else",
":",
"resname",
"=",... | https://github.com/nasa/fprime/blob/595cf3682d8365943d86c1a6fe7c78f0a116acf0/Autocoders/Python/src/fprime_ac/utils/pyparsing.py#L3146-L3197 | |
rapidsai/cudf | d5b2448fc69f17509304d594f029d0df56984962 | python/dask_cudf/dask_cudf/sorting.py | python | merge_quantiles | (finalq, qs, vals) | return rv.reset_index(drop=True) | Combine several quantile calculations of different data.
[NOTE: Same logic as dask.array merge_percentiles] | Combine several quantile calculations of different data.
[NOTE: Same logic as dask.array merge_percentiles] | [
"Combine",
"several",
"quantile",
"calculations",
"of",
"different",
"data",
".",
"[",
"NOTE",
":",
"Same",
"logic",
"as",
"dask",
".",
"array",
"merge_percentiles",
"]"
] | def merge_quantiles(finalq, qs, vals):
"""Combine several quantile calculations of different data.
[NOTE: Same logic as dask.array merge_percentiles]
"""
if isinstance(finalq, Iterator):
finalq = list(finalq)
finalq = np.array(finalq)
qs = list(map(list, qs))
vals = list(vals)
vals, Ns = zip(*vals)
Ns = list(Ns)
L = list(zip(*[(q, val, N) for q, val, N in zip(qs, vals, Ns) if N]))
if not L:
raise ValueError("No non-trivial arrays found")
qs, vals, Ns = L
if len(vals) != len(qs) or len(Ns) != len(qs):
raise ValueError("qs, vals, and Ns parameters must be the same length")
# transform qs and Ns into number of observations between quantiles
counts = []
for q, N in zip(qs, Ns):
count = np.empty(len(q))
count[1:] = np.diff(q)
count[0] = q[0]
count *= N
counts.append(count)
def _append_counts(val, count):
val["_counts"] = count
return val
# Sort by calculated quantile values, then number of observations.
combined_vals_counts = gd.merge_sorted(
[*map(_append_counts, vals, counts)]
)
combined_counts = cupy.asnumpy(combined_vals_counts["_counts"].values)
combined_vals = combined_vals_counts.drop(columns=["_counts"])
# quantile-like, but scaled by total number of observations
combined_q = np.cumsum(combined_counts)
# rescale finalq quantiles to match combined_q
desired_q = finalq * sum(Ns)
# TODO: Support other interpolation methods
# For now - Always use "nearest" for interpolation
left = np.searchsorted(combined_q, desired_q, side="left")
right = np.searchsorted(combined_q, desired_q, side="right") - 1
np.minimum(left, len(combined_vals) - 1, left) # don't exceed max index
lower = np.minimum(left, right)
upper = np.maximum(left, right)
lower_residual = np.abs(combined_q[lower] - desired_q)
upper_residual = np.abs(combined_q[upper] - desired_q)
mask = lower_residual > upper_residual
index = lower # alias; we no longer need lower
index[mask] = upper[mask]
rv = combined_vals.iloc[index]
return rv.reset_index(drop=True) | [
"def",
"merge_quantiles",
"(",
"finalq",
",",
"qs",
",",
"vals",
")",
":",
"if",
"isinstance",
"(",
"finalq",
",",
"Iterator",
")",
":",
"finalq",
"=",
"list",
"(",
"finalq",
")",
"finalq",
"=",
"np",
".",
"array",
"(",
"finalq",
")",
"qs",
"=",
"l... | https://github.com/rapidsai/cudf/blob/d5b2448fc69f17509304d594f029d0df56984962/python/dask_cudf/dask_cudf/sorting.py#L48-L107 | |
v8mips/v8mips | f0c9cc0bbfd461c7f516799d9a58e9a7395f737e | tools/stats-viewer.py | python | StatsViewer.MountSharedData | (self) | Mount the binary counters file as a memory-mapped file. If
something goes wrong print an informative message and exit the
program. | Mount the binary counters file as a memory-mapped file. If
something goes wrong print an informative message and exit the
program. | [
"Mount",
"the",
"binary",
"counters",
"file",
"as",
"a",
"memory",
"-",
"mapped",
"file",
".",
"If",
"something",
"goes",
"wrong",
"print",
"an",
"informative",
"message",
"and",
"exit",
"the",
"program",
"."
] | def MountSharedData(self):
"""Mount the binary counters file as a memory-mapped file. If
something goes wrong print an informative message and exit the
program."""
if not os.path.exists(self.data_name):
maps_name = "/proc/%s/maps" % self.data_name
if not os.path.exists(maps_name):
print "\"%s\" is neither a counter file nor a PID." % self.data_name
sys.exit(1)
maps_file = open(maps_name, "r")
try:
self.data_name = None
for m in re.finditer(r"/dev/shm/\S*", maps_file.read()):
if os.path.exists(m.group(0)):
self.data_name = m.group(0)
break
if self.data_name is None:
print "Can't find counter file in maps for PID %s." % self.data_name
sys.exit(1)
finally:
maps_file.close()
data_file = open(self.data_name, "r")
size = os.fstat(data_file.fileno()).st_size
fileno = data_file.fileno()
self.shared_mmap = mmap.mmap(fileno, size, access=mmap.ACCESS_READ)
data_access = SharedDataAccess(self.shared_mmap)
if data_access.IntAt(0) == COUNTERS_FILE_MAGIC_NUMBER:
return CounterCollection(data_access)
elif data_access.IntAt(0) == CHROME_COUNTERS_FILE_MAGIC_NUMBER:
return ChromeCounterCollection(data_access)
print "File %s is not stats data." % self.data_name
sys.exit(1) | [
"def",
"MountSharedData",
"(",
"self",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"data_name",
")",
":",
"maps_name",
"=",
"\"/proc/%s/maps\"",
"%",
"self",
".",
"data_name",
"if",
"not",
"os",
".",
"path",
".",
"exists"... | https://github.com/v8mips/v8mips/blob/f0c9cc0bbfd461c7f516799d9a58e9a7395f737e/tools/stats-viewer.py#L96-L127 | ||
idaholab/moose | 9eeebc65e098b4c30f8205fb41591fd5b61eb6ff | python/peacock/ExodusViewer/plugins/GoldDiffPlugin.py | python | ExternalVTKWindowPlugin.closeEvent | (self, *args) | Store the size of the window. | Store the size of the window. | [
"Store",
"the",
"size",
"of",
"the",
"window",
"."
] | def closeEvent(self, *args):
"""
Store the size of the window.
"""
self._widget_size = self.size()
self._toggle.setCheckState(QtCore.Qt.Unchecked)
self._toggle.clicked.emit(False) | [
"def",
"closeEvent",
"(",
"self",
",",
"*",
"args",
")",
":",
"self",
".",
"_widget_size",
"=",
"self",
".",
"size",
"(",
")",
"self",
".",
"_toggle",
".",
"setCheckState",
"(",
"QtCore",
".",
"Qt",
".",
"Unchecked",
")",
"self",
".",
"_toggle",
".",... | https://github.com/idaholab/moose/blob/9eeebc65e098b4c30f8205fb41591fd5b61eb6ff/python/peacock/ExodusViewer/plugins/GoldDiffPlugin.py#L53-L59 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/asyncio/tasks.py | python | _unregister_task | (task) | Unregister a task. | Unregister a task. | [
"Unregister",
"a",
"task",
"."
] | def _unregister_task(task):
"""Unregister a task."""
_all_tasks.discard(task) | [
"def",
"_unregister_task",
"(",
"task",
")",
":",
"_all_tasks",
".",
"discard",
"(",
"task",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/asyncio/tasks.py#L879-L881 | ||
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/ops/linalg/linalg_impl.py | python | _lu_solve_assertions | (lower_upper, perm, rhs, validate_args) | return assertions | Returns list of assertions related to `lu_solve` assumptions. | Returns list of assertions related to `lu_solve` assumptions. | [
"Returns",
"list",
"of",
"assertions",
"related",
"to",
"lu_solve",
"assumptions",
"."
] | def _lu_solve_assertions(lower_upper, perm, rhs, validate_args):
"""Returns list of assertions related to `lu_solve` assumptions."""
assertions = lu_reconstruct_assertions(lower_upper, perm, validate_args)
message = 'Input `rhs` must have at least 2 dimensions.'
if rhs.shape.ndims is not None:
if rhs.shape.ndims < 2:
raise ValueError(message)
elif validate_args:
assertions.append(
check_ops.assert_rank_at_least(rhs, rank=2, message=message))
message = '`lower_upper.shape[-1]` must equal `rhs.shape[-1]`.'
if (lower_upper.shape[-1] is not None and rhs.shape[-2] is not None):
if lower_upper.shape[-1] != rhs.shape[-2]:
raise ValueError(message)
elif validate_args:
assertions.append(
check_ops.assert_equal(
array_ops.shape(lower_upper)[-1],
array_ops.shape(rhs)[-2],
message=message))
return assertions | [
"def",
"_lu_solve_assertions",
"(",
"lower_upper",
",",
"perm",
",",
"rhs",
",",
"validate_args",
")",
":",
"assertions",
"=",
"lu_reconstruct_assertions",
"(",
"lower_upper",
",",
"perm",
",",
"validate_args",
")",
"message",
"=",
"'Input `rhs` must have at least 2 d... | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/linalg/linalg_impl.py#L1203-L1226 | |
9miao/CrossApp | 1f5375e061bf69841eb19728598f5ae3f508d620 | tools/bindings-generator/clang/cindex.py | python | SourceLocation.from_position | (tu, file, line, column) | return conf.lib.clang_getLocation(tu, file, line, column) | Retrieve the source location associated with a given file/line/column in
a particular translation unit. | Retrieve the source location associated with a given file/line/column in
a particular translation unit. | [
"Retrieve",
"the",
"source",
"location",
"associated",
"with",
"a",
"given",
"file",
"/",
"line",
"/",
"column",
"in",
"a",
"particular",
"translation",
"unit",
"."
] | def from_position(tu, file, line, column):
"""
Retrieve the source location associated with a given file/line/column in
a particular translation unit.
"""
return conf.lib.clang_getLocation(tu, file, line, column) | [
"def",
"from_position",
"(",
"tu",
",",
"file",
",",
"line",
",",
"column",
")",
":",
"return",
"conf",
".",
"lib",
".",
"clang_getLocation",
"(",
"tu",
",",
"file",
",",
"line",
",",
"column",
")"
] | https://github.com/9miao/CrossApp/blob/1f5375e061bf69841eb19728598f5ae3f508d620/tools/bindings-generator/clang/cindex.py#L180-L185 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scikit-learn/py2/sklearn/svm/base.py | python | BaseLibSVM._validate_targets | (self, y) | return column_or_1d(y, warn=True).astype(np.float64) | Validation of y and class_weight.
Default implementation for SVR and one-class; overridden in BaseSVC. | Validation of y and class_weight. | [
"Validation",
"of",
"y",
"and",
"class_weight",
"."
] | def _validate_targets(self, y):
"""Validation of y and class_weight.
Default implementation for SVR and one-class; overridden in BaseSVC.
"""
# XXX this is ugly.
# Regression models should not have a class_weight_ attribute.
self.class_weight_ = np.empty(0)
return column_or_1d(y, warn=True).astype(np.float64) | [
"def",
"_validate_targets",
"(",
"self",
",",
"y",
")",
":",
"# XXX this is ugly.",
"# Regression models should not have a class_weight_ attribute.",
"self",
".",
"class_weight_",
"=",
"np",
".",
"empty",
"(",
"0",
")",
"return",
"column_or_1d",
"(",
"y",
",",
"warn... | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py2/sklearn/svm/base.py#L204-L212 | |
leela-zero/leela-zero | e3ed6310d33d75078ba74c3adf887d18439fc2e3 | scripts/cpplint.py | python | CleansedLines._CollapseStrings | (elided) | return collapsed | Collapses strings and chars on a line to simple "" or '' blocks.
We nix strings first so we're not fooled by text like '"http://"'
Args:
elided: The line being processed.
Returns:
The line with collapsed strings. | Collapses strings and chars on a line to simple "" or '' blocks. | [
"Collapses",
"strings",
"and",
"chars",
"on",
"a",
"line",
"to",
"simple",
"or",
"blocks",
"."
] | def _CollapseStrings(elided):
"""Collapses strings and chars on a line to simple "" or '' blocks.
We nix strings first so we're not fooled by text like '"http://"'
Args:
elided: The line being processed.
Returns:
The line with collapsed strings.
"""
if _RE_PATTERN_INCLUDE.match(elided):
return elided
# Remove escaped characters first to make quote/single quote collapsing
# basic. Things that look like escaped characters shouldn't occur
# outside of strings and chars.
elided = _RE_PATTERN_CLEANSE_LINE_ESCAPES.sub('', elided)
# Replace quoted strings and digit separators. Both single quotes
# and double quotes are processed in the same loop, otherwise
# nested quotes wouldn't work.
collapsed = ''
while True:
# Find the first quote character
match = Match(r'^([^\'"]*)([\'"])(.*)$', elided)
if not match:
collapsed += elided
break
head, quote, tail = match.groups()
if quote == '"':
# Collapse double quoted strings
second_quote = tail.find('"')
if second_quote >= 0:
collapsed += head + '""'
elided = tail[second_quote + 1:]
else:
# Unmatched double quote, don't bother processing the rest
# of the line since this is probably a multiline string.
collapsed += elided
break
else:
# Found single quote, check nearby text to eliminate digit separators.
#
# There is no special handling for floating point here, because
# the integer/fractional/exponent parts would all be parsed
# correctly as long as there are digits on both sides of the
# separator. So we are fine as long as we don't see something
# like "0.'3" (gcc 4.9.0 will not allow this literal).
if Search(r'\b(?:0[bBxX]?|[1-9])[0-9a-fA-F]*$', head):
match_literal = Match(r'^((?:\'?[0-9a-zA-Z_])*)(.*)$', "'" + tail)
collapsed += head + match_literal.group(1).replace("'", '')
elided = match_literal.group(2)
else:
second_quote = tail.find('\'')
if second_quote >= 0:
collapsed += head + "''"
elided = tail[second_quote + 1:]
else:
# Unmatched single quote
collapsed += elided
break
return collapsed | [
"def",
"_CollapseStrings",
"(",
"elided",
")",
":",
"if",
"_RE_PATTERN_INCLUDE",
".",
"match",
"(",
"elided",
")",
":",
"return",
"elided",
"# Remove escaped characters first to make quote/single quote collapsing",
"# basic. Things that look like escaped characters shouldn't occur... | https://github.com/leela-zero/leela-zero/blob/e3ed6310d33d75078ba74c3adf887d18439fc2e3/scripts/cpplint.py#L1318-L1382 | |
ceph/ceph | 959663007321a369c83218414a29bd9dbc8bda3a | qa/tasks/ceph_manager.py | python | OSDThrasher.do_join | (self) | Break out of this Ceph loop | Break out of this Ceph loop | [
"Break",
"out",
"of",
"this",
"Ceph",
"loop"
] | def do_join(self):
"""
Break out of this Ceph loop
"""
self.stopping = True
self.thread.get()
if self.sighup_delay:
self.log("joining the do_sighup greenlet")
self.sighup_thread.get()
if self.optrack_toggle_delay:
self.log("joining the do_optrack_toggle greenlet")
self.optrack_toggle_thread.join()
if self.dump_ops_enable == "true":
self.log("joining the do_dump_ops greenlet")
self.dump_ops_thread.join()
if self.noscrub_toggle_delay:
self.log("joining the do_noscrub_toggle greenlet")
self.noscrub_toggle_thread.join() | [
"def",
"do_join",
"(",
"self",
")",
":",
"self",
".",
"stopping",
"=",
"True",
"self",
".",
"thread",
".",
"get",
"(",
")",
"if",
"self",
".",
"sighup_delay",
":",
"self",
".",
"log",
"(",
"\"joining the do_sighup greenlet\"",
")",
"self",
".",
"sighup_t... | https://github.com/ceph/ceph/blob/959663007321a369c83218414a29bd9dbc8bda3a/qa/tasks/ceph_manager.py#L817-L834 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/AWSPythonSDK/1.5.8/s3transfer/futures.py | python | TransferCoordinator.submit | (self, executor, task, tag=None) | return future | Submits a task to a provided executor
:type executor: s3transfer.futures.BoundedExecutor
:param executor: The executor to submit the callable to
:type task: s3transfer.tasks.Task
:param task: The task to submit to the executor
:type tag: s3transfer.futures.TaskTag
:param tag: A tag to associate to the submitted task
:rtype: concurrent.futures.Future
:returns: A future representing the submitted task | Submits a task to a provided executor | [
"Submits",
"a",
"task",
"to",
"a",
"provided",
"executor"
] | def submit(self, executor, task, tag=None):
"""Submits a task to a provided executor
:type executor: s3transfer.futures.BoundedExecutor
:param executor: The executor to submit the callable to
:type task: s3transfer.tasks.Task
:param task: The task to submit to the executor
:type tag: s3transfer.futures.TaskTag
:param tag: A tag to associate to the submitted task
:rtype: concurrent.futures.Future
:returns: A future representing the submitted task
"""
logger.debug(
"Submitting task %s to executor %s for transfer request: %s." % (
task, executor, self.transfer_id)
)
future = executor.submit(task, tag=tag)
# Add this created future to the list of associated future just
# in case it is needed during cleanups.
self.add_associated_future(future)
future.add_done_callback(
FunctionContainer(self.remove_associated_future, future))
return future | [
"def",
"submit",
"(",
"self",
",",
"executor",
",",
"task",
",",
"tag",
"=",
"None",
")",
":",
"logger",
".",
"debug",
"(",
"\"Submitting task %s to executor %s for transfer request: %s.\"",
"%",
"(",
"task",
",",
"executor",
",",
"self",
".",
"transfer_id",
"... | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/s3transfer/futures.py#L269-L294 | |
hpi-xnor/BMXNet | ed0b201da6667887222b8e4b5f997c4f6b61943d | python/mxnet/module/base_module.py | python | BaseModule.init_optimizer | (self, kvstore='local', optimizer='sgd',
optimizer_params=(('learning_rate', 0.01),), force_init=False) | Installs and initializes optimizers, as well as initialize kvstore for
distributed training
Parameters
----------
kvstore : str or KVStore
Defaults to `'local'`.
optimizer : str or Optimizer
Defaults to `'sgd'`.
optimizer_params : dict
Defaults to ``(('learning_rate', 0.01),)``. The default value is not a dictionary,
just to avoid pylint warning of dangerous default values.
force_init : bool
Defaults to ``False``, indicates whether to force re-initializing an optimizer
if it is already installed.
Examples
--------
>>> # An example of initializing optimizer.
>>> mod.init_optimizer(optimizer='sgd', optimizer_params=(('learning_rate', 0.005),)) | Installs and initializes optimizers, as well as initialize kvstore for
distributed training | [
"Installs",
"and",
"initializes",
"optimizers",
"as",
"well",
"as",
"initialize",
"kvstore",
"for",
"distributed",
"training"
] | def init_optimizer(self, kvstore='local', optimizer='sgd',
optimizer_params=(('learning_rate', 0.01),), force_init=False):
"""Installs and initializes optimizers, as well as initialize kvstore for
distributed training
Parameters
----------
kvstore : str or KVStore
Defaults to `'local'`.
optimizer : str or Optimizer
Defaults to `'sgd'`.
optimizer_params : dict
Defaults to ``(('learning_rate', 0.01),)``. The default value is not a dictionary,
just to avoid pylint warning of dangerous default values.
force_init : bool
Defaults to ``False``, indicates whether to force re-initializing an optimizer
if it is already installed.
Examples
--------
>>> # An example of initializing optimizer.
>>> mod.init_optimizer(optimizer='sgd', optimizer_params=(('learning_rate', 0.005),))
"""
raise NotImplementedError() | [
"def",
"init_optimizer",
"(",
"self",
",",
"kvstore",
"=",
"'local'",
",",
"optimizer",
"=",
"'sgd'",
",",
"optimizer_params",
"=",
"(",
"(",
"'learning_rate'",
",",
"0.01",
")",
",",
")",
",",
"force_init",
"=",
"False",
")",
":",
"raise",
"NotImplemented... | https://github.com/hpi-xnor/BMXNet/blob/ed0b201da6667887222b8e4b5f997c4f6b61943d/python/mxnet/module/base_module.py#L958-L981 | ||
open-source-parsers/jsoncpp | 42e892d96e47b1f6e29844cc705e148ec4856448 | devtools/batchbuild.py | python | fix_eol | (stdout) | return re.sub('\r*\n', os.linesep, stdout) | Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n). | Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n). | [
"Fixes",
"wrong",
"EOL",
"produced",
"by",
"cmake",
"--",
"build",
"on",
"Windows",
"(",
"\\",
"r",
"\\",
"r",
"\\",
"n",
"instead",
"of",
"\\",
"r",
"\\",
"n",
")",
"."
] | def fix_eol(stdout):
"""Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n).
"""
return re.sub('\r*\n', os.linesep, stdout) | [
"def",
"fix_eol",
"(",
"stdout",
")",
":",
"return",
"re",
".",
"sub",
"(",
"'\\r*\\n'",
",",
"os",
".",
"linesep",
",",
"stdout",
")"
] | https://github.com/open-source-parsers/jsoncpp/blob/42e892d96e47b1f6e29844cc705e148ec4856448/devtools/batchbuild.py#L106-L109 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.