nwo stringlengths 5 86 | sha stringlengths 40 40 | path stringlengths 4 189 | language stringclasses 1 value | identifier stringlengths 1 94 | parameters stringlengths 2 4.03k | argument_list stringclasses 1 value | return_statement stringlengths 0 11.5k | docstring stringlengths 1 33.2k | docstring_summary stringlengths 0 5.15k | docstring_tokens list | function stringlengths 34 151k | function_tokens list | url stringlengths 90 278 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/_windows.py | python | PrintDialogData.EnableHelp | (*args, **kwargs) | return _windows_.PrintDialogData_EnableHelp(*args, **kwargs) | EnableHelp(self, bool flag) | EnableHelp(self, bool flag) | [
"EnableHelp",
"(",
"self",
"bool",
"flag",
")"
] | def EnableHelp(*args, **kwargs):
"""EnableHelp(self, bool flag)"""
return _windows_.PrintDialogData_EnableHelp(*args, **kwargs) | [
"def",
"EnableHelp",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"PrintDialogData_EnableHelp",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_windows.py#L5134-L5136 | |
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | deps/src/libxml2-2.9.1/python/libxml2class.py | python | uCSIsUgaritic | (code) | return ret | Check whether the character is part of Ugaritic UCS Block | Check whether the character is part of Ugaritic UCS Block | [
"Check",
"whether",
"the",
"character",
"is",
"part",
"of",
"Ugaritic",
"UCS",
"Block"
] | def uCSIsUgaritic(code):
"""Check whether the character is part of Ugaritic UCS Block """
ret = libxml2mod.xmlUCSIsUgaritic(code)
return ret | [
"def",
"uCSIsUgaritic",
"(",
"code",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlUCSIsUgaritic",
"(",
"code",
")",
"return",
"ret"
] | https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2class.py#L2170-L2173 | |
thalium/icebox | 99d147d5b9269222225443ce171b4fd46d8985d4 | third_party/retdec-3.2/scripts/type_extractor/type_extractor/json_types.py | python | convert_typedefs_to_type_for_json | (typedefs, types) | Converts enum to json representation. | Converts enum to json representation. | [
"Converts",
"enum",
"to",
"json",
"representation",
"."
] | def convert_typedefs_to_type_for_json(typedefs, types):
"""Converts enum to json representation."""
for t in typedefs:
parse_typedef_to_type_for_json(t, types) | [
"def",
"convert_typedefs_to_type_for_json",
"(",
"typedefs",
",",
"types",
")",
":",
"for",
"t",
"in",
"typedefs",
":",
"parse_typedef_to_type_for_json",
"(",
"t",
",",
"types",
")"
] | https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/retdec-3.2/scripts/type_extractor/type_extractor/json_types.py#L352-L355 | ||
trilinos/Trilinos | 6168be6dd51e35e1cd681e9c4b24433e709df140 | packages/seacas/libraries/ioss/src/visualization/catalyst/phactori/PhactoriDriver.py | python | PhactoriExtractBlockOperation.CreateParaViewFilter | (self, inInputFilter) | return newParaViewFilter | create the extract block filter for ParaView | create the extract block filter for ParaView | [
"create",
"the",
"extract",
"block",
"filter",
"for",
"ParaView"
] | def CreateParaViewFilter(self, inInputFilter):
"create the extract block filter for ParaView"
if PhactoriDbg(100):
myDebugPrint3("PhactoriExtractBlockOperation.CreateParaViewFilter "
"entered\n", 100)
#info in block class should already be parsed and checked
savedActiveSource = GetActiveSource()
UpdatePipelineWithCurrentTimeArgument(inInputFilter)
if PhactoriDbg():
myDebugPrint3(" extractblock inInputFilter point data arrays:\n")
numArrays = inInputFilter.PointData.GetNumberOfArrays()
for ii in range (0, numArrays):
myDebugPrint3(" " + str(ii) + ": " + inInputFilter.PointData.GetArray(ii).GetName() + "\n")
if PhactoriDbg():
myDebugPrint3(" extractblock inInputFilter cell data arrays:\n")
numArrays = inInputFilter.CellData.GetNumberOfArrays()
for ii in range (0, numArrays):
myDebugPrint3(" " + str(ii) + ": " + inInputFilter.CellData.GetArray(ii).GetName() + "\n")
newParaViewFilter = ExtractBlock(inInputFilter)
if self.mFlatBlockIndicesSpecifiedDirectly == False:
self.FigureBlockIndicesFromBlockList(inInputFilter)
#newParaViewFilter.PruneOutput = 1
#newParaViewFilter.MaintainStructure = 0
newParaViewFilter.MaintainStructure = 1
newParaViewFilter.BlockIndices = self.mBlockIndices
SetActiveSource(newParaViewFilter)
UpdatePipelineWithCurrentTimeArgument(newParaViewFilter)
if PhactoriDbg():
myDebugPrint3(" extractblock newParaViewFilter point data arrays:\n")
numArrays = newParaViewFilter.PointData.GetNumberOfArrays()
for ii in range (0, numArrays):
myDebugPrint3(" " + str(ii) + ": " + newParaViewFilter.PointData.GetArray(ii).GetName() + "\n")
if PhactoriDbg():
myDebugPrint3(" extractblock newParaViewFilter cell data arrays:\n")
numArrays = newParaViewFilter.CellData.GetNumberOfArrays()
for ii in range (0, numArrays):
myDebugPrint3(" " + str(ii) + ": " + newParaViewFilter.CellData.GetArray(ii).GetName() + "\n")
SetActiveSource(savedActiveSource)
if PhactoriDbg(100):
myDebugPrint3("PhactoriExtractBlockOperation.CreateParaViewFilter "
"returning\n", 100)
return newParaViewFilter | [
"def",
"CreateParaViewFilter",
"(",
"self",
",",
"inInputFilter",
")",
":",
"if",
"PhactoriDbg",
"(",
"100",
")",
":",
"myDebugPrint3",
"(",
"\"PhactoriExtractBlockOperation.CreateParaViewFilter \"",
"\"entered\\n\"",
",",
"100",
")",
"#info in block class should already be parsed and checked",
"savedActiveSource",
"=",
"GetActiveSource",
"(",
")",
"UpdatePipelineWithCurrentTimeArgument",
"(",
"inInputFilter",
")",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\" extractblock inInputFilter point data arrays:\\n\"",
")",
"numArrays",
"=",
"inInputFilter",
".",
"PointData",
".",
"GetNumberOfArrays",
"(",
")",
"for",
"ii",
"in",
"range",
"(",
"0",
",",
"numArrays",
")",
":",
"myDebugPrint3",
"(",
"\" \"",
"+",
"str",
"(",
"ii",
")",
"+",
"\": \"",
"+",
"inInputFilter",
".",
"PointData",
".",
"GetArray",
"(",
"ii",
")",
".",
"GetName",
"(",
")",
"+",
"\"\\n\"",
")",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\" extractblock inInputFilter cell data arrays:\\n\"",
")",
"numArrays",
"=",
"inInputFilter",
".",
"CellData",
".",
"GetNumberOfArrays",
"(",
")",
"for",
"ii",
"in",
"range",
"(",
"0",
",",
"numArrays",
")",
":",
"myDebugPrint3",
"(",
"\" \"",
"+",
"str",
"(",
"ii",
")",
"+",
"\": \"",
"+",
"inInputFilter",
".",
"CellData",
".",
"GetArray",
"(",
"ii",
")",
".",
"GetName",
"(",
")",
"+",
"\"\\n\"",
")",
"newParaViewFilter",
"=",
"ExtractBlock",
"(",
"inInputFilter",
")",
"if",
"self",
".",
"mFlatBlockIndicesSpecifiedDirectly",
"==",
"False",
":",
"self",
".",
"FigureBlockIndicesFromBlockList",
"(",
"inInputFilter",
")",
"#newParaViewFilter.PruneOutput = 1",
"#newParaViewFilter.MaintainStructure = 0",
"newParaViewFilter",
".",
"MaintainStructure",
"=",
"1",
"newParaViewFilter",
".",
"BlockIndices",
"=",
"self",
".",
"mBlockIndices",
"SetActiveSource",
"(",
"newParaViewFilter",
")",
"UpdatePipelineWithCurrentTimeArgument",
"(",
"newParaViewFilter",
")",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\" extractblock newParaViewFilter point data arrays:\\n\"",
")",
"numArrays",
"=",
"newParaViewFilter",
".",
"PointData",
".",
"GetNumberOfArrays",
"(",
")",
"for",
"ii",
"in",
"range",
"(",
"0",
",",
"numArrays",
")",
":",
"myDebugPrint3",
"(",
"\" \"",
"+",
"str",
"(",
"ii",
")",
"+",
"\": \"",
"+",
"newParaViewFilter",
".",
"PointData",
".",
"GetArray",
"(",
"ii",
")",
".",
"GetName",
"(",
")",
"+",
"\"\\n\"",
")",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\" extractblock newParaViewFilter cell data arrays:\\n\"",
")",
"numArrays",
"=",
"newParaViewFilter",
".",
"CellData",
".",
"GetNumberOfArrays",
"(",
")",
"for",
"ii",
"in",
"range",
"(",
"0",
",",
"numArrays",
")",
":",
"myDebugPrint3",
"(",
"\" \"",
"+",
"str",
"(",
"ii",
")",
"+",
"\": \"",
"+",
"newParaViewFilter",
".",
"CellData",
".",
"GetArray",
"(",
"ii",
")",
".",
"GetName",
"(",
")",
"+",
"\"\\n\"",
")",
"SetActiveSource",
"(",
"savedActiveSource",
")",
"if",
"PhactoriDbg",
"(",
"100",
")",
":",
"myDebugPrint3",
"(",
"\"PhactoriExtractBlockOperation.CreateParaViewFilter \"",
"\"returning\\n\"",
",",
"100",
")",
"return",
"newParaViewFilter"
] | https://github.com/trilinos/Trilinos/blob/6168be6dd51e35e1cd681e9c4b24433e709df140/packages/seacas/libraries/ioss/src/visualization/catalyst/phactori/PhactoriDriver.py#L10348-L10403 | |
rrwick/Unicycler | 96ffea71e3a78d63ade19d6124946773e65cf129 | unicycler/assembly_graph.py | python | AssemblyGraph.get_exclusive_inputs | (self, segment_number) | return [abs(x) for x in self.reverse_links[segment_number] if
self.lead_exclusively_to(x, segment_number)] | This function finds all segments which lead into the given segment. If those segments
do not lead into any other segments, then this function returns them in a list.
Specifically, this function returns a list of unsigned numbers. | This function finds all segments which lead into the given segment. If those segments
do not lead into any other segments, then this function returns them in a list.
Specifically, this function returns a list of unsigned numbers. | [
"This",
"function",
"finds",
"all",
"segments",
"which",
"lead",
"into",
"the",
"given",
"segment",
".",
"If",
"those",
"segments",
"do",
"not",
"lead",
"into",
"any",
"other",
"segments",
"then",
"this",
"function",
"returns",
"them",
"in",
"a",
"list",
".",
"Specifically",
"this",
"function",
"returns",
"a",
"list",
"of",
"unsigned",
"numbers",
"."
] | def get_exclusive_inputs(self, segment_number):
"""
This function finds all segments which lead into the given segment. If those segments
do not lead into any other segments, then this function returns them in a list.
Specifically, this function returns a list of unsigned numbers.
"""
if segment_number not in self.reverse_links:
return []
return [abs(x) for x in self.reverse_links[segment_number] if
self.lead_exclusively_to(x, segment_number)] | [
"def",
"get_exclusive_inputs",
"(",
"self",
",",
"segment_number",
")",
":",
"if",
"segment_number",
"not",
"in",
"self",
".",
"reverse_links",
":",
"return",
"[",
"]",
"return",
"[",
"abs",
"(",
"x",
")",
"for",
"x",
"in",
"self",
".",
"reverse_links",
"[",
"segment_number",
"]",
"if",
"self",
".",
"lead_exclusively_to",
"(",
"x",
",",
"segment_number",
")",
"]"
] | https://github.com/rrwick/Unicycler/blob/96ffea71e3a78d63ade19d6124946773e65cf129/unicycler/assembly_graph.py#L751-L760 | |
ApolloAuto/apollo-platform | 86d9dc6743b496ead18d597748ebabd34a513289 | ros/third_party/lib_x86_64/python2.7/dist-packages/numpy/core/defchararray.py | python | chararray.splitlines | (self, keepends=None) | return splitlines(self, keepends) | For each element in `self`, return a list of the lines in the
element, breaking at line boundaries.
See also
--------
char.splitlines | For each element in `self`, return a list of the lines in the
element, breaking at line boundaries. | [
"For",
"each",
"element",
"in",
"self",
"return",
"a",
"list",
"of",
"the",
"lines",
"in",
"the",
"element",
"breaking",
"at",
"line",
"boundaries",
"."
] | def splitlines(self, keepends=None):
"""
For each element in `self`, return a list of the lines in the
element, breaking at line boundaries.
See also
--------
char.splitlines
"""
return splitlines(self, keepends) | [
"def",
"splitlines",
"(",
"self",
",",
"keepends",
"=",
"None",
")",
":",
"return",
"splitlines",
"(",
"self",
",",
"keepends",
")"
] | https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_x86_64/python2.7/dist-packages/numpy/core/defchararray.py#L2352-L2362 | |
pmq20/node-packer | 12c46c6e44fbc14d9ee645ebd17d5296b324f7e0 | lts/deps/v8/third_party/jinja2/utils.py | python | generate_lorem_ipsum | (n=5, html=True, min=20, max=100) | return Markup(u'\n'.join(u'<p>%s</p>' % escape(x) for x in result)) | Generate some lorem ipsum for the template. | Generate some lorem ipsum for the template. | [
"Generate",
"some",
"lorem",
"ipsum",
"for",
"the",
"template",
"."
] | def generate_lorem_ipsum(n=5, html=True, min=20, max=100):
"""Generate some lorem ipsum for the template."""
from jinja2.constants import LOREM_IPSUM_WORDS
from random import choice, randrange
words = LOREM_IPSUM_WORDS.split()
result = []
for _ in range(n):
next_capitalized = True
last_comma = last_fullstop = 0
word = None
last = None
p = []
# each paragraph contains out of 20 to 100 words.
for idx, _ in enumerate(range(randrange(min, max))):
while True:
word = choice(words)
if word != last:
last = word
break
if next_capitalized:
word = word.capitalize()
next_capitalized = False
# add commas
if idx - randrange(3, 8) > last_comma:
last_comma = idx
last_fullstop += 2
word += ','
# add end of sentences
if idx - randrange(10, 20) > last_fullstop:
last_comma = last_fullstop = idx
word += '.'
next_capitalized = True
p.append(word)
# ensure that the paragraph ends with a dot.
p = u' '.join(p)
if p.endswith(','):
p = p[:-1] + '.'
elif not p.endswith('.'):
p += '.'
result.append(p)
if not html:
return u'\n\n'.join(result)
return Markup(u'\n'.join(u'<p>%s</p>' % escape(x) for x in result)) | [
"def",
"generate_lorem_ipsum",
"(",
"n",
"=",
"5",
",",
"html",
"=",
"True",
",",
"min",
"=",
"20",
",",
"max",
"=",
"100",
")",
":",
"from",
"jinja2",
".",
"constants",
"import",
"LOREM_IPSUM_WORDS",
"from",
"random",
"import",
"choice",
",",
"randrange",
"words",
"=",
"LOREM_IPSUM_WORDS",
".",
"split",
"(",
")",
"result",
"=",
"[",
"]",
"for",
"_",
"in",
"range",
"(",
"n",
")",
":",
"next_capitalized",
"=",
"True",
"last_comma",
"=",
"last_fullstop",
"=",
"0",
"word",
"=",
"None",
"last",
"=",
"None",
"p",
"=",
"[",
"]",
"# each paragraph contains out of 20 to 100 words.",
"for",
"idx",
",",
"_",
"in",
"enumerate",
"(",
"range",
"(",
"randrange",
"(",
"min",
",",
"max",
")",
")",
")",
":",
"while",
"True",
":",
"word",
"=",
"choice",
"(",
"words",
")",
"if",
"word",
"!=",
"last",
":",
"last",
"=",
"word",
"break",
"if",
"next_capitalized",
":",
"word",
"=",
"word",
".",
"capitalize",
"(",
")",
"next_capitalized",
"=",
"False",
"# add commas",
"if",
"idx",
"-",
"randrange",
"(",
"3",
",",
"8",
")",
">",
"last_comma",
":",
"last_comma",
"=",
"idx",
"last_fullstop",
"+=",
"2",
"word",
"+=",
"','",
"# add end of sentences",
"if",
"idx",
"-",
"randrange",
"(",
"10",
",",
"20",
")",
">",
"last_fullstop",
":",
"last_comma",
"=",
"last_fullstop",
"=",
"idx",
"word",
"+=",
"'.'",
"next_capitalized",
"=",
"True",
"p",
".",
"append",
"(",
"word",
")",
"# ensure that the paragraph ends with a dot.",
"p",
"=",
"u' '",
".",
"join",
"(",
"p",
")",
"if",
"p",
".",
"endswith",
"(",
"','",
")",
":",
"p",
"=",
"p",
"[",
":",
"-",
"1",
"]",
"+",
"'.'",
"elif",
"not",
"p",
".",
"endswith",
"(",
"'.'",
")",
":",
"p",
"+=",
"'.'",
"result",
".",
"append",
"(",
"p",
")",
"if",
"not",
"html",
":",
"return",
"u'\\n\\n'",
".",
"join",
"(",
"result",
")",
"return",
"Markup",
"(",
"u'\\n'",
".",
"join",
"(",
"u'<p>%s</p>'",
"%",
"escape",
"(",
"x",
")",
"for",
"x",
"in",
"result",
")",
")"
] | https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/deps/v8/third_party/jinja2/utils.py#L238-L284 | |
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | tensorflow/python/framework/ops.py | python | convert_to_tensor | (value, dtype=None, name=None, preferred_dtype=None) | return internal_convert_to_tensor(
value=value,
dtype=dtype,
name=name,
preferred_dtype=preferred_dtype,
as_ref=False) | Converts the given `value` to a `Tensor`.
This function converts Python objects of various types to `Tensor`
objects. It accepts `Tensor` objects, numpy arrays, Python lists,
and Python scalars. For example:
```python
import numpy as np
def my_func(arg):
arg = tf.convert_to_tensor(arg, dtype=tf.float32)
return tf.matmul(arg, arg) + arg
# The following calls are equivalent.
value_1 = my_func(tf.constant([[1.0, 2.0], [3.0, 4.0]]))
value_2 = my_func([[1.0, 2.0], [3.0, 4.0]])
value_3 = my_func(np.array([[1.0, 2.0], [3.0, 4.0]], dtype=np.float32))
```
This function can be useful when composing a new operation in Python
(such as `my_func` in the example above). All standard Python op
constructors apply this function to each of their Tensor-valued
inputs, which allows those ops to accept numpy arrays, Python lists,
and scalars in addition to `Tensor` objects.
Args:
value: An object whose type has a registered `Tensor` conversion function.
dtype: Optional element type for the returned tensor. If missing, the
type is inferred from the type of `value`.
name: Optional name to use if a new `Tensor` is created.
preferred_dtype: Optional element type for the returned tensor,
used when dtype is None. In some cases, a caller may not have a
dtype in mind when converting to a tensor, so preferred_dtype
can be used as a soft preference. If the conversion to
`preferred_dtype` is not possible, this argument has no effect.
Returns:
An `Output` based on `value`.
Raises:
TypeError: If no conversion function is registered for `value`.
RuntimeError: If a registered conversion function returns an invalid value. | Converts the given `value` to a `Tensor`. | [
"Converts",
"the",
"given",
"value",
"to",
"a",
"Tensor",
"."
] | def convert_to_tensor(value, dtype=None, name=None, preferred_dtype=None):
"""Converts the given `value` to a `Tensor`.
This function converts Python objects of various types to `Tensor`
objects. It accepts `Tensor` objects, numpy arrays, Python lists,
and Python scalars. For example:
```python
import numpy as np
def my_func(arg):
arg = tf.convert_to_tensor(arg, dtype=tf.float32)
return tf.matmul(arg, arg) + arg
# The following calls are equivalent.
value_1 = my_func(tf.constant([[1.0, 2.0], [3.0, 4.0]]))
value_2 = my_func([[1.0, 2.0], [3.0, 4.0]])
value_3 = my_func(np.array([[1.0, 2.0], [3.0, 4.0]], dtype=np.float32))
```
This function can be useful when composing a new operation in Python
(such as `my_func` in the example above). All standard Python op
constructors apply this function to each of their Tensor-valued
inputs, which allows those ops to accept numpy arrays, Python lists,
and scalars in addition to `Tensor` objects.
Args:
value: An object whose type has a registered `Tensor` conversion function.
dtype: Optional element type for the returned tensor. If missing, the
type is inferred from the type of `value`.
name: Optional name to use if a new `Tensor` is created.
preferred_dtype: Optional element type for the returned tensor,
used when dtype is None. In some cases, a caller may not have a
dtype in mind when converting to a tensor, so preferred_dtype
can be used as a soft preference. If the conversion to
`preferred_dtype` is not possible, this argument has no effect.
Returns:
An `Output` based on `value`.
Raises:
TypeError: If no conversion function is registered for `value`.
RuntimeError: If a registered conversion function returns an invalid value.
"""
return internal_convert_to_tensor(
value=value,
dtype=dtype,
name=name,
preferred_dtype=preferred_dtype,
as_ref=False) | [
"def",
"convert_to_tensor",
"(",
"value",
",",
"dtype",
"=",
"None",
",",
"name",
"=",
"None",
",",
"preferred_dtype",
"=",
"None",
")",
":",
"return",
"internal_convert_to_tensor",
"(",
"value",
"=",
"value",
",",
"dtype",
"=",
"dtype",
",",
"name",
"=",
"name",
",",
"preferred_dtype",
"=",
"preferred_dtype",
",",
"as_ref",
"=",
"False",
")"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/framework/ops.py#L809-L859 | |
kamyu104/LeetCode-Solutions | 77605708a927ea3b85aee5a479db733938c7c211 | Python/divide-chocolate.py | python | Solution.maximizeSweetness | (self, sweetness, K) | return right | :type sweetness: List[int]
:type K: int
:rtype: int | :type sweetness: List[int]
:type K: int
:rtype: int | [
":",
"type",
"sweetness",
":",
"List",
"[",
"int",
"]",
":",
"type",
"K",
":",
"int",
":",
"rtype",
":",
"int"
] | def maximizeSweetness(self, sweetness, K):
"""
:type sweetness: List[int]
:type K: int
:rtype: int
"""
def check(sweetness, K, x):
curr, cuts = 0, 0
for s in sweetness:
curr += s
if curr >= x:
cuts += 1
curr = 0
return cuts >= K+1
left, right = min(sweetness), sum(sweetness)//(K+1)
while left <= right:
mid = left + (right-left)//2
if not check(sweetness, K, mid):
right = mid-1
else:
left = mid+1
return right | [
"def",
"maximizeSweetness",
"(",
"self",
",",
"sweetness",
",",
"K",
")",
":",
"def",
"check",
"(",
"sweetness",
",",
"K",
",",
"x",
")",
":",
"curr",
",",
"cuts",
"=",
"0",
",",
"0",
"for",
"s",
"in",
"sweetness",
":",
"curr",
"+=",
"s",
"if",
"curr",
">=",
"x",
":",
"cuts",
"+=",
"1",
"curr",
"=",
"0",
"return",
"cuts",
">=",
"K",
"+",
"1",
"left",
",",
"right",
"=",
"min",
"(",
"sweetness",
")",
",",
"sum",
"(",
"sweetness",
")",
"//",
"(",
"K",
"+",
"1",
")",
"while",
"left",
"<=",
"right",
":",
"mid",
"=",
"left",
"+",
"(",
"right",
"-",
"left",
")",
"//",
"2",
"if",
"not",
"check",
"(",
"sweetness",
",",
"K",
",",
"mid",
")",
":",
"right",
"=",
"mid",
"-",
"1",
"else",
":",
"left",
"=",
"mid",
"+",
"1",
"return",
"right"
] | https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/divide-chocolate.py#L5-L27 | |
KDE/krita | 10ea63984e00366865769c193ab298de73a59c5c | cmake/modules/sip-generate.py | python | main | () | return 0 | Generate the project bindings from the command line. | Generate the project bindings from the command line. | [
"Generate",
"the",
"project",
"bindings",
"from",
"the",
"command",
"line",
"."
] | def main():
""" Generate the project bindings from the command line. """
try:
project = AbstractProject.bootstrap(
'build', "Generate the project bindings.")
project.builder._generate_bindings()
project.progress("The project bindings are ready for build.")
except Exception as e:
handle_exception(e)
return 0 | [
"def",
"main",
"(",
")",
":",
"try",
":",
"project",
"=",
"AbstractProject",
".",
"bootstrap",
"(",
"'build'",
",",
"\"Generate the project bindings.\"",
")",
"project",
".",
"builder",
".",
"_generate_bindings",
"(",
")",
"project",
".",
"progress",
"(",
"\"The project bindings are ready for build.\"",
")",
"except",
"Exception",
"as",
"e",
":",
"handle_exception",
"(",
"e",
")",
"return",
"0"
] | https://github.com/KDE/krita/blob/10ea63984e00366865769c193ab298de73a59c5c/cmake/modules/sip-generate.py#L10-L21 | |
geemaple/leetcode | 68bc5032e1ee52c22ef2f2e608053484c487af54 | leetcode/228.summary-ranges.py | python | Solution.summaryRanges | (self, nums) | return res | :type nums: List[int]
:rtype: List[str] | :type nums: List[int]
:rtype: List[str] | [
":",
"type",
"nums",
":",
"List",
"[",
"int",
"]",
":",
"rtype",
":",
"List",
"[",
"str",
"]"
] | def summaryRanges(self, nums):
"""
:type nums: List[int]
:rtype: List[str]
"""
if len(nums) == 0:
return []
start = 0
res = []
for i in range(1, len(nums) + 1):
if i == len(nums) or nums[i] - nums[i - 1] != 1:
if start == i - 1:
res.append(str(nums[start]))
else:
res.append(str(nums[start]) + '->' + str(nums[i - 1]))
start = i
return res | [
"def",
"summaryRanges",
"(",
"self",
",",
"nums",
")",
":",
"if",
"len",
"(",
"nums",
")",
"==",
"0",
":",
"return",
"[",
"]",
"start",
"=",
"0",
"res",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"len",
"(",
"nums",
")",
"+",
"1",
")",
":",
"if",
"i",
"==",
"len",
"(",
"nums",
")",
"or",
"nums",
"[",
"i",
"]",
"-",
"nums",
"[",
"i",
"-",
"1",
"]",
"!=",
"1",
":",
"if",
"start",
"==",
"i",
"-",
"1",
":",
"res",
".",
"append",
"(",
"str",
"(",
"nums",
"[",
"start",
"]",
")",
")",
"else",
":",
"res",
".",
"append",
"(",
"str",
"(",
"nums",
"[",
"start",
"]",
")",
"+",
"'->'",
"+",
"str",
"(",
"nums",
"[",
"i",
"-",
"1",
"]",
")",
")",
"start",
"=",
"i",
"return",
"res"
] | https://github.com/geemaple/leetcode/blob/68bc5032e1ee52c22ef2f2e608053484c487af54/leetcode/228.summary-ranges.py#L2-L22 | |
intel/caffe | 3f494b442ee3f9d17a07b09ecbd5fa2bbda00836 | scripts/cpp_lint.py | python | _CppLintState.SetVerboseLevel | (self, level) | return last_verbose_level | Sets the module's verbosity, and returns the previous setting. | Sets the module's verbosity, and returns the previous setting. | [
"Sets",
"the",
"module",
"s",
"verbosity",
"and",
"returns",
"the",
"previous",
"setting",
"."
] | def SetVerboseLevel(self, level):
"""Sets the module's verbosity, and returns the previous setting."""
last_verbose_level = self.verbose_level
self.verbose_level = level
return last_verbose_level | [
"def",
"SetVerboseLevel",
"(",
"self",
",",
"level",
")",
":",
"last_verbose_level",
"=",
"self",
".",
"verbose_level",
"self",
".",
"verbose_level",
"=",
"level",
"return",
"last_verbose_level"
] | https://github.com/intel/caffe/blob/3f494b442ee3f9d17a07b09ecbd5fa2bbda00836/scripts/cpp_lint.py#L707-L711 | |
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib-tk/turtle.py | python | TurtleScreen.update | (self) | Perform a TurtleScreen update. | Perform a TurtleScreen update. | [
"Perform",
"a",
"TurtleScreen",
"update",
"."
] | def update(self):
"""Perform a TurtleScreen update.
"""
tracing = self._tracing
self._tracing = True
for t in self.turtles():
t._update_data()
t._drawturtle()
self._tracing = tracing
self._update() | [
"def",
"update",
"(",
"self",
")",
":",
"tracing",
"=",
"self",
".",
"_tracing",
"self",
".",
"_tracing",
"=",
"True",
"for",
"t",
"in",
"self",
".",
"turtles",
"(",
")",
":",
"t",
".",
"_update_data",
"(",
")",
"t",
".",
"_drawturtle",
"(",
")",
"self",
".",
"_tracing",
"=",
"tracing",
"self",
".",
"_update",
"(",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib-tk/turtle.py#L1244-L1253 | ||
llvm-mirror/lldb | d01083a850f577b85501a0902b52fd0930de72c7 | third_party/Python/module/ptyprocess-0.6.0/ptyprocess/ptyprocess.py | python | PtyProcess.sendcontrol | (self, char) | return self._writeb(byte), byte | Helper method that wraps send() with mnemonic access for sending control
character to the child (such as Ctrl-C or Ctrl-D). For example, to send
Ctrl-G (ASCII 7, bell, '\a')::
child.sendcontrol('g')
See also, sendintr() and sendeof(). | Helper method that wraps send() with mnemonic access for sending control
character to the child (such as Ctrl-C or Ctrl-D). For example, to send
Ctrl-G (ASCII 7, bell, '\a'):: | [
"Helper",
"method",
"that",
"wraps",
"send",
"()",
"with",
"mnemonic",
"access",
"for",
"sending",
"control",
"character",
"to",
"the",
"child",
"(",
"such",
"as",
"Ctrl",
"-",
"C",
"or",
"Ctrl",
"-",
"D",
")",
".",
"For",
"example",
"to",
"send",
"Ctrl",
"-",
"G",
"(",
"ASCII",
"7",
"bell",
"\\",
"a",
")",
"::"
] | def sendcontrol(self, char):
'''Helper method that wraps send() with mnemonic access for sending control
character to the child (such as Ctrl-C or Ctrl-D). For example, to send
Ctrl-G (ASCII 7, bell, '\a')::
child.sendcontrol('g')
See also, sendintr() and sendeof().
'''
char = char.lower()
a = ord(char)
if 97 <= a <= 122:
a = a - ord('a') + 1
byte = _byte(a)
return self._writeb(byte), byte
d = {'@': 0, '`': 0,
'[': 27, '{': 27,
'\\': 28, '|': 28,
']': 29, '}': 29,
'^': 30, '~': 30,
'_': 31,
'?': 127}
if char not in d:
return 0, b''
byte = _byte(d[char])
return self._writeb(byte), byte | [
"def",
"sendcontrol",
"(",
"self",
",",
"char",
")",
":",
"char",
"=",
"char",
".",
"lower",
"(",
")",
"a",
"=",
"ord",
"(",
"char",
")",
"if",
"97",
"<=",
"a",
"<=",
"122",
":",
"a",
"=",
"a",
"-",
"ord",
"(",
"'a'",
")",
"+",
"1",
"byte",
"=",
"_byte",
"(",
"a",
")",
"return",
"self",
".",
"_writeb",
"(",
"byte",
")",
",",
"byte",
"d",
"=",
"{",
"'@'",
":",
"0",
",",
"'`'",
":",
"0",
",",
"'['",
":",
"27",
",",
"'{'",
":",
"27",
",",
"'\\\\'",
":",
"28",
",",
"'|'",
":",
"28",
",",
"']'",
":",
"29",
",",
"'}'",
":",
"29",
",",
"'^'",
":",
"30",
",",
"'~'",
":",
"30",
",",
"'_'",
":",
"31",
",",
"'?'",
":",
"127",
"}",
"if",
"char",
"not",
"in",
"d",
":",
"return",
"0",
",",
"b''",
"byte",
"=",
"_byte",
"(",
"d",
"[",
"char",
"]",
")",
"return",
"self",
".",
"_writeb",
"(",
"byte",
")",
",",
"byte"
] | https://github.com/llvm-mirror/lldb/blob/d01083a850f577b85501a0902b52fd0930de72c7/third_party/Python/module/ptyprocess-0.6.0/ptyprocess/ptyprocess.py#L564-L590 | |
SequoiaDB/SequoiaDB | 2894ed7e5bd6fe57330afc900cf76d0ff0df9f64 | tools/server/php_linux/libxml2/lib/python2.4/site-packages/libxml2.py | python | uCSIsSupplementalMathematicalOperators | (code) | return ret | Check whether the character is part of
SupplementalMathematicalOperators UCS Block | Check whether the character is part of
SupplementalMathematicalOperators UCS Block | [
"Check",
"whether",
"the",
"character",
"is",
"part",
"of",
"SupplementalMathematicalOperators",
"UCS",
"Block"
] | def uCSIsSupplementalMathematicalOperators(code):
"""Check whether the character is part of
SupplementalMathematicalOperators UCS Block """
ret = libxml2mod.xmlUCSIsSupplementalMathematicalOperators(code)
return ret | [
"def",
"uCSIsSupplementalMathematicalOperators",
"(",
"code",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlUCSIsSupplementalMathematicalOperators",
"(",
"code",
")",
"return",
"ret"
] | https://github.com/SequoiaDB/SequoiaDB/blob/2894ed7e5bd6fe57330afc900cf76d0ff0df9f64/tools/server/php_linux/libxml2/lib/python2.4/site-packages/libxml2.py#L2834-L2838 | |
CRYTEK/CRYENGINE | 232227c59a220cbbd311576f0fbeba7bb53b2a8c | Editor/Python/windows/Lib/site-packages/pkg_resources/_vendor/pyparsing.py | python | ParseExpression.leaveWhitespace | ( self ) | return self | Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on
all contained expressions. | Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on
all contained expressions. | [
"Extends",
"C",
"{",
"leaveWhitespace",
"}",
"defined",
"in",
"base",
"class",
"and",
"also",
"invokes",
"C",
"{",
"leaveWhitespace",
"}",
"on",
"all",
"contained",
"expressions",
"."
] | def leaveWhitespace( self ):
"""Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on
all contained expressions."""
self.skipWhitespace = False
self.exprs = [ e.copy() for e in self.exprs ]
for e in self.exprs:
e.leaveWhitespace()
return self | [
"def",
"leaveWhitespace",
"(",
"self",
")",
":",
"self",
".",
"skipWhitespace",
"=",
"False",
"self",
".",
"exprs",
"=",
"[",
"e",
".",
"copy",
"(",
")",
"for",
"e",
"in",
"self",
".",
"exprs",
"]",
"for",
"e",
"in",
"self",
".",
"exprs",
":",
"e",
".",
"leaveWhitespace",
"(",
")",
"return",
"self"
] | https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Editor/Python/windows/Lib/site-packages/pkg_resources/_vendor/pyparsing.py#L3288-L3295 | |
scribusproject/scribus | 41ec7c775a060912cf251682a8b1437f753f80f4 | scribus/plugins/scriptplugin/scripts/CalendarWizard.py | python | ScHorizontalEventCalendar.printWeekNo | (self, week) | return | Dummy for now
(for this type of calendar - see ScVerticalEventCalendar) | Dummy for now
(for this type of calendar - see ScVerticalEventCalendar) | [
"Dummy",
"for",
"now",
"(",
"for",
"this",
"type",
"of",
"calendar",
"-",
"see",
"ScVerticalEventCalendar",
")"
] | def printWeekNo(self, week):
""" Dummy for now
(for this type of calendar - see ScVerticalEventCalendar) """
return | [
"def",
"printWeekNo",
"(",
"self",
",",
"week",
")",
":",
"return"
] | https://github.com/scribusproject/scribus/blob/41ec7c775a060912cf251682a8b1437f753f80f4/scribus/plugins/scriptplugin/scripts/CalendarWizard.py#L353-L356 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/pip/_vendor/html5lib/html5parser.py | python | HTMLParser.__init__ | (self, tree=None, strict=False, namespaceHTMLElements=True, debug=False) | :arg tree: a treebuilder class controlling the type of tree that will be
returned. Built in treebuilders can be accessed through
html5lib.treebuilders.getTreeBuilder(treeType)
:arg strict: raise an exception when a parse error is encountered
:arg namespaceHTMLElements: whether or not to namespace HTML elements
:arg debug: whether or not to enable debug mode which logs things
Example:
>>> from html5lib.html5parser import HTMLParser
>>> parser = HTMLParser() # generates parser with etree builder
>>> parser = HTMLParser('lxml', strict=True) # generates parser with lxml builder which is strict | :arg tree: a treebuilder class controlling the type of tree that will be
returned. Built in treebuilders can be accessed through
html5lib.treebuilders.getTreeBuilder(treeType) | [
":",
"arg",
"tree",
":",
"a",
"treebuilder",
"class",
"controlling",
"the",
"type",
"of",
"tree",
"that",
"will",
"be",
"returned",
".",
"Built",
"in",
"treebuilders",
"can",
"be",
"accessed",
"through",
"html5lib",
".",
"treebuilders",
".",
"getTreeBuilder",
"(",
"treeType",
")"
] | def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False):
"""
:arg tree: a treebuilder class controlling the type of tree that will be
returned. Built in treebuilders can be accessed through
html5lib.treebuilders.getTreeBuilder(treeType)
:arg strict: raise an exception when a parse error is encountered
:arg namespaceHTMLElements: whether or not to namespace HTML elements
:arg debug: whether or not to enable debug mode which logs things
Example:
>>> from html5lib.html5parser import HTMLParser
>>> parser = HTMLParser() # generates parser with etree builder
>>> parser = HTMLParser('lxml', strict=True) # generates parser with lxml builder which is strict
"""
# Raise an exception on the first error encountered
self.strict = strict
if tree is None:
tree = treebuilders.getTreeBuilder("etree")
self.tree = tree(namespaceHTMLElements)
self.errors = []
self.phases = {name: cls(self, self.tree) for name, cls in
getPhases(debug).items()} | [
"def",
"__init__",
"(",
"self",
",",
"tree",
"=",
"None",
",",
"strict",
"=",
"False",
",",
"namespaceHTMLElements",
"=",
"True",
",",
"debug",
"=",
"False",
")",
":",
"# Raise an exception on the first error encountered",
"self",
".",
"strict",
"=",
"strict",
"if",
"tree",
"is",
"None",
":",
"tree",
"=",
"treebuilders",
".",
"getTreeBuilder",
"(",
"\"etree\"",
")",
"self",
".",
"tree",
"=",
"tree",
"(",
"namespaceHTMLElements",
")",
"self",
".",
"errors",
"=",
"[",
"]",
"self",
".",
"phases",
"=",
"{",
"name",
":",
"cls",
"(",
"self",
",",
"self",
".",
"tree",
")",
"for",
"name",
",",
"cls",
"in",
"getPhases",
"(",
"debug",
")",
".",
"items",
"(",
")",
"}"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/pip/_vendor/html5lib/html5parser.py#L93-L122 | ||
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/distributions/python/ops/bijectors/square.py | python | Square.__init__ | (self, validate_args=False, name="square") | Instantiates the `Square` bijector.
Args:
validate_args: Python `bool` indicating whether arguments should be
checked for correctness.
name: Python `str` name given to ops managed by this object. | Instantiates the `Square` bijector. | [
"Instantiates",
"the",
"Square",
"bijector",
"."
] | def __init__(self, validate_args=False, name="square"):
"""Instantiates the `Square` bijector.
Args:
validate_args: Python `bool` indicating whether arguments should be
checked for correctness.
name: Python `str` name given to ops managed by this object.
"""
self._name = name
super(Square, self).__init__(
forward_min_event_ndims=0,
validate_args=validate_args,
name=name) | [
"def",
"__init__",
"(",
"self",
",",
"validate_args",
"=",
"False",
",",
"name",
"=",
"\"square\"",
")",
":",
"self",
".",
"_name",
"=",
"name",
"super",
"(",
"Square",
",",
"self",
")",
".",
"__init__",
"(",
"forward_min_event_ndims",
"=",
"0",
",",
"validate_args",
"=",
"validate_args",
",",
"name",
"=",
"name",
")"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/distributions/python/ops/bijectors/square.py#L61-L73 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/richtext.py | python | RichTextBuffer_SetBulletProportion | (*args, **kwargs) | return _richtext.RichTextBuffer_SetBulletProportion(*args, **kwargs) | RichTextBuffer_SetBulletProportion(float prop) | RichTextBuffer_SetBulletProportion(float prop) | [
"RichTextBuffer_SetBulletProportion",
"(",
"float",
"prop",
")"
] | def RichTextBuffer_SetBulletProportion(*args, **kwargs):
"""RichTextBuffer_SetBulletProportion(float prop)"""
return _richtext.RichTextBuffer_SetBulletProportion(*args, **kwargs) | [
"def",
"RichTextBuffer_SetBulletProportion",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_richtext",
".",
"RichTextBuffer_SetBulletProportion",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/richtext.py#L2725-L2727 | |
rodeofx/OpenWalter | 6116fbe3f04f1146c854afbfbdbe944feaee647e | walter/maya/scripts/walter.py | python | Walter.remove | (self, origin, path) | return self.details.remove(origin, path) | Remove the expression in all the render layers.
:Example:
.. code:: python
from walter import Walter
# Remove the expression
Walter().remove("walterStandinShape1", "/*/*")
:param str origin: The stand-in object.
:param str path: The given expression. | Remove the expression in all the render layers. | [
"Remove",
"the",
"expression",
"in",
"all",
"the",
"render",
"layers",
"."
] | def remove(self, origin, path):
"""
Remove the expression in all the render layers.
:Example:
.. code:: python
from walter import Walter
# Remove the expression
Walter().remove("walterStandinShape1", "/*/*")
:param str origin: The stand-in object.
:param str path: The given expression.
"""
return self.details.remove(origin, path) | [
"def",
"remove",
"(",
"self",
",",
"origin",
",",
"path",
")",
":",
"return",
"self",
".",
"details",
".",
"remove",
"(",
"origin",
",",
"path",
")"
] | https://github.com/rodeofx/OpenWalter/blob/6116fbe3f04f1146c854afbfbdbe944feaee647e/walter/maya/scripts/walter.py#L291-L306 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scikit-learn/py3/sklearn/metrics/pairwise.py | python | polynomial_kernel | (X, Y=None, degree=3, gamma=None, coef0=1) | return K | Compute the polynomial kernel between X and Y::
K(X, Y) = (gamma <X, Y> + coef0)^degree
Read more in the :ref:`User Guide <polynomial_kernel>`.
Parameters
----------
X : ndarray of shape (n_samples_1, n_features)
Y : ndarray of shape (n_samples_2, n_features)
degree : int, default 3
gamma : float, default None
if None, defaults to 1.0 / n_features
coef0 : float, default 1
Returns
-------
Gram matrix : array of shape (n_samples_1, n_samples_2) | Compute the polynomial kernel between X and Y:: | [
"Compute",
"the",
"polynomial",
"kernel",
"between",
"X",
"and",
"Y",
"::"
] | def polynomial_kernel(X, Y=None, degree=3, gamma=None, coef0=1):
"""
Compute the polynomial kernel between X and Y::
K(X, Y) = (gamma <X, Y> + coef0)^degree
Read more in the :ref:`User Guide <polynomial_kernel>`.
Parameters
----------
X : ndarray of shape (n_samples_1, n_features)
Y : ndarray of shape (n_samples_2, n_features)
degree : int, default 3
gamma : float, default None
if None, defaults to 1.0 / n_features
coef0 : float, default 1
Returns
-------
Gram matrix : array of shape (n_samples_1, n_samples_2)
"""
X, Y = check_pairwise_arrays(X, Y)
if gamma is None:
gamma = 1.0 / X.shape[1]
K = safe_sparse_dot(X, Y.T, dense_output=True)
K *= gamma
K += coef0
K **= degree
return K | [
"def",
"polynomial_kernel",
"(",
"X",
",",
"Y",
"=",
"None",
",",
"degree",
"=",
"3",
",",
"gamma",
"=",
"None",
",",
"coef0",
"=",
"1",
")",
":",
"X",
",",
"Y",
"=",
"check_pairwise_arrays",
"(",
"X",
",",
"Y",
")",
"if",
"gamma",
"is",
"None",
":",
"gamma",
"=",
"1.0",
"/",
"X",
".",
"shape",
"[",
"1",
"]",
"K",
"=",
"safe_sparse_dot",
"(",
"X",
",",
"Y",
".",
"T",
",",
"dense_output",
"=",
"True",
")",
"K",
"*=",
"gamma",
"K",
"+=",
"coef0",
"K",
"**=",
"degree",
"return",
"K"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py3/sklearn/metrics/pairwise.py#L995-L1028 | |
OpenChemistry/tomviz | 0a903679318f191cb7dd3eb5ff5bc3a7d3320d9a | tomviz/python/tomviz/operators.py | python | Operator.transform_scalars | (self, data) | This method should be overridden by subclasses to implement the
operations the operator should perform. | This method should be overridden by subclasses to implement the
operations the operator should perform. | [
"This",
"method",
"should",
"be",
"overridden",
"by",
"subclasses",
"to",
"implement",
"the",
"operations",
"the",
"operator",
"should",
"perform",
"."
] | def transform_scalars(self, data):
"""
This method should be overridden by subclasses to implement the
operations the operator should perform.
"""
raise NotImplementedError('Must be implemented by subclass') | [
"def",
"transform_scalars",
"(",
"self",
",",
"data",
")",
":",
"raise",
"NotImplementedError",
"(",
"'Must be implemented by subclass'",
")"
] | https://github.com/OpenChemistry/tomviz/blob/0a903679318f191cb7dd3eb5ff5bc3a7d3320d9a/tomviz/python/tomviz/operators.py#L82-L87 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/AWSPythonSDK/1.5.8/docutils/nodes.py | python | document.asdom | (self, dom=None) | return domroot | Return a DOM representation of this document. | Return a DOM representation of this document. | [
"Return",
"a",
"DOM",
"representation",
"of",
"this",
"document",
"."
] | def asdom(self, dom=None):
"""Return a DOM representation of this document."""
if dom is None:
import xml.dom.minidom as dom
domroot = dom.Document()
domroot.appendChild(self._dom_node(domroot))
return domroot | [
"def",
"asdom",
"(",
"self",
",",
"dom",
"=",
"None",
")",
":",
"if",
"dom",
"is",
"None",
":",
"import",
"xml",
".",
"dom",
".",
"minidom",
"as",
"dom",
"domroot",
"=",
"dom",
".",
"Document",
"(",
")",
"domroot",
".",
"appendChild",
"(",
"self",
".",
"_dom_node",
"(",
"domroot",
")",
")",
"return",
"domroot"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/docutils/nodes.py#L1267-L1273 | |
hughperkins/tf-coriander | 970d3df6c11400ad68405f22b0c42a52374e94ca | tensorflow/python/training/coordinator.py | python | Coordinator.clear_stop | (self) | Clears the stop flag.
After this is called, calls to `should_stop()` will return `False`. | Clears the stop flag. | [
"Clears",
"the",
"stop",
"flag",
"."
] | def clear_stop(self):
"""Clears the stop flag.
After this is called, calls to `should_stop()` will return `False`.
"""
with self._lock:
self._joined = False
self._exc_info_to_raise = None
if self._stop_event.is_set():
self._stop_event.clear() | [
"def",
"clear_stop",
"(",
"self",
")",
":",
"with",
"self",
".",
"_lock",
":",
"self",
".",
"_joined",
"=",
"False",
"self",
".",
"_exc_info_to_raise",
"=",
"None",
"if",
"self",
".",
"_stop_event",
".",
"is_set",
"(",
")",
":",
"self",
".",
"_stop_event",
".",
"clear",
"(",
")"
] | https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/python/training/coordinator.py#L244-L253 | ||
kamyu104/LeetCode-Solutions | 77605708a927ea3b85aee5a479db733938c7c211 | Python/maximal-network-rank.py | python | Solution3.maximalNetworkRank | (self, n, roads) | return result | :type n: int
:type roads: List[List[int]]
:rtype: int | :type n: int
:type roads: List[List[int]]
:rtype: int | [
":",
"type",
"n",
":",
"int",
":",
"type",
"roads",
":",
"List",
"[",
"List",
"[",
"int",
"]]",
":",
"rtype",
":",
"int"
] | def maximalNetworkRank(self, n, roads):
"""
:type n: int
:type roads: List[List[int]]
:rtype: int
"""
degree = [0]*n
adj = collections.defaultdict(set)
for a, b in roads:
degree[a] += 1
degree[b] += 1
adj[a].add(b)
adj[b].add(a)
result = 0
for i in xrange(n-1):
for j in xrange(i+1, n):
result = max(result, degree[i]+degree[j]-int(i in adj and j in adj[i]))
return result | [
"def",
"maximalNetworkRank",
"(",
"self",
",",
"n",
",",
"roads",
")",
":",
"degree",
"=",
"[",
"0",
"]",
"*",
"n",
"adj",
"=",
"collections",
".",
"defaultdict",
"(",
"set",
")",
"for",
"a",
",",
"b",
"in",
"roads",
":",
"degree",
"[",
"a",
"]",
"+=",
"1",
"degree",
"[",
"b",
"]",
"+=",
"1",
"adj",
"[",
"a",
"]",
".",
"add",
"(",
"b",
")",
"adj",
"[",
"b",
"]",
".",
"add",
"(",
"a",
")",
"result",
"=",
"0",
"for",
"i",
"in",
"xrange",
"(",
"n",
"-",
"1",
")",
":",
"for",
"j",
"in",
"xrange",
"(",
"i",
"+",
"1",
",",
"n",
")",
":",
"result",
"=",
"max",
"(",
"result",
",",
"degree",
"[",
"i",
"]",
"+",
"degree",
"[",
"j",
"]",
"-",
"int",
"(",
"i",
"in",
"adj",
"and",
"j",
"in",
"adj",
"[",
"i",
"]",
")",
")",
"return",
"result"
] | https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/maximal-network-rank.py#L94-L111 | |
yifita/3PU | 9ca4c3dfe4e3ead08c72e98a62e4cf181d5c70e0 | code/curriculumn_record_provider.py | python | rotate_point_cloud_and_gt | (batch_data, batch_gt=None) | return batch_data, batch_gt | Randomly rotate the point clouds to augument the dataset
rotation is per shape based along up direction
Input:
BxNx3 array, original batch of point clouds
Return:
BxNx3 array, rotated batch of point clouds | Randomly rotate the point clouds to augument the dataset
rotation is per shape based along up direction
Input:
BxNx3 array, original batch of point clouds
Return:
BxNx3 array, rotated batch of point clouds | [
"Randomly",
"rotate",
"the",
"point",
"clouds",
"to",
"augument",
"the",
"dataset",
"rotation",
"is",
"per",
"shape",
"based",
"along",
"up",
"direction",
"Input",
":",
"BxNx3",
"array",
"original",
"batch",
"of",
"point",
"clouds",
"Return",
":",
"BxNx3",
"array",
"rotated",
"batch",
"of",
"point",
"clouds"
] | def rotate_point_cloud_and_gt(batch_data, batch_gt=None):
""" Randomly rotate the point clouds to augument the dataset
rotation is per shape based along up direction
Input:
BxNx3 array, original batch of point clouds
Return:
BxNx3 array, rotated batch of point clouds
"""
batch_size, num_point, num_channels = batch_data.get_shape().as_list()
angles = tf.random_uniform((batch_size, 3), dtype=tf.float32) * 2 * np.pi
cos_x, cos_y, cos_z = tf.split(tf.cos(angles), 3, axis=-1) # 3*[B, 1]
sin_x, sin_y, sin_z = tf.split(tf.sin(angles), 3, axis=-1) # 3*[B, 1]
one = tf.ones_like(cos_x, dtype=tf.float32)
zero = tf.zeros_like(cos_x, dtype=tf.float32)
# [B, 3, 3]
Rx = tf.stack(
[tf.concat([one, zero, zero], axis=1),
tf.concat([zero, cos_x, sin_x], axis=1),
tf.concat([zero, -sin_x, cos_x], axis=1)], axis=1)
Ry = tf.stack(
[tf.concat([cos_y, zero, -sin_y], axis=1),
tf.concat([zero, one, zero], axis=1),
tf.concat([sin_y, zero, cos_y], axis=1)], axis=1)
Rz = tf.stack(
[tf.concat([cos_z, sin_z, zero], axis=1),
tf.concat([-sin_z, cos_z, zero], axis=1),
tf.concat([zero, zero, one], axis=1)], axis=1)
if is_2D:
rotation_matrix = Rz
else:
rotation_matrix = tf.matmul(Rz, tf.matmul(Ry, Rx))
if num_channels > 3:
batch_data = tf.concat(
[tf.matmul(batch_data[:, :, :3], rotation_matrix),
tf.matmul(batch_data[:, :, 3:], rotation_matrix),
batch_data[:, :, 6:]], axis=-1)
else:
batch_data = tf.matmul(batch_data, rotation_matrix)
if batch_gt is not None:
if num_channels > 3:
batch_gt = tf.concat(
[tf.matmul(batch_gt[:, :, :3], rotation_matrix),
tf.matmul(batch_gt[:, :, 3:], rotation_matrix),
batch_gt[:, :, 6:]], axis=-1)
else:
batch_gt = tf.matmul(batch_gt, rotation_matrix)
return batch_data, batch_gt | [
"def",
"rotate_point_cloud_and_gt",
"(",
"batch_data",
",",
"batch_gt",
"=",
"None",
")",
":",
"batch_size",
",",
"num_point",
",",
"num_channels",
"=",
"batch_data",
".",
"get_shape",
"(",
")",
".",
"as_list",
"(",
")",
"angles",
"=",
"tf",
".",
"random_uniform",
"(",
"(",
"batch_size",
",",
"3",
")",
",",
"dtype",
"=",
"tf",
".",
"float32",
")",
"*",
"2",
"*",
"np",
".",
"pi",
"cos_x",
",",
"cos_y",
",",
"cos_z",
"=",
"tf",
".",
"split",
"(",
"tf",
".",
"cos",
"(",
"angles",
")",
",",
"3",
",",
"axis",
"=",
"-",
"1",
")",
"# 3*[B, 1]",
"sin_x",
",",
"sin_y",
",",
"sin_z",
"=",
"tf",
".",
"split",
"(",
"tf",
".",
"sin",
"(",
"angles",
")",
",",
"3",
",",
"axis",
"=",
"-",
"1",
")",
"# 3*[B, 1]",
"one",
"=",
"tf",
".",
"ones_like",
"(",
"cos_x",
",",
"dtype",
"=",
"tf",
".",
"float32",
")",
"zero",
"=",
"tf",
".",
"zeros_like",
"(",
"cos_x",
",",
"dtype",
"=",
"tf",
".",
"float32",
")",
"# [B, 3, 3]",
"Rx",
"=",
"tf",
".",
"stack",
"(",
"[",
"tf",
".",
"concat",
"(",
"[",
"one",
",",
"zero",
",",
"zero",
"]",
",",
"axis",
"=",
"1",
")",
",",
"tf",
".",
"concat",
"(",
"[",
"zero",
",",
"cos_x",
",",
"sin_x",
"]",
",",
"axis",
"=",
"1",
")",
",",
"tf",
".",
"concat",
"(",
"[",
"zero",
",",
"-",
"sin_x",
",",
"cos_x",
"]",
",",
"axis",
"=",
"1",
")",
"]",
",",
"axis",
"=",
"1",
")",
"Ry",
"=",
"tf",
".",
"stack",
"(",
"[",
"tf",
".",
"concat",
"(",
"[",
"cos_y",
",",
"zero",
",",
"-",
"sin_y",
"]",
",",
"axis",
"=",
"1",
")",
",",
"tf",
".",
"concat",
"(",
"[",
"zero",
",",
"one",
",",
"zero",
"]",
",",
"axis",
"=",
"1",
")",
",",
"tf",
".",
"concat",
"(",
"[",
"sin_y",
",",
"zero",
",",
"cos_y",
"]",
",",
"axis",
"=",
"1",
")",
"]",
",",
"axis",
"=",
"1",
")",
"Rz",
"=",
"tf",
".",
"stack",
"(",
"[",
"tf",
".",
"concat",
"(",
"[",
"cos_z",
",",
"sin_z",
",",
"zero",
"]",
",",
"axis",
"=",
"1",
")",
",",
"tf",
".",
"concat",
"(",
"[",
"-",
"sin_z",
",",
"cos_z",
",",
"zero",
"]",
",",
"axis",
"=",
"1",
")",
",",
"tf",
".",
"concat",
"(",
"[",
"zero",
",",
"zero",
",",
"one",
"]",
",",
"axis",
"=",
"1",
")",
"]",
",",
"axis",
"=",
"1",
")",
"if",
"is_2D",
":",
"rotation_matrix",
"=",
"Rz",
"else",
":",
"rotation_matrix",
"=",
"tf",
".",
"matmul",
"(",
"Rz",
",",
"tf",
".",
"matmul",
"(",
"Ry",
",",
"Rx",
")",
")",
"if",
"num_channels",
">",
"3",
":",
"batch_data",
"=",
"tf",
".",
"concat",
"(",
"[",
"tf",
".",
"matmul",
"(",
"batch_data",
"[",
":",
",",
":",
",",
":",
"3",
"]",
",",
"rotation_matrix",
")",
",",
"tf",
".",
"matmul",
"(",
"batch_data",
"[",
":",
",",
":",
",",
"3",
":",
"]",
",",
"rotation_matrix",
")",
",",
"batch_data",
"[",
":",
",",
":",
",",
"6",
":",
"]",
"]",
",",
"axis",
"=",
"-",
"1",
")",
"else",
":",
"batch_data",
"=",
"tf",
".",
"matmul",
"(",
"batch_data",
",",
"rotation_matrix",
")",
"if",
"batch_gt",
"is",
"not",
"None",
":",
"if",
"num_channels",
">",
"3",
":",
"batch_gt",
"=",
"tf",
".",
"concat",
"(",
"[",
"tf",
".",
"matmul",
"(",
"batch_gt",
"[",
":",
",",
":",
",",
":",
"3",
"]",
",",
"rotation_matrix",
")",
",",
"tf",
".",
"matmul",
"(",
"batch_gt",
"[",
":",
",",
":",
",",
"3",
":",
"]",
",",
"rotation_matrix",
")",
",",
"batch_gt",
"[",
":",
",",
":",
",",
"6",
":",
"]",
"]",
",",
"axis",
"=",
"-",
"1",
")",
"else",
":",
"batch_gt",
"=",
"tf",
".",
"matmul",
"(",
"batch_gt",
",",
"rotation_matrix",
")",
"return",
"batch_data",
",",
"batch_gt"
] | https://github.com/yifita/3PU/blob/9ca4c3dfe4e3ead08c72e98a62e4cf181d5c70e0/code/curriculumn_record_provider.py#L94-L146 | |
weolar/miniblink49 | 1c4678db0594a4abde23d3ebbcc7cd13c3170777 | third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/coverage/control.py | python | coverage.start | (self) | Start measuring code coverage. | Start measuring code coverage. | [
"Start",
"measuring",
"code",
"coverage",
"."
] | def start(self):
"""Start measuring code coverage."""
if self.run_suffix:
# Calling start() means we're running code, so use the run_suffix
# as the data_suffix when we eventually save the data.
self.data_suffix = self.run_suffix
if self.auto_data:
self.load()
# Save coverage data when Python exits.
if not self.atexit_registered:
atexit.register(self.save)
self.atexit_registered = True
# Create the matchers we need for _should_trace
if self.source or self.source_pkgs:
self.source_match = TreeMatcher(self.source)
else:
if self.cover_dir:
self.cover_match = TreeMatcher([self.cover_dir])
if self.pylib_dirs:
self.pylib_match = TreeMatcher(self.pylib_dirs)
if self.include:
self.include_match = FnmatchMatcher(self.include)
if self.omit:
self.omit_match = FnmatchMatcher(self.omit)
self._harvested = False
self.collector.start() | [
"def",
"start",
"(",
"self",
")",
":",
"if",
"self",
".",
"run_suffix",
":",
"# Calling start() means we're running code, so use the run_suffix",
"# as the data_suffix when we eventually save the data.",
"self",
".",
"data_suffix",
"=",
"self",
".",
"run_suffix",
"if",
"self",
".",
"auto_data",
":",
"self",
".",
"load",
"(",
")",
"# Save coverage data when Python exits.",
"if",
"not",
"self",
".",
"atexit_registered",
":",
"atexit",
".",
"register",
"(",
"self",
".",
"save",
")",
"self",
".",
"atexit_registered",
"=",
"True",
"# Create the matchers we need for _should_trace",
"if",
"self",
".",
"source",
"or",
"self",
".",
"source_pkgs",
":",
"self",
".",
"source_match",
"=",
"TreeMatcher",
"(",
"self",
".",
"source",
")",
"else",
":",
"if",
"self",
".",
"cover_dir",
":",
"self",
".",
"cover_match",
"=",
"TreeMatcher",
"(",
"[",
"self",
".",
"cover_dir",
"]",
")",
"if",
"self",
".",
"pylib_dirs",
":",
"self",
".",
"pylib_match",
"=",
"TreeMatcher",
"(",
"self",
".",
"pylib_dirs",
")",
"if",
"self",
".",
"include",
":",
"self",
".",
"include_match",
"=",
"FnmatchMatcher",
"(",
"self",
".",
"include",
")",
"if",
"self",
".",
"omit",
":",
"self",
".",
"omit_match",
"=",
"FnmatchMatcher",
"(",
"self",
".",
"omit",
")",
"self",
".",
"_harvested",
"=",
"False",
"self",
".",
"collector",
".",
"start",
"(",
")"
] | https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/coverage/control.py#L358-L385 | ||
CRYTEK/CRYENGINE | 232227c59a220cbbd311576f0fbeba7bb53b2a8c | Editor/Python/windows/Lib/site-packages/pip/_vendor/distlib/_backport/tarfile.py | python | ExFileObject.__iter__ | (self) | Get an iterator over the file's lines. | Get an iterator over the file's lines. | [
"Get",
"an",
"iterator",
"over",
"the",
"file",
"s",
"lines",
"."
] | def __iter__(self):
"""Get an iterator over the file's lines.
"""
while True:
line = self.readline()
if not line:
break
yield line | [
"def",
"__iter__",
"(",
"self",
")",
":",
"while",
"True",
":",
"line",
"=",
"self",
".",
"readline",
"(",
")",
"if",
"not",
"line",
":",
"break",
"yield",
"line"
] | https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Editor/Python/windows/Lib/site-packages/pip/_vendor/distlib/_backport/tarfile.py#L910-L917 | ||
BlzFans/wke | b0fa21158312e40c5fbd84682d643022b6c34a93 | cygwin/lib/python2.6/lib2to3/fixes/fix_urllib.py | python | FixUrllib.transform_dot | (self, node, results) | Transform for calls to module members in code. | Transform for calls to module members in code. | [
"Transform",
"for",
"calls",
"to",
"module",
"members",
"in",
"code",
"."
] | def transform_dot(self, node, results):
"""Transform for calls to module members in code."""
module_dot = results.get('bare_with_attr')
member = results.get('member')
new_name = None
if isinstance(member, list):
member = member[0]
for change in MAPPING[module_dot.value]:
if member.value in change[1]:
new_name = change[0]
break
if new_name:
module_dot.replace(Name(new_name,
prefix=module_dot.prefix))
else:
self.cannot_convert(node, 'This is an invalid module element') | [
"def",
"transform_dot",
"(",
"self",
",",
"node",
",",
"results",
")",
":",
"module_dot",
"=",
"results",
".",
"get",
"(",
"'bare_with_attr'",
")",
"member",
"=",
"results",
".",
"get",
"(",
"'member'",
")",
"new_name",
"=",
"None",
"if",
"isinstance",
"(",
"member",
",",
"list",
")",
":",
"member",
"=",
"member",
"[",
"0",
"]",
"for",
"change",
"in",
"MAPPING",
"[",
"module_dot",
".",
"value",
"]",
":",
"if",
"member",
".",
"value",
"in",
"change",
"[",
"1",
"]",
":",
"new_name",
"=",
"change",
"[",
"0",
"]",
"break",
"if",
"new_name",
":",
"module_dot",
".",
"replace",
"(",
"Name",
"(",
"new_name",
",",
"prefix",
"=",
"module_dot",
".",
"prefix",
")",
")",
"else",
":",
"self",
".",
"cannot_convert",
"(",
"node",
",",
"'This is an invalid module element'",
")"
] | https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/lib2to3/fixes/fix_urllib.py#L152-L167 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/propgrid.py | python | FlagsProperty.GetItemCount | (*args, **kwargs) | return _propgrid.FlagsProperty_GetItemCount(*args, **kwargs) | GetItemCount(self) -> size_t | GetItemCount(self) -> size_t | [
"GetItemCount",
"(",
"self",
")",
"-",
">",
"size_t"
] | def GetItemCount(*args, **kwargs):
"""GetItemCount(self) -> size_t"""
return _propgrid.FlagsProperty_GetItemCount(*args, **kwargs) | [
"def",
"GetItemCount",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_propgrid",
".",
"FlagsProperty_GetItemCount",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/propgrid.py#L3046-L3048 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/tools/Editra/plugins/filebrowser/filebrowser/browser.py | python | FBMimeMgr.IsKnownBinType | (self, path) | return True | Is a known binary file type
@param path: file path / name | Is a known binary file type
@param path: file path / name | [
"Is",
"a",
"known",
"binary",
"file",
"type",
"@param",
"path",
":",
"file",
"path",
"/",
"name"
] | def IsKnownBinType(self, path):
"""Is a known binary file type
@param path: file path / name
"""
ext = ebmlib.GetFileExtension(path)
if ext in ('exe', 'dll', 'so'): # TODO better mapping
self._ftype = FBMimeMgr.IMG_BIN
else:
return False
return True | [
"def",
"IsKnownBinType",
"(",
"self",
",",
"path",
")",
":",
"ext",
"=",
"ebmlib",
".",
"GetFileExtension",
"(",
"path",
")",
"if",
"ext",
"in",
"(",
"'exe'",
",",
"'dll'",
",",
"'so'",
")",
":",
"# TODO better mapping",
"self",
".",
"_ftype",
"=",
"FBMimeMgr",
".",
"IMG_BIN",
"else",
":",
"return",
"False",
"return",
"True"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/plugins/filebrowser/filebrowser/browser.py#L423-L433 | |
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/ops/control_flow_ops.py | python | ControlFlowContext.GetWhileContext | (self) | return None | Return the while context containing this context. | Return the while context containing this context. | [
"Return",
"the",
"while",
"context",
"containing",
"this",
"context",
"."
] | def GetWhileContext(self):
"""Return the while context containing this context."""
if self._outer_context:
return self._outer_context.GetWhileContext()
return None | [
"def",
"GetWhileContext",
"(",
"self",
")",
":",
"if",
"self",
".",
"_outer_context",
":",
"return",
"self",
".",
"_outer_context",
".",
"GetWhileContext",
"(",
")",
"return",
"None"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/control_flow_ops.py#L774-L778 | |
smartdevicelink/sdl_core | 68f082169e0a40fccd9eb0db3c83911c28870f07 | tools/InterfaceGenerator/generator/generators/SmartFactoryBase.py | python | CodeGenerator._gen_function_impl | (self, function, namespace, class_name) | return self._function_impl_template.substitute(
namespace=namespace,
class_name=class_name,
function_id=function.function_id.primary_name,
message_type=function.message_type.primary_name,
code=self._indent_code(
self._function_impl_code_tempate.substitute(
schema_loc_decl=self._gen_schema_loc_decls(
function.params.values(), processed_enums),
schema_items_decl=self._gen_schema_items_decls(
function.params.values()),
schema_item_fill=self._gen_schema_items_fill(
function.params.values(), function.since, function.until, function.deprecated, function.removed),
schema_params_fill=self._gen_schema_params_fill(
function.message_type.name),
function_history_fill=self._gen_function_history_decl(
function),
function_member=self._gen_function_member(
function)),
1)) | Generate function implementation for source file.
Generates implementation code of method that provides schema for
function. This code should be used in the source file.
Keyword arguments:
function -- function to generate method for.
namespace -- name of destination namespace.
class_name -- name of the parent class.
Returns:
String with function implementation source code. | Generate function implementation for source file. | [
"Generate",
"function",
"implementation",
"for",
"source",
"file",
"."
] | def _gen_function_impl(self, function, namespace, class_name):
"""Generate function implementation for source file.
Generates implementation code of method that provides schema for
function. This code should be used in the source file.
Keyword arguments:
function -- function to generate method for.
namespace -- name of destination namespace.
class_name -- name of the parent class.
Returns:
String with function implementation source code.
"""
processed_enums = []
return self._function_impl_template.substitute(
namespace=namespace,
class_name=class_name,
function_id=function.function_id.primary_name,
message_type=function.message_type.primary_name,
code=self._indent_code(
self._function_impl_code_tempate.substitute(
schema_loc_decl=self._gen_schema_loc_decls(
function.params.values(), processed_enums),
schema_items_decl=self._gen_schema_items_decls(
function.params.values()),
schema_item_fill=self._gen_schema_items_fill(
function.params.values(), function.since, function.until, function.deprecated, function.removed),
schema_params_fill=self._gen_schema_params_fill(
function.message_type.name),
function_history_fill=self._gen_function_history_decl(
function),
function_member=self._gen_function_member(
function)),
1)) | [
"def",
"_gen_function_impl",
"(",
"self",
",",
"function",
",",
"namespace",
",",
"class_name",
")",
":",
"processed_enums",
"=",
"[",
"]",
"return",
"self",
".",
"_function_impl_template",
".",
"substitute",
"(",
"namespace",
"=",
"namespace",
",",
"class_name",
"=",
"class_name",
",",
"function_id",
"=",
"function",
".",
"function_id",
".",
"primary_name",
",",
"message_type",
"=",
"function",
".",
"message_type",
".",
"primary_name",
",",
"code",
"=",
"self",
".",
"_indent_code",
"(",
"self",
".",
"_function_impl_code_tempate",
".",
"substitute",
"(",
"schema_loc_decl",
"=",
"self",
".",
"_gen_schema_loc_decls",
"(",
"function",
".",
"params",
".",
"values",
"(",
")",
",",
"processed_enums",
")",
",",
"schema_items_decl",
"=",
"self",
".",
"_gen_schema_items_decls",
"(",
"function",
".",
"params",
".",
"values",
"(",
")",
")",
",",
"schema_item_fill",
"=",
"self",
".",
"_gen_schema_items_fill",
"(",
"function",
".",
"params",
".",
"values",
"(",
")",
",",
"function",
".",
"since",
",",
"function",
".",
"until",
",",
"function",
".",
"deprecated",
",",
"function",
".",
"removed",
")",
",",
"schema_params_fill",
"=",
"self",
".",
"_gen_schema_params_fill",
"(",
"function",
".",
"message_type",
".",
"name",
")",
",",
"function_history_fill",
"=",
"self",
".",
"_gen_function_history_decl",
"(",
"function",
")",
",",
"function_member",
"=",
"self",
".",
"_gen_function_member",
"(",
"function",
")",
")",
",",
"1",
")",
")"
] | https://github.com/smartdevicelink/sdl_core/blob/68f082169e0a40fccd9eb0db3c83911c28870f07/tools/InterfaceGenerator/generator/generators/SmartFactoryBase.py#L1240-L1276 | |
apache/incubator-mxnet | f03fb23f1d103fec9541b5ae59ee06b1734a51d9 | python/mxnet/symbol/contrib.py | python | foreach | (body, data, init_states, name="foreach") | return (outs, states) | Run a for loop with user-defined computation over Symbols on dimension 0.
This operator simulates a for loop and body has the computation for an iteration
of the for loop. It runs the computation in body on each slice from the input
NDArrays.
body takes two arguments as input and outputs a tuple of two elements,
as illustrated below:
out, states = body(data1, states)
data1 can be either a symbol or a list of symbols. If data is a symbol,
data1 is a symbol. Otherwise, data1 is a list of symbols and has the same
size as data. states is a list of symbols and have the same size as init_states.
Similarly, out can be either a symbol or a list of symbols, which are concatenated
as the first output of foreach; states from the last execution of body
are the second output of foreach.
foreach can output only output data or states. If a user only wants states,
the body function can return ([], states). Similarly, if a user only wants
output data, the body function can return (out, []).
The computation done by this operator is equivalent to the pseudo code below
when the input data is NDArray::
states = init_states
outs = []
for i in data.shape[0]:
s = data[i]
out, states = body(s, states)
outs.append(out)
outs = stack(*outs)
Parameters
----------
body : a Python function.
Define computation in an iteration.
data: a symbol or a list of symbols.
The input data.
init_states: a Symbol or nested lists of symbols.
The initial values of the loop states.
name: string.
The name of the operator.
Returns
-------
outputs: a Symbol or nested lists of Symbols.
The output data concatenated from the output of all iterations.
states: a Symbol or nested lists of Symbols.
The loop states in the last iteration.
Examples
--------
>>> step = lambda data, states: (data + states[0], [states[0] * 2])
>>> data = mx.sym.var('data')
>>> states = [mx.sym.var('state')]
>>> outs, states = mx.sym.contrib.foreach(step, data, states) | Run a for loop with user-defined computation over Symbols on dimension 0. | [
"Run",
"a",
"for",
"loop",
"with",
"user",
"-",
"defined",
"computation",
"over",
"Symbols",
"on",
"dimension",
"0",
"."
] | def foreach(body, data, init_states, name="foreach"):
"""Run a for loop with user-defined computation over Symbols on dimension 0.
This operator simulates a for loop and body has the computation for an iteration
of the for loop. It runs the computation in body on each slice from the input
NDArrays.
body takes two arguments as input and outputs a tuple of two elements,
as illustrated below:
out, states = body(data1, states)
data1 can be either a symbol or a list of symbols. If data is a symbol,
data1 is a symbol. Otherwise, data1 is a list of symbols and has the same
size as data. states is a list of symbols and have the same size as init_states.
Similarly, out can be either a symbol or a list of symbols, which are concatenated
as the first output of foreach; states from the last execution of body
are the second output of foreach.
foreach can output only output data or states. If a user only wants states,
the body function can return ([], states). Similarly, if a user only wants
output data, the body function can return (out, []).
The computation done by this operator is equivalent to the pseudo code below
when the input data is NDArray::
states = init_states
outs = []
for i in data.shape[0]:
s = data[i]
out, states = body(s, states)
outs.append(out)
outs = stack(*outs)
Parameters
----------
body : a Python function.
Define computation in an iteration.
data: a symbol or a list of symbols.
The input data.
init_states: a Symbol or nested lists of symbols.
The initial values of the loop states.
name: string.
The name of the operator.
Returns
-------
outputs: a Symbol or nested lists of Symbols.
The output data concatenated from the output of all iterations.
states: a Symbol or nested lists of Symbols.
The loop states in the last iteration.
Examples
--------
>>> step = lambda data, states: (data + states[0], [states[0] * 2])
>>> data = mx.sym.var('data')
>>> states = [mx.sym.var('state')]
>>> outs, states = mx.sym.contrib.foreach(step, data, states)
"""
flatten_data, data_fmt = _flatten(data, "foreach input")
_check_data(flatten_data, symbol.Symbol,
"data should be a symbol or a nested list of symbols")
init_flatten_states, init_state_fmt = _flatten(init_states, "foreach states")
_check_data(init_flatten_states, symbol.Symbol,
"init_states should be a symbol or a nested list of symbols")
# If the input python function references to the symbols outside
# the python function, we need to prune the computation graph constructed from
# the function. One way of doing it is to mark the nodes in the computation graph
# with AttrScope and prune the nodes without the special attribute.
name = _get_unique_subgraph_name(name)
with AttrScope(__subgraph_name__=name):
in_eles = [symbol.var(_get_sym_uniq_name(sym)) for sym in flatten_data]
in_eles, _ = _regroup(in_eles, data_fmt)
states = [symbol.var(_get_sym_uniq_name(s)) for s in init_flatten_states]
states, _ = _regroup(states, copy.deepcopy(init_state_fmt))
sym_out, sym_states = body(in_eles, states)
sym_out, out_fmt = _flatten(sym_out, "foreach output")
sym_states, state_fmt = _flatten(sym_states, "foreach loop_vars")
assert init_state_fmt == state_fmt, "The input and output loop_vars have different format"
_check_data(sym_out, symbol.Symbol,
"the output should be an NDArray or a nested list of NDArrays")
_check_data(sym_states, symbol.Symbol,
"the output states should be an NDArray or a nested list of NDArrays")
num_out_data = len(sym_out)
num_states = len(sym_states)
num_outputs = num_out_data + num_states
g = _construct_subgraph(sym_out, sym_states, name)
input_syms = _get_graph_inputs(g)
cut_syms = _cut_subgraph(g)
input_syms = _get_graph_inputs(g)
# Here we need to find out how the input symbols are ordered as well as
# where the loop states are located in the list of inputs.
# This dict contains the symbols of the subgraph.
input_syms = {sym.name:sym for sym in input_syms}
gin_names = input_syms.keys()
# This array contains the symbols for the inputs of foreach.
# They are ordered according to the inputs of the subgraph.
state_names = [_get_sym_uniq_name(sym) for sym in init_flatten_states]
data_names = [_get_sym_uniq_name(sym) for sym in flatten_data]
cut_var_map = {sym.list_outputs()[0]:sym for sym in cut_syms}
cut_var_names = cut_var_map.keys()
subg_input_names = g.list_inputs()
assert len(set(subg_input_names)) == len(subg_input_names), \
"The inputs of the subgraph don't have unique names: " + str(subg_input_names)
# ordered_ins contains input symbols in the following order:
# data_syms, state_syms, followed by cut_vars and vars in the closure.
ordered_ins = [x for x in flatten_data]
# this defines the location of data_syms in the list of subgraph inputs
in_data_locs = []
for dname in data_names:
# Some data may not be used.
if dname in subg_input_names:
in_data_locs.append(subg_input_names.index(dname))
else:
raise AssertionError("the data arrays have to be used in the loop body")
ordered_ins.extend(init_flatten_states)
# this defines the location of state_syms in the list of subgraph inputs.
in_state_locs = []
for sname in state_names:
# Some state may not be used.
if sname in subg_input_names:
in_state_locs.append(subg_input_names.index(sname))
else:
raise AssertionError("the state arrays have to be used in the loop body")
remain_locs = []
for in_name in subg_input_names:
assert in_name in gin_names, "The input variable %s can't be found in graph inputs: %s" \
% (in_name, str(gin_names))
if in_name in cut_var_names:
ordered_ins.append(cut_var_map[in_name])
remain_locs.append(subg_input_names.index(in_name))
elif in_name not in data_names and in_name not in state_names:
# The remaining inputs are the variable nodes created inside the UDF.
# The subgraph can't have nodes shared with the main graph. As such,
# we need to make a copy of these variable nodes.
assert in_name in gin_names
ordered_ins.append(copy.deepcopy(input_syms[in_name]))
remain_locs.append(subg_input_names.index(in_name))
ret = symbol._internal._foreach(g, *ordered_ins, num_outputs=num_outputs,
num_out_data=num_out_data, in_state_locs=in_state_locs,
in_data_locs=in_data_locs, remain_locs=remain_locs)
outs = []
for i in range(num_outputs - num_states):
outs.append(ret[i])
outs, _ = _regroup(outs, out_fmt)
states = []
for i in range(num_states):
states.append(ret[num_outputs - num_states + i])
states, _ = _regroup(states, state_fmt)
return (outs, states) | [
"def",
"foreach",
"(",
"body",
",",
"data",
",",
"init_states",
",",
"name",
"=",
"\"foreach\"",
")",
":",
"flatten_data",
",",
"data_fmt",
"=",
"_flatten",
"(",
"data",
",",
"\"foreach input\"",
")",
"_check_data",
"(",
"flatten_data",
",",
"symbol",
".",
"Symbol",
",",
"\"data should be a symbol or a nested list of symbols\"",
")",
"init_flatten_states",
",",
"init_state_fmt",
"=",
"_flatten",
"(",
"init_states",
",",
"\"foreach states\"",
")",
"_check_data",
"(",
"init_flatten_states",
",",
"symbol",
".",
"Symbol",
",",
"\"init_states should be a symbol or a nested list of symbols\"",
")",
"# If the input python function references to the symbols outside",
"# the python function, we need to prune the computation graph constructed from",
"# the function. One way of doing it is to mark the nodes in the computation graph",
"# with AttrScope and prune the nodes without the special attribute.",
"name",
"=",
"_get_unique_subgraph_name",
"(",
"name",
")",
"with",
"AttrScope",
"(",
"__subgraph_name__",
"=",
"name",
")",
":",
"in_eles",
"=",
"[",
"symbol",
".",
"var",
"(",
"_get_sym_uniq_name",
"(",
"sym",
")",
")",
"for",
"sym",
"in",
"flatten_data",
"]",
"in_eles",
",",
"_",
"=",
"_regroup",
"(",
"in_eles",
",",
"data_fmt",
")",
"states",
"=",
"[",
"symbol",
".",
"var",
"(",
"_get_sym_uniq_name",
"(",
"s",
")",
")",
"for",
"s",
"in",
"init_flatten_states",
"]",
"states",
",",
"_",
"=",
"_regroup",
"(",
"states",
",",
"copy",
".",
"deepcopy",
"(",
"init_state_fmt",
")",
")",
"sym_out",
",",
"sym_states",
"=",
"body",
"(",
"in_eles",
",",
"states",
")",
"sym_out",
",",
"out_fmt",
"=",
"_flatten",
"(",
"sym_out",
",",
"\"foreach output\"",
")",
"sym_states",
",",
"state_fmt",
"=",
"_flatten",
"(",
"sym_states",
",",
"\"foreach loop_vars\"",
")",
"assert",
"init_state_fmt",
"==",
"state_fmt",
",",
"\"The input and output loop_vars have different format\"",
"_check_data",
"(",
"sym_out",
",",
"symbol",
".",
"Symbol",
",",
"\"the output should be an NDArray or a nested list of NDArrays\"",
")",
"_check_data",
"(",
"sym_states",
",",
"symbol",
".",
"Symbol",
",",
"\"the output states should be an NDArray or a nested list of NDArrays\"",
")",
"num_out_data",
"=",
"len",
"(",
"sym_out",
")",
"num_states",
"=",
"len",
"(",
"sym_states",
")",
"num_outputs",
"=",
"num_out_data",
"+",
"num_states",
"g",
"=",
"_construct_subgraph",
"(",
"sym_out",
",",
"sym_states",
",",
"name",
")",
"input_syms",
"=",
"_get_graph_inputs",
"(",
"g",
")",
"cut_syms",
"=",
"_cut_subgraph",
"(",
"g",
")",
"input_syms",
"=",
"_get_graph_inputs",
"(",
"g",
")",
"# Here we need to find out how the input symbols are ordered as well as",
"# where the loop states are located in the list of inputs.",
"# This dict contains the symbols of the subgraph.",
"input_syms",
"=",
"{",
"sym",
".",
"name",
":",
"sym",
"for",
"sym",
"in",
"input_syms",
"}",
"gin_names",
"=",
"input_syms",
".",
"keys",
"(",
")",
"# This array contains the symbols for the inputs of foreach.",
"# They are ordered according to the inputs of the subgraph.",
"state_names",
"=",
"[",
"_get_sym_uniq_name",
"(",
"sym",
")",
"for",
"sym",
"in",
"init_flatten_states",
"]",
"data_names",
"=",
"[",
"_get_sym_uniq_name",
"(",
"sym",
")",
"for",
"sym",
"in",
"flatten_data",
"]",
"cut_var_map",
"=",
"{",
"sym",
".",
"list_outputs",
"(",
")",
"[",
"0",
"]",
":",
"sym",
"for",
"sym",
"in",
"cut_syms",
"}",
"cut_var_names",
"=",
"cut_var_map",
".",
"keys",
"(",
")",
"subg_input_names",
"=",
"g",
".",
"list_inputs",
"(",
")",
"assert",
"len",
"(",
"set",
"(",
"subg_input_names",
")",
")",
"==",
"len",
"(",
"subg_input_names",
")",
",",
"\"The inputs of the subgraph don't have unique names: \"",
"+",
"str",
"(",
"subg_input_names",
")",
"# ordered_ins contains input symbols in the following order:",
"# data_syms, state_syms, followed by cut_vars and vars in the closure.",
"ordered_ins",
"=",
"[",
"x",
"for",
"x",
"in",
"flatten_data",
"]",
"# this defines the location of data_syms in the list of subgraph inputs",
"in_data_locs",
"=",
"[",
"]",
"for",
"dname",
"in",
"data_names",
":",
"# Some data may not be used.",
"if",
"dname",
"in",
"subg_input_names",
":",
"in_data_locs",
".",
"append",
"(",
"subg_input_names",
".",
"index",
"(",
"dname",
")",
")",
"else",
":",
"raise",
"AssertionError",
"(",
"\"the data arrays have to be used in the loop body\"",
")",
"ordered_ins",
".",
"extend",
"(",
"init_flatten_states",
")",
"# this defines the location of state_syms in the list of subgraph inputs.",
"in_state_locs",
"=",
"[",
"]",
"for",
"sname",
"in",
"state_names",
":",
"# Some state may not be used.",
"if",
"sname",
"in",
"subg_input_names",
":",
"in_state_locs",
".",
"append",
"(",
"subg_input_names",
".",
"index",
"(",
"sname",
")",
")",
"else",
":",
"raise",
"AssertionError",
"(",
"\"the state arrays have to be used in the loop body\"",
")",
"remain_locs",
"=",
"[",
"]",
"for",
"in_name",
"in",
"subg_input_names",
":",
"assert",
"in_name",
"in",
"gin_names",
",",
"\"The input variable %s can't be found in graph inputs: %s\"",
"%",
"(",
"in_name",
",",
"str",
"(",
"gin_names",
")",
")",
"if",
"in_name",
"in",
"cut_var_names",
":",
"ordered_ins",
".",
"append",
"(",
"cut_var_map",
"[",
"in_name",
"]",
")",
"remain_locs",
".",
"append",
"(",
"subg_input_names",
".",
"index",
"(",
"in_name",
")",
")",
"elif",
"in_name",
"not",
"in",
"data_names",
"and",
"in_name",
"not",
"in",
"state_names",
":",
"# The remaining inputs are the variable nodes created inside the UDF.",
"# The subgraph can't have nodes shared with the main graph. As such,",
"# we need to make a copy of these variable nodes.",
"assert",
"in_name",
"in",
"gin_names",
"ordered_ins",
".",
"append",
"(",
"copy",
".",
"deepcopy",
"(",
"input_syms",
"[",
"in_name",
"]",
")",
")",
"remain_locs",
".",
"append",
"(",
"subg_input_names",
".",
"index",
"(",
"in_name",
")",
")",
"ret",
"=",
"symbol",
".",
"_internal",
".",
"_foreach",
"(",
"g",
",",
"*",
"ordered_ins",
",",
"num_outputs",
"=",
"num_outputs",
",",
"num_out_data",
"=",
"num_out_data",
",",
"in_state_locs",
"=",
"in_state_locs",
",",
"in_data_locs",
"=",
"in_data_locs",
",",
"remain_locs",
"=",
"remain_locs",
")",
"outs",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"num_outputs",
"-",
"num_states",
")",
":",
"outs",
".",
"append",
"(",
"ret",
"[",
"i",
"]",
")",
"outs",
",",
"_",
"=",
"_regroup",
"(",
"outs",
",",
"out_fmt",
")",
"states",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"num_states",
")",
":",
"states",
".",
"append",
"(",
"ret",
"[",
"num_outputs",
"-",
"num_states",
"+",
"i",
"]",
")",
"states",
",",
"_",
"=",
"_regroup",
"(",
"states",
",",
"state_fmt",
")",
"return",
"(",
"outs",
",",
"states",
")"
] | https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/symbol/contrib.py#L212-L373 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/importlib/_bootstrap_external.py | python | _path_split | (path) | return '', path | Replacement for os.path.split(). | Replacement for os.path.split(). | [
"Replacement",
"for",
"os",
".",
"path",
".",
"split",
"()",
"."
] | def _path_split(path):
"""Replacement for os.path.split()."""
if len(path_separators) == 1:
front, _, tail = path.rpartition(path_sep)
return front, tail
for x in reversed(path):
if x in path_separators:
front, tail = path.rsplit(x, maxsplit=1)
return front, tail
return '', path | [
"def",
"_path_split",
"(",
"path",
")",
":",
"if",
"len",
"(",
"path_separators",
")",
"==",
"1",
":",
"front",
",",
"_",
",",
"tail",
"=",
"path",
".",
"rpartition",
"(",
"path_sep",
")",
"return",
"front",
",",
"tail",
"for",
"x",
"in",
"reversed",
"(",
"path",
")",
":",
"if",
"x",
"in",
"path_separators",
":",
"front",
",",
"tail",
"=",
"path",
".",
"rsplit",
"(",
"x",
",",
"maxsplit",
"=",
"1",
")",
"return",
"front",
",",
"tail",
"return",
"''",
",",
"path"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/importlib/_bootstrap_external.py#L62-L71 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/more-itertools/py2/more_itertools/recipes.py | python | random_combination_with_replacement | (iterable, r) | return tuple(pool[i] for i in indices) | Return a random *r* length subsequence of elements in *iterable*,
allowing individual elements to be repeated.
>>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP
(0, 0, 1, 2, 2)
This equivalent to taking a random selection from
``itertools.combinations_with_replacement(iterable, r)``. | Return a random *r* length subsequence of elements in *iterable*,
allowing individual elements to be repeated. | [
"Return",
"a",
"random",
"*",
"r",
"*",
"length",
"subsequence",
"of",
"elements",
"in",
"*",
"iterable",
"*",
"allowing",
"individual",
"elements",
"to",
"be",
"repeated",
"."
] | def random_combination_with_replacement(iterable, r):
"""Return a random *r* length subsequence of elements in *iterable*,
allowing individual elements to be repeated.
>>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP
(0, 0, 1, 2, 2)
This equivalent to taking a random selection from
``itertools.combinations_with_replacement(iterable, r)``.
"""
pool = tuple(iterable)
n = len(pool)
indices = sorted(randrange(n) for i in range(r))
return tuple(pool[i] for i in indices) | [
"def",
"random_combination_with_replacement",
"(",
"iterable",
",",
"r",
")",
":",
"pool",
"=",
"tuple",
"(",
"iterable",
")",
"n",
"=",
"len",
"(",
"pool",
")",
"indices",
"=",
"sorted",
"(",
"randrange",
"(",
"n",
")",
"for",
"i",
"in",
"range",
"(",
"r",
")",
")",
"return",
"tuple",
"(",
"pool",
"[",
"i",
"]",
"for",
"i",
"in",
"indices",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/more-itertools/py2/more_itertools/recipes.py#L513-L527 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cgutils.py | python | loop_nest | (builder, shape, intp, order='C') | Generate a loop nest walking a N-dimensional array.
Yields a tuple of N indices for use in the inner loop body,
iterating over the *shape* space.
If *order* is 'C' (the default), indices are incremented inside-out
(i.e. (0,0), (0,1), (0,2), (1,0) etc.).
If *order* is 'F', they are incremented outside-in
(i.e. (0,0), (1,0), (2,0), (0,1) etc.).
This has performance implications when walking an array as it impacts
the spatial locality of memory accesses. | Generate a loop nest walking a N-dimensional array.
Yields a tuple of N indices for use in the inner loop body,
iterating over the *shape* space. | [
"Generate",
"a",
"loop",
"nest",
"walking",
"a",
"N",
"-",
"dimensional",
"array",
".",
"Yields",
"a",
"tuple",
"of",
"N",
"indices",
"for",
"use",
"in",
"the",
"inner",
"loop",
"body",
"iterating",
"over",
"the",
"*",
"shape",
"*",
"space",
"."
] | def loop_nest(builder, shape, intp, order='C'):
"""
Generate a loop nest walking a N-dimensional array.
Yields a tuple of N indices for use in the inner loop body,
iterating over the *shape* space.
If *order* is 'C' (the default), indices are incremented inside-out
(i.e. (0,0), (0,1), (0,2), (1,0) etc.).
If *order* is 'F', they are incremented outside-in
(i.e. (0,0), (1,0), (2,0), (0,1) etc.).
This has performance implications when walking an array as it impacts
the spatial locality of memory accesses.
"""
assert order in 'CF'
if not shape:
# 0-d array
yield ()
else:
if order == 'F':
_swap = lambda x: x[::-1]
else:
_swap = lambda x: x
with _loop_nest(builder, _swap(shape), intp) as indices:
assert len(indices) == len(shape)
yield _swap(indices) | [
"def",
"loop_nest",
"(",
"builder",
",",
"shape",
",",
"intp",
",",
"order",
"=",
"'C'",
")",
":",
"assert",
"order",
"in",
"'CF'",
"if",
"not",
"shape",
":",
"# 0-d array",
"yield",
"(",
")",
"else",
":",
"if",
"order",
"==",
"'F'",
":",
"_swap",
"=",
"lambda",
"x",
":",
"x",
"[",
":",
":",
"-",
"1",
"]",
"else",
":",
"_swap",
"=",
"lambda",
"x",
":",
"x",
"with",
"_loop_nest",
"(",
"builder",
",",
"_swap",
"(",
"shape",
")",
",",
"intp",
")",
"as",
"indices",
":",
"assert",
"len",
"(",
"indices",
")",
"==",
"len",
"(",
"shape",
")",
"yield",
"_swap",
"(",
"indices",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cgutils.py#L585-L609 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/_core.py | python | PyApp.Dispatch | (*args, **kwargs) | return _core_.PyApp_Dispatch(*args, **kwargs) | Dispatch(self) -> bool
Process the first event in the event queue (blocks until an event
appears if there are none currently) | Dispatch(self) -> bool | [
"Dispatch",
"(",
"self",
")",
"-",
">",
"bool"
] | def Dispatch(*args, **kwargs):
"""
Dispatch(self) -> bool
Process the first event in the event queue (blocks until an event
appears if there are none currently)
"""
return _core_.PyApp_Dispatch(*args, **kwargs) | [
"def",
"Dispatch",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"PyApp_Dispatch",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_core.py#L8025-L8032 | |
snap-stanford/snap-python | d53c51b0a26aa7e3e7400b014cdf728948fde80a | setup/snap.py | python | GenRewire | (*args) | return _snap.GenRewire(*args) | GenRewire(PUNGraph const & Graph, int const & NSwitch=100, TRnd Rnd=Rnd) -> PUNGraph
Parameters:
Graph: PUNGraph const &
NSwitch: int const &
Rnd: TRnd &
GenRewire(PUNGraph const & Graph, int const & NSwitch=100) -> PUNGraph
Parameters:
Graph: PUNGraph const &
NSwitch: int const &
GenRewire(PUNGraph const & Graph) -> PUNGraph
Parameters:
Graph: PUNGraph const &
GenRewire(PNGraph const & Graph, int const & NSwitch=100, TRnd Rnd=Rnd) -> PNGraph
Parameters:
Graph: PNGraph const &
NSwitch: int const &
Rnd: TRnd &
GenRewire(PNGraph const & Graph, int const & NSwitch=100) -> PNGraph
Parameters:
Graph: PNGraph const &
NSwitch: int const &
GenRewire(PNGraph const & Graph) -> PNGraph
Parameters:
Graph: PNGraph const &
GenRewire(PBPGraph const & Graph, int const & NSwitch=100, TRnd Rnd=Rnd) -> PBPGraph
Parameters:
Graph: PBPGraph const &
NSwitch: int const &
Rnd: TRnd &
GenRewire(PBPGraph const & Graph, int const & NSwitch=100) -> PBPGraph
Parameters:
Graph: PBPGraph const &
NSwitch: int const &
GenRewire(PBPGraph const & Graph) -> PBPGraph
Parameters:
Graph: PBPGraph const & | GenRewire(PUNGraph const & Graph, int const & NSwitch=100, TRnd Rnd=Rnd) -> PUNGraph | [
"GenRewire",
"(",
"PUNGraph",
"const",
"&",
"Graph",
"int",
"const",
"&",
"NSwitch",
"=",
"100",
"TRnd",
"Rnd",
"=",
"Rnd",
")",
"-",
">",
"PUNGraph"
] | def GenRewire(*args):
"""
GenRewire(PUNGraph const & Graph, int const & NSwitch=100, TRnd Rnd=Rnd) -> PUNGraph
Parameters:
Graph: PUNGraph const &
NSwitch: int const &
Rnd: TRnd &
GenRewire(PUNGraph const & Graph, int const & NSwitch=100) -> PUNGraph
Parameters:
Graph: PUNGraph const &
NSwitch: int const &
GenRewire(PUNGraph const & Graph) -> PUNGraph
Parameters:
Graph: PUNGraph const &
GenRewire(PNGraph const & Graph, int const & NSwitch=100, TRnd Rnd=Rnd) -> PNGraph
Parameters:
Graph: PNGraph const &
NSwitch: int const &
Rnd: TRnd &
GenRewire(PNGraph const & Graph, int const & NSwitch=100) -> PNGraph
Parameters:
Graph: PNGraph const &
NSwitch: int const &
GenRewire(PNGraph const & Graph) -> PNGraph
Parameters:
Graph: PNGraph const &
GenRewire(PBPGraph const & Graph, int const & NSwitch=100, TRnd Rnd=Rnd) -> PBPGraph
Parameters:
Graph: PBPGraph const &
NSwitch: int const &
Rnd: TRnd &
GenRewire(PBPGraph const & Graph, int const & NSwitch=100) -> PBPGraph
Parameters:
Graph: PBPGraph const &
NSwitch: int const &
GenRewire(PBPGraph const & Graph) -> PBPGraph
Parameters:
Graph: PBPGraph const &
"""
return _snap.GenRewire(*args) | [
"def",
"GenRewire",
"(",
"*",
"args",
")",
":",
"return",
"_snap",
".",
"GenRewire",
"(",
"*",
"args",
")"
] | https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L6425-L6482 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/pyparsing.py | python | line | (loc, strg) | Returns the line of text containing loc within a string, counting newlines as line separators. | Returns the line of text containing loc within a string, counting newlines as line separators. | [
"Returns",
"the",
"line",
"of",
"text",
"containing",
"loc",
"within",
"a",
"string",
"counting",
"newlines",
"as",
"line",
"separators",
"."
] | def line(loc, strg):
"""Returns the line of text containing loc within a string, counting newlines as line separators.
"""
lastCR = strg.rfind("\n", 0, loc)
nextCR = strg.find("\n", loc)
if nextCR >= 0:
return strg[lastCR + 1:nextCR]
else:
return strg[lastCR + 1:] | [
"def",
"line",
"(",
"loc",
",",
"strg",
")",
":",
"lastCR",
"=",
"strg",
".",
"rfind",
"(",
"\"\\n\"",
",",
"0",
",",
"loc",
")",
"nextCR",
"=",
"strg",
".",
"find",
"(",
"\"\\n\"",
",",
"loc",
")",
"if",
"nextCR",
">=",
"0",
":",
"return",
"strg",
"[",
"lastCR",
"+",
"1",
":",
"nextCR",
"]",
"else",
":",
"return",
"strg",
"[",
"lastCR",
"+",
"1",
":",
"]"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/pyparsing.py#L1237-L1245 | ||
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/inspect.py | python | isabstract | (object) | return bool(isinstance(object, type) and object.__flags__ & TPFLAGS_IS_ABSTRACT) | Return true if the object is an abstract base class (ABC). | Return true if the object is an abstract base class (ABC). | [
"Return",
"true",
"if",
"the",
"object",
"is",
"an",
"abstract",
"base",
"class",
"(",
"ABC",
")",
"."
] | def isabstract(object):
"""Return true if the object is an abstract base class (ABC)."""
return bool(isinstance(object, type) and object.__flags__ & TPFLAGS_IS_ABSTRACT) | [
"def",
"isabstract",
"(",
"object",
")",
":",
"return",
"bool",
"(",
"isinstance",
"(",
"object",
",",
"type",
")",
"and",
"object",
".",
"__flags__",
"&",
"TPFLAGS_IS_ABSTRACT",
")"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/inspect.py#L243-L245 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/_pydecimal.py | python | Decimal._divide | (self, other, context) | return ans, ans | Return (self // other, self % other), to context.prec precision.
Assumes that neither self nor other is a NaN, that self is not
infinite and that other is nonzero. | Return (self // other, self % other), to context.prec precision. | [
"Return",
"(",
"self",
"//",
"other",
"self",
"%",
"other",
")",
"to",
"context",
".",
"prec",
"precision",
"."
] | def _divide(self, other, context):
"""Return (self // other, self % other), to context.prec precision.
Assumes that neither self nor other is a NaN, that self is not
infinite and that other is nonzero.
"""
sign = self._sign ^ other._sign
if other._isinfinity():
ideal_exp = self._exp
else:
ideal_exp = min(self._exp, other._exp)
expdiff = self.adjusted() - other.adjusted()
if not self or other._isinfinity() or expdiff <= -2:
return (_dec_from_triple(sign, '0', 0),
self._rescale(ideal_exp, context.rounding))
if expdiff <= context.prec:
op1 = _WorkRep(self)
op2 = _WorkRep(other)
if op1.exp >= op2.exp:
op1.int *= 10**(op1.exp - op2.exp)
else:
op2.int *= 10**(op2.exp - op1.exp)
q, r = divmod(op1.int, op2.int)
if q < 10**context.prec:
return (_dec_from_triple(sign, str(q), 0),
_dec_from_triple(self._sign, str(r), ideal_exp))
# Here the quotient is too large to be representable
ans = context._raise_error(DivisionImpossible,
'quotient too large in //, % or divmod')
return ans, ans | [
"def",
"_divide",
"(",
"self",
",",
"other",
",",
"context",
")",
":",
"sign",
"=",
"self",
".",
"_sign",
"^",
"other",
".",
"_sign",
"if",
"other",
".",
"_isinfinity",
"(",
")",
":",
"ideal_exp",
"=",
"self",
".",
"_exp",
"else",
":",
"ideal_exp",
"=",
"min",
"(",
"self",
".",
"_exp",
",",
"other",
".",
"_exp",
")",
"expdiff",
"=",
"self",
".",
"adjusted",
"(",
")",
"-",
"other",
".",
"adjusted",
"(",
")",
"if",
"not",
"self",
"or",
"other",
".",
"_isinfinity",
"(",
")",
"or",
"expdiff",
"<=",
"-",
"2",
":",
"return",
"(",
"_dec_from_triple",
"(",
"sign",
",",
"'0'",
",",
"0",
")",
",",
"self",
".",
"_rescale",
"(",
"ideal_exp",
",",
"context",
".",
"rounding",
")",
")",
"if",
"expdiff",
"<=",
"context",
".",
"prec",
":",
"op1",
"=",
"_WorkRep",
"(",
"self",
")",
"op2",
"=",
"_WorkRep",
"(",
"other",
")",
"if",
"op1",
".",
"exp",
">=",
"op2",
".",
"exp",
":",
"op1",
".",
"int",
"*=",
"10",
"**",
"(",
"op1",
".",
"exp",
"-",
"op2",
".",
"exp",
")",
"else",
":",
"op2",
".",
"int",
"*=",
"10",
"**",
"(",
"op2",
".",
"exp",
"-",
"op1",
".",
"exp",
")",
"q",
",",
"r",
"=",
"divmod",
"(",
"op1",
".",
"int",
",",
"op2",
".",
"int",
")",
"if",
"q",
"<",
"10",
"**",
"context",
".",
"prec",
":",
"return",
"(",
"_dec_from_triple",
"(",
"sign",
",",
"str",
"(",
"q",
")",
",",
"0",
")",
",",
"_dec_from_triple",
"(",
"self",
".",
"_sign",
",",
"str",
"(",
"r",
")",
",",
"ideal_exp",
")",
")",
"# Here the quotient is too large to be representable",
"ans",
"=",
"context",
".",
"_raise_error",
"(",
"DivisionImpossible",
",",
"'quotient too large in //, % or divmod'",
")",
"return",
"ans",
",",
"ans"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/_pydecimal.py#L1383-L1414 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/_windows.py | python | MDIChildFrame.Activate | (*args, **kwargs) | return _windows_.MDIChildFrame_Activate(*args, **kwargs) | Activate(self) | Activate(self) | [
"Activate",
"(",
"self",
")"
] | def Activate(*args, **kwargs):
"""Activate(self)"""
return _windows_.MDIChildFrame_Activate(*args, **kwargs) | [
"def",
"Activate",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"MDIChildFrame_Activate",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_windows.py#L4102-L4104 | |
xhzdeng/crpn | a5aef0f80dbe486103123f740c634fb01e6cc9a1 | caffe-fast-rcnn/scripts/cpp_lint.py | python | _IsTestFilename | (filename) | Determines if the given filename has a suffix that identifies it as a test.
Args:
filename: The input filename.
Returns:
True if 'filename' looks like a test, False otherwise. | Determines if the given filename has a suffix that identifies it as a test. | [
"Determines",
"if",
"the",
"given",
"filename",
"has",
"a",
"suffix",
"that",
"identifies",
"it",
"as",
"a",
"test",
"."
] | def _IsTestFilename(filename):
"""Determines if the given filename has a suffix that identifies it as a test.
Args:
filename: The input filename.
Returns:
True if 'filename' looks like a test, False otherwise.
"""
if (filename.endswith('_test.cc') or
filename.endswith('_unittest.cc') or
filename.endswith('_regtest.cc')):
return True
else:
return False | [
"def",
"_IsTestFilename",
"(",
"filename",
")",
":",
"if",
"(",
"filename",
".",
"endswith",
"(",
"'_test.cc'",
")",
"or",
"filename",
".",
"endswith",
"(",
"'_unittest.cc'",
")",
"or",
"filename",
".",
"endswith",
"(",
"'_regtest.cc'",
")",
")",
":",
"return",
"True",
"else",
":",
"return",
"False"
] | https://github.com/xhzdeng/crpn/blob/a5aef0f80dbe486103123f740c634fb01e6cc9a1/caffe-fast-rcnn/scripts/cpp_lint.py#L3607-L3621 | ||
sccn/lsl_archived | 2ff44b7a5172b02fe845b1fc72b9ab5578a489ed | LSL/liblsl-Python/pylsl/pylsl.py | python | ContinuousResolver.__init__ | (self, prop=None, value=None, pred=None, forget_after=5.0) | Construct a new continuous_resolver.
Keyword arguments:
forget_after -- When a stream is no longer visible on the network
(e.g., because it was shut down), this is the time in
seconds after which it is no longer reported by the
resolver. | Construct a new continuous_resolver. | [
"Construct",
"a",
"new",
"continuous_resolver",
"."
] | def __init__(self, prop=None, value=None, pred=None, forget_after=5.0):
"""Construct a new continuous_resolver.
Keyword arguments:
forget_after -- When a stream is no longer visible on the network
(e.g., because it was shut down), this is the time in
seconds after which it is no longer reported by the
resolver.
"""
if pred is not None:
if prop is not None or value is not None:
raise ValueError("you can only either pass the prop/value "
"argument or the pred argument, but not "
"both.")
self.obj = lib.lsl_create_continuous_resolver_bypred(str.encode(pred),
c_double(forget_after))
elif prop is not None and value is not None:
self.obj = lib.lsl_create_continuous_resolver_byprop(str.encode(prop),
str.encode(value),
c_double(forget_after))
elif prop is not None or value is not None:
raise ValueError("if prop is specified, then value must be "
"specified, too, and vice versa.")
else:
self.obj = lib.lsl_create_continuous_resolver(c_double(forget_after))
self.obj = c_void_p(self.obj)
if not self.obj:
raise RuntimeError("could not create continuous resolver.") | [
"def",
"__init__",
"(",
"self",
",",
"prop",
"=",
"None",
",",
"value",
"=",
"None",
",",
"pred",
"=",
"None",
",",
"forget_after",
"=",
"5.0",
")",
":",
"if",
"pred",
"is",
"not",
"None",
":",
"if",
"prop",
"is",
"not",
"None",
"or",
"value",
"is",
"not",
"None",
":",
"raise",
"ValueError",
"(",
"\"you can only either pass the prop/value \"",
"\"argument or the pred argument, but not \"",
"\"both.\"",
")",
"self",
".",
"obj",
"=",
"lib",
".",
"lsl_create_continuous_resolver_bypred",
"(",
"str",
".",
"encode",
"(",
"pred",
")",
",",
"c_double",
"(",
"forget_after",
")",
")",
"elif",
"prop",
"is",
"not",
"None",
"and",
"value",
"is",
"not",
"None",
":",
"self",
".",
"obj",
"=",
"lib",
".",
"lsl_create_continuous_resolver_byprop",
"(",
"str",
".",
"encode",
"(",
"prop",
")",
",",
"str",
".",
"encode",
"(",
"value",
")",
",",
"c_double",
"(",
"forget_after",
")",
")",
"elif",
"prop",
"is",
"not",
"None",
"or",
"value",
"is",
"not",
"None",
":",
"raise",
"ValueError",
"(",
"\"if prop is specified, then value must be \"",
"\"specified, too, and vice versa.\"",
")",
"else",
":",
"self",
".",
"obj",
"=",
"lib",
".",
"lsl_create_continuous_resolver",
"(",
"c_double",
"(",
"forget_after",
")",
")",
"self",
".",
"obj",
"=",
"c_void_p",
"(",
"self",
".",
"obj",
")",
"if",
"not",
"self",
".",
"obj",
":",
"raise",
"RuntimeError",
"(",
"\"could not create continuous resolver.\"",
")"
] | https://github.com/sccn/lsl_archived/blob/2ff44b7a5172b02fe845b1fc72b9ab5578a489ed/LSL/liblsl-Python/pylsl/pylsl.py#L1046-L1074 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/xml/sax/xmlreader.py | python | XMLReader.setFeature | (self, name, state) | Sets the state of a SAX2 feature. | Sets the state of a SAX2 feature. | [
"Sets",
"the",
"state",
"of",
"a",
"SAX2",
"feature",
"."
] | def setFeature(self, name, state):
"Sets the state of a SAX2 feature."
raise SAXNotRecognizedException("Feature '%s' not recognized" % name) | [
"def",
"setFeature",
"(",
"self",
",",
"name",
",",
"state",
")",
":",
"raise",
"SAXNotRecognizedException",
"(",
"\"Feature '%s' not recognized\"",
"%",
"name",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/xml/sax/xmlreader.py#L79-L81 | ||
pmq20/node-packer | 12c46c6e44fbc14d9ee645ebd17d5296b324f7e0 | current/tools/gyp/pylib/gyp/xcode_emulation.py | python | XcodeSettings.GetBundleSharedSupportFolderPath | (self) | Returns the qualified path to the bundle's shared support folder. E.g,
Chromium.app/Contents/SharedSupport. Only valid for bundles. | Returns the qualified path to the bundle's shared support folder. E.g,
Chromium.app/Contents/SharedSupport. Only valid for bundles. | [
"Returns",
"the",
"qualified",
"path",
"to",
"the",
"bundle",
"s",
"shared",
"support",
"folder",
".",
"E",
".",
"g",
"Chromium",
".",
"app",
"/",
"Contents",
"/",
"SharedSupport",
".",
"Only",
"valid",
"for",
"bundles",
"."
] | def GetBundleSharedSupportFolderPath(self):
"""Returns the qualified path to the bundle's shared support folder. E.g,
Chromium.app/Contents/SharedSupport. Only valid for bundles."""
assert self._IsBundle()
if self.spec['type'] == 'shared_library':
return self.GetBundleResourceFolder()
else:
return os.path.join(self.GetBundleContentsFolderPath(),
'SharedSupport') | [
"def",
"GetBundleSharedSupportFolderPath",
"(",
"self",
")",
":",
"assert",
"self",
".",
"_IsBundle",
"(",
")",
"if",
"self",
".",
"spec",
"[",
"'type'",
"]",
"==",
"'shared_library'",
":",
"return",
"self",
".",
"GetBundleResourceFolder",
"(",
")",
"else",
":",
"return",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"GetBundleContentsFolderPath",
"(",
")",
",",
"'SharedSupport'",
")"
] | https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/current/tools/gyp/pylib/gyp/xcode_emulation.py#L347-L355 | ||
FreeCAD/FreeCAD | ba42231b9c6889b89e064d6d563448ed81e376ec | src/Mod/Draft/draftgeoutils/wires.py | python | get_placement_perpendicular_to_wire | (wire) | return pl | Return the placement whose base is the wire's first vertex and it's z axis aligned to the wire's tangent. | Return the placement whose base is the wire's first vertex and it's z axis aligned to the wire's tangent. | [
"Return",
"the",
"placement",
"whose",
"base",
"is",
"the",
"wire",
"s",
"first",
"vertex",
"and",
"it",
"s",
"z",
"axis",
"aligned",
"to",
"the",
"wire",
"s",
"tangent",
"."
] | def get_placement_perpendicular_to_wire(wire):
"""Return the placement whose base is the wire's first vertex and it's z axis aligned to the wire's tangent."""
pl = App.Placement()
if wire.Length > 0.0:
pl.Base = wire.OrderedVertexes[0].Point
first_edge = wire.OrderedEdges[0]
if first_edge.Orientation == "Forward":
zaxis = -first_edge.tangentAt(first_edge.FirstParameter)
else:
zaxis = first_edge.tangentAt(first_edge.LastParameter)
pl.Rotation = App.Rotation(App.Vector(1, 0, 0), App.Vector(0, 0, 1), zaxis, "ZYX")
else:
App.Console.PrintError("debug: get_placement_perpendicular_to_wire called with a zero-length wire.\n")
return pl | [
"def",
"get_placement_perpendicular_to_wire",
"(",
"wire",
")",
":",
"pl",
"=",
"App",
".",
"Placement",
"(",
")",
"if",
"wire",
".",
"Length",
">",
"0.0",
":",
"pl",
".",
"Base",
"=",
"wire",
".",
"OrderedVertexes",
"[",
"0",
"]",
".",
"Point",
"first_edge",
"=",
"wire",
".",
"OrderedEdges",
"[",
"0",
"]",
"if",
"first_edge",
".",
"Orientation",
"==",
"\"Forward\"",
":",
"zaxis",
"=",
"-",
"first_edge",
".",
"tangentAt",
"(",
"first_edge",
".",
"FirstParameter",
")",
"else",
":",
"zaxis",
"=",
"first_edge",
".",
"tangentAt",
"(",
"first_edge",
".",
"LastParameter",
")",
"pl",
".",
"Rotation",
"=",
"App",
".",
"Rotation",
"(",
"App",
".",
"Vector",
"(",
"1",
",",
"0",
",",
"0",
")",
",",
"App",
".",
"Vector",
"(",
"0",
",",
"0",
",",
"1",
")",
",",
"zaxis",
",",
"\"ZYX\"",
")",
"else",
":",
"App",
".",
"Console",
".",
"PrintError",
"(",
"\"debug: get_placement_perpendicular_to_wire called with a zero-length wire.\\n\"",
")",
"return",
"pl"
] | https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/draftgeoutils/wires.py#L418-L431 | |
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/locale.py | python | _print_locale | () | Test function. | Test function. | [
"Test",
"function",
"."
] | def _print_locale():
""" Test function.
"""
categories = {}
def _init_categories(categories=categories):
for k,v in globals().items():
if k[:3] == 'LC_':
categories[k] = v
_init_categories()
del categories['LC_ALL']
print 'Locale defaults as determined by getdefaultlocale():'
print '-'*72
lang, enc = getdefaultlocale()
print 'Language: ', lang or '(undefined)'
print 'Encoding: ', enc or '(undefined)'
print
print 'Locale settings on startup:'
print '-'*72
for name,category in categories.items():
print name, '...'
lang, enc = getlocale(category)
print ' Language: ', lang or '(undefined)'
print ' Encoding: ', enc or '(undefined)'
print
print
print 'Locale settings after calling resetlocale():'
print '-'*72
resetlocale()
for name,category in categories.items():
print name, '...'
lang, enc = getlocale(category)
print ' Language: ', lang or '(undefined)'
print ' Encoding: ', enc or '(undefined)'
print
try:
setlocale(LC_ALL, "")
except:
print 'NOTE:'
print 'setlocale(LC_ALL, "") does not support the default locale'
print 'given in the OS environment variables.'
else:
print
print 'Locale settings after calling setlocale(LC_ALL, ""):'
print '-'*72
for name,category in categories.items():
print name, '...'
lang, enc = getlocale(category)
print ' Language: ', lang or '(undefined)'
print ' Encoding: ', enc or '(undefined)'
print | [
"def",
"_print_locale",
"(",
")",
":",
"categories",
"=",
"{",
"}",
"def",
"_init_categories",
"(",
"categories",
"=",
"categories",
")",
":",
"for",
"k",
",",
"v",
"in",
"globals",
"(",
")",
".",
"items",
"(",
")",
":",
"if",
"k",
"[",
":",
"3",
"]",
"==",
"'LC_'",
":",
"categories",
"[",
"k",
"]",
"=",
"v",
"_init_categories",
"(",
")",
"del",
"categories",
"[",
"'LC_ALL'",
"]",
"print",
"'Locale defaults as determined by getdefaultlocale():'",
"print",
"'-'",
"*",
"72",
"lang",
",",
"enc",
"=",
"getdefaultlocale",
"(",
")",
"print",
"'Language: '",
",",
"lang",
"or",
"'(undefined)'",
"print",
"'Encoding: '",
",",
"enc",
"or",
"'(undefined)'",
"print",
"print",
"'Locale settings on startup:'",
"print",
"'-'",
"*",
"72",
"for",
"name",
",",
"category",
"in",
"categories",
".",
"items",
"(",
")",
":",
"print",
"name",
",",
"'...'",
"lang",
",",
"enc",
"=",
"getlocale",
"(",
"category",
")",
"print",
"' Language: '",
",",
"lang",
"or",
"'(undefined)'",
"print",
"' Encoding: '",
",",
"enc",
"or",
"'(undefined)'",
"print",
"print",
"print",
"'Locale settings after calling resetlocale():'",
"print",
"'-'",
"*",
"72",
"resetlocale",
"(",
")",
"for",
"name",
",",
"category",
"in",
"categories",
".",
"items",
"(",
")",
":",
"print",
"name",
",",
"'...'",
"lang",
",",
"enc",
"=",
"getlocale",
"(",
"category",
")",
"print",
"' Language: '",
",",
"lang",
"or",
"'(undefined)'",
"print",
"' Encoding: '",
",",
"enc",
"or",
"'(undefined)'",
"print",
"try",
":",
"setlocale",
"(",
"LC_ALL",
",",
"\"\"",
")",
"except",
":",
"print",
"'NOTE:'",
"print",
"'setlocale(LC_ALL, \"\") does not support the default locale'",
"print",
"'given in the OS environment variables.'",
"else",
":",
"print",
"print",
"'Locale settings after calling setlocale(LC_ALL, \"\"):'",
"print",
"'-'",
"*",
"72",
"for",
"name",
",",
"category",
"in",
"categories",
".",
"items",
"(",
")",
":",
"print",
"name",
",",
"'...'",
"lang",
",",
"enc",
"=",
"getlocale",
"(",
"category",
")",
"print",
"' Language: '",
",",
"lang",
"or",
"'(undefined)'",
"print",
"' Encoding: '",
",",
"enc",
"or",
"'(undefined)'",
"print"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/locale.py#L1810-L1864 | ||
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/lib-tk/Tkinter.py | python | Wm.wm_geometry | (self, newGeometry=None) | return self.tk.call('wm', 'geometry', self._w, newGeometry) | Set geometry to NEWGEOMETRY of the form =widthxheight+x+y. Return
current value if None is given. | Set geometry to NEWGEOMETRY of the form =widthxheight+x+y. Return
current value if None is given. | [
"Set",
"geometry",
"to",
"NEWGEOMETRY",
"of",
"the",
"form",
"=",
"widthxheight",
"+",
"x",
"+",
"y",
".",
"Return",
"current",
"value",
"if",
"None",
"is",
"given",
"."
] | def wm_geometry(self, newGeometry=None):
"""Set geometry to NEWGEOMETRY of the form =widthxheight+x+y. Return
current value if None is given."""
return self.tk.call('wm', 'geometry', self._w, newGeometry) | [
"def",
"wm_geometry",
"(",
"self",
",",
"newGeometry",
"=",
"None",
")",
":",
"return",
"self",
".",
"tk",
".",
"call",
"(",
"'wm'",
",",
"'geometry'",
",",
"self",
".",
"_w",
",",
"newGeometry",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/lib-tk/Tkinter.py#L1591-L1594 | |
tpfister/caffe-heatmap | 4db69ef53e6b8a0b3b4ebb29328b0ab3dbf67c4e | scripts/cpp_lint.py | python | PrintCategories | () | Prints a list of all the error-categories used by error messages.
These are the categories used to filter messages via --filter. | Prints a list of all the error-categories used by error messages. | [
"Prints",
"a",
"list",
"of",
"all",
"the",
"error",
"-",
"categories",
"used",
"by",
"error",
"messages",
"."
] | def PrintCategories():
"""Prints a list of all the error-categories used by error messages.
These are the categories used to filter messages via --filter.
"""
sys.stderr.write(''.join(' %s\n' % cat for cat in _ERROR_CATEGORIES))
sys.exit(0) | [
"def",
"PrintCategories",
"(",
")",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"''",
".",
"join",
"(",
"' %s\\n'",
"%",
"cat",
"for",
"cat",
"in",
"_ERROR_CATEGORIES",
")",
")",
"sys",
".",
"exit",
"(",
"0",
")"
] | https://github.com/tpfister/caffe-heatmap/blob/4db69ef53e6b8a0b3b4ebb29328b0ab3dbf67c4e/scripts/cpp_lint.py#L4770-L4776 | ||
gabyx/ApproxMVBB | 838f3ff7690a938f1e4199a5f41b6feefc32a603 | example/kdTreeFiltering/python/Tools/Transformations/Transformations.py | python | superimposition_matrix | (v0, v1, scale=False, usesvd=True) | return affine_matrix_from_points(v0, v1, shear=False,
scale=scale, usesvd=usesvd) | Return matrix to transform given 3D point set into second point set.
v0 and v1 are shape (3, \*) or (4, \*) arrays of at least 3 points.
The parameters scale and usesvd are explained in the more general
affine_matrix_from_points function.
The returned matrix is a similarity or Euclidean transformation matrix.
This function has a fast C implementation in transformations.c.
>>> v0 = numpy.random.rand(3, 10)
>>> M = superimposition_matrix(v0, v0)
>>> numpy.allclose(M, numpy.identity(4))
True
>>> R = random_rotation_matrix(numpy.random.random(3))
>>> v0 = [[1,0,0], [0,1,0], [0,0,1], [1,1,1]]
>>> v1 = numpy.dot(R, v0)
>>> M = superimposition_matrix(v0, v1)
>>> numpy.allclose(v1, numpy.dot(M, v0))
True
>>> v0 = (numpy.random.rand(4, 100) - 0.5) * 20
>>> v0[3] = 1
>>> v1 = numpy.dot(R, v0)
>>> M = superimposition_matrix(v0, v1)
>>> numpy.allclose(v1, numpy.dot(M, v0))
True
>>> S = scale_matrix(random.random())
>>> T = translation_matrix(numpy.random.random(3)-0.5)
>>> M = concatenate_matrices(T, R, S)
>>> v1 = numpy.dot(M, v0)
>>> v0[:3] += numpy.random.normal(0, 1e-9, 300).reshape(3, -1)
>>> M = superimposition_matrix(v0, v1, scale=True)
>>> numpy.allclose(v1, numpy.dot(M, v0))
True
>>> M = superimposition_matrix(v0, v1, scale=True, usesvd=False)
>>> numpy.allclose(v1, numpy.dot(M, v0))
True
>>> v = numpy.empty((4, 100, 3))
>>> v[:, :, 0] = v0
>>> M = superimposition_matrix(v0, v1, scale=True, usesvd=False)
>>> numpy.allclose(v1, numpy.dot(M, v[:, :, 0]))
True | Return matrix to transform given 3D point set into second point set. | [
"Return",
"matrix",
"to",
"transform",
"given",
"3D",
"point",
"set",
"into",
"second",
"point",
"set",
"."
] | def superimposition_matrix(v0, v1, scale=False, usesvd=True):
"""Return matrix to transform given 3D point set into second point set.
v0 and v1 are shape (3, \*) or (4, \*) arrays of at least 3 points.
The parameters scale and usesvd are explained in the more general
affine_matrix_from_points function.
The returned matrix is a similarity or Euclidean transformation matrix.
This function has a fast C implementation in transformations.c.
>>> v0 = numpy.random.rand(3, 10)
>>> M = superimposition_matrix(v0, v0)
>>> numpy.allclose(M, numpy.identity(4))
True
>>> R = random_rotation_matrix(numpy.random.random(3))
>>> v0 = [[1,0,0], [0,1,0], [0,0,1], [1,1,1]]
>>> v1 = numpy.dot(R, v0)
>>> M = superimposition_matrix(v0, v1)
>>> numpy.allclose(v1, numpy.dot(M, v0))
True
>>> v0 = (numpy.random.rand(4, 100) - 0.5) * 20
>>> v0[3] = 1
>>> v1 = numpy.dot(R, v0)
>>> M = superimposition_matrix(v0, v1)
>>> numpy.allclose(v1, numpy.dot(M, v0))
True
>>> S = scale_matrix(random.random())
>>> T = translation_matrix(numpy.random.random(3)-0.5)
>>> M = concatenate_matrices(T, R, S)
>>> v1 = numpy.dot(M, v0)
>>> v0[:3] += numpy.random.normal(0, 1e-9, 300).reshape(3, -1)
>>> M = superimposition_matrix(v0, v1, scale=True)
>>> numpy.allclose(v1, numpy.dot(M, v0))
True
>>> M = superimposition_matrix(v0, v1, scale=True, usesvd=False)
>>> numpy.allclose(v1, numpy.dot(M, v0))
True
>>> v = numpy.empty((4, 100, 3))
>>> v[:, :, 0] = v0
>>> M = superimposition_matrix(v0, v1, scale=True, usesvd=False)
>>> numpy.allclose(v1, numpy.dot(M, v[:, :, 0]))
True
"""
v0 = numpy.array(v0, dtype=numpy.float64, copy=False)[:3]
v1 = numpy.array(v1, dtype=numpy.float64, copy=False)[:3]
return affine_matrix_from_points(v0, v1, shear=False,
scale=scale, usesvd=usesvd) | [
"def",
"superimposition_matrix",
"(",
"v0",
",",
"v1",
",",
"scale",
"=",
"False",
",",
"usesvd",
"=",
"True",
")",
":",
"v0",
"=",
"numpy",
".",
"array",
"(",
"v0",
",",
"dtype",
"=",
"numpy",
".",
"float64",
",",
"copy",
"=",
"False",
")",
"[",
":",
"3",
"]",
"v1",
"=",
"numpy",
".",
"array",
"(",
"v1",
",",
"dtype",
"=",
"numpy",
".",
"float64",
",",
"copy",
"=",
"False",
")",
"[",
":",
"3",
"]",
"return",
"affine_matrix_from_points",
"(",
"v0",
",",
"v1",
",",
"shear",
"=",
"False",
",",
"scale",
"=",
"scale",
",",
"usesvd",
"=",
"usesvd",
")"
] | https://github.com/gabyx/ApproxMVBB/blob/838f3ff7690a938f1e4199a5f41b6feefc32a603/example/kdTreeFiltering/python/Tools/Transformations/Transformations.py#L993-L1041 | |
pytorch/pytorch | 7176c92687d3cc847cc046bf002269c6949a21c2 | torch/serialization.py | python | _check_dill_version | (pickle_module) | Checks if using dill as the pickle module, and if so, checks if it is the correct version.
If dill version is lower than 0.3.1, a ValueError is raised.
Args:
pickle_module: module used for pickling metadata and objects | Checks if using dill as the pickle module, and if so, checks if it is the correct version.
If dill version is lower than 0.3.1, a ValueError is raised. | [
"Checks",
"if",
"using",
"dill",
"as",
"the",
"pickle",
"module",
"and",
"if",
"so",
"checks",
"if",
"it",
"is",
"the",
"correct",
"version",
".",
"If",
"dill",
"version",
"is",
"lower",
"than",
"0",
".",
"3",
".",
"1",
"a",
"ValueError",
"is",
"raised",
"."
] | def _check_dill_version(pickle_module) -> None:
'''Checks if using dill as the pickle module, and if so, checks if it is the correct version.
If dill version is lower than 0.3.1, a ValueError is raised.
Args:
pickle_module: module used for pickling metadata and objects
'''
if pickle_module.__name__ == 'dill':
required_dill_version = (0, 3, 1)
if not check_module_version_greater_or_equal(pickle_module, required_dill_version, False):
raise ValueError((
"'torch' supports dill >= %s, but you have dill %s."
" Please upgrade dill or switch to 'pickle'"
) % (
'.'.join([str(num) for num in required_dill_version]),
pickle_module.__version__
)) | [
"def",
"_check_dill_version",
"(",
"pickle_module",
")",
"->",
"None",
":",
"if",
"pickle_module",
".",
"__name__",
"==",
"'dill'",
":",
"required_dill_version",
"=",
"(",
"0",
",",
"3",
",",
"1",
")",
"if",
"not",
"check_module_version_greater_or_equal",
"(",
"pickle_module",
",",
"required_dill_version",
",",
"False",
")",
":",
"raise",
"ValueError",
"(",
"(",
"\"'torch' supports dill >= %s, but you have dill %s.\"",
"\" Please upgrade dill or switch to 'pickle'\"",
")",
"%",
"(",
"'.'",
".",
"join",
"(",
"[",
"str",
"(",
"num",
")",
"for",
"num",
"in",
"required_dill_version",
"]",
")",
",",
"pickle_module",
".",
"__version__",
")",
")"
] | https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/torch/serialization.py#L315-L332 | ||
Genius-x/genius-x | 9fc9f194e6d1fb92dd0e33d43db19ddb67cda7b0 | cocos2d/tools/bindings-generator/clang/cindex.py | python | TokenKind.__init__ | (self, value, name) | Create a new TokenKind instance from a numeric value and a name. | Create a new TokenKind instance from a numeric value and a name. | [
"Create",
"a",
"new",
"TokenKind",
"instance",
"from",
"a",
"numeric",
"value",
"and",
"a",
"name",
"."
] | def __init__(self, value, name):
"""Create a new TokenKind instance from a numeric value and a name."""
self.value = value
self.name = name | [
"def",
"__init__",
"(",
"self",
",",
"value",
",",
"name",
")",
":",
"self",
".",
"value",
"=",
"value",
"self",
".",
"name",
"=",
"name"
] | https://github.com/Genius-x/genius-x/blob/9fc9f194e6d1fb92dd0e33d43db19ddb67cda7b0/cocos2d/tools/bindings-generator/clang/cindex.py#L556-L559 | ||
cathywu/Sentiment-Analysis | eb501fd1375c0c3f3ab430f963255f1bb858e659 | PyML-0.7.9/PyML/utils/misc.py | python | subList | (A, I, J = None) | return a sublist of a list
INPUT
A - list, list of lists, or a list of strings
I - subset of "rows" (first index) to take
J - subset of "columns" (second index) to take (optional)
returns A[i] for i in I
or A[i][j] for i in I and j in J if J is given | return a sublist of a list
INPUT
A - list, list of lists, or a list of strings
I - subset of "rows" (first index) to take
J - subset of "columns" (second index) to take (optional)
returns A[i] for i in I
or A[i][j] for i in I and j in J if J is given | [
"return",
"a",
"sublist",
"of",
"a",
"list",
"INPUT",
"A",
"-",
"list",
"list",
"of",
"lists",
"or",
"a",
"list",
"of",
"strings",
"I",
"-",
"subset",
"of",
"rows",
"(",
"first",
"index",
")",
"to",
"take",
"J",
"-",
"subset",
"of",
"columns",
"(",
"second",
"index",
")",
"to",
"take",
"(",
"optional",
")",
"returns",
"A",
"[",
"i",
"]",
"for",
"i",
"in",
"I",
"or",
"A",
"[",
"i",
"]",
"[",
"j",
"]",
"for",
"i",
"in",
"I",
"and",
"j",
"in",
"J",
"if",
"J",
"is",
"given"
] | def subList(A, I, J = None) :
'''return a sublist of a list
INPUT
A - list, list of lists, or a list of strings
I - subset of "rows" (first index) to take
J - subset of "columns" (second index) to take (optional)
returns A[i] for i in I
or A[i][j] for i in I and j in J if J is given
'''
if J is None :
return [A[i] for i in I]
elif type(A[0]) == type([]) :
print 1
return [[A[i][j] for j in J] for i in I]
elif type(A[0]) == type('') :
result = []
for i in I :
result.append(''.join([A[i][j] for j in J]))
return result
else :
print 'wrong type of input' | [
"def",
"subList",
"(",
"A",
",",
"I",
",",
"J",
"=",
"None",
")",
":",
"if",
"J",
"is",
"None",
":",
"return",
"[",
"A",
"[",
"i",
"]",
"for",
"i",
"in",
"I",
"]",
"elif",
"type",
"(",
"A",
"[",
"0",
"]",
")",
"==",
"type",
"(",
"[",
"]",
")",
":",
"print",
"1",
"return",
"[",
"[",
"A",
"[",
"i",
"]",
"[",
"j",
"]",
"for",
"j",
"in",
"J",
"]",
"for",
"i",
"in",
"I",
"]",
"elif",
"type",
"(",
"A",
"[",
"0",
"]",
")",
"==",
"type",
"(",
"''",
")",
":",
"result",
"=",
"[",
"]",
"for",
"i",
"in",
"I",
":",
"result",
".",
"append",
"(",
"''",
".",
"join",
"(",
"[",
"A",
"[",
"i",
"]",
"[",
"j",
"]",
"for",
"j",
"in",
"J",
"]",
")",
")",
"return",
"result",
"else",
":",
"print",
"'wrong type of input'"
] | https://github.com/cathywu/Sentiment-Analysis/blob/eb501fd1375c0c3f3ab430f963255f1bb858e659/PyML-0.7.9/PyML/utils/misc.py#L259-L280 | ||
shedskin/shedskin | ae88dbca7b1d9671cd8be448cb0b497122758936 | examples/life.py | python | snext | (board) | return new | Calculates the next stage | Calculates the next stage | [
"Calculates",
"the",
"next",
"stage"
] | def snext(board):
""" Calculates the next stage """
new = defaultdict(int, board)
for pos in list(board.keys()):
near = add(board, pos)
item = board[pos]
if near not in (2, 3) and item:
new[pos] = 0
elif near == 3 and not item:
new[pos] = 1
return new | [
"def",
"snext",
"(",
"board",
")",
":",
"new",
"=",
"defaultdict",
"(",
"int",
",",
"board",
")",
"for",
"pos",
"in",
"list",
"(",
"board",
".",
"keys",
"(",
")",
")",
":",
"near",
"=",
"add",
"(",
"board",
",",
"pos",
")",
"item",
"=",
"board",
"[",
"pos",
"]",
"if",
"near",
"not",
"in",
"(",
"2",
",",
"3",
")",
"and",
"item",
":",
"new",
"[",
"pos",
"]",
"=",
"0",
"elif",
"near",
"==",
"3",
"and",
"not",
"item",
":",
"new",
"[",
"pos",
"]",
"=",
"1",
"return",
"new"
] | https://github.com/shedskin/shedskin/blob/ae88dbca7b1d9671cd8be448cb0b497122758936/examples/life.py#L39-L49 | |
thalium/icebox | 99d147d5b9269222225443ce171b4fd46d8985d4 | third_party/virtualbox/src/VBox/Devices/EFI/Firmware/AppPkg/Applications/Python/PyMod-2.7.2/Lib/pydoc.py | python | TextDoc.section | (self, title, contents) | return self.bold(title) + '\n' + rstrip(self.indent(contents)) + '\n\n' | Format a section with a given heading. | Format a section with a given heading. | [
"Format",
"a",
"section",
"with",
"a",
"given",
"heading",
"."
] | def section(self, title, contents):
"""Format a section with a given heading."""
return self.bold(title) + '\n' + rstrip(self.indent(contents)) + '\n\n' | [
"def",
"section",
"(",
"self",
",",
"title",
",",
"contents",
")",
":",
"return",
"self",
".",
"bold",
"(",
"title",
")",
"+",
"'\\n'",
"+",
"rstrip",
"(",
"self",
".",
"indent",
"(",
"contents",
")",
")",
"+",
"'\\n\\n'"
] | https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/VBox/Devices/EFI/Firmware/AppPkg/Applications/Python/PyMod-2.7.2/Lib/pydoc.py#L997-L999 | |
qt/qt | 0a2f2382541424726168804be2c90b91381608c6 | src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/make.py | python | MakefileWriter.WriteSubMake | (self, output_filename, makefile_path, targets, build_dir) | Write a "sub-project" Makefile.
This is a small, wrapper Makefile that calls the top-level Makefile to build
the targets from a single gyp file (i.e. a sub-project).
Arguments:
output_filename: sub-project Makefile name to write
makefile_path: path to the top-level Makefile
targets: list of "all" targets for this sub-project
build_dir: build output directory, relative to the sub-project | Write a "sub-project" Makefile. | [
"Write",
"a",
"sub",
"-",
"project",
"Makefile",
"."
] | def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
"""Write a "sub-project" Makefile.
This is a small, wrapper Makefile that calls the top-level Makefile to build
the targets from a single gyp file (i.e. a sub-project).
Arguments:
output_filename: sub-project Makefile name to write
makefile_path: path to the top-level Makefile
targets: list of "all" targets for this sub-project
build_dir: build output directory, relative to the sub-project
"""
ensure_directory_exists(output_filename)
self.fp = open(output_filename, 'w')
self.fp.write(header)
# For consistency with other builders, put sub-project build output in the
# sub-project dir (see test/subdirectory/gyptest-subdir-all.py).
self.WriteLn('export builddir_name ?= %s' %
os.path.join(os.path.dirname(output_filename), build_dir))
self.WriteLn('.PHONY: all')
self.WriteLn('all:')
if makefile_path:
makefile_path = ' -C ' + makefile_path
self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets)))
self.fp.close() | [
"def",
"WriteSubMake",
"(",
"self",
",",
"output_filename",
",",
"makefile_path",
",",
"targets",
",",
"build_dir",
")",
":",
"ensure_directory_exists",
"(",
"output_filename",
")",
"self",
".",
"fp",
"=",
"open",
"(",
"output_filename",
",",
"'w'",
")",
"self",
".",
"fp",
".",
"write",
"(",
"header",
")",
"# For consistency with other builders, put sub-project build output in the",
"# sub-project dir (see test/subdirectory/gyptest-subdir-all.py).",
"self",
".",
"WriteLn",
"(",
"'export builddir_name ?= %s'",
"%",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"output_filename",
")",
",",
"build_dir",
")",
")",
"self",
".",
"WriteLn",
"(",
"'.PHONY: all'",
")",
"self",
".",
"WriteLn",
"(",
"'all:'",
")",
"if",
"makefile_path",
":",
"makefile_path",
"=",
"' -C '",
"+",
"makefile_path",
"self",
".",
"WriteLn",
"(",
"'\\t$(MAKE)%s %s'",
"%",
"(",
"makefile_path",
",",
"' '",
".",
"join",
"(",
"targets",
")",
")",
")",
"self",
".",
"fp",
".",
"close",
"(",
")"
] | https://github.com/qt/qt/blob/0a2f2382541424726168804be2c90b91381608c6/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/generator/make.py#L610-L634 | ||
ablab/spades | 3a754192b88540524ce6fb69eef5ea9273a38465 | webvis/pydot.py | python | Common.__get_attribute__ | (self, attr) | return None | Look for default attributes for this node | Look for default attributes for this node | [
"Look",
"for",
"default",
"attributes",
"for",
"this",
"node"
] | def __get_attribute__(self, attr):
"""Look for default attributes for this node"""
attr_val = self.obj_dict['attributes'].get(attr, None)
if attr_val is None:
# get the defaults for nodes/edges
default_node_name = self.obj_dict['type']
# The defaults for graphs are set on a node named 'graph'
if default_node_name in ('subgraph', 'digraph', 'cluster'):
default_node_name = 'graph'
g = self.get_parent_graph()
if g is not None:
defaults = g.get_node( default_node_name )
else:
return None
# Multiple defaults could be set by having repeated 'graph [...]'
# 'node [...]', 'edge [...]' statements. In such case, if the
# same attribute is set in different statements, only the first
# will be returned. In order to get all, one would call the
# get_*_defaults() methods and handle those. Or go node by node
# (of the ones specifying defaults) and modify the attributes
# individually.
#
if not isinstance(defaults, (list, tuple)):
defaults = [defaults]
for default in defaults:
attr_val = default.obj_dict['attributes'].get(attr, None)
if attr_val:
return attr_val
else:
return attr_val
return None | [
"def",
"__get_attribute__",
"(",
"self",
",",
"attr",
")",
":",
"attr_val",
"=",
"self",
".",
"obj_dict",
"[",
"'attributes'",
"]",
".",
"get",
"(",
"attr",
",",
"None",
")",
"if",
"attr_val",
"is",
"None",
":",
"# get the defaults for nodes/edges",
"default_node_name",
"=",
"self",
".",
"obj_dict",
"[",
"'type'",
"]",
"# The defaults for graphs are set on a node named 'graph'",
"if",
"default_node_name",
"in",
"(",
"'subgraph'",
",",
"'digraph'",
",",
"'cluster'",
")",
":",
"default_node_name",
"=",
"'graph'",
"g",
"=",
"self",
".",
"get_parent_graph",
"(",
")",
"if",
"g",
"is",
"not",
"None",
":",
"defaults",
"=",
"g",
".",
"get_node",
"(",
"default_node_name",
")",
"else",
":",
"return",
"None",
"# Multiple defaults could be set by having repeated 'graph [...]'",
"# 'node [...]', 'edge [...]' statements. In such case, if the",
"# same attribute is set in different statements, only the first",
"# will be returned. In order to get all, one would call the",
"# get_*_defaults() methods and handle those. Or go node by node",
"# (of the ones specifying defaults) and modify the attributes",
"# individually.",
"#",
"if",
"not",
"isinstance",
"(",
"defaults",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"defaults",
"=",
"[",
"defaults",
"]",
"for",
"default",
"in",
"defaults",
":",
"attr_val",
"=",
"default",
".",
"obj_dict",
"[",
"'attributes'",
"]",
".",
"get",
"(",
"attr",
",",
"None",
")",
"if",
"attr_val",
":",
"return",
"attr_val",
"else",
":",
"return",
"attr_val",
"return",
"None"
] | https://github.com/ablab/spades/blob/3a754192b88540524ce6fb69eef5ea9273a38465/webvis/pydot.py#L591-L629 | |
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/difference_table_widget/difference_table_widget_view.py | python | DifferenceTableView.contextMenuEvent | (self, _event) | Overridden method for dealing with the right-click context menu | Overridden method for dealing with the right-click context menu | [
"Overridden",
"method",
"for",
"dealing",
"with",
"the",
"right",
"-",
"click",
"context",
"menu"
] | def contextMenuEvent(self, _event):
"""Overridden method for dealing with the right-click context menu"""
self.menu = QtWidgets.QMenu(self)
self.add_diff_action = self._context_menu_add_diff_action(self.add_diff_button.clicked.emit)
self.remove_diff_action = self._context_menu_remove_diff_action(self.remove_diff_button.clicked.emit)
if self._disabled:
self.add_diff_action.setEnabled(False)
self.remove_diff_action.setEnabled(False)
# set-up the menu
self.menu.addAction(self.add_diff_action)
self.menu.addAction(self.remove_diff_action)
self.menu.popup(QtGui.QCursor.pos()) | [
"def",
"contextMenuEvent",
"(",
"self",
",",
"_event",
")",
":",
"self",
".",
"menu",
"=",
"QtWidgets",
".",
"QMenu",
"(",
"self",
")",
"self",
".",
"add_diff_action",
"=",
"self",
".",
"_context_menu_add_diff_action",
"(",
"self",
".",
"add_diff_button",
".",
"clicked",
".",
"emit",
")",
"self",
".",
"remove_diff_action",
"=",
"self",
".",
"_context_menu_remove_diff_action",
"(",
"self",
".",
"remove_diff_button",
".",
"clicked",
".",
"emit",
")",
"if",
"self",
".",
"_disabled",
":",
"self",
".",
"add_diff_action",
".",
"setEnabled",
"(",
"False",
")",
"self",
".",
"remove_diff_action",
".",
"setEnabled",
"(",
"False",
")",
"# set-up the menu",
"self",
".",
"menu",
".",
"addAction",
"(",
"self",
".",
"add_diff_action",
")",
"self",
".",
"menu",
".",
"addAction",
"(",
"self",
".",
"remove_diff_action",
")",
"self",
".",
"menu",
".",
"popup",
"(",
"QtGui",
".",
"QCursor",
".",
"pos",
"(",
")",
")"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/difference_table_widget/difference_table_widget_view.py#L216-L229 | ||
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | src/python/turicreate/util/_cloudpickle/_cloudpickle_fast.py | python | _function_setstate | (obj, state) | Update the state of a dynaamic function.
As __closure__ and __globals__ are readonly attributes of a function, we
cannot rely on the native setstate routine of pickle.load_build, that calls
setattr on items of the slotstate. Instead, we have to modify them inplace. | Update the state of a dynaamic function. | [
"Update",
"the",
"state",
"of",
"a",
"dynaamic",
"function",
"."
] | def _function_setstate(obj, state):
"""Update the state of a dynaamic function.
As __closure__ and __globals__ are readonly attributes of a function, we
cannot rely on the native setstate routine of pickle.load_build, that calls
setattr on items of the slotstate. Instead, we have to modify them inplace.
"""
state, slotstate = state
obj.__dict__.update(state)
obj_globals = slotstate.pop("__globals__")
obj_closure = slotstate.pop("__closure__")
# _cloudpickle_subimports is a set of submodules that must be loaded for
# the pickled function to work correctly at unpickling time. Now that these
# submodules are depickled (hence imported), they can be removed from the
# object's state (the object state only served as a reference holder to
# these submodules)
slotstate.pop("_cloudpickle_submodules")
obj.__globals__.update(obj_globals)
obj.__globals__["__builtins__"] = __builtins__
if obj_closure is not None:
for i, cell in enumerate(obj_closure):
try:
value = cell.cell_contents
except ValueError: # cell is empty
continue
cell_set(obj.__closure__[i], value)
for k, v in slotstate.items():
setattr(obj, k, v) | [
"def",
"_function_setstate",
"(",
"obj",
",",
"state",
")",
":",
"state",
",",
"slotstate",
"=",
"state",
"obj",
".",
"__dict__",
".",
"update",
"(",
"state",
")",
"obj_globals",
"=",
"slotstate",
".",
"pop",
"(",
"\"__globals__\"",
")",
"obj_closure",
"=",
"slotstate",
".",
"pop",
"(",
"\"__closure__\"",
")",
"# _cloudpickle_subimports is a set of submodules that must be loaded for",
"# the pickled function to work correctly at unpickling time. Now that these",
"# submodules are depickled (hence imported), they can be removed from the",
"# object's state (the object state only served as a reference holder to",
"# these submodules)",
"slotstate",
".",
"pop",
"(",
"\"_cloudpickle_submodules\"",
")",
"obj",
".",
"__globals__",
".",
"update",
"(",
"obj_globals",
")",
"obj",
".",
"__globals__",
"[",
"\"__builtins__\"",
"]",
"=",
"__builtins__",
"if",
"obj_closure",
"is",
"not",
"None",
":",
"for",
"i",
",",
"cell",
"in",
"enumerate",
"(",
"obj_closure",
")",
":",
"try",
":",
"value",
"=",
"cell",
".",
"cell_contents",
"except",
"ValueError",
":",
"# cell is empty",
"continue",
"cell_set",
"(",
"obj",
".",
"__closure__",
"[",
"i",
"]",
",",
"value",
")",
"for",
"k",
",",
"v",
"in",
"slotstate",
".",
"items",
"(",
")",
":",
"setattr",
"(",
"obj",
",",
"k",
",",
"v",
")"
] | https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/python/turicreate/util/_cloudpickle/_cloudpickle_fast.py#L447-L478 | ||
facebookresearch/habitat-sim | 63b6c71d9ca8adaefb140b198196f5d0ca1f1e34 | src_python/habitat_sim/robots/mobile_manipulator.py | python | MobileManipulator.clip_ee_to_workspace | (self, pos: np.ndarray) | return np.clip(
pos, self.params.ee_constraint[:, 0], self.params.ee_constraint[:, 1]
) | Clips a 3D end-effector position within region the robot can reach. | Clips a 3D end-effector position within region the robot can reach. | [
"Clips",
"a",
"3D",
"end",
"-",
"effector",
"position",
"within",
"region",
"the",
"robot",
"can",
"reach",
"."
] | def clip_ee_to_workspace(self, pos: np.ndarray) -> np.ndarray:
"""Clips a 3D end-effector position within region the robot can reach."""
return np.clip(
pos, self.params.ee_constraint[:, 0], self.params.ee_constraint[:, 1]
) | [
"def",
"clip_ee_to_workspace",
"(",
"self",
",",
"pos",
":",
"np",
".",
"ndarray",
")",
"->",
"np",
".",
"ndarray",
":",
"return",
"np",
".",
"clip",
"(",
"pos",
",",
"self",
".",
"params",
".",
"ee_constraint",
"[",
":",
",",
"0",
"]",
",",
"self",
".",
"params",
".",
"ee_constraint",
"[",
":",
",",
"1",
"]",
")"
] | https://github.com/facebookresearch/habitat-sim/blob/63b6c71d9ca8adaefb140b198196f5d0ca1f1e34/src_python/habitat_sim/robots/mobile_manipulator.py#L434-L438 | |
gnuradio/gnuradio | 09c3c4fa4bfb1a02caac74cb5334dfe065391e3b | gr-utils/blocktool/cli.py | python | parse_directory | (**kwargs) | Get parsed json and yaml output for complete header directory | Get parsed json and yaml output for complete header directory | [
"Get",
"parsed",
"json",
"and",
"yaml",
"output",
"for",
"complete",
"header",
"directory"
] | def parse_directory(**kwargs):
"""
Get parsed json and yaml output for complete header directory
"""
kwargs['output'] = True
dir_path = kwargs['file_path']
dir_path = os.path.abspath(dir_path)
list_header = []
dir_name = os.path.basename(dir_path)
for _header in os.listdir(dir_path):
if _header.endswith('.h') and os.path.isfile(os.path.join(dir_path, _header)):
list_header.append(os.path.join(dir_path, _header))
list_header = sorted(list_header)
if list_header:
for header_path in list_header:
kwargs['file_path'] = header_path
header = os.path.basename(header_path)
try:
parse_dir = BlockHeaderParser(**kwargs)
parse_dir.yaml = True
parse_dir.json = True
run_blocktool(parse_dir)
yaml_generator(parse_dir, **kwargs)
if not kwargs['modtool']:
json_generator(parse_dir, **kwargs)
except:
logging.basicConfig(level=logging.DEBUG,
filename=os.path.join('.', dir_name + '_log.out'))
logging.exception(
'Log for Exception raised for the header: {}\n'.format(header))
click.secho('Parsing unsuccessful: {}'.format(
header), fg='yellow')
else:
raise BlockToolException(
'Invalid directory! No header found to be parsed') | [
"def",
"parse_directory",
"(",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"[",
"'output'",
"]",
"=",
"True",
"dir_path",
"=",
"kwargs",
"[",
"'file_path'",
"]",
"dir_path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"dir_path",
")",
"list_header",
"=",
"[",
"]",
"dir_name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"dir_path",
")",
"for",
"_header",
"in",
"os",
".",
"listdir",
"(",
"dir_path",
")",
":",
"if",
"_header",
".",
"endswith",
"(",
"'.h'",
")",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"dir_path",
",",
"_header",
")",
")",
":",
"list_header",
".",
"append",
"(",
"os",
".",
"path",
".",
"join",
"(",
"dir_path",
",",
"_header",
")",
")",
"list_header",
"=",
"sorted",
"(",
"list_header",
")",
"if",
"list_header",
":",
"for",
"header_path",
"in",
"list_header",
":",
"kwargs",
"[",
"'file_path'",
"]",
"=",
"header_path",
"header",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"header_path",
")",
"try",
":",
"parse_dir",
"=",
"BlockHeaderParser",
"(",
"*",
"*",
"kwargs",
")",
"parse_dir",
".",
"yaml",
"=",
"True",
"parse_dir",
".",
"json",
"=",
"True",
"run_blocktool",
"(",
"parse_dir",
")",
"yaml_generator",
"(",
"parse_dir",
",",
"*",
"*",
"kwargs",
")",
"if",
"not",
"kwargs",
"[",
"'modtool'",
"]",
":",
"json_generator",
"(",
"parse_dir",
",",
"*",
"*",
"kwargs",
")",
"except",
":",
"logging",
".",
"basicConfig",
"(",
"level",
"=",
"logging",
".",
"DEBUG",
",",
"filename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"'.'",
",",
"dir_name",
"+",
"'_log.out'",
")",
")",
"logging",
".",
"exception",
"(",
"'Log for Exception raised for the header: {}\\n'",
".",
"format",
"(",
"header",
")",
")",
"click",
".",
"secho",
"(",
"'Parsing unsuccessful: {}'",
".",
"format",
"(",
"header",
")",
",",
"fg",
"=",
"'yellow'",
")",
"else",
":",
"raise",
"BlockToolException",
"(",
"'Invalid directory! No header found to be parsed'",
")"
] | https://github.com/gnuradio/gnuradio/blob/09c3c4fa4bfb1a02caac74cb5334dfe065391e3b/gr-utils/blocktool/cli.py#L93-L127 | ||
krishauser/Klampt | 972cc83ea5befac3f653c1ba20f80155768ad519 | Python/klampt/math/se3.py | python | rotation | (T : RigidTransform) | return so3.matrix(R) | Returns the 3x3 rotation matrix corresponding to T's rotation | Returns the 3x3 rotation matrix corresponding to T's rotation | [
"Returns",
"the",
"3x3",
"rotation",
"matrix",
"corresponding",
"to",
"T",
"s",
"rotation"
] | def rotation(T : RigidTransform) -> Rotation:
"""Returns the 3x3 rotation matrix corresponding to T's rotation"""
(R,t) = T
return so3.matrix(R) | [
"def",
"rotation",
"(",
"T",
":",
"RigidTransform",
")",
"->",
"Rotation",
":",
"(",
"R",
",",
"t",
")",
"=",
"T",
"return",
"so3",
".",
"matrix",
"(",
"R",
")"
] | https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/klampt/math/se3.py#L51-L54 | |
CGRU/cgru | 1881a4128530e3d31ac6c25314c18314fc50c2c7 | afanasy/python/af.py | python | Job.offline | (self) | Missing DocString | Missing DocString | [
"Missing",
"DocString"
] | def offline(self):
"""Missing DocString
"""
self.data["offline"] = True | [
"def",
"offline",
"(",
"self",
")",
":",
"self",
".",
"data",
"[",
"\"offline\"",
"]",
"=",
"True"
] | https://github.com/CGRU/cgru/blob/1881a4128530e3d31ac6c25314c18314fc50c2c7/afanasy/python/af.py#L881-L884 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/stc.py | python | StyledTextCtrl.SetWordChars | (*args, **kwargs) | return _stc.StyledTextCtrl_SetWordChars(*args, **kwargs) | SetWordChars(self, String characters)
Set the set of characters making up words for when moving or selecting by word.
First sets defaults like SetCharsDefault. | SetWordChars(self, String characters) | [
"SetWordChars",
"(",
"self",
"String",
"characters",
")"
] | def SetWordChars(*args, **kwargs):
"""
SetWordChars(self, String characters)
Set the set of characters making up words for when moving or selecting by word.
First sets defaults like SetCharsDefault.
"""
return _stc.StyledTextCtrl_SetWordChars(*args, **kwargs) | [
"def",
"SetWordChars",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_stc",
".",
"StyledTextCtrl_SetWordChars",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/stc.py#L2835-L2842 | |
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/lib-tk/Tix.py | python | CheckList.autosetmode | (self) | This command calls the setmode method for all the entries in this
Tree widget: if an entry has no child entries, its mode is set to
none. Otherwise, if the entry has any hidden child entries, its mode is
set to open; otherwise its mode is set to close. | This command calls the setmode method for all the entries in this
Tree widget: if an entry has no child entries, its mode is set to
none. Otherwise, if the entry has any hidden child entries, its mode is
set to open; otherwise its mode is set to close. | [
"This",
"command",
"calls",
"the",
"setmode",
"method",
"for",
"all",
"the",
"entries",
"in",
"this",
"Tree",
"widget",
":",
"if",
"an",
"entry",
"has",
"no",
"child",
"entries",
"its",
"mode",
"is",
"set",
"to",
"none",
".",
"Otherwise",
"if",
"the",
"entry",
"has",
"any",
"hidden",
"child",
"entries",
"its",
"mode",
"is",
"set",
"to",
"open",
";",
"otherwise",
"its",
"mode",
"is",
"set",
"to",
"close",
"."
] | def autosetmode(self):
'''This command calls the setmode method for all the entries in this
Tree widget: if an entry has no child entries, its mode is set to
none. Otherwise, if the entry has any hidden child entries, its mode is
set to open; otherwise its mode is set to close.'''
self.tk.call(self._w, 'autosetmode') | [
"def",
"autosetmode",
"(",
"self",
")",
":",
"self",
".",
"tk",
".",
"call",
"(",
"self",
".",
"_w",
",",
"'autosetmode'",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/lib-tk/Tix.py#L1582-L1587 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/ipython/py3/IPython/core/prefilter.py | python | PrefilterManager.handlers | (self) | return self._handlers | Return a dict of all the handlers. | Return a dict of all the handlers. | [
"Return",
"a",
"dict",
"of",
"all",
"the",
"handlers",
"."
] | def handlers(self):
"""Return a dict of all the handlers."""
return self._handlers | [
"def",
"handlers",
"(",
"self",
")",
":",
"return",
"self",
".",
"_handlers"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/ipython/py3/IPython/core/prefilter.py#L213-L215 | |
tiny-dnn/tiny-dnn | c0f576f5cb7b35893f62127cb7aec18f77a3bcc5 | third_party/cpplint.py | python | _BackupFilters | () | Saves the current filter list to backup storage. | Saves the current filter list to backup storage. | [
"Saves",
"the",
"current",
"filter",
"list",
"to",
"backup",
"storage",
"."
] | def _BackupFilters():
""" Saves the current filter list to backup storage."""
_cpplint_state.BackupFilters() | [
"def",
"_BackupFilters",
"(",
")",
":",
"_cpplint_state",
".",
"BackupFilters",
"(",
")"
] | https://github.com/tiny-dnn/tiny-dnn/blob/c0f576f5cb7b35893f62127cb7aec18f77a3bcc5/third_party/cpplint.py#L1178-L1180 | ||
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/python/mantidqtinterfaces/mantidqtinterfaces/reduction_gui/widgets/data_table_view.py | python | DataTableModel.emptyCells | (self, indexes) | empty the cells with the indexes
:param indexes: indexes of the cells to be emptied | empty the cells with the indexes
:param indexes: indexes of the cells to be emptied | [
"empty",
"the",
"cells",
"with",
"the",
"indexes",
":",
"param",
"indexes",
":",
"indexes",
"of",
"the",
"cells",
"to",
"be",
"emptied"
] | def emptyCells(self, indexes):
"""
empty the cells with the indexes
:param indexes: indexes of the cells to be emptied
"""
for index in indexes:
row = index.row()
col = index.column()
self._setCellText(row, col, "")
self._removeEmptyRows()
self.beginResetModel()
self.endResetModel()
# indexes is never empty
self.selectCell.emit(indexes[0]) | [
"def",
"emptyCells",
"(",
"self",
",",
"indexes",
")",
":",
"for",
"index",
"in",
"indexes",
":",
"row",
"=",
"index",
".",
"row",
"(",
")",
"col",
"=",
"index",
".",
"column",
"(",
")",
"self",
".",
"_setCellText",
"(",
"row",
",",
"col",
",",
"\"\"",
")",
"self",
".",
"_removeEmptyRows",
"(",
")",
"self",
".",
"beginResetModel",
"(",
")",
"self",
".",
"endResetModel",
"(",
")",
"# indexes is never empty",
"self",
".",
"selectCell",
".",
"emit",
"(",
"indexes",
"[",
"0",
"]",
")"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/reduction_gui/widgets/data_table_view.py#L122-L137 | ||
NVIDIA/thrust | 627dccb359a635afdd69e95a6cc59698f23f70e2 | internal/benchmark/compare_benchmark_results.py | python | record_aggregator.next | (self) | return (distinguishing_values, combined_dependent_values) | Produce the components of the next output record - a tuple of two
`dict`s. The first `dict` is a mapping of distinguishing variables to
distinguishing values, the second `dict` is a mapping of dependent
variables to combined dependent values. Combining the two dicts forms a
CSV row suitable for output.
This is a requirement for the `Iterator` protocol.
Raises:
StopIteration : If there is no more output.
AssertionError : If class invariants were violated. | Produce the components of the next output record - a tuple of two
`dict`s. The first `dict` is a mapping of distinguishing variables to
distinguishing values, the second `dict` is a mapping of dependent
variables to combined dependent values. Combining the two dicts forms a
CSV row suitable for output. | [
"Produce",
"the",
"components",
"of",
"the",
"next",
"output",
"record",
"-",
"a",
"tuple",
"of",
"two",
"dict",
"s",
".",
"The",
"first",
"dict",
"is",
"a",
"mapping",
"of",
"distinguishing",
"variables",
"to",
"distinguishing",
"values",
"the",
"second",
"dict",
"is",
"a",
"mapping",
"of",
"dependent",
"variables",
"to",
"combined",
"dependent",
"values",
".",
"Combining",
"the",
"two",
"dicts",
"forms",
"a",
"CSV",
"row",
"suitable",
"for",
"output",
"."
] | def next(self):
"""Produce the components of the next output record - a tuple of two
`dict`s. The first `dict` is a mapping of distinguishing variables to
distinguishing values, the second `dict` is a mapping of dependent
variables to combined dependent values. Combining the two dicts forms a
CSV row suitable for output.
This is a requirement for the `Iterator` protocol.
Raises:
StopIteration : If there is no more output.
AssertionError : If class invariants were violated.
"""
assert len(self.dataset.keys()) == len(self.in_order_dataset_keys), \
"Number of dataset keys (`" + str(len(self.dataset.keys())) + \
"`) is not equal to the number of keys in the ordering list (`" + \
str(len(self.in_order_dataset_keys)) + "`)."
if len(self.in_order_dataset_keys) == 0:
raise StopIteration()
# Get the next set of distinguishing values and convert them to a `dict`.
raw_distinguishing_values = self.in_order_dataset_keys.popleft()
distinguishing_values = dict(raw_distinguishing_values)
dependent_values = self.dataset.pop(raw_distinguishing_values)
combined_dependent_values = self.combine_dependent_values(dependent_values)
return (distinguishing_values, combined_dependent_values) | [
"def",
"next",
"(",
"self",
")",
":",
"assert",
"len",
"(",
"self",
".",
"dataset",
".",
"keys",
"(",
")",
")",
"==",
"len",
"(",
"self",
".",
"in_order_dataset_keys",
")",
",",
"\"Number of dataset keys (`\"",
"+",
"str",
"(",
"len",
"(",
"self",
".",
"dataset",
".",
"keys",
"(",
")",
")",
")",
"+",
"\"`) is not equal to the number of keys in the ordering list (`\"",
"+",
"str",
"(",
"len",
"(",
"self",
".",
"in_order_dataset_keys",
")",
")",
"+",
"\"`).\"",
"if",
"len",
"(",
"self",
".",
"in_order_dataset_keys",
")",
"==",
"0",
":",
"raise",
"StopIteration",
"(",
")",
"# Get the next set of distinguishing values and convert them to a `dict`.",
"raw_distinguishing_values",
"=",
"self",
".",
"in_order_dataset_keys",
".",
"popleft",
"(",
")",
"distinguishing_values",
"=",
"dict",
"(",
"raw_distinguishing_values",
")",
"dependent_values",
"=",
"self",
".",
"dataset",
".",
"pop",
"(",
"raw_distinguishing_values",
")",
"combined_dependent_values",
"=",
"self",
".",
"combine_dependent_values",
"(",
"dependent_values",
")",
"return",
"(",
"distinguishing_values",
",",
"combined_dependent_values",
")"
] | https://github.com/NVIDIA/thrust/blob/627dccb359a635afdd69e95a6cc59698f23f70e2/internal/benchmark/compare_benchmark_results.py#L1032-L1061 | |
rdkit/rdkit | ede860ae316d12d8568daf5ee800921c3389c84e | rdkit/sping/PDF/pdfgen.py | python | Canvas.rect | (self, x, y, width, height, stroke=1, fill=0) | draws a rectangle | draws a rectangle | [
"draws",
"a",
"rectangle"
] | def rect(self, x, y, width, height, stroke=1, fill=0):
"draws a rectangle"
self._code.append('n %0.4f %0.4f %0.4f %0.4f re ' % (x, y, width, height) + PATH_OPS[
stroke, fill, self._fillMode]) | [
"def",
"rect",
"(",
"self",
",",
"x",
",",
"y",
",",
"width",
",",
"height",
",",
"stroke",
"=",
"1",
",",
"fill",
"=",
"0",
")",
":",
"self",
".",
"_code",
".",
"append",
"(",
"'n %0.4f %0.4f %0.4f %0.4f re '",
"%",
"(",
"x",
",",
"y",
",",
"width",
",",
"height",
")",
"+",
"PATH_OPS",
"[",
"stroke",
",",
"fill",
",",
"self",
".",
"_fillMode",
"]",
")"
] | https://github.com/rdkit/rdkit/blob/ede860ae316d12d8568daf5ee800921c3389c84e/rdkit/sping/PDF/pdfgen.py#L353-L356 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemFramework/v1/ResourceManager/lib/Crypto/Cipher/ARC2.py | python | _create_base_cipher | (dict_parameters) | return SmartPointer(cipher.get(), stop_operation) | This method instantiates and returns a handle to a low-level
base cipher. It will absorb named parameters in the process. | This method instantiates and returns a handle to a low-level
base cipher. It will absorb named parameters in the process. | [
"This",
"method",
"instantiates",
"and",
"returns",
"a",
"handle",
"to",
"a",
"low",
"-",
"level",
"base",
"cipher",
".",
"It",
"will",
"absorb",
"named",
"parameters",
"in",
"the",
"process",
"."
] | def _create_base_cipher(dict_parameters):
"""This method instantiates and returns a handle to a low-level
base cipher. It will absorb named parameters in the process."""
try:
key = dict_parameters.pop("key")
except KeyError:
raise TypeError("Missing 'key' parameter")
effective_keylen = dict_parameters.pop("effective_keylen", 1024)
if len(key) not in key_size:
raise ValueError("Incorrect ARC2 key length (%d bytes)" % len(key))
if not (40 <= effective_keylen <= 1024):
raise ValueError("'effective_key_len' must be at least 40 and no larger than 1024 "
"(not %d)" % effective_keylen)
start_operation = _raw_arc2_lib.ARC2_start_operation
stop_operation = _raw_arc2_lib.ARC2_stop_operation
cipher = VoidPointer()
result = start_operation(c_uint8_ptr(key),
c_size_t(len(key)),
c_size_t(effective_keylen),
cipher.address_of())
if result:
raise ValueError("Error %X while instantiating the ARC2 cipher"
% result)
return SmartPointer(cipher.get(), stop_operation) | [
"def",
"_create_base_cipher",
"(",
"dict_parameters",
")",
":",
"try",
":",
"key",
"=",
"dict_parameters",
".",
"pop",
"(",
"\"key\"",
")",
"except",
"KeyError",
":",
"raise",
"TypeError",
"(",
"\"Missing 'key' parameter\"",
")",
"effective_keylen",
"=",
"dict_parameters",
".",
"pop",
"(",
"\"effective_keylen\"",
",",
"1024",
")",
"if",
"len",
"(",
"key",
")",
"not",
"in",
"key_size",
":",
"raise",
"ValueError",
"(",
"\"Incorrect ARC2 key length (%d bytes)\"",
"%",
"len",
"(",
"key",
")",
")",
"if",
"not",
"(",
"40",
"<=",
"effective_keylen",
"<=",
"1024",
")",
":",
"raise",
"ValueError",
"(",
"\"'effective_key_len' must be at least 40 and no larger than 1024 \"",
"\"(not %d)\"",
"%",
"effective_keylen",
")",
"start_operation",
"=",
"_raw_arc2_lib",
".",
"ARC2_start_operation",
"stop_operation",
"=",
"_raw_arc2_lib",
".",
"ARC2_stop_operation",
"cipher",
"=",
"VoidPointer",
"(",
")",
"result",
"=",
"start_operation",
"(",
"c_uint8_ptr",
"(",
"key",
")",
",",
"c_size_t",
"(",
"len",
"(",
"key",
")",
")",
",",
"c_size_t",
"(",
"effective_keylen",
")",
",",
"cipher",
".",
"address_of",
"(",
")",
")",
"if",
"result",
":",
"raise",
"ValueError",
"(",
"\"Error %X while instantiating the ARC2 cipher\"",
"%",
"result",
")",
"return",
"SmartPointer",
"(",
"cipher",
".",
"get",
"(",
")",
",",
"stop_operation",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemFramework/v1/ResourceManager/lib/Crypto/Cipher/ARC2.py#L62-L92 | |
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi/toolchain/share/gdb/python/gdb/frames.py | python | _sort_list | () | return sorted_frame_filters | Internal Worker function to merge all known frame-filter
lists, prune any filters with the state set to "disabled", and
sort the list on the frame-filter's "priority" attribute.
Returns:
sorted_list: A sorted, pruned list of frame filters to
execute. | Internal Worker function to merge all known frame-filter
lists, prune any filters with the state set to "disabled", and
sort the list on the frame-filter's "priority" attribute. | [
"Internal",
"Worker",
"function",
"to",
"merge",
"all",
"known",
"frame",
"-",
"filter",
"lists",
"prune",
"any",
"filters",
"with",
"the",
"state",
"set",
"to",
"disabled",
"and",
"sort",
"the",
"list",
"on",
"the",
"frame",
"-",
"filter",
"s",
"priority",
"attribute",
"."
] | def _sort_list():
""" Internal Worker function to merge all known frame-filter
lists, prune any filters with the state set to "disabled", and
sort the list on the frame-filter's "priority" attribute.
Returns:
sorted_list: A sorted, pruned list of frame filters to
execute.
"""
all_filters = return_list("all")
sorted_frame_filters = sorted(all_filters, key = get_priority,
reverse = True)
sorted_frame_filters = filter(get_enabled,
sorted_frame_filters)
return sorted_frame_filters | [
"def",
"_sort_list",
"(",
")",
":",
"all_filters",
"=",
"return_list",
"(",
"\"all\"",
")",
"sorted_frame_filters",
"=",
"sorted",
"(",
"all_filters",
",",
"key",
"=",
"get_priority",
",",
"reverse",
"=",
"True",
")",
"sorted_frame_filters",
"=",
"filter",
"(",
"get_enabled",
",",
"sorted_frame_filters",
")",
"return",
"sorted_frame_filters"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/share/gdb/python/gdb/frames.py#L135-L152 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/combo.py | python | ComboCtrl.HidePopup | (*args, **kwargs) | return _combo.ComboCtrl_HidePopup(*args, **kwargs) | HidePopup(self)
Dismisses the popup window. | HidePopup(self) | [
"HidePopup",
"(",
"self",
")"
] | def HidePopup(*args, **kwargs):
"""
HidePopup(self)
Dismisses the popup window.
"""
return _combo.ComboCtrl_HidePopup(*args, **kwargs) | [
"def",
"HidePopup",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_combo",
".",
"ComboCtrl_HidePopup",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/combo.py#L140-L146 | |
microsoft/TSS.MSR | 0f2516fca2cd9929c31d5450e39301c9bde43688 | TSS.Py/src/TpmTypes.py | python | TPM2B_MAX_NV_BUFFER.fromBytes | (buffer) | return TpmBuffer(buffer).createObj(TPM2B_MAX_NV_BUFFER) | Returns new TPM2B_MAX_NV_BUFFER object constructed from its
marshaled representation in the given byte buffer | Returns new TPM2B_MAX_NV_BUFFER object constructed from its
marshaled representation in the given byte buffer | [
"Returns",
"new",
"TPM2B_MAX_NV_BUFFER",
"object",
"constructed",
"from",
"its",
"marshaled",
"representation",
"in",
"the",
"given",
"byte",
"buffer"
] | def fromBytes(buffer):
""" Returns new TPM2B_MAX_NV_BUFFER object constructed from its
marshaled representation in the given byte buffer
"""
return TpmBuffer(buffer).createObj(TPM2B_MAX_NV_BUFFER) | [
"def",
"fromBytes",
"(",
"buffer",
")",
":",
"return",
"TpmBuffer",
"(",
"buffer",
")",
".",
"createObj",
"(",
"TPM2B_MAX_NV_BUFFER",
")"
] | https://github.com/microsoft/TSS.MSR/blob/0f2516fca2cd9929c31d5450e39301c9bde43688/TSS.Py/src/TpmTypes.py#L3913-L3917 | |
def CheckForMultilineCommentsAndStrings(filename, clean_lines, linenum, error):
  """Warns when a /* ... */ comment is opened but not closed on one line.

  /* ... */ comments are legit inside macros, for one line, but
  multi-line ones confuse this linter, so we prefer //-style comments.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    error: The function to call with any errors found.
  """
  line = clean_lines.elided[linenum]

  # Escaped backslashes are stripped first: they are harmless here, but a
  # later escaped quote could otherwise be misread during detection.
  line = line.replace('\\\\', '')

  comment_opens = line.count('/*')
  comment_closes = line.count('*/')
  if comment_opens > comment_closes:
    error(filename, linenum, 'readability/multiline_comment', 5,
          'Complex multi-line /*...*/-style comment found. '
          'Lint may give bogus warnings.  '
          'Consider replacing these with //-style comments, '
          'with #if 0...#endif, '
          'or with more clearly structured multi-line comments.')
def check_connections(gd):
    """
    Given a graph, checks that all
    - inputs/outputs are symmetric
    - control_inputs/control_outputs are symmetric
    - The graph does not reference vertices outside of the graph

    Takes a graph in "dict{str, ParsedNode}" form. Does not return,
    asserts false on failure.
    """

    def node_name(entry):
        # Edge entries are either plain name strings or node objects
        # that carry the name in their .name attribute.
        return entry if isinstance(entry, six.string_types) else entry.name

    for name, node in gd.items():
        # Every input must list this node among its outputs.
        for inp in node.inputs:
            assert name in gd[node_name(inp)].outputs
        # Every output must list this node among its inputs.
        for out in node.outputs:
            assert name in [node_name(i) for i in gd[out].inputs]
        # The same symmetry must hold for the control edges.
        for inp in node.control_inputs:
            assert name in gd[node_name(inp)].control_outputs
        for out in node.control_outputs:
            assert name in [node_name(i) for i in gd[out].control_inputs]
def _to_values_def(self, export_scope=None):
    """Converts the values to a `ValuesDef` protocol buffer.

    Args:
      export_scope: Optional `string`. Name scope to remove.

    Returns:
      A `ValuesDef` protocol buffer.
    """
    values_def = control_flow_pb2.ValuesDef()
    # Emit value names in deterministic (sorted) order with the export
    # scope stripped from each.
    stripped_names = [ops.strip_name_scope(value_name, export_scope)
                      for value_name in sorted(self._values)]
    values_def.values.extend(stripped_names)
    for name, tensor in self._external_values.items():
        key = ops.strip_name_scope(name, export_scope)
        values_def.external_values[key] = ops.strip_name_scope(
            tensor.name, export_scope)
    return values_def
def map(self, style, query_opt=None, **kw):
    """Query or sets dynamic values of the specified option(s) in
    style.

    Each key in kw is an option and each value should be a list or a
    tuple (usually) containing statespecs grouped in tuples, or list,
    or something else of your preference. A statespec is compound of
    one or more states and then a value."""
    if query_opt is not None:
        # Query mode: ask Tk for the statespec list of a single option.
        raw = self.tk.call(self._name, "map", style, '-%s' % query_opt)
        return _list_from_statespec(self.tk.splitlist(raw))
    # Set mode: hand every option/statespec pair to Tk in one call and
    # split the reply back into a Python dict.
    reply = self.tk.call(self._name, "map", style, *_format_mapdict(kw))
    return _splitdict(self.tk, reply, conv=_tclobj_to_py)
def start_macro(self):
    """Begin recording a macro: reset any previous recording and switch
    the processor into record mode."""
    self.macro = []
    self.record_macro = True
def spelling(self):
    """Retrieve the spelling of this Type."""
    # Delegate to the libclang C API via the shared configuration object.
    name = conf.lib.clang_getTypeSpelling(self)
    return name
def Prepare(self):
    """All PrepareOut plugins must override this method to do their work."""
    # The base class is abstract by convention: concrete PrepareOut
    # plugins supply the actual preparation logic.
    raise NotImplementedError('Must be overridden.')
def getVelocity(self):
    """
    getVelocity(RobotModelLink self)

    Returns the velocity of the link's origin given the robot's current joint
    configuration and velocities. Equivalent to getPointVelocity([0,0,0]).

    Returns:
        (list of 3 floats): the current velocity of the link's origin, in
        world coordinates
    """
    # SWIG-generated shim: the computation lives in the native extension.
    velocity = _robotsim.RobotModelLink_getVelocity(self)
    return velocity
def modify_env_file(self):
    """
    Modify env file
    """
    if not self.env_mapping:
        # No environment entries are mapped, leave the file untouched.
        g.logger.info('No need write env file.')
        return
    with open(self.env_file_path, 'r') as env_file:
        lines = env_file.readlines()
    # Drop every existing line that mentions a managed key; each pass
    # also strips surrounding whitespace from the surviving lines.
    for managed_key in ENV_MENU.keys():
        lines = [line.strip() for line in lines if managed_key not in line]
    # Append the canonical entries back in.
    lines += ENV_MENU.values()
    with open(self.env_file_path, 'w') as env_file:
        env_file.write('\n'.join(lines))
    g.logger.info('Successfully modify env file.')
def get_files(self):
    """
    Gets a list of source files for this library.

    Typically, you will use `src_dir`, `src_files`, `src_glob` and `src_glob_exclude`.
    If those are insufficient to describe the files needed, you can override this method.
    """
    if not self.src_dir:
      # Without a source directory the declarative attributes cannot
      # describe the file set; a subclass must override this method.
      raise NotImplementedError()
    if self.src_files and self.src_glob:
      raise Exception('Cannot use src_files and src_glob together')
    if self.src_files:
      return files_in_path(self.src_dir, self.src_files)
    if self.src_glob:
      return glob_in_path(self.src_dir, self.src_glob, self.src_glob_exclude or ())
    # src_dir alone is not enough to enumerate files either.
    raise NotImplementedError()
def _fetch_lfw_people(data_folder_path, slice_=None, color=False, resize=None,
                      min_faces_per_person=0):
    """Perform the actual data loading for the lfw people dataset

    This operation is meant to be cached by a joblib wrapper.
    """
    # Scan the data folder and keep only the people that have at least
    # `min_faces_per_person` face pictures.
    person_names = []
    file_paths = []
    for person_name in sorted(listdir(data_folder_path)):
        folder_path = join(data_folder_path, person_name)
        if not isdir(folder_path):
            continue
        paths = [join(folder_path, filename)
                 for filename in listdir(folder_path)]
        if len(paths) >= min_faces_per_person:
            display_name = person_name.replace('_', ' ')
            person_names.extend([display_name] * len(paths))
            file_paths.extend(paths)

    n_faces = len(file_paths)
    if n_faces == 0:
        raise ValueError("min_faces_per_person=%d is too restrictive" %
                         min_faces_per_person)

    target_names = np.unique(person_names)
    target = np.searchsorted(target_names, person_names)

    faces = _load_imgs(file_paths, slice_, color, resize)

    # Shuffle the faces with a deterministic RNG scheme to avoid having
    # all faces of the same person in a row, as it would break some
    # cross validation and learning algorithms such as SGD and online
    # k-means that make an IID assumption.
    indices = np.arange(n_faces)
    np.random.RandomState(42).shuffle(indices)
    faces, target = faces[indices], target[indices]
    return faces, target, target_names
def __getitem__(self, idx: Union[int, slice, List[int]]) -> np.ndarray:
    """Return data for given row index.

    A basic implementation should look like this:

    .. code-block:: python

        if isinstance(idx, numbers.Integral):
            return self._get_one_line(idx)
        elif isinstance(idx, slice):
            return np.stack([self._get_one_line(i) for i in range(idx.start, idx.stop)])
        elif isinstance(idx, list):
            # Only required if using ``Dataset.subset()``.
            return np.array([self._get_one_line(i) for i in idx])
        else:
            raise TypeError(f"Sequence index must be integer, slice or list, got {type(idx).__name__}")

    Parameters
    ----------
    idx : int, slice[int], list[int]
        Item index.

    Returns
    -------
    result : numpy 1-D array or numpy 2-D array
        1-D array if idx is int, 2-D array if idx is slice or list.
    """
    # Abstract: only the concrete sequence type knows its data layout.
    message = "Sub-classes of lightgbm.Sequence must implement __getitem__()"
    raise NotImplementedError(message)
def AppendString(self, str):
    """AppendString(SBStringList self, char const * str)"""
    # SWIG-generated shim; forwards to the native LLDB implementation.
    # (The `str` parameter name shadows the builtin but is part of the
    # generated interface and must be preserved.)
    appended = _lldb.SBStringList_AppendString(self, str)
    return appended
def check_string(arg_value, valid_values, arg_name=None, prim_name=None):
    """
    Check whether string is in some value list.

    Usage:
    - method = check_string(method, ["string1", "string2", "string3"], "method")
    """
    if isinstance(arg_value, str) and arg_value in valid_values:
        return arg_value
    # Build the same diagnostic wording the other validators use.
    if not arg_name:
        arg_name = "Parameter"
    if prim_name:
        msg_prefix = f"For '{prim_name}' the"
    else:
        msg_prefix = "The"
    raise ValueError(
        f"{msg_prefix} '{arg_name}' should be str and must be in "
        f"'{valid_values}', but got '{arg_value}'.")
def BroadcastEvent(self, event, unique=False):
    """
    BroadcastEvent(SBBroadcaster self, SBEvent event, bool unique=False)
    BroadcastEvent(SBBroadcaster self, SBEvent event)
    """
    # SWIG-generated shim; the native layer interprets the optional
    # uniqueness flag.
    outcome = _lldb.SBBroadcaster_BroadcastEvent(self, event, unique)
    return outcome
def TryCompile(self, text, extension):
    """Compiles the program given in text to an env.Object, using extension
    as file extension (e.g. '.c'). Returns 1, if compilation was
    successful, 0 otherwise. The target is saved in self.lastTarget (for
    further processing).
    """
    # Compilation is just a TryBuild with the Object builder.
    builder = self.env.Object
    return self.TryBuild(builder, text, extension)
def AddAssumption(self, lit):
    """Add the literal 'lit' to the model as assumptions."""
    # Resolve the literal to its boolean index before storing it in the
    # underlying proto's assumptions list.
    index = self.GetOrMakeBooleanIndex(lit)
    self.__model.assumptions.append(index)
def min_samples_leaf_(self):
    """
    Minimum number of training data points in each leaf node
    used in model building process.
    """
    if hasattr(self, '_min_samples_leaf'):
        return self._min_samples_leaf
    # The attribute only exists once fit() has run; mirror the
    # scikit-learn "not fitted" error otherwise.
    raise NotFittedError(utils.NOT_FITTED_ERROR_DESC)
def _round_down(self, prec):
    """Also known as round-towards-0, truncate."""
    # 0 means the digits beyond `prec` are all zero (nothing discarded);
    # -1 reports that truncation dropped nonzero digits.
    return 0 if _all_zeros(self._int, prec) else -1
def get_checkpoint_state(self):
    '''
    Gets the checkpoint state of the MinibatchSource.

    Returns:
        A dict that has the checkpoint state of the MinibatchSource
    '''
    # Delegate straight to the SWIG base class implementation.
    parent = super(MinibatchSource, self)
    return parent.get_checkpoint_state()
PaddlePaddle/PaddleOCR | b756bf5f8c90142e0d89d3db0163965c686b6ffe | ppocr/utils/save_load.py | python | _mkdir_if_not_exist | (path, logger) | mkdir if not exists, ignore the exception when multiprocess mkdir together | mkdir if not exists, ignore the exception when multiprocess mkdir together | [
"mkdir",
"if",
"not",
"exists",
"ignore",
"the",
"exception",
"when",
"multiprocess",
"mkdir",
"together"
] | def _mkdir_if_not_exist(path, logger):
"""
mkdir if not exists, ignore the exception when multiprocess mkdir together
"""
if not os.path.exists(path):
try:
os.makedirs(path)
except OSError as e:
if e.errno == errno.EEXIST and os.path.isdir(path):
logger.warning(
'be happy if some process has already created {}'.format(
path))
else:
raise OSError('Failed to mkdir {}'.format(path)) | [
"def",
"_mkdir_if_not_exist",
"(",
"path",
",",
"logger",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"try",
":",
"os",
".",
"makedirs",
"(",
"path",
")",
"except",
"OSError",
"as",
"e",
":",
"if",
"e",
".",
"errno",
"==",
"errno",
".",
"EEXIST",
"and",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"logger",
".",
"warning",
"(",
"'be happy if some process has already created {}'",
".",
"format",
"(",
"path",
")",
")",
"else",
":",
"raise",
"OSError",
"(",
"'Failed to mkdir {}'",
".",
"format",
"(",
"path",
")",
")"
] | https://github.com/PaddlePaddle/PaddleOCR/blob/b756bf5f8c90142e0d89d3db0163965c686b6ffe/ppocr/utils/save_load.py#L31-L44 | ||
hpi-xnor/BMXNet | ed0b201da6667887222b8e4b5f997c4f6b61943d | example/fcn-xs/data.py | python | FileIter.next | (self) | return one dict which contains "data" and "label" | return one dict which contains "data" and "label" | [
"return",
"one",
"dict",
"which",
"contains",
"data",
"and",
"label"
] | def next(self):
"""return one dict which contains "data" and "label" """
if self.iter_next():
self.data, self.label = self._read()
return {self.data_name : self.data[0][1],
self.label_name : self.label[0][1]}
else:
raise StopIteration | [
"def",
"next",
"(",
"self",
")",
":",
"if",
"self",
".",
"iter_next",
"(",
")",
":",
"self",
".",
"data",
",",
"self",
".",
"label",
"=",
"self",
".",
"_read",
"(",
")",
"return",
"{",
"self",
".",
"data_name",
":",
"self",
".",
"data",
"[",
"0",
"]",
"[",
"1",
"]",
",",
"self",
".",
"label_name",
":",
"self",
".",
"label",
"[",
"0",
"]",
"[",
"1",
"]",
"}",
"else",
":",
"raise",
"StopIteration"
] | https://github.com/hpi-xnor/BMXNet/blob/ed0b201da6667887222b8e4b5f997c4f6b61943d/example/fcn-xs/data.py#L132-L139 | ||
KratosMultiphysics/Kratos | 0000833054ed0503424eb28205d6508d9ca6cbbc | applications/StructuralMechanicsApplication/python_scripts/structural_mechanics_custom_scipy_base_solver.py | python | CustomScipyBaseSolver.SolveSolutionStep | (self) | return True | This method must be overriden in derived class.
The computation of the egenvalue problem is only an example how this solver is to be used. | This method must be overriden in derived class. | [
"This",
"method",
"must",
"be",
"overriden",
"in",
"derived",
"class",
"."
] | def SolveSolutionStep(self):
"""This method must be overriden in derived class.
The computation of the egenvalue problem is only an example how this solver is to be used.
"""
## Obtain scipy matrices
M = self._MassMatrixComputation()
K = self._StiffnessMatrixComputation()
## Compute eigenvalues and eigenvectors
tolerance = 1e-6
iteration = M.size*100
vals, vecs = eigsh(K, 5, M, which='SM', tol=tolerance, maxiter = iteration)
## Assign results to Kratos variables
self._AssignVariables(vals,vecs)
return True | [
"def",
"SolveSolutionStep",
"(",
"self",
")",
":",
"## Obtain scipy matrices",
"M",
"=",
"self",
".",
"_MassMatrixComputation",
"(",
")",
"K",
"=",
"self",
".",
"_StiffnessMatrixComputation",
"(",
")",
"## Compute eigenvalues and eigenvectors",
"tolerance",
"=",
"1e-6",
"iteration",
"=",
"M",
".",
"size",
"*",
"100",
"vals",
",",
"vecs",
"=",
"eigsh",
"(",
"K",
",",
"5",
",",
"M",
",",
"which",
"=",
"'SM'",
",",
"tol",
"=",
"tolerance",
",",
"maxiter",
"=",
"iteration",
")",
"## Assign results to Kratos variables",
"self",
".",
"_AssignVariables",
"(",
"vals",
",",
"vecs",
")",
"return",
"True"
] | https://github.com/KratosMultiphysics/Kratos/blob/0000833054ed0503424eb28205d6508d9ca6cbbc/applications/StructuralMechanicsApplication/python_scripts/structural_mechanics_custom_scipy_base_solver.py#L175-L192 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.