nwo stringlengths 5 86 | sha stringlengths 40 40 | path stringlengths 4 189 | language stringclasses 1 value | identifier stringlengths 1 94 | parameters stringlengths 2 4.03k | argument_list stringclasses 1 value | return_statement stringlengths 0 11.5k | docstring stringlengths 1 33.2k | docstring_summary stringlengths 0 5.15k | docstring_tokens list | function stringlengths 34 151k | function_tokens list | url stringlengths 90 278 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
tum-vision/fusenet | a1451be2971b348a01b0f525c2a3a7a0e215a591 | scripts/cpp_lint.py | python | GetHeaderGuardCPPVariable | (filename) | return re.sub(r'[-./\s]', '_', file_path_from_root).upper() + '_' | Returns the CPP variable that should be used as a header guard.
Args:
filename: The name of a C++ header file.
Returns:
The CPP variable that should be used as a header guard in the
named file. | Returns the CPP variable that should be used as a header guard. | [
"Returns",
"the",
"CPP",
"variable",
"that",
"should",
"be",
"used",
"as",
"a",
"header",
"guard",
"."
] | def GetHeaderGuardCPPVariable(filename):
"""Returns the CPP variable that should be used as a header guard.
Args:
filename: The name of a C++ header file.
Returns:
The CPP variable that should be used as a header guard in the
named file.
"""
# Restores original filename in case that cpplint is invoked from Emacs's
# flymake.
filename = re.sub(r'_flymake\.h$', '.h', filename)
filename = re.sub(r'/\.flymake/([^/]*)$', r'/\1', filename)
fileinfo = FileInfo(filename)
file_path_from_root = fileinfo.RepositoryName()
if _root:
file_path_from_root = re.sub('^' + _root + os.sep, '', file_path_from_root)
return re.sub(r'[-./\s]', '_', file_path_from_root).upper() + '_' | [
"def",
"GetHeaderGuardCPPVariable",
"(",
"filename",
")",
":",
"# Restores original filename in case that cpplint is invoked from Emacs's",
"# flymake.",
"filename",
"=",
"re",
".",
"sub",
"(",
"r'_flymake\\.h$'",
",",
"'.h'",
",",
"filename",
")",
"filename",
"=",
"re",
".",
"sub",
"(",
"r'/\\.flymake/([^/]*)$'",
",",
"r'/\\1'",
",",
"filename",
")",
"fileinfo",
"=",
"FileInfo",
"(",
"filename",
")",
"file_path_from_root",
"=",
"fileinfo",
".",
"RepositoryName",
"(",
")",
"if",
"_root",
":",
"file_path_from_root",
"=",
"re",
".",
"sub",
"(",
"'^'",
"+",
"_root",
"+",
"os",
".",
"sep",
",",
"''",
",",
"file_path_from_root",
")",
"return",
"re",
".",
"sub",
"(",
"r'[-./\\s]'",
",",
"'_'",
",",
"file_path_from_root",
")",
".",
"upper",
"(",
")",
"+",
"'_'"
] | https://github.com/tum-vision/fusenet/blob/a1451be2971b348a01b0f525c2a3a7a0e215a591/scripts/cpp_lint.py#L1384-L1405 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/propgrid.py | python | PGProperty.GetGrid | (*args, **kwargs) | return _propgrid.PGProperty_GetGrid(*args, **kwargs) | GetGrid(self) -> PropertyGrid | GetGrid(self) -> PropertyGrid | [
"GetGrid",
"(",
"self",
")",
"-",
">",
"PropertyGrid"
] | def GetGrid(*args, **kwargs):
"""GetGrid(self) -> PropertyGrid"""
return _propgrid.PGProperty_GetGrid(*args, **kwargs) | [
"def",
"GetGrid",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_propgrid",
".",
"PGProperty_GetGrid",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/propgrid.py#L516-L518 | |
microsoft/ELL | a1d6bacc37a14879cc025d9be2ba40b1a0632315 | tools/importers/common/converters.py | python | ConvertPassthrough.convert | (self, conversion_parameters: typing.Mapping[str, typing.Any]) | return None | Return nothing | Return nothing | [
"Return",
"nothing"
] | def convert(self, conversion_parameters: typing.Mapping[str, typing.Any]):
"""
Return nothing
"""
return None | [
"def",
"convert",
"(",
"self",
",",
"conversion_parameters",
":",
"typing",
".",
"Mapping",
"[",
"str",
",",
"typing",
".",
"Any",
"]",
")",
":",
"return",
"None"
] | https://github.com/microsoft/ELL/blob/a1d6bacc37a14879cc025d9be2ba40b1a0632315/tools/importers/common/converters.py#L1278-L1282 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/py3/scipy/stats/mstats_basic.py | python | mode | (a, axis=0) | return ModeResult(*output) | Returns an array of the modal (most common) value in the passed array.
Parameters
----------
a : array_like
n-dimensional array of which to find mode(s).
axis : int or None, optional
Axis along which to operate. Default is 0. If None, compute over
the whole array `a`.
Returns
-------
mode : ndarray
Array of modal values.
count : ndarray
Array of counts for each mode.
Notes
-----
For more details, see `stats.mode`.
Examples
--------
>>> from scipy import stats
>>> from scipy.stats import mstats
>>> m_arr = np.ma.array([1, 1, 0, 0, 0, 0], mask=[0, 0, 1, 1, 1, 0])
>>> stats.mode(m_arr)
ModeResult(mode=array([0]), count=array([4]))
>>> mstats.mode(m_arr)
ModeResult(mode=array([1.]), count=array([2.])) | Returns an array of the modal (most common) value in the passed array. | [
"Returns",
"an",
"array",
"of",
"the",
"modal",
"(",
"most",
"common",
")",
"value",
"in",
"the",
"passed",
"array",
"."
] | def mode(a, axis=0):
"""
Returns an array of the modal (most common) value in the passed array.
Parameters
----------
a : array_like
n-dimensional array of which to find mode(s).
axis : int or None, optional
Axis along which to operate. Default is 0. If None, compute over
the whole array `a`.
Returns
-------
mode : ndarray
Array of modal values.
count : ndarray
Array of counts for each mode.
Notes
-----
For more details, see `stats.mode`.
Examples
--------
>>> from scipy import stats
>>> from scipy.stats import mstats
>>> m_arr = np.ma.array([1, 1, 0, 0, 0, 0], mask=[0, 0, 1, 1, 1, 0])
>>> stats.mode(m_arr)
ModeResult(mode=array([0]), count=array([4]))
>>> mstats.mode(m_arr)
ModeResult(mode=array([1.]), count=array([2.]))
"""
a, axis = _chk_asarray(a, axis)
def _mode1D(a):
(rep,cnt) = find_repeats(a)
if not cnt.ndim:
return (0, 0)
elif cnt.size:
return (rep[cnt.argmax()], cnt.max())
else:
return (a.min(), 1)
if axis is None:
output = _mode1D(ma.ravel(a))
output = (ma.array(output[0]), ma.array(output[1]))
else:
output = ma.apply_along_axis(_mode1D, axis, a)
newshape = list(a.shape)
newshape[axis] = 1
slices = [slice(None)] * output.ndim
slices[axis] = 0
modes = output[tuple(slices)].reshape(newshape)
slices[axis] = 1
counts = output[tuple(slices)].reshape(newshape)
output = (modes, counts)
return ModeResult(*output) | [
"def",
"mode",
"(",
"a",
",",
"axis",
"=",
"0",
")",
":",
"a",
",",
"axis",
"=",
"_chk_asarray",
"(",
"a",
",",
"axis",
")",
"def",
"_mode1D",
"(",
"a",
")",
":",
"(",
"rep",
",",
"cnt",
")",
"=",
"find_repeats",
"(",
"a",
")",
"if",
"not",
"cnt",
".",
"ndim",
":",
"return",
"(",
"0",
",",
"0",
")",
"elif",
"cnt",
".",
"size",
":",
"return",
"(",
"rep",
"[",
"cnt",
".",
"argmax",
"(",
")",
"]",
",",
"cnt",
".",
"max",
"(",
")",
")",
"else",
":",
"return",
"(",
"a",
".",
"min",
"(",
")",
",",
"1",
")",
"if",
"axis",
"is",
"None",
":",
"output",
"=",
"_mode1D",
"(",
"ma",
".",
"ravel",
"(",
"a",
")",
")",
"output",
"=",
"(",
"ma",
".",
"array",
"(",
"output",
"[",
"0",
"]",
")",
",",
"ma",
".",
"array",
"(",
"output",
"[",
"1",
"]",
")",
")",
"else",
":",
"output",
"=",
"ma",
".",
"apply_along_axis",
"(",
"_mode1D",
",",
"axis",
",",
"a",
")",
"newshape",
"=",
"list",
"(",
"a",
".",
"shape",
")",
"newshape",
"[",
"axis",
"]",
"=",
"1",
"slices",
"=",
"[",
"slice",
"(",
"None",
")",
"]",
"*",
"output",
".",
"ndim",
"slices",
"[",
"axis",
"]",
"=",
"0",
"modes",
"=",
"output",
"[",
"tuple",
"(",
"slices",
")",
"]",
".",
"reshape",
"(",
"newshape",
")",
"slices",
"[",
"axis",
"]",
"=",
"1",
"counts",
"=",
"output",
"[",
"tuple",
"(",
"slices",
")",
"]",
".",
"reshape",
"(",
"newshape",
")",
"output",
"=",
"(",
"modes",
",",
"counts",
")",
"return",
"ModeResult",
"(",
"*",
"output",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/stats/mstats_basic.py#L274-L333 | |
omnisci/omniscidb | b9c95f1bd602b4ffc8b0edf18bfad61031e08d86 | python/omnisci/thrift/OmniSci.py | python | Client.load_table_binary_columnar | (self, session, table_name, cols, column_names) | Parameters:
- session
- table_name
- cols
- column_names | Parameters:
- session
- table_name
- cols
- column_names | [
"Parameters",
":",
"-",
"session",
"-",
"table_name",
"-",
"cols",
"-",
"column_names"
] | def load_table_binary_columnar(self, session, table_name, cols, column_names):
"""
Parameters:
- session
- table_name
- cols
- column_names
"""
self.send_load_table_binary_columnar(session, table_name, cols, column_names)
self.recv_load_table_binary_columnar() | [
"def",
"load_table_binary_columnar",
"(",
"self",
",",
"session",
",",
"table_name",
",",
"cols",
",",
"column_names",
")",
":",
"self",
".",
"send_load_table_binary_columnar",
"(",
"session",
",",
"table_name",
",",
"cols",
",",
"column_names",
")",
"self",
".",
"recv_load_table_binary_columnar",
"(",
")"
] | https://github.com/omnisci/omniscidb/blob/b9c95f1bd602b4ffc8b0edf18bfad61031e08d86/python/omnisci/thrift/OmniSci.py#L3235-L3245 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/propgrid.py | python | PropertyGrid.DoHidePropertyError | (*args, **kwargs) | return _propgrid.PropertyGrid_DoHidePropertyError(*args, **kwargs) | DoHidePropertyError(self, PGProperty property) | DoHidePropertyError(self, PGProperty property) | [
"DoHidePropertyError",
"(",
"self",
"PGProperty",
"property",
")"
] | def DoHidePropertyError(*args, **kwargs):
"""DoHidePropertyError(self, PGProperty property)"""
return _propgrid.PropertyGrid_DoHidePropertyError(*args, **kwargs) | [
"def",
"DoHidePropertyError",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_propgrid",
".",
"PropertyGrid_DoHidePropertyError",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/propgrid.py#L2435-L2437 | |
rdkit/rdkit | ede860ae316d12d8568daf5ee800921c3389c84e | rdkit/Chem/AtomPairs/Utils.py | python | DiceSimilarity | (v1, v2, bounds=None) | return res | Implements the DICE similarity metric.
This is the recommended metric in both the Topological torsions
and Atom pairs papers.
**Arguments**:
- two vectors (sequences of bit ids)
**Returns**: a float.
**Notes**
- the vectors must be sorted
>>> DiceSimilarity( (1,2,3), (1,2,3) )
1.0
>>> DiceSimilarity( (1,2,3), (5,6) )
0.0
>>> DiceSimilarity( (1,2,3,4), (1,3,5,7) )
0.5
>>> DiceSimilarity( (1,2,3,4,5,6), (1,3) )
0.5
Note that duplicate bit IDs count multiple times:
>>> DiceSimilarity( (1,1,3,4,5,6), (1,1) )
0.5
but only if they are duplicated in both vectors:
>>> DiceSimilarity( (1,1,3,4,5,6), (1,) )==2./7
True
edge case
>>> DiceSimilarity( (), () )
0.0
and bounds check
>>> DiceSimilarity( (1,1,3,4), (1,1))
0.666...
>>> DiceSimilarity( (1,1,3,4), (1,1), bounds=0.3)
0.666...
>>> DiceSimilarity( (1,1,3,4), (1,1), bounds=0.33)
0.666...
>>> DiceSimilarity( (1,1,3,4,5,6), (1,1), bounds=0.34)
0.0 | Implements the DICE similarity metric.
This is the recommended metric in both the Topological torsions
and Atom pairs papers. | [
"Implements",
"the",
"DICE",
"similarity",
"metric",
".",
"This",
"is",
"the",
"recommended",
"metric",
"in",
"both",
"the",
"Topological",
"torsions",
"and",
"Atom",
"pairs",
"papers",
"."
] | def DiceSimilarity(v1, v2, bounds=None):
""" Implements the DICE similarity metric.
This is the recommended metric in both the Topological torsions
and Atom pairs papers.
**Arguments**:
- two vectors (sequences of bit ids)
**Returns**: a float.
**Notes**
- the vectors must be sorted
>>> DiceSimilarity( (1,2,3), (1,2,3) )
1.0
>>> DiceSimilarity( (1,2,3), (5,6) )
0.0
>>> DiceSimilarity( (1,2,3,4), (1,3,5,7) )
0.5
>>> DiceSimilarity( (1,2,3,4,5,6), (1,3) )
0.5
Note that duplicate bit IDs count multiple times:
>>> DiceSimilarity( (1,1,3,4,5,6), (1,1) )
0.5
but only if they are duplicated in both vectors:
>>> DiceSimilarity( (1,1,3,4,5,6), (1,) )==2./7
True
edge case
>>> DiceSimilarity( (), () )
0.0
and bounds check
>>> DiceSimilarity( (1,1,3,4), (1,1))
0.666...
>>> DiceSimilarity( (1,1,3,4), (1,1), bounds=0.3)
0.666...
>>> DiceSimilarity( (1,1,3,4), (1,1), bounds=0.33)
0.666...
>>> DiceSimilarity( (1,1,3,4,5,6), (1,1), bounds=0.34)
0.0
"""
denom = 1.0 * (len(v1) + len(v2))
if not denom:
res = 0.0
else:
if bounds and (min(len(v1), len(v2)) / denom) < bounds:
numer = 0.0
else:
numer = 2.0 * BitsInCommon(v1, v2)
res = numer / denom
return res | [
"def",
"DiceSimilarity",
"(",
"v1",
",",
"v2",
",",
"bounds",
"=",
"None",
")",
":",
"denom",
"=",
"1.0",
"*",
"(",
"len",
"(",
"v1",
")",
"+",
"len",
"(",
"v2",
")",
")",
"if",
"not",
"denom",
":",
"res",
"=",
"0.0",
"else",
":",
"if",
"bounds",
"and",
"(",
"min",
"(",
"len",
"(",
"v1",
")",
",",
"len",
"(",
"v2",
")",
")",
"/",
"denom",
")",
"<",
"bounds",
":",
"numer",
"=",
"0.0",
"else",
":",
"numer",
"=",
"2.0",
"*",
"BitsInCommon",
"(",
"v1",
",",
"v2",
")",
"res",
"=",
"numer",
"/",
"denom",
"return",
"res"
] | https://github.com/rdkit/rdkit/blob/ede860ae316d12d8568daf5ee800921c3389c84e/rdkit/Chem/AtomPairs/Utils.py#L199-L260 | |
SFTtech/openage | d6a08c53c48dc1e157807471df92197f6ca9e04d | openage/convert/value_object/read/media/datfile/unit.py | python | LivingUnit.get_data_format_members | (cls, game_version) | return data_format | Return the members in this struct. | Return the members in this struct. | [
"Return",
"the",
"members",
"in",
"this",
"struct",
"."
] | def get_data_format_members(cls, game_version):
"""
Return the members in this struct.
"""
data_format = [
(READ_GEN, None, None, IncludeMembers(cls=ProjectileUnit)),
(READ_GEN, "resource_cost", StorageType.ARRAY_CONTAINER, SubdataMember(
ref_type=ResourceCost,
length=3,
)),
(READ_GEN, "creation_time", StorageType.INT_MEMBER, "int16_t"), # in seconds
(READ_GEN, "train_location_id", StorageType.ID_MEMBER, "int16_t"), # e.g. 118 = villager builder
# where to place the button with the given icon
# creation page:
# +------------------------+
# | 01 | 02 | 03 | 04 | 05 |
# |----|----|----|----|----|
# | 06 | 07 | 08 | 09 | 10 |
# |----|----|----|----|----|
# | 11 | 12 | 13 | 14 | 15 |
# +------------------------+
#
# additional page (dock):
# +------------------------+
# | 21 | 22 | 23 | 24 | 25 |
# |----|----|----|----|----|
# | 26 | 27 | 28 | 29 | 30 |
# |----|----|----|----|----|
# | 31 | 32 | 33 | 34 | 35 |
# +------------------------+
(READ, "creation_button_id", StorageType.ID_MEMBER, "int8_t"),
]
if game_version[0].game_id not in ("ROR", "AOE1DE"):
if game_version[0].game_id == "AOE2DE":
data_format.extend([
(READ_GEN, "heal_timer", StorageType.FLOAT_MEMBER, "float"),
])
else:
data_format.extend([
(SKIP, "rear_attack_modifier", StorageType.FLOAT_MEMBER, "float"),
])
data_format.extend([
(SKIP, "flank_attack_modifier", StorageType.FLOAT_MEMBER, "float"),
(READ_GEN, "creatable_type", StorageType.ID_MEMBER, EnumLookupMember(
raw_type="int8_t",
type_name="creatable_types",
lookup_dict=CREATABLE_TYPES
)),
# if building: "others" tab in editor, if living unit: "heroes" tab,
# regenerate health + monk immunity
(READ_GEN, "hero_mode", StorageType.BOOLEAN_MEMBER, "int8_t"),
# graphic to display when units are garrisoned
(READ_GEN, "garrison_graphic", StorageType.ID_MEMBER, "int32_t"),
# projectile count when nothing garrisoned, including both normal and
# duplicated projectiles
])
if game_version[0].game_id == "AOE2DE":
data_format.extend([
(READ_GEN, "spawn_graphic_id", StorageType.ID_MEMBER, "int16_t"),
(READ_GEN, "upgrade_graphic_id", StorageType.ID_MEMBER, "int16_t"),
(READ_GEN, "hero_glow_graphic_id", StorageType.ID_MEMBER, "int16_t"),
(READ_GEN, "max_charge", StorageType.FLOAT_MEMBER, "float"),
(READ_GEN, "charge_regen_rate", StorageType.FLOAT_MEMBER, "float"),
(READ_GEN, "charge_cost", StorageType.ID_MEMBER, "int16_t"),
(READ_GEN, "charge_type", StorageType.ID_MEMBER, "int16_t"),
])
data_format.extend([
(READ_GEN, "attack_projectile_count", StorageType.INT_MEMBER, "float"),
# total projectiles when fully garrisoned
(READ_GEN, "attack_projectile_max_count", StorageType.INT_MEMBER, "int8_t"),
(READ_GEN, "attack_projectile_spawning_area_width", StorageType.FLOAT_MEMBER, "float"),
(READ_GEN, "attack_projectile_spawning_area_length", StorageType.FLOAT_MEMBER, "float"),
# placement randomness, 0=from single spot, 1=random, 1<less random
(READ_GEN, "attack_projectile_spawning_area_randomness", StorageType.FLOAT_MEMBER, "float"),
# uses its own attack values
(READ_GEN, "attack_projectile_secondary_unit_id", StorageType.ID_MEMBER, "int32_t"),
# used just before unit reaches its target enemy, configuration:
(READ_GEN, "special_graphic_id", StorageType.ID_MEMBER, "int32_t"),
# determines adjacent unit graphics, if 1: building can adapt graphics
# by adjacent buildings
(READ_GEN, "special_activation", StorageType.ID_MEMBER, "int8_t"),
# 0: default: only works when facing the hit angle.
# 1: block: activates special graphic when receiving damage and not pursuing the attacker.
# while idle, blocking decreases damage taken by 1/3.
# also: a wall changes the graphics (when not-an-end piece) because of this.
# 2: counter charge: activates special graphic when idle and enemy is near.
# while idle, attacks back once on first received hit.
# enemy must be unit type 70 and have less than 0.2 max range.
# 3: charge: activates special graphic when closer than two tiles to the target.
# deals 2X damage on 1st
# hit
])
# unit stats display of pierce armor
data_format.append((SKIP, "pierce_armor_displayed", StorageType.INT_MEMBER, "int16_t"))
return data_format | [
"def",
"get_data_format_members",
"(",
"cls",
",",
"game_version",
")",
":",
"data_format",
"=",
"[",
"(",
"READ_GEN",
",",
"None",
",",
"None",
",",
"IncludeMembers",
"(",
"cls",
"=",
"ProjectileUnit",
")",
")",
",",
"(",
"READ_GEN",
",",
"\"resource_cost\"",
",",
"StorageType",
".",
"ARRAY_CONTAINER",
",",
"SubdataMember",
"(",
"ref_type",
"=",
"ResourceCost",
",",
"length",
"=",
"3",
",",
")",
")",
",",
"(",
"READ_GEN",
",",
"\"creation_time\"",
",",
"StorageType",
".",
"INT_MEMBER",
",",
"\"int16_t\"",
")",
",",
"# in seconds",
"(",
"READ_GEN",
",",
"\"train_location_id\"",
",",
"StorageType",
".",
"ID_MEMBER",
",",
"\"int16_t\"",
")",
",",
"# e.g. 118 = villager builder",
"# where to place the button with the given icon",
"# creation page:",
"# +------------------------+",
"# | 01 | 02 | 03 | 04 | 05 |",
"# |----|----|----|----|----|",
"# | 06 | 07 | 08 | 09 | 10 |",
"# |----|----|----|----|----|",
"# | 11 | 12 | 13 | 14 | 15 |",
"# +------------------------+",
"#",
"# additional page (dock):",
"# +------------------------+",
"# | 21 | 22 | 23 | 24 | 25 |",
"# |----|----|----|----|----|",
"# | 26 | 27 | 28 | 29 | 30 |",
"# |----|----|----|----|----|",
"# | 31 | 32 | 33 | 34 | 35 |",
"# +------------------------+",
"(",
"READ",
",",
"\"creation_button_id\"",
",",
"StorageType",
".",
"ID_MEMBER",
",",
"\"int8_t\"",
")",
",",
"]",
"if",
"game_version",
"[",
"0",
"]",
".",
"game_id",
"not",
"in",
"(",
"\"ROR\"",
",",
"\"AOE1DE\"",
")",
":",
"if",
"game_version",
"[",
"0",
"]",
".",
"game_id",
"==",
"\"AOE2DE\"",
":",
"data_format",
".",
"extend",
"(",
"[",
"(",
"READ_GEN",
",",
"\"heal_timer\"",
",",
"StorageType",
".",
"FLOAT_MEMBER",
",",
"\"float\"",
")",
",",
"]",
")",
"else",
":",
"data_format",
".",
"extend",
"(",
"[",
"(",
"SKIP",
",",
"\"rear_attack_modifier\"",
",",
"StorageType",
".",
"FLOAT_MEMBER",
",",
"\"float\"",
")",
",",
"]",
")",
"data_format",
".",
"extend",
"(",
"[",
"(",
"SKIP",
",",
"\"flank_attack_modifier\"",
",",
"StorageType",
".",
"FLOAT_MEMBER",
",",
"\"float\"",
")",
",",
"(",
"READ_GEN",
",",
"\"creatable_type\"",
",",
"StorageType",
".",
"ID_MEMBER",
",",
"EnumLookupMember",
"(",
"raw_type",
"=",
"\"int8_t\"",
",",
"type_name",
"=",
"\"creatable_types\"",
",",
"lookup_dict",
"=",
"CREATABLE_TYPES",
")",
")",
",",
"# if building: \"others\" tab in editor, if living unit: \"heroes\" tab,",
"# regenerate health + monk immunity",
"(",
"READ_GEN",
",",
"\"hero_mode\"",
",",
"StorageType",
".",
"BOOLEAN_MEMBER",
",",
"\"int8_t\"",
")",
",",
"# graphic to display when units are garrisoned",
"(",
"READ_GEN",
",",
"\"garrison_graphic\"",
",",
"StorageType",
".",
"ID_MEMBER",
",",
"\"int32_t\"",
")",
",",
"# projectile count when nothing garrisoned, including both normal and",
"# duplicated projectiles",
"]",
")",
"if",
"game_version",
"[",
"0",
"]",
".",
"game_id",
"==",
"\"AOE2DE\"",
":",
"data_format",
".",
"extend",
"(",
"[",
"(",
"READ_GEN",
",",
"\"spawn_graphic_id\"",
",",
"StorageType",
".",
"ID_MEMBER",
",",
"\"int16_t\"",
")",
",",
"(",
"READ_GEN",
",",
"\"upgrade_graphic_id\"",
",",
"StorageType",
".",
"ID_MEMBER",
",",
"\"int16_t\"",
")",
",",
"(",
"READ_GEN",
",",
"\"hero_glow_graphic_id\"",
",",
"StorageType",
".",
"ID_MEMBER",
",",
"\"int16_t\"",
")",
",",
"(",
"READ_GEN",
",",
"\"max_charge\"",
",",
"StorageType",
".",
"FLOAT_MEMBER",
",",
"\"float\"",
")",
",",
"(",
"READ_GEN",
",",
"\"charge_regen_rate\"",
",",
"StorageType",
".",
"FLOAT_MEMBER",
",",
"\"float\"",
")",
",",
"(",
"READ_GEN",
",",
"\"charge_cost\"",
",",
"StorageType",
".",
"ID_MEMBER",
",",
"\"int16_t\"",
")",
",",
"(",
"READ_GEN",
",",
"\"charge_type\"",
",",
"StorageType",
".",
"ID_MEMBER",
",",
"\"int16_t\"",
")",
",",
"]",
")",
"data_format",
".",
"extend",
"(",
"[",
"(",
"READ_GEN",
",",
"\"attack_projectile_count\"",
",",
"StorageType",
".",
"INT_MEMBER",
",",
"\"float\"",
")",
",",
"# total projectiles when fully garrisoned",
"(",
"READ_GEN",
",",
"\"attack_projectile_max_count\"",
",",
"StorageType",
".",
"INT_MEMBER",
",",
"\"int8_t\"",
")",
",",
"(",
"READ_GEN",
",",
"\"attack_projectile_spawning_area_width\"",
",",
"StorageType",
".",
"FLOAT_MEMBER",
",",
"\"float\"",
")",
",",
"(",
"READ_GEN",
",",
"\"attack_projectile_spawning_area_length\"",
",",
"StorageType",
".",
"FLOAT_MEMBER",
",",
"\"float\"",
")",
",",
"# placement randomness, 0=from single spot, 1=random, 1<less random",
"(",
"READ_GEN",
",",
"\"attack_projectile_spawning_area_randomness\"",
",",
"StorageType",
".",
"FLOAT_MEMBER",
",",
"\"float\"",
")",
",",
"# uses its own attack values",
"(",
"READ_GEN",
",",
"\"attack_projectile_secondary_unit_id\"",
",",
"StorageType",
".",
"ID_MEMBER",
",",
"\"int32_t\"",
")",
",",
"# used just before unit reaches its target enemy, configuration:",
"(",
"READ_GEN",
",",
"\"special_graphic_id\"",
",",
"StorageType",
".",
"ID_MEMBER",
",",
"\"int32_t\"",
")",
",",
"# determines adjacent unit graphics, if 1: building can adapt graphics",
"# by adjacent buildings",
"(",
"READ_GEN",
",",
"\"special_activation\"",
",",
"StorageType",
".",
"ID_MEMBER",
",",
"\"int8_t\"",
")",
",",
"# 0: default: only works when facing the hit angle.",
"# 1: block: activates special graphic when receiving damage and not pursuing the attacker.",
"# while idle, blocking decreases damage taken by 1/3.",
"# also: a wall changes the graphics (when not-an-end piece) because of this.",
"# 2: counter charge: activates special graphic when idle and enemy is near.",
"# while idle, attacks back once on first received hit.",
"# enemy must be unit type 70 and have less than 0.2 max range.",
"# 3: charge: activates special graphic when closer than two tiles to the target.",
"# deals 2X damage on 1st",
"# hit",
"]",
")",
"# unit stats display of pierce armor",
"data_format",
".",
"append",
"(",
"(",
"SKIP",
",",
"\"pierce_armor_displayed\"",
",",
"StorageType",
".",
"INT_MEMBER",
",",
"\"int16_t\"",
")",
")",
"return",
"data_format"
] | https://github.com/SFTtech/openage/blob/d6a08c53c48dc1e157807471df92197f6ca9e04d/openage/convert/value_object/read/media/datfile/unit.py#L791-L892 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/_core.py | python | Point.SetDefaults | (*args, **kwargs) | return _core_.Point_SetDefaults(*args, **kwargs) | SetDefaults(self, Point pt) | SetDefaults(self, Point pt) | [
"SetDefaults",
"(",
"self",
"Point",
"pt",
")"
] | def SetDefaults(*args, **kwargs):
"""SetDefaults(self, Point pt)"""
return _core_.Point_SetDefaults(*args, **kwargs) | [
"def",
"SetDefaults",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"Point_SetDefaults",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_core.py#L1183-L1185 | |
kamyu104/LeetCode-Solutions | 77605708a927ea3b85aee5a479db733938c7c211 | Python/remove-colored-pieces-if-both-neighbors-are-the-same-color.py | python | Solution.winnerOfGame | (self, colors) | return cnt1 > cnt2 | :type colors: str
:rtype: bool | :type colors: str
:rtype: bool | [
":",
"type",
"colors",
":",
"str",
":",
"rtype",
":",
"bool"
] | def winnerOfGame(self, colors):
"""
:type colors: str
:rtype: bool
"""
cnt1 = cnt2 = 0
for i in xrange(1, len(colors)-1):
if not (colors[i-1] == colors[i] == colors[i+1]):
continue
if colors[i] == 'A':
cnt1 += 1
else:
cnt2 += 1
return cnt1 > cnt2 | [
"def",
"winnerOfGame",
"(",
"self",
",",
"colors",
")",
":",
"cnt1",
"=",
"cnt2",
"=",
"0",
"for",
"i",
"in",
"xrange",
"(",
"1",
",",
"len",
"(",
"colors",
")",
"-",
"1",
")",
":",
"if",
"not",
"(",
"colors",
"[",
"i",
"-",
"1",
"]",
"==",
"colors",
"[",
"i",
"]",
"==",
"colors",
"[",
"i",
"+",
"1",
"]",
")",
":",
"continue",
"if",
"colors",
"[",
"i",
"]",
"==",
"'A'",
":",
"cnt1",
"+=",
"1",
"else",
":",
"cnt2",
"+=",
"1",
"return",
"cnt1",
">",
"cnt2"
] | https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/remove-colored-pieces-if-both-neighbors-are-the-same-color.py#L5-L18 | |
falkTX/Carla | 74a1ae82c90db85f20550ddcdc8a927b8fb7e414 | source/modules/lilv/lilv-0.24.0/bindings/python/lilv.py | python | World.new_uri | (self, uri) | return Node.wrap(_lib.lilv_new_uri(self.world, uri)) | Create a new URI node. | Create a new URI node. | [
"Create",
"a",
"new",
"URI",
"node",
"."
] | def new_uri(self, uri):
"""Create a new URI node."""
return Node.wrap(_lib.lilv_new_uri(self.world, uri)) | [
"def",
"new_uri",
"(",
"self",
",",
"uri",
")",
":",
"return",
"Node",
".",
"wrap",
"(",
"_lib",
".",
"lilv_new_uri",
"(",
"self",
".",
"world",
",",
"uri",
")",
")"
] | https://github.com/falkTX/Carla/blob/74a1ae82c90db85f20550ddcdc8a927b8fb7e414/source/modules/lilv/lilv-0.24.0/bindings/python/lilv.py#L1155-L1157 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/multiprocessing/process.py | python | BaseProcess.kill | (self) | Terminate process; sends SIGKILL signal or uses TerminateProcess() | Terminate process; sends SIGKILL signal or uses TerminateProcess() | [
"Terminate",
"process",
";",
"sends",
"SIGKILL",
"signal",
"or",
"uses",
"TerminateProcess",
"()"
] | def kill(self):
'''
Terminate process; sends SIGKILL signal or uses TerminateProcess()
'''
self._check_closed()
self._popen.kill() | [
"def",
"kill",
"(",
"self",
")",
":",
"self",
".",
"_check_closed",
"(",
")",
"self",
".",
"_popen",
".",
"kill",
"(",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/multiprocessing/process.py#L126-L131 | ||
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/basic_fitting/fit_controls_view.py | python | FitControlsView.set_slot_for_undo_fit_clicked | (self, slot) | Connect the slot for the Undo Fit button. | Connect the slot for the Undo Fit button. | [
"Connect",
"the",
"slot",
"for",
"the",
"Undo",
"Fit",
"button",
"."
] | def set_slot_for_undo_fit_clicked(self, slot) -> None:
"""Connect the slot for the Undo Fit button."""
self.undo_fit_button.clicked.connect(slot) | [
"def",
"set_slot_for_undo_fit_clicked",
"(",
"self",
",",
"slot",
")",
"->",
"None",
":",
"self",
".",
"undo_fit_button",
".",
"clicked",
".",
"connect",
"(",
"slot",
")"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/basic_fitting/fit_controls_view.py#L42-L44 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/pandas/py2/pandas/core/frame.py | python | DataFrame.cov | (self, min_periods=None) | return self._constructor(baseCov, index=idx, columns=cols) | Compute pairwise covariance of columns, excluding NA/null values.
Compute the pairwise covariance among the series of a DataFrame.
The returned data frame is the `covariance matrix
<https://en.wikipedia.org/wiki/Covariance_matrix>`__ of the columns
of the DataFrame.
Both NA and null values are automatically excluded from the
calculation. (See the note below about bias from missing values.)
A threshold can be set for the minimum number of
observations for each value created. Comparisons with observations
below this threshold will be returned as ``NaN``.
This method is generally used for the analysis of time series data to
understand the relationship between different measures
across time.
Parameters
----------
min_periods : int, optional
Minimum number of observations required per pair of columns
to have a valid result.
Returns
-------
DataFrame
The covariance matrix of the series of the DataFrame.
See Also
--------
pandas.Series.cov : Compute covariance with another Series.
pandas.core.window.EWM.cov: Exponential weighted sample covariance.
pandas.core.window.Expanding.cov : Expanding sample covariance.
pandas.core.window.Rolling.cov : Rolling sample covariance.
Notes
-----
Returns the covariance matrix of the DataFrame's time series.
The covariance is normalized by N-1.
For DataFrames that have Series that are missing data (assuming that
data is `missing at random
<https://en.wikipedia.org/wiki/Missing_data#Missing_at_random>`__)
the returned covariance matrix will be an unbiased estimate
of the variance and covariance between the member Series.
However, for many applications this estimate may not be acceptable
because the estimate covariance matrix is not guaranteed to be positive
semi-definite. This could lead to estimate correlations having
absolute values which are greater than one, and/or a non-invertible
covariance matrix. See `Estimation of covariance matrices
<http://en.wikipedia.org/w/index.php?title=Estimation_of_covariance_
matrices>`__ for more details.
Examples
--------
>>> df = pd.DataFrame([(1, 2), (0, 3), (2, 0), (1, 1)],
... columns=['dogs', 'cats'])
>>> df.cov()
dogs cats
dogs 0.666667 -1.000000
cats -1.000000 1.666667
>>> np.random.seed(42)
>>> df = pd.DataFrame(np.random.randn(1000, 5),
... columns=['a', 'b', 'c', 'd', 'e'])
>>> df.cov()
a b c d e
a 0.998438 -0.020161 0.059277 -0.008943 0.014144
b -0.020161 1.059352 -0.008543 -0.024738 0.009826
c 0.059277 -0.008543 1.010670 -0.001486 -0.000271
d -0.008943 -0.024738 -0.001486 0.921297 -0.013692
e 0.014144 0.009826 -0.000271 -0.013692 0.977795
**Minimum number of periods**
This method also supports an optional ``min_periods`` keyword
that specifies the required minimum number of non-NA observations for
each column pair in order to have a valid result:
>>> np.random.seed(42)
>>> df = pd.DataFrame(np.random.randn(20, 3),
... columns=['a', 'b', 'c'])
>>> df.loc[df.index[:5], 'a'] = np.nan
>>> df.loc[df.index[5:10], 'b'] = np.nan
>>> df.cov(min_periods=12)
a b c
a 0.316741 NaN -0.150812
b NaN 1.248003 0.191417
c -0.150812 0.191417 0.895202 | Compute pairwise covariance of columns, excluding NA/null values. | [
"Compute",
"pairwise",
"covariance",
"of",
"columns",
"excluding",
"NA",
"/",
"null",
"values",
"."
] | def cov(self, min_periods=None):
"""
Compute pairwise covariance of columns, excluding NA/null values.
Compute the pairwise covariance among the series of a DataFrame.
The returned data frame is the `covariance matrix
<https://en.wikipedia.org/wiki/Covariance_matrix>`__ of the columns
of the DataFrame.
Both NA and null values are automatically excluded from the
calculation. (See the note below about bias from missing values.)
A threshold can be set for the minimum number of
observations for each value created. Comparisons with observations
below this threshold will be returned as ``NaN``.
This method is generally used for the analysis of time series data to
understand the relationship between different measures
across time.
Parameters
----------
min_periods : int, optional
Minimum number of observations required per pair of columns
to have a valid result.
Returns
-------
DataFrame
The covariance matrix of the series of the DataFrame.
See Also
--------
pandas.Series.cov : Compute covariance with another Series.
pandas.core.window.EWM.cov: Exponential weighted sample covariance.
pandas.core.window.Expanding.cov : Expanding sample covariance.
pandas.core.window.Rolling.cov : Rolling sample covariance.
Notes
-----
Returns the covariance matrix of the DataFrame's time series.
The covariance is normalized by N-1.
For DataFrames that have Series that are missing data (assuming that
data is `missing at random
<https://en.wikipedia.org/wiki/Missing_data#Missing_at_random>`__)
the returned covariance matrix will be an unbiased estimate
of the variance and covariance between the member Series.
However, for many applications this estimate may not be acceptable
because the estimate covariance matrix is not guaranteed to be positive
semi-definite. This could lead to estimate correlations having
absolute values which are greater than one, and/or a non-invertible
covariance matrix. See `Estimation of covariance matrices
<http://en.wikipedia.org/w/index.php?title=Estimation_of_covariance_
matrices>`__ for more details.
Examples
--------
>>> df = pd.DataFrame([(1, 2), (0, 3), (2, 0), (1, 1)],
... columns=['dogs', 'cats'])
>>> df.cov()
dogs cats
dogs 0.666667 -1.000000
cats -1.000000 1.666667
>>> np.random.seed(42)
>>> df = pd.DataFrame(np.random.randn(1000, 5),
... columns=['a', 'b', 'c', 'd', 'e'])
>>> df.cov()
a b c d e
a 0.998438 -0.020161 0.059277 -0.008943 0.014144
b -0.020161 1.059352 -0.008543 -0.024738 0.009826
c 0.059277 -0.008543 1.010670 -0.001486 -0.000271
d -0.008943 -0.024738 -0.001486 0.921297 -0.013692
e 0.014144 0.009826 -0.000271 -0.013692 0.977795
**Minimum number of periods**
This method also supports an optional ``min_periods`` keyword
that specifies the required minimum number of non-NA observations for
each column pair in order to have a valid result:
>>> np.random.seed(42)
>>> df = pd.DataFrame(np.random.randn(20, 3),
... columns=['a', 'b', 'c'])
>>> df.loc[df.index[:5], 'a'] = np.nan
>>> df.loc[df.index[5:10], 'b'] = np.nan
>>> df.cov(min_periods=12)
a b c
a 0.316741 NaN -0.150812
b NaN 1.248003 0.191417
c -0.150812 0.191417 0.895202
"""
numeric_df = self._get_numeric_data()
cols = numeric_df.columns
idx = cols.copy()
mat = numeric_df.values
if notna(mat).all():
if min_periods is not None and min_periods > len(mat):
baseCov = np.empty((mat.shape[1], mat.shape[1]))
baseCov.fill(np.nan)
else:
baseCov = np.cov(mat.T)
baseCov = baseCov.reshape((len(cols), len(cols)))
else:
baseCov = libalgos.nancorr(ensure_float64(mat), cov=True,
minp=min_periods)
return self._constructor(baseCov, index=idx, columns=cols) | [
"def",
"cov",
"(",
"self",
",",
"min_periods",
"=",
"None",
")",
":",
"numeric_df",
"=",
"self",
".",
"_get_numeric_data",
"(",
")",
"cols",
"=",
"numeric_df",
".",
"columns",
"idx",
"=",
"cols",
".",
"copy",
"(",
")",
"mat",
"=",
"numeric_df",
".",
"values",
"if",
"notna",
"(",
"mat",
")",
".",
"all",
"(",
")",
":",
"if",
"min_periods",
"is",
"not",
"None",
"and",
"min_periods",
">",
"len",
"(",
"mat",
")",
":",
"baseCov",
"=",
"np",
".",
"empty",
"(",
"(",
"mat",
".",
"shape",
"[",
"1",
"]",
",",
"mat",
".",
"shape",
"[",
"1",
"]",
")",
")",
"baseCov",
".",
"fill",
"(",
"np",
".",
"nan",
")",
"else",
":",
"baseCov",
"=",
"np",
".",
"cov",
"(",
"mat",
".",
"T",
")",
"baseCov",
"=",
"baseCov",
".",
"reshape",
"(",
"(",
"len",
"(",
"cols",
")",
",",
"len",
"(",
"cols",
")",
")",
")",
"else",
":",
"baseCov",
"=",
"libalgos",
".",
"nancorr",
"(",
"ensure_float64",
"(",
"mat",
")",
",",
"cov",
"=",
"True",
",",
"minp",
"=",
"min_periods",
")",
"return",
"self",
".",
"_constructor",
"(",
"baseCov",
",",
"index",
"=",
"idx",
",",
"columns",
"=",
"cols",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/frame.py#L7040-L7149 | |
desura/Desurium | 7d218139682cf1ddcc64beffdcecc984955436f0 | third_party/courgette/tools/file_util.py | python | file_exists | (name) | return os.path.exists(name) | Returns true if the file currently exists. | Returns true if the file currently exists. | [
"Returns",
"true",
"if",
"the",
"file",
"currently",
"exists",
"."
] | def file_exists(name):
""" Returns true if the file currently exists. """
return os.path.exists(name) | [
"def",
"file_exists",
"(",
"name",
")",
":",
"return",
"os",
".",
"path",
".",
"exists",
"(",
"name",
")"
] | https://github.com/desura/Desurium/blob/7d218139682cf1ddcc64beffdcecc984955436f0/third_party/courgette/tools/file_util.py#L38-L40 | |
mapnik/mapnik | f3da900c355e1d15059c4a91b00203dcc9d9f0ef | scons/scons-local-4.1.0/SCons/Util.py | python | Proxy.__getattr__ | (self, name) | return getattr(self._subject, name) | Retrieve an attribute from the wrapped object. If the named
attribute doesn't exist, AttributeError is raised | Retrieve an attribute from the wrapped object. If the named
attribute doesn't exist, AttributeError is raised | [
"Retrieve",
"an",
"attribute",
"from",
"the",
"wrapped",
"object",
".",
"If",
"the",
"named",
"attribute",
"doesn",
"t",
"exist",
"AttributeError",
"is",
"raised"
] | def __getattr__(self, name):
"""Retrieve an attribute from the wrapped object. If the named
attribute doesn't exist, AttributeError is raised"""
return getattr(self._subject, name) | [
"def",
"__getattr__",
"(",
"self",
",",
"name",
")",
":",
"return",
"getattr",
"(",
"self",
".",
"_subject",
",",
"name",
")"
] | https://github.com/mapnik/mapnik/blob/f3da900c355e1d15059c4a91b00203dcc9d9f0ef/scons/scons-local-4.1.0/SCons/Util.py#L602-L605 | |
hszhao/PSPNet | cf7e5a99ba37e46118026e96be5821a9bc63bde0 | scripts/cpp_lint.py | python | _CppLintState.ResetErrorCounts | (self) | Sets the module's error statistic back to zero. | Sets the module's error statistic back to zero. | [
"Sets",
"the",
"module",
"s",
"error",
"statistic",
"back",
"to",
"zero",
"."
] | def ResetErrorCounts(self):
"""Sets the module's error statistic back to zero."""
self.error_count = 0
self.errors_by_category = {} | [
"def",
"ResetErrorCounts",
"(",
"self",
")",
":",
"self",
".",
"error_count",
"=",
"0",
"self",
".",
"errors_by_category",
"=",
"{",
"}"
] | https://github.com/hszhao/PSPNet/blob/cf7e5a99ba37e46118026e96be5821a9bc63bde0/scripts/cpp_lint.py#L742-L745 | ||
google/filament | d21f092645b8e1e312307cbf89f1484891347c63 | third_party/libassimp/port/PyAssimp/scripts/transformations.py | python | Arcball.drag | (self, point) | Update current cursor window coordinates. | Update current cursor window coordinates. | [
"Update",
"current",
"cursor",
"window",
"coordinates",
"."
] | def drag(self, point):
"""Update current cursor window coordinates."""
vnow = arcball_map_to_sphere(point, self._center, self._radius)
if self._axis is not None:
vnow = arcball_constrain_to_axis(vnow, self._axis)
self._qpre = self._qnow
t = numpy.cross(self._vdown, vnow)
if numpy.dot(t, t) < _EPS:
self._qnow = self._qdown
else:
q = [t[0], t[1], t[2], numpy.dot(self._vdown, vnow)]
self._qnow = quaternion_multiply(q, self._qdown) | [
"def",
"drag",
"(",
"self",
",",
"point",
")",
":",
"vnow",
"=",
"arcball_map_to_sphere",
"(",
"point",
",",
"self",
".",
"_center",
",",
"self",
".",
"_radius",
")",
"if",
"self",
".",
"_axis",
"is",
"not",
"None",
":",
"vnow",
"=",
"arcball_constrain_to_axis",
"(",
"vnow",
",",
"self",
".",
"_axis",
")",
"self",
".",
"_qpre",
"=",
"self",
".",
"_qnow",
"t",
"=",
"numpy",
".",
"cross",
"(",
"self",
".",
"_vdown",
",",
"vnow",
")",
"if",
"numpy",
".",
"dot",
"(",
"t",
",",
"t",
")",
"<",
"_EPS",
":",
"self",
".",
"_qnow",
"=",
"self",
".",
"_qdown",
"else",
":",
"q",
"=",
"[",
"t",
"[",
"0",
"]",
",",
"t",
"[",
"1",
"]",
",",
"t",
"[",
"2",
"]",
",",
"numpy",
".",
"dot",
"(",
"self",
".",
"_vdown",
",",
"vnow",
")",
"]",
"self",
".",
"_qnow",
"=",
"quaternion_multiply",
"(",
"q",
",",
"self",
".",
"_qdown",
")"
] | https://github.com/google/filament/blob/d21f092645b8e1e312307cbf89f1484891347c63/third_party/libassimp/port/PyAssimp/scripts/transformations.py#L1446-L1460 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/tools/Editra/src/extern/flatnotebook.py | python | PageContainer.DoDeletePage | (self, page) | Does the actual page deletion. | Does the actual page deletion. | [
"Does",
"the",
"actual",
"page",
"deletion",
"."
] | def DoDeletePage(self, page):
""" Does the actual page deletion. """
# Remove the page from the vector
book = self.GetParent()
self._pagesInfoVec.pop(page)
# Thanks to Yiaanis AKA Mandrav
if self._iActivePage >= page:
self._iActivePage = self._iActivePage - 1
self._iPreviousActivePage = -1
# The delete page was the last first on the array,
# but the book still has more pages, so we set the
# active page to be the first one (0)
if self._iActivePage < 0 and len(self._pagesInfoVec) > 0:
self._iActivePage = 0
self._iPreviousActivePage = -1
# Refresh the tabs
if self._iActivePage >= 0:
book._bForceSelection = True
# Check for selection and send event
event = FlatNotebookEvent(wxEVT_FLATNOTEBOOK_PAGE_CHANGING, self.GetParent().GetId())
event.SetSelection(self._iActivePage)
event.SetOldSelection(self._iPreviousActivePage)
event.SetEventObject(self.GetParent())
self.GetParent().GetEventHandler().ProcessEvent(event)
book.SetSelection(self._iActivePage)
book._bForceSelection = False
# Fire a wxEVT_FLATNOTEBOOK_PAGE_CHANGED event
event.SetEventType(wxEVT_FLATNOTEBOOK_PAGE_CHANGED)
event.SetOldSelection(self._iPreviousActivePage)
self.GetParent().GetEventHandler().ProcessEvent(event) | [
"def",
"DoDeletePage",
"(",
"self",
",",
"page",
")",
":",
"# Remove the page from the vector",
"book",
"=",
"self",
".",
"GetParent",
"(",
")",
"self",
".",
"_pagesInfoVec",
".",
"pop",
"(",
"page",
")",
"# Thanks to Yiaanis AKA Mandrav",
"if",
"self",
".",
"_iActivePage",
">=",
"page",
":",
"self",
".",
"_iActivePage",
"=",
"self",
".",
"_iActivePage",
"-",
"1",
"self",
".",
"_iPreviousActivePage",
"=",
"-",
"1",
"# The delete page was the last first on the array,",
"# but the book still has more pages, so we set the",
"# active page to be the first one (0)",
"if",
"self",
".",
"_iActivePage",
"<",
"0",
"and",
"len",
"(",
"self",
".",
"_pagesInfoVec",
")",
">",
"0",
":",
"self",
".",
"_iActivePage",
"=",
"0",
"self",
".",
"_iPreviousActivePage",
"=",
"-",
"1",
"# Refresh the tabs",
"if",
"self",
".",
"_iActivePage",
">=",
"0",
":",
"book",
".",
"_bForceSelection",
"=",
"True",
"# Check for selection and send event",
"event",
"=",
"FlatNotebookEvent",
"(",
"wxEVT_FLATNOTEBOOK_PAGE_CHANGING",
",",
"self",
".",
"GetParent",
"(",
")",
".",
"GetId",
"(",
")",
")",
"event",
".",
"SetSelection",
"(",
"self",
".",
"_iActivePage",
")",
"event",
".",
"SetOldSelection",
"(",
"self",
".",
"_iPreviousActivePage",
")",
"event",
".",
"SetEventObject",
"(",
"self",
".",
"GetParent",
"(",
")",
")",
"self",
".",
"GetParent",
"(",
")",
".",
"GetEventHandler",
"(",
")",
".",
"ProcessEvent",
"(",
"event",
")",
"book",
".",
"SetSelection",
"(",
"self",
".",
"_iActivePage",
")",
"book",
".",
"_bForceSelection",
"=",
"False",
"# Fire a wxEVT_FLATNOTEBOOK_PAGE_CHANGED event",
"event",
".",
"SetEventType",
"(",
"wxEVT_FLATNOTEBOOK_PAGE_CHANGED",
")",
"event",
".",
"SetOldSelection",
"(",
"self",
".",
"_iPreviousActivePage",
")",
"self",
".",
"GetParent",
"(",
")",
".",
"GetEventHandler",
"(",
")",
".",
"ProcessEvent",
"(",
"event",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/extern/flatnotebook.py#L4258-L4295 | ||
HKUST-Aerial-Robotics/Teach-Repeat-Replan | 98505a7f74b13c8b501176ff838a38423dbef536 | utils/pose_utils/build/catkin_generated/installspace/_setup_util.py | python | _get_workspaces | (environ, include_fuerte=False, include_non_existing=False) | return workspaces | Based on CMAKE_PREFIX_PATH return all catkin workspaces.
:param include_fuerte: The flag if paths starting with '/opt/ros/fuerte' should be considered workspaces, ``bool`` | Based on CMAKE_PREFIX_PATH return all catkin workspaces. | [
"Based",
"on",
"CMAKE_PREFIX_PATH",
"return",
"all",
"catkin",
"workspaces",
"."
] | def _get_workspaces(environ, include_fuerte=False, include_non_existing=False):
'''
Based on CMAKE_PREFIX_PATH return all catkin workspaces.
:param include_fuerte: The flag if paths starting with '/opt/ros/fuerte' should be considered workspaces, ``bool``
'''
# get all cmake prefix paths
env_name = 'CMAKE_PREFIX_PATH'
value = environ[env_name] if env_name in environ else ''
paths = [path for path in value.split(os.pathsep) if path]
# remove non-workspace paths
workspaces = [path for path in paths if os.path.isfile(os.path.join(path, CATKIN_MARKER_FILE)) or (include_fuerte and path.startswith('/opt/ros/fuerte')) or (include_non_existing and not os.path.exists(path))]
return workspaces | [
"def",
"_get_workspaces",
"(",
"environ",
",",
"include_fuerte",
"=",
"False",
",",
"include_non_existing",
"=",
"False",
")",
":",
"# get all cmake prefix paths",
"env_name",
"=",
"'CMAKE_PREFIX_PATH'",
"value",
"=",
"environ",
"[",
"env_name",
"]",
"if",
"env_name",
"in",
"environ",
"else",
"''",
"paths",
"=",
"[",
"path",
"for",
"path",
"in",
"value",
".",
"split",
"(",
"os",
".",
"pathsep",
")",
"if",
"path",
"]",
"# remove non-workspace paths",
"workspaces",
"=",
"[",
"path",
"for",
"path",
"in",
"paths",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"CATKIN_MARKER_FILE",
")",
")",
"or",
"(",
"include_fuerte",
"and",
"path",
".",
"startswith",
"(",
"'/opt/ros/fuerte'",
")",
")",
"or",
"(",
"include_non_existing",
"and",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
")",
"]",
"return",
"workspaces"
] | https://github.com/HKUST-Aerial-Robotics/Teach-Repeat-Replan/blob/98505a7f74b13c8b501176ff838a38423dbef536/utils/pose_utils/build/catkin_generated/installspace/_setup_util.py#L114-L126 | |
apache/incubator-mxnet | f03fb23f1d103fec9541b5ae59ee06b1734a51d9 | python/mxnet/onnx/mx2onnx/_op_translations/_op_translations_opset12.py | python | convert_broadcast_div | (node, **kwargs) | return create_basic_op_node('Div', node, kwargs) | Map MXNet's broadcast_div operator attributes to onnx's Div operator
and return the created node. | Map MXNet's broadcast_div operator attributes to onnx's Div operator
and return the created node. | [
"Map",
"MXNet",
"s",
"broadcast_div",
"operator",
"attributes",
"to",
"onnx",
"s",
"Div",
"operator",
"and",
"return",
"the",
"created",
"node",
"."
] | def convert_broadcast_div(node, **kwargs):
"""Map MXNet's broadcast_div operator attributes to onnx's Div operator
and return the created node.
"""
return create_basic_op_node('Div', node, kwargs) | [
"def",
"convert_broadcast_div",
"(",
"node",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"create_basic_op_node",
"(",
"'Div'",
",",
"node",
",",
"kwargs",
")"
] | https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/onnx/mx2onnx/_op_translations/_op_translations_opset12.py#L1717-L1721 | |
nasa/fprime | 595cf3682d8365943d86c1a6fe7c78f0a116acf0 | Autocoders/Python/src/fprime_ac/utils/pyparsing.py | python | ParserElement.__and__ | (self, other) | return Each([self, other]) | Implementation of & operator - returns Each | Implementation of & operator - returns Each | [
"Implementation",
"of",
"&",
"operator",
"-",
"returns",
"Each"
] | def __and__(self, other):
"""Implementation of & operator - returns Each"""
if isinstance(other, str):
other = Literal(other)
if not isinstance(other, ParserElement):
warnings.warn(
"Cannot add element of type %s to ParserElement" % type(other),
SyntaxWarning,
stacklevel=2,
)
return Each([self, other]) | [
"def",
"__and__",
"(",
"self",
",",
"other",
")",
":",
"if",
"isinstance",
"(",
"other",
",",
"str",
")",
":",
"other",
"=",
"Literal",
"(",
"other",
")",
"if",
"not",
"isinstance",
"(",
"other",
",",
"ParserElement",
")",
":",
"warnings",
".",
"warn",
"(",
"\"Cannot add element of type %s to ParserElement\"",
"%",
"type",
"(",
"other",
")",
",",
"SyntaxWarning",
",",
"stacklevel",
"=",
"2",
",",
")",
"return",
"Each",
"(",
"[",
"self",
",",
"other",
"]",
")"
] | https://github.com/nasa/fprime/blob/595cf3682d8365943d86c1a6fe7c78f0a116acf0/Autocoders/Python/src/fprime_ac/utils/pyparsing.py#L1053-L1063 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/defusedxml/expatbuilder.py | python | parseString | (string, namespaces=True, forbid_dtd=False,
forbid_entities=True, forbid_external=True) | return builder.parseString(string) | Parse a document from a string, returning the resulting
Document node. | Parse a document from a string, returning the resulting
Document node. | [
"Parse",
"a",
"document",
"from",
"a",
"string",
"returning",
"the",
"resulting",
"Document",
"node",
"."
] | def parseString(string, namespaces=True, forbid_dtd=False,
forbid_entities=True, forbid_external=True):
"""Parse a document from a string, returning the resulting
Document node.
"""
if namespaces:
build_builder = DefusedExpatBuilderNS
else:
build_builder = DefusedExpatBuilder
builder = build_builder(forbid_dtd=forbid_dtd,
forbid_entities=forbid_entities,
forbid_external=forbid_external)
return builder.parseString(string) | [
"def",
"parseString",
"(",
"string",
",",
"namespaces",
"=",
"True",
",",
"forbid_dtd",
"=",
"False",
",",
"forbid_entities",
"=",
"True",
",",
"forbid_external",
"=",
"True",
")",
":",
"if",
"namespaces",
":",
"build_builder",
"=",
"DefusedExpatBuilderNS",
"else",
":",
"build_builder",
"=",
"DefusedExpatBuilder",
"builder",
"=",
"build_builder",
"(",
"forbid_dtd",
"=",
"forbid_dtd",
",",
"forbid_entities",
"=",
"forbid_entities",
",",
"forbid_external",
"=",
"forbid_external",
")",
"return",
"builder",
".",
"parseString",
"(",
"string",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/defusedxml/expatbuilder.py#L98-L110 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/types.py | python | new_class | (name, bases=(), kwds=None, exec_body=None) | return meta(name, resolved_bases, ns, **kwds) | Create a class object dynamically using the appropriate metaclass. | Create a class object dynamically using the appropriate metaclass. | [
"Create",
"a",
"class",
"object",
"dynamically",
"using",
"the",
"appropriate",
"metaclass",
"."
] | def new_class(name, bases=(), kwds=None, exec_body=None):
"""Create a class object dynamically using the appropriate metaclass."""
resolved_bases = resolve_bases(bases)
meta, ns, kwds = prepare_class(name, resolved_bases, kwds)
if exec_body is not None:
exec_body(ns)
if resolved_bases is not bases:
ns['__orig_bases__'] = bases
return meta(name, resolved_bases, ns, **kwds) | [
"def",
"new_class",
"(",
"name",
",",
"bases",
"=",
"(",
")",
",",
"kwds",
"=",
"None",
",",
"exec_body",
"=",
"None",
")",
":",
"resolved_bases",
"=",
"resolve_bases",
"(",
"bases",
")",
"meta",
",",
"ns",
",",
"kwds",
"=",
"prepare_class",
"(",
"name",
",",
"resolved_bases",
",",
"kwds",
")",
"if",
"exec_body",
"is",
"not",
"None",
":",
"exec_body",
"(",
"ns",
")",
"if",
"resolved_bases",
"is",
"not",
"bases",
":",
"ns",
"[",
"'__orig_bases__'",
"]",
"=",
"bases",
"return",
"meta",
"(",
"name",
",",
"resolved_bases",
",",
"ns",
",",
"*",
"*",
"kwds",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/types.py#L62-L70 | |
Kitware/ParaView | f760af9124ff4634b23ebbeab95a4f56e0261955 | Wrapping/Python/paraview/servermanager.py | python | DoubleMapProperty.values | (self) | return self.GetData().values() | Returns the values | Returns the values | [
"Returns",
"the",
"values"
] | def values(self):
"""Returns the values"""
return self.GetData().values() | [
"def",
"values",
"(",
"self",
")",
":",
"return",
"self",
".",
"GetData",
"(",
")",
".",
"values",
"(",
")"
] | https://github.com/Kitware/ParaView/blob/f760af9124ff4634b23ebbeab95a4f56e0261955/Wrapping/Python/paraview/servermanager.py#L918-L920 | |
shedskin/shedskin | ae88dbca7b1d9671cd8be448cb0b497122758936 | examples/sha.py | python | sha.digest | (self) | return digest | Terminate the message-digest computation and return digest.
Return the digest of the strings passed to the update()
method so far. This is a 16-byte string which may contain
non-ASCII characters, including null bytes. | Terminate the message-digest computation and return digest. | [
"Terminate",
"the",
"message",
"-",
"digest",
"computation",
"and",
"return",
"digest",
"."
] | def digest(self):
"""Terminate the message-digest computation and return digest.
Return the digest of the strings passed to the update()
method so far. This is a 16-byte string which may contain
non-ASCII characters, including null bytes.
"""
H0 = self.H0
H1 = self.H1
H2 = self.H2
H3 = self.H3
H4 = self.H4
input = [] + self.input
count = [] + self.count
index = (self.count[1] >> 3) & 0x3fL
if index < 56:
padLen = 56 - index
else:
padLen = 120 - index
padding = ['\200'] + ['\000'] * 63
self.update(padding[:padLen])
# Append length (before padding).
bits = _bytelist2longBigEndian(self.input[:56]) + count
self._transform(bits)
# Store state in digest.
digest = _long2bytesBigEndian(self.H0, 4) + \
_long2bytesBigEndian(self.H1, 4) + \
_long2bytesBigEndian(self.H2, 4) + \
_long2bytesBigEndian(self.H3, 4) + \
_long2bytesBigEndian(self.H4, 4)
self.H0 = H0
self.H1 = H1
self.H2 = H2
self.H3 = H3
self.H4 = H4
self.input = input
self.count = count
return digest | [
"def",
"digest",
"(",
"self",
")",
":",
"H0",
"=",
"self",
".",
"H0",
"H1",
"=",
"self",
".",
"H1",
"H2",
"=",
"self",
".",
"H2",
"H3",
"=",
"self",
".",
"H3",
"H4",
"=",
"self",
".",
"H4",
"input",
"=",
"[",
"]",
"+",
"self",
".",
"input",
"count",
"=",
"[",
"]",
"+",
"self",
".",
"count",
"index",
"=",
"(",
"self",
".",
"count",
"[",
"1",
"]",
">>",
"3",
")",
"&",
"0x3fL",
"if",
"index",
"<",
"56",
":",
"padLen",
"=",
"56",
"-",
"index",
"else",
":",
"padLen",
"=",
"120",
"-",
"index",
"padding",
"=",
"[",
"'\\200'",
"]",
"+",
"[",
"'\\000'",
"]",
"*",
"63",
"self",
".",
"update",
"(",
"padding",
"[",
":",
"padLen",
"]",
")",
"# Append length (before padding).",
"bits",
"=",
"_bytelist2longBigEndian",
"(",
"self",
".",
"input",
"[",
":",
"56",
"]",
")",
"+",
"count",
"self",
".",
"_transform",
"(",
"bits",
")",
"# Store state in digest.",
"digest",
"=",
"_long2bytesBigEndian",
"(",
"self",
".",
"H0",
",",
"4",
")",
"+",
"_long2bytesBigEndian",
"(",
"self",
".",
"H1",
",",
"4",
")",
"+",
"_long2bytesBigEndian",
"(",
"self",
".",
"H2",
",",
"4",
")",
"+",
"_long2bytesBigEndian",
"(",
"self",
".",
"H3",
",",
"4",
")",
"+",
"_long2bytesBigEndian",
"(",
"self",
".",
"H4",
",",
"4",
")",
"self",
".",
"H0",
"=",
"H0",
"self",
".",
"H1",
"=",
"H1",
"self",
".",
"H2",
"=",
"H2",
"self",
".",
"H3",
"=",
"H3",
"self",
".",
"H4",
"=",
"H4",
"self",
".",
"input",
"=",
"input",
"self",
".",
"count",
"=",
"count",
"return",
"digest"
] | https://github.com/shedskin/shedskin/blob/ae88dbca7b1d9671cd8be448cb0b497122758936/examples/sha.py#L281-L327 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Lib/re.py | python | search | (pattern, string, flags=0) | return _compile(pattern, flags).search(string) | Scan through string looking for a match to the pattern, returning
a match object, or None if no match was found. | Scan through string looking for a match to the pattern, returning
a match object, or None if no match was found. | [
"Scan",
"through",
"string",
"looking",
"for",
"a",
"match",
"to",
"the",
"pattern",
"returning",
"a",
"match",
"object",
"or",
"None",
"if",
"no",
"match",
"was",
"found",
"."
] | def search(pattern, string, flags=0):
"""Scan through string looking for a match to the pattern, returning
a match object, or None if no match was found."""
return _compile(pattern, flags).search(string) | [
"def",
"search",
"(",
"pattern",
",",
"string",
",",
"flags",
"=",
"0",
")",
":",
"return",
"_compile",
"(",
"pattern",
",",
"flags",
")",
".",
"search",
"(",
"string",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/re.py#L143-L146 | |
apache/incubator-mxnet | f03fb23f1d103fec9541b5ae59ee06b1734a51d9 | python/mxnet/gluon/parameter.py | python | Parameter.set_data | (self, data) | Sets this parameter's value on all devices. | Sets this parameter's value on all devices. | [
"Sets",
"this",
"parameter",
"s",
"value",
"on",
"all",
"devices",
"."
] | def set_data(self, data):
"""Sets this parameter's value on all devices."""
self.shape = data.shape
if self._data is None:
assert self._deferred_init, \
"Parameter '%s' has not been initialized"%self.name
self._deferred_init = self._deferred_init[:3] + (data,)
return
# if update_on_kvstore, we need to make sure the copy stored in kvstore is in sync
trainer = self._trainer() if self._trainer else None
if trainer and trainer._kv_initialized and trainer._update_on_kvstore:
if self not in trainer._params_to_init:
trainer._reset_kvstore()
for arr in self._check_and_get(self._data, list):
arr[:] = data | [
"def",
"set_data",
"(",
"self",
",",
"data",
")",
":",
"self",
".",
"shape",
"=",
"data",
".",
"shape",
"if",
"self",
".",
"_data",
"is",
"None",
":",
"assert",
"self",
".",
"_deferred_init",
",",
"\"Parameter '%s' has not been initialized\"",
"%",
"self",
".",
"name",
"self",
".",
"_deferred_init",
"=",
"self",
".",
"_deferred_init",
"[",
":",
"3",
"]",
"+",
"(",
"data",
",",
")",
"return",
"# if update_on_kvstore, we need to make sure the copy stored in kvstore is in sync",
"trainer",
"=",
"self",
".",
"_trainer",
"(",
")",
"if",
"self",
".",
"_trainer",
"else",
"None",
"if",
"trainer",
"and",
"trainer",
".",
"_kv_initialized",
"and",
"trainer",
".",
"_update_on_kvstore",
":",
"if",
"self",
"not",
"in",
"trainer",
".",
"_params_to_init",
":",
"trainer",
".",
"_reset_kvstore",
"(",
")",
"for",
"arr",
"in",
"self",
".",
"_check_and_get",
"(",
"self",
".",
"_data",
",",
"list",
")",
":",
"arr",
"[",
":",
"]",
"=",
"data"
] | https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/gluon/parameter.py#L524-L541 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/pyparsing.py | python | ParserElement.__rsub__ | (self, other) | return other - self | Implementation of - operator when left operand is not a :class:`ParserElement` | [] | def __rsub__(self, other):
"""
Implementation of - operator when left operand is not a :class:`ParserElement`
"""
if isinstance(other, basestring):
other = self._literalStringClass(other)
if not isinstance(other, ParserElement):
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2)
return None
return other - self | [
"def",
"__rsub__",
"(",
"self",
",",
"other",
")",
":",
"if",
"isinstance",
"(",
"other",
",",
"basestring",
")",
":",
"other",
"=",
"self",
".",
"_literalStringClass",
"(",
"other",
")",
"if",
"not",
"isinstance",
"(",
"other",
",",
"ParserElement",
")",
":",
"warnings",
".",
"warn",
"(",
"\"Cannot combine element of type %s with ParserElement\"",
"%",
"type",
"(",
"other",
")",
",",
"SyntaxWarning",
",",
"stacklevel",
"=",
"2",
")",
"return",
"None",
"return",
"other",
"-",
"self"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/pyparsing.py#L4405-L4425 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/numpy/py2/numpy/linalg/linalg.py | python | slogdet | (a) | return sign, logdet | Compute the sign and (natural) logarithm of the determinant of an array.
If an array has a very small or very large determinant, then a call to
`det` may overflow or underflow. This routine is more robust against such
issues, because it computes the logarithm of the determinant rather than
the determinant itself.
Parameters
----------
a : (..., M, M) array_like
Input array, has to be a square 2-D array.
Returns
-------
sign : (...) array_like
A number representing the sign of the determinant. For a real matrix,
this is 1, 0, or -1. For a complex matrix, this is a complex number
with absolute value 1 (i.e., it is on the unit circle), or else 0.
logdet : (...) array_like
The natural log of the absolute value of the determinant.
If the determinant is zero, then `sign` will be 0 and `logdet` will be
-Inf. In all cases, the determinant is equal to ``sign * np.exp(logdet)``.
See Also
--------
det
Notes
-----
.. versionadded:: 1.8.0
Broadcasting rules apply, see the `numpy.linalg` documentation for
details.
.. versionadded:: 1.6.0
The determinant is computed via LU factorization using the LAPACK
routine z/dgetrf.
Examples
--------
The determinant of a 2-D array ``[[a, b], [c, d]]`` is ``ad - bc``:
>>> a = np.array([[1, 2], [3, 4]])
>>> (sign, logdet) = np.linalg.slogdet(a)
>>> (sign, logdet)
(-1, 0.69314718055994529)
>>> sign * np.exp(logdet)
-2.0
Computing log-determinants for a stack of matrices:
>>> a = np.array([ [[1, 2], [3, 4]], [[1, 2], [2, 1]], [[1, 3], [3, 1]] ])
>>> a.shape
(3, 2, 2)
>>> sign, logdet = np.linalg.slogdet(a)
>>> (sign, logdet)
(array([-1., -1., -1.]), array([ 0.69314718, 1.09861229, 2.07944154]))
>>> sign * np.exp(logdet)
array([-2., -3., -8.])
This routine succeeds where ordinary `det` does not:
>>> np.linalg.det(np.eye(500) * 0.1)
0.0
>>> np.linalg.slogdet(np.eye(500) * 0.1)
(1, -1151.2925464970228) | Compute the sign and (natural) logarithm of the determinant of an array. | [
"Compute",
"the",
"sign",
"and",
"(",
"natural",
")",
"logarithm",
"of",
"the",
"determinant",
"of",
"an",
"array",
"."
] | def slogdet(a):
"""
Compute the sign and (natural) logarithm of the determinant of an array.
If an array has a very small or very large determinant, then a call to
`det` may overflow or underflow. This routine is more robust against such
issues, because it computes the logarithm of the determinant rather than
the determinant itself.
Parameters
----------
a : (..., M, M) array_like
Input array, has to be a square 2-D array.
Returns
-------
sign : (...) array_like
A number representing the sign of the determinant. For a real matrix,
this is 1, 0, or -1. For a complex matrix, this is a complex number
with absolute value 1 (i.e., it is on the unit circle), or else 0.
logdet : (...) array_like
The natural log of the absolute value of the determinant.
If the determinant is zero, then `sign` will be 0 and `logdet` will be
-Inf. In all cases, the determinant is equal to ``sign * np.exp(logdet)``.
See Also
--------
det
Notes
-----
.. versionadded:: 1.8.0
Broadcasting rules apply, see the `numpy.linalg` documentation for
details.
.. versionadded:: 1.6.0
The determinant is computed via LU factorization using the LAPACK
routine z/dgetrf.
Examples
--------
The determinant of a 2-D array ``[[a, b], [c, d]]`` is ``ad - bc``:
>>> a = np.array([[1, 2], [3, 4]])
>>> (sign, logdet) = np.linalg.slogdet(a)
>>> (sign, logdet)
(-1, 0.69314718055994529)
>>> sign * np.exp(logdet)
-2.0
Computing log-determinants for a stack of matrices:
>>> a = np.array([ [[1, 2], [3, 4]], [[1, 2], [2, 1]], [[1, 3], [3, 1]] ])
>>> a.shape
(3, 2, 2)
>>> sign, logdet = np.linalg.slogdet(a)
>>> (sign, logdet)
(array([-1., -1., -1.]), array([ 0.69314718, 1.09861229, 2.07944154]))
>>> sign * np.exp(logdet)
array([-2., -3., -8.])
This routine succeeds where ordinary `det` does not:
>>> np.linalg.det(np.eye(500) * 0.1)
0.0
>>> np.linalg.slogdet(np.eye(500) * 0.1)
(1, -1151.2925464970228)
"""
a = asarray(a)
_assertRankAtLeast2(a)
_assertNdSquareness(a)
t, result_t = _commonType(a)
real_t = _realType(result_t)
signature = 'D->Dd' if isComplexType(t) else 'd->dd'
sign, logdet = _umath_linalg.slogdet(a, signature=signature)
sign = sign.astype(result_t, copy=False)
logdet = logdet.astype(real_t, copy=False)
return sign, logdet | [
"def",
"slogdet",
"(",
"a",
")",
":",
"a",
"=",
"asarray",
"(",
"a",
")",
"_assertRankAtLeast2",
"(",
"a",
")",
"_assertNdSquareness",
"(",
"a",
")",
"t",
",",
"result_t",
"=",
"_commonType",
"(",
"a",
")",
"real_t",
"=",
"_realType",
"(",
"result_t",
")",
"signature",
"=",
"'D->Dd'",
"if",
"isComplexType",
"(",
"t",
")",
"else",
"'d->dd'",
"sign",
",",
"logdet",
"=",
"_umath_linalg",
".",
"slogdet",
"(",
"a",
",",
"signature",
"=",
"signature",
")",
"sign",
"=",
"sign",
".",
"astype",
"(",
"result_t",
",",
"copy",
"=",
"False",
")",
"logdet",
"=",
"logdet",
".",
"astype",
"(",
"real_t",
",",
"copy",
"=",
"False",
")",
"return",
"sign",
",",
"logdet"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py2/numpy/linalg/linalg.py#L1954-L2037 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/pandas/py2/pandas/core/nanops.py | python | _wrap_results | (result, dtype, fill_value=None) | return result | wrap our results if needed | wrap our results if needed | [
"wrap",
"our",
"results",
"if",
"needed"
] | def _wrap_results(result, dtype, fill_value=None):
""" wrap our results if needed """
if is_datetime64_dtype(dtype) or is_datetime64tz_dtype(dtype):
if fill_value is None:
# GH#24293
fill_value = iNaT
if not isinstance(result, np.ndarray):
tz = getattr(dtype, 'tz', None)
assert not isna(fill_value), "Expected non-null fill_value"
if result == fill_value:
result = np.nan
result = tslibs.Timestamp(result, tz=tz)
else:
result = result.view(dtype)
elif is_timedelta64_dtype(dtype):
if not isinstance(result, np.ndarray):
if result == fill_value:
result = np.nan
# raise if we have a timedelta64[ns] which is too large
if np.fabs(result) > _int64_max:
raise ValueError("overflow in timedelta operation")
result = tslibs.Timedelta(result, unit='ns')
else:
result = result.astype('i8').view(dtype)
return result | [
"def",
"_wrap_results",
"(",
"result",
",",
"dtype",
",",
"fill_value",
"=",
"None",
")",
":",
"if",
"is_datetime64_dtype",
"(",
"dtype",
")",
"or",
"is_datetime64tz_dtype",
"(",
"dtype",
")",
":",
"if",
"fill_value",
"is",
"None",
":",
"# GH#24293",
"fill_value",
"=",
"iNaT",
"if",
"not",
"isinstance",
"(",
"result",
",",
"np",
".",
"ndarray",
")",
":",
"tz",
"=",
"getattr",
"(",
"dtype",
",",
"'tz'",
",",
"None",
")",
"assert",
"not",
"isna",
"(",
"fill_value",
")",
",",
"\"Expected non-null fill_value\"",
"if",
"result",
"==",
"fill_value",
":",
"result",
"=",
"np",
".",
"nan",
"result",
"=",
"tslibs",
".",
"Timestamp",
"(",
"result",
",",
"tz",
"=",
"tz",
")",
"else",
":",
"result",
"=",
"result",
".",
"view",
"(",
"dtype",
")",
"elif",
"is_timedelta64_dtype",
"(",
"dtype",
")",
":",
"if",
"not",
"isinstance",
"(",
"result",
",",
"np",
".",
"ndarray",
")",
":",
"if",
"result",
"==",
"fill_value",
":",
"result",
"=",
"np",
".",
"nan",
"# raise if we have a timedelta64[ns] which is too large",
"if",
"np",
".",
"fabs",
"(",
"result",
")",
">",
"_int64_max",
":",
"raise",
"ValueError",
"(",
"\"overflow in timedelta operation\"",
")",
"result",
"=",
"tslibs",
".",
"Timedelta",
"(",
"result",
",",
"unit",
"=",
"'ns'",
")",
"else",
":",
"result",
"=",
"result",
".",
"astype",
"(",
"'i8'",
")",
".",
"view",
"(",
"dtype",
")",
"return",
"result"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/nanops.py#L276-L304 | |
Tencent/CMONGO | c40380caa14e05509f46993aa8b8da966b09b0b5 | src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Variables/__init__.py | python | Variables.__init__ | (self, files=None, args=None, is_global=1) | files - [optional] List of option configuration files to load
(backward compatibility) If a single string is passed it is
automatically placed in a file list | files - [optional] List of option configuration files to load
(backward compatibility) If a single string is passed it is
automatically placed in a file list | [
"files",
"-",
"[",
"optional",
"]",
"List",
"of",
"option",
"configuration",
"files",
"to",
"load",
"(",
"backward",
"compatibility",
")",
"If",
"a",
"single",
"string",
"is",
"passed",
"it",
"is",
"automatically",
"placed",
"in",
"a",
"file",
"list"
] | def __init__(self, files=None, args=None, is_global=1):
"""
files - [optional] List of option configuration files to load
(backward compatibility) If a single string is passed it is
automatically placed in a file list
"""
# initialize arguments
if files is None:
files = []
if args is None:
args = {}
self.options = []
self.args = args
if not SCons.Util.is_List(files):
if files:
files = [ files ]
else:
files = []
self.files = files
self.unknown = {}
# create the singleton instance
if is_global:
self=Variables.instance
if not Variables.instance:
Variables.instance=self | [
"def",
"__init__",
"(",
"self",
",",
"files",
"=",
"None",
",",
"args",
"=",
"None",
",",
"is_global",
"=",
"1",
")",
":",
"# initialize arguments",
"if",
"files",
"is",
"None",
":",
"files",
"=",
"[",
"]",
"if",
"args",
"is",
"None",
":",
"args",
"=",
"{",
"}",
"self",
".",
"options",
"=",
"[",
"]",
"self",
".",
"args",
"=",
"args",
"if",
"not",
"SCons",
".",
"Util",
".",
"is_List",
"(",
"files",
")",
":",
"if",
"files",
":",
"files",
"=",
"[",
"files",
"]",
"else",
":",
"files",
"=",
"[",
"]",
"self",
".",
"files",
"=",
"files",
"self",
".",
"unknown",
"=",
"{",
"}",
"# create the singleton instance",
"if",
"is_global",
":",
"self",
"=",
"Variables",
".",
"instance",
"if",
"not",
"Variables",
".",
"instance",
":",
"Variables",
".",
"instance",
"=",
"self"
] | https://github.com/Tencent/CMONGO/blob/c40380caa14e05509f46993aa8b8da966b09b0b5/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Variables/__init__.py#L53-L79 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/_windows.py | python | PyWindow.DoSetSize | (*args, **kwargs) | return _windows_.PyWindow_DoSetSize(*args, **kwargs) | DoSetSize(self, int x, int y, int width, int height, int sizeFlags=SIZE_AUTO) | DoSetSize(self, int x, int y, int width, int height, int sizeFlags=SIZE_AUTO) | [
"DoSetSize",
"(",
"self",
"int",
"x",
"int",
"y",
"int",
"width",
"int",
"height",
"int",
"sizeFlags",
"=",
"SIZE_AUTO",
")"
] | def DoSetSize(*args, **kwargs):
"""DoSetSize(self, int x, int y, int width, int height, int sizeFlags=SIZE_AUTO)"""
return _windows_.PyWindow_DoSetSize(*args, **kwargs) | [
"def",
"DoSetSize",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"PyWindow_DoSetSize",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_windows.py#L4150-L4152 | |
microsoft/onnxruntime | f92e47e95b13a240e37caf7b36577983544f98fc | orttraining/tools/scripts/nv_run_pretraining.py | python | pretraining_dataset.__len__ | (self) | return len(self.inputs[0]) | Denotes the total number of samples | Denotes the total number of samples | [
"Denotes",
"the",
"total",
"number",
"of",
"samples"
] | def __len__(self):
'Denotes the total number of samples'
return len(self.inputs[0]) | [
"def",
"__len__",
"(",
"self",
")",
":",
"return",
"len",
"(",
"self",
".",
"inputs",
"[",
"0",
"]",
")"
] | https://github.com/microsoft/onnxruntime/blob/f92e47e95b13a240e37caf7b36577983544f98fc/orttraining/tools/scripts/nv_run_pretraining.py#L82-L84 | |
google/flatbuffers | b3006913369e0a7550795e477011ac5bebb93497 | python/flatbuffers/encode.py | python | Get | (packer_type, buf, head) | return packer_type.unpack_from(memoryview_type(buf), head)[0] | Get decodes a value at buf[head] using `packer_type`. | Get decodes a value at buf[head] using `packer_type`. | [
"Get",
"decodes",
"a",
"value",
"at",
"buf",
"[",
"head",
"]",
"using",
"packer_type",
"."
] | def Get(packer_type, buf, head):
""" Get decodes a value at buf[head] using `packer_type`. """
return packer_type.unpack_from(memoryview_type(buf), head)[0] | [
"def",
"Get",
"(",
"packer_type",
",",
"buf",
",",
"head",
")",
":",
"return",
"packer_type",
".",
"unpack_from",
"(",
"memoryview_type",
"(",
"buf",
")",
",",
"head",
")",
"[",
"0",
"]"
] | https://github.com/google/flatbuffers/blob/b3006913369e0a7550795e477011ac5bebb93497/python/flatbuffers/encode.py#L24-L26 | |
apache/incubator-mxnet | f03fb23f1d103fec9541b5ae59ee06b1734a51d9 | python/mxnet/numpy/multiarray.py | python | ravel | (x, order='C') | return _mx_nd_np.ravel(x, order) | r"""
ravel(x)
Return a contiguous flattened array.
A 1-D array, containing the elements of the input, is returned. A copy is
made only if needed.
Parameters
----------
x : ndarray
Input array. The elements in `x` are read in row-major, C-style order and
packed as a 1-D array.
order : `C`, optional
Only support row-major, C-style order.
Returns
-------
y : ndarray
y is an array of the same subtype as `x`, with shape ``(x.size,)``.
Note that matrices are special cased for backward compatibility, if `x`
is a matrix, then y is a 1-D ndarray.
.. note::
This function differs from the original numpy.arange in the following aspects:
* Only support row-major, C-style order.
Examples
--------
It is equivalent to ``reshape(x, -1)``.
>>> x = np.array([[1, 2, 3], [4, 5, 6]])
>>> print(np.ravel(x))
[1. 2. 3. 4. 5. 6.]
>>> print(x.reshape(-1))
[1. 2. 3. 4. 5. 6.]
>>> print(np.ravel(x.T))
[1. 4. 2. 5. 3. 6.] | r"""
ravel(x) | [
"r",
"ravel",
"(",
"x",
")"
] | def ravel(x, order='C'):
r"""
ravel(x)
Return a contiguous flattened array.
A 1-D array, containing the elements of the input, is returned. A copy is
made only if needed.
Parameters
----------
x : ndarray
Input array. The elements in `x` are read in row-major, C-style order and
packed as a 1-D array.
order : `C`, optional
Only support row-major, C-style order.
Returns
-------
y : ndarray
y is an array of the same subtype as `x`, with shape ``(x.size,)``.
Note that matrices are special cased for backward compatibility, if `x`
is a matrix, then y is a 1-D ndarray.
.. note::
This function differs from the original numpy.arange in the following aspects:
* Only support row-major, C-style order.
Examples
--------
It is equivalent to ``reshape(x, -1)``.
>>> x = np.array([[1, 2, 3], [4, 5, 6]])
>>> print(np.ravel(x))
[1. 2. 3. 4. 5. 6.]
>>> print(x.reshape(-1))
[1. 2. 3. 4. 5. 6.]
>>> print(np.ravel(x.T))
[1. 4. 2. 5. 3. 6.]
"""
return _mx_nd_np.ravel(x, order) | [
"def",
"ravel",
"(",
"x",
",",
"order",
"=",
"'C'",
")",
":",
"return",
"_mx_nd_np",
".",
"ravel",
"(",
"x",
",",
"order",
")"
] | https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/numpy/multiarray.py#L8799-L8841 | |
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/keras/saving/saved_model/load.py | python | KerasObjectLoader._add_children_recreated_from_config | (self, obj, proto, node_id) | Recursively records objects recreated from config. | Recursively records objects recreated from config. | [
"Recursively",
"records",
"objects",
"recreated",
"from",
"config",
"."
] | def _add_children_recreated_from_config(self, obj, proto, node_id):
"""Recursively records objects recreated from config."""
# pylint: disable=protected-access
if node_id in self._traversed_nodes_from_config:
return
parent_path = self._node_paths[node_id]
self._traversed_nodes_from_config.add(node_id)
obj._maybe_initialize_trackable()
if isinstance(obj, base_layer.Layer) and not obj.built:
metadata = json_utils.decode(self._metadata[node_id].metadata)
self._try_build_layer(obj, node_id, metadata.get('build_input_shape'))
# Create list of all possible children
children = []
# Look for direct children
for reference in proto.children:
obj_child = obj._lookup_dependency(reference.local_name)
children.append((obj_child, reference.node_id, reference.local_name))
# Add metrics that may have been added to the layer._metrics list.
# This is stored in the SavedModel as layer.keras_api.layer_metrics in
# SavedModels created after Tf 2.2.
metric_list_node_id = self._search_for_child_node(
node_id, [constants.KERAS_ATTR, 'layer_metrics'])
if metric_list_node_id is not None and hasattr(obj, '_metrics'):
obj_metrics = {m.name: m for m in obj._metrics}
for reference in self._proto.nodes[metric_list_node_id].children:
metric = obj_metrics.get(reference.local_name)
if metric is not None:
metric_path = '{}.layer_metrics.{}'.format(constants.KERAS_ATTR,
reference.local_name)
children.append((metric, reference.node_id, metric_path))
for (obj_child, child_id, child_name) in children:
child_proto = self._proto.nodes[child_id]
if not isinstance(obj_child, trackable.Trackable):
continue
if (child_proto.user_object.identifier in
revived_types.registered_identifiers()):
setter = revived_types.get_setter(child_proto.user_object)
elif obj_child._object_identifier in constants.KERAS_OBJECT_IDENTIFIERS:
setter = _revive_setter
else:
setter = setattr
# pylint: enable=protected-access
if child_id in self.loaded_nodes:
if self.loaded_nodes[child_id][0] is not obj_child:
# This means that the same trackable object is referenced by two
# different objects that were recreated from the config.
logging.warning(
'Looks like there is an object (perhaps variable or '
'layer) that is shared between different layers/models. '
'This may cause issues when restoring the variable '
'values. Object: {}'.format(obj_child))
continue
# Overwrite variable names with the ones saved in the SavedModel.
if (child_proto.WhichOneof('kind') == 'variable' and
child_proto.variable.name):
obj_child._handle_name = child_proto.variable.name + ':0' # pylint: disable=protected-access
if isinstance(obj_child, data_structures.TrackableDataStructure):
setter = lambda *args: None
child_path = '{}.{}'.format(parent_path, child_name)
self._node_paths[child_id] = child_path
self._add_children_recreated_from_config(
obj_child, child_proto, child_id)
self.loaded_nodes[child_id] = obj_child, setter | [
"def",
"_add_children_recreated_from_config",
"(",
"self",
",",
"obj",
",",
"proto",
",",
"node_id",
")",
":",
"# pylint: disable=protected-access",
"if",
"node_id",
"in",
"self",
".",
"_traversed_nodes_from_config",
":",
"return",
"parent_path",
"=",
"self",
".",
"_node_paths",
"[",
"node_id",
"]",
"self",
".",
"_traversed_nodes_from_config",
".",
"add",
"(",
"node_id",
")",
"obj",
".",
"_maybe_initialize_trackable",
"(",
")",
"if",
"isinstance",
"(",
"obj",
",",
"base_layer",
".",
"Layer",
")",
"and",
"not",
"obj",
".",
"built",
":",
"metadata",
"=",
"json_utils",
".",
"decode",
"(",
"self",
".",
"_metadata",
"[",
"node_id",
"]",
".",
"metadata",
")",
"self",
".",
"_try_build_layer",
"(",
"obj",
",",
"node_id",
",",
"metadata",
".",
"get",
"(",
"'build_input_shape'",
")",
")",
"# Create list of all possible children",
"children",
"=",
"[",
"]",
"# Look for direct children",
"for",
"reference",
"in",
"proto",
".",
"children",
":",
"obj_child",
"=",
"obj",
".",
"_lookup_dependency",
"(",
"reference",
".",
"local_name",
")",
"children",
".",
"append",
"(",
"(",
"obj_child",
",",
"reference",
".",
"node_id",
",",
"reference",
".",
"local_name",
")",
")",
"# Add metrics that may have been added to the layer._metrics list.",
"# This is stored in the SavedModel as layer.keras_api.layer_metrics in",
"# SavedModels created after Tf 2.2.",
"metric_list_node_id",
"=",
"self",
".",
"_search_for_child_node",
"(",
"node_id",
",",
"[",
"constants",
".",
"KERAS_ATTR",
",",
"'layer_metrics'",
"]",
")",
"if",
"metric_list_node_id",
"is",
"not",
"None",
"and",
"hasattr",
"(",
"obj",
",",
"'_metrics'",
")",
":",
"obj_metrics",
"=",
"{",
"m",
".",
"name",
":",
"m",
"for",
"m",
"in",
"obj",
".",
"_metrics",
"}",
"for",
"reference",
"in",
"self",
".",
"_proto",
".",
"nodes",
"[",
"metric_list_node_id",
"]",
".",
"children",
":",
"metric",
"=",
"obj_metrics",
".",
"get",
"(",
"reference",
".",
"local_name",
")",
"if",
"metric",
"is",
"not",
"None",
":",
"metric_path",
"=",
"'{}.layer_metrics.{}'",
".",
"format",
"(",
"constants",
".",
"KERAS_ATTR",
",",
"reference",
".",
"local_name",
")",
"children",
".",
"append",
"(",
"(",
"metric",
",",
"reference",
".",
"node_id",
",",
"metric_path",
")",
")",
"for",
"(",
"obj_child",
",",
"child_id",
",",
"child_name",
")",
"in",
"children",
":",
"child_proto",
"=",
"self",
".",
"_proto",
".",
"nodes",
"[",
"child_id",
"]",
"if",
"not",
"isinstance",
"(",
"obj_child",
",",
"trackable",
".",
"Trackable",
")",
":",
"continue",
"if",
"(",
"child_proto",
".",
"user_object",
".",
"identifier",
"in",
"revived_types",
".",
"registered_identifiers",
"(",
")",
")",
":",
"setter",
"=",
"revived_types",
".",
"get_setter",
"(",
"child_proto",
".",
"user_object",
")",
"elif",
"obj_child",
".",
"_object_identifier",
"in",
"constants",
".",
"KERAS_OBJECT_IDENTIFIERS",
":",
"setter",
"=",
"_revive_setter",
"else",
":",
"setter",
"=",
"setattr",
"# pylint: enable=protected-access",
"if",
"child_id",
"in",
"self",
".",
"loaded_nodes",
":",
"if",
"self",
".",
"loaded_nodes",
"[",
"child_id",
"]",
"[",
"0",
"]",
"is",
"not",
"obj_child",
":",
"# This means that the same trackable object is referenced by two",
"# different objects that were recreated from the config.",
"logging",
".",
"warning",
"(",
"'Looks like there is an object (perhaps variable or '",
"'layer) that is shared between different layers/models. '",
"'This may cause issues when restoring the variable '",
"'values. Object: {}'",
".",
"format",
"(",
"obj_child",
")",
")",
"continue",
"# Overwrite variable names with the ones saved in the SavedModel.",
"if",
"(",
"child_proto",
".",
"WhichOneof",
"(",
"'kind'",
")",
"==",
"'variable'",
"and",
"child_proto",
".",
"variable",
".",
"name",
")",
":",
"obj_child",
".",
"_handle_name",
"=",
"child_proto",
".",
"variable",
".",
"name",
"+",
"':0'",
"# pylint: disable=protected-access",
"if",
"isinstance",
"(",
"obj_child",
",",
"data_structures",
".",
"TrackableDataStructure",
")",
":",
"setter",
"=",
"lambda",
"*",
"args",
":",
"None",
"child_path",
"=",
"'{}.{}'",
".",
"format",
"(",
"parent_path",
",",
"child_name",
")",
"self",
".",
"_node_paths",
"[",
"child_id",
"]",
"=",
"child_path",
"self",
".",
"_add_children_recreated_from_config",
"(",
"obj_child",
",",
"child_proto",
",",
"child_id",
")",
"self",
".",
"loaded_nodes",
"[",
"child_id",
"]",
"=",
"obj_child",
",",
"setter"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/keras/saving/saved_model/load.py#L308-L379 | ||
SFTtech/openage | d6a08c53c48dc1e157807471df92197f6ca9e04d | openage/convert/entity_object/conversion/genie_structure.py | python | GenieStructure.get_data_format | (cls, game_version, allowed_modes=False,
flatten_includes=False, is_parent=False) | return all members of this exportable (a struct.)
can filter by export modes and can also return included members:
inherited members can either be returned as to-be-included,
or can be fetched and displayed as if they weren't inherited. | return all members of this exportable (a struct.) | [
"return",
"all",
"members",
"of",
"this",
"exportable",
"(",
"a",
"struct",
".",
")"
] | def get_data_format(cls, game_version, allowed_modes=False,
flatten_includes=False, is_parent=False):
"""
return all members of this exportable (a struct.)
can filter by export modes and can also return included members:
inherited members can either be returned as to-be-included,
or can be fetched and displayed as if they weren't inherited.
"""
for member in cls.get_data_format_members(game_version):
if len(member) != 4:
print(member[1])
export, _, _, read_type = member
definitively_return_member = False
if isinstance(read_type, IncludeMembers):
if flatten_includes:
# recursive call
yield from read_type.cls.get_data_format(game_version,
allowed_modes,
flatten_includes,
is_parent=True)
continue
elif isinstance(read_type, ContinueReadMember):
definitively_return_member = True
if allowed_modes:
if export not in allowed_modes:
if not definitively_return_member:
continue
member_entry = (is_parent,) + member
yield member_entry | [
"def",
"get_data_format",
"(",
"cls",
",",
"game_version",
",",
"allowed_modes",
"=",
"False",
",",
"flatten_includes",
"=",
"False",
",",
"is_parent",
"=",
"False",
")",
":",
"for",
"member",
"in",
"cls",
".",
"get_data_format_members",
"(",
"game_version",
")",
":",
"if",
"len",
"(",
"member",
")",
"!=",
"4",
":",
"print",
"(",
"member",
"[",
"1",
"]",
")",
"export",
",",
"_",
",",
"_",
",",
"read_type",
"=",
"member",
"definitively_return_member",
"=",
"False",
"if",
"isinstance",
"(",
"read_type",
",",
"IncludeMembers",
")",
":",
"if",
"flatten_includes",
":",
"# recursive call",
"yield",
"from",
"read_type",
".",
"cls",
".",
"get_data_format",
"(",
"game_version",
",",
"allowed_modes",
",",
"flatten_includes",
",",
"is_parent",
"=",
"True",
")",
"continue",
"elif",
"isinstance",
"(",
"read_type",
",",
"ContinueReadMember",
")",
":",
"definitively_return_member",
"=",
"True",
"if",
"allowed_modes",
":",
"if",
"export",
"not",
"in",
"allowed_modes",
":",
"if",
"not",
"definitively_return_member",
":",
"continue",
"member_entry",
"=",
"(",
"is_parent",
",",
")",
"+",
"member",
"yield",
"member_entry"
] | https://github.com/SFTtech/openage/blob/d6a08c53c48dc1e157807471df92197f6ca9e04d/openage/convert/entity_object/conversion/genie_structure.py#L507-L542 | ||
widelands/widelands | e9f047d46a23d81312237d52eabf7d74e8de52d6 | utils/glossary_checks.py | python | translation_has_term | (entry, target) | return result | Verify the target translation against all translation variations from
the glossary. | Verify the target translation against all translation variations from
the glossary. | [
"Verify",
"the",
"target",
"translation",
"against",
"all",
"translation",
"variations",
"from",
"the",
"glossary",
"."
] | def translation_has_term(entry, target):
"""Verify the target translation against all translation variations from
the glossary."""
result = False
for translation in entry.translations:
if contains_term(target, translation):
result = True
break
return result | [
"def",
"translation_has_term",
"(",
"entry",
",",
"target",
")",
":",
"result",
"=",
"False",
"for",
"translation",
"in",
"entry",
".",
"translations",
":",
"if",
"contains_term",
"(",
"target",
",",
"translation",
")",
":",
"result",
"=",
"True",
"break",
"return",
"result"
] | https://github.com/widelands/widelands/blob/e9f047d46a23d81312237d52eabf7d74e8de52d6/utils/glossary_checks.py#L331-L339 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/tools/Editra/src/autocomp/pycomp.py | python | PyCompleter.get_completions | (self, context, match='', ctip=False) | Get the completions for the given context
@param context: command string to get completions for
@keyword match: for matching an incomplete command string
@keyword ctip: Get a calltip for the context instead of completion list
@return: list of dictionaries | Get the completions for the given context
@param context: command string to get completions for
@keyword match: for matching an incomplete command string
@keyword ctip: Get a calltip for the context instead of completion list
@return: list of dictionaries | [
"Get",
"the",
"completions",
"for",
"the",
"given",
"context",
"@param",
"context",
":",
"command",
"string",
"to",
"get",
"completions",
"for",
"@keyword",
"match",
":",
"for",
"matching",
"an",
"incomplete",
"command",
"string",
"@keyword",
"ctip",
":",
"Get",
"a",
"calltip",
"for",
"the",
"context",
"instead",
"of",
"completion",
"list",
"@return",
":",
"list",
"of",
"dictionaries"
] | def get_completions(self, context, match='', ctip=False):
"""Get the completions for the given context
@param context: command string to get completions for
@keyword match: for matching an incomplete command string
@keyword ctip: Get a calltip for the context instead of completion list
@return: list of dictionaries
"""
dbg("[pycomp] get_completions('%s','%s')" % (context, match))
stmt = context + match
try:
result = None
compdict = {}
ridx = stmt.rfind('.')
if len(stmt) > 0 and stmt[-1] == '(':
if ctip:
# Try introspect.getCallTip since it is generally
# better at getting tips for c modules
tip = introspect.getCallTip(_sanitize(stmt),
self.compldict)[2]
if not isinstance(tip, basestring):
tip = u""
if not tip:
# Internal calltip code
result = eval(_sanitize(stmt.rstrip('(')), self.compldict)
doc = max(getattr(result, '__doc__', ''), ' ')
argval = context + _cleanstr(self.get_arguments(result))
tip = '\n'.join([argval, _cleanstr(doc)])
dbg("[pycomp][info] Used internal calltips")
return tip
elif ridx == -1:
match = stmt
compdict = self.compldict
else:
match = stmt[ridx+1:]
stmt = _sanitize(stmt[:ridx])
result = eval(stmt, self.compldict)
compdict = dir(result)
dbg("[pycomp] completing: stmt:%s" % stmt)
completions = []
isdict = isinstance(compdict, dict)
for meth in compdict:
if meth == "_PyCmplNoType":
continue #this is internal
try:
# dbg('[pycomp] possible completion: %s' % meth)
if meth.find(match) == 0:
if result is None:
# NOTE: when result is none compdict is a list
inst = meth #compdict[meth]
else:
inst = getattr(result, meth, None)
# TODO: necessary check to handle some odd swig related
# errors. Find out why type 'swigvarlink' causes
# the exception Unknown C global variable.
if len(dir(inst)):
doc = getattr(inst, '__doc__', None)
if doc is None:
doc = max(getattr(result, '__doc__', ' '), ' ')
else:
doc = ' '
if isdict:
typestr = str(compdict[inst])
else:
typestr = str(inst)
comp = {'word' : meth,
'abbr' : meth,
'info' : _cleanstr(str(doc)),
'type' : typestr}
if "function" in typestr:
comp['word'] += '('
comp['abbr'] += '(' + _cleanstr(self.get_arguments(inst))
comp['type'] = "function"
elif "method" in typestr or "slot wrapper" in typestr:
comp['word'] += '('
comp['abbr'] += '(' + _cleanstr(self.get_arguments(inst))
comp['type'] = "method"
elif "module" in typestr:
comp['word'] += '.'
comp['type'] = "module"
elif "class" in typestr:
comp['word'] += '('
comp['abbr'] += '('
comp['type'] = "class"
elif "attribute" in typestr or \
(not typestr.startswith('__') and \
not typestr.startswith('<')):
comp['type'] = "attribute"
elif "property" in typestr:
comp['type'] = "property"
# else:
# print typestr, meth
completions.append(comp)
except Exception, msg:
dbg("[pycomp][err] inner completion: %s [stmt='%s']:" % (msg, stmt))
return completions
except Exception, msg:
dbg("[pycomp][err] get_completions: %s [stmt='%s']" % (msg, stmt))
if ctip:
return u""
return list() | [
"def",
"get_completions",
"(",
"self",
",",
"context",
",",
"match",
"=",
"''",
",",
"ctip",
"=",
"False",
")",
":",
"dbg",
"(",
"\"[pycomp] get_completions('%s','%s')\"",
"%",
"(",
"context",
",",
"match",
")",
")",
"stmt",
"=",
"context",
"+",
"match",
"try",
":",
"result",
"=",
"None",
"compdict",
"=",
"{",
"}",
"ridx",
"=",
"stmt",
".",
"rfind",
"(",
"'.'",
")",
"if",
"len",
"(",
"stmt",
")",
">",
"0",
"and",
"stmt",
"[",
"-",
"1",
"]",
"==",
"'('",
":",
"if",
"ctip",
":",
"# Try introspect.getCallTip since it is generally",
"# better at getting tips for c modules",
"tip",
"=",
"introspect",
".",
"getCallTip",
"(",
"_sanitize",
"(",
"stmt",
")",
",",
"self",
".",
"compldict",
")",
"[",
"2",
"]",
"if",
"not",
"isinstance",
"(",
"tip",
",",
"basestring",
")",
":",
"tip",
"=",
"u\"\"",
"if",
"not",
"tip",
":",
"# Internal calltip code",
"result",
"=",
"eval",
"(",
"_sanitize",
"(",
"stmt",
".",
"rstrip",
"(",
"'('",
")",
")",
",",
"self",
".",
"compldict",
")",
"doc",
"=",
"max",
"(",
"getattr",
"(",
"result",
",",
"'__doc__'",
",",
"''",
")",
",",
"' '",
")",
"argval",
"=",
"context",
"+",
"_cleanstr",
"(",
"self",
".",
"get_arguments",
"(",
"result",
")",
")",
"tip",
"=",
"'\\n'",
".",
"join",
"(",
"[",
"argval",
",",
"_cleanstr",
"(",
"doc",
")",
"]",
")",
"dbg",
"(",
"\"[pycomp][info] Used internal calltips\"",
")",
"return",
"tip",
"elif",
"ridx",
"==",
"-",
"1",
":",
"match",
"=",
"stmt",
"compdict",
"=",
"self",
".",
"compldict",
"else",
":",
"match",
"=",
"stmt",
"[",
"ridx",
"+",
"1",
":",
"]",
"stmt",
"=",
"_sanitize",
"(",
"stmt",
"[",
":",
"ridx",
"]",
")",
"result",
"=",
"eval",
"(",
"stmt",
",",
"self",
".",
"compldict",
")",
"compdict",
"=",
"dir",
"(",
"result",
")",
"dbg",
"(",
"\"[pycomp] completing: stmt:%s\"",
"%",
"stmt",
")",
"completions",
"=",
"[",
"]",
"isdict",
"=",
"isinstance",
"(",
"compdict",
",",
"dict",
")",
"for",
"meth",
"in",
"compdict",
":",
"if",
"meth",
"==",
"\"_PyCmplNoType\"",
":",
"continue",
"#this is internal",
"try",
":",
"# dbg('[pycomp] possible completion: %s' % meth)",
"if",
"meth",
".",
"find",
"(",
"match",
")",
"==",
"0",
":",
"if",
"result",
"is",
"None",
":",
"# NOTE: when result is none compdict is a list",
"inst",
"=",
"meth",
"#compdict[meth]",
"else",
":",
"inst",
"=",
"getattr",
"(",
"result",
",",
"meth",
",",
"None",
")",
"# TODO: necessary check to handle some odd swig related",
"# errors. Find out why type 'swigvarlink' causes",
"# the exception Unknown C global variable.",
"if",
"len",
"(",
"dir",
"(",
"inst",
")",
")",
":",
"doc",
"=",
"getattr",
"(",
"inst",
",",
"'__doc__'",
",",
"None",
")",
"if",
"doc",
"is",
"None",
":",
"doc",
"=",
"max",
"(",
"getattr",
"(",
"result",
",",
"'__doc__'",
",",
"' '",
")",
",",
"' '",
")",
"else",
":",
"doc",
"=",
"' '",
"if",
"isdict",
":",
"typestr",
"=",
"str",
"(",
"compdict",
"[",
"inst",
"]",
")",
"else",
":",
"typestr",
"=",
"str",
"(",
"inst",
")",
"comp",
"=",
"{",
"'word'",
":",
"meth",
",",
"'abbr'",
":",
"meth",
",",
"'info'",
":",
"_cleanstr",
"(",
"str",
"(",
"doc",
")",
")",
",",
"'type'",
":",
"typestr",
"}",
"if",
"\"function\"",
"in",
"typestr",
":",
"comp",
"[",
"'word'",
"]",
"+=",
"'('",
"comp",
"[",
"'abbr'",
"]",
"+=",
"'('",
"+",
"_cleanstr",
"(",
"self",
".",
"get_arguments",
"(",
"inst",
")",
")",
"comp",
"[",
"'type'",
"]",
"=",
"\"function\"",
"elif",
"\"method\"",
"in",
"typestr",
"or",
"\"slot wrapper\"",
"in",
"typestr",
":",
"comp",
"[",
"'word'",
"]",
"+=",
"'('",
"comp",
"[",
"'abbr'",
"]",
"+=",
"'('",
"+",
"_cleanstr",
"(",
"self",
".",
"get_arguments",
"(",
"inst",
")",
")",
"comp",
"[",
"'type'",
"]",
"=",
"\"method\"",
"elif",
"\"module\"",
"in",
"typestr",
":",
"comp",
"[",
"'word'",
"]",
"+=",
"'.'",
"comp",
"[",
"'type'",
"]",
"=",
"\"module\"",
"elif",
"\"class\"",
"in",
"typestr",
":",
"comp",
"[",
"'word'",
"]",
"+=",
"'('",
"comp",
"[",
"'abbr'",
"]",
"+=",
"'('",
"comp",
"[",
"'type'",
"]",
"=",
"\"class\"",
"elif",
"\"attribute\"",
"in",
"typestr",
"or",
"(",
"not",
"typestr",
".",
"startswith",
"(",
"'__'",
")",
"and",
"not",
"typestr",
".",
"startswith",
"(",
"'<'",
")",
")",
":",
"comp",
"[",
"'type'",
"]",
"=",
"\"attribute\"",
"elif",
"\"property\"",
"in",
"typestr",
":",
"comp",
"[",
"'type'",
"]",
"=",
"\"property\"",
"# else:",
"# print typestr, meth",
"completions",
".",
"append",
"(",
"comp",
")",
"except",
"Exception",
",",
"msg",
":",
"dbg",
"(",
"\"[pycomp][err] inner completion: %s [stmt='%s']:\"",
"%",
"(",
"msg",
",",
"stmt",
")",
")",
"return",
"completions",
"except",
"Exception",
",",
"msg",
":",
"dbg",
"(",
"\"[pycomp][err] get_completions: %s [stmt='%s']\"",
"%",
"(",
"msg",
",",
"stmt",
")",
")",
"if",
"ctip",
":",
"return",
"u\"\"",
"return",
"list",
"(",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/autocomp/pycomp.py#L284-L394 | ||
takemaru/graphillion | 51879f92bb96b53ef8f914ef37a05252ce383617 | graphillion/graphset.py | python | GraphSet.quotient_update | (self, other) | return self | Updates `self` by the quotient.
Examples:
>>> graph1 = [(1, 2), (1, 4)]
>>> graph2 = [(2, 3), (2, 5)]
>>> graph3 = [(1, 4)]
>>> gs = GraphSet([graph1, graph2])
>>> gs /= GraphSet([graph3])
>>> gs
GraphSet([[(1, 2)]])
Returns:
A new GraphSet object.
See Also:
quotient() | Updates `self` by the quotient. | [
"Updates",
"self",
"by",
"the",
"quotient",
"."
] | def quotient_update(self, other):
"""Updates `self` by the quotient.
Examples:
>>> graph1 = [(1, 2), (1, 4)]
>>> graph2 = [(2, 3), (2, 5)]
>>> graph3 = [(1, 4)]
>>> gs = GraphSet([graph1, graph2])
>>> gs /= GraphSet([graph3])
>>> gs
GraphSet([[(1, 2)]])
Returns:
A new GraphSet object.
See Also:
quotient()
"""
self._ss.quotient_update(other._ss)
return self | [
"def",
"quotient_update",
"(",
"self",
",",
"other",
")",
":",
"self",
".",
"_ss",
".",
"quotient_update",
"(",
"other",
".",
"_ss",
")",
"return",
"self"
] | https://github.com/takemaru/graphillion/blob/51879f92bb96b53ef8f914ef37a05252ce383617/graphillion/graphset.py#L399-L418 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/difflib.py | python | context_diff | (a, b, fromfile='', tofile='',
fromfiledate='', tofiledate='', n=3, lineterm='\n') | r"""
Compare two sequences of lines; generate the delta as a context diff.
Context diffs are a compact way of showing line changes and a few
lines of context. The number of context lines is set by 'n' which
defaults to three.
By default, the diff control lines (those with *** or ---) are
created with a trailing newline. This is helpful so that inputs
created from file.readlines() result in diffs that are suitable for
file.writelines() since both the inputs and outputs have trailing
newlines.
For inputs that do not have trailing newlines, set the lineterm
argument to "" so that the output will be uniformly newline free.
The context diff format normally has a header for filenames and
modification times. Any or all of these may be specified using
strings for 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'.
The modification times are normally expressed in the ISO 8601 format.
If not specified, the strings default to blanks.
Example:
>>> print(''.join(context_diff('one\ntwo\nthree\nfour\n'.splitlines(True),
... 'zero\none\ntree\nfour\n'.splitlines(True), 'Original', 'Current')),
... end="")
*** Original
--- Current
***************
*** 1,4 ****
one
! two
! three
four
--- 1,4 ----
+ zero
one
! tree
four | r"""
Compare two sequences of lines; generate the delta as a context diff. | [
"r",
"Compare",
"two",
"sequences",
"of",
"lines",
";",
"generate",
"the",
"delta",
"as",
"a",
"context",
"diff",
"."
] | def context_diff(a, b, fromfile='', tofile='',
fromfiledate='', tofiledate='', n=3, lineterm='\n'):
r"""
Compare two sequences of lines; generate the delta as a context diff.
Context diffs are a compact way of showing line changes and a few
lines of context. The number of context lines is set by 'n' which
defaults to three.
By default, the diff control lines (those with *** or ---) are
created with a trailing newline. This is helpful so that inputs
created from file.readlines() result in diffs that are suitable for
file.writelines() since both the inputs and outputs have trailing
newlines.
For inputs that do not have trailing newlines, set the lineterm
argument to "" so that the output will be uniformly newline free.
The context diff format normally has a header for filenames and
modification times. Any or all of these may be specified using
strings for 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'.
The modification times are normally expressed in the ISO 8601 format.
If not specified, the strings default to blanks.
Example:
>>> print(''.join(context_diff('one\ntwo\nthree\nfour\n'.splitlines(True),
... 'zero\none\ntree\nfour\n'.splitlines(True), 'Original', 'Current')),
... end="")
*** Original
--- Current
***************
*** 1,4 ****
one
! two
! three
four
--- 1,4 ----
+ zero
one
! tree
four
"""
_check_types(a, b, fromfile, tofile, fromfiledate, tofiledate, lineterm)
prefix = dict(insert='+ ', delete='- ', replace='! ', equal=' ')
started = False
for group in SequenceMatcher(None,a,b).get_grouped_opcodes(n):
if not started:
started = True
fromdate = '\t{}'.format(fromfiledate) if fromfiledate else ''
todate = '\t{}'.format(tofiledate) if tofiledate else ''
yield '*** {}{}{}'.format(fromfile, fromdate, lineterm)
yield '--- {}{}{}'.format(tofile, todate, lineterm)
first, last = group[0], group[-1]
yield '***************' + lineterm
file1_range = _format_range_context(first[1], last[2])
yield '*** {} ****{}'.format(file1_range, lineterm)
if any(tag in {'replace', 'delete'} for tag, _, _, _, _ in group):
for tag, i1, i2, _, _ in group:
if tag != 'insert':
for line in a[i1:i2]:
yield prefix[tag] + line
file2_range = _format_range_context(first[3], last[4])
yield '--- {} ----{}'.format(file2_range, lineterm)
if any(tag in {'replace', 'insert'} for tag, _, _, _, _ in group):
for tag, _, _, j1, j2 in group:
if tag != 'delete':
for line in b[j1:j2]:
yield prefix[tag] + line | [
"def",
"context_diff",
"(",
"a",
",",
"b",
",",
"fromfile",
"=",
"''",
",",
"tofile",
"=",
"''",
",",
"fromfiledate",
"=",
"''",
",",
"tofiledate",
"=",
"''",
",",
"n",
"=",
"3",
",",
"lineterm",
"=",
"'\\n'",
")",
":",
"_check_types",
"(",
"a",
",",
"b",
",",
"fromfile",
",",
"tofile",
",",
"fromfiledate",
",",
"tofiledate",
",",
"lineterm",
")",
"prefix",
"=",
"dict",
"(",
"insert",
"=",
"'+ '",
",",
"delete",
"=",
"'- '",
",",
"replace",
"=",
"'! '",
",",
"equal",
"=",
"' '",
")",
"started",
"=",
"False",
"for",
"group",
"in",
"SequenceMatcher",
"(",
"None",
",",
"a",
",",
"b",
")",
".",
"get_grouped_opcodes",
"(",
"n",
")",
":",
"if",
"not",
"started",
":",
"started",
"=",
"True",
"fromdate",
"=",
"'\\t{}'",
".",
"format",
"(",
"fromfiledate",
")",
"if",
"fromfiledate",
"else",
"''",
"todate",
"=",
"'\\t{}'",
".",
"format",
"(",
"tofiledate",
")",
"if",
"tofiledate",
"else",
"''",
"yield",
"'*** {}{}{}'",
".",
"format",
"(",
"fromfile",
",",
"fromdate",
",",
"lineterm",
")",
"yield",
"'--- {}{}{}'",
".",
"format",
"(",
"tofile",
",",
"todate",
",",
"lineterm",
")",
"first",
",",
"last",
"=",
"group",
"[",
"0",
"]",
",",
"group",
"[",
"-",
"1",
"]",
"yield",
"'***************'",
"+",
"lineterm",
"file1_range",
"=",
"_format_range_context",
"(",
"first",
"[",
"1",
"]",
",",
"last",
"[",
"2",
"]",
")",
"yield",
"'*** {} ****{}'",
".",
"format",
"(",
"file1_range",
",",
"lineterm",
")",
"if",
"any",
"(",
"tag",
"in",
"{",
"'replace'",
",",
"'delete'",
"}",
"for",
"tag",
",",
"_",
",",
"_",
",",
"_",
",",
"_",
"in",
"group",
")",
":",
"for",
"tag",
",",
"i1",
",",
"i2",
",",
"_",
",",
"_",
"in",
"group",
":",
"if",
"tag",
"!=",
"'insert'",
":",
"for",
"line",
"in",
"a",
"[",
"i1",
":",
"i2",
"]",
":",
"yield",
"prefix",
"[",
"tag",
"]",
"+",
"line",
"file2_range",
"=",
"_format_range_context",
"(",
"first",
"[",
"3",
"]",
",",
"last",
"[",
"4",
"]",
")",
"yield",
"'--- {} ----{}'",
".",
"format",
"(",
"file2_range",
",",
"lineterm",
")",
"if",
"any",
"(",
"tag",
"in",
"{",
"'replace'",
",",
"'insert'",
"}",
"for",
"tag",
",",
"_",
",",
"_",
",",
"_",
",",
"_",
"in",
"group",
")",
":",
"for",
"tag",
",",
"_",
",",
"_",
",",
"j1",
",",
"j2",
"in",
"group",
":",
"if",
"tag",
"!=",
"'delete'",
":",
"for",
"line",
"in",
"b",
"[",
"j1",
":",
"j2",
"]",
":",
"yield",
"prefix",
"[",
"tag",
"]",
"+",
"line"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/difflib.py#L1221-L1295 | ||
fifengine/fifengine | 4b62c42e85bec19893cef8e63e6855927cff2c47 | engine/python/fife/extensions/serializers/simplexml.py | python | SimpleSerializer.getModuleNameList | (self) | @note: Returns all the module names that are present in the
settings.xml file as a list of strings | [] | def getModuleNameList(self):
"""
@note: Returns all the module names that are present in the
settings.xml file as a list of strings
"""
pass | [
"def",
"getModuleNameList",
"(",
"self",
")",
":",
"pass"
] | https://github.com/fifengine/fifengine/blob/4b62c42e85bec19893cef8e63e6855927cff2c47/engine/python/fife/extensions/serializers/simplexml.py#L71-L76 | |||
adobe/chromium | cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7 | build/android/valgrind_tools.py | python | ValgrindTool.NeedsDebugInfo | (self) | return True | Whether this tool requires debug info.
Returns True if this tool can not work with stripped binaries. | Whether this tool requires debug info. | [
"Whether",
"this",
"tool",
"requires",
"debug",
"info",
"."
] | def NeedsDebugInfo(self):
"""Whether this tool requires debug info.
Returns True if this tool can not work with stripped binaries.
"""
return True | [
"def",
"NeedsDebugInfo",
"(",
"self",
")",
":",
"return",
"True"
] | https://github.com/adobe/chromium/blob/cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7/build/android/valgrind_tools.py#L110-L115 | |
hughperkins/tf-coriander | 970d3df6c11400ad68405f22b0c42a52374e94ca | tensorflow/contrib/tensor_forest/python/tensor_forest.py | python | ForestHParams.fill | (self) | return self | Intelligently sets any non-specific parameters. | Intelligently sets any non-specific parameters. | [
"Intelligently",
"sets",
"any",
"non",
"-",
"specific",
"parameters",
"."
] | def fill(self):
"""Intelligently sets any non-specific parameters."""
# Fail fast if num_classes or num_features isn't set.
_ = getattr(self, 'num_classes')
_ = getattr(self, 'num_features')
self.bagged_num_features = int(self.feature_bagging_fraction *
self.num_features)
self.bagged_features = None
if self.feature_bagging_fraction < 1.0:
self.bagged_features = [random.sample(
range(self.num_features),
self.bagged_num_features) for _ in range(self.num_trees)]
self.regression = getattr(self, 'regression', False)
# Num_outputs is the actual number of outputs (a single prediction for
# classification, a N-dimenensional point for regression).
self.num_outputs = self.num_classes if self.regression else 1
# Add an extra column to classes for storing counts, which is needed for
# regression and avoids having to recompute sums for classification.
self.num_output_columns = self.num_classes + 1
# The Random Forest literature recommends sqrt(# features) for
# classification problems, and p/3 for regression problems.
# TODO(thomaswc): Consider capping this for large number of features.
self.num_splits_to_consider = (
self.num_splits_to_consider or
max(10, int(math.ceil(math.sqrt(self.num_features)))))
# max_fertile_nodes doesn't effect performance, only training speed.
# We therefore set it primarily based upon space considerations.
# Each fertile node takes up num_splits_to_consider times as much
# as space as a non-fertile node. We want the fertile nodes to in
# total only take up as much space as the non-fertile nodes, so
num_fertile = int(math.ceil(self.max_nodes / self.num_splits_to_consider))
# But always use at least 1000 accumulate slots.
num_fertile = max(num_fertile, 1000)
self.max_fertile_nodes = self.max_fertile_nodes or num_fertile
# But it also never needs to be larger than the number of leaves,
# which is max_nodes / 2.
self.max_fertile_nodes = min(self.max_fertile_nodes,
int(math.ceil(self.max_nodes / 2.0)))
# We have num_splits_to_consider slots to fill, and we want to spend
# approximately split_after_samples samples initializing them.
num_split_initializiations_per_input = max(1, int(math.floor(
self.num_splits_to_consider / self.split_after_samples)))
self.split_initializations_per_input = getattr(
self, 'split_initializations_per_input',
num_split_initializiations_per_input)
# If base_random_seed is 0, the current time will be used to seed the
# random number generators for each tree. If non-zero, the i-th tree
# will be seeded with base_random_seed + i.
self.base_random_seed = getattr(self, 'base_random_seed', 0)
return self | [
"def",
"fill",
"(",
"self",
")",
":",
"# Fail fast if num_classes or num_features isn't set.",
"_",
"=",
"getattr",
"(",
"self",
",",
"'num_classes'",
")",
"_",
"=",
"getattr",
"(",
"self",
",",
"'num_features'",
")",
"self",
".",
"bagged_num_features",
"=",
"int",
"(",
"self",
".",
"feature_bagging_fraction",
"*",
"self",
".",
"num_features",
")",
"self",
".",
"bagged_features",
"=",
"None",
"if",
"self",
".",
"feature_bagging_fraction",
"<",
"1.0",
":",
"self",
".",
"bagged_features",
"=",
"[",
"random",
".",
"sample",
"(",
"range",
"(",
"self",
".",
"num_features",
")",
",",
"self",
".",
"bagged_num_features",
")",
"for",
"_",
"in",
"range",
"(",
"self",
".",
"num_trees",
")",
"]",
"self",
".",
"regression",
"=",
"getattr",
"(",
"self",
",",
"'regression'",
",",
"False",
")",
"# Num_outputs is the actual number of outputs (a single prediction for",
"# classification, a N-dimenensional point for regression).",
"self",
".",
"num_outputs",
"=",
"self",
".",
"num_classes",
"if",
"self",
".",
"regression",
"else",
"1",
"# Add an extra column to classes for storing counts, which is needed for",
"# regression and avoids having to recompute sums for classification.",
"self",
".",
"num_output_columns",
"=",
"self",
".",
"num_classes",
"+",
"1",
"# The Random Forest literature recommends sqrt(# features) for",
"# classification problems, and p/3 for regression problems.",
"# TODO(thomaswc): Consider capping this for large number of features.",
"self",
".",
"num_splits_to_consider",
"=",
"(",
"self",
".",
"num_splits_to_consider",
"or",
"max",
"(",
"10",
",",
"int",
"(",
"math",
".",
"ceil",
"(",
"math",
".",
"sqrt",
"(",
"self",
".",
"num_features",
")",
")",
")",
")",
")",
"# max_fertile_nodes doesn't effect performance, only training speed.",
"# We therefore set it primarily based upon space considerations.",
"# Each fertile node takes up num_splits_to_consider times as much",
"# as space as a non-fertile node. We want the fertile nodes to in",
"# total only take up as much space as the non-fertile nodes, so",
"num_fertile",
"=",
"int",
"(",
"math",
".",
"ceil",
"(",
"self",
".",
"max_nodes",
"/",
"self",
".",
"num_splits_to_consider",
")",
")",
"# But always use at least 1000 accumulate slots.",
"num_fertile",
"=",
"max",
"(",
"num_fertile",
",",
"1000",
")",
"self",
".",
"max_fertile_nodes",
"=",
"self",
".",
"max_fertile_nodes",
"or",
"num_fertile",
"# But it also never needs to be larger than the number of leaves,",
"# which is max_nodes / 2.",
"self",
".",
"max_fertile_nodes",
"=",
"min",
"(",
"self",
".",
"max_fertile_nodes",
",",
"int",
"(",
"math",
".",
"ceil",
"(",
"self",
".",
"max_nodes",
"/",
"2.0",
")",
")",
")",
"# We have num_splits_to_consider slots to fill, and we want to spend",
"# approximately split_after_samples samples initializing them.",
"num_split_initializiations_per_input",
"=",
"max",
"(",
"1",
",",
"int",
"(",
"math",
".",
"floor",
"(",
"self",
".",
"num_splits_to_consider",
"/",
"self",
".",
"split_after_samples",
")",
")",
")",
"self",
".",
"split_initializations_per_input",
"=",
"getattr",
"(",
"self",
",",
"'split_initializations_per_input'",
",",
"num_split_initializiations_per_input",
")",
"# If base_random_seed is 0, the current time will be used to seed the",
"# random number generators for each tree. If non-zero, the i-th tree",
"# will be seeded with base_random_seed + i.",
"self",
".",
"base_random_seed",
"=",
"getattr",
"(",
"self",
",",
"'base_random_seed'",
",",
"0",
")",
"return",
"self"
] | https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/contrib/tensor_forest/python/tensor_forest.py#L83-L142 | |
FreeCAD/FreeCAD | ba42231b9c6889b89e064d6d563448ed81e376ec | src/Mod/Show/mTempoVis.py | python | TempoVis.sketchClipPlane | (self, sketch, enable = None) | sketchClipPlane(sketch, enable = None): Clips all objects by plane of sketch.
If enable argument is omitted, calling the routine repeatedly will toggle clipping plane. | sketchClipPlane(sketch, enable = None): Clips all objects by plane of sketch.
If enable argument is omitted, calling the routine repeatedly will toggle clipping plane. | [
"sketchClipPlane",
"(",
"sketch",
"enable",
"=",
"None",
")",
":",
"Clips",
"all",
"objects",
"by",
"plane",
"of",
"sketch",
".",
"If",
"enable",
"argument",
"is",
"omitted",
"calling",
"the",
"routine",
"repeatedly",
"will",
"toggle",
"clipping",
"plane",
"."
] | def sketchClipPlane(self, sketch, enable = None):
'''sketchClipPlane(sketch, enable = None): Clips all objects by plane of sketch.
If enable argument is omitted, calling the routine repeatedly will toggle clipping plane.'''
from .SceneDetails.ClipPlane import ClipPlane
editDoc = Gui.editDocument()
if editDoc is None:
doc = sketch.Document
pla = sketch.getGlobalPlacement()
else:
doc = editDoc.Document
pla = App.Placement(editDoc.EditingTransform)
toggle = {False: 0, True: 1, None: -1}[enable]
self.modify(ClipPlane(doc, toggle, pla, 0.02)) | [
"def",
"sketchClipPlane",
"(",
"self",
",",
"sketch",
",",
"enable",
"=",
"None",
")",
":",
"from",
".",
"SceneDetails",
".",
"ClipPlane",
"import",
"ClipPlane",
"editDoc",
"=",
"Gui",
".",
"editDocument",
"(",
")",
"if",
"editDoc",
"is",
"None",
":",
"doc",
"=",
"sketch",
".",
"Document",
"pla",
"=",
"sketch",
".",
"getGlobalPlacement",
"(",
")",
"else",
":",
"doc",
"=",
"editDoc",
".",
"Document",
"pla",
"=",
"App",
".",
"Placement",
"(",
"editDoc",
".",
"EditingTransform",
")",
"toggle",
"=",
"{",
"False",
":",
"0",
",",
"True",
":",
"1",
",",
"None",
":",
"-",
"1",
"}",
"[",
"enable",
"]",
"self",
".",
"modify",
"(",
"ClipPlane",
"(",
"doc",
",",
"toggle",
",",
"pla",
",",
"0.02",
")",
")"
] | https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Show/mTempoVis.py#L438-L453 | ||
FreeCAD/FreeCAD | ba42231b9c6889b89e064d6d563448ed81e376ec | src/Mod/OpenSCAD/importCSG.py | python | p_resize_action | (p) | resize_action : resize LPAREN keywordargument_list RPAREN OBRACE block_list EBRACE | resize_action : resize LPAREN keywordargument_list RPAREN OBRACE block_list EBRACE | [
"resize_action",
":",
"resize",
"LPAREN",
"keywordargument_list",
"RPAREN",
"OBRACE",
"block_list",
"EBRACE"
] | def p_resize_action(p):
'''
resize_action : resize LPAREN keywordargument_list RPAREN OBRACE block_list EBRACE '''
new_size = p[3]['newsize']
auto = p[3]['auto']
p[6][0].recompute()
if p[6][0].Shape.isNull():
doc.recompute()
p[6][0].Shape.tessellate(0.05)
old_bbox = p[6][0].Shape.BoundBox
old_size = [old_bbox.XLength, old_bbox.YLength, old_bbox.ZLength]
for r in range(0,3):
if auto[r] == '1':
new_size[r] = new_size[0]
if new_size[r] == '0':
new_size[r] = str(old_size[r])
# Calculate a transform matrix from the current bounding box to the new one:
transform_matrix = FreeCAD.Matrix()
scale = FreeCAD.Vector(float(new_size[0])/old_size[0],
float(new_size[1])/old_size[1],
float(new_size[2])/old_size[2])
transform_matrix.scale(scale)
new_part=doc.addObject("Part::FeaturePython",'Matrix Deformation')
new_part.Shape = p[6][0].Shape.transformGeometry(transform_matrix)
if gui:
if FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/OpenSCAD").\
GetBool('useViewProviderTree'):
from OpenSCADFeatures import ViewProviderTree
ViewProviderTree(new_part.ViewObject)
else:
new_part.ViewObject.Proxy = 0
p[6][0].ViewObject.hide()
p[0] = [new_part] | [
"def",
"p_resize_action",
"(",
"p",
")",
":",
"new_size",
"=",
"p",
"[",
"3",
"]",
"[",
"'newsize'",
"]",
"auto",
"=",
"p",
"[",
"3",
"]",
"[",
"'auto'",
"]",
"p",
"[",
"6",
"]",
"[",
"0",
"]",
".",
"recompute",
"(",
")",
"if",
"p",
"[",
"6",
"]",
"[",
"0",
"]",
".",
"Shape",
".",
"isNull",
"(",
")",
":",
"doc",
".",
"recompute",
"(",
")",
"p",
"[",
"6",
"]",
"[",
"0",
"]",
".",
"Shape",
".",
"tessellate",
"(",
"0.05",
")",
"old_bbox",
"=",
"p",
"[",
"6",
"]",
"[",
"0",
"]",
".",
"Shape",
".",
"BoundBox",
"old_size",
"=",
"[",
"old_bbox",
".",
"XLength",
",",
"old_bbox",
".",
"YLength",
",",
"old_bbox",
".",
"ZLength",
"]",
"for",
"r",
"in",
"range",
"(",
"0",
",",
"3",
")",
":",
"if",
"auto",
"[",
"r",
"]",
"==",
"'1'",
":",
"new_size",
"[",
"r",
"]",
"=",
"new_size",
"[",
"0",
"]",
"if",
"new_size",
"[",
"r",
"]",
"==",
"'0'",
":",
"new_size",
"[",
"r",
"]",
"=",
"str",
"(",
"old_size",
"[",
"r",
"]",
")",
"# Calculate a transform matrix from the current bounding box to the new one:",
"transform_matrix",
"=",
"FreeCAD",
".",
"Matrix",
"(",
")",
"scale",
"=",
"FreeCAD",
".",
"Vector",
"(",
"float",
"(",
"new_size",
"[",
"0",
"]",
")",
"/",
"old_size",
"[",
"0",
"]",
",",
"float",
"(",
"new_size",
"[",
"1",
"]",
")",
"/",
"old_size",
"[",
"1",
"]",
",",
"float",
"(",
"new_size",
"[",
"2",
"]",
")",
"/",
"old_size",
"[",
"2",
"]",
")",
"transform_matrix",
".",
"scale",
"(",
"scale",
")",
"new_part",
"=",
"doc",
".",
"addObject",
"(",
"\"Part::FeaturePython\"",
",",
"'Matrix Deformation'",
")",
"new_part",
".",
"Shape",
"=",
"p",
"[",
"6",
"]",
"[",
"0",
"]",
".",
"Shape",
".",
"transformGeometry",
"(",
"transform_matrix",
")",
"if",
"gui",
":",
"if",
"FreeCAD",
".",
"ParamGet",
"(",
"\"User parameter:BaseApp/Preferences/Mod/OpenSCAD\"",
")",
".",
"GetBool",
"(",
"'useViewProviderTree'",
")",
":",
"from",
"OpenSCADFeatures",
"import",
"ViewProviderTree",
"ViewProviderTree",
"(",
"new_part",
".",
"ViewObject",
")",
"else",
":",
"new_part",
".",
"ViewObject",
".",
"Proxy",
"=",
"0",
"p",
"[",
"6",
"]",
"[",
"0",
"]",
".",
"ViewObject",
".",
"hide",
"(",
")",
"p",
"[",
"0",
"]",
"=",
"[",
"new_part",
"]"
] | https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/OpenSCAD/importCSG.py#L497-L533 | ||
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/decimal.py | python | Context.compare_total_mag | (self, a, b) | return a.compare_total_mag(b) | Compares two operands using their abstract representation ignoring sign.
Like compare_total, but with operand's sign ignored and assumed to be 0. | Compares two operands using their abstract representation ignoring sign. | [
"Compares",
"two",
"operands",
"using",
"their",
"abstract",
"representation",
"ignoring",
"sign",
"."
] | def compare_total_mag(self, a, b):
"""Compares two operands using their abstract representation ignoring sign.
Like compare_total, but with operand's sign ignored and assumed to be 0.
"""
a = _convert_other(a, raiseit=True)
return a.compare_total_mag(b) | [
"def",
"compare_total_mag",
"(",
"self",
",",
"a",
",",
"b",
")",
":",
"a",
"=",
"_convert_other",
"(",
"a",
",",
"raiseit",
"=",
"True",
")",
"return",
"a",
".",
"compare_total_mag",
"(",
"b",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/decimal.py#L4113-L4119 | |
thalium/icebox | 99d147d5b9269222225443ce171b4fd46d8985d4 | third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py | python | URI.path | (self) | return ret | Get the path part from an URI | Get the path part from an URI | [
"Get",
"the",
"path",
"part",
"from",
"an",
"URI"
] | def path(self):
"""Get the path part from an URI """
ret = libxml2mod.xmlURIGetPath(self._o)
return ret | [
"def",
"path",
"(",
"self",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlURIGetPath",
"(",
"self",
".",
"_o",
")",
"return",
"ret"
] | https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py#L6196-L6199 | |
ChromiumWebApps/chromium | c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7 | chrome/common/extensions/docs/server2/document_parser.py | python | ParseDocument | (document, expect_title=False) | return parser.parse_result | Parses the title and a document structure form |document| and returns a
ParseResult. | Parses the title and a document structure form |document| and returns a
ParseResult. | [
"Parses",
"the",
"title",
"and",
"a",
"document",
"structure",
"form",
"|document|",
"and",
"returns",
"a",
"ParseResult",
"."
] | def ParseDocument(document, expect_title=False):
'''Parses the title and a document structure form |document| and returns a
ParseResult.
'''
parser = _DocumentParser(expect_title)
parser.feed(document)
parser.close()
return parser.parse_result | [
"def",
"ParseDocument",
"(",
"document",
",",
"expect_title",
"=",
"False",
")",
":",
"parser",
"=",
"_DocumentParser",
"(",
"expect_title",
")",
"parser",
".",
"feed",
"(",
"document",
")",
"parser",
".",
"close",
"(",
")",
"return",
"parser",
".",
"parse_result"
] | https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/chrome/common/extensions/docs/server2/document_parser.py#L60-L67 | |
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | tools/android/loading/controller.py | python | LocalChromeController.__init__ | (self) | Initialize the controller.
Caution: The browser state might need to be manually reseted. | Initialize the controller. | [
"Initialize",
"the",
"controller",
"."
] | def __init__(self):
"""Initialize the controller.
Caution: The browser state might need to be manually reseted.
"""
super(LocalChromeController, self).__init__()
if OPTIONS.no_sandbox:
self.AddChromeArguments(['--no-sandbox'])
self._profile_dir = OPTIONS.local_profile_dir
self._using_temp_profile_dir = self._profile_dir is None
if self._using_temp_profile_dir:
self._profile_dir = tempfile.mkdtemp(suffix='.profile')
self._chrome_env_override = {}
self._metadata['platform'] = {
'os': platform.system()[0] + '-' + platform.release(),
'product_model': 'unknown'
} | [
"def",
"__init__",
"(",
"self",
")",
":",
"super",
"(",
"LocalChromeController",
",",
"self",
")",
".",
"__init__",
"(",
")",
"if",
"OPTIONS",
".",
"no_sandbox",
":",
"self",
".",
"AddChromeArguments",
"(",
"[",
"'--no-sandbox'",
"]",
")",
"self",
".",
"_profile_dir",
"=",
"OPTIONS",
".",
"local_profile_dir",
"self",
".",
"_using_temp_profile_dir",
"=",
"self",
".",
"_profile_dir",
"is",
"None",
"if",
"self",
".",
"_using_temp_profile_dir",
":",
"self",
".",
"_profile_dir",
"=",
"tempfile",
".",
"mkdtemp",
"(",
"suffix",
"=",
"'.profile'",
")",
"self",
".",
"_chrome_env_override",
"=",
"{",
"}",
"self",
".",
"_metadata",
"[",
"'platform'",
"]",
"=",
"{",
"'os'",
":",
"platform",
".",
"system",
"(",
")",
"[",
"0",
"]",
"+",
"'-'",
"+",
"platform",
".",
"release",
"(",
")",
",",
"'product_model'",
":",
"'unknown'",
"}"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/android/loading/controller.py#L464-L480 | ||
grpc/grpc | 27bc6fe7797e43298dc931b96dc57322d0852a9f | src/python/grpcio/grpc/__init__.py | python | ServicerContext.set_trailing_metadata | (self, trailing_metadata) | Sets the trailing metadata for the RPC.
Sets the trailing metadata to be sent upon completion of the RPC.
If this method is invoked multiple times throughout the lifetime of an
RPC, the value supplied in the final invocation will be the value sent
over the wire.
This method need not be called by implementations if they have no
metadata to add to what the gRPC runtime will transmit.
Args:
trailing_metadata: The trailing :term:`metadata`. | Sets the trailing metadata for the RPC. | [
"Sets",
"the",
"trailing",
"metadata",
"for",
"the",
"RPC",
"."
] | def set_trailing_metadata(self, trailing_metadata):
"""Sets the trailing metadata for the RPC.
Sets the trailing metadata to be sent upon completion of the RPC.
If this method is invoked multiple times throughout the lifetime of an
RPC, the value supplied in the final invocation will be the value sent
over the wire.
This method need not be called by implementations if they have no
metadata to add to what the gRPC runtime will transmit.
Args:
trailing_metadata: The trailing :term:`metadata`.
"""
raise NotImplementedError() | [
"def",
"set_trailing_metadata",
"(",
"self",
",",
"trailing_metadata",
")",
":",
"raise",
"NotImplementedError",
"(",
")"
] | https://github.com/grpc/grpc/blob/27bc6fe7797e43298dc931b96dc57322d0852a9f/src/python/grpcio/grpc/__init__.py#L1164-L1179 | ||
SoarGroup/Soar | a1c5e249499137a27da60533c72969eef3b8ab6b | scons/scons-local-4.1.0/SCons/Util.py | python | AddMethod | (obj, function, name=None) | Adds a method to an object.
Adds `function` to `obj` if `obj` is a class object.
Adds `function` as a bound method if `obj` is an instance object.
If `obj` looks like an environment instance, use `MethodWrapper`
to add it. If `name` is supplied it is used as the name of `function`.
Although this works for any class object, the intent as a public
API is to be used on Environment, to be able to add a method to all
construction environments; it is preferred to use env.AddMethod
to add to an individual environment.
Example::
class A:
...
a = A()
def f(self, x, y):
self.z = x + y
AddMethod(f, A, "add")
a.add(2, 4)
print(a.z)
AddMethod(lambda self, i: self.l[i], a, "listIndex")
print(a.listIndex(5)) | Adds a method to an object. | [
"Adds",
"a",
"method",
"to",
"an",
"object",
"."
] | def AddMethod(obj, function, name=None):
"""Adds a method to an object.
Adds `function` to `obj` if `obj` is a class object.
Adds `function` as a bound method if `obj` is an instance object.
If `obj` looks like an environment instance, use `MethodWrapper`
to add it. If `name` is supplied it is used as the name of `function`.
Although this works for any class object, the intent as a public
API is to be used on Environment, to be able to add a method to all
construction environments; it is preferred to use env.AddMethod
to add to an individual environment.
Example::
class A:
...
a = A()
def f(self, x, y):
self.z = x + y
AddMethod(f, A, "add")
a.add(2, 4)
print(a.z)
AddMethod(lambda self, i: self.l[i], a, "listIndex")
print(a.listIndex(5))
"""
if name is None:
name = function.__name__
else:
# "rename"
function = FunctionType(
function.__code__, function.__globals__, name, function.__defaults__
)
if hasattr(obj, '__class__') and obj.__class__ is not type:
# obj is an instance, so it gets a bound method.
if hasattr(obj, "added_methods"):
method = MethodWrapper(obj, function, name)
obj.added_methods.append(method)
else:
method = MethodType(function, obj)
else:
# obj is a class
method = function
setattr(obj, name, method) | [
"def",
"AddMethod",
"(",
"obj",
",",
"function",
",",
"name",
"=",
"None",
")",
":",
"if",
"name",
"is",
"None",
":",
"name",
"=",
"function",
".",
"__name__",
"else",
":",
"# \"rename\"",
"function",
"=",
"FunctionType",
"(",
"function",
".",
"__code__",
",",
"function",
".",
"__globals__",
",",
"name",
",",
"function",
".",
"__defaults__",
")",
"if",
"hasattr",
"(",
"obj",
",",
"'__class__'",
")",
"and",
"obj",
".",
"__class__",
"is",
"not",
"type",
":",
"# obj is an instance, so it gets a bound method.",
"if",
"hasattr",
"(",
"obj",
",",
"\"added_methods\"",
")",
":",
"method",
"=",
"MethodWrapper",
"(",
"obj",
",",
"function",
",",
"name",
")",
"obj",
".",
"added_methods",
".",
"append",
"(",
"method",
")",
"else",
":",
"method",
"=",
"MethodType",
"(",
"function",
",",
"obj",
")",
"else",
":",
"# obj is a class",
"method",
"=",
"function",
"setattr",
"(",
"obj",
",",
"name",
",",
"method",
")"
] | https://github.com/SoarGroup/Soar/blob/a1c5e249499137a27da60533c72969eef3b8ab6b/scons/scons-local-4.1.0/SCons/Util.py#L1424-L1469 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/AWSPythonSDK/1.5.8/docutils/utils/math/math2html.py | python | FilePosition.extract | (self, length) | return self.reader.currentline()[self.pos : self.pos + length] | Extract the next string of the given length, or None if not enough text. | Extract the next string of the given length, or None if not enough text. | [
"Extract",
"the",
"next",
"string",
"of",
"the",
"given",
"length",
"or",
"None",
"if",
"not",
"enough",
"text",
"."
] | def extract(self, length):
"Extract the next string of the given length, or None if not enough text."
if self.pos + length > len(self.reader.currentline()):
return None
return self.reader.currentline()[self.pos : self.pos + length] | [
"def",
"extract",
"(",
"self",
",",
"length",
")",
":",
"if",
"self",
".",
"pos",
"+",
"length",
">",
"len",
"(",
"self",
".",
"reader",
".",
"currentline",
"(",
")",
")",
":",
"return",
"None",
"return",
"self",
".",
"reader",
".",
"currentline",
"(",
")",
"[",
"self",
".",
"pos",
":",
"self",
".",
"pos",
"+",
"length",
"]"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/docutils/utils/math/math2html.py#L2165-L2169 | |
NVIDIA/DALI | bf16cc86ba8f091b145f91962f21fe1b6aff243d | docs/examples/use_cases/tensorflow/efficientdet/dataset/create_pascal_tfrecord.py | python | get_ann_id | () | return GLOBAL_ANN_ID | Return unique annotation id across images. | Return unique annotation id across images. | [
"Return",
"unique",
"annotation",
"id",
"across",
"images",
"."
] | def get_ann_id():
"""Return unique annotation id across images."""
global GLOBAL_ANN_ID
GLOBAL_ANN_ID += 1
return GLOBAL_ANN_ID | [
"def",
"get_ann_id",
"(",
")",
":",
"global",
"GLOBAL_ANN_ID",
"GLOBAL_ANN_ID",
"+=",
"1",
"return",
"GLOBAL_ANN_ID"
] | https://github.com/NVIDIA/DALI/blob/bf16cc86ba8f091b145f91962f21fe1b6aff243d/docs/examples/use_cases/tensorflow/efficientdet/dataset/create_pascal_tfrecord.py#L100-L104 | |
ApolloAuto/apollo-platform | 86d9dc6743b496ead18d597748ebabd34a513289 | ros/ros_comm/rospy/src/rospy/impl/udpros.py | python | UDPROSHandler.init_publisher | (self, topic_name, protocol_params) | return 1, "ready", [UDPROS] | Initialize this node to start publishing to a new UDP location.
@param resolved_name: topic name
@type resolved__name: str
@param protocol_params: requested protocol
parameters. protocol[0] must be the string 'UDPROS'
@type protocol_params: [str, value*]
@return: (code, msg, [UDPROS, addr, port])
@rtype: (int, str, list) | Initialize this node to start publishing to a new UDP location. | [
"Initialize",
"this",
"node",
"to",
"start",
"publishing",
"to",
"a",
"new",
"UDP",
"location",
"."
def init_publisher(self, topic_name, protocol_params):
    """
    Initialize this node to start publishing to a new UDP location.
    @param topic_name: topic name
    @type  topic_name: str
    @param protocol_params: requested protocol
      parameters. protocol_params[0] must be the string 'UDPROS'
    @type  protocol_params: [str, value*]
    @return: (code, msg, [UDPROS, addr, port])
    @rtype: (int, str, list)
    """
    if protocol_params[0] != UDPROS:
        # BUG FIX: this message previously interpolated the undefined
        # name 'protocol', so the failure path itself raised NameError.
        # Report the mismatched protocol id from protocol_params instead.
        return 0, "Internal error: protocol does not match UDPROS: %s" % protocol_params[0], []
    #TODO: connection_id and max_datagram_size are unpacked but not yet used
    _, header, host, port, max_datagram_size = protocol_params
    return 1, "ready", [UDPROS]
"def",
"init_publisher",
"(",
"self",
",",
"topic_name",
",",
"protocol_params",
")",
":",
"if",
"protocol_params",
"[",
"0",
"]",
"!=",
"UDPROS",
":",
"return",
"0",
",",
"\"Internal error: protocol does not match UDPROS: %s\"",
"%",
"protocol",
",",
"[",
"]",
"#TODO",
"_",
",",
"header",
",",
"host",
",",
"port",
",",
"max_datagram_size",
"=",
"protocol_params",
"#TODO: connection_id, max_datagraph_size",
"return",
"1",
",",
"\"ready\"",
",",
"[",
"UDPROS",
"]"
] | https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/ros_comm/rospy/src/rospy/impl/udpros.py#L152-L171 | |
cms-sw/cmssw | fd9de012d503d3405420bcbeec0ec879baa57cf2 | FWCore/PythonUtilities/python/LumiList.py | python | LumiList.removeRuns | (self, runList) | return | removes runs from runList from collection | removes runs from runList from collection | [
"removes",
"runs",
"from",
"runList",
"from",
"collection"
def removeRuns(self, runList):
    """Remove every run listed in runList from this collection.

    Run numbers are normalized to strings before lookup; runs that are
    not present in the collection are silently ignored.
    """
    for run in runList:
        self.compactList.pop(str(run), None)
"def",
"removeRuns",
"(",
"self",
",",
"runList",
")",
":",
"for",
"run",
"in",
"runList",
":",
"run",
"=",
"str",
"(",
"run",
")",
"if",
"run",
"in",
"self",
".",
"compactList",
":",
"del",
"self",
".",
"compactList",
"[",
"run",
"]",
"return"
] | https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/FWCore/PythonUtilities/python/LumiList.py#L322-L331 | |
krishauser/Klampt | 972cc83ea5befac3f653c1ba20f80155768ad519 | Python/klampt/src/robotsim.py | python | PointCloud.transform | (self, R: "double const [9]", t: "double const [3]") | return _robotsim.PointCloud_transform(self, R, t) | r"""
transform(PointCloud self, double const [9] R, double const [3] t)
Transforms all the points by the rigid transform v=R*v+t. | r"""
transform(PointCloud self, double const [9] R, double const [3] t) | [
"r",
"transform",
"(",
"PointCloud",
"self",
"double",
"const",
"[",
"9",
"]",
"R",
"double",
"const",
"[",
"3",
"]",
"t",
")"
def transform(self, R: "double const [9]", t: "double const [3]") -> "void":
    r"""
    transform(PointCloud self, double const [9] R, double const [3] t)

    Transforms all the points by the rigid transform v = R*v + t.

    R is a length-9 array (3x3 rotation) and t a length-3 translation,
    per the SWIG signature above. Delegates to the native _robotsim
    extension; given the void return, this presumably mutates the cloud
    in place rather than returning a new one — confirm against the C++
    implementation.
    """
    return _robotsim.PointCloud_transform(self, R, t)
"def",
"transform",
"(",
"self",
",",
"R",
":",
"\"double const [9]\"",
",",
"t",
":",
"\"double const [3]\"",
")",
"->",
"\"void\"",
":",
"return",
"_robotsim",
".",
"PointCloud_transform",
"(",
"self",
",",
"R",
",",
"t",
")"
] | https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/klampt/src/robotsim.py#L1338-L1346 | |
3drobotics/ardupilot-solo | 05a123b002c11dccc905d4d7703a38e5f36ee723 | Tools/LogAnalyzer/DataflashLog.py | python | LogIterator.next | (self) | return self | increment iterator to next log line | increment iterator to next log line | [
"increment",
"iterator",
"to",
"next",
"log",
"line"
def next(self):
    """Advance the iterator to the next log line and return self."""
    self.currentLine += 1
    if self.currentLine > self.logdata.lineCount:
        # Past the end of the log: leave every per-type cursor untouched.
        return self
    for lineLabel, (idx, lineNum) in list(self.iterators.items()):
        # The first data label under this line type identifies its channel.
        dataLabel = self.logdata.formats[lineLabel].labels[0]
        listData = self.logdata.channels[lineLabel][dataLabel].listData
        # Advance this type's cursor once we have moved past the line it
        # points at, unless it already sits on the final entry.
        if self.currentLine > lineNum and idx < len(listData) - 1:
            self.iterators[lineLabel] = (idx + 1, listData[idx + 1][0])
    return self
"def",
"next",
"(",
"self",
")",
":",
"self",
".",
"currentLine",
"+=",
"1",
"if",
"self",
".",
"currentLine",
">",
"self",
".",
"logdata",
".",
"lineCount",
":",
"return",
"self",
"for",
"lineLabel",
"in",
"self",
".",
"iterators",
".",
"keys",
"(",
")",
":",
"# check if the currentLine has gone past our the line we're pointing to for this type of data",
"dataLabel",
"=",
"self",
".",
"logdata",
".",
"formats",
"[",
"lineLabel",
"]",
".",
"labels",
"[",
"0",
"]",
"(",
"index",
",",
"lineNumber",
")",
"=",
"self",
".",
"iterators",
"[",
"lineLabel",
"]",
"# if so, and it is not the last entry in the log, then increment the indices for all dataLabels under that lineLabel",
"if",
"(",
"self",
".",
"currentLine",
">",
"lineNumber",
")",
"and",
"(",
"index",
"<",
"len",
"(",
"self",
".",
"logdata",
".",
"channels",
"[",
"lineLabel",
"]",
"[",
"dataLabel",
"]",
".",
"listData",
")",
"-",
"1",
")",
":",
"index",
"+=",
"1",
"lineNumber",
"=",
"self",
".",
"logdata",
".",
"channels",
"[",
"lineLabel",
"]",
"[",
"dataLabel",
"]",
".",
"listData",
"[",
"index",
"]",
"[",
"0",
"]",
"self",
".",
"iterators",
"[",
"lineLabel",
"]",
"=",
"(",
"index",
",",
"lineNumber",
")",
"return",
"self"
] | https://github.com/3drobotics/ardupilot-solo/blob/05a123b002c11dccc905d4d7703a38e5f36ee723/Tools/LogAnalyzer/DataflashLog.py#L297-L311 | |
hpi-xnor/BMXNet-v2 | af2b1859eafc5c721b1397cef02f946aaf2ce20d | example/reinforcement-learning/ddpg/policies.py | python | DeterministicMLPPolicy.define_loss | (self, loss_exp) | Define loss of the policy. No need to do so here. | Define loss of the policy. No need to do so here. | [
"Define",
"loss",
"of",
"the",
"policy",
".",
"No",
"need",
"to",
"do",
"so",
"here",
"."
def define_loss(self, loss_exp):
    """
    Define loss of the policy. No need to do so here.

    Deliberately unimplemented: callers must not rely on this method —
    it unconditionally raises. Presumably this deterministic policy is
    trained through the critic's gradient instead of its own loss
    expression; confirm against the surrounding DDPG training code.
    """
    raise NotImplementedError
"def",
"define_loss",
"(",
"self",
",",
"loss_exp",
")",
":",
"raise",
"NotImplementedError"
] | https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/example/reinforcement-learning/ddpg/policies.py#L72-L77 | ||
deepmind/open_spiel | 4ca53bea32bb2875c7385d215424048ae92f78c8 | open_spiel/python/algorithms/adidas_utils/helpers/symmetric/updates.py | python | Solver.__init__ | (self, proj_grad=True, euclidean=False, rnd_init=False,
seed=None) | Ctor. | Ctor. | [
"Ctor",
"."
def __init__(self, proj_grad=True, euclidean=False, rnd_init=False,
             seed=None):
    """Construct the solver.

    Args:
      proj_grad: whether gradients are projected.
      euclidean: if True, updates use the Euclidean descent step;
        otherwise the mirror descent step.
      rnd_init: whether to use random initialization.
      seed: seed for this solver's private numpy RandomState.
    """
    self.num_players = None
    self.proj_grad = proj_grad
    self.rnd_init = rnd_init
    self.lrs = (None, None, None)
    self.has_aux = False
    self.euclidean = euclidean
    # Select the update rule once, up front, instead of branching per call.
    self.update = (self.euc_descent_step if euclidean
                   else self.mirror_descent_step)
    self.seed = seed
    self.random = np.random.RandomState(seed)
"def",
"__init__",
"(",
"self",
",",
"proj_grad",
"=",
"True",
",",
"euclidean",
"=",
"False",
",",
"rnd_init",
"=",
"False",
",",
"seed",
"=",
"None",
")",
":",
"self",
".",
"num_players",
"=",
"None",
"self",
".",
"proj_grad",
"=",
"proj_grad",
"self",
".",
"rnd_init",
"=",
"rnd_init",
"self",
".",
"lrs",
"=",
"(",
"None",
",",
"None",
",",
"None",
")",
"self",
".",
"has_aux",
"=",
"False",
"self",
".",
"euclidean",
"=",
"euclidean",
"if",
"euclidean",
":",
"self",
".",
"update",
"=",
"self",
".",
"euc_descent_step",
"else",
":",
"self",
".",
"update",
"=",
"self",
".",
"mirror_descent_step",
"self",
".",
"seed",
"=",
"seed",
"self",
".",
"random",
"=",
"np",
".",
"random",
".",
"RandomState",
"(",
"seed",
")"
] | https://github.com/deepmind/open_spiel/blob/4ca53bea32bb2875c7385d215424048ae92f78c8/open_spiel/python/algorithms/adidas_utils/helpers/symmetric/updates.py#L30-L46 | ||
gv22ga/dlib-face-recognition-android | 42d6305cbd85833f2b85bb79b70ab9ab004153c9 | tools/lint/cpplint.py | python | _OutputFormat | () | return _cpplint_state.output_format | Gets the module's output format. | Gets the module's output format. | [
"Gets",
"the",
"module",
"s",
"output",
"format",
"."
def _OutputFormat():
  """Gets the module's output format.

  Returns:
    The output format name currently held by the module-level
    _cpplint_state object.
  """
  return _cpplint_state.output_format
"def",
"_OutputFormat",
"(",
")",
":",
"return",
"_cpplint_state",
".",
"output_format"
] | https://github.com/gv22ga/dlib-face-recognition-android/blob/42d6305cbd85833f2b85bb79b70ab9ab004153c9/tools/lint/cpplint.py#L886-L888 | |
su2code/SU2 | 72b2fa977b64b9683a388920f05298a40d39e5c5 | SU2_PY/SU2_Nastran/pysu2_nastran.py | python | Solver.__setInitialConditions | (self) | This method uses the list of initial modal amplitudes to set the initial conditions | This method uses the list of initial modal amplitudes to set the initial conditions | [
"This",
"method",
"uses",
"the",
"list",
"of",
"initial",
"modal",
"amplitudes",
"to",
"set",
"the",
"initial",
"conditions"
def __setInitialConditions(self):
    """Seed the modal state from the config's INITIAL_MODES table.

    Copies the configured modal amplitudes into q (and the previous-step
    copy q_n), then solves M * qddot = F - C*qdot - K*q so the initial
    modal accelerations are consistent with that state.
    """
    print('Setting initial conditions.')
    print('Using modal amplitudes from config file')
    initial_modes = self.Config["INITIAL_MODES"]
    for imode in range(self.nDof):
        if imode in initial_modes.keys():
            amplitude = float(initial_modes[imode])
            self.q[imode] = amplitude
            self.q_n[imode] = amplitude
    # Residual force for the configured state: F - C*qdot - K*q.
    residual = np.zeros((self.nDof, 1))
    residual += self.F
    residual -= self.C.dot(self.qdot)
    residual -= self.K.dot(self.q)
    self.qddot = linalg.solve(self.M, residual)
    self.qddot_n = np.copy(self.qddot)
    self.a = np.copy(self.qddot)
    self.a_n = np.copy(self.qddot)
"def",
"__setInitialConditions",
"(",
"self",
")",
":",
"print",
"(",
"'Setting initial conditions.'",
")",
"print",
"(",
"'Using modal amplitudes from config file'",
")",
"for",
"imode",
"in",
"range",
"(",
"self",
".",
"nDof",
")",
":",
"if",
"imode",
"in",
"self",
".",
"Config",
"[",
"\"INITIAL_MODES\"",
"]",
".",
"keys",
"(",
")",
":",
"self",
".",
"q",
"[",
"imode",
"]",
"=",
"float",
"(",
"self",
".",
"Config",
"[",
"\"INITIAL_MODES\"",
"]",
"[",
"imode",
"]",
")",
"self",
".",
"q_n",
"[",
"imode",
"]",
"=",
"float",
"(",
"self",
".",
"Config",
"[",
"\"INITIAL_MODES\"",
"]",
"[",
"imode",
"]",
")",
"RHS",
"=",
"np",
".",
"zeros",
"(",
"(",
"self",
".",
"nDof",
",",
"1",
")",
")",
"RHS",
"+=",
"self",
".",
"F",
"RHS",
"-=",
"self",
".",
"C",
".",
"dot",
"(",
"self",
".",
"qdot",
")",
"RHS",
"-=",
"self",
".",
"K",
".",
"dot",
"(",
"self",
".",
"q",
")",
"self",
".",
"qddot",
"=",
"linalg",
".",
"solve",
"(",
"self",
".",
"M",
",",
"RHS",
")",
"self",
".",
"qddot_n",
"=",
"np",
".",
"copy",
"(",
"self",
".",
"qddot",
")",
"self",
".",
"a",
"=",
"np",
".",
"copy",
"(",
"self",
".",
"qddot",
")",
"self",
".",
"a_n",
"=",
"np",
".",
"copy",
"(",
"self",
".",
"qddot",
")"
] | https://github.com/su2code/SU2/blob/72b2fa977b64b9683a388920f05298a40d39e5c5/SU2_PY/SU2_Nastran/pysu2_nastran.py#L697-L717 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cuda/simulator/cudadrv/devicearray.py | python | verify_cuda_ndarray_interface | (obj) | Verify the CUDA ndarray interface for an obj | Verify the CUDA ndarray interface for an obj | [
"Verify",
"the",
"CUDA",
"ndarray",
"interface",
"for",
"an",
"obj"
def verify_cuda_ndarray_interface(obj):
    """Verify that *obj* exposes the CUDA ndarray interface.

    First requires obj to be a CUDA ndarray (via require_cuda_ndarray),
    then checks that shape/strides/dtype/size exist and carry the
    expected types. Both a missing attribute and a mistyped one are
    reported as AttributeError.
    """
    require_cuda_ndarray(obj)

    def _check(name, expected_type):
        if not hasattr(obj, name):
            raise AttributeError(name)
        if not isinstance(getattr(obj, name), expected_type):
            raise AttributeError('%s must be of type %s' % (name, expected_type))

    _check('shape', tuple)
    _check('strides', tuple)
    _check('dtype', np.dtype)
    _check('size', six.integer_types)
"def",
"verify_cuda_ndarray_interface",
"(",
"obj",
")",
":",
"require_cuda_ndarray",
"(",
"obj",
")",
"def",
"requires_attr",
"(",
"attr",
",",
"typ",
")",
":",
"if",
"not",
"hasattr",
"(",
"obj",
",",
"attr",
")",
":",
"raise",
"AttributeError",
"(",
"attr",
")",
"if",
"not",
"isinstance",
"(",
"getattr",
"(",
"obj",
",",
"attr",
")",
",",
"typ",
")",
":",
"raise",
"AttributeError",
"(",
"'%s must be of type %s'",
"%",
"(",
"attr",
",",
"typ",
")",
")",
"requires_attr",
"(",
"'shape'",
",",
"tuple",
")",
"requires_attr",
"(",
"'strides'",
",",
"tuple",
")",
"requires_attr",
"(",
"'dtype'",
",",
"np",
".",
"dtype",
")",
"requires_attr",
"(",
"'size'",
",",
"six",
".",
"integer_types",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cuda/simulator/cudadrv/devicearray.py#L268-L281 | ||
apache/impala | 8ddac48f3428c86f2cbd037ced89cfb903298b12 | shell/ext-py/prettytable-0.7.2/prettytable.py | python | PrettyTable.__init__ | (self, field_names=None, **kwargs) | Return a new PrettyTable instance
Arguments:
encoding - Unicode encoding scheme used to decode any encoded input
field_names - list or tuple of field names
fields - list or tuple of field names to include in displays
start - index of first data row to include in output
end - index of last data row to include in output PLUS ONE (list slice style)
header - print a header showing field names (True or False)
header_style - stylisation to apply to field names in header ("cap", "title", "upper", "lower" or None)
border - print a border around the table (True or False)
hrules - controls printing of horizontal rules after rows. Allowed values: FRAME, HEADER, ALL, NONE
vrules - controls printing of vertical rules between columns. Allowed values: FRAME, ALL, NONE
int_format - controls formatting of integer data
float_format - controls formatting of floating point data
padding_width - number of spaces on either side of column data (only used if left and right paddings are None)
left_padding_width - number of spaces on left hand side of column data
right_padding_width - number of spaces on right hand side of column data
vertical_char - single character string used to draw vertical lines
horizontal_char - single character string used to draw horizontal lines
junction_char - single character string used to draw line junctions
sortby - name of field to sort rows by
sort_key - sorting key function, applied to data points before sorting
valign - default valign for each row (None, "t", "m" or "b")
reversesort - True or False to sort in descending or ascending order | Return a new PrettyTable instance | [
"Return",
"a",
"new",
"PrettyTable",
"instance"
def __init__(self, field_names=None, **kwargs):
    """Return a new PrettyTable instance

    Arguments:

    encoding - Unicode encoding scheme used to decode any encoded input
    field_names - list or tuple of field names
    fields - list or tuple of field names to include in displays
    start - index of first data row to include in output
    end - index of last data row to include in output PLUS ONE (list slice style)
    header - print a header showing field names (True or False)
    header_style - stylisation to apply to field names in header ("cap", "title", "upper", "lower" or None)
    border - print a border around the table (True or False)
    hrules - controls printing of horizontal rules after rows. Allowed values: FRAME, HEADER, ALL, NONE
    vrules - controls printing of vertical rules between columns. Allowed values: FRAME, ALL, NONE
    int_format - controls formatting of integer data
    float_format - controls formatting of floating point data
    padding_width - number of spaces on either side of column data (only used if left and right paddings are None)
    left_padding_width - number of spaces on left hand side of column data
    right_padding_width - number of spaces on right hand side of column data
    vertical_char - single character string used to draw vertical lines
    horizontal_char - single character string used to draw horizontal lines
    junction_char - single character string used to draw line junctions
    sortby - name of field to sort rows by
    sort_key - sorting key function, applied to data points before sorting
    valign - default valign for each row (None, "t", "m" or "b")
    reversesort - True or False to sort in descending or ascending order"""

    # encoding is consumed directly and is not part of the validated
    # option list below.
    self.encoding = kwargs.get("encoding", "UTF-8")

    # Data
    self._field_names = []
    self._align = {}
    self._valign = {}
    self._max_width = {}
    self._rows = []
    if field_names:
        # NOTE(review): assigns via the field_names property, which
        # presumably also sets up per-field width/alignment state
        # elsewhere in the class — confirm before changing.
        self.field_names = field_names
    else:
        self._widths = []

    # Options: canonical list of recognised keyword options. Supplied
    # options are validated; omitted ones are normalised to None so the
    # lookups below never raise KeyError.
    self._options = "start end fields header border sortby reversesort sort_key attributes format hrules vrules".split()
    self._options.extend("int_format float_format padding_width left_padding_width right_padding_width".split())
    self._options.extend("vertical_char horizontal_char junction_char header_style valign xhtml print_empty".split())
    for option in self._options:
        if option in kwargs:
            self._validate_option(option, kwargs[option])
        else:
            kwargs[option] = None

    # "x or default" means a falsy explicit value (0, "", False) also
    # falls back to the default for these options.
    self._start = kwargs["start"] or 0
    self._end = kwargs["end"] or None
    self._fields = kwargs["fields"] or None

    # Explicit membership tests distinguish a caller-supplied boolean
    # from an omitted option (None), which defaults to True.
    if kwargs["header"] in (True, False):
        self._header = kwargs["header"]
    else:
        self._header = True
    self._header_style = kwargs["header_style"] or None
    if kwargs["border"] in (True, False):
        self._border = kwargs["border"]
    else:
        self._border = True
    self._hrules = kwargs["hrules"] or FRAME
    self._vrules = kwargs["vrules"] or ALL

    self._sortby = kwargs["sortby"] or None
    if kwargs["reversesort"] in (True, False):
        self._reversesort = kwargs["reversesort"]
    else:
        self._reversesort = False
    # Identity key keeps sorting stable when no custom key is supplied.
    self._sort_key = kwargs["sort_key"] or (lambda x: x)

    self._int_format = kwargs["int_format"] or {}
    self._float_format = kwargs["float_format"] or {}
    # NOTE(review): an explicit padding_width=0 falls back to 1 here
    # because of the "or" default.
    self._padding_width = kwargs["padding_width"] or 1
    self._left_padding_width = kwargs["left_padding_width"] or None
    self._right_padding_width = kwargs["right_padding_width"] or None

    self._vertical_char = kwargs["vertical_char"] or self._unicode("|")
    self._horizontal_char = kwargs["horizontal_char"] or self._unicode("-")
    self._junction_char = kwargs["junction_char"] or self._unicode("+")

    if kwargs["print_empty"] in (True, False):
        self._print_empty = kwargs["print_empty"]
    else:
        self._print_empty = True
    self._format = kwargs["format"] or False
    self._xhtml = kwargs["xhtml"] or False
    self._attributes = kwargs["attributes"] or {}
"def",
"__init__",
"(",
"self",
",",
"field_names",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"encoding",
"=",
"kwargs",
".",
"get",
"(",
"\"encoding\"",
",",
"\"UTF-8\"",
")",
"# Data",
"self",
".",
"_field_names",
"=",
"[",
"]",
"self",
".",
"_align",
"=",
"{",
"}",
"self",
".",
"_valign",
"=",
"{",
"}",
"self",
".",
"_max_width",
"=",
"{",
"}",
"self",
".",
"_rows",
"=",
"[",
"]",
"if",
"field_names",
":",
"self",
".",
"field_names",
"=",
"field_names",
"else",
":",
"self",
".",
"_widths",
"=",
"[",
"]",
"# Options",
"self",
".",
"_options",
"=",
"\"start end fields header border sortby reversesort sort_key attributes format hrules vrules\"",
".",
"split",
"(",
")",
"self",
".",
"_options",
".",
"extend",
"(",
"\"int_format float_format padding_width left_padding_width right_padding_width\"",
".",
"split",
"(",
")",
")",
"self",
".",
"_options",
".",
"extend",
"(",
"\"vertical_char horizontal_char junction_char header_style valign xhtml print_empty\"",
".",
"split",
"(",
")",
")",
"for",
"option",
"in",
"self",
".",
"_options",
":",
"if",
"option",
"in",
"kwargs",
":",
"self",
".",
"_validate_option",
"(",
"option",
",",
"kwargs",
"[",
"option",
"]",
")",
"else",
":",
"kwargs",
"[",
"option",
"]",
"=",
"None",
"self",
".",
"_start",
"=",
"kwargs",
"[",
"\"start\"",
"]",
"or",
"0",
"self",
".",
"_end",
"=",
"kwargs",
"[",
"\"end\"",
"]",
"or",
"None",
"self",
".",
"_fields",
"=",
"kwargs",
"[",
"\"fields\"",
"]",
"or",
"None",
"if",
"kwargs",
"[",
"\"header\"",
"]",
"in",
"(",
"True",
",",
"False",
")",
":",
"self",
".",
"_header",
"=",
"kwargs",
"[",
"\"header\"",
"]",
"else",
":",
"self",
".",
"_header",
"=",
"True",
"self",
".",
"_header_style",
"=",
"kwargs",
"[",
"\"header_style\"",
"]",
"or",
"None",
"if",
"kwargs",
"[",
"\"border\"",
"]",
"in",
"(",
"True",
",",
"False",
")",
":",
"self",
".",
"_border",
"=",
"kwargs",
"[",
"\"border\"",
"]",
"else",
":",
"self",
".",
"_border",
"=",
"True",
"self",
".",
"_hrules",
"=",
"kwargs",
"[",
"\"hrules\"",
"]",
"or",
"FRAME",
"self",
".",
"_vrules",
"=",
"kwargs",
"[",
"\"vrules\"",
"]",
"or",
"ALL",
"self",
".",
"_sortby",
"=",
"kwargs",
"[",
"\"sortby\"",
"]",
"or",
"None",
"if",
"kwargs",
"[",
"\"reversesort\"",
"]",
"in",
"(",
"True",
",",
"False",
")",
":",
"self",
".",
"_reversesort",
"=",
"kwargs",
"[",
"\"reversesort\"",
"]",
"else",
":",
"self",
".",
"_reversesort",
"=",
"False",
"self",
".",
"_sort_key",
"=",
"kwargs",
"[",
"\"sort_key\"",
"]",
"or",
"(",
"lambda",
"x",
":",
"x",
")",
"self",
".",
"_int_format",
"=",
"kwargs",
"[",
"\"int_format\"",
"]",
"or",
"{",
"}",
"self",
".",
"_float_format",
"=",
"kwargs",
"[",
"\"float_format\"",
"]",
"or",
"{",
"}",
"self",
".",
"_padding_width",
"=",
"kwargs",
"[",
"\"padding_width\"",
"]",
"or",
"1",
"self",
".",
"_left_padding_width",
"=",
"kwargs",
"[",
"\"left_padding_width\"",
"]",
"or",
"None",
"self",
".",
"_right_padding_width",
"=",
"kwargs",
"[",
"\"right_padding_width\"",
"]",
"or",
"None",
"self",
".",
"_vertical_char",
"=",
"kwargs",
"[",
"\"vertical_char\"",
"]",
"or",
"self",
".",
"_unicode",
"(",
"\"|\"",
")",
"self",
".",
"_horizontal_char",
"=",
"kwargs",
"[",
"\"horizontal_char\"",
"]",
"or",
"self",
".",
"_unicode",
"(",
"\"-\"",
")",
"self",
".",
"_junction_char",
"=",
"kwargs",
"[",
"\"junction_char\"",
"]",
"or",
"self",
".",
"_unicode",
"(",
"\"+\"",
")",
"if",
"kwargs",
"[",
"\"print_empty\"",
"]",
"in",
"(",
"True",
",",
"False",
")",
":",
"self",
".",
"_print_empty",
"=",
"kwargs",
"[",
"\"print_empty\"",
"]",
"else",
":",
"self",
".",
"_print_empty",
"=",
"True",
"self",
".",
"_format",
"=",
"kwargs",
"[",
"\"format\"",
"]",
"or",
"False",
"self",
".",
"_xhtml",
"=",
"kwargs",
"[",
"\"xhtml\"",
"]",
"or",
"False",
"self",
".",
"_attributes",
"=",
"kwargs",
"[",
"\"attributes\"",
"]",
"or",
"{",
"}"
] | https://github.com/apache/impala/blob/8ddac48f3428c86f2cbd037ced89cfb903298b12/shell/ext-py/prettytable-0.7.2/prettytable.py#L84-L175 | ||
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/python/saved_model/loader_impl.py | python | _get_asset_tensors | (export_dir, meta_graph_def_to_load) | return asset_tensor_dict | Gets the asset tensors, if defined in the meta graph def to load.
Args:
export_dir: Directory where the SavedModel is located.
meta_graph_def_to_load: The meta graph def from the SavedModel to be loaded.
Returns:
A dictionary of asset tensors, keyed by the name of the asset tensor. The
value in the map corresponds to the absolute path of the asset file. | Gets the asset tensors, if defined in the meta graph def to load. | [
"Gets",
"the",
"asset",
"tensors",
"if",
"defined",
"in",
"the",
"meta",
"graph",
"def",
"to",
"load",
"."
def _get_asset_tensors(export_dir, meta_graph_def_to_load):
  """Gets the asset tensors, if defined in the meta graph def to load.

  Args:
    export_dir: Directory where the SavedModel is located.
    meta_graph_def_to_load: The meta graph def from the SavedModel to be loaded.

  Returns:
    A dictionary of asset tensors, keyed by the name of the asset tensor. The
    value in the map corresponds to the absolute path of the asset file.
  """
  collection_def = meta_graph_def_to_load.collection_def
  if constants.ASSETS_KEY not in collection_def:
    # No assets were recorded with this meta graph.
    return {}
  # All assets live under the SavedModel's assets subdirectory.
  assets_directory = os.path.join(
      compat.as_bytes(export_dir),
      compat.as_bytes(constants.ASSETS_DIRECTORY))
  asset_tensor_dict = {}
  for asset_any_proto in collection_def[constants.ASSETS_KEY].any_list.value:
    # Each entry is an Any-wrapped AssetFileDef; unpack it to resolve the
    # tensor name and its on-disk filename.
    asset_proto = meta_graph_pb2.AssetFileDef()
    asset_any_proto.Unpack(asset_proto)
    asset_tensor_dict[asset_proto.tensor_info.name] = os.path.join(
        compat.as_bytes(assets_directory),
        compat.as_bytes(asset_proto.filename))
  return asset_tensor_dict
"def",
"_get_asset_tensors",
"(",
"export_dir",
",",
"meta_graph_def_to_load",
")",
":",
"# Collection-def that may contain the assets key.",
"collection_def",
"=",
"meta_graph_def_to_load",
".",
"collection_def",
"asset_tensor_dict",
"=",
"{",
"}",
"if",
"constants",
".",
"ASSETS_KEY",
"in",
"collection_def",
":",
"# Location of the assets for SavedModel.",
"assets_directory",
"=",
"os",
".",
"path",
".",
"join",
"(",
"compat",
".",
"as_bytes",
"(",
"export_dir",
")",
",",
"compat",
".",
"as_bytes",
"(",
"constants",
".",
"ASSETS_DIRECTORY",
")",
")",
"assets_any_proto",
"=",
"collection_def",
"[",
"constants",
".",
"ASSETS_KEY",
"]",
".",
"any_list",
".",
"value",
"# Process each asset and add it to the asset tensor dictionary.",
"for",
"asset_any_proto",
"in",
"assets_any_proto",
":",
"asset_proto",
"=",
"meta_graph_pb2",
".",
"AssetFileDef",
"(",
")",
"asset_any_proto",
".",
"Unpack",
"(",
"asset_proto",
")",
"asset_tensor_dict",
"[",
"asset_proto",
".",
"tensor_info",
".",
"name",
"]",
"=",
"os",
".",
"path",
".",
"join",
"(",
"compat",
".",
"as_bytes",
"(",
"assets_directory",
")",
",",
"compat",
".",
"as_bytes",
"(",
"asset_proto",
".",
"filename",
")",
")",
"return",
"asset_tensor_dict"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/saved_model/loader_impl.py#L81-L109 | |
apache/incubator-mxnet | f03fb23f1d103fec9541b5ae59ee06b1734a51d9 | python/mxnet/device.py | python | Device.device_type | (self) | return Device.devtype2str[self.device_typeid] | Returns the device type of current device.
Examples
-------
>>> mx.device.current_device().device_type
'cpu'
>>> mx.current_device().device_type
'cpu'
Returns
-------
device_type : str | Returns the device type of current device. | [
"Returns",
"the",
"device",
"type",
"of",
"current",
"device",
"."
def device_type(self):
    """Returns the device type of current device.

    Examples
    -------
    >>> mx.device.current_device().device_type
    'cpu'
    >>> mx.current_device().device_type
    'cpu'

    Returns
    -------
    device_type : str
        Human-readable device name looked up from Device.devtype2str
        using this device's numeric type id.
    """
    return Device.devtype2str[self.device_typeid]
"def",
"device_type",
"(",
"self",
")",
":",
"return",
"Device",
".",
"devtype2str",
"[",
"self",
".",
"device_typeid",
"]"
] | https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/device.py#L77-L91 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python3/src/Lib/pdb.py | python | Pdb.do_unalias | (self, arg) | unalias name
Delete the specified alias. | unalias name
Delete the specified alias. | [
"unalias",
"name",
"Delete",
"the",
"specified",
"alias",
"."
def do_unalias(self, arg):
    """unalias name
    Delete the specified alias.
    """
    # (Docstring is pdb's user-visible help text; kept verbatim.)
    args = arg.split()
    if not args:
        return
    # Silently ignore names that are not currently aliased.
    self.aliases.pop(args[0], None)
"def",
"do_unalias",
"(",
"self",
",",
"arg",
")",
":",
"args",
"=",
"arg",
".",
"split",
"(",
")",
"if",
"len",
"(",
"args",
")",
"==",
"0",
":",
"return",
"if",
"args",
"[",
"0",
"]",
"in",
"self",
".",
"aliases",
":",
"del",
"self",
".",
"aliases",
"[",
"args",
"[",
"0",
"]",
"]"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/pdb.py#L1432-L1439 | ||
ARM-software/armnn | 5e9965cae1cc6162649910f423ebd86001fc1931 | python/pyarmnn/examples/common/cv_utils.py | python | resize_with_aspect_ratio | (frame: np.ndarray, input_binding_info: tuple) | return frame | Resizes frame while maintaining aspect ratio, padding any empty space.
Args:
frame: Captured frame.
input_binding_info: Contains shape of model input layer.
Returns:
Frame resized to the size of model input layer. | Resizes frame while maintaining aspect ratio, padding any empty space. | [
"Resizes",
"frame",
"while",
"maintaining",
"aspect",
"ratio",
"padding",
"any",
"empty",
"space",
"."
def resize_with_aspect_ratio(frame: np.ndarray, input_binding_info: tuple):
    """Resize *frame* to the model input size, preserving aspect ratio.

    The frame is scaled to fit inside the model input layer's height and
    width, and any leftover space on the bottom/right edge is zero-padded
    so the result always matches the model input size exactly.

    Args:
        frame: Captured frame.
        input_binding_info: Tuple whose second element exposes the model
            input shape via GetShape() (assumed NHWC — confirm against
            the model).

    Returns:
        Frame resized and padded to the size of the model input layer.
    """
    aspect_ratio = frame.shape[1] / frame.shape[0]
    model_height, model_width = list(input_binding_info[1].GetShape())[1:3]

    if aspect_ratio >= 1.0:
        # Wider than tall: fill the full model width, pad the bottom.
        new_width = model_width
        new_height = int(model_width / aspect_ratio)
        b_padding, r_padding = model_height - new_height, 0
    else:
        # Taller than wide: fill the full model height, pad the right.
        new_width = int(model_height * aspect_ratio)
        new_height = model_height
        b_padding, r_padding = 0, model_width - new_width

    resized = cv2.resize(frame, (new_width, new_height), interpolation=cv2.INTER_LINEAR)
    return cv2.copyMakeBorder(resized, top=0, bottom=b_padding, left=0, right=r_padding,
                              borderType=cv2.BORDER_CONSTANT, value=[0, 0, 0])
"def",
"resize_with_aspect_ratio",
"(",
"frame",
":",
"np",
".",
"ndarray",
",",
"input_binding_info",
":",
"tuple",
")",
":",
"aspect_ratio",
"=",
"frame",
".",
"shape",
"[",
"1",
"]",
"/",
"frame",
".",
"shape",
"[",
"0",
"]",
"model_height",
",",
"model_width",
"=",
"list",
"(",
"input_binding_info",
"[",
"1",
"]",
".",
"GetShape",
"(",
")",
")",
"[",
"1",
":",
"3",
"]",
"if",
"aspect_ratio",
">=",
"1.0",
":",
"new_height",
",",
"new_width",
"=",
"int",
"(",
"model_width",
"/",
"aspect_ratio",
")",
",",
"model_width",
"b_padding",
",",
"r_padding",
"=",
"model_height",
"-",
"new_height",
",",
"0",
"else",
":",
"new_height",
",",
"new_width",
"=",
"model_height",
",",
"int",
"(",
"model_height",
"*",
"aspect_ratio",
")",
"b_padding",
",",
"r_padding",
"=",
"0",
",",
"model_width",
"-",
"new_width",
"# Resize and pad any empty space",
"frame",
"=",
"cv2",
".",
"resize",
"(",
"frame",
",",
"(",
"new_width",
",",
"new_height",
")",
",",
"interpolation",
"=",
"cv2",
".",
"INTER_LINEAR",
")",
"frame",
"=",
"cv2",
".",
"copyMakeBorder",
"(",
"frame",
",",
"top",
"=",
"0",
",",
"bottom",
"=",
"b_padding",
",",
"left",
"=",
"0",
",",
"right",
"=",
"r_padding",
",",
"borderType",
"=",
"cv2",
".",
"BORDER_CONSTANT",
",",
"value",
"=",
"[",
"0",
",",
"0",
",",
"0",
"]",
")",
"return",
"frame"
] | https://github.com/ARM-software/armnn/blob/5e9965cae1cc6162649910f423ebd86001fc1931/python/pyarmnn/examples/common/cv_utils.py#L49-L74 | |
google/certificate-transparency | 2588562fd306a447958471b6f06c1069619c1641 | python/utilities/log_list/openssl_generator.py | python | generate_openssl_conf | (json_log_list, output_path) | Given a log list read from JSON, writes an OpenSSL log list to a file | Given a log list read from JSON, writes an OpenSSL log list to a file | [
"Given",
"a",
"log",
"list",
"read",
"from",
"JSON",
"writes",
"an",
"OpenSSL",
"log",
"list",
"to",
"a",
"file"
def generate_openssl_conf(json_log_list, output_path):
    """Write an OpenSSL-format CT log list for the given JSON log list.

    Emits the enabled-logs section first, then one configuration stanza
    per log (newline-separated), to the file at output_path.
    """
    logs = json_log_list["logs"]
    with open(output_path, "w") as output:
        output.write(_enabled_logs_conf(logs) + "\n")
        output.write("\n".join(_log_conf(log) for log in logs))
"def",
"generate_openssl_conf",
"(",
"json_log_list",
",",
"output_path",
")",
":",
"with",
"open",
"(",
"output_path",
",",
"\"w\"",
")",
"as",
"output",
":",
"logs",
"=",
"json_log_list",
"[",
"\"logs\"",
"]",
"log_confs",
"=",
"(",
"_log_conf",
"(",
"log",
")",
"for",
"log",
"in",
"logs",
")",
"output",
".",
"write",
"(",
"_enabled_logs_conf",
"(",
"logs",
")",
"+",
"\"\\n\"",
")",
"output",
".",
"write",
"(",
"\"\\n\"",
".",
"join",
"(",
"log_confs",
")",
")"
] | https://github.com/google/certificate-transparency/blob/2588562fd306a447958471b6f06c1069619c1641/python/utilities/log_list/openssl_generator.py#L36-L43 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/ptyprocess/ptyprocess/ptyprocess.py | python | PtyProcess.__del__ | (self) | This makes sure that no system resources are left open. Python only
garbage collects Python objects. OS file descriptors are not Python
objects, so they must be handled explicitly. If the child file
descriptor was opened outside of this class (passed to the constructor)
then this does not close it. | This makes sure that no system resources are left open. Python only
garbage collects Python objects. OS file descriptors are not Python
objects, so they must be handled explicitly. If the child file
descriptor was opened outside of this class (passed to the constructor)
then this does not close it. | [
"This",
"makes",
"sure",
"that",
"no",
"system",
"resources",
"are",
"left",
"open",
".",
"Python",
"only",
"garbage",
"collects",
"Python",
"objects",
".",
"OS",
"file",
"descriptors",
"are",
"not",
"Python",
"objects",
"so",
"they",
"must",
"be",
"handled",
"explicitly",
".",
"If",
"the",
"child",
"file",
"descriptor",
"was",
"opened",
"outside",
"of",
"this",
"class",
"(",
"passed",
"to",
"the",
"constructor",
")",
"then",
"this",
"does",
"not",
"close",
"it",
"."
] | def __del__(self):
'''This makes sure that no system resources are left open. Python only
garbage collects Python objects. OS file descriptors are not Python
objects, so they must be handled explicitly. If the child file
descriptor was opened outside of this class (passed to the constructor)
then this does not close it. '''
if not self.closed:
# It is possible for __del__ methods to execute during the
# teardown of the Python VM itself. Thus self.close() may
# trigger an exception because os.close may be None.
try:
self.close()
# which exception, shouldn't we catch explicitly .. ?
except:
pass | [
"def",
"__del__",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"closed",
":",
"# It is possible for __del__ methods to execute during the",
"# teardown of the Python VM itself. Thus self.close() may",
"# trigger an exception because os.close may be None.",
"try",
":",
"self",
".",
"close",
"(",
")",
"# which exception, shouldn't we catch explicitly .. ?",
"except",
":",
"pass"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/ptyprocess/ptyprocess/ptyprocess.py#L370-L385 | ||
plaidml/plaidml | f3c6681db21460e5fdc11ae651d6d7b6c27f8262 | plaidml/edsl/__init__.py | python | TensorIndex.__rmul__ | (self, lhs) | return TensorIndex(_poly_op(lib.PLAIDML_INT_OP_MUL, lhs, self)) | Performs a multiplication between a TensorIndex and another operand
in a polynomial expression.
Example:
>>> i, j = TensorIndexes(2)
>>> A = Placeholder(DType.FLOAT32, [3, 3])
>>> R = Contraction().sum(A[5 * i, j]).build() | Performs a multiplication between a TensorIndex and another operand
in a polynomial expression. | [
"Performs",
"a",
"multiplication",
"between",
"a",
"TensorIndex",
"and",
"another",
"operand",
"in",
"a",
"polynomial",
"expression",
"."
] | def __rmul__(self, lhs):
"""Performs a multiplication between a TensorIndex and another operand
in a polynomial expression.
Example:
>>> i, j = TensorIndexes(2)
>>> A = Placeholder(DType.FLOAT32, [3, 3])
>>> R = Contraction().sum(A[5 * i, j]).build()
"""
return TensorIndex(_poly_op(lib.PLAIDML_INT_OP_MUL, lhs, self)) | [
"def",
"__rmul__",
"(",
"self",
",",
"lhs",
")",
":",
"return",
"TensorIndex",
"(",
"_poly_op",
"(",
"lib",
".",
"PLAIDML_INT_OP_MUL",
",",
"lhs",
",",
"self",
")",
")"
] | https://github.com/plaidml/plaidml/blob/f3c6681db21460e5fdc11ae651d6d7b6c27f8262/plaidml/edsl/__init__.py#L258-L267 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/pandas/py2/pandas/core/indexes/multi.py | python | MultiIndex.reindex | (self, target, method=None, level=None, limit=None,
tolerance=None) | return target, indexer | Create index with target's values (move/add/delete values as necessary)
Returns
-------
new_index : pd.MultiIndex
Resulting index
indexer : np.ndarray or None
Indices of output values in original index | Create index with target's values (move/add/delete values as necessary) | [
"Create",
"index",
"with",
"target",
"s",
"values",
"(",
"move",
"/",
"add",
"/",
"delete",
"values",
"as",
"necessary",
")"
] | def reindex(self, target, method=None, level=None, limit=None,
tolerance=None):
"""
Create index with target's values (move/add/delete values as necessary)
Returns
-------
new_index : pd.MultiIndex
Resulting index
indexer : np.ndarray or None
Indices of output values in original index
"""
# GH6552: preserve names when reindexing to non-named target
# (i.e. neither Index nor Series).
preserve_names = not hasattr(target, 'names')
if level is not None:
if method is not None:
raise TypeError('Fill method not supported if level passed')
# GH7774: preserve dtype/tz if target is empty and not an Index.
# target may be an iterator
target = ibase._ensure_has_len(target)
if len(target) == 0 and not isinstance(target, Index):
idx = self.levels[level]
attrs = idx._get_attributes_dict()
attrs.pop('freq', None) # don't preserve freq
target = type(idx)._simple_new(np.empty(0, dtype=idx.dtype),
**attrs)
else:
target = ensure_index(target)
target, indexer, _ = self._join_level(target, level, how='right',
return_indexers=True,
keep_order=False)
else:
target = ensure_index(target)
if self.equals(target):
indexer = None
else:
if self.is_unique:
indexer = self.get_indexer(target, method=method,
limit=limit,
tolerance=tolerance)
else:
raise ValueError("cannot handle a non-unique multi-index!")
if not isinstance(target, MultiIndex):
if indexer is None:
target = self
elif (indexer >= 0).all():
target = self.take(indexer)
else:
# hopefully?
target = MultiIndex.from_tuples(target)
if (preserve_names and target.nlevels == self.nlevels and
target.names != self.names):
target = target.copy(deep=False)
target.names = self.names
return target, indexer | [
"def",
"reindex",
"(",
"self",
",",
"target",
",",
"method",
"=",
"None",
",",
"level",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"tolerance",
"=",
"None",
")",
":",
"# GH6552: preserve names when reindexing to non-named target",
"# (i.e. neither Index nor Series).",
"preserve_names",
"=",
"not",
"hasattr",
"(",
"target",
",",
"'names'",
")",
"if",
"level",
"is",
"not",
"None",
":",
"if",
"method",
"is",
"not",
"None",
":",
"raise",
"TypeError",
"(",
"'Fill method not supported if level passed'",
")",
"# GH7774: preserve dtype/tz if target is empty and not an Index.",
"# target may be an iterator",
"target",
"=",
"ibase",
".",
"_ensure_has_len",
"(",
"target",
")",
"if",
"len",
"(",
"target",
")",
"==",
"0",
"and",
"not",
"isinstance",
"(",
"target",
",",
"Index",
")",
":",
"idx",
"=",
"self",
".",
"levels",
"[",
"level",
"]",
"attrs",
"=",
"idx",
".",
"_get_attributes_dict",
"(",
")",
"attrs",
".",
"pop",
"(",
"'freq'",
",",
"None",
")",
"# don't preserve freq",
"target",
"=",
"type",
"(",
"idx",
")",
".",
"_simple_new",
"(",
"np",
".",
"empty",
"(",
"0",
",",
"dtype",
"=",
"idx",
".",
"dtype",
")",
",",
"*",
"*",
"attrs",
")",
"else",
":",
"target",
"=",
"ensure_index",
"(",
"target",
")",
"target",
",",
"indexer",
",",
"_",
"=",
"self",
".",
"_join_level",
"(",
"target",
",",
"level",
",",
"how",
"=",
"'right'",
",",
"return_indexers",
"=",
"True",
",",
"keep_order",
"=",
"False",
")",
"else",
":",
"target",
"=",
"ensure_index",
"(",
"target",
")",
"if",
"self",
".",
"equals",
"(",
"target",
")",
":",
"indexer",
"=",
"None",
"else",
":",
"if",
"self",
".",
"is_unique",
":",
"indexer",
"=",
"self",
".",
"get_indexer",
"(",
"target",
",",
"method",
"=",
"method",
",",
"limit",
"=",
"limit",
",",
"tolerance",
"=",
"tolerance",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"cannot handle a non-unique multi-index!\"",
")",
"if",
"not",
"isinstance",
"(",
"target",
",",
"MultiIndex",
")",
":",
"if",
"indexer",
"is",
"None",
":",
"target",
"=",
"self",
"elif",
"(",
"indexer",
">=",
"0",
")",
".",
"all",
"(",
")",
":",
"target",
"=",
"self",
".",
"take",
"(",
"indexer",
")",
"else",
":",
"# hopefully?",
"target",
"=",
"MultiIndex",
".",
"from_tuples",
"(",
"target",
")",
"if",
"(",
"preserve_names",
"and",
"target",
".",
"nlevels",
"==",
"self",
".",
"nlevels",
"and",
"target",
".",
"names",
"!=",
"self",
".",
"names",
")",
":",
"target",
"=",
"target",
".",
"copy",
"(",
"deep",
"=",
"False",
")",
"target",
".",
"names",
"=",
"self",
".",
"names",
"return",
"target",
",",
"indexer"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/indexes/multi.py#L2182-L2243 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/setuptools/py2/setuptools/msvc.py | python | RegistryInfo.vc | (self) | return join(self.sxs, 'VC7') | Microsoft Visual C++ VC7 registry key.
Return
------
str
Registry key | Microsoft Visual C++ VC7 registry key. | [
"Microsoft",
"Visual",
"C",
"++",
"VC7",
"registry",
"key",
"."
] | def vc(self):
"""
Microsoft Visual C++ VC7 registry key.
Return
------
str
Registry key
"""
return join(self.sxs, 'VC7') | [
"def",
"vc",
"(",
"self",
")",
":",
"return",
"join",
"(",
"self",
".",
"sxs",
",",
"'VC7'",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/setuptools/py2/setuptools/msvc.py#L383-L392 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/lib/agw/aui/framemanager.py | python | AuiPaneInfo.MinSize2 | (self, x, y) | return self | Sets the minimum size of the pane.
:see: :meth:`MinSize` for an explanation of input parameters. | Sets the minimum size of the pane. | [
"Sets",
"the",
"minimum",
"size",
"of",
"the",
"pane",
"."
] | def MinSize2(self, x, y):
"""
Sets the minimum size of the pane.
:see: :meth:`MinSize` for an explanation of input parameters.
"""
self.min_size = wx.Size(x, y)
return self | [
"def",
"MinSize2",
"(",
"self",
",",
"x",
",",
"y",
")",
":",
"self",
".",
"min_size",
"=",
"wx",
".",
"Size",
"(",
"x",
",",
"y",
")",
"return",
"self"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/aui/framemanager.py#L1071-L1079 | |
idaholab/moose | 9eeebc65e098b4c30f8205fb41591fd5b61eb6ff | python/peacock/PostprocessorViewer/plugins/PostprocessorTableWidget.py | python | PostprocessorTableWidget.sizeHint | (self, *args) | Return the saved size. | Return the saved size. | [
"Return",
"the",
"saved",
"size",
"."
] | def sizeHint(self, *args):
"""
Return the saved size.
"""
if self._size:
return self._size
else:
return super(PostprocessorTableWidget, self).size() | [
"def",
"sizeHint",
"(",
"self",
",",
"*",
"args",
")",
":",
"if",
"self",
".",
"_size",
":",
"return",
"self",
".",
"_size",
"else",
":",
"return",
"super",
"(",
"PostprocessorTableWidget",
",",
"self",
")",
".",
"size",
"(",
")"
] | https://github.com/idaholab/moose/blob/9eeebc65e098b4c30f8205fb41591fd5b61eb6ff/python/peacock/PostprocessorViewer/plugins/PostprocessorTableWidget.py#L25-L32 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/inline_closurecall.py | python | _find_iter_range | (func_ir, range_iter_var, swapped) | Find the iterator's actual range if it is either range(n), or range(m, n),
otherwise return raise GuardException. | Find the iterator's actual range if it is either range(n), or range(m, n),
otherwise return raise GuardException. | [
"Find",
"the",
"iterator",
"s",
"actual",
"range",
"if",
"it",
"is",
"either",
"range",
"(",
"n",
")",
"or",
"range",
"(",
"m",
"n",
")",
"otherwise",
"return",
"raise",
"GuardException",
"."
] | def _find_iter_range(func_ir, range_iter_var, swapped):
"""Find the iterator's actual range if it is either range(n), or range(m, n),
otherwise return raise GuardException.
"""
debug_print = _make_debug_print("find_iter_range")
range_iter_def = get_definition(func_ir, range_iter_var)
debug_print("range_iter_var = ", range_iter_var, " def = ", range_iter_def)
require(isinstance(range_iter_def, ir.Expr) and range_iter_def.op == 'getiter')
range_var = range_iter_def.value
range_def = get_definition(func_ir, range_var)
debug_print("range_var = ", range_var, " range_def = ", range_def)
require(isinstance(range_def, ir.Expr) and range_def.op == 'call')
func_var = range_def.func
func_def = get_definition(func_ir, func_var)
debug_print("func_var = ", func_var, " func_def = ", func_def)
require(isinstance(func_def, ir.Global) and
(func_def.value == range or func_def.value == numba.special.prange))
nargs = len(range_def.args)
swapping = [('"array comprehension"', 'closure of'), range_def.func.loc]
if nargs == 1:
swapped[range_def.func.name] = swapping
stop = get_definition(func_ir, range_def.args[0], lhs_only=True)
return (0, range_def.args[0], func_def)
elif nargs == 2:
swapped[range_def.func.name] = swapping
start = get_definition(func_ir, range_def.args[0], lhs_only=True)
stop = get_definition(func_ir, range_def.args[1], lhs_only=True)
return (start, stop, func_def)
else:
raise GuardException | [
"def",
"_find_iter_range",
"(",
"func_ir",
",",
"range_iter_var",
",",
"swapped",
")",
":",
"debug_print",
"=",
"_make_debug_print",
"(",
"\"find_iter_range\"",
")",
"range_iter_def",
"=",
"get_definition",
"(",
"func_ir",
",",
"range_iter_var",
")",
"debug_print",
"(",
"\"range_iter_var = \"",
",",
"range_iter_var",
",",
"\" def = \"",
",",
"range_iter_def",
")",
"require",
"(",
"isinstance",
"(",
"range_iter_def",
",",
"ir",
".",
"Expr",
")",
"and",
"range_iter_def",
".",
"op",
"==",
"'getiter'",
")",
"range_var",
"=",
"range_iter_def",
".",
"value",
"range_def",
"=",
"get_definition",
"(",
"func_ir",
",",
"range_var",
")",
"debug_print",
"(",
"\"range_var = \"",
",",
"range_var",
",",
"\" range_def = \"",
",",
"range_def",
")",
"require",
"(",
"isinstance",
"(",
"range_def",
",",
"ir",
".",
"Expr",
")",
"and",
"range_def",
".",
"op",
"==",
"'call'",
")",
"func_var",
"=",
"range_def",
".",
"func",
"func_def",
"=",
"get_definition",
"(",
"func_ir",
",",
"func_var",
")",
"debug_print",
"(",
"\"func_var = \"",
",",
"func_var",
",",
"\" func_def = \"",
",",
"func_def",
")",
"require",
"(",
"isinstance",
"(",
"func_def",
",",
"ir",
".",
"Global",
")",
"and",
"(",
"func_def",
".",
"value",
"==",
"range",
"or",
"func_def",
".",
"value",
"==",
"numba",
".",
"special",
".",
"prange",
")",
")",
"nargs",
"=",
"len",
"(",
"range_def",
".",
"args",
")",
"swapping",
"=",
"[",
"(",
"'\"array comprehension\"'",
",",
"'closure of'",
")",
",",
"range_def",
".",
"func",
".",
"loc",
"]",
"if",
"nargs",
"==",
"1",
":",
"swapped",
"[",
"range_def",
".",
"func",
".",
"name",
"]",
"=",
"swapping",
"stop",
"=",
"get_definition",
"(",
"func_ir",
",",
"range_def",
".",
"args",
"[",
"0",
"]",
",",
"lhs_only",
"=",
"True",
")",
"return",
"(",
"0",
",",
"range_def",
".",
"args",
"[",
"0",
"]",
",",
"func_def",
")",
"elif",
"nargs",
"==",
"2",
":",
"swapped",
"[",
"range_def",
".",
"func",
".",
"name",
"]",
"=",
"swapping",
"start",
"=",
"get_definition",
"(",
"func_ir",
",",
"range_def",
".",
"args",
"[",
"0",
"]",
",",
"lhs_only",
"=",
"True",
")",
"stop",
"=",
"get_definition",
"(",
"func_ir",
",",
"range_def",
".",
"args",
"[",
"1",
"]",
",",
"lhs_only",
"=",
"True",
")",
"return",
"(",
"start",
",",
"stop",
",",
"func_def",
")",
"else",
":",
"raise",
"GuardException"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/inline_closurecall.py#L605-L634 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python3/src/Lib/locale.py | python | getlocale | (category=LC_CTYPE) | return _parse_localename(localename) | Returns the current setting for the given locale category as
tuple (language code, encoding).
category may be one of the LC_* value except LC_ALL. It
defaults to LC_CTYPE.
Except for the code 'C', the language code corresponds to RFC
1766. code and encoding can be None in case the values cannot
be determined. | Returns the current setting for the given locale category as
tuple (language code, encoding). | [
"Returns",
"the",
"current",
"setting",
"for",
"the",
"given",
"locale",
"category",
"as",
"tuple",
"(",
"language",
"code",
"encoding",
")",
"."
] | def getlocale(category=LC_CTYPE):
""" Returns the current setting for the given locale category as
tuple (language code, encoding).
category may be one of the LC_* value except LC_ALL. It
defaults to LC_CTYPE.
Except for the code 'C', the language code corresponds to RFC
1766. code and encoding can be None in case the values cannot
be determined.
"""
localename = _setlocale(category)
if category == LC_ALL and ';' in localename:
raise TypeError('category LC_ALL is not supported')
return _parse_localename(localename) | [
"def",
"getlocale",
"(",
"category",
"=",
"LC_CTYPE",
")",
":",
"localename",
"=",
"_setlocale",
"(",
"category",
")",
"if",
"category",
"==",
"LC_ALL",
"and",
"';'",
"in",
"localename",
":",
"raise",
"TypeError",
"(",
"'category LC_ALL is not supported'",
")",
"return",
"_parse_localename",
"(",
"localename",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/locale.py#L577-L593 | |
lmb-freiburg/ogn | 974f72ef4bf840d6f6693d22d1843a79223e77ce | python/caffe/pycaffe.py | python | _Net_blob_loss_weights | (self) | return self._blob_loss_weights_dict | An OrderedDict (bottom to top, i.e., input to output) of network
blob loss weights indexed by name | An OrderedDict (bottom to top, i.e., input to output) of network
blob loss weights indexed by name | [
"An",
"OrderedDict",
"(",
"bottom",
"to",
"top",
"i",
".",
"e",
".",
"input",
"to",
"output",
")",
"of",
"network",
"blob",
"loss",
"weights",
"indexed",
"by",
"name"
] | def _Net_blob_loss_weights(self):
"""
An OrderedDict (bottom to top, i.e., input to output) of network
blob loss weights indexed by name
"""
if not hasattr(self, '_blobs_loss_weights_dict'):
self._blob_loss_weights_dict = OrderedDict(zip(self._blob_names,
self._blob_loss_weights))
return self._blob_loss_weights_dict | [
"def",
"_Net_blob_loss_weights",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'_blobs_loss_weights_dict'",
")",
":",
"self",
".",
"_blob_loss_weights_dict",
"=",
"OrderedDict",
"(",
"zip",
"(",
"self",
".",
"_blob_names",
",",
"self",
".",
"_blob_loss_weights",
")",
")",
"return",
"self",
".",
"_blob_loss_weights_dict"
] | https://github.com/lmb-freiburg/ogn/blob/974f72ef4bf840d6f6693d22d1843a79223e77ce/python/caffe/pycaffe.py#L36-L44 | |
RobotLocomotion/drake | 0e18a34604c45ed65bc9018a54f7610f91cdad5b | tools/workspace/drake_visualizer/_drake_visualizer_builtin_scripts/show_image.py | python | create_image | (w, h, num_channels=1, dtype=np.uint8) | return image | Creates a VTK image. | Creates a VTK image. | [
"Creates",
"a",
"VTK",
"image",
"."
] | def create_image(w, h, num_channels=1, dtype=np.uint8):
""" Creates a VTK image. """
image = vtk.vtkImageData()
image.SetExtent(0, w - 1, 0, h - 1, 0, 0)
image.SetSpacing(1., 1., 1.)
image.SetOrigin(0., 0., 0.)
if _is_vtk_5:
image.SetWholeExtent(image.GetExtent())
image.SetScalarType(get_vtk_array_type(dtype))
image.SetNumberOfScalarComponents(num_channels)
image.AllocateScalars()
else:
image.AllocateScalars(get_vtk_array_type(dtype), num_channels)
return image | [
"def",
"create_image",
"(",
"w",
",",
"h",
",",
"num_channels",
"=",
"1",
",",
"dtype",
"=",
"np",
".",
"uint8",
")",
":",
"image",
"=",
"vtk",
".",
"vtkImageData",
"(",
")",
"image",
".",
"SetExtent",
"(",
"0",
",",
"w",
"-",
"1",
",",
"0",
",",
"h",
"-",
"1",
",",
"0",
",",
"0",
")",
"image",
".",
"SetSpacing",
"(",
"1.",
",",
"1.",
",",
"1.",
")",
"image",
".",
"SetOrigin",
"(",
"0.",
",",
"0.",
",",
"0.",
")",
"if",
"_is_vtk_5",
":",
"image",
".",
"SetWholeExtent",
"(",
"image",
".",
"GetExtent",
"(",
")",
")",
"image",
".",
"SetScalarType",
"(",
"get_vtk_array_type",
"(",
"dtype",
")",
")",
"image",
".",
"SetNumberOfScalarComponents",
"(",
"num_channels",
")",
"image",
".",
"AllocateScalars",
"(",
")",
"else",
":",
"image",
".",
"AllocateScalars",
"(",
"get_vtk_array_type",
"(",
"dtype",
")",
",",
"num_channels",
")",
"return",
"image"
] | https://github.com/RobotLocomotion/drake/blob/0e18a34604c45ed65bc9018a54f7610f91cdad5b/tools/workspace/drake_visualizer/_drake_visualizer_builtin_scripts/show_image.py#L274-L287 | |
microsoft/clang | 86d4513d3e0daa4d5a29b0b1de7c854ca15f9fe5 | bindings/python/clang/cindex.py | python | Cursor.lexical_parent | (self) | return self._lexical_parent | Return the lexical parent for this cursor. | Return the lexical parent for this cursor. | [
"Return",
"the",
"lexical",
"parent",
"for",
"this",
"cursor",
"."
] | def lexical_parent(self):
"""Return the lexical parent for this cursor."""
if not hasattr(self, '_lexical_parent'):
self._lexical_parent = conf.lib.clang_getCursorLexicalParent(self)
return self._lexical_parent | [
"def",
"lexical_parent",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'_lexical_parent'",
")",
":",
"self",
".",
"_lexical_parent",
"=",
"conf",
".",
"lib",
".",
"clang_getCursorLexicalParent",
"(",
"self",
")",
"return",
"self",
".",
"_lexical_parent"
] | https://github.com/microsoft/clang/blob/86d4513d3e0daa4d5a29b0b1de7c854ca15f9fe5/bindings/python/clang/cindex.py#L1747-L1752 | |
ApolloAuto/apollo-platform | 86d9dc6743b496ead18d597748ebabd34a513289 | ros/ros/rosunit/src/rosunit/pmon.py | python | ProcessMonitor.get_active_names | (self) | return retval | @return [str]: list of active process names | [] | def get_active_names(self):
"""
@return [str]: list of active process names
"""
with self.plock:
retval = [p.name for p in self.procs]
return retval | [
"def",
"get_active_names",
"(",
"self",
")",
":",
"with",
"self",
".",
"plock",
":",
"retval",
"=",
"[",
"p",
".",
"name",
"for",
"p",
"in",
"self",
".",
"procs",
"]",
"return",
"retval"
] | https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/ros/rosunit/src/rosunit/pmon.py#L345-L351 | ||
Ewenwan/MVision | 97b394dfa48cb21c82cd003b1a952745e413a17f | vSLAM/ch8/data/associate.py | python | associate | (first_list, second_list,offset,max_difference) | return matches | Associate two dictionaries of (stamp,data). As the time stamps never match exactly, we aim
to find the closest match for every input tuple.
Input:
first_list -- first dictionary of (stamp,data) tuples
second_list -- second dictionary of (stamp,data) tuples
offset -- time offset between both dictionaries (e.g., to model the delay between the sensors)
max_difference -- search radius for candidate generation
Output:
matches -- list of matched tuples ((stamp1,data1),(stamp2,data2)) | Associate two dictionaries of (stamp,data). As the time stamps never match exactly, we aim
to find the closest match for every input tuple.
Input:
first_list -- first dictionary of (stamp,data) tuples
second_list -- second dictionary of (stamp,data) tuples
offset -- time offset between both dictionaries (e.g., to model the delay between the sensors)
max_difference -- search radius for candidate generation | [
"Associate",
"two",
"dictionaries",
"of",
"(",
"stamp",
"data",
")",
".",
"As",
"the",
"time",
"stamps",
"never",
"match",
"exactly",
"we",
"aim",
"to",
"find",
"the",
"closest",
"match",
"for",
"every",
"input",
"tuple",
".",
"Input",
":",
"first_list",
"--",
"first",
"dictionary",
"of",
"(",
"stamp",
"data",
")",
"tuples",
"second_list",
"--",
"second",
"dictionary",
"of",
"(",
"stamp",
"data",
")",
"tuples",
"offset",
"--",
"time",
"offset",
"between",
"both",
"dictionaries",
"(",
"e",
".",
"g",
".",
"to",
"model",
"the",
"delay",
"between",
"the",
"sensors",
")",
"max_difference",
"--",
"search",
"radius",
"for",
"candidate",
"generation"
] | def associate(first_list, second_list,offset,max_difference):
"""
Associate two dictionaries of (stamp,data). As the time stamps never match exactly, we aim
to find the closest match for every input tuple.
Input:
first_list -- first dictionary of (stamp,data) tuples
second_list -- second dictionary of (stamp,data) tuples
offset -- time offset between both dictionaries (e.g., to model the delay between the sensors)
max_difference -- search radius for candidate generation
Output:
matches -- list of matched tuples ((stamp1,data1),(stamp2,data2))
"""
first_keys = first_list.keys()
second_keys = second_list.keys()
potential_matches = [(abs(a - (b + offset)), a, b)
for a in first_keys
for b in second_keys
if abs(a - (b + offset)) < max_difference]
potential_matches.sort()
matches = []
for diff, a, b in potential_matches:
if a in first_keys and b in second_keys:
first_keys.remove(a)
second_keys.remove(b)
matches.append((a, b))
matches.sort()
return matches | [
"def",
"associate",
"(",
"first_list",
",",
"second_list",
",",
"offset",
",",
"max_difference",
")",
":",
"first_keys",
"=",
"first_list",
".",
"keys",
"(",
")",
"second_keys",
"=",
"second_list",
".",
"keys",
"(",
")",
"potential_matches",
"=",
"[",
"(",
"abs",
"(",
"a",
"-",
"(",
"b",
"+",
"offset",
")",
")",
",",
"a",
",",
"b",
")",
"for",
"a",
"in",
"first_keys",
"for",
"b",
"in",
"second_keys",
"if",
"abs",
"(",
"a",
"-",
"(",
"b",
"+",
"offset",
")",
")",
"<",
"max_difference",
"]",
"potential_matches",
".",
"sort",
"(",
")",
"matches",
"=",
"[",
"]",
"for",
"diff",
",",
"a",
",",
"b",
"in",
"potential_matches",
":",
"if",
"a",
"in",
"first_keys",
"and",
"b",
"in",
"second_keys",
":",
"first_keys",
".",
"remove",
"(",
"a",
")",
"second_keys",
".",
"remove",
"(",
"b",
")",
"matches",
".",
"append",
"(",
"(",
"a",
",",
"b",
")",
")",
"matches",
".",
"sort",
"(",
")",
"return",
"matches"
] | https://github.com/Ewenwan/MVision/blob/97b394dfa48cb21c82cd003b1a952745e413a17f/vSLAM/ch8/data/associate.py#L71-L101 | |
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/contrib/learn/python/learn/estimators/linear.py | python | LinearRegressor.predict | (self, x=None, input_fn=None, batch_size=None, outputs=None,
as_iterable=True) | return super(LinearRegressor, self).predict(
x=x,
input_fn=input_fn,
batch_size=batch_size,
outputs=outputs,
as_iterable=as_iterable) | Returns predictions for given features.
By default, returns predicted scores. But this default will be dropped
soon. Users should either pass `outputs`, or call `predict_scores` method.
Args:
x: features.
input_fn: Input function. If set, x must be None.
batch_size: Override default batch size.
outputs: list of `str`, name of the output to predict.
If `None`, returns scores.
as_iterable: If True, return an iterable which keeps yielding predictions
for each example until inputs are exhausted. Note: The inputs must
terminate if you want the iterable to terminate (e.g. be sure to pass
num_epochs=1 if you are using something like read_batch_features).
Returns:
Numpy array of predicted scores (or an iterable of predicted scores if
as_iterable is True). If `label_dimension == 1`, the shape of the output
is `[batch_size]`, otherwise the shape is `[batch_size, label_dimension]`.
If `outputs` is set, returns a dict of predictions. | Returns predictions for given features. | [
"Returns",
"predictions",
"for",
"given",
"features",
"."
] | def predict(self, x=None, input_fn=None, batch_size=None, outputs=None,
as_iterable=True):
"""Returns predictions for given features.
By default, returns predicted scores. But this default will be dropped
soon. Users should either pass `outputs`, or call `predict_scores` method.
Args:
x: features.
input_fn: Input function. If set, x must be None.
batch_size: Override default batch size.
outputs: list of `str`, name of the output to predict.
If `None`, returns scores.
as_iterable: If True, return an iterable which keeps yielding predictions
for each example until inputs are exhausted. Note: The inputs must
terminate if you want the iterable to terminate (e.g. be sure to pass
num_epochs=1 if you are using something like read_batch_features).
Returns:
Numpy array of predicted scores (or an iterable of predicted scores if
as_iterable is True). If `label_dimension == 1`, the shape of the output
is `[batch_size]`, otherwise the shape is `[batch_size, label_dimension]`.
If `outputs` is set, returns a dict of predictions.
"""
if not outputs:
return self.predict_scores(
x=x,
input_fn=input_fn,
batch_size=batch_size,
as_iterable=as_iterable)
return super(LinearRegressor, self).predict(
x=x,
input_fn=input_fn,
batch_size=batch_size,
outputs=outputs,
as_iterable=as_iterable) | [
"def",
"predict",
"(",
"self",
",",
"x",
"=",
"None",
",",
"input_fn",
"=",
"None",
",",
"batch_size",
"=",
"None",
",",
"outputs",
"=",
"None",
",",
"as_iterable",
"=",
"True",
")",
":",
"if",
"not",
"outputs",
":",
"return",
"self",
".",
"predict_scores",
"(",
"x",
"=",
"x",
",",
"input_fn",
"=",
"input_fn",
",",
"batch_size",
"=",
"batch_size",
",",
"as_iterable",
"=",
"as_iterable",
")",
"return",
"super",
"(",
"LinearRegressor",
",",
"self",
")",
".",
"predict",
"(",
"x",
"=",
"x",
",",
"input_fn",
"=",
"input_fn",
",",
"batch_size",
"=",
"batch_size",
",",
"outputs",
"=",
"outputs",
",",
"as_iterable",
"=",
"as_iterable",
")"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/learn/python/learn/estimators/linear.py#L768-L803 | |
pmq20/node-packer | 12c46c6e44fbc14d9ee645ebd17d5296b324f7e0 | current/tools/configure.d/nodedownload.py | python | reporthook | (count, size, total) | internal hook used by retrievefile | internal hook used by retrievefile | [
"internal",
"hook",
"used",
"by",
"retrievefile"
] | def reporthook(count, size, total):
"""internal hook used by retrievefile"""
sys.stdout.write(' Fetch: %c %sMB total, %sMB downloaded \r' %
(spin(count),
formatSize(total),
formatSize(count*size))) | [
"def",
"reporthook",
"(",
"count",
",",
"size",
",",
"total",
")",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"' Fetch: %c %sMB total, %sMB downloaded \\r'",
"%",
"(",
"spin",
"(",
"count",
")",
",",
"formatSize",
"(",
"total",
")",
",",
"formatSize",
"(",
"count",
"*",
"size",
")",
")",
")"
] | https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/current/tools/configure.d/nodedownload.py#L29-L34 | ||
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow2.x/tensorflow_model_optimization/python/core/quantization/keras/vitis/vitis_quantize.py | python | VitisQuantizer.get_analysed_model | (self, dataset) | return self._analysed_model, model_info | Get analysed model. | Get analysed model. | [
"Get",
"analysed",
"model",
"."
] | def get_analysed_model(self, dataset):
"""Get analysed model."""
if not self._analyse_model:
with self._custom_object_scope:
model_info = self._create_analysed_model(dataset)
return self._analysed_model, model_info | [
"def",
"get_analysed_model",
"(",
"self",
",",
"dataset",
")",
":",
"if",
"not",
"self",
".",
"_analyse_model",
":",
"with",
"self",
".",
"_custom_object_scope",
":",
"model_info",
"=",
"self",
".",
"_create_analysed_model",
"(",
"dataset",
")",
"return",
"self",
".",
"_analysed_model",
",",
"model_info"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow2.x/tensorflow_model_optimization/python/core/quantization/keras/vitis/vitis_quantize.py#L457-L462 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/_controls.py | python | ToolBarBase.GetToolClientData | (*args, **kwargs) | return _controls_.ToolBarBase_GetToolClientData(*args, **kwargs) | GetToolClientData(self, int id) -> PyObject | GetToolClientData(self, int id) -> PyObject | [
"GetToolClientData",
"(",
"self",
"int",
"id",
")",
"-",
">",
"PyObject"
] | def GetToolClientData(*args, **kwargs):
"""GetToolClientData(self, int id) -> PyObject"""
return _controls_.ToolBarBase_GetToolClientData(*args, **kwargs) | [
"def",
"GetToolClientData",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_controls_",
".",
"ToolBarBase_GetToolClientData",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_controls.py#L3803-L3805 | |
cyberbotics/webots | af7fa7d68dcf7b4550f1f2e132092b41e83698fc | projects/humans/c3d/controllers/c3d_viewer/c3d.py | python | Param.uint32_value | (self) | return self._as('I') | Get the param as a 32-bit unsigned integer. | Get the param as a 32-bit unsigned integer. | [
"Get",
"the",
"param",
"as",
"a",
"32",
"-",
"bit",
"unsigned",
"integer",
"."
] | def uint32_value(self):
'''Get the param as a 32-bit unsigned integer.'''
return self._as('I') | [
"def",
"uint32_value",
"(",
"self",
")",
":",
"return",
"self",
".",
"_as",
"(",
"'I'",
")"
] | https://github.com/cyberbotics/webots/blob/af7fa7d68dcf7b4550f1f2e132092b41e83698fc/projects/humans/c3d/controllers/c3d_viewer/c3d.py#L305-L307 | |
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/contrib/keras/python/keras/backend.py | python | spatial_2d_padding | (x, padding=((1, 1), (1, 1)), data_format=None) | return array_ops.pad(x, pattern) | Pads the 2nd and 3rd dimensions of a 4D tensor.
Arguments:
x: Tensor or variable.
padding: Tuple of 2 tuples, padding pattern.
data_format: One of `channels_last` or `channels_first`.
Returns:
A padded 4D tensor.
Raises:
ValueError: if `data_format` is neither
`channels_last` or `channels_first`. | Pads the 2nd and 3rd dimensions of a 4D tensor. | [
"Pads",
"the",
"2nd",
"and",
"3rd",
"dimensions",
"of",
"a",
"4D",
"tensor",
"."
] | def spatial_2d_padding(x, padding=((1, 1), (1, 1)), data_format=None):
"""Pads the 2nd and 3rd dimensions of a 4D tensor.
Arguments:
x: Tensor or variable.
padding: Tuple of 2 tuples, padding pattern.
data_format: One of `channels_last` or `channels_first`.
Returns:
A padded 4D tensor.
Raises:
ValueError: if `data_format` is neither
`channels_last` or `channels_first`.
"""
assert len(padding) == 2
assert len(padding[0]) == 2
assert len(padding[1]) == 2
if data_format is None:
data_format = image_data_format()
if data_format not in {'channels_first', 'channels_last'}:
raise ValueError('Unknown data_format ' + str(data_format))
if data_format == 'channels_first':
pattern = [[0, 0], [0, 0], list(padding[0]), list(padding[1])]
else:
pattern = [[0, 0], list(padding[0]), list(padding[1]), [0, 0]]
return array_ops.pad(x, pattern) | [
"def",
"spatial_2d_padding",
"(",
"x",
",",
"padding",
"=",
"(",
"(",
"1",
",",
"1",
")",
",",
"(",
"1",
",",
"1",
")",
")",
",",
"data_format",
"=",
"None",
")",
":",
"assert",
"len",
"(",
"padding",
")",
"==",
"2",
"assert",
"len",
"(",
"padding",
"[",
"0",
"]",
")",
"==",
"2",
"assert",
"len",
"(",
"padding",
"[",
"1",
"]",
")",
"==",
"2",
"if",
"data_format",
"is",
"None",
":",
"data_format",
"=",
"image_data_format",
"(",
")",
"if",
"data_format",
"not",
"in",
"{",
"'channels_first'",
",",
"'channels_last'",
"}",
":",
"raise",
"ValueError",
"(",
"'Unknown data_format '",
"+",
"str",
"(",
"data_format",
")",
")",
"if",
"data_format",
"==",
"'channels_first'",
":",
"pattern",
"=",
"[",
"[",
"0",
",",
"0",
"]",
",",
"[",
"0",
",",
"0",
"]",
",",
"list",
"(",
"padding",
"[",
"0",
"]",
")",
",",
"list",
"(",
"padding",
"[",
"1",
"]",
")",
"]",
"else",
":",
"pattern",
"=",
"[",
"[",
"0",
",",
"0",
"]",
",",
"list",
"(",
"padding",
"[",
"0",
"]",
")",
",",
"list",
"(",
"padding",
"[",
"1",
"]",
")",
",",
"[",
"0",
",",
"0",
"]",
"]",
"return",
"array_ops",
".",
"pad",
"(",
"x",
",",
"pattern",
")"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/keras/python/keras/backend.py#L2148-L2175 | |
devpack/android-python27 | d42dd67565e104cf7b0b50eb473f615db3e69901 | python-build-with-qt/sip-4.11.2/siputils.py | python | Makefile.optional_string | (self, name, default="") | return s | Return an optional Makefile macro as a string.
name is the name of the macro.
default is the default value | Return an optional Makefile macro as a string. | [
"Return",
"an",
"optional",
"Makefile",
"macro",
"as",
"a",
"string",
"."
] | def optional_string(self, name, default=""):
"""Return an optional Makefile macro as a string.
name is the name of the macro.
default is the default value
"""
s = ' '.join(self.optional_list(name))
if not s:
s = default
return s | [
"def",
"optional_string",
"(",
"self",
",",
"name",
",",
"default",
"=",
"\"\"",
")",
":",
"s",
"=",
"' '",
".",
"join",
"(",
"self",
".",
"optional_list",
"(",
"name",
")",
")",
"if",
"not",
"s",
":",
"s",
"=",
"default",
"return",
"s"
] | https://github.com/devpack/android-python27/blob/d42dd67565e104cf7b0b50eb473f615db3e69901/python-build-with-qt/sip-4.11.2/siputils.py#L790-L801 | |
google/clif | cab24d6a105609a65c95a36a1712ae3c20c7b5df | clif/python/postproc.py | python | RuntimeErrorOnFalse | (ok, *args) | return _RaiseOnFalse('RuntimeErrorOnFalse', RuntimeError, ok, *args) | Returns None / arg / (args,...) if ok, otherwise raises RuntimeError. | Returns None / arg / (args,...) if ok, otherwise raises RuntimeError. | [
"Returns",
"None",
"/",
"arg",
"/",
"(",
"args",
"...",
")",
"if",
"ok",
"otherwise",
"raises",
"RuntimeError",
"."
] | def RuntimeErrorOnFalse(ok, *args):
"""Returns None / arg / (args,...) if ok, otherwise raises RuntimeError."""
return _RaiseOnFalse('RuntimeErrorOnFalse', RuntimeError, ok, *args) | [
"def",
"RuntimeErrorOnFalse",
"(",
"ok",
",",
"*",
"args",
")",
":",
"return",
"_RaiseOnFalse",
"(",
"'RuntimeErrorOnFalse'",
",",
"RuntimeError",
",",
"ok",
",",
"*",
"args",
")"
] | https://github.com/google/clif/blob/cab24d6a105609a65c95a36a1712ae3c20c7b5df/clif/python/postproc.py#L54-L56 | |
bareos/bareos | 56a10bb368b0a81e977bb51304033fe49d59efb0 | restapi/bareos_restapi/__init__.py | python | read_catalog_info_for_all_clients | (
response: Response,
current_user: User = Depends(get_current_user),
name: Optional[str] = None,
) | Read status information from catalog about all clients or just one client by name.
Built on console command _llist client_ | Read status information from catalog about all clients or just one client by name.
Built on console command _llist client_ | [
"Read",
"status",
"information",
"from",
"catalog",
"about",
"all",
"clients",
"or",
"just",
"one",
"client",
"by",
"name",
".",
"Built",
"on",
"console",
"command",
"_llist",
"client_"
] | def read_catalog_info_for_all_clients(
response: Response,
current_user: User = Depends(get_current_user),
name: Optional[str] = None,
):
"""
Read status information from catalog about all clients or just one client by name.
Built on console command _llist client_
"""
if name:
listCommand = "llist client=%s" % name
else:
listCommand = "llist clients"
try:
responseDict = current_user.jsonDirector.call(listCommand)
except Exception as e:
response.status_code = 500
return {
"message": "Could not read client list from director %s. Message: '%s'"
% (CONFIG_DIRECTOR_NAME, e)
}
if "clients" in responseDict:
totalItems = len(responseDict["clients"])
return {"totalItems": totalItems, "clients": responseDict["clients"]}
else:
response.status_code = 404
return {"message": "No clients found."} | [
"def",
"read_catalog_info_for_all_clients",
"(",
"response",
":",
"Response",
",",
"current_user",
":",
"User",
"=",
"Depends",
"(",
"get_current_user",
")",
",",
"name",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
")",
":",
"if",
"name",
":",
"listCommand",
"=",
"\"llist client=%s\"",
"%",
"name",
"else",
":",
"listCommand",
"=",
"\"llist clients\"",
"try",
":",
"responseDict",
"=",
"current_user",
".",
"jsonDirector",
".",
"call",
"(",
"listCommand",
")",
"except",
"Exception",
"as",
"e",
":",
"response",
".",
"status_code",
"=",
"500",
"return",
"{",
"\"message\"",
":",
"\"Could not read client list from director %s. Message: '%s'\"",
"%",
"(",
"CONFIG_DIRECTOR_NAME",
",",
"e",
")",
"}",
"if",
"\"clients\"",
"in",
"responseDict",
":",
"totalItems",
"=",
"len",
"(",
"responseDict",
"[",
"\"clients\"",
"]",
")",
"return",
"{",
"\"totalItems\"",
":",
"totalItems",
",",
"\"clients\"",
":",
"responseDict",
"[",
"\"clients\"",
"]",
"}",
"else",
":",
"response",
".",
"status_code",
"=",
"404",
"return",
"{",
"\"message\"",
":",
"\"No clients found.\"",
"}"
] | https://github.com/bareos/bareos/blob/56a10bb368b0a81e977bb51304033fe49d59efb0/restapi/bareos_restapi/__init__.py#L429-L455 | ||
FreeCAD/FreeCAD | ba42231b9c6889b89e064d6d563448ed81e376ec | src/Mod/Arch/ArchIFC.py | python | IfcRoot.setupIfcComplexAttributes | (self, obj) | Add the IFC type's complex attributes to the object.
Get the object's IFC type schema, and add the schema for the type's
complex attributes within the IfcData property. | Add the IFC type's complex attributes to the object. | [
"Add",
"the",
"IFC",
"type",
"s",
"complex",
"attributes",
"to",
"the",
"object",
"."
] | def setupIfcComplexAttributes(self, obj):
"""Add the IFC type's complex attributes to the object.
Get the object's IFC type schema, and add the schema for the type's
complex attributes within the IfcData property.
"""
ifcTypeSchema = self.getIfcTypeSchema(obj.IfcType)
if ifcTypeSchema is None:
return
IfcData = obj.IfcData
if "complex_attributes" not in IfcData:
IfcData["complex_attributes"] = "{}"
ifcComplexAttributes = json.loads(IfcData["complex_attributes"])
for attribute in ifcTypeSchema["complex_attributes"]:
if attribute["name"] not in ifcComplexAttributes.keys():
ifcComplexAttributes[attribute["name"]] = {}
IfcData["complex_attributes"] = json.dumps(ifcComplexAttributes)
obj.IfcData = IfcData | [
"def",
"setupIfcComplexAttributes",
"(",
"self",
",",
"obj",
")",
":",
"ifcTypeSchema",
"=",
"self",
".",
"getIfcTypeSchema",
"(",
"obj",
".",
"IfcType",
")",
"if",
"ifcTypeSchema",
"is",
"None",
":",
"return",
"IfcData",
"=",
"obj",
".",
"IfcData",
"if",
"\"complex_attributes\"",
"not",
"in",
"IfcData",
":",
"IfcData",
"[",
"\"complex_attributes\"",
"]",
"=",
"\"{}\"",
"ifcComplexAttributes",
"=",
"json",
".",
"loads",
"(",
"IfcData",
"[",
"\"complex_attributes\"",
"]",
")",
"for",
"attribute",
"in",
"ifcTypeSchema",
"[",
"\"complex_attributes\"",
"]",
":",
"if",
"attribute",
"[",
"\"name\"",
"]",
"not",
"in",
"ifcComplexAttributes",
".",
"keys",
"(",
")",
":",
"ifcComplexAttributes",
"[",
"attribute",
"[",
"\"name\"",
"]",
"]",
"=",
"{",
"}",
"IfcData",
"[",
"\"complex_attributes\"",
"]",
"=",
"json",
".",
"dumps",
"(",
"ifcComplexAttributes",
")",
"obj",
".",
"IfcData",
"=",
"IfcData"
] | https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Arch/ArchIFC.py#L121-L139 | ||
NVIDIA/MDL-SDK | aa9642b2546ad7b6236b5627385d882c2ed83c5d | src/mdl/jit/llvm/dist/examples/Kaleidoscope/MCJIT/cached/genk-timing.py | python | KScriptGenerator.updateCalledFunctionList | (self, callee) | Maintains a list of functions that will actually be called | Maintains a list of functions that will actually be called | [
"Maintains",
"a",
"list",
"of",
"functions",
"that",
"will",
"actually",
"be",
"called"
] | def updateCalledFunctionList(self, callee):
"""Maintains a list of functions that will actually be called"""
# Update the total call count
self.updateTotalCallCount(callee)
# If this function is already in the list, don't do anything else
if callee in self.calledFunctions:
return
# Add this function to the list of those that will be called.
self.calledFunctions.append(callee)
# If this function calls other functions, add them too
if callee in self.calledFunctionTable:
for subCallee in self.calledFunctionTable[callee]:
self.updateCalledFunctionList(subCallee) | [
"def",
"updateCalledFunctionList",
"(",
"self",
",",
"callee",
")",
":",
"# Update the total call count",
"self",
".",
"updateTotalCallCount",
"(",
"callee",
")",
"# If this function is already in the list, don't do anything else",
"if",
"callee",
"in",
"self",
".",
"calledFunctions",
":",
"return",
"# Add this function to the list of those that will be called.",
"self",
".",
"calledFunctions",
".",
"append",
"(",
"callee",
")",
"# If this function calls other functions, add them too",
"if",
"callee",
"in",
"self",
".",
"calledFunctionTable",
":",
"for",
"subCallee",
"in",
"self",
".",
"calledFunctionTable",
"[",
"callee",
"]",
":",
"self",
".",
"updateCalledFunctionList",
"(",
"subCallee",
")"
] | https://github.com/NVIDIA/MDL-SDK/blob/aa9642b2546ad7b6236b5627385d882c2ed83c5d/src/mdl/jit/llvm/dist/examples/Kaleidoscope/MCJIT/cached/genk-timing.py#L66-L78 | ||
microsoft/onnxruntime | f92e47e95b13a240e37caf7b36577983544f98fc | orttraining/orttraining/python/training/ortmodule/_io.py | python | _combine_input_buffers_initializers | (params, onnx_input_names, input_info, buffer_names, inputs, kwargs, device) | return result | Creates forward `*inputs` list from user input and PyTorch initializers
ONNX Runtime forward requires an ordered list of:
* User input: computed from forward InferenceSession
* Initializers: computed from original PyTorch model parameters | Creates forward `*inputs` list from user input and PyTorch initializers | [
"Creates",
"forward",
"*",
"inputs",
"list",
"from",
"user",
"input",
"and",
"PyTorch",
"initializers"
] | def _combine_input_buffers_initializers(params, onnx_input_names, input_info, buffer_names, inputs, kwargs, device):
'''Creates forward `*inputs` list from user input and PyTorch initializers
ONNX Runtime forward requires an ordered list of:
* User input: computed from forward InferenceSession
* Initializers: computed from original PyTorch model parameters
'''
def _expand_inputs(current_input, non_none_inputs):
# The exporter handles input lists by expanding them so that each
# element of the list is its own input.
# ORTModule must match this behavior by also expanding the inputs.
if current_input is None or isinstance(current_input, str):
# Drop all None and string inputs
return
if isinstance(current_input, abc.Sequence):
# If the input is a sequence (like a list), expand the list so that
# each element of the list is an input by itself
for inp in current_input:
_expand_inputs(inp, non_none_inputs)
elif isinstance(current_input, abc.Mapping):
# If the input is a mapping (like a dict), expand the dict so that
# each element of the dict is an input by itself
for _, val in current_input.items():
_expand_inputs(val, non_none_inputs)
else:
# else just collect all the non none inputs within non_none_inputs
non_none_inputs.append(current_input)
# User inputs
non_none_inputs = []
_expand_inputs(inputs, non_none_inputs)
buffer_names_dict = {buffer_name: inp for buffer_name, inp in buffer_names}
result = []
for input_idx, name in enumerate(onnx_input_names):
inp = None
if name in kwargs and kwargs[name] is not None:
# Only use keywords coming from user that are expected by ONNX model
inp = kwargs[name]
if inp is None:
try:
# Only use positionals coming from user that are expected by ONNX model
# if input_idx >= len(input_info.names), IndexError will be thrown
if name != input_info.names[input_idx]:
# When ONNX drops unused inputs, get correct index from user input
# if name is not in input_info.names, ValueError will be thrown
input_idx = input_info.names.index(name)
inp = non_none_inputs[input_idx]
except (IndexError, ValueError):
# ONNX input name is not present in input_info.names.
pass
if inp is None:
# Registered buffers are translated to user_input+initializer in ONNX
try:
inp = buffer_names_dict[name]
except KeyError:
# ONNX input name is not present in the registered buffer dict.
pass
if inp is not None:
if _PrimitiveType.is_primitive_type(inp):
inp = _PrimitiveType.get_tensor(inp, device)
result.append(inp)
else:
raise wrap_exception(ORTModuleONNXModelException,
RuntimeError(f'Input is present in ONNX graph but not provided: {name}.'))
# params is a list of all initializers known to the onnx graph
result.extend(params)
return result | [
"def",
"_combine_input_buffers_initializers",
"(",
"params",
",",
"onnx_input_names",
",",
"input_info",
",",
"buffer_names",
",",
"inputs",
",",
"kwargs",
",",
"device",
")",
":",
"def",
"_expand_inputs",
"(",
"current_input",
",",
"non_none_inputs",
")",
":",
"# The exporter handles input lists by expanding them so that each",
"# element of the list is its own input.",
"# ORTModule must match this behavior by also expanding the inputs.",
"if",
"current_input",
"is",
"None",
"or",
"isinstance",
"(",
"current_input",
",",
"str",
")",
":",
"# Drop all None and string inputs",
"return",
"if",
"isinstance",
"(",
"current_input",
",",
"abc",
".",
"Sequence",
")",
":",
"# If the input is a sequence (like a list), expand the list so that",
"# each element of the list is an input by itself",
"for",
"inp",
"in",
"current_input",
":",
"_expand_inputs",
"(",
"inp",
",",
"non_none_inputs",
")",
"elif",
"isinstance",
"(",
"current_input",
",",
"abc",
".",
"Mapping",
")",
":",
"# If the input is a mapping (like a dict), expand the dict so that",
"# each element of the dict is an input by itself",
"for",
"_",
",",
"val",
"in",
"current_input",
".",
"items",
"(",
")",
":",
"_expand_inputs",
"(",
"val",
",",
"non_none_inputs",
")",
"else",
":",
"# else just collect all the non none inputs within non_none_inputs",
"non_none_inputs",
".",
"append",
"(",
"current_input",
")",
"# User inputs",
"non_none_inputs",
"=",
"[",
"]",
"_expand_inputs",
"(",
"inputs",
",",
"non_none_inputs",
")",
"buffer_names_dict",
"=",
"{",
"buffer_name",
":",
"inp",
"for",
"buffer_name",
",",
"inp",
"in",
"buffer_names",
"}",
"result",
"=",
"[",
"]",
"for",
"input_idx",
",",
"name",
"in",
"enumerate",
"(",
"onnx_input_names",
")",
":",
"inp",
"=",
"None",
"if",
"name",
"in",
"kwargs",
"and",
"kwargs",
"[",
"name",
"]",
"is",
"not",
"None",
":",
"# Only use keywords coming from user that are expected by ONNX model",
"inp",
"=",
"kwargs",
"[",
"name",
"]",
"if",
"inp",
"is",
"None",
":",
"try",
":",
"# Only use positionals coming from user that are expected by ONNX model",
"# if input_idx >= len(input_info.names), IndexError will be thrown",
"if",
"name",
"!=",
"input_info",
".",
"names",
"[",
"input_idx",
"]",
":",
"# When ONNX drops unused inputs, get correct index from user input",
"# if name is not in input_info.names, ValueError will be thrown",
"input_idx",
"=",
"input_info",
".",
"names",
".",
"index",
"(",
"name",
")",
"inp",
"=",
"non_none_inputs",
"[",
"input_idx",
"]",
"except",
"(",
"IndexError",
",",
"ValueError",
")",
":",
"# ONNX input name is not present in input_info.names.",
"pass",
"if",
"inp",
"is",
"None",
":",
"# Registered buffers are translated to user_input+initializer in ONNX",
"try",
":",
"inp",
"=",
"buffer_names_dict",
"[",
"name",
"]",
"except",
"KeyError",
":",
"# ONNX input name is not present in the registered buffer dict.",
"pass",
"if",
"inp",
"is",
"not",
"None",
":",
"if",
"_PrimitiveType",
".",
"is_primitive_type",
"(",
"inp",
")",
":",
"inp",
"=",
"_PrimitiveType",
".",
"get_tensor",
"(",
"inp",
",",
"device",
")",
"result",
".",
"append",
"(",
"inp",
")",
"else",
":",
"raise",
"wrap_exception",
"(",
"ORTModuleONNXModelException",
",",
"RuntimeError",
"(",
"f'Input is present in ONNX graph but not provided: {name}.'",
")",
")",
"# params is a list of all initializers known to the onnx graph",
"result",
".",
"extend",
"(",
"params",
")",
"return",
"result"
] | https://github.com/microsoft/onnxruntime/blob/f92e47e95b13a240e37caf7b36577983544f98fc/orttraining/orttraining/python/training/ortmodule/_io.py#L132-L206 | |
ChromiumWebApps/chromium | c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7 | tools/git/move_source_file.py | python | MakeIncludeGuardName | (path_from_root) | return guard.upper() | Returns an include guard name given a path from root. | Returns an include guard name given a path from root. | [
"Returns",
"an",
"include",
"guard",
"name",
"given",
"a",
"path",
"from",
"root",
"."
] | def MakeIncludeGuardName(path_from_root):
"""Returns an include guard name given a path from root."""
guard = path_from_root.replace('/', '_')
guard = guard.replace('\\', '_')
guard = guard.replace('.', '_')
guard += '_'
return guard.upper() | [
"def",
"MakeIncludeGuardName",
"(",
"path_from_root",
")",
":",
"guard",
"=",
"path_from_root",
".",
"replace",
"(",
"'/'",
",",
"'_'",
")",
"guard",
"=",
"guard",
".",
"replace",
"(",
"'\\\\'",
",",
"'_'",
")",
"guard",
"=",
"guard",
".",
"replace",
"(",
"'.'",
",",
"'_'",
")",
"guard",
"+=",
"'_'",
"return",
"guard",
".",
"upper",
"(",
")"
] | https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/git/move_source_file.py#L119-L125 | |
hughperkins/tf-coriander | 970d3df6c11400ad68405f22b0c42a52374e94ca | tensorflow/contrib/slim/python/slim/model_analyzer.py | python | analyze_ops | (graph, print_info=False) | return total_size | Compute the estimated size of the ops.outputs in the graph.
Args:
graph: the graph containing the operations.
print_info: Optional, if true print ops and their outputs.
Returns:
total size of the ops.outputs | Compute the estimated size of the ops.outputs in the graph. | [
"Compute",
"the",
"estimated",
"size",
"of",
"the",
"ops",
".",
"outputs",
"in",
"the",
"graph",
"."
] | def analyze_ops(graph, print_info=False):
"""Compute the estimated size of the ops.outputs in the graph.
Args:
graph: the graph containing the operations.
print_info: Optional, if true print ops and their outputs.
Returns:
total size of the ops.outputs
"""
if print_info:
print('---------')
print('Operations: name -> (type shapes) [size]')
print('---------')
total_size = 0
for op in graph.get_operations():
op_size = 0
shapes = []
for output in op.outputs:
# if output.num_elements() is None or [] assume size 0.
output_size = output.get_shape().num_elements() or 0
if output.get_shape():
shapes.append(tensor_description(output))
op_size += output_size
if print_info:
print(op.name, '\t->', ', '.join(shapes), '[' + str(op_size) + ']')
total_size += op_size
return total_size | [
"def",
"analyze_ops",
"(",
"graph",
",",
"print_info",
"=",
"False",
")",
":",
"if",
"print_info",
":",
"print",
"(",
"'---------'",
")",
"print",
"(",
"'Operations: name -> (type shapes) [size]'",
")",
"print",
"(",
"'---------'",
")",
"total_size",
"=",
"0",
"for",
"op",
"in",
"graph",
".",
"get_operations",
"(",
")",
":",
"op_size",
"=",
"0",
"shapes",
"=",
"[",
"]",
"for",
"output",
"in",
"op",
".",
"outputs",
":",
"# if output.num_elements() is None or [] assume size 0.",
"output_size",
"=",
"output",
".",
"get_shape",
"(",
")",
".",
"num_elements",
"(",
")",
"or",
"0",
"if",
"output",
".",
"get_shape",
"(",
")",
":",
"shapes",
".",
"append",
"(",
"tensor_description",
"(",
"output",
")",
")",
"op_size",
"+=",
"output_size",
"if",
"print_info",
":",
"print",
"(",
"op",
".",
"name",
",",
"'\\t->'",
",",
"', '",
".",
"join",
"(",
"shapes",
")",
",",
"'['",
"+",
"str",
"(",
"op_size",
")",
"+",
"']'",
")",
"total_size",
"+=",
"op_size",
"return",
"total_size"
] | https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/contrib/slim/python/slim/model_analyzer.py#L53-L80 | |
snap-stanford/snap-python | d53c51b0a26aa7e3e7400b014cdf728948fde80a | setup/snap.py | python | TIntIntVH.Swap | (self, *args) | return _snap.TIntIntVH_Swap(self, *args) | Swap(TIntIntVH self, TIntIntVH Hash)
Parameters:
Hash: THash< TInt,TVec< TInt,int > > & | Swap(TIntIntVH self, TIntIntVH Hash) | [
"Swap",
"(",
"TIntIntVH",
"self",
"TIntIntVH",
"Hash",
")"
] | def Swap(self, *args):
"""
Swap(TIntIntVH self, TIntIntVH Hash)
Parameters:
Hash: THash< TInt,TVec< TInt,int > > &
"""
return _snap.TIntIntVH_Swap(self, *args) | [
"def",
"Swap",
"(",
"self",
",",
"*",
"args",
")",
":",
"return",
"_snap",
".",
"TIntIntVH_Swap",
"(",
"self",
",",
"*",
"args",
")"
] | https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L18098-L18106 | |
openmm/openmm | cb293447c4fc8b03976dfe11399f107bab70f3d9 | wrappers/python/openmm/unit/quantity.py | python | Quantity.min | (self, *args, **kwargs) | return Quantity(mymin, self.unit) | Computes the minimum value of the sequence, with the result having the
same unit as the current sequence.
If the value is not iterable, it raises a TypeError
This function can take as arguments any arguments recognized by
`numpy.min`. If arguments are passed to a non-numpy array, a TypeError
is raised | Computes the minimum value of the sequence, with the result having the
same unit as the current sequence. | [
"Computes",
"the",
"minimum",
"value",
"of",
"the",
"sequence",
"with",
"the",
"result",
"having",
"the",
"same",
"unit",
"as",
"the",
"current",
"sequence",
"."
] | def min(self, *args, **kwargs):
"""
Computes the minimum value of the sequence, with the result having the
same unit as the current sequence.
If the value is not iterable, it raises a TypeError
This function can take as arguments any arguments recognized by
`numpy.min`. If arguments are passed to a non-numpy array, a TypeError
is raised
"""
try:
# Faster for numpy arrays
mymin = self._value.min(*args, **kwargs)
except AttributeError:
if args or kwargs:
raise TypeError('Unsupported arguments for Quantity.min')
mymin = min(self._value)
return Quantity(mymin, self.unit) | [
"def",
"min",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"# Faster for numpy arrays",
"mymin",
"=",
"self",
".",
"_value",
".",
"min",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"AttributeError",
":",
"if",
"args",
"or",
"kwargs",
":",
"raise",
"TypeError",
"(",
"'Unsupported arguments for Quantity.min'",
")",
"mymin",
"=",
"min",
"(",
"self",
".",
"_value",
")",
"return",
"Quantity",
"(",
"mymin",
",",
"self",
".",
"unit",
")"
] | https://github.com/openmm/openmm/blob/cb293447c4fc8b03976dfe11399f107bab70f3d9/wrappers/python/openmm/unit/quantity.py#L549-L567 | |
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/python/ops/lookup_ops.py | python | index_table_from_file | (vocabulary_file=None,
num_oov_buckets=0,
vocab_size=None,
default_value=-1,
hasher_spec=FastHashSpec,
key_dtype=dtypes.string,
name=None) | Returns a lookup table that converts a string tensor into int64 IDs.
This operation constructs a lookup table to convert tensor of strings into
int64 IDs. The mapping can be initialized from a vocabulary file specified in
`vocabulary_file`, where the whole line is the key and the zero-based line
number is the ID.
Any lookup of an out-of-vocabulary token will return a bucket ID based on its
hash if `num_oov_buckets` is greater than zero. Otherwise it is assigned the
`default_value`.
The bucket ID range is
`[vocabulary size, vocabulary size + num_oov_buckets - 1]`.
The underlying table must be initialized by calling
`tf.tables_initializer.run()` or `table.init.run()` once.
Sample Usages:
If we have a vocabulary file "test.txt" with the following content:
```
emerson
lake
palmer
```
```python
features = tf.constant(["emerson", "lake", "and", "palmer"])
table = tf.contrib.lookup.index_table_from_file(
vocabulary_file="test.txt", num_oov_buckets=1)
ids = table.lookup(features)
...
tf.tables_initializer().run()
ids.eval() ==> [0, 1, 3, 2] # where 3 is the out-of-vocabulary bucket
```
Args:
vocabulary_file: The vocabulary filename, may be a constant scalar `Tensor`.
num_oov_buckets: The number of out-of-vocabulary buckets.
vocab_size: Number of the elements in the vocabulary, if known.
default_value: The value to use for out-of-vocabulary feature values.
Defaults to -1.
hasher_spec: A `HasherSpec` to specify the hash function to use for
assignation of out-of-vocabulary buckets.
key_dtype: The `key` data type.
name: A name for this op (optional).
Returns:
The lookup table to map a `key_dtype` `Tensor` to index `int64` `Tensor`.
Raises:
ValueError: If `vocabulary_file` is not set.
ValueError: If `num_oov_buckets` is negative or `vocab_size` is not greater
than zero. | Returns a lookup table that converts a string tensor into int64 IDs. | [
"Returns",
"a",
"lookup",
"table",
"that",
"converts",
"a",
"string",
"tensor",
"into",
"int64",
"IDs",
"."
] | def index_table_from_file(vocabulary_file=None,
num_oov_buckets=0,
vocab_size=None,
default_value=-1,
hasher_spec=FastHashSpec,
key_dtype=dtypes.string,
name=None):
"""Returns a lookup table that converts a string tensor into int64 IDs.
This operation constructs a lookup table to convert tensor of strings into
int64 IDs. The mapping can be initialized from a vocabulary file specified in
`vocabulary_file`, where the whole line is the key and the zero-based line
number is the ID.
Any lookup of an out-of-vocabulary token will return a bucket ID based on its
hash if `num_oov_buckets` is greater than zero. Otherwise it is assigned the
`default_value`.
The bucket ID range is
`[vocabulary size, vocabulary size + num_oov_buckets - 1]`.
The underlying table must be initialized by calling
`tf.tables_initializer.run()` or `table.init.run()` once.
Sample Usages:
If we have a vocabulary file "test.txt" with the following content:
```
emerson
lake
palmer
```
```python
features = tf.constant(["emerson", "lake", "and", "palmer"])
table = tf.contrib.lookup.index_table_from_file(
vocabulary_file="test.txt", num_oov_buckets=1)
ids = table.lookup(features)
...
tf.tables_initializer().run()
ids.eval() ==> [0, 1, 3, 2] # where 3 is the out-of-vocabulary bucket
```
Args:
vocabulary_file: The vocabulary filename, may be a constant scalar `Tensor`.
num_oov_buckets: The number of out-of-vocabulary buckets.
vocab_size: Number of the elements in the vocabulary, if known.
default_value: The value to use for out-of-vocabulary feature values.
Defaults to -1.
hasher_spec: A `HasherSpec` to specify the hash function to use for
assignation of out-of-vocabulary buckets.
key_dtype: The `key` data type.
name: A name for this op (optional).
Returns:
The lookup table to map a `key_dtype` `Tensor` to index `int64` `Tensor`.
Raises:
ValueError: If `vocabulary_file` is not set.
ValueError: If `num_oov_buckets` is negative or `vocab_size` is not greater
than zero.
"""
if vocabulary_file is None or (
isinstance(vocabulary_file, str) and not vocabulary_file):
raise ValueError("vocabulary_file must be specified and must not be empty.")
if num_oov_buckets < 0:
raise ValueError("num_oov_buckets must be greater or equal than 0, got %d."
% num_oov_buckets)
if vocab_size is not None and vocab_size < 1:
raise ValueError("vocab_size must be greater than 0, got %d." % vocab_size)
if (not key_dtype.is_integer) and (dtypes.string != key_dtype.base_dtype):
raise TypeError("Only integer and string keys are supported.")
with ops.name_scope(name, "string_to_index") as feat_to_id_scope:
table = None
shared_name = ""
with ops.name_scope(None, "hash_table") as hash_table_scope:
if vocab_size:
# Keep the shared_name:
# <table_type>_<filename>_<vocab_size>_<key_index>_<value_index>
shared_name = "hash_table_%s_%d_%s_%s" % (vocabulary_file, vocab_size,
TextFileIndex.WHOLE_LINE,
TextFileIndex.LINE_NUMBER)
else:
# Keep the shared_name
# <table_type>_<filename>_<key_index>_<value_index>
shared_name = "hash_table_%s_%s_%s" % (vocabulary_file,
TextFileIndex.WHOLE_LINE,
TextFileIndex.LINE_NUMBER)
init = TextFileIdTableInitializer(
vocabulary_file,
vocab_size=vocab_size,
key_dtype=dtypes.int64 if key_dtype.is_integer else key_dtype,
name="table_init")
table = HashTable(
init, default_value, shared_name=shared_name, name=hash_table_scope)
if num_oov_buckets:
table = IdTableWithHashBuckets(
table,
num_oov_buckets=num_oov_buckets,
hasher_spec=hasher_spec,
name=feat_to_id_scope,
key_dtype=key_dtype)
return table | [
"def",
"index_table_from_file",
"(",
"vocabulary_file",
"=",
"None",
",",
"num_oov_buckets",
"=",
"0",
",",
"vocab_size",
"=",
"None",
",",
"default_value",
"=",
"-",
"1",
",",
"hasher_spec",
"=",
"FastHashSpec",
",",
"key_dtype",
"=",
"dtypes",
".",
"string",
",",
"name",
"=",
"None",
")",
":",
"if",
"vocabulary_file",
"is",
"None",
"or",
"(",
"isinstance",
"(",
"vocabulary_file",
",",
"str",
")",
"and",
"not",
"vocabulary_file",
")",
":",
"raise",
"ValueError",
"(",
"\"vocabulary_file must be specified and must not be empty.\"",
")",
"if",
"num_oov_buckets",
"<",
"0",
":",
"raise",
"ValueError",
"(",
"\"num_oov_buckets must be greater or equal than 0, got %d.\"",
"%",
"num_oov_buckets",
")",
"if",
"vocab_size",
"is",
"not",
"None",
"and",
"vocab_size",
"<",
"1",
":",
"raise",
"ValueError",
"(",
"\"vocab_size must be greater than 0, got %d.\"",
"%",
"vocab_size",
")",
"if",
"(",
"not",
"key_dtype",
".",
"is_integer",
")",
"and",
"(",
"dtypes",
".",
"string",
"!=",
"key_dtype",
".",
"base_dtype",
")",
":",
"raise",
"TypeError",
"(",
"\"Only integer and string keys are supported.\"",
")",
"with",
"ops",
".",
"name_scope",
"(",
"name",
",",
"\"string_to_index\"",
")",
"as",
"feat_to_id_scope",
":",
"table",
"=",
"None",
"shared_name",
"=",
"\"\"",
"with",
"ops",
".",
"name_scope",
"(",
"None",
",",
"\"hash_table\"",
")",
"as",
"hash_table_scope",
":",
"if",
"vocab_size",
":",
"# Keep the shared_name:",
"# <table_type>_<filename>_<vocab_size>_<key_index>_<value_index>",
"shared_name",
"=",
"\"hash_table_%s_%d_%s_%s\"",
"%",
"(",
"vocabulary_file",
",",
"vocab_size",
",",
"TextFileIndex",
".",
"WHOLE_LINE",
",",
"TextFileIndex",
".",
"LINE_NUMBER",
")",
"else",
":",
"# Keep the shared_name",
"# <table_type>_<filename>_<key_index>_<value_index>",
"shared_name",
"=",
"\"hash_table_%s_%s_%s\"",
"%",
"(",
"vocabulary_file",
",",
"TextFileIndex",
".",
"WHOLE_LINE",
",",
"TextFileIndex",
".",
"LINE_NUMBER",
")",
"init",
"=",
"TextFileIdTableInitializer",
"(",
"vocabulary_file",
",",
"vocab_size",
"=",
"vocab_size",
",",
"key_dtype",
"=",
"dtypes",
".",
"int64",
"if",
"key_dtype",
".",
"is_integer",
"else",
"key_dtype",
",",
"name",
"=",
"\"table_init\"",
")",
"table",
"=",
"HashTable",
"(",
"init",
",",
"default_value",
",",
"shared_name",
"=",
"shared_name",
",",
"name",
"=",
"hash_table_scope",
")",
"if",
"num_oov_buckets",
":",
"table",
"=",
"IdTableWithHashBuckets",
"(",
"table",
",",
"num_oov_buckets",
"=",
"num_oov_buckets",
",",
"hasher_spec",
"=",
"hasher_spec",
",",
"name",
"=",
"feat_to_id_scope",
",",
"key_dtype",
"=",
"key_dtype",
")",
"return",
"table"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/lookup_ops.py#L856-L962 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.