nwo stringlengths 5 86 | sha stringlengths 40 40 | path stringlengths 4 189 | language stringclasses 1 value | identifier stringlengths 1 94 | parameters stringlengths 2 4.03k | argument_list stringclasses 1 value | return_statement stringlengths 0 11.5k | docstring stringlengths 1 33.2k | docstring_summary stringlengths 0 5.15k | docstring_tokens list | function stringlengths 34 151k | function_tokens list | url stringlengths 90 278 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
francinexue/xuefu | b6ff79747a42e020588c0c0a921048e08fe4680c | ctpx/ctp2/ctptd.py | python | CtpTd.onRspQryTransferSerial | (self, TransferSerialField, RspInfoField, requestId, final) | 请求查询转帐流水响应 | 请求查询转帐流水响应 | [
"请求查询转帐流水响应"
] | def onRspQryTransferSerial(self, TransferSerialField, RspInfoField, requestId, final):
"""请求查询转帐流水响应"""
pass | [
"def",
"onRspQryTransferSerial",
"(",
"self",
",",
"TransferSerialField",
",",
"RspInfoField",
",",
"requestId",
",",
"final",
")",
":",
"pass"
] | https://github.com/francinexue/xuefu/blob/b6ff79747a42e020588c0c0a921048e08fe4680c/ctpx/ctp2/ctptd.py#L338-L340 | ||
natanielruiz/android-yolo | 1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f | jni-build/jni/include/tensorflow/contrib/learn/python/learn/dataframe/series.py | python | Series.build | (self, cache, **kwargs) | Returns a Tensor. | Returns a Tensor. | [
"Returns",
"a",
"Tensor",
"."
] | def build(self, cache, **kwargs):
"""Returns a Tensor."""
raise NotImplementedError() | [
"def",
"build",
"(",
"self",
",",
"cache",
",",
"*",
"*",
"kwargs",
")",
":",
"raise",
"NotImplementedError",
"(",
")"
] | https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/contrib/learn/python/learn/dataframe/series.py#L101-L103 | ||
hakuna-m/wubiuefi | caec1af0a09c78fd5a345180ada1fe45e0c63493 | src/sets/sets.py | python | BaseSet.intersection | (self, other) | return self.__class__(common) | Return the intersection of two sets as a new set.
(I.e. all elements that are in both sets.) | Return the intersection of two sets as a new set. | [
"Return",
"the",
"intersection",
"of",
"two",
"sets",
"as",
"a",
"new",
"set",
"."
] | def intersection(self, other):
"""Return the intersection of two sets as a new set.
(I.e. all elements that are in both sets.)
"""
if not isinstance(other, BaseSet):
other = Set(other)
if len(self) <= len(other):
little, big = self, other
else:
little, big = other, self
common = ifilter(big._data.has_key, little)
return self.__class__(common) | [
"def",
"intersection",
"(",
"self",
",",
"other",
")",
":",
"if",
"not",
"isinstance",
"(",
"other",
",",
"BaseSet",
")",
":",
"other",
"=",
"Set",
"(",
"other",
")",
"if",
"len",
"(",
"self",
")",
"<=",
"len",
"(",
"other",
")",
":",
"little",
",",
"big",
"=",
"self",
",",
"other",
"else",
":",
"little",
",",
"big",
"=",
"other",
",",
"self",
"common",
"=",
"ifilter",
"(",
"big",
".",
"_data",
".",
"has_key",
",",
"little",
")",
"return",
"self",
".",
"__class__",
"(",
"common",
")"
] | https://github.com/hakuna-m/wubiuefi/blob/caec1af0a09c78fd5a345180ada1fe45e0c63493/src/sets/sets.py#L223-L235 | |
LiquidPlayer/LiquidCore | 9405979363f2353ac9a71ad8ab59685dd7f919c9 | deps/node-10.15.3/tools/gyp/pylib/gyp/generator/eclipse.py | python | GenerateOutput | (target_list, target_dicts, data, params) | Generate an XML settings file that can be imported into a CDT project. | Generate an XML settings file that can be imported into a CDT project. | [
"Generate",
"an",
"XML",
"settings",
"file",
"that",
"can",
"be",
"imported",
"into",
"a",
"CDT",
"project",
"."
] | def GenerateOutput(target_list, target_dicts, data, params):
"""Generate an XML settings file that can be imported into a CDT project."""
if params['options'].generator_output:
raise NotImplementedError("--generator_output not implemented for eclipse")
user_config = params.get('generator_flags', {}).get('config', None)
if user_config:
GenerateOutputForConfig(target_list, target_dicts, data, params,
user_config)
else:
config_names = target_dicts[target_list[0]]['configurations'].keys()
for config_name in config_names:
GenerateOutputForConfig(target_list, target_dicts, data, params,
config_name) | [
"def",
"GenerateOutput",
"(",
"target_list",
",",
"target_dicts",
",",
"data",
",",
"params",
")",
":",
"if",
"params",
"[",
"'options'",
"]",
".",
"generator_output",
":",
"raise",
"NotImplementedError",
"(",
"\"--generator_output not implemented for eclipse\"",
")",
"user_config",
"=",
"params",
".",
"get",
"(",
"'generator_flags'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'config'",
",",
"None",
")",
"if",
"user_config",
":",
"GenerateOutputForConfig",
"(",
"target_list",
",",
"target_dicts",
",",
"data",
",",
"params",
",",
"user_config",
")",
"else",
":",
"config_names",
"=",
"target_dicts",
"[",
"target_list",
"[",
"0",
"]",
"]",
"[",
"'configurations'",
"]",
".",
"keys",
"(",
")",
"for",
"config_name",
"in",
"config_names",
":",
"GenerateOutputForConfig",
"(",
"target_list",
",",
"target_dicts",
",",
"data",
",",
"params",
",",
"config_name",
")"
] | https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/node-10.15.3/tools/gyp/pylib/gyp/generator/eclipse.py#L410-L424 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/Jinja2/py3/jinja2/environment.py | python | TemplateStream.enable_buffering | (self, size: int = 5) | Enable buffering. Buffer `size` items before yielding them. | Enable buffering. Buffer `size` items before yielding them. | [
"Enable",
"buffering",
".",
"Buffer",
"size",
"items",
"before",
"yielding",
"them",
"."
] | def enable_buffering(self, size: int = 5) -> None:
"""Enable buffering. Buffer `size` items before yielding them."""
if size <= 1:
raise ValueError("buffer size too small")
self.buffered = True
self._next = partial(next, self._buffered_generator(size)) | [
"def",
"enable_buffering",
"(",
"self",
",",
"size",
":",
"int",
"=",
"5",
")",
"->",
"None",
":",
"if",
"size",
"<=",
"1",
":",
"raise",
"ValueError",
"(",
"\"buffer size too small\"",
")",
"self",
".",
"buffered",
"=",
"True",
"self",
".",
"_next",
"=",
"partial",
"(",
"next",
",",
"self",
".",
"_buffered_generator",
"(",
"size",
")",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/Jinja2/py3/jinja2/environment.py#L1644-L1650 | ||
google/syzygy | 8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5 | third_party/numpy/files/numpy/ma/extras.py | python | intersect1d | (ar1, ar2, assume_unique=False) | return aux[aux[1:] == aux[:-1]] | Returns the unique elements common to both arrays.
Masked values are considered equal one to the other.
The output is always a masked array.
See `numpy.intersect1d` for more details.
See Also
--------
numpy.intersect1d : Equivalent function for ndarrays.
Examples
--------
>>> x = array([1, 3, 3, 3], mask=[0, 0, 0, 1])
>>> y = array([3, 1, 1, 1], mask=[0, 0, 0, 1])
>>> intersect1d(x, y)
masked_array(data = [1 3 --],
mask = [False False True],
fill_value = 999999) | Returns the unique elements common to both arrays. | [
"Returns",
"the",
"unique",
"elements",
"common",
"to",
"both",
"arrays",
"."
] | def intersect1d(ar1, ar2, assume_unique=False):
"""
Returns the unique elements common to both arrays.
Masked values are considered equal one to the other.
The output is always a masked array.
See `numpy.intersect1d` for more details.
See Also
--------
numpy.intersect1d : Equivalent function for ndarrays.
Examples
--------
>>> x = array([1, 3, 3, 3], mask=[0, 0, 0, 1])
>>> y = array([3, 1, 1, 1], mask=[0, 0, 0, 1])
>>> intersect1d(x, y)
masked_array(data = [1 3 --],
mask = [False False True],
fill_value = 999999)
"""
if assume_unique:
aux = ma.concatenate((ar1, ar2))
else:
# Might be faster than unique( intersect1d( ar1, ar2 ) )?
aux = ma.concatenate((unique(ar1), unique(ar2)))
aux.sort()
return aux[aux[1:] == aux[:-1]] | [
"def",
"intersect1d",
"(",
"ar1",
",",
"ar2",
",",
"assume_unique",
"=",
"False",
")",
":",
"if",
"assume_unique",
":",
"aux",
"=",
"ma",
".",
"concatenate",
"(",
"(",
"ar1",
",",
"ar2",
")",
")",
"else",
":",
"# Might be faster than unique( intersect1d( ar1, ar2 ) )?",
"aux",
"=",
"ma",
".",
"concatenate",
"(",
"(",
"unique",
"(",
"ar1",
")",
",",
"unique",
"(",
"ar2",
")",
")",
")",
"aux",
".",
"sort",
"(",
")",
"return",
"aux",
"[",
"aux",
"[",
"1",
":",
"]",
"==",
"aux",
"[",
":",
"-",
"1",
"]",
"]"
] | https://github.com/google/syzygy/blob/8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5/third_party/numpy/files/numpy/ma/extras.py#L1064-L1093 | |
pmq20/node-packer | 12c46c6e44fbc14d9ee645ebd17d5296b324f7e0 | current/tools/gyp/pylib/gyp/generator/msvs.py | python | _AddActionStep | (actions_dict, inputs, outputs, description, command) | Merge action into an existing list of actions.
Care must be taken so that actions which have overlapping inputs either don't
get assigned to the same input, or get collapsed into one.
Arguments:
actions_dict: dictionary keyed on input name, which maps to a list of
dicts describing the actions attached to that input file.
inputs: list of inputs
outputs: list of outputs
description: description of the action
command: command line to execute | Merge action into an existing list of actions. | [
"Merge",
"action",
"into",
"an",
"existing",
"list",
"of",
"actions",
"."
] | def _AddActionStep(actions_dict, inputs, outputs, description, command):
"""Merge action into an existing list of actions.
Care must be taken so that actions which have overlapping inputs either don't
get assigned to the same input, or get collapsed into one.
Arguments:
actions_dict: dictionary keyed on input name, which maps to a list of
dicts describing the actions attached to that input file.
inputs: list of inputs
outputs: list of outputs
description: description of the action
command: command line to execute
"""
# Require there to be at least one input (call sites will ensure this).
assert inputs
action = {
'inputs': inputs,
'outputs': outputs,
'description': description,
'command': command,
}
# Pick where to stick this action.
# While less than optimal in terms of build time, attach them to the first
# input for now.
chosen_input = inputs[0]
# Add it there.
if chosen_input not in actions_dict:
actions_dict[chosen_input] = []
actions_dict[chosen_input].append(action) | [
"def",
"_AddActionStep",
"(",
"actions_dict",
",",
"inputs",
",",
"outputs",
",",
"description",
",",
"command",
")",
":",
"# Require there to be at least one input (call sites will ensure this).",
"assert",
"inputs",
"action",
"=",
"{",
"'inputs'",
":",
"inputs",
",",
"'outputs'",
":",
"outputs",
",",
"'description'",
":",
"description",
",",
"'command'",
":",
"command",
",",
"}",
"# Pick where to stick this action.",
"# While less than optimal in terms of build time, attach them to the first",
"# input for now.",
"chosen_input",
"=",
"inputs",
"[",
"0",
"]",
"# Add it there.",
"if",
"chosen_input",
"not",
"in",
"actions_dict",
":",
"actions_dict",
"[",
"chosen_input",
"]",
"=",
"[",
"]",
"actions_dict",
"[",
"chosen_input",
"]",
".",
"append",
"(",
"action",
")"
] | https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/current/tools/gyp/pylib/gyp/generator/msvs.py#L424-L456 | ||
fatih/subvim | 241b6d170597857105da219c9b7d36059e9f11fb | vim/base/YouCompleteMe/third_party/requests/requests/packages/urllib3/packages/six.py | python | remove_move | (name) | Remove item from six.moves. | Remove item from six.moves. | [
"Remove",
"item",
"from",
"six",
".",
"moves",
"."
] | def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,)) | [
"def",
"remove_move",
"(",
"name",
")",
":",
"try",
":",
"delattr",
"(",
"_MovedItems",
",",
"name",
")",
"except",
"AttributeError",
":",
"try",
":",
"del",
"moves",
".",
"__dict__",
"[",
"name",
"]",
"except",
"KeyError",
":",
"raise",
"AttributeError",
"(",
"\"no such move, %r\"",
"%",
"(",
"name",
",",
")",
")"
] | https://github.com/fatih/subvim/blob/241b6d170597857105da219c9b7d36059e9f11fb/vim/base/YouCompleteMe/third_party/requests/requests/packages/urllib3/packages/six.py#L194-L202 | ||
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/rfc822.py | python | formatdate | (timeval=None) | return "%s, %02d %s %04d %02d:%02d:%02d GMT" % (
("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")[timeval[6]],
timeval[2],
("Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec")[timeval[1]-1],
timeval[0], timeval[3], timeval[4], timeval[5]) | Returns time format preferred for Internet standards.
Sun, 06 Nov 1994 08:49:37 GMT ; RFC 822, updated by RFC 1123
According to RFC 1123, day and month names must always be in
English. If not for that, this code could use strftime(). It
can't because strftime() honors the locale and could generated
non-English names. | Returns time format preferred for Internet standards. | [
"Returns",
"time",
"format",
"preferred",
"for",
"Internet",
"standards",
"."
] | def formatdate(timeval=None):
"""Returns time format preferred for Internet standards.
Sun, 06 Nov 1994 08:49:37 GMT ; RFC 822, updated by RFC 1123
According to RFC 1123, day and month names must always be in
English. If not for that, this code could use strftime(). It
can't because strftime() honors the locale and could generated
non-English names.
"""
if timeval is None:
timeval = time.time()
timeval = time.gmtime(timeval)
return "%s, %02d %s %04d %02d:%02d:%02d GMT" % (
("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")[timeval[6]],
timeval[2],
("Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec")[timeval[1]-1],
timeval[0], timeval[3], timeval[4], timeval[5]) | [
"def",
"formatdate",
"(",
"timeval",
"=",
"None",
")",
":",
"if",
"timeval",
"is",
"None",
":",
"timeval",
"=",
"time",
".",
"time",
"(",
")",
"timeval",
"=",
"time",
".",
"gmtime",
"(",
"timeval",
")",
"return",
"\"%s, %02d %s %04d %02d:%02d:%02d GMT\"",
"%",
"(",
"(",
"\"Mon\"",
",",
"\"Tue\"",
",",
"\"Wed\"",
",",
"\"Thu\"",
",",
"\"Fri\"",
",",
"\"Sat\"",
",",
"\"Sun\"",
")",
"[",
"timeval",
"[",
"6",
"]",
"]",
",",
"timeval",
"[",
"2",
"]",
",",
"(",
"\"Jan\"",
",",
"\"Feb\"",
",",
"\"Mar\"",
",",
"\"Apr\"",
",",
"\"May\"",
",",
"\"Jun\"",
",",
"\"Jul\"",
",",
"\"Aug\"",
",",
"\"Sep\"",
",",
"\"Oct\"",
",",
"\"Nov\"",
",",
"\"Dec\"",
")",
"[",
"timeval",
"[",
"1",
"]",
"-",
"1",
"]",
",",
"timeval",
"[",
"0",
"]",
",",
"timeval",
"[",
"3",
"]",
",",
"timeval",
"[",
"4",
"]",
",",
"timeval",
"[",
"5",
"]",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/rfc822.py#L952-L970 | |
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/rexec.py | python | RExec.s_eval | (self, *args) | return self.s_apply(self.r_eval, args) | Evaluate code within a restricted environment.
Similar to the r_eval() method, but the code will be granted access
to restricted versions of the standard I/O streams sys.stdin,
sys.stderr, and sys.stdout.
The code parameter must either be a string containing a Python
expression, or a compiled code object, which will be evaluated in
the restricted environment's __main__ module. The value of the
expression or code object will be returned. | Evaluate code within a restricted environment. | [
"Evaluate",
"code",
"within",
"a",
"restricted",
"environment",
"."
] | def s_eval(self, *args):
"""Evaluate code within a restricted environment.
Similar to the r_eval() method, but the code will be granted access
to restricted versions of the standard I/O streams sys.stdin,
sys.stderr, and sys.stdout.
The code parameter must either be a string containing a Python
expression, or a compiled code object, which will be evaluated in
the restricted environment's __main__ module. The value of the
expression or code object will be returned.
"""
return self.s_apply(self.r_eval, args) | [
"def",
"s_eval",
"(",
"self",
",",
"*",
"args",
")",
":",
"return",
"self",
".",
"s_apply",
"(",
"self",
".",
"r_eval",
",",
"args",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/rexec.py#L436-L449 | |
google/orbit | 7c0a530f402f0c3753d0bc52f8e3eb620f65d017 | third_party/include-what-you-use/fix_includes.py | python | FileInfo.parse | (filename) | return FileInfo(linesep, encoding) | Return a FileInfo object describing file encoding details. | Return a FileInfo object describing file encoding details. | [
"Return",
"a",
"FileInfo",
"object",
"describing",
"file",
"encoding",
"details",
"."
] | def parse(filename):
""" Return a FileInfo object describing file encoding details. """
with open(filename, 'rb') as f:
content = f.read()
linesep = FileInfo.guess_linesep(content)
encoding = FileInfo.guess_encoding(content)
return FileInfo(linesep, encoding) | [
"def",
"parse",
"(",
"filename",
")",
":",
"with",
"open",
"(",
"filename",
",",
"'rb'",
")",
"as",
"f",
":",
"content",
"=",
"f",
".",
"read",
"(",
")",
"linesep",
"=",
"FileInfo",
".",
"guess_linesep",
"(",
"content",
")",
"encoding",
"=",
"FileInfo",
".",
"guess_encoding",
"(",
"content",
")",
"return",
"FileInfo",
"(",
"linesep",
",",
"encoding",
")"
] | https://github.com/google/orbit/blob/7c0a530f402f0c3753d0bc52f8e3eb620f65d017/third_party/include-what-you-use/fix_includes.py#L540-L547 | |
klzgrad/naiveproxy | ed2c513637c77b18721fe428d7ed395b4d284c83 | src/build/fuchsia/binary_sizes.py | python | GetBlobs | (far_file, build_out_dir) | return blobs | Calculates compressed and uncompressed blob sizes for specified FAR file.
Marks ICU blobs and blobs from SDK libraries as not counted. | Calculates compressed and uncompressed blob sizes for specified FAR file.
Marks ICU blobs and blobs from SDK libraries as not counted. | [
"Calculates",
"compressed",
"and",
"uncompressed",
"blob",
"sizes",
"for",
"specified",
"FAR",
"file",
".",
"Marks",
"ICU",
"blobs",
"and",
"blobs",
"from",
"SDK",
"libraries",
"as",
"not",
"counted",
"."
] | def GetBlobs(far_file, build_out_dir):
"""Calculates compressed and uncompressed blob sizes for specified FAR file.
Marks ICU blobs and blobs from SDK libraries as not counted."""
base_name = FarBaseName(far_file)
extract_dir = tempfile.mkdtemp()
# Extract files and blobs from the specified Fuchsia archive.
far_file_path = os.path.join(build_out_dir, far_file)
far_extract_dir = os.path.join(extract_dir, base_name)
ExtractFarFile(far_file_path, far_extract_dir)
# Extract the meta.far archive contained in the specified Fuchsia archive.
meta_far_file_path = os.path.join(far_extract_dir, 'meta.far')
meta_far_extract_dir = os.path.join(extract_dir, '%s_meta' % base_name)
ExtractFarFile(meta_far_file_path, meta_far_extract_dir)
# Map Linux filesystem blob names to blob hashes.
blob_name_hashes = GetBlobNameHashes(meta_far_extract_dir)
# "System" files whose sizes are not charged against component size budgets.
# Fuchsia SDK modules and the ICU icudtl.dat file sizes are not counted.
system_files = GetSdkModules() | set(['icudtl.dat'])
# Add the meta.far file blob.
blobs = {}
meta_name = 'meta.far'
meta_hash = GetPackageMerkleRoot(meta_far_file_path)
compressed = GetCompressedSize(meta_far_file_path)
uncompressed = os.path.getsize(meta_far_file_path)
blobs[meta_name] = Blob(meta_name, meta_hash, compressed, uncompressed, True)
# Add package blobs.
for blob_name, blob_hash in blob_name_hashes.items():
extracted_blob_path = os.path.join(far_extract_dir, blob_hash)
compressed = GetCompressedSize(extracted_blob_path)
uncompressed = os.path.getsize(extracted_blob_path)
is_counted = os.path.basename(blob_name) not in system_files
blobs[blob_name] = Blob(blob_name, blob_hash, compressed, uncompressed,
is_counted)
shutil.rmtree(extract_dir)
return blobs | [
"def",
"GetBlobs",
"(",
"far_file",
",",
"build_out_dir",
")",
":",
"base_name",
"=",
"FarBaseName",
"(",
"far_file",
")",
"extract_dir",
"=",
"tempfile",
".",
"mkdtemp",
"(",
")",
"# Extract files and blobs from the specified Fuchsia archive.",
"far_file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"build_out_dir",
",",
"far_file",
")",
"far_extract_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"extract_dir",
",",
"base_name",
")",
"ExtractFarFile",
"(",
"far_file_path",
",",
"far_extract_dir",
")",
"# Extract the meta.far archive contained in the specified Fuchsia archive.",
"meta_far_file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"far_extract_dir",
",",
"'meta.far'",
")",
"meta_far_extract_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"extract_dir",
",",
"'%s_meta'",
"%",
"base_name",
")",
"ExtractFarFile",
"(",
"meta_far_file_path",
",",
"meta_far_extract_dir",
")",
"# Map Linux filesystem blob names to blob hashes.",
"blob_name_hashes",
"=",
"GetBlobNameHashes",
"(",
"meta_far_extract_dir",
")",
"# \"System\" files whose sizes are not charged against component size budgets.",
"# Fuchsia SDK modules and the ICU icudtl.dat file sizes are not counted.",
"system_files",
"=",
"GetSdkModules",
"(",
")",
"|",
"set",
"(",
"[",
"'icudtl.dat'",
"]",
")",
"# Add the meta.far file blob.",
"blobs",
"=",
"{",
"}",
"meta_name",
"=",
"'meta.far'",
"meta_hash",
"=",
"GetPackageMerkleRoot",
"(",
"meta_far_file_path",
")",
"compressed",
"=",
"GetCompressedSize",
"(",
"meta_far_file_path",
")",
"uncompressed",
"=",
"os",
".",
"path",
".",
"getsize",
"(",
"meta_far_file_path",
")",
"blobs",
"[",
"meta_name",
"]",
"=",
"Blob",
"(",
"meta_name",
",",
"meta_hash",
",",
"compressed",
",",
"uncompressed",
",",
"True",
")",
"# Add package blobs.",
"for",
"blob_name",
",",
"blob_hash",
"in",
"blob_name_hashes",
".",
"items",
"(",
")",
":",
"extracted_blob_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"far_extract_dir",
",",
"blob_hash",
")",
"compressed",
"=",
"GetCompressedSize",
"(",
"extracted_blob_path",
")",
"uncompressed",
"=",
"os",
".",
"path",
".",
"getsize",
"(",
"extracted_blob_path",
")",
"is_counted",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"blob_name",
")",
"not",
"in",
"system_files",
"blobs",
"[",
"blob_name",
"]",
"=",
"Blob",
"(",
"blob_name",
",",
"blob_hash",
",",
"compressed",
",",
"uncompressed",
",",
"is_counted",
")",
"shutil",
".",
"rmtree",
"(",
"extract_dir",
")",
"return",
"blobs"
] | https://github.com/klzgrad/naiveproxy/blob/ed2c513637c77b18721fe428d7ed395b4d284c83/src/build/fuchsia/binary_sizes.py#L351-L395 | |
apache/incubator-weex | 5c25f0b59f7ac90703c363e7261f60bd06356dbe | weex_core/tools/cpplint.py | python | CheckInvalidIncrement | (filename, clean_lines, linenum, error) | Checks for invalid increment *count++.
For example following function:
void increment_counter(int* count) {
*count++;
}
is invalid, because it effectively does count++, moving pointer, and should
be replaced with ++*count, (*count)++ or *count += 1.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found. | Checks for invalid increment *count++. | [
"Checks",
"for",
"invalid",
"increment",
"*",
"count",
"++",
"."
] | def CheckInvalidIncrement(filename, clean_lines, linenum, error):
"""Checks for invalid increment *count++.
For example following function:
void increment_counter(int* count) {
*count++;
}
is invalid, because it effectively does count++, moving pointer, and should
be replaced with ++*count, (*count)++ or *count += 1.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
line = clean_lines.elided[linenum]
if _RE_PATTERN_INVALID_INCREMENT.match(line):
error(filename, linenum, 'runtime/invalid_increment', 5,
'Changing pointer instead of value (or unused value of operator*).') | [
"def",
"CheckInvalidIncrement",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
":",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"if",
"_RE_PATTERN_INVALID_INCREMENT",
".",
"match",
"(",
"line",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'runtime/invalid_increment'",
",",
"5",
",",
"'Changing pointer instead of value (or unused value of operator*).'",
")"
] | https://github.com/apache/incubator-weex/blob/5c25f0b59f7ac90703c363e7261f60bd06356dbe/weex_core/tools/cpplint.py#L2169-L2188 | ||
emscripten-core/emscripten | 0d413d3c5af8b28349682496edc14656f5700c2f | tools/building.py | python | is_wasm_dylib | (filename) | return False | Detect wasm dynamic libraries by the presence of the "dylink" custom section. | Detect wasm dynamic libraries by the presence of the "dylink" custom section. | [
"Detect",
"wasm",
"dynamic",
"libraries",
"by",
"the",
"presence",
"of",
"the",
"dylink",
"custom",
"section",
"."
] | def is_wasm_dylib(filename):
"""Detect wasm dynamic libraries by the presence of the "dylink" custom section."""
if not is_wasm(filename):
return False
module = webassembly.Module(filename)
section = next(module.sections())
if section.type == webassembly.SecType.CUSTOM:
module.seek(section.offset)
if module.readString() in ('dylink', 'dylink.0'):
return True
return False | [
"def",
"is_wasm_dylib",
"(",
"filename",
")",
":",
"if",
"not",
"is_wasm",
"(",
"filename",
")",
":",
"return",
"False",
"module",
"=",
"webassembly",
".",
"Module",
"(",
"filename",
")",
"section",
"=",
"next",
"(",
"module",
".",
"sections",
"(",
")",
")",
"if",
"section",
".",
"type",
"==",
"webassembly",
".",
"SecType",
".",
"CUSTOM",
":",
"module",
".",
"seek",
"(",
"section",
".",
"offset",
")",
"if",
"module",
".",
"readString",
"(",
")",
"in",
"(",
"'dylink'",
",",
"'dylink.0'",
")",
":",
"return",
"True",
"return",
"False"
] | https://github.com/emscripten-core/emscripten/blob/0d413d3c5af8b28349682496edc14656f5700c2f/tools/building.py#L1353-L1363 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Lib/nntplib.py | python | NNTP.__init__ | (self, host, port=NNTP_PORT, user=None, password=None,
readermode=None, usenetrc=True) | Initialize an instance. Arguments:
- host: hostname to connect to
- port: port to connect to (default the standard NNTP port)
- user: username to authenticate with
- password: password to use with username
- readermode: if true, send 'mode reader' command after
connecting.
readermode is sometimes necessary if you are connecting to an
NNTP server on the local machine and intend to call
reader-specific commands, such as `group'. If you get
unexpected NNTPPermanentErrors, you might need to set
readermode. | Initialize an instance. Arguments:
- host: hostname to connect to
- port: port to connect to (default the standard NNTP port)
- user: username to authenticate with
- password: password to use with username
- readermode: if true, send 'mode reader' command after
connecting. | [
"Initialize",
"an",
"instance",
".",
"Arguments",
":",
"-",
"host",
":",
"hostname",
"to",
"connect",
"to",
"-",
"port",
":",
"port",
"to",
"connect",
"to",
"(",
"default",
"the",
"standard",
"NNTP",
"port",
")",
"-",
"user",
":",
"username",
"to",
"authenticate",
"with",
"-",
"password",
":",
"password",
"to",
"use",
"with",
"username",
"-",
"readermode",
":",
"if",
"true",
"send",
"mode",
"reader",
"command",
"after",
"connecting",
"."
] | def __init__(self, host, port=NNTP_PORT, user=None, password=None,
readermode=None, usenetrc=True):
"""Initialize an instance. Arguments:
- host: hostname to connect to
- port: port to connect to (default the standard NNTP port)
- user: username to authenticate with
- password: password to use with username
- readermode: if true, send 'mode reader' command after
connecting.
readermode is sometimes necessary if you are connecting to an
NNTP server on the local machine and intend to call
reader-specific commands, such as `group'. If you get
unexpected NNTPPermanentErrors, you might need to set
readermode.
"""
self.host = host
self.port = port
self.sock = socket.create_connection((host, port))
self.file = self.sock.makefile('rb')
self.debugging = 0
self.welcome = self.getresp()
# 'mode reader' is sometimes necessary to enable 'reader' mode.
# However, the order in which 'mode reader' and 'authinfo' need to
# arrive differs between some NNTP servers. Try to send
# 'mode reader', and if it fails with an authorization failed
# error, try again after sending authinfo.
readermode_afterauth = 0
if readermode:
try:
self.welcome = self.shortcmd('mode reader')
except NNTPPermanentError:
# error 500, probably 'not implemented'
pass
except NNTPTemporaryError, e:
if user and e.response[:3] == '480':
# Need authorization before 'mode reader'
readermode_afterauth = 1
else:
raise
# If no login/password was specified, try to get them from ~/.netrc
# Presume that if .netc has an entry, NNRP authentication is required.
try:
if usenetrc and not user:
import netrc
credentials = netrc.netrc()
auth = credentials.authenticators(host)
if auth:
user = auth[0]
password = auth[2]
except IOError:
pass
# Perform NNRP authentication if needed.
if user:
resp = self.shortcmd('authinfo user '+user)
if resp[:3] == '381':
if not password:
raise NNTPReplyError(resp)
else:
resp = self.shortcmd(
'authinfo pass '+password)
if resp[:3] != '281':
raise NNTPPermanentError(resp)
if readermode_afterauth:
try:
self.welcome = self.shortcmd('mode reader')
except NNTPPermanentError:
# error 500, probably 'not implemented'
pass | [
"def",
"__init__",
"(",
"self",
",",
"host",
",",
"port",
"=",
"NNTP_PORT",
",",
"user",
"=",
"None",
",",
"password",
"=",
"None",
",",
"readermode",
"=",
"None",
",",
"usenetrc",
"=",
"True",
")",
":",
"self",
".",
"host",
"=",
"host",
"self",
".",
"port",
"=",
"port",
"self",
".",
"sock",
"=",
"socket",
".",
"create_connection",
"(",
"(",
"host",
",",
"port",
")",
")",
"self",
".",
"file",
"=",
"self",
".",
"sock",
".",
"makefile",
"(",
"'rb'",
")",
"self",
".",
"debugging",
"=",
"0",
"self",
".",
"welcome",
"=",
"self",
".",
"getresp",
"(",
")",
"# 'mode reader' is sometimes necessary to enable 'reader' mode.",
"# However, the order in which 'mode reader' and 'authinfo' need to",
"# arrive differs between some NNTP servers. Try to send",
"# 'mode reader', and if it fails with an authorization failed",
"# error, try again after sending authinfo.",
"readermode_afterauth",
"=",
"0",
"if",
"readermode",
":",
"try",
":",
"self",
".",
"welcome",
"=",
"self",
".",
"shortcmd",
"(",
"'mode reader'",
")",
"except",
"NNTPPermanentError",
":",
"# error 500, probably 'not implemented'",
"pass",
"except",
"NNTPTemporaryError",
",",
"e",
":",
"if",
"user",
"and",
"e",
".",
"response",
"[",
":",
"3",
"]",
"==",
"'480'",
":",
"# Need authorization before 'mode reader'",
"readermode_afterauth",
"=",
"1",
"else",
":",
"raise",
"# If no login/password was specified, try to get them from ~/.netrc",
"# Presume that if .netc has an entry, NNRP authentication is required.",
"try",
":",
"if",
"usenetrc",
"and",
"not",
"user",
":",
"import",
"netrc",
"credentials",
"=",
"netrc",
".",
"netrc",
"(",
")",
"auth",
"=",
"credentials",
".",
"authenticators",
"(",
"host",
")",
"if",
"auth",
":",
"user",
"=",
"auth",
"[",
"0",
"]",
"password",
"=",
"auth",
"[",
"2",
"]",
"except",
"IOError",
":",
"pass",
"# Perform NNRP authentication if needed.",
"if",
"user",
":",
"resp",
"=",
"self",
".",
"shortcmd",
"(",
"'authinfo user '",
"+",
"user",
")",
"if",
"resp",
"[",
":",
"3",
"]",
"==",
"'381'",
":",
"if",
"not",
"password",
":",
"raise",
"NNTPReplyError",
"(",
"resp",
")",
"else",
":",
"resp",
"=",
"self",
".",
"shortcmd",
"(",
"'authinfo pass '",
"+",
"password",
")",
"if",
"resp",
"[",
":",
"3",
"]",
"!=",
"'281'",
":",
"raise",
"NNTPPermanentError",
"(",
"resp",
")",
"if",
"readermode_afterauth",
":",
"try",
":",
"self",
".",
"welcome",
"=",
"self",
".",
"shortcmd",
"(",
"'mode reader'",
")",
"except",
"NNTPPermanentError",
":",
"# error 500, probably 'not implemented'",
"pass"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/nntplib.py#L101-L170 | ||
tensorflow/deepmath | b5b721f54de1d5d6a02d78f5da5995237f9995f9 | deepmath/guidance/driver_lib.py | python | mode_batch_size | (mode, hparams) | Returns the batch size for a given mode (train or eval).
Args:
mode: Either 'train' or 'eval'.
hparams: Hyperparameters.
Returns:
Integer batch size.
Raises:
ValueError: If mode is not 'train' or 'eval'. | Returns the batch size for a given mode (train or eval). | [
"Returns",
"the",
"batch",
"size",
"for",
"a",
"given",
"mode",
"(",
"train",
"or",
"eval",
")",
"."
] | def mode_batch_size(mode, hparams):
"""Returns the batch size for a given mode (train or eval).
Args:
mode: Either 'train' or 'eval'.
hparams: Hyperparameters.
Returns:
Integer batch size.
Raises:
ValueError: If mode is not 'train' or 'eval'.
"""
if mode == 'train':
return hparams.batch_size
elif mode == 'eval':
return hparams.eval_batch_size
else:
raise ValueError('Invalid --mode=%r' % mode) | [
"def",
"mode_batch_size",
"(",
"mode",
",",
"hparams",
")",
":",
"if",
"mode",
"==",
"'train'",
":",
"return",
"hparams",
".",
"batch_size",
"elif",
"mode",
"==",
"'eval'",
":",
"return",
"hparams",
".",
"eval_batch_size",
"else",
":",
"raise",
"ValueError",
"(",
"'Invalid --mode=%r'",
"%",
"mode",
")"
] | https://github.com/tensorflow/deepmath/blob/b5b721f54de1d5d6a02d78f5da5995237f9995f9/deepmath/guidance/driver_lib.py#L50-L68 | ||
google/skia | 82d65d0487bd72f5f7332d002429ec2dc61d2463 | infra/bots/recipes/skpbench.py | python | skpbench_steps | (api) | benchmark Skia using skpbench. | benchmark Skia using skpbench. | [
"benchmark",
"Skia",
"using",
"skpbench",
"."
] | def skpbench_steps(api):
"""benchmark Skia using skpbench."""
is_vulkan = 'Vulkan' in api.vars.builder_name
is_metal = 'Metal' in api.vars.builder_name
is_android = 'Android' in api.vars.builder_name
is_apple_m1 = 'AppleM1' in api.vars.builder_name
is_all_paths_volatile = 'AllPathsVolatile' in api.vars.builder_name
is_mskp = 'Mskp' in api.vars.builder_name
is_ddl = 'DDL' in api.vars.builder_name
is_9x9 = '9x9' in api.vars.builder_name
api.file.ensure_directory(
'makedirs perf_dir', api.flavor.host_dirs.perf_data_dir)
if is_android:
app = api.vars.build_dir.join('skpbench')
_adb(api, 'push skpbench', 'push', app, api.flavor.device_dirs.bin_dir)
skpbench_dir = api.vars.workdir.join('skia', 'tools', 'skpbench')
table = api.path.join(api.vars.swarming_out_dir, 'table')
if is_vulkan:
config = 'vk'
elif is_metal:
config = 'mtl'
elif is_android:
config = 'gles'
if "MaliG77" in api.vars.builder_name:
config = 'glesdmsaa,' + config
else:
config = 'gl'
if "QuadroP400" in api.vars.builder_name or is_apple_m1:
config = 'gldmsaa,' + config
internal_samples = 4 if is_android or is_apple_m1 else 8
if is_all_paths_volatile:
config = "%smsaa%i" % (config, internal_samples)
skpbench_invocation = api.path.join(api.flavor.device_dirs.bin_dir, 'skpbench')
# skbug.com/10184
if is_vulkan and 'GalaxyS20' in api.vars.builder_name:
skpbench_invocation = "LD_LIBRARY_PATH=/data/local/tmp %s" % skpbench_invocation
skpbench_args = [
skpbench_invocation,
'--resultsfile', table,
'--config', config,
'--internalSamples', str(internal_samples),
# TODO(dogben): Track down what's causing bots to die.
'-v5']
if is_ddl:
skpbench_args += ['--ddl']
# disable the mask generation threads for simplicity's sake in DDL mode
skpbench_args += ['--gpuThreads', '0']
if is_9x9:
skpbench_args += [
'--ddlNumRecordingThreads', 9,
'--ddlTilingWidthHeight', 3]
if is_android:
skpbench_args += [
'--adb',
'--adb_binary', ADB_BINARY]
if is_mskp:
skpbench_args += [api.flavor.device_dirs.mskp_dir]
elif is_all_paths_volatile:
skpbench_args += [
'--allPathsVolatile',
'--suffix', "_volatile",
api.path.join(api.flavor.device_dirs.skp_dir, 'desk_*svg.skp'),
api.path.join(api.flavor.device_dirs.skp_dir, 'desk_motionmark*.skp'),
api.path.join(api.flavor.device_dirs.skp_dir, 'desk_chalkboard.skp')]
else:
skpbench_args += [api.flavor.device_dirs.skp_dir]
if api.properties.get('dont_reduce_ops_task_splitting') == 'true':
skpbench_args += ['--dontReduceOpsTaskSplitting']
if api.properties.get('gpu_resource_cache_limit'):
skpbench_args += ['--gpuResourceCacheLimit', api.properties.get('gpu_resource_cache_limit')]
api.run(api.python, 'skpbench',
script=skpbench_dir.join('skpbench.py'),
args=skpbench_args)
skiaperf_args = [
table,
'--properties',
'gitHash', api.properties['revision'],
]
if api.vars.is_trybot:
skiaperf_args.extend([
'issue', api.vars.issue,
'patchset', api.vars.patchset,
'patch_storage', api.vars.patch_storage,
])
skiaperf_args.extend(['swarming_bot_id', api.vars.swarming_bot_id])
skiaperf_args.extend(['swarming_task_id', api.vars.swarming_task_id])
now = api.time.utcnow()
ts = int(calendar.timegm(now.utctimetuple()))
json_path = api.path.join(
api.flavor.host_dirs.perf_data_dir,
'skpbench_%s_%d.json' % (api.properties['revision'], ts))
skiaperf_args.extend([
'--outfile', json_path
])
skiaperf_args.append('--key')
for k in sorted(api.vars.builder_cfg.keys()):
if not k in ['configuration', 'role', 'is_trybot']:
skiaperf_args.extend([k, api.vars.builder_cfg[k]])
api.run(api.python, 'Parse skpbench output into Perf json',
script=skpbench_dir.join('skiaperf.py'),
args=skiaperf_args) | [
"def",
"skpbench_steps",
"(",
"api",
")",
":",
"is_vulkan",
"=",
"'Vulkan'",
"in",
"api",
".",
"vars",
".",
"builder_name",
"is_metal",
"=",
"'Metal'",
"in",
"api",
".",
"vars",
".",
"builder_name",
"is_android",
"=",
"'Android'",
"in",
"api",
".",
"vars",
".",
"builder_name",
"is_apple_m1",
"=",
"'AppleM1'",
"in",
"api",
".",
"vars",
".",
"builder_name",
"is_all_paths_volatile",
"=",
"'AllPathsVolatile'",
"in",
"api",
".",
"vars",
".",
"builder_name",
"is_mskp",
"=",
"'Mskp'",
"in",
"api",
".",
"vars",
".",
"builder_name",
"is_ddl",
"=",
"'DDL'",
"in",
"api",
".",
"vars",
".",
"builder_name",
"is_9x9",
"=",
"'9x9'",
"in",
"api",
".",
"vars",
".",
"builder_name",
"api",
".",
"file",
".",
"ensure_directory",
"(",
"'makedirs perf_dir'",
",",
"api",
".",
"flavor",
".",
"host_dirs",
".",
"perf_data_dir",
")",
"if",
"is_android",
":",
"app",
"=",
"api",
".",
"vars",
".",
"build_dir",
".",
"join",
"(",
"'skpbench'",
")",
"_adb",
"(",
"api",
",",
"'push skpbench'",
",",
"'push'",
",",
"app",
",",
"api",
".",
"flavor",
".",
"device_dirs",
".",
"bin_dir",
")",
"skpbench_dir",
"=",
"api",
".",
"vars",
".",
"workdir",
".",
"join",
"(",
"'skia'",
",",
"'tools'",
",",
"'skpbench'",
")",
"table",
"=",
"api",
".",
"path",
".",
"join",
"(",
"api",
".",
"vars",
".",
"swarming_out_dir",
",",
"'table'",
")",
"if",
"is_vulkan",
":",
"config",
"=",
"'vk'",
"elif",
"is_metal",
":",
"config",
"=",
"'mtl'",
"elif",
"is_android",
":",
"config",
"=",
"'gles'",
"if",
"\"MaliG77\"",
"in",
"api",
".",
"vars",
".",
"builder_name",
":",
"config",
"=",
"'glesdmsaa,'",
"+",
"config",
"else",
":",
"config",
"=",
"'gl'",
"if",
"\"QuadroP400\"",
"in",
"api",
".",
"vars",
".",
"builder_name",
"or",
"is_apple_m1",
":",
"config",
"=",
"'gldmsaa,'",
"+",
"config",
"internal_samples",
"=",
"4",
"if",
"is_android",
"or",
"is_apple_m1",
"else",
"8",
"if",
"is_all_paths_volatile",
":",
"config",
"=",
"\"%smsaa%i\"",
"%",
"(",
"config",
",",
"internal_samples",
")",
"skpbench_invocation",
"=",
"api",
".",
"path",
".",
"join",
"(",
"api",
".",
"flavor",
".",
"device_dirs",
".",
"bin_dir",
",",
"'skpbench'",
")",
"# skbug.com/10184",
"if",
"is_vulkan",
"and",
"'GalaxyS20'",
"in",
"api",
".",
"vars",
".",
"builder_name",
":",
"skpbench_invocation",
"=",
"\"LD_LIBRARY_PATH=/data/local/tmp %s\"",
"%",
"skpbench_invocation",
"skpbench_args",
"=",
"[",
"skpbench_invocation",
",",
"'--resultsfile'",
",",
"table",
",",
"'--config'",
",",
"config",
",",
"'--internalSamples'",
",",
"str",
"(",
"internal_samples",
")",
",",
"# TODO(dogben): Track down what's causing bots to die.",
"'-v5'",
"]",
"if",
"is_ddl",
":",
"skpbench_args",
"+=",
"[",
"'--ddl'",
"]",
"# disable the mask generation threads for simplicity's sake in DDL mode",
"skpbench_args",
"+=",
"[",
"'--gpuThreads'",
",",
"'0'",
"]",
"if",
"is_9x9",
":",
"skpbench_args",
"+=",
"[",
"'--ddlNumRecordingThreads'",
",",
"9",
",",
"'--ddlTilingWidthHeight'",
",",
"3",
"]",
"if",
"is_android",
":",
"skpbench_args",
"+=",
"[",
"'--adb'",
",",
"'--adb_binary'",
",",
"ADB_BINARY",
"]",
"if",
"is_mskp",
":",
"skpbench_args",
"+=",
"[",
"api",
".",
"flavor",
".",
"device_dirs",
".",
"mskp_dir",
"]",
"elif",
"is_all_paths_volatile",
":",
"skpbench_args",
"+=",
"[",
"'--allPathsVolatile'",
",",
"'--suffix'",
",",
"\"_volatile\"",
",",
"api",
".",
"path",
".",
"join",
"(",
"api",
".",
"flavor",
".",
"device_dirs",
".",
"skp_dir",
",",
"'desk_*svg.skp'",
")",
",",
"api",
".",
"path",
".",
"join",
"(",
"api",
".",
"flavor",
".",
"device_dirs",
".",
"skp_dir",
",",
"'desk_motionmark*.skp'",
")",
",",
"api",
".",
"path",
".",
"join",
"(",
"api",
".",
"flavor",
".",
"device_dirs",
".",
"skp_dir",
",",
"'desk_chalkboard.skp'",
")",
"]",
"else",
":",
"skpbench_args",
"+=",
"[",
"api",
".",
"flavor",
".",
"device_dirs",
".",
"skp_dir",
"]",
"if",
"api",
".",
"properties",
".",
"get",
"(",
"'dont_reduce_ops_task_splitting'",
")",
"==",
"'true'",
":",
"skpbench_args",
"+=",
"[",
"'--dontReduceOpsTaskSplitting'",
"]",
"if",
"api",
".",
"properties",
".",
"get",
"(",
"'gpu_resource_cache_limit'",
")",
":",
"skpbench_args",
"+=",
"[",
"'--gpuResourceCacheLimit'",
",",
"api",
".",
"properties",
".",
"get",
"(",
"'gpu_resource_cache_limit'",
")",
"]",
"api",
".",
"run",
"(",
"api",
".",
"python",
",",
"'skpbench'",
",",
"script",
"=",
"skpbench_dir",
".",
"join",
"(",
"'skpbench.py'",
")",
",",
"args",
"=",
"skpbench_args",
")",
"skiaperf_args",
"=",
"[",
"table",
",",
"'--properties'",
",",
"'gitHash'",
",",
"api",
".",
"properties",
"[",
"'revision'",
"]",
",",
"]",
"if",
"api",
".",
"vars",
".",
"is_trybot",
":",
"skiaperf_args",
".",
"extend",
"(",
"[",
"'issue'",
",",
"api",
".",
"vars",
".",
"issue",
",",
"'patchset'",
",",
"api",
".",
"vars",
".",
"patchset",
",",
"'patch_storage'",
",",
"api",
".",
"vars",
".",
"patch_storage",
",",
"]",
")",
"skiaperf_args",
".",
"extend",
"(",
"[",
"'swarming_bot_id'",
",",
"api",
".",
"vars",
".",
"swarming_bot_id",
"]",
")",
"skiaperf_args",
".",
"extend",
"(",
"[",
"'swarming_task_id'",
",",
"api",
".",
"vars",
".",
"swarming_task_id",
"]",
")",
"now",
"=",
"api",
".",
"time",
".",
"utcnow",
"(",
")",
"ts",
"=",
"int",
"(",
"calendar",
".",
"timegm",
"(",
"now",
".",
"utctimetuple",
"(",
")",
")",
")",
"json_path",
"=",
"api",
".",
"path",
".",
"join",
"(",
"api",
".",
"flavor",
".",
"host_dirs",
".",
"perf_data_dir",
",",
"'skpbench_%s_%d.json'",
"%",
"(",
"api",
".",
"properties",
"[",
"'revision'",
"]",
",",
"ts",
")",
")",
"skiaperf_args",
".",
"extend",
"(",
"[",
"'--outfile'",
",",
"json_path",
"]",
")",
"skiaperf_args",
".",
"append",
"(",
"'--key'",
")",
"for",
"k",
"in",
"sorted",
"(",
"api",
".",
"vars",
".",
"builder_cfg",
".",
"keys",
"(",
")",
")",
":",
"if",
"not",
"k",
"in",
"[",
"'configuration'",
",",
"'role'",
",",
"'is_trybot'",
"]",
":",
"skiaperf_args",
".",
"extend",
"(",
"[",
"k",
",",
"api",
".",
"vars",
".",
"builder_cfg",
"[",
"k",
"]",
"]",
")",
"api",
".",
"run",
"(",
"api",
".",
"python",
",",
"'Parse skpbench output into Perf json'",
",",
"script",
"=",
"skpbench_dir",
".",
"join",
"(",
"'skiaperf.py'",
")",
",",
"args",
"=",
"skiaperf_args",
")"
] | https://github.com/google/skia/blob/82d65d0487bd72f5f7332d002429ec2dc61d2463/infra/bots/recipes/skpbench.py#L42-L160 | ||
wyrover/book-code | 7f4883d9030d553bc6bcfa3da685e34789839900 | 3rdparty/protobuf/python/mox.py | python | MockMethod.__getattr__ | (self, name) | Raise an AttributeError with a helpful message. | Raise an AttributeError with a helpful message. | [
"Raise",
"an",
"AttributeError",
"with",
"a",
"helpful",
"message",
"."
] | def __getattr__(self, name):
"""Raise an AttributeError with a helpful message."""
raise AttributeError('MockMethod has no attribute "%s". '
'Did you remember to put your mocks in replay mode?' % name) | [
"def",
"__getattr__",
"(",
"self",
",",
"name",
")",
":",
"raise",
"AttributeError",
"(",
"'MockMethod has no attribute \"%s\". '",
"'Did you remember to put your mocks in replay mode?'",
"%",
"name",
")"
] | https://github.com/wyrover/book-code/blob/7f4883d9030d553bc6bcfa3da685e34789839900/3rdparty/protobuf/python/mox.py#L575-L579 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scikit-learn/py3/sklearn/covariance/_empirical_covariance.py | python | EmpiricalCovariance.score | (self, X_test, y=None) | return res | Computes the log-likelihood of a Gaussian data set with
`self.covariance_` as an estimator of its covariance matrix.
Parameters
----------
X_test : array-like of shape (n_samples, n_features)
Test data of which we compute the likelihood, where n_samples is
the number of samples and n_features is the number of features.
X_test is assumed to be drawn from the same distribution than
the data used in fit (including centering).
y
not used, present for API consistence purpose.
Returns
-------
res : float
The likelihood of the data set with `self.covariance_` as an
estimator of its covariance matrix. | Computes the log-likelihood of a Gaussian data set with
`self.covariance_` as an estimator of its covariance matrix. | [
"Computes",
"the",
"log",
"-",
"likelihood",
"of",
"a",
"Gaussian",
"data",
"set",
"with",
"self",
".",
"covariance_",
"as",
"an",
"estimator",
"of",
"its",
"covariance",
"matrix",
"."
] | def score(self, X_test, y=None):
"""Computes the log-likelihood of a Gaussian data set with
`self.covariance_` as an estimator of its covariance matrix.
Parameters
----------
X_test : array-like of shape (n_samples, n_features)
Test data of which we compute the likelihood, where n_samples is
the number of samples and n_features is the number of features.
X_test is assumed to be drawn from the same distribution than
the data used in fit (including centering).
y
not used, present for API consistence purpose.
Returns
-------
res : float
The likelihood of the data set with `self.covariance_` as an
estimator of its covariance matrix.
"""
# compute empirical covariance of the test set
test_cov = empirical_covariance(
X_test - self.location_, assume_centered=True)
# compute log likelihood
res = log_likelihood(test_cov, self.get_precision())
return res | [
"def",
"score",
"(",
"self",
",",
"X_test",
",",
"y",
"=",
"None",
")",
":",
"# compute empirical covariance of the test set",
"test_cov",
"=",
"empirical_covariance",
"(",
"X_test",
"-",
"self",
".",
"location_",
",",
"assume_centered",
"=",
"True",
")",
"# compute log likelihood",
"res",
"=",
"log_likelihood",
"(",
"test_cov",
",",
"self",
".",
"get_precision",
"(",
")",
")",
"return",
"res"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py3/sklearn/covariance/_empirical_covariance.py#L205-L233 | |
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | src/python/turicreate/toolkits/drawing_classifier/util/_visualization.py | python | draw_strokes | (stroke_based_drawings) | return sf_with_drawings["drawings"] | Visualizes drawings (ground truth or predictions) by
returning images to represent the stroke-based data from
the user.
Parameters
----------
stroke_based_drawings: SArray or list
An `SArray` of type `list`. Each element in the SArray
should be a list of strokes, where each stroke is a list
of points, and each point is represented as a dictionary
with two keys, "x" and "y". A single stroke-based drawing
is also supported, in which case, the type of the input
would be list.
Returns
-------
drawings: SArray or _tc.Image
Each stroke-based drawing is converted into a 28x28
grayscale drawing for the user to visualize what their
strokes traced. | Visualizes drawings (ground truth or predictions) by
returning images to represent the stroke-based data from
the user. | [
"Visualizes",
"drawings",
"(",
"ground",
"truth",
"or",
"predictions",
")",
"by",
"returning",
"images",
"to",
"represent",
"the",
"stroke",
"-",
"based",
"data",
"from",
"the",
"user",
"."
] | def draw_strokes(stroke_based_drawings):
"""
Visualizes drawings (ground truth or predictions) by
returning images to represent the stroke-based data from
the user.
Parameters
----------
stroke_based_drawings: SArray or list
An `SArray` of type `list`. Each element in the SArray
should be a list of strokes, where each stroke is a list
of points, and each point is represented as a dictionary
with two keys, "x" and "y". A single stroke-based drawing
is also supported, in which case, the type of the input
would be list.
Returns
-------
drawings: SArray or _tc.Image
Each stroke-based drawing is converted into a 28x28
grayscale drawing for the user to visualize what their
strokes traced.
"""
single_input = False
if not isinstance(stroke_based_drawings, _tc.SArray) and not isinstance(
stroke_based_drawings, list
):
raise _ToolkitError(
"Input to draw_strokes must be of type "
+ "turicreate.SArray or list (for a single stroke-based drawing)"
)
if (
isinstance(stroke_based_drawings, _tc.SArray)
and stroke_based_drawings.dtype != list
):
raise _ToolkitError(
"SArray input to draw_strokes must have dtype "
+ "list. Each element in the SArray should be a list of strokes, "
+ "where each stroke is a list of points, "
+ "and each point is represented as a dictionary "
+ 'with two keys, "x" and "y".'
)
if isinstance(stroke_based_drawings, list):
single_input = True
stroke_based_drawings = _tc.SArray([stroke_based_drawings])
sf = _tc.SFrame({"drawings": stroke_based_drawings})
sf_with_drawings = _extensions._drawing_classifier_prepare_data(sf, "drawings")
if single_input:
return sf_with_drawings["drawings"][0]
return sf_with_drawings["drawings"] | [
"def",
"draw_strokes",
"(",
"stroke_based_drawings",
")",
":",
"single_input",
"=",
"False",
"if",
"not",
"isinstance",
"(",
"stroke_based_drawings",
",",
"_tc",
".",
"SArray",
")",
"and",
"not",
"isinstance",
"(",
"stroke_based_drawings",
",",
"list",
")",
":",
"raise",
"_ToolkitError",
"(",
"\"Input to draw_strokes must be of type \"",
"+",
"\"turicreate.SArray or list (for a single stroke-based drawing)\"",
")",
"if",
"(",
"isinstance",
"(",
"stroke_based_drawings",
",",
"_tc",
".",
"SArray",
")",
"and",
"stroke_based_drawings",
".",
"dtype",
"!=",
"list",
")",
":",
"raise",
"_ToolkitError",
"(",
"\"SArray input to draw_strokes must have dtype \"",
"+",
"\"list. Each element in the SArray should be a list of strokes, \"",
"+",
"\"where each stroke is a list of points, \"",
"+",
"\"and each point is represented as a dictionary \"",
"+",
"'with two keys, \"x\" and \"y\".'",
")",
"if",
"isinstance",
"(",
"stroke_based_drawings",
",",
"list",
")",
":",
"single_input",
"=",
"True",
"stroke_based_drawings",
"=",
"_tc",
".",
"SArray",
"(",
"[",
"stroke_based_drawings",
"]",
")",
"sf",
"=",
"_tc",
".",
"SFrame",
"(",
"{",
"\"drawings\"",
":",
"stroke_based_drawings",
"}",
")",
"sf_with_drawings",
"=",
"_extensions",
".",
"_drawing_classifier_prepare_data",
"(",
"sf",
",",
"\"drawings\"",
")",
"if",
"single_input",
":",
"return",
"sf_with_drawings",
"[",
"\"drawings\"",
"]",
"[",
"0",
"]",
"return",
"sf_with_drawings",
"[",
"\"drawings\"",
"]"
] | https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/python/turicreate/toolkits/drawing_classifier/util/_visualization.py#L11-L61 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/_gdi.py | python | Font.MakeItalic | (*args, **kwargs) | return _gdi_.Font_MakeItalic(*args, **kwargs) | MakeItalic(self) -> Font | MakeItalic(self) -> Font | [
"MakeItalic",
"(",
"self",
")",
"-",
">",
"Font"
] | def MakeItalic(*args, **kwargs):
"""MakeItalic(self) -> Font"""
return _gdi_.Font_MakeItalic(*args, **kwargs) | [
"def",
"MakeItalic",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_gdi_",
".",
"Font_MakeItalic",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_gdi.py#L2541-L2543 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | contrib/gizmos/osx_carbon/gizmos.py | python | TreeListCtrl.Expand | (*args, **kwargs) | return _gizmos.TreeListCtrl_Expand(*args, **kwargs) | Expand(self, TreeItemId item) | Expand(self, TreeItemId item) | [
"Expand",
"(",
"self",
"TreeItemId",
"item",
")"
] | def Expand(*args, **kwargs):
"""Expand(self, TreeItemId item)"""
return _gizmos.TreeListCtrl_Expand(*args, **kwargs) | [
"def",
"Expand",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_gizmos",
".",
"TreeListCtrl_Expand",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/contrib/gizmos/osx_carbon/gizmos.py#L871-L873 | |
google/mysql-protobuf | 467cda676afaa49e762c5c9164a43f6ad31a1fbf | storage/ndb/mcc/remote_clusterhost.py | python | RemoteClusterHost.execute_command | (self, cmdv, inFile=None) | Execute an OS command blocking on the local host, using
subprocess module. Returns dict contaning output from process.
cmdv - complete command vector (argv) of the OS command.
inFile - File-like object providing stdin to the command. | Execute an OS command blocking on the local host, using
subprocess module. Returns dict contaning output from process.
cmdv - complete command vector (argv) of the OS command.
inFile - File-like object providing stdin to the command. | [
"Execute",
"an",
"OS",
"command",
"blocking",
"on",
"the",
"local",
"host",
"using",
"subprocess",
"module",
".",
"Returns",
"dict",
"contaning",
"output",
"from",
"process",
".",
"cmdv",
"-",
"complete",
"command",
"vector",
"(",
"argv",
")",
"of",
"the",
"OS",
"command",
".",
"inFile",
"-",
"File",
"-",
"like",
"object",
"providing",
"stdin",
"to",
"the",
"command",
"."
] | def execute_command(self, cmdv, inFile=None):
"""Execute an OS command blocking on the local host, using
subprocess module. Returns dict contaning output from process.
cmdv - complete command vector (argv) of the OS command.
inFile - File-like object providing stdin to the command.
"""
cmdln = ' '.join([quote_if_contains_space(a) for a in cmdv])
_logger.debug('cmdln='+cmdln)
with contextlib.closing(self.client.get_transport().open_session()) as chan:
chan.exec_command(cmdln)
if inFile:
chan.sendall(inFile.read())
chan.shutdown_write()
result = {
'exitstatus': chan.recv_exit_status()
}
with contextlib.closing(chan.makefile('rb')) as outFile:
result['out'] = outFile.read()
with contextlib.closing(chan.makefile_stderr('rb')) as errFile:
result['err'] = errFile.read(),
return result | [
"def",
"execute_command",
"(",
"self",
",",
"cmdv",
",",
"inFile",
"=",
"None",
")",
":",
"cmdln",
"=",
"' '",
".",
"join",
"(",
"[",
"quote_if_contains_space",
"(",
"a",
")",
"for",
"a",
"in",
"cmdv",
"]",
")",
"_logger",
".",
"debug",
"(",
"'cmdln='",
"+",
"cmdln",
")",
"with",
"contextlib",
".",
"closing",
"(",
"self",
".",
"client",
".",
"get_transport",
"(",
")",
".",
"open_session",
"(",
")",
")",
"as",
"chan",
":",
"chan",
".",
"exec_command",
"(",
"cmdln",
")",
"if",
"inFile",
":",
"chan",
".",
"sendall",
"(",
"inFile",
".",
"read",
"(",
")",
")",
"chan",
".",
"shutdown_write",
"(",
")",
"result",
"=",
"{",
"'exitstatus'",
":",
"chan",
".",
"recv_exit_status",
"(",
")",
"}",
"with",
"contextlib",
".",
"closing",
"(",
"chan",
".",
"makefile",
"(",
"'rb'",
")",
")",
"as",
"outFile",
":",
"result",
"[",
"'out'",
"]",
"=",
"outFile",
".",
"read",
"(",
")",
"with",
"contextlib",
".",
"closing",
"(",
"chan",
".",
"makefile_stderr",
"(",
"'rb'",
")",
")",
"as",
"errFile",
":",
"result",
"[",
"'err'",
"]",
"=",
"errFile",
".",
"read",
"(",
")",
",",
"return",
"result"
] | https://github.com/google/mysql-protobuf/blob/467cda676afaa49e762c5c9164a43f6ad31a1fbf/storage/ndb/mcc/remote_clusterhost.py#L290-L314 | ||
alibaba/MNN | c4d9566171d589c3ded23aa18ffb197016995a12 | 3rd_party/flatbuffers/conanfile.py | python | FlatbuffersConan.package_info | (self) | Collect built libraries names and solve flatc path. | Collect built libraries names and solve flatc path. | [
"Collect",
"built",
"libraries",
"names",
"and",
"solve",
"flatc",
"path",
"."
] | def package_info(self):
"""Collect built libraries names and solve flatc path.
"""
self.cpp_info.libs = tools.collect_libs(self)
self.user_info.flatc = os.path.join(self.package_folder, "bin", "flatc") | [
"def",
"package_info",
"(",
"self",
")",
":",
"self",
".",
"cpp_info",
".",
"libs",
"=",
"tools",
".",
"collect_libs",
"(",
"self",
")",
"self",
".",
"user_info",
".",
"flatc",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"package_folder",
",",
"\"bin\"",
",",
"\"flatc\"",
")"
] | https://github.com/alibaba/MNN/blob/c4d9566171d589c3ded23aa18ffb197016995a12/3rd_party/flatbuffers/conanfile.py#L71-L75 | ||
google/earthenterprise | 0fe84e29be470cd857e3a0e52e5d0afd5bb8cee9 | earth_enterprise/src/fusion/portableglobe/servers/portable_globe.py | python | Globe.ReadTerrainPacket | (self, qtpath, layer_id) | return self.ReadDataPacket(
qtpath, glc_unpacker.kTerrainPacket, 1, layer_id) | Returns terrain packet at given address.
If terrain packet is not found, throws an exception.
Args:
qtpath: the quadtree node of the terrain packet.
layer_id: id of layer in the composite globe.
Returns:
The terrain packet itself. | Returns terrain packet at given address. | [
"Returns",
"terrain",
"packet",
"at",
"given",
"address",
"."
] | def ReadTerrainPacket(self, qtpath, layer_id):
"""Returns terrain packet at given address.
If terrain packet is not found, throws an exception.
Args:
qtpath: the quadtree node of the terrain packet.
layer_id: id of layer in the composite globe.
Returns:
The terrain packet itself.
"""
return self.ReadDataPacket(
qtpath, glc_unpacker.kTerrainPacket, 1, layer_id) | [
"def",
"ReadTerrainPacket",
"(",
"self",
",",
"qtpath",
",",
"layer_id",
")",
":",
"return",
"self",
".",
"ReadDataPacket",
"(",
"qtpath",
",",
"glc_unpacker",
".",
"kTerrainPacket",
",",
"1",
",",
"layer_id",
")"
] | https://github.com/google/earthenterprise/blob/0fe84e29be470cd857e3a0e52e5d0afd5bb8cee9/earth_enterprise/src/fusion/portableglobe/servers/portable_globe.py#L271-L283 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python3/src/Lib/lib2to3/pytree.py | python | Base.replace | (self, new) | Replace this node with a new one in the parent. | Replace this node with a new one in the parent. | [
"Replace",
"this",
"node",
"with",
"a",
"new",
"one",
"in",
"the",
"parent",
"."
] | def replace(self, new):
"""Replace this node with a new one in the parent."""
assert self.parent is not None, str(self)
assert new is not None
if not isinstance(new, list):
new = [new]
l_children = []
found = False
for ch in self.parent.children:
if ch is self:
assert not found, (self.parent.children, self, new)
if new is not None:
l_children.extend(new)
found = True
else:
l_children.append(ch)
assert found, (self.children, self, new)
self.parent.changed()
self.parent.children = l_children
for x in new:
x.parent = self.parent
self.parent = None | [
"def",
"replace",
"(",
"self",
",",
"new",
")",
":",
"assert",
"self",
".",
"parent",
"is",
"not",
"None",
",",
"str",
"(",
"self",
")",
"assert",
"new",
"is",
"not",
"None",
"if",
"not",
"isinstance",
"(",
"new",
",",
"list",
")",
":",
"new",
"=",
"[",
"new",
"]",
"l_children",
"=",
"[",
"]",
"found",
"=",
"False",
"for",
"ch",
"in",
"self",
".",
"parent",
".",
"children",
":",
"if",
"ch",
"is",
"self",
":",
"assert",
"not",
"found",
",",
"(",
"self",
".",
"parent",
".",
"children",
",",
"self",
",",
"new",
")",
"if",
"new",
"is",
"not",
"None",
":",
"l_children",
".",
"extend",
"(",
"new",
")",
"found",
"=",
"True",
"else",
":",
"l_children",
".",
"append",
"(",
"ch",
")",
"assert",
"found",
",",
"(",
"self",
".",
"children",
",",
"self",
",",
"new",
")",
"self",
".",
"parent",
".",
"changed",
"(",
")",
"self",
".",
"parent",
".",
"children",
"=",
"l_children",
"for",
"x",
"in",
"new",
":",
"x",
".",
"parent",
"=",
"self",
".",
"parent",
"self",
".",
"parent",
"=",
"None"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/lib2to3/pytree.py#L101-L122 | ||
mongodb/mongo | d8ff665343ad29cf286ee2cf4a1960d29371937b | buildscripts/resmokelib/hang_analyzer/process_list.py | python | _LinuxProcessList.dump_processes | (self, logger) | return [[int(row[0]), os.path.split(row[1])[1]] for row in csv_reader if row[0] != "PID"] | Get list of [Pid, Process Name]. | Get list of [Pid, Process Name]. | [
"Get",
"list",
"of",
"[",
"Pid",
"Process",
"Name",
"]",
"."
] | def dump_processes(self, logger):
"""Get list of [Pid, Process Name]."""
ps = self.__find_ps()
logger.info("Getting list of processes using %s", ps)
call([ps, "--version"], logger)
ret = callo([ps, "-eo", "pid,args"], logger)
buff = io.StringIO(ret)
csv_reader = csv.reader(buff, delimiter=' ', quoting=csv.QUOTE_NONE, skipinitialspace=True)
return [[int(row[0]), os.path.split(row[1])[1]] for row in csv_reader if row[0] != "PID"] | [
"def",
"dump_processes",
"(",
"self",
",",
"logger",
")",
":",
"ps",
"=",
"self",
".",
"__find_ps",
"(",
")",
"logger",
".",
"info",
"(",
"\"Getting list of processes using %s\"",
",",
"ps",
")",
"call",
"(",
"[",
"ps",
",",
"\"--version\"",
"]",
",",
"logger",
")",
"ret",
"=",
"callo",
"(",
"[",
"ps",
",",
"\"-eo\"",
",",
"\"pid,args\"",
"]",
",",
"logger",
")",
"buff",
"=",
"io",
".",
"StringIO",
"(",
"ret",
")",
"csv_reader",
"=",
"csv",
".",
"reader",
"(",
"buff",
",",
"delimiter",
"=",
"' '",
",",
"quoting",
"=",
"csv",
".",
"QUOTE_NONE",
",",
"skipinitialspace",
"=",
"True",
")",
"return",
"[",
"[",
"int",
"(",
"row",
"[",
"0",
"]",
")",
",",
"os",
".",
"path",
".",
"split",
"(",
"row",
"[",
"1",
"]",
")",
"[",
"1",
"]",
"]",
"for",
"row",
"in",
"csv_reader",
"if",
"row",
"[",
"0",
"]",
"!=",
"\"PID\"",
"]"
] | https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/buildscripts/resmokelib/hang_analyzer/process_list.py#L160-L173 | |
mindspore-ai/mindspore | fb8fd3338605bb34fa5cea054e535a8b1d753fab | mindspore/python/mindspore/nn/probability/distribution/uniform.py | python | Uniform._var | (self, low=None, high=None) | return self.sq(high - low) / 12.0 | r"""
.. math::
VAR(U) = \frac{(high -low) ^ 2}{12}. | r"""
.. math::
VAR(U) = \frac{(high -low) ^ 2}{12}. | [
"r",
"..",
"math",
"::",
"VAR",
"(",
"U",
")",
"=",
"\\",
"frac",
"{",
"(",
"high",
"-",
"low",
")",
"^",
"2",
"}",
"{",
"12",
"}",
"."
] | def _var(self, low=None, high=None):
r"""
.. math::
VAR(U) = \frac{(high -low) ^ 2}{12}.
"""
low, high = self._check_param_type(low, high)
return self.sq(high - low) / 12.0 | [
"def",
"_var",
"(",
"self",
",",
"low",
"=",
"None",
",",
"high",
"=",
"None",
")",
":",
"low",
",",
"high",
"=",
"self",
".",
"_check_param_type",
"(",
"low",
",",
"high",
")",
"return",
"self",
".",
"sq",
"(",
"high",
"-",
"low",
")",
"/",
"12.0"
] | https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/nn/probability/distribution/uniform.py#L256-L262 | |
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/codecs.py | python | StreamReader.__getattr__ | (self, name,
getattr=getattr) | return getattr(self.stream, name) | Inherit all other methods from the underlying stream. | Inherit all other methods from the underlying stream. | [
"Inherit",
"all",
"other",
"methods",
"from",
"the",
"underlying",
"stream",
"."
] | def __getattr__(self, name,
getattr=getattr):
""" Inherit all other methods from the underlying stream.
"""
return getattr(self.stream, name) | [
"def",
"__getattr__",
"(",
"self",
",",
"name",
",",
"getattr",
"=",
"getattr",
")",
":",
"return",
"getattr",
"(",
"self",
".",
"stream",
",",
"name",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/codecs.py#L623-L628 | |
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/keras/losses.py | python | serialize | (loss) | return serialize_keras_object(loss) | Serializes loss function or `Loss` instance.
Args:
loss: A Keras `Loss` instance or a loss function.
Returns:
Loss configuration dictionary. | Serializes loss function or `Loss` instance. | [
"Serializes",
"loss",
"function",
"or",
"Loss",
"instance",
"."
] | def serialize(loss):
"""Serializes loss function or `Loss` instance.
Args:
loss: A Keras `Loss` instance or a loss function.
Returns:
Loss configuration dictionary.
"""
return serialize_keras_object(loss) | [
"def",
"serialize",
"(",
"loss",
")",
":",
"return",
"serialize_keras_object",
"(",
"loss",
")"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/keras/losses.py#L2081-L2090 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/grid.py | python | Grid.SetCellOverflow | (*args, **kwargs) | return _grid.Grid_SetCellOverflow(*args, **kwargs) | SetCellOverflow(self, int row, int col, bool allow) | SetCellOverflow(self, int row, int col, bool allow) | [
"SetCellOverflow",
"(",
"self",
"int",
"row",
"int",
"col",
"bool",
"allow",
")"
] | def SetCellOverflow(*args, **kwargs):
"""SetCellOverflow(self, int row, int col, bool allow)"""
return _grid.Grid_SetCellOverflow(*args, **kwargs) | [
"def",
"SetCellOverflow",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_grid",
".",
"Grid_SetCellOverflow",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/grid.py#L1970-L1972 | |
mindspore-ai/mindspore | fb8fd3338605bb34fa5cea054e535a8b1d753fab | mindspore/python/mindspore/train/loss_scale_manager.py | python | FixedLossScaleManager.update_loss_scale | (self, overflow) | Update loss scale value. The interface at `FixedLossScaleManager` will do nothing.
Args:
overflow (bool): Whether it overflows. | Update loss scale value. The interface at `FixedLossScaleManager` will do nothing. | [
"Update",
"loss",
"scale",
"value",
".",
"The",
"interface",
"at",
"FixedLossScaleManager",
"will",
"do",
"nothing",
"."
] | def update_loss_scale(self, overflow):
"""
Update loss scale value. The interface at `FixedLossScaleManager` will do nothing.
Args:
overflow (bool): Whether it overflows.
""" | [
"def",
"update_loss_scale",
"(",
"self",
",",
"overflow",
")",
":"
] | https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/train/loss_scale_manager.py#L97-L103 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/numpy/py3/numpy/distutils/conv_template.py | python | parse_loop_header | (loophead) | return dlist | Find all named replacements in the header
Returns a list of dictionaries, one for each loop iteration,
where each key is a name to be substituted and the corresponding
value is the replacement string.
Also return a list of exclusions. The exclusions are dictionaries
of key value pairs. There can be more than one exclusion.
[{'var1':'value1', 'var2', 'value2'[,...]}, ...] | Find all named replacements in the header | [
"Find",
"all",
"named",
"replacements",
"in",
"the",
"header"
] | def parse_loop_header(loophead) :
"""Find all named replacements in the header
Returns a list of dictionaries, one for each loop iteration,
where each key is a name to be substituted and the corresponding
value is the replacement string.
Also return a list of exclusions. The exclusions are dictionaries
of key value pairs. There can be more than one exclusion.
[{'var1':'value1', 'var2', 'value2'[,...]}, ...]
"""
# Strip out '\n' and leading '*', if any, in continuation lines.
# This should not effect code previous to this change as
# continuation lines were not allowed.
loophead = stripast.sub("", loophead)
# parse out the names and lists of values
names = []
reps = named_re.findall(loophead)
nsub = None
for rep in reps:
name = rep[0]
vals = parse_values(rep[1])
size = len(vals)
if nsub is None :
nsub = size
elif nsub != size :
msg = "Mismatch in number of values, %d != %d\n%s = %s"
raise ValueError(msg % (nsub, size, name, vals))
names.append((name, vals))
# Find any exclude variables
excludes = []
for obj in exclude_re.finditer(loophead):
span = obj.span()
# find next newline
endline = loophead.find('\n', span[1])
substr = loophead[span[1]:endline]
ex_names = exclude_vars_re.findall(substr)
excludes.append(dict(ex_names))
# generate list of dictionaries, one for each template iteration
dlist = []
if nsub is None :
raise ValueError("No substitution variables found")
for i in range(nsub):
tmp = {name: vals[i] for name, vals in names}
dlist.append(tmp)
return dlist | [
"def",
"parse_loop_header",
"(",
"loophead",
")",
":",
"# Strip out '\\n' and leading '*', if any, in continuation lines.",
"# This should not effect code previous to this change as",
"# continuation lines were not allowed.",
"loophead",
"=",
"stripast",
".",
"sub",
"(",
"\"\"",
",",
"loophead",
")",
"# parse out the names and lists of values",
"names",
"=",
"[",
"]",
"reps",
"=",
"named_re",
".",
"findall",
"(",
"loophead",
")",
"nsub",
"=",
"None",
"for",
"rep",
"in",
"reps",
":",
"name",
"=",
"rep",
"[",
"0",
"]",
"vals",
"=",
"parse_values",
"(",
"rep",
"[",
"1",
"]",
")",
"size",
"=",
"len",
"(",
"vals",
")",
"if",
"nsub",
"is",
"None",
":",
"nsub",
"=",
"size",
"elif",
"nsub",
"!=",
"size",
":",
"msg",
"=",
"\"Mismatch in number of values, %d != %d\\n%s = %s\"",
"raise",
"ValueError",
"(",
"msg",
"%",
"(",
"nsub",
",",
"size",
",",
"name",
",",
"vals",
")",
")",
"names",
".",
"append",
"(",
"(",
"name",
",",
"vals",
")",
")",
"# Find any exclude variables",
"excludes",
"=",
"[",
"]",
"for",
"obj",
"in",
"exclude_re",
".",
"finditer",
"(",
"loophead",
")",
":",
"span",
"=",
"obj",
".",
"span",
"(",
")",
"# find next newline",
"endline",
"=",
"loophead",
".",
"find",
"(",
"'\\n'",
",",
"span",
"[",
"1",
"]",
")",
"substr",
"=",
"loophead",
"[",
"span",
"[",
"1",
"]",
":",
"endline",
"]",
"ex_names",
"=",
"exclude_vars_re",
".",
"findall",
"(",
"substr",
")",
"excludes",
".",
"append",
"(",
"dict",
"(",
"ex_names",
")",
")",
"# generate list of dictionaries, one for each template iteration",
"dlist",
"=",
"[",
"]",
"if",
"nsub",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"No substitution variables found\"",
")",
"for",
"i",
"in",
"range",
"(",
"nsub",
")",
":",
"tmp",
"=",
"{",
"name",
":",
"vals",
"[",
"i",
"]",
"for",
"name",
",",
"vals",
"in",
"names",
"}",
"dlist",
".",
"append",
"(",
"tmp",
")",
"return",
"dlist"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py3/numpy/distutils/conv_template.py#L158-L208 | |
IntelRealSense/librealsense | c94410a420b74e5fb6a414bd12215c05ddd82b69 | tools/rs-imu-calibration/rs-imu-calibration.py | python | main | () | wtw = dot(transpose(w),w)
wtwi = np.linalg.inv(wtw)
print(wtwi)
X = dot(wtwi, Y)
print(X) | wtw = dot(transpose(w),w)
wtwi = np.linalg.inv(wtw)
print(wtwi)
X = dot(wtwi, Y)
print(X) | [
"wtw",
"=",
"dot",
"(",
"transpose",
"(",
"w",
")",
"w",
")",
"wtwi",
"=",
"np",
".",
"linalg",
".",
"inv",
"(",
"wtw",
")",
"print",
"(",
"wtwi",
")",
"X",
"=",
"dot",
"(",
"wtwi",
"Y",
")",
"print",
"(",
"X",
")"
] | def main():
if any([help_str in sys.argv for help_str in ['-h', '--help', '/?']]):
print("Usage:", sys.argv[0], "[Options]")
print
print('[Options]:')
print('-i : /path/to/accel.txt [/path/to/gyro.txt]')
print('-s : serial number of device to calibrate.')
print('-g : show graph of norm values - original values in blue and corrected in green.')
print
print('If -i option is given, calibration is done using previosly saved files')
print('Otherwise, an interactive process is followed.')
sys.exit(1)
try:
accel_file = None
gyro_file = None
serial_no = ''
show_graph = '-g' in sys.argv
for idx in range(len(sys.argv)):
if sys.argv[idx] == '-i':
accel_file = sys.argv[idx+1]
if len(sys.argv) > idx+2 and not sys.argv[idx+2].startswith('-'):
gyro_file = sys.argv[idx+2]
if sys.argv[idx] == '-s':
serial_no = sys.argv[idx+1]
print('waiting for realsense device...')
dev = wait_for_rs_device(serial_no)
product_line = dev.get_info(rs.camera_info.product_line)
if product_line == 'L500':
print('checking minimum firmware requirement ...')
fw_version = dev.get_info(rs.camera_info.firmware_version)
if fw_version < L515_FW_VER_REQUIRED:
raise Exception('L515 requires firmware ' + L515_FW_VER_REQUIRED + " or later to support IMU calibration. Please upgrade firmware and try again.")
else:
print(' firmware ' + fw_version + ' passed check.')
buckets = [[0, -g, 0], [ g, 0, 0],
[0, g, 0], [-g, 0, 0],
[0, 0, -g], [ 0, 0, g]]
# all D400 and L500 cameras with IMU equipped with a mounting screw at the bottom of the device
# when device is in normal use position upright facing out, mount screw is pointing down, aligned with positive Y direction in depth coordinate system
# IMU output on each of these devices is transformed into the depth coordinate system, i.e.,
# looking from back of the camera towards front, the positive x-axis points to the right, the positive y-axis points down, and the positive z-axis points forward.
# output of motion data is consistent with convention that positive direction aligned with gravity leads to -1g and opposite direction leads to +1g, for example,
# positive z_aixs points forward away from front glass of the device,
# 1) if place the device flat on a table, facing up, positive z-axis points up, z-axis acceleration is around +1g
# 2) facing down, positive z-axis points down, z-axis accleration would be around -1g
#
buckets_labels = ["Mounting screw pointing down, device facing out", "Mounting screw pointing left, device facing out", "Mounting screw pointing up, device facing out", "Mounting screw pointing right, device facing out", "Viewing direction facing down", "Viewing direction facing up"]
gyro_bais = np.zeros(3, np.float32)
old_settings = None
if accel_file:
if gyro_file:
#compute gyro bais
#assume the first 4 seconds the device is still
gyro = np.loadtxt(gyro_file, delimiter=",")
gyro = gyro[gyro[:, 0] < gyro[0, 0]+4000, :]
gyro_bais = np.mean(gyro[:, 1:], axis=0)
print(gyro_bais)
#compute accel intrinsic parameters
max_norm = np.linalg.norm(np.array([0.5, 0.5, 0.5]))
measurements = [[], [], [], [], [], []]
import csv
with open(accel_file, 'r') as csvfile:
reader = csv.reader(csvfile)
rnum = 0
for row in reader:
M = np.array([float(row[1]), float(row[2]), float(row[3])])
is_ok = False
for i in range(0, len(buckets)):
if np.linalg.norm(M - buckets[i]) < max_norm:
is_ok = True
measurements[i].append(M)
rnum += 1
print('read %d rows.' % rnum)
else:
print('Start interactive mode:')
if os.name == 'posix':
old_settings = termios.tcgetattr(sys.stdin)
tty.setcbreak(sys.stdin.fileno())
imu = imu_wrapper()
if not imu.enable_imu_device(serial_no):
print('Failed to enable device.')
return -1
measurements, gyro = imu.get_measurements(buckets, buckets_labels)
con_mm = np.concatenate(measurements)
if os.name == 'posix':
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_settings)
header = input('\nWould you like to save the raw data? Enter footer for saving files (accel_<footer>.txt and gyro_<footer>.txt)\nEnter nothing to not save raw data to disk. >')
print('\n')
if header:
accel_file = 'accel_%s.txt' % header
gyro_file = 'gyro_%s.txt' % header
print('Writing files:\n%s\n%s' % (accel_file, gyro_file))
np.savetxt(accel_file, con_mm, delimiter=',', fmt='%s')
np.savetxt(gyro_file, gyro, delimiter=',', fmt='%s')
else:
print('Not writing to files.')
# remove times from measurements:
measurements = [mm[:,1:] for mm in measurements]
gyro_bais = np.mean(gyro[:, 1:], axis=0)
print(gyro_bais)
mlen = np.array([len(meas) for meas in measurements])
print(mlen)
print('using %d measurements.' % mlen.sum())
nrows = mlen.sum()
w = np.zeros([nrows, 4])
Y = np.zeros([nrows, 3])
row = 0
for i in range(0, len(buckets)):
for m in measurements[i]:
w[row, 0] = m[0]
w[row, 1] = m[1]
w[row, 2] = m[2]
w[row, 3] = -1
Y[row, 0] = buckets[i][0]
Y[row, 1] = buckets[i][1]
Y[row, 2] = buckets[i][2]
row += 1
np_version = [int(x) for x in np.version.version.split('.')]
rcond_val = None if (np_version[1] >= 14 or np_version[0] > 1) else -1
X, residuals, rank, singular = np.linalg.lstsq(w, Y, rcond=rcond_val)
print(X)
print("residuals:", residuals)
print("rank:", rank)
print("singular:", singular)
check_X(X, w[:,:3], show_graph)
calibration = {}
if product_line == 'L500':
calibration["device_type"] = "L515"
else:
calibration["device_type"] = "D435i"
calibration["imus"] = list()
calibration["imus"].append({})
calibration["imus"][0]["accelerometer"] = {}
calibration["imus"][0]["accelerometer"]["scale_and_alignment"] = X.flatten()[:9].tolist()
calibration["imus"][0]["accelerometer"]["bias"] = X.flatten()[9:].tolist()
calibration["imus"][0]["gyroscope"] = {}
calibration["imus"][0]["gyroscope"]["scale_and_alignment"] = np.eye(3).flatten().tolist()
calibration["imus"][0]["gyroscope"]["bias"] = gyro_bais.tolist()
json_data = json.dumps(calibration, indent=4, sort_keys=True)
directory = os.path.dirname(accel_file) if accel_file else '.'
with open(os.path.join(directory,"calibration.json"), 'w') as outfile:
outfile.write(json_data)
#concatinate the two 12 element arrays and save
intrinsic_buffer = np.zeros([6,4])
intrinsic_buffer[:3,:4] = X.T
intrinsic_buffer[3:,:3] = np.eye(3)
intrinsic_buffer[3:,3] = gyro_bais
# intrinsic_buffer = ((np.array(range(24),np.float32)+1)/10).reshape([6,4])
imu_calib_table = get_IMU_Calib_Table(intrinsic_buffer, product_line)
with open(os.path.join(directory,"calibration.bin"), 'wb') as outfile:
outfile.write(imu_calib_table.astype('f').tostring())
is_write = input('Would you like to write the results to the camera? (Y/N)')
is_write = 'Y' in is_write.upper()
if is_write:
print('Writing calibration to device.')
if product_line == 'L500':
l500_send_command(dev, WRITE_TABLE, 0, 0, 0, 0, imu_calib_table)
else:
calibration_table = get_calibration_table(imu_calib_table)
eeprom = get_eeprom(calibration_table)
write_eeprom_to_camera(eeprom, serial_no)
print('Done.')
else:
print('Abort writing to device')
except Exception as e:
print ('\nError: %s' % e)
finally:
if os.name == 'posix' and old_settings is not None:
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_settings)
"""
wtw = dot(transpose(w),w)
wtwi = np.linalg.inv(wtw)
print(wtwi)
X = dot(wtwi, Y)
print(X)
""" | [
"def",
"main",
"(",
")",
":",
"if",
"any",
"(",
"[",
"help_str",
"in",
"sys",
".",
"argv",
"for",
"help_str",
"in",
"[",
"'-h'",
",",
"'--help'",
",",
"'/?'",
"]",
"]",
")",
":",
"print",
"(",
"\"Usage:\"",
",",
"sys",
".",
"argv",
"[",
"0",
"]",
",",
"\"[Options]\"",
")",
"print",
"print",
"(",
"'[Options]:'",
")",
"print",
"(",
"'-i : /path/to/accel.txt [/path/to/gyro.txt]'",
")",
"print",
"(",
"'-s : serial number of device to calibrate.'",
")",
"print",
"(",
"'-g : show graph of norm values - original values in blue and corrected in green.'",
")",
"print",
"print",
"(",
"'If -i option is given, calibration is done using previosly saved files'",
")",
"print",
"(",
"'Otherwise, an interactive process is followed.'",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"try",
":",
"accel_file",
"=",
"None",
"gyro_file",
"=",
"None",
"serial_no",
"=",
"''",
"show_graph",
"=",
"'-g'",
"in",
"sys",
".",
"argv",
"for",
"idx",
"in",
"range",
"(",
"len",
"(",
"sys",
".",
"argv",
")",
")",
":",
"if",
"sys",
".",
"argv",
"[",
"idx",
"]",
"==",
"'-i'",
":",
"accel_file",
"=",
"sys",
".",
"argv",
"[",
"idx",
"+",
"1",
"]",
"if",
"len",
"(",
"sys",
".",
"argv",
")",
">",
"idx",
"+",
"2",
"and",
"not",
"sys",
".",
"argv",
"[",
"idx",
"+",
"2",
"]",
".",
"startswith",
"(",
"'-'",
")",
":",
"gyro_file",
"=",
"sys",
".",
"argv",
"[",
"idx",
"+",
"2",
"]",
"if",
"sys",
".",
"argv",
"[",
"idx",
"]",
"==",
"'-s'",
":",
"serial_no",
"=",
"sys",
".",
"argv",
"[",
"idx",
"+",
"1",
"]",
"print",
"(",
"'waiting for realsense device...'",
")",
"dev",
"=",
"wait_for_rs_device",
"(",
"serial_no",
")",
"product_line",
"=",
"dev",
".",
"get_info",
"(",
"rs",
".",
"camera_info",
".",
"product_line",
")",
"if",
"product_line",
"==",
"'L500'",
":",
"print",
"(",
"'checking minimum firmware requirement ...'",
")",
"fw_version",
"=",
"dev",
".",
"get_info",
"(",
"rs",
".",
"camera_info",
".",
"firmware_version",
")",
"if",
"fw_version",
"<",
"L515_FW_VER_REQUIRED",
":",
"raise",
"Exception",
"(",
"'L515 requires firmware '",
"+",
"L515_FW_VER_REQUIRED",
"+",
"\" or later to support IMU calibration. Please upgrade firmware and try again.\"",
")",
"else",
":",
"print",
"(",
"' firmware '",
"+",
"fw_version",
"+",
"' passed check.'",
")",
"buckets",
"=",
"[",
"[",
"0",
",",
"-",
"g",
",",
"0",
"]",
",",
"[",
"g",
",",
"0",
",",
"0",
"]",
",",
"[",
"0",
",",
"g",
",",
"0",
"]",
",",
"[",
"-",
"g",
",",
"0",
",",
"0",
"]",
",",
"[",
"0",
",",
"0",
",",
"-",
"g",
"]",
",",
"[",
"0",
",",
"0",
",",
"g",
"]",
"]",
"# all D400 and L500 cameras with IMU equipped with a mounting screw at the bottom of the device",
"# when device is in normal use position upright facing out, mount screw is pointing down, aligned with positive Y direction in depth coordinate system",
"# IMU output on each of these devices is transformed into the depth coordinate system, i.e.,",
"# looking from back of the camera towards front, the positive x-axis points to the right, the positive y-axis points down, and the positive z-axis points forward.",
"# output of motion data is consistent with convention that positive direction aligned with gravity leads to -1g and opposite direction leads to +1g, for example,",
"# positive z_aixs points forward away from front glass of the device,",
"# 1) if place the device flat on a table, facing up, positive z-axis points up, z-axis acceleration is around +1g",
"# 2) facing down, positive z-axis points down, z-axis accleration would be around -1g",
"#",
"buckets_labels",
"=",
"[",
"\"Mounting screw pointing down, device facing out\"",
",",
"\"Mounting screw pointing left, device facing out\"",
",",
"\"Mounting screw pointing up, device facing out\"",
",",
"\"Mounting screw pointing right, device facing out\"",
",",
"\"Viewing direction facing down\"",
",",
"\"Viewing direction facing up\"",
"]",
"gyro_bais",
"=",
"np",
".",
"zeros",
"(",
"3",
",",
"np",
".",
"float32",
")",
"old_settings",
"=",
"None",
"if",
"accel_file",
":",
"if",
"gyro_file",
":",
"#compute gyro bais",
"#assume the first 4 seconds the device is still",
"gyro",
"=",
"np",
".",
"loadtxt",
"(",
"gyro_file",
",",
"delimiter",
"=",
"\",\"",
")",
"gyro",
"=",
"gyro",
"[",
"gyro",
"[",
":",
",",
"0",
"]",
"<",
"gyro",
"[",
"0",
",",
"0",
"]",
"+",
"4000",
",",
":",
"]",
"gyro_bais",
"=",
"np",
".",
"mean",
"(",
"gyro",
"[",
":",
",",
"1",
":",
"]",
",",
"axis",
"=",
"0",
")",
"print",
"(",
"gyro_bais",
")",
"#compute accel intrinsic parameters",
"max_norm",
"=",
"np",
".",
"linalg",
".",
"norm",
"(",
"np",
".",
"array",
"(",
"[",
"0.5",
",",
"0.5",
",",
"0.5",
"]",
")",
")",
"measurements",
"=",
"[",
"[",
"]",
",",
"[",
"]",
",",
"[",
"]",
",",
"[",
"]",
",",
"[",
"]",
",",
"[",
"]",
"]",
"import",
"csv",
"with",
"open",
"(",
"accel_file",
",",
"'r'",
")",
"as",
"csvfile",
":",
"reader",
"=",
"csv",
".",
"reader",
"(",
"csvfile",
")",
"rnum",
"=",
"0",
"for",
"row",
"in",
"reader",
":",
"M",
"=",
"np",
".",
"array",
"(",
"[",
"float",
"(",
"row",
"[",
"1",
"]",
")",
",",
"float",
"(",
"row",
"[",
"2",
"]",
")",
",",
"float",
"(",
"row",
"[",
"3",
"]",
")",
"]",
")",
"is_ok",
"=",
"False",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"buckets",
")",
")",
":",
"if",
"np",
".",
"linalg",
".",
"norm",
"(",
"M",
"-",
"buckets",
"[",
"i",
"]",
")",
"<",
"max_norm",
":",
"is_ok",
"=",
"True",
"measurements",
"[",
"i",
"]",
".",
"append",
"(",
"M",
")",
"rnum",
"+=",
"1",
"print",
"(",
"'read %d rows.'",
"%",
"rnum",
")",
"else",
":",
"print",
"(",
"'Start interactive mode:'",
")",
"if",
"os",
".",
"name",
"==",
"'posix'",
":",
"old_settings",
"=",
"termios",
".",
"tcgetattr",
"(",
"sys",
".",
"stdin",
")",
"tty",
".",
"setcbreak",
"(",
"sys",
".",
"stdin",
".",
"fileno",
"(",
")",
")",
"imu",
"=",
"imu_wrapper",
"(",
")",
"if",
"not",
"imu",
".",
"enable_imu_device",
"(",
"serial_no",
")",
":",
"print",
"(",
"'Failed to enable device.'",
")",
"return",
"-",
"1",
"measurements",
",",
"gyro",
"=",
"imu",
".",
"get_measurements",
"(",
"buckets",
",",
"buckets_labels",
")",
"con_mm",
"=",
"np",
".",
"concatenate",
"(",
"measurements",
")",
"if",
"os",
".",
"name",
"==",
"'posix'",
":",
"termios",
".",
"tcsetattr",
"(",
"sys",
".",
"stdin",
",",
"termios",
".",
"TCSADRAIN",
",",
"old_settings",
")",
"header",
"=",
"input",
"(",
"'\\nWould you like to save the raw data? Enter footer for saving files (accel_<footer>.txt and gyro_<footer>.txt)\\nEnter nothing to not save raw data to disk. >'",
")",
"print",
"(",
"'\\n'",
")",
"if",
"header",
":",
"accel_file",
"=",
"'accel_%s.txt'",
"%",
"header",
"gyro_file",
"=",
"'gyro_%s.txt'",
"%",
"header",
"print",
"(",
"'Writing files:\\n%s\\n%s'",
"%",
"(",
"accel_file",
",",
"gyro_file",
")",
")",
"np",
".",
"savetxt",
"(",
"accel_file",
",",
"con_mm",
",",
"delimiter",
"=",
"','",
",",
"fmt",
"=",
"'%s'",
")",
"np",
".",
"savetxt",
"(",
"gyro_file",
",",
"gyro",
",",
"delimiter",
"=",
"','",
",",
"fmt",
"=",
"'%s'",
")",
"else",
":",
"print",
"(",
"'Not writing to files.'",
")",
"# remove times from measurements:",
"measurements",
"=",
"[",
"mm",
"[",
":",
",",
"1",
":",
"]",
"for",
"mm",
"in",
"measurements",
"]",
"gyro_bais",
"=",
"np",
".",
"mean",
"(",
"gyro",
"[",
":",
",",
"1",
":",
"]",
",",
"axis",
"=",
"0",
")",
"print",
"(",
"gyro_bais",
")",
"mlen",
"=",
"np",
".",
"array",
"(",
"[",
"len",
"(",
"meas",
")",
"for",
"meas",
"in",
"measurements",
"]",
")",
"print",
"(",
"mlen",
")",
"print",
"(",
"'using %d measurements.'",
"%",
"mlen",
".",
"sum",
"(",
")",
")",
"nrows",
"=",
"mlen",
".",
"sum",
"(",
")",
"w",
"=",
"np",
".",
"zeros",
"(",
"[",
"nrows",
",",
"4",
"]",
")",
"Y",
"=",
"np",
".",
"zeros",
"(",
"[",
"nrows",
",",
"3",
"]",
")",
"row",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"buckets",
")",
")",
":",
"for",
"m",
"in",
"measurements",
"[",
"i",
"]",
":",
"w",
"[",
"row",
",",
"0",
"]",
"=",
"m",
"[",
"0",
"]",
"w",
"[",
"row",
",",
"1",
"]",
"=",
"m",
"[",
"1",
"]",
"w",
"[",
"row",
",",
"2",
"]",
"=",
"m",
"[",
"2",
"]",
"w",
"[",
"row",
",",
"3",
"]",
"=",
"-",
"1",
"Y",
"[",
"row",
",",
"0",
"]",
"=",
"buckets",
"[",
"i",
"]",
"[",
"0",
"]",
"Y",
"[",
"row",
",",
"1",
"]",
"=",
"buckets",
"[",
"i",
"]",
"[",
"1",
"]",
"Y",
"[",
"row",
",",
"2",
"]",
"=",
"buckets",
"[",
"i",
"]",
"[",
"2",
"]",
"row",
"+=",
"1",
"np_version",
"=",
"[",
"int",
"(",
"x",
")",
"for",
"x",
"in",
"np",
".",
"version",
".",
"version",
".",
"split",
"(",
"'.'",
")",
"]",
"rcond_val",
"=",
"None",
"if",
"(",
"np_version",
"[",
"1",
"]",
">=",
"14",
"or",
"np_version",
"[",
"0",
"]",
">",
"1",
")",
"else",
"-",
"1",
"X",
",",
"residuals",
",",
"rank",
",",
"singular",
"=",
"np",
".",
"linalg",
".",
"lstsq",
"(",
"w",
",",
"Y",
",",
"rcond",
"=",
"rcond_val",
")",
"print",
"(",
"X",
")",
"print",
"(",
"\"residuals:\"",
",",
"residuals",
")",
"print",
"(",
"\"rank:\"",
",",
"rank",
")",
"print",
"(",
"\"singular:\"",
",",
"singular",
")",
"check_X",
"(",
"X",
",",
"w",
"[",
":",
",",
":",
"3",
"]",
",",
"show_graph",
")",
"calibration",
"=",
"{",
"}",
"if",
"product_line",
"==",
"'L500'",
":",
"calibration",
"[",
"\"device_type\"",
"]",
"=",
"\"L515\"",
"else",
":",
"calibration",
"[",
"\"device_type\"",
"]",
"=",
"\"D435i\"",
"calibration",
"[",
"\"imus\"",
"]",
"=",
"list",
"(",
")",
"calibration",
"[",
"\"imus\"",
"]",
".",
"append",
"(",
"{",
"}",
")",
"calibration",
"[",
"\"imus\"",
"]",
"[",
"0",
"]",
"[",
"\"accelerometer\"",
"]",
"=",
"{",
"}",
"calibration",
"[",
"\"imus\"",
"]",
"[",
"0",
"]",
"[",
"\"accelerometer\"",
"]",
"[",
"\"scale_and_alignment\"",
"]",
"=",
"X",
".",
"flatten",
"(",
")",
"[",
":",
"9",
"]",
".",
"tolist",
"(",
")",
"calibration",
"[",
"\"imus\"",
"]",
"[",
"0",
"]",
"[",
"\"accelerometer\"",
"]",
"[",
"\"bias\"",
"]",
"=",
"X",
".",
"flatten",
"(",
")",
"[",
"9",
":",
"]",
".",
"tolist",
"(",
")",
"calibration",
"[",
"\"imus\"",
"]",
"[",
"0",
"]",
"[",
"\"gyroscope\"",
"]",
"=",
"{",
"}",
"calibration",
"[",
"\"imus\"",
"]",
"[",
"0",
"]",
"[",
"\"gyroscope\"",
"]",
"[",
"\"scale_and_alignment\"",
"]",
"=",
"np",
".",
"eye",
"(",
"3",
")",
".",
"flatten",
"(",
")",
".",
"tolist",
"(",
")",
"calibration",
"[",
"\"imus\"",
"]",
"[",
"0",
"]",
"[",
"\"gyroscope\"",
"]",
"[",
"\"bias\"",
"]",
"=",
"gyro_bais",
".",
"tolist",
"(",
")",
"json_data",
"=",
"json",
".",
"dumps",
"(",
"calibration",
",",
"indent",
"=",
"4",
",",
"sort_keys",
"=",
"True",
")",
"directory",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"accel_file",
")",
"if",
"accel_file",
"else",
"'.'",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"directory",
",",
"\"calibration.json\"",
")",
",",
"'w'",
")",
"as",
"outfile",
":",
"outfile",
".",
"write",
"(",
"json_data",
")",
"#concatinate the two 12 element arrays and save",
"intrinsic_buffer",
"=",
"np",
".",
"zeros",
"(",
"[",
"6",
",",
"4",
"]",
")",
"intrinsic_buffer",
"[",
":",
"3",
",",
":",
"4",
"]",
"=",
"X",
".",
"T",
"intrinsic_buffer",
"[",
"3",
":",
",",
":",
"3",
"]",
"=",
"np",
".",
"eye",
"(",
"3",
")",
"intrinsic_buffer",
"[",
"3",
":",
",",
"3",
"]",
"=",
"gyro_bais",
"# intrinsic_buffer = ((np.array(range(24),np.float32)+1)/10).reshape([6,4])",
"imu_calib_table",
"=",
"get_IMU_Calib_Table",
"(",
"intrinsic_buffer",
",",
"product_line",
")",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"directory",
",",
"\"calibration.bin\"",
")",
",",
"'wb'",
")",
"as",
"outfile",
":",
"outfile",
".",
"write",
"(",
"imu_calib_table",
".",
"astype",
"(",
"'f'",
")",
".",
"tostring",
"(",
")",
")",
"is_write",
"=",
"input",
"(",
"'Would you like to write the results to the camera? (Y/N)'",
")",
"is_write",
"=",
"'Y'",
"in",
"is_write",
".",
"upper",
"(",
")",
"if",
"is_write",
":",
"print",
"(",
"'Writing calibration to device.'",
")",
"if",
"product_line",
"==",
"'L500'",
":",
"l500_send_command",
"(",
"dev",
",",
"WRITE_TABLE",
",",
"0",
",",
"0",
",",
"0",
",",
"0",
",",
"imu_calib_table",
")",
"else",
":",
"calibration_table",
"=",
"get_calibration_table",
"(",
"imu_calib_table",
")",
"eeprom",
"=",
"get_eeprom",
"(",
"calibration_table",
")",
"write_eeprom_to_camera",
"(",
"eeprom",
",",
"serial_no",
")",
"print",
"(",
"'Done.'",
")",
"else",
":",
"print",
"(",
"'Abort writing to device'",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"(",
"'\\nError: %s'",
"%",
"e",
")",
"finally",
":",
"if",
"os",
".",
"name",
"==",
"'posix'",
"and",
"old_settings",
"is",
"not",
"None",
":",
"termios",
".",
"tcsetattr",
"(",
"sys",
".",
"stdin",
",",
"termios",
".",
"TCSADRAIN",
",",
"old_settings",
")"
] | https://github.com/IntelRealSense/librealsense/blob/c94410a420b74e5fb6a414bd12215c05ddd82b69/tools/rs-imu-calibration/rs-imu-calibration.py#L547-L755 | ||
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/distribute/sharded_variable.py | python | ShardedVariableMixin.scatter_sub | (self, sparse_delta, use_locking=False, name=None) | return self | Implements tf.Variable.scatter_sub. | Implements tf.Variable.scatter_sub. | [
"Implements",
"tf",
".",
"Variable",
".",
"scatter_sub",
"."
] | def scatter_sub(self, sparse_delta, use_locking=False, name=None):
"""Implements tf.Variable.scatter_sub."""
per_var_sparse_delta = self._decompose_indexed_slices(sparse_delta)
for i, v in enumerate(self._variables):
new_name = None
if name is not None:
new_name = '{}/part_{}'.format(name, i)
v.scatter_sub(per_var_sparse_delta[i], name=new_name)
return self | [
"def",
"scatter_sub",
"(",
"self",
",",
"sparse_delta",
",",
"use_locking",
"=",
"False",
",",
"name",
"=",
"None",
")",
":",
"per_var_sparse_delta",
"=",
"self",
".",
"_decompose_indexed_slices",
"(",
"sparse_delta",
")",
"for",
"i",
",",
"v",
"in",
"enumerate",
"(",
"self",
".",
"_variables",
")",
":",
"new_name",
"=",
"None",
"if",
"name",
"is",
"not",
"None",
":",
"new_name",
"=",
"'{}/part_{}'",
".",
"format",
"(",
"name",
",",
"i",
")",
"v",
".",
"scatter_sub",
"(",
"per_var_sparse_delta",
"[",
"i",
"]",
",",
"name",
"=",
"new_name",
")",
"return",
"self"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/distribute/sharded_variable.py#L657-L665 | |
perilouswithadollarsign/cstrike15_src | f82112a2388b841d72cb62ca48ab1846dfcc11c8 | thirdparty/protobuf-2.5.0/python/mox.py | python | MockAnything._Verify | (self) | Verify that all of the expected calls have been made.
Raises:
ExpectedMethodCallsError: if there are still more method calls in the
expected queue. | Verify that all of the expected calls have been made. | [
"Verify",
"that",
"all",
"of",
"the",
"expected",
"calls",
"have",
"been",
"made",
"."
] | def _Verify(self):
"""Verify that all of the expected calls have been made.
Raises:
ExpectedMethodCallsError: if there are still more method calls in the
expected queue.
"""
# If the list of expected calls is not empty, raise an exception
if self._expected_calls_queue:
# The last MultipleTimesGroup is not popped from the queue.
if (len(self._expected_calls_queue) == 1 and
isinstance(self._expected_calls_queue[0], MultipleTimesGroup) and
self._expected_calls_queue[0].IsSatisfied()):
pass
else:
raise ExpectedMethodCallsError(self._expected_calls_queue) | [
"def",
"_Verify",
"(",
"self",
")",
":",
"# If the list of expected calls is not empty, raise an exception",
"if",
"self",
".",
"_expected_calls_queue",
":",
"# The last MultipleTimesGroup is not popped from the queue.",
"if",
"(",
"len",
"(",
"self",
".",
"_expected_calls_queue",
")",
"==",
"1",
"and",
"isinstance",
"(",
"self",
".",
"_expected_calls_queue",
"[",
"0",
"]",
",",
"MultipleTimesGroup",
")",
"and",
"self",
".",
"_expected_calls_queue",
"[",
"0",
"]",
".",
"IsSatisfied",
"(",
")",
")",
":",
"pass",
"else",
":",
"raise",
"ExpectedMethodCallsError",
"(",
"self",
".",
"_expected_calls_queue",
")"
] | https://github.com/perilouswithadollarsign/cstrike15_src/blob/f82112a2388b841d72cb62ca48ab1846dfcc11c8/thirdparty/protobuf-2.5.0/python/mox.py#L331-L347 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | contrib/gizmos/msw/gizmos.py | python | TreeListCtrl.Delete | (*args, **kwargs) | return _gizmos.TreeListCtrl_Delete(*args, **kwargs) | Delete(self, TreeItemId item) | Delete(self, TreeItemId item) | [
"Delete",
"(",
"self",
"TreeItemId",
"item",
")"
] | def Delete(*args, **kwargs):
"""Delete(self, TreeItemId item)"""
return _gizmos.TreeListCtrl_Delete(*args, **kwargs) | [
"def",
"Delete",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_gizmos",
".",
"TreeListCtrl_Delete",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/contrib/gizmos/msw/gizmos.py#L858-L860 | |
eclipse/sumo | 7132a9b8b6eea734bdec38479026b4d8c4336d03 | tools/import/vissim/convert_vissimXML_flows_statRoutes.py | python | parse_vehicle_types | (xmldoc, acc_d, length_d) | return veh_type_d | parses the vehicle types from the VISSIM data
:param xmldoc: input VISSIM xml
:type xmldoc: xml.dom.minidom.Document
:return: relevant VISSIM vehicle type data
:rtype: dict of dict | parses the vehicle types from the VISSIM data
:param xmldoc: input VISSIM xml
:type xmldoc: xml.dom.minidom.Document
:return: relevant VISSIM vehicle type data
:rtype: dict of dict | [
"parses",
"the",
"vehicle",
"types",
"from",
"the",
"VISSIM",
"data",
":",
"param",
"xmldoc",
":",
"input",
"VISSIM",
"xml",
":",
"type",
"xmldoc",
":",
"xml",
".",
"dom",
".",
"minidom",
".",
"Document",
":",
"return",
":",
"relevant",
"VISSIM",
"vehicle",
"type",
"data",
":",
"rtype",
":",
"dict",
"of",
"dict"
] | def parse_vehicle_types(xmldoc, acc_d, length_d):
"""parses the vehicle types from the VISSIM data
:param xmldoc: input VISSIM xml
:type xmldoc: xml.dom.minidom.Document
:return: relevant VISSIM vehicle type data
:rtype: dict of dict
"""
veh_type_d = dict()
for veh_type in xmldoc.getElementsByTagName('vehicleType'):
type_d = {
'id': veh_type.getAttribute('no'),
'length': length_d[veh_type.getAttribute('model2D3DDistr')],
'acc': acc_d[veh_type.getAttribute('maxAccelFunc')],
}
veh_type_d[veh_type.getAttribute('no')] = type_d
return veh_type_d | [
"def",
"parse_vehicle_types",
"(",
"xmldoc",
",",
"acc_d",
",",
"length_d",
")",
":",
"veh_type_d",
"=",
"dict",
"(",
")",
"for",
"veh_type",
"in",
"xmldoc",
".",
"getElementsByTagName",
"(",
"'vehicleType'",
")",
":",
"type_d",
"=",
"{",
"'id'",
":",
"veh_type",
".",
"getAttribute",
"(",
"'no'",
")",
",",
"'length'",
":",
"length_d",
"[",
"veh_type",
".",
"getAttribute",
"(",
"'model2D3DDistr'",
")",
"]",
",",
"'acc'",
":",
"acc_d",
"[",
"veh_type",
".",
"getAttribute",
"(",
"'maxAccelFunc'",
")",
"]",
",",
"}",
"veh_type_d",
"[",
"veh_type",
".",
"getAttribute",
"(",
"'no'",
")",
"]",
"=",
"type_d",
"return",
"veh_type_d"
] | https://github.com/eclipse/sumo/blob/7132a9b8b6eea734bdec38479026b4d8c4336d03/tools/import/vissim/convert_vissimXML_flows_statRoutes.py#L169-L184 | |
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | deps/src/libxml2-2.9.1/python/libxml2class.py | python | xmlTextReader.Name | (self) | return ret | The qualified name of the node, equal to Prefix :LocalName. | The qualified name of the node, equal to Prefix :LocalName. | [
"The",
"qualified",
"name",
"of",
"the",
"node",
"equal",
"to",
"Prefix",
":",
"LocalName",
"."
] | def Name(self):
"""The qualified name of the node, equal to Prefix :LocalName. """
ret = libxml2mod.xmlTextReaderConstName(self._o)
return ret | [
"def",
"Name",
"(",
"self",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlTextReaderConstName",
"(",
"self",
".",
"_o",
")",
"return",
"ret"
] | https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2class.py#L5934-L5937 | |
panda3d/panda3d | 833ad89ebad58395d0af0b7ec08538e5e4308265 | direct/src/gui/DirectEntry.py | python | DirectEntry.get | (self, plain = False) | Returns the text currently showing in the typable region.
If plain is True, the returned text will not include any
formatting characters like nested color-change codes. | Returns the text currently showing in the typable region.
If plain is True, the returned text will not include any
formatting characters like nested color-change codes. | [
"Returns",
"the",
"text",
"currently",
"showing",
"in",
"the",
"typable",
"region",
".",
"If",
"plain",
"is",
"True",
"the",
"returned",
"text",
"will",
"not",
"include",
"any",
"formatting",
"characters",
"like",
"nested",
"color",
"-",
"change",
"codes",
"."
] | def get(self, plain = False):
""" Returns the text currently showing in the typable region.
If plain is True, the returned text will not include any
formatting characters like nested color-change codes. """
wantWide = self.unicodeText or self.guiItem.isWtext()
if not self.directWtext.getValue():
# If the user has configured wide-text off, then always
# return an 8-bit string. This will be encoded if
# necessary, according to Panda's default encoding.
wantWide = False
if plain:
if wantWide:
return self.guiItem.getPlainWtext()
else:
return self.guiItem.getPlainText()
else:
if wantWide:
return self.guiItem.getWtext()
else:
return self.guiItem.getText() | [
"def",
"get",
"(",
"self",
",",
"plain",
"=",
"False",
")",
":",
"wantWide",
"=",
"self",
".",
"unicodeText",
"or",
"self",
".",
"guiItem",
".",
"isWtext",
"(",
")",
"if",
"not",
"self",
".",
"directWtext",
".",
"getValue",
"(",
")",
":",
"# If the user has configured wide-text off, then always",
"# return an 8-bit string. This will be encoded if",
"# necessary, according to Panda's default encoding.",
"wantWide",
"=",
"False",
"if",
"plain",
":",
"if",
"wantWide",
":",
"return",
"self",
".",
"guiItem",
".",
"getPlainWtext",
"(",
")",
"else",
":",
"return",
"self",
".",
"guiItem",
".",
"getPlainText",
"(",
")",
"else",
":",
"if",
"wantWide",
":",
"return",
"self",
".",
"guiItem",
".",
"getWtext",
"(",
")",
"else",
":",
"return",
"self",
".",
"guiItem",
".",
"getText",
"(",
")"
] | https://github.com/panda3d/panda3d/blob/833ad89ebad58395d0af0b7ec08538e5e4308265/direct/src/gui/DirectEntry.py#L280-L301 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/dataview.py | python | PyDataViewIndexListModel.__init__ | (self, *args, **kwargs) | __init__(self, unsigned int initial_size=0) -> PyDataViewIndexListModel | __init__(self, unsigned int initial_size=0) -> PyDataViewIndexListModel | [
"__init__",
"(",
"self",
"unsigned",
"int",
"initial_size",
"=",
"0",
")",
"-",
">",
"PyDataViewIndexListModel"
] | def __init__(self, *args, **kwargs):
"""__init__(self, unsigned int initial_size=0) -> PyDataViewIndexListModel"""
_dataview.PyDataViewIndexListModel_swiginit(self,_dataview.new_PyDataViewIndexListModel(*args, **kwargs))
PyDataViewIndexListModel._setCallbackInfo(self, self, PyDataViewIndexListModel) | [
"def",
"__init__",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"_dataview",
".",
"PyDataViewIndexListModel_swiginit",
"(",
"self",
",",
"_dataview",
".",
"new_PyDataViewIndexListModel",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
")",
"PyDataViewIndexListModel",
".",
"_setCallbackInfo",
"(",
"self",
",",
"self",
",",
"PyDataViewIndexListModel",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/dataview.py#L927-L930 | ||
networkit/networkit | 695b7a786a894a303fa8587597d5ef916e797729 | networkit/coloring.py | python | SpectralColoring.run | (self) | run()
Main method of SpectralColoring. This computes a valid coloring. | run() | [
"run",
"()"
] | def run(self):
"""
run()
Main method of SpectralColoring. This computes a valid coloring.
"""
self.prepareSpectrum()
self.colors = {0 : set(self.graph.iterNodes())}
self.nextColor = 1
self.split(0)
self.buildReverseDict() | [
"def",
"run",
"(",
"self",
")",
":",
"self",
".",
"prepareSpectrum",
"(",
")",
"self",
".",
"colors",
"=",
"{",
"0",
":",
"set",
"(",
"self",
".",
"graph",
".",
"iterNodes",
"(",
")",
")",
"}",
"self",
".",
"nextColor",
"=",
"1",
"self",
".",
"split",
"(",
"0",
")",
"self",
".",
"buildReverseDict",
"(",
")"
] | https://github.com/networkit/networkit/blob/695b7a786a894a303fa8587597d5ef916e797729/networkit/coloring.py#L101-L113 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/_misc.py | python | DROP_ICON | (filename) | Returns either a `wx.Cursor` or `wx.Icon` created from the image file
``filename``. This function is useful with the `wx.DropSource` class
which, depending on platform accepts either an icon or a cursor. | Returns either a `wx.Cursor` or `wx.Icon` created from the image file
``filename``. This function is useful with the `wx.DropSource` class
which, depending on platform accepts either an icon or a cursor. | [
"Returns",
"either",
"a",
"wx",
".",
"Cursor",
"or",
"wx",
".",
"Icon",
"created",
"from",
"the",
"image",
"file",
"filename",
".",
"This",
"function",
"is",
"useful",
"with",
"the",
"wx",
".",
"DropSource",
"class",
"which",
"depending",
"on",
"platform",
"accepts",
"either",
"an",
"icon",
"or",
"a",
"cursor",
"."
] | def DROP_ICON(filename):
"""
Returns either a `wx.Cursor` or `wx.Icon` created from the image file
``filename``. This function is useful with the `wx.DropSource` class
which, depending on platform accepts either an icon or a cursor.
"""
img = wx.Image(filename)
if wx.Platform == '__WXGTK__':
return wx.IconFromBitmap(wx.BitmapFromImage(img))
else:
return wx.CursorFromImage(img) | [
"def",
"DROP_ICON",
"(",
"filename",
")",
":",
"img",
"=",
"wx",
".",
"Image",
"(",
"filename",
")",
"if",
"wx",
".",
"Platform",
"==",
"'__WXGTK__'",
":",
"return",
"wx",
".",
"IconFromBitmap",
"(",
"wx",
".",
"BitmapFromImage",
"(",
"img",
")",
")",
"else",
":",
"return",
"wx",
".",
"CursorFromImage",
"(",
"img",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_misc.py#L5532-L5542 | ||
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/basic_fitting/basic_fitting_presenter.py | python | BasicFittingPresenter._get_single_fit_functions_from_view | (self) | return [None] * self.view.number_of_datasets() | Returns the fit functions corresponding to each domain as a list. | Returns the fit functions corresponding to each domain as a list. | [
"Returns",
"the",
"fit",
"functions",
"corresponding",
"to",
"each",
"domain",
"as",
"a",
"list",
"."
] | def _get_single_fit_functions_from_view(self) -> list:
"""Returns the fit functions corresponding to each domain as a list."""
if self.view.fit_object:
if isinstance(self.view.fit_object, MultiDomainFunction):
return [function.clone() for function in self.view.fit_object.createEquivalentFunctions()]
return [self.view.fit_object]
return [None] * self.view.number_of_datasets() | [
"def",
"_get_single_fit_functions_from_view",
"(",
"self",
")",
"->",
"list",
":",
"if",
"self",
".",
"view",
".",
"fit_object",
":",
"if",
"isinstance",
"(",
"self",
".",
"view",
".",
"fit_object",
",",
"MultiDomainFunction",
")",
":",
"return",
"[",
"function",
".",
"clone",
"(",
")",
"for",
"function",
"in",
"self",
".",
"view",
".",
"fit_object",
".",
"createEquivalentFunctions",
"(",
")",
"]",
"return",
"[",
"self",
".",
"view",
".",
"fit_object",
"]",
"return",
"[",
"None",
"]",
"*",
"self",
".",
"view",
".",
"number_of_datasets",
"(",
")"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/basic_fitting/basic_fitting_presenter.py#L472-L478 | |
bcrusco/Forward-Plus-Renderer | 1f130f1ae58882f651d94695823044f9833cfa30 | Forward-Plus/Forward-Plus/external/assimp-3.1.1/port/PyAssimp/pyassimp/helper.py | python | transform | (vector3, matrix4x4) | return numpy.dot(matrix4x4, numpy.append(vector3, 1.)) | Apply a transformation matrix on a 3D vector.
:param vector3: a numpy array with 3 elements
:param matrix4x4: a numpy 4x4 matrix | Apply a transformation matrix on a 3D vector. | [
"Apply",
"a",
"transformation",
"matrix",
"on",
"a",
"3D",
"vector",
"."
] | def transform(vector3, matrix4x4):
""" Apply a transformation matrix on a 3D vector.
:param vector3: a numpy array with 3 elements
:param matrix4x4: a numpy 4x4 matrix
"""
return numpy.dot(matrix4x4, numpy.append(vector3, 1.)) | [
"def",
"transform",
"(",
"vector3",
",",
"matrix4x4",
")",
":",
"return",
"numpy",
".",
"dot",
"(",
"matrix4x4",
",",
"numpy",
".",
"append",
"(",
"vector3",
",",
"1.",
")",
")"
] | https://github.com/bcrusco/Forward-Plus-Renderer/blob/1f130f1ae58882f651d94695823044f9833cfa30/Forward-Plus/Forward-Plus/external/assimp-3.1.1/port/PyAssimp/pyassimp/helper.py#L45-L51 | |
ApolloAuto/apollo-platform | 86d9dc6743b496ead18d597748ebabd34a513289 | ros/third_party/lib_x86_64/python2.7/dist-packages/geodesy/utm.py | python | UTMPoint.valid | (self) | return (self.easting == self.easting
and self.northing == self.northing
and self.band != ' ') | :returns: True if this is a valid UTM point. | :returns: True if this is a valid UTM point. | [
":",
"returns",
":",
"True",
"if",
"this",
"is",
"a",
"valid",
"UTM",
"point",
"."
] | def valid(self):
""":returns: True if this is a valid UTM point. """
return (self.easting == self.easting
and self.northing == self.northing
and self.band != ' ') | [
"def",
"valid",
"(",
"self",
")",
":",
"return",
"(",
"self",
".",
"easting",
"==",
"self",
".",
"easting",
"and",
"self",
".",
"northing",
"==",
"self",
".",
"northing",
"and",
"self",
".",
"band",
"!=",
"' '",
")"
] | https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_x86_64/python2.7/dist-packages/geodesy/utm.py#L125-L129 | |
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | tools/grit/grit/format/policy_templates/writers/admx_writer.py | python | ADMXWriter._AddMainPolicy | (self, parent) | Generates ADMX elements for a Main-Policy amd adds them to the
passed parent element. | Generates ADMX elements for a Main-Policy amd adds them to the
passed parent element. | [
"Generates",
"ADMX",
"elements",
"for",
"a",
"Main",
"-",
"Policy",
"amd",
"adds",
"them",
"to",
"the",
"passed",
"parent",
"element",
"."
] | def _AddMainPolicy(self, parent):
'''Generates ADMX elements for a Main-Policy amd adds them to the
passed parent element.
'''
enabled_value_elem = self.AddElement(parent, 'enabledValue');
self.AddElement(enabled_value_elem, 'decimal', {'value': '1'})
disabled_value_elem = self.AddElement(parent, 'disabledValue');
self.AddElement(disabled_value_elem, 'decimal', {'value': '0'}) | [
"def",
"_AddMainPolicy",
"(",
"self",
",",
"parent",
")",
":",
"enabled_value_elem",
"=",
"self",
".",
"AddElement",
"(",
"parent",
",",
"'enabledValue'",
")",
"self",
".",
"AddElement",
"(",
"enabled_value_elem",
",",
"'decimal'",
",",
"{",
"'value'",
":",
"'1'",
"}",
")",
"disabled_value_elem",
"=",
"self",
".",
"AddElement",
"(",
"parent",
",",
"'disabledValue'",
")",
"self",
".",
"AddElement",
"(",
"disabled_value_elem",
",",
"'decimal'",
",",
"{",
"'value'",
":",
"'0'",
"}",
")"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/grit/grit/format/policy_templates/writers/admx_writer.py#L220-L227 | ||
lammps/lammps | b75c3065430a75b1b5543a10e10f46d9b4c91913 | tools/polybond/lmpsdata.py | python | booleanarray.setelement | (self,rownum, colnum, value) | Assigns value to the list of lists (array) element at rownum and colnum. | Assigns value to the list of lists (array) element at rownum and colnum. | [
"Assigns",
"value",
"to",
"the",
"list",
"of",
"lists",
"(",
"array",
")",
"element",
"at",
"rownum",
"and",
"colnum",
"."
] | def setelement(self,rownum, colnum, value):
"""Assigns value to the list of lists (array) element at rownum and colnum."""
self.array[rownum][colnum]=value | [
"def",
"setelement",
"(",
"self",
",",
"rownum",
",",
"colnum",
",",
"value",
")",
":",
"self",
".",
"array",
"[",
"rownum",
"]",
"[",
"colnum",
"]",
"=",
"value"
] | https://github.com/lammps/lammps/blob/b75c3065430a75b1b5543a10e10f46d9b4c91913/tools/polybond/lmpsdata.py#L898-L900 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python3/src/Lib/logging/handlers.py | python | BaseRotatingHandler.emit | (self, record) | Emit a record.
Output the record to the file, catering for rollover as described
in doRollover(). | Emit a record. | [
"Emit",
"a",
"record",
"."
] | def emit(self, record):
"""
Emit a record.
Output the record to the file, catering for rollover as described
in doRollover().
"""
try:
if self.shouldRollover(record):
self.doRollover()
logging.FileHandler.emit(self, record)
except Exception:
self.handleError(record) | [
"def",
"emit",
"(",
"self",
",",
"record",
")",
":",
"try",
":",
"if",
"self",
".",
"shouldRollover",
"(",
"record",
")",
":",
"self",
".",
"doRollover",
"(",
")",
"logging",
".",
"FileHandler",
".",
"emit",
"(",
"self",
",",
"record",
")",
"except",
"Exception",
":",
"self",
".",
"handleError",
"(",
"record",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/logging/handlers.py#L65-L77 | ||
ceph/ceph | 959663007321a369c83218414a29bd9dbc8bda3a | src/pybind/mgr/mgr_module.py | python | MgrModule.mon_command | (self, cmd_dict: dict, inbuf: Optional[str] = None) | return r | Helper for modules that do simple, synchronous mon command
execution.
See send_command for general case.
:return: status int, out std, err str | Helper for modules that do simple, synchronous mon command
execution. | [
"Helper",
"for",
"modules",
"that",
"do",
"simple",
"synchronous",
"mon",
"command",
"execution",
"."
] | def mon_command(self, cmd_dict: dict, inbuf: Optional[str] = None) -> Tuple[int, str, str]:
"""
Helper for modules that do simple, synchronous mon command
execution.
See send_command for general case.
:return: status int, out std, err str
"""
t1 = time.time()
result = CommandResult()
self.send_command(result, "mon", "", json.dumps(cmd_dict), "", inbuf)
r = result.wait()
t2 = time.time()
self.log.debug("mon_command: '{0}' -> {1} in {2:.3f}s".format(
cmd_dict['prefix'], r[0], t2 - t1
))
return r | [
"def",
"mon_command",
"(",
"self",
",",
"cmd_dict",
":",
"dict",
",",
"inbuf",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
")",
"->",
"Tuple",
"[",
"int",
",",
"str",
",",
"str",
"]",
":",
"t1",
"=",
"time",
".",
"time",
"(",
")",
"result",
"=",
"CommandResult",
"(",
")",
"self",
".",
"send_command",
"(",
"result",
",",
"\"mon\"",
",",
"\"\"",
",",
"json",
".",
"dumps",
"(",
"cmd_dict",
")",
",",
"\"\"",
",",
"inbuf",
")",
"r",
"=",
"result",
".",
"wait",
"(",
")",
"t2",
"=",
"time",
".",
"time",
"(",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"mon_command: '{0}' -> {1} in {2:.3f}s\"",
".",
"format",
"(",
"cmd_dict",
"[",
"'prefix'",
"]",
",",
"r",
"[",
"0",
"]",
",",
"t2",
"-",
"t1",
")",
")",
"return",
"r"
] | https://github.com/ceph/ceph/blob/959663007321a369c83218414a29bd9dbc8bda3a/src/pybind/mgr/mgr_module.py#L1565-L1585 | |
weolar/miniblink49 | 1c4678db0594a4abde23d3ebbcc7cd13c3170777 | third_party/skia/tools/copyright/fileparser.py | python | CParser.CreateCopyrightBlock | (self, year, holder) | return self.COPYRIGHT_BLOCK_FORMAT % (year, holder) | Returns a copyright block suitable for this language, with the
given attributes.
@param year year in which to hold copyright (defaults to DEFAULT_YEAR)
@param holder holder of copyright (defaults to DEFAULT_HOLDER) | Returns a copyright block suitable for this language, with the
given attributes. | [
"Returns",
"a",
"copyright",
"block",
"suitable",
"for",
"this",
"language",
"with",
"the",
"given",
"attributes",
"."
] | def CreateCopyrightBlock(self, year, holder):
"""Returns a copyright block suitable for this language, with the
given attributes.
@param year year in which to hold copyright (defaults to DEFAULT_YEAR)
@param holder holder of copyright (defaults to DEFAULT_HOLDER)
"""
if not year:
year = self.DEFAULT_YEAR
if not holder:
holder = self.DEFAULT_HOLDER
return self.COPYRIGHT_BLOCK_FORMAT % (year, holder) | [
"def",
"CreateCopyrightBlock",
"(",
"self",
",",
"year",
",",
"holder",
")",
":",
"if",
"not",
"year",
":",
"year",
"=",
"self",
".",
"DEFAULT_YEAR",
"if",
"not",
"holder",
":",
"holder",
"=",
"self",
".",
"DEFAULT_HOLDER",
"return",
"self",
".",
"COPYRIGHT_BLOCK_FORMAT",
"%",
"(",
"year",
",",
"holder",
")"
] | https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/third_party/skia/tools/copyright/fileparser.py#L81-L92 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/numpy/py2/numpy/lib/format.py | python | dtype_to_descr | (dtype) | Get a serializable descriptor from the dtype.
The .descr attribute of a dtype object cannot be round-tripped through
the dtype() constructor. Simple types, like dtype('float32'), have
a descr which looks like a record array with one field with '' as
a name. The dtype() constructor interprets this as a request to give
a default name. Instead, we construct descriptor that can be passed to
dtype().
Parameters
----------
dtype : dtype
The dtype of the array that will be written to disk.
Returns
-------
descr : object
An object that can be passed to `numpy.dtype()` in order to
replicate the input dtype. | Get a serializable descriptor from the dtype. | [
"Get",
"a",
"serializable",
"descriptor",
"from",
"the",
"dtype",
"."
] | def dtype_to_descr(dtype):
"""
Get a serializable descriptor from the dtype.
The .descr attribute of a dtype object cannot be round-tripped through
the dtype() constructor. Simple types, like dtype('float32'), have
a descr which looks like a record array with one field with '' as
a name. The dtype() constructor interprets this as a request to give
a default name. Instead, we construct descriptor that can be passed to
dtype().
Parameters
----------
dtype : dtype
The dtype of the array that will be written to disk.
Returns
-------
descr : object
An object that can be passed to `numpy.dtype()` in order to
replicate the input dtype.
"""
if dtype.names is not None:
# This is a record array. The .descr is fine. XXX: parts of the
# record array with an empty name, like padding bytes, still get
# fiddled with. This needs to be fixed in the C implementation of
# dtype().
return dtype.descr
else:
return dtype.str | [
"def",
"dtype_to_descr",
"(",
"dtype",
")",
":",
"if",
"dtype",
".",
"names",
"is",
"not",
"None",
":",
"# This is a record array. The .descr is fine. XXX: parts of the",
"# record array with an empty name, like padding bytes, still get",
"# fiddled with. This needs to be fixed in the C implementation of",
"# dtype().",
"return",
"dtype",
".",
"descr",
"else",
":",
"return",
"dtype",
".",
"str"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py2/numpy/lib/format.py#L230-L260 | ||
stitchEm/stitchEm | 0f399501d41ab77933677f2907f41f80ceb704d7 | lib/bindings/samples/server/debug/debug_api.py | python | DebugAPI.get_status | (self, parameters=None) | return status | Get the debugging result.
result={
"inputs": {"latency": "RTMP latency"},
"streaming": {"latency": "RTMP latency"},
"preview": {"latency": "RTMP latency"},
"profiling": {
"fps": "frame rate",
"cpu": "CPU usage",
"gpu": "GPU usage",
"enc": "NVENC usage"
}
} | Get the debugging result. | [
"Get",
"the",
"debugging",
"result",
"."
] | def get_status(self, parameters=None):
"""Get the debugging result.
result={
"inputs": {"latency": "RTMP latency"},
"streaming": {"latency": "RTMP latency"},
"preview": {"latency": "RTMP latency"},
"profiling": {
"fps": "frame rate",
"cpu": "CPU usage",
"gpu": "GPU usage",
"enc": "NVENC usage"
}
}
"""
input_status = { "latency" : self.project_manager.get_latency() }
streaming_status = { "latency" : self.output_manager.get_output("stream").get_latency() }
preview_status = { "latency" : self.output_manager.get_output("preview").get_latency() }
profiling_status = self.output_manager.get_output("profiling").get_statistics()
status = {"inputs": input_status,
"streaming": streaming_status,
"preview": preview_status,
"profiling": profiling_status}
return status | [
"def",
"get_status",
"(",
"self",
",",
"parameters",
"=",
"None",
")",
":",
"input_status",
"=",
"{",
"\"latency\"",
":",
"self",
".",
"project_manager",
".",
"get_latency",
"(",
")",
"}",
"streaming_status",
"=",
"{",
"\"latency\"",
":",
"self",
".",
"output_manager",
".",
"get_output",
"(",
"\"stream\"",
")",
".",
"get_latency",
"(",
")",
"}",
"preview_status",
"=",
"{",
"\"latency\"",
":",
"self",
".",
"output_manager",
".",
"get_output",
"(",
"\"preview\"",
")",
".",
"get_latency",
"(",
")",
"}",
"profiling_status",
"=",
"self",
".",
"output_manager",
".",
"get_output",
"(",
"\"profiling\"",
")",
".",
"get_statistics",
"(",
")",
"status",
"=",
"{",
"\"inputs\"",
":",
"input_status",
",",
"\"streaming\"",
":",
"streaming_status",
",",
"\"preview\"",
":",
"preview_status",
",",
"\"profiling\"",
":",
"profiling_status",
"}",
"return",
"status"
] | https://github.com/stitchEm/stitchEm/blob/0f399501d41ab77933677f2907f41f80ceb704d7/lib/bindings/samples/server/debug/debug_api.py#L203-L227 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/html5lib/html5parser.py | python | parse | (doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs) | return p.parse(doc, **kwargs) | Parse an HTML document as a string or file-like object into a tree
:arg doc: the document to parse as a string or file-like object
:arg treebuilder: the treebuilder to use when parsing
:arg namespaceHTMLElements: whether or not to namespace HTML elements
:returns: parsed tree
Example:
>>> from html5lib.html5parser import parse
>>> parse('<html><body><p>This is a doc</p></body></html>')
<Element u'{http://www.w3.org/1999/xhtml}html' at 0x7feac4909db0> | Parse an HTML document as a string or file-like object into a tree | [
"Parse",
"an",
"HTML",
"document",
"as",
"a",
"string",
"or",
"file",
"-",
"like",
"object",
"into",
"a",
"tree"
] | def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs):
"""Parse an HTML document as a string or file-like object into a tree
:arg doc: the document to parse as a string or file-like object
:arg treebuilder: the treebuilder to use when parsing
:arg namespaceHTMLElements: whether or not to namespace HTML elements
:returns: parsed tree
Example:
>>> from html5lib.html5parser import parse
>>> parse('<html><body><p>This is a doc</p></body></html>')
<Element u'{http://www.w3.org/1999/xhtml}html' at 0x7feac4909db0>
"""
tb = treebuilders.getTreeBuilder(treebuilder)
p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)
return p.parse(doc, **kwargs) | [
"def",
"parse",
"(",
"doc",
",",
"treebuilder",
"=",
"\"etree\"",
",",
"namespaceHTMLElements",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"tb",
"=",
"treebuilders",
".",
"getTreeBuilder",
"(",
"treebuilder",
")",
"p",
"=",
"HTMLParser",
"(",
"tb",
",",
"namespaceHTMLElements",
"=",
"namespaceHTMLElements",
")",
"return",
"p",
".",
"parse",
"(",
"doc",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/html5lib/html5parser.py#L26-L46 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemFramework/v1/AWS/lambda-code/ServiceLambda/resource_types/cognito/user_pool.py | python | get_provider_name | (user_pool_id) | return beginning + region + middle + user_pool_id | Gets the provider name for a user pool. This is needed for creating and updating identity pools.
:param str user_pool_id: The User pool ID, are of the form: us-east-1_123456789
:return: the Provider name. Provider names are of the form: cognito-idp.us-east-1.amazonaws.com/us-east-1_123456789 | Gets the provider name for a user pool. This is needed for creating and updating identity pools.
:param str user_pool_id: The User pool ID, are of the form: us-east-1_123456789
:return: the Provider name. Provider names are of the form: cognito-idp.us-east-1.amazonaws.com/us-east-1_123456789 | [
"Gets",
"the",
"provider",
"name",
"for",
"a",
"user",
"pool",
".",
"This",
"is",
"needed",
"for",
"creating",
"and",
"updating",
"identity",
"pools",
".",
":",
"param",
"str",
"user_pool_id",
":",
"The",
"User",
"pool",
"ID",
"are",
"of",
"the",
"form",
":",
"us",
"-",
"east",
"-",
"1_123456789",
":",
"return",
":",
"the",
"Provider",
"name",
".",
"Provider",
"names",
"are",
"of",
"the",
"form",
":",
"cognito",
"-",
"idp",
".",
"us",
"-",
"east",
"-",
"1",
".",
"amazonaws",
".",
"com",
"/",
"us",
"-",
"east",
"-",
"1_123456789"
] | def get_provider_name(user_pool_id):
"""
Gets the provider name for a user pool. This is needed for creating and updating identity pools.
:param str user_pool_id: The User pool ID, are of the form: us-east-1_123456789
:return: the Provider name. Provider names are of the form: cognito-idp.us-east-1.amazonaws.com/us-east-1_123456789
"""
beginning = "cognito-idp."
middle = ".amazonaws.com/"
region_size = user_pool_id.find("_") # Get the region from the first part of the Pool ID
region = ""
if region_size >= 0:
region = user_pool_id[0: region_size]
return beginning + region + middle + user_pool_id | [
"def",
"get_provider_name",
"(",
"user_pool_id",
")",
":",
"beginning",
"=",
"\"cognito-idp.\"",
"middle",
"=",
"\".amazonaws.com/\"",
"region_size",
"=",
"user_pool_id",
".",
"find",
"(",
"\"_\"",
")",
"# Get the region from the first part of the Pool ID",
"region",
"=",
"\"\"",
"if",
"region_size",
">=",
"0",
":",
"region",
"=",
"user_pool_id",
"[",
"0",
":",
"region_size",
"]",
"return",
"beginning",
"+",
"region",
"+",
"middle",
"+",
"user_pool_id"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemFramework/v1/AWS/lambda-code/ServiceLambda/resource_types/cognito/user_pool.py#L53-L66 | |
hpi-xnor/BMXNet | ed0b201da6667887222b8e4b5f997c4f6b61943d | tools/caffe_converter/convert_symbol.py | python | _convert_conv_param | (param) | return param_string | Convert convolution layer parameter from Caffe to MXNet | Convert convolution layer parameter from Caffe to MXNet | [
"Convert",
"convolution",
"layer",
"parameter",
"from",
"Caffe",
"to",
"MXNet"
def _convert_conv_param(param):
    """
    Convert convolution layer parameter from Caffe to MXNet

    Builds the keyword-argument string (e.g. "num_filter=64, pad=(1, 1), ...")
    used when emitting the MXNet symbol for a Caffe Convolution layer.

    :param param: Caffe ConvolutionParameter protobuf message
    :return: comma-separated MXNet parameter string
    """
    param_string = "num_filter=%d" % param.num_output
    pad_w = 0
    pad_h = 0
    # Caffe encodes padding one of three ways: a scalar 'pad', a repeated
    # 'pad' list (first entry used), or separate 'pad_w'/'pad_h' fields.
    if isinstance(param.pad, int):
        pad = param.pad
        param_string += ", pad=(%d, %d)" % (pad, pad)
    else:
        if len(param.pad) > 0:
            pad = param.pad[0]
            param_string += ", pad=(%d, %d)" % (pad, pad)
        else:
            if isinstance(param.pad_w, int):
                pad_w = param.pad_w
            if isinstance(param.pad_h, int):
                pad_h = param.pad_h
            param_string += ", pad=(%d, %d)" % (pad_h, pad_w)
    # Kernel size follows the same three-way encoding as padding.
    if isinstance(param.kernel_size, int):
        kernel_size = param.kernel_size
        param_string += ", kernel=(%d,%d)" % (kernel_size, kernel_size)
    else:
        if len(param.kernel_size) > 0:
            kernel_size = param.kernel_size[0]
            param_string += ", kernel=(%d,%d)" % (kernel_size, kernel_size)
        else:
            assert isinstance(param.kernel_w, int)
            kernel_w = param.kernel_w
            assert isinstance(param.kernel_h, int)
            kernel_h = param.kernel_h
            param_string += ", kernel=(%d,%d)" % (kernel_h, kernel_w)
    stride = 1
    if isinstance(param.stride, int):
        stride = param.stride
    else:
        # Repeated field: empty means Caffe's default stride of 1.
        stride = 1 if len(param.stride) == 0 else param.stride[0]
    param_string += ", stride=(%d,%d)" % (stride, stride)
    dilate = 1
    # Deconvolution params have no 'dilation' attribute, hence the hasattr guard.
    if hasattr(param, 'dilation'):
        if isinstance(param.dilation, int):
            dilate = param.dilation
        else:
            dilate = 1 if len(param.dilation) == 0 else param.dilation[0]
    param_string += ", no_bias=%s" % (not param.bias_term)
    # deal with dilation. Won't be in deconvolution
    if dilate > 1:
        param_string += ", dilate=(%d, %d)" % (dilate, dilate)
    # num_group is only emitted when it differs from the default of 1.
    if isinstance(param.group, int):
        if param.group != 1:
            param_string += ", num_group=%d" % param.group
    return param_string
"def",
"_convert_conv_param",
"(",
"param",
")",
":",
"param_string",
"=",
"\"num_filter=%d\"",
"%",
"param",
".",
"num_output",
"pad_w",
"=",
"0",
"pad_h",
"=",
"0",
"if",
"isinstance",
"(",
"param",
".",
"pad",
",",
"int",
")",
":",
"pad",
"=",
"param",
".",
"pad",
"param_string",
"+=",
"\", pad=(%d, %d)\"",
"%",
"(",
"pad",
",",
"pad",
")",
"else",
":",
"if",
"len",
"(",
"param",
".",
"pad",
")",
">",
"0",
":",
"pad",
"=",
"param",
".",
"pad",
"[",
"0",
"]",
"param_string",
"+=",
"\", pad=(%d, %d)\"",
"%",
"(",
"pad",
",",
"pad",
")",
"else",
":",
"if",
"isinstance",
"(",
"param",
".",
"pad_w",
",",
"int",
")",
":",
"pad_w",
"=",
"param",
".",
"pad_w",
"if",
"isinstance",
"(",
"param",
".",
"pad_h",
",",
"int",
")",
":",
"pad_h",
"=",
"param",
".",
"pad_h",
"param_string",
"+=",
"\", pad=(%d, %d)\"",
"%",
"(",
"pad_h",
",",
"pad_w",
")",
"if",
"isinstance",
"(",
"param",
".",
"kernel_size",
",",
"int",
")",
":",
"kernel_size",
"=",
"param",
".",
"kernel_size",
"param_string",
"+=",
"\", kernel=(%d,%d)\"",
"%",
"(",
"kernel_size",
",",
"kernel_size",
")",
"else",
":",
"if",
"len",
"(",
"param",
".",
"kernel_size",
")",
">",
"0",
":",
"kernel_size",
"=",
"param",
".",
"kernel_size",
"[",
"0",
"]",
"param_string",
"+=",
"\", kernel=(%d,%d)\"",
"%",
"(",
"kernel_size",
",",
"kernel_size",
")",
"else",
":",
"assert",
"isinstance",
"(",
"param",
".",
"kernel_w",
",",
"int",
")",
"kernel_w",
"=",
"param",
".",
"kernel_w",
"assert",
"isinstance",
"(",
"param",
".",
"kernel_h",
",",
"int",
")",
"kernel_h",
"=",
"param",
".",
"kernel_h",
"param_string",
"+=",
"\", kernel=(%d,%d)\"",
"%",
"(",
"kernel_h",
",",
"kernel_w",
")",
"stride",
"=",
"1",
"if",
"isinstance",
"(",
"param",
".",
"stride",
",",
"int",
")",
":",
"stride",
"=",
"param",
".",
"stride",
"else",
":",
"stride",
"=",
"1",
"if",
"len",
"(",
"param",
".",
"stride",
")",
"==",
"0",
"else",
"param",
".",
"stride",
"[",
"0",
"]",
"param_string",
"+=",
"\", stride=(%d,%d)\"",
"%",
"(",
"stride",
",",
"stride",
")",
"dilate",
"=",
"1",
"if",
"hasattr",
"(",
"param",
",",
"'dilation'",
")",
":",
"if",
"isinstance",
"(",
"param",
".",
"dilation",
",",
"int",
")",
":",
"dilate",
"=",
"param",
".",
"dilation",
"else",
":",
"dilate",
"=",
"1",
"if",
"len",
"(",
"param",
".",
"dilation",
")",
"==",
"0",
"else",
"param",
".",
"dilation",
"[",
"0",
"]",
"param_string",
"+=",
"\", no_bias=%s\"",
"%",
"(",
"not",
"param",
".",
"bias_term",
")",
"# deal with dilation. Won't be in deconvolution",
"if",
"dilate",
">",
"1",
":",
"param_string",
"+=",
"\", dilate=(%d, %d)\"",
"%",
"(",
"dilate",
",",
"dilate",
")",
"if",
"isinstance",
"(",
"param",
".",
"group",
",",
"int",
")",
":",
"if",
"param",
".",
"group",
"!=",
"1",
":",
"param_string",
"+=",
"\", num_group=%d\"",
"%",
"param",
".",
"group",
"return",
"param_string"
] | https://github.com/hpi-xnor/BMXNet/blob/ed0b201da6667887222b8e4b5f997c4f6b61943d/tools/caffe_converter/convert_symbol.py#L44-L105 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/prompt-toolkit/py3/prompt_toolkit/layout/controls.py | python | BufferControl._create_get_processed_line_func | (
self, document: Document, width: int, height: int
) | return create_func() | Create a function that takes a line number of the current document and
returns a _ProcessedLine(processed_fragments, source_to_display, display_to_source)
tuple. | Create a function that takes a line number of the current document and
returns a _ProcessedLine(processed_fragments, source_to_display, display_to_source)
tuple. | [
"Create",
"a",
"function",
"that",
"takes",
"a",
"line",
"number",
"of",
"the",
"current",
"document",
"and",
"returns",
"a",
"_ProcessedLine",
"(",
"processed_fragments",
"source_to_display",
"display_to_source",
")",
"tuple",
"."
def _create_get_processed_line_func(
    self, document: Document, width: int, height: int
) -> Callable[[int], _ProcessedLine]:
    """
    Create a function that takes a line number of the current document and
    returns a _ProcessedLine(processed_fragments, source_to_display, display_to_source)
    tuple.
    """
    # Merge all input processors together.
    input_processors = self.input_processors or []
    if self.include_default_input_processors:
        # Defaults go first so explicit processors can override their output.
        input_processors = self.default_input_processors + input_processors

    merged_processor = merge_processors(input_processors)

    def transform(lineno: int, fragments: StyleAndTextTuples) -> _ProcessedLine:
        "Transform the fragments for a given line number."

        # Get cursor position at this line.
        def source_to_display(i: int) -> int:
            """X position from the buffer to the x position in the
            processed fragment list. By default, we start from the 'identity'
            operation."""
            return i

        transformation = merged_processor.apply_transformation(
            TransformationInput(
                self, document, lineno, source_to_display, fragments, width, height
            )
        )

        return _ProcessedLine(
            transformation.fragments,
            transformation.source_to_display,
            transformation.display_to_source,
        )

    def create_func() -> Callable[[int], _ProcessedLine]:
        get_line = self._get_formatted_text_for_line_func(document)
        # Per-call memoization: each returned function caches transformed
        # lines so repeated lookups during one render pass are cheap.
        cache: Dict[int, _ProcessedLine] = {}

        def get_processed_line(i: int) -> _ProcessedLine:
            try:
                return cache[i]
            except KeyError:
                processed_line = transform(i, get_line(i))
                cache[i] = processed_line
                return processed_line

        return get_processed_line

    return create_func()
"def",
"_create_get_processed_line_func",
"(",
"self",
",",
"document",
":",
"Document",
",",
"width",
":",
"int",
",",
"height",
":",
"int",
")",
"->",
"Callable",
"[",
"[",
"int",
"]",
",",
"_ProcessedLine",
"]",
":",
"# Merge all input processors together.",
"input_processors",
"=",
"self",
".",
"input_processors",
"or",
"[",
"]",
"if",
"self",
".",
"include_default_input_processors",
":",
"input_processors",
"=",
"self",
".",
"default_input_processors",
"+",
"input_processors",
"merged_processor",
"=",
"merge_processors",
"(",
"input_processors",
")",
"def",
"transform",
"(",
"lineno",
":",
"int",
",",
"fragments",
":",
"StyleAndTextTuples",
")",
"->",
"_ProcessedLine",
":",
"\"Transform the fragments for a given line number.\"",
"# Get cursor position at this line.",
"def",
"source_to_display",
"(",
"i",
":",
"int",
")",
"->",
"int",
":",
"\"\"\"X position from the buffer to the x position in the\n processed fragment list. By default, we start from the 'identity'\n operation.\"\"\"",
"return",
"i",
"transformation",
"=",
"merged_processor",
".",
"apply_transformation",
"(",
"TransformationInput",
"(",
"self",
",",
"document",
",",
"lineno",
",",
"source_to_display",
",",
"fragments",
",",
"width",
",",
"height",
")",
")",
"return",
"_ProcessedLine",
"(",
"transformation",
".",
"fragments",
",",
"transformation",
".",
"source_to_display",
",",
"transformation",
".",
"display_to_source",
",",
")",
"def",
"create_func",
"(",
")",
"->",
"Callable",
"[",
"[",
"int",
"]",
",",
"_ProcessedLine",
"]",
":",
"get_line",
"=",
"self",
".",
"_get_formatted_text_for_line_func",
"(",
"document",
")",
"cache",
":",
"Dict",
"[",
"int",
",",
"_ProcessedLine",
"]",
"=",
"{",
"}",
"def",
"get_processed_line",
"(",
"i",
":",
"int",
")",
"->",
"_ProcessedLine",
":",
"try",
":",
"return",
"cache",
"[",
"i",
"]",
"except",
"KeyError",
":",
"processed_line",
"=",
"transform",
"(",
"i",
",",
"get_line",
"(",
"i",
")",
")",
"cache",
"[",
"i",
"]",
"=",
"processed_line",
"return",
"processed_line",
"return",
"get_processed_line",
"return",
"create_func",
"(",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/prompt-toolkit/py3/prompt_toolkit/layout/controls.py#L671-L721 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/site-packages/botocore/docs/waiter.py | python | document_wait_method | (section, waiter_name, event_emitter,
service_model, service_waiter_model,
include_signature=True) | Documents a the wait method of a waiter
:param section: The section to write to
:param waiter_name: The name of the waiter
:param event_emitter: The event emitter to use to emit events
:param service_model: The service model
:param service_waiter_model: The waiter model associated to the service
:param include_signature: Whether or not to include the signature.
It is useful for generating docstrings. | Documents a the wait method of a waiter | [
"Documents",
"a",
"the",
"wait",
"method",
"of",
"a",
"waiter"
def document_wait_method(section, waiter_name, event_emitter,
                         service_model, service_waiter_model,
                         include_signature=True):
    """Documents a the wait method of a waiter

    :param section: The section to write to

    :param waiter_name: The name of the waiter

    :param event_emitter: The event emitter to use to emit events

    :param service_model: The service model

    :param service_waiter_model: The waiter model associated to the service

    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    waiter_model = service_waiter_model.get_waiter(waiter_name)
    operation_model = service_model.operation_model(waiter_model.operation)

    # Synthetic members exposed via the ``WaiterConfig`` input parameter.
    config_members = OrderedDict([
        ('Delay', DocumentedShape(
            name='Delay', type_name='integer',
            documentation=(
                '<p>The amount of time in seconds to wait between '
                'attempts. Default: {0}</p>'.format(waiter_model.delay)))),
        ('MaxAttempts', DocumentedShape(
            name='MaxAttempts', type_name='integer',
            documentation=(
                '<p>The maximum number of attempts to be made. '
                'Default: {0}</p>'.format(waiter_model.max_attempts)))),
    ])
    extra_input_params = [
        DocumentedShape(
            name='WaiterConfig', type_name='structure',
            documentation=(
                '<p>A dictionary that provides parameters to control '
                'waiting behavior.</p>'),
            members=config_members),
    ]

    description = (
        'Polls :py:meth:`{0}.Client.{1}` every {2} '
        'seconds until a successful state is reached. An error is '
        'returned after {3} failed checks.'.format(
            get_service_module_name(service_model),
            xform_name(waiter_model.operation),
            waiter_model.delay, waiter_model.max_attempts))

    document_model_driven_method(
        section, 'wait', operation_model,
        event_emitter=event_emitter,
        method_description=description,
        example_prefix='waiter.wait',
        include_input=extra_input_params,
        document_output=False,
        include_signature=include_signature)
"def",
"document_wait_method",
"(",
"section",
",",
"waiter_name",
",",
"event_emitter",
",",
"service_model",
",",
"service_waiter_model",
",",
"include_signature",
"=",
"True",
")",
":",
"waiter_model",
"=",
"service_waiter_model",
".",
"get_waiter",
"(",
"waiter_name",
")",
"operation_model",
"=",
"service_model",
".",
"operation_model",
"(",
"waiter_model",
".",
"operation",
")",
"waiter_config_members",
"=",
"OrderedDict",
"(",
")",
"waiter_config_members",
"[",
"'Delay'",
"]",
"=",
"DocumentedShape",
"(",
"name",
"=",
"'Delay'",
",",
"type_name",
"=",
"'integer'",
",",
"documentation",
"=",
"(",
"'<p>The amount of time in seconds to wait between '",
"'attempts. Default: {0}</p>'",
".",
"format",
"(",
"waiter_model",
".",
"delay",
")",
")",
")",
"waiter_config_members",
"[",
"'MaxAttempts'",
"]",
"=",
"DocumentedShape",
"(",
"name",
"=",
"'MaxAttempts'",
",",
"type_name",
"=",
"'integer'",
",",
"documentation",
"=",
"(",
"'<p>The maximum number of attempts to be made. '",
"'Default: {0}</p>'",
".",
"format",
"(",
"waiter_model",
".",
"max_attempts",
")",
")",
")",
"botocore_waiter_params",
"=",
"[",
"DocumentedShape",
"(",
"name",
"=",
"'WaiterConfig'",
",",
"type_name",
"=",
"'structure'",
",",
"documentation",
"=",
"(",
"'<p>A dictionary that provides parameters to control '",
"'waiting behavior.</p>'",
")",
",",
"members",
"=",
"waiter_config_members",
")",
"]",
"wait_description",
"=",
"(",
"'Polls :py:meth:`{0}.Client.{1}` every {2} '",
"'seconds until a successful state is reached. An error is '",
"'returned after {3} failed checks.'",
".",
"format",
"(",
"get_service_module_name",
"(",
"service_model",
")",
",",
"xform_name",
"(",
"waiter_model",
".",
"operation",
")",
",",
"waiter_model",
".",
"delay",
",",
"waiter_model",
".",
"max_attempts",
")",
")",
"document_model_driven_method",
"(",
"section",
",",
"'wait'",
",",
"operation_model",
",",
"event_emitter",
"=",
"event_emitter",
",",
"method_description",
"=",
"wait_description",
",",
"example_prefix",
"=",
"'waiter.wait'",
",",
"include_input",
"=",
"botocore_waiter_params",
",",
"document_output",
"=",
"False",
",",
"include_signature",
"=",
"include_signature",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/botocore/docs/waiter.py#L65-L127 | ||
apache/qpid-proton | 6bcdfebb55ea3554bc29b1901422532db331a591 | python/proton/_endpoints.py | python | Terminus.dynamic | (self) | return pn_terminus_is_dynamic(self._impl) | Indicates whether the source or target node was dynamically
created | Indicates whether the source or target node was dynamically
created | [
"Indicates",
"whether",
"the",
"source",
"or",
"target",
"node",
"was",
"dynamically",
"created"
def dynamic(self) -> bool:
    """Indicates whether the source or target node was dynamically
    created.

    Delegates to the underlying C binding
    (``pn_terminus_is_dynamic``) on the wrapped terminus object.
    """
    return pn_terminus_is_dynamic(self._impl)
"def",
"dynamic",
"(",
"self",
")",
"->",
"bool",
":",
"return",
"pn_terminus_is_dynamic",
"(",
"self",
".",
"_impl",
")"
] | https://github.com/apache/qpid-proton/blob/6bcdfebb55ea3554bc29b1901422532db331a591/python/proton/_endpoints.py#L1375-L1378 | |
QMCPACK/qmcpack | d0948ab455e38364458740cc8e2239600a14c5cd | nexus/lib/gaussian_process.py | python | rosenbrock | (x) | return ( (1-x[0:-1])**2+100*(x[1:]-x[0:-1]**2)**2 ).sum() | Global minimum in dim 3-7 at (1,1,...,1), local min at (-1,1,...,1) | Global minimum in dim 3-7 at (1,1,...,1), local min at (-1,1,...,1) | [
"Global",
"minimum",
"in",
"dim",
"3",
"-",
"7",
"at",
"(",
"1",
"1",
"...",
"1",
")",
"local",
"min",
"at",
"(",
"-",
"1",
"1",
"...",
"1",
")"
def rosenbrock(x):
    """
    Global minimum in dim 3-7 at (1,1,...,1), local min at (-1,1,...,1)
    """
    # Flatten, then evaluate the classic banana-valley sum over
    # consecutive coordinate pairs (x_i, x_{i+1}).
    flat = x.ravel()
    head = flat[:-1]
    tail = flat[1:]
    terms = (1 - head) ** 2 + 100 * (tail - head ** 2) ** 2
    return terms.sum()
"def",
"rosenbrock",
"(",
"x",
")",
":",
"x",
"=",
"x",
".",
"ravel",
"(",
")",
"return",
"(",
"(",
"1",
"-",
"x",
"[",
"0",
":",
"-",
"1",
"]",
")",
"**",
"2",
"+",
"100",
"*",
"(",
"x",
"[",
"1",
":",
"]",
"-",
"x",
"[",
"0",
":",
"-",
"1",
"]",
"**",
"2",
")",
"**",
"2",
")",
".",
"sum",
"(",
")"
] | https://github.com/QMCPACK/qmcpack/blob/d0948ab455e38364458740cc8e2239600a14c5cd/nexus/lib/gaussian_process.py#L353-L358 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/optparse.py | python | OptionParser.print_version | (self, file=None) | print_version(file : file = stdout)
Print the version message for this program (self.version) to
'file' (default stdout). As with print_usage(), any occurrence
of "%prog" in self.version is replaced by the current program's
name. Does nothing if self.version is empty or undefined. | print_version(file : file = stdout) | [
"print_version",
"(",
"file",
":",
"file",
"=",
"stdout",
")"
def print_version(self, file=None):
    """print_version(file : file = stdout)

    Write this program's version message (self.version) to 'file',
    which defaults to stdout.  As with print_usage(), "%prog" in
    self.version expands to the program name.  A no-op when
    self.version is empty or unset.
    """
    # Guard clause: nothing to print without a version string.
    if not self.version:
        return
    print(self.get_version(), file=file)
"def",
"print_version",
"(",
"self",
",",
"file",
"=",
"None",
")",
":",
"if",
"self",
".",
"version",
":",
"print",
"(",
"self",
".",
"get_version",
"(",
")",
",",
"file",
"=",
"file",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/optparse.py#L1596-L1605 | ||
hfinkel/llvm-project-cxxjit | 91084ef018240bbb8e24235ff5cd8c355a9c1a1e | clang/tools/scan-build-py/libscanbuild/report.py | python | assemble_cover | (args, prefix, fragments) | Put together the fragments into a final report. | Put together the fragments into a final report. | [
"Put",
"together",
"the",
"fragments",
"into",
"a",
"final",
"report",
"."
def assemble_cover(args, prefix, fragments):
    """ Put together the fragments into a final report.

    Writes <output>/index.html: a fixed HTML header/summary table, then the
    content of each fragment file verbatim, then the closing tags.

    :param args: parsed command-line options (uses output, html_title, clang)
    :param prefix: working directory shown in the report summary
    :param fragments: paths of HTML fragment files to splice into the body
    """
    import getpass
    import socket

    if args.html_title is None:
        # Default title derives from the analyzed directory name.
        args.html_title = os.path.basename(prefix) + ' - analyzer results'

    with open(os.path.join(args.output, 'index.html'), 'w') as handle:
        indent = 0
        # The '|' margin markers are stripped by reindent().
        handle.write(reindent("""
        |<!DOCTYPE html>
        |<html>
        |  <head>
        |    <title>{html_title}</title>
        |    <link type="text/css" rel="stylesheet" href="scanview.css"/>
        |    <script type='text/javascript' src="sorttable.js"></script>
        |    <script type='text/javascript' src='selectable.js'></script>
        |  </head>""", indent).format(html_title=args.html_title))
        # Marker comment consumed by downstream tooling that scans the report.
        handle.write(comment('SUMMARYENDHEAD'))
        handle.write(reindent("""
        |  <body>
        |    <h1>{html_title}</h1>
        |    <table>
        |      <tr><th>User:</th><td>{user_name}@{host_name}</td></tr>
        |      <tr><th>Working Directory:</th><td>{current_dir}</td></tr>
        |      <tr><th>Command Line:</th><td>{cmd_args}</td></tr>
        |      <tr><th>Clang Version:</th><td>{clang_version}</td></tr>
        |      <tr><th>Date:</th><td>{date}</td></tr>
        |    </table>""", indent).format(html_title=args.html_title,
                                         user_name=getpass.getuser(),
                                         host_name=socket.gethostname(),
                                         current_dir=prefix,
                                         cmd_args=' '.join(sys.argv),
                                         clang_version=get_version(args.clang),
                                         date=datetime.datetime.today(
                                         ).strftime('%c')))
        for fragment in fragments:
            # copy the content of fragments
            with open(fragment, 'r') as input_handle:
                shutil.copyfileobj(input_handle, handle)
        handle.write(reindent("""
        |  </body>
        |</html>""", indent))
"def",
"assemble_cover",
"(",
"args",
",",
"prefix",
",",
"fragments",
")",
":",
"import",
"getpass",
"import",
"socket",
"if",
"args",
".",
"html_title",
"is",
"None",
":",
"args",
".",
"html_title",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"prefix",
")",
"+",
"' - analyzer results'",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"args",
".",
"output",
",",
"'index.html'",
")",
",",
"'w'",
")",
"as",
"handle",
":",
"indent",
"=",
"0",
"handle",
".",
"write",
"(",
"reindent",
"(",
"\"\"\"\n |<!DOCTYPE html>\n |<html>\n | <head>\n | <title>{html_title}</title>\n | <link type=\"text/css\" rel=\"stylesheet\" href=\"scanview.css\"/>\n | <script type='text/javascript' src=\"sorttable.js\"></script>\n | <script type='text/javascript' src='selectable.js'></script>\n | </head>\"\"\"",
",",
"indent",
")",
".",
"format",
"(",
"html_title",
"=",
"args",
".",
"html_title",
")",
")",
"handle",
".",
"write",
"(",
"comment",
"(",
"'SUMMARYENDHEAD'",
")",
")",
"handle",
".",
"write",
"(",
"reindent",
"(",
"\"\"\"\n | <body>\n | <h1>{html_title}</h1>\n | <table>\n | <tr><th>User:</th><td>{user_name}@{host_name}</td></tr>\n | <tr><th>Working Directory:</th><td>{current_dir}</td></tr>\n | <tr><th>Command Line:</th><td>{cmd_args}</td></tr>\n | <tr><th>Clang Version:</th><td>{clang_version}</td></tr>\n | <tr><th>Date:</th><td>{date}</td></tr>\n | </table>\"\"\"",
",",
"indent",
")",
".",
"format",
"(",
"html_title",
"=",
"args",
".",
"html_title",
",",
"user_name",
"=",
"getpass",
".",
"getuser",
"(",
")",
",",
"host_name",
"=",
"socket",
".",
"gethostname",
"(",
")",
",",
"current_dir",
"=",
"prefix",
",",
"cmd_args",
"=",
"' '",
".",
"join",
"(",
"sys",
".",
"argv",
")",
",",
"clang_version",
"=",
"get_version",
"(",
"args",
".",
"clang",
")",
",",
"date",
"=",
"datetime",
".",
"datetime",
".",
"today",
"(",
")",
".",
"strftime",
"(",
"'%c'",
")",
")",
")",
"for",
"fragment",
"in",
"fragments",
":",
"# copy the content of fragments",
"with",
"open",
"(",
"fragment",
",",
"'r'",
")",
"as",
"input_handle",
":",
"shutil",
".",
"copyfileobj",
"(",
"input_handle",
",",
"handle",
")",
"handle",
".",
"write",
"(",
"reindent",
"(",
"\"\"\"\n | </body>\n |</html>\"\"\"",
",",
"indent",
")",
")"
] | https://github.com/hfinkel/llvm-project-cxxjit/blob/91084ef018240bbb8e24235ff5cd8c355a9c1a1e/clang/tools/scan-build-py/libscanbuild/report.py#L63-L107 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/_core.py | python | Window.SetForegroundColour | (*args, **kwargs) | return _core_.Window_SetForegroundColour(*args, **kwargs) | SetForegroundColour(self, Colour colour) -> bool
Sets the foreground colour of the window. Returns True is the colour
was changed. The interpretation of foreground colour is dependent on
the window class; it may be the text colour or other colour, or it may
not be used at all. | SetForegroundColour(self, Colour colour) -> bool | [
"SetForegroundColour",
"(",
"self",
"Colour",
"colour",
")",
"-",
">",
"bool"
def SetForegroundColour(*args, **kwargs):
    """
    SetForegroundColour(self, Colour colour) -> bool

    Sets the foreground colour of the window. Returns True if the colour
    was changed. The interpretation of foreground colour is dependent on
    the window class; it may be the text colour or other colour, or it may
    not be used at all.
    """
    # SWIG-generated passthrough to the native wxWidgets implementation.
    return _core_.Window_SetForegroundColour(*args, **kwargs)
"def",
"SetForegroundColour",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"Window_SetForegroundColour",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L10869-L10878 | |
devsisters/libquic | 8954789a056d8e7d5fcb6452fd1572ca57eb5c4e | src/third_party/protobuf/python/google/protobuf/internal/python_message.py | python | _InternalUnpackAny | (msg) | return message | Unpacks Any message and returns the unpacked message.
This internal method is differnt from public Any Unpack method which takes
the target message as argument. _InternalUnpackAny method does not have
target message type and need to find the message type in descriptor pool.
Args:
msg: An Any message to be unpacked.
Returns:
The unpacked message. | Unpacks Any message and returns the unpacked message. | [
"Unpacks",
"Any",
"message",
"and",
"returns",
"the",
"unpacked",
"message",
"."
def _InternalUnpackAny(msg):
  """Unpacks Any message and returns the unpacked message.

  This internal method is different from the public Any Unpack method, which
  takes the target message as argument. _InternalUnpackAny does not have the
  target message type and needs to find the message type in the default
  descriptor pool.

  Args:
    msg: An Any message to be unpacked.

  Returns:
    The unpacked message, or None if the type URL is empty or the type is
    not registered in the descriptor pool.
  """
  type_url = msg.type_url
  db = symbol_database.Default()

  if not type_url:
    return None

  # TODO(haberman): For now we just strip the hostname. Better logic will be
  # required.
  type_name = type_url.split("/")[-1]
  descriptor = db.pool.FindMessageTypeByName(type_name)

  if descriptor is None:
    return None

  # Instantiate the generated class for the resolved type and parse the
  # serialized payload carried in the Any's 'value' field.
  message_class = db.GetPrototype(descriptor)
  message = message_class()

  message.ParseFromString(msg.value)
  return message
"def",
"_InternalUnpackAny",
"(",
"msg",
")",
":",
"type_url",
"=",
"msg",
".",
"type_url",
"db",
"=",
"symbol_database",
".",
"Default",
"(",
")",
"if",
"not",
"type_url",
":",
"return",
"None",
"# TODO(haberman): For now we just strip the hostname. Better logic will be",
"# required.",
"type_name",
"=",
"type_url",
".",
"split",
"(",
"\"/\"",
")",
"[",
"-",
"1",
"]",
"descriptor",
"=",
"db",
".",
"pool",
".",
"FindMessageTypeByName",
"(",
"type_name",
")",
"if",
"descriptor",
"is",
"None",
":",
"return",
"None",
"message_class",
"=",
"db",
".",
"GetPrototype",
"(",
"descriptor",
")",
"message",
"=",
"message_class",
"(",
")",
"message",
".",
"ParseFromString",
"(",
"msg",
".",
"value",
")",
"return",
"message"
] | https://github.com/devsisters/libquic/blob/8954789a056d8e7d5fcb6452fd1572ca57eb5c4e/src/third_party/protobuf/python/google/protobuf/internal/python_message.py#L916-L947 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/_core.py | python | Window.SetSizeWH | (*args, **kwargs) | return _core_.Window_SetSizeWH(*args, **kwargs) | SetSizeWH(self, int width, int height)
Sets the size of the window in pixels. | SetSizeWH(self, int width, int height) | [
"SetSizeWH",
"(",
"self",
"int",
"width",
"int",
"height",
")"
def SetSizeWH(*args, **kwargs):
    """
    SetSizeWH(self, int width, int height)

    Sets the size of the window in pixels.
    """
    # SWIG-generated passthrough to the native wxWidgets implementation.
    return _core_.Window_SetSizeWH(*args, **kwargs)
"def",
"SetSizeWH",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"Window_SetSizeWH",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L9365-L9371 | |
google/nucleus | 68d3947fafba1337f294c0668a6e1c7f3f1273e3 | nucleus/util/variantcall_utils.py | python | has_full_genotypes | (variant_call) | return all(gt >= 0 for gt in variant_call.genotype) | Returns True iff the VariantCall has only known genotypes.
Args:
variant_call: VariantCall proto. The VariantCall to evaluate.
Returns:
True if all `genotype` fields are known genotypes. | Returns True iff the VariantCall has only known genotypes. | [
"Returns",
"True",
"iff",
"the",
"VariantCall",
"has",
"only",
"known",
"genotypes",
"."
def has_full_genotypes(variant_call):
  """Returns True iff the VariantCall has only known genotypes.

  Args:
    variant_call: VariantCall proto. The VariantCall to evaluate.

  Returns:
    True if all `genotype` fields are known genotypes.
  """
  for allele_index in variant_call.genotype:
    # Negative genotype values denote unknown/missing calls.
    if allele_index < 0:
      return False
  return True
"def",
"has_full_genotypes",
"(",
"variant_call",
")",
":",
"return",
"all",
"(",
"gt",
">=",
"0",
"for",
"gt",
"in",
"variant_call",
".",
"genotype",
")"
] | https://github.com/google/nucleus/blob/68d3947fafba1337f294c0668a6e1c7f3f1273e3/nucleus/util/variantcall_utils.py#L211-L220 | |
LiquidPlayer/LiquidCore | 9405979363f2353ac9a71ad8ab59685dd7f919c9 | deps/boost_1_66_0/tools/build/src/tools/common.py | python | format_name | (format, name, target_type, prop_set) | Given a target, as given to a custom tag rule, returns a string formatted
according to the passed format. Format is a list of properties that is
represented in the result. For each element of format the corresponding target
information is obtained and added to the result string. For all, but the
literal, the format value is taken as the as string to prepend to the output
to join the item to the rest of the result. If not given "-" is used as a
joiner.
The format options can be:
<base>[joiner]
:: The basename of the target name.
<toolset>[joiner]
:: The abbreviated toolset tag being used to build the target.
<threading>[joiner]
:: Indication of a multi-threaded build.
<runtime>[joiner]
:: Collective tag of the build runtime.
<version:/version-feature | X.Y[.Z]/>[joiner]
:: Short version tag taken from the given "version-feature"
in the build properties. Or if not present, the literal
value as the version number.
<property:/property-name/>[joiner]
:: Direct lookup of the given property-name value in the
build properties. /property-name/ is a regular expression.
e.g. <property:toolset-.*:flavor> will match every toolset.
/otherwise/
:: The literal value of the format argument.
For example this format:
boost_ <base> <toolset> <threading> <runtime> <version:boost-version>
Might return:
boost_thread-vc80-mt-gd-1_33.dll, or
boost_regex-vc80-gd-1_33.dll
The returned name also has the target type specific prefix and suffix which
puts it in a ready form to use as the value from a custom tag rule. | Given a target, as given to a custom tag rule, returns a string formatted
according to the passed format. Format is a list of properties that is
represented in the result. For each element of format the corresponding target
information is obtained and added to the result string. For all, but the
literal, the format value is taken as the as string to prepend to the output
to join the item to the rest of the result. If not given "-" is used as a
joiner. | [
"Given",
"a",
"target",
"as",
"given",
"to",
"a",
"custom",
"tag",
"rule",
"returns",
"a",
"string",
"formatted",
"according",
"to",
"the",
"passed",
"format",
".",
"Format",
"is",
"a",
"list",
"of",
"properties",
"that",
"is",
"represented",
"in",
"the",
"result",
".",
"For",
"each",
"element",
"of",
"format",
"the",
"corresponding",
"target",
"information",
"is",
"obtained",
"and",
"added",
"to",
"the",
"result",
"string",
".",
"For",
"all",
"but",
"the",
"literal",
"the",
"format",
"value",
"is",
"taken",
"as",
"the",
"as",
"string",
"to",
"prepend",
"to",
"the",
"output",
"to",
"join",
"the",
"item",
"to",
"the",
"rest",
"of",
"the",
"result",
".",
"If",
"not",
"given",
"-",
"is",
"used",
"as",
"a",
"joiner",
"."
def format_name(format, name, target_type, prop_set):
    """ Given a target, as given to a custom tag rule, returns a string formatted
    according to the passed format. Format is a list of properties that is
    represented in the result. For each element of format the corresponding target
    information is obtained and added to the result string. For all, but the
    literal, the format value is taken as the as string to prepend to the output
    to join the item to the rest of the result. If not given "-" is used as a
    joiner.

    The format options can be:

      <base>[joiner]
        ::  The basename of the target name.
      <toolset>[joiner]
        ::  The abbreviated toolset tag being used to build the target.
      <threading>[joiner]
        ::  Indication of a multi-threaded build.
      <runtime>[joiner]
        ::  Collective tag of the build runtime.
      <version:/version-feature | X.Y[.Z]/>[joiner]
        ::  Short version tag taken from the given "version-feature"
            in the build properties. Or if not present, the literal
            value as the version number.
      <property:/property-name/>[joiner]
        ::  Direct lookup of the given property-name value in the
            build properties. /property-name/ is a regular expression.
            e.g. <property:toolset-.*:flavor> will match every toolset.
      /otherwise/
        ::  The literal value of the format argument.

    For example this format:

      boost_ <base> <toolset> <threading> <runtime> <version:boost-version>

    Might return:

      boost_thread-vc80-mt-gd-1_33.dll, or
      boost_regex-vc80-gd-1_33.dll

    The returned name also has the target type specific prefix and suffix which
    puts it in a ready form to use as the value from a custom tag rule.
    """
    if __debug__:
        from ..build.property_set import PropertySet
        assert is_iterable_typed(format, basestring)
        assert isinstance(name, basestring)
        assert isinstance(target_type, basestring)
        assert isinstance(prop_set, PropertySet)
    # assert(isinstance(prop_set, property_set.PropertySet))
    # NOTE(review): only LIB-derived targets produce a name here; for any
    # other target type this function implicitly returns None — confirm
    # callers expect that.
    if type.is_derived(target_type, 'LIB'):
        result = "" ;
        for f in format:
            grist = get_grist(f)
            if grist == '<base>':
                result += os.path.basename(name)
            elif grist == '<toolset>':
                result += join_tag(get_value(f),
                    toolset_tag(name, target_type, prop_set))
            elif grist == '<threading>':
                result += join_tag(get_value(f),
                    threading_tag(name, target_type, prop_set))
            elif grist == '<runtime>':
                result += join_tag(get_value(f),
                    runtime_tag(name, target_type, prop_set))
            elif grist.startswith('<version:'):
                key = grist[len('<version:'):-1]
                version = prop_set.get('<' + key + '>')
                if not version:
                    version = key
                # NOTE(review): prop_set.get() presumably returns a list of
                # values, yet it is passed straight to a regex match (which
                # expects a string) and the match object is indexed without a
                # None check — verify against upstream Boost.Build behavior.
                version = __re_version.match(version)
                result += join_tag(get_value(f), version[1] + '_' + version[2])
            elif grist.startswith('<property:'):
                key = grist[len('<property:'):-1]
                property_re = re.compile('<(' + key + ')>')
                p0 = None
                for prop in prop_set.raw():
                    match = property_re.match(prop)
                    if match:
                        p0 = match[1]
                        break
                if p0:
                    p = prop_set.get('<' + p0 + '>')
                    if p:
                        assert(len(p) == 1)
                        # NOTE(review): 'p' looks like a one-element list being
                        # joined as-is; confirm join_tag tolerates that.
                        result += join_tag(ungrist(f), p)
            else:
                # Literal format element: appended verbatim.
                result += f

        # ''.join on a plain string is a no-op copy; result is already a str.
        result = b2.build.virtual_target.add_prefix_and_suffix(
            ''.join(result), target_type, prop_set)
        return result
"def",
"format_name",
"(",
"format",
",",
"name",
",",
"target_type",
",",
"prop_set",
")",
":",
"if",
"__debug__",
":",
"from",
".",
".",
"build",
".",
"property_set",
"import",
"PropertySet",
"assert",
"is_iterable_typed",
"(",
"format",
",",
"basestring",
")",
"assert",
"isinstance",
"(",
"name",
",",
"basestring",
")",
"assert",
"isinstance",
"(",
"target_type",
",",
"basestring",
")",
"assert",
"isinstance",
"(",
"prop_set",
",",
"PropertySet",
")",
"# assert(isinstance(prop_set, property_set.PropertySet))",
"if",
"type",
".",
"is_derived",
"(",
"target_type",
",",
"'LIB'",
")",
":",
"result",
"=",
"\"\"",
"for",
"f",
"in",
"format",
":",
"grist",
"=",
"get_grist",
"(",
"f",
")",
"if",
"grist",
"==",
"'<base>'",
":",
"result",
"+=",
"os",
".",
"path",
".",
"basename",
"(",
"name",
")",
"elif",
"grist",
"==",
"'<toolset>'",
":",
"result",
"+=",
"join_tag",
"(",
"get_value",
"(",
"f",
")",
",",
"toolset_tag",
"(",
"name",
",",
"target_type",
",",
"prop_set",
")",
")",
"elif",
"grist",
"==",
"'<threading>'",
":",
"result",
"+=",
"join_tag",
"(",
"get_value",
"(",
"f",
")",
",",
"threading_tag",
"(",
"name",
",",
"target_type",
",",
"prop_set",
")",
")",
"elif",
"grist",
"==",
"'<runtime>'",
":",
"result",
"+=",
"join_tag",
"(",
"get_value",
"(",
"f",
")",
",",
"runtime_tag",
"(",
"name",
",",
"target_type",
",",
"prop_set",
")",
")",
"elif",
"grist",
".",
"startswith",
"(",
"'<version:'",
")",
":",
"key",
"=",
"grist",
"[",
"len",
"(",
"'<version:'",
")",
":",
"-",
"1",
"]",
"version",
"=",
"prop_set",
".",
"get",
"(",
"'<'",
"+",
"key",
"+",
"'>'",
")",
"if",
"not",
"version",
":",
"version",
"=",
"key",
"version",
"=",
"__re_version",
".",
"match",
"(",
"version",
")",
"result",
"+=",
"join_tag",
"(",
"get_value",
"(",
"f",
")",
",",
"version",
"[",
"1",
"]",
"+",
"'_'",
"+",
"version",
"[",
"2",
"]",
")",
"elif",
"grist",
".",
"startswith",
"(",
"'<property:'",
")",
":",
"key",
"=",
"grist",
"[",
"len",
"(",
"'<property:'",
")",
":",
"-",
"1",
"]",
"property_re",
"=",
"re",
".",
"compile",
"(",
"'<('",
"+",
"key",
"+",
"')>'",
")",
"p0",
"=",
"None",
"for",
"prop",
"in",
"prop_set",
".",
"raw",
"(",
")",
":",
"match",
"=",
"property_re",
".",
"match",
"(",
"prop",
")",
"if",
"match",
":",
"p0",
"=",
"match",
"[",
"1",
"]",
"break",
"if",
"p0",
":",
"p",
"=",
"prop_set",
".",
"get",
"(",
"'<'",
"+",
"p0",
"+",
"'>'",
")",
"if",
"p",
":",
"assert",
"(",
"len",
"(",
"p",
")",
"==",
"1",
")",
"result",
"+=",
"join_tag",
"(",
"ungrist",
"(",
"f",
")",
",",
"p",
")",
"else",
":",
"result",
"+=",
"f",
"result",
"=",
"b2",
".",
"build",
".",
"virtual_target",
".",
"add_prefix_and_suffix",
"(",
"''",
".",
"join",
"(",
"result",
")",
",",
"target_type",
",",
"prop_set",
")",
"return",
"result"
] | https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/boost_1_66_0/tools/build/src/tools/common.py#L607-L697 | ||
gem5/gem5 | 141cc37c2d4b93959d4c249b8f7e6a8b2ef75338 | src/python/gem5/components/boards/se_binary_workload.py | python | SEBinaryWorkload.set_se_binary_workload | (self, binary: AbstractResource) | Set up the system to run a specific binary.
**Limitations**
* Only supports single threaded applications
* Dynamically linked executables are partially supported when the host
ISA and the simulated ISA are the same.
:param binary: The resource encapsulating the binary to be run. | Set up the system to run a specific binary. | [
"Set",
"up",
"the",
"system",
"to",
"run",
"a",
"specific",
"binary",
"."
] | def set_se_binary_workload(self, binary: AbstractResource) -> None:
"""Set up the system to run a specific binary.
**Limitations**
* Only supports single threaded applications
* Dynamically linked executables are partially supported when the host
ISA and the simulated ISA are the same.
:param binary: The resource encapsulating the binary to be run.
"""
self.workload = SEWorkload.init_compatible(binary.get_local_path())
process = Process()
process.cmd = [binary.get_local_path()]
self.get_processor().get_cores()[0].set_workload(process) | [
"def",
"set_se_binary_workload",
"(",
"self",
",",
"binary",
":",
"AbstractResource",
")",
"->",
"None",
":",
"self",
".",
"workload",
"=",
"SEWorkload",
".",
"init_compatible",
"(",
"binary",
".",
"get_local_path",
"(",
")",
")",
"process",
"=",
"Process",
"(",
")",
"process",
".",
"cmd",
"=",
"[",
"binary",
".",
"get_local_path",
"(",
")",
"]",
"self",
".",
"get_processor",
"(",
")",
".",
"get_cores",
"(",
")",
"[",
"0",
"]",
".",
"set_workload",
"(",
"process",
")"
] | https://github.com/gem5/gem5/blob/141cc37c2d4b93959d4c249b8f7e6a8b2ef75338/src/python/gem5/components/boards/se_binary_workload.py#L41-L56 | ||
google/llvm-propeller | 45c226984fe8377ebfb2ad7713c680d652ba678d | clang/utils/check_cfc/check_cfc.py | python | set_output_file | (args, output_file) | return args | Set the output file within the arguments. Appends or replaces as
appropriate. | Set the output file within the arguments. Appends or replaces as
appropriate. | [
"Set",
"the",
"output",
"file",
"within",
"the",
"arguments",
".",
"Appends",
"or",
"replaces",
"as",
"appropriate",
"."
] | def set_output_file(args, output_file):
"""Set the output file within the arguments. Appends or replaces as
appropriate."""
if is_output_specified(args):
args = replace_output_file(args, output_file)
else:
args = add_output_file(args, output_file)
return args | [
"def",
"set_output_file",
"(",
"args",
",",
"output_file",
")",
":",
"if",
"is_output_specified",
"(",
"args",
")",
":",
"args",
"=",
"replace_output_file",
"(",
"args",
",",
"output_file",
")",
"else",
":",
"args",
"=",
"add_output_file",
"(",
"args",
",",
"output_file",
")",
"return",
"args"
] | https://github.com/google/llvm-propeller/blob/45c226984fe8377ebfb2ad7713c680d652ba678d/clang/utils/check_cfc/check_cfc.py#L176-L183 | |
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | src/python/turicreate/meta/decompiler/simple_instructions.py | python | SimpleInstructions.SLICE_1 | (self, instr) | obj[lower:] | obj[lower:] | [
"obj",
"[",
"lower",
":",
"]"
] | def SLICE_1(self, instr):
"obj[lower:]"
lower = self.ast_stack.pop()
value = self.ast_stack.pop()
kw = dict(lineno=instr.lineno, col_offset=0)
slice = _ast.Slice(lower=lower, step=None, upper=None, **kw)
subscr = _ast.Subscript(value=value, slice=slice, ctx=_ast.Load(), **kw)
self.ast_stack.append(subscr) | [
"def",
"SLICE_1",
"(",
"self",
",",
"instr",
")",
":",
"lower",
"=",
"self",
".",
"ast_stack",
".",
"pop",
"(",
")",
"value",
"=",
"self",
".",
"ast_stack",
".",
"pop",
"(",
")",
"kw",
"=",
"dict",
"(",
"lineno",
"=",
"instr",
".",
"lineno",
",",
"col_offset",
"=",
"0",
")",
"slice",
"=",
"_ast",
".",
"Slice",
"(",
"lower",
"=",
"lower",
",",
"step",
"=",
"None",
",",
"upper",
"=",
"None",
",",
"*",
"*",
"kw",
")",
"subscr",
"=",
"_ast",
".",
"Subscript",
"(",
"value",
"=",
"value",
",",
"slice",
"=",
"slice",
",",
"ctx",
"=",
"_ast",
".",
"Load",
"(",
")",
",",
"*",
"*",
"kw",
")",
"self",
".",
"ast_stack",
".",
"append",
"(",
"subscr",
")"
] | https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/python/turicreate/meta/decompiler/simple_instructions.py#L819-L828 | ||
hpi-xnor/BMXNet-v2 | af2b1859eafc5c721b1397cef02f946aaf2ce20d | example/distributed_training/cifar10_dist.py | python | evaluate_accuracy | (data_iterator, network) | return acc.get()[1] | Measure the accuracy of ResNet
Parameters
----------
data_iterator: Iter
examples of dataset
network:
ResNet
Returns
----------
tuple of array element | Measure the accuracy of ResNet | [
"Measure",
"the",
"accuracy",
"of",
"ResNet"
] | def evaluate_accuracy(data_iterator, network):
""" Measure the accuracy of ResNet
Parameters
----------
data_iterator: Iter
examples of dataset
network:
ResNet
Returns
----------
tuple of array element
"""
acc = mx.metric.Accuracy()
# Iterate through data and label
for i, (data, label) in enumerate(data_iterator):
# Get the data and label into the GPU
data = data.as_in_context(ctx[0])
label = label.as_in_context(ctx[0])
# Get network's output which is a probability distribution
# Apply argmax on the probability distribution to get network's classification.
output = network(data)
predictions = nd.argmax(output, axis=1)
# Give network's prediction and the correct label to update the metric
acc.update(preds=predictions, labels=label)
# Return the accuracy
return acc.get()[1] | [
"def",
"evaluate_accuracy",
"(",
"data_iterator",
",",
"network",
")",
":",
"acc",
"=",
"mx",
".",
"metric",
".",
"Accuracy",
"(",
")",
"# Iterate through data and label",
"for",
"i",
",",
"(",
"data",
",",
"label",
")",
"in",
"enumerate",
"(",
"data_iterator",
")",
":",
"# Get the data and label into the GPU",
"data",
"=",
"data",
".",
"as_in_context",
"(",
"ctx",
"[",
"0",
"]",
")",
"label",
"=",
"label",
".",
"as_in_context",
"(",
"ctx",
"[",
"0",
"]",
")",
"# Get network's output which is a probability distribution",
"# Apply argmax on the probability distribution to get network's classification.",
"output",
"=",
"network",
"(",
"data",
")",
"predictions",
"=",
"nd",
".",
"argmax",
"(",
"output",
",",
"axis",
"=",
"1",
")",
"# Give network's prediction and the correct label to update the metric",
"acc",
".",
"update",
"(",
"preds",
"=",
"predictions",
",",
"labels",
"=",
"label",
")",
"# Return the accuracy",
"return",
"acc",
".",
"get",
"(",
")",
"[",
"1",
"]"
] | https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/example/distributed_training/cifar10_dist.py#L110-L142 | |
Genius-x/genius-x | 9fc9f194e6d1fb92dd0e33d43db19ddb67cda7b0 | cocos2d/tools/bindings-generator/clang/cindex.py | python | TranslationUnit.get_includes | (self) | return iter(includes) | Return an iterable sequence of FileInclusion objects that describe the
sequence of inclusions in a translation unit. The first object in
this sequence is always the input file. Note that this method will not
recursively iterate over header files included through precompiled
headers. | Return an iterable sequence of FileInclusion objects that describe the
sequence of inclusions in a translation unit. The first object in
this sequence is always the input file. Note that this method will not
recursively iterate over header files included through precompiled
headers. | [
"Return",
"an",
"iterable",
"sequence",
"of",
"FileInclusion",
"objects",
"that",
"describe",
"the",
"sequence",
"of",
"inclusions",
"in",
"a",
"translation",
"unit",
".",
"The",
"first",
"object",
"in",
"this",
"sequence",
"is",
"always",
"the",
"input",
"file",
".",
"Note",
"that",
"this",
"method",
"will",
"not",
"recursively",
"iterate",
"over",
"header",
"files",
"included",
"through",
"precompiled",
"headers",
"."
] | def get_includes(self):
"""
Return an iterable sequence of FileInclusion objects that describe the
sequence of inclusions in a translation unit. The first object in
this sequence is always the input file. Note that this method will not
recursively iterate over header files included through precompiled
headers.
"""
def visitor(fobj, lptr, depth, includes):
if depth > 0:
loc = lptr.contents
includes.append(FileInclusion(loc.file, File(fobj), loc, depth))
# Automatically adapt CIndex/ctype pointers to python objects
includes = []
conf.lib.clang_getInclusions(self,
callbacks['translation_unit_includes'](visitor), includes)
return iter(includes) | [
"def",
"get_includes",
"(",
"self",
")",
":",
"def",
"visitor",
"(",
"fobj",
",",
"lptr",
",",
"depth",
",",
"includes",
")",
":",
"if",
"depth",
">",
"0",
":",
"loc",
"=",
"lptr",
".",
"contents",
"includes",
".",
"append",
"(",
"FileInclusion",
"(",
"loc",
".",
"file",
",",
"File",
"(",
"fobj",
")",
",",
"loc",
",",
"depth",
")",
")",
"# Automatically adapt CIndex/ctype pointers to python objects",
"includes",
"=",
"[",
"]",
"conf",
".",
"lib",
".",
"clang_getInclusions",
"(",
"self",
",",
"callbacks",
"[",
"'translation_unit_includes'",
"]",
"(",
"visitor",
")",
",",
"includes",
")",
"return",
"iter",
"(",
"includes",
")"
] | https://github.com/Genius-x/genius-x/blob/9fc9f194e6d1fb92dd0e33d43db19ddb67cda7b0/cocos2d/tools/bindings-generator/clang/cindex.py#L2270-L2288 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/nntplib.py | python | _NNTPBase.head | (self, message_spec=None, *, file=None) | return self._artcmd(cmd, file) | Process a HEAD command. Argument:
- message_spec: article number or message id
- file: filename string or file object to store the headers in
Returns:
- resp: server response if successful
- ArticleInfo: (article number, message id, list of header lines) | Process a HEAD command. Argument:
- message_spec: article number or message id
- file: filename string or file object to store the headers in
Returns:
- resp: server response if successful
- ArticleInfo: (article number, message id, list of header lines) | [
"Process",
"a",
"HEAD",
"command",
".",
"Argument",
":",
"-",
"message_spec",
":",
"article",
"number",
"or",
"message",
"id",
"-",
"file",
":",
"filename",
"string",
"or",
"file",
"object",
"to",
"store",
"the",
"headers",
"in",
"Returns",
":",
"-",
"resp",
":",
"server",
"response",
"if",
"successful",
"-",
"ArticleInfo",
":",
"(",
"article",
"number",
"message",
"id",
"list",
"of",
"header",
"lines",
")"
] | def head(self, message_spec=None, *, file=None):
"""Process a HEAD command. Argument:
- message_spec: article number or message id
- file: filename string or file object to store the headers in
Returns:
- resp: server response if successful
- ArticleInfo: (article number, message id, list of header lines)
"""
if message_spec is not None:
cmd = 'HEAD {0}'.format(message_spec)
else:
cmd = 'HEAD'
return self._artcmd(cmd, file) | [
"def",
"head",
"(",
"self",
",",
"message_spec",
"=",
"None",
",",
"*",
",",
"file",
"=",
"None",
")",
":",
"if",
"message_spec",
"is",
"not",
"None",
":",
"cmd",
"=",
"'HEAD {0}'",
".",
"format",
"(",
"message_spec",
")",
"else",
":",
"cmd",
"=",
"'HEAD'",
"return",
"self",
".",
"_artcmd",
"(",
"cmd",
",",
"file",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/nntplib.py#L730-L742 | |
MythTV/mythtv | d282a209cb8be85d036f85a62a8ec971b67d45f4 | mythtv/programs/scripts/internetcontent/nv_python_libs/mtv/mtv_api.py | python | Videos.searchTitle | (self, title, pagenumber, pagelen) | return elements_final | Key word video search of the MTV web site
return an array of matching item dictionaries
return | Key word video search of the MTV web site
return an array of matching item dictionaries
return | [
"Key",
"word",
"video",
"search",
"of",
"the",
"MTV",
"web",
"site",
"return",
"an",
"array",
"of",
"matching",
"item",
"dictionaries",
"return"
] | def searchTitle(self, title, pagenumber, pagelen):
'''Key word video search of the MTV web site
return an array of matching item dictionaries
return
'''
url = self.config['urls']['video.search'] % (urllib.parse.quote_plus(title.encode("utf-8")), pagenumber , pagelen,)
if self.config['debug_enabled']:
print(url)
print()
try:
etree = XmlHandler(url).getEt()
except Exception as errormsg:
raise MtvUrlError(self.error_messages['MtvUrlError'] % (url, errormsg))
if etree is None:
raise MtvVideoNotFound("No MTV Video matches found for search value (%s)" % title)
data = []
for entry in etree:
if not entry.tag.endswith('entry'):
continue
item = {}
for parts in entry:
if parts.tag.endswith('id'):
item['id'] = parts.text
if parts.tag.endswith('title'):
item['title'] = parts.text
if parts.tag.endswith('author'):
for e in parts:
if e.tag.endswith('name'):
item['media_credit'] = e.text
break
if parts.tag.endswith('published'):
item['published_parsed'] = parts.text
if parts.tag.endswith('description'):
item['media_description'] = parts.text
data.append(item)
# Make sure there are no item elements that are None
for item in data:
for key in list(item.keys()):
if item[key] is None:
item[key] = ''
# Massage each field and eliminate any item without a URL
elements_final = []
for item in data:
if not 'id' in list(item.keys()):
continue
video_details = None
try:
video_details = self.videoDetails(item['id'], urllib.parse.quote(item['title'].encode("utf-8")))
except MtvUrlError as msg:
sys.stderr.write(self.error_messages['MtvUrlError'] % msg)
except MtvVideoDetailError as msg:
sys.stderr.write(self.error_messages['MtvVideoDetailError'] % msg)
except Exception as e:
sys.stderr.write("! Error: Unknown error while retrieving a Video's meta data. Skipping video.' (%s)\nError(%s)\n" % (title, e))
if video_details:
for key in list(video_details.keys()):
item[key] = video_details[key]
item['language'] = ''
for key in list(item.keys()):
if key == 'content':
if len(item[key]):
if 'language' in item[key][0]:
if item[key][0]['language'] is not None:
item['language'] = item[key][0]['language']
if key == 'published_parsed': # '2009-12-21T00:00:00Z'
if item[key]:
pub_time = time.strptime(item[key].strip(), "%Y-%m-%dT%H:%M:%SZ")
item[key] = time.strftime('%a, %d %b %Y %H:%M:%S GMT', pub_time)
continue
if key == 'media_description' or key == 'title':
# Strip the HTML tags
if item[key]:
item[key] = self.massageDescription(item[key].strip())
item[key] = item[key].replace('|', '-')
continue
if type(item[key]) == type(''):
if item[key]:
item[key] = item[key].replace('"\n',' ').strip()
elements_final.append(item)
if not len(elements_final):
raise MtvVideoNotFound("No MTV Video matches found for search value (%s)" % title)
return elements_final | [
"def",
"searchTitle",
"(",
"self",
",",
"title",
",",
"pagenumber",
",",
"pagelen",
")",
":",
"url",
"=",
"self",
".",
"config",
"[",
"'urls'",
"]",
"[",
"'video.search'",
"]",
"%",
"(",
"urllib",
".",
"parse",
".",
"quote_plus",
"(",
"title",
".",
"encode",
"(",
"\"utf-8\"",
")",
")",
",",
"pagenumber",
",",
"pagelen",
",",
")",
"if",
"self",
".",
"config",
"[",
"'debug_enabled'",
"]",
":",
"print",
"(",
"url",
")",
"print",
"(",
")",
"try",
":",
"etree",
"=",
"XmlHandler",
"(",
"url",
")",
".",
"getEt",
"(",
")",
"except",
"Exception",
"as",
"errormsg",
":",
"raise",
"MtvUrlError",
"(",
"self",
".",
"error_messages",
"[",
"'MtvUrlError'",
"]",
"%",
"(",
"url",
",",
"errormsg",
")",
")",
"if",
"etree",
"is",
"None",
":",
"raise",
"MtvVideoNotFound",
"(",
"\"No MTV Video matches found for search value (%s)\"",
"%",
"title",
")",
"data",
"=",
"[",
"]",
"for",
"entry",
"in",
"etree",
":",
"if",
"not",
"entry",
".",
"tag",
".",
"endswith",
"(",
"'entry'",
")",
":",
"continue",
"item",
"=",
"{",
"}",
"for",
"parts",
"in",
"entry",
":",
"if",
"parts",
".",
"tag",
".",
"endswith",
"(",
"'id'",
")",
":",
"item",
"[",
"'id'",
"]",
"=",
"parts",
".",
"text",
"if",
"parts",
".",
"tag",
".",
"endswith",
"(",
"'title'",
")",
":",
"item",
"[",
"'title'",
"]",
"=",
"parts",
".",
"text",
"if",
"parts",
".",
"tag",
".",
"endswith",
"(",
"'author'",
")",
":",
"for",
"e",
"in",
"parts",
":",
"if",
"e",
".",
"tag",
".",
"endswith",
"(",
"'name'",
")",
":",
"item",
"[",
"'media_credit'",
"]",
"=",
"e",
".",
"text",
"break",
"if",
"parts",
".",
"tag",
".",
"endswith",
"(",
"'published'",
")",
":",
"item",
"[",
"'published_parsed'",
"]",
"=",
"parts",
".",
"text",
"if",
"parts",
".",
"tag",
".",
"endswith",
"(",
"'description'",
")",
":",
"item",
"[",
"'media_description'",
"]",
"=",
"parts",
".",
"text",
"data",
".",
"append",
"(",
"item",
")",
"# Make sure there are no item elements that are None",
"for",
"item",
"in",
"data",
":",
"for",
"key",
"in",
"list",
"(",
"item",
".",
"keys",
"(",
")",
")",
":",
"if",
"item",
"[",
"key",
"]",
"is",
"None",
":",
"item",
"[",
"key",
"]",
"=",
"''",
"# Massage each field and eliminate any item without a URL",
"elements_final",
"=",
"[",
"]",
"for",
"item",
"in",
"data",
":",
"if",
"not",
"'id'",
"in",
"list",
"(",
"item",
".",
"keys",
"(",
")",
")",
":",
"continue",
"video_details",
"=",
"None",
"try",
":",
"video_details",
"=",
"self",
".",
"videoDetails",
"(",
"item",
"[",
"'id'",
"]",
",",
"urllib",
".",
"parse",
".",
"quote",
"(",
"item",
"[",
"'title'",
"]",
".",
"encode",
"(",
"\"utf-8\"",
")",
")",
")",
"except",
"MtvUrlError",
"as",
"msg",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"self",
".",
"error_messages",
"[",
"'MtvUrlError'",
"]",
"%",
"msg",
")",
"except",
"MtvVideoDetailError",
"as",
"msg",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"self",
".",
"error_messages",
"[",
"'MtvVideoDetailError'",
"]",
"%",
"msg",
")",
"except",
"Exception",
"as",
"e",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"! Error: Unknown error while retrieving a Video's meta data. Skipping video.' (%s)\\nError(%s)\\n\"",
"%",
"(",
"title",
",",
"e",
")",
")",
"if",
"video_details",
":",
"for",
"key",
"in",
"list",
"(",
"video_details",
".",
"keys",
"(",
")",
")",
":",
"item",
"[",
"key",
"]",
"=",
"video_details",
"[",
"key",
"]",
"item",
"[",
"'language'",
"]",
"=",
"''",
"for",
"key",
"in",
"list",
"(",
"item",
".",
"keys",
"(",
")",
")",
":",
"if",
"key",
"==",
"'content'",
":",
"if",
"len",
"(",
"item",
"[",
"key",
"]",
")",
":",
"if",
"'language'",
"in",
"item",
"[",
"key",
"]",
"[",
"0",
"]",
":",
"if",
"item",
"[",
"key",
"]",
"[",
"0",
"]",
"[",
"'language'",
"]",
"is",
"not",
"None",
":",
"item",
"[",
"'language'",
"]",
"=",
"item",
"[",
"key",
"]",
"[",
"0",
"]",
"[",
"'language'",
"]",
"if",
"key",
"==",
"'published_parsed'",
":",
"# '2009-12-21T00:00:00Z'",
"if",
"item",
"[",
"key",
"]",
":",
"pub_time",
"=",
"time",
".",
"strptime",
"(",
"item",
"[",
"key",
"]",
".",
"strip",
"(",
")",
",",
"\"%Y-%m-%dT%H:%M:%SZ\"",
")",
"item",
"[",
"key",
"]",
"=",
"time",
".",
"strftime",
"(",
"'%a, %d %b %Y %H:%M:%S GMT'",
",",
"pub_time",
")",
"continue",
"if",
"key",
"==",
"'media_description'",
"or",
"key",
"==",
"'title'",
":",
"# Strip the HTML tags",
"if",
"item",
"[",
"key",
"]",
":",
"item",
"[",
"key",
"]",
"=",
"self",
".",
"massageDescription",
"(",
"item",
"[",
"key",
"]",
".",
"strip",
"(",
")",
")",
"item",
"[",
"key",
"]",
"=",
"item",
"[",
"key",
"]",
".",
"replace",
"(",
"'|'",
",",
"'-'",
")",
"continue",
"if",
"type",
"(",
"item",
"[",
"key",
"]",
")",
"==",
"type",
"(",
"''",
")",
":",
"if",
"item",
"[",
"key",
"]",
":",
"item",
"[",
"key",
"]",
"=",
"item",
"[",
"key",
"]",
".",
"replace",
"(",
"'\"\\n'",
",",
"' '",
")",
".",
"strip",
"(",
")",
"elements_final",
".",
"append",
"(",
"item",
")",
"if",
"not",
"len",
"(",
"elements_final",
")",
":",
"raise",
"MtvVideoNotFound",
"(",
"\"No MTV Video matches found for search value (%s)\"",
"%",
"title",
")",
"return",
"elements_final"
] | https://github.com/MythTV/mythtv/blob/d282a209cb8be85d036f85a62a8ec971b67d45f4/mythtv/programs/scripts/internetcontent/nv_python_libs/mtv/mtv_api.py#L349-L440 | |
goldeneye-source/ges-code | 2630cd8ef3d015af53c72ec2e19fc1f7e7fe8d9d | thirdparty/protobuf-2.3.0/python/google/protobuf/internal/encoder.py | python | GroupEncoder | (field_number, is_repeated, is_packed) | Returns an encoder for a group field. | Returns an encoder for a group field. | [
"Returns",
"an",
"encoder",
"for",
"a",
"group",
"field",
"."
] | def GroupEncoder(field_number, is_repeated, is_packed):
"""Returns an encoder for a group field."""
start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
assert not is_packed
if is_repeated:
def EncodeRepeatedField(write, value):
for element in value:
write(start_tag)
element._InternalSerialize(write)
write(end_tag)
return EncodeRepeatedField
else:
def EncodeField(write, value):
write(start_tag)
value._InternalSerialize(write)
return write(end_tag)
return EncodeField | [
"def",
"GroupEncoder",
"(",
"field_number",
",",
"is_repeated",
",",
"is_packed",
")",
":",
"start_tag",
"=",
"TagBytes",
"(",
"field_number",
",",
"wire_format",
".",
"WIRETYPE_START_GROUP",
")",
"end_tag",
"=",
"TagBytes",
"(",
"field_number",
",",
"wire_format",
".",
"WIRETYPE_END_GROUP",
")",
"assert",
"not",
"is_packed",
"if",
"is_repeated",
":",
"def",
"EncodeRepeatedField",
"(",
"write",
",",
"value",
")",
":",
"for",
"element",
"in",
"value",
":",
"write",
"(",
"start_tag",
")",
"element",
".",
"_InternalSerialize",
"(",
"write",
")",
"write",
"(",
"end_tag",
")",
"return",
"EncodeRepeatedField",
"else",
":",
"def",
"EncodeField",
"(",
"write",
",",
"value",
")",
":",
"write",
"(",
"start_tag",
")",
"value",
".",
"_InternalSerialize",
"(",
"write",
")",
"return",
"write",
"(",
"end_tag",
")",
"return",
"EncodeField"
] | https://github.com/goldeneye-source/ges-code/blob/2630cd8ef3d015af53c72ec2e19fc1f7e7fe8d9d/thirdparty/protobuf-2.3.0/python/google/protobuf/internal/encoder.py#L615-L633 | ||
y123456yz/reading-and-annotate-mongodb-3.6 | 93280293672ca7586dc24af18132aa61e4ed7fcf | mongo/buildscripts/cpplint.py | python | RemoveMultiLineCommentsFromRange | (lines, begin, end) | Clears a range of lines for multi-line comments. | Clears a range of lines for multi-line comments. | [
"Clears",
"a",
"range",
"of",
"lines",
"for",
"multi",
"-",
"line",
"comments",
"."
] | def RemoveMultiLineCommentsFromRange(lines, begin, end):
"""Clears a range of lines for multi-line comments."""
# Having // dummy comments makes the lines non-empty, so we will not get
# unnecessary blank line warnings later in the code.
for i in range(begin, end):
lines[i] = '// dummy' | [
"def",
"RemoveMultiLineCommentsFromRange",
"(",
"lines",
",",
"begin",
",",
"end",
")",
":",
"# Having // dummy comments makes the lines non-empty, so we will not get",
"# unnecessary blank line warnings later in the code.",
"for",
"i",
"in",
"range",
"(",
"begin",
",",
"end",
")",
":",
"lines",
"[",
"i",
"]",
"=",
"'// dummy'"
] | https://github.com/y123456yz/reading-and-annotate-mongodb-3.6/blob/93280293672ca7586dc24af18132aa61e4ed7fcf/mongo/buildscripts/cpplint.py#L1245-L1250 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/setuptools/py3/setuptools/config.py | python | ConfigHandler._parse_list | (cls, value, separator=',') | return [chunk.strip() for chunk in value if chunk.strip()] | Represents value as a list.
Value is split either by separator (defaults to comma) or by lines.
:param value:
:param separator: List items separator character.
:rtype: list | Represents value as a list. | [
"Represents",
"value",
"as",
"a",
"list",
"."
] | def _parse_list(cls, value, separator=','):
"""Represents value as a list.
Value is split either by separator (defaults to comma) or by lines.
:param value:
:param separator: List items separator character.
:rtype: list
"""
if isinstance(value, list): # _get_parser_compound case
return value
if '\n' in value:
value = value.splitlines()
else:
value = value.split(separator)
return [chunk.strip() for chunk in value if chunk.strip()] | [
"def",
"_parse_list",
"(",
"cls",
",",
"value",
",",
"separator",
"=",
"','",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"list",
")",
":",
"# _get_parser_compound case",
"return",
"value",
"if",
"'\\n'",
"in",
"value",
":",
"value",
"=",
"value",
".",
"splitlines",
"(",
")",
"else",
":",
"value",
"=",
"value",
".",
"split",
"(",
"separator",
")",
"return",
"[",
"chunk",
".",
"strip",
"(",
")",
"for",
"chunk",
"in",
"value",
"if",
"chunk",
".",
"strip",
"(",
")",
"]"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/setuptools/py3/setuptools/config.py#L241-L258 | |
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/eager/wrap_function.py | python | _get_element_from_tensor_info | (tensor_info, graph) | Simplified copy of the deprecated `get_tensor_from_tensor_info`. | Simplified copy of the deprecated `get_tensor_from_tensor_info`. | [
"Simplified",
"copy",
"of",
"the",
"deprecated",
"get_tensor_from_tensor_info",
"."
] | def _get_element_from_tensor_info(tensor_info, graph):
"""Simplified copy of the deprecated `get_tensor_from_tensor_info`."""
encoding = tensor_info.WhichOneof("encoding")
if encoding == "name":
# We may get operations here in some cases. TensorInfo is a bit of a
# misnomer if so.
return graph.as_graph_element(tensor_info.name)
elif encoding == "coo_sparse":
return sparse_tensor.SparseTensor(
graph.get_tensor_by_name(tensor_info.coo_sparse.indices_tensor_name),
graph.get_tensor_by_name(tensor_info.coo_sparse.values_tensor_name),
graph.get_tensor_by_name(
tensor_info.coo_sparse.dense_shape_tensor_name))
elif encoding == "composite_tensor":
spec_proto = struct_pb2.StructuredValue(
type_spec_value=tensor_info.composite_tensor.type_spec)
spec = nested_structure_coder.decode_proto(spec_proto)
components = [graph.get_tensor_by_name(component.name) for component in
tensor_info.composite_tensor.components]
return spec._from_components(components) # pylint: disable=protected-access
else:
raise ValueError(f"Invalid TensorInfo.encoding: {encoding}. Valid "
"encodings are 'name', 'coo_sparse', and "
"'composite_tensor'.") | [
"def",
"_get_element_from_tensor_info",
"(",
"tensor_info",
",",
"graph",
")",
":",
"encoding",
"=",
"tensor_info",
".",
"WhichOneof",
"(",
"\"encoding\"",
")",
"if",
"encoding",
"==",
"\"name\"",
":",
"# We may get operations here in some cases. TensorInfo is a bit of a",
"# misnomer if so.",
"return",
"graph",
".",
"as_graph_element",
"(",
"tensor_info",
".",
"name",
")",
"elif",
"encoding",
"==",
"\"coo_sparse\"",
":",
"return",
"sparse_tensor",
".",
"SparseTensor",
"(",
"graph",
".",
"get_tensor_by_name",
"(",
"tensor_info",
".",
"coo_sparse",
".",
"indices_tensor_name",
")",
",",
"graph",
".",
"get_tensor_by_name",
"(",
"tensor_info",
".",
"coo_sparse",
".",
"values_tensor_name",
")",
",",
"graph",
".",
"get_tensor_by_name",
"(",
"tensor_info",
".",
"coo_sparse",
".",
"dense_shape_tensor_name",
")",
")",
"elif",
"encoding",
"==",
"\"composite_tensor\"",
":",
"spec_proto",
"=",
"struct_pb2",
".",
"StructuredValue",
"(",
"type_spec_value",
"=",
"tensor_info",
".",
"composite_tensor",
".",
"type_spec",
")",
"spec",
"=",
"nested_structure_coder",
".",
"decode_proto",
"(",
"spec_proto",
")",
"components",
"=",
"[",
"graph",
".",
"get_tensor_by_name",
"(",
"component",
".",
"name",
")",
"for",
"component",
"in",
"tensor_info",
".",
"composite_tensor",
".",
"components",
"]",
"return",
"spec",
".",
"_from_components",
"(",
"components",
")",
"# pylint: disable=protected-access",
"else",
":",
"raise",
"ValueError",
"(",
"f\"Invalid TensorInfo.encoding: {encoding}. Valid \"",
"\"encodings are 'name', 'coo_sparse', and \"",
"\"'composite_tensor'.\"",
")"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/eager/wrap_function.py#L94-L117 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/more-itertools/py3/more_itertools/more.py | python | mark_ends | (iterable) | Yield 3-tuples of the form ``(is_first, is_last, item)``.
>>> list(mark_ends('ABC'))
[(True, False, 'A'), (False, False, 'B'), (False, True, 'C')]
Use this when looping over an iterable to take special action on its first
and/or last items:
>>> iterable = ['Header', 100, 200, 'Footer']
>>> total = 0
>>> for is_first, is_last, item in mark_ends(iterable):
... if is_first:
... continue # Skip the header
... if is_last:
... continue # Skip the footer
... total += item
>>> print(total)
300 | Yield 3-tuples of the form ``(is_first, is_last, item)``. | [
"Yield",
"3",
"-",
"tuples",
"of",
"the",
"form",
"(",
"is_first",
"is_last",
"item",
")",
"."
] | def mark_ends(iterable):
"""Yield 3-tuples of the form ``(is_first, is_last, item)``.
>>> list(mark_ends('ABC'))
[(True, False, 'A'), (False, False, 'B'), (False, True, 'C')]
Use this when looping over an iterable to take special action on its first
and/or last items:
>>> iterable = ['Header', 100, 200, 'Footer']
>>> total = 0
>>> for is_first, is_last, item in mark_ends(iterable):
... if is_first:
... continue # Skip the header
... if is_last:
... continue # Skip the footer
... total += item
>>> print(total)
300
"""
it = iter(iterable)
try:
b = next(it)
except StopIteration:
return
try:
for i in count():
a = b
b = next(it)
yield i == 0, False, a
except StopIteration:
yield i == 0, True, a | [
"def",
"mark_ends",
"(",
"iterable",
")",
":",
"it",
"=",
"iter",
"(",
"iterable",
")",
"try",
":",
"b",
"=",
"next",
"(",
"it",
")",
"except",
"StopIteration",
":",
"return",
"try",
":",
"for",
"i",
"in",
"count",
"(",
")",
":",
"a",
"=",
"b",
"b",
"=",
"next",
"(",
"it",
")",
"yield",
"i",
"==",
"0",
",",
"False",
",",
"a",
"except",
"StopIteration",
":",
"yield",
"i",
"==",
"0",
",",
"True",
",",
"a"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/more-itertools/py3/more_itertools/more.py#L2294-L2328 | ||
miyosuda/TensorFlowAndroidMNIST | 7b5a4603d2780a8a2834575706e9001977524007 | jni-build/jni/include/tensorflow/python/training/summary_io.py | python | summary_iterator | (path) | An iterator for reading `Event` protocol buffers from an event file.
You can use this function to read events written to an event file. It returns
a Python iterator that yields `Event` protocol buffers.
Example: Print the contents of an events file.
```python
for e in tf.train.summary_iterator(path to events file):
print(e)
```
Example: Print selected summary values.
```python
# This example supposes that the events file contains summaries with a
# summary value tag 'loss'. These could have been added by calling
# `add_summary()`, passing the output of a scalar summary op created with
# with: `tf.scalar_summary(['loss'], loss_tensor)`.
for e in tf.train.summary_iterator(path to events file):
for v in e.summary.value:
if v.tag == 'loss':
print(v.simple_value)
```
See the protocol buffer definitions of
[Event](https://www.tensorflow.org/code/tensorflow/core/util/event.proto)
and
[Summary](https://www.tensorflow.org/code/tensorflow/core/framework/summary.proto)
for more information about their attributes.
Args:
path: The path to an event file created by a `SummaryWriter`.
Yields:
`Event` protocol buffers. | An iterator for reading `Event` protocol buffers from an event file. | [
"An",
"iterator",
"for",
"reading",
"Event",
"protocol",
"buffers",
"from",
"an",
"event",
"file",
"."
] | def summary_iterator(path):
"""An iterator for reading `Event` protocol buffers from an event file.
You can use this function to read events written to an event file. It returns
a Python iterator that yields `Event` protocol buffers.
Example: Print the contents of an events file.
```python
for e in tf.train.summary_iterator(path to events file):
print(e)
```
Example: Print selected summary values.
```python
# This example supposes that the events file contains summaries with a
# summary value tag 'loss'. These could have been added by calling
# `add_summary()`, passing the output of a scalar summary op created with
# with: `tf.scalar_summary(['loss'], loss_tensor)`.
for e in tf.train.summary_iterator(path to events file):
for v in e.summary.value:
if v.tag == 'loss':
print(v.simple_value)
```
See the protocol buffer definitions of
[Event](https://www.tensorflow.org/code/tensorflow/core/util/event.proto)
and
[Summary](https://www.tensorflow.org/code/tensorflow/core/framework/summary.proto)
for more information about their attributes.
Args:
path: The path to an event file created by a `SummaryWriter`.
Yields:
`Event` protocol buffers.
"""
for r in tf_record.tf_record_iterator(path):
yield event_pb2.Event.FromString(r) | [
"def",
"summary_iterator",
"(",
"path",
")",
":",
"for",
"r",
"in",
"tf_record",
".",
"tf_record_iterator",
"(",
"path",
")",
":",
"yield",
"event_pb2",
".",
"Event",
".",
"FromString",
"(",
"r",
")"
] | https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/python/training/summary_io.py#L320-L359 | ||
deepmind/spiral | 5ee538cedf1d9cc827ced93fe86a44f8b8742ac0 | spiral/environments/utils.py | python | rgb_to_hsv | (red, green, blue) | return hue, saturation, value | Converts RGB to HSV. | Converts RGB to HSV. | [
"Converts",
"RGB",
"to",
"HSV",
"."
] | def rgb_to_hsv(red, green, blue):
"""Converts RGB to HSV."""
hue = 0.0
red = np.clip(red, 0.0, 1.0)
green = np.clip(green, 0.0, 1.0)
blue = np.clip(blue, 0.0, 1.0)
max_value = np.max([red, green, blue])
min_value = np.min([red, green, blue])
value = max_value
delta = max_value - min_value
if delta > 0.0001:
saturation = delta / max_value
if red == max_value:
hue = (green - blue) / delta
if hue < 0.0:
hue += 6.0
elif green == max_value:
hue = 2.0 + (blue - red) / delta
elif blue == max_value:
hue = 4.0 + (red - green) / delta
hue /= 6.0
else:
saturation = 0.0
hue = 0.0
return hue, saturation, value | [
"def",
"rgb_to_hsv",
"(",
"red",
",",
"green",
",",
"blue",
")",
":",
"hue",
"=",
"0.0",
"red",
"=",
"np",
".",
"clip",
"(",
"red",
",",
"0.0",
",",
"1.0",
")",
"green",
"=",
"np",
".",
"clip",
"(",
"green",
",",
"0.0",
",",
"1.0",
")",
"blue",
"=",
"np",
".",
"clip",
"(",
"blue",
",",
"0.0",
",",
"1.0",
")",
"max_value",
"=",
"np",
".",
"max",
"(",
"[",
"red",
",",
"green",
",",
"blue",
"]",
")",
"min_value",
"=",
"np",
".",
"min",
"(",
"[",
"red",
",",
"green",
",",
"blue",
"]",
")",
"value",
"=",
"max_value",
"delta",
"=",
"max_value",
"-",
"min_value",
"if",
"delta",
">",
"0.0001",
":",
"saturation",
"=",
"delta",
"/",
"max_value",
"if",
"red",
"==",
"max_value",
":",
"hue",
"=",
"(",
"green",
"-",
"blue",
")",
"/",
"delta",
"if",
"hue",
"<",
"0.0",
":",
"hue",
"+=",
"6.0",
"elif",
"green",
"==",
"max_value",
":",
"hue",
"=",
"2.0",
"+",
"(",
"blue",
"-",
"red",
")",
"/",
"delta",
"elif",
"blue",
"==",
"max_value",
":",
"hue",
"=",
"4.0",
"+",
"(",
"red",
"-",
"green",
")",
"/",
"delta",
"hue",
"/=",
"6.0",
"else",
":",
"saturation",
"=",
"0.0",
"hue",
"=",
"0.0",
"return",
"hue",
",",
"saturation",
",",
"value"
] | https://github.com/deepmind/spiral/blob/5ee538cedf1d9cc827ced93fe86a44f8b8742ac0/spiral/environments/utils.py#L31-L62 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/richtext.py | python | RichTextHTMLHandler.ClearTemporaryImageLocations | (*args, **kwargs) | return _richtext.RichTextHTMLHandler_ClearTemporaryImageLocations(*args, **kwargs) | ClearTemporaryImageLocations(self)
Clear the image locations generated by the last operation | ClearTemporaryImageLocations(self) | [
"ClearTemporaryImageLocations",
"(",
"self",
")"
] | def ClearTemporaryImageLocations(*args, **kwargs):
"""
ClearTemporaryImageLocations(self)
Clear the image locations generated by the last operation
"""
return _richtext.RichTextHTMLHandler_ClearTemporaryImageLocations(*args, **kwargs) | [
"def",
"ClearTemporaryImageLocations",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_richtext",
".",
"RichTextHTMLHandler_ClearTemporaryImageLocations",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/richtext.py#L4352-L4358 | |
microsoft/LightGBM | 904b2d5158703c4900b68008617951dd2f9ff21b | python-package/lightgbm/basic.py | python | _InnerPredictor.__get_num_preds | (self, start_iteration, num_iteration, nrow, predict_type) | return n_preds.value | Get size of prediction result. | Get size of prediction result. | [
"Get",
"size",
"of",
"prediction",
"result",
"."
] | def __get_num_preds(self, start_iteration, num_iteration, nrow, predict_type):
"""Get size of prediction result."""
if nrow > MAX_INT32:
raise LightGBMError('LightGBM cannot perform prediction for data '
f'with number of rows greater than MAX_INT32 ({MAX_INT32}).\n'
'You can split your data into chunks '
'and then concatenate predictions for them')
n_preds = ctypes.c_int64(0)
_safe_call(_LIB.LGBM_BoosterCalcNumPredict(
self.handle,
ctypes.c_int(nrow),
ctypes.c_int(predict_type),
ctypes.c_int(start_iteration),
ctypes.c_int(num_iteration),
ctypes.byref(n_preds)))
return n_preds.value | [
"def",
"__get_num_preds",
"(",
"self",
",",
"start_iteration",
",",
"num_iteration",
",",
"nrow",
",",
"predict_type",
")",
":",
"if",
"nrow",
">",
"MAX_INT32",
":",
"raise",
"LightGBMError",
"(",
"'LightGBM cannot perform prediction for data '",
"f'with number of rows greater than MAX_INT32 ({MAX_INT32}).\\n'",
"'You can split your data into chunks '",
"'and then concatenate predictions for them'",
")",
"n_preds",
"=",
"ctypes",
".",
"c_int64",
"(",
"0",
")",
"_safe_call",
"(",
"_LIB",
".",
"LGBM_BoosterCalcNumPredict",
"(",
"self",
".",
"handle",
",",
"ctypes",
".",
"c_int",
"(",
"nrow",
")",
",",
"ctypes",
".",
"c_int",
"(",
"predict_type",
")",
",",
"ctypes",
".",
"c_int",
"(",
"start_iteration",
")",
",",
"ctypes",
".",
"c_int",
"(",
"num_iteration",
")",
",",
"ctypes",
".",
"byref",
"(",
"n_preds",
")",
")",
")",
"return",
"n_preds",
".",
"value"
] | https://github.com/microsoft/LightGBM/blob/904b2d5158703c4900b68008617951dd2f9ff21b/python-package/lightgbm/basic.py#L826-L841 | |
turi-code/SFrame | 796b9bdfb2fa1b881d82080754643c7e68629cd2 | oss_src/unity/python/sframe/data_structures/sframe.py | python | SFrame.to_odbc | (self, db, table_name, append_if_exists=False, verbose=True) | Convert an SFrame to a table in a database.
By default, searches for a table in the database with the given name.
If found, this will attempt to append all the rows of the SFrame to the
end of the table. If not, this will create a new table with the given
name. This behavior is toggled with the `append_if_exists` flag.
When creating a new table, GraphLab Create uses a heuristic approach to
pick a corresponding type for each column in the SFrame using the type
information supplied by the database's ODBC driver. Your driver must
support giving this type information for GraphLab Create to support
writing to the database.
To allow more expressive and accurate naming, `to_odbc` puts quotes
around each identifier (table names and column names). Depending on
your database, you may need to refer to the created table with quote
characters around the name. This character is not the same for all
databases, but '"' is the most common.
Parameters
----------
db : `graphlab.extensions._odbc_connection.unity_odbc_connection`
An ODBC connection object. This can only be obtained by calling
`graphlab.connect_odbc`. Check that documentation for how to create
this object.
table_name : str
The name of the table you would like to create/append to.
append_if_exists : bool
If True, this will attempt to append to the table named `table_name`
if it is found to exist in the database.
verbose : bool
Print progress updates on the insertion process.
Notes
-----
This functionality is only supported when using GraphLab Create
entirely on your local machine. Therefore, GraphLab Create's EC2 and
Hadoop execution modes will not be able to use ODBC. Note that this
"local machine" rule does not apply to the machine your database is
running on, which can (and often will) be running on a separate
machine.
Examples
--------
>>> db = graphlab.connect_odbc("DSN=my_awesome_dsn;UID=user;PWD=mypassword")
>>> sf = graphlab.SFrame({'a':[1,2,3],'b':['hi','pika','bye']})
>>> sf.to_odbc(db, 'a_cool_table') | Convert an SFrame to a table in a database. | [
"Convert",
"an",
"SFrame",
"to",
"a",
"table",
"in",
"a",
"database",
"."
] | def to_odbc(self, db, table_name, append_if_exists=False, verbose=True):
"""
Convert an SFrame to a table in a database.
By default, searches for a table in the database with the given name.
If found, this will attempt to append all the rows of the SFrame to the
end of the table. If not, this will create a new table with the given
name. This behavior is toggled with the `append_if_exists` flag.
When creating a new table, GraphLab Create uses a heuristic approach to
pick a corresponding type for each column in the SFrame using the type
information supplied by the database's ODBC driver. Your driver must
support giving this type information for GraphLab Create to support
writing to the database.
To allow more expressive and accurate naming, `to_odbc` puts quotes
around each identifier (table names and column names). Depending on
your database, you may need to refer to the created table with quote
characters around the name. This character is not the same for all
databases, but '"' is the most common.
Parameters
----------
db : `graphlab.extensions._odbc_connection.unity_odbc_connection`
An ODBC connection object. This can only be obtained by calling
`graphlab.connect_odbc`. Check that documentation for how to create
this object.
table_name : str
The name of the table you would like to create/append to.
append_if_exists : bool
If True, this will attempt to append to the table named `table_name`
if it is found to exist in the database.
verbose : bool
Print progress updates on the insertion process.
Notes
-----
This functionality is only supported when using GraphLab Create
entirely on your local machine. Therefore, GraphLab Create's EC2 and
Hadoop execution modes will not be able to use ODBC. Note that this
"local machine" rule does not apply to the machine your database is
running on, which can (and often will) be running on a separate
machine.
Examples
--------
>>> db = graphlab.connect_odbc("DSN=my_awesome_dsn;UID=user;PWD=mypassword")
>>> sf = graphlab.SFrame({'a':[1,2,3],'b':['hi','pika','bye']})
>>> sf.to_odbc(db, 'a_cool_table')
"""
_mt._get_metric_tracker().track('sframe.to_odbc')
if (not verbose):
glconnect.get_server().set_log_progress(False)
db._insert_sframe(self, table_name, append_if_exists)
if (not verbose):
glconnect.get_server().set_log_progress(True) | [
"def",
"to_odbc",
"(",
"self",
",",
"db",
",",
"table_name",
",",
"append_if_exists",
"=",
"False",
",",
"verbose",
"=",
"True",
")",
":",
"_mt",
".",
"_get_metric_tracker",
"(",
")",
".",
"track",
"(",
"'sframe.to_odbc'",
")",
"if",
"(",
"not",
"verbose",
")",
":",
"glconnect",
".",
"get_server",
"(",
")",
".",
"set_log_progress",
"(",
"False",
")",
"db",
".",
"_insert_sframe",
"(",
"self",
",",
"table_name",
",",
"append_if_exists",
")",
"if",
"(",
"not",
"verbose",
")",
":",
"glconnect",
".",
"get_server",
"(",
")",
".",
"set_log_progress",
"(",
"True",
")"
] | https://github.com/turi-code/SFrame/blob/796b9bdfb2fa1b881d82080754643c7e68629cd2/oss_src/unity/python/sframe/data_structures/sframe.py#L2073-L2135 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/ipython/py2/IPython/utils/frame.py | python | extract_module_locals | (depth=0) | return (module, f.f_locals) | Returns (module, locals) of the function `depth` frames away from the caller | Returns (module, locals) of the function `depth` frames away from the caller | [
"Returns",
"(",
"module",
"locals",
")",
"of",
"the",
"function",
"depth",
"frames",
"away",
"from",
"the",
"caller"
] | def extract_module_locals(depth=0):
"""Returns (module, locals) of the function `depth` frames away from the caller"""
f = sys._getframe(depth + 1)
global_ns = f.f_globals
module = sys.modules[global_ns['__name__']]
return (module, f.f_locals) | [
"def",
"extract_module_locals",
"(",
"depth",
"=",
"0",
")",
":",
"f",
"=",
"sys",
".",
"_getframe",
"(",
"depth",
"+",
"1",
")",
"global_ns",
"=",
"f",
".",
"f_globals",
"module",
"=",
"sys",
".",
"modules",
"[",
"global_ns",
"[",
"'__name__'",
"]",
"]",
"return",
"(",
"module",
",",
"f",
".",
"f_locals",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/ipython/py2/IPython/utils/frame.py#L92-L97 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/_controls.py | python | TreeCtrl.SetItemPyData | (*args, **kwargs) | return _controls_.TreeCtrl_SetItemPyData(*args, **kwargs) | SetItemPyData(self, TreeItemId item, PyObject obj) | SetItemPyData(self, TreeItemId item, PyObject obj) | [
"SetItemPyData",
"(",
"self",
"TreeItemId",
"item",
"PyObject",
"obj",
")"
] | def SetItemPyData(*args, **kwargs):
"""SetItemPyData(self, TreeItemId item, PyObject obj)"""
return _controls_.TreeCtrl_SetItemPyData(*args, **kwargs) | [
"def",
"SetItemPyData",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_controls_",
".",
"TreeCtrl_SetItemPyData",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_controls.py#L5298-L5300 | |
apiaryio/snowcrash | b5b39faa85f88ee17459edf39fdc6fe4fc70d2e3 | tools/gyp/pylib/gyp/generator/msvs.py | python | _GenerateMSVSProject | (project, options, version, generator_flags) | return missing_sources | Generates a .vcproj file. It may create .rules and .user files too.
Arguments:
project: The project object we will generate the file for.
options: Global options passed to the generator.
version: The VisualStudioVersion object.
generator_flags: dict of generator-specific flags. | Generates a .vcproj file. It may create .rules and .user files too. | [
"Generates",
"a",
".",
"vcproj",
"file",
".",
"It",
"may",
"create",
".",
"rules",
"and",
".",
"user",
"files",
"too",
"."
] | def _GenerateMSVSProject(project, options, version, generator_flags):
"""Generates a .vcproj file. It may create .rules and .user files too.
Arguments:
project: The project object we will generate the file for.
options: Global options passed to the generator.
version: The VisualStudioVersion object.
generator_flags: dict of generator-specific flags.
"""
spec = project.spec
gyp.common.EnsureDirExists(project.path)
platforms = _GetUniquePlatforms(spec)
p = MSVSProject.Writer(project.path, version, spec['target_name'],
project.guid, platforms)
# Get directory project file is in.
project_dir = os.path.split(project.path)[0]
gyp_path = _NormalizedSource(project.build_file)
relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
config_type = _GetMSVSConfigurationType(spec, project.build_file)
for config_name, config in spec['configurations'].iteritems():
_AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
# MSVC08 and prior version cannot handle duplicate basenames in the same
# target.
# TODO: Take excluded sources into consideration if possible.
_ValidateSourcesForMSVSProject(spec, version)
# Prepare list of sources and excluded sources.
gyp_file = os.path.split(project.build_file)[1]
sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
gyp_file)
# Add rules.
actions_to_add = {}
_GenerateRulesForMSVS(p, project_dir, options, spec,
sources, excluded_sources,
actions_to_add)
list_excluded = generator_flags.get('msvs_list_excluded_files', True)
sources, excluded_sources, excluded_idl = (
_AdjustSourcesAndConvertToFilterHierarchy(spec, options, project_dir,
sources, excluded_sources,
list_excluded, version))
# Add in files.
missing_sources = _VerifySourcesExist(sources, project_dir)
p.AddFiles(sources)
_AddToolFilesToMSVS(p, spec)
_HandlePreCompiledHeaders(p, sources, spec)
_AddActions(actions_to_add, spec, relative_path_of_gyp_file)
_AddCopies(actions_to_add, spec)
_WriteMSVSUserFile(project.path, version, spec)
# NOTE: this stanza must appear after all actions have been decided.
# Don't excluded sources with actions attached, or they won't run.
excluded_sources = _FilterActionsFromExcluded(
excluded_sources, actions_to_add)
_ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
list_excluded)
_AddAccumulatedActionsToMSVS(p, spec, actions_to_add)
# Write it out.
p.WriteIfChanged()
return missing_sources | [
"def",
"_GenerateMSVSProject",
"(",
"project",
",",
"options",
",",
"version",
",",
"generator_flags",
")",
":",
"spec",
"=",
"project",
".",
"spec",
"gyp",
".",
"common",
".",
"EnsureDirExists",
"(",
"project",
".",
"path",
")",
"platforms",
"=",
"_GetUniquePlatforms",
"(",
"spec",
")",
"p",
"=",
"MSVSProject",
".",
"Writer",
"(",
"project",
".",
"path",
",",
"version",
",",
"spec",
"[",
"'target_name'",
"]",
",",
"project",
".",
"guid",
",",
"platforms",
")",
"# Get directory project file is in.",
"project_dir",
"=",
"os",
".",
"path",
".",
"split",
"(",
"project",
".",
"path",
")",
"[",
"0",
"]",
"gyp_path",
"=",
"_NormalizedSource",
"(",
"project",
".",
"build_file",
")",
"relative_path_of_gyp_file",
"=",
"gyp",
".",
"common",
".",
"RelativePath",
"(",
"gyp_path",
",",
"project_dir",
")",
"config_type",
"=",
"_GetMSVSConfigurationType",
"(",
"spec",
",",
"project",
".",
"build_file",
")",
"for",
"config_name",
",",
"config",
"in",
"spec",
"[",
"'configurations'",
"]",
".",
"iteritems",
"(",
")",
":",
"_AddConfigurationToMSVSProject",
"(",
"p",
",",
"spec",
",",
"config_type",
",",
"config_name",
",",
"config",
")",
"# MSVC08 and prior version cannot handle duplicate basenames in the same",
"# target.",
"# TODO: Take excluded sources into consideration if possible.",
"_ValidateSourcesForMSVSProject",
"(",
"spec",
",",
"version",
")",
"# Prepare list of sources and excluded sources.",
"gyp_file",
"=",
"os",
".",
"path",
".",
"split",
"(",
"project",
".",
"build_file",
")",
"[",
"1",
"]",
"sources",
",",
"excluded_sources",
"=",
"_PrepareListOfSources",
"(",
"spec",
",",
"generator_flags",
",",
"gyp_file",
")",
"# Add rules.",
"actions_to_add",
"=",
"{",
"}",
"_GenerateRulesForMSVS",
"(",
"p",
",",
"project_dir",
",",
"options",
",",
"spec",
",",
"sources",
",",
"excluded_sources",
",",
"actions_to_add",
")",
"list_excluded",
"=",
"generator_flags",
".",
"get",
"(",
"'msvs_list_excluded_files'",
",",
"True",
")",
"sources",
",",
"excluded_sources",
",",
"excluded_idl",
"=",
"(",
"_AdjustSourcesAndConvertToFilterHierarchy",
"(",
"spec",
",",
"options",
",",
"project_dir",
",",
"sources",
",",
"excluded_sources",
",",
"list_excluded",
",",
"version",
")",
")",
"# Add in files.",
"missing_sources",
"=",
"_VerifySourcesExist",
"(",
"sources",
",",
"project_dir",
")",
"p",
".",
"AddFiles",
"(",
"sources",
")",
"_AddToolFilesToMSVS",
"(",
"p",
",",
"spec",
")",
"_HandlePreCompiledHeaders",
"(",
"p",
",",
"sources",
",",
"spec",
")",
"_AddActions",
"(",
"actions_to_add",
",",
"spec",
",",
"relative_path_of_gyp_file",
")",
"_AddCopies",
"(",
"actions_to_add",
",",
"spec",
")",
"_WriteMSVSUserFile",
"(",
"project",
".",
"path",
",",
"version",
",",
"spec",
")",
"# NOTE: this stanza must appear after all actions have been decided.",
"# Don't excluded sources with actions attached, or they won't run.",
"excluded_sources",
"=",
"_FilterActionsFromExcluded",
"(",
"excluded_sources",
",",
"actions_to_add",
")",
"_ExcludeFilesFromBeingBuilt",
"(",
"p",
",",
"spec",
",",
"excluded_sources",
",",
"excluded_idl",
",",
"list_excluded",
")",
"_AddAccumulatedActionsToMSVS",
"(",
"p",
",",
"spec",
",",
"actions_to_add",
")",
"# Write it out.",
"p",
".",
"WriteIfChanged",
"(",
")",
"return",
"missing_sources"
] | https://github.com/apiaryio/snowcrash/blob/b5b39faa85f88ee17459edf39fdc6fe4fc70d2e3/tools/gyp/pylib/gyp/generator/msvs.py#L967-L1034 | |
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | tools/perf/profile_creators/cookie_profile_extender.py | python | CookieProfileExtender.FetchWebPageReplayArchives | (self) | Superclass override. | Superclass override. | [
"Superclass",
"override",
"."
] | def FetchWebPageReplayArchives(self):
"""Superclass override."""
self._page_set.wpr_archive_info.DownloadArchivesIfNeeded() | [
"def",
"FetchWebPageReplayArchives",
"(",
"self",
")",
":",
"self",
".",
"_page_set",
".",
"wpr_archive_info",
".",
"DownloadArchivesIfNeeded",
"(",
")"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/perf/profile_creators/cookie_profile_extender.py#L63-L65 | ||
hughperkins/tf-coriander | 970d3df6c11400ad68405f22b0c42a52374e94ca | tensorflow/python/summary/summary_iterator.py | python | SummaryWriter.add_event | (self, event) | Adds an event to the event file.
Args:
event: An `Event` protocol buffer. | Adds an event to the event file. | [
"Adds",
"an",
"event",
"to",
"the",
"event",
"file",
"."
] | def add_event(self, event):
"""Adds an event to the event file.
Args:
event: An `Event` protocol buffer.
"""
if not self._closed:
self._event_queue.put(event) | [
"def",
"add_event",
"(",
"self",
",",
"event",
")",
":",
"if",
"not",
"self",
".",
"_closed",
":",
"self",
".",
"_event_queue",
".",
"put",
"(",
"event",
")"
] | https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/python/summary/summary_iterator.py#L175-L182 | ||
OSGeo/PROJ | ca1ec7b147087f18bec0b3cf0b0548f3f034c675 | docs/plot/plot.py | python | resample_polygon | (polygon) | return Polygon(ext, rings) | Use interp_coords() to resample (multi)polygons. | Use interp_coords() to resample (multi)polygons. | [
"Use",
"interp_coords",
"()",
"to",
"resample",
"(",
"multi",
")",
"polygons",
"."
] | def resample_polygon(polygon):
'''
Use interp_coords() to resample (multi)polygons.
'''
try:
xy = polygon.exterior.coords.xy
except AttributeError: #no xy's
return polygon
ext = interp_coords(xy, 2)
# interiors
rings = []
for int_ring in polygon.interiors:
rings.append(interp_coords(int_ring.coords.xy, 2))
return Polygon(ext, rings) | [
"def",
"resample_polygon",
"(",
"polygon",
")",
":",
"try",
":",
"xy",
"=",
"polygon",
".",
"exterior",
".",
"coords",
".",
"xy",
"except",
"AttributeError",
":",
"#no xy's",
"return",
"polygon",
"ext",
"=",
"interp_coords",
"(",
"xy",
",",
"2",
")",
"# interiors",
"rings",
"=",
"[",
"]",
"for",
"int_ring",
"in",
"polygon",
".",
"interiors",
":",
"rings",
".",
"append",
"(",
"interp_coords",
"(",
"int_ring",
".",
"coords",
".",
"xy",
",",
"2",
")",
")",
"return",
"Polygon",
"(",
"ext",
",",
"rings",
")"
] | https://github.com/OSGeo/PROJ/blob/ca1ec7b147087f18bec0b3cf0b0548f3f034c675/docs/plot/plot.py#L226-L239 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/_misc.py | python | ArtProvider.GetNativeSizeHint | (*args, **kwargs) | return _misc_.ArtProvider_GetNativeSizeHint(*args, **kwargs) | GetNativeSizeHint(wxArtClient client) -> Size
Gets native size for given 'client' or wxDefaultSize if it doesn't
have native equivalent. | GetNativeSizeHint(wxArtClient client) -> Size | [
"GetNativeSizeHint",
"(",
"wxArtClient",
"client",
")",
"-",
">",
"Size"
] | def GetNativeSizeHint(*args, **kwargs):
"""
GetNativeSizeHint(wxArtClient client) -> Size
Gets native size for given 'client' or wxDefaultSize if it doesn't
have native equivalent.
"""
return _misc_.ArtProvider_GetNativeSizeHint(*args, **kwargs) | [
"def",
"GetNativeSizeHint",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_misc_",
".",
"ArtProvider_GetNativeSizeHint",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_misc.py#L2865-L2872 | |
krishauser/Klampt | 972cc83ea5befac3f653c1ba20f80155768ad519 | Python/python2_version/klampt/plan/motionplanning.py | python | PlannerInterface.getMilestone | (self, arg2) | return _motionplanning.PlannerInterface_getMilestone(self, arg2) | Args:
arg2 (int)
Returns:
(:obj:`object`): | Args:
arg2 (int)
Returns:
(:obj:`object`): | [
"Args",
":",
"arg2",
"(",
"int",
")",
"Returns",
":",
"(",
":",
"obj",
":",
"object",
")",
":"
] | def getMilestone(self, arg2):
"""
Args:
arg2 (int)
Returns:
(:obj:`object`):
"""
return _motionplanning.PlannerInterface_getMilestone(self, arg2) | [
"def",
"getMilestone",
"(",
"self",
",",
"arg2",
")",
":",
"return",
"_motionplanning",
".",
"PlannerInterface_getMilestone",
"(",
"self",
",",
"arg2",
")"
] | https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/python2_version/klampt/plan/motionplanning.py#L860-L867 | |
swift/swift | 12d031cf8177fdec0137f9aa7e2912fa23c4416b | 3rdParty/SCons/scons-3.0.1/engine/SCons/Script/SConscript.py | python | SConsEnvironment.EnsurePythonVersion | (self, major, minor) | Exit abnormally if the Python version is not late enough. | Exit abnormally if the Python version is not late enough. | [
"Exit",
"abnormally",
"if",
"the",
"Python",
"version",
"is",
"not",
"late",
"enough",
"."
] | def EnsurePythonVersion(self, major, minor):
"""Exit abnormally if the Python version is not late enough."""
if sys.version_info < (major, minor):
v = sys.version.split()[0]
print("Python %d.%d or greater required, but you have Python %s" %(major,minor,v))
sys.exit(2) | [
"def",
"EnsurePythonVersion",
"(",
"self",
",",
"major",
",",
"minor",
")",
":",
"if",
"sys",
".",
"version_info",
"<",
"(",
"major",
",",
"minor",
")",
":",
"v",
"=",
"sys",
".",
"version",
".",
"split",
"(",
")",
"[",
"0",
"]",
"print",
"(",
"\"Python %d.%d or greater required, but you have Python %s\"",
"%",
"(",
"major",
",",
"minor",
",",
"v",
")",
")",
"sys",
".",
"exit",
"(",
"2",
")"
] | https://github.com/swift/swift/blob/12d031cf8177fdec0137f9aa7e2912fa23c4416b/3rdParty/SCons/scons-3.0.1/engine/SCons/Script/SConscript.py#L479-L484 | ||
olliw42/storm32bgc | 99d62a6130ae2950514022f50eb669c45a8cc1ba | old/betacopter/old/betacopter36dev-v005/modules/uavcan/libuavcan/dsdl_compiler/libuavcan_dsdl_compiler/pyratemp.py | python | EvalPseudoSandbox.f_escape | (self, s, format="HTML") | return escape(unicode(s), format) | ``escape()`` for the sandboxed code. | ``escape()`` for the sandboxed code. | [
"escape",
"()",
"for",
"the",
"sandboxed",
"code",
"."
] | def f_escape(self, s, format="HTML"):
"""``escape()`` for the sandboxed code.
"""
if isinstance(format, (str, unicode)):
format = ESCAPE_SUPPORTED[format.upper()]
return escape(unicode(s), format) | [
"def",
"f_escape",
"(",
"self",
",",
"s",
",",
"format",
"=",
"\"HTML\"",
")",
":",
"if",
"isinstance",
"(",
"format",
",",
"(",
"str",
",",
"unicode",
")",
")",
":",
"format",
"=",
"ESCAPE_SUPPORTED",
"[",
"format",
".",
"upper",
"(",
")",
"]",
"return",
"escape",
"(",
"unicode",
"(",
"s",
")",
",",
"format",
")"
] | https://github.com/olliw42/storm32bgc/blob/99d62a6130ae2950514022f50eb669c45a8cc1ba/old/betacopter/old/betacopter36dev-v005/modules/uavcan/libuavcan/dsdl_compiler/libuavcan_dsdl_compiler/pyratemp.py#L979-L984 | |
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/x86/toolchain/lib/python2.7/logging/__init__.py | python | Formatter.__init__ | (self, fmt=None, datefmt=None) | Initialize the formatter with specified format strings.
Initialize the formatter either with the specified format string, or a
default as described above. Allow for specialized date formatting with
the optional datefmt argument (if omitted, you get the ISO8601 format). | Initialize the formatter with specified format strings. | [
"Initialize",
"the",
"formatter",
"with",
"specified",
"format",
"strings",
"."
] | def __init__(self, fmt=None, datefmt=None):
"""
Initialize the formatter with specified format strings.
Initialize the formatter either with the specified format string, or a
default as described above. Allow for specialized date formatting with
the optional datefmt argument (if omitted, you get the ISO8601 format).
"""
if fmt:
self._fmt = fmt
else:
self._fmt = "%(message)s"
self.datefmt = datefmt | [
"def",
"__init__",
"(",
"self",
",",
"fmt",
"=",
"None",
",",
"datefmt",
"=",
"None",
")",
":",
"if",
"fmt",
":",
"self",
".",
"_fmt",
"=",
"fmt",
"else",
":",
"self",
".",
"_fmt",
"=",
"\"%(message)s\"",
"self",
".",
"datefmt",
"=",
"datefmt"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/logging/__init__.py#L390-L402 | ||
miyosuda/TensorFlowAndroidMNIST | 7b5a4603d2780a8a2834575706e9001977524007 | jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/autoencoder.py | python | TensorFlowDNNAutoencoder.__init__ | (self, hidden_units, n_classes=0, batch_size=32,
steps=200, optimizer="Adagrad", learning_rate=0.1,
clip_gradients=5.0, activation=nn.relu, add_noise=None,
continue_training=False, config=None,
verbose=1, dropout=None) | Initializes a TensorFlowDNNAutoencoder instance.
Args:
hidden_units: List of hidden units per layer.
batch_size: Mini batch size.
activation: activation function used to map inner latent layer onto
reconstruction layer.
add_noise: a function that adds noise to tensor_in,
e.g. def add_noise(x):
return(x + np.random.normal(0, 0.1, (len(x), len(x[0]))))
steps: Number of steps to run over data.
optimizer: Optimizer name (or class), for example "SGD", "Adam",
"Adagrad".
learning_rate: If this is constant float value, no decay function is used.
Instead, a customized decay function can be passed that accepts
global_step as parameter and returns a Tensor.
e.g. exponential decay function:
def exp_decay(global_step):
return tf.train.exponential_decay(
learning_rate=0.1, global_step,
decay_steps=2, decay_rate=0.001)
continue_training: when continue_training is True, once initialized
model will be continuely trained on every call of fit.
config: RunConfig object that controls the configurations of the session,
e.g. num_cores, gpu_memory_fraction, etc.
verbose: Controls the verbosity, possible values:
0: the algorithm and debug information is muted.
1: trainer prints the progress.
2: log device placement is printed.
dropout: When not None, the probability we will drop out a given
coordinate. | Initializes a TensorFlowDNNAutoencoder instance. | [
"Initializes",
"a",
"TensorFlowDNNAutoencoder",
"instance",
"."
] | def __init__(self, hidden_units, n_classes=0, batch_size=32,
steps=200, optimizer="Adagrad", learning_rate=0.1,
clip_gradients=5.0, activation=nn.relu, add_noise=None,
continue_training=False, config=None,
verbose=1, dropout=None):
"""Initializes a TensorFlowDNNAutoencoder instance.
Args:
hidden_units: List of hidden units per layer.
batch_size: Mini batch size.
activation: activation function used to map inner latent layer onto
reconstruction layer.
add_noise: a function that adds noise to tensor_in,
e.g. def add_noise(x):
return(x + np.random.normal(0, 0.1, (len(x), len(x[0]))))
steps: Number of steps to run over data.
optimizer: Optimizer name (or class), for example "SGD", "Adam",
"Adagrad".
learning_rate: If this is constant float value, no decay function is used.
Instead, a customized decay function can be passed that accepts
global_step as parameter and returns a Tensor.
e.g. exponential decay function:
def exp_decay(global_step):
return tf.train.exponential_decay(
learning_rate=0.1, global_step,
decay_steps=2, decay_rate=0.001)
continue_training: when continue_training is True, once initialized
model will be continuely trained on every call of fit.
config: RunConfig object that controls the configurations of the session,
e.g. num_cores, gpu_memory_fraction, etc.
verbose: Controls the verbosity, possible values:
0: the algorithm and debug information is muted.
1: trainer prints the progress.
2: log device placement is printed.
dropout: When not None, the probability we will drop out a given
coordinate.
"""
self.hidden_units = hidden_units
self.dropout = dropout
self.activation = activation
self.add_noise = add_noise
super(TensorFlowDNNAutoencoder, self).__init__(
model_fn=self._model_fn,
n_classes=n_classes,
batch_size=batch_size, steps=steps, optimizer=optimizer,
learning_rate=learning_rate, clip_gradients=clip_gradients,
continue_training=continue_training,
config=config, verbose=verbose) | [
"def",
"__init__",
"(",
"self",
",",
"hidden_units",
",",
"n_classes",
"=",
"0",
",",
"batch_size",
"=",
"32",
",",
"steps",
"=",
"200",
",",
"optimizer",
"=",
"\"Adagrad\"",
",",
"learning_rate",
"=",
"0.1",
",",
"clip_gradients",
"=",
"5.0",
",",
"activation",
"=",
"nn",
".",
"relu",
",",
"add_noise",
"=",
"None",
",",
"continue_training",
"=",
"False",
",",
"config",
"=",
"None",
",",
"verbose",
"=",
"1",
",",
"dropout",
"=",
"None",
")",
":",
"self",
".",
"hidden_units",
"=",
"hidden_units",
"self",
".",
"dropout",
"=",
"dropout",
"self",
".",
"activation",
"=",
"activation",
"self",
".",
"add_noise",
"=",
"add_noise",
"super",
"(",
"TensorFlowDNNAutoencoder",
",",
"self",
")",
".",
"__init__",
"(",
"model_fn",
"=",
"self",
".",
"_model_fn",
",",
"n_classes",
"=",
"n_classes",
",",
"batch_size",
"=",
"batch_size",
",",
"steps",
"=",
"steps",
",",
"optimizer",
"=",
"optimizer",
",",
"learning_rate",
"=",
"learning_rate",
",",
"clip_gradients",
"=",
"clip_gradients",
",",
"continue_training",
"=",
"continue_training",
",",
"config",
"=",
"config",
",",
"verbose",
"=",
"verbose",
")"
] | https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/autoencoder.py#L32-L79 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/_core.py | python | Window.SetCaret | (*args, **kwargs) | return _core_.Window_SetCaret(*args, **kwargs) | SetCaret(self, Caret caret)
Sets the caret associated with the window. | SetCaret(self, Caret caret) | [
"SetCaret",
"(",
"self",
"Caret",
"caret",
")"
] | def SetCaret(*args, **kwargs):
"""
SetCaret(self, Caret caret)
Sets the caret associated with the window.
"""
return _core_.Window_SetCaret(*args, **kwargs) | [
"def",
"SetCaret",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"Window_SetCaret",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_core.py#L10998-L11004 | |
moderngl/moderngl | 32fe79927e02b0fa893b3603d677bdae39771e14 | moderngl/renderbuffer.py | python | Renderbuffer.components | (self) | return self._components | int: The components of the renderbuffer. | int: The components of the renderbuffer. | [
"int",
":",
"The",
"components",
"of",
"the",
"renderbuffer",
"."
] | def components(self) -> int:
'''
int: The components of the renderbuffer.
'''
return self._components | [
"def",
"components",
"(",
"self",
")",
"->",
"int",
":",
"return",
"self",
".",
"_components"
] | https://github.com/moderngl/moderngl/blob/32fe79927e02b0fa893b3603d677bdae39771e14/moderngl/renderbuffer.py#L87-L92 | |
epam/Indigo | 30e40b4b1eb9bae0207435a26cfcb81ddcc42be1 | api/python/indigo/__init__.py | python | IndigoObject.fingerprint | (self, type) | return self.dispatcher.IndigoObject(self.dispatcher, newobj, self) | Molecule or reaction method returns fingerprint representation
Args:
type (str): fingerprint type. One of the following: "sim", "sub", "sub-res", "sub-tau", "full"
Returns:
IndigoObject: fingerprint object | Molecule or reaction method returns fingerprint representation | [
"Molecule",
"or",
"reaction",
"method",
"returns",
"fingerprint",
"representation"
] | def fingerprint(self, type):
"""Molecule or reaction method returns fingerprint representation
Args:
type (str): fingerprint type. One of the following: "sim", "sub", "sub-res", "sub-tau", "full"
Returns:
IndigoObject: fingerprint object
"""
self.dispatcher._setSessionId()
newobj = self.dispatcher._checkResult(
Indigo._lib.indigoFingerprint(
self.id, type.encode(ENCODE_ENCODING)
)
)
if newobj == 0:
return None
return self.dispatcher.IndigoObject(self.dispatcher, newobj, self) | [
"def",
"fingerprint",
"(",
"self",
",",
"type",
")",
":",
"self",
".",
"dispatcher",
".",
"_setSessionId",
"(",
")",
"newobj",
"=",
"self",
".",
"dispatcher",
".",
"_checkResult",
"(",
"Indigo",
".",
"_lib",
".",
"indigoFingerprint",
"(",
"self",
".",
"id",
",",
"type",
".",
"encode",
"(",
"ENCODE_ENCODING",
")",
")",
")",
"if",
"newobj",
"==",
"0",
":",
"return",
"None",
"return",
"self",
".",
"dispatcher",
".",
"IndigoObject",
"(",
"self",
".",
"dispatcher",
",",
"newobj",
",",
"self",
")"
] | https://github.com/epam/Indigo/blob/30e40b4b1eb9bae0207435a26cfcb81ddcc42be1/api/python/indigo/__init__.py#L3593-L3610 | |
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/check_ops.py | python | assert_type | (tensor, tf_type, message=None, name=None) | Statically asserts that the given `Tensor` is of the specified type.
Args:
tensor: A `Tensor`.
tf_type: A tensorflow type (`dtypes.float32`, `tf.int64`, `dtypes.bool`,
etc).
message: A string to prefix to the default message.
name: A name to give this `Op`. Defaults to "assert_type"
Raises:
TypeError: If the tensors data type doesn't match `tf_type`.
Returns:
A `no_op` that does nothing. Type can be determined statically. | Statically asserts that the given `Tensor` is of the specified type. | [
"Statically",
"asserts",
"that",
"the",
"given",
"Tensor",
"is",
"of",
"the",
"specified",
"type",
"."
] | def assert_type(tensor, tf_type, message=None, name=None):
"""Statically asserts that the given `Tensor` is of the specified type.
Args:
tensor: A `Tensor`.
tf_type: A tensorflow type (`dtypes.float32`, `tf.int64`, `dtypes.bool`,
etc).
message: A string to prefix to the default message.
name: A name to give this `Op`. Defaults to "assert_type"
Raises:
TypeError: If the tensors data type doesn't match `tf_type`.
Returns:
A `no_op` that does nothing. Type can be determined statically.
"""
message = message or ''
with ops.name_scope(name, 'assert_type', [tensor]):
tensor = ops.convert_to_tensor(tensor, name='tensor')
if tensor.dtype != tf_type:
if context.executing_eagerly():
raise TypeError('%s tensor must be of type %s' % (message, tf_type))
else:
raise TypeError('%s %s must be of type %s' % (message, tensor.name,
tf_type))
return control_flow_ops.no_op('statically_determined_correct_type') | [
"def",
"assert_type",
"(",
"tensor",
",",
"tf_type",
",",
"message",
"=",
"None",
",",
"name",
"=",
"None",
")",
":",
"message",
"=",
"message",
"or",
"''",
"with",
"ops",
".",
"name_scope",
"(",
"name",
",",
"'assert_type'",
",",
"[",
"tensor",
"]",
")",
":",
"tensor",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"tensor",
",",
"name",
"=",
"'tensor'",
")",
"if",
"tensor",
".",
"dtype",
"!=",
"tf_type",
":",
"if",
"context",
".",
"executing_eagerly",
"(",
")",
":",
"raise",
"TypeError",
"(",
"'%s tensor must be of type %s'",
"%",
"(",
"message",
",",
"tf_type",
")",
")",
"else",
":",
"raise",
"TypeError",
"(",
"'%s %s must be of type %s'",
"%",
"(",
"message",
",",
"tensor",
".",
"name",
",",
"tf_type",
")",
")",
"return",
"control_flow_ops",
".",
"no_op",
"(",
"'statically_determined_correct_type'",
")"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/check_ops.py#L1498-L1524 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.