nwo stringlengths 5 86 | sha stringlengths 40 40 | path stringlengths 4 189 | language stringclasses 1 value | identifier stringlengths 1 94 | parameters stringlengths 2 4.03k | argument_list stringclasses 1 value | return_statement stringlengths 0 11.5k | docstring stringlengths 1 33.2k | docstring_summary stringlengths 0 5.15k | docstring_tokens list | function stringlengths 34 151k | function_tokens list | url stringlengths 90 278 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
eventql/eventql | 7ca0dbb2e683b525620ea30dc40540a22d5eb227 | deps/3rdparty/spidermonkey/mozjs/media/webrtc/trunk/tools/gyp/pylib/gyp/generator/ninja.py | python | OpenOutput | (path, mode='w') | return open(path, mode) | Open |path| for writing, creating directories if necessary. | Open |path| for writing, creating directories if necessary. | [
"Open",
"|path|",
"for",
"writing",
"creating",
"directories",
"if",
"necessary",
"."
] | def OpenOutput(path, mode='w'):
"""Open |path| for writing, creating directories if necessary."""
try:
os.makedirs(os.path.dirname(path))
except OSError:
pass
return open(path, mode) | [
"def",
"OpenOutput",
"(",
"path",
",",
"mode",
"=",
"'w'",
")",
":",
"try",
":",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"path",
")",
")",
"except",
"OSError",
":",
"pass",
"return",
"open",
"(",
"path",
",",
"mode",
")"
] | https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/media/webrtc/trunk/tools/gyp/pylib/gyp/generator/ninja.py#L1295-L1301 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/dateutil/dateutil/utils.py | python | today | (tzinfo=None) | return datetime.combine(dt.date(), time(0, tzinfo=tzinfo)) | Returns a :py:class:`datetime` representing the current day at midnight
:param tzinfo:
The time zone to attach (also used to determine the current day).
:return:
A :py:class:`datetime.datetime` object representing the current day
at midnight. | Returns a :py:class:`datetime` representing the current day at midnight | [
"Returns",
"a",
":",
"py",
":",
"class",
":",
"datetime",
"representing",
"the",
"current",
"day",
"at",
"midnight"
] | def today(tzinfo=None):
"""
Returns a :py:class:`datetime` representing the current day at midnight
:param tzinfo:
The time zone to attach (also used to determine the current day).
:return:
A :py:class:`datetime.datetime` object representing the current day
at midnight.
"""
dt = datetime.now(tzinfo)
return datetime.combine(dt.date(), time(0, tzinfo=tzinfo)) | [
"def",
"today",
"(",
"tzinfo",
"=",
"None",
")",
":",
"dt",
"=",
"datetime",
".",
"now",
"(",
"tzinfo",
")",
"return",
"datetime",
".",
"combine",
"(",
"dt",
".",
"date",
"(",
")",
",",
"time",
"(",
"0",
",",
"tzinfo",
"=",
"tzinfo",
")",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/dateutil/dateutil/utils.py#L13-L26 | |
ChromiumWebApps/chromium | c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7 | third_party/re2/lib/codereview/codereview.py | python | change | (ui, repo, *pats, **opts) | return | create, edit or delete a change list
Create, edit or delete a change list.
A change list is a group of files to be reviewed and submitted together,
plus a textual description of the change.
Change lists are referred to by simple alphanumeric names.
Changes must be reviewed before they can be submitted.
In the absence of options, the change command opens the
change list for editing in the default editor.
Deleting a change with the -d or -D flag does not affect
the contents of the files listed in that change. To revert
the files listed in a change, use
hg revert @123456
before running hg change -d 123456. | create, edit or delete a change list | [
"create",
"edit",
"or",
"delete",
"a",
"change",
"list"
] | def change(ui, repo, *pats, **opts):
"""create, edit or delete a change list
Create, edit or delete a change list.
A change list is a group of files to be reviewed and submitted together,
plus a textual description of the change.
Change lists are referred to by simple alphanumeric names.
Changes must be reviewed before they can be submitted.
In the absence of options, the change command opens the
change list for editing in the default editor.
Deleting a change with the -d or -D flag does not affect
the contents of the files listed in that change. To revert
the files listed in a change, use
hg revert @123456
before running hg change -d 123456.
"""
if codereview_disabled:
return codereview_disabled
dirty = {}
if len(pats) > 0 and GoodCLName(pats[0]):
name = pats[0]
if len(pats) != 1:
return "cannot specify CL name and file patterns"
pats = pats[1:]
cl, err = LoadCL(ui, repo, name, web=True)
if err != '':
return err
if not cl.local and (opts["stdin"] or not opts["stdout"]):
return "cannot change non-local CL " + name
else:
name = "new"
cl = CL("new")
if repo[None].branch() != "default":
return "cannot create CL outside default branch; switch with 'hg update default'"
dirty[cl] = True
files = ChangedFiles(ui, repo, pats, taken=Taken(ui, repo))
if opts["delete"] or opts["deletelocal"]:
if opts["delete"] and opts["deletelocal"]:
return "cannot use -d and -D together"
flag = "-d"
if opts["deletelocal"]:
flag = "-D"
if name == "new":
return "cannot use "+flag+" with file patterns"
if opts["stdin"] or opts["stdout"]:
return "cannot use "+flag+" with -i or -o"
if not cl.local:
return "cannot change non-local CL " + name
if opts["delete"]:
if cl.copied_from:
return "original author must delete CL; hg change -D will remove locally"
PostMessage(ui, cl.name, "*** Abandoned ***", send_mail=cl.mailed)
EditDesc(cl.name, closed=True, private=cl.private)
cl.Delete(ui, repo)
return
if opts["stdin"]:
s = sys.stdin.read()
clx, line, err = ParseCL(s, name)
if err != '':
return "error parsing change list: line %d: %s" % (line, err)
if clx.desc is not None:
cl.desc = clx.desc;
dirty[cl] = True
if clx.reviewer is not None:
cl.reviewer = clx.reviewer
dirty[cl] = True
if clx.cc is not None:
cl.cc = clx.cc
dirty[cl] = True
if clx.files is not None:
cl.files = clx.files
dirty[cl] = True
if clx.private != cl.private:
cl.private = clx.private
dirty[cl] = True
if not opts["stdin"] and not opts["stdout"]:
if name == "new":
cl.files = files
err = EditCL(ui, repo, cl)
if err != "":
return err
dirty[cl] = True
for d, _ in dirty.items():
name = d.name
d.Flush(ui, repo)
if name == "new":
d.Upload(ui, repo, quiet=True)
if opts["stdout"]:
ui.write(cl.EditorText())
elif opts["pending"]:
ui.write(cl.PendingText())
elif name == "new":
if ui.quiet:
ui.write(cl.name)
else:
ui.write("CL created: " + cl.url + "\n")
return | [
"def",
"change",
"(",
"ui",
",",
"repo",
",",
"*",
"pats",
",",
"*",
"*",
"opts",
")",
":",
"if",
"codereview_disabled",
":",
"return",
"codereview_disabled",
"dirty",
"=",
"{",
"}",
"if",
"len",
"(",
"pats",
")",
">",
"0",
"and",
"GoodCLName",
"(",
"pats",
"[",
"0",
"]",
")",
":",
"name",
"=",
"pats",
"[",
"0",
"]",
"if",
"len",
"(",
"pats",
")",
"!=",
"1",
":",
"return",
"\"cannot specify CL name and file patterns\"",
"pats",
"=",
"pats",
"[",
"1",
":",
"]",
"cl",
",",
"err",
"=",
"LoadCL",
"(",
"ui",
",",
"repo",
",",
"name",
",",
"web",
"=",
"True",
")",
"if",
"err",
"!=",
"''",
":",
"return",
"err",
"if",
"not",
"cl",
".",
"local",
"and",
"(",
"opts",
"[",
"\"stdin\"",
"]",
"or",
"not",
"opts",
"[",
"\"stdout\"",
"]",
")",
":",
"return",
"\"cannot change non-local CL \"",
"+",
"name",
"else",
":",
"name",
"=",
"\"new\"",
"cl",
"=",
"CL",
"(",
"\"new\"",
")",
"if",
"repo",
"[",
"None",
"]",
".",
"branch",
"(",
")",
"!=",
"\"default\"",
":",
"return",
"\"cannot create CL outside default branch; switch with 'hg update default'\"",
"dirty",
"[",
"cl",
"]",
"=",
"True",
"files",
"=",
"ChangedFiles",
"(",
"ui",
",",
"repo",
",",
"pats",
",",
"taken",
"=",
"Taken",
"(",
"ui",
",",
"repo",
")",
")",
"if",
"opts",
"[",
"\"delete\"",
"]",
"or",
"opts",
"[",
"\"deletelocal\"",
"]",
":",
"if",
"opts",
"[",
"\"delete\"",
"]",
"and",
"opts",
"[",
"\"deletelocal\"",
"]",
":",
"return",
"\"cannot use -d and -D together\"",
"flag",
"=",
"\"-d\"",
"if",
"opts",
"[",
"\"deletelocal\"",
"]",
":",
"flag",
"=",
"\"-D\"",
"if",
"name",
"==",
"\"new\"",
":",
"return",
"\"cannot use \"",
"+",
"flag",
"+",
"\" with file patterns\"",
"if",
"opts",
"[",
"\"stdin\"",
"]",
"or",
"opts",
"[",
"\"stdout\"",
"]",
":",
"return",
"\"cannot use \"",
"+",
"flag",
"+",
"\" with -i or -o\"",
"if",
"not",
"cl",
".",
"local",
":",
"return",
"\"cannot change non-local CL \"",
"+",
"name",
"if",
"opts",
"[",
"\"delete\"",
"]",
":",
"if",
"cl",
".",
"copied_from",
":",
"return",
"\"original author must delete CL; hg change -D will remove locally\"",
"PostMessage",
"(",
"ui",
",",
"cl",
".",
"name",
",",
"\"*** Abandoned ***\"",
",",
"send_mail",
"=",
"cl",
".",
"mailed",
")",
"EditDesc",
"(",
"cl",
".",
"name",
",",
"closed",
"=",
"True",
",",
"private",
"=",
"cl",
".",
"private",
")",
"cl",
".",
"Delete",
"(",
"ui",
",",
"repo",
")",
"return",
"if",
"opts",
"[",
"\"stdin\"",
"]",
":",
"s",
"=",
"sys",
".",
"stdin",
".",
"read",
"(",
")",
"clx",
",",
"line",
",",
"err",
"=",
"ParseCL",
"(",
"s",
",",
"name",
")",
"if",
"err",
"!=",
"''",
":",
"return",
"\"error parsing change list: line %d: %s\"",
"%",
"(",
"line",
",",
"err",
")",
"if",
"clx",
".",
"desc",
"is",
"not",
"None",
":",
"cl",
".",
"desc",
"=",
"clx",
".",
"desc",
"dirty",
"[",
"cl",
"]",
"=",
"True",
"if",
"clx",
".",
"reviewer",
"is",
"not",
"None",
":",
"cl",
".",
"reviewer",
"=",
"clx",
".",
"reviewer",
"dirty",
"[",
"cl",
"]",
"=",
"True",
"if",
"clx",
".",
"cc",
"is",
"not",
"None",
":",
"cl",
".",
"cc",
"=",
"clx",
".",
"cc",
"dirty",
"[",
"cl",
"]",
"=",
"True",
"if",
"clx",
".",
"files",
"is",
"not",
"None",
":",
"cl",
".",
"files",
"=",
"clx",
".",
"files",
"dirty",
"[",
"cl",
"]",
"=",
"True",
"if",
"clx",
".",
"private",
"!=",
"cl",
".",
"private",
":",
"cl",
".",
"private",
"=",
"clx",
".",
"private",
"dirty",
"[",
"cl",
"]",
"=",
"True",
"if",
"not",
"opts",
"[",
"\"stdin\"",
"]",
"and",
"not",
"opts",
"[",
"\"stdout\"",
"]",
":",
"if",
"name",
"==",
"\"new\"",
":",
"cl",
".",
"files",
"=",
"files",
"err",
"=",
"EditCL",
"(",
"ui",
",",
"repo",
",",
"cl",
")",
"if",
"err",
"!=",
"\"\"",
":",
"return",
"err",
"dirty",
"[",
"cl",
"]",
"=",
"True",
"for",
"d",
",",
"_",
"in",
"dirty",
".",
"items",
"(",
")",
":",
"name",
"=",
"d",
".",
"name",
"d",
".",
"Flush",
"(",
"ui",
",",
"repo",
")",
"if",
"name",
"==",
"\"new\"",
":",
"d",
".",
"Upload",
"(",
"ui",
",",
"repo",
",",
"quiet",
"=",
"True",
")",
"if",
"opts",
"[",
"\"stdout\"",
"]",
":",
"ui",
".",
"write",
"(",
"cl",
".",
"EditorText",
"(",
")",
")",
"elif",
"opts",
"[",
"\"pending\"",
"]",
":",
"ui",
".",
"write",
"(",
"cl",
".",
"PendingText",
"(",
")",
")",
"elif",
"name",
"==",
"\"new\"",
":",
"if",
"ui",
".",
"quiet",
":",
"ui",
".",
"write",
"(",
"cl",
".",
"name",
")",
"else",
":",
"ui",
".",
"write",
"(",
"\"CL created: \"",
"+",
"cl",
".",
"url",
"+",
"\"\\n\"",
")",
"return"
] | https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/third_party/re2/lib/codereview/codereview.py#L1273-L1381 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/importlib/_bootstrap_external.py | python | spec_from_file_location | (name, location=None, *, loader=None,
submodule_search_locations=_POPULATE) | return spec | Return a module spec based on a file location.
To indicate that the module is a package, set
submodule_search_locations to a list of directory paths. An
empty list is sufficient, though its not otherwise useful to the
import system.
The loader must take a spec as its only __init__() arg. | Return a module spec based on a file location. | [
"Return",
"a",
"module",
"spec",
"based",
"on",
"a",
"file",
"location",
"."
] | def spec_from_file_location(name, location=None, *, loader=None,
submodule_search_locations=_POPULATE):
"""Return a module spec based on a file location.
To indicate that the module is a package, set
submodule_search_locations to a list of directory paths. An
empty list is sufficient, though its not otherwise useful to the
import system.
The loader must take a spec as its only __init__() arg.
"""
if location is None:
# The caller may simply want a partially populated location-
# oriented spec. So we set the location to a bogus value and
# fill in as much as we can.
location = '<unknown>'
if hasattr(loader, 'get_filename'):
# ExecutionLoader
try:
location = loader.get_filename(name)
except ImportError:
pass
else:
location = _os.fspath(location)
# If the location is on the filesystem, but doesn't actually exist,
# we could return None here, indicating that the location is not
# valid. However, we don't have a good way of testing since an
# indirect location (e.g. a zip file or URL) will look like a
# non-existent file relative to the filesystem.
spec = _bootstrap.ModuleSpec(name, loader, origin=location)
spec._set_fileattr = True
# Pick a loader if one wasn't provided.
if loader is None:
for loader_class, suffixes in _get_supported_file_loaders():
if location.endswith(tuple(suffixes)):
loader = loader_class(name, location)
spec.loader = loader
break
else:
return None
# Set submodule_search_paths appropriately.
if submodule_search_locations is _POPULATE:
# Check the loader.
if hasattr(loader, 'is_package'):
try:
is_package = loader.is_package(name)
except ImportError:
pass
else:
if is_package:
spec.submodule_search_locations = []
else:
spec.submodule_search_locations = submodule_search_locations
if spec.submodule_search_locations == []:
if location:
dirname = _path_split(location)[0]
spec.submodule_search_locations.append(dirname)
return spec | [
"def",
"spec_from_file_location",
"(",
"name",
",",
"location",
"=",
"None",
",",
"*",
",",
"loader",
"=",
"None",
",",
"submodule_search_locations",
"=",
"_POPULATE",
")",
":",
"if",
"location",
"is",
"None",
":",
"# The caller may simply want a partially populated location-",
"# oriented spec. So we set the location to a bogus value and",
"# fill in as much as we can.",
"location",
"=",
"'<unknown>'",
"if",
"hasattr",
"(",
"loader",
",",
"'get_filename'",
")",
":",
"# ExecutionLoader",
"try",
":",
"location",
"=",
"loader",
".",
"get_filename",
"(",
"name",
")",
"except",
"ImportError",
":",
"pass",
"else",
":",
"location",
"=",
"_os",
".",
"fspath",
"(",
"location",
")",
"# If the location is on the filesystem, but doesn't actually exist,",
"# we could return None here, indicating that the location is not",
"# valid. However, we don't have a good way of testing since an",
"# indirect location (e.g. a zip file or URL) will look like a",
"# non-existent file relative to the filesystem.",
"spec",
"=",
"_bootstrap",
".",
"ModuleSpec",
"(",
"name",
",",
"loader",
",",
"origin",
"=",
"location",
")",
"spec",
".",
"_set_fileattr",
"=",
"True",
"# Pick a loader if one wasn't provided.",
"if",
"loader",
"is",
"None",
":",
"for",
"loader_class",
",",
"suffixes",
"in",
"_get_supported_file_loaders",
"(",
")",
":",
"if",
"location",
".",
"endswith",
"(",
"tuple",
"(",
"suffixes",
")",
")",
":",
"loader",
"=",
"loader_class",
"(",
"name",
",",
"location",
")",
"spec",
".",
"loader",
"=",
"loader",
"break",
"else",
":",
"return",
"None",
"# Set submodule_search_paths appropriately.",
"if",
"submodule_search_locations",
"is",
"_POPULATE",
":",
"# Check the loader.",
"if",
"hasattr",
"(",
"loader",
",",
"'is_package'",
")",
":",
"try",
":",
"is_package",
"=",
"loader",
".",
"is_package",
"(",
"name",
")",
"except",
"ImportError",
":",
"pass",
"else",
":",
"if",
"is_package",
":",
"spec",
".",
"submodule_search_locations",
"=",
"[",
"]",
"else",
":",
"spec",
".",
"submodule_search_locations",
"=",
"submodule_search_locations",
"if",
"spec",
".",
"submodule_search_locations",
"==",
"[",
"]",
":",
"if",
"location",
":",
"dirname",
"=",
"_path_split",
"(",
"location",
")",
"[",
"0",
"]",
"spec",
".",
"submodule_search_locations",
".",
"append",
"(",
"dirname",
")",
"return",
"spec"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/importlib/_bootstrap_external.py#L574-L637 | |
verilog-to-routing/vtr-verilog-to-routing | d9719cf7374821156c3cee31d66991cb85578562 | vtr_flow/scripts/python_libs/vtr/log_parse.py | python | RangeAbsPassRequirement.abs_threshold | (self) | return self._abs_threshold | Get absolute threshold | Get absolute threshold | [
"Get",
"absolute",
"threshold"
] | def abs_threshold(self):
"""Get absolute threshold"""
return self._abs_threshold | [
"def",
"abs_threshold",
"(",
"self",
")",
":",
"return",
"self",
".",
"_abs_threshold"
] | https://github.com/verilog-to-routing/vtr-verilog-to-routing/blob/d9719cf7374821156c3cee31d66991cb85578562/vtr_flow/scripts/python_libs/vtr/log_parse.py#L192-L194 | |
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/python/ops/image_ops_impl.py | python | per_image_standardization | (image) | return image | Linearly scales `image` to have zero mean and unit norm.
This op computes `(x - mean) / adjusted_stddev`, where `mean` is the average
of all values in image, and
`adjusted_stddev = max(stddev, 1.0/sqrt(image.NumElements()))`.
`stddev` is the standard deviation of all values in `image`. It is capped
away from zero to protect against division by 0 when handling uniform images.
Args:
image: 3-D tensor of shape `[height, width, channels]`.
Returns:
The standardized image with same shape as `image`.
Raises:
ValueError: if the shape of 'image' is incompatible with this function. | Linearly scales `image` to have zero mean and unit norm. | [
"Linearly",
"scales",
"image",
"to",
"have",
"zero",
"mean",
"and",
"unit",
"norm",
"."
] | def per_image_standardization(image):
"""Linearly scales `image` to have zero mean and unit norm.
This op computes `(x - mean) / adjusted_stddev`, where `mean` is the average
of all values in image, and
`adjusted_stddev = max(stddev, 1.0/sqrt(image.NumElements()))`.
`stddev` is the standard deviation of all values in `image`. It is capped
away from zero to protect against division by 0 when handling uniform images.
Args:
image: 3-D tensor of shape `[height, width, channels]`.
Returns:
The standardized image with same shape as `image`.
Raises:
ValueError: if the shape of 'image' is incompatible with this function.
"""
image = ops.convert_to_tensor(image, name='image')
image = control_flow_ops.with_dependencies(
_Check3DImage(image, require_static=False), image)
num_pixels = math_ops.reduce_prod(array_ops.shape(image))
image = math_ops.cast(image, dtype=dtypes.float32)
image_mean = math_ops.reduce_mean(image)
variance = (math_ops.reduce_mean(math_ops.square(image)) -
math_ops.square(image_mean))
variance = gen_nn_ops.relu(variance)
stddev = math_ops.sqrt(variance)
# Apply a minimum normalization that protects us against uniform images.
min_stddev = math_ops.rsqrt(math_ops.cast(num_pixels, dtypes.float32))
pixel_value_scale = math_ops.maximum(stddev, min_stddev)
pixel_value_offset = image_mean
image = math_ops.subtract(image, pixel_value_offset)
image = math_ops.div(image, pixel_value_scale)
return image | [
"def",
"per_image_standardization",
"(",
"image",
")",
":",
"image",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"image",
",",
"name",
"=",
"'image'",
")",
"image",
"=",
"control_flow_ops",
".",
"with_dependencies",
"(",
"_Check3DImage",
"(",
"image",
",",
"require_static",
"=",
"False",
")",
",",
"image",
")",
"num_pixels",
"=",
"math_ops",
".",
"reduce_prod",
"(",
"array_ops",
".",
"shape",
"(",
"image",
")",
")",
"image",
"=",
"math_ops",
".",
"cast",
"(",
"image",
",",
"dtype",
"=",
"dtypes",
".",
"float32",
")",
"image_mean",
"=",
"math_ops",
".",
"reduce_mean",
"(",
"image",
")",
"variance",
"=",
"(",
"math_ops",
".",
"reduce_mean",
"(",
"math_ops",
".",
"square",
"(",
"image",
")",
")",
"-",
"math_ops",
".",
"square",
"(",
"image_mean",
")",
")",
"variance",
"=",
"gen_nn_ops",
".",
"relu",
"(",
"variance",
")",
"stddev",
"=",
"math_ops",
".",
"sqrt",
"(",
"variance",
")",
"# Apply a minimum normalization that protects us against uniform images.",
"min_stddev",
"=",
"math_ops",
".",
"rsqrt",
"(",
"math_ops",
".",
"cast",
"(",
"num_pixels",
",",
"dtypes",
".",
"float32",
")",
")",
"pixel_value_scale",
"=",
"math_ops",
".",
"maximum",
"(",
"stddev",
",",
"min_stddev",
")",
"pixel_value_offset",
"=",
"image_mean",
"image",
"=",
"math_ops",
".",
"subtract",
"(",
"image",
",",
"pixel_value_offset",
")",
"image",
"=",
"math_ops",
".",
"div",
"(",
"image",
",",
"pixel_value_scale",
")",
"return",
"image"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/image_ops_impl.py#L793-L832 | |
eventql/eventql | 7ca0dbb2e683b525620ea30dc40540a22d5eb227 | deps/3rdparty/spidermonkey/mozjs/python/configobj/configobj.py | python | Section.itervalues | (self) | return iter(self.values()) | D.itervalues() -> an iterator over the values of D | D.itervalues() -> an iterator over the values of D | [
"D",
".",
"itervalues",
"()",
"-",
">",
"an",
"iterator",
"over",
"the",
"values",
"of",
"D"
] | def itervalues(self):
"""D.itervalues() -> an iterator over the values of D"""
return iter(self.values()) | [
"def",
"itervalues",
"(",
"self",
")",
":",
"return",
"iter",
"(",
"self",
".",
"values",
"(",
")",
")"
] | https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/python/configobj/configobj.py#L749-L751 | |
rapidsai/cudf | d5b2448fc69f17509304d594f029d0df56984962 | python/cudf/cudf/api/extensions/accessor.py | python | register_dataframe_accessor | (name) | return _register_accessor(name, cudf.DataFrame) | {docstring} | {docstring} | [
"{",
"docstring",
"}"
] | def register_dataframe_accessor(name):
"""{docstring}"""
return _register_accessor(name, cudf.DataFrame) | [
"def",
"register_dataframe_accessor",
"(",
"name",
")",
":",
"return",
"_register_accessor",
"(",
"name",
",",
"cudf",
".",
"DataFrame",
")"
] | https://github.com/rapidsai/cudf/blob/d5b2448fc69f17509304d594f029d0df56984962/python/cudf/cudf/api/extensions/accessor.py#L147-L149 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/_controls.py | python | FontPickerEvent.__init__ | (self, *args, **kwargs) | __init__(self, Object generator, int id, Font f) -> FontPickerEvent | __init__(self, Object generator, int id, Font f) -> FontPickerEvent | [
"__init__",
"(",
"self",
"Object",
"generator",
"int",
"id",
"Font",
"f",
")",
"-",
">",
"FontPickerEvent"
] | def __init__(self, *args, **kwargs):
"""__init__(self, Object generator, int id, Font f) -> FontPickerEvent"""
_controls_.FontPickerEvent_swiginit(self,_controls_.new_FontPickerEvent(*args, **kwargs)) | [
"def",
"__init__",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"_controls_",
".",
"FontPickerEvent_swiginit",
"(",
"self",
",",
"_controls_",
".",
"new_FontPickerEvent",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_controls.py#L7281-L7283 | ||
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/numbers.py | python | Real.__floordiv__ | (self, other) | self // other: The floor() of self/other. | self // other: The floor() of self/other. | [
"self",
"//",
"other",
":",
"The",
"floor",
"()",
"of",
"self",
"/",
"other",
"."
] | def __floordiv__(self, other):
"""self // other: The floor() of self/other."""
raise NotImplementedError | [
"def",
"__floordiv__",
"(",
"self",
",",
"other",
")",
":",
"raise",
"NotImplementedError"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/numbers.py#L217-L219 | ||
s9xie/DSN | 065e49898d239f5c96be558616b2556eabc50351 | scripts/cpp_lint.py | python | CheckForMultilineCommentsAndStrings | (filename, clean_lines, linenum, error) | Logs an error if we see /* ... */ or "..." that extend past one line.
/* ... */ comments are legit inside macros, for one line.
Otherwise, we prefer // comments, so it's ok to warn about the
other. Likewise, it's ok for strings to extend across multiple
lines, as long as a line continuation character (backslash)
terminates each line. Although not currently prohibited by the C++
style guide, it's ugly and unnecessary. We don't do well with either
in this lint program, so we warn about both.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found. | Logs an error if we see /* ... */ or "..." that extend past one line. | [
"Logs",
"an",
"error",
"if",
"we",
"see",
"/",
"*",
"...",
"*",
"/",
"or",
"...",
"that",
"extend",
"past",
"one",
"line",
"."
] | def CheckForMultilineCommentsAndStrings(filename, clean_lines, linenum, error):
"""Logs an error if we see /* ... */ or "..." that extend past one line.
/* ... */ comments are legit inside macros, for one line.
Otherwise, we prefer // comments, so it's ok to warn about the
other. Likewise, it's ok for strings to extend across multiple
lines, as long as a line continuation character (backslash)
terminates each line. Although not currently prohibited by the C++
style guide, it's ugly and unnecessary. We don't do well with either
in this lint program, so we warn about both.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
line = clean_lines.elided[linenum]
# Remove all \\ (escaped backslashes) from the line. They are OK, and the
# second (escaped) slash may trigger later \" detection erroneously.
line = line.replace('\\\\', '')
if line.count('/*') > line.count('*/'):
error(filename, linenum, 'readability/multiline_comment', 5,
'Complex multi-line /*...*/-style comment found. '
'Lint may give bogus warnings. '
'Consider replacing these with //-style comments, '
'with #if 0...#endif, '
'or with more clearly structured multi-line comments.')
if (line.count('"') - line.count('\\"')) % 2:
error(filename, linenum, 'readability/multiline_string', 5,
'Multi-line string ("...") found. This lint script doesn\'t '
'do well with such strings, and may give bogus warnings. '
'Use C++11 raw strings or concatenation instead.') | [
"def",
"CheckForMultilineCommentsAndStrings",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
":",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"# Remove all \\\\ (escaped backslashes) from the line. They are OK, and the",
"# second (escaped) slash may trigger later \\\" detection erroneously.",
"line",
"=",
"line",
".",
"replace",
"(",
"'\\\\\\\\'",
",",
"''",
")",
"if",
"line",
".",
"count",
"(",
"'/*'",
")",
">",
"line",
".",
"count",
"(",
"'*/'",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'readability/multiline_comment'",
",",
"5",
",",
"'Complex multi-line /*...*/-style comment found. '",
"'Lint may give bogus warnings. '",
"'Consider replacing these with //-style comments, '",
"'with #if 0...#endif, '",
"'or with more clearly structured multi-line comments.'",
")",
"if",
"(",
"line",
".",
"count",
"(",
"'\"'",
")",
"-",
"line",
".",
"count",
"(",
"'\\\\\"'",
")",
")",
"%",
"2",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'readability/multiline_string'",
",",
"5",
",",
"'Multi-line string (\"...\") found. This lint script doesn\\'t '",
"'do well with such strings, and may give bogus warnings. '",
"'Use C++11 raw strings or concatenation instead.'",
")"
] | https://github.com/s9xie/DSN/blob/065e49898d239f5c96be558616b2556eabc50351/scripts/cpp_lint.py#L1522-L1557 | ||
apache/incubator-mxnet | f03fb23f1d103fec9541b5ae59ee06b1734a51d9 | python/mxnet/symbol/random.py | python | _random_helper | (random, sampler, params, shape, dtype, kwargs) | Helper function for random generators. | Helper function for random generators. | [
"Helper",
"function",
"for",
"random",
"generators",
"."
] | def _random_helper(random, sampler, params, shape, dtype, kwargs):
"""Helper function for random generators."""
if isinstance(params[0], Symbol):
for i in params[1:]:
assert isinstance(i, Symbol), \
"Distribution parameters must all have the same type, but got " \
"both %s and %s."%(type(params[0]), type(i))
return sampler(*params, shape=shape, dtype=dtype, **kwargs)
elif isinstance(params[0], numeric_types):
for i in params[1:]:
assert isinstance(i, numeric_types), \
"Distribution parameters must all have the same type, but got " \
"both %s and %s."%(type(params[0]), type(i))
return random(*params, shape=shape, dtype=dtype, **kwargs)
raise ValueError("Distribution parameters must be either Symbol or numbers, "
"but got %s."%type(params[0])) | [
"def",
"_random_helper",
"(",
"random",
",",
"sampler",
",",
"params",
",",
"shape",
",",
"dtype",
",",
"kwargs",
")",
":",
"if",
"isinstance",
"(",
"params",
"[",
"0",
"]",
",",
"Symbol",
")",
":",
"for",
"i",
"in",
"params",
"[",
"1",
":",
"]",
":",
"assert",
"isinstance",
"(",
"i",
",",
"Symbol",
")",
",",
"\"Distribution parameters must all have the same type, but got \"",
"\"both %s and %s.\"",
"%",
"(",
"type",
"(",
"params",
"[",
"0",
"]",
")",
",",
"type",
"(",
"i",
")",
")",
"return",
"sampler",
"(",
"*",
"params",
",",
"shape",
"=",
"shape",
",",
"dtype",
"=",
"dtype",
",",
"*",
"*",
"kwargs",
")",
"elif",
"isinstance",
"(",
"params",
"[",
"0",
"]",
",",
"numeric_types",
")",
":",
"for",
"i",
"in",
"params",
"[",
"1",
":",
"]",
":",
"assert",
"isinstance",
"(",
"i",
",",
"numeric_types",
")",
",",
"\"Distribution parameters must all have the same type, but got \"",
"\"both %s and %s.\"",
"%",
"(",
"type",
"(",
"params",
"[",
"0",
"]",
")",
",",
"type",
"(",
"i",
")",
")",
"return",
"random",
"(",
"*",
"params",
",",
"shape",
"=",
"shape",
",",
"dtype",
"=",
"dtype",
",",
"*",
"*",
"kwargs",
")",
"raise",
"ValueError",
"(",
"\"Distribution parameters must be either Symbol or numbers, \"",
"\"but got %s.\"",
"%",
"type",
"(",
"params",
"[",
"0",
"]",
")",
")"
] | https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/symbol/random.py#L29-L45 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Lib/pydoc.py | python | HTMLDoc.namelink | (self, name, *dicts) | return name | Make a link for an identifier, given name-to-URL mappings. | Make a link for an identifier, given name-to-URL mappings. | [
"Make",
"a",
"link",
"for",
"an",
"identifier",
"given",
"name",
"-",
"to",
"-",
"URL",
"mappings",
"."
] | def namelink(self, name, *dicts):
"""Make a link for an identifier, given name-to-URL mappings."""
for dict in dicts:
if name in dict:
return '<a href="%s">%s</a>' % (dict[name], name)
return name | [
"def",
"namelink",
"(",
"self",
",",
"name",
",",
"*",
"dicts",
")",
":",
"for",
"dict",
"in",
"dicts",
":",
"if",
"name",
"in",
"dict",
":",
"return",
"'<a href=\"%s\">%s</a>'",
"%",
"(",
"dict",
"[",
"name",
"]",
",",
"name",
")",
"return",
"name"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/pydoc.py#L526-L531 | |
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/client/timeline.py | python | _TensorTracker.last_unref | (self) | return max(self._unref_times) | Last unreference timestamp of this tensor (long integer). | Last unreference timestamp of this tensor (long integer). | [
"Last",
"unreference",
"timestamp",
"of",
"this",
"tensor",
"(",
"long",
"integer",
")",
"."
] | def last_unref(self):
"""Last unreference timestamp of this tensor (long integer)."""
return max(self._unref_times) | [
"def",
"last_unref",
"(",
"self",
")",
":",
"return",
"max",
"(",
"self",
".",
"_unref_times",
")"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/client/timeline.py#L325-L327 | |
eventql/eventql | 7ca0dbb2e683b525620ea30dc40540a22d5eb227 | deps/3rdparty/spidermonkey/mozjs/python/psutil/psutil/__init__.py | python | Process.nice | (self, value=None) | Get or set process niceness (priority). | Get or set process niceness (priority). | [
"Get",
"or",
"set",
"process",
"niceness",
"(",
"priority",
")",
"."
] | def nice(self, value=None):
"""Get or set process niceness (priority)."""
if value is None:
return self._proc.nice_get()
else:
if not self.is_running():
raise NoSuchProcess(self.pid, self._name)
self._proc.nice_set(value) | [
"def",
"nice",
"(",
"self",
",",
"value",
"=",
"None",
")",
":",
"if",
"value",
"is",
"None",
":",
"return",
"self",
".",
"_proc",
".",
"nice_get",
"(",
")",
"else",
":",
"if",
"not",
"self",
".",
"is_running",
"(",
")",
":",
"raise",
"NoSuchProcess",
"(",
"self",
".",
"pid",
",",
"self",
".",
"_name",
")",
"self",
".",
"_proc",
".",
"nice_set",
"(",
"value",
")"
] | https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/python/psutil/psutil/__init__.py#L583-L590 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/grid.py | python | Grid.EnableCellEditControl | (*args, **kwargs) | return _grid.Grid_EnableCellEditControl(*args, **kwargs) | EnableCellEditControl(self, bool enable=True) | EnableCellEditControl(self, bool enable=True) | [
"EnableCellEditControl",
"(",
"self",
"bool",
"enable",
"=",
"True",
")"
] | def EnableCellEditControl(*args, **kwargs):
"""EnableCellEditControl(self, bool enable=True)"""
return _grid.Grid_EnableCellEditControl(*args, **kwargs) | [
"def",
"EnableCellEditControl",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_grid",
".",
"Grid_EnableCellEditControl",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/grid.py#L1346-L1348 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/_windows.py | python | Dialog.DoLayoutAdaptation | (*args, **kwargs) | return _windows_.Dialog_DoLayoutAdaptation(*args, **kwargs) | DoLayoutAdaptation(self) -> bool | DoLayoutAdaptation(self) -> bool | [
"DoLayoutAdaptation",
"(",
"self",
")",
"-",
">",
"bool"
] | def DoLayoutAdaptation(*args, **kwargs):
"""DoLayoutAdaptation(self) -> bool"""
return _windows_.Dialog_DoLayoutAdaptation(*args, **kwargs) | [
"def",
"DoLayoutAdaptation",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"Dialog_DoLayoutAdaptation",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_windows.py#L819-L821 | |
llvm/llvm-project | ffa6262cb4e2a335d26416fad39a581b4f98c5f4 | clang/bindings/python/clang/cindex.py | python | Type.get_align | (self) | return conf.lib.clang_Type_getAlignOf(self) | Retrieve the alignment of the record. | Retrieve the alignment of the record. | [
"Retrieve",
"the",
"alignment",
"of",
"the",
"record",
"."
] | def get_align(self):
"""
Retrieve the alignment of the record.
"""
return conf.lib.clang_Type_getAlignOf(self) | [
"def",
"get_align",
"(",
"self",
")",
":",
"return",
"conf",
".",
"lib",
".",
"clang_Type_getAlignOf",
"(",
"self",
")"
] | https://github.com/llvm/llvm-project/blob/ffa6262cb4e2a335d26416fad39a581b4f98c5f4/clang/bindings/python/clang/cindex.py#L2379-L2383 | |
apache/kudu | 90895ce76590f10730ad7aac3613b69d89ff5422 | build-support/iwyu/fix_includes.py | python | _CalculateReorderSpans | (file_lines) | Fills each input_line's reorder_span field.
A 'reorder span' is a range of lines (from file_lines) that only has
#includes and forward-declares in it (and maybe blank lines, and
comments associated with #includes or forward-declares). In
particular, it does not include any "real code" besides #includes
and forward-declares: no functions, no static variable assignment,
no macro #defines, no nothing. We are willing to reorder #includes
and namespaces freely inside a reorder span.
Calculating reorder_span is easy: they're just the union of
contiguous move-spans (with perhaps blank lines and comments
thrown in), because move-spans share the 'no actual code'
requirement.
There's one exception: if any move-span matches the
_BARRIER_INCLUDES regexp, it means that we should consider that
move-span to be a 'barrier': nothing should get reordered from one
side of that move-span to the other. (This is used for #includes
that depend on other #includes being before them to function
properly.) We do that by putting them into their own reorder span.
For lines of type _INCLUDE_RE or _FORWARD_DECLARE_RE, the reorder
span is set to the tuple [start_of_span, end_of_span). All other
lines have an arbitrary value for the reorder span.
Arguments:
file_lines: an array of LineInfo objects with .type and .move_span
fields filled in. | Fills each input_line's reorder_span field. | [
"Fills",
"each",
"input_line",
"s",
"reorder_span",
"field",
"."
] | def _CalculateReorderSpans(file_lines):
"""Fills each input_line's reorder_span field.
A 'reorder span' is a range of lines (from file_lines) that only has
#includes and forward-declares in it (and maybe blank lines, and
comments associated with #includes or forward-declares). In
particular, it does not include any "real code" besides #includes
and forward-declares: no functions, no static variable assignment,
no macro #defines, no nothing. We are willing to reorder #includes
and namespaces freely inside a reorder span.
Calculating reorder_span is easy: they're just the union of
contiguous move-spans (with perhaps blank lines and comments
thrown in), because move-spans share the 'no actual code'
requirement.
There's one exception: if any move-span matches the
_BARRIER_INCLUDES regexp, it means that we should consider that
move-span to be a 'barrier': nothing should get reordered from one
side of that move-span to the other. (This is used for #includes
that depend on other #includes being before them to function
properly.) We do that by putting them into their own reorder span.
For lines of type _INCLUDE_RE or _FORWARD_DECLARE_RE, the reorder
span is set to the tuple [start_of_span, end_of_span). All other
lines have an arbitrary value for the reorder span.
Arguments:
file_lines: an array of LineInfo objects with .type and .move_span
fields filled in.
"""
# Happily, move_spans are disjoint. Just make sure they're sorted and unique.
move_spans = [s.move_span for s in file_lines if s.move_span is not None]
sorted_move_spans = sorted(set(move_spans))
i = 0
while i < len(sorted_move_spans):
reorder_span_start = sorted_move_spans[i][0]
# If we're a 'nosort' include, we're always in a reorder span of
# our own. Otherwise, add in the next move span if we're
# connected to it only by blank lines.
if not _ContainsBarrierInclude(file_lines, sorted_move_spans[i]):
while i < len(sorted_move_spans) - 1:
move_span_end = sorted_move_spans[i][1]
next_move_span_start = sorted_move_spans[i+1][0]
if (_LinesAreAllBlank(file_lines, move_span_end, next_move_span_start)
and not _ContainsBarrierInclude(file_lines, sorted_move_spans[i+1])):
i += 1
else:
break
reorder_span_end = sorted_move_spans[i][1]
# We'll map every line in the span to the span-extent.
for line_number in range(reorder_span_start, reorder_span_end):
file_lines[line_number].reorder_span = (reorder_span_start,
reorder_span_end)
i += 1 | [
"def",
"_CalculateReorderSpans",
"(",
"file_lines",
")",
":",
"# Happily, move_spans are disjoint. Just make sure they're sorted and unique.",
"move_spans",
"=",
"[",
"s",
".",
"move_span",
"for",
"s",
"in",
"file_lines",
"if",
"s",
".",
"move_span",
"is",
"not",
"None",
"]",
"sorted_move_spans",
"=",
"sorted",
"(",
"set",
"(",
"move_spans",
")",
")",
"i",
"=",
"0",
"while",
"i",
"<",
"len",
"(",
"sorted_move_spans",
")",
":",
"reorder_span_start",
"=",
"sorted_move_spans",
"[",
"i",
"]",
"[",
"0",
"]",
"# If we're a 'nosort' include, we're always in a reorder span of",
"# our own. Otherwise, add in the next move span if we're",
"# connected to it only by blank lines.",
"if",
"not",
"_ContainsBarrierInclude",
"(",
"file_lines",
",",
"sorted_move_spans",
"[",
"i",
"]",
")",
":",
"while",
"i",
"<",
"len",
"(",
"sorted_move_spans",
")",
"-",
"1",
":",
"move_span_end",
"=",
"sorted_move_spans",
"[",
"i",
"]",
"[",
"1",
"]",
"next_move_span_start",
"=",
"sorted_move_spans",
"[",
"i",
"+",
"1",
"]",
"[",
"0",
"]",
"if",
"(",
"_LinesAreAllBlank",
"(",
"file_lines",
",",
"move_span_end",
",",
"next_move_span_start",
")",
"and",
"not",
"_ContainsBarrierInclude",
"(",
"file_lines",
",",
"sorted_move_spans",
"[",
"i",
"+",
"1",
"]",
")",
")",
":",
"i",
"+=",
"1",
"else",
":",
"break",
"reorder_span_end",
"=",
"sorted_move_spans",
"[",
"i",
"]",
"[",
"1",
"]",
"# We'll map every line in the span to the span-extent.",
"for",
"line_number",
"in",
"range",
"(",
"reorder_span_start",
",",
"reorder_span_end",
")",
":",
"file_lines",
"[",
"line_number",
"]",
".",
"reorder_span",
"=",
"(",
"reorder_span_start",
",",
"reorder_span_end",
")",
"i",
"+=",
"1"
] | https://github.com/apache/kudu/blob/90895ce76590f10730ad7aac3613b69d89ff5422/build-support/iwyu/fix_includes.py#L910-L966 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/py3/scipy/signal/windows/windows.py | python | boxcar | (M, sym=True) | return _truncate(w, needs_trunc) | Return a boxcar or rectangular window.
Also known as a rectangular window or Dirichlet window, this is equivalent
to no window at all.
Parameters
----------
M : int
Number of points in the output window. If zero or less, an empty
array is returned.
sym : bool, optional
Whether the window is symmetric. (Has no effect for boxcar.)
Returns
-------
w : ndarray
The window, with the maximum value normalized to 1.
Examples
--------
Plot the window and its frequency response:
>>> from scipy import signal
>>> from scipy.fftpack import fft, fftshift
>>> import matplotlib.pyplot as plt
>>> window = signal.boxcar(51)
>>> plt.plot(window)
>>> plt.title("Boxcar window")
>>> plt.ylabel("Amplitude")
>>> plt.xlabel("Sample")
>>> plt.figure()
>>> A = fft(window, 2048) / (len(window)/2.0)
>>> freq = np.linspace(-0.5, 0.5, len(A))
>>> response = 20 * np.log10(np.abs(fftshift(A / abs(A).max())))
>>> plt.plot(freq, response)
>>> plt.axis([-0.5, 0.5, -120, 0])
>>> plt.title("Frequency response of the boxcar window")
>>> plt.ylabel("Normalized magnitude [dB]")
>>> plt.xlabel("Normalized frequency [cycles per sample]") | Return a boxcar or rectangular window. | [
"Return",
"a",
"boxcar",
"or",
"rectangular",
"window",
"."
] | def boxcar(M, sym=True):
"""Return a boxcar or rectangular window.
Also known as a rectangular window or Dirichlet window, this is equivalent
to no window at all.
Parameters
----------
M : int
Number of points in the output window. If zero or less, an empty
array is returned.
sym : bool, optional
Whether the window is symmetric. (Has no effect for boxcar.)
Returns
-------
w : ndarray
The window, with the maximum value normalized to 1.
Examples
--------
Plot the window and its frequency response:
>>> from scipy import signal
>>> from scipy.fftpack import fft, fftshift
>>> import matplotlib.pyplot as plt
>>> window = signal.boxcar(51)
>>> plt.plot(window)
>>> plt.title("Boxcar window")
>>> plt.ylabel("Amplitude")
>>> plt.xlabel("Sample")
>>> plt.figure()
>>> A = fft(window, 2048) / (len(window)/2.0)
>>> freq = np.linspace(-0.5, 0.5, len(A))
>>> response = 20 * np.log10(np.abs(fftshift(A / abs(A).max())))
>>> plt.plot(freq, response)
>>> plt.axis([-0.5, 0.5, -120, 0])
>>> plt.title("Frequency response of the boxcar window")
>>> plt.ylabel("Normalized magnitude [dB]")
>>> plt.xlabel("Normalized frequency [cycles per sample]")
"""
if _len_guards(M):
return np.ones(M)
M, needs_trunc = _extend(M, sym)
w = np.ones(M, float)
return _truncate(w, needs_trunc) | [
"def",
"boxcar",
"(",
"M",
",",
"sym",
"=",
"True",
")",
":",
"if",
"_len_guards",
"(",
"M",
")",
":",
"return",
"np",
".",
"ones",
"(",
"M",
")",
"M",
",",
"needs_trunc",
"=",
"_extend",
"(",
"M",
",",
"sym",
")",
"w",
"=",
"np",
".",
"ones",
"(",
"M",
",",
"float",
")",
"return",
"_truncate",
"(",
"w",
",",
"needs_trunc",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/signal/windows/windows.py#L123-L173 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/ipython/py3/IPython/utils/coloransi.py | python | ColorSchemeTable.set_active_scheme | (self,scheme,case_sensitive=0) | Set the currently active scheme.
Names are by default compared in a case-insensitive way, but this can
be changed by setting the parameter case_sensitive to true. | Set the currently active scheme. | [
"Set",
"the",
"currently",
"active",
"scheme",
"."
] | def set_active_scheme(self,scheme,case_sensitive=0):
"""Set the currently active scheme.
Names are by default compared in a case-insensitive way, but this can
be changed by setting the parameter case_sensitive to true."""
scheme_names = list(self.keys())
if case_sensitive:
valid_schemes = scheme_names
scheme_test = scheme
else:
valid_schemes = [s.lower() for s in scheme_names]
scheme_test = scheme.lower()
try:
scheme_idx = valid_schemes.index(scheme_test)
except ValueError:
raise ValueError('Unrecognized color scheme: ' + scheme + \
'\nValid schemes: '+str(scheme_names).replace("'', ",''))
else:
active = scheme_names[scheme_idx]
self.active_scheme_name = active
self.active_colors = self[active].colors
# Now allow using '' as an index for the current active scheme
self[''] = self[active] | [
"def",
"set_active_scheme",
"(",
"self",
",",
"scheme",
",",
"case_sensitive",
"=",
"0",
")",
":",
"scheme_names",
"=",
"list",
"(",
"self",
".",
"keys",
"(",
")",
")",
"if",
"case_sensitive",
":",
"valid_schemes",
"=",
"scheme_names",
"scheme_test",
"=",
"scheme",
"else",
":",
"valid_schemes",
"=",
"[",
"s",
".",
"lower",
"(",
")",
"for",
"s",
"in",
"scheme_names",
"]",
"scheme_test",
"=",
"scheme",
".",
"lower",
"(",
")",
"try",
":",
"scheme_idx",
"=",
"valid_schemes",
".",
"index",
"(",
"scheme_test",
")",
"except",
"ValueError",
":",
"raise",
"ValueError",
"(",
"'Unrecognized color scheme: '",
"+",
"scheme",
"+",
"'\\nValid schemes: '",
"+",
"str",
"(",
"scheme_names",
")",
".",
"replace",
"(",
"\"'', \"",
",",
"''",
")",
")",
"else",
":",
"active",
"=",
"scheme_names",
"[",
"scheme_idx",
"]",
"self",
".",
"active_scheme_name",
"=",
"active",
"self",
".",
"active_colors",
"=",
"self",
"[",
"active",
"]",
".",
"colors",
"# Now allow using '' as an index for the current active scheme",
"self",
"[",
"''",
"]",
"=",
"self",
"[",
"active",
"]"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/ipython/py3/IPython/utils/coloransi.py#L164-L187 | ||
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/ops/control_flow_ops.py | python | ControlFlowContext.ExitResult | (self, result) | Make a list of tensors available in the outer context. | Make a list of tensors available in the outer context. | [
"Make",
"a",
"list",
"of",
"tensors",
"available",
"in",
"the",
"outer",
"context",
"."
] | def ExitResult(self, result):
"""Make a list of tensors available in the outer context."""
if self._outer_context:
def fn(x):
self._outer_context.AddName(x.name)
return x
nest.map_structure(fn, result, expand_composites=True) | [
"def",
"ExitResult",
"(",
"self",
",",
"result",
")",
":",
"if",
"self",
".",
"_outer_context",
":",
"def",
"fn",
"(",
"x",
")",
":",
"self",
".",
"_outer_context",
".",
"AddName",
"(",
"x",
".",
"name",
")",
"return",
"x",
"nest",
".",
"map_structure",
"(",
"fn",
",",
"result",
",",
"expand_composites",
"=",
"True",
")"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/control_flow_ops.py#L766-L772 | ||
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/ops/linalg/linear_operator_algebra.py | python | RegisterCholesky.__call__ | (self, cholesky_fn) | return cholesky_fn | Perform the Cholesky registration.
Args:
cholesky_fn: The function to use for the Cholesky.
Returns:
cholesky_fn
Raises:
TypeError: if cholesky_fn is not a callable.
ValueError: if a Cholesky function has already been registered for
the given argument classes. | Perform the Cholesky registration. | [
"Perform",
"the",
"Cholesky",
"registration",
"."
] | def __call__(self, cholesky_fn):
"""Perform the Cholesky registration.
Args:
cholesky_fn: The function to use for the Cholesky.
Returns:
cholesky_fn
Raises:
TypeError: if cholesky_fn is not a callable.
ValueError: if a Cholesky function has already been registered for
the given argument classes.
"""
if not callable(cholesky_fn):
raise TypeError(
"cholesky_fn must be callable, received: {}".format(cholesky_fn))
if self._key in _CHOLESKY_DECOMPS:
raise ValueError("Cholesky({}) has already been registered to: {}".format(
self._key[0].__name__, _CHOLESKY_DECOMPS[self._key]))
_CHOLESKY_DECOMPS[self._key] = cholesky_fn
return cholesky_fn | [
"def",
"__call__",
"(",
"self",
",",
"cholesky_fn",
")",
":",
"if",
"not",
"callable",
"(",
"cholesky_fn",
")",
":",
"raise",
"TypeError",
"(",
"\"cholesky_fn must be callable, received: {}\"",
".",
"format",
"(",
"cholesky_fn",
")",
")",
"if",
"self",
".",
"_key",
"in",
"_CHOLESKY_DECOMPS",
":",
"raise",
"ValueError",
"(",
"\"Cholesky({}) has already been registered to: {}\"",
".",
"format",
"(",
"self",
".",
"_key",
"[",
"0",
"]",
".",
"__name__",
",",
"_CHOLESKY_DECOMPS",
"[",
"self",
".",
"_key",
"]",
")",
")",
"_CHOLESKY_DECOMPS",
"[",
"self",
".",
"_key",
"]",
"=",
"cholesky_fn",
"return",
"cholesky_fn"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/linalg/linear_operator_algebra.py#L251-L272 | |
macchina-io/macchina.io | ef24ba0e18379c3dd48fb84e6dbf991101cb8db0 | platform/JS/V8/tools/gyp/pylib/gyp/generator/ninja.py | python | ComputeOutputDir | (params) | return os.path.normpath(os.path.join(generator_dir, output_dir)) | Returns the path from the toplevel_dir to the build output directory. | Returns the path from the toplevel_dir to the build output directory. | [
"Returns",
"the",
"path",
"from",
"the",
"toplevel_dir",
"to",
"the",
"build",
"output",
"directory",
"."
] | def ComputeOutputDir(params):
"""Returns the path from the toplevel_dir to the build output directory."""
# generator_dir: relative path from pwd to where make puts build files.
# Makes migrating from make to ninja easier, ninja doesn't put anything here.
generator_dir = os.path.relpath(params['options'].generator_output or '.')
# output_dir: relative path from generator_dir to the build directory.
output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
# Relative path from source root to our output files. e.g. "out"
return os.path.normpath(os.path.join(generator_dir, output_dir)) | [
"def",
"ComputeOutputDir",
"(",
"params",
")",
":",
"# generator_dir: relative path from pwd to where make puts build files.",
"# Makes migrating from make to ninja easier, ninja doesn't put anything here.",
"generator_dir",
"=",
"os",
".",
"path",
".",
"relpath",
"(",
"params",
"[",
"'options'",
"]",
".",
"generator_output",
"or",
"'.'",
")",
"# output_dir: relative path from generator_dir to the build directory.",
"output_dir",
"=",
"params",
".",
"get",
"(",
"'generator_flags'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'output_dir'",
",",
"'out'",
")",
"# Relative path from source root to our output files. e.g. \"out\"",
"return",
"os",
".",
"path",
".",
"normpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"generator_dir",
",",
"output_dir",
")",
")"
] | https://github.com/macchina-io/macchina.io/blob/ef24ba0e18379c3dd48fb84e6dbf991101cb8db0/platform/JS/V8/tools/gyp/pylib/gyp/generator/ninja.py#L1704-L1714 | |
BlzFans/wke | b0fa21158312e40c5fbd84682d643022b6c34a93 | cygwin/lib/python2.6/compiler/pyassem.py | python | PyFlowGraph.flattenGraph | (self) | Arrange the blocks in order and resolve jumps | Arrange the blocks in order and resolve jumps | [
"Arrange",
"the",
"blocks",
"in",
"order",
"and",
"resolve",
"jumps"
] | def flattenGraph(self):
"""Arrange the blocks in order and resolve jumps"""
assert self.stage == RAW
self.insts = insts = []
pc = 0
begin = {}
end = {}
for b in self.getBlocksInOrder():
begin[b] = pc
for inst in b.getInstructions():
insts.append(inst)
if len(inst) == 1:
pc = pc + 1
elif inst[0] != "SET_LINENO":
# arg takes 2 bytes
pc = pc + 3
end[b] = pc
pc = 0
for i in range(len(insts)):
inst = insts[i]
if len(inst) == 1:
pc = pc + 1
elif inst[0] != "SET_LINENO":
pc = pc + 3
opname = inst[0]
if self.hasjrel.has_elt(opname):
oparg = inst[1]
offset = begin[oparg] - pc
insts[i] = opname, offset
elif self.hasjabs.has_elt(opname):
insts[i] = opname, begin[inst[1]]
self.stage = FLAT | [
"def",
"flattenGraph",
"(",
"self",
")",
":",
"assert",
"self",
".",
"stage",
"==",
"RAW",
"self",
".",
"insts",
"=",
"insts",
"=",
"[",
"]",
"pc",
"=",
"0",
"begin",
"=",
"{",
"}",
"end",
"=",
"{",
"}",
"for",
"b",
"in",
"self",
".",
"getBlocksInOrder",
"(",
")",
":",
"begin",
"[",
"b",
"]",
"=",
"pc",
"for",
"inst",
"in",
"b",
".",
"getInstructions",
"(",
")",
":",
"insts",
".",
"append",
"(",
"inst",
")",
"if",
"len",
"(",
"inst",
")",
"==",
"1",
":",
"pc",
"=",
"pc",
"+",
"1",
"elif",
"inst",
"[",
"0",
"]",
"!=",
"\"SET_LINENO\"",
":",
"# arg takes 2 bytes",
"pc",
"=",
"pc",
"+",
"3",
"end",
"[",
"b",
"]",
"=",
"pc",
"pc",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"insts",
")",
")",
":",
"inst",
"=",
"insts",
"[",
"i",
"]",
"if",
"len",
"(",
"inst",
")",
"==",
"1",
":",
"pc",
"=",
"pc",
"+",
"1",
"elif",
"inst",
"[",
"0",
"]",
"!=",
"\"SET_LINENO\"",
":",
"pc",
"=",
"pc",
"+",
"3",
"opname",
"=",
"inst",
"[",
"0",
"]",
"if",
"self",
".",
"hasjrel",
".",
"has_elt",
"(",
"opname",
")",
":",
"oparg",
"=",
"inst",
"[",
"1",
"]",
"offset",
"=",
"begin",
"[",
"oparg",
"]",
"-",
"pc",
"insts",
"[",
"i",
"]",
"=",
"opname",
",",
"offset",
"elif",
"self",
".",
"hasjabs",
".",
"has_elt",
"(",
"opname",
")",
":",
"insts",
"[",
"i",
"]",
"=",
"opname",
",",
"begin",
"[",
"inst",
"[",
"1",
"]",
"]",
"self",
".",
"stage",
"=",
"FLAT"
] | https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/compiler/pyassem.py#L424-L455 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/pandas/py2/pandas/core/arrays/datetimelike.py | python | DatetimeLikeArrayMixin._box_values | (self, values) | return lib.map_infer(values, self._box_func) | apply box func to passed values | apply box func to passed values | [
"apply",
"box",
"func",
"to",
"passed",
"values"
] | def _box_values(self, values):
"""
apply box func to passed values
"""
return lib.map_infer(values, self._box_func) | [
"def",
"_box_values",
"(",
"self",
",",
"values",
")",
":",
"return",
"lib",
".",
"map_infer",
"(",
"values",
",",
"self",
".",
"_box_func",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/arrays/datetimelike.py#L342-L346 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/urllib3/connection.py | python | HTTPConnection._new_conn | (self) | return conn | Establish a socket connection and set nodelay settings on it.
:return: New socket connection. | Establish a socket connection and set nodelay settings on it. | [
"Establish",
"a",
"socket",
"connection",
"and",
"set",
"nodelay",
"settings",
"on",
"it",
"."
] | def _new_conn(self):
""" Establish a socket connection and set nodelay settings on it.
:return: New socket connection.
"""
extra_kw = {}
if self.source_address:
extra_kw['source_address'] = self.source_address
if self.socket_options:
extra_kw['socket_options'] = self.socket_options
try:
conn = connection.create_connection(
(self.host, self.port), self.timeout, **extra_kw)
except SocketTimeout as e:
raise ConnectTimeoutError(
self, "Connection to %s timed out. (connect timeout=%s)" %
(self.host, self.timeout))
except SocketError as e:
raise NewConnectionError(
self, "Failed to establish a new connection: %s" % e)
return conn | [
"def",
"_new_conn",
"(",
"self",
")",
":",
"extra_kw",
"=",
"{",
"}",
"if",
"self",
".",
"source_address",
":",
"extra_kw",
"[",
"'source_address'",
"]",
"=",
"self",
".",
"source_address",
"if",
"self",
".",
"socket_options",
":",
"extra_kw",
"[",
"'socket_options'",
"]",
"=",
"self",
".",
"socket_options",
"try",
":",
"conn",
"=",
"connection",
".",
"create_connection",
"(",
"(",
"self",
".",
"host",
",",
"self",
".",
"port",
")",
",",
"self",
".",
"timeout",
",",
"*",
"*",
"extra_kw",
")",
"except",
"SocketTimeout",
"as",
"e",
":",
"raise",
"ConnectTimeoutError",
"(",
"self",
",",
"\"Connection to %s timed out. (connect timeout=%s)\"",
"%",
"(",
"self",
".",
"host",
",",
"self",
".",
"timeout",
")",
")",
"except",
"SocketError",
"as",
"e",
":",
"raise",
"NewConnectionError",
"(",
"self",
",",
"\"Failed to establish a new connection: %s\"",
"%",
"e",
")",
"return",
"conn"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/urllib3/connection.py#L127-L152 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/datamodel/packer.py | python | ArgPacker.from_arguments | (self, builder, args) | return values | Unflatten all argument values | Unflatten all argument values | [
"Unflatten",
"all",
"argument",
"values"
] | def from_arguments(self, builder, args):
"""Unflatten all argument values
"""
valtree = self._unflattener.unflatten(args)
values = [dm.from_argument(builder, val)
for dm, val in zip(self._dm_args, valtree)
]
return values | [
"def",
"from_arguments",
"(",
"self",
",",
"builder",
",",
"args",
")",
":",
"valtree",
"=",
"self",
".",
"_unflattener",
".",
"unflatten",
"(",
"args",
")",
"values",
"=",
"[",
"dm",
".",
"from_argument",
"(",
"builder",
",",
"val",
")",
"for",
"dm",
",",
"val",
"in",
"zip",
"(",
"self",
".",
"_dm_args",
",",
"valtree",
")",
"]",
"return",
"values"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/datamodel/packer.py#L106-L115 | |
mickem/nscp | 79f89fdbb6da63f91bc9dedb7aea202fe938f237 | scripts/python/lib/google/protobuf/internal/containers.py | python | RepeatedScalarFieldContainer.__delitem__ | (self, key) | Deletes the item at the specified position. | Deletes the item at the specified position. | [
"Deletes",
"the",
"item",
"at",
"the",
"specified",
"position",
"."
] | def __delitem__(self, key):
"""Deletes the item at the specified position."""
del self._values[key]
self._message_listener.Modified() | [
"def",
"__delitem__",
"(",
"self",
",",
"key",
")",
":",
"del",
"self",
".",
"_values",
"[",
"key",
"]",
"self",
".",
"_message_listener",
".",
"Modified",
"(",
")"
] | https://github.com/mickem/nscp/blob/79f89fdbb6da63f91bc9dedb7aea202fe938f237/scripts/python/lib/google/protobuf/internal/containers.py#L161-L164 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemFramework/v1/AWS/resource-manager-code/lib/jsonschema/exceptions.py | python | ErrorTree.__setitem__ | (self, index, value) | Add an error to the tree at the given ``index``. | Add an error to the tree at the given ``index``. | [
"Add",
"an",
"error",
"to",
"the",
"tree",
"at",
"the",
"given",
"index",
"."
] | def __setitem__(self, index, value):
"""
Add an error to the tree at the given ``index``.
"""
self._contents[index] = value | [
"def",
"__setitem__",
"(",
"self",
",",
"index",
",",
"value",
")",
":",
"self",
".",
"_contents",
"[",
"index",
"]",
"=",
"value"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemFramework/v1/AWS/resource-manager-code/lib/jsonschema/exceptions.py#L271-L275 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/calendar.py | python | GenericCalendarCtrl.EnableYearChange | (*args, **kwargs) | return _calendar.GenericCalendarCtrl_EnableYearChange(*args, **kwargs) | EnableYearChange(self, bool enable=True)
This function should be used instead of changing CAL_NO_YEAR_CHANGE
style bit directly. It allows or disallows the user to change the year
interactively. | EnableYearChange(self, bool enable=True) | [
"EnableYearChange",
"(",
"self",
"bool",
"enable",
"=",
"True",
")"
] | def EnableYearChange(*args, **kwargs):
"""
EnableYearChange(self, bool enable=True)
This function should be used instead of changing CAL_NO_YEAR_CHANGE
style bit directly. It allows or disallows the user to change the year
interactively.
"""
return _calendar.GenericCalendarCtrl_EnableYearChange(*args, **kwargs) | [
"def",
"EnableYearChange",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_calendar",
".",
"GenericCalendarCtrl_EnableYearChange",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/calendar.py#L547-L555 | |
mongodb/mongo | d8ff665343ad29cf286ee2cf4a1960d29371937b | src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/clangxx.py | python | generate | (env) | Add Builders and construction variables for clang++ to an Environment. | Add Builders and construction variables for clang++ to an Environment. | [
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"clang",
"++",
"to",
"an",
"Environment",
"."
] | def generate(env):
"""Add Builders and construction variables for clang++ to an Environment."""
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
SCons.Tool.cxx.generate(env)
env['CXX'] = env.Detect(compilers) or 'clang++'
# platform specific settings
if env['PLATFORM'] == 'aix':
env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS -mminimal-toc')
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
env['SHOBJSUFFIX'] = '$OBJSUFFIX'
elif env['PLATFORM'] == 'hpux':
env['SHOBJSUFFIX'] = '.pic.o'
elif env['PLATFORM'] == 'sunos':
env['SHOBJSUFFIX'] = '.pic.o'
elif env['PLATFORM'] == 'win32':
# Ensure that we have a proper path for clang++
clangxx = SCons.Tool.find_program_path(env, compilers[0], default_paths=get_clang_install_dirs(env['PLATFORM']))
if clangxx:
clangxx_bin_dir = os.path.dirname(clangxx)
env.AppendENVPath('PATH', clangxx_bin_dir)
# determine compiler version
if env['CXX']:
pipe = SCons.Action._subproc(env, [env['CXX'], '--version'],
stdin='devnull',
stderr='devnull',
stdout=subprocess.PIPE)
if pipe.wait() != 0:
return
# clang -dumpversion is of no use
with pipe.stdout:
line = pipe.stdout.readline()
if sys.version_info[0] > 2:
line = line.decode()
match = re.search(r'clang +version +([0-9]+(?:\.[0-9]+)+)', line)
if match:
env['CXXVERSION'] = match.group(1) | [
"def",
"generate",
"(",
"env",
")",
":",
"static_obj",
",",
"shared_obj",
"=",
"SCons",
".",
"Tool",
".",
"createObjBuilders",
"(",
"env",
")",
"SCons",
".",
"Tool",
".",
"cxx",
".",
"generate",
"(",
"env",
")",
"env",
"[",
"'CXX'",
"]",
"=",
"env",
".",
"Detect",
"(",
"compilers",
")",
"or",
"'clang++'",
"# platform specific settings",
"if",
"env",
"[",
"'PLATFORM'",
"]",
"==",
"'aix'",
":",
"env",
"[",
"'SHCXXFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$CXXFLAGS -mminimal-toc'",
")",
"env",
"[",
"'STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'",
"]",
"=",
"1",
"env",
"[",
"'SHOBJSUFFIX'",
"]",
"=",
"'$OBJSUFFIX'",
"elif",
"env",
"[",
"'PLATFORM'",
"]",
"==",
"'hpux'",
":",
"env",
"[",
"'SHOBJSUFFIX'",
"]",
"=",
"'.pic.o'",
"elif",
"env",
"[",
"'PLATFORM'",
"]",
"==",
"'sunos'",
":",
"env",
"[",
"'SHOBJSUFFIX'",
"]",
"=",
"'.pic.o'",
"elif",
"env",
"[",
"'PLATFORM'",
"]",
"==",
"'win32'",
":",
"# Ensure that we have a proper path for clang++",
"clangxx",
"=",
"SCons",
".",
"Tool",
".",
"find_program_path",
"(",
"env",
",",
"compilers",
"[",
"0",
"]",
",",
"default_paths",
"=",
"get_clang_install_dirs",
"(",
"env",
"[",
"'PLATFORM'",
"]",
")",
")",
"if",
"clangxx",
":",
"clangxx_bin_dir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"clangxx",
")",
"env",
".",
"AppendENVPath",
"(",
"'PATH'",
",",
"clangxx_bin_dir",
")",
"# determine compiler version",
"if",
"env",
"[",
"'CXX'",
"]",
":",
"pipe",
"=",
"SCons",
".",
"Action",
".",
"_subproc",
"(",
"env",
",",
"[",
"env",
"[",
"'CXX'",
"]",
",",
"'--version'",
"]",
",",
"stdin",
"=",
"'devnull'",
",",
"stderr",
"=",
"'devnull'",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
")",
"if",
"pipe",
".",
"wait",
"(",
")",
"!=",
"0",
":",
"return",
"# clang -dumpversion is of no use",
"with",
"pipe",
".",
"stdout",
":",
"line",
"=",
"pipe",
".",
"stdout",
".",
"readline",
"(",
")",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
">",
"2",
":",
"line",
"=",
"line",
".",
"decode",
"(",
")",
"match",
"=",
"re",
".",
"search",
"(",
"r'clang +version +([0-9]+(?:\\.[0-9]+)+)'",
",",
"line",
")",
"if",
"match",
":",
"env",
"[",
"'CXXVERSION'",
"]",
"=",
"match",
".",
"group",
"(",
"1",
")"
] | https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/clangxx.py#L54-L94 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/_windows.py | python | Dialog.CanDoLayoutAdaptation | (*args, **kwargs) | return _windows_.Dialog_CanDoLayoutAdaptation(*args, **kwargs) | CanDoLayoutAdaptation(self) -> bool | CanDoLayoutAdaptation(self) -> bool | [
"CanDoLayoutAdaptation",
"(",
"self",
")",
"-",
">",
"bool"
def CanDoLayoutAdaptation(*args, **kwargs):
    """CanDoLayoutAdaptation(self) -> bool"""
    # SWIG-generated shim: forwards directly to the native wxWidgets
    # implementation in the _windows_ extension module.
    return _windows_.Dialog_CanDoLayoutAdaptation(*args, **kwargs)
"def",
"CanDoLayoutAdaptation",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"Dialog_CanDoLayoutAdaptation",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_windows.py#L823-L825 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Lib/pickletools.py | python | dis | (pickle, out=None, memo=None, indentlevel=4) | Produce a symbolic disassembly of a pickle.
'pickle' is a file-like object, or string, containing a (at least one)
pickle. The pickle is disassembled from the current position, through
the first STOP opcode encountered.
Optional arg 'out' is a file-like object to which the disassembly is
printed. It defaults to sys.stdout.
Optional arg 'memo' is a Python dict, used as the pickle's memo. It
may be mutated by dis(), if the pickle contains PUT or BINPUT opcodes.
Passing the same memo object to another dis() call then allows disassembly
to proceed across multiple pickles that were all created by the same
pickler with the same memo. Ordinarily you don't need to worry about this.
Optional arg indentlevel is the number of blanks by which to indent
a new MARK level. It defaults to 4.
In addition to printing the disassembly, some sanity checks are made:
+ All embedded opcode arguments "make sense".
+ Explicit and implicit pop operations have enough items on the stack.
+ When an opcode implicitly refers to a markobject, a markobject is
actually on the stack.
+ A memo entry isn't referenced before it's defined.
+ The markobject isn't stored in the memo.
+ A memo entry isn't redefined. | Produce a symbolic disassembly of a pickle. | [
"Produce",
"a",
"symbolic",
"disassembly",
"of",
"a",
"pickle",
"."
def dis(pickle, out=None, memo=None, indentlevel=4):
    """Produce a symbolic disassembly of a pickle.

    'pickle' is a file-like object, or string, containing a (at least one)
    pickle. The pickle is disassembled from the current position, through
    the first STOP opcode encountered.

    Optional arg 'out' is a file-like object to which the disassembly is
    printed. It defaults to sys.stdout.

    Optional arg 'memo' is a Python dict, used as the pickle's memo. It
    may be mutated by dis(), if the pickle contains PUT or BINPUT opcodes.
    Passing the same memo object to another dis() call then allows disassembly
    to proceed across multiple pickles that were all created by the same
    pickler with the same memo. Ordinarily you don't need to worry about this.

    Optional arg indentlevel is the number of blanks by which to indent
    a new MARK level. It defaults to 4.

    In addition to printing the disassembly, some sanity checks are made:

    + All embedded opcode arguments "make sense".

    + Explicit and implicit pop operations have enough items on the stack.

    + When an opcode implicitly refers to a markobject, a markobject is
      actually on the stack.

    + A memo entry isn't referenced before it's defined.

    + The markobject isn't stored in the memo.

    + A memo entry isn't redefined.
    """
    # Most of the hair here is for sanity checks, but most of it is needed
    # anyway to detect when a protocol 0 POP takes a MARK off the stack
    # (which in turn is needed to indent MARK blocks correctly).
    stack = []          # crude emulation of unpickler stack
    if memo is None:
        memo = {}       # crude emulation of unpickler memo
    maxproto = -1       # max protocol number seen
    markstack = []      # bytecode positions of MARK opcodes
    indentchunk = ' ' * indentlevel
    errormsg = None
    for opcode, arg, pos in genops(pickle):
        if pos is not None:
            print >> out, "%5d:" % pos,

        line = "%-4s %s%s" % (repr(opcode.code)[1:-1],
                              indentchunk * len(markstack),
                              opcode.name)

        maxproto = max(maxproto, opcode.proto)
        before = opcode.stack_before    # don't mutate
        after = opcode.stack_after      # don't mutate
        numtopop = len(before)

        # See whether a MARK should be popped.
        markmsg = None
        if markobject in before or (opcode.name == "POP" and
                                    stack and
                                    stack[-1] is markobject):
            assert markobject not in after
            if __debug__:
                if markobject in before:
                    assert before[-1] is stackslice
            if markstack:
                markpos = markstack.pop()
                if markpos is None:
                    markmsg = "(MARK at unknown opcode offset)"
                else:
                    markmsg = "(MARK at %d)" % markpos
                # Pop everything at and after the topmost markobject.
                while stack[-1] is not markobject:
                    stack.pop()
                stack.pop()
                # Stop later code from popping too much.
                try:
                    numtopop = before.index(markobject)
                except ValueError:
                    assert opcode.name == "POP"
                    numtopop = 0
            else:
                errormsg = markmsg = "no MARK exists on stack"

        # Check for correct memo usage.
        if opcode.name in ("PUT", "BINPUT", "LONG_BINPUT"):
            assert arg is not None
            if arg in memo:
                errormsg = "memo key %r already defined" % arg
            elif not stack:
                errormsg = "stack is empty -- can't store into memo"
            elif stack[-1] is markobject:
                errormsg = "can't store markobject in the memo"
            else:
                memo[arg] = stack[-1]

        elif opcode.name in ("GET", "BINGET", "LONG_BINGET"):
            if arg in memo:
                assert len(after) == 1
                after = [memo[arg]]     # for better stack emulation
            else:
                errormsg = "memo key %r has never been stored into" % arg

        if arg is not None or markmsg:
            # make a mild effort to align arguments
            line += ' ' * (10 - len(opcode.name))
            if arg is not None:
                line += ' ' + repr(arg)
            if markmsg:
                line += ' ' + markmsg
        print >> out, line

        if errormsg:
            # Note that we delayed complaining until the offending opcode
            # was printed.
            raise ValueError(errormsg)

        # Emulate the stack effects.
        if len(stack) < numtopop:
            raise ValueError("tries to pop %d items from stack with "
                             "only %d items" % (numtopop, len(stack)))
        if numtopop:
            del stack[-numtopop:]
        if markobject in after:
            assert markobject not in before
            markstack.append(pos)
        stack.extend(after)

    print >> out, "highest protocol among opcodes =", maxproto
    if stack:
        raise ValueError("stack not empty after STOP: %r" % stack)
"def",
"dis",
"(",
"pickle",
",",
"out",
"=",
"None",
",",
"memo",
"=",
"None",
",",
"indentlevel",
"=",
"4",
")",
":",
"# Most of the hair here is for sanity checks, but most of it is needed",
"# anyway to detect when a protocol 0 POP takes a MARK off the stack",
"# (which in turn is needed to indent MARK blocks correctly).",
"stack",
"=",
"[",
"]",
"# crude emulation of unpickler stack",
"if",
"memo",
"is",
"None",
":",
"memo",
"=",
"{",
"}",
"# crude emulation of unpickler memo",
"maxproto",
"=",
"-",
"1",
"# max protocol number seen",
"markstack",
"=",
"[",
"]",
"# bytecode positions of MARK opcodes",
"indentchunk",
"=",
"' '",
"*",
"indentlevel",
"errormsg",
"=",
"None",
"for",
"opcode",
",",
"arg",
",",
"pos",
"in",
"genops",
"(",
"pickle",
")",
":",
"if",
"pos",
"is",
"not",
"None",
":",
"print",
">>",
"out",
",",
"\"%5d:\"",
"%",
"pos",
",",
"line",
"=",
"\"%-4s %s%s\"",
"%",
"(",
"repr",
"(",
"opcode",
".",
"code",
")",
"[",
"1",
":",
"-",
"1",
"]",
",",
"indentchunk",
"*",
"len",
"(",
"markstack",
")",
",",
"opcode",
".",
"name",
")",
"maxproto",
"=",
"max",
"(",
"maxproto",
",",
"opcode",
".",
"proto",
")",
"before",
"=",
"opcode",
".",
"stack_before",
"# don't mutate",
"after",
"=",
"opcode",
".",
"stack_after",
"# don't mutate",
"numtopop",
"=",
"len",
"(",
"before",
")",
"# See whether a MARK should be popped.",
"markmsg",
"=",
"None",
"if",
"markobject",
"in",
"before",
"or",
"(",
"opcode",
".",
"name",
"==",
"\"POP\"",
"and",
"stack",
"and",
"stack",
"[",
"-",
"1",
"]",
"is",
"markobject",
")",
":",
"assert",
"markobject",
"not",
"in",
"after",
"if",
"__debug__",
":",
"if",
"markobject",
"in",
"before",
":",
"assert",
"before",
"[",
"-",
"1",
"]",
"is",
"stackslice",
"if",
"markstack",
":",
"markpos",
"=",
"markstack",
".",
"pop",
"(",
")",
"if",
"markpos",
"is",
"None",
":",
"markmsg",
"=",
"\"(MARK at unknown opcode offset)\"",
"else",
":",
"markmsg",
"=",
"\"(MARK at %d)\"",
"%",
"markpos",
"# Pop everything at and after the topmost markobject.",
"while",
"stack",
"[",
"-",
"1",
"]",
"is",
"not",
"markobject",
":",
"stack",
".",
"pop",
"(",
")",
"stack",
".",
"pop",
"(",
")",
"# Stop later code from popping too much.",
"try",
":",
"numtopop",
"=",
"before",
".",
"index",
"(",
"markobject",
")",
"except",
"ValueError",
":",
"assert",
"opcode",
".",
"name",
"==",
"\"POP\"",
"numtopop",
"=",
"0",
"else",
":",
"errormsg",
"=",
"markmsg",
"=",
"\"no MARK exists on stack\"",
"# Check for correct memo usage.",
"if",
"opcode",
".",
"name",
"in",
"(",
"\"PUT\"",
",",
"\"BINPUT\"",
",",
"\"LONG_BINPUT\"",
")",
":",
"assert",
"arg",
"is",
"not",
"None",
"if",
"arg",
"in",
"memo",
":",
"errormsg",
"=",
"\"memo key %r already defined\"",
"%",
"arg",
"elif",
"not",
"stack",
":",
"errormsg",
"=",
"\"stack is empty -- can't store into memo\"",
"elif",
"stack",
"[",
"-",
"1",
"]",
"is",
"markobject",
":",
"errormsg",
"=",
"\"can't store markobject in the memo\"",
"else",
":",
"memo",
"[",
"arg",
"]",
"=",
"stack",
"[",
"-",
"1",
"]",
"elif",
"opcode",
".",
"name",
"in",
"(",
"\"GET\"",
",",
"\"BINGET\"",
",",
"\"LONG_BINGET\"",
")",
":",
"if",
"arg",
"in",
"memo",
":",
"assert",
"len",
"(",
"after",
")",
"==",
"1",
"after",
"=",
"[",
"memo",
"[",
"arg",
"]",
"]",
"# for better stack emulation",
"else",
":",
"errormsg",
"=",
"\"memo key %r has never been stored into\"",
"%",
"arg",
"if",
"arg",
"is",
"not",
"None",
"or",
"markmsg",
":",
"# make a mild effort to align arguments",
"line",
"+=",
"' '",
"*",
"(",
"10",
"-",
"len",
"(",
"opcode",
".",
"name",
")",
")",
"if",
"arg",
"is",
"not",
"None",
":",
"line",
"+=",
"' '",
"+",
"repr",
"(",
"arg",
")",
"if",
"markmsg",
":",
"line",
"+=",
"' '",
"+",
"markmsg",
"print",
">>",
"out",
",",
"line",
"if",
"errormsg",
":",
"# Note that we delayed complaining until the offending opcode",
"# was printed.",
"raise",
"ValueError",
"(",
"errormsg",
")",
"# Emulate the stack effects.",
"if",
"len",
"(",
"stack",
")",
"<",
"numtopop",
":",
"raise",
"ValueError",
"(",
"\"tries to pop %d items from stack with \"",
"\"only %d items\"",
"%",
"(",
"numtopop",
",",
"len",
"(",
"stack",
")",
")",
")",
"if",
"numtopop",
":",
"del",
"stack",
"[",
"-",
"numtopop",
":",
"]",
"if",
"markobject",
"in",
"after",
":",
"assert",
"markobject",
"not",
"in",
"before",
"markstack",
".",
"append",
"(",
"pos",
")",
"stack",
".",
"extend",
"(",
"after",
")",
"print",
">>",
"out",
",",
"\"highest protocol among opcodes =\"",
",",
"maxproto",
"if",
"stack",
":",
"raise",
"ValueError",
"(",
"\"stack not empty after STOP: %r\"",
"%",
"stack",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/pickletools.py#L1887-L2021 | ||
microsoft/checkedc-clang | a173fefde5d7877b7750e7ce96dd08cf18baebf2 | compiler-rt/lib/sanitizer_common/scripts/cpplint.py | python | _CppLintState.ResetErrorCounts | (self) | Sets the module's error statistic back to zero. | Sets the module's error statistic back to zero. | [
"Sets",
"the",
"module",
"s",
"error",
"statistic",
"back",
"to",
"zero",
"."
def ResetErrorCounts(self):
    """Reset this lint run's error statistics to a pristine state."""
    self.errors_by_category = {}
    self.error_count = 0
"def",
"ResetErrorCounts",
"(",
"self",
")",
":",
"self",
".",
"error_count",
"=",
"0",
"self",
".",
"errors_by_category",
"=",
"{",
"}"
] | https://github.com/microsoft/checkedc-clang/blob/a173fefde5d7877b7750e7ce96dd08cf18baebf2/compiler-rt/lib/sanitizer_common/scripts/cpplint.py#L938-L941 | ||
apple/swift-lldb | d74be846ef3e62de946df343e8c234bde93a8912 | scripts/Python/static-binding/lldb.py | python | SBSymbolContextList.Clear | (self) | return _lldb.SBSymbolContextList_Clear(self) | Clear(SBSymbolContextList self) | Clear(SBSymbolContextList self) | [
"Clear",
"(",
"SBSymbolContextList",
"self",
")"
def Clear(self):
    """Clear(SBSymbolContextList self)"""
    # SWIG-generated shim: the actual clearing is done by the native
    # LLDB implementation in the _lldb extension module.
    return _lldb.SBSymbolContextList_Clear(self)
"def",
"Clear",
"(",
"self",
")",
":",
"return",
"_lldb",
".",
"SBSymbolContextList_Clear",
"(",
"self",
")"
] | https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L10139-L10141 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/stc.py | python | StyledTextCtrl.RegisterImage | (*args, **kwargs) | return _stc.StyledTextCtrl_RegisterImage(*args, **kwargs) | RegisterImage(self, int type, Bitmap bmp)
Register an image for use in autocompletion lists. | RegisterImage(self, int type, Bitmap bmp) | [
"RegisterImage",
"(",
"self",
"int",
"type",
"Bitmap",
"bmp",
")"
def RegisterImage(*args, **kwargs):
    """
    RegisterImage(self, int type, Bitmap bmp)

    Register an image for use in autocompletion lists.
    """
    # SWIG-generated shim: forwards to the native Scintilla wrapper in
    # the _stc extension module.
    return _stc.StyledTextCtrl_RegisterImage(*args, **kwargs)
"def",
"RegisterImage",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_stc",
".",
"StyledTextCtrl_RegisterImage",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/stc.py#L3203-L3209 | |
eventql/eventql | 7ca0dbb2e683b525620ea30dc40540a22d5eb227 | deps/3rdparty/spidermonkey/mozjs/build/checksums.py | python | setup_logging | (level=logging.DEBUG) | This function sets up the logging module using a speficiable logging
module logging level. The default log level is DEBUG.
The output is in the format:
<level> - <message>
Example:
DEBUG - Finished reading in file | This function sets up the logging module using a speficiable logging
module logging level. The default log level is DEBUG. | [
"This",
"function",
"sets",
"up",
"the",
"logging",
"module",
"using",
"a",
"speficiable",
"logging",
"module",
"logging",
"level",
".",
"The",
"default",
"log",
"level",
"is",
"DEBUG",
"."
def setup_logging(level=logging.DEBUG):
    """Configure the 'checksums.py' logger with a stderr stream handler.

    The logger itself stays wide open (DEBUG); filtering happens on the
    handler, which emits records at or above ``level`` formatted as
    ``<level> - <message>``, e.g. ``DEBUG - Finished reading in file``.
    """
    log = logging.getLogger('checksums.py')
    log.setLevel(logging.DEBUG)

    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(level)
    stream_handler.setFormatter(logging.Formatter("%(levelname)s - %(message)s"))

    log.addHandler(stream_handler)
"def",
"setup_logging",
"(",
"level",
"=",
"logging",
".",
"DEBUG",
")",
":",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"'checksums.py'",
")",
"logger",
".",
"setLevel",
"(",
"logging",
".",
"DEBUG",
")",
"handler",
"=",
"logging",
".",
"StreamHandler",
"(",
")",
"handler",
".",
"setLevel",
"(",
"level",
")",
"formatter",
"=",
"logging",
".",
"Formatter",
"(",
"\"%(levelname)s - %(message)s\"",
")",
"handler",
".",
"setFormatter",
"(",
"formatter",
")",
"logger",
".",
"addHandler",
"(",
"handler",
")"
] | https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/build/checksums.py#L88-L104 | ||
thalium/icebox | 99d147d5b9269222225443ce171b4fd46d8985d4 | third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2.py | python | xmlDoc.htmlIsAutoClosed | (self, elem) | return ret | The HTML DTD allows a tag to implicitly close other tags.
The list is kept in htmlStartClose array. This function
checks if a tag is autoclosed by one of it's child | The HTML DTD allows a tag to implicitly close other tags.
The list is kept in htmlStartClose array. This function
checks if a tag is autoclosed by one of it's child | [
"The",
"HTML",
"DTD",
"allows",
"a",
"tag",
"to",
"implicitly",
"close",
"other",
"tags",
".",
"The",
"list",
"is",
"kept",
"in",
"htmlStartClose",
"array",
".",
"This",
"function",
"checks",
"if",
"a",
"tag",
"is",
"autoclosed",
"by",
"one",
"of",
"it",
"s",
"child"
def htmlIsAutoClosed(self, elem):
    """Check whether this tag is implicitly closed by one of its children.

    The HTML DTD allows a tag to auto-close other tags; the applicable
    pairs are kept in libxml2's htmlStartClose array.
    """
    return libxml2mod.htmlIsAutoClosed(self._o, elem)
"def",
"htmlIsAutoClosed",
"(",
"self",
",",
"elem",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"htmlIsAutoClosed",
"(",
"self",
".",
"_o",
",",
"elem",
")",
"return",
"ret"
] | https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2.py#L3984-L3989 | |
BlzFans/wke | b0fa21158312e40c5fbd84682d643022b6c34a93 | cygwin/lib/python2.6/xml/dom/minidom.py | python | parseString | (string, parser=None) | Parse a file into a DOM from a string. | Parse a file into a DOM from a string. | [
"Parse",
"a",
"file",
"into",
"a",
"DOM",
"from",
"a",
"string",
"."
def parseString(string, parser=None):
    """Parse a file into a DOM from a string."""
    if parser is not None:
        # A caller-supplied parser is honoured by routing through pulldom.
        from xml.dom import pulldom
        return _do_pulldom_parse(pulldom.parseString, (string,),
                                 {'parser': parser})
    # Default path: the expat-based DOM builder.
    from xml.dom import expatbuilder
    return expatbuilder.parseString(string)
"def",
"parseString",
"(",
"string",
",",
"parser",
"=",
"None",
")",
":",
"if",
"parser",
"is",
"None",
":",
"from",
"xml",
".",
"dom",
"import",
"expatbuilder",
"return",
"expatbuilder",
".",
"parseString",
"(",
"string",
")",
"else",
":",
"from",
"xml",
".",
"dom",
"import",
"pulldom",
"return",
"_do_pulldom_parse",
"(",
"pulldom",
".",
"parseString",
",",
"(",
"string",
",",
")",
",",
"{",
"'parser'",
":",
"parser",
"}",
")"
] | https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/xml/dom/minidom.py#L1924-L1932 | ||
mindspore-ai/mindspore | fb8fd3338605bb34fa5cea054e535a8b1d753fab | mindspore/python/mindspore/ops/composite/base.py | python | _Tail.__init__ | (self, name) | Initialize _Tail. | Initialize _Tail. | [
"Initialize",
"_Tail",
"."
def __init__(self, name):
    """Initialize _Tail."""
    # Pure delegation to the Tail_ base-class constructor; 'name'
    # identifies this operation instance.
    Tail_.__init__(self, name)
"def",
"__init__",
"(",
"self",
",",
"name",
")",
":",
"Tail_",
".",
"__init__",
"(",
"self",
",",
"name",
")"
] | https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/ops/composite/base.py#L830-L832 | ||
sdhash/sdhash | b9eff63e4e5867e910f41fd69032bbb1c94a2a5e | external/tools/build/v2/build/virtual_target.py | python | VirtualTarget.depends | (self, d) | Adds additional instances of 'VirtualTarget' that this
one depends on. | Adds additional instances of 'VirtualTarget' that this
one depends on. | [
"Adds",
"additional",
"instances",
"of",
"VirtualTarget",
"that",
"this",
"one",
"depends",
"on",
"."
def depends(self, d):
    """Adds additional instances of 'VirtualTarget' that this
    one depends on.
    """
    # BUG FIX: list.sort() sorts in place and returns None, so the old
    # 'unique(self.dependencies_ + d).sort()' assigned None to
    # self.dependencies_.  sorted() returns the sorted list itself.
    self.dependencies_ = sorted(unique(self.dependencies_ + d))
"def",
"depends",
"(",
"self",
",",
"d",
")",
":",
"self",
".",
"dependencies_",
"=",
"unique",
"(",
"self",
".",
"dependencies_",
"+",
"d",
")",
".",
"sort",
"(",
")"
] | https://github.com/sdhash/sdhash/blob/b9eff63e4e5867e910f41fd69032bbb1c94a2a5e/external/tools/build/v2/build/virtual_target.py#L281-L285 | ||
yuxng/PoseCNN | 9f3dd7b7bce21dcafc05e8f18ccc90da3caabd04 | lib/datasets/rgbd_scene.py | python | rgbd_scene.depth_path_at | (self, i) | return self.depth_path_from_index(self.image_index[i]) | Return the absolute path to depth i in the image sequence. | Return the absolute path to depth i in the image sequence. | [
"Return",
"the",
"absolute",
"path",
"to",
"depth",
"i",
"in",
"the",
"image",
"sequence",
"."
def depth_path_at(self, i):
    """
    Return the absolute path to depth i in the image sequence.
    """
    image_key = self.image_index[i]
    return self.depth_path_from_index(image_key)
"def",
"depth_path_at",
"(",
"self",
",",
"i",
")",
":",
"return",
"self",
".",
"depth_path_from_index",
"(",
"self",
".",
"image_index",
"[",
"i",
"]",
")"
] | https://github.com/yuxng/PoseCNN/blob/9f3dd7b7bce21dcafc05e8f18ccc90da3caabd04/lib/datasets/rgbd_scene.py#L50-L54 | |
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | tensorflow/contrib/slim/python/slim/nets/inception_v3.py | python | _reduced_kernel_size_for_small_input | (input_tensor, kernel_size) | return kernel_size_out | Define kernel size which is automatically reduced for small input.
If the shape of the input images is unknown at graph construction time this
function assumes that the input images are is large enough.
Args:
input_tensor: input tensor of size [batch_size, height, width, channels].
kernel_size: desired kernel size of length 2: [kernel_height, kernel_width]
Returns:
a tensor with the kernel size.
TODO(jrru): Make this function work with unknown shapes. Theoretically, this
can be done with the code below. Problems are two-fold: (1) If the shape was
known, it will be lost. (2) inception.tf.contrib.slim.ops._two_element_tuple
cannot
handle tensors that define the kernel size.
shape = tf.shape(input_tensor)
return = tf.stack([tf.minimum(shape[1], kernel_size[0]),
tf.minimum(shape[2], kernel_size[1])]) | Define kernel size which is automatically reduced for small input. | [
"Define",
"kernel",
"size",
"which",
"is",
"automatically",
"reduced",
"for",
"small",
"input",
"."
] | def _reduced_kernel_size_for_small_input(input_tensor, kernel_size):
"""Define kernel size which is automatically reduced for small input.
If the shape of the input images is unknown at graph construction time this
function assumes that the input images are is large enough.
Args:
input_tensor: input tensor of size [batch_size, height, width, channels].
kernel_size: desired kernel size of length 2: [kernel_height, kernel_width]
Returns:
a tensor with the kernel size.
TODO(jrru): Make this function work with unknown shapes. Theoretically, this
can be done with the code below. Problems are two-fold: (1) If the shape was
known, it will be lost. (2) inception.tf.contrib.slim.ops._two_element_tuple
cannot
handle tensors that define the kernel size.
shape = tf.shape(input_tensor)
return = tf.stack([tf.minimum(shape[1], kernel_size[0]),
tf.minimum(shape[2], kernel_size[1])])
"""
shape = input_tensor.get_shape().as_list()
if shape[1] is None or shape[2] is None:
kernel_size_out = kernel_size
else:
kernel_size_out = [
min(shape[1], kernel_size[0]), min(shape[2], kernel_size[1])
]
return kernel_size_out | [
"def",
"_reduced_kernel_size_for_small_input",
"(",
"input_tensor",
",",
"kernel_size",
")",
":",
"shape",
"=",
"input_tensor",
".",
"get_shape",
"(",
")",
".",
"as_list",
"(",
")",
"if",
"shape",
"[",
"1",
"]",
"is",
"None",
"or",
"shape",
"[",
"2",
"]",
"is",
"None",
":",
"kernel_size_out",
"=",
"kernel_size",
"else",
":",
"kernel_size_out",
"=",
"[",
"min",
"(",
"shape",
"[",
"1",
"]",
",",
"kernel_size",
"[",
"0",
"]",
")",
",",
"min",
"(",
"shape",
"[",
"2",
"]",
",",
"kernel_size",
"[",
"1",
"]",
")",
"]",
"return",
"kernel_size_out"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/contrib/slim/python/slim/nets/inception_v3.py#L644-L674 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Lib/macpath.py | python | abspath | (path) | return normpath(path) | Return an absolute path. | Return an absolute path. | [
"Return",
"an",
"absolute",
"path",
"."
def abspath(path):
    """Return an absolute path."""
    if isabs(path):
        return normpath(path)
    # Relative path: anchor it at the current working directory, using the
    # unicode-aware getcwd variant when handed a unicode path.
    if isinstance(path, _unicode):
        cwd = os.getcwdu()
    else:
        cwd = os.getcwd()
    return normpath(join(cwd, path))
"def",
"abspath",
"(",
"path",
")",
":",
"if",
"not",
"isabs",
"(",
"path",
")",
":",
"if",
"isinstance",
"(",
"path",
",",
"_unicode",
")",
":",
"cwd",
"=",
"os",
".",
"getcwdu",
"(",
")",
"else",
":",
"cwd",
"=",
"os",
".",
"getcwd",
"(",
")",
"path",
"=",
"join",
"(",
"cwd",
",",
"path",
")",
"return",
"normpath",
"(",
"path",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/macpath.py#L187-L195 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/lib/platebtn.py | python | PlateButton.__PostEvent | (self) | Post a button event to parent of this control | Post a button event to parent of this control | [
"Post",
"a",
"button",
"event",
"to",
"parent",
"of",
"this",
"control"
def __PostEvent(self):
    """Post a button event to parent of this control"""
    # Toggle-style buttons emit the toggle event type, plain buttons the
    # ordinary click event type.
    event_type = (wx.wxEVT_COMMAND_TOGGLEBUTTON_CLICKED
                  if self._style & PB_STYLE_TOGGLE
                  else wx.wxEVT_COMMAND_BUTTON_CLICKED)
    event = wx.CommandEvent(event_type, self.GetId())
    event.SetEventObject(self)
    event.SetString(self.GetLabel())
    self.GetEventHandler().ProcessEvent(event)
"def",
"__PostEvent",
"(",
"self",
")",
":",
"if",
"self",
".",
"_style",
"&",
"PB_STYLE_TOGGLE",
":",
"etype",
"=",
"wx",
".",
"wxEVT_COMMAND_TOGGLEBUTTON_CLICKED",
"else",
":",
"etype",
"=",
"wx",
".",
"wxEVT_COMMAND_BUTTON_CLICKED",
"bevt",
"=",
"wx",
".",
"CommandEvent",
"(",
"etype",
",",
"self",
".",
"GetId",
"(",
")",
")",
"bevt",
".",
"SetEventObject",
"(",
"self",
")",
"bevt",
".",
"SetString",
"(",
"self",
".",
"GetLabel",
"(",
")",
")",
"self",
".",
"GetEventHandler",
"(",
")",
".",
"ProcessEvent",
"(",
"bevt",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/platebtn.py#L248-L257 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Lib/plat-mac/PixMapWrapper.py | python | PixMapWrapper.fromstring | (self,s,width,height,format=imgformat.macrgb) | Stuff this pixmap with raw pixel data from a string.
Supply width, height, and one of the imgformat specifiers. | Stuff this pixmap with raw pixel data from a string.
Supply width, height, and one of the imgformat specifiers. | [
"Stuff",
"this",
"pixmap",
"with",
"raw",
"pixel",
"data",
"from",
"a",
"string",
".",
"Supply",
"width",
"height",
"and",
"one",
"of",
"the",
"imgformat",
"specifiers",
"."
def fromstring(self, s, width, height, format=imgformat.macrgb):
    """Stuff this pixmap with raw pixel data from a string.
    Supply width, height, and one of the imgformat specifiers."""
    # we only support 16- and 32-bit mac rgb...
    # so convert if necessary
    if format != imgformat.macrgb and format != imgformat.macrgb16:
        # (LATER!)
        # BUG FIX: the old code raised a *string* ("NotImplementedError"),
        # which itself raises TypeError on Python 2.6+; raise the real
        # exception class instead.
        raise NotImplementedError("conversion to macrgb or macrgb16")
    self.data = s
    self.bounds = (0, 0, width, height)
    self.cmpCount = 3
    self.pixelType = QuickDraw.RGBDirect
    if format == imgformat.macrgb:
        # 32-bit pixels: 8 bits per colour component.
        self.pixelSize = 32
        self.cmpSize = 8
    else:
        # 16-bit pixels: 5 bits per colour component.
        self.pixelSize = 16
        self.cmpSize = 5
    self.rowBytes = width*self.pixelSize/8
"def",
"fromstring",
"(",
"self",
",",
"s",
",",
"width",
",",
"height",
",",
"format",
"=",
"imgformat",
".",
"macrgb",
")",
":",
"# we only support 16- and 32-bit mac rgb...",
"# so convert if necessary",
"if",
"format",
"!=",
"imgformat",
".",
"macrgb",
"and",
"format",
"!=",
"imgformat",
".",
"macrgb16",
":",
"# (LATER!)",
"raise",
"\"NotImplementedError\"",
",",
"\"conversion to macrgb or macrgb16\"",
"self",
".",
"data",
"=",
"s",
"self",
".",
"bounds",
"=",
"(",
"0",
",",
"0",
",",
"width",
",",
"height",
")",
"self",
".",
"cmpCount",
"=",
"3",
"self",
".",
"pixelType",
"=",
"QuickDraw",
".",
"RGBDirect",
"if",
"format",
"==",
"imgformat",
".",
"macrgb",
":",
"self",
".",
"pixelSize",
"=",
"32",
"self",
".",
"cmpSize",
"=",
"8",
"else",
":",
"self",
".",
"pixelSize",
"=",
"16",
"self",
".",
"cmpSize",
"=",
"5",
"self",
".",
"rowBytes",
"=",
"width",
"*",
"self",
".",
"pixelSize",
"/",
"8"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/plat-mac/PixMapWrapper.py#L161-L179 | ||
infinidb/infinidb | 6c9f5dfdabc41ad80e81ba9e1a4eb0d7271a5d23 | writeengine/bulk/bulkload.py | python | find_paths | () | return (bulk_dir, data_dir) | Find DBRoot and BulkRoot. | Find DBRoot and BulkRoot. | [
"Find",
"DBRoot",
"and",
"BulkRoot",
"."
def find_paths():
    """Find DBRoot and BulkRoot.

    Reads the Calpont XML config named by $CALPONT_CONFIG_FILE, falling
    back to the system Calpont.xml, and returns (bulk_dir, data_dir).
    Exits the process when no config file exists or it cannot be parsed.
    """
    try:
        config_file = os.environ['CALPONT_CONFIG_FILE']
    except KeyError:
        try:
            logger.info("Environment variable CALPONT_CONFIG_FILE not set, looking for system Calpont.xml")
            config_file = '/usr/local/Calpont/etc/Calpont.xml'
            os.lstat(config_file)  # existence check; raises if missing
        except:
            logger.error('No config file available')
            sys.exit('No config file available')
    try:
        xmldoc = xml.dom.minidom.parse(config_file)
        bulk_node = xmldoc.getElementsByTagName('BulkRoot')[0]
        db_node = xmldoc.getElementsByTagName('DBRoot1')[0]
        bulk_dir = bulk_node.childNodes[0].nodeValue
        data_dir = db_node.childNodes[0].nodeValue
    except Exception as e:
        # FIX: 'except Exception, e' is a syntax error on Python 3; the
        # 'as' form behaves identically on Python 2.6+.
        logger.error('Error parsing config file')
        logger.error(e)
        sys.exit('Error parsing config file')
    return (bulk_dir, data_dir)
"def",
"find_paths",
"(",
")",
":",
"try",
":",
"config_file",
"=",
"os",
".",
"environ",
"[",
"'CALPONT_CONFIG_FILE'",
"]",
"except",
"KeyError",
":",
"try",
":",
"logger",
".",
"info",
"(",
"\"Environment variable CALPONT_CONFIG_FILE not set, looking for system Calpont.xml\"",
")",
"config_file",
"=",
"'/usr/local/Calpont/etc/Calpont.xml'",
"os",
".",
"lstat",
"(",
"config_file",
")",
"except",
":",
"logger",
".",
"error",
"(",
"'No config file available'",
")",
"sys",
".",
"exit",
"(",
"'No config file available'",
")",
"try",
":",
"xmldoc",
"=",
"xml",
".",
"dom",
".",
"minidom",
".",
"parse",
"(",
"config_file",
")",
"bulk_node",
"=",
"xmldoc",
".",
"getElementsByTagName",
"(",
"'BulkRoot'",
")",
"[",
"0",
"]",
"db_node",
"=",
"xmldoc",
".",
"getElementsByTagName",
"(",
"'DBRoot1'",
")",
"[",
"0",
"]",
"bulk_dir",
"=",
"bulk_node",
".",
"childNodes",
"[",
"0",
"]",
".",
"nodeValue",
"data_dir",
"=",
"db_node",
".",
"childNodes",
"[",
"0",
"]",
".",
"nodeValue",
"except",
"Exception",
",",
"e",
":",
"logger",
".",
"error",
"(",
"'Error parsing config file'",
")",
"logger",
".",
"error",
"(",
"e",
")",
"sys",
".",
"exit",
"(",
"'Error parsing config file'",
")",
"return",
"(",
"bulk_dir",
",",
"data_dir",
")"
] | https://github.com/infinidb/infinidb/blob/6c9f5dfdabc41ad80e81ba9e1a4eb0d7271a5d23/writeengine/bulk/bulkload.py#L51-L76 | |
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/sched.py | python | scheduler.queue | (self) | return map(heapq.heappop, [events]*len(events)) | An ordered list of upcoming events.
Events are named tuples with fields for:
time, priority, action, arguments | An ordered list of upcoming events. | [
"An",
"ordered",
"list",
"of",
"upcoming",
"events",
"."
] | def queue(self):
"""An ordered list of upcoming events.
Events are named tuples with fields for:
time, priority, action, arguments
"""
# Use heapq to sort the queue rather than using 'sorted(self._queue)'.
# With heapq, two events scheduled at the same time will show in
# the actual order they would be retrieved.
events = self._queue[:]
return map(heapq.heappop, [events]*len(events)) | [
"def",
"queue",
"(",
"self",
")",
":",
"# Use heapq to sort the queue rather than using 'sorted(self._queue)'.",
"# With heapq, two events scheduled at the same time will show in",
"# the actual order they would be retrieved.",
"events",
"=",
"self",
".",
"_queue",
"[",
":",
"]",
"return",
"map",
"(",
"heapq",
".",
"heappop",
",",
"[",
"events",
"]",
"*",
"len",
"(",
"events",
")",
")"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/sched.py#L123-L134 | |
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/training/evaluation.py | python | _get_or_create_eval_step | () | Gets or creates the eval step `Tensor`.
Returns:
A `Tensor` representing a counter for the evaluation step.
Raises:
ValueError: If multiple `Tensors` have been added to the
`tf.GraphKeys.EVAL_STEP` collection. | Gets or creates the eval step `Tensor`. | [
"Gets",
"or",
"creates",
"the",
"eval",
"step",
"Tensor",
"."
] | def _get_or_create_eval_step():
"""Gets or creates the eval step `Tensor`.
Returns:
A `Tensor` representing a counter for the evaluation step.
Raises:
ValueError: If multiple `Tensors` have been added to the
`tf.GraphKeys.EVAL_STEP` collection.
"""
graph = ops.get_default_graph()
eval_steps = graph.get_collection(ops.GraphKeys.EVAL_STEP)
if len(eval_steps) == 1:
return eval_steps[0]
elif len(eval_steps) > 1:
raise ValueError('Multiple tensors added to tf.GraphKeys.EVAL_STEP')
else:
counter = variable_scope.get_variable(
'eval_step',
shape=[],
dtype=dtypes.int64,
initializer=init_ops.zeros_initializer(),
trainable=False,
collections=[ops.GraphKeys.LOCAL_VARIABLES, ops.GraphKeys.EVAL_STEP])
return counter | [
"def",
"_get_or_create_eval_step",
"(",
")",
":",
"graph",
"=",
"ops",
".",
"get_default_graph",
"(",
")",
"eval_steps",
"=",
"graph",
".",
"get_collection",
"(",
"ops",
".",
"GraphKeys",
".",
"EVAL_STEP",
")",
"if",
"len",
"(",
"eval_steps",
")",
"==",
"1",
":",
"return",
"eval_steps",
"[",
"0",
"]",
"elif",
"len",
"(",
"eval_steps",
")",
">",
"1",
":",
"raise",
"ValueError",
"(",
"'Multiple tensors added to tf.GraphKeys.EVAL_STEP'",
")",
"else",
":",
"counter",
"=",
"variable_scope",
".",
"get_variable",
"(",
"'eval_step'",
",",
"shape",
"=",
"[",
"]",
",",
"dtype",
"=",
"dtypes",
".",
"int64",
",",
"initializer",
"=",
"init_ops",
".",
"zeros_initializer",
"(",
")",
",",
"trainable",
"=",
"False",
",",
"collections",
"=",
"[",
"ops",
".",
"GraphKeys",
".",
"LOCAL_VARIABLES",
",",
"ops",
".",
"GraphKeys",
".",
"EVAL_STEP",
"]",
")",
"return",
"counter"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/training/evaluation.py#L37-L61 | ||
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/third_party/WebOb/webob/response.py | python | Response.unset_cookie | (self, name, strict=True) | Unset a cookie with the given name (remove it from the
response). | Unset a cookie with the given name (remove it from the
response). | [
"Unset",
"a",
"cookie",
"with",
"the",
"given",
"name",
"(",
"remove",
"it",
"from",
"the",
"response",
")",
"."
] | def unset_cookie(self, name, strict=True):
"""
Unset a cookie with the given name (remove it from the
response).
"""
existing = self.headers.getall('Set-Cookie')
if not existing and not strict:
return
cookies = Cookie()
for header in existing:
cookies.load(header)
if isinstance(name, text_type):
name = name.encode('utf8')
if name in cookies:
del cookies[name]
del self.headers['Set-Cookie']
for m in cookies.values():
self.headerlist.append(('Set-Cookie', m.serialize()))
elif strict:
raise KeyError("No cookie has been set with the name %r" % name) | [
"def",
"unset_cookie",
"(",
"self",
",",
"name",
",",
"strict",
"=",
"True",
")",
":",
"existing",
"=",
"self",
".",
"headers",
".",
"getall",
"(",
"'Set-Cookie'",
")",
"if",
"not",
"existing",
"and",
"not",
"strict",
":",
"return",
"cookies",
"=",
"Cookie",
"(",
")",
"for",
"header",
"in",
"existing",
":",
"cookies",
".",
"load",
"(",
"header",
")",
"if",
"isinstance",
"(",
"name",
",",
"text_type",
")",
":",
"name",
"=",
"name",
".",
"encode",
"(",
"'utf8'",
")",
"if",
"name",
"in",
"cookies",
":",
"del",
"cookies",
"[",
"name",
"]",
"del",
"self",
".",
"headers",
"[",
"'Set-Cookie'",
"]",
"for",
"m",
"in",
"cookies",
".",
"values",
"(",
")",
":",
"self",
".",
"headerlist",
".",
"append",
"(",
"(",
"'Set-Cookie'",
",",
"m",
".",
"serialize",
"(",
")",
")",
")",
"elif",
"strict",
":",
"raise",
"KeyError",
"(",
"\"No cookie has been set with the name %r\"",
"%",
"name",
")"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/WebOb/webob/response.py#L800-L819 | ||
google/fhir | d77f57706c1a168529b0b87ca7ccb1c0113e83c2 | go/generate_go_protos.py | python | move_generated_go_protos | (tmp_dir: str) | Responsible for moving the generated go protos to their final destination.
Args:
tmp_dir: the temporary directory where the proto building and
transformations occur. | Responsible for moving the generated go protos to their final destination. | [
"Responsible",
"for",
"moving",
"the",
"generated",
"go",
"protos",
"to",
"their",
"final",
"destination",
"."
] | def move_generated_go_protos(tmp_dir: str):
"""Responsible for moving the generated go protos to their final destination.
Args:
tmp_dir: the temporary directory where the proto building and
transformations occur.
"""
dest_root = _REPO_PATH.value
proto_dest_dir = os.path.join(dest_root, "go/proto")
shutil.rmtree(proto_dest_dir, ignore_errors=True)
shutil.move(
os.path.join(tmp_dir, "proto-out/github.com/google/fhir/go/proto"),
proto_dest_dir)
accessor_out = os.path.join(
dest_root, "go/jsonformat/internal/accessor/accessor_test_go_proto")
shutil.rmtree(accessor_out, ignore_errors=True)
shutil.copytree(
os.path.join(
tmp_dir,
"proto-out/github.com/google/fhir/go/jsonformat/internal/accessor/accessor_test_go_proto"
), accessor_out)
protopath_dest_dir = os.path.join(
dest_root, "go/jsonformat/internal/protopath/protopathtest_go_proto")
shutil.rmtree(protopath_dest_dir, ignore_errors=True)
shutil.copytree(
os.path.join(
tmp_dir,
"proto-out/github.com/google/fhir/go/jsonformat/internal/protopath/protopathtest_go_proto"
), protopath_dest_dir) | [
"def",
"move_generated_go_protos",
"(",
"tmp_dir",
":",
"str",
")",
":",
"dest_root",
"=",
"_REPO_PATH",
".",
"value",
"proto_dest_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dest_root",
",",
"\"go/proto\"",
")",
"shutil",
".",
"rmtree",
"(",
"proto_dest_dir",
",",
"ignore_errors",
"=",
"True",
")",
"shutil",
".",
"move",
"(",
"os",
".",
"path",
".",
"join",
"(",
"tmp_dir",
",",
"\"proto-out/github.com/google/fhir/go/proto\"",
")",
",",
"proto_dest_dir",
")",
"accessor_out",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dest_root",
",",
"\"go/jsonformat/internal/accessor/accessor_test_go_proto\"",
")",
"shutil",
".",
"rmtree",
"(",
"accessor_out",
",",
"ignore_errors",
"=",
"True",
")",
"shutil",
".",
"copytree",
"(",
"os",
".",
"path",
".",
"join",
"(",
"tmp_dir",
",",
"\"proto-out/github.com/google/fhir/go/jsonformat/internal/accessor/accessor_test_go_proto\"",
")",
",",
"accessor_out",
")",
"protopath_dest_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dest_root",
",",
"\"go/jsonformat/internal/protopath/protopathtest_go_proto\"",
")",
"shutil",
".",
"rmtree",
"(",
"protopath_dest_dir",
",",
"ignore_errors",
"=",
"True",
")",
"shutil",
".",
"copytree",
"(",
"os",
".",
"path",
".",
"join",
"(",
"tmp_dir",
",",
"\"proto-out/github.com/google/fhir/go/jsonformat/internal/protopath/protopathtest_go_proto\"",
")",
",",
"protopath_dest_dir",
")"
] | https://github.com/google/fhir/blob/d77f57706c1a168529b0b87ca7ccb1c0113e83c2/go/generate_go_protos.py#L37-L67 | ||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/stc.py | python | StyledTextCtrl.AutoCompComplete | (*args, **kwargs) | return _stc.StyledTextCtrl_AutoCompComplete(*args, **kwargs) | AutoCompComplete(self)
User has selected an item so remove the list and insert the selection. | AutoCompComplete(self) | [
"AutoCompComplete",
"(",
"self",
")"
] | def AutoCompComplete(*args, **kwargs):
"""
AutoCompComplete(self)
User has selected an item so remove the list and insert the selection.
"""
return _stc.StyledTextCtrl_AutoCompComplete(*args, **kwargs) | [
"def",
"AutoCompComplete",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_stc",
".",
"StyledTextCtrl_AutoCompComplete",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/stc.py#L3062-L3068 | |
deepmind/open_spiel | 4ca53bea32bb2875c7385d215424048ae92f78c8 | open_spiel/python/algorithms/psro_v2/psro_v2.py | python | PSROSolver.get_and_update_non_marginalized_meta_strategies | (self, update=True) | return self._non_marginalized_probabilities | Returns the Nash Equilibrium distribution on meta game matrix. | Returns the Nash Equilibrium distribution on meta game matrix. | [
"Returns",
"the",
"Nash",
"Equilibrium",
"distribution",
"on",
"meta",
"game",
"matrix",
"."
] | def get_and_update_non_marginalized_meta_strategies(self, update=True):
"""Returns the Nash Equilibrium distribution on meta game matrix."""
if update:
self.update_meta_strategies()
return self._non_marginalized_probabilities | [
"def",
"get_and_update_non_marginalized_meta_strategies",
"(",
"self",
",",
"update",
"=",
"True",
")",
":",
"if",
"update",
":",
"self",
".",
"update_meta_strategies",
"(",
")",
"return",
"self",
".",
"_non_marginalized_probabilities"
] | https://github.com/deepmind/open_spiel/blob/4ca53bea32bb2875c7385d215424048ae92f78c8/open_spiel/python/algorithms/psro_v2/psro_v2.py#L478-L482 | |
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/x86/toolchain/share/gdb/python/gdb/command/frame_filters.py | python | InfoFrameFilter.enabled_string | (state) | Return "Yes" if filter is enabled, otherwise "No". | Return "Yes" if filter is enabled, otherwise "No". | [
"Return",
"Yes",
"if",
"filter",
"is",
"enabled",
"otherwise",
"No",
"."
] | def enabled_string(state):
"""Return "Yes" if filter is enabled, otherwise "No"."""
if state:
return "Yes"
else:
return "No" | [
"def",
"enabled_string",
"(",
"state",
")",
":",
"if",
"state",
":",
"return",
"\"Yes\"",
"else",
":",
"return",
"\"No\""
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/share/gdb/python/gdb/command/frame_filters.py#L52-L57 | ||
MegEngine/MegEngine | ce9ad07a27ec909fb8db4dd67943d24ba98fb93a | imperative/python/megengine/module/qat/module.py | python | QATModule.get_activation_dtype | (self) | return self._get_method_result(
"get_quantized_dtype", self.act_fake_quant, self.act_observer
) | r"""Get activation's quantization dtype as the method from ``qconfig``. | r"""Get activation's quantization dtype as the method from ``qconfig``. | [
"r",
"Get",
"activation",
"s",
"quantization",
"dtype",
"as",
"the",
"method",
"from",
"qconfig",
"."
] | def get_activation_dtype(self):
r"""Get activation's quantization dtype as the method from ``qconfig``."""
return self._get_method_result(
"get_quantized_dtype", self.act_fake_quant, self.act_observer
) | [
"def",
"get_activation_dtype",
"(",
"self",
")",
":",
"return",
"self",
".",
"_get_method_result",
"(",
"\"get_quantized_dtype\"",
",",
"self",
".",
"act_fake_quant",
",",
"self",
".",
"act_observer",
")"
] | https://github.com/MegEngine/MegEngine/blob/ce9ad07a27ec909fb8db4dd67943d24ba98fb93a/imperative/python/megengine/module/qat/module.py#L140-L144 | |
wanderine/BROCCOLI | ff7613de40d97429ba76ee2948cb5e1d7dd991d0 | code/bids/fslinstaller.py | python | FslInstall.install_tar | (self, targz_file) | return FslIResult('', FslIResult.SUCCESS, '') | Install given tar.gz file, use sudo where required | Install given tar.gz file, use sudo where required | [
"Install",
"given",
"tar",
".",
"gz",
"file",
"use",
"sudo",
"where",
"required"
] | def install_tar(self, targz_file):
'''Install given tar.gz file, use sudo where required'''
#import tarfile
from os import getpid,path
from time import time
# Check if install location is writable
MsgUser.debug("Checking %s is writeable." % (self.location))
if is_writeable(self.location):
as_root = False
elif is_writeable_as_root(self.location):
as_root = True
else:
return FslIResult(False, FslIResult.ERROR, "Unable to write to target folder (%s)" % (self.location))
MsgUser.message("Installing FSL software...")
# Find out how many files to install
#tarf = tarfile.open(targz_file, mode='r:gz')
#MsgUser.debug("Opened the tar file for reading...")
#tarcontents = tarf.getnames()
#nfiles = len(tarcontents)
#tarf.close()
#MsgUser.debug("Calculated the number of install objects as %s" % (nfiles))
# Generate a temporary name - eg fsl-<mypid>-date
tempname = '-'.join( ('fsl', str(getpid()), str(time())) )
MsgUser.debug("Untarring to %s into temporary folder %s." % (self.location, tempname))
tempfolder = "/".join((self.location.rstrip('/'),tempname))
a_result = run_cmd("mkdir %s" % (tempfolder), as_root = as_root)
if a_result.status == FslIResult.ERROR:
return a_result
MsgUser.debug("Calling tar -C %s -x -v -f %s" % (tempfolder, targz_file))
tar_cmd = ' '.join(('tar',
'-C',
tempfolder,
'-x',
'-o',
'-f',
targz_file))
#a_result = run_cmd_countlines(tar_cmd, nfiles, tui=False, as_root=as_root)
a_result = run_cmd_dropstdout(tar_cmd, as_root=as_root)
try:
if a_result.status == FslIResult.SUCCESS:
new_fsl = "/".join((tempfolder.rstrip("/"), 'fsl'))
install_to = "/".join((self.location.rstrip("/"),"fsl"))
if path.exists(install_to):
# move old one out of way
a_result = get_installed_version(install_to)
old_version = a_result.result
if a_result.status != FslIResult.SUCCESS:
MsgUser.warning("The contents of %s doesn't look like an FSL installation! - moving to fsl-0.0.0" % (install_to))
old_fsl = '-'.join((install_to, str(old_version)))
if path.exists(old_fsl):
MsgUser.debug("Looks like there is another copy of the old version of FSL - deleting...")
c_result = safe_delete(old_fsl, as_root)
if c_result.status == FslIResult.ERROR:
raise InstallFailed(";".join(("Install location already has a %s - I've tried to delete it but failed" % (old_fsl), c_result.message)))
a_result = run_cmd(" ".join(('mv', install_to, old_fsl)), as_root)
if a_result.status == FslIResult.ERROR:
# failed to move the old version
if not self.keep:
c_result = safe_delete(install_to, as_root)
if c_result.status == FslIResult.ERROR:
raise InstallFailed(";".join((a_result.message, c_result.message)))
else:
MsgUser.debug("Failed to move old version - %s, but able to delete it." % (a_result.message))
else:
c_result = safe_delete(tempfolder, as_root)
if c_result.status == FslIResult.ERROR:
MsgUser.debug("Failed to delete %s - %s." % (tempfolder, c_result.message))
raise InstallFailed( ";".join((a_result.message, c_result.message)))
else:
MsgUser.debug("Deleted temp folder %s - %s." % (tempfolder, c_result.message))
MsgUser.debug("Failed to move old version - %s." % (a_result.message))
raise InstallFailed('Failed to move old version out of way and you requested it was kept.')
# Remove old version if requested
if self.keep:
MsgUser.message("Old version moved to %s" % (old_fsl))
else:
c_result = safe_delete(old_fsl, as_root)
if c_result.status == FslIResult.ERROR:
raise InstallFailed( c_result.message )
else:
MsgUser.debug("Removed %s" % (old_fsl))
MsgUser.debug( " ".join(('mv', new_fsl, install_to)) )
a_result = run_cmd(" ".join(('mv', new_fsl, install_to)), as_root)
if a_result.status == FslIResult.ERROR:
# Unable to move new install into place
MsgUser.debug("Move %s into %s failed - %s." % (new_fsl, install_to, a_result.message))
raise InstallFailed('Failed to move new version into place %s' % (a_result.message))
else:
MsgUser.debug("Unable to unpack new version - %s." % (a_result.message))
raise InstallFailed('Unable to unpack new version - %s' % (a_result.message))
except InstallFailed, e:
# Clean up unpacked version
safe_delete(tempfolder,as_root)
return FslIResult('', FslIResult.ERROR, str(e))
safe_delete(tempfolder,as_root)
MsgUser.debug("Install complete")
MsgUser.ok("FSL software installed.")
return FslIResult('', FslIResult.SUCCESS, '') | [
"def",
"install_tar",
"(",
"self",
",",
"targz_file",
")",
":",
"#import tarfile",
"from",
"os",
"import",
"getpid",
",",
"path",
"from",
"time",
"import",
"time",
"# Check if install location is writable",
"MsgUser",
".",
"debug",
"(",
"\"Checking %s is writeable.\"",
"%",
"(",
"self",
".",
"location",
")",
")",
"if",
"is_writeable",
"(",
"self",
".",
"location",
")",
":",
"as_root",
"=",
"False",
"elif",
"is_writeable_as_root",
"(",
"self",
".",
"location",
")",
":",
"as_root",
"=",
"True",
"else",
":",
"return",
"FslIResult",
"(",
"False",
",",
"FslIResult",
".",
"ERROR",
",",
"\"Unable to write to target folder (%s)\"",
"%",
"(",
"self",
".",
"location",
")",
")",
"MsgUser",
".",
"message",
"(",
"\"Installing FSL software...\"",
")",
"# Find out how many files to install",
"#tarf = tarfile.open(targz_file, mode='r:gz')",
"#MsgUser.debug(\"Opened the tar file for reading...\")",
"#tarcontents = tarf.getnames()",
"#nfiles = len(tarcontents)",
"#tarf.close()",
"#MsgUser.debug(\"Calculated the number of install objects as %s\" % (nfiles))",
"# Generate a temporary name - eg fsl-<mypid>-date",
"tempname",
"=",
"'-'",
".",
"join",
"(",
"(",
"'fsl'",
",",
"str",
"(",
"getpid",
"(",
")",
")",
",",
"str",
"(",
"time",
"(",
")",
")",
")",
")",
"MsgUser",
".",
"debug",
"(",
"\"Untarring to %s into temporary folder %s.\"",
"%",
"(",
"self",
".",
"location",
",",
"tempname",
")",
")",
"tempfolder",
"=",
"\"/\"",
".",
"join",
"(",
"(",
"self",
".",
"location",
".",
"rstrip",
"(",
"'/'",
")",
",",
"tempname",
")",
")",
"a_result",
"=",
"run_cmd",
"(",
"\"mkdir %s\"",
"%",
"(",
"tempfolder",
")",
",",
"as_root",
"=",
"as_root",
")",
"if",
"a_result",
".",
"status",
"==",
"FslIResult",
".",
"ERROR",
":",
"return",
"a_result",
"MsgUser",
".",
"debug",
"(",
"\"Calling tar -C %s -x -v -f %s\"",
"%",
"(",
"tempfolder",
",",
"targz_file",
")",
")",
"tar_cmd",
"=",
"' '",
".",
"join",
"(",
"(",
"'tar'",
",",
"'-C'",
",",
"tempfolder",
",",
"'-x'",
",",
"'-o'",
",",
"'-f'",
",",
"targz_file",
")",
")",
"#a_result = run_cmd_countlines(tar_cmd, nfiles, tui=False, as_root=as_root)",
"a_result",
"=",
"run_cmd_dropstdout",
"(",
"tar_cmd",
",",
"as_root",
"=",
"as_root",
")",
"try",
":",
"if",
"a_result",
".",
"status",
"==",
"FslIResult",
".",
"SUCCESS",
":",
"new_fsl",
"=",
"\"/\"",
".",
"join",
"(",
"(",
"tempfolder",
".",
"rstrip",
"(",
"\"/\"",
")",
",",
"'fsl'",
")",
")",
"install_to",
"=",
"\"/\"",
".",
"join",
"(",
"(",
"self",
".",
"location",
".",
"rstrip",
"(",
"\"/\"",
")",
",",
"\"fsl\"",
")",
")",
"if",
"path",
".",
"exists",
"(",
"install_to",
")",
":",
"# move old one out of way",
"a_result",
"=",
"get_installed_version",
"(",
"install_to",
")",
"old_version",
"=",
"a_result",
".",
"result",
"if",
"a_result",
".",
"status",
"!=",
"FslIResult",
".",
"SUCCESS",
":",
"MsgUser",
".",
"warning",
"(",
"\"The contents of %s doesn't look like an FSL installation! - moving to fsl-0.0.0\"",
"%",
"(",
"install_to",
")",
")",
"old_fsl",
"=",
"'-'",
".",
"join",
"(",
"(",
"install_to",
",",
"str",
"(",
"old_version",
")",
")",
")",
"if",
"path",
".",
"exists",
"(",
"old_fsl",
")",
":",
"MsgUser",
".",
"debug",
"(",
"\"Looks like there is another copy of the old version of FSL - deleting...\"",
")",
"c_result",
"=",
"safe_delete",
"(",
"old_fsl",
",",
"as_root",
")",
"if",
"c_result",
".",
"status",
"==",
"FslIResult",
".",
"ERROR",
":",
"raise",
"InstallFailed",
"(",
"\";\"",
".",
"join",
"(",
"(",
"\"Install location already has a %s - I've tried to delete it but failed\"",
"%",
"(",
"old_fsl",
")",
",",
"c_result",
".",
"message",
")",
")",
")",
"a_result",
"=",
"run_cmd",
"(",
"\" \"",
".",
"join",
"(",
"(",
"'mv'",
",",
"install_to",
",",
"old_fsl",
")",
")",
",",
"as_root",
")",
"if",
"a_result",
".",
"status",
"==",
"FslIResult",
".",
"ERROR",
":",
"# failed to move the old version",
"if",
"not",
"self",
".",
"keep",
":",
"c_result",
"=",
"safe_delete",
"(",
"install_to",
",",
"as_root",
")",
"if",
"c_result",
".",
"status",
"==",
"FslIResult",
".",
"ERROR",
":",
"raise",
"InstallFailed",
"(",
"\";\"",
".",
"join",
"(",
"(",
"a_result",
".",
"message",
",",
"c_result",
".",
"message",
")",
")",
")",
"else",
":",
"MsgUser",
".",
"debug",
"(",
"\"Failed to move old version - %s, but able to delete it.\"",
"%",
"(",
"a_result",
".",
"message",
")",
")",
"else",
":",
"c_result",
"=",
"safe_delete",
"(",
"tempfolder",
",",
"as_root",
")",
"if",
"c_result",
".",
"status",
"==",
"FslIResult",
".",
"ERROR",
":",
"MsgUser",
".",
"debug",
"(",
"\"Failed to delete %s - %s.\"",
"%",
"(",
"tempfolder",
",",
"c_result",
".",
"message",
")",
")",
"raise",
"InstallFailed",
"(",
"\";\"",
".",
"join",
"(",
"(",
"a_result",
".",
"message",
",",
"c_result",
".",
"message",
")",
")",
")",
"else",
":",
"MsgUser",
".",
"debug",
"(",
"\"Deleted temp folder %s - %s.\"",
"%",
"(",
"tempfolder",
",",
"c_result",
".",
"message",
")",
")",
"MsgUser",
".",
"debug",
"(",
"\"Failed to move old version - %s.\"",
"%",
"(",
"a_result",
".",
"message",
")",
")",
"raise",
"InstallFailed",
"(",
"'Failed to move old version out of way and you requested it was kept.'",
")",
"# Remove old version if requested",
"if",
"self",
".",
"keep",
":",
"MsgUser",
".",
"message",
"(",
"\"Old version moved to %s\"",
"%",
"(",
"old_fsl",
")",
")",
"else",
":",
"c_result",
"=",
"safe_delete",
"(",
"old_fsl",
",",
"as_root",
")",
"if",
"c_result",
".",
"status",
"==",
"FslIResult",
".",
"ERROR",
":",
"raise",
"InstallFailed",
"(",
"c_result",
".",
"message",
")",
"else",
":",
"MsgUser",
".",
"debug",
"(",
"\"Removed %s\"",
"%",
"(",
"old_fsl",
")",
")",
"MsgUser",
".",
"debug",
"(",
"\" \"",
".",
"join",
"(",
"(",
"'mv'",
",",
"new_fsl",
",",
"install_to",
")",
")",
")",
"a_result",
"=",
"run_cmd",
"(",
"\" \"",
".",
"join",
"(",
"(",
"'mv'",
",",
"new_fsl",
",",
"install_to",
")",
")",
",",
"as_root",
")",
"if",
"a_result",
".",
"status",
"==",
"FslIResult",
".",
"ERROR",
":",
"# Unable to move new install into place",
"MsgUser",
".",
"debug",
"(",
"\"Move %s into %s failed - %s.\"",
"%",
"(",
"new_fsl",
",",
"install_to",
",",
"a_result",
".",
"message",
")",
")",
"raise",
"InstallFailed",
"(",
"'Failed to move new version into place %s'",
"%",
"(",
"a_result",
".",
"message",
")",
")",
"else",
":",
"MsgUser",
".",
"debug",
"(",
"\"Unable to unpack new version - %s.\"",
"%",
"(",
"a_result",
".",
"message",
")",
")",
"raise",
"InstallFailed",
"(",
"'Unable to unpack new version - %s'",
"%",
"(",
"a_result",
".",
"message",
")",
")",
"except",
"InstallFailed",
",",
"e",
":",
"# Clean up unpacked version",
"safe_delete",
"(",
"tempfolder",
",",
"as_root",
")",
"return",
"FslIResult",
"(",
"''",
",",
"FslIResult",
".",
"ERROR",
",",
"str",
"(",
"e",
")",
")",
"safe_delete",
"(",
"tempfolder",
",",
"as_root",
")",
"MsgUser",
".",
"debug",
"(",
"\"Install complete\"",
")",
"MsgUser",
".",
"ok",
"(",
"\"FSL software installed.\"",
")",
"return",
"FslIResult",
"(",
"''",
",",
"FslIResult",
".",
"SUCCESS",
",",
"''",
")"
] | https://github.com/wanderine/BROCCOLI/blob/ff7613de40d97429ba76ee2948cb5e1d7dd991d0/code/bids/fslinstaller.py#L1419-L1520 | |
KratosMultiphysics/Kratos | 0000833054ed0503424eb28205d6508d9ca6cbbc | applications/MultilevelMonteCarloApplication/external_libraries/XMC/xmc/methodDefs_randomGeneratorWrapper/generator.py | python | returnRayleighAndUniform | (*args) | return [rayleigh_rv, uniform_rv] | Return one rayleigh and uniformly distributed random variables
with given parameters | Return one rayleigh and uniformly distributed random variables
with given parameters | [
"Return",
"one",
"rayleigh",
"and",
"uniformly",
"distributed",
"random",
"variables",
"with",
"given",
"parameters"
] | def returnRayleighAndUniform(*args):
"""
Return one rayleigh and uniformly distributed random variables
with given parameters
"""
rayleigh_rv = np.random.rayleigh(args[0],size=1)[0]
uniform_rv = np.random.uniform(args[1], args[2], 1)
return [rayleigh_rv, uniform_rv] | [
"def",
"returnRayleighAndUniform",
"(",
"*",
"args",
")",
":",
"rayleigh_rv",
"=",
"np",
".",
"random",
".",
"rayleigh",
"(",
"args",
"[",
"0",
"]",
",",
"size",
"=",
"1",
")",
"[",
"0",
"]",
"uniform_rv",
"=",
"np",
".",
"random",
".",
"uniform",
"(",
"args",
"[",
"1",
"]",
",",
"args",
"[",
"2",
"]",
",",
"1",
")",
"return",
"[",
"rayleigh_rv",
",",
"uniform_rv",
"]"
] | https://github.com/KratosMultiphysics/Kratos/blob/0000833054ed0503424eb28205d6508d9ca6cbbc/applications/MultilevelMonteCarloApplication/external_libraries/XMC/xmc/methodDefs_randomGeneratorWrapper/generator.py#L65-L72 | |
GSORF/Visual-GPS-SLAM | 9e327108d6be3fd8dc80c8f3bcc329237bacf230 | 03_Application/video2bag/img2bag.py | python | CreateBag | (args) | Creates the actual bag file by successively adding images | Creates the actual bag file by successively adding images | [
"Creates",
"the",
"actual",
"bag",
"file",
"by",
"successively",
"adding",
"images"
] | def CreateBag(args):
'''Creates the actual bag file by successively adding images'''
all_imgs, left_imgs, right_imgs = GetFilesFromDir(args[0])
if len(all_imgs) <= 0:
print("No images found in %s" % args[0])
exit()
if len(left_imgs) > 0 and len(right_imgs) > 0:
# create bagfile with stereo camera image pairs
CreateStereoBag(left_imgs, right_imgs, args[1])
else:
# create bagfile with mono camera image stream
CreateMonoBag(all_imgs, args[1]) | [
"def",
"CreateBag",
"(",
"args",
")",
":",
"all_imgs",
",",
"left_imgs",
",",
"right_imgs",
"=",
"GetFilesFromDir",
"(",
"args",
"[",
"0",
"]",
")",
"if",
"len",
"(",
"all_imgs",
")",
"<=",
"0",
":",
"print",
"(",
"\"No images found in %s\"",
"%",
"args",
"[",
"0",
"]",
")",
"exit",
"(",
")",
"if",
"len",
"(",
"left_imgs",
")",
">",
"0",
"and",
"len",
"(",
"right_imgs",
")",
">",
"0",
":",
"# create bagfile with stereo camera image pairs",
"CreateStereoBag",
"(",
"left_imgs",
",",
"right_imgs",
",",
"args",
"[",
"1",
"]",
")",
"else",
":",
"# create bagfile with mono camera image stream",
"CreateMonoBag",
"(",
"all_imgs",
",",
"args",
"[",
"1",
"]",
")"
] | https://github.com/GSORF/Visual-GPS-SLAM/blob/9e327108d6be3fd8dc80c8f3bcc329237bacf230/03_Application/video2bag/img2bag.py#L128-L140 | ||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Lib/locale.py | python | currency | (val, symbol=True, grouping=False, international=False) | return s.replace('<', '').replace('>', '') | Formats val according to the currency settings
in the current locale. | Formats val according to the currency settings
in the current locale. | [
"Formats",
"val",
"according",
"to",
"the",
"currency",
"settings",
"in",
"the",
"current",
"locale",
"."
] | def currency(val, symbol=True, grouping=False, international=False):
"""Formats val according to the currency settings
in the current locale."""
conv = localeconv()
# check for illegal values
digits = conv[international and 'int_frac_digits' or 'frac_digits']
if digits == 127:
raise ValueError("Currency formatting is not possible using "
"the 'C' locale.")
s = format('%%.%if' % digits, abs(val), grouping, monetary=True)
# '<' and '>' are markers if the sign must be inserted between symbol and value
s = '<' + s + '>'
if symbol:
smb = conv[international and 'int_curr_symbol' or 'currency_symbol']
precedes = conv[val<0 and 'n_cs_precedes' or 'p_cs_precedes']
separated = conv[val<0 and 'n_sep_by_space' or 'p_sep_by_space']
if precedes:
s = smb + (separated and ' ' or '') + s
else:
s = s + (separated and ' ' or '') + smb
sign_pos = conv[val<0 and 'n_sign_posn' or 'p_sign_posn']
sign = conv[val<0 and 'negative_sign' or 'positive_sign']
if sign_pos == 0:
s = '(' + s + ')'
elif sign_pos == 1:
s = sign + s
elif sign_pos == 2:
s = s + sign
elif sign_pos == 3:
s = s.replace('<', sign)
elif sign_pos == 4:
s = s.replace('>', sign)
else:
# the default if nothing specified;
# this should be the most fitting sign position
s = sign + s
return s.replace('<', '').replace('>', '') | [
"def",
"currency",
"(",
"val",
",",
"symbol",
"=",
"True",
",",
"grouping",
"=",
"False",
",",
"international",
"=",
"False",
")",
":",
"conv",
"=",
"localeconv",
"(",
")",
"# check for illegal values",
"digits",
"=",
"conv",
"[",
"international",
"and",
"'int_frac_digits'",
"or",
"'frac_digits'",
"]",
"if",
"digits",
"==",
"127",
":",
"raise",
"ValueError",
"(",
"\"Currency formatting is not possible using \"",
"\"the 'C' locale.\"",
")",
"s",
"=",
"format",
"(",
"'%%.%if'",
"%",
"digits",
",",
"abs",
"(",
"val",
")",
",",
"grouping",
",",
"monetary",
"=",
"True",
")",
"# '<' and '>' are markers if the sign must be inserted between symbol and value",
"s",
"=",
"'<'",
"+",
"s",
"+",
"'>'",
"if",
"symbol",
":",
"smb",
"=",
"conv",
"[",
"international",
"and",
"'int_curr_symbol'",
"or",
"'currency_symbol'",
"]",
"precedes",
"=",
"conv",
"[",
"val",
"<",
"0",
"and",
"'n_cs_precedes'",
"or",
"'p_cs_precedes'",
"]",
"separated",
"=",
"conv",
"[",
"val",
"<",
"0",
"and",
"'n_sep_by_space'",
"or",
"'p_sep_by_space'",
"]",
"if",
"precedes",
":",
"s",
"=",
"smb",
"+",
"(",
"separated",
"and",
"' '",
"or",
"''",
")",
"+",
"s",
"else",
":",
"s",
"=",
"s",
"+",
"(",
"separated",
"and",
"' '",
"or",
"''",
")",
"+",
"smb",
"sign_pos",
"=",
"conv",
"[",
"val",
"<",
"0",
"and",
"'n_sign_posn'",
"or",
"'p_sign_posn'",
"]",
"sign",
"=",
"conv",
"[",
"val",
"<",
"0",
"and",
"'negative_sign'",
"or",
"'positive_sign'",
"]",
"if",
"sign_pos",
"==",
"0",
":",
"s",
"=",
"'('",
"+",
"s",
"+",
"')'",
"elif",
"sign_pos",
"==",
"1",
":",
"s",
"=",
"sign",
"+",
"s",
"elif",
"sign_pos",
"==",
"2",
":",
"s",
"=",
"s",
"+",
"sign",
"elif",
"sign_pos",
"==",
"3",
":",
"s",
"=",
"s",
".",
"replace",
"(",
"'<'",
",",
"sign",
")",
"elif",
"sign_pos",
"==",
"4",
":",
"s",
"=",
"s",
".",
"replace",
"(",
"'>'",
",",
"sign",
")",
"else",
":",
"# the default if nothing specified;",
"# this should be the most fitting sign position",
"s",
"=",
"sign",
"+",
"s",
"return",
"s",
".",
"replace",
"(",
"'<'",
",",
"''",
")",
".",
"replace",
"(",
"'>'",
",",
"''",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/locale.py#L258-L301 | |
PaddlePaddle/PaddleOCR | b756bf5f8c90142e0d89d3db0163965c686b6ffe | ppocr/postprocess/locality_aware_nms.py | python | weighted_merge | (g, p) | return g | Weighted merge. | Weighted merge. | [
"Weighted",
"merge",
"."
] | def weighted_merge(g, p):
"""
Weighted merge.
"""
g[:8] = (g[8] * g[:8] + p[8] * p[:8]) / (g[8] + p[8])
g[8] = (g[8] + p[8])
return g | [
"def",
"weighted_merge",
"(",
"g",
",",
"p",
")",
":",
"g",
"[",
":",
"8",
"]",
"=",
"(",
"g",
"[",
"8",
"]",
"*",
"g",
"[",
":",
"8",
"]",
"+",
"p",
"[",
"8",
"]",
"*",
"p",
"[",
":",
"8",
"]",
")",
"/",
"(",
"g",
"[",
"8",
"]",
"+",
"p",
"[",
"8",
"]",
")",
"g",
"[",
"8",
"]",
"=",
"(",
"g",
"[",
"8",
"]",
"+",
"p",
"[",
"8",
"]",
")",
"return",
"g"
] | https://github.com/PaddlePaddle/PaddleOCR/blob/b756bf5f8c90142e0d89d3db0163965c686b6ffe/ppocr/postprocess/locality_aware_nms.py#L46-L52 | |
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | deps/src/libxml2-2.9.1/python/libxml2.py | python | xmlNode.nodePath | (self) | return ret | Build a structure based Path for the given node | Build a structure based Path for the given node | [
"Build",
"a",
"structure",
"based",
"Path",
"for",
"the",
"given",
"node"
] | def nodePath(self):
"""Build a structure based Path for the given node """
ret = libxml2mod.xmlGetNodePath(self._o)
return ret | [
"def",
"nodePath",
"(",
"self",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlGetNodePath",
"(",
"self",
".",
"_o",
")",
"return",
"ret"
] | https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2.py#L3440-L3443 | |
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/training/tracking/layer_utils.py | python | gather_non_trainable_weights | (trainable, sub_layers, extra_variables) | return weights + non_trainable_extra_variables | Lists the non-trainable weights for an object with sub-layers.
Args:
trainable: Whether the object collecting the variables is trainable.
sub_layers: A flat list of Layer objects owned by this object, to collect
variables from.
extra_variables: Any extra variables to include. Their `.trainable` property
is used to categorize them.
Returns:
A list of collected non-trainable weights/variables. | Lists the non-trainable weights for an object with sub-layers. | [
"Lists",
"the",
"non",
"-",
"trainable",
"weights",
"for",
"an",
"object",
"with",
"sub",
"-",
"layers",
"."
] | def gather_non_trainable_weights(trainable, sub_layers, extra_variables):
"""Lists the non-trainable weights for an object with sub-layers.
Args:
trainable: Whether the object collecting the variables is trainable.
sub_layers: A flat list of Layer objects owned by this object, to collect
variables from.
extra_variables: Any extra variables to include. Their `.trainable` property
is used to categorize them.
Returns:
A list of collected non-trainable weights/variables.
"""
trainable_extra_variables = []
non_trainable_extra_variables = []
for v in extra_variables:
if v.trainable:
trainable_extra_variables.append(v)
else:
non_trainable_extra_variables.append(v)
weights = []
for layer in sub_layers:
weights += layer.non_trainable_weights
if not trainable:
trainable_weights = []
for layer in sub_layers:
trainable_weights += layer.trainable_weights
return (trainable_weights + trainable_extra_variables
+ weights + non_trainable_extra_variables)
return weights + non_trainable_extra_variables | [
"def",
"gather_non_trainable_weights",
"(",
"trainable",
",",
"sub_layers",
",",
"extra_variables",
")",
":",
"trainable_extra_variables",
"=",
"[",
"]",
"non_trainable_extra_variables",
"=",
"[",
"]",
"for",
"v",
"in",
"extra_variables",
":",
"if",
"v",
".",
"trainable",
":",
"trainable_extra_variables",
".",
"append",
"(",
"v",
")",
"else",
":",
"non_trainable_extra_variables",
".",
"append",
"(",
"v",
")",
"weights",
"=",
"[",
"]",
"for",
"layer",
"in",
"sub_layers",
":",
"weights",
"+=",
"layer",
".",
"non_trainable_weights",
"if",
"not",
"trainable",
":",
"trainable_weights",
"=",
"[",
"]",
"for",
"layer",
"in",
"sub_layers",
":",
"trainable_weights",
"+=",
"layer",
".",
"trainable_weights",
"return",
"(",
"trainable_weights",
"+",
"trainable_extra_variables",
"+",
"weights",
"+",
"non_trainable_extra_variables",
")",
"return",
"weights",
"+",
"non_trainable_extra_variables"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/training/tracking/layer_utils.py#L85-L114 | |
NVIDIA/TensorRT | 42805f078052daad1a98bc5965974fcffaad0960 | tools/pytorch-quantization/pytorch_quantization/quant_modules.py | python | QuantModuleReplacementHelper.restore_float_modules | (self) | Reverse the effect of monkey patch by using the orginal_func_map to replace back the
original modules. | Reverse the effect of monkey patch by using the orginal_func_map to replace back the
original modules. | [
"Reverse",
"the",
"effect",
"of",
"monkey",
"patch",
"by",
"using",
"the",
"orginal_func_map",
"to",
"replace",
"back",
"the",
"original",
"modules",
"."
] | def restore_float_modules(self):
"""
Reverse the effect of monkey patch by using the orginal_func_map to replace back the
original modules.
"""
for entry in self.orginal_func_map:
setattr(entry.orig_mod, entry.mod_name, entry.replace_mod) | [
"def",
"restore_float_modules",
"(",
"self",
")",
":",
"for",
"entry",
"in",
"self",
".",
"orginal_func_map",
":",
"setattr",
"(",
"entry",
".",
"orig_mod",
",",
"entry",
".",
"mod_name",
",",
"entry",
".",
"replace_mod",
")"
] | https://github.com/NVIDIA/TensorRT/blob/42805f078052daad1a98bc5965974fcffaad0960/tools/pytorch-quantization/pytorch_quantization/quant_modules.py#L108-L114 | ||
krishauser/Klampt | 972cc83ea5befac3f653c1ba20f80155768ad519 | Python/klampt/plan/robotcspace.py | python | EmbeddedRobotCSpace.sendPathToController | (self,path,controller) | Sends a planned path so that it is executed correctly by the
controller (assumes a fully actuated robot).
Args:
path (list of Configs): a path in the embedded space or the
ambient space, as returned by a planner.
controller (SimRobotController): the robot's controller | Sends a planned path so that it is executed correctly by the
controller (assumes a fully actuated robot). | [
"Sends",
"a",
"planned",
"path",
"so",
"that",
"it",
"is",
"executed",
"correctly",
"by",
"the",
"controller",
"(",
"assumes",
"a",
"fully",
"actuated",
"robot",
")",
"."
] | def sendPathToController(self,path,controller):
"""Sends a planned path so that it is executed correctly by the
controller (assumes a fully actuated robot).
Args:
path (list of Configs): a path in the embedded space or the
ambient space, as returned by a planner.
controller (SimRobotController): the robot's controller
"""
if len(path[0]) == len(self.mapping):
path = self.liftPath(path)
if hasattr(self.ambientspace,'discretizePath'):
path = self.ambientspace.discretizePath(path)
self.ambientspace.sendPathToController(path,controller) | [
"def",
"sendPathToController",
"(",
"self",
",",
"path",
",",
"controller",
")",
":",
"if",
"len",
"(",
"path",
"[",
"0",
"]",
")",
"==",
"len",
"(",
"self",
".",
"mapping",
")",
":",
"path",
"=",
"self",
".",
"liftPath",
"(",
"path",
")",
"if",
"hasattr",
"(",
"self",
".",
"ambientspace",
",",
"'discretizePath'",
")",
":",
"path",
"=",
"self",
".",
"ambientspace",
".",
"discretizePath",
"(",
"path",
")",
"self",
".",
"ambientspace",
".",
"sendPathToController",
"(",
"path",
",",
"controller",
")"
] | https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/klampt/plan/robotcspace.py#L405-L418 | ||
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | tensorflow/python/training/learning_rate_decay.py | python | natural_exp_decay | (learning_rate, global_step, decay_steps, decay_rate,
staircase=False, name=None) | Applies natural exponential decay to the initial learning rate.
When training a model, it is often recommended to lower the learning rate as
the training progresses. This function applies an exponential decay function
to a provided initial learning rate. It requires an `global_step` value to
compute the decayed learning rate. You can just pass a TensorFlow variable
that you increment at each training step.
The function returns the decayed learning rate. It is computed as:
```python
decayed_learning_rate = learning_rate * exp(-decay_rate * global_step)
```
Example: decay exponentially with a base of 0.96:
```python
...
global_step = tf.Variable(0, trainable=False)
learning_rate = 0.1
k = 0.5
learning_rate = tf.train.exponential_time_decay(learning_rate, global_step, k)
# Passing global_step to minimize() will increment it at each step.
learning_step = (
tf.train.GradientDescentOptimizer(learning_rate)
.minimize(...my loss..., global_step=global_step)
)
```
Args:
learning_rate: A scalar `float32` or `float64` `Tensor` or a
Python number. The initial learning rate.
global_step: A Python number.
Global step to use for the decay computation. Must not be negative.
decay_steps: How often to apply decay.
decay_rate: A Python number. The decay rate.
staircase: Whether to apply decay in a discrete staircase, as opposed to
continuous, fashion.
name: String. Optional name of the operation. Defaults to
'ExponentialTimeDecay'.
Returns:
A scalar `Tensor` of the same type as `learning_rate`. The decayed
learning rate.
Raises:
ValueError: if `global_step` is not supplied. | Applies natural exponential decay to the initial learning rate. | [
"Applies",
"natural",
"exponential",
"decay",
"to",
"the",
"initial",
"learning",
"rate",
"."
] | def natural_exp_decay(learning_rate, global_step, decay_steps, decay_rate,
staircase=False, name=None):
"""Applies natural exponential decay to the initial learning rate.
When training a model, it is often recommended to lower the learning rate as
the training progresses. This function applies an exponential decay function
to a provided initial learning rate. It requires an `global_step` value to
compute the decayed learning rate. You can just pass a TensorFlow variable
that you increment at each training step.
The function returns the decayed learning rate. It is computed as:
```python
decayed_learning_rate = learning_rate * exp(-decay_rate * global_step)
```
Example: decay exponentially with a base of 0.96:
```python
...
global_step = tf.Variable(0, trainable=False)
learning_rate = 0.1
k = 0.5
learning_rate = tf.train.exponential_time_decay(learning_rate, global_step, k)
# Passing global_step to minimize() will increment it at each step.
learning_step = (
tf.train.GradientDescentOptimizer(learning_rate)
.minimize(...my loss..., global_step=global_step)
)
```
Args:
learning_rate: A scalar `float32` or `float64` `Tensor` or a
Python number. The initial learning rate.
global_step: A Python number.
Global step to use for the decay computation. Must not be negative.
decay_steps: How often to apply decay.
decay_rate: A Python number. The decay rate.
staircase: Whether to apply decay in a discrete staircase, as opposed to
continuous, fashion.
name: String. Optional name of the operation. Defaults to
'ExponentialTimeDecay'.
Returns:
A scalar `Tensor` of the same type as `learning_rate`. The decayed
learning rate.
Raises:
ValueError: if `global_step` is not supplied.
"""
if global_step is None:
raise ValueError("global_step is required for natural_exp_decay.")
with ops.name_scope(name, "NaturalExpDecay",
[learning_rate, global_step, decay_rate]) as name:
learning_rate = ops.convert_to_tensor(learning_rate, name="learning_rate")
dtype = learning_rate.dtype
global_step = math_ops.cast(global_step, dtype)
decay_steps = math_ops.cast(decay_steps, dtype)
decay_rate = math_ops.cast(decay_rate, dtype)
p = global_step / decay_steps
if staircase:
p = math_ops.floor(p)
exponent = math_ops.exp(math_ops.multiply(math_ops.negative(decay_rate), p))
return math_ops.multiply(learning_rate, exponent, name=name) | [
"def",
"natural_exp_decay",
"(",
"learning_rate",
",",
"global_step",
",",
"decay_steps",
",",
"decay_rate",
",",
"staircase",
"=",
"False",
",",
"name",
"=",
"None",
")",
":",
"if",
"global_step",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"global_step is required for natural_exp_decay.\"",
")",
"with",
"ops",
".",
"name_scope",
"(",
"name",
",",
"\"NaturalExpDecay\"",
",",
"[",
"learning_rate",
",",
"global_step",
",",
"decay_rate",
"]",
")",
"as",
"name",
":",
"learning_rate",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"learning_rate",
",",
"name",
"=",
"\"learning_rate\"",
")",
"dtype",
"=",
"learning_rate",
".",
"dtype",
"global_step",
"=",
"math_ops",
".",
"cast",
"(",
"global_step",
",",
"dtype",
")",
"decay_steps",
"=",
"math_ops",
".",
"cast",
"(",
"decay_steps",
",",
"dtype",
")",
"decay_rate",
"=",
"math_ops",
".",
"cast",
"(",
"decay_rate",
",",
"dtype",
")",
"p",
"=",
"global_step",
"/",
"decay_steps",
"if",
"staircase",
":",
"p",
"=",
"math_ops",
".",
"floor",
"(",
"p",
")",
"exponent",
"=",
"math_ops",
".",
"exp",
"(",
"math_ops",
".",
"multiply",
"(",
"math_ops",
".",
"negative",
"(",
"decay_rate",
")",
",",
"p",
")",
")",
"return",
"math_ops",
".",
"multiply",
"(",
"learning_rate",
",",
"exponent",
",",
"name",
"=",
"name",
")"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/training/learning_rate_decay.py#L282-L346 | ||
natanielruiz/android-yolo | 1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f | jni-build/jni/include/tensorflow/tensorboard/backend/handler.py | python | TensorboardHandler.do_GET | (self) | Handler for all get requests. | Handler for all get requests. | [
"Handler",
"for",
"all",
"get",
"requests",
"."
] | def do_GET(self): # pylint: disable=invalid-name
"""Handler for all get requests."""
parsed_url = urlparse.urlparse(self.path)
# Remove a trailing slash, if present.
clean_path = parsed_url.path
if clean_path.endswith('/'):
clean_path = clean_path[:-1]
data_handlers = {
DATA_PREFIX + SCALARS_ROUTE: self._serve_scalars,
DATA_PREFIX + GRAPH_ROUTE: self._serve_graph,
DATA_PREFIX + RUN_METADATA_ROUTE: self._serve_run_metadata,
DATA_PREFIX + HISTOGRAMS_ROUTE: self._serve_histograms,
DATA_PREFIX + COMPRESSED_HISTOGRAMS_ROUTE:
self._serve_compressed_histograms,
DATA_PREFIX + IMAGES_ROUTE: self._serve_images,
DATA_PREFIX + INDIVIDUAL_IMAGE_ROUTE: self._serve_image,
DATA_PREFIX + AUDIO_ROUTE: self._serve_audio,
DATA_PREFIX + INDIVIDUAL_AUDIO_ROUTE: self._serve_individual_audio,
DATA_PREFIX + RUNS_ROUTE: self._serve_runs,
'/app.js': self._serve_js
}
query_params = urlparse.parse_qs(parsed_url.query)
# parse_qs returns a list of values for each key; we're only interested in
# the first.
for key in query_params:
value_count = len(query_params[key])
if value_count != 1:
self.send_error(
400, 'query parameter %s should have exactly one value, had %d' %
(key, value_count))
return
query_params[key] = query_params[key][0]
if clean_path in data_handlers:
data_handlers[clean_path](query_params)
elif clean_path in TAB_ROUTES:
self._serve_index(query_params)
else:
self._serve_static_file(clean_path) | [
"def",
"do_GET",
"(",
"self",
")",
":",
"# pylint: disable=invalid-name",
"parsed_url",
"=",
"urlparse",
".",
"urlparse",
"(",
"self",
".",
"path",
")",
"# Remove a trailing slash, if present.",
"clean_path",
"=",
"parsed_url",
".",
"path",
"if",
"clean_path",
".",
"endswith",
"(",
"'/'",
")",
":",
"clean_path",
"=",
"clean_path",
"[",
":",
"-",
"1",
"]",
"data_handlers",
"=",
"{",
"DATA_PREFIX",
"+",
"SCALARS_ROUTE",
":",
"self",
".",
"_serve_scalars",
",",
"DATA_PREFIX",
"+",
"GRAPH_ROUTE",
":",
"self",
".",
"_serve_graph",
",",
"DATA_PREFIX",
"+",
"RUN_METADATA_ROUTE",
":",
"self",
".",
"_serve_run_metadata",
",",
"DATA_PREFIX",
"+",
"HISTOGRAMS_ROUTE",
":",
"self",
".",
"_serve_histograms",
",",
"DATA_PREFIX",
"+",
"COMPRESSED_HISTOGRAMS_ROUTE",
":",
"self",
".",
"_serve_compressed_histograms",
",",
"DATA_PREFIX",
"+",
"IMAGES_ROUTE",
":",
"self",
".",
"_serve_images",
",",
"DATA_PREFIX",
"+",
"INDIVIDUAL_IMAGE_ROUTE",
":",
"self",
".",
"_serve_image",
",",
"DATA_PREFIX",
"+",
"AUDIO_ROUTE",
":",
"self",
".",
"_serve_audio",
",",
"DATA_PREFIX",
"+",
"INDIVIDUAL_AUDIO_ROUTE",
":",
"self",
".",
"_serve_individual_audio",
",",
"DATA_PREFIX",
"+",
"RUNS_ROUTE",
":",
"self",
".",
"_serve_runs",
",",
"'/app.js'",
":",
"self",
".",
"_serve_js",
"}",
"query_params",
"=",
"urlparse",
".",
"parse_qs",
"(",
"parsed_url",
".",
"query",
")",
"# parse_qs returns a list of values for each key; we're only interested in",
"# the first.",
"for",
"key",
"in",
"query_params",
":",
"value_count",
"=",
"len",
"(",
"query_params",
"[",
"key",
"]",
")",
"if",
"value_count",
"!=",
"1",
":",
"self",
".",
"send_error",
"(",
"400",
",",
"'query parameter %s should have exactly one value, had %d'",
"%",
"(",
"key",
",",
"value_count",
")",
")",
"return",
"query_params",
"[",
"key",
"]",
"=",
"query_params",
"[",
"key",
"]",
"[",
"0",
"]",
"if",
"clean_path",
"in",
"data_handlers",
":",
"data_handlers",
"[",
"clean_path",
"]",
"(",
"query_params",
")",
"elif",
"clean_path",
"in",
"TAB_ROUTES",
":",
"self",
".",
"_serve_index",
"(",
"query_params",
")",
"else",
":",
"self",
".",
"_serve_static_file",
"(",
"clean_path",
")"
] | https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/tensorboard/backend/handler.py#L549-L590 | ||
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/python/ops/variable_scope.py | python | VariableScope.__init__ | (self,
reuse,
name="",
initializer=None,
regularizer=None,
caching_device=None,
partitioner=None,
custom_getter=None,
name_scope="",
dtype=dtypes.float32,
use_resource=None) | Creates a new VariableScope with the given properties. | Creates a new VariableScope with the given properties. | [
"Creates",
"a",
"new",
"VariableScope",
"with",
"the",
"given",
"properties",
"."
] | def __init__(self,
reuse,
name="",
initializer=None,
regularizer=None,
caching_device=None,
partitioner=None,
custom_getter=None,
name_scope="",
dtype=dtypes.float32,
use_resource=None):
"""Creates a new VariableScope with the given properties."""
self._name = name
self._initializer = initializer
self._regularizer = regularizer
self._reuse = reuse
self._caching_device = caching_device
self._partitioner = partitioner
self._custom_getter = custom_getter
self._name_scope = name_scope
self._dtype = dtype
self._use_resource = use_resource | [
"def",
"__init__",
"(",
"self",
",",
"reuse",
",",
"name",
"=",
"\"\"",
",",
"initializer",
"=",
"None",
",",
"regularizer",
"=",
"None",
",",
"caching_device",
"=",
"None",
",",
"partitioner",
"=",
"None",
",",
"custom_getter",
"=",
"None",
",",
"name_scope",
"=",
"\"\"",
",",
"dtype",
"=",
"dtypes",
".",
"float32",
",",
"use_resource",
"=",
"None",
")",
":",
"self",
".",
"_name",
"=",
"name",
"self",
".",
"_initializer",
"=",
"initializer",
"self",
".",
"_regularizer",
"=",
"regularizer",
"self",
".",
"_reuse",
"=",
"reuse",
"self",
".",
"_caching_device",
"=",
"caching_device",
"self",
".",
"_partitioner",
"=",
"partitioner",
"self",
".",
"_custom_getter",
"=",
"custom_getter",
"self",
".",
"_name_scope",
"=",
"name_scope",
"self",
".",
"_dtype",
"=",
"dtype",
"self",
".",
"_use_resource",
"=",
"use_resource"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/variable_scope.py#L803-L824 | ||
kevin-ssy/Optical-Flow-Guided-Feature | 07d4501a29002ee7821c38c1820e4a64c1acf6e8 | lib/caffe-action/scripts/cpp_lint.py | python | FilesBelongToSameModule | (filename_cc, filename_h) | return files_belong_to_same_module, common_path | Check if these two filenames belong to the same module.
The concept of a 'module' here is a as follows:
foo.h, foo-inl.h, foo.cc, foo_test.cc and foo_unittest.cc belong to the
same 'module' if they are in the same directory.
some/path/public/xyzzy and some/path/internal/xyzzy are also considered
to belong to the same module here.
If the filename_cc contains a longer path than the filename_h, for example,
'/absolute/path/to/base/sysinfo.cc', and this file would include
'base/sysinfo.h', this function also produces the prefix needed to open the
header. This is used by the caller of this function to more robustly open the
header file. We don't have access to the real include paths in this context,
so we need this guesswork here.
Known bugs: tools/base/bar.cc and base/bar.h belong to the same module
according to this implementation. Because of this, this function gives
some false positives. This should be sufficiently rare in practice.
Args:
filename_cc: is the path for the .cc file
filename_h: is the path for the header path
Returns:
Tuple with a bool and a string:
bool: True if filename_cc and filename_h belong to the same module.
string: the additional prefix needed to open the header file. | Check if these two filenames belong to the same module. | [
"Check",
"if",
"these",
"two",
"filenames",
"belong",
"to",
"the",
"same",
"module",
"."
] | def FilesBelongToSameModule(filename_cc, filename_h):
"""Check if these two filenames belong to the same module.
The concept of a 'module' here is a as follows:
foo.h, foo-inl.h, foo.cc, foo_test.cc and foo_unittest.cc belong to the
same 'module' if they are in the same directory.
some/path/public/xyzzy and some/path/internal/xyzzy are also considered
to belong to the same module here.
If the filename_cc contains a longer path than the filename_h, for example,
'/absolute/path/to/base/sysinfo.cc', and this file would include
'base/sysinfo.h', this function also produces the prefix needed to open the
header. This is used by the caller of this function to more robustly open the
header file. We don't have access to the real include paths in this context,
so we need this guesswork here.
Known bugs: tools/base/bar.cc and base/bar.h belong to the same module
according to this implementation. Because of this, this function gives
some false positives. This should be sufficiently rare in practice.
Args:
filename_cc: is the path for the .cc file
filename_h: is the path for the header path
Returns:
Tuple with a bool and a string:
bool: True if filename_cc and filename_h belong to the same module.
string: the additional prefix needed to open the header file.
"""
if not filename_cc.endswith('.cc'):
return (False, '')
filename_cc = filename_cc[:-len('.cc')]
if filename_cc.endswith('_unittest'):
filename_cc = filename_cc[:-len('_unittest')]
elif filename_cc.endswith('_test'):
filename_cc = filename_cc[:-len('_test')]
filename_cc = filename_cc.replace('/public/', '/')
filename_cc = filename_cc.replace('/internal/', '/')
if not filename_h.endswith('.h'):
return (False, '')
filename_h = filename_h[:-len('.h')]
if filename_h.endswith('-inl'):
filename_h = filename_h[:-len('-inl')]
filename_h = filename_h.replace('/public/', '/')
filename_h = filename_h.replace('/internal/', '/')
files_belong_to_same_module = filename_cc.endswith(filename_h)
common_path = ''
if files_belong_to_same_module:
common_path = filename_cc[:-len(filename_h)]
return files_belong_to_same_module, common_path | [
"def",
"FilesBelongToSameModule",
"(",
"filename_cc",
",",
"filename_h",
")",
":",
"if",
"not",
"filename_cc",
".",
"endswith",
"(",
"'.cc'",
")",
":",
"return",
"(",
"False",
",",
"''",
")",
"filename_cc",
"=",
"filename_cc",
"[",
":",
"-",
"len",
"(",
"'.cc'",
")",
"]",
"if",
"filename_cc",
".",
"endswith",
"(",
"'_unittest'",
")",
":",
"filename_cc",
"=",
"filename_cc",
"[",
":",
"-",
"len",
"(",
"'_unittest'",
")",
"]",
"elif",
"filename_cc",
".",
"endswith",
"(",
"'_test'",
")",
":",
"filename_cc",
"=",
"filename_cc",
"[",
":",
"-",
"len",
"(",
"'_test'",
")",
"]",
"filename_cc",
"=",
"filename_cc",
".",
"replace",
"(",
"'/public/'",
",",
"'/'",
")",
"filename_cc",
"=",
"filename_cc",
".",
"replace",
"(",
"'/internal/'",
",",
"'/'",
")",
"if",
"not",
"filename_h",
".",
"endswith",
"(",
"'.h'",
")",
":",
"return",
"(",
"False",
",",
"''",
")",
"filename_h",
"=",
"filename_h",
"[",
":",
"-",
"len",
"(",
"'.h'",
")",
"]",
"if",
"filename_h",
".",
"endswith",
"(",
"'-inl'",
")",
":",
"filename_h",
"=",
"filename_h",
"[",
":",
"-",
"len",
"(",
"'-inl'",
")",
"]",
"filename_h",
"=",
"filename_h",
".",
"replace",
"(",
"'/public/'",
",",
"'/'",
")",
"filename_h",
"=",
"filename_h",
".",
"replace",
"(",
"'/internal/'",
",",
"'/'",
")",
"files_belong_to_same_module",
"=",
"filename_cc",
".",
"endswith",
"(",
"filename_h",
")",
"common_path",
"=",
"''",
"if",
"files_belong_to_same_module",
":",
"common_path",
"=",
"filename_cc",
"[",
":",
"-",
"len",
"(",
"filename_h",
")",
"]",
"return",
"files_belong_to_same_module",
",",
"common_path"
] | https://github.com/kevin-ssy/Optical-Flow-Guided-Feature/blob/07d4501a29002ee7821c38c1820e4a64c1acf6e8/lib/caffe-action/scripts/cpp_lint.py#L4399-L4451 | |
dscharrer/innoextract | 5519d364cc8898f906f6285d81a87ab8c5469cde | cmake/cpplint.py | python | CleanseComments | (line) | return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line) | Removes //-comments and single-line C-style /* */ comments.
Args:
line: A line of C++ source.
Returns:
The line with single-line comments removed. | Removes //-comments and single-line C-style /* */ comments. | [
"Removes",
"//",
"-",
"comments",
"and",
"single",
"-",
"line",
"C",
"-",
"style",
"/",
"*",
"*",
"/",
"comments",
"."
] | def CleanseComments(line):
"""Removes //-comments and single-line C-style /* */ comments.
Args:
line: A line of C++ source.
Returns:
The line with single-line comments removed.
"""
commentpos = line.find('//')
if commentpos != -1 and not IsCppString(line[:commentpos]):
line = line[:commentpos].rstrip()
# get rid of /* ... */
return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line) | [
"def",
"CleanseComments",
"(",
"line",
")",
":",
"commentpos",
"=",
"line",
".",
"find",
"(",
"'//'",
")",
"if",
"commentpos",
"!=",
"-",
"1",
"and",
"not",
"IsCppString",
"(",
"line",
"[",
":",
"commentpos",
"]",
")",
":",
"line",
"=",
"line",
"[",
":",
"commentpos",
"]",
".",
"rstrip",
"(",
")",
"# get rid of /* ... */",
"return",
"_RE_PATTERN_CLEANSE_LINE_C_COMMENTS",
".",
"sub",
"(",
"''",
",",
"line",
")"
] | https://github.com/dscharrer/innoextract/blob/5519d364cc8898f906f6285d81a87ab8c5469cde/cmake/cpplint.py#L970-L983 | |
BlzFans/wke | b0fa21158312e40c5fbd84682d643022b6c34a93 | cygwin/lib/python2.6/pydoc.py | python | HTMLDoc.formatvalue | (self, object) | return self.grey('=' + self.repr(object)) | Format an argument default value as text. | Format an argument default value as text. | [
"Format",
"an",
"argument",
"default",
"value",
"as",
"text",
"."
] | def formatvalue(self, object):
"""Format an argument default value as text."""
return self.grey('=' + self.repr(object)) | [
"def",
"formatvalue",
"(",
"self",
",",
"object",
")",
":",
"return",
"self",
".",
"grey",
"(",
"'='",
"+",
"self",
".",
"repr",
"(",
"object",
")",
")"
] | https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/pydoc.py#L843-L845 | |
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib-tk/Tkinter.py | python | Canvas.itemconfigure | (self, tagOrId, cnf=None, **kw) | return self._configure(('itemconfigure', tagOrId), cnf, kw) | Configure resources of an item TAGORID.
The values for resources are specified as keyword
arguments. To get an overview about
the allowed keyword arguments call the method without arguments. | Configure resources of an item TAGORID. | [
"Configure",
"resources",
"of",
"an",
"item",
"TAGORID",
"."
] | def itemconfigure(self, tagOrId, cnf=None, **kw):
"""Configure resources of an item TAGORID.
The values for resources are specified as keyword
arguments. To get an overview about
the allowed keyword arguments call the method without arguments.
"""
return self._configure(('itemconfigure', tagOrId), cnf, kw) | [
"def",
"itemconfigure",
"(",
"self",
",",
"tagOrId",
",",
"cnf",
"=",
"None",
",",
"*",
"*",
"kw",
")",
":",
"return",
"self",
".",
"_configure",
"(",
"(",
"'itemconfigure'",
",",
"tagOrId",
")",
",",
"cnf",
",",
"kw",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib-tk/Tkinter.py#L2342-L2349 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/_misc.py | python | Clipboard.Close | (*args, **kwargs) | return _misc_.Clipboard_Close(*args, **kwargs) | Close(self)
Closes the clipboard. | Close(self) | [
"Close",
"(",
"self",
")"
] | def Close(*args, **kwargs):
"""
Close(self)
Closes the clipboard.
"""
return _misc_.Clipboard_Close(*args, **kwargs) | [
"def",
"Close",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_misc_",
".",
"Clipboard_Close",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_misc.py#L5803-L5809 | |
apache/incubator-mxnet | f03fb23f1d103fec9541b5ae59ee06b1734a51d9 | python/mxnet/symbol/numpy/linalg.py | python | qr | (a, mode='reduced') | return _npi.qr(a) | r"""
Compute the qr factorization of a matrix a.
Factor the matrix a as qr, where q is orthonormal and r is upper-triangular.
Parameters
----------
a : (..., M, N) _Symbol
Matrix or stack of matrices to be qr factored.
mode: {‘reduced’, ‘complete’, ‘r’, ‘raw’, ‘full’, ‘economic’}, optional
Only default mode, 'reduced', is implemented. If K = min(M, N), then
* 'reduced’ : returns q, r with dimensions (M, K), (K, N) (default)
Returns
-------
q : (..., M, K) _Symbol
A matrix or stack of matrices with K orthonormal columns, with K = min(M, N).
r : (..., K, N) _Symbol
A matrix or stack of upper triangular matrices.
Raises
------
MXNetError
If factoring fails.
Examples
--------
>>> from mxnet import np
>>> a = np.random.uniform(-10, 10, (2, 2))
>>> q, r = np.linalg.qr(a)
>>> q
array([[-0.22121978, -0.97522414],
[-0.97522414, 0.22121954]])
>>> r
array([[-4.4131265 , -7.1255064 ],
[ 0. , -0.28771925]])
>>> a = np.random.uniform(-10, 10, (2, 3))
>>> q, r = np.linalg.qr(a)
>>> q
array([[-0.28376842, -0.9588929 ],
[-0.9588929 , 0.28376836]])
>>> r
array([[-7.242763 , -0.5673361 , -2.624416 ],
[ 0. , -7.297918 , -0.15949416]])
>>> a = np.random.uniform(-10, 10, (3, 2))
>>> q, r = np.linalg.qr(a)
>>> q
array([[-0.34515655, 0.10919492],
[ 0.14765628, -0.97452265],
[-0.92685735, -0.19591334]])
>>> r
array([[-8.453794, 8.4175 ],
[ 0. , 5.430561]]) | r"""
Compute the qr factorization of a matrix a.
Factor the matrix a as qr, where q is orthonormal and r is upper-triangular. | [
"r",
"Compute",
"the",
"qr",
"factorization",
"of",
"a",
"matrix",
"a",
".",
"Factor",
"the",
"matrix",
"a",
"as",
"qr",
"where",
"q",
"is",
"orthonormal",
"and",
"r",
"is",
"upper",
"-",
"triangular",
"."
] | def qr(a, mode='reduced'):
r"""
Compute the qr factorization of a matrix a.
Factor the matrix a as qr, where q is orthonormal and r is upper-triangular.
Parameters
----------
a : (..., M, N) _Symbol
Matrix or stack of matrices to be qr factored.
mode: {‘reduced’, ‘complete’, ‘r’, ‘raw’, ‘full’, ‘economic’}, optional
Only default mode, 'reduced', is implemented. If K = min(M, N), then
* 'reduced’ : returns q, r with dimensions (M, K), (K, N) (default)
Returns
-------
q : (..., M, K) _Symbol
A matrix or stack of matrices with K orthonormal columns, with K = min(M, N).
r : (..., K, N) _Symbol
A matrix or stack of upper triangular matrices.
Raises
------
MXNetError
If factoring fails.
Examples
--------
>>> from mxnet import np
>>> a = np.random.uniform(-10, 10, (2, 2))
>>> q, r = np.linalg.qr(a)
>>> q
array([[-0.22121978, -0.97522414],
[-0.97522414, 0.22121954]])
>>> r
array([[-4.4131265 , -7.1255064 ],
[ 0. , -0.28771925]])
>>> a = np.random.uniform(-10, 10, (2, 3))
>>> q, r = np.linalg.qr(a)
>>> q
array([[-0.28376842, -0.9588929 ],
[-0.9588929 , 0.28376836]])
>>> r
array([[-7.242763 , -0.5673361 , -2.624416 ],
[ 0. , -7.297918 , -0.15949416]])
>>> a = np.random.uniform(-10, 10, (3, 2))
>>> q, r = np.linalg.qr(a)
>>> q
array([[-0.34515655, 0.10919492],
[ 0.14765628, -0.97452265],
[-0.92685735, -0.19591334]])
>>> r
array([[-8.453794, 8.4175 ],
[ 0. , 5.430561]])
"""
if mode is not None and mode != 'reduced':
raise NotImplementedError("Only default mode='reduced' is implemented.")
return _npi.qr(a) | [
"def",
"qr",
"(",
"a",
",",
"mode",
"=",
"'reduced'",
")",
":",
"if",
"mode",
"is",
"not",
"None",
"and",
"mode",
"!=",
"'reduced'",
":",
"raise",
"NotImplementedError",
"(",
"\"Only default mode='reduced' is implemented.\"",
")",
"return",
"_npi",
".",
"qr",
"(",
"a",
")"
] | https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/symbol/numpy/linalg.py#L484-L540 | |
hughperkins/tf-coriander | 970d3df6c11400ad68405f22b0c42a52374e94ca | tensorflow/contrib/graph_editor/select.py | python | select_ops_and_ts | (*args, **kwargs) | return ops, ts | Helper to select operations and tensors.
Args:
*args: list of 1) regular expressions (compiled or not) or 2) (array of)
`tf.Operation` 3) (array of) tf.Tensor. Regular expressions matching
tensors must start with the comment `"(?#ts)"`, for instance:
`"(?#ts)^foo/.*"`.
**kwargs: 'graph': `tf.Graph` in which to perform the regex query.This is
required when using regex.
'positive_filter': an elem if selected only if `positive_filter(elem)` is
`True`. This is optional.
Returns:
A tuple `(ops, ts)` where:
`ops` is a list of `tf.Operation`, and
`ts` is a list of `tf.Tensor`
Raises:
TypeError: if the optional keyword argument graph is not a `tf.Graph`
or if an argument in args is not an (array of) `tf.Tensor`
or an (array of) `tf.Operation` or a string or a regular expression.
ValueError: if one of the keyword arguments is unexpected or if a regular
expression is used without passing a graph as a keyword argument. | Helper to select operations and tensors. | [
"Helper",
"to",
"select",
"operations",
"and",
"tensors",
"."
] | def select_ops_and_ts(*args, **kwargs):
"""Helper to select operations and tensors.
Args:
*args: list of 1) regular expressions (compiled or not) or 2) (array of)
`tf.Operation` 3) (array of) tf.Tensor. Regular expressions matching
tensors must start with the comment `"(?#ts)"`, for instance:
`"(?#ts)^foo/.*"`.
**kwargs: 'graph': `tf.Graph` in which to perform the regex query.This is
required when using regex.
'positive_filter': an elem if selected only if `positive_filter(elem)` is
`True`. This is optional.
Returns:
A tuple `(ops, ts)` where:
`ops` is a list of `tf.Operation`, and
`ts` is a list of `tf.Tensor`
Raises:
TypeError: if the optional keyword argument graph is not a `tf.Graph`
or if an argument in args is not an (array of) `tf.Tensor`
or an (array of) `tf.Operation` or a string or a regular expression.
ValueError: if one of the keyword arguments is unexpected or if a regular
expression is used without passing a graph as a keyword argument.
"""
ops = select_ops(*args, restrict_ops_regex=False, **kwargs)
ts = select_ts(*args, restrict_ts_regex=True, **kwargs)
return ops, ts | [
"def",
"select_ops_and_ts",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"ops",
"=",
"select_ops",
"(",
"*",
"args",
",",
"restrict_ops_regex",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
"ts",
"=",
"select_ts",
"(",
"*",
"args",
",",
"restrict_ts_regex",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
"return",
"ops",
",",
"ts"
] | https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/contrib/graph_editor/select.py#L746-L771 | |
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/ops/ragged/row_partition.py | python | RowPartition.offsets_in_rows | (self) | return gen_ragged_math_ops.ragged_range(
starts=constant_op.constant(0, self.dtype),
limits=self.row_lengths(),
deltas=constant_op.constant(1, self.dtype)).rt_dense_values | Return the offset of each value.
RowPartition takes an array x and converts it into sublists.
offsets[i] is the index of x[i] in its sublist.
Given a shape, such as:
[*,*,*],[*,*],[],[*,*]
This returns:
0,1,2,0,1,0,1
Returns:
an offset for every value. | Return the offset of each value. | [
"Return",
"the",
"offset",
"of",
"each",
"value",
"."
] | def offsets_in_rows(self):
"""Return the offset of each value.
RowPartition takes an array x and converts it into sublists.
offsets[i] is the index of x[i] in its sublist.
Given a shape, such as:
[*,*,*],[*,*],[],[*,*]
This returns:
0,1,2,0,1,0,1
Returns:
an offset for every value.
"""
return gen_ragged_math_ops.ragged_range(
starts=constant_op.constant(0, self.dtype),
limits=self.row_lengths(),
deltas=constant_op.constant(1, self.dtype)).rt_dense_values | [
"def",
"offsets_in_rows",
"(",
"self",
")",
":",
"return",
"gen_ragged_math_ops",
".",
"ragged_range",
"(",
"starts",
"=",
"constant_op",
".",
"constant",
"(",
"0",
",",
"self",
".",
"dtype",
")",
",",
"limits",
"=",
"self",
".",
"row_lengths",
"(",
")",
",",
"deltas",
"=",
"constant_op",
".",
"constant",
"(",
"1",
",",
"self",
".",
"dtype",
")",
")",
".",
"rt_dense_values"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/ragged/row_partition.py#L928-L944 | |
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/third_party/coverage/coverage/control.py | python | Coverage.get_data | (self) | return self.data | Get the collected data and reset the collector.
Also warn about various problems collecting data.
Returns a :class:`coverage.CoverageData`, the collected coverage data.
.. versionadded:: 4.0 | Get the collected data and reset the collector. | [
"Get",
"the",
"collected",
"data",
"and",
"reset",
"the",
"collector",
"."
] | def get_data(self):
"""Get the collected data and reset the collector.
Also warn about various problems collecting data.
Returns a :class:`coverage.CoverageData`, the collected coverage data.
.. versionadded:: 4.0
"""
self._init()
if not self._measured:
return self.data
self.collector.save_data(self.data)
# If there are still entries in the source_pkgs list, then we never
# encountered those packages.
if self._warn_unimported_source:
for pkg in self.source_pkgs:
if pkg not in sys.modules:
self._warn("Module %s was never imported." % pkg)
elif not (
hasattr(sys.modules[pkg], '__file__') and
os.path.exists(sys.modules[pkg].__file__)
):
self._warn("Module %s has no Python source." % pkg)
else:
self._warn("Module %s was previously imported, but not measured." % pkg)
# Find out if we got any data.
if not self.data and self._warn_no_data:
self._warn("No data was collected.")
# Find files that were never executed at all.
for src in self.source:
for py_file in find_python_files(src):
py_file = files.canonical_filename(py_file)
if self.omit_match and self.omit_match.match(py_file):
# Turns out this file was omitted, so don't pull it back
# in as unexecuted.
continue
self.data.touch_file(py_file)
if self.config.note:
self.data.add_run_info(note=self.config.note)
self._measured = False
return self.data | [
"def",
"get_data",
"(",
"self",
")",
":",
"self",
".",
"_init",
"(",
")",
"if",
"not",
"self",
".",
"_measured",
":",
"return",
"self",
".",
"data",
"self",
".",
"collector",
".",
"save_data",
"(",
"self",
".",
"data",
")",
"# If there are still entries in the source_pkgs list, then we never",
"# encountered those packages.",
"if",
"self",
".",
"_warn_unimported_source",
":",
"for",
"pkg",
"in",
"self",
".",
"source_pkgs",
":",
"if",
"pkg",
"not",
"in",
"sys",
".",
"modules",
":",
"self",
".",
"_warn",
"(",
"\"Module %s was never imported.\"",
"%",
"pkg",
")",
"elif",
"not",
"(",
"hasattr",
"(",
"sys",
".",
"modules",
"[",
"pkg",
"]",
",",
"'__file__'",
")",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"sys",
".",
"modules",
"[",
"pkg",
"]",
".",
"__file__",
")",
")",
":",
"self",
".",
"_warn",
"(",
"\"Module %s has no Python source.\"",
"%",
"pkg",
")",
"else",
":",
"self",
".",
"_warn",
"(",
"\"Module %s was previously imported, but not measured.\"",
"%",
"pkg",
")",
"# Find out if we got any data.",
"if",
"not",
"self",
".",
"data",
"and",
"self",
".",
"_warn_no_data",
":",
"self",
".",
"_warn",
"(",
"\"No data was collected.\"",
")",
"# Find files that were never executed at all.",
"for",
"src",
"in",
"self",
".",
"source",
":",
"for",
"py_file",
"in",
"find_python_files",
"(",
"src",
")",
":",
"py_file",
"=",
"files",
".",
"canonical_filename",
"(",
"py_file",
")",
"if",
"self",
".",
"omit_match",
"and",
"self",
".",
"omit_match",
".",
"match",
"(",
"py_file",
")",
":",
"# Turns out this file was omitted, so don't pull it back",
"# in as unexecuted.",
"continue",
"self",
".",
"data",
".",
"touch_file",
"(",
"py_file",
")",
"if",
"self",
".",
"config",
".",
"note",
":",
"self",
".",
"data",
".",
"add_run_info",
"(",
"note",
"=",
"self",
".",
"config",
".",
"note",
")",
"self",
".",
"_measured",
"=",
"False",
"return",
"self",
".",
"data"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/coverage/coverage/control.py#L796-L846 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/pandas/py3/pandas/core/computation/pytables.py | python | BinOp.metadata | (self) | return getattr(self.queryables.get(self.lhs), "metadata", None) | the metadata of my field | the metadata of my field | [
"the",
"metadata",
"of",
"my",
"field"
] | def metadata(self):
"""the metadata of my field"""
return getattr(self.queryables.get(self.lhs), "metadata", None) | [
"def",
"metadata",
"(",
"self",
")",
":",
"return",
"getattr",
"(",
"self",
".",
"queryables",
".",
"get",
"(",
"self",
".",
"lhs",
")",
",",
"\"metadata\"",
",",
"None",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py3/pandas/core/computation/pytables.py#L188-L190 | |
LiquidPlayer/LiquidCore | 9405979363f2353ac9a71ad8ab59685dd7f919c9 | deps/node-10.15.3/deps/v8/PRESUBMIT.py | python | _CheckNoInlineHeaderIncludesInNormalHeaders | (input_api, output_api) | Attempts to prevent inclusion of inline headers into normal header
files. This tries to establish a layering where inline headers can be
included by other inline headers or compilation units only. | Attempts to prevent inclusion of inline headers into normal header
files. This tries to establish a layering where inline headers can be
included by other inline headers or compilation units only. | [
"Attempts",
"to",
"prevent",
"inclusion",
"of",
"inline",
"headers",
"into",
"normal",
"header",
"files",
".",
"This",
"tries",
"to",
"establish",
"a",
"layering",
"where",
"inline",
"headers",
"can",
"be",
"included",
"by",
"other",
"inline",
"headers",
"or",
"compilation",
"units",
"only",
"."
] | def _CheckNoInlineHeaderIncludesInNormalHeaders(input_api, output_api):
"""Attempts to prevent inclusion of inline headers into normal header
files. This tries to establish a layering where inline headers can be
included by other inline headers or compilation units only."""
file_inclusion_pattern = r'(?!.+-inl\.h).+\.h'
include_directive_pattern = input_api.re.compile(r'#include ".+-inl.h"')
include_error = (
'You are including an inline header (e.g. foo-inl.h) within a normal\n'
'header (e.g. bar.h) file. This violates layering of dependencies.')
def FilterFile(affected_file):
black_list = (_EXCLUDED_PATHS +
input_api.DEFAULT_BLACK_LIST)
return input_api.FilterSourceFile(
affected_file,
white_list=(file_inclusion_pattern, ),
black_list=black_list)
problems = []
for f in input_api.AffectedSourceFiles(FilterFile):
local_path = f.LocalPath()
for line_number, line in f.ChangedContents():
if (include_directive_pattern.search(line)):
problems.append(
'%s:%d\n %s' % (local_path, line_number, line.strip()))
if problems:
return [output_api.PresubmitError(include_error, problems)]
else:
return [] | [
"def",
"_CheckNoInlineHeaderIncludesInNormalHeaders",
"(",
"input_api",
",",
"output_api",
")",
":",
"file_inclusion_pattern",
"=",
"r'(?!.+-inl\\.h).+\\.h'",
"include_directive_pattern",
"=",
"input_api",
".",
"re",
".",
"compile",
"(",
"r'#include \".+-inl.h\"'",
")",
"include_error",
"=",
"(",
"'You are including an inline header (e.g. foo-inl.h) within a normal\\n'",
"'header (e.g. bar.h) file. This violates layering of dependencies.'",
")",
"def",
"FilterFile",
"(",
"affected_file",
")",
":",
"black_list",
"=",
"(",
"_EXCLUDED_PATHS",
"+",
"input_api",
".",
"DEFAULT_BLACK_LIST",
")",
"return",
"input_api",
".",
"FilterSourceFile",
"(",
"affected_file",
",",
"white_list",
"=",
"(",
"file_inclusion_pattern",
",",
")",
",",
"black_list",
"=",
"black_list",
")",
"problems",
"=",
"[",
"]",
"for",
"f",
"in",
"input_api",
".",
"AffectedSourceFiles",
"(",
"FilterFile",
")",
":",
"local_path",
"=",
"f",
".",
"LocalPath",
"(",
")",
"for",
"line_number",
",",
"line",
"in",
"f",
".",
"ChangedContents",
"(",
")",
":",
"if",
"(",
"include_directive_pattern",
".",
"search",
"(",
"line",
")",
")",
":",
"problems",
".",
"append",
"(",
"'%s:%d\\n %s'",
"%",
"(",
"local_path",
",",
"line_number",
",",
"line",
".",
"strip",
"(",
")",
")",
")",
"if",
"problems",
":",
"return",
"[",
"output_api",
".",
"PresubmitError",
"(",
"include_error",
",",
"problems",
")",
"]",
"else",
":",
"return",
"[",
"]"
] | https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/node-10.15.3/deps/v8/PRESUBMIT.py#L214-L243 | ||
luliyucoordinate/Leetcode | 96afcdc54807d1d184e881a075d1dbf3371e31fb | src/0945-Minimum-Increment-to-Make-Array-Unique/0945.py | python | Solution.minIncrementForUnique | (self, A) | return result | :type A: List[int]
:rtype: int | :type A: List[int]
:rtype: int | [
":",
"type",
"A",
":",
"List",
"[",
"int",
"]",
":",
"rtype",
":",
"int"
] | def minIncrementForUnique(self, A):
"""
:type A: List[int]
:rtype: int
"""
box, result, max_A = [0]*40000, 0, 0
for a in A:
box[a] += 1
if max_A < a:
max_A = a
for i in range(max_A):
if box[i] <= 1:
continue
ano = box[i] - 1
result += ano
box[i+1] += ano
box[i] = 1
last_ano = box[max_A] - 1
result += (1 + last_ano)*last_ano//2
return result | [
"def",
"minIncrementForUnique",
"(",
"self",
",",
"A",
")",
":",
"box",
",",
"result",
",",
"max_A",
"=",
"[",
"0",
"]",
"*",
"40000",
",",
"0",
",",
"0",
"for",
"a",
"in",
"A",
":",
"box",
"[",
"a",
"]",
"+=",
"1",
"if",
"max_A",
"<",
"a",
":",
"max_A",
"=",
"a",
"for",
"i",
"in",
"range",
"(",
"max_A",
")",
":",
"if",
"box",
"[",
"i",
"]",
"<=",
"1",
":",
"continue",
"ano",
"=",
"box",
"[",
"i",
"]",
"-",
"1",
"result",
"+=",
"ano",
"box",
"[",
"i",
"+",
"1",
"]",
"+=",
"ano",
"box",
"[",
"i",
"]",
"=",
"1",
"last_ano",
"=",
"box",
"[",
"max_A",
"]",
"-",
"1",
"result",
"+=",
"(",
"1",
"+",
"last_ano",
")",
"*",
"last_ano",
"//",
"2",
"return",
"result"
] | https://github.com/luliyucoordinate/Leetcode/blob/96afcdc54807d1d184e881a075d1dbf3371e31fb/src/0945-Minimum-Increment-to-Make-Array-Unique/0945.py#L2-L23 | |
FreeCAD/FreeCAD | ba42231b9c6889b89e064d6d563448ed81e376ec | src/Mod/Path/PathScripts/PathJob.py | python | ObjectJob.baseCandidates | (cls) | return sorted(
[obj for obj in FreeCAD.ActiveDocument.Objects if cls.isBaseCandidate(obj)],
key=lambda o: o.Label,
) | Answer all objects in the current document which could serve as a Base for a job. | Answer all objects in the current document which could serve as a Base for a job. | [
"Answer",
"all",
"objects",
"in",
"the",
"current",
"document",
"which",
"could",
"serve",
"as",
"a",
"Base",
"for",
"a",
"job",
"."
] | def baseCandidates(cls):
"""Answer all objects in the current document which could serve as a Base for a job."""
return sorted(
[obj for obj in FreeCAD.ActiveDocument.Objects if cls.isBaseCandidate(obj)],
key=lambda o: o.Label,
) | [
"def",
"baseCandidates",
"(",
"cls",
")",
":",
"return",
"sorted",
"(",
"[",
"obj",
"for",
"obj",
"in",
"FreeCAD",
".",
"ActiveDocument",
".",
"Objects",
"if",
"cls",
".",
"isBaseCandidate",
"(",
"obj",
")",
"]",
",",
"key",
"=",
"lambda",
"o",
":",
"o",
".",
"Label",
",",
")"
] | https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Path/PathScripts/PathJob.py#L818-L823 | |
pmq20/node-packer | 12c46c6e44fbc14d9ee645ebd17d5296b324f7e0 | lts/tools/inspector_protocol/jinja2/ext.py | python | babel_extract | (fileobj, keywords, comment_tags, options) | Babel extraction method for Jinja templates.
.. versionchanged:: 2.3
Basic support for translation comments was added. If `comment_tags`
is now set to a list of keywords for extraction, the extractor will
try to find the best preceeding comment that begins with one of the
keywords. For best results, make sure to not have more than one
gettext call in one line of code and the matching comment in the
same line or the line before.
.. versionchanged:: 2.5.1
The `newstyle_gettext` flag can be set to `True` to enable newstyle
gettext calls.
.. versionchanged:: 2.7
A `silent` option can now be provided. If set to `False` template
syntax errors are propagated instead of being ignored.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results.
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples.
(comments will be empty currently) | Babel extraction method for Jinja templates. | [
"Babel",
"extraction",
"method",
"for",
"Jinja",
"templates",
"."
] | def babel_extract(fileobj, keywords, comment_tags, options):
"""Babel extraction method for Jinja templates.
.. versionchanged:: 2.3
Basic support for translation comments was added. If `comment_tags`
is now set to a list of keywords for extraction, the extractor will
try to find the best preceeding comment that begins with one of the
keywords. For best results, make sure to not have more than one
gettext call in one line of code and the matching comment in the
same line or the line before.
.. versionchanged:: 2.5.1
The `newstyle_gettext` flag can be set to `True` to enable newstyle
gettext calls.
.. versionchanged:: 2.7
A `silent` option can now be provided. If set to `False` template
syntax errors are propagated instead of being ignored.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results.
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples.
(comments will be empty currently)
"""
extensions = set()
for extension in options.get('extensions', '').split(','):
extension = extension.strip()
if not extension:
continue
extensions.add(import_string(extension))
if InternationalizationExtension not in extensions:
extensions.add(InternationalizationExtension)
def getbool(options, key, default=False):
return options.get(key, str(default)).lower() in \
('1', 'on', 'yes', 'true')
silent = getbool(options, 'silent', True)
environment = Environment(
options.get('block_start_string', BLOCK_START_STRING),
options.get('block_end_string', BLOCK_END_STRING),
options.get('variable_start_string', VARIABLE_START_STRING),
options.get('variable_end_string', VARIABLE_END_STRING),
options.get('comment_start_string', COMMENT_START_STRING),
options.get('comment_end_string', COMMENT_END_STRING),
options.get('line_statement_prefix') or LINE_STATEMENT_PREFIX,
options.get('line_comment_prefix') or LINE_COMMENT_PREFIX,
getbool(options, 'trim_blocks', TRIM_BLOCKS),
getbool(options, 'lstrip_blocks', LSTRIP_BLOCKS),
NEWLINE_SEQUENCE,
getbool(options, 'keep_trailing_newline', KEEP_TRAILING_NEWLINE),
frozenset(extensions),
cache_size=0,
auto_reload=False
)
if getbool(options, 'trimmed'):
environment.policies['ext.i18n.trimmed'] = True
if getbool(options, 'newstyle_gettext'):
environment.newstyle_gettext = True
source = fileobj.read().decode(options.get('encoding', 'utf-8'))
try:
node = environment.parse(source)
tokens = list(environment.lex(environment.preprocess(source)))
except TemplateSyntaxError as e:
if not silent:
raise
# skip templates with syntax errors
return
finder = _CommentFinder(tokens, comment_tags)
for lineno, func, message in extract_from_ast(node, keywords):
yield lineno, func, message, finder.find_comments(lineno) | [
"def",
"babel_extract",
"(",
"fileobj",
",",
"keywords",
",",
"comment_tags",
",",
"options",
")",
":",
"extensions",
"=",
"set",
"(",
")",
"for",
"extension",
"in",
"options",
".",
"get",
"(",
"'extensions'",
",",
"''",
")",
".",
"split",
"(",
"','",
")",
":",
"extension",
"=",
"extension",
".",
"strip",
"(",
")",
"if",
"not",
"extension",
":",
"continue",
"extensions",
".",
"add",
"(",
"import_string",
"(",
"extension",
")",
")",
"if",
"InternationalizationExtension",
"not",
"in",
"extensions",
":",
"extensions",
".",
"add",
"(",
"InternationalizationExtension",
")",
"def",
"getbool",
"(",
"options",
",",
"key",
",",
"default",
"=",
"False",
")",
":",
"return",
"options",
".",
"get",
"(",
"key",
",",
"str",
"(",
"default",
")",
")",
".",
"lower",
"(",
")",
"in",
"(",
"'1'",
",",
"'on'",
",",
"'yes'",
",",
"'true'",
")",
"silent",
"=",
"getbool",
"(",
"options",
",",
"'silent'",
",",
"True",
")",
"environment",
"=",
"Environment",
"(",
"options",
".",
"get",
"(",
"'block_start_string'",
",",
"BLOCK_START_STRING",
")",
",",
"options",
".",
"get",
"(",
"'block_end_string'",
",",
"BLOCK_END_STRING",
")",
",",
"options",
".",
"get",
"(",
"'variable_start_string'",
",",
"VARIABLE_START_STRING",
")",
",",
"options",
".",
"get",
"(",
"'variable_end_string'",
",",
"VARIABLE_END_STRING",
")",
",",
"options",
".",
"get",
"(",
"'comment_start_string'",
",",
"COMMENT_START_STRING",
")",
",",
"options",
".",
"get",
"(",
"'comment_end_string'",
",",
"COMMENT_END_STRING",
")",
",",
"options",
".",
"get",
"(",
"'line_statement_prefix'",
")",
"or",
"LINE_STATEMENT_PREFIX",
",",
"options",
".",
"get",
"(",
"'line_comment_prefix'",
")",
"or",
"LINE_COMMENT_PREFIX",
",",
"getbool",
"(",
"options",
",",
"'trim_blocks'",
",",
"TRIM_BLOCKS",
")",
",",
"getbool",
"(",
"options",
",",
"'lstrip_blocks'",
",",
"LSTRIP_BLOCKS",
")",
",",
"NEWLINE_SEQUENCE",
",",
"getbool",
"(",
"options",
",",
"'keep_trailing_newline'",
",",
"KEEP_TRAILING_NEWLINE",
")",
",",
"frozenset",
"(",
"extensions",
")",
",",
"cache_size",
"=",
"0",
",",
"auto_reload",
"=",
"False",
")",
"if",
"getbool",
"(",
"options",
",",
"'trimmed'",
")",
":",
"environment",
".",
"policies",
"[",
"'ext.i18n.trimmed'",
"]",
"=",
"True",
"if",
"getbool",
"(",
"options",
",",
"'newstyle_gettext'",
")",
":",
"environment",
".",
"newstyle_gettext",
"=",
"True",
"source",
"=",
"fileobj",
".",
"read",
"(",
")",
".",
"decode",
"(",
"options",
".",
"get",
"(",
"'encoding'",
",",
"'utf-8'",
")",
")",
"try",
":",
"node",
"=",
"environment",
".",
"parse",
"(",
"source",
")",
"tokens",
"=",
"list",
"(",
"environment",
".",
"lex",
"(",
"environment",
".",
"preprocess",
"(",
"source",
")",
")",
")",
"except",
"TemplateSyntaxError",
"as",
"e",
":",
"if",
"not",
"silent",
":",
"raise",
"# skip templates with syntax errors",
"return",
"finder",
"=",
"_CommentFinder",
"(",
"tokens",
",",
"comment_tags",
")",
"for",
"lineno",
",",
"func",
",",
"message",
"in",
"extract_from_ast",
"(",
"node",
",",
"keywords",
")",
":",
"yield",
"lineno",
",",
"func",
",",
"message",
",",
"finder",
".",
"find_comments",
"(",
"lineno",
")"
] | https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/tools/inspector_protocol/jinja2/ext.py#L542-L619 | ||
forkineye/ESPixelStick | 22926f1c0d1131f1369fc7cad405689a095ae3cb | dist/bin/pyserial/serial/tools/hexlify_codec.py | python | hex_decode | (data, errors='strict') | return (unicode(''.join('{:02X} '.format(ord(b)) for b in serial.iterbytes(data))), len(data)) | b'@ab' -> '40 41 42 | b' | [
"b"
] | def hex_decode(data, errors='strict'):
"""b'@ab' -> '40 41 42'"""
return (unicode(''.join('{:02X} '.format(ord(b)) for b in serial.iterbytes(data))), len(data)) | [
"def",
"hex_decode",
"(",
"data",
",",
"errors",
"=",
"'strict'",
")",
":",
"return",
"(",
"unicode",
"(",
"''",
".",
"join",
"(",
"'{:02X} '",
".",
"format",
"(",
"ord",
"(",
"b",
")",
")",
"for",
"b",
"in",
"serial",
".",
"iterbytes",
"(",
"data",
")",
")",
")",
",",
"len",
"(",
"data",
")",
")"
] | https://github.com/forkineye/ESPixelStick/blob/22926f1c0d1131f1369fc7cad405689a095ae3cb/dist/bin/pyserial/serial/tools/hexlify_codec.py#L41-L43 | |
SpenceKonde/megaTinyCore | 1c4a70b18a149fe6bcb551dfa6db11ca50b8997b | megaavr/tools/libs/serial/tools/miniterm.py | python | Miniterm.update_transformations | (self) | take list of transformation classes and instantiate them for rx and tx | take list of transformation classes and instantiate them for rx and tx | [
"take",
"list",
"of",
"transformation",
"classes",
"and",
"instantiate",
"them",
"for",
"rx",
"and",
"tx"
] | def update_transformations(self):
"""take list of transformation classes and instantiate them for rx and tx"""
transformations = [EOL_TRANSFORMATIONS[self.eol]] + [TRANSFORMATIONS[f]
for f in self.filters]
self.tx_transformations = [t() for t in transformations]
self.rx_transformations = list(reversed(self.tx_transformations)) | [
"def",
"update_transformations",
"(",
"self",
")",
":",
"transformations",
"=",
"[",
"EOL_TRANSFORMATIONS",
"[",
"self",
".",
"eol",
"]",
"]",
"+",
"[",
"TRANSFORMATIONS",
"[",
"f",
"]",
"for",
"f",
"in",
"self",
".",
"filters",
"]",
"self",
".",
"tx_transformations",
"=",
"[",
"t",
"(",
")",
"for",
"t",
"in",
"transformations",
"]",
"self",
".",
"rx_transformations",
"=",
"list",
"(",
"reversed",
"(",
"self",
".",
"tx_transformations",
")",
")"
] | https://github.com/SpenceKonde/megaTinyCore/blob/1c4a70b18a149fe6bcb551dfa6db11ca50b8997b/megaavr/tools/libs/serial/tools/miniterm.py#L398-L403 | ||
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/python/mantidqt/mantidqt/widgets/sliceviewer/lineplots.py | python | PixelLinePlot.handle_key | (self, key) | Called by KeyHandler if a key was accepted to perform a region
extraction | Called by KeyHandler if a key was accepted to perform a region
extraction | [
"Called",
"by",
"KeyHandler",
"if",
"a",
"key",
"was",
"accepted",
"to",
"perform",
"a",
"region",
"extraction"
] | def handle_key(self, key):
"""
Called by KeyHandler if a key was accepted to perform a region
extraction
"""
if self._cursor_pos is None:
return None
pixel_transforms = self.PIXEL_TRANSFORM_CLS
if key in pixel_transforms:
to_next_pixel = pixel_transforms[key](self.plotter.image)
to_next_pixel.move_from(self._cursor_pos)
else:
self.exporter.export_pixel_cut(self._cursor_pos, key) | [
"def",
"handle_key",
"(",
"self",
",",
"key",
")",
":",
"if",
"self",
".",
"_cursor_pos",
"is",
"None",
":",
"return",
"None",
"pixel_transforms",
"=",
"self",
".",
"PIXEL_TRANSFORM_CLS",
"if",
"key",
"in",
"pixel_transforms",
":",
"to_next_pixel",
"=",
"pixel_transforms",
"[",
"key",
"]",
"(",
"self",
".",
"plotter",
".",
"image",
")",
"to_next_pixel",
".",
"move_from",
"(",
"self",
".",
"_cursor_pos",
")",
"else",
":",
"self",
".",
"exporter",
".",
"export_pixel_cut",
"(",
"self",
".",
"_cursor_pos",
",",
"key",
")"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqt/mantidqt/widgets/sliceviewer/lineplots.py#L300-L313 | ||
Constellation/iv | 64c3a9c7c517063f29d90d449180ea8f6f4d946f | tools/cpplint.py | python | CleanseComments | (line) | return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line) | Removes //-comments and single-line C-style /* */ comments.
Args:
line: A line of C++ source.
Returns:
The line with single-line comments removed. | Removes //-comments and single-line C-style /* */ comments. | [
"Removes",
"//",
"-",
"comments",
"and",
"single",
"-",
"line",
"C",
"-",
"style",
"/",
"*",
"*",
"/",
"comments",
"."
] | def CleanseComments(line):
"""Removes //-comments and single-line C-style /* */ comments.
Args:
line: A line of C++ source.
Returns:
The line with single-line comments removed.
"""
commentpos = line.find('//')
if commentpos != -1 and not IsCppString(line[:commentpos]):
line = line[:commentpos].rstrip()
# get rid of /* ... */
return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line) | [
"def",
"CleanseComments",
"(",
"line",
")",
":",
"commentpos",
"=",
"line",
".",
"find",
"(",
"'//'",
")",
"if",
"commentpos",
"!=",
"-",
"1",
"and",
"not",
"IsCppString",
"(",
"line",
"[",
":",
"commentpos",
"]",
")",
":",
"line",
"=",
"line",
"[",
":",
"commentpos",
"]",
".",
"rstrip",
"(",
")",
"# get rid of /* ... */",
"return",
"_RE_PATTERN_CLEANSE_LINE_C_COMMENTS",
".",
"sub",
"(",
"''",
",",
"line",
")"
] | https://github.com/Constellation/iv/blob/64c3a9c7c517063f29d90d449180ea8f6f4d946f/tools/cpplint.py#L1155-L1168 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python3/src/Lib/chunk.py | python | Chunk.getsize | (self) | return self.chunksize | Return the size of the current chunk. | Return the size of the current chunk. | [
"Return",
"the",
"size",
"of",
"the",
"current",
"chunk",
"."
] | def getsize(self):
"""Return the size of the current chunk."""
return self.chunksize | [
"def",
"getsize",
"(",
"self",
")",
":",
"return",
"self",
".",
"chunksize"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/chunk.py#L82-L84 | |
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/_gdi.py | python | Font_SetDefaultEncoding | (*args, **kwargs) | return _gdi_.Font_SetDefaultEncoding(*args, **kwargs) | Font_SetDefaultEncoding(int encoding)
Sets the default font encoding. | Font_SetDefaultEncoding(int encoding) | [
"Font_SetDefaultEncoding",
"(",
"int",
"encoding",
")"
] | def Font_SetDefaultEncoding(*args, **kwargs):
"""
Font_SetDefaultEncoding(int encoding)
Sets the default font encoding.
"""
return _gdi_.Font_SetDefaultEncoding(*args, **kwargs) | [
"def",
"Font_SetDefaultEncoding",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_gdi_",
".",
"Font_SetDefaultEncoding",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_gdi.py#L2627-L2633 | |
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/pexpect/pexpect/pty_spawn.py | python | spawn.waitnoecho | (self, timeout=-1) | This waits until the terminal ECHO flag is set False. This returns
True if the echo mode is off. This returns False if the ECHO flag was
not set False before the timeout. This can be used to detect when the
child is waiting for a password. Usually a child application will turn
off echo mode when it is waiting for the user to enter a password. For
example, instead of expecting the "password:" prompt you can wait for
the child to set ECHO off::
p = pexpect.spawn('ssh user@example.com')
p.waitnoecho()
p.sendline(mypassword)
If timeout==-1 then this method will use the value in self.timeout.
If timeout==None then this method to block until ECHO flag is False. | This waits until the terminal ECHO flag is set False. This returns
True if the echo mode is off. This returns False if the ECHO flag was
not set False before the timeout. This can be used to detect when the
child is waiting for a password. Usually a child application will turn
off echo mode when it is waiting for the user to enter a password. For
example, instead of expecting the "password:" prompt you can wait for
the child to set ECHO off:: | [
"This",
"waits",
"until",
"the",
"terminal",
"ECHO",
"flag",
"is",
"set",
"False",
".",
"This",
"returns",
"True",
"if",
"the",
"echo",
"mode",
"is",
"off",
".",
"This",
"returns",
"False",
"if",
"the",
"ECHO",
"flag",
"was",
"not",
"set",
"False",
"before",
"the",
"timeout",
".",
"This",
"can",
"be",
"used",
"to",
"detect",
"when",
"the",
"child",
"is",
"waiting",
"for",
"a",
"password",
".",
"Usually",
"a",
"child",
"application",
"will",
"turn",
"off",
"echo",
"mode",
"when",
"it",
"is",
"waiting",
"for",
"the",
"user",
"to",
"enter",
"a",
"password",
".",
"For",
"example",
"instead",
"of",
"expecting",
"the",
"password",
":",
"prompt",
"you",
"can",
"wait",
"for",
"the",
"child",
"to",
"set",
"ECHO",
"off",
"::"
] | def waitnoecho(self, timeout=-1):
'''This waits until the terminal ECHO flag is set False. This returns
True if the echo mode is off. This returns False if the ECHO flag was
not set False before the timeout. This can be used to detect when the
child is waiting for a password. Usually a child application will turn
off echo mode when it is waiting for the user to enter a password. For
example, instead of expecting the "password:" prompt you can wait for
the child to set ECHO off::
p = pexpect.spawn('ssh user@example.com')
p.waitnoecho()
p.sendline(mypassword)
If timeout==-1 then this method will use the value in self.timeout.
If timeout==None then this method to block until ECHO flag is False.
'''
if timeout == -1:
timeout = self.timeout
if timeout is not None:
end_time = time.time() + timeout
while True:
if not self.getecho():
return True
if timeout < 0 and timeout is not None:
return False
if timeout is not None:
timeout = end_time - time.time()
time.sleep(0.1) | [
"def",
"waitnoecho",
"(",
"self",
",",
"timeout",
"=",
"-",
"1",
")",
":",
"if",
"timeout",
"==",
"-",
"1",
":",
"timeout",
"=",
"self",
".",
"timeout",
"if",
"timeout",
"is",
"not",
"None",
":",
"end_time",
"=",
"time",
".",
"time",
"(",
")",
"+",
"timeout",
"while",
"True",
":",
"if",
"not",
"self",
".",
"getecho",
"(",
")",
":",
"return",
"True",
"if",
"timeout",
"<",
"0",
"and",
"timeout",
"is",
"not",
"None",
":",
"return",
"False",
"if",
"timeout",
"is",
"not",
"None",
":",
"timeout",
"=",
"end_time",
"-",
"time",
".",
"time",
"(",
")",
"time",
".",
"sleep",
"(",
"0.1",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pexpect/pexpect/pty_spawn.py#L344-L372 | ||
mapnik/mapnik | f3da900c355e1d15059c4a91b00203dcc9d9f0ef | scons/scons-local-4.1.0/SCons/Node/FS.py | python | RootDir._lookup_abs | (self, p, klass, create=1) | return result | Fast (?) lookup of a *normalized* absolute path.
This method is intended for use by internal lookups with
already-normalized path data. For general-purpose lookups,
use the FS.Entry(), FS.Dir() or FS.File() methods.
The caller is responsible for making sure we're passed a
normalized absolute path; we merely let Python's dictionary look
up and return the One True Node.FS object for the path.
If a Node for the specified "p" doesn't already exist, and
"create" is specified, the Node may be created after recursive
invocation to find or create the parent directory or directories. | Fast (?) lookup of a *normalized* absolute path. | [
"Fast",
"(",
"?",
")",
"lookup",
"of",
"a",
"*",
"normalized",
"*",
"absolute",
"path",
"."
] | def _lookup_abs(self, p, klass, create=1):
"""
Fast (?) lookup of a *normalized* absolute path.
This method is intended for use by internal lookups with
already-normalized path data. For general-purpose lookups,
use the FS.Entry(), FS.Dir() or FS.File() methods.
The caller is responsible for making sure we're passed a
normalized absolute path; we merely let Python's dictionary look
up and return the One True Node.FS object for the path.
If a Node for the specified "p" doesn't already exist, and
"create" is specified, the Node may be created after recursive
invocation to find or create the parent directory or directories.
"""
k = _my_normcase(p)
try:
result = self._lookupDict[k]
except KeyError:
if not create:
msg = "No such file or directory: '%s' in '%s' (and create is False)" % (p, str(self))
raise SCons.Errors.UserError(msg)
# There is no Node for this path name, and we're allowed
# to create it.
dir_name, file_name = p.rsplit('/',1)
dir_node = self._lookup_abs(dir_name, Dir)
result = klass(file_name, dir_node, self.fs)
# Double-check on disk (as configured) that the Node we
# created matches whatever is out there in the real world.
result.diskcheck_match()
self._lookupDict[k] = result
dir_node.entries[_my_normcase(file_name)] = result
dir_node.implicit = None
else:
# There is already a Node for this path name. Allow it to
# complain if we were looking for an inappropriate type.
result.must_be_same(klass)
return result | [
"def",
"_lookup_abs",
"(",
"self",
",",
"p",
",",
"klass",
",",
"create",
"=",
"1",
")",
":",
"k",
"=",
"_my_normcase",
"(",
"p",
")",
"try",
":",
"result",
"=",
"self",
".",
"_lookupDict",
"[",
"k",
"]",
"except",
"KeyError",
":",
"if",
"not",
"create",
":",
"msg",
"=",
"\"No such file or directory: '%s' in '%s' (and create is False)\"",
"%",
"(",
"p",
",",
"str",
"(",
"self",
")",
")",
"raise",
"SCons",
".",
"Errors",
".",
"UserError",
"(",
"msg",
")",
"# There is no Node for this path name, and we're allowed",
"# to create it.",
"dir_name",
",",
"file_name",
"=",
"p",
".",
"rsplit",
"(",
"'/'",
",",
"1",
")",
"dir_node",
"=",
"self",
".",
"_lookup_abs",
"(",
"dir_name",
",",
"Dir",
")",
"result",
"=",
"klass",
"(",
"file_name",
",",
"dir_node",
",",
"self",
".",
"fs",
")",
"# Double-check on disk (as configured) that the Node we",
"# created matches whatever is out there in the real world.",
"result",
".",
"diskcheck_match",
"(",
")",
"self",
".",
"_lookupDict",
"[",
"k",
"]",
"=",
"result",
"dir_node",
".",
"entries",
"[",
"_my_normcase",
"(",
"file_name",
")",
"]",
"=",
"result",
"dir_node",
".",
"implicit",
"=",
"None",
"else",
":",
"# There is already a Node for this path name. Allow it to",
"# complain if we were looking for an inappropriate type.",
"result",
".",
"must_be_same",
"(",
"klass",
")",
"return",
"result"
] | https://github.com/mapnik/mapnik/blob/f3da900c355e1d15059c4a91b00203dcc9d9f0ef/scons/scons-local-4.1.0/SCons/Node/FS.py#L2372-L2412 | |
ceph/ceph | 959663007321a369c83218414a29bd9dbc8bda3a | src/pybind/mgr/telemetry/module.py | python | Module.diff | (self) | return 0, r, '' | Show the diff between opted-in collection and available collection | Show the diff between opted-in collection and available collection | [
"Show",
"the",
"diff",
"between",
"opted",
"-",
"in",
"collection",
"and",
"available",
"collection"
] | def diff(self) -> Tuple[int, str, str]:
'''
Show the diff between opted-in collection and available collection
'''
diff = []
keys = ['nag']
for c in MODULE_COLLECTION:
if not self.is_enabled_collection(c['name']):
diff.append({key: val for key, val in c.items() if key not in keys})
r = None
if diff == []:
r = "Telemetry is up to date"
else:
r = json.dumps(diff, indent=4, sort_keys=True)
return 0, r, '' | [
"def",
"diff",
"(",
"self",
")",
"->",
"Tuple",
"[",
"int",
",",
"str",
",",
"str",
"]",
":",
"diff",
"=",
"[",
"]",
"keys",
"=",
"[",
"'nag'",
"]",
"for",
"c",
"in",
"MODULE_COLLECTION",
":",
"if",
"not",
"self",
".",
"is_enabled_collection",
"(",
"c",
"[",
"'name'",
"]",
")",
":",
"diff",
".",
"append",
"(",
"{",
"key",
":",
"val",
"for",
"key",
",",
"val",
"in",
"c",
".",
"items",
"(",
")",
"if",
"key",
"not",
"in",
"keys",
"}",
")",
"r",
"=",
"None",
"if",
"diff",
"==",
"[",
"]",
":",
"r",
"=",
"\"Telemetry is up to date\"",
"else",
":",
"r",
"=",
"json",
".",
"dumps",
"(",
"diff",
",",
"indent",
"=",
"4",
",",
"sort_keys",
"=",
"True",
")",
"return",
"0",
",",
"r",
",",
"''"
] | https://github.com/ceph/ceph/blob/959663007321a369c83218414a29bd9dbc8bda3a/src/pybind/mgr/telemetry/module.py#L1358-L1375 | |
snap-stanford/snap-python | d53c51b0a26aa7e3e7400b014cdf728948fde80a | snapx/snapx/classes/coreviews.py | python | AtlasView.__init__ | (self, g, n) | Initialize with the input node and graph | Initialize with the input node and graph | [
"Initialize",
"with",
"the",
"input",
"node",
"and",
"graph"
] | def __init__(self, g, n):
"""Initialize with the input node and graph"""
self._graph = g
self._node = n
if not isinstance(n, int):
raise TypeError("Node ID must be int.")
if n not in g:
raise KeyError("Node must be present in graph.") | [
"def",
"__init__",
"(",
"self",
",",
"g",
",",
"n",
")",
":",
"self",
".",
"_graph",
"=",
"g",
"self",
".",
"_node",
"=",
"n",
"if",
"not",
"isinstance",
"(",
"n",
",",
"int",
")",
":",
"raise",
"TypeError",
"(",
"\"Node ID must be int.\"",
")",
"if",
"n",
"not",
"in",
"g",
":",
"raise",
"KeyError",
"(",
"\"Node must be present in graph.\"",
")"
] | https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/snapx/snapx/classes/coreviews.py#L23-L31 | ||
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/plat-mac/pimp.py | python | PimpPackage.downloadPackageOnly | (self, output=None) | Download a single package, if needed.
An MD5 signature is used to determine whether download is needed,
and to test that we actually downloaded what we expected.
If output is given it is a file-like object that will receive a log
of what happens.
If anything unforeseen happened the method returns an error message
string. | Download a single package, if needed. | [
"Download",
"a",
"single",
"package",
"if",
"needed",
"."
] | def downloadPackageOnly(self, output=None):
"""Download a single package, if needed.
An MD5 signature is used to determine whether download is needed,
and to test that we actually downloaded what we expected.
If output is given it is a file-like object that will receive a log
of what happens.
If anything unforeseen happened the method returns an error message
string.
"""
scheme, loc, path, query, frag = urlparse.urlsplit(self._dict['Download-URL'])
path = urllib.url2pathname(path)
filename = os.path.split(path)[1]
self.archiveFilename = os.path.join(self._db.preferences.downloadDir, filename)
if not self._archiveOK():
if scheme == 'manual':
return "Please download package manually and save as %s" % self.archiveFilename
downloader = PimpUrllibDownloader(None, self._db.preferences.downloadDir,
watcher=self._db.preferences.watcher)
if not downloader.download(self._dict['Download-URL'],
self.archiveFilename, output):
return "download command failed"
if not os.path.exists(self.archiveFilename) and not NO_EXECUTE:
return "archive not found after download"
if not self._archiveOK():
return "archive does not have correct MD5 checksum" | [
"def",
"downloadPackageOnly",
"(",
"self",
",",
"output",
"=",
"None",
")",
":",
"scheme",
",",
"loc",
",",
"path",
",",
"query",
",",
"frag",
"=",
"urlparse",
".",
"urlsplit",
"(",
"self",
".",
"_dict",
"[",
"'Download-URL'",
"]",
")",
"path",
"=",
"urllib",
".",
"url2pathname",
"(",
"path",
")",
"filename",
"=",
"os",
".",
"path",
".",
"split",
"(",
"path",
")",
"[",
"1",
"]",
"self",
".",
"archiveFilename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_db",
".",
"preferences",
".",
"downloadDir",
",",
"filename",
")",
"if",
"not",
"self",
".",
"_archiveOK",
"(",
")",
":",
"if",
"scheme",
"==",
"'manual'",
":",
"return",
"\"Please download package manually and save as %s\"",
"%",
"self",
".",
"archiveFilename",
"downloader",
"=",
"PimpUrllibDownloader",
"(",
"None",
",",
"self",
".",
"_db",
".",
"preferences",
".",
"downloadDir",
",",
"watcher",
"=",
"self",
".",
"_db",
".",
"preferences",
".",
"watcher",
")",
"if",
"not",
"downloader",
".",
"download",
"(",
"self",
".",
"_dict",
"[",
"'Download-URL'",
"]",
",",
"self",
".",
"archiveFilename",
",",
"output",
")",
":",
"return",
"\"download command failed\"",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"archiveFilename",
")",
"and",
"not",
"NO_EXECUTE",
":",
"return",
"\"archive not found after download\"",
"if",
"not",
"self",
".",
"_archiveOK",
"(",
")",
":",
"return",
"\"archive does not have correct MD5 checksum\""
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/plat-mac/pimp.py#L663-L690 | ||
ApolloAuto/apollo-platform | 86d9dc6743b496ead18d597748ebabd34a513289 | ros/third_party/lib_x86_64/python2.7/dist-packages/rosdep2/rep3.py | python | download_targets_data | (targets_url=None) | return targets_data | Download REP 3 targets file and unmarshal from YAML.
DEPRECATED: this function is deprecated. List of targets should be obtained
from the rosdistro module.
The body of this function is an example.
:param target_url: override URL of platform targets file. Defaults
to ``REP3_TARGETS_URL``.
:raises: :exc:`DownloadFailure`
:raises: :exc:`InvalidData` If targets file does not pass cursory validation checks. | Download REP 3 targets file and unmarshal from YAML.
DEPRECATED: this function is deprecated. List of targets should be obtained
from the rosdistro module.
The body of this function is an example. | [
"Download",
"REP",
"3",
"targets",
"file",
"and",
"unmarshal",
"from",
"YAML",
".",
"DEPRECATED",
":",
"this",
"function",
"is",
"deprecated",
".",
"List",
"of",
"targets",
"should",
"be",
"obtained",
"from",
"the",
"rosdistro",
"module",
".",
"The",
"body",
"of",
"this",
"function",
"is",
"an",
"example",
"."
] | def download_targets_data(targets_url=None):
"""
Download REP 3 targets file and unmarshal from YAML.
DEPRECATED: this function is deprecated. List of targets should be obtained
from the rosdistro module.
The body of this function is an example.
:param target_url: override URL of platform targets file. Defaults
to ``REP3_TARGETS_URL``.
:raises: :exc:`DownloadFailure`
:raises: :exc:`InvalidData` If targets file does not pass cursory validation checks.
"""
warnings.warn("deprecated, use rosdistro instead", PreRep137Warning)
if targets_url is None:
targets_url = REP3_TARGETS_URL
try:
f = urlopen(targets_url, timeout=DOWNLOAD_TIMEOUT)
text = f.read()
f.close()
targets_data = yaml.safe_load(text)
except Exception as e:
raise DownloadFailure("Failed to download target platform data for gbpdistro:\n\t%s"%(str(e)))
if type(targets_data) == list:
# convert to dictionary
new_targets_data = {}
for t in targets_data:
platform = list(t.keys())[0]
new_targets_data[platform] = t[platform]
targets_data = new_targets_data
return targets_data | [
"def",
"download_targets_data",
"(",
"targets_url",
"=",
"None",
")",
":",
"warnings",
".",
"warn",
"(",
"\"deprecated, use rosdistro instead\"",
",",
"PreRep137Warning",
")",
"if",
"targets_url",
"is",
"None",
":",
"targets_url",
"=",
"REP3_TARGETS_URL",
"try",
":",
"f",
"=",
"urlopen",
"(",
"targets_url",
",",
"timeout",
"=",
"DOWNLOAD_TIMEOUT",
")",
"text",
"=",
"f",
".",
"read",
"(",
")",
"f",
".",
"close",
"(",
")",
"targets_data",
"=",
"yaml",
".",
"safe_load",
"(",
"text",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"DownloadFailure",
"(",
"\"Failed to download target platform data for gbpdistro:\\n\\t%s\"",
"%",
"(",
"str",
"(",
"e",
")",
")",
")",
"if",
"type",
"(",
"targets_data",
")",
"==",
"list",
":",
"# convert to dictionary",
"new_targets_data",
"=",
"{",
"}",
"for",
"t",
"in",
"targets_data",
":",
"platform",
"=",
"list",
"(",
"t",
".",
"keys",
"(",
")",
")",
"[",
"0",
"]",
"new_targets_data",
"[",
"platform",
"]",
"=",
"t",
"[",
"platform",
"]",
"targets_data",
"=",
"new_targets_data",
"return",
"targets_data"
] | https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_x86_64/python2.7/dist-packages/rosdep2/rep3.py#L44-L74 | |
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/window/rolling.py | python | Rolling._validate_monotonic | (self) | Validate monotonic (increasing or decreasing). | Validate monotonic (increasing or decreasing). | [
"Validate",
"monotonic",
"(",
"increasing",
"or",
"decreasing",
")",
"."
] | def _validate_monotonic(self):
"""
Validate monotonic (increasing or decreasing).
"""
if not (self._on.is_monotonic_increasing or self._on.is_monotonic_decreasing):
formatted = self.on
if self.on is None:
formatted = "index"
raise ValueError(f"{formatted} must be monotonic") | [
"def",
"_validate_monotonic",
"(",
"self",
")",
":",
"if",
"not",
"(",
"self",
".",
"_on",
".",
"is_monotonic_increasing",
"or",
"self",
".",
"_on",
".",
"is_monotonic_decreasing",
")",
":",
"formatted",
"=",
"self",
".",
"on",
"if",
"self",
".",
"on",
"is",
"None",
":",
"formatted",
"=",
"\"index\"",
"raise",
"ValueError",
"(",
"f\"{formatted} must be monotonic\"",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/window/rolling.py#L1869-L1877 | ||
eclipse/sumo | 7132a9b8b6eea734bdec38479026b4d8c4336d03 | tools/contributed/sumopy/plugins/mapmatching/mapmatching.py | python | MobikeImporter.calc_seconds | (self, t_data,
sep_date_clock = ' ', sep_date = '/', sep_clock = ':',
) | Returns time in seconds after 1/1/1970.
Time format for time data string used:
01/07/2018 06:00, SUNTRACTING 7h to meet chinese time | Returns time in seconds after 1/1/1970.
Time format for time data string used:
01/07/2018 06:00, SUNTRACTING 7h to meet chinese time | [
"Returns",
"time",
"in",
"seconds",
"after",
"1",
"/",
"1",
"/",
"1970",
".",
"Time",
"format",
"for",
"time",
"data",
"string",
"used",
":",
"01",
"/",
"07",
"/",
"2018",
"06",
":",
"00",
"SUNTRACTING",
"7h",
"to",
"meet",
"chinese",
"time"
] | def calc_seconds(self, t_data,
sep_date_clock = ' ', sep_date = '/', sep_clock = ':',
):
"""
Returns time in seconds after 1/1/1970.
Time format for time data string used:
01/07/2018 06:00, SUNTRACTING 7h to meet chinese time
"""
#
if len(t_data.split(sep_date_clock))!=2:
return -1
(date, clock) = t_data.split(sep_date_clock)
if (len(clock.split( sep_clock))==2)&(len(date.split(sep_date))==3):
(day_str,month_str,year_str) = date.split(sep_date)
#print ' year_str,month_str,day_str',year_str,month_str,day_str
(hours_str,minutes_str) = clock.split(sep_clock)
seconds_str = "0"
#print ' hours_str,minutes_str,seconds_str',hours_str,minutes_str,seconds_str
t = time.mktime(( int(year_str),int(month_str),int(day_str),
int(hours_str),int(minutes_str),int(float(seconds_str)),-1,-1,-1))-25200# -7h
#print 'calc_seconds',t
#print ' t_data'
#print ' tupel',int(year_str),int(month_str),int(day_str), int(hours_str),int(minutes_str),int(float(seconds_str)),0,0,0
return int(t)
else:
return -1 | [
"def",
"calc_seconds",
"(",
"self",
",",
"t_data",
",",
"sep_date_clock",
"=",
"' '",
",",
"sep_date",
"=",
"'/'",
",",
"sep_clock",
"=",
"':'",
",",
")",
":",
"#",
"if",
"len",
"(",
"t_data",
".",
"split",
"(",
"sep_date_clock",
")",
")",
"!=",
"2",
":",
"return",
"-",
"1",
"(",
"date",
",",
"clock",
")",
"=",
"t_data",
".",
"split",
"(",
"sep_date_clock",
")",
"if",
"(",
"len",
"(",
"clock",
".",
"split",
"(",
"sep_clock",
")",
")",
"==",
"2",
")",
"&",
"(",
"len",
"(",
"date",
".",
"split",
"(",
"sep_date",
")",
")",
"==",
"3",
")",
":",
"(",
"day_str",
",",
"month_str",
",",
"year_str",
")",
"=",
"date",
".",
"split",
"(",
"sep_date",
")",
"#print ' year_str,month_str,day_str',year_str,month_str,day_str",
"(",
"hours_str",
",",
"minutes_str",
")",
"=",
"clock",
".",
"split",
"(",
"sep_clock",
")",
"seconds_str",
"=",
"\"0\"",
"#print ' hours_str,minutes_str,seconds_str',hours_str,minutes_str,seconds_str",
"t",
"=",
"time",
".",
"mktime",
"(",
"(",
"int",
"(",
"year_str",
")",
",",
"int",
"(",
"month_str",
")",
",",
"int",
"(",
"day_str",
")",
",",
"int",
"(",
"hours_str",
")",
",",
"int",
"(",
"minutes_str",
")",
",",
"int",
"(",
"float",
"(",
"seconds_str",
")",
")",
",",
"-",
"1",
",",
"-",
"1",
",",
"-",
"1",
")",
")",
"-",
"25200",
"# -7h",
"#print 'calc_seconds',t",
"#print ' t_data'",
"#print ' tupel',int(year_str),int(month_str),int(day_str), int(hours_str),int(minutes_str),int(float(seconds_str)),0,0,0",
"return",
"int",
"(",
"t",
")",
"else",
":",
"return",
"-",
"1"
] | https://github.com/eclipse/sumo/blob/7132a9b8b6eea734bdec38479026b4d8c4336d03/tools/contributed/sumopy/plugins/mapmatching/mapmatching.py#L4673-L4703 | ||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/tarfile.py | python | TarInfo.__init__ | (self, name="") | Construct a TarInfo object. name is the optional name
of the member. | Construct a TarInfo object. name is the optional name
of the member. | [
"Construct",
"a",
"TarInfo",
"object",
".",
"name",
"is",
"the",
"optional",
"name",
"of",
"the",
"member",
"."
] | def __init__(self, name=""):
"""Construct a TarInfo object. name is the optional name
of the member.
"""
self.name = name # member name
self.mode = 0o644 # file permissions
self.uid = 0 # user id
self.gid = 0 # group id
self.size = 0 # file size
self.mtime = 0 # modification time
self.chksum = 0 # header checksum
self.type = REGTYPE # member type
self.linkname = "" # link name
self.uname = "" # user name
self.gname = "" # group name
self.devmajor = 0 # device major number
self.devminor = 0 # device minor number
self.offset = 0 # the tar header starts here
self.offset_data = 0 # the file's data starts here
self.sparse = None # sparse member information
self.pax_headers = {} | [
"def",
"__init__",
"(",
"self",
",",
"name",
"=",
"\"\"",
")",
":",
"self",
".",
"name",
"=",
"name",
"# member name",
"self",
".",
"mode",
"=",
"0o644",
"# file permissions",
"self",
".",
"uid",
"=",
"0",
"# user id",
"self",
".",
"gid",
"=",
"0",
"# group id",
"self",
".",
"size",
"=",
"0",
"# file size",
"self",
".",
"mtime",
"=",
"0",
"# modification time",
"self",
".",
"chksum",
"=",
"0",
"# header checksum",
"self",
".",
"type",
"=",
"REGTYPE",
"# member type",
"self",
".",
"linkname",
"=",
"\"\"",
"# link name",
"self",
".",
"uname",
"=",
"\"\"",
"# user name",
"self",
".",
"gname",
"=",
"\"\"",
"# group name",
"self",
".",
"devmajor",
"=",
"0",
"# device major number",
"self",
".",
"devminor",
"=",
"0",
"# device minor number",
"self",
".",
"offset",
"=",
"0",
"# the tar header starts here",
"self",
".",
"offset_data",
"=",
"0",
"# the file's data starts here",
"self",
".",
"sparse",
"=",
"None",
"# sparse member information",
"self",
".",
"pax_headers",
"=",
"{",
"}"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/tarfile.py#L739-L761 | ||
google/syzygy | 8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5 | third_party/numpy/files/numpy/polynomial/polynomial.py | python | polydiv | (c1, c2) | Divide one polynomial by another.
Returns the quotient-with-remainder of two polynomials `c1` / `c2`.
The arguments are sequences of coefficients, from lowest order term
to highest, e.g., [1,2,3] represents ``1 + 2*x + 3*x**2``.
Parameters
----------
c1, c2 : array_like
1-d arrays of polynomial coefficients ordered from low to high.
Returns
-------
[quo, rem] : ndarrays
Of coefficient series representing the quotient and remainder.
See Also
--------
polyadd, polysub, polymul, polypow
Examples
--------
>>> import numpy.polynomial as P
>>> c1 = (1,2,3)
>>> c2 = (3,2,1)
>>> P.polydiv(c1,c2)
(array([ 3.]), array([-8., -4.]))
>>> P.polydiv(c2,c1)
(array([ 0.33333333]), array([ 2.66666667, 1.33333333])) | Divide one polynomial by another. | [
"Divide",
"one",
"polynomial",
"by",
"another",
"."
] | def polydiv(c1, c2):
"""
Divide one polynomial by another.
Returns the quotient-with-remainder of two polynomials `c1` / `c2`.
The arguments are sequences of coefficients, from lowest order term
to highest, e.g., [1,2,3] represents ``1 + 2*x + 3*x**2``.
Parameters
----------
c1, c2 : array_like
1-d arrays of polynomial coefficients ordered from low to high.
Returns
-------
[quo, rem] : ndarrays
Of coefficient series representing the quotient and remainder.
See Also
--------
polyadd, polysub, polymul, polypow
Examples
--------
>>> import numpy.polynomial as P
>>> c1 = (1,2,3)
>>> c2 = (3,2,1)
>>> P.polydiv(c1,c2)
(array([ 3.]), array([-8., -4.]))
>>> P.polydiv(c2,c1)
(array([ 0.33333333]), array([ 2.66666667, 1.33333333]))
"""
# c1, c2 are trimmed copies
[c1, c2] = pu.as_series([c1, c2])
if c2[-1] == 0 :
raise ZeroDivisionError()
len1 = len(c1)
len2 = len(c2)
if len2 == 1 :
return c1/c2[-1], c1[:1]*0
elif len1 < len2 :
return c1[:1]*0, c1
else :
dlen = len1 - len2
scl = c2[-1]
c2 = c2[:-1]/scl
i = dlen
j = len1 - 1
while i >= 0 :
c1[i:j] -= c2*c1[j]
i -= 1
j -= 1
return c1[j+1:]/scl, pu.trimseq(c1[:j+1]) | [
"def",
"polydiv",
"(",
"c1",
",",
"c2",
")",
":",
"# c1, c2 are trimmed copies",
"[",
"c1",
",",
"c2",
"]",
"=",
"pu",
".",
"as_series",
"(",
"[",
"c1",
",",
"c2",
"]",
")",
"if",
"c2",
"[",
"-",
"1",
"]",
"==",
"0",
":",
"raise",
"ZeroDivisionError",
"(",
")",
"len1",
"=",
"len",
"(",
"c1",
")",
"len2",
"=",
"len",
"(",
"c2",
")",
"if",
"len2",
"==",
"1",
":",
"return",
"c1",
"/",
"c2",
"[",
"-",
"1",
"]",
",",
"c1",
"[",
":",
"1",
"]",
"*",
"0",
"elif",
"len1",
"<",
"len2",
":",
"return",
"c1",
"[",
":",
"1",
"]",
"*",
"0",
",",
"c1",
"else",
":",
"dlen",
"=",
"len1",
"-",
"len2",
"scl",
"=",
"c2",
"[",
"-",
"1",
"]",
"c2",
"=",
"c2",
"[",
":",
"-",
"1",
"]",
"/",
"scl",
"i",
"=",
"dlen",
"j",
"=",
"len1",
"-",
"1",
"while",
"i",
">=",
"0",
":",
"c1",
"[",
"i",
":",
"j",
"]",
"-=",
"c2",
"*",
"c1",
"[",
"j",
"]",
"i",
"-=",
"1",
"j",
"-=",
"1",
"return",
"c1",
"[",
"j",
"+",
"1",
":",
"]",
"/",
"scl",
",",
"pu",
".",
"trimseq",
"(",
"c1",
"[",
":",
"j",
"+",
"1",
"]",
")"
] | https://github.com/google/syzygy/blob/8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5/third_party/numpy/files/numpy/polynomial/polynomial.py#L341-L395 | ||
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | src/python/turicreate/toolkits/_mps_utils.py | python | MpsFloatArray.asnumpy | (self) | return _numpy_array_from_ctypes(data_ptr, shape_ptr, dim) | Copy the data from TCMPS into a new numpy ndarray | Copy the data from TCMPS into a new numpy ndarray | [
"Copy",
"the",
"data",
"from",
"TCMPS",
"into",
"a",
"new",
"numpy",
"ndarray"
] | def asnumpy(self):
"""Copy the data from TCMPS into a new numpy ndarray"""
# Create C variables that will serve as out parameters for TCMPS.
data_ptr = _ctypes.POINTER(_ctypes.c_float)() # float* data_ptr
shape_ptr = _ctypes.POINTER(_ctypes.c_size_t)() # size_t* shape_ptr
dim = _ctypes.c_size_t() # size_t dim
# Obtain pointers into memory owned by the C++ object self.handle.
# Note that this may trigger synchronization with another thread
# producing the data.
status_code = self._LIB.TCMPSReadFloatArray(
self.handle,
_ctypes.byref(data_ptr),
_ctypes.byref(shape_ptr),
_ctypes.byref(dim),
)
assert status_code == 0, "Error calling TCMPSReadFloatArray"
return _numpy_array_from_ctypes(data_ptr, shape_ptr, dim) | [
"def",
"asnumpy",
"(",
"self",
")",
":",
"# Create C variables that will serve as out parameters for TCMPS.",
"data_ptr",
"=",
"_ctypes",
".",
"POINTER",
"(",
"_ctypes",
".",
"c_float",
")",
"(",
")",
"# float* data_ptr",
"shape_ptr",
"=",
"_ctypes",
".",
"POINTER",
"(",
"_ctypes",
".",
"c_size_t",
")",
"(",
")",
"# size_t* shape_ptr",
"dim",
"=",
"_ctypes",
".",
"c_size_t",
"(",
")",
"# size_t dim",
"# Obtain pointers into memory owned by the C++ object self.handle.",
"# Note that this may trigger synchronization with another thread",
"# producing the data.",
"status_code",
"=",
"self",
".",
"_LIB",
".",
"TCMPSReadFloatArray",
"(",
"self",
".",
"handle",
",",
"_ctypes",
".",
"byref",
"(",
"data_ptr",
")",
",",
"_ctypes",
".",
"byref",
"(",
"shape_ptr",
")",
",",
"_ctypes",
".",
"byref",
"(",
"dim",
")",
",",
")",
"assert",
"status_code",
"==",
"0",
",",
"\"Error calling TCMPSReadFloatArray\"",
"return",
"_numpy_array_from_ctypes",
"(",
"data_ptr",
",",
"shape_ptr",
",",
"dim",
")"
] | https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/python/turicreate/toolkits/_mps_utils.py#L276-L295 | |
PaddlePaddle/Paddle | 1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c | python/paddle/fluid/layers/nn.py | python | hard_sigmoid | (x, slope=0.2, offset=0.5, name=None) | return out | ${comment}
Parameters:
x (${x_type}): ${x_comment}
slope (float, optional): ${slope_comment}
offset (float, optional): ${offset_comment}
name (str, optional): The default value is None. Normally there is no
need for user to set this property. For more information, please
refer to :ref:`api_guide_Name`
Returns:
${out_type}: ${out_comment}
Examples:
.. code-block:: python
import paddle.fluid as fluid
import paddle
paddle.enable_static()
data = fluid.layers.fill_constant(shape=[3, 2], value=0.5, dtype='float32') # [[0.5, 0.5], [0.5, 0.5], [0.5, 0.5]]
result = fluid.layers.hard_sigmoid(data) # [[0.6, 0.6], [0.6, 0.6], [0.6, 0.6]] | ${comment}
Parameters:
x (${x_type}): ${x_comment}
slope (float, optional): ${slope_comment}
offset (float, optional): ${offset_comment}
name (str, optional): The default value is None. Normally there is no
need for user to set this property. For more information, please
refer to :ref:`api_guide_Name` | [
"$",
"{",
"comment",
"}",
"Parameters",
":",
"x",
"(",
"$",
"{",
"x_type",
"}",
")",
":",
"$",
"{",
"x_comment",
"}",
"slope",
"(",
"float",
"optional",
")",
":",
"$",
"{",
"slope_comment",
"}",
"offset",
"(",
"float",
"optional",
")",
":",
"$",
"{",
"offset_comment",
"}",
"name",
"(",
"str",
"optional",
")",
":",
"The",
"default",
"value",
"is",
"None",
".",
"Normally",
"there",
"is",
"no",
"need",
"for",
"user",
"to",
"set",
"this",
"property",
".",
"For",
"more",
"information",
"please",
"refer",
"to",
":",
"ref",
":",
"api_guide_Name"
] | def hard_sigmoid(x, slope=0.2, offset=0.5, name=None):
"""
${comment}
Parameters:
x (${x_type}): ${x_comment}
slope (float, optional): ${slope_comment}
offset (float, optional): ${offset_comment}
name (str, optional): The default value is None. Normally there is no
need for user to set this property. For more information, please
refer to :ref:`api_guide_Name`
Returns:
${out_type}: ${out_comment}
Examples:
.. code-block:: python
import paddle.fluid as fluid
import paddle
paddle.enable_static()
data = fluid.layers.fill_constant(shape=[3, 2], value=0.5, dtype='float32') # [[0.5, 0.5], [0.5, 0.5], [0.5, 0.5]]
result = fluid.layers.hard_sigmoid(data) # [[0.6, 0.6], [0.6, 0.6], [0.6, 0.6]]
"""
if in_dygraph_mode():
return _C_ops.hard_sigmoid(x, 'slope', slope, 'offset', offset)
check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64'],
'hard_sigmoid')
helper = LayerHelper('hard_sigmoid', **locals())
out = helper.create_variable_for_type_inference(dtype=x.dtype)
helper.append_op(
type='hard_sigmoid',
inputs={'X': x},
outputs={'Out': out},
attrs={'slope': slope,
'offset': offset})
return out | [
"def",
"hard_sigmoid",
"(",
"x",
",",
"slope",
"=",
"0.2",
",",
"offset",
"=",
"0.5",
",",
"name",
"=",
"None",
")",
":",
"if",
"in_dygraph_mode",
"(",
")",
":",
"return",
"_C_ops",
".",
"hard_sigmoid",
"(",
"x",
",",
"'slope'",
",",
"slope",
",",
"'offset'",
",",
"offset",
")",
"check_variable_and_dtype",
"(",
"x",
",",
"'x'",
",",
"[",
"'float16'",
",",
"'float32'",
",",
"'float64'",
"]",
",",
"'hard_sigmoid'",
")",
"helper",
"=",
"LayerHelper",
"(",
"'hard_sigmoid'",
",",
"*",
"*",
"locals",
"(",
")",
")",
"out",
"=",
"helper",
".",
"create_variable_for_type_inference",
"(",
"dtype",
"=",
"x",
".",
"dtype",
")",
"helper",
".",
"append_op",
"(",
"type",
"=",
"'hard_sigmoid'",
",",
"inputs",
"=",
"{",
"'X'",
":",
"x",
"}",
",",
"outputs",
"=",
"{",
"'Out'",
":",
"out",
"}",
",",
"attrs",
"=",
"{",
"'slope'",
":",
"slope",
",",
"'offset'",
":",
"offset",
"}",
")",
"return",
"out"
] | https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/fluid/layers/nn.py#L9729-L9768 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.