text stringlengths 89 104k | code_tokens list | avg_line_len float64 7.91 980 | score float64 0 630 |
|---|---|---|---|
def check_integrity(sakefile, settings):
"""
Checks the format of the sakefile dictionary
to ensure it conforms to specification
Args:
A dictionary that is the parsed Sakefile (from sake.py)
The setting dictionary (for print functions)
Returns:
True if the Sakefile is conformant
False if not
"""
sprint = settings["sprint"]
error = settings["error"]
sprint("Call to check_integrity issued", level="verbose")
if not sakefile:
error("Sakefile is empty")
return False
# checking for duplicate targets
if len(sakefile.keys()) != len(set(sakefile.keys())):
error("Sakefile contains duplicate targets")
return False
for target in sakefile:
if target == "all":
if not check_target_integrity(target, sakefile["all"], all=True):
error("Failed to accept target 'all'")
return False
continue
if "formula" not in sakefile[target]:
if not check_target_integrity(target, sakefile[target],
meta=True):
errmes = "Failed to accept meta-target '{}'".format(target)
error(errmes)
return False
for atom_target in sakefile[target]:
if atom_target == "help":
continue
if not check_target_integrity(atom_target,
sakefile[target][atom_target],
parent=target):
errmes = "Failed to accept target '{}'\n".format(
atom_target)
error(errmes)
return False
continue
if not check_target_integrity(target, sakefile[target]):
errmes = "Failed to accept target '{}'\n".format(target)
error(errmes)
return False
return True | [
"def",
"check_integrity",
"(",
"sakefile",
",",
"settings",
")",
":",
"sprint",
"=",
"settings",
"[",
"\"sprint\"",
"]",
"error",
"=",
"settings",
"[",
"\"error\"",
"]",
"sprint",
"(",
"\"Call to check_integrity issued\"",
",",
"level",
"=",
"\"verbose\"",
")",
"if",
"not",
"sakefile",
":",
"error",
"(",
"\"Sakefile is empty\"",
")",
"return",
"False",
"# checking for duplicate targets",
"if",
"len",
"(",
"sakefile",
".",
"keys",
"(",
")",
")",
"!=",
"len",
"(",
"set",
"(",
"sakefile",
".",
"keys",
"(",
")",
")",
")",
":",
"error",
"(",
"\"Sakefile contains duplicate targets\"",
")",
"return",
"False",
"for",
"target",
"in",
"sakefile",
":",
"if",
"target",
"==",
"\"all\"",
":",
"if",
"not",
"check_target_integrity",
"(",
"target",
",",
"sakefile",
"[",
"\"all\"",
"]",
",",
"all",
"=",
"True",
")",
":",
"error",
"(",
"\"Failed to accept target 'all'\"",
")",
"return",
"False",
"continue",
"if",
"\"formula\"",
"not",
"in",
"sakefile",
"[",
"target",
"]",
":",
"if",
"not",
"check_target_integrity",
"(",
"target",
",",
"sakefile",
"[",
"target",
"]",
",",
"meta",
"=",
"True",
")",
":",
"errmes",
"=",
"\"Failed to accept meta-target '{}'\"",
".",
"format",
"(",
"target",
")",
"error",
"(",
"errmes",
")",
"return",
"False",
"for",
"atom_target",
"in",
"sakefile",
"[",
"target",
"]",
":",
"if",
"atom_target",
"==",
"\"help\"",
":",
"continue",
"if",
"not",
"check_target_integrity",
"(",
"atom_target",
",",
"sakefile",
"[",
"target",
"]",
"[",
"atom_target",
"]",
",",
"parent",
"=",
"target",
")",
":",
"errmes",
"=",
"\"Failed to accept target '{}'\\n\"",
".",
"format",
"(",
"atom_target",
")",
"error",
"(",
"errmes",
")",
"return",
"False",
"continue",
"if",
"not",
"check_target_integrity",
"(",
"target",
",",
"sakefile",
"[",
"target",
"]",
")",
":",
"errmes",
"=",
"\"Failed to accept target '{}'\\n\"",
".",
"format",
"(",
"target",
")",
"error",
"(",
"errmes",
")",
"return",
"False",
"return",
"True"
] | 39.4 | 16.44 |
def find_binary(self, binary):
"""
Scan and return the first path to a binary that we can find
"""
if os.path.exists(binary):
return binary
# Extract out the filename if we were given a full path
binary_name = os.path.basename(binary)
# Gather $PATH
search_paths = os.environ['PATH'].split(':')
# Extra paths to scan...
default_paths = [
'/usr/bin',
'/bin'
'/usr/local/bin',
'/usr/sbin',
'/sbin'
'/usr/local/sbin',
]
for path in default_paths:
if path not in search_paths:
search_paths.append(path)
for path in search_paths:
if os.path.isdir(path):
filename = os.path.join(path, binary_name)
if os.path.exists(filename):
return filename
return binary | [
"def",
"find_binary",
"(",
"self",
",",
"binary",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"binary",
")",
":",
"return",
"binary",
"# Extract out the filename if we were given a full path",
"binary_name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"binary",
")",
"# Gather $PATH",
"search_paths",
"=",
"os",
".",
"environ",
"[",
"'PATH'",
"]",
".",
"split",
"(",
"':'",
")",
"# Extra paths to scan...",
"default_paths",
"=",
"[",
"'/usr/bin'",
",",
"'/bin'",
"'/usr/local/bin'",
",",
"'/usr/sbin'",
",",
"'/sbin'",
"'/usr/local/sbin'",
",",
"]",
"for",
"path",
"in",
"default_paths",
":",
"if",
"path",
"not",
"in",
"search_paths",
":",
"search_paths",
".",
"append",
"(",
"path",
")",
"for",
"path",
"in",
"search_paths",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"filename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"binary_name",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"filename",
")",
":",
"return",
"filename",
"return",
"binary"
] | 26.794118 | 16.852941 |
def get_as_local_path(path, overwrite, progress=0,
httpuser=None, httppassword=None):
"""
Automatically handle local and remote URLs, files and directories
path: Either a local directory, file or remote URL. If a URL is given
it will be fetched. If this is a zip it will be automatically
expanded by default.
overwrite: Whether to overwrite an existing file:
'error': Raise an exception
'backup: Renamed the old file and use the new one
'keep': Keep the old file, don't overwrite or raise an exception
progress: Number of progress dots, default 0 (don't print)
httpuser, httppass: Credentials for HTTP authentication
return: A tuple (type, localpath)
type:
'file': localpath is the path to a local file
'directory': localpath is the path to a local directory
'unzipped': localpath is the path to a local unzipped directory
"""
m = re.match('([A-Za-z]+)://', path)
if m:
# url_open handles multiple protocols so don't bother validating
log.debug('Detected URL protocol: %s', m.group(1))
# URL should use / as the pathsep
localpath = path.split('/')[-1]
if not localpath:
raise FileException(
'Remote path appears to be a directory', path)
if os.path.exists(localpath):
if overwrite == 'error':
raise FileException('File already exists', localpath)
elif overwrite == 'keep':
log.info('Keeping existing %s', localpath)
elif overwrite == 'backup':
rename_backup(localpath)
download(path, localpath, progress, httpuser=httpuser,
httppassword=httppassword)
else:
raise Exception('Invalid overwrite flag: %s' % overwrite)
else:
download(path, localpath, progress, httpuser=httpuser,
httppassword=httppassword)
else:
localpath = path
log.debug("Local path: %s", localpath)
if os.path.isdir(localpath):
return 'directory', localpath
if os.path.exists(localpath):
return 'file', localpath
# Somethings gone very wrong
raise Exception('Local path does not exist: %s' % localpath) | [
"def",
"get_as_local_path",
"(",
"path",
",",
"overwrite",
",",
"progress",
"=",
"0",
",",
"httpuser",
"=",
"None",
",",
"httppassword",
"=",
"None",
")",
":",
"m",
"=",
"re",
".",
"match",
"(",
"'([A-Za-z]+)://'",
",",
"path",
")",
"if",
"m",
":",
"# url_open handles multiple protocols so don't bother validating",
"log",
".",
"debug",
"(",
"'Detected URL protocol: %s'",
",",
"m",
".",
"group",
"(",
"1",
")",
")",
"# URL should use / as the pathsep",
"localpath",
"=",
"path",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
"if",
"not",
"localpath",
":",
"raise",
"FileException",
"(",
"'Remote path appears to be a directory'",
",",
"path",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"localpath",
")",
":",
"if",
"overwrite",
"==",
"'error'",
":",
"raise",
"FileException",
"(",
"'File already exists'",
",",
"localpath",
")",
"elif",
"overwrite",
"==",
"'keep'",
":",
"log",
".",
"info",
"(",
"'Keeping existing %s'",
",",
"localpath",
")",
"elif",
"overwrite",
"==",
"'backup'",
":",
"rename_backup",
"(",
"localpath",
")",
"download",
"(",
"path",
",",
"localpath",
",",
"progress",
",",
"httpuser",
"=",
"httpuser",
",",
"httppassword",
"=",
"httppassword",
")",
"else",
":",
"raise",
"Exception",
"(",
"'Invalid overwrite flag: %s'",
"%",
"overwrite",
")",
"else",
":",
"download",
"(",
"path",
",",
"localpath",
",",
"progress",
",",
"httpuser",
"=",
"httpuser",
",",
"httppassword",
"=",
"httppassword",
")",
"else",
":",
"localpath",
"=",
"path",
"log",
".",
"debug",
"(",
"\"Local path: %s\"",
",",
"localpath",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"localpath",
")",
":",
"return",
"'directory'",
",",
"localpath",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"localpath",
")",
":",
"return",
"'file'",
",",
"localpath",
"# Somethings gone very wrong",
"raise",
"Exception",
"(",
"'Local path does not exist: %s'",
"%",
"localpath",
")"
] | 40.321429 | 17.571429 |
def from_srt(cls, file):
"""Reads captions from a file in SubRip format."""
parser = SRTParser().read(file)
return cls(file=file, captions=parser.captions) | [
"def",
"from_srt",
"(",
"cls",
",",
"file",
")",
":",
"parser",
"=",
"SRTParser",
"(",
")",
".",
"read",
"(",
"file",
")",
"return",
"cls",
"(",
"file",
"=",
"file",
",",
"captions",
"=",
"parser",
".",
"captions",
")"
] | 44 | 8 |
def __copyfile(source, destination):
"""Copy data and mode bits ("cp source destination").
The destination may be a directory.
Args:
source (str): Source file (file to copy).
destination (str): Destination file or directory (where to copy).
Returns:
bool: True if the operation is successful, False otherwise.
"""
logger.info("copyfile: %s -> %s" % (source, destination))
try:
__create_destdir(destination)
shutil.copy(source, destination)
return True
except Exception as e:
logger.error(
"copyfile: %s -> %s failed! Error: %s", source, destination, e
)
return False | [
"def",
"__copyfile",
"(",
"source",
",",
"destination",
")",
":",
"logger",
".",
"info",
"(",
"\"copyfile: %s -> %s\"",
"%",
"(",
"source",
",",
"destination",
")",
")",
"try",
":",
"__create_destdir",
"(",
"destination",
")",
"shutil",
".",
"copy",
"(",
"source",
",",
"destination",
")",
"return",
"True",
"except",
"Exception",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"\"copyfile: %s -> %s failed! Error: %s\"",
",",
"source",
",",
"destination",
",",
"e",
")",
"return",
"False"
] | 30.181818 | 20.363636 |
def _geodetic_to_cartesian(cls, lat, lon, alt):
"""Conversion from latitude, longitude and altitude coordinates to
cartesian with respect to an ellipsoid
Args:
lat (float): Latitude in radians
lon (float): Longitude in radians
alt (float): Altitude to sea level in meters
Return:
numpy.array: 3D element (in meters)
"""
C = Earth.r / np.sqrt(1 - (Earth.e * np.sin(lat)) ** 2)
S = Earth.r * (1 - Earth.e ** 2) / np.sqrt(1 - (Earth.e * np.sin(lat)) ** 2)
r_d = (C + alt) * np.cos(lat)
r_k = (S + alt) * np.sin(lat)
norm = np.sqrt(r_d ** 2 + r_k ** 2)
return norm * np.array([
np.cos(lat) * np.cos(lon),
np.cos(lat) * np.sin(lon),
np.sin(lat)
]) | [
"def",
"_geodetic_to_cartesian",
"(",
"cls",
",",
"lat",
",",
"lon",
",",
"alt",
")",
":",
"C",
"=",
"Earth",
".",
"r",
"/",
"np",
".",
"sqrt",
"(",
"1",
"-",
"(",
"Earth",
".",
"e",
"*",
"np",
".",
"sin",
"(",
"lat",
")",
")",
"**",
"2",
")",
"S",
"=",
"Earth",
".",
"r",
"*",
"(",
"1",
"-",
"Earth",
".",
"e",
"**",
"2",
")",
"/",
"np",
".",
"sqrt",
"(",
"1",
"-",
"(",
"Earth",
".",
"e",
"*",
"np",
".",
"sin",
"(",
"lat",
")",
")",
"**",
"2",
")",
"r_d",
"=",
"(",
"C",
"+",
"alt",
")",
"*",
"np",
".",
"cos",
"(",
"lat",
")",
"r_k",
"=",
"(",
"S",
"+",
"alt",
")",
"*",
"np",
".",
"sin",
"(",
"lat",
")",
"norm",
"=",
"np",
".",
"sqrt",
"(",
"r_d",
"**",
"2",
"+",
"r_k",
"**",
"2",
")",
"return",
"norm",
"*",
"np",
".",
"array",
"(",
"[",
"np",
".",
"cos",
"(",
"lat",
")",
"*",
"np",
".",
"cos",
"(",
"lon",
")",
",",
"np",
".",
"cos",
"(",
"lat",
")",
"*",
"np",
".",
"sin",
"(",
"lon",
")",
",",
"np",
".",
"sin",
"(",
"lat",
")",
"]",
")"
] | 34.913043 | 15.304348 |
def _can_compute(self, _id, persistence):
"""
Return true if this feature stored, or is unstored, but can be computed
from stored dependencies
"""
if self.store and self._stored(_id, persistence):
return True
if self.is_root:
return False
return all(
[n._can_compute(_id, persistence) for n in self.dependencies]) | [
"def",
"_can_compute",
"(",
"self",
",",
"_id",
",",
"persistence",
")",
":",
"if",
"self",
".",
"store",
"and",
"self",
".",
"_stored",
"(",
"_id",
",",
"persistence",
")",
":",
"return",
"True",
"if",
"self",
".",
"is_root",
":",
"return",
"False",
"return",
"all",
"(",
"[",
"n",
".",
"_can_compute",
"(",
"_id",
",",
"persistence",
")",
"for",
"n",
"in",
"self",
".",
"dependencies",
"]",
")"
] | 30.384615 | 19.153846 |
def get_plugin(self, method):
"""
Return plugin object if CLI option is activated and method exists
@param method: name of plugin's method we're calling
@type method: string
@returns: list of plugins with `method`
"""
all_plugins = []
for entry_point in pkg_resources.iter_entry_points('yolk.plugins'):
plugin_obj = entry_point.load()
plugin = plugin_obj()
plugin.configure(self.options, None)
if plugin.enabled:
if not hasattr(plugin, method):
self.logger.warn("Error: plugin has no method: %s" % method)
plugin = None
else:
all_plugins.append(plugin)
return all_plugins | [
"def",
"get_plugin",
"(",
"self",
",",
"method",
")",
":",
"all_plugins",
"=",
"[",
"]",
"for",
"entry_point",
"in",
"pkg_resources",
".",
"iter_entry_points",
"(",
"'yolk.plugins'",
")",
":",
"plugin_obj",
"=",
"entry_point",
".",
"load",
"(",
")",
"plugin",
"=",
"plugin_obj",
"(",
")",
"plugin",
".",
"configure",
"(",
"self",
".",
"options",
",",
"None",
")",
"if",
"plugin",
".",
"enabled",
":",
"if",
"not",
"hasattr",
"(",
"plugin",
",",
"method",
")",
":",
"self",
".",
"logger",
".",
"warn",
"(",
"\"Error: plugin has no method: %s\"",
"%",
"method",
")",
"plugin",
"=",
"None",
"else",
":",
"all_plugins",
".",
"append",
"(",
"plugin",
")",
"return",
"all_plugins"
] | 34.772727 | 17.045455 |
def get_constituents(self, index_ticker, date=None, only_list=False):
""" Get a list of all constituents of a given index.
index_ticker - Datastream ticker for index
date - date for which list should be retrieved (if None then
list of present constituents is retrieved)
only_list - request only list of symbols. By default the method
retrieves many extra fields with information (various
mnemonics and codes). This might pose some problems
for large indices like Russel-3000. If only_list=True,
then only the list of symbols and names are retrieved.
"""
if date is not None:
str_date = pd.to_datetime(date).strftime('%m%y')
else:
str_date = ''
# Note: ~XREF is equal to the following large request
# ~REP~=DSCD,EXMNEM,GEOG,GEOGC,IBTKR,INDC,INDG,INDM,INDX,INDXEG,INDXFS,INDXL,
# INDXS,ISIN,ISINID,LOC,MNEM,NAME,SECD,TYPE
fields = '~REP~=NAME' if only_list else '~XREF'
query = 'L' + index_ticker + str_date + fields
raw = self.request(query)
res, metadata = self.parse_record_static(raw)
return res | [
"def",
"get_constituents",
"(",
"self",
",",
"index_ticker",
",",
"date",
"=",
"None",
",",
"only_list",
"=",
"False",
")",
":",
"if",
"date",
"is",
"not",
"None",
":",
"str_date",
"=",
"pd",
".",
"to_datetime",
"(",
"date",
")",
".",
"strftime",
"(",
"'%m%y'",
")",
"else",
":",
"str_date",
"=",
"''",
"# Note: ~XREF is equal to the following large request",
"# ~REP~=DSCD,EXMNEM,GEOG,GEOGC,IBTKR,INDC,INDG,INDM,INDX,INDXEG,INDXFS,INDXL,",
"# INDXS,ISIN,ISINID,LOC,MNEM,NAME,SECD,TYPE",
"fields",
"=",
"'~REP~=NAME'",
"if",
"only_list",
"else",
"'~XREF'",
"query",
"=",
"'L'",
"+",
"index_ticker",
"+",
"str_date",
"+",
"fields",
"raw",
"=",
"self",
".",
"request",
"(",
"query",
")",
"res",
",",
"metadata",
"=",
"self",
".",
"parse_record_static",
"(",
"raw",
")",
"return",
"res"
] | 51.32 | 24.72 |
def _init_client(self, from_archive=False):
"""Init client"""
return KitsuneClient(self.url, self.archive, from_archive) | [
"def",
"_init_client",
"(",
"self",
",",
"from_archive",
"=",
"False",
")",
":",
"return",
"KitsuneClient",
"(",
"self",
".",
"url",
",",
"self",
".",
"archive",
",",
"from_archive",
")"
] | 33.5 | 17.25 |
def ColorWithLightness(self, lightness):
'''Create a new instance based on this one with a new lightness value.
Parameters:
:lightness:
The lightness of the new color [0...1].
Returns:
A grapefruit.Color instance.
>>> Color.NewFromHsl(30, 1, 0.5).ColorWithLightness(0.25)
(0.5, 0.25, 0.0, 1.0)
>>> Color.NewFromHsl(30, 1, 0.5).ColorWithLightness(0.25).hsl
(30, 1, 0.25)
'''
h, s, l = self.__hsl
return Color((h, s, lightness), 'hsl', self.__a, self.__wref) | [
"def",
"ColorWithLightness",
"(",
"self",
",",
"lightness",
")",
":",
"h",
",",
"s",
",",
"l",
"=",
"self",
".",
"__hsl",
"return",
"Color",
"(",
"(",
"h",
",",
"s",
",",
"lightness",
")",
",",
"'hsl'",
",",
"self",
".",
"__a",
",",
"self",
".",
"__wref",
")"
] | 27.944444 | 24.5 |
def paintEvent(self, event):
"""
Overloads the paint event to handle painting pointers for the popup \
mode.
:param event | <QPaintEvent>
"""
# use the base technique for the dialog mode
if self.currentMode() == XPopupWidget.Mode.Dialog:
super(XPopupWidget, self).paintEvent(event)
return
# setup the coloring options
palette = self.palette()
with XPainter(self) as painter:
pen = QPen(palette.color(palette.Window).darker(130))
pen.setWidthF(1.75)
painter.setPen(pen)
painter.setRenderHint(painter.Antialiasing)
painter.setBrush(palette.color(palette.Window))
painter.drawPath(self.borderPath()) | [
"def",
"paintEvent",
"(",
"self",
",",
"event",
")",
":",
"# use the base technique for the dialog mode\r",
"if",
"self",
".",
"currentMode",
"(",
")",
"==",
"XPopupWidget",
".",
"Mode",
".",
"Dialog",
":",
"super",
"(",
"XPopupWidget",
",",
"self",
")",
".",
"paintEvent",
"(",
"event",
")",
"return",
"# setup the coloring options\r",
"palette",
"=",
"self",
".",
"palette",
"(",
")",
"with",
"XPainter",
"(",
"self",
")",
"as",
"painter",
":",
"pen",
"=",
"QPen",
"(",
"palette",
".",
"color",
"(",
"palette",
".",
"Window",
")",
".",
"darker",
"(",
"130",
")",
")",
"pen",
".",
"setWidthF",
"(",
"1.75",
")",
"painter",
".",
"setPen",
"(",
"pen",
")",
"painter",
".",
"setRenderHint",
"(",
"painter",
".",
"Antialiasing",
")",
"painter",
".",
"setBrush",
"(",
"palette",
".",
"color",
"(",
"palette",
".",
"Window",
")",
")",
"painter",
".",
"drawPath",
"(",
"self",
".",
"borderPath",
"(",
")",
")"
] | 36.545455 | 15.181818 |
def _receive_all(socket, num_bytes):
"""Reads `num_bytes` bytes from the specified socket.
:param socket: open socket instance
:param num_bytes: number of bytes to read
:return: received data
"""
buffer = ''
buffer_size = 0
bytes_left = num_bytes
while buffer_size < num_bytes:
data = socket.recv(bytes_left)
delta = len(data)
buffer_size += delta
bytes_left -= delta
buffer += data
return buffer | [
"def",
"_receive_all",
"(",
"socket",
",",
"num_bytes",
")",
":",
"buffer",
"=",
"''",
"buffer_size",
"=",
"0",
"bytes_left",
"=",
"num_bytes",
"while",
"buffer_size",
"<",
"num_bytes",
":",
"data",
"=",
"socket",
".",
"recv",
"(",
"bytes_left",
")",
"delta",
"=",
"len",
"(",
"data",
")",
"buffer_size",
"+=",
"delta",
"bytes_left",
"-=",
"delta",
"buffer",
"+=",
"data",
"return",
"buffer"
] | 24.263158 | 15.421053 |
def MGMT_ACTIVE_SET(self, sAddr='', xCommissioningSessionId=None, listActiveTimestamp=None, listChannelMask=None, xExtendedPanId=None,
sNetworkName=None, sPSKc=None, listSecurityPolicy=None, xChannel=None, sMeshLocalPrefix=None, xMasterKey=None,
xPanId=None, xTmfPort=None, xSteeringData=None, xBorderRouterLocator=None, BogusTLV=None, xDelayTimer=None):
"""send MGMT_ACTIVE_SET command
Returns:
True: successful to send MGMT_ACTIVE_SET
False: fail to send MGMT_ACTIVE_SET
"""
print '%s call MGMT_ACTIVE_SET' % self.port
try:
cmd = WPANCTL_CMD + 'dataset mgmt-set-active'
if self.__sendCommand(WPANCTL_CMD + 'dataset erase')[0] == 'Fail':
return False
if listActiveTimestamp != None:
sActiveTimestamp = str(hex(listActiveTimestamp[0]))
if len(sActiveTimestamp) < 18:
sActiveTimestamp = sActiveTimestamp.lstrip('0x').zfill(16)
setActiveTimeCmd = WPANCTL_CMD + 'setprop Dataset:ActiveTimestamp ' + sActiveTimestamp
if self.__sendCommand(setActiveTimeCmd)[0] == 'Fail':
return False
if xExtendedPanId != None:
xpanid = self.__convertLongToString(xExtendedPanId)
if len(xpanid) < 16:
xpanid = xpanid.zfill(16)
setExtendedPanIdCmd = WPANCTL_CMD + 'setprop Dataset:ExtendedPanId ' + xpanid
if self.__sendCommand(setExtendedPanIdCmd)[0] == 'Fail':
return False
if sNetworkName != None:
setNetworkNameCmd = WPANCTL_CMD + 'setprop Dataset:NetworkName ' + str(sNetworkName)
if self.__sendCommand(setNetworkNameCmd)[0] == 'Fail':
return False
if xChannel != None:
setChannelCmd = WPANCTL_CMD + 'setprop Dataset:Channel ' + str(xChannel)
if self.__sendCommand(setChannelCmd)[0] == 'Fail':
return False
if sMeshLocalPrefix != None:
setMLPrefixCmd = WPANCTL_CMD + 'setprop Dataset:MeshLocalPrefix ' + str(sMeshLocalPrefix)
if self.__sendCommand(setMLPrefixCmd)[0] == 'Fail':
return False
if xMasterKey != None:
key = self.__convertLongToString(xMasterKey)
if len(key) < 32:
key = key.zfill(32)
setMasterKeyCmd = WPANCTL_CMD + 'setprop Dataset:MasterKey ' + key
if self.__sendCommand(setMasterKeyCmd)[0] == 'Fail':
return False
if xPanId != None:
setPanIdCmd = WPANCTL_CMD + 'setprop Dataset:PanId ' + str(xPanId)
if self.__sendCommand(setPanIdCmd)[0] == 'Fail':
return False
if listChannelMask != None:
setChannelMaskCmd = WPANCTL_CMD + 'setprop Dataset:ChannelMaskPage0 ' \
+ '0x' + self.__convertLongToString(self.__convertChannelMask(listChannelMask))
if self.__sendCommand(setChannelMaskCmd)[0] == 'Fail':
return False
if sPSKc != None or listSecurityPolicy != None or \
xCommissioningSessionId != None or xTmfPort != None or xSteeringData != None or xBorderRouterLocator != None or \
BogusTLV != None:
setRawTLVCmd = WPANCTL_CMD + 'setprop Dataset:RawTlvs '
if sPSKc != None:
setRawTLVCmd += '0410'
stretchedPskc = Thread_PBKDF2.get(sPSKc, ModuleHelper.Default_XpanId, ModuleHelper.Default_NwkName)
pskc = hex(stretchedPskc).rstrip('L').lstrip('0x')
if len(pskc) < 32:
pskc = pskc.zfill(32)
setRawTLVCmd += pskc
if listSecurityPolicy != None:
setRawTLVCmd += '0c03'
rotationTime = 0
policyBits = 0
# previous passing way listSecurityPolicy=[True, True, 3600, False, False, True]
if (len(listSecurityPolicy) == 6):
rotationTime = listSecurityPolicy[2]
# the last three reserved bits must be 1
policyBits = 0b00000111
if listSecurityPolicy[0]:
policyBits = policyBits | 0b10000000
if listSecurityPolicy[1]:
policyBits = policyBits | 0b01000000
if listSecurityPolicy[3]:
policyBits = policyBits | 0b00100000
if listSecurityPolicy[4]:
policyBits = policyBits | 0b00010000
if listSecurityPolicy[5]:
policyBits = policyBits | 0b00001000
else:
# new passing way listSecurityPolicy=[3600, 0b11001111]
rotationTime = listSecurityPolicy[0]
policyBits = listSecurityPolicy[1]
policy = str(hex(rotationTime))[2:]
if len(policy) < 4:
policy = policy.zfill(4)
setRawTLVCmd += policy
setRawTLVCmd += str(hex(policyBits))[2:]
if xCommissioningSessionId != None:
setRawTLVCmd += '0b02'
sessionid = str(hex(xCommissioningSessionId))[2:]
if len(sessionid) < 4:
sessionid = sessionid.zfill(4)
setRawTLVCmd += sessionid
if xBorderRouterLocator != None:
setRawTLVCmd += '0902'
locator = str(hex(xBorderRouterLocator))[2:]
if len(locator) < 4:
locator = locator.zfill(4)
setRawTLVCmd += locator
if xSteeringData != None:
steeringData = self.__convertLongToString(xSteeringData)
setRawTLVCmd += '08' + str(len(steeringData)/2).zfill(2)
setRawTLVCmd += steeringData
if BogusTLV != None:
setRawTLVCmd += "8202aa55"
print setRawTLVCmd
print cmd
if self.__sendCommand(setRawTLVCmd)[0] == 'Fail':
return False
return self.__sendCommand(cmd)[0] != 'Fail'
except Exception, e:
ModuleHelper.WriteIntoDebugLogger('MGMT_ACTIVE_SET() Error: ' + str(e)) | [
"def",
"MGMT_ACTIVE_SET",
"(",
"self",
",",
"sAddr",
"=",
"''",
",",
"xCommissioningSessionId",
"=",
"None",
",",
"listActiveTimestamp",
"=",
"None",
",",
"listChannelMask",
"=",
"None",
",",
"xExtendedPanId",
"=",
"None",
",",
"sNetworkName",
"=",
"None",
",",
"sPSKc",
"=",
"None",
",",
"listSecurityPolicy",
"=",
"None",
",",
"xChannel",
"=",
"None",
",",
"sMeshLocalPrefix",
"=",
"None",
",",
"xMasterKey",
"=",
"None",
",",
"xPanId",
"=",
"None",
",",
"xTmfPort",
"=",
"None",
",",
"xSteeringData",
"=",
"None",
",",
"xBorderRouterLocator",
"=",
"None",
",",
"BogusTLV",
"=",
"None",
",",
"xDelayTimer",
"=",
"None",
")",
":",
"print",
"'%s call MGMT_ACTIVE_SET'",
"%",
"self",
".",
"port",
"try",
":",
"cmd",
"=",
"WPANCTL_CMD",
"+",
"'dataset mgmt-set-active'",
"if",
"self",
".",
"__sendCommand",
"(",
"WPANCTL_CMD",
"+",
"'dataset erase'",
")",
"[",
"0",
"]",
"==",
"'Fail'",
":",
"return",
"False",
"if",
"listActiveTimestamp",
"!=",
"None",
":",
"sActiveTimestamp",
"=",
"str",
"(",
"hex",
"(",
"listActiveTimestamp",
"[",
"0",
"]",
")",
")",
"if",
"len",
"(",
"sActiveTimestamp",
")",
"<",
"18",
":",
"sActiveTimestamp",
"=",
"sActiveTimestamp",
".",
"lstrip",
"(",
"'0x'",
")",
".",
"zfill",
"(",
"16",
")",
"setActiveTimeCmd",
"=",
"WPANCTL_CMD",
"+",
"'setprop Dataset:ActiveTimestamp '",
"+",
"sActiveTimestamp",
"if",
"self",
".",
"__sendCommand",
"(",
"setActiveTimeCmd",
")",
"[",
"0",
"]",
"==",
"'Fail'",
":",
"return",
"False",
"if",
"xExtendedPanId",
"!=",
"None",
":",
"xpanid",
"=",
"self",
".",
"__convertLongToString",
"(",
"xExtendedPanId",
")",
"if",
"len",
"(",
"xpanid",
")",
"<",
"16",
":",
"xpanid",
"=",
"xpanid",
".",
"zfill",
"(",
"16",
")",
"setExtendedPanIdCmd",
"=",
"WPANCTL_CMD",
"+",
"'setprop Dataset:ExtendedPanId '",
"+",
"xpanid",
"if",
"self",
".",
"__sendCommand",
"(",
"setExtendedPanIdCmd",
")",
"[",
"0",
"]",
"==",
"'Fail'",
":",
"return",
"False",
"if",
"sNetworkName",
"!=",
"None",
":",
"setNetworkNameCmd",
"=",
"WPANCTL_CMD",
"+",
"'setprop Dataset:NetworkName '",
"+",
"str",
"(",
"sNetworkName",
")",
"if",
"self",
".",
"__sendCommand",
"(",
"setNetworkNameCmd",
")",
"[",
"0",
"]",
"==",
"'Fail'",
":",
"return",
"False",
"if",
"xChannel",
"!=",
"None",
":",
"setChannelCmd",
"=",
"WPANCTL_CMD",
"+",
"'setprop Dataset:Channel '",
"+",
"str",
"(",
"xChannel",
")",
"if",
"self",
".",
"__sendCommand",
"(",
"setChannelCmd",
")",
"[",
"0",
"]",
"==",
"'Fail'",
":",
"return",
"False",
"if",
"sMeshLocalPrefix",
"!=",
"None",
":",
"setMLPrefixCmd",
"=",
"WPANCTL_CMD",
"+",
"'setprop Dataset:MeshLocalPrefix '",
"+",
"str",
"(",
"sMeshLocalPrefix",
")",
"if",
"self",
".",
"__sendCommand",
"(",
"setMLPrefixCmd",
")",
"[",
"0",
"]",
"==",
"'Fail'",
":",
"return",
"False",
"if",
"xMasterKey",
"!=",
"None",
":",
"key",
"=",
"self",
".",
"__convertLongToString",
"(",
"xMasterKey",
")",
"if",
"len",
"(",
"key",
")",
"<",
"32",
":",
"key",
"=",
"key",
".",
"zfill",
"(",
"32",
")",
"setMasterKeyCmd",
"=",
"WPANCTL_CMD",
"+",
"'setprop Dataset:MasterKey '",
"+",
"key",
"if",
"self",
".",
"__sendCommand",
"(",
"setMasterKeyCmd",
")",
"[",
"0",
"]",
"==",
"'Fail'",
":",
"return",
"False",
"if",
"xPanId",
"!=",
"None",
":",
"setPanIdCmd",
"=",
"WPANCTL_CMD",
"+",
"'setprop Dataset:PanId '",
"+",
"str",
"(",
"xPanId",
")",
"if",
"self",
".",
"__sendCommand",
"(",
"setPanIdCmd",
")",
"[",
"0",
"]",
"==",
"'Fail'",
":",
"return",
"False",
"if",
"listChannelMask",
"!=",
"None",
":",
"setChannelMaskCmd",
"=",
"WPANCTL_CMD",
"+",
"'setprop Dataset:ChannelMaskPage0 '",
"+",
"'0x'",
"+",
"self",
".",
"__convertLongToString",
"(",
"self",
".",
"__convertChannelMask",
"(",
"listChannelMask",
")",
")",
"if",
"self",
".",
"__sendCommand",
"(",
"setChannelMaskCmd",
")",
"[",
"0",
"]",
"==",
"'Fail'",
":",
"return",
"False",
"if",
"sPSKc",
"!=",
"None",
"or",
"listSecurityPolicy",
"!=",
"None",
"or",
"xCommissioningSessionId",
"!=",
"None",
"or",
"xTmfPort",
"!=",
"None",
"or",
"xSteeringData",
"!=",
"None",
"or",
"xBorderRouterLocator",
"!=",
"None",
"or",
"BogusTLV",
"!=",
"None",
":",
"setRawTLVCmd",
"=",
"WPANCTL_CMD",
"+",
"'setprop Dataset:RawTlvs '",
"if",
"sPSKc",
"!=",
"None",
":",
"setRawTLVCmd",
"+=",
"'0410'",
"stretchedPskc",
"=",
"Thread_PBKDF2",
".",
"get",
"(",
"sPSKc",
",",
"ModuleHelper",
".",
"Default_XpanId",
",",
"ModuleHelper",
".",
"Default_NwkName",
")",
"pskc",
"=",
"hex",
"(",
"stretchedPskc",
")",
".",
"rstrip",
"(",
"'L'",
")",
".",
"lstrip",
"(",
"'0x'",
")",
"if",
"len",
"(",
"pskc",
")",
"<",
"32",
":",
"pskc",
"=",
"pskc",
".",
"zfill",
"(",
"32",
")",
"setRawTLVCmd",
"+=",
"pskc",
"if",
"listSecurityPolicy",
"!=",
"None",
":",
"setRawTLVCmd",
"+=",
"'0c03'",
"rotationTime",
"=",
"0",
"policyBits",
"=",
"0",
"# previous passing way listSecurityPolicy=[True, True, 3600, False, False, True]",
"if",
"(",
"len",
"(",
"listSecurityPolicy",
")",
"==",
"6",
")",
":",
"rotationTime",
"=",
"listSecurityPolicy",
"[",
"2",
"]",
"# the last three reserved bits must be 1",
"policyBits",
"=",
"0b00000111",
"if",
"listSecurityPolicy",
"[",
"0",
"]",
":",
"policyBits",
"=",
"policyBits",
"|",
"0b10000000",
"if",
"listSecurityPolicy",
"[",
"1",
"]",
":",
"policyBits",
"=",
"policyBits",
"|",
"0b01000000",
"if",
"listSecurityPolicy",
"[",
"3",
"]",
":",
"policyBits",
"=",
"policyBits",
"|",
"0b00100000",
"if",
"listSecurityPolicy",
"[",
"4",
"]",
":",
"policyBits",
"=",
"policyBits",
"|",
"0b00010000",
"if",
"listSecurityPolicy",
"[",
"5",
"]",
":",
"policyBits",
"=",
"policyBits",
"|",
"0b00001000",
"else",
":",
"# new passing way listSecurityPolicy=[3600, 0b11001111]",
"rotationTime",
"=",
"listSecurityPolicy",
"[",
"0",
"]",
"policyBits",
"=",
"listSecurityPolicy",
"[",
"1",
"]",
"policy",
"=",
"str",
"(",
"hex",
"(",
"rotationTime",
")",
")",
"[",
"2",
":",
"]",
"if",
"len",
"(",
"policy",
")",
"<",
"4",
":",
"policy",
"=",
"policy",
".",
"zfill",
"(",
"4",
")",
"setRawTLVCmd",
"+=",
"policy",
"setRawTLVCmd",
"+=",
"str",
"(",
"hex",
"(",
"policyBits",
")",
")",
"[",
"2",
":",
"]",
"if",
"xCommissioningSessionId",
"!=",
"None",
":",
"setRawTLVCmd",
"+=",
"'0b02'",
"sessionid",
"=",
"str",
"(",
"hex",
"(",
"xCommissioningSessionId",
")",
")",
"[",
"2",
":",
"]",
"if",
"len",
"(",
"sessionid",
")",
"<",
"4",
":",
"sessionid",
"=",
"sessionid",
".",
"zfill",
"(",
"4",
")",
"setRawTLVCmd",
"+=",
"sessionid",
"if",
"xBorderRouterLocator",
"!=",
"None",
":",
"setRawTLVCmd",
"+=",
"'0902'",
"locator",
"=",
"str",
"(",
"hex",
"(",
"xBorderRouterLocator",
")",
")",
"[",
"2",
":",
"]",
"if",
"len",
"(",
"locator",
")",
"<",
"4",
":",
"locator",
"=",
"locator",
".",
"zfill",
"(",
"4",
")",
"setRawTLVCmd",
"+=",
"locator",
"if",
"xSteeringData",
"!=",
"None",
":",
"steeringData",
"=",
"self",
".",
"__convertLongToString",
"(",
"xSteeringData",
")",
"setRawTLVCmd",
"+=",
"'08'",
"+",
"str",
"(",
"len",
"(",
"steeringData",
")",
"/",
"2",
")",
".",
"zfill",
"(",
"2",
")",
"setRawTLVCmd",
"+=",
"steeringData",
"if",
"BogusTLV",
"!=",
"None",
":",
"setRawTLVCmd",
"+=",
"\"8202aa55\"",
"print",
"setRawTLVCmd",
"print",
"cmd",
"if",
"self",
".",
"__sendCommand",
"(",
"setRawTLVCmd",
")",
"[",
"0",
"]",
"==",
"'Fail'",
":",
"return",
"False",
"return",
"self",
".",
"__sendCommand",
"(",
"cmd",
")",
"[",
"0",
"]",
"!=",
"'Fail'",
"except",
"Exception",
",",
"e",
":",
"ModuleHelper",
".",
"WriteIntoDebugLogger",
"(",
"'MGMT_ACTIVE_SET() Error: '",
"+",
"str",
"(",
"e",
")",
")"
] | 41.166667 | 24.410256 |
def GetPlugins(cls):
"""Retrieves the registered plugins.
Yields:
tuple[str, type]: name and class of the plugin.
"""
for plugin_name, plugin_class in iter(cls._plugin_classes.items()):
yield plugin_name, plugin_class | [
"def",
"GetPlugins",
"(",
"cls",
")",
":",
"for",
"plugin_name",
",",
"plugin_class",
"in",
"iter",
"(",
"cls",
".",
"_plugin_classes",
".",
"items",
"(",
")",
")",
":",
"yield",
"plugin_name",
",",
"plugin_class"
] | 29.875 | 17 |
def execute(filename, formatted_name):
"""Renames a file based on the name generated using metadata.
:param str filename: absolute path and filename of original file
:param str formatted_name: absolute path and new filename
"""
if os.path.isfile(formatted_name):
# If the destination exists, skip rename unless overwrite enabled
if not cfg.CONF.overwrite_file_enabled:
LOG.info('File %s already exists not forcefully moving %s',
formatted_name, filename)
return
LOG.info('renaming [%s] to [%s]', filename, formatted_name)
if not cfg.CONF.dryrun:
shutil.move(filename, formatted_name) | [
"def",
"execute",
"(",
"filename",
",",
"formatted_name",
")",
":",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"formatted_name",
")",
":",
"# If the destination exists, skip rename unless overwrite enabled",
"if",
"not",
"cfg",
".",
"CONF",
".",
"overwrite_file_enabled",
":",
"LOG",
".",
"info",
"(",
"'File %s already exists not forcefully moving %s'",
",",
"formatted_name",
",",
"filename",
")",
"return",
"LOG",
".",
"info",
"(",
"'renaming [%s] to [%s]'",
",",
"filename",
",",
"formatted_name",
")",
"if",
"not",
"cfg",
".",
"CONF",
".",
"dryrun",
":",
"shutil",
".",
"move",
"(",
"filename",
",",
"formatted_name",
")"
] | 39.235294 | 18.411765 |
def formfield_for_dbfield(self, db_field, **kwargs):
''' Offer grading choices from the assignment definition as potential form
field values for 'grading'.
When no object is given in the form, the this is a new manual submission
'''
if db_field.name == "grading":
submurl = kwargs['request'].path
try:
# Does not work on new submission action by admin or with a change of URLs. The former is expectable.
submid = [int(s) for s in submurl.split('/') if s.isdigit()][0]
kwargs["queryset"] = Submission.objects.get(
pk=submid).assignment.gradingScheme.gradings
except:
kwargs["queryset"] = Grading.objects.none()
return super(SubmissionAdmin, self).formfield_for_dbfield(db_field, **kwargs) | [
"def",
"formfield_for_dbfield",
"(",
"self",
",",
"db_field",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"db_field",
".",
"name",
"==",
"\"grading\"",
":",
"submurl",
"=",
"kwargs",
"[",
"'request'",
"]",
".",
"path",
"try",
":",
"# Does not work on new submission action by admin or with a change of URLs. The former is expectable.",
"submid",
"=",
"[",
"int",
"(",
"s",
")",
"for",
"s",
"in",
"submurl",
".",
"split",
"(",
"'/'",
")",
"if",
"s",
".",
"isdigit",
"(",
")",
"]",
"[",
"0",
"]",
"kwargs",
"[",
"\"queryset\"",
"]",
"=",
"Submission",
".",
"objects",
".",
"get",
"(",
"pk",
"=",
"submid",
")",
".",
"assignment",
".",
"gradingScheme",
".",
"gradings",
"except",
":",
"kwargs",
"[",
"\"queryset\"",
"]",
"=",
"Grading",
".",
"objects",
".",
"none",
"(",
")",
"return",
"super",
"(",
"SubmissionAdmin",
",",
"self",
")",
".",
"formfield_for_dbfield",
"(",
"db_field",
",",
"*",
"*",
"kwargs",
")"
] | 56.6 | 26.866667 |
def append_transition(self, symbol, targetset):
"""Appends a transition"""
if symbol in self.transitions:
return
self.transitions[symbol] = targetset | [
"def",
"append_transition",
"(",
"self",
",",
"symbol",
",",
"targetset",
")",
":",
"if",
"symbol",
"in",
"self",
".",
"transitions",
":",
"return",
"self",
".",
"transitions",
"[",
"symbol",
"]",
"=",
"targetset"
] | 36.2 | 7 |
def get(img, light=False):
"""Get colorscheme."""
if not shutil.which("schemer2"):
logging.error("Schemer2 wasn't found on your system.")
logging.error("Try another backend. (wal --backend)")
sys.exit(1)
cols = [col.decode('UTF-8') for col in gen_colors(img)]
return adjust(cols, light) | [
"def",
"get",
"(",
"img",
",",
"light",
"=",
"False",
")",
":",
"if",
"not",
"shutil",
".",
"which",
"(",
"\"schemer2\"",
")",
":",
"logging",
".",
"error",
"(",
"\"Schemer2 wasn't found on your system.\"",
")",
"logging",
".",
"error",
"(",
"\"Try another backend. (wal --backend)\"",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"cols",
"=",
"[",
"col",
".",
"decode",
"(",
"'UTF-8'",
")",
"for",
"col",
"in",
"gen_colors",
"(",
"img",
")",
"]",
"return",
"adjust",
"(",
"cols",
",",
"light",
")"
] | 35.444444 | 16.777778 |
def output(value, address):
'''
int, str -> TxOut
accepts base58 or bech32 addresses
'''
script = addr.to_output_script(address)
value = utils.i2le_padded(value, 8)
return tb._make_output(value, script) | [
"def",
"output",
"(",
"value",
",",
"address",
")",
":",
"script",
"=",
"addr",
".",
"to_output_script",
"(",
"address",
")",
"value",
"=",
"utils",
".",
"i2le_padded",
"(",
"value",
",",
"8",
")",
"return",
"tb",
".",
"_make_output",
"(",
"value",
",",
"script",
")"
] | 27.875 | 13.125 |
def get_program_type_by_slug(self, slug):
"""
Get a program type by its slug.
Arguments:
slug (str): The slug to identify the program type.
Returns:
dict: A program type object.
"""
return self._load_data(
self.PROGRAM_TYPES_ENDPOINT,
resource_id=slug,
default=None,
) | [
"def",
"get_program_type_by_slug",
"(",
"self",
",",
"slug",
")",
":",
"return",
"self",
".",
"_load_data",
"(",
"self",
".",
"PROGRAM_TYPES_ENDPOINT",
",",
"resource_id",
"=",
"slug",
",",
"default",
"=",
"None",
",",
")"
] | 23.25 | 16 |
def check(self, var):
"""Return True if the variable matches this type, and False otherwise."""
return isinstance(var, tuple) and all(_check_type(t, self._element_type) for t in var) | [
"def",
"check",
"(",
"self",
",",
"var",
")",
":",
"return",
"isinstance",
"(",
"var",
",",
"tuple",
")",
"and",
"all",
"(",
"_check_type",
"(",
"t",
",",
"self",
".",
"_element_type",
")",
"for",
"t",
"in",
"var",
")"
] | 65.333333 | 24.333333 |
def filter_collections(self, model, context=None):
"""
Filter collections
Runs filters on collection properties changing them in place.
:param model: object or dict
:param context: object, dict or None
:return: None
"""
if model is None:
return
for property_name in self.collections:
prop = self.collections[property_name]
collection = self.get(model, property_name)
filtered_value = prop.filter(
value=collection,
model=model,
context=context
)
self.set(model, property_name, filtered_value)
prop.filter_with_schema(
collection,
context if prop.use_context else None
) | [
"def",
"filter_collections",
"(",
"self",
",",
"model",
",",
"context",
"=",
"None",
")",
":",
"if",
"model",
"is",
"None",
":",
"return",
"for",
"property_name",
"in",
"self",
".",
"collections",
":",
"prop",
"=",
"self",
".",
"collections",
"[",
"property_name",
"]",
"collection",
"=",
"self",
".",
"get",
"(",
"model",
",",
"property_name",
")",
"filtered_value",
"=",
"prop",
".",
"filter",
"(",
"value",
"=",
"collection",
",",
"model",
"=",
"model",
",",
"context",
"=",
"context",
")",
"self",
".",
"set",
"(",
"model",
",",
"property_name",
",",
"filtered_value",
")",
"prop",
".",
"filter_with_schema",
"(",
"collection",
",",
"context",
"if",
"prop",
".",
"use_context",
"else",
"None",
")"
] | 31.84 | 14.32 |
def prepare(self):
"""
Preparatory checks for whether this Executor can go ahead
and (try to) build its targets.
"""
for s in self.get_all_sources():
if s.missing():
msg = "Source `%s' not found, needed by target `%s'."
raise SCons.Errors.StopError(msg % (s, self.batches[0].targets[0])) | [
"def",
"prepare",
"(",
"self",
")",
":",
"for",
"s",
"in",
"self",
".",
"get_all_sources",
"(",
")",
":",
"if",
"s",
".",
"missing",
"(",
")",
":",
"msg",
"=",
"\"Source `%s' not found, needed by target `%s'.\"",
"raise",
"SCons",
".",
"Errors",
".",
"StopError",
"(",
"msg",
"%",
"(",
"s",
",",
"self",
".",
"batches",
"[",
"0",
"]",
".",
"targets",
"[",
"0",
"]",
")",
")"
] | 40.333333 | 14.777778 |
def token(cls: Type[XHXType], sha_hash: str) -> XHXType:
"""
Return XHX instance from sha_hash
:param sha_hash: SHA256 hash
:return:
"""
xhx = cls()
xhx.sha_hash = sha_hash
return xhx | [
"def",
"token",
"(",
"cls",
":",
"Type",
"[",
"XHXType",
"]",
",",
"sha_hash",
":",
"str",
")",
"->",
"XHXType",
":",
"xhx",
"=",
"cls",
"(",
")",
"xhx",
".",
"sha_hash",
"=",
"sha_hash",
"return",
"xhx"
] | 23.9 | 13.7 |
def user(self, username):
""" Returns the :class:`~plexapi.myplex.MyPlexUser` that matches the email or username specified.
Parameters:
username (str): Username, email or id of the user to return.
"""
for user in self.users():
# Home users don't have email, username etc.
if username.lower() == user.title.lower():
return user
elif (user.username and user.email and user.id and username.lower() in
(user.username.lower(), user.email.lower(), str(user.id))):
return user
raise NotFound('Unable to find user %s' % username) | [
"def",
"user",
"(",
"self",
",",
"username",
")",
":",
"for",
"user",
"in",
"self",
".",
"users",
"(",
")",
":",
"# Home users don't have email, username etc.",
"if",
"username",
".",
"lower",
"(",
")",
"==",
"user",
".",
"title",
".",
"lower",
"(",
")",
":",
"return",
"user",
"elif",
"(",
"user",
".",
"username",
"and",
"user",
".",
"email",
"and",
"user",
".",
"id",
"and",
"username",
".",
"lower",
"(",
")",
"in",
"(",
"user",
".",
"username",
".",
"lower",
"(",
")",
",",
"user",
".",
"email",
".",
"lower",
"(",
")",
",",
"str",
"(",
"user",
".",
"id",
")",
")",
")",
":",
"return",
"user",
"raise",
"NotFound",
"(",
"'Unable to find user %s'",
"%",
"username",
")"
] | 40.875 | 21.75 |
def from_short_lines_text(self, text: str):
"""
Famous example from Hávamál 77
>>> text = "Deyr fé,\\ndeyja frændr,\\ndeyr sjalfr it sama,\\nek veit einn,\\nat aldrei deyr:\\ndómr um dauðan hvern."
>>> lj = Ljoodhhaattr()
>>> lj.from_short_lines_text(text)
>>> [sl.text for sl in lj.short_lines]
['Deyr fé,', 'deyja frændr,', 'deyr sjalfr it sama,', 'ek veit einn,', 'at aldrei deyr:', 'dómr um dauðan hvern.']
>>> [[sl.text for sl in long_line] for long_line in lj.long_lines]
[['Deyr fé,', 'deyja frændr,'], ['deyr sjalfr it sama,'], ['ek veit einn,', 'at aldrei deyr:'], ['dómr um dauðan hvern.']]
:param text:
:return:
"""
Metre.from_short_lines_text(self, text)
lines = [line for line in text.split("\n") if line]
self.short_lines = [ShortLine(lines[0]), ShortLine(lines[1]), LongLine(lines[2]), ShortLine(lines[3]),
ShortLine(lines[4]), LongLine(lines[5])]
self.long_lines = [self.short_lines[0:2], [self.short_lines[2]], self.short_lines[3:5], [self.short_lines[5]]] | [
"def",
"from_short_lines_text",
"(",
"self",
",",
"text",
":",
"str",
")",
":",
"Metre",
".",
"from_short_lines_text",
"(",
"self",
",",
"text",
")",
"lines",
"=",
"[",
"line",
"for",
"line",
"in",
"text",
".",
"split",
"(",
"\"\\n\"",
")",
"if",
"line",
"]",
"self",
".",
"short_lines",
"=",
"[",
"ShortLine",
"(",
"lines",
"[",
"0",
"]",
")",
",",
"ShortLine",
"(",
"lines",
"[",
"1",
"]",
")",
",",
"LongLine",
"(",
"lines",
"[",
"2",
"]",
")",
",",
"ShortLine",
"(",
"lines",
"[",
"3",
"]",
")",
",",
"ShortLine",
"(",
"lines",
"[",
"4",
"]",
")",
",",
"LongLine",
"(",
"lines",
"[",
"5",
"]",
")",
"]",
"self",
".",
"long_lines",
"=",
"[",
"self",
".",
"short_lines",
"[",
"0",
":",
"2",
"]",
",",
"[",
"self",
".",
"short_lines",
"[",
"2",
"]",
"]",
",",
"self",
".",
"short_lines",
"[",
"3",
":",
"5",
"]",
",",
"[",
"self",
".",
"short_lines",
"[",
"5",
"]",
"]",
"]"
] | 58.526316 | 31.578947 |
def get_bounding_boxes(df_shapes, shape_i_columns):
'''
Return a `pandas.DataFrame` indexed by `shape_i_columns` (i.e., each row
corresponds to a single shape/polygon), containing the following columns:
- `width`: The width of the widest part of the shape.
- `height`: The height of the tallest part of the shape.
'''
xy_groups = df_shapes.groupby(shape_i_columns)[['x', 'y']]
xy_min = xy_groups.agg('min')
xy_max = xy_groups.agg('max')
shapes = (xy_max - xy_min).rename(columns={'x': 'width', 'y': 'height'})
return xy_min.join(shapes) | [
"def",
"get_bounding_boxes",
"(",
"df_shapes",
",",
"shape_i_columns",
")",
":",
"xy_groups",
"=",
"df_shapes",
".",
"groupby",
"(",
"shape_i_columns",
")",
"[",
"[",
"'x'",
",",
"'y'",
"]",
"]",
"xy_min",
"=",
"xy_groups",
".",
"agg",
"(",
"'min'",
")",
"xy_max",
"=",
"xy_groups",
".",
"agg",
"(",
"'max'",
")",
"shapes",
"=",
"(",
"xy_max",
"-",
"xy_min",
")",
".",
"rename",
"(",
"columns",
"=",
"{",
"'x'",
":",
"'width'",
",",
"'y'",
":",
"'height'",
"}",
")",
"return",
"xy_min",
".",
"join",
"(",
"shapes",
")"
] | 40.785714 | 25.071429 |
def reqMktDepth(self, id, contract, numRows, mktDepthOptions):
"""reqMktDepth(EClient self, TickerId id, Contract contract, int numRows, TagValueListSPtr const & mktDepthOptions)"""
return _swigibpy.EClient_reqMktDepth(self, id, contract, numRows, mktDepthOptions) | [
"def",
"reqMktDepth",
"(",
"self",
",",
"id",
",",
"contract",
",",
"numRows",
",",
"mktDepthOptions",
")",
":",
"return",
"_swigibpy",
".",
"EClient_reqMktDepth",
"(",
"self",
",",
"id",
",",
"contract",
",",
"numRows",
",",
"mktDepthOptions",
")"
] | 92.666667 | 24 |
def convert(self, name):
"translate gui2py attribute name from pythoncard legacy code"
new_name = PYTHONCARD_PROPERTY_MAP.get(name)
if new_name:
print "WARNING: property %s should be %s (%s)" % (name, new_name, self.obj.name)
return new_name
else:
return name | [
"def",
"convert",
"(",
"self",
",",
"name",
")",
":",
"new_name",
"=",
"PYTHONCARD_PROPERTY_MAP",
".",
"get",
"(",
"name",
")",
"if",
"new_name",
":",
"print",
"\"WARNING: property %s should be %s (%s)\"",
"%",
"(",
"name",
",",
"new_name",
",",
"self",
".",
"obj",
".",
"name",
")",
"return",
"new_name",
"else",
":",
"return",
"name"
] | 40 | 23.25 |
def reward_wall(self):
"""
Add a wall collision reward
"""
if not 'wall' in self.mode:
return
mode = self.mode['wall']
if mode and mode and self.__test_cond(mode):
self.logger.debug("Wall {x}/{y}'".format(x=self.bumped_x, y=self.bumped_y))
self.player.stats['reward'] += mode['reward']
self.player.game_over = self.player.game_over or mode['terminal'] | [
"def",
"reward_wall",
"(",
"self",
")",
":",
"if",
"not",
"'wall'",
"in",
"self",
".",
"mode",
":",
"return",
"mode",
"=",
"self",
".",
"mode",
"[",
"'wall'",
"]",
"if",
"mode",
"and",
"mode",
"and",
"self",
".",
"__test_cond",
"(",
"mode",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"\"Wall {x}/{y}'\"",
".",
"format",
"(",
"x",
"=",
"self",
".",
"bumped_x",
",",
"y",
"=",
"self",
".",
"bumped_y",
")",
")",
"self",
".",
"player",
".",
"stats",
"[",
"'reward'",
"]",
"+=",
"mode",
"[",
"'reward'",
"]",
"self",
".",
"player",
".",
"game_over",
"=",
"self",
".",
"player",
".",
"game_over",
"or",
"mode",
"[",
"'terminal'",
"]"
] | 36.416667 | 17.583333 |
def _start_print(self):
"""Print the start message with or without newline depending on the
self._start_no_nl variable.
"""
if self._start_no_nl:
sys.stdout.write(self._start_msg)
sys.stdout.flush()
else:
print(self._start_msg) | [
"def",
"_start_print",
"(",
"self",
")",
":",
"if",
"self",
".",
"_start_no_nl",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"self",
".",
"_start_msg",
")",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"else",
":",
"print",
"(",
"self",
".",
"_start_msg",
")"
] | 32.777778 | 9 |
def delete(self, *clauses, **filters):
"""Delete rows from the table.
Keyword arguments can be used to add column-based filters. The filter
criterion will always be equality:
::
table.delete(place='Berlin')
If no arguments are given, all records are deleted.
"""
if not self.exists:
return False
clause = self._args_to_clause(filters, clauses=clauses)
stmt = self.table.delete(whereclause=clause)
rp = self.db.executable.execute(stmt)
return rp.rowcount > 0 | [
"def",
"delete",
"(",
"self",
",",
"*",
"clauses",
",",
"*",
"*",
"filters",
")",
":",
"if",
"not",
"self",
".",
"exists",
":",
"return",
"False",
"clause",
"=",
"self",
".",
"_args_to_clause",
"(",
"filters",
",",
"clauses",
"=",
"clauses",
")",
"stmt",
"=",
"self",
".",
"table",
".",
"delete",
"(",
"whereclause",
"=",
"clause",
")",
"rp",
"=",
"self",
".",
"db",
".",
"executable",
".",
"execute",
"(",
"stmt",
")",
"return",
"rp",
".",
"rowcount",
">",
"0"
] | 32.705882 | 17 |
def save_composition(self, composition_form, *args, **kwargs):
"""Pass through to provider CompositionAdminSession.update_composition"""
# Implemented from kitosid template for -
# osid.resource.ResourceAdminSession.update_resource
if composition_form.is_for_update():
return self.update_composition(composition_form, *args, **kwargs)
else:
return self.create_composition(composition_form, *args, **kwargs) | [
"def",
"save_composition",
"(",
"self",
",",
"composition_form",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# Implemented from kitosid template for -",
"# osid.resource.ResourceAdminSession.update_resource",
"if",
"composition_form",
".",
"is_for_update",
"(",
")",
":",
"return",
"self",
".",
"update_composition",
"(",
"composition_form",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"return",
"self",
".",
"create_composition",
"(",
"composition_form",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | 57.875 | 19.5 |
def _interpret_response(self, response, payload, expected_status):
# type: (Response, dict, Container[int]) -> dict
"""
Interprets the HTTP response from the node.
:param response:
The response object received from
:py:meth:`_send_http_request`.
:param payload:
The request payload that was sent (used for debugging).
:param expected_status:
The response should match one of these status codes to be
considered valid.
"""
raw_content = response.text
if not raw_content:
raise with_context(
exc=BadApiResponse(
'Empty {status} response from node.'.format(
status=response.status_code,
),
),
context={
'request': payload,
},
)
try:
decoded = json.loads(raw_content) # type: dict
# :bc: py2k doesn't have JSONDecodeError
except ValueError:
raise with_context(
exc=BadApiResponse(
'Non-JSON {status} response from node: '
'{raw_content}'.format(
status=response.status_code,
raw_content=raw_content,
)
),
context={
'request': payload,
'raw_response': raw_content,
},
)
if not isinstance(decoded, dict):
raise with_context(
exc=BadApiResponse(
'Malformed {status} response from node: {decoded!r}'.format(
status=response.status_code,
decoded=decoded,
),
),
context={
'request': payload,
'response': decoded,
},
)
if response.status_code in expected_status:
return decoded
error = None
try:
if response.status_code == codes['bad_request']:
error = decoded['error']
elif response.status_code == codes['internal_server_error']:
error = decoded['exception']
except KeyError:
pass
raise with_context(
exc=BadApiResponse(
'{status} response from node: {error}'.format(
error=error or decoded,
status=response.status_code,
),
),
context={
'request': payload,
'response': decoded,
},
) | [
"def",
"_interpret_response",
"(",
"self",
",",
"response",
",",
"payload",
",",
"expected_status",
")",
":",
"# type: (Response, dict, Container[int]) -> dict",
"raw_content",
"=",
"response",
".",
"text",
"if",
"not",
"raw_content",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"BadApiResponse",
"(",
"'Empty {status} response from node.'",
".",
"format",
"(",
"status",
"=",
"response",
".",
"status_code",
",",
")",
",",
")",
",",
"context",
"=",
"{",
"'request'",
":",
"payload",
",",
"}",
",",
")",
"try",
":",
"decoded",
"=",
"json",
".",
"loads",
"(",
"raw_content",
")",
"# type: dict",
"# :bc: py2k doesn't have JSONDecodeError",
"except",
"ValueError",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"BadApiResponse",
"(",
"'Non-JSON {status} response from node: '",
"'{raw_content}'",
".",
"format",
"(",
"status",
"=",
"response",
".",
"status_code",
",",
"raw_content",
"=",
"raw_content",
",",
")",
")",
",",
"context",
"=",
"{",
"'request'",
":",
"payload",
",",
"'raw_response'",
":",
"raw_content",
",",
"}",
",",
")",
"if",
"not",
"isinstance",
"(",
"decoded",
",",
"dict",
")",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"BadApiResponse",
"(",
"'Malformed {status} response from node: {decoded!r}'",
".",
"format",
"(",
"status",
"=",
"response",
".",
"status_code",
",",
"decoded",
"=",
"decoded",
",",
")",
",",
")",
",",
"context",
"=",
"{",
"'request'",
":",
"payload",
",",
"'response'",
":",
"decoded",
",",
"}",
",",
")",
"if",
"response",
".",
"status_code",
"in",
"expected_status",
":",
"return",
"decoded",
"error",
"=",
"None",
"try",
":",
"if",
"response",
".",
"status_code",
"==",
"codes",
"[",
"'bad_request'",
"]",
":",
"error",
"=",
"decoded",
"[",
"'error'",
"]",
"elif",
"response",
".",
"status_code",
"==",
"codes",
"[",
"'internal_server_error'",
"]",
":",
"error",
"=",
"decoded",
"[",
"'exception'",
"]",
"except",
"KeyError",
":",
"pass",
"raise",
"with_context",
"(",
"exc",
"=",
"BadApiResponse",
"(",
"'{status} response from node: {error}'",
".",
"format",
"(",
"error",
"=",
"error",
"or",
"decoded",
",",
"status",
"=",
"response",
".",
"status_code",
",",
")",
",",
")",
",",
"context",
"=",
"{",
"'request'",
":",
"payload",
",",
"'response'",
":",
"decoded",
",",
"}",
",",
")"
] | 30.078652 | 17.876404 |
def set_title(self, index, title):
"""Sets the title of a container page.
Parameters
----------
index : int
Index of the container page
title : unicode
New title
"""
# JSON dictionaries have string keys, so we convert index to a string
index = unicode_type(int(index))
self._titles[index] = title
self.send_state('_titles') | [
"def",
"set_title",
"(",
"self",
",",
"index",
",",
"title",
")",
":",
"# JSON dictionaries have string keys, so we convert index to a string",
"index",
"=",
"unicode_type",
"(",
"int",
"(",
"index",
")",
")",
"self",
".",
"_titles",
"[",
"index",
"]",
"=",
"title",
"self",
".",
"send_state",
"(",
"'_titles'",
")"
] | 29.642857 | 14 |
def open_file(self, title="Open File", initialDir="~", fileTypes="*|All Files", rememberAs=None, **kwargs):
"""
Show an Open File dialog
Usage: C{dialog.open_file(title="Open File", initialDir="~", fileTypes="*|All Files", rememberAs=None, **kwargs)}
@param title: window title for the dialog
@param initialDir: starting directory for the file dialog
@param fileTypes: file type filter expression
@param rememberAs: gives an ID to this file dialog, allowing it to open at the last used path next time
@return: a tuple containing the exit code and file path
@rtype: C{DialogData(int, str)}
"""
if rememberAs is not None:
return self._run_kdialog(title, ["--getopenfilename", initialDir, fileTypes, ":" + rememberAs], kwargs)
else:
return self._run_kdialog(title, ["--getopenfilename", initialDir, fileTypes], kwargs) | [
"def",
"open_file",
"(",
"self",
",",
"title",
"=",
"\"Open File\"",
",",
"initialDir",
"=",
"\"~\"",
",",
"fileTypes",
"=",
"\"*|All Files\"",
",",
"rememberAs",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"rememberAs",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_run_kdialog",
"(",
"title",
",",
"[",
"\"--getopenfilename\"",
",",
"initialDir",
",",
"fileTypes",
",",
"\":\"",
"+",
"rememberAs",
"]",
",",
"kwargs",
")",
"else",
":",
"return",
"self",
".",
"_run_kdialog",
"(",
"title",
",",
"[",
"\"--getopenfilename\"",
",",
"initialDir",
",",
"fileTypes",
"]",
",",
"kwargs",
")"
] | 55.117647 | 31 |
def rotate_crop(centerij, sz, angle, img=None, mode='constant', **kwargs):
"""
rotate and crop
if no img, then return crop function
:param centerij:
:param sz:
:param angle:
:param img: [h,w,d]
:param mode: padding option
:return: cropped image or function
"""
# crop enough size ( 2 * sqrt(sum(sz^2) )
# rotate
from skimage import transform
sz = np.array(sz)
crop_half = int(np.ceil(np.sqrt(np.square(sz).sum())))
if centerij[0] >= crop_half or centerij[1] >= crop_half:
raise NotImplementedError
slicei = slice(centerij[0] - crop_half, centerij[0] + crop_half)
slicej = slice(centerij[1] - crop_half, centerij[1] + crop_half)
# slicei = (centerij[0] - crop_half, centerij[0] + crop_half)
# slicej = (centerij[1] - crop_half, centerij[1] + crop_half)
# def _pad_if_need(im):
# imshape = im.shape
# pad_need = slicei[0] < 0 or slicej[0] < 0 or slice
# padwidth = [(slicei[0], np.maximum(0, slicei[1] - imshape[0])),
# (slicej[0], np.maximum(0, slicej[1] - imshape[1]))]
def _rotate_cropcenter(im):
enoughcrop = im[slicei, slicej]
rotated = transform.rotate(enoughcrop, angle, resize=False, preserve_range=True, mode=mode, **kwargs)
return cropcenter(sz, rotated)
if img is not None:
return _rotate_cropcenter(img)
return _rotate_cropcenter | [
"def",
"rotate_crop",
"(",
"centerij",
",",
"sz",
",",
"angle",
",",
"img",
"=",
"None",
",",
"mode",
"=",
"'constant'",
",",
"*",
"*",
"kwargs",
")",
":",
"# crop enough size ( 2 * sqrt(sum(sz^2) )",
"# rotate",
"from",
"skimage",
"import",
"transform",
"sz",
"=",
"np",
".",
"array",
"(",
"sz",
")",
"crop_half",
"=",
"int",
"(",
"np",
".",
"ceil",
"(",
"np",
".",
"sqrt",
"(",
"np",
".",
"square",
"(",
"sz",
")",
".",
"sum",
"(",
")",
")",
")",
")",
"if",
"centerij",
"[",
"0",
"]",
">=",
"crop_half",
"or",
"centerij",
"[",
"1",
"]",
">=",
"crop_half",
":",
"raise",
"NotImplementedError",
"slicei",
"=",
"slice",
"(",
"centerij",
"[",
"0",
"]",
"-",
"crop_half",
",",
"centerij",
"[",
"0",
"]",
"+",
"crop_half",
")",
"slicej",
"=",
"slice",
"(",
"centerij",
"[",
"1",
"]",
"-",
"crop_half",
",",
"centerij",
"[",
"1",
"]",
"+",
"crop_half",
")",
"# slicei = (centerij[0] - crop_half, centerij[0] + crop_half)",
"# slicej = (centerij[1] - crop_half, centerij[1] + crop_half)",
"# def _pad_if_need(im):",
"# imshape = im.shape",
"# pad_need = slicei[0] < 0 or slicej[0] < 0 or slice",
"# padwidth = [(slicei[0], np.maximum(0, slicei[1] - imshape[0])),",
"# (slicej[0], np.maximum(0, slicej[1] - imshape[1]))]",
"def",
"_rotate_cropcenter",
"(",
"im",
")",
":",
"enoughcrop",
"=",
"im",
"[",
"slicei",
",",
"slicej",
"]",
"rotated",
"=",
"transform",
".",
"rotate",
"(",
"enoughcrop",
",",
"angle",
",",
"resize",
"=",
"False",
",",
"preserve_range",
"=",
"True",
",",
"mode",
"=",
"mode",
",",
"*",
"*",
"kwargs",
")",
"return",
"cropcenter",
"(",
"sz",
",",
"rotated",
")",
"if",
"img",
"is",
"not",
"None",
":",
"return",
"_rotate_cropcenter",
"(",
"img",
")",
"return",
"_rotate_cropcenter"
] | 33.804878 | 21.073171 |
def add_linguistic_processor(self, layer, my_lp):
"""
Adds a linguistic processor to the header
@type my_lp: L{Clp}
@param my_lp: linguistic processor object
@type layer: string
@param layer: the layer to which the processor is related to
"""
if self.header is None:
self.header = CHeader(type=self.type)
self.root.insert(0,self.header.get_node())
self.header.add_linguistic_processor(layer,my_lp) | [
"def",
"add_linguistic_processor",
"(",
"self",
",",
"layer",
",",
"my_lp",
")",
":",
"if",
"self",
".",
"header",
"is",
"None",
":",
"self",
".",
"header",
"=",
"CHeader",
"(",
"type",
"=",
"self",
".",
"type",
")",
"self",
".",
"root",
".",
"insert",
"(",
"0",
",",
"self",
".",
"header",
".",
"get_node",
"(",
")",
")",
"self",
".",
"header",
".",
"add_linguistic_processor",
"(",
"layer",
",",
"my_lp",
")"
] | 40.166667 | 10.833333 |
def register_model(self, key, *models, **kwargs):
"""
Register a cache_group with this manager.
Use this method to register more simple
groups where all models share the same parameters.
Any arguments are treated as models that you would like
to register.
Any keyword arguments received are passed to the
register method when registering each model.
:param key: The key to register this group as. \
Raises an exception if the key is already registered.
"""
cache_group = CacheGroup(key)
for model in models:
cache_group.register(model, **kwargs)
self.register_cache(cache_group) | [
"def",
"register_model",
"(",
"self",
",",
"key",
",",
"*",
"models",
",",
"*",
"*",
"kwargs",
")",
":",
"cache_group",
"=",
"CacheGroup",
"(",
"key",
")",
"for",
"model",
"in",
"models",
":",
"cache_group",
".",
"register",
"(",
"model",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"register_cache",
"(",
"cache_group",
")"
] | 31.227273 | 18.863636 |
def main(argv=sys.argv[1:]):
"""Parses the command line comments."""
usage = 'usage: %prog [options] FILE\n\n' + __doc__
parser = OptionParser(usage)
# options
parser.add_option("-f", "--force",
action='store_true', default=False,
help="make changes even if they cannot undone before saving the new file")
parser.add_option("-m", "--min_level",
default='NONE',
help="minimum level of logging statements to modify [default: no minimum]")
parser.add_option("-M", "--max_level",
default='NONE',
help="maximum level of logging statements to modify [default: no maximum]")
parser.add_option("-o", "--output-file",
default=None,
help="where to output the result [default: overwrite the input file]")
parser.add_option("-r", "--restore",
action='store_true', default=False,
help="restore logging statements previously commented out and replaced with pass statements")
parser.add_option("-v", "--verbose",
action='store_true', default=False,
help="print informational messages about changes made")
(options, args) = parser.parse_args(argv)
if len(args) != 1:
parser.error("expected 1 argument but got %d arguments: %s" % (len(args), ' '.join(args)))
input_fn = args[0]
if not options.output_file:
options.output_file = input_fn
# validate min/max level
LEVEL_CHOICES = LEVELS + ['NONE']
min_level_value = 0 if options.min_level == 'NONE' else get_level_value(options.min_level)
if options.min_level is None:
parser.error("min level must be an integer or one of these values: %s" % ', '.join(LEVEL_CHOICES))
max_level_value = sys.maxint if options.max_level == 'NONE' else get_level_value(options.max_level)
if options.max_level is None:
parser.error("max level must be an integer or one of these values: %s" % ', '.join(LEVEL_CHOICES))
if options.verbose:
logging.getLogger().setLevel(logging.INFO)
try:
return modify_logging(input_fn, options.output_file,
min_level_value, max_level_value,
options.restore, options.force)
except IOError as e:
logging.error(str(e))
return -1 | [
"def",
"main",
"(",
"argv",
"=",
"sys",
".",
"argv",
"[",
"1",
":",
"]",
")",
":",
"usage",
"=",
"'usage: %prog [options] FILE\\n\\n'",
"+",
"__doc__",
"parser",
"=",
"OptionParser",
"(",
"usage",
")",
"# options",
"parser",
".",
"add_option",
"(",
"\"-f\"",
",",
"\"--force\"",
",",
"action",
"=",
"'store_true'",
",",
"default",
"=",
"False",
",",
"help",
"=",
"\"make changes even if they cannot undone before saving the new file\"",
")",
"parser",
".",
"add_option",
"(",
"\"-m\"",
",",
"\"--min_level\"",
",",
"default",
"=",
"'NONE'",
",",
"help",
"=",
"\"minimum level of logging statements to modify [default: no minimum]\"",
")",
"parser",
".",
"add_option",
"(",
"\"-M\"",
",",
"\"--max_level\"",
",",
"default",
"=",
"'NONE'",
",",
"help",
"=",
"\"maximum level of logging statements to modify [default: no maximum]\"",
")",
"parser",
".",
"add_option",
"(",
"\"-o\"",
",",
"\"--output-file\"",
",",
"default",
"=",
"None",
",",
"help",
"=",
"\"where to output the result [default: overwrite the input file]\"",
")",
"parser",
".",
"add_option",
"(",
"\"-r\"",
",",
"\"--restore\"",
",",
"action",
"=",
"'store_true'",
",",
"default",
"=",
"False",
",",
"help",
"=",
"\"restore logging statements previously commented out and replaced with pass statements\"",
")",
"parser",
".",
"add_option",
"(",
"\"-v\"",
",",
"\"--verbose\"",
",",
"action",
"=",
"'store_true'",
",",
"default",
"=",
"False",
",",
"help",
"=",
"\"print informational messages about changes made\"",
")",
"(",
"options",
",",
"args",
")",
"=",
"parser",
".",
"parse_args",
"(",
"argv",
")",
"if",
"len",
"(",
"args",
")",
"!=",
"1",
":",
"parser",
".",
"error",
"(",
"\"expected 1 argument but got %d arguments: %s\"",
"%",
"(",
"len",
"(",
"args",
")",
",",
"' '",
".",
"join",
"(",
"args",
")",
")",
")",
"input_fn",
"=",
"args",
"[",
"0",
"]",
"if",
"not",
"options",
".",
"output_file",
":",
"options",
".",
"output_file",
"=",
"input_fn",
"# validate min/max level",
"LEVEL_CHOICES",
"=",
"LEVELS",
"+",
"[",
"'NONE'",
"]",
"min_level_value",
"=",
"0",
"if",
"options",
".",
"min_level",
"==",
"'NONE'",
"else",
"get_level_value",
"(",
"options",
".",
"min_level",
")",
"if",
"options",
".",
"min_level",
"is",
"None",
":",
"parser",
".",
"error",
"(",
"\"min level must be an integer or one of these values: %s\"",
"%",
"', '",
".",
"join",
"(",
"LEVEL_CHOICES",
")",
")",
"max_level_value",
"=",
"sys",
".",
"maxint",
"if",
"options",
".",
"max_level",
"==",
"'NONE'",
"else",
"get_level_value",
"(",
"options",
".",
"max_level",
")",
"if",
"options",
".",
"max_level",
"is",
"None",
":",
"parser",
".",
"error",
"(",
"\"max level must be an integer or one of these values: %s\"",
"%",
"', '",
".",
"join",
"(",
"LEVEL_CHOICES",
")",
")",
"if",
"options",
".",
"verbose",
":",
"logging",
".",
"getLogger",
"(",
")",
".",
"setLevel",
"(",
"logging",
".",
"INFO",
")",
"try",
":",
"return",
"modify_logging",
"(",
"input_fn",
",",
"options",
".",
"output_file",
",",
"min_level_value",
",",
"max_level_value",
",",
"options",
".",
"restore",
",",
"options",
".",
"force",
")",
"except",
"IOError",
"as",
"e",
":",
"logging",
".",
"error",
"(",
"str",
"(",
"e",
")",
")",
"return",
"-",
"1"
] | 47.098039 | 24.098039 |
def _add_mixing_variable_names_to_individual_vars(self):
"""
Ensure that the model objects mixing variables are added to its list of
individual variables.
"""
assert isinstance(self.ind_var_names, list)
# Note that if one estimates a mixed logit model, then the mixing
# variables will be added to individual vars. And if one estimates
# the model again (perhaps from different starting values), then
# an error will be raised when creating the coefs series because we
# will have added the mixing variables twice. The condition below
# should prevent this error.
already_included = any(["Sigma " in x for x in self.ind_var_names])
if self.mixing_vars is not None and not already_included:
new_ind_var_names = ["Sigma " + x for x in self.mixing_vars]
self.ind_var_names += new_ind_var_names
return None | [
"def",
"_add_mixing_variable_names_to_individual_vars",
"(",
"self",
")",
":",
"assert",
"isinstance",
"(",
"self",
".",
"ind_var_names",
",",
"list",
")",
"# Note that if one estimates a mixed logit model, then the mixing",
"# variables will be added to individual vars. And if one estimates",
"# the model again (perhaps from different starting values), then",
"# an error will be raised when creating the coefs series because we",
"# will have added the mixing variables twice. The condition below",
"# should prevent this error.",
"already_included",
"=",
"any",
"(",
"[",
"\"Sigma \"",
"in",
"x",
"for",
"x",
"in",
"self",
".",
"ind_var_names",
"]",
")",
"if",
"self",
".",
"mixing_vars",
"is",
"not",
"None",
"and",
"not",
"already_included",
":",
"new_ind_var_names",
"=",
"[",
"\"Sigma \"",
"+",
"x",
"for",
"x",
"in",
"self",
".",
"mixing_vars",
"]",
"self",
".",
"ind_var_names",
"+=",
"new_ind_var_names",
"return",
"None"
] | 51.222222 | 22.888889 |
def delete_file(self, sass_filename, sass_fileurl):
"""
Delete a *.css file, but only if it has been generated through a SASS/SCSS file.
"""
if self.use_static_root:
destpath = os.path.join(self.static_root, os.path.splitext(sass_fileurl)[0] + '.css')
else:
destpath = os.path.splitext(sass_filename)[0] + '.css'
if os.path.isfile(destpath):
os.remove(destpath)
self.processed_files.append(sass_filename)
if self.verbosity > 1:
self.stdout.write("Deleted '{0}'\n".format(destpath)) | [
"def",
"delete_file",
"(",
"self",
",",
"sass_filename",
",",
"sass_fileurl",
")",
":",
"if",
"self",
".",
"use_static_root",
":",
"destpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"static_root",
",",
"os",
".",
"path",
".",
"splitext",
"(",
"sass_fileurl",
")",
"[",
"0",
"]",
"+",
"'.css'",
")",
"else",
":",
"destpath",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"sass_filename",
")",
"[",
"0",
"]",
"+",
"'.css'",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"destpath",
")",
":",
"os",
".",
"remove",
"(",
"destpath",
")",
"self",
".",
"processed_files",
".",
"append",
"(",
"sass_filename",
")",
"if",
"self",
".",
"verbosity",
">",
"1",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"\"Deleted '{0}'\\n\"",
".",
"format",
"(",
"destpath",
")",
")"
] | 45.615385 | 18.384615 |
def read_flash(self, addr=0xFF, page=0x00):
"""Read back a flash page from the Crazyflie and return it"""
buff = bytearray()
page_size = self.targets[addr].page_size
for i in range(0, int(math.ceil(page_size / 25.0))):
pk = None
retry_counter = 5
while ((not pk or pk.header != 0xFF or
struct.unpack('<BB', pk.data[0:2]) != (addr, 0x1C)) and
retry_counter >= 0):
pk = CRTPPacket()
pk.set_header(0xFF, 0xFF)
pk.data = struct.pack('<BBHH', addr, 0x1C, page, (i * 25))
self.link.send_packet(pk)
pk = self.link.receive_packet(1)
retry_counter -= 1
if (retry_counter < 0):
return None
else:
buff += pk.data[6:]
# For some reason we get one byte extra here...
return buff[0:page_size] | [
"def",
"read_flash",
"(",
"self",
",",
"addr",
"=",
"0xFF",
",",
"page",
"=",
"0x00",
")",
":",
"buff",
"=",
"bytearray",
"(",
")",
"page_size",
"=",
"self",
".",
"targets",
"[",
"addr",
"]",
".",
"page_size",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"int",
"(",
"math",
".",
"ceil",
"(",
"page_size",
"/",
"25.0",
")",
")",
")",
":",
"pk",
"=",
"None",
"retry_counter",
"=",
"5",
"while",
"(",
"(",
"not",
"pk",
"or",
"pk",
".",
"header",
"!=",
"0xFF",
"or",
"struct",
".",
"unpack",
"(",
"'<BB'",
",",
"pk",
".",
"data",
"[",
"0",
":",
"2",
"]",
")",
"!=",
"(",
"addr",
",",
"0x1C",
")",
")",
"and",
"retry_counter",
">=",
"0",
")",
":",
"pk",
"=",
"CRTPPacket",
"(",
")",
"pk",
".",
"set_header",
"(",
"0xFF",
",",
"0xFF",
")",
"pk",
".",
"data",
"=",
"struct",
".",
"pack",
"(",
"'<BBHH'",
",",
"addr",
",",
"0x1C",
",",
"page",
",",
"(",
"i",
"*",
"25",
")",
")",
"self",
".",
"link",
".",
"send_packet",
"(",
"pk",
")",
"pk",
"=",
"self",
".",
"link",
".",
"receive_packet",
"(",
"1",
")",
"retry_counter",
"-=",
"1",
"if",
"(",
"retry_counter",
"<",
"0",
")",
":",
"return",
"None",
"else",
":",
"buff",
"+=",
"pk",
".",
"data",
"[",
"6",
":",
"]",
"# For some reason we get one byte extra here...",
"return",
"buff",
"[",
"0",
":",
"page_size",
"]"
] | 35.884615 | 15.615385 |
def verify(value, msg):
"""
C-style validator
Keyword arguments:
value -- dictionary to validate (required)
msg -- the protobuf schema to validate against (required)
Returns:
True: If valid input
False: If invalid input
"""
return bool(value) and \
converts_to_proto(value, msg) and \
successfuly_encodes(msg) and \
special_typechecking(value, msg) | [
"def",
"verify",
"(",
"value",
",",
"msg",
")",
":",
"return",
"bool",
"(",
"value",
")",
"and",
"converts_to_proto",
"(",
"value",
",",
"msg",
")",
"and",
"successfuly_encodes",
"(",
"msg",
")",
"and",
"special_typechecking",
"(",
"value",
",",
"msg",
")"
] | 26 | 14.5 |
def update_board_chart(self, chart, team_context, board, name):
"""UpdateBoardChart.
Update a board chart
:param :class:`<BoardChart> <azure.devops.v5_0.work.models.BoardChart>` chart:
:param :class:`<TeamContext> <azure.devops.v5_0.work.models.TeamContext>` team_context: The team context for the operation
:param str board: Identifier for board, either board's backlog level name (Eg:"Stories") or Id
:param str name: The chart name
:rtype: :class:`<BoardChart> <azure.devops.v5_0.work.models.BoardChart>`
"""
project = None
team = None
if team_context is not None:
if team_context.project_id:
project = team_context.project_id
else:
project = team_context.project
if team_context.team_id:
team = team_context.team_id
else:
team = team_context.team
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'string')
if team is not None:
route_values['team'] = self._serialize.url('team', team, 'string')
if board is not None:
route_values['board'] = self._serialize.url('board', board, 'str')
if name is not None:
route_values['name'] = self._serialize.url('name', name, 'str')
content = self._serialize.body(chart, 'BoardChart')
response = self._send(http_method='PATCH',
location_id='45fe888c-239e-49fd-958c-df1a1ab21d97',
version='5.0',
route_values=route_values,
content=content)
return self._deserialize('BoardChart', response) | [
"def",
"update_board_chart",
"(",
"self",
",",
"chart",
",",
"team_context",
",",
"board",
",",
"name",
")",
":",
"project",
"=",
"None",
"team",
"=",
"None",
"if",
"team_context",
"is",
"not",
"None",
":",
"if",
"team_context",
".",
"project_id",
":",
"project",
"=",
"team_context",
".",
"project_id",
"else",
":",
"project",
"=",
"team_context",
".",
"project",
"if",
"team_context",
".",
"team_id",
":",
"team",
"=",
"team_context",
".",
"team_id",
"else",
":",
"team",
"=",
"team_context",
".",
"team",
"route_values",
"=",
"{",
"}",
"if",
"project",
"is",
"not",
"None",
":",
"route_values",
"[",
"'project'",
"]",
"=",
"self",
".",
"_serialize",
".",
"url",
"(",
"'project'",
",",
"project",
",",
"'string'",
")",
"if",
"team",
"is",
"not",
"None",
":",
"route_values",
"[",
"'team'",
"]",
"=",
"self",
".",
"_serialize",
".",
"url",
"(",
"'team'",
",",
"team",
",",
"'string'",
")",
"if",
"board",
"is",
"not",
"None",
":",
"route_values",
"[",
"'board'",
"]",
"=",
"self",
".",
"_serialize",
".",
"url",
"(",
"'board'",
",",
"board",
",",
"'str'",
")",
"if",
"name",
"is",
"not",
"None",
":",
"route_values",
"[",
"'name'",
"]",
"=",
"self",
".",
"_serialize",
".",
"url",
"(",
"'name'",
",",
"name",
",",
"'str'",
")",
"content",
"=",
"self",
".",
"_serialize",
".",
"body",
"(",
"chart",
",",
"'BoardChart'",
")",
"response",
"=",
"self",
".",
"_send",
"(",
"http_method",
"=",
"'PATCH'",
",",
"location_id",
"=",
"'45fe888c-239e-49fd-958c-df1a1ab21d97'",
",",
"version",
"=",
"'5.0'",
",",
"route_values",
"=",
"route_values",
",",
"content",
"=",
"content",
")",
"return",
"self",
".",
"_deserialize",
"(",
"'BoardChart'",
",",
"response",
")"
] | 48.162162 | 20.405405 |
def critic(self, real_pred, input):
"Create some `fake_pred` with the generator from `input` and compare them to `real_pred` in `self.loss_funcD`."
fake = self.gan_model.generator(input.requires_grad_(False)).requires_grad_(True)
fake_pred = self.gan_model.critic(fake)
return self.loss_funcC(real_pred, fake_pred) | [
"def",
"critic",
"(",
"self",
",",
"real_pred",
",",
"input",
")",
":",
"fake",
"=",
"self",
".",
"gan_model",
".",
"generator",
"(",
"input",
".",
"requires_grad_",
"(",
"False",
")",
")",
".",
"requires_grad_",
"(",
"True",
")",
"fake_pred",
"=",
"self",
".",
"gan_model",
".",
"critic",
"(",
"fake",
")",
"return",
"self",
".",
"loss_funcC",
"(",
"real_pred",
",",
"fake_pred",
")"
] | 68.4 | 30.4 |
def sequence_to_graph(G, seq, color='black'):
"""
Automatically construct graph given a sequence of characters.
"""
for x in seq:
if x.endswith("_1"): # Mutation
G.node(x, color=color, width="0.1", shape="circle", label="")
else:
G.node(x, color=color)
for a, b in pairwise(seq):
G.edge(a, b, color=color) | [
"def",
"sequence_to_graph",
"(",
"G",
",",
"seq",
",",
"color",
"=",
"'black'",
")",
":",
"for",
"x",
"in",
"seq",
":",
"if",
"x",
".",
"endswith",
"(",
"\"_1\"",
")",
":",
"# Mutation",
"G",
".",
"node",
"(",
"x",
",",
"color",
"=",
"color",
",",
"width",
"=",
"\"0.1\"",
",",
"shape",
"=",
"\"circle\"",
",",
"label",
"=",
"\"\"",
")",
"else",
":",
"G",
".",
"node",
"(",
"x",
",",
"color",
"=",
"color",
")",
"for",
"a",
",",
"b",
"in",
"pairwise",
"(",
"seq",
")",
":",
"G",
".",
"edge",
"(",
"a",
",",
"b",
",",
"color",
"=",
"color",
")"
] | 33.090909 | 12.363636 |
def _remote_file_exists(self, path):
"""
Determine if `path` exists by directly invoking os.path.exists() in the
target user account.
"""
LOG.debug('_remote_file_exists(%r)', path)
return self._connection.get_chain().call(
ansible_mitogen.target.file_exists,
mitogen.utils.cast(path)
) | [
"def",
"_remote_file_exists",
"(",
"self",
",",
"path",
")",
":",
"LOG",
".",
"debug",
"(",
"'_remote_file_exists(%r)'",
",",
"path",
")",
"return",
"self",
".",
"_connection",
".",
"get_chain",
"(",
")",
".",
"call",
"(",
"ansible_mitogen",
".",
"target",
".",
"file_exists",
",",
"mitogen",
".",
"utils",
".",
"cast",
"(",
"path",
")",
")"
] | 35.6 | 11.6 |
def set_pwm_freq(self, freq_hz):
"""Set the PWM frequency to the provided value in hertz."""
prescaleval = 25000000.0 # 25MHz
prescaleval /= 4096.0 # 12-bit
prescaleval /= float(freq_hz)
prescaleval -= 1.0
logger.debug('Setting PWM frequency to {0} Hz'.format(freq_hz))
logger.debug('Estimated pre-scale: {0}'.format(prescaleval))
prescale = int(math.floor(prescaleval + 0.5))
logger.debug('Final pre-scale: {0}'.format(prescale))
oldmode = self.i2c.read_U8(MODE1)
newmode = (oldmode & 0x7F) | 0x10 # sleep
self.i2c.write8(MODE1, newmode) # go to sleep
self.i2c.write8(PRESCALE, prescale)
self.i2c.write8(MODE1, oldmode)
time.sleep(0.005)
self.i2c.write8(MODE1, oldmode | 0x80) | [
"def",
"set_pwm_freq",
"(",
"self",
",",
"freq_hz",
")",
":",
"prescaleval",
"=",
"25000000.0",
"# 25MHz",
"prescaleval",
"/=",
"4096.0",
"# 12-bit",
"prescaleval",
"/=",
"float",
"(",
"freq_hz",
")",
"prescaleval",
"-=",
"1.0",
"logger",
".",
"debug",
"(",
"'Setting PWM frequency to {0} Hz'",
".",
"format",
"(",
"freq_hz",
")",
")",
"logger",
".",
"debug",
"(",
"'Estimated pre-scale: {0}'",
".",
"format",
"(",
"prescaleval",
")",
")",
"prescale",
"=",
"int",
"(",
"math",
".",
"floor",
"(",
"prescaleval",
"+",
"0.5",
")",
")",
"logger",
".",
"debug",
"(",
"'Final pre-scale: {0}'",
".",
"format",
"(",
"prescale",
")",
")",
"oldmode",
"=",
"self",
".",
"i2c",
".",
"read_U8",
"(",
"MODE1",
")",
"newmode",
"=",
"(",
"oldmode",
"&",
"0x7F",
")",
"|",
"0x10",
"# sleep",
"self",
".",
"i2c",
".",
"write8",
"(",
"MODE1",
",",
"newmode",
")",
"# go to sleep",
"self",
".",
"i2c",
".",
"write8",
"(",
"PRESCALE",
",",
"prescale",
")",
"self",
".",
"i2c",
".",
"write8",
"(",
"MODE1",
",",
"oldmode",
")",
"time",
".",
"sleep",
"(",
"0.005",
")",
"self",
".",
"i2c",
".",
"write8",
"(",
"MODE1",
",",
"oldmode",
"|",
"0x80",
")"
] | 47.176471 | 10.411765 |
def concatenate(vars, axis=-1):
"""
A utility function of concatenate.
"""
from deepy.core.neural_var import NeuralVariable
if isinstance(vars[0], NeuralVariable):
concat_var = Concatenate(axis=axis).compute(*vars)
if axis == -1 or axis == vars[0].tensor.ndim - 1:
concat_var.output_dim = sum([x.output_dim for x in vars], 0)
else:
concat_var = TT.concatenate(vars, axis)
return concat_var | [
"def",
"concatenate",
"(",
"vars",
",",
"axis",
"=",
"-",
"1",
")",
":",
"from",
"deepy",
".",
"core",
".",
"neural_var",
"import",
"NeuralVariable",
"if",
"isinstance",
"(",
"vars",
"[",
"0",
"]",
",",
"NeuralVariable",
")",
":",
"concat_var",
"=",
"Concatenate",
"(",
"axis",
"=",
"axis",
")",
".",
"compute",
"(",
"*",
"vars",
")",
"if",
"axis",
"==",
"-",
"1",
"or",
"axis",
"==",
"vars",
"[",
"0",
"]",
".",
"tensor",
".",
"ndim",
"-",
"1",
":",
"concat_var",
".",
"output_dim",
"=",
"sum",
"(",
"[",
"x",
".",
"output_dim",
"for",
"x",
"in",
"vars",
"]",
",",
"0",
")",
"else",
":",
"concat_var",
"=",
"TT",
".",
"concatenate",
"(",
"vars",
",",
"axis",
")",
"return",
"concat_var"
] | 36.833333 | 12.5 |
def _is_valid_api_url(self, url):
"""Callback for is_valid_api_url."""
# Check response is a JSON with ok: 1
data = {}
try:
r = requests.get(url, proxies=self.proxy_servers)
content = to_text_string(r.content, encoding='utf-8')
data = json.loads(content)
except Exception as error:
logger.error(str(error))
return data.get('ok', 0) == 1 | [
"def",
"_is_valid_api_url",
"(",
"self",
",",
"url",
")",
":",
"# Check response is a JSON with ok: 1",
"data",
"=",
"{",
"}",
"try",
":",
"r",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"proxies",
"=",
"self",
".",
"proxy_servers",
")",
"content",
"=",
"to_text_string",
"(",
"r",
".",
"content",
",",
"encoding",
"=",
"'utf-8'",
")",
"data",
"=",
"json",
".",
"loads",
"(",
"content",
")",
"except",
"Exception",
"as",
"error",
":",
"logger",
".",
"error",
"(",
"str",
"(",
"error",
")",
")",
"return",
"data",
".",
"get",
"(",
"'ok'",
",",
"0",
")",
"==",
"1"
] | 35.166667 | 13.666667 |
def get_dependencies(self, id, **kwargs):
"""
Get the direct dependencies of the specified configuration
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_dependencies(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Build configuration id (required)
:param int page_index: Page Index
:param int page_size: Pagination size
:param str sort: Sorting RSQL
:param str q: RSQL Query
:return: BuildConfigurationPage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_dependencies_with_http_info(id, **kwargs)
else:
(data) = self.get_dependencies_with_http_info(id, **kwargs)
return data | [
"def",
"get_dependencies",
"(",
"self",
",",
"id",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"[",
"'_return_http_data_only'",
"]",
"=",
"True",
"if",
"kwargs",
".",
"get",
"(",
"'callback'",
")",
":",
"return",
"self",
".",
"get_dependencies_with_http_info",
"(",
"id",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"(",
"data",
")",
"=",
"self",
".",
"get_dependencies_with_http_info",
"(",
"id",
",",
"*",
"*",
"kwargs",
")",
"return",
"data"
] | 41.448276 | 15.241379 |
def listdir( self, folder_id='0',
type_filter=None, offset=None, limit=None, **listdir_kwz ):
'''Return a list of objects in the specified folder_id.
limit is passed to the API, so might be used as optimization.
None means "fetch all items, with several requests, if necessary".
type_filter can be set to type (str) or sequence
of object types to return, post-api-call processing.'''
res = yield super(txBox, self).listdir(
folder_id=folder_id, offset=offset,
limit=limit if limit is not None else 900, **listdir_kwz )
lst = res['entries']
if limit is None: # treat it as "no limit", using several requests to fetch all items
while res['total_count'] > res['offset'] + res['limit']:
offset = res['offset'] + res['limit']
res = yield super(txBox, self).listdir(
folder_id=folder_id, offset=offset, limit=900, **listdir_kwz )
lst.extend(res['entries'])
if type_filter:
if isinstance(type_filter, types.StringTypes): type_filter = {type_filter}
lst = list(obj for obj in lst if obj['type'] in type_filter)
defer.returnValue(lst) | [
"def",
"listdir",
"(",
"self",
",",
"folder_id",
"=",
"'0'",
",",
"type_filter",
"=",
"None",
",",
"offset",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"*",
"*",
"listdir_kwz",
")",
":",
"res",
"=",
"yield",
"super",
"(",
"txBox",
",",
"self",
")",
".",
"listdir",
"(",
"folder_id",
"=",
"folder_id",
",",
"offset",
"=",
"offset",
",",
"limit",
"=",
"limit",
"if",
"limit",
"is",
"not",
"None",
"else",
"900",
",",
"*",
"*",
"listdir_kwz",
")",
"lst",
"=",
"res",
"[",
"'entries'",
"]",
"if",
"limit",
"is",
"None",
":",
"# treat it as \"no limit\", using several requests to fetch all items",
"while",
"res",
"[",
"'total_count'",
"]",
">",
"res",
"[",
"'offset'",
"]",
"+",
"res",
"[",
"'limit'",
"]",
":",
"offset",
"=",
"res",
"[",
"'offset'",
"]",
"+",
"res",
"[",
"'limit'",
"]",
"res",
"=",
"yield",
"super",
"(",
"txBox",
",",
"self",
")",
".",
"listdir",
"(",
"folder_id",
"=",
"folder_id",
",",
"offset",
"=",
"offset",
",",
"limit",
"=",
"900",
",",
"*",
"*",
"listdir_kwz",
")",
"lst",
".",
"extend",
"(",
"res",
"[",
"'entries'",
"]",
")",
"if",
"type_filter",
":",
"if",
"isinstance",
"(",
"type_filter",
",",
"types",
".",
"StringTypes",
")",
":",
"type_filter",
"=",
"{",
"type_filter",
"}",
"lst",
"=",
"list",
"(",
"obj",
"for",
"obj",
"in",
"lst",
"if",
"obj",
"[",
"'type'",
"]",
"in",
"type_filter",
")",
"defer",
".",
"returnValue",
"(",
"lst",
")"
] | 50.761905 | 18 |
def _ParseArgs(self, args, known_only):
"""Helper function to do the main argument parsing.
This function goes through args and does the bulk of the flag parsing.
It will find the corresponding flag in our flag dictionary, and call its
.parse() method on the flag value.
Args:
args: List of strings with the arguments to parse.
known_only: parse and remove known flags, return rest in unparsed_args
Returns:
A tuple with the following:
unknown_flags: List of (flag name, arg) for flags we don't know about.
unparsed_args: List of arguments we did not parse.
undefok: Set of flags that were given via --undefok.
Raises:
Error: on any parsing error.
ValueError: on flag value parsing error.
"""
unknown_flags, unparsed_args, undefok = [], [], set()
flag_dict = self.FlagDict()
args = iter(args)
for arg in args:
value = None
def GetValue():
# pylint: disable=cell-var-from-loop
try:
return next(args) if value is None else value
except StopIteration:
raise exceptions.Error('Missing value for flag ' + arg)
if not arg.startswith('-'):
# A non-argument: default is break, GNU is skip.
unparsed_args.append(arg)
if self.IsGnuGetOpt():
continue
else:
break
if arg == '--':
if known_only:
unparsed_args.append(arg)
break
if '=' in arg:
name, value = arg.lstrip('-').split('=', 1)
else:
name, value = arg.lstrip('-'), None
if not name:
# The argument is all dashes (including one dash).
unparsed_args.append(arg)
if self.IsGnuGetOpt():
continue
else:
break
# --undefok is a special case.
if name == 'undefok':
if known_only:
unparsed_args.append(arg)
value = GetValue()
undefok.update(v.strip() for v in value.split(','))
undefok.update('no' + v.strip() for v in value.split(','))
continue
flag = flag_dict.get(name)
if flag:
value = (flag.boolean and value is None) or GetValue()
elif name.startswith('no') and len(name) > 2:
# Boolean flags can take the form of --noflag, with no value.
noflag = flag_dict.get(name[2:])
if noflag and noflag.boolean:
if value is not None:
raise ValueError(arg + ' does not take an argument')
flag = noflag
value = False
if flag:
flag.parse(value)
flag.using_default_value = False
elif known_only:
unparsed_args.append(arg)
else:
unknown_flags.append((name, arg))
unparsed_args.extend(args)
return unknown_flags, unparsed_args, undefok | [
"def",
"_ParseArgs",
"(",
"self",
",",
"args",
",",
"known_only",
")",
":",
"unknown_flags",
",",
"unparsed_args",
",",
"undefok",
"=",
"[",
"]",
",",
"[",
"]",
",",
"set",
"(",
")",
"flag_dict",
"=",
"self",
".",
"FlagDict",
"(",
")",
"args",
"=",
"iter",
"(",
"args",
")",
"for",
"arg",
"in",
"args",
":",
"value",
"=",
"None",
"def",
"GetValue",
"(",
")",
":",
"# pylint: disable=cell-var-from-loop",
"try",
":",
"return",
"next",
"(",
"args",
")",
"if",
"value",
"is",
"None",
"else",
"value",
"except",
"StopIteration",
":",
"raise",
"exceptions",
".",
"Error",
"(",
"'Missing value for flag '",
"+",
"arg",
")",
"if",
"not",
"arg",
".",
"startswith",
"(",
"'-'",
")",
":",
"# A non-argument: default is break, GNU is skip.",
"unparsed_args",
".",
"append",
"(",
"arg",
")",
"if",
"self",
".",
"IsGnuGetOpt",
"(",
")",
":",
"continue",
"else",
":",
"break",
"if",
"arg",
"==",
"'--'",
":",
"if",
"known_only",
":",
"unparsed_args",
".",
"append",
"(",
"arg",
")",
"break",
"if",
"'='",
"in",
"arg",
":",
"name",
",",
"value",
"=",
"arg",
".",
"lstrip",
"(",
"'-'",
")",
".",
"split",
"(",
"'='",
",",
"1",
")",
"else",
":",
"name",
",",
"value",
"=",
"arg",
".",
"lstrip",
"(",
"'-'",
")",
",",
"None",
"if",
"not",
"name",
":",
"# The argument is all dashes (including one dash).",
"unparsed_args",
".",
"append",
"(",
"arg",
")",
"if",
"self",
".",
"IsGnuGetOpt",
"(",
")",
":",
"continue",
"else",
":",
"break",
"# --undefok is a special case.",
"if",
"name",
"==",
"'undefok'",
":",
"if",
"known_only",
":",
"unparsed_args",
".",
"append",
"(",
"arg",
")",
"value",
"=",
"GetValue",
"(",
")",
"undefok",
".",
"update",
"(",
"v",
".",
"strip",
"(",
")",
"for",
"v",
"in",
"value",
".",
"split",
"(",
"','",
")",
")",
"undefok",
".",
"update",
"(",
"'no'",
"+",
"v",
".",
"strip",
"(",
")",
"for",
"v",
"in",
"value",
".",
"split",
"(",
"','",
")",
")",
"continue",
"flag",
"=",
"flag_dict",
".",
"get",
"(",
"name",
")",
"if",
"flag",
":",
"value",
"=",
"(",
"flag",
".",
"boolean",
"and",
"value",
"is",
"None",
")",
"or",
"GetValue",
"(",
")",
"elif",
"name",
".",
"startswith",
"(",
"'no'",
")",
"and",
"len",
"(",
"name",
")",
">",
"2",
":",
"# Boolean flags can take the form of --noflag, with no value.",
"noflag",
"=",
"flag_dict",
".",
"get",
"(",
"name",
"[",
"2",
":",
"]",
")",
"if",
"noflag",
"and",
"noflag",
".",
"boolean",
":",
"if",
"value",
"is",
"not",
"None",
":",
"raise",
"ValueError",
"(",
"arg",
"+",
"' does not take an argument'",
")",
"flag",
"=",
"noflag",
"value",
"=",
"False",
"if",
"flag",
":",
"flag",
".",
"parse",
"(",
"value",
")",
"flag",
".",
"using_default_value",
"=",
"False",
"elif",
"known_only",
":",
"unparsed_args",
".",
"append",
"(",
"arg",
")",
"else",
":",
"unknown_flags",
".",
"append",
"(",
"(",
"name",
",",
"arg",
")",
")",
"unparsed_args",
".",
"extend",
"(",
"args",
")",
"return",
"unknown_flags",
",",
"unparsed_args",
",",
"undefok"
] | 29.387097 | 20.182796 |
def load_scheduler_plugins(self):
"""Refresh the list of available schedulers
Returns:
`list` of :obj:`BaseScheduler`
"""
if not self.scheduler_plugins:
for entry_point in CINQ_PLUGINS['cloud_inquisitor.plugins.schedulers']['plugins']:
cls = entry_point.load()
self.scheduler_plugins[cls.__name__] = cls
if cls.__name__ == self.active_scheduler:
self.log.debug('Scheduler loaded: {} in module {}'.format(cls.__name__, cls.__module__))
else:
self.log.debug('Scheduler disabled: {} in module {}'.format(cls.__name__, cls.__module__)) | [
"def",
"load_scheduler_plugins",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"scheduler_plugins",
":",
"for",
"entry_point",
"in",
"CINQ_PLUGINS",
"[",
"'cloud_inquisitor.plugins.schedulers'",
"]",
"[",
"'plugins'",
"]",
":",
"cls",
"=",
"entry_point",
".",
"load",
"(",
")",
"self",
".",
"scheduler_plugins",
"[",
"cls",
".",
"__name__",
"]",
"=",
"cls",
"if",
"cls",
".",
"__name__",
"==",
"self",
".",
"active_scheduler",
":",
"self",
".",
"log",
".",
"debug",
"(",
"'Scheduler loaded: {} in module {}'",
".",
"format",
"(",
"cls",
".",
"__name__",
",",
"cls",
".",
"__module__",
")",
")",
"else",
":",
"self",
".",
"log",
".",
"debug",
"(",
"'Scheduler disabled: {} in module {}'",
".",
"format",
"(",
"cls",
".",
"__name__",
",",
"cls",
".",
"__module__",
")",
")"
] | 48.5 | 22.928571 |
def _SetUnknownFlag(self, name, value):
"""Returns value if setting flag |name| to |value| returned True.
Args:
name: Name of the flag to set.
value: Value to set.
Returns:
Flag value on successful call.
Raises:
UnrecognizedFlagError
IllegalFlagValueError
"""
setter = self.__dict__['__set_unknown']
if setter:
try:
setter(name, value)
return value
except (TypeError, ValueError): # Flag value is not valid.
raise exceptions.IllegalFlagValueError('"{1}" is not valid for --{0}'
.format(name, value))
except NameError: # Flag name is not valid.
pass
raise exceptions.UnrecognizedFlagError(name, value) | [
"def",
"_SetUnknownFlag",
"(",
"self",
",",
"name",
",",
"value",
")",
":",
"setter",
"=",
"self",
".",
"__dict__",
"[",
"'__set_unknown'",
"]",
"if",
"setter",
":",
"try",
":",
"setter",
"(",
"name",
",",
"value",
")",
"return",
"value",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"# Flag value is not valid.",
"raise",
"exceptions",
".",
"IllegalFlagValueError",
"(",
"'\"{1}\" is not valid for --{0}'",
".",
"format",
"(",
"name",
",",
"value",
")",
")",
"except",
"NameError",
":",
"# Flag name is not valid.",
"pass",
"raise",
"exceptions",
".",
"UnrecognizedFlagError",
"(",
"name",
",",
"value",
")"
] | 29.6 | 19.68 |
def res_to_url(resource, action):
"""Convert resource.action to (url, HTTP_METHOD)"""
i = action.find("_")
if i < 0:
url = "/" + resource
httpmethod = action
else:
url = "/%s/%s" % (resource, action[i + 1:])
httpmethod = action[:i]
return url, httpmethod.upper() | [
"def",
"res_to_url",
"(",
"resource",
",",
"action",
")",
":",
"i",
"=",
"action",
".",
"find",
"(",
"\"_\"",
")",
"if",
"i",
"<",
"0",
":",
"url",
"=",
"\"/\"",
"+",
"resource",
"httpmethod",
"=",
"action",
"else",
":",
"url",
"=",
"\"/%s/%s\"",
"%",
"(",
"resource",
",",
"action",
"[",
"i",
"+",
"1",
":",
"]",
")",
"httpmethod",
"=",
"action",
"[",
":",
"i",
"]",
"return",
"url",
",",
"httpmethod",
".",
"upper",
"(",
")"
] | 30.5 | 13.2 |
def select_options(self, options_prefix):
""" Select options from this selection, that are started with the specified prefix
:param options_prefix: name prefix of options that should be selected
:return: WConfigSelection
"""
return WConfigSelection(
self.config(), self.section(), self.option_prefix() + options_prefix
) | [
"def",
"select_options",
"(",
"self",
",",
"options_prefix",
")",
":",
"return",
"WConfigSelection",
"(",
"self",
".",
"config",
"(",
")",
",",
"self",
".",
"section",
"(",
")",
",",
"self",
".",
"option_prefix",
"(",
")",
"+",
"options_prefix",
")"
] | 36.444444 | 18.555556 |
def to_gds(self, multiplier):
"""
Convert this object to a GDSII element.
Parameters
----------
multiplier : number
A number that multiplies all dimensions written in the GDSII
element.
Returns
-------
out : string
The GDSII binary string that represents this object.
"""
name = self.ref_cell.name
if len(name) % 2 != 0:
name = name + '\0'
data = struct.pack('>4h', 4, 0x0A00, 4 + len(name),
0x1206) + name.encode('ascii')
if (self.rotation is not None) or (self.magnification is
not None) or self.x_reflection:
word = 0
values = b''
if self.x_reflection:
word += 0x8000
if not (self.magnification is None):
# This flag indicates that the magnification is absolute, not
# relative (not supported).
#word += 0x0004
values += struct.pack('>2h', 12, 0x1B05) + _eight_byte_real(
self.magnification)
if not (self.rotation is None):
# This flag indicates that the rotation is absolute, not
# relative (not supported).
#word += 0x0002
values += struct.pack('>2h', 12, 0x1C05) + _eight_byte_real(
self.rotation)
data += struct.pack('>2hH', 6, 0x1A01, word) + values
return data + struct.pack(
'>2h2l2h', 12, 0x1003, int(round(self.origin[0] * multiplier)),
int(round(self.origin[1] * multiplier)), 4, 0x1100) | [
"def",
"to_gds",
"(",
"self",
",",
"multiplier",
")",
":",
"name",
"=",
"self",
".",
"ref_cell",
".",
"name",
"if",
"len",
"(",
"name",
")",
"%",
"2",
"!=",
"0",
":",
"name",
"=",
"name",
"+",
"'\\0'",
"data",
"=",
"struct",
".",
"pack",
"(",
"'>4h'",
",",
"4",
",",
"0x0A00",
",",
"4",
"+",
"len",
"(",
"name",
")",
",",
"0x1206",
")",
"+",
"name",
".",
"encode",
"(",
"'ascii'",
")",
"if",
"(",
"self",
".",
"rotation",
"is",
"not",
"None",
")",
"or",
"(",
"self",
".",
"magnification",
"is",
"not",
"None",
")",
"or",
"self",
".",
"x_reflection",
":",
"word",
"=",
"0",
"values",
"=",
"b''",
"if",
"self",
".",
"x_reflection",
":",
"word",
"+=",
"0x8000",
"if",
"not",
"(",
"self",
".",
"magnification",
"is",
"None",
")",
":",
"# This flag indicates that the magnification is absolute, not",
"# relative (not supported).",
"#word += 0x0004",
"values",
"+=",
"struct",
".",
"pack",
"(",
"'>2h'",
",",
"12",
",",
"0x1B05",
")",
"+",
"_eight_byte_real",
"(",
"self",
".",
"magnification",
")",
"if",
"not",
"(",
"self",
".",
"rotation",
"is",
"None",
")",
":",
"# This flag indicates that the rotation is absolute, not",
"# relative (not supported).",
"#word += 0x0002",
"values",
"+=",
"struct",
".",
"pack",
"(",
"'>2h'",
",",
"12",
",",
"0x1C05",
")",
"+",
"_eight_byte_real",
"(",
"self",
".",
"rotation",
")",
"data",
"+=",
"struct",
".",
"pack",
"(",
"'>2hH'",
",",
"6",
",",
"0x1A01",
",",
"word",
")",
"+",
"values",
"return",
"data",
"+",
"struct",
".",
"pack",
"(",
"'>2h2l2h'",
",",
"12",
",",
"0x1003",
",",
"int",
"(",
"round",
"(",
"self",
".",
"origin",
"[",
"0",
"]",
"*",
"multiplier",
")",
")",
",",
"int",
"(",
"round",
"(",
"self",
".",
"origin",
"[",
"1",
"]",
"*",
"multiplier",
")",
")",
",",
"4",
",",
"0x1100",
")"
] | 39.785714 | 17.785714 |
def blocking_delete(self, meta=None, index_fields=None):
"""
Deletes and waits till the backend properly update indexes for just deleted object.
meta (dict): JSON serializable meta data for logging of save operation.
{'lorem': 'ipsum', 'dolar': 5}
index_fields (list): Tuple list for indexing keys in riak (with 'bin' or 'int').
bin is used for string fields, int is used for integer fields.
[('lorem','bin'),('dolar','int')]
"""
self.delete(meta=meta, index_fields=index_fields)
while self.objects.filter(key=self.key).count():
time.sleep(0.3) | [
"def",
"blocking_delete",
"(",
"self",
",",
"meta",
"=",
"None",
",",
"index_fields",
"=",
"None",
")",
":",
"self",
".",
"delete",
"(",
"meta",
"=",
"meta",
",",
"index_fields",
"=",
"index_fields",
")",
"while",
"self",
".",
"objects",
".",
"filter",
"(",
"key",
"=",
"self",
".",
"key",
")",
".",
"count",
"(",
")",
":",
"time",
".",
"sleep",
"(",
"0.3",
")"
] | 53.083333 | 20.083333 |
def set_broadcast_layout(self, broadcast_id, layout_type, stylesheet=None):
"""
Use this method to change the layout type of a live streaming broadcast
:param String broadcast_id: The ID of the broadcast that will be updated
:param String layout_type: The layout type for the broadcast. Valid values are:
'bestFit', 'custom', 'horizontalPresentation', 'pip' and 'verticalPresentation'
:param String stylesheet optional: CSS used to style the custom layout.
Specify this only if you set the type property to 'custom'
"""
payload = {
'type': layout_type,
}
if layout_type == 'custom':
if stylesheet is not None:
payload['stylesheet'] = stylesheet
endpoint = self.endpoints.broadcast_url(broadcast_id, layout=True)
response = requests.put(
endpoint,
data=json.dumps(payload),
headers=self.json_headers(),
proxies=self.proxies,
timeout=self.timeout
)
if response.status_code == 200:
pass
elif response.status_code == 400:
raise BroadcastError(
'Invalid request. This response may indicate that data in your request data is '
'invalid JSON. It may also indicate that you passed in invalid layout options.')
elif response.status_code == 403:
raise AuthError('Authentication error.')
else:
raise RequestError('OpenTok server error.', response.status_code) | [
"def",
"set_broadcast_layout",
"(",
"self",
",",
"broadcast_id",
",",
"layout_type",
",",
"stylesheet",
"=",
"None",
")",
":",
"payload",
"=",
"{",
"'type'",
":",
"layout_type",
",",
"}",
"if",
"layout_type",
"==",
"'custom'",
":",
"if",
"stylesheet",
"is",
"not",
"None",
":",
"payload",
"[",
"'stylesheet'",
"]",
"=",
"stylesheet",
"endpoint",
"=",
"self",
".",
"endpoints",
".",
"broadcast_url",
"(",
"broadcast_id",
",",
"layout",
"=",
"True",
")",
"response",
"=",
"requests",
".",
"put",
"(",
"endpoint",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"payload",
")",
",",
"headers",
"=",
"self",
".",
"json_headers",
"(",
")",
",",
"proxies",
"=",
"self",
".",
"proxies",
",",
"timeout",
"=",
"self",
".",
"timeout",
")",
"if",
"response",
".",
"status_code",
"==",
"200",
":",
"pass",
"elif",
"response",
".",
"status_code",
"==",
"400",
":",
"raise",
"BroadcastError",
"(",
"'Invalid request. This response may indicate that data in your request data is '",
"'invalid JSON. It may also indicate that you passed in invalid layout options.'",
")",
"elif",
"response",
".",
"status_code",
"==",
"403",
":",
"raise",
"AuthError",
"(",
"'Authentication error.'",
")",
"else",
":",
"raise",
"RequestError",
"(",
"'OpenTok server error.'",
",",
"response",
".",
"status_code",
")"
] | 39.487179 | 23.641026 |
def on_while(self, node): # ('test', 'body', 'orelse')
"""While blocks."""
while self.run(node.test):
self._interrupt = None
for tnode in node.body:
self.run(tnode)
if self._interrupt is not None:
break
if isinstance(self._interrupt, ast.Break):
break
else:
for tnode in node.orelse:
self.run(tnode)
self._interrupt = None | [
"def",
"on_while",
"(",
"self",
",",
"node",
")",
":",
"# ('test', 'body', 'orelse')",
"while",
"self",
".",
"run",
"(",
"node",
".",
"test",
")",
":",
"self",
".",
"_interrupt",
"=",
"None",
"for",
"tnode",
"in",
"node",
".",
"body",
":",
"self",
".",
"run",
"(",
"tnode",
")",
"if",
"self",
".",
"_interrupt",
"is",
"not",
"None",
":",
"break",
"if",
"isinstance",
"(",
"self",
".",
"_interrupt",
",",
"ast",
".",
"Break",
")",
":",
"break",
"else",
":",
"for",
"tnode",
"in",
"node",
".",
"orelse",
":",
"self",
".",
"run",
"(",
"tnode",
")",
"self",
".",
"_interrupt",
"=",
"None"
] | 34 | 10.5 |
def union(self, other, left_name="LEFT", right_name="RIGHT"):
"""
*Wrapper of* ``UNION``
The UNION operation is used to integrate homogeneous or heterogeneous samples of two
datasets within a single dataset; for each sample of either one of the input datasets, a
sample is created in the result as follows:
* its metadata are the same as in the original sample;
* its schema is the schema of the first (left) input dataset; new
identifiers are assigned to each output sample;
* its regions are the same (in coordinates and attribute values) as in the original
sample. Region attributes which are missing in an input dataset sample
(w.r.t. the merged schema) are set to null.
:param other: a GMQLDataset
:param left_name: name that you want to assign to the left dataset
:param right_name: name tha t you want to assign to the right dataset
:return: a new GMQLDataset
Example of usage::
import gmql as gl
d1 = gl.get_example_dataset("Example_Dataset_1")
d2 = gl.get_example_dataset("Example_Dataset_2")
result = d1.union(other=d2, left_name="D1", right_name="D2")
"""
if not isinstance(left_name, str) or \
not isinstance(right_name, str):
raise TypeError("left_name and right_name must be strings. "
"{} - {} was provided".format(type(left_name), type(right_name)))
if isinstance(other, GMQLDataset):
other_idx = other.__index
else:
raise TypeError("other must be a GMQLDataset. "
"{} was provided".format(type(other)))
if len(left_name) == 0 or len(right_name) == 0:
raise ValueError("left_name and right_name must not be empty")
new_index = self.opmng.union(self.__index, other_idx, left_name, right_name)
new_local_sources, new_remote_sources = self.__combine_sources(self, other)
new_location = self.__combine_locations(self, other)
return GMQLDataset(index=new_index, location=new_location,
local_sources=new_local_sources,
remote_sources=new_remote_sources,
meta_profile=self.meta_profile) | [
"def",
"union",
"(",
"self",
",",
"other",
",",
"left_name",
"=",
"\"LEFT\"",
",",
"right_name",
"=",
"\"RIGHT\"",
")",
":",
"if",
"not",
"isinstance",
"(",
"left_name",
",",
"str",
")",
"or",
"not",
"isinstance",
"(",
"right_name",
",",
"str",
")",
":",
"raise",
"TypeError",
"(",
"\"left_name and right_name must be strings. \"",
"\"{} - {} was provided\"",
".",
"format",
"(",
"type",
"(",
"left_name",
")",
",",
"type",
"(",
"right_name",
")",
")",
")",
"if",
"isinstance",
"(",
"other",
",",
"GMQLDataset",
")",
":",
"other_idx",
"=",
"other",
".",
"__index",
"else",
":",
"raise",
"TypeError",
"(",
"\"other must be a GMQLDataset. \"",
"\"{} was provided\"",
".",
"format",
"(",
"type",
"(",
"other",
")",
")",
")",
"if",
"len",
"(",
"left_name",
")",
"==",
"0",
"or",
"len",
"(",
"right_name",
")",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"\"left_name and right_name must not be empty\"",
")",
"new_index",
"=",
"self",
".",
"opmng",
".",
"union",
"(",
"self",
".",
"__index",
",",
"other_idx",
",",
"left_name",
",",
"right_name",
")",
"new_local_sources",
",",
"new_remote_sources",
"=",
"self",
".",
"__combine_sources",
"(",
"self",
",",
"other",
")",
"new_location",
"=",
"self",
".",
"__combine_locations",
"(",
"self",
",",
"other",
")",
"return",
"GMQLDataset",
"(",
"index",
"=",
"new_index",
",",
"location",
"=",
"new_location",
",",
"local_sources",
"=",
"new_local_sources",
",",
"remote_sources",
"=",
"new_remote_sources",
",",
"meta_profile",
"=",
"self",
".",
"meta_profile",
")"
] | 47.12 | 26.36 |
def cli(ctx, hostname, username, password, config_dir, https):
"""Command-line interface for interacting with a WVA device"""
ctx.is_root = True
ctx.user_values_entered = False
ctx.config_dir = os.path.abspath(os.path.expanduser(config_dir))
ctx.config = load_config(ctx)
ctx.hostname = hostname
ctx.username = username
ctx.password = password
ctx.https = https
# Creating the WVA object is deferred as some commands like clearconfig
# should not require a username/password to perform them
ctx.wva = None | [
"def",
"cli",
"(",
"ctx",
",",
"hostname",
",",
"username",
",",
"password",
",",
"config_dir",
",",
"https",
")",
":",
"ctx",
".",
"is_root",
"=",
"True",
"ctx",
".",
"user_values_entered",
"=",
"False",
"ctx",
".",
"config_dir",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"expanduser",
"(",
"config_dir",
")",
")",
"ctx",
".",
"config",
"=",
"load_config",
"(",
"ctx",
")",
"ctx",
".",
"hostname",
"=",
"hostname",
"ctx",
".",
"username",
"=",
"username",
"ctx",
".",
"password",
"=",
"password",
"ctx",
".",
"https",
"=",
"https",
"# Creating the WVA object is deferred as some commands like clearconfig",
"# should not require a username/password to perform them",
"ctx",
".",
"wva",
"=",
"None"
] | 38.642857 | 18.214286 |
def _redraw(self):
"""
Render the command line again. (Not thread safe!) (From other threads,
or if unsure, use :meth:`.CommandLineInterface.invalidate`.)
"""
# Only draw when no sub application was started.
if self._is_running and self._sub_cli is None:
self.render_counter += 1
self.renderer.render(self, self.layout, is_done=self.is_done)
# Fire render event.
self.on_render.fire() | [
"def",
"_redraw",
"(",
"self",
")",
":",
"# Only draw when no sub application was started.",
"if",
"self",
".",
"_is_running",
"and",
"self",
".",
"_sub_cli",
"is",
"None",
":",
"self",
".",
"render_counter",
"+=",
"1",
"self",
".",
"renderer",
".",
"render",
"(",
"self",
",",
"self",
".",
"layout",
",",
"is_done",
"=",
"self",
".",
"is_done",
")",
"# Fire render event.",
"self",
".",
"on_render",
".",
"fire",
"(",
")"
] | 39.166667 | 17.5 |
def to_utf8(datas):
"""
Force utf8 string entries in the given datas
"""
res = datas
if isinstance(datas, dict):
res = {}
for key, value in datas.items():
key = to_utf8(key)
value = to_utf8(value)
res[key] = value
elif isinstance(datas, (list, tuple)):
res = []
for data in datas:
res.append(to_utf8(data))
elif isinstance(datas, unicode):
res = datas.encode('utf-8')
return res | [
"def",
"to_utf8",
"(",
"datas",
")",
":",
"res",
"=",
"datas",
"if",
"isinstance",
"(",
"datas",
",",
"dict",
")",
":",
"res",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"datas",
".",
"items",
"(",
")",
":",
"key",
"=",
"to_utf8",
"(",
"key",
")",
"value",
"=",
"to_utf8",
"(",
"value",
")",
"res",
"[",
"key",
"]",
"=",
"value",
"elif",
"isinstance",
"(",
"datas",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"res",
"=",
"[",
"]",
"for",
"data",
"in",
"datas",
":",
"res",
".",
"append",
"(",
"to_utf8",
"(",
"data",
")",
")",
"elif",
"isinstance",
"(",
"datas",
",",
"unicode",
")",
":",
"res",
"=",
"datas",
".",
"encode",
"(",
"'utf-8'",
")",
"return",
"res"
] | 22.904762 | 14.904762 |
def establish_connection(self, width=None, height=None):
"""Establish SSH connection to the network device
Timeout will generate a NetMikoTimeoutException
Authentication failure will generate a NetMikoAuthenticationException
width and height are needed for Fortinet paging setting.
:param width: Specified width of the VT100 terminal window
:type width: int
:param height: Specified height of the VT100 terminal window
:type height: int
"""
if self.protocol == "telnet":
self.remote_conn = telnetlib.Telnet(
self.host, port=self.port, timeout=self.timeout
)
self.telnet_login()
elif self.protocol == "serial":
self.remote_conn = serial.Serial(**self.serial_settings)
self.serial_login()
elif self.protocol == "ssh":
ssh_connect_params = self._connect_params_dict()
self.remote_conn_pre = self._build_ssh_client()
# initiate SSH connection
try:
self.remote_conn_pre.connect(**ssh_connect_params)
except socket.error:
self.paramiko_cleanup()
msg = "Connection to device timed-out: {device_type} {ip}:{port}".format(
device_type=self.device_type, ip=self.host, port=self.port
)
raise NetMikoTimeoutException(msg)
except paramiko.ssh_exception.AuthenticationException as auth_err:
self.paramiko_cleanup()
msg = "Authentication failure: unable to connect {device_type} {ip}:{port}".format(
device_type=self.device_type, ip=self.host, port=self.port
)
msg += self.RETURN + text_type(auth_err)
raise NetMikoAuthenticationException(msg)
if self.verbose:
print(
"SSH connection established to {}:{}".format(self.host, self.port)
)
# Use invoke_shell to establish an 'interactive session'
if width and height:
self.remote_conn = self.remote_conn_pre.invoke_shell(
term="vt100", width=width, height=height
)
else:
self.remote_conn = self.remote_conn_pre.invoke_shell()
self.remote_conn.settimeout(self.blocking_timeout)
if self.keepalive:
self.remote_conn.transport.set_keepalive(self.keepalive)
self.special_login_handler()
if self.verbose:
print("Interactive SSH session established")
return "" | [
"def",
"establish_connection",
"(",
"self",
",",
"width",
"=",
"None",
",",
"height",
"=",
"None",
")",
":",
"if",
"self",
".",
"protocol",
"==",
"\"telnet\"",
":",
"self",
".",
"remote_conn",
"=",
"telnetlib",
".",
"Telnet",
"(",
"self",
".",
"host",
",",
"port",
"=",
"self",
".",
"port",
",",
"timeout",
"=",
"self",
".",
"timeout",
")",
"self",
".",
"telnet_login",
"(",
")",
"elif",
"self",
".",
"protocol",
"==",
"\"serial\"",
":",
"self",
".",
"remote_conn",
"=",
"serial",
".",
"Serial",
"(",
"*",
"*",
"self",
".",
"serial_settings",
")",
"self",
".",
"serial_login",
"(",
")",
"elif",
"self",
".",
"protocol",
"==",
"\"ssh\"",
":",
"ssh_connect_params",
"=",
"self",
".",
"_connect_params_dict",
"(",
")",
"self",
".",
"remote_conn_pre",
"=",
"self",
".",
"_build_ssh_client",
"(",
")",
"# initiate SSH connection",
"try",
":",
"self",
".",
"remote_conn_pre",
".",
"connect",
"(",
"*",
"*",
"ssh_connect_params",
")",
"except",
"socket",
".",
"error",
":",
"self",
".",
"paramiko_cleanup",
"(",
")",
"msg",
"=",
"\"Connection to device timed-out: {device_type} {ip}:{port}\"",
".",
"format",
"(",
"device_type",
"=",
"self",
".",
"device_type",
",",
"ip",
"=",
"self",
".",
"host",
",",
"port",
"=",
"self",
".",
"port",
")",
"raise",
"NetMikoTimeoutException",
"(",
"msg",
")",
"except",
"paramiko",
".",
"ssh_exception",
".",
"AuthenticationException",
"as",
"auth_err",
":",
"self",
".",
"paramiko_cleanup",
"(",
")",
"msg",
"=",
"\"Authentication failure: unable to connect {device_type} {ip}:{port}\"",
".",
"format",
"(",
"device_type",
"=",
"self",
".",
"device_type",
",",
"ip",
"=",
"self",
".",
"host",
",",
"port",
"=",
"self",
".",
"port",
")",
"msg",
"+=",
"self",
".",
"RETURN",
"+",
"text_type",
"(",
"auth_err",
")",
"raise",
"NetMikoAuthenticationException",
"(",
"msg",
")",
"if",
"self",
".",
"verbose",
":",
"print",
"(",
"\"SSH connection established to {}:{}\"",
".",
"format",
"(",
"self",
".",
"host",
",",
"self",
".",
"port",
")",
")",
"# Use invoke_shell to establish an 'interactive session'",
"if",
"width",
"and",
"height",
":",
"self",
".",
"remote_conn",
"=",
"self",
".",
"remote_conn_pre",
".",
"invoke_shell",
"(",
"term",
"=",
"\"vt100\"",
",",
"width",
"=",
"width",
",",
"height",
"=",
"height",
")",
"else",
":",
"self",
".",
"remote_conn",
"=",
"self",
".",
"remote_conn_pre",
".",
"invoke_shell",
"(",
")",
"self",
".",
"remote_conn",
".",
"settimeout",
"(",
"self",
".",
"blocking_timeout",
")",
"if",
"self",
".",
"keepalive",
":",
"self",
".",
"remote_conn",
".",
"transport",
".",
"set_keepalive",
"(",
"self",
".",
"keepalive",
")",
"self",
".",
"special_login_handler",
"(",
")",
"if",
"self",
".",
"verbose",
":",
"print",
"(",
"\"Interactive SSH session established\"",
")",
"return",
"\"\""
] | 41.761905 | 22.238095 |
async def undo_check_in(self):
""" Undo the check in for this participant
|methcoro|
Warning:
|unstable|
Raises:
APIException
"""
res = await self.connection('POST', 'tournaments/{}/participants/{}/undo_check_in'.format(self._tournament_id, self._id))
self._refresh_from_json(res) | [
"async",
"def",
"undo_check_in",
"(",
"self",
")",
":",
"res",
"=",
"await",
"self",
".",
"connection",
"(",
"'POST'",
",",
"'tournaments/{}/participants/{}/undo_check_in'",
".",
"format",
"(",
"self",
".",
"_tournament_id",
",",
"self",
".",
"_id",
")",
")",
"self",
".",
"_refresh_from_json",
"(",
"res",
")"
] | 25.071429 | 26.285714 |
def write_traceback(logger=None, exc_info=None):
"""
Write the latest traceback to the log.
This should be used inside an C{except} block. For example:
try:
dostuff()
except:
write_traceback(logger)
Or you can pass the result of C{sys.exc_info()} to the C{exc_info}
parameter.
"""
if exc_info is None:
exc_info = sys.exc_info()
typ, exception, tb = exc_info
traceback = "".join(_traceback_no_io.format_exception(typ, exception, tb))
_writeTracebackMessage(logger, typ, exception, traceback) | [
"def",
"write_traceback",
"(",
"logger",
"=",
"None",
",",
"exc_info",
"=",
"None",
")",
":",
"if",
"exc_info",
"is",
"None",
":",
"exc_info",
"=",
"sys",
".",
"exc_info",
"(",
")",
"typ",
",",
"exception",
",",
"tb",
"=",
"exc_info",
"traceback",
"=",
"\"\"",
".",
"join",
"(",
"_traceback_no_io",
".",
"format_exception",
"(",
"typ",
",",
"exception",
",",
"tb",
")",
")",
"_writeTracebackMessage",
"(",
"logger",
",",
"typ",
",",
"exception",
",",
"traceback",
")"
] | 29.842105 | 19.526316 |
def get_airport_weather(self, iata, page=1, limit=100):
"""Retrieve the weather at an airport
Given the IATA code of an airport, this method returns the weather information.
Args:
iata (str): The IATA code for an airport, e.g. HYD
page (int): Optional page number; for users who are on a plan with flightradar24 they can pass in higher page numbers to get more data
limit (int): Optional limit on number of records returned
Returns:
A list of dicts with the data; one dict for each row of data from flightradar24
Example::
from pyflightdata import FlightData
f=FlightData()
#optional login
f.login(myemail,mypassword)
f.get_airport_weather('HYD')
f.get_airport_weather('HYD',page=1,limit=10)
"""
url = AIRPORT_DATA_BASE.format(iata, str(self.AUTH_TOKEN), page, limit)
weather = self._fr24.get_airport_weather(url)
mi = weather['sky']['visibility']['mi']
if (mi is not None) and (mi != "None"):
mi = float(mi)
km = mi * 1.6094
weather['sky']['visibility']['km'] = km
return weather | [
"def",
"get_airport_weather",
"(",
"self",
",",
"iata",
",",
"page",
"=",
"1",
",",
"limit",
"=",
"100",
")",
":",
"url",
"=",
"AIRPORT_DATA_BASE",
".",
"format",
"(",
"iata",
",",
"str",
"(",
"self",
".",
"AUTH_TOKEN",
")",
",",
"page",
",",
"limit",
")",
"weather",
"=",
"self",
".",
"_fr24",
".",
"get_airport_weather",
"(",
"url",
")",
"mi",
"=",
"weather",
"[",
"'sky'",
"]",
"[",
"'visibility'",
"]",
"[",
"'mi'",
"]",
"if",
"(",
"mi",
"is",
"not",
"None",
")",
"and",
"(",
"mi",
"!=",
"\"None\"",
")",
":",
"mi",
"=",
"float",
"(",
"mi",
")",
"km",
"=",
"mi",
"*",
"1.6094",
"weather",
"[",
"'sky'",
"]",
"[",
"'visibility'",
"]",
"[",
"'km'",
"]",
"=",
"km",
"return",
"weather"
] | 38.709677 | 24.387097 |
def options(self, request, tag):
"""
Render each of the options of the wrapped L{ChoiceParameter} instance.
"""
option = tag.patternGenerator('option')
return tag[[
OptionView(index, o, option())
for (index, o)
in enumerate(self.parameter.choices)]] | [
"def",
"options",
"(",
"self",
",",
"request",
",",
"tag",
")",
":",
"option",
"=",
"tag",
".",
"patternGenerator",
"(",
"'option'",
")",
"return",
"tag",
"[",
"[",
"OptionView",
"(",
"index",
",",
"o",
",",
"option",
"(",
")",
")",
"for",
"(",
"index",
",",
"o",
")",
"in",
"enumerate",
"(",
"self",
".",
"parameter",
".",
"choices",
")",
"]",
"]"
] | 36.555556 | 11.444444 |
def _pre_mongod_server_start(server, options_override=None):
"""
Does necessary work before starting a server
1- An efficiency step for arbiters running with --no-journal
* there is a lock file ==>
* server must not have exited cleanly from last run, and does not know
how to auto-recover (as a journalled server would)
* however: this is an arbiter, therefore
* there is no need to repair data files in any way ==>
* i can rm this lockfile and start my server
"""
lock_file_path = server.get_lock_file_path()
no_journal = (server.get_cmd_option("nojournal") or
(options_override and "nojournal" in options_override))
if (os.path.exists(lock_file_path) and
server.is_arbiter_server() and
no_journal):
log_warning("WARNING: Detected a lock file ('%s') for your server '%s'"
" ; since this server is an arbiter, there is no need for"
" repair or other action. Deleting mongod.lock and"
" proceeding..." % (lock_file_path, server.id))
try:
os.remove(lock_file_path)
except Exception, e:
log_exception(e)
raise MongoctlException("Error while trying to delete '%s'. "
"Cause: %s" % (lock_file_path, e)) | [
"def",
"_pre_mongod_server_start",
"(",
"server",
",",
"options_override",
"=",
"None",
")",
":",
"lock_file_path",
"=",
"server",
".",
"get_lock_file_path",
"(",
")",
"no_journal",
"=",
"(",
"server",
".",
"get_cmd_option",
"(",
"\"nojournal\"",
")",
"or",
"(",
"options_override",
"and",
"\"nojournal\"",
"in",
"options_override",
")",
")",
"if",
"(",
"os",
".",
"path",
".",
"exists",
"(",
"lock_file_path",
")",
"and",
"server",
".",
"is_arbiter_server",
"(",
")",
"and",
"no_journal",
")",
":",
"log_warning",
"(",
"\"WARNING: Detected a lock file ('%s') for your server '%s'\"",
"\" ; since this server is an arbiter, there is no need for\"",
"\" repair or other action. Deleting mongod.lock and\"",
"\" proceeding...\"",
"%",
"(",
"lock_file_path",
",",
"server",
".",
"id",
")",
")",
"try",
":",
"os",
".",
"remove",
"(",
"lock_file_path",
")",
"except",
"Exception",
",",
"e",
":",
"log_exception",
"(",
"e",
")",
"raise",
"MongoctlException",
"(",
"\"Error while trying to delete '%s'. \"",
"\"Cause: %s\"",
"%",
"(",
"lock_file_path",
",",
"e",
")",
")"
] | 43.483871 | 20.903226 |
def next(self):
"""A `next` that caches the returned results. Together with the
slightly different `__iter__`, these cursors can be iterated over
more than once."""
if self.__tailable:
return PymongoCursor.next(self)
try:
ret = PymongoCursor.next(self)
except StopIteration:
self.__fullcache = True
raise
self.__itercache.append(ret)
return ret | [
"def",
"next",
"(",
"self",
")",
":",
"if",
"self",
".",
"__tailable",
":",
"return",
"PymongoCursor",
".",
"next",
"(",
"self",
")",
"try",
":",
"ret",
"=",
"PymongoCursor",
".",
"next",
"(",
"self",
")",
"except",
"StopIteration",
":",
"self",
".",
"__fullcache",
"=",
"True",
"raise",
"self",
".",
"__itercache",
".",
"append",
"(",
"ret",
")",
"return",
"ret"
] | 34.230769 | 13 |
def init(opts):
'''
This function gets called when the proxy starts up. For
panos devices, a determination is made on the connection type
and the appropriate connection details that must be cached.
'''
if 'host' not in opts['proxy']:
log.critical('No \'host\' key found in pillar for this proxy.')
return False
if 'apikey' not in opts['proxy']:
# If we do not have an apikey, we must have both a username and password
if 'username' not in opts['proxy']:
log.critical('No \'username\' key found in pillar for this proxy.')
return False
if 'password' not in opts['proxy']:
log.critical('No \'passwords\' key found in pillar for this proxy.')
return False
DETAILS['url'] = 'https://{0}/api/'.format(opts['proxy']['host'])
# Set configuration details
DETAILS['host'] = opts['proxy']['host']
if 'serial' in opts['proxy']:
DETAILS['serial'] = opts['proxy'].get('serial')
if 'apikey' in opts['proxy']:
log.debug("Selected pan_key method for panos proxy module.")
DETAILS['method'] = 'pan_key'
DETAILS['apikey'] = opts['proxy'].get('apikey')
else:
log.debug("Selected pan_pass method for panos proxy module.")
DETAILS['method'] = 'pan_pass'
DETAILS['username'] = opts['proxy'].get('username')
DETAILS['password'] = opts['proxy'].get('password')
else:
if 'apikey' in opts['proxy']:
log.debug("Selected dev_key method for panos proxy module.")
DETAILS['method'] = 'dev_key'
DETAILS['apikey'] = opts['proxy'].get('apikey')
else:
log.debug("Selected dev_pass method for panos proxy module.")
DETAILS['method'] = 'dev_pass'
DETAILS['username'] = opts['proxy'].get('username')
DETAILS['password'] = opts['proxy'].get('password')
# Ensure connectivity to the device
log.debug("Attempting to connect to panos proxy host.")
query = {'type': 'op', 'cmd': '<show><system><info></info></system></show>'}
call(query)
log.debug("Successfully connected to panos proxy host.")
DETAILS['initialized'] = True | [
"def",
"init",
"(",
"opts",
")",
":",
"if",
"'host'",
"not",
"in",
"opts",
"[",
"'proxy'",
"]",
":",
"log",
".",
"critical",
"(",
"'No \\'host\\' key found in pillar for this proxy.'",
")",
"return",
"False",
"if",
"'apikey'",
"not",
"in",
"opts",
"[",
"'proxy'",
"]",
":",
"# If we do not have an apikey, we must have both a username and password",
"if",
"'username'",
"not",
"in",
"opts",
"[",
"'proxy'",
"]",
":",
"log",
".",
"critical",
"(",
"'No \\'username\\' key found in pillar for this proxy.'",
")",
"return",
"False",
"if",
"'password'",
"not",
"in",
"opts",
"[",
"'proxy'",
"]",
":",
"log",
".",
"critical",
"(",
"'No \\'passwords\\' key found in pillar for this proxy.'",
")",
"return",
"False",
"DETAILS",
"[",
"'url'",
"]",
"=",
"'https://{0}/api/'",
".",
"format",
"(",
"opts",
"[",
"'proxy'",
"]",
"[",
"'host'",
"]",
")",
"# Set configuration details",
"DETAILS",
"[",
"'host'",
"]",
"=",
"opts",
"[",
"'proxy'",
"]",
"[",
"'host'",
"]",
"if",
"'serial'",
"in",
"opts",
"[",
"'proxy'",
"]",
":",
"DETAILS",
"[",
"'serial'",
"]",
"=",
"opts",
"[",
"'proxy'",
"]",
".",
"get",
"(",
"'serial'",
")",
"if",
"'apikey'",
"in",
"opts",
"[",
"'proxy'",
"]",
":",
"log",
".",
"debug",
"(",
"\"Selected pan_key method for panos proxy module.\"",
")",
"DETAILS",
"[",
"'method'",
"]",
"=",
"'pan_key'",
"DETAILS",
"[",
"'apikey'",
"]",
"=",
"opts",
"[",
"'proxy'",
"]",
".",
"get",
"(",
"'apikey'",
")",
"else",
":",
"log",
".",
"debug",
"(",
"\"Selected pan_pass method for panos proxy module.\"",
")",
"DETAILS",
"[",
"'method'",
"]",
"=",
"'pan_pass'",
"DETAILS",
"[",
"'username'",
"]",
"=",
"opts",
"[",
"'proxy'",
"]",
".",
"get",
"(",
"'username'",
")",
"DETAILS",
"[",
"'password'",
"]",
"=",
"opts",
"[",
"'proxy'",
"]",
".",
"get",
"(",
"'password'",
")",
"else",
":",
"if",
"'apikey'",
"in",
"opts",
"[",
"'proxy'",
"]",
":",
"log",
".",
"debug",
"(",
"\"Selected dev_key method for panos proxy module.\"",
")",
"DETAILS",
"[",
"'method'",
"]",
"=",
"'dev_key'",
"DETAILS",
"[",
"'apikey'",
"]",
"=",
"opts",
"[",
"'proxy'",
"]",
".",
"get",
"(",
"'apikey'",
")",
"else",
":",
"log",
".",
"debug",
"(",
"\"Selected dev_pass method for panos proxy module.\"",
")",
"DETAILS",
"[",
"'method'",
"]",
"=",
"'dev_pass'",
"DETAILS",
"[",
"'username'",
"]",
"=",
"opts",
"[",
"'proxy'",
"]",
".",
"get",
"(",
"'username'",
")",
"DETAILS",
"[",
"'password'",
"]",
"=",
"opts",
"[",
"'proxy'",
"]",
".",
"get",
"(",
"'password'",
")",
"# Ensure connectivity to the device",
"log",
".",
"debug",
"(",
"\"Attempting to connect to panos proxy host.\"",
")",
"query",
"=",
"{",
"'type'",
":",
"'op'",
",",
"'cmd'",
":",
"'<show><system><info></info></system></show>'",
"}",
"call",
"(",
"query",
")",
"log",
".",
"debug",
"(",
"\"Successfully connected to panos proxy host.\"",
")",
"DETAILS",
"[",
"'initialized'",
"]",
"=",
"True"
] | 43.215686 | 20.901961 |
def configure(self, argv=('',), **kwargs):
"""Configures TensorBoard behavior via flags.
This method will populate the "flags" property with an argparse.Namespace
representing flag values parsed from the provided argv list, overridden by
explicit flags from remaining keyword arguments.
Args:
argv: Can be set to CLI args equivalent to sys.argv; the first arg is
taken to be the name of the path being executed.
kwargs: Additional arguments will override what was parsed from
argv. They must be passed as Python data structures, e.g.
`foo=1` rather than `foo="1"`.
Returns:
Either argv[:1] if argv was non-empty, or [''] otherwise, as a mechanism
for absl.app.run() compatibility.
Raises:
ValueError: If flag values are invalid.
"""
parser = argparse_flags.ArgumentParser(
prog='tensorboard',
description=('TensorBoard is a suite of web applications for '
'inspecting and understanding your TensorFlow runs '
'and graphs. https://github.com/tensorflow/tensorboard '))
for loader in self.plugin_loaders:
loader.define_flags(parser)
arg0 = argv[0] if argv else ''
flags = parser.parse_args(argv[1:]) # Strip binary name from argv.
self.cache_key = manager.cache_key(
working_directory=os.getcwd(),
arguments=argv[1:],
configure_kwargs=kwargs,
)
if absl_flags and arg0:
# Only expose main module Abseil flags as TensorBoard native flags.
# This is the same logic Abseil's ArgumentParser uses for determining
# which Abseil flags to include in the short helpstring.
for flag in set(absl_flags.FLAGS.get_key_flags_for_module(arg0)):
if hasattr(flags, flag.name):
raise ValueError('Conflicting Abseil flag: %s' % flag.name)
setattr(flags, flag.name, flag.value)
for k, v in kwargs.items():
if not hasattr(flags, k):
raise ValueError('Unknown TensorBoard flag: %s' % k)
setattr(flags, k, v)
for loader in self.plugin_loaders:
loader.fix_flags(flags)
self.flags = flags
return [arg0] | [
"def",
"configure",
"(",
"self",
",",
"argv",
"=",
"(",
"''",
",",
")",
",",
"*",
"*",
"kwargs",
")",
":",
"parser",
"=",
"argparse_flags",
".",
"ArgumentParser",
"(",
"prog",
"=",
"'tensorboard'",
",",
"description",
"=",
"(",
"'TensorBoard is a suite of web applications for '",
"'inspecting and understanding your TensorFlow runs '",
"'and graphs. https://github.com/tensorflow/tensorboard '",
")",
")",
"for",
"loader",
"in",
"self",
".",
"plugin_loaders",
":",
"loader",
".",
"define_flags",
"(",
"parser",
")",
"arg0",
"=",
"argv",
"[",
"0",
"]",
"if",
"argv",
"else",
"''",
"flags",
"=",
"parser",
".",
"parse_args",
"(",
"argv",
"[",
"1",
":",
"]",
")",
"# Strip binary name from argv.",
"self",
".",
"cache_key",
"=",
"manager",
".",
"cache_key",
"(",
"working_directory",
"=",
"os",
".",
"getcwd",
"(",
")",
",",
"arguments",
"=",
"argv",
"[",
"1",
":",
"]",
",",
"configure_kwargs",
"=",
"kwargs",
",",
")",
"if",
"absl_flags",
"and",
"arg0",
":",
"# Only expose main module Abseil flags as TensorBoard native flags.",
"# This is the same logic Abseil's ArgumentParser uses for determining",
"# which Abseil flags to include in the short helpstring.",
"for",
"flag",
"in",
"set",
"(",
"absl_flags",
".",
"FLAGS",
".",
"get_key_flags_for_module",
"(",
"arg0",
")",
")",
":",
"if",
"hasattr",
"(",
"flags",
",",
"flag",
".",
"name",
")",
":",
"raise",
"ValueError",
"(",
"'Conflicting Abseil flag: %s'",
"%",
"flag",
".",
"name",
")",
"setattr",
"(",
"flags",
",",
"flag",
".",
"name",
",",
"flag",
".",
"value",
")",
"for",
"k",
",",
"v",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"if",
"not",
"hasattr",
"(",
"flags",
",",
"k",
")",
":",
"raise",
"ValueError",
"(",
"'Unknown TensorBoard flag: %s'",
"%",
"k",
")",
"setattr",
"(",
"flags",
",",
"k",
",",
"v",
")",
"for",
"loader",
"in",
"self",
".",
"plugin_loaders",
":",
"loader",
".",
"fix_flags",
"(",
"flags",
")",
"self",
".",
"flags",
"=",
"flags",
"return",
"[",
"arg0",
"]"
] | 41.647059 | 19.372549 |
def account_weight(self, account):
"""
Returns the voting weight for **account**
:param account: Account to get voting weight for
:type account: str
:raises: :py:exc:`nano.rpc.RPCException`
>>> rpc.account_weight(
... account="xrb_3e3j5tkog48pnny9dmfzj1r16pg8t1e76dz5tmac6iq689wyjfpi00000000"
... )
10000
"""
account = self._process_value(account, 'account')
payload = {"account": account}
resp = self.call('account_weight', payload)
return int(resp['weight']) | [
"def",
"account_weight",
"(",
"self",
",",
"account",
")",
":",
"account",
"=",
"self",
".",
"_process_value",
"(",
"account",
",",
"'account'",
")",
"payload",
"=",
"{",
"\"account\"",
":",
"account",
"}",
"resp",
"=",
"self",
".",
"call",
"(",
"'account_weight'",
",",
"payload",
")",
"return",
"int",
"(",
"resp",
"[",
"'weight'",
"]",
")"
] | 24.434783 | 22.695652 |
async def convert_local(path, to_type):
'''
Given an absolute path to a local file, convert to a given to_type
'''
# Now find path between types
typed_foreign_res = TypedLocalResource(path)
original_ts = typed_foreign_res.typestring
conversion_path = singletons.converter_graph.find_path(
original_ts, to_type)
# print('Conversion path: ', conversion_path)
# Loop through each step in graph path and convert
for is_first, is_last, path_step in first_last_iterator(conversion_path):
converter_class, from_ts, to_ts = path_step
converter = converter_class()
in_resource = TypedLocalResource(path, from_ts)
if is_first: # Ensure first resource is just the source one
in_resource = typed_foreign_res
out_resource = TypedLocalResource(path, to_ts)
if is_last:
out_resource = TypedPathedLocalResource(path, to_ts)
await converter.convert(in_resource, out_resource) | [
"async",
"def",
"convert_local",
"(",
"path",
",",
"to_type",
")",
":",
"# Now find path between types",
"typed_foreign_res",
"=",
"TypedLocalResource",
"(",
"path",
")",
"original_ts",
"=",
"typed_foreign_res",
".",
"typestring",
"conversion_path",
"=",
"singletons",
".",
"converter_graph",
".",
"find_path",
"(",
"original_ts",
",",
"to_type",
")",
"# print('Conversion path: ', conversion_path)",
"# Loop through each step in graph path and convert",
"for",
"is_first",
",",
"is_last",
",",
"path_step",
"in",
"first_last_iterator",
"(",
"conversion_path",
")",
":",
"converter_class",
",",
"from_ts",
",",
"to_ts",
"=",
"path_step",
"converter",
"=",
"converter_class",
"(",
")",
"in_resource",
"=",
"TypedLocalResource",
"(",
"path",
",",
"from_ts",
")",
"if",
"is_first",
":",
"# Ensure first resource is just the source one",
"in_resource",
"=",
"typed_foreign_res",
"out_resource",
"=",
"TypedLocalResource",
"(",
"path",
",",
"to_ts",
")",
"if",
"is_last",
":",
"out_resource",
"=",
"TypedPathedLocalResource",
"(",
"path",
",",
"to_ts",
")",
"await",
"converter",
".",
"convert",
"(",
"in_resource",
",",
"out_resource",
")"
] | 42.043478 | 18.478261 |
def palindromic_substrings_iter(s):
"""
A slightly more Pythonic approach with a recursive generator
"""
if not s:
yield []
return
for i in range(len(s), 0, -1):
sub = s[:i]
if sub == sub[::-1]:
for rest in palindromic_substrings_iter(s[i:]):
yield [sub] + rest | [
"def",
"palindromic_substrings_iter",
"(",
"s",
")",
":",
"if",
"not",
"s",
":",
"yield",
"[",
"]",
"return",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"s",
")",
",",
"0",
",",
"-",
"1",
")",
":",
"sub",
"=",
"s",
"[",
":",
"i",
"]",
"if",
"sub",
"==",
"sub",
"[",
":",
":",
"-",
"1",
"]",
":",
"for",
"rest",
"in",
"palindromic_substrings_iter",
"(",
"s",
"[",
"i",
":",
"]",
")",
":",
"yield",
"[",
"sub",
"]",
"+",
"rest"
] | 27.5 | 14.166667 |
def get_class_module(module_name: str, class_name: str) -> Optional[str]:
"""
Get a sub-module of the given module which has the given class.
This method wraps `utils.reflection.find_class_module method` with the following behavior:
- raise error when multiple sub-modules with different classes with the same name are found
- return None when no sub-module is found
- warn about non-searchable sub-modules
.. note::
This function logs!
:param module_name: module to be searched
:param class_name: searched class name
:return: sub-module with the searched class or None
"""
matched_modules, erroneous_modules = find_class_module(module_name, class_name)
for submodule, error in erroneous_modules:
logging.warning('Could not inspect sub-module `%s` due to `%s` '
'when searching for `%s` in sub-modules of `%s`.',
submodule, type(error).__name__, class_name, module_name)
if len(matched_modules) == 1:
return matched_modules[0]
if len(matched_modules) > 1:
# check if all the module attributes point to the same class
first_class = getattr(importlib.import_module(matched_modules[0]), class_name)
for matched_module in matched_modules:
another_class = getattr(importlib.import_module(matched_module), class_name)
if another_class is not first_class:
raise ValueError('Found more than one sub-module when searching for `{}` in sub-modules of `{}`. '
'Please specify the module explicitly. Found sub-modules: `{}`'
.format(class_name, module_name, matched_modules))
return matched_modules[0]
return None | [
"def",
"get_class_module",
"(",
"module_name",
":",
"str",
",",
"class_name",
":",
"str",
")",
"->",
"Optional",
"[",
"str",
"]",
":",
"matched_modules",
",",
"erroneous_modules",
"=",
"find_class_module",
"(",
"module_name",
",",
"class_name",
")",
"for",
"submodule",
",",
"error",
"in",
"erroneous_modules",
":",
"logging",
".",
"warning",
"(",
"'Could not inspect sub-module `%s` due to `%s` '",
"'when searching for `%s` in sub-modules of `%s`.'",
",",
"submodule",
",",
"type",
"(",
"error",
")",
".",
"__name__",
",",
"class_name",
",",
"module_name",
")",
"if",
"len",
"(",
"matched_modules",
")",
"==",
"1",
":",
"return",
"matched_modules",
"[",
"0",
"]",
"if",
"len",
"(",
"matched_modules",
")",
">",
"1",
":",
"# check if all the module attributes point to the same class",
"first_class",
"=",
"getattr",
"(",
"importlib",
".",
"import_module",
"(",
"matched_modules",
"[",
"0",
"]",
")",
",",
"class_name",
")",
"for",
"matched_module",
"in",
"matched_modules",
":",
"another_class",
"=",
"getattr",
"(",
"importlib",
".",
"import_module",
"(",
"matched_module",
")",
",",
"class_name",
")",
"if",
"another_class",
"is",
"not",
"first_class",
":",
"raise",
"ValueError",
"(",
"'Found more than one sub-module when searching for `{}` in sub-modules of `{}`. '",
"'Please specify the module explicitly. Found sub-modules: `{}`'",
".",
"format",
"(",
"class_name",
",",
"module_name",
",",
"matched_modules",
")",
")",
"return",
"matched_modules",
"[",
"0",
"]",
"return",
"None"
] | 47.135135 | 26.972973 |
def add_child(self, node):
"""! @brief Link a child node onto this object."""
node._parent = self
self._children.append(node) | [
"def",
"add_child",
"(",
"self",
",",
"node",
")",
":",
"node",
".",
"_parent",
"=",
"self",
"self",
".",
"_children",
".",
"append",
"(",
"node",
")"
] | 36.5 | 8 |
def move_to_output_dir(work_dir, output_dir, uuid=None, files=list()):
"""
Moves files from work_dir to output_dir
Input1: Working directory
Input2: Output directory
Input3: UUID to be preprended onto file name
Input4: list of file names to be moved from working dir to output dir
"""
for fname in files:
if uuid is None:
shutil.move(os.path.join(work_dir, fname), os.path.join(output_dir, fname))
else:
shutil.move(os.path.join(work_dir, fname), os.path.join(output_dir, '{}.{}'.format(uuid, fname))) | [
"def",
"move_to_output_dir",
"(",
"work_dir",
",",
"output_dir",
",",
"uuid",
"=",
"None",
",",
"files",
"=",
"list",
"(",
")",
")",
":",
"for",
"fname",
"in",
"files",
":",
"if",
"uuid",
"is",
"None",
":",
"shutil",
".",
"move",
"(",
"os",
".",
"path",
".",
"join",
"(",
"work_dir",
",",
"fname",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"output_dir",
",",
"fname",
")",
")",
"else",
":",
"shutil",
".",
"move",
"(",
"os",
".",
"path",
".",
"join",
"(",
"work_dir",
",",
"fname",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"output_dir",
",",
"'{}.{}'",
".",
"format",
"(",
"uuid",
",",
"fname",
")",
")",
")"
] | 40.071429 | 22.357143 |
def junos_cli(command, format=None, dev_timeout=None, dest=None, **kwargs):
'''
.. versionadded:: 2019.2.0
Execute a CLI command and return the output in the specified format.
command
The command to execute on the Junos CLI.
format: ``text``
Format in which to get the CLI output (either ``text`` or ``xml``).
dev_timeout: ``30``
The NETCONF RPC timeout (in seconds).
dest
Destination file where the RPC output is stored. Note that the file will
be stored on the Proxy Minion. To push the files to the Master, use
:mod:`cp.push <salt.modules.cp.push>`.
CLI Example:
.. code-block:: bash
salt '*' napalm.junos_cli 'show lldp neighbors'
'''
prep = _junos_prep_fun(napalm_device) # pylint: disable=undefined-variable
if not prep['result']:
return prep
return __salt__['junos.cli'](command,
format=format,
dev_timeout=dev_timeout,
dest=dest,
**kwargs) | [
"def",
"junos_cli",
"(",
"command",
",",
"format",
"=",
"None",
",",
"dev_timeout",
"=",
"None",
",",
"dest",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"prep",
"=",
"_junos_prep_fun",
"(",
"napalm_device",
")",
"# pylint: disable=undefined-variable",
"if",
"not",
"prep",
"[",
"'result'",
"]",
":",
"return",
"prep",
"return",
"__salt__",
"[",
"'junos.cli'",
"]",
"(",
"command",
",",
"format",
"=",
"format",
",",
"dev_timeout",
"=",
"dev_timeout",
",",
"dest",
"=",
"dest",
",",
"*",
"*",
"kwargs",
")"
] | 31.5 | 24.970588 |
def get_stat(path, filename):
''' get stat '''
return os.stat(os.path.join(path, filename)) | [
"def",
"get_stat",
"(",
"path",
",",
"filename",
")",
":",
"return",
"os",
".",
"stat",
"(",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"filename",
")",
")"
] | 31 | 13 |
def push(self, path, name, tag=None):
'''push an image to Singularity Registry
path: should correspond to an absolte image path (or derive it)
name: should be the complete uri that the user has requested to push.
tag: should correspond with an image tag. This is provided to mirror Docker
'''
path = os.path.abspath(path)
bot.debug("PUSH %s" % path)
if not os.path.exists(path):
bot.error('%s does not exist.' %path)
sys.exit(1)
# This returns a data structure with collection, container, based on uri
names = parse_image_name(remove_uri(name),tag=tag)
# use Singularity client, if exists, to inspect to extract metadata
metadata = self.get_metadata(path, names=names)
# If you want a spinner
bot.spinner.start()
# do your push request here. Generally you want to except a KeyboardInterrupt
# and give the user a status from the response
bot.spinner.stop() | [
"def",
"push",
"(",
"self",
",",
"path",
",",
"name",
",",
"tag",
"=",
"None",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"path",
")",
"bot",
".",
"debug",
"(",
"\"PUSH %s\"",
"%",
"path",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"bot",
".",
"error",
"(",
"'%s does not exist.'",
"%",
"path",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"# This returns a data structure with collection, container, based on uri",
"names",
"=",
"parse_image_name",
"(",
"remove_uri",
"(",
"name",
")",
",",
"tag",
"=",
"tag",
")",
"# use Singularity client, if exists, to inspect to extract metadata",
"metadata",
"=",
"self",
".",
"get_metadata",
"(",
"path",
",",
"names",
"=",
"names",
")",
"# If you want a spinner",
"bot",
".",
"spinner",
".",
"start",
"(",
")",
"# do your push request here. Generally you want to except a KeyboardInterrupt",
"# and give the user a status from the response",
"bot",
".",
"spinner",
".",
"stop",
"(",
")"
] | 37 | 23.08 |
def strobogrammatic_in_range(low, high):
"""
:type low: str
:type high: str
:rtype: int
"""
res = []
count = 0
low_len = len(low)
high_len = len(high)
for i in range(low_len, high_len + 1):
res.extend(helper2(i, i))
for perm in res:
if len(perm) == low_len and int(perm) < int(low):
continue
elif len(perm) == high_len and int(perm) > int(high):
continue
else:
count += 1
return count | [
"def",
"strobogrammatic_in_range",
"(",
"low",
",",
"high",
")",
":",
"res",
"=",
"[",
"]",
"count",
"=",
"0",
"low_len",
"=",
"len",
"(",
"low",
")",
"high_len",
"=",
"len",
"(",
"high",
")",
"for",
"i",
"in",
"range",
"(",
"low_len",
",",
"high_len",
"+",
"1",
")",
":",
"res",
".",
"extend",
"(",
"helper2",
"(",
"i",
",",
"i",
")",
")",
"for",
"perm",
"in",
"res",
":",
"if",
"len",
"(",
"perm",
")",
"==",
"low_len",
"and",
"int",
"(",
"perm",
")",
"<",
"int",
"(",
"low",
")",
":",
"continue",
"elif",
"len",
"(",
"perm",
")",
"==",
"high_len",
"and",
"int",
"(",
"perm",
")",
">",
"int",
"(",
"high",
")",
":",
"continue",
"else",
":",
"count",
"+=",
"1",
"return",
"count"
] | 24.05 | 16.65 |
def all_actions(self):
""" Генератор, возвращающий все события
"""
count = self.get_count_action()
page = 1
while True: # не цикл for, потому как не уверен, что всё норм будет с API
actions = self.get_action_list(page=page)
for action in actions:
yield action
page += 1
if len(actions) < options.ACTIONS_ON_PAGE or page > math.ceil(count / options.ACTIONS_ON_PAGE):
break | [
"def",
"all_actions",
"(",
"self",
")",
":",
"count",
"=",
"self",
".",
"get_count_action",
"(",
")",
"page",
"=",
"1",
"while",
"True",
":",
"# не цикл for, потому как не уверен, что всё норм будет с API",
"actions",
"=",
"self",
".",
"get_action_list",
"(",
"page",
"=",
"page",
")",
"for",
"action",
"in",
"actions",
":",
"yield",
"action",
"page",
"+=",
"1",
"if",
"len",
"(",
"actions",
")",
"<",
"options",
".",
"ACTIONS_ON_PAGE",
"or",
"page",
">",
"math",
".",
"ceil",
"(",
"count",
"/",
"options",
".",
"ACTIONS_ON_PAGE",
")",
":",
"break"
] | 40.083333 | 18.416667 |
def compute_from_text(self,text,beta=0.001):
"""
m.compute_from_text(,text,beta=0.001) -- Compute a matrix values from a text string of ambiguity codes.
Use Motif_from_text utility instead to build motifs on the fly.
"""
prevlett = {'B':'A', 'D':'C', 'V':'T', 'H':'G'}
countmat = []
text = re.sub('[\.\-]','N',text.upper())
for i in range(len(text)):
D = {'A': 0, 'C': 0, 'T':0, 'G':0}
letter = text[i]
if letter in ['B', 'D', 'V', 'H']: #B == no "A", etc...
_omit = prevlett[letter]
for L in ACGT:
if L != _omit: D[L] = 0.3333
elif one2two.has_key(letter): #Covers WSMYRK
for L in list(one2two[letter]):
D[L] = 0.5
elif letter == 'N':
for L in D.keys():
D[L] = self.background[L]
elif letter == '@':
for L in D.keys():
D[L] = self.background[L]-(0.0001)
D['A'] = D['A'] + 0.0004
else:
D[letter] = 1.0
countmat.append(D)
self.compute_from_counts(countmat,beta) | [
"def",
"compute_from_text",
"(",
"self",
",",
"text",
",",
"beta",
"=",
"0.001",
")",
":",
"prevlett",
"=",
"{",
"'B'",
":",
"'A'",
",",
"'D'",
":",
"'C'",
",",
"'V'",
":",
"'T'",
",",
"'H'",
":",
"'G'",
"}",
"countmat",
"=",
"[",
"]",
"text",
"=",
"re",
".",
"sub",
"(",
"'[\\.\\-]'",
",",
"'N'",
",",
"text",
".",
"upper",
"(",
")",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"text",
")",
")",
":",
"D",
"=",
"{",
"'A'",
":",
"0",
",",
"'C'",
":",
"0",
",",
"'T'",
":",
"0",
",",
"'G'",
":",
"0",
"}",
"letter",
"=",
"text",
"[",
"i",
"]",
"if",
"letter",
"in",
"[",
"'B'",
",",
"'D'",
",",
"'V'",
",",
"'H'",
"]",
":",
"#B == no \"A\", etc...",
"_omit",
"=",
"prevlett",
"[",
"letter",
"]",
"for",
"L",
"in",
"ACGT",
":",
"if",
"L",
"!=",
"_omit",
":",
"D",
"[",
"L",
"]",
"=",
"0.3333",
"elif",
"one2two",
".",
"has_key",
"(",
"letter",
")",
":",
"#Covers WSMYRK",
"for",
"L",
"in",
"list",
"(",
"one2two",
"[",
"letter",
"]",
")",
":",
"D",
"[",
"L",
"]",
"=",
"0.5",
"elif",
"letter",
"==",
"'N'",
":",
"for",
"L",
"in",
"D",
".",
"keys",
"(",
")",
":",
"D",
"[",
"L",
"]",
"=",
"self",
".",
"background",
"[",
"L",
"]",
"elif",
"letter",
"==",
"'@'",
":",
"for",
"L",
"in",
"D",
".",
"keys",
"(",
")",
":",
"D",
"[",
"L",
"]",
"=",
"self",
".",
"background",
"[",
"L",
"]",
"-",
"(",
"0.0001",
")",
"D",
"[",
"'A'",
"]",
"=",
"D",
"[",
"'A'",
"]",
"+",
"0.0004",
"else",
":",
"D",
"[",
"letter",
"]",
"=",
"1.0",
"countmat",
".",
"append",
"(",
"D",
")",
"self",
".",
"compute_from_counts",
"(",
"countmat",
",",
"beta",
")"
] | 42.655172 | 13.551724 |
def send(self, from_, to, subject, text='', html='', cc=[], bcc=[],
headers={}, attachments=[]):
"""
Send an email.
"""
if isinstance(to, string_types):
raise TypeError('"to" parameter must be enumerable')
if text == '' and html == '':
raise ValueError('"text" and "html" must not both be empty')
return self._session.post('{}/send'.format(self._url), json={
'from': from_,
'to': to,
'cc': cc,
'bcc': bcc,
'subject': subject,
'headers': headers,
'text': text,
'html': html,
'attachments': list(self._process_attachments(attachments)),
}).json() | [
"def",
"send",
"(",
"self",
",",
"from_",
",",
"to",
",",
"subject",
",",
"text",
"=",
"''",
",",
"html",
"=",
"''",
",",
"cc",
"=",
"[",
"]",
",",
"bcc",
"=",
"[",
"]",
",",
"headers",
"=",
"{",
"}",
",",
"attachments",
"=",
"[",
"]",
")",
":",
"if",
"isinstance",
"(",
"to",
",",
"string_types",
")",
":",
"raise",
"TypeError",
"(",
"'\"to\" parameter must be enumerable'",
")",
"if",
"text",
"==",
"''",
"and",
"html",
"==",
"''",
":",
"raise",
"ValueError",
"(",
"'\"text\" and \"html\" must not both be empty'",
")",
"return",
"self",
".",
"_session",
".",
"post",
"(",
"'{}/send'",
".",
"format",
"(",
"self",
".",
"_url",
")",
",",
"json",
"=",
"{",
"'from'",
":",
"from_",
",",
"'to'",
":",
"to",
",",
"'cc'",
":",
"cc",
",",
"'bcc'",
":",
"bcc",
",",
"'subject'",
":",
"subject",
",",
"'headers'",
":",
"headers",
",",
"'text'",
":",
"text",
",",
"'html'",
":",
"html",
",",
"'attachments'",
":",
"list",
"(",
"self",
".",
"_process_attachments",
"(",
"attachments",
")",
")",
",",
"}",
")",
".",
"json",
"(",
")"
] | 36.3 | 15.3 |
def sep_inserter(iterable, sep):
"""
Insert '' between numbers in an iterable.
Parameters
----------
iterable
sep : str
The string character to be inserted between adjacent numeric objects.
Yields
------
The values of *iterable* in order, with *sep* inserted where adjacent
elements are numeric. If the first element in the input is numeric
then *sep* will be the first value yielded.
"""
try:
# Get the first element. A StopIteration indicates an empty iterable.
# Since we are controlling the types of the input, 'type' is used
# instead of 'isinstance' for the small speed advantage it offers.
types = (int, float, long)
first = next(iterable)
if type(first) in types:
yield sep
yield first
# Now, check if pair of elements are both numbers. If so, add ''.
second = next(iterable)
if type(first) in types and type(second) in types:
yield sep
yield second
# Now repeat in a loop.
for x in iterable:
first, second = second, x
if type(first) in types and type(second) in types:
yield sep
yield second
except StopIteration:
# Catch StopIteration per deprecation in PEP 479:
# "Change StopIteration handling inside generators"
return | [
"def",
"sep_inserter",
"(",
"iterable",
",",
"sep",
")",
":",
"try",
":",
"# Get the first element. A StopIteration indicates an empty iterable.",
"# Since we are controlling the types of the input, 'type' is used",
"# instead of 'isinstance' for the small speed advantage it offers.",
"types",
"=",
"(",
"int",
",",
"float",
",",
"long",
")",
"first",
"=",
"next",
"(",
"iterable",
")",
"if",
"type",
"(",
"first",
")",
"in",
"types",
":",
"yield",
"sep",
"yield",
"first",
"# Now, check if pair of elements are both numbers. If so, add ''.",
"second",
"=",
"next",
"(",
"iterable",
")",
"if",
"type",
"(",
"first",
")",
"in",
"types",
"and",
"type",
"(",
"second",
")",
"in",
"types",
":",
"yield",
"sep",
"yield",
"second",
"# Now repeat in a loop.",
"for",
"x",
"in",
"iterable",
":",
"first",
",",
"second",
"=",
"second",
",",
"x",
"if",
"type",
"(",
"first",
")",
"in",
"types",
"and",
"type",
"(",
"second",
")",
"in",
"types",
":",
"yield",
"sep",
"yield",
"second",
"except",
"StopIteration",
":",
"# Catch StopIteration per deprecation in PEP 479:",
"# \"Change StopIteration handling inside generators\"",
"return"
] | 31.674419 | 21.906977 |
def com_google_fonts_check_name_subfamilyname(ttFont,
style_with_spaces,
familyname_with_spaces):
""" Check name table: FONT_SUBFAMILY_NAME entries. """
from fontbakery.utils import name_entry_id
failed = False
for name in ttFont['name'].names:
if name.nameID == NameID.FONT_SUBFAMILY_NAME:
if name.platformID == PlatformID.MACINTOSH:
expected_value = style_with_spaces
elif name.platformID == PlatformID.WINDOWS:
if style_with_spaces in ["Bold", "Bold Italic"]:
expected_value = style_with_spaces
else:
if "Italic" in style_with_spaces:
expected_value = "Italic"
else:
expected_value = "Regular"
else:
yield FAIL, Message("invalid-entry",
("Font should not have a "
"{} entry!").format(name_entry_id(name)))
failed = True
continue
string = name.string.decode(name.getEncoding()).strip()
if string != expected_value:
failed = True
yield FAIL, Message("bad-familyname",
("Entry {} on the 'name' table: "
"Expected '{}' "
"but got '{}'.").format(name_entry_id(name),
expected_value,
string))
if not failed:
yield PASS, "FONT_SUBFAMILY_NAME entries are all good." | [
"def",
"com_google_fonts_check_name_subfamilyname",
"(",
"ttFont",
",",
"style_with_spaces",
",",
"familyname_with_spaces",
")",
":",
"from",
"fontbakery",
".",
"utils",
"import",
"name_entry_id",
"failed",
"=",
"False",
"for",
"name",
"in",
"ttFont",
"[",
"'name'",
"]",
".",
"names",
":",
"if",
"name",
".",
"nameID",
"==",
"NameID",
".",
"FONT_SUBFAMILY_NAME",
":",
"if",
"name",
".",
"platformID",
"==",
"PlatformID",
".",
"MACINTOSH",
":",
"expected_value",
"=",
"style_with_spaces",
"elif",
"name",
".",
"platformID",
"==",
"PlatformID",
".",
"WINDOWS",
":",
"if",
"style_with_spaces",
"in",
"[",
"\"Bold\"",
",",
"\"Bold Italic\"",
"]",
":",
"expected_value",
"=",
"style_with_spaces",
"else",
":",
"if",
"\"Italic\"",
"in",
"style_with_spaces",
":",
"expected_value",
"=",
"\"Italic\"",
"else",
":",
"expected_value",
"=",
"\"Regular\"",
"else",
":",
"yield",
"FAIL",
",",
"Message",
"(",
"\"invalid-entry\"",
",",
"(",
"\"Font should not have a \"",
"\"{} entry!\"",
")",
".",
"format",
"(",
"name_entry_id",
"(",
"name",
")",
")",
")",
"failed",
"=",
"True",
"continue",
"string",
"=",
"name",
".",
"string",
".",
"decode",
"(",
"name",
".",
"getEncoding",
"(",
")",
")",
".",
"strip",
"(",
")",
"if",
"string",
"!=",
"expected_value",
":",
"failed",
"=",
"True",
"yield",
"FAIL",
",",
"Message",
"(",
"\"bad-familyname\"",
",",
"(",
"\"Entry {} on the 'name' table: \"",
"\"Expected '{}' \"",
"\"but got '{}'.\"",
")",
".",
"format",
"(",
"name_entry_id",
"(",
"name",
")",
",",
"expected_value",
",",
"string",
")",
")",
"if",
"not",
"failed",
":",
"yield",
"PASS",
",",
"\"FONT_SUBFAMILY_NAME entries are all good.\""
] | 39.307692 | 17.717949 |
def _maybe_decompress_body(self):
"""Attempt to decompress the message body passed in using the named
compression module, if specified.
:rtype: bytes
"""
if self.content_encoding:
if self.content_encoding in self._CODEC_MAP.keys():
module_name = self._CODEC_MAP[self.content_encoding]
self.logger.debug('Decompressing with %s', module_name)
module = self._maybe_import(module_name)
return module.decompress(self._message.body)
self.logger.debug('Unsupported content-encoding: %s',
self.content_encoding)
return self._message.body | [
"def",
"_maybe_decompress_body",
"(",
"self",
")",
":",
"if",
"self",
".",
"content_encoding",
":",
"if",
"self",
".",
"content_encoding",
"in",
"self",
".",
"_CODEC_MAP",
".",
"keys",
"(",
")",
":",
"module_name",
"=",
"self",
".",
"_CODEC_MAP",
"[",
"self",
".",
"content_encoding",
"]",
"self",
".",
"logger",
".",
"debug",
"(",
"'Decompressing with %s'",
",",
"module_name",
")",
"module",
"=",
"self",
".",
"_maybe_import",
"(",
"module_name",
")",
"return",
"module",
".",
"decompress",
"(",
"self",
".",
"_message",
".",
"body",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'Unsupported content-encoding: %s'",
",",
"self",
".",
"content_encoding",
")",
"return",
"self",
".",
"_message",
".",
"body"
] | 42.625 | 17.25 |
def findAllExceptions(pathToCheck):
"""
Find patterns of exceptions in a file or folder.
@param patternFinder: a visitor for pattern checking and save results
@return: patterns of special functions and classes
"""
finder = PatternFinder()
if os.path.isfile(pathToCheck):
with open(pathToCheck) as f:
findPatternsInFile(f.read(), finder)
else:
for path, dirs, files in os.walk(pathToCheck):
for file in files:
_, extname = os.path.splitext(file)
if extname == ".py":
pathFile = os.path.join(path, file)
with open(pathFile) as f:
findPatternsInFile(f.read(), finder)
return finder.patternsFunc, finder.patternsClass | [
"def",
"findAllExceptions",
"(",
"pathToCheck",
")",
":",
"finder",
"=",
"PatternFinder",
"(",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"pathToCheck",
")",
":",
"with",
"open",
"(",
"pathToCheck",
")",
"as",
"f",
":",
"findPatternsInFile",
"(",
"f",
".",
"read",
"(",
")",
",",
"finder",
")",
"else",
":",
"for",
"path",
",",
"dirs",
",",
"files",
"in",
"os",
".",
"walk",
"(",
"pathToCheck",
")",
":",
"for",
"file",
"in",
"files",
":",
"_",
",",
"extname",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"file",
")",
"if",
"extname",
"==",
"\".py\"",
":",
"pathFile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"file",
")",
"with",
"open",
"(",
"pathFile",
")",
"as",
"f",
":",
"findPatternsInFile",
"(",
"f",
".",
"read",
"(",
")",
",",
"finder",
")",
"return",
"finder",
".",
"patternsFunc",
",",
"finder",
".",
"patternsClass"
] | 38.35 | 12.75 |
def addresses(self):
"""
Return 3-tuple with (address, network, nicid)
:return: address related information of interface as 3-tuple list
:rtype: list
"""
addresses = []
for i in self.all_interfaces:
if isinstance(i, VlanInterface):
for v in i.interfaces:
addresses.append((v.address, v.network_value, v.nicid))
else:
addresses.append((i.address, i.network_value, i.nicid))
return addresses | [
"def",
"addresses",
"(",
"self",
")",
":",
"addresses",
"=",
"[",
"]",
"for",
"i",
"in",
"self",
".",
"all_interfaces",
":",
"if",
"isinstance",
"(",
"i",
",",
"VlanInterface",
")",
":",
"for",
"v",
"in",
"i",
".",
"interfaces",
":",
"addresses",
".",
"append",
"(",
"(",
"v",
".",
"address",
",",
"v",
".",
"network_value",
",",
"v",
".",
"nicid",
")",
")",
"else",
":",
"addresses",
".",
"append",
"(",
"(",
"i",
".",
"address",
",",
"i",
".",
"network_value",
",",
"i",
".",
"nicid",
")",
")",
"return",
"addresses"
] | 34.4 | 17.2 |
def get(self, block_number: BlockNumber) -> str:
"""Given a block number returns the hex representation of the blockhash"""
if block_number in self.mapping:
return self.mapping[block_number]
block_hash = self.web3.eth.getBlock(block_number)['hash']
block_hash = block_hash.hex()
self.mapping[block_number] = block_hash
return block_hash | [
"def",
"get",
"(",
"self",
",",
"block_number",
":",
"BlockNumber",
")",
"->",
"str",
":",
"if",
"block_number",
"in",
"self",
".",
"mapping",
":",
"return",
"self",
".",
"mapping",
"[",
"block_number",
"]",
"block_hash",
"=",
"self",
".",
"web3",
".",
"eth",
".",
"getBlock",
"(",
"block_number",
")",
"[",
"'hash'",
"]",
"block_hash",
"=",
"block_hash",
".",
"hex",
"(",
")",
"self",
".",
"mapping",
"[",
"block_number",
"]",
"=",
"block_hash",
"return",
"block_hash"
] | 43.222222 | 11.444444 |
def as_data_frame(self, use_pandas=True, header=True):
"""
Obtain the dataset as a python-local object.
:param bool use_pandas: If True (default) then return the H2OFrame as a pandas DataFrame (requires that the
``pandas`` library was installed). If False, then return the contents of the H2OFrame as plain nested
list, in a row-wise order.
:param bool header: If True (default), then column names will be appended as the first row in list
:returns: A python object (a list of lists of strings, each list is a row, if use_pandas=False, otherwise
a pandas DataFrame) containing this H2OFrame instance's data.
"""
if can_use_pandas() and use_pandas:
import pandas
return pandas.read_csv(StringIO(self.get_frame_data()), low_memory=False, skip_blank_lines=False)
from h2o.utils.csv.readers import reader
frame = [row for row in reader(StringIO(self.get_frame_data()))]
if not header:
frame.pop(0)
return frame | [
"def",
"as_data_frame",
"(",
"self",
",",
"use_pandas",
"=",
"True",
",",
"header",
"=",
"True",
")",
":",
"if",
"can_use_pandas",
"(",
")",
"and",
"use_pandas",
":",
"import",
"pandas",
"return",
"pandas",
".",
"read_csv",
"(",
"StringIO",
"(",
"self",
".",
"get_frame_data",
"(",
")",
")",
",",
"low_memory",
"=",
"False",
",",
"skip_blank_lines",
"=",
"False",
")",
"from",
"h2o",
".",
"utils",
".",
"csv",
".",
"readers",
"import",
"reader",
"frame",
"=",
"[",
"row",
"for",
"row",
"in",
"reader",
"(",
"StringIO",
"(",
"self",
".",
"get_frame_data",
"(",
")",
")",
")",
"]",
"if",
"not",
"header",
":",
"frame",
".",
"pop",
"(",
"0",
")",
"return",
"frame"
] | 52.5 | 30.45 |
def get_mean_and_stddevs(self, sctx, rctx, dctx, imt, stddev_types):
"""
Returns the mean and standard deviations
"""
# Return Distance Tables
imls = self._return_tables(rctx.mag, imt, "IMLs")
# Get distance vector for the given magnitude
idx = numpy.searchsorted(self.m_w, rctx.mag)
dists = self.distances[:, 0, idx - 1]
# Get mean and standard deviations
mean = self._get_mean(imls, dctx, dists)
stddevs = self._get_stddevs(dists, rctx.mag, dctx, imt, stddev_types)
if self.amplification:
# Apply amplification
mean_amp, sigma_amp = self.amplification.get_amplification_factors(
imt,
sctx,
rctx,
getattr(dctx, self.distance_type),
stddev_types)
mean = numpy.log(mean) + numpy.log(mean_amp)
for iloc in range(len(stddev_types)):
stddevs[iloc] *= sigma_amp[iloc]
return mean, stddevs
else:
return numpy.log(mean), stddevs | [
"def",
"get_mean_and_stddevs",
"(",
"self",
",",
"sctx",
",",
"rctx",
",",
"dctx",
",",
"imt",
",",
"stddev_types",
")",
":",
"# Return Distance Tables",
"imls",
"=",
"self",
".",
"_return_tables",
"(",
"rctx",
".",
"mag",
",",
"imt",
",",
"\"IMLs\"",
")",
"# Get distance vector for the given magnitude",
"idx",
"=",
"numpy",
".",
"searchsorted",
"(",
"self",
".",
"m_w",
",",
"rctx",
".",
"mag",
")",
"dists",
"=",
"self",
".",
"distances",
"[",
":",
",",
"0",
",",
"idx",
"-",
"1",
"]",
"# Get mean and standard deviations",
"mean",
"=",
"self",
".",
"_get_mean",
"(",
"imls",
",",
"dctx",
",",
"dists",
")",
"stddevs",
"=",
"self",
".",
"_get_stddevs",
"(",
"dists",
",",
"rctx",
".",
"mag",
",",
"dctx",
",",
"imt",
",",
"stddev_types",
")",
"if",
"self",
".",
"amplification",
":",
"# Apply amplification",
"mean_amp",
",",
"sigma_amp",
"=",
"self",
".",
"amplification",
".",
"get_amplification_factors",
"(",
"imt",
",",
"sctx",
",",
"rctx",
",",
"getattr",
"(",
"dctx",
",",
"self",
".",
"distance_type",
")",
",",
"stddev_types",
")",
"mean",
"=",
"numpy",
".",
"log",
"(",
"mean",
")",
"+",
"numpy",
".",
"log",
"(",
"mean_amp",
")",
"for",
"iloc",
"in",
"range",
"(",
"len",
"(",
"stddev_types",
")",
")",
":",
"stddevs",
"[",
"iloc",
"]",
"*=",
"sigma_amp",
"[",
"iloc",
"]",
"return",
"mean",
",",
"stddevs",
"else",
":",
"return",
"numpy",
".",
"log",
"(",
"mean",
")",
",",
"stddevs"
] | 41.076923 | 13.230769 |
def load(self, patterns, dirs, ignore=None):
"""
Load objects from the filesystem into the ``paths`` dictionary.
"""
for path in self.find_files(patterns=patterns, dirs=dirs, ignore=ignore):
data = self.read_file(path=path)
if data:
self.paths[path] = data | [
"def",
"load",
"(",
"self",
",",
"patterns",
",",
"dirs",
",",
"ignore",
"=",
"None",
")",
":",
"for",
"path",
"in",
"self",
".",
"find_files",
"(",
"patterns",
"=",
"patterns",
",",
"dirs",
"=",
"dirs",
",",
"ignore",
"=",
"ignore",
")",
":",
"data",
"=",
"self",
".",
"read_file",
"(",
"path",
"=",
"path",
")",
"if",
"data",
":",
"self",
".",
"paths",
"[",
"path",
"]",
"=",
"data"
] | 35.666667 | 15.666667 |
def _update_classifier(self, data, labels, w, classes):
    """Update the MLR classifier parameters theta and bias.

    Parameters
    ----------
    data : list of 2D arrays, element i has shape=[voxels_i, samples_i]
        Each element in the list contains the fMRI data of one subject
        for the classification task.
    labels : list of arrays of int, element i has shape=[samples_i]
        Each element in the list contains the labels for the data
        samples in data_sup.
    w : list of 2D array, element i has shape=[voxels_i, features]
        The orthogonal transforms (mappings) :math:`W_i` for each subject.
    classes : int
        The number of classes in the classifier.

    Returns
    -------
    theta : array, shape=[features, classes]
        The MLR parameter for the class planes.
    bias : array shape=[classes,]
        The MLR parameter for class biases.
    """
    # Aggregate all subjects' projected data and labels into single arrays.
    stacked_x, stacked_y, sample_weights = \
        SSSRM._stack_list(data, labels, w)
    n_features = w[0].shape[1]
    n_samples = sample_weights.size

    # Constant (shared) theano variables used by the cost expression.
    x_sh = S.shared(stacked_x.astype(theano.config.floatX))
    y_sh = S.shared(stacked_y)
    n_samples_sh = S.shared(n_samples)
    reg_const = S.shared(self.alpha / self.gamma, allow_downcast=True)
    weights_sh = S.shared(sample_weights)

    # Symbolic variables to optimize over.
    theta_sym = T.matrix(name='theta', dtype=theano.config.floatX)
    bias_sym = T.col(name='bias', dtype=theano.config.floatX)

    # Weighted negative log-likelihood of the softmax classifier,
    # plus an L2 penalty on theta.
    log_softmax = \
        T.log(T.nnet.softmax((theta_sym.T.dot(x_sh.T)).T + bias_sym.T))
    cost = -reg_const * T.sum(
        (log_softmax[T.arange(n_samples_sh), y_sh]) / weights_sh) \
        + 0.5 * T.sum(theta_sym ** 2)

    # Optimize jointly over theta and bias on a product of Euclidean
    # (i.e. unconstrained) manifolds.
    manifold = Product((Euclidean(n_features, classes),
                        Euclidean(classes, 1)))
    problem = Problem(manifold=manifold, cost=cost,
                      arg=[theta_sym, bias_sym], verbosity=0)
    solver = ConjugateGradient(mingradnorm=1e-6)
    solution = solver.solve(problem)
    theta = solution[0]
    bias = solution[1]

    # Explicitly drop references to theano/solver objects.
    del reg_const
    del theta_sym
    del bias_sym
    del x_sh
    del y_sh
    del solver
    del solution
    return theta, bias
"def",
"_update_classifier",
"(",
"self",
",",
"data",
",",
"labels",
",",
"w",
",",
"classes",
")",
":",
"# Stack the data and labels for training the classifier",
"data_stacked",
",",
"labels_stacked",
",",
"weights",
"=",
"SSSRM",
".",
"_stack_list",
"(",
"data",
",",
"labels",
",",
"w",
")",
"features",
"=",
"w",
"[",
"0",
"]",
".",
"shape",
"[",
"1",
"]",
"total_samples",
"=",
"weights",
".",
"size",
"data_th",
"=",
"S",
".",
"shared",
"(",
"data_stacked",
".",
"astype",
"(",
"theano",
".",
"config",
".",
"floatX",
")",
")",
"val_",
"=",
"S",
".",
"shared",
"(",
"labels_stacked",
")",
"total_samples_S",
"=",
"S",
".",
"shared",
"(",
"total_samples",
")",
"theta_th",
"=",
"T",
".",
"matrix",
"(",
"name",
"=",
"'theta'",
",",
"dtype",
"=",
"theano",
".",
"config",
".",
"floatX",
")",
"bias_th",
"=",
"T",
".",
"col",
"(",
"name",
"=",
"'bias'",
",",
"dtype",
"=",
"theano",
".",
"config",
".",
"floatX",
")",
"constf2",
"=",
"S",
".",
"shared",
"(",
"self",
".",
"alpha",
"/",
"self",
".",
"gamma",
",",
"allow_downcast",
"=",
"True",
")",
"weights_th",
"=",
"S",
".",
"shared",
"(",
"weights",
")",
"log_p_y_given_x",
"=",
"T",
".",
"log",
"(",
"T",
".",
"nnet",
".",
"softmax",
"(",
"(",
"theta_th",
".",
"T",
".",
"dot",
"(",
"data_th",
".",
"T",
")",
")",
".",
"T",
"+",
"bias_th",
".",
"T",
")",
")",
"f",
"=",
"-",
"constf2",
"*",
"T",
".",
"sum",
"(",
"(",
"log_p_y_given_x",
"[",
"T",
".",
"arange",
"(",
"total_samples_S",
")",
",",
"val_",
"]",
")",
"/",
"weights_th",
")",
"+",
"0.5",
"*",
"T",
".",
"sum",
"(",
"theta_th",
"**",
"2",
")",
"manifold",
"=",
"Product",
"(",
"(",
"Euclidean",
"(",
"features",
",",
"classes",
")",
",",
"Euclidean",
"(",
"classes",
",",
"1",
")",
")",
")",
"problem",
"=",
"Problem",
"(",
"manifold",
"=",
"manifold",
",",
"cost",
"=",
"f",
",",
"arg",
"=",
"[",
"theta_th",
",",
"bias_th",
"]",
",",
"verbosity",
"=",
"0",
")",
"solver",
"=",
"ConjugateGradient",
"(",
"mingradnorm",
"=",
"1e-6",
")",
"solution",
"=",
"solver",
".",
"solve",
"(",
"problem",
")",
"theta",
"=",
"solution",
"[",
"0",
"]",
"bias",
"=",
"solution",
"[",
"1",
"]",
"del",
"constf2",
"del",
"theta_th",
"del",
"bias_th",
"del",
"data_th",
"del",
"val_",
"del",
"solver",
"del",
"solution",
"return",
"theta",
",",
"bias"
] | 34.188406 | 23.217391 |
def set_verbosity(self, verbose=False, print_func=None):
    """Switch verbose mode on or off.

    Parameters
    ----------
    verbose : bool
        switch on/off verbose mode
    print_func : function
        A function that computes statistics of initialized arrays.
        Takes an `NDArray` and returns an `str`. Defaults to mean
        absolute value str((abs(x)/size(x)).asscalar()).
    """
    self._verbose = verbose
    if print_func is None:
        # Default statistic: RMS of the array, computed asynchronously.
        def _rms_stat(arr):
            """returns |x|/size(x), async execution."""
            return str((ndarray.norm(arr) / sqrt(arr.size)).asscalar())
        print_func = _rms_stat
    self._print_func = print_func
    return self
"def",
"set_verbosity",
"(",
"self",
",",
"verbose",
"=",
"False",
",",
"print_func",
"=",
"None",
")",
":",
"self",
".",
"_verbose",
"=",
"verbose",
"if",
"print_func",
"is",
"None",
":",
"def",
"asum_stat",
"(",
"x",
")",
":",
"\"\"\"returns |x|/size(x), async execution.\"\"\"",
"return",
"str",
"(",
"(",
"ndarray",
".",
"norm",
"(",
"x",
")",
"/",
"sqrt",
"(",
"x",
".",
"size",
")",
")",
".",
"asscalar",
"(",
")",
")",
"print_func",
"=",
"asum_stat",
"self",
".",
"_print_func",
"=",
"print_func",
"return",
"self"
] | 36.8 | 14.95 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.