hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1743d8baaa44407fe5ebdb2b00eba460ef1a2ae9 | 3,783 | py | Python | long_term_operation/output_check.py | nareshram256/EnergyManagementSystem | 2a48ba3b9bf7ff3003c197ee43ea9efbfbe42baa | [
"MIT"
] | 9 | 2020-04-24T14:34:16.000Z | 2022-01-25T07:16:03.000Z | long_term_operation/output_check.py | casemsee/EnergyManagementSystem | 2a48ba3b9bf7ff3003c197ee43ea9efbfbe42baa | [
"MIT"
] | null | null | null | long_term_operation/output_check.py | casemsee/EnergyManagementSystem | 2a48ba3b9bf7ff3003c197ee43ea9efbfbe42baa | [
"MIT"
] | 7 | 2019-09-19T13:26:02.000Z | 2021-11-27T09:53:54.000Z | # Output check procedure for optimal power flow
# The following rules are used to test the feasiblity of output
# 1) Active power balancing of on AC bus
# 2) Reactive power balancing of on DC bus
from configuration.configuration_time_line import default_look_ahead_time_step
from utils import Logger
logger = Logger("Long_term_dispatch_output_check")
from configuration.configuration_global import default_eps
class OutputCheck():
def output_local_check(*args):
model = args[0] # local ems models
T = default_look_ahead_time_step[
"Look_ahead_time_uc_time_step"] # The look ahead time step of optimal power flow
if model["success"] is True:
for i in range(T):
if model["UG"]["COMMAND_PG"][i] + model["DG"]["COMMAND_PG"][i] - model["BIC"]["COMMAND_AC2DC"][i] + \
model["BIC"][
"COMMAND_DC2AC"][i] * model["BIC"]["EFF_DC2AC"] - model["Load_ac"]["PD"][i] - \
model["Load_nac"]["PD"][i] >= \
default_eps["POWER_BALANCE"] or model["UG"]["COMMAND_PG"][i] + model["DG"]["COMMAND_PG"][i] - \
model["BIC"]["COMMAND_AC2DC"][i] + model["BIC"][
"COMMAND_DC2AC"][i] * model["BIC"]["EFF_DC2AC"] - model["Load_ac"]["PD"][i] - \
model["Load_nac"]["PD"][
i] <= \
-default_eps["POWER_BALANCE"]:
logger.error("The obtained solution can not meet AC bus power requirement!")
logger.info(
model["UG"]["COMMAND_PG"][i] + model["DG"]["COMMAND_PG"][i] - model["BIC"]["COMMAND_AC2DC"][i] +
model["BIC"][
"COMMAND_DC2AC"][i] * model["BIC"]["EFF_DC2AC"] - model["Load_ac"]["PD"][i] -
model["Load_nac"]["PD"][i])
if model["ESS"]["COMMAND_PG"][i] + model["BIC"]["COMMAND_AC2DC"][i] * model["BIC"]["EFF_DC2AC"] - \
model["BIC"]["COMMAND_DC2AC"][i] - model["Load_dc"]["PD"][i] - model["Load_ndc"]["PD"][i] + \
model["PV"][
"PG"][i] + \
model["WP"]["PG"][i] - model["PMG"][i] >= default_eps["POWER_BALANCE"] or \
model["ESS"]["COMMAND_PG"][i] + \
model["BIC"]["COMMAND_AC2DC"][i] * model["BIC"]["EFF_DC2AC"] - \
model["BIC"]["COMMAND_DC2AC"][i] - model["Load_dc"]["PD"][i] - model["Load_ndc"]["PD"][i] + \
model["PV"]["PG"][i] + model["WP"]["PG"][i] - model["PMG"][i] <= -default_eps["POWER_BALANCE"]:
logger.error("The obtained solution can not meet DC bus power requirement!")
logger.info(
model["ESS"]["COMMAND_PG"][i] + model["BIC"]["COMMAND_AC2DC"][i] * model["BIC"]["EFF_DC2AC"] -
model["BIC"]["COMMAND_DC2AC"][i] - model["Load_dc"]["PD"][i] - model["Load_ndc"]["PD"][i] +
model["PV"]["PG"][i] + \
model["WP"]["PG"][i] - model["PMG"][i])
if model["BIC"]["COMMAND_AC2DC"][i] * model["BIC"]["COMMAND_DC2AC"][i] is not 0:
logger.error("There exits bi-directional power flow on BIC!")
else:
logger.error("The obtained solution results in load shedding or renewable energy resource shedding!")
for i in range(T):
logger.info(
model["UG"]["COMMAND_PG"][i] + model["DG"]["COMMAND_PG"][i] - model["BIC"]["COMMAND_AC2DC"][i] +
model["BIC"]["COMMAND_DC2AC"][i] * model["BIC"]["EFF_DC2AC"] - model["Load_ac"]["PD"][i] -
model["Load_nac"]["PD"][i] + model["Load_ac"]["COMMAND_SHED"][i] +
model["Load_nac"]["COMMAND_SHED"][i])
logger.info(
model["ESS"]["COMMAND_PG"][i] + model["BIC"]["COMMAND_AC2DC"][i] * model["BIC"]["EFF_DC2AC"] - \
model["BIC"]["COMMAND_DC2AC"][i] - model["Load_dc"]["PD"][i] - model["Load_ndc"]["PD"][i] +
model["PV"][
"PG"][i] + \
model["WP"]["PG"][i] - model["PMG"][i] - model["PV"]["COMMAND_CURT"][i] -
model["WP"]["COMMAND_CURT"][i] +
model["Load_dc"]["COMMAND_SHED"][i] + model["Load_ndc"]["COMMAND_SHED"][i])
logger.info(model["BIC"]["COMMAND_AC2DC"][i] * model["BIC"]["COMMAND_DC2AC"][i])
return model
| 50.44 | 105 | 0.602961 |
d0615e0efa9de03ccf777c9dae01830988fa3d31 | 5,217 | py | Python | sconscontrib/SCons/Tool/gob2/__init__.py | kprussing/scons-contrib | cc5ae3b212bd7c5fe40bdc79460cfe7e6c6bcff1 | [
"MIT"
] | null | null | null | sconscontrib/SCons/Tool/gob2/__init__.py | kprussing/scons-contrib | cc5ae3b212bd7c5fe40bdc79460cfe7e6c6bcff1 | [
"MIT"
] | null | null | null | sconscontrib/SCons/Tool/gob2/__init__.py | kprussing/scons-contrib | cc5ae3b212bd7c5fe40bdc79460cfe7e6c6bcff1 | [
"MIT"
] | null | null | null | # MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""SCons.Tool.gob2
Tool-specific initialization for the gob2 GObject builder.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
import os
import SCons.Action
import SCons.Builder
import SCons.Util
class ToolGob2Warning(SCons.Warnings.Warning):
pass
class Gob2NotFound(ToolGob2Warning):
pass
SCons.Warnings.enableWarningClass(ToolGob2Warning)
def _detect(env):
""" Try to detect the gob2 builder """
try:
return env["GOB2"]
except KeyError:
pass
gob2 = env.WhereIs("gob2") or env.WhereIs("gob")
if gob2:
return gob2
raise SCons.Errors.StopError(Gob2NotFound, "Could not detect gob2 builder")
return None
#
# Emitters
#
def __gob2_emitter(target, source, env):
sourceBase, sourceExt = os.path.splitext(SCons.Util.to_String(source[0]))
target.append(sourceBase + env.subst("$GOB2_HSUFFIX"))
target.append(sourceBase + env.subst("$GOB2_PRIVATESUFFIX"))
return target, source
#
# Builders
#
__gob2_cbuilder = SCons.Builder.Builder(
action=SCons.Action.Action("$GOB2_COM", "$GOB2_COMSTR"),
suffix="$GOB2_CSUFFIX",
src_suffix="$GOB2_SUFFIX",
emitter=__gob2_emitter,
)
__gob2_cppbuilder = SCons.Builder.Builder(
action=SCons.Action.Action("$GOB2_CXXCOM", "$GOB2_CXXCOMSTR"),
suffix="$GOB2_CXXSUFFIX",
src_suffix="$GOB2_SUFFIX",
emitter=__gob2_emitter,
)
def Gob2(env, target, source=None, *args, **kw):
"""
A pseudo-Builder wrapper for the gob2 executable, creating C output.
"""
if not SCons.Util.is_List(target):
target = [target]
if not source:
source = target[:]
target = []
for s in source:
sourceBase, sourceExt = os.path.splitext(SCons.Util.to_String(s))
target.append(sourceBase)
if not SCons.Util.is_List(source):
source = [source]
result = []
for t, s in zip(target, source):
# Set output directory
env["GOB2_OUTDIR"] = "."
head, tail = os.path.split(SCons.Util.to_String(s))
if head:
env["GOB2_OUTDIR"] = head
# Call builder
result.extend(__gob2_cbuilder.__call__(env, t, s, **kw))
return result
def Gob2Cpp(env, target, source=None, *args, **kw):
"""
A pseudo-Builder wrapper for the gob2 executable, creating CPP output.
"""
if not SCons.Util.is_List(target):
target = [target]
if not source:
source = target[:]
target = []
for s in source:
sourceBase, sourceExt = os.path.splitext(SCons.Util.to_String(s))
target.append(sourceBase)
if not SCons.Util.is_List(source):
source = [source]
result = []
for t, s in zip(target, source):
# Set output directory
env["GOB2_OUTDIR"] = "."
head, tail = os.path.split(SCons.Util.to_String(s))
if head:
env["GOB2_OUTDIR"] = head
# Call builder
result.extend(__gob2_cppbuilder.__call__(env, t, s, **kw))
return result
def generate(env):
"""Add Builders and construction variables to the Environment."""
env["GOB2"] = _detect(env)
env.SetDefault(
# Additional command-line flags
GOB2_FLAGS=SCons.Util.CLVar(""),
# Suffixes/prefixes
GOB2_SUFFIX=".gob",
GOB2_CSUFFIX=".c",
GOB2_CXXSUFFIX=".cc",
GOB2_HSUFFIX=".h",
GOB2_PRIVATESUFFIX="-private.h",
# GOB2 commands
GOB2_COM="$GOB2 $GOB2_FLAGS -o $GOB2_OUTDIR $SOURCES",
GOB2_COMSTR="",
GOB2_CXXCOM="$GOB2 $GOB2_FLAGS --for-cpp -o $GOB2_OUTDIR $SOURCES",
GOB2_CXXCOMSTR="",
)
try:
env.AddMethod(Gob2, "Gob2")
env.AddMethod(Gob2Cpp, "Gob2Cpp")
except AttributeError:
# Looks like we use a pre-0.98 version of SCons...
from SCons.Script.SConscript import SConsEnvironment
SConsEnvironment.Gob2 = Gob2
SConsEnvironment.Gob2Cpp = Gob2Cpp
def exists(env):
return _detect(env)
| 28.048387 | 79 | 0.665133 |
231c2e9a3709bc0712136f20013663c4962d91cf | 4,512 | py | Python | 25d_esa_field.py | spatialaudio/improved_driving_functions_for_rectangular_loudspeaker_arrays | 91885b4ea86a714c27a315bcc33519427830b9f3 | [
"MIT"
] | 2 | 2016-04-27T14:12:36.000Z | 2017-10-19T14:55:08.000Z | 25d_esa_field.py | spatialaudio/improved_driving_functions_for_rectangular_loudspeaker_arrays | 91885b4ea86a714c27a315bcc33519427830b9f3 | [
"MIT"
] | null | null | null | 25d_esa_field.py | spatialaudio/improved_driving_functions_for_rectangular_loudspeaker_arrays | 91885b4ea86a714c27a315bcc33519427830b9f3 | [
"MIT"
] | null | null | null | """ Generates Figure 4a of the paper
Sascha Spors, Frank Schultz, and Till Rettberg. Improved Driving Functions
for Rectangular Loudspeaker Arrays Driven by Sound Field Synthesis. In
German Annual Conference on Acoustics (DAGA), March 2016.
2.5D synthesis of a point source with the equivalent scattering approach
using a edge-shaped secondary source distribution
(c) Sascha Spors 2016, MIT Licence
"""
import numpy as np
import matplotlib.pyplot as plt
import sfs
# simulation switches
save_figures = False
wfs = False # WFS or ESA
# simulation parameters
dx = 0.003 # secondary source distance
N = 10000 # number of secondary sources for one array
f = 500 # frequency
Nc = 350 # max circular harmonics
omega = 2 * np.pi * f # angular frequency
src_angles = [180-45]
R = 4
xref = [2, -2, 0] # reference point
grid = sfs.util.xyz_grid([0, 4], [-4, 0], 0, spacing=0.02)
def compute_sound_field(x0, n0, a0, omega, angle):
xs = xref + R * np.asarray(sfs.util.direction_vector(np.radians(angle), np.radians(90)))
if wfs:
d = sfs.mono.drivingfunction.wfs_25d_point(omega, x0, n0, xs, xref=xref)
a = sfs.mono.drivingfunction.source_selection_point(n0, x0, xs)
else:
d = sfs.mono.drivingfunction.esa_edge_25d_point(omega, x0, xs, xref=xref, Nc=Nc)
a = np.ones(d.shape[0])
twin = sfs.tapering.none(a)
p = sfs.mono.synthesized.generic(omega, x0, n0, d * twin * a0, grid,
source=sfs.mono.source.point)
return p, twin, xs
def plot_objects(ax, xs):
ax.plot((0, 0), (-4.2, 0), 'k-', lw=2)
ax.plot((0, 4.2), (0, 0), 'k-', lw=2)
plt.annotate('4m', (0, 0.27), (4.1, 0.2), arrowprops={'arrowstyle': '<->'})
sfs.plot.virtualsource_2d(xs, type='point', ax=ax)
sfs.plot.reference_2d(xref, ax=ax)
def plot_sound_field(p, xs, twin):
plt.style.use(('paper.mplstyle', 'paper_box.mplstyle'))
fig = plt.figure()
ax1 = fig.add_axes([0.0, 0.0, 0.7, 1])
sfs.plot.soundfield(p, grid, xnorm=None, colorbar=False, vmax=1.5, vmin=-1.5, ax=ax1)
plot_objects(ax1, xs)
plt.axis([-1.1, 4.2, -4.2, 1.1])
plt.axis('off')
myfig = plt.gcf()
plt.show()
if save_figures:
if wfs:
myfig.savefig('../paper/figs/edge_wfs_25d_point_%dHz_%d.pdf' % (f, src_angles[n]), dpi=300)
else:
myfig.savefig('../paper/figs/edge_esa_25d_point_%dHz_%d.pdf' % (f, src_angles[n]), dpi=300)
def plot_sound_field_level(p, xs, twin):
plt.style.use(('paper.mplstyle', 'paper_box.mplstyle'))
fig = plt.figure()
ax1 = fig.add_axes([0.0, 0.0, 0.7, 1])
im = sfs.plot.level(p, grid, xnorm=None, colorbar=False, cmap=plt.cm.viridis, vmax=3, vmin=-3, ax=ax1)
CS = plt.contour(sfs.util.db(p), 1, levels=[0], origin='lower', linewidths=2, extent=(0, 4, -4, 0), colors='w', alpha=.5)
plt.clabel(CS, [0], inline=1, fmt='%1.1f dB', fontsize=8, rightside_up=1)
zc = CS.collections[0]
plt.setp(zc, linewidth=0.5)
plot_objects(plt.gca(), xs)
plt.annotate('4m', (-2.5, 2), (-2.75, -2.4), arrowprops={'arrowstyle': '<->'})
plt.axis([-1.1, 4.2, -4.2, 1.1])
plt.axis('off')
ax2 = fig.add_axes([0.55, -0.05, 0.25, .95])
plt.axis('off')
cbar = plt.colorbar(im, ax=ax2, shrink=.6)
cbar.set_label('relative level (dB)', rotation=270, labelpad=10)
cbar.set_ticks(np.arange(-3, 4))
myfig = plt.gcf()
plt.show()
if save_figures:
if wfs:
myfig.savefig('../paper/figs/edge_wfs_25d_point_%dHz_%d_L.pdf' % (f, src_angles[n]), dpi=300)
else:
myfig.savefig('../paper/figs/edge_esa_25d_point_%dHz_%d_L.pdf' % (f, src_angles[n]), dpi=300)
# get secondary source positions
x0, n0, a0 = sfs.array.rounded_edge(N, 0, dx, orientation=[0, -1, 0])
# compute field at the given positions for given virutal source
p = []
trajectory = []
lsactive = []
for angle in src_angles:
tmp, twin, xs = compute_sound_field(x0, n0, a0, omega, angle)
p.append(tmp)
trajectory.append(xs)
lsactive.append(twin)
p = np.asarray(p)
trajectory = np.asarray(trajectory)
lsactive = np.asarray(lsactive)
# plot synthesized sound field for multiple virtual source position
normalization = np.abs(sfs.mono.source.point(omega, xs, [0, 0, 0], xref))
for n in range(0, p.shape[0]):
plot_sound_field(p[n, :, :]/normalization, trajectory[n, :], lsactive[n, :])
plot_sound_field_level(p[n, :, :]/normalization, trajectory[n, :], lsactive[n, :])
| 33.176471 | 125 | 0.63586 |
61ccc0f503b0455a16862eb6db12c247fa61718f | 3,963 | py | Python | src/backend/apps/posts/models.py | Vixx-X/ati-project | 0ef80772a6fc3807e401cf58b9e15f3628373383 | [
"MIT"
] | null | null | null | src/backend/apps/posts/models.py | Vixx-X/ati-project | 0ef80772a6fc3807e401cf58b9e15f3628373383 | [
"MIT"
] | 61 | 2021-06-10T03:27:06.000Z | 2022-03-12T01:01:34.000Z | src/backend/apps/posts/models.py | Vixx-X/ati-project | 0ef80772a6fc3807e401cf58b9e15f3628373383 | [
"MIT"
] | null | null | null | """
Models for User module
"""
from datetime import datetime, timedelta
from flask_babel import format_datetime as _d
from flask_babel import lazy_gettext as _
from flask_user import current_user
from backend import db
from backend.apps.media.models import Media
from backend.apps.user.models import User
def get_time(time):
now = datetime.now()
delta = time - now
if delta < timedelta(seconds=30):
return _("Less than 30 seconds ago")
if delta < timedelta(minutes=5):
return _("Less than 5 minutes ago")
if delta < timedelta(minutes=10):
return _("Less than 10 minutes ago")
if delta < timedelta(days=2):
return _("Today")
if delta < timedelta(days=2):
return _("Yesterday")
return _d(time, format="%A %d-%m-%Y, %H:%M")
class Comment(db.EmbeddedDocument):
"""
Model for users` comments
"""
_id = db.ObjectIdField(required=True, default=lambda: db.ObjectId())
author = db.ReferenceField(User)
# Content
content = db.StringField(max_length=65536)
# response
comments = db.EmbeddedDocumentListField(
"self",
default=[],
)
time_created = db.DateTimeField()
@property
def get_author(self):
"""
Get Author of post
"""
if self.author:
return self.author
return User.get_deleted_user()
@property
def time(self):
return get_time(self.time_created)
@property
def get_firts_comments(self):
return self.comments[:3]
def as_dict(self):
raw = self.to_mongo().to_dict()
raw["id"] = str(raw.pop("_id"))
if "time_created" in raw:
raw["time_created"] = raw["time_created"].isoformat()
if "author" in raw:
raw["author"] = self.author.as_dict()
return raw
class Post(db.Document):
"""
Model for users` posts
"""
author = db.ReferenceField(User, reverse_delete_rule=db.NULLIFY)
# content
title = db.StringField(max_length=255)
description = db.StringField(max_length=65536)
tags = db.ListField(
db.StringField(max_length=255),
default=[],
)
media = db.ListField(
db.ReferenceField(
Media,
reverse_delete_rule=db.CASCADE,
),
)
# metadata
public = db.BooleanField(default=True)
time_created = db.DateTimeField()
time_edited = db.DateTimeField()
edited = db.BooleanField(default=False)
comments = db.EmbeddedDocumentListField(
Comment,
default=[],
)
meta = {
"collection": "posts",
}
@property
def primary_media(self):
if self.media:
return self.media[0]
return None
@property
def all_media(self):
if self.media:
return self.media
return None
@property
def time(self):
return get_time(self.time_created)
def as_dict(self):
raw = self.to_mongo().to_dict()
raw["id"] = str(raw.pop("_id"))
if "time_created" in raw:
raw["time_created"] = get_time(raw["time_created"])
if "time_edited" in raw:
raw["time_edited"] = get_time(raw["time_edited"])
if "author" in raw:
raw["author"] = self.author.as_dict()
return raw
@property
def get_author(self):
"""
Get Author of post
"""
if self.author:
return self.author
return User.get_deleted_user()
@property
def is_my_post(self):
return current_user == self.author
def save(self, *args, **kwargs):
"""
Override return method to update created or updated time
"""
if not self.time_created:
self.time_created = datetime.now()
else:
self.time_edited = datetime.now()
self.edited = True
return super().save(*args, **kwargs)
| 22.907514 | 72 | 0.590714 |
b1bffb3105a3157895b8216c7c786bc7cef094bc | 437 | py | Python | tests/sudoku_test.py | colincoleman/su_doku_solver | 2cd8da1055e4a4ea98a50bb19636743bc90e555a | [
"MIT"
] | null | null | null | tests/sudoku_test.py | colincoleman/su_doku_solver | 2cd8da1055e4a4ea98a50bb19636743bc90e555a | [
"MIT"
] | null | null | null | tests/sudoku_test.py | colincoleman/su_doku_solver | 2cd8da1055e4a4ea98a50bb19636743bc90e555a | [
"MIT"
] | null | null | null | from src.sudoku.puzzle import Puzzle
from src.sudoku.sudoku import init_puzzle, parse_input_file
input_file = './example_puzzle.txt'
def test_init_puzzle():
this_puzzle = Puzzle()
assert this_puzzle.cells[0].definite_value == "_"
init_puzzle(this_puzzle)
assert this_puzzle.cells[0].definite_value == '4'
def test_parse_input_file():
file_content = parse_input_file(input_file)
assert file_content[1] == '1'
| 25.705882 | 59 | 0.745995 |
6a688cbf2c652d170f704aa32e86cd43f01d5812 | 6,531 | py | Python | mmtbx/building/loop_closure/starting_conformations.py | TheApacheCats/cctbx_project | 94e3e85dd6385f0dc3f45077b743757d22b19391 | [
"BSD-3-Clause-LBNL"
] | 2 | 2021-03-18T12:31:57.000Z | 2022-03-14T06:27:06.000Z | mmtbx/building/loop_closure/starting_conformations.py | indu-in/cctbx_project1 | e09447ddc2ba3aa9d91b21008b0162ab290b0c30 | [
"BSD-3-Clause-LBNL"
] | null | null | null | mmtbx/building/loop_closure/starting_conformations.py | indu-in/cctbx_project1 | e09447ddc2ba3aa9d91b21008b0162ab290b0c30 | [
"BSD-3-Clause-LBNL"
] | null | null | null | from __future__ import absolute_import, division, print_function
from mmtbx.building.loop_closure import utils
from mmtbx.validation import ramalyze
import itertools
from libtbx.utils import null_out
import boost.python
from six.moves import zip
from six.moves import range
ext = boost.python.import_ext("mmtbx_validation_ramachandran_ext")
from mmtbx_validation_ramachandran_ext import rama_eval
from six.moves import cStringIO as StringIO
def set_rama_angles(moving_h, angles, direction_forward=True, check_omega=False):
"""
angles = [(phi, psi), (phi, psi), ... (phi, psi)]
phi or psi == None means we don't change this angle
returns deep-copied hierarchy with new angles. Change occurs from first to
last angle so starting point would be in the same place.
This function should produce up to all possible favored conformations.
This function doesn't change moving_h
direction_forward==True - set from beginning to end - the end residue moves
direction_forward==False - set from end to beginning, the first residue moves
"""
# print "angles", angles
# STOP()
result_h = moving_h.deep_copy()
result_h.reset_atom_i_seqs()
fixed_omega = False
phi_psi_atoms = utils.get_phi_psi_atoms(moving_h, omega=True)
assert len(phi_psi_atoms) == len(angles), "%d != %d" % (len(phi_psi_atoms), len(angles))
if not direction_forward:
phi_psi_atoms.reverse()
angles.reverse()
for ps_atoms, target_angle_pair in zip(phi_psi_atoms, angles):
phi_psi_pair = ps_atoms[0]
# print "phi_psi_pair", phi_psi_pair
omega = ps_atoms[2]
phi_psi_angles = utils.get_pair_angles(phi_psi_pair)
# print "ps_atoms, target_angle_pair", phi_psi_angles, target_angle_pair
# phi
if target_angle_pair[0] is not None and phi_psi_angles[0] is not None:
rotation_angle = -phi_psi_angles[0]+target_angle_pair[0]
# print "rot angle", rotation_angle
# if not direction_forward:
# rotation_angle = -rotation_angle
utils.rotate_atoms_around_bond(
result_h,
phi_psi_pair[0][1],
phi_psi_pair[0][2],
angle=rotation_angle,
direction_forward=direction_forward)
# psi
if target_angle_pair[1] is not None and phi_psi_angles[1] is not None:
rotation_angle = -phi_psi_angles[1]+target_angle_pair[1]
# print "rot angle", rotation_angle
# if not direction_forward:
# rotation_angle = -rotation_angle
utils.rotate_atoms_around_bond(
result_h,
phi_psi_pair[1][1],
phi_psi_pair[1][2],
angle=rotation_angle,
direction_forward=direction_forward)
# omega
if omega is not None and abs(abs(omega)-180) > 10 and check_omega:
rotation_angle= -omega+180
# print "Omega rotation:", omega, rotation_angle
utils.rotate_atoms_around_bond(
result_h,
phi_psi_pair[0][0],
phi_psi_pair[0][1],
angle=rotation_angle,
direction_forward=direction_forward)
fixed_omega = True
# print utils.list_rama_outliers_h(result_h)
# result_h.write_pdb_file(file_name="variant_%s.pdb" % direction_forward)
# STOP()
return result_h, fixed_omega
def is_not_none_combination(comb):
for pair in comb:
if pair != (None, None):
return True
return False
def get_sampled_rama_favored_angles(rama_key, r=None, step=20):
if r is None:
r = rama_eval()
result = []
for i in range(-180, 180, step):
for j in range(-180, 180, step):
score = r.evaluate_angles(ramalyze.res_types[rama_key], i,j)
r_ev = ramalyze.ramalyze.evalScore(ramalyze.res_types[rama_key], score)
if r_ev == ramalyze.RAMALYZE_FAVORED:
result.append((i,j))
return result
def get_all_starting_conformations(moving_h, change_radius,
n_outliers,
direction_forward=True, cutoff=50, change_all=True, log=null_out(), check_omega=False):
if log is None:
log = StringIO()
variants = []
result = []
r = rama_eval()
phi_psi_atoms = utils.get_phi_psi_atoms(moving_h, omega=True)
# print "N residue groups in h", [x.resseq for x in moving_h.residue_groups()]
if len(phi_psi_atoms) == 0:
print("Strange input to starting conformations!!!", file=log)
return result
n_rama = len(phi_psi_atoms)
# print "n_rama", n_rama
change_angles = [None]
if change_all:
change_angles = range((n_rama)//2-change_radius-n_outliers//2, (n_rama)//2+change_radius+1+n_outliers//2)
# if change_angles[0] < 0:
# change_angles = range(change_angles[-1]-change_angles[0])
has_twisted = False
if check_omega:
omegas = [x[2] for x in phi_psi_atoms]
for o in omegas:
if o is not None and abs(abs(o)-180) > 30:
has_twisted = True
print("n_outliers", n_outliers, file=log)
for i, (phi_psi_pair, rama_key, omega) in enumerate(phi_psi_atoms):
angle_is_outlier = utils.rama_evaluate(phi_psi_pair, r, rama_key) == ramalyze.RAMALYZE_OUTLIER
twisted = omega is not None and ((abs(abs(omega)-180) > 30) and check_omega)
print("in cycle, N, outlier?, change?, twisted?", i, angle_is_outlier, i in change_angles, twisted, file=log)
if angle_is_outlier and n_outliers < 3:
vs = get_sampled_rama_favored_angles(rama_key, r)
elif (i in change_angles) or angle_is_outlier or has_twisted:
# vs = get_sampled_rama_favored_angles(rama_key, r)
vs = ramalyze.get_favored_regions(rama_key)
else:
vs = [(None, None)]
variants.append(vs)
print("variants", variants, file=log)
all_angles_combination = list(itertools.product(*variants))
# filter none combinations
# print "len(all_angles_combination)", len(all_angles_combination)
all_angles_combination_f = []
for comb in all_angles_combination:
if is_not_none_combination(comb):
all_angles_combination_f.append(comb)
print("len(all_angles_combination_f)", len(all_angles_combination_f), file=log)
return all_angles_combination_f
# if len(all_angles_combination_f) == 0:
# print "In starting conformations - outlier was fixed?"
# return result
# n_added = 0
# n_all_combination = len(all_angles_combination_f)
# i_max = min(cutoff, n_all_combination)
# assert i_max > 0
# step = float(n_all_combination-1)/float(i_max-1)
# if step < 1:
# step = 1
# for i in range(i_max):
# comb = all_angles_combination_f[int(round(step*i))]
# result.append(set_rama_angles(moving_h, list(comb),direction_forward=direction_forward))
# print >> log, "Model %d, angles:" % i, comb
# return result
| 39.581818 | 113 | 0.709845 |
7ed4d3633bc09c79632206626b14270b86b19a01 | 2,581 | py | Python | coupling_examples/Layout_1x2.py | ebranlard/wiz | 6dfca8b2711b670229f5b2b3b3e0d7fe0bdea156 | [
"MIT"
] | 5 | 2020-06-16T11:58:10.000Z | 2022-01-03T17:09:55.000Z | coupling_examples/Layout_1x2.py | ebranlard/wiz | 6dfca8b2711b670229f5b2b3b3e0d7fe0bdea156 | [
"MIT"
] | null | null | null | coupling_examples/Layout_1x2.py | ebranlard/wiz | 6dfca8b2711b670229f5b2b3b3e0d7fe0bdea156 | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
from floris.induction import options_dict
from helper_functions import *
fontsize = 12
plt.rc('font', family='serif')
plt.rc('font', size=12)
# --- Plot options
bExport=False
bCompute=True
bColorBar=False
nStreamlines=0
U0 =6
minSpeed=0.5
maxSpeed=1.03
levelsLines=np.sort([1.05,1.0,0.99,0.98,0.95,0.9,0.5])
# ---
ny=30 # 200
nx=ny*4
input_file="Layout_1x2.json"
D=126
resolution=Vec3(nx, ny, 2)
bounds=[-4*D,14*D,0-10,2*D+10,89,90] # xmin xmax .. zmin zmax
Ind_Opts=options_dict()
Ind_Opts['Rfact']=1.0
Ind_Opts['GammaFact']=1.0
Ind_Opts['Ground']=True
titles=[]
titles.append('FLORIS (original)')
titles.append('FLORIS (with induction')
# titles.append('With induction and blending')
# --- Parametric computatoin
if bCompute:
planes=[]
print('-----------------------------------------------------------------')
Ind_Opts['no_induction']=True
planes.append(get_HH_plane_vel(input_file, Ind_Opts, resolution, bounds_to_set=bounds))
savePlane(planes[-1],'_data/Layout12_0_',U0=U0)
print('-----------------------------------------------------------------')
Ind_Opts['no_induction']=False
Ind_Opts['blend']=False
planes.append(get_HH_plane_vel(input_file, Ind_Opts, resolution, bounds_to_set=bounds))
savePlane(planes[-1],'_data/Layout12_1_',U0=U0)
planes=[]
planes.append(loadPlane('_data/Layout12_0_'))
planes.append(loadPlane('_data/Layout12_1_'))
# --- Plot and show
fig, axes = plt.subplots(nrows=len(titles), ncols=1, sharex=True, sharey=True, figsize=(12.0, 6.0))
for i,(ax,p,t) in enumerate(zip(axes.flat,planes,titles)):
x=p[0]
y=p[1]
u=p[2]
v=p[3]
im = plotPlane(x/D,y/D,u,v,ax,minSpeed=minSpeed,maxSpeed=maxSpeed,
nStreamlines=nStreamlines,levelsLines=levelsLines, axial=True, colors='k')
ax.title.set_text(t)
ax.set_ylabel('r/D [-]')
ax.title.set_text(t)
ax.tick_params(direction='in')
if i==1:
ax.set_xlabel('z/D [-]')
ax.set_xlim([-4,14])
ax.set_ylim([0,2])
if bColorBar:
fig.subplots_adjust(left=0.08, right=0.83, top=0.93, bottom=0.11,hspace=0.17)
cbar_ax = fig.add_axes([0.88, 0.11, 0.04, 0.82])
cbar=fig.colorbar(im, cax=cbar_ax)
cbar.set_ticks(levelsLines)
cbar.set_ticklabels([str(v) if v not in [0.99] else '' for v in levelsLines])
cbar.ax.tick_params(axis='both', direction='in',length=18,color=(0.5,0.5,0.5))
else:
fig.subplots_adjust(left=0.035, right=0.990, top=0.96, bottom=0.08,hspace=0.17)
plt.show()
#
| 27.752688 | 99 | 0.656722 |
4e94d14f9e33f8f9ececcad8dc9ae7bf9c8c11b5 | 1,924 | py | Python | tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py | googleapis/client-generator-python | db9ed9177e65aff07a0c1addf73c32da4dabcaf9 | [
"Apache-2.0"
] | null | null | null | tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py | googleapis/client-generator-python | db9ed9177e65aff07a0c1addf73c32da4dabcaf9 | [
"Apache-2.0"
] | null | null | null | tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py | googleapis/client-generator-python | db9ed9177e65aff07a0c1addf73c32da4dabcaf9 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for CreateTrigger
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-eventarc
# [START eventarc_v1_generated_Eventarc_CreateTrigger_async]
from google.cloud import eventarc_v1
async def sample_create_trigger():
# Create a client
client = eventarc_v1.EventarcAsyncClient()
# Initialize request argument(s)
trigger = eventarc_v1.Trigger()
trigger.name = "name_value"
trigger.event_filters.attribute = "attribute_value"
trigger.event_filters.value = "value_value"
trigger.destination.cloud_run.service = "service_value"
trigger.destination.cloud_run.region = "region_value"
request = eventarc_v1.CreateTriggerRequest(
parent="parent_value",
trigger=trigger,
trigger_id="trigger_id_value",
validate_only=True,
)
# Make the request
operation = client.create_trigger(request=request)
print("Waiting for operation to complete...")
response = await operation.result()
# Handle the response
print(response)
# [END eventarc_v1_generated_Eventarc_CreateTrigger_async]
| 32.066667 | 85 | 0.746362 |
585930e16b00b1d3f8e374c1f595558366d775b4 | 156,924 | py | Python | salt/config/__init__.py | newwebash/salt | 133d0986be65a29373eb7de1269d3d9cec204cea | [
"Apache-2.0"
] | null | null | null | salt/config/__init__.py | newwebash/salt | 133d0986be65a29373eb7de1269d3d9cec204cea | [
"Apache-2.0"
] | null | null | null | salt/config/__init__.py | newwebash/salt | 133d0986be65a29373eb7de1269d3d9cec204cea | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
'''
All salt configuration loading and defaults should be in this module
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals, generators
import os
import re
import sys
import glob
import time
import codecs
import logging
import types
from copy import deepcopy
# pylint: disable=import-error,no-name-in-module
from salt.ext import six
from salt.ext.six.moves.urllib.parse import urlparse
# pylint: enable=import-error,no-name-in-module
# Import salt libs
import salt.utils.data
import salt.utils.dictupdate
import salt.utils.files
import salt.utils.network
import salt.utils.path
import salt.utils.platform
import salt.utils.stringutils
import salt.utils.user
import salt.utils.validate.path
import salt.utils.xdg
import salt.utils.yaml
import salt.utils.zeromq
import salt.syspaths
import salt.exceptions
from salt.utils.locales import sdecode
import salt.defaults.exitcodes
try:
import psutil
if not hasattr(psutil, 'virtual_memory'):
raise ImportError('Version of psutil too old.')
HAS_PSUTIL = True
except ImportError:
HAS_PSUTIL = False
log = logging.getLogger(__name__)
_DFLT_LOG_DATEFMT = '%H:%M:%S'
_DFLT_LOG_DATEFMT_LOGFILE = '%Y-%m-%d %H:%M:%S'
_DFLT_LOG_FMT_CONSOLE = '[%(levelname)-8s] %(message)s'
_DFLT_LOG_FMT_LOGFILE = (
'%(asctime)s,%(msecs)03d [%(name)-17s:%(lineno)-4d][%(levelname)-8s][%(process)d] %(message)s'
)
_DFLT_REFSPECS = ['+refs/heads/*:refs/remotes/origin/*', '+refs/tags/*:refs/tags/*']
DEFAULT_INTERVAL = 60
if salt.utils.platform.is_windows():
# Since an 'ipc_mode' of 'ipc' will never work on Windows due to lack of
# support in ZeroMQ, we want the default to be something that has a
# chance of working.
_DFLT_IPC_MODE = 'tcp'
_MASTER_TRIES = -1
# This needs to be SYSTEM in order for salt-master to run as a Service
# Otherwise, it will not respond to CLI calls
_MASTER_USER = 'SYSTEM'
else:
_DFLT_IPC_MODE = 'ipc'
_MASTER_TRIES = 1
_MASTER_USER = salt.utils.user.get_user()
def _gather_buffer_space():
'''
Gather some system data and then calculate
buffer space.
Result is in bytes.
'''
if HAS_PSUTIL and psutil.version_info >= (0, 6, 0):
# Oh good, we have psutil. This will be quick.
total_mem = psutil.virtual_memory().total
else:
# Avoid loading core grains unless absolutely required
import platform
import salt.grains.core
# We need to load up ``mem_total`` grain. Let's mimic required OS data.
os_data = {'kernel': platform.system()}
grains = salt.grains.core._memdata(os_data)
total_mem = grains['mem_total']
# Return the higher number between 5% of the system memory and 10MiB
return max([total_mem * 0.05, 10 << 20])
# For the time being this will be a fixed calculation
# TODO: Allow user configuration
_DFLT_IPC_WBUFFER = _gather_buffer_space() * .5
# TODO: Reserved for future use
_DFLT_IPC_RBUFFER = _gather_buffer_space() * .5
FLO_DIR = os.path.join(
os.path.dirname(os.path.dirname(__file__)),
'daemons', 'flo')
VALID_OPTS = {
# The address of the salt master. May be specified as IP address or hostname
'master': (six.string_types, list),
# The TCP/UDP port of the master to connect to in order to listen to publications
'master_port': (six.string_types, int),
# The behaviour of the minion when connecting to a master. Can specify 'failover',
# 'disable', 'distributed', or 'func'. If 'func' is specified, the 'master' option should be
# set to an exec module function to run to determine the master hostname. If 'disable' is
# specified the minion will run, but will not try to connect to a master. If 'distributed'
# is specified the minion will try to deterministically pick a master based on its' id.
'master_type': six.string_types,
# Specify the format in which the master address will be specified. Can
# specify 'default' or 'ip_only'. If 'ip_only' is specified, then the
# master address will not be split into IP and PORT.
'master_uri_format': six.string_types,
# The following optiosn refer to the Minion only, and they specify
# the details of the source address / port to be used when connecting to
# the Master. This is useful when dealing withmachines where due to firewall
# rules you are restricted to use a certain IP/port combination only.
'source_interface_name': six.string_types,
'source_address': six.string_types,
'source_ret_port': (six.string_types, int),
'source_publish_port': (six.string_types, int),
# The fingerprint of the master key may be specified to increase security. Generate
# a master fingerprint with `salt-key -F master`
'master_finger': six.string_types,
# Selects a random master when starting a minion up in multi-master mode
'master_shuffle': bool,
# When in multi-master mode, temporarily remove a master from the list if a conenction
# is interrupted and try another master in the list.
'master_alive_interval': int,
# When in multi-master failover mode, fail back to the first master in the list if it's back
# online.
'master_failback': bool,
# When in multi-master mode, and master_failback is enabled ping the top master with this
# interval.
'master_failback_interval': int,
# The name of the signing key-pair
'master_sign_key_name': six.string_types,
# Sign the master auth-replies with a cryptographic signature of the masters public key.
'master_sign_pubkey': bool,
# Enables verification of the master-public-signature returned by the master in auth-replies.
# Must also set master_sign_pubkey for this to work
'verify_master_pubkey_sign': bool,
# If verify_master_pubkey_sign is enabled, the signature is only verified, if the public-key of
# the master changes. If the signature should always be verified, this can be set to True.
'always_verify_signature': bool,
# The name of the file in the masters pki-directory that holds the pre-calculated signature of
# the masters public-key
'master_pubkey_signature': six.string_types,
# Instead of computing the signature for each auth-reply, use a pre-calculated signature.
# The master_pubkey_signature must also be set for this.
'master_use_pubkey_signature': bool,
# Enable master stats eveents to be fired, these events will contain information about
# what commands the master is processing and what the rates are of the executions
'master_stats': bool,
'master_stats_event_iter': int,
# The key fingerprint of the higher-level master for the syndic to verify it is talking to the
# intended master
'syndic_finger': six.string_types,
# The caching mechanism to use for the PKI key store. Can substantially decrease master publish
# times. Available types:
# 'maint': Runs on a schedule as a part of the maintanence process.
# '': Disable the key cache [default]
'key_cache': six.string_types,
# The user under which the daemon should run
'user': six.string_types,
# The root directory prepended to these options: pki_dir, cachedir,
# sock_dir, log_file, autosign_file, autoreject_file, extension_modules,
# key_logfile, pidfile:
'root_dir': six.string_types,
# The directory used to store public key data
'pki_dir': six.string_types,
# A unique identifier for this daemon
'id': six.string_types,
# Use a module function to determine the unique identifier. If this is
# set and 'id' is not set, it will allow invocation of a module function
# to determine the value of 'id'. For simple invocations without function
# arguments, this may be a string that is the function name. For
# invocations with function arguments, this may be a dictionary with the
# key being the function name, and the value being an embedded dictionary
# where each key is a function argument name and each value is the
# corresponding argument value.
'id_function': (dict, six.string_types),
# The directory to store all cache files.
'cachedir': six.string_types,
# Append minion_id to these directories. Helps with
# multiple proxies and minions running on the same machine.
# Allowed elements in the list: pki_dir, cachedir, extension_modules, pidfile
'append_minionid_config_dirs': list,
# Flag to cache jobs locally.
'cache_jobs': bool,
# The path to the salt configuration file
'conf_file': six.string_types,
# The directory containing unix sockets for things like the event bus
'sock_dir': six.string_types,
# The pool size of unix sockets, it is necessary to avoid blocking waiting for zeromq and tcp communications.
'sock_pool_size': int,
# Specifies how the file server should backup files, if enabled. The backups
# live in the cache dir.
'backup_mode': six.string_types,
# A default renderer for all operations on this host
'renderer': six.string_types,
# Renderer whitelist. The only renderers from this list are allowed.
'renderer_whitelist': list,
# Rendrerer blacklist. Renderers from this list are disalloed even if specified in whitelist.
'renderer_blacklist': list,
# A flag indicating that a highstate run should immediately cease if a failure occurs.
'failhard': bool,
# A flag to indicate that highstate runs should force refresh the modules prior to execution
'autoload_dynamic_modules': bool,
# Force the minion into a single environment when it fetches files from the master
'saltenv': (type(None), six.string_types),
# Prevent saltenv from being overridden on the command line
'lock_saltenv': bool,
# Force the minion into a single pillar root when it fetches pillar data from the master
'pillarenv': (type(None), six.string_types),
# Make the pillarenv always match the effective saltenv
'pillarenv_from_saltenv': bool,
# Allows a user to provide an alternate name for top.sls
'state_top': six.string_types,
'state_top_saltenv': (type(None), six.string_types),
# States to run when a minion starts up
'startup_states': six.string_types,
# List of startup states
'sls_list': list,
# Configuration for snapper in the state system
'snapper_states': bool,
'snapper_states_config': six.string_types,
# A top file to execute if startup_states == 'top'
'top_file': six.string_types,
# Location of the files a minion should look for. Set to 'local' to never ask the master.
'file_client': six.string_types,
'local': bool,
# When using a local file_client, this parameter is used to allow the client to connect to
# a master for remote execution.
'use_master_when_local': bool,
# A map of saltenvs and fileserver backend locations
'file_roots': dict,
# A map of saltenvs and fileserver backend locations
'pillar_roots': dict,
# The external pillars permitted to be used on-demand using pillar.ext
'on_demand_ext_pillar': list,
# A map of glob paths to be used
'decrypt_pillar': list,
# Delimiter to use in path expressions for decrypt_pillar
'decrypt_pillar_delimiter': six.string_types,
# Default renderer for decrypt_pillar
'decrypt_pillar_default': six.string_types,
# List of renderers available for decrypt_pillar
'decrypt_pillar_renderers': list,
# The type of hashing algorithm to use when doing file comparisons
'hash_type': six.string_types,
# Refuse to load these modules
'disable_modules': list,
# Refuse to load these returners
'disable_returners': list,
# Tell the loader to only load modules in this list
'whitelist_modules': list,
# A list of additional directories to search for salt modules in
'module_dirs': list,
# A list of additional directories to search for salt returners in
'returner_dirs': list,
# A list of additional directories to search for salt states in
'states_dirs': list,
# A list of additional directories to search for salt grains in
'grains_dirs': list,
# A list of additional directories to search for salt renderers in
'render_dirs': list,
# A list of additional directories to search for salt outputters in
'outputter_dirs': list,
# A list of additional directories to search for salt utilities in. (Used by the loader
# to populate __utils__)
'utils_dirs': list,
# salt cloud providers
'providers': dict,
# First remove all modules during any sync operation
'clean_dynamic_modules': bool,
# A flag indicating that a master should accept any minion connection without any authentication
'open_mode': bool,
# Whether or not processes should be forked when needed. The alternative is to use threading.
'multiprocessing': bool,
# Maximum number of concurrently active processes at any given point in time
'process_count_max': int,
# Whether or not the salt minion should run scheduled mine updates
'mine_enabled': bool,
# Whether or not scheduled mine updates should be accompanied by a job return for the job cache
'mine_return_job': bool,
# The number of minutes between mine updates.
'mine_interval': int,
# The ipc strategy. (i.e., sockets versus tcp, etc)
'ipc_mode': six.string_types,
# Enable ipv6 support for daemons
'ipv6': bool,
# The chunk size to use when streaming files with the file server
'file_buffer_size': int,
# The TCP port on which minion events should be published if ipc_mode is TCP
'tcp_pub_port': int,
# The TCP port on which minion events should be pulled if ipc_mode is TCP
'tcp_pull_port': int,
# The TCP port on which events for the master should be published if ipc_mode is TCP
'tcp_master_pub_port': int,
# The TCP port on which events for the master should be pulled if ipc_mode is TCP
'tcp_master_pull_port': int,
# The TCP port on which events for the master should pulled and then republished onto
# the event bus on the master
'tcp_master_publish_pull': int,
# The TCP port for mworkers to connect to on the master
'tcp_master_workers': int,
# The file to send logging data to
'log_file': six.string_types,
# The level of verbosity at which to log
'log_level': six.string_types,
# The log level to log to a given file
'log_level_logfile': (type(None), six.string_types),
# The format to construct dates in log files
'log_datefmt': six.string_types,
# The dateformat for a given logfile
'log_datefmt_logfile': six.string_types,
# The format for console logs
'log_fmt_console': six.string_types,
# The format for a given log file
'log_fmt_logfile': (tuple, six.string_types),
# A dictionary of logging levels
'log_granular_levels': dict,
# The maximum number of bytes a single log file may contain before
# it is rotated. A value of 0 disables this feature.
# Currently only supported on Windows. On other platforms, use an
# external tool such as 'logrotate' to manage log files.
'log_rotate_max_bytes': int,
# The number of backup files to keep when rotating log files. Only
# used if log_rotate_max_bytes is greater than 0.
# Currently only supported on Windows. On other platforms, use an
# external tool such as 'logrotate' to manage log files.
'log_rotate_backup_count': int,
# If an event is above this size, it will be trimmed before putting it on the event bus
'max_event_size': int,
# Enable old style events to be sent on minion_startup. Change default to False in Neon release
'enable_legacy_startup_events': bool,
# Always execute states with test=True if this flag is set
'test': bool,
# Tell the loader to attempt to import *.pyx cython files if cython is available
'cython_enable': bool,
# Tell the loader to attempt to import *.zip archives
'enable_zip_modules': bool,
# Tell the client to show minions that have timed out
'show_timeout': bool,
# Tell the client to display the jid when a job is published
'show_jid': bool,
# Ensure that a generated jid is always unique. If this is set, the jid
# format is different due to an underscore and process id being appended
# to the jid. WARNING: A change to the jid format may break external
# applications that depend on the original format.
'unique_jid': bool,
# Tells the highstate outputter to show successful states. False will omit successes.
'state_verbose': bool,
# Specify the format for state outputs. See highstate outputter for additional details.
'state_output': six.string_types,
# Tells the highstate outputter to only report diffs of states that changed
'state_output_diff': bool,
# When true, states run in the order defined in an SLS file, unless requisites re-order them
'state_auto_order': bool,
# Fire events as state chunks are processed by the state compiler
'state_events': bool,
# The number of seconds a minion should wait before retry when attempting authentication
'acceptance_wait_time': float,
# The number of seconds a minion should wait before giving up during authentication
'acceptance_wait_time_max': float,
# Retry a connection attempt if the master rejects a minion's public key
'rejected_retry': bool,
# The interval in which a daemon's main loop should attempt to perform all necessary tasks
# for normal operation
'loop_interval': float,
# Perform pre-flight verification steps before daemon startup, such as checking configuration
# files and certain directories.
'verify_env': bool,
# The grains dictionary for a minion, containing specific "facts" about the minion
'grains': dict,
# Allow a daemon to function even if the key directories are not secured
'permissive_pki_access': bool,
# The passphrase of the master's private key
'key_pass': (type(None), six.string_types),
# The passphrase of the master's private signing key
'signing_key_pass': (type(None), six.string_types),
# The path to a directory to pull in configuration file includes
'default_include': six.string_types,
# If a minion is running an esky build of salt, upgrades can be performed using the url
# defined here. See saltutil.update() for additional information
'update_url': (bool, six.string_types),
# If using update_url with saltutil.update(), provide a list of services to be restarted
# post-install
'update_restart_services': list,
# The number of seconds to sleep between retrying an attempt to resolve the hostname of a
# salt master
'retry_dns': float,
# In the case when the resolve of the salt master hostname fails, fall back to localhost
'resolve_dns_fallback': bool,
# set the zeromq_reconnect_ivl option on the minion.
# http://lists.zeromq.org/pipermail/zeromq-dev/2011-January/008845.html
'recon_max': float,
# If recon_randomize is set, this specifies the lower bound for the randomized period
'recon_default': float,
# Tells the minion to choose a bounded, random interval to have zeromq attempt to reconnect
# in the event of a disconnect event
'recon_randomize': bool,
'return_retry_timer': int,
'return_retry_timer_max': int,
# Specify one or more returners in which all events will be sent to. Requires that the returners
# in question have an event_return(event) function!
'event_return': (list, six.string_types),
# The number of events to queue up in memory before pushing them down the pipe to an event
# returner specified by 'event_return'
'event_return_queue': int,
# Only forward events to an event returner if it matches one of the tags in this list
'event_return_whitelist': list,
# Events matching a tag in this list should never be sent to an event returner.
'event_return_blacklist': list,
# default match type for filtering events tags: startswith, endswith, find, regex, fnmatch
'event_match_type': six.string_types,
# This pidfile to write out to when a daemon starts
'pidfile': six.string_types,
# Used with the SECO range master tops system
'range_server': six.string_types,
# The tcp keepalive interval to set on TCP ports. This setting can be used to tune Salt
# connectivity issues in messy network environments with misbehaving firewalls
'tcp_keepalive': bool,
# Sets zeromq TCP keepalive idle. May be used to tune issues with minion disconnects
'tcp_keepalive_idle': float,
# Sets zeromq TCP keepalive count. May be used to tune issues with minion disconnects
'tcp_keepalive_cnt': float,
# Sets zeromq TCP keepalive interval. May be used to tune issues with minion disconnects.
'tcp_keepalive_intvl': float,
# The network interface for a daemon to bind to
'interface': six.string_types,
# The port for a salt master to broadcast publications on. This will also be the port minions
# connect to to listen for publications.
'publish_port': int,
# TODO unknown option!
'auth_mode': int,
# listen queue size / backlog
'zmq_backlog': int,
# Set the zeromq high water mark on the publisher interface.
# http://api.zeromq.org/3-2:zmq-setsockopt
'pub_hwm': int,
# IPC buffer size
# Refs https://github.com/saltstack/salt/issues/34215
'ipc_write_buffer': int,
# The number of MWorker processes for a master to startup. This number needs to scale up as
# the number of connected minions increases.
'worker_threads': int,
# The port for the master to listen to returns on. The minion needs to connect to this port
# to send returns.
'ret_port': int,
# The number of hours to keep jobs around in the job cache on the master
'keep_jobs': int,
# If the returner supports `clean_old_jobs`, then at cleanup time,
# archive the job data before deleting it.
'archive_jobs': bool,
# A master-only copy of the file_roots dictionary, used by the state compiler
'master_roots': dict,
# Add the proxymodule LazyLoader object to opts. This breaks many things
# but this was the default pre 2015.8.2. This should default to
# False in 2016.3.0
'add_proxymodule_to_opts': bool,
# Merge pillar data into configuration opts.
# As multiple proxies can run on the same server, we may need different
# configuration options for each, while there's one single configuration file.
# The solution is merging the pillar data of each proxy minion into the opts.
'proxy_merge_pillar_in_opts': bool,
# Deep merge of pillar data into configuration opts.
# Evaluated only when `proxy_merge_pillar_in_opts` is True.
'proxy_deep_merge_pillar_in_opts': bool,
# The strategy used when merging pillar into opts.
# Considered only when `proxy_merge_pillar_in_opts` is True.
'proxy_merge_pillar_in_opts_strategy': six.string_types,
# Allow enabling mine details using pillar data.
'proxy_mines_pillar': bool,
# In some particular cases, always alive proxies are not beneficial.
# This option can be used in those less dynamic environments:
# the user can request the connection
# always alive, or init-shutdown per command.
'proxy_always_alive': bool,
# Poll the connection state with the proxy minion
# If enabled, this option requires the function `alive`
# to be implemented in the proxy module
'proxy_keep_alive': bool,
# Frequency of the proxy_keep_alive, in minutes
'proxy_keep_alive_interval': int,
# Update intervals
'roots_update_interval': int,
'azurefs_update_interval': int,
'gitfs_update_interval': int,
'hgfs_update_interval': int,
'minionfs_update_interval': int,
's3fs_update_interval': int,
'svnfs_update_interval': int,
# NOTE: git_pillar_base, git_pillar_branch, git_pillar_env, and
# git_pillar_root omitted here because their values could conceivably be
# loaded as non-string types, which is OK because git_pillar will normalize
# them to strings. But rather than include all the possible types they
# could be, we'll just skip type-checking.
'git_pillar_ssl_verify': bool,
'git_pillar_global_lock': bool,
'git_pillar_user': six.string_types,
'git_pillar_password': six.string_types,
'git_pillar_insecure_auth': bool,
'git_pillar_privkey': six.string_types,
'git_pillar_pubkey': six.string_types,
'git_pillar_passphrase': six.string_types,
'git_pillar_refspecs': list,
'git_pillar_includes': bool,
'git_pillar_verify_config': bool,
# NOTE: gitfs_base, gitfs_mountpoint, and gitfs_root omitted here because
# their values could conceivably be loaded as non-string types, which is OK
# because gitfs will normalize them to strings. But rather than include all
# the possible types they could be, we'll just skip type-checking.
'gitfs_remotes': list,
'gitfs_insecure_auth': bool,
'gitfs_privkey': six.string_types,
'gitfs_pubkey': six.string_types,
'gitfs_passphrase': six.string_types,
'gitfs_env_whitelist': list,
'gitfs_env_blacklist': list,
'gitfs_saltenv_whitelist': list,
'gitfs_saltenv_blacklist': list,
'gitfs_ssl_verify': bool,
'gitfs_global_lock': bool,
'gitfs_saltenv': list,
'gitfs_ref_types': list,
'gitfs_refspecs': list,
'gitfs_disable_saltenv_mapping': bool,
'hgfs_remotes': list,
'hgfs_mountpoint': six.string_types,
'hgfs_root': six.string_types,
'hgfs_base': six.string_types,
'hgfs_branch_method': six.string_types,
'hgfs_env_whitelist': list,
'hgfs_env_blacklist': list,
'hgfs_saltenv_whitelist': list,
'hgfs_saltenv_blacklist': list,
'svnfs_remotes': list,
'svnfs_mountpoint': six.string_types,
'svnfs_root': six.string_types,
'svnfs_trunk': six.string_types,
'svnfs_branches': six.string_types,
'svnfs_tags': six.string_types,
'svnfs_env_whitelist': list,
'svnfs_env_blacklist': list,
'svnfs_saltenv_whitelist': list,
'svnfs_saltenv_blacklist': list,
'minionfs_env': six.string_types,
'minionfs_mountpoint': six.string_types,
'minionfs_whitelist': list,
'minionfs_blacklist': list,
# Specify a list of external pillar systems to use
'ext_pillar': list,
# Reserved for future use to version the pillar structure
'pillar_version': int,
# Whether or not a copy of the master opts dict should be rendered into minion pillars
'pillar_opts': bool,
# Cache the master pillar to disk to avoid having to pass through the rendering system
'pillar_cache': bool,
# Pillar cache TTL, in seconds. Has no effect unless `pillar_cache` is True
'pillar_cache_ttl': int,
# Pillar cache backend. Defaults to `disk` which stores caches in the master cache
'pillar_cache_backend': six.string_types,
'pillar_safe_render_error': bool,
# When creating a pillar, there are several strategies to choose from when
# encountering duplicate values
'pillar_source_merging_strategy': six.string_types,
# Recursively merge lists by aggregating them instead of replacing them.
'pillar_merge_lists': bool,
# If True, values from included pillar SLS targets will override
'pillar_includes_override_sls': bool,
# How to merge multiple top files from multiple salt environments
# (saltenvs); can be 'merge' or 'same'
'top_file_merging_strategy': six.string_types,
# The ordering for salt environment merging, when top_file_merging_strategy
# is set to 'same'
'env_order': list,
# The salt environment which provides the default top file when
# top_file_merging_strategy is set to 'same'; defaults to 'base'
'default_top': six.string_types,
'ping_on_rotate': bool,
'peer': dict,
'preserve_minion_cache': bool,
'syndic_master': (six.string_types, list),
# The behaviour of the multimaster syndic when connection to a master of masters failed. Can
# specify 'random' (default) or 'ordered'. If set to 'random' masters will be iterated in random
# order if 'ordered' the configured order will be used.
'syndic_failover': six.string_types,
'syndic_forward_all_events': bool,
'runner_dirs': list,
'client_acl_verify': bool,
'publisher_acl': dict,
'publisher_acl_blacklist': dict,
'sudo_acl': bool,
'external_auth': dict,
'token_expire': int,
'token_expire_user_override': (bool, dict),
'file_recv': bool,
'file_recv_max_size': int,
'file_ignore_regex': (list, six.string_types),
'file_ignore_glob': (list, six.string_types),
'fileserver_backend': list,
'fileserver_followsymlinks': bool,
'fileserver_ignoresymlinks': bool,
'fileserver_limit_traversal': bool,
'fileserver_verify_config': bool,
# Optionally apply '*' permissioins to any user. By default '*' is a fallback case that is
# applied only if the user didn't matched by other matchers.
'permissive_acl': bool,
# Optionally enables keeping the calculated user's auth list in the token file.
'keep_acl_in_token': bool,
# Auth subsystem module to use to get authorized access list for a user. By default it's the
# same module used for external authentication.
'eauth_acl_module': six.string_types,
# Subsystem to use to maintain eauth tokens. By default, tokens are stored on the local
# filesystem
'eauth_tokens': six.string_types,
# The number of open files a daemon is allowed to have open. Frequently needs to be increased
# higher than the system default in order to account for the way zeromq consumes file handles.
'max_open_files': int,
# Automatically accept any key provided to the master. Implies that the key will be preserved
# so that subsequent connections will be authenticated even if this option has later been
# turned off.
'auto_accept': bool,
'autosign_timeout': int,
# A mapping of external systems that can be used to generate topfile data.
'master_tops': dict,
# Whether or not matches from master_tops should be executed before or
# after those from the top file(s).
'master_tops_first': bool,
# A flag that should be set on a top-level master when it is ordering around subordinate masters
# via the use of a salt syndic
'order_masters': bool,
# Whether or not to cache jobs so that they can be examined later on
'job_cache': bool,
# Define a returner to be used as an external job caching storage backend
'ext_job_cache': six.string_types,
# Specify a returner for the master to use as a backend storage system to cache jobs returns
# that it receives
'master_job_cache': six.string_types,
# Specify whether the master should store end times for jobs as returns come in
'job_cache_store_endtime': bool,
# The minion data cache is a cache of information about the minions stored on the master.
# This information is primarily the pillar and grains data. The data is cached in the master
# cachedir under the name of the minion and used to predetermine what minions are expected to
# reply from executions.
'minion_data_cache': bool,
# The number of seconds between AES key rotations on the master
'publish_session': int,
# Defines a salt reactor. See http://docs.saltstack.com/en/latest/topics/reactor/
'reactor': list,
# The TTL for the cache of the reactor configuration
'reactor_refresh_interval': int,
# The number of workers for the runner/wheel in the reactor
'reactor_worker_threads': int,
# The queue size for workers in the reactor
'reactor_worker_hwm': int,
# Defines engines. See https://docs.saltstack.com/en/latest/topics/engines/
'engines': list,
# Whether or not to store runner returns in the job cache
'runner_returns': bool,
'serial': six.string_types,
'search': six.string_types,
# A compound target definition.
# See: http://docs.saltstack.com/en/latest/topics/targeting/nodegroups.html
'nodegroups': (dict, list),
# List-only nodegroups for salt-ssh. Each group must be formed as either a
# comma-separated list, or a YAML list.
'ssh_list_nodegroups': dict,
# By default, salt-ssh uses its own specially-generated RSA key to auth
# against minions. If this is set to True, salt-ssh will look in
# for a key at ~/.ssh/id_rsa, and fall back to using its own specially-
# generated RSA key if that file doesn't exist.
'ssh_use_home_key': bool,
# The logfile location for salt-key
'key_logfile': six.string_types,
# The upper bound for the random number of seconds that a minion should
# delay when starting in up before it connects to a master. This can be
# used to mitigate a thundering-herd scenario when many minions start up
# at once and attempt to all connect immediately to the master
'random_startup_delay': int,
# The source location for the winrepo sls files
# (used by win_pkg.py, minion only)
'winrepo_source_dir': six.string_types,
'winrepo_dir': six.string_types,
'winrepo_dir_ng': six.string_types,
'winrepo_cachefile': six.string_types,
# NOTE: winrepo_branch omitted here because its value could conceivably be
# loaded as a non-string type, which is OK because winrepo will normalize
# them to strings. But rather than include all the possible types it could
# be, we'll just skip type-checking.
'winrepo_cache_expire_max': int,
'winrepo_cache_expire_min': int,
'winrepo_remotes': list,
'winrepo_remotes_ng': list,
'winrepo_ssl_verify': bool,
'winrepo_user': six.string_types,
'winrepo_password': six.string_types,
'winrepo_insecure_auth': bool,
'winrepo_privkey': six.string_types,
'winrepo_pubkey': six.string_types,
'winrepo_passphrase': six.string_types,
'winrepo_refspecs': list,
# Set a hard limit for the amount of memory modules can consume on a minion.
'modules_max_memory': int,
# The number of minutes between the minion refreshing its cache of grains
'grains_refresh_every': int,
# Use lspci to gather system data for grains on a minion
'enable_lspci': bool,
# The number of seconds for the salt client to wait for additional syndics to
# check in with their lists of expected minions before giving up
'syndic_wait': int,
# Override Jinja environment option defaults for all templates except sls templates
'jinja_env': dict,
# Set Jinja environment options for sls templates
'jinja_sls_env': dict,
# If this is set to True leading spaces and tabs are stripped from the start
# of a line to a block.
'jinja_lstrip_blocks': bool,
# If this is set to True the first newline after a Jinja block is removed
'jinja_trim_blocks': bool,
# Cache minion ID to file
'minion_id_caching': bool,
# Always generate minion id in lowercase.
'minion_id_lowercase': bool,
# If set, the master will sign all publications before they are sent out
'sign_pub_messages': bool,
# The size of key that should be generated when creating new keys
'keysize': int,
# The transport system for this daemon. (i.e. zeromq, raet, etc)
'transport': six.string_types,
# The number of seconds to wait when the client is requesting information about running jobs
'gather_job_timeout': int,
# The number of seconds to wait before timing out an authentication request
'auth_timeout': int,
# The number of attempts to authenticate to a master before giving up
'auth_tries': int,
# The number of attempts to connect to a master before giving up.
# Set this to -1 for unlimited attempts. This allows for a master to have
# downtime and the minion to reconnect to it later when it comes back up.
# In 'failover' mode, it is the number of attempts for each set of masters.
# In this mode, it will cycle through the list of masters for each attempt.
'master_tries': int,
# Never give up when trying to authenticate to a master
'auth_safemode': bool,
'random_master': bool,
# An upper bound for the amount of time for a minion to sleep before attempting to
# reauth after a restart.
'random_reauth_delay': int,
# The number of seconds for a syndic to poll for new messages that need to be forwarded
'syndic_event_forward_timeout': float,
# The length that the syndic event queue must hit before events are popped off and forwarded
'syndic_jid_forward_cache_hwm': int,
# Salt SSH configuration
'ssh_passwd': six.string_types,
'ssh_port': six.string_types,
'ssh_sudo': bool,
'ssh_sudo_user': six.string_types,
'ssh_timeout': float,
'ssh_user': six.string_types,
'ssh_scan_ports': six.string_types,
'ssh_scan_timeout': float,
'ssh_identities_only': bool,
'ssh_log_file': six.string_types,
'ssh_config_file': six.string_types,
# Enable ioflo verbose logging. Warning! Very verbose!
'ioflo_verbose': int,
'ioflo_period': float,
# Set ioflo to realtime. Useful only for testing/debugging to simulate many ioflo periods very
# quickly
'ioflo_realtime': bool,
# Location for ioflo logs
'ioflo_console_logdir': six.string_types,
# The port to bind to when bringing up a RAET daemon
'raet_port': int,
'raet_alt_port': int,
'raet_mutable': bool,
'raet_main': bool,
'raet_clear_remotes': bool,
'raet_clear_remote_masters': bool,
'raet_road_bufcnt': int,
'raet_lane_bufcnt': int,
'cluster_mode': bool,
'cluster_masters': list,
'sqlite_queue_dir': six.string_types,
'queue_dirs': list,
# Instructs the minion to ping its master(s) every n number of minutes. Used
# primarily as a mitigation technique against minion disconnects.
'ping_interval': int,
# Instructs the salt CLI to print a summary of a minion responses before returning
'cli_summary': bool,
# The maximum number of minion connections allowed by the master. Can have performance
# implications in large setups.
'max_minions': int,
'username': (type(None), six.string_types),
'password': (type(None), six.string_types),
# Use zmq.SUSCRIBE to limit listening sockets to only process messages bound for them
'zmq_filtering': bool,
# Connection caching. Can greatly speed up salt performance.
'con_cache': bool,
'rotate_aes_key': bool,
# Cache ZeroMQ connections. Can greatly improve salt performance.
'cache_sreqs': bool,
# Can be set to override the python_shell=False default in the cmd module
'cmd_safe': bool,
# Used strictly for performance testing in RAET.
'dummy_publisher': bool,
# Used by salt-api for master requests timeout
'rest_timeout': int,
# If set, all minion exec module actions will be rerouted through sudo as this user
'sudo_user': six.string_types,
# HTTP connection timeout in seconds. Applied for tornado http fetch functions like cp.get_url
# should be greater than overall download time
'http_connect_timeout': float,
# HTTP request timeout in seconds. Applied for tornado http fetch functions like cp.get_url
# should be greater than overall download time
'http_request_timeout': float,
# HTTP request max file content size.
'http_max_body': int,
# Delay in seconds before executing bootstrap (Salt Cloud)
'bootstrap_delay': int,
# If a proxymodule has a function called 'grains', then call it during
# regular grains loading and merge the results with the proxy's grains
# dictionary. Otherwise it is assumed that the module calls the grains
# function in a custom way and returns the data elsewhere
#
# Default to False for 2016.3 and 2016.11. Switch to True for 2017.7.0
'proxy_merge_grains_in_module': bool,
# Command to use to restart salt-minion
'minion_restart_command': list,
# Whether or not a minion should send the results of a command back to the master
# Useful when a returner is the source of truth for a job result
'pub_ret': bool,
# HTTP proxy settings. Used in tornado fetch functions, apt-key etc
'proxy_host': six.string_types,
'proxy_username': six.string_types,
'proxy_password': six.string_types,
'proxy_port': int,
# Minion de-dup jid cache max size
'minion_jid_queue_hwm': int,
# Minion data cache driver (one of satl.cache.* modules)
'cache': six.string_types,
# Enables a fast in-memory cache booster and sets the expiration time.
'memcache_expire_seconds': int,
# Set a memcache limit in items (bank + key) per cache storage (driver + driver_opts).
'memcache_max_items': int,
# Each time a cache storage got full cleanup all the expired items not just the oldest one.
'memcache_full_cleanup': bool,
# Enable collecting the memcache stats and log it on `debug` log level.
'memcache_debug': bool,
# Thin and minimal Salt extra modules
'thin_extra_mods': six.string_types,
'min_extra_mods': six.string_types,
# Default returners minion should use. List or comma-delimited string
'return': (six.string_types, list),
# TLS/SSL connection options. This could be set to a dictionary containing arguments
# corresponding to python ssl.wrap_socket method. For details see:
# http://www.tornadoweb.org/en/stable/tcpserver.html#tornado.tcpserver.TCPServer
# http://docs.python.org/2/library/ssl.html#ssl.wrap_socket
# Note: to set enum arguments values like `cert_reqs` and `ssl_version` use constant names
# without ssl module prefix: `CERT_REQUIRED` or `PROTOCOL_SSLv23`.
'ssl': (dict, bool, type(None)),
# Controls how a multi-function job returns its data. If this is False,
# it will return its data using a dictionary with the function name as
# the key. This is compatible with legacy systems. If this is True, it
# will return its data using an array in the same order as the input
# array of functions to execute. This allows for calling the same
# function multiple times in the same multi-function job.
'multifunc_ordered': bool,
# Controls whether beacons are set up before a connection
# to the master is attempted.
'beacons_before_connect': bool,
# Controls whether the scheduler is set up before a connection
# to the master is attempted.
'scheduler_before_connect': bool,
# Whitelist/blacklist specific modules to be synced
'extmod_whitelist': dict,
'extmod_blacklist': dict,
# django auth
'django_auth_path': six.string_types,
'django_auth_settings': six.string_types,
# Number of times to try to auth with the master on a reconnect with the
# tcp transport
'tcp_authentication_retries': int,
# Permit or deny allowing minions to request revoke of its own key
'allow_minion_key_revoke': bool,
# File chunk size for salt-cp
'salt_cp_chunk_size': int,
# Require that the minion sign messages it posts to the master on the event
# bus
'minion_sign_messages': bool,
# Have master drop messages from minions for which their signatures do
# not verify
'drop_messages_signature_fail': bool,
# Require that payloads from minions have a 'sig' entry
# (in other words, require that minions have 'minion_sign_messages'
# turned on)
'require_minion_sign_messages': bool,
# The list of config entries to be passed to external pillar function as
# part of the extra_minion_data param
# Subconfig entries can be specified by using the ':' notation (e.g. key:subkey)
'pass_to_ext_pillars': (six.string_types, list),
# Used by salt.modules.dockermod.compare_container_networks to specify which keys are compared
'docker.compare_container_networks': dict,
# SSDP discovery publisher description.
# Contains publisher configuration and minion mapping.
# Setting it to False disables discovery
'discovery': (dict, bool),
# Scheduler should be a dictionary
'schedule': dict,
# Whether to fire auth events
'auth_events': bool,
# Whether to fire Minion data cache refresh events
'minion_data_cache_events': bool,
# Enable calling ssh minions from the salt master
'enable_ssh_minions': bool,
}
# default configurations
DEFAULT_MINION_OPTS = {
'interface': '0.0.0.0',
'master': 'salt',
'master_type': 'str',
'master_uri_format': 'default',
'source_interface_name': '',
'source_address': '',
'source_ret_port': 0,
'source_publish_port': 0,
'master_port': 4506,
'master_finger': '',
'master_shuffle': False,
'master_alive_interval': 0,
'master_failback': False,
'master_failback_interval': 0,
'verify_master_pubkey_sign': False,
'sign_pub_messages': False,
'always_verify_signature': False,
'master_sign_key_name': 'master_sign',
'syndic_finger': '',
'user': salt.utils.user.get_user(),
'root_dir': salt.syspaths.ROOT_DIR,
'pki_dir': os.path.join(salt.syspaths.CONFIG_DIR, 'pki', 'minion'),
'id': '',
'id_function': {},
'cachedir': os.path.join(salt.syspaths.CACHE_DIR, 'minion'),
'append_minionid_config_dirs': [],
'cache_jobs': False,
'grains_cache': False,
'grains_cache_expiration': 300,
'grains_deep_merge': False,
'conf_file': os.path.join(salt.syspaths.CONFIG_DIR, 'minion'),
'sock_dir': os.path.join(salt.syspaths.SOCK_DIR, 'minion'),
'sock_pool_size': 1,
'backup_mode': '',
'renderer': 'yaml_jinja',
'renderer_whitelist': [],
'renderer_blacklist': [],
'random_startup_delay': 0,
'failhard': False,
'autoload_dynamic_modules': True,
'saltenv': None,
'lock_saltenv': False,
'pillarenv': None,
'pillarenv_from_saltenv': False,
'pillar_opts': False,
'pillar_source_merging_strategy': 'smart',
'pillar_merge_lists': False,
'pillar_includes_override_sls': False,
# ``pillar_cache``, ``pillar_cache_ttl`` and ``pillar_cache_backend``
# are not used on the minion but are unavoidably in the code path
'pillar_cache': False,
'pillar_cache_ttl': 3600,
'pillar_cache_backend': 'disk',
'extension_modules': os.path.join(salt.syspaths.CACHE_DIR, 'minion', 'extmods'),
'state_top': 'top.sls',
'state_top_saltenv': None,
'startup_states': '',
'sls_list': [],
'top_file': '',
'thorium_interval': 0.5,
'thorium_roots': {
'base': [salt.syspaths.BASE_THORIUM_ROOTS_DIR],
},
'file_client': 'remote',
'local': False,
'use_master_when_local': False,
'file_roots': {
'base': [salt.syspaths.BASE_FILE_ROOTS_DIR,
salt.syspaths.SPM_FORMULA_PATH]
},
'top_file_merging_strategy': 'merge',
'env_order': [],
'default_top': 'base',
'fileserver_limit_traversal': False,
'file_recv': False,
'file_recv_max_size': 100,
'file_ignore_regex': [],
'file_ignore_glob': [],
'fileserver_backend': ['roots'],
'fileserver_followsymlinks': True,
'fileserver_ignoresymlinks': False,
'pillar_roots': {
'base': [salt.syspaths.BASE_PILLAR_ROOTS_DIR,
salt.syspaths.SPM_PILLAR_PATH]
},
'on_demand_ext_pillar': ['libvirt', 'virtkey'],
'decrypt_pillar': [],
'decrypt_pillar_delimiter': ':',
'decrypt_pillar_default': 'gpg',
'decrypt_pillar_renderers': ['gpg'],
# Update intervals
'roots_update_interval': DEFAULT_INTERVAL,
'azurefs_update_interval': DEFAULT_INTERVAL,
'gitfs_update_interval': DEFAULT_INTERVAL,
'hgfs_update_interval': DEFAULT_INTERVAL,
'minionfs_update_interval': DEFAULT_INTERVAL,
's3fs_update_interval': DEFAULT_INTERVAL,
'svnfs_update_interval': DEFAULT_INTERVAL,
'git_pillar_base': 'master',
'git_pillar_branch': 'master',
'git_pillar_env': '',
'git_pillar_root': '',
'git_pillar_ssl_verify': True,
'git_pillar_global_lock': True,
'git_pillar_user': '',
'git_pillar_password': '',
'git_pillar_insecure_auth': False,
'git_pillar_privkey': '',
'git_pillar_pubkey': '',
'git_pillar_passphrase': '',
'git_pillar_refspecs': _DFLT_REFSPECS,
'git_pillar_includes': True,
'gitfs_remotes': [],
'gitfs_mountpoint': '',
'gitfs_root': '',
'gitfs_base': 'master',
'gitfs_user': '',
'gitfs_password': '',
'gitfs_insecure_auth': False,
'gitfs_privkey': '',
'gitfs_pubkey': '',
'gitfs_passphrase': '',
'gitfs_env_whitelist': [],
'gitfs_env_blacklist': [],
'gitfs_saltenv_whitelist': [],
'gitfs_saltenv_blacklist': [],
'gitfs_global_lock': True,
'gitfs_ssl_verify': True,
'gitfs_saltenv': [],
'gitfs_ref_types': ['branch', 'tag', 'sha'],
'gitfs_refspecs': _DFLT_REFSPECS,
'gitfs_disable_saltenv_mapping': False,
'unique_jid': False,
'hash_type': 'sha256',
'disable_modules': [],
'disable_returners': [],
'whitelist_modules': [],
'module_dirs': [],
'returner_dirs': [],
'grains_dirs': [],
'states_dirs': [],
'render_dirs': [],
'outputter_dirs': [],
'utils_dirs': [],
'publisher_acl': {},
'publisher_acl_blacklist': {},
'providers': {},
'clean_dynamic_modules': True,
'open_mode': False,
'auto_accept': True,
'autosign_timeout': 120,
'multiprocessing': True,
'process_count_max': -1,
'mine_enabled': True,
'mine_return_job': False,
'mine_interval': 60,
'ipc_mode': _DFLT_IPC_MODE,
'ipc_write_buffer': _DFLT_IPC_WBUFFER,
'ipv6': False,
'file_buffer_size': 262144,
'tcp_pub_port': 4510,
'tcp_pull_port': 4511,
'tcp_authentication_retries': 5,
'log_file': os.path.join(salt.syspaths.LOGS_DIR, 'minion'),
'log_level': 'warning',
'log_level_logfile': None,
'log_datefmt': _DFLT_LOG_DATEFMT,
'log_datefmt_logfile': _DFLT_LOG_DATEFMT_LOGFILE,
'log_fmt_console': _DFLT_LOG_FMT_CONSOLE,
'log_fmt_logfile': _DFLT_LOG_FMT_LOGFILE,
'log_granular_levels': {},
'log_rotate_max_bytes': 0,
'log_rotate_backup_count': 0,
'max_event_size': 1048576,
'enable_legacy_startup_events': True,
'test': False,
'ext_job_cache': '',
'cython_enable': False,
'enable_zip_modules': False,
'state_verbose': True,
'state_output': 'full',
'state_output_diff': False,
'state_auto_order': True,
'state_events': False,
'state_aggregate': False,
'snapper_states': False,
'snapper_states_config': 'root',
'acceptance_wait_time': 10,
'acceptance_wait_time_max': 0,
'rejected_retry': False,
'loop_interval': 1,
'verify_env': True,
'grains': {},
'permissive_pki_access': False,
'default_include': 'minion.d/*.conf',
'update_url': False,
'update_restart_services': [],
'retry_dns': 30,
'resolve_dns_fallback': True,
'recon_max': 10000,
'recon_default': 1000,
'recon_randomize': True,
'return_retry_timer': 5,
'return_retry_timer_max': 10,
'random_reauth_delay': 10,
'winrepo_source_dir': 'salt://win/repo-ng/',
'winrepo_dir': os.path.join(salt.syspaths.BASE_FILE_ROOTS_DIR, 'win', 'repo'),
'winrepo_dir_ng': os.path.join(salt.syspaths.BASE_FILE_ROOTS_DIR, 'win', 'repo-ng'),
'winrepo_cachefile': 'winrepo.p',
'winrepo_cache_expire_max': 21600,
'winrepo_cache_expire_min': 0,
'winrepo_remotes': ['https://github.com/saltstack/salt-winrepo.git'],
'winrepo_remotes_ng': ['https://github.com/saltstack/salt-winrepo-ng.git'],
'winrepo_branch': 'master',
'winrepo_ssl_verify': True,
'winrepo_user': '',
'winrepo_password': '',
'winrepo_insecure_auth': False,
'winrepo_privkey': '',
'winrepo_pubkey': '',
'winrepo_passphrase': '',
'winrepo_refspecs': _DFLT_REFSPECS,
'pidfile': os.path.join(salt.syspaths.PIDFILE_DIR, 'salt-minion.pid'),
'range_server': 'range:80',
'reactor_refresh_interval': 60,
'reactor_worker_threads': 10,
'reactor_worker_hwm': 10000,
'engines': [],
'tcp_keepalive': True,
'tcp_keepalive_idle': 300,
'tcp_keepalive_cnt': -1,
'tcp_keepalive_intvl': -1,
'modules_max_memory': -1,
'grains_refresh_every': 0,
'minion_id_caching': True,
'minion_id_lowercase': False,
'keysize': 2048,
'transport': 'zeromq',
'auth_timeout': 5,
'auth_tries': 7,
'master_tries': _MASTER_TRIES,
'master_tops_first': False,
'auth_safemode': False,
'random_master': False,
'minion_floscript': os.path.join(FLO_DIR, 'minion.flo'),
'caller_floscript': os.path.join(FLO_DIR, 'caller.flo'),
'ioflo_verbose': 0,
'ioflo_period': 0.1,
'ioflo_realtime': True,
'ioflo_console_logdir': '',
'raet_port': 4510,
'raet_alt_port': 4511,
'raet_mutable': False,
'raet_main': False,
'raet_clear_remotes': True,
'raet_clear_remote_masters': True,
'raet_road_bufcnt': 2,
'raet_lane_bufcnt': 100,
'cluster_mode': False,
'cluster_masters': [],
'restart_on_error': False,
'ping_interval': 0,
'username': None,
'password': None,
'zmq_filtering': False,
'zmq_monitor': False,
'cache_sreqs': True,
'cmd_safe': True,
'sudo_user': '',
'http_connect_timeout': 20.0, # tornado default - 20 seconds
'http_request_timeout': 1 * 60 * 60.0, # 1 hour
'http_max_body': 100 * 1024 * 1024 * 1024, # 100GB
'event_match_type': 'startswith',
'minion_restart_command': [],
'pub_ret': True,
'proxy_host': '',
'proxy_username': '',
'proxy_password': '',
'proxy_port': 0,
'minion_jid_queue_hwm': 100,
'ssl': None,
'multifunc_ordered': False,
'beacons_before_connect': False,
'scheduler_before_connect': False,
'cache': 'localfs',
'salt_cp_chunk_size': 65536,
'extmod_whitelist': {},
'extmod_blacklist': {},
'minion_sign_messages': False,
'docker.compare_container_networks': {
'static': ['Aliases', 'Links', 'IPAMConfig'],
'automatic': ['IPAddress', 'Gateway',
'GlobalIPv6Address', 'IPv6Gateway'],
},
'discovery': False,
'schedule': {},
}
DEFAULT_MASTER_OPTS = {
'interface': '0.0.0.0',
'publish_port': 4505,
'zmq_backlog': 1000,
'pub_hwm': 1000,
'auth_mode': 1,
'user': _MASTER_USER,
'worker_threads': 5,
'sock_dir': os.path.join(salt.syspaths.SOCK_DIR, 'master'),
'sock_pool_size': 1,
'ret_port': 4506,
'timeout': 5,
'keep_jobs': 24,
'archive_jobs': False,
'root_dir': salt.syspaths.ROOT_DIR,
'pki_dir': os.path.join(salt.syspaths.CONFIG_DIR, 'pki', 'master'),
'key_cache': '',
'cachedir': os.path.join(salt.syspaths.CACHE_DIR, 'master'),
'file_roots': {
'base': [salt.syspaths.BASE_FILE_ROOTS_DIR,
salt.syspaths.SPM_FORMULA_PATH]
},
'master_roots': {
'base': [salt.syspaths.BASE_MASTER_ROOTS_DIR],
},
'pillar_roots': {
'base': [salt.syspaths.BASE_PILLAR_ROOTS_DIR,
salt.syspaths.SPM_PILLAR_PATH]
},
'on_demand_ext_pillar': ['libvirt', 'virtkey'],
'decrypt_pillar': [],
'decrypt_pillar_delimiter': ':',
'decrypt_pillar_default': 'gpg',
'decrypt_pillar_renderers': ['gpg'],
'thorium_interval': 0.5,
'thorium_roots': {
'base': [salt.syspaths.BASE_THORIUM_ROOTS_DIR],
},
'top_file_merging_strategy': 'merge',
'env_order': [],
'saltenv': None,
'lock_saltenv': False,
'pillarenv': None,
'default_top': 'base',
'file_client': 'local',
'local': True,
# Update intervals
'roots_update_interval': DEFAULT_INTERVAL,
'azurefs_update_interval': DEFAULT_INTERVAL,
'gitfs_update_interval': DEFAULT_INTERVAL,
'hgfs_update_interval': DEFAULT_INTERVAL,
'minionfs_update_interval': DEFAULT_INTERVAL,
's3fs_update_interval': DEFAULT_INTERVAL,
'svnfs_update_interval': DEFAULT_INTERVAL,
'git_pillar_base': 'master',
'git_pillar_branch': 'master',
'git_pillar_env': '',
'git_pillar_root': '',
'git_pillar_ssl_verify': True,
'git_pillar_global_lock': True,
'git_pillar_user': '',
'git_pillar_password': '',
'git_pillar_insecure_auth': False,
'git_pillar_privkey': '',
'git_pillar_pubkey': '',
'git_pillar_passphrase': '',
'git_pillar_refspecs': _DFLT_REFSPECS,
'git_pillar_includes': True,
'git_pillar_verify_config': True,
'gitfs_remotes': [],
'gitfs_mountpoint': '',
'gitfs_root': '',
'gitfs_base': 'master',
'gitfs_user': '',
'gitfs_password': '',
'gitfs_insecure_auth': False,
'gitfs_privkey': '',
'gitfs_pubkey': '',
'gitfs_passphrase': '',
'gitfs_env_whitelist': [],
'gitfs_env_blacklist': [],
'gitfs_saltenv_whitelist': [],
'gitfs_saltenv_blacklist': [],
'gitfs_global_lock': True,
'gitfs_ssl_verify': True,
'gitfs_saltenv': [],
'gitfs_ref_types': ['branch', 'tag', 'sha'],
'gitfs_refspecs': _DFLT_REFSPECS,
'gitfs_disable_saltenv_mapping': False,
'hgfs_remotes': [],
'hgfs_mountpoint': '',
'hgfs_root': '',
'hgfs_base': 'default',
'hgfs_branch_method': 'branches',
'hgfs_env_whitelist': [],
'hgfs_env_blacklist': [],
'hgfs_saltenv_whitelist': [],
'hgfs_saltenv_blacklist': [],
'show_timeout': True,
'show_jid': False,
'unique_jid': False,
'svnfs_remotes': [],
'svnfs_mountpoint': '',
'svnfs_root': '',
'svnfs_trunk': 'trunk',
'svnfs_branches': 'branches',
'svnfs_tags': 'tags',
'svnfs_env_whitelist': [],
'svnfs_env_blacklist': [],
'svnfs_saltenv_whitelist': [],
'svnfs_saltenv_blacklist': [],
'max_event_size': 1048576,
'master_stats': False,
'master_stats_event_iter': 60,
'minionfs_env': 'base',
'minionfs_mountpoint': '',
'minionfs_whitelist': [],
'minionfs_blacklist': [],
'ext_pillar': [],
'pillar_version': 2,
'pillar_opts': False,
'pillar_safe_render_error': True,
'pillar_source_merging_strategy': 'smart',
'pillar_merge_lists': False,
'pillar_includes_override_sls': False,
'pillar_cache': False,
'pillar_cache_ttl': 3600,
'pillar_cache_backend': 'disk',
'ping_on_rotate': False,
'peer': {},
'preserve_minion_cache': False,
'syndic_master': 'masterofmasters',
'syndic_failover': 'random',
'syndic_forward_all_events': False,
'syndic_log_file': os.path.join(salt.syspaths.LOGS_DIR, 'syndic'),
'syndic_pidfile': os.path.join(salt.syspaths.PIDFILE_DIR, 'salt-syndic.pid'),
'outputter_dirs': [],
'runner_dirs': [],
'utils_dirs': [],
'client_acl_verify': True,
'publisher_acl': {},
'publisher_acl_blacklist': {},
'sudo_acl': False,
'external_auth': {},
'token_expire': 43200,
'token_expire_user_override': False,
'permissive_acl': False,
'keep_acl_in_token': False,
'eauth_acl_module': '',
'eauth_tokens': 'localfs',
'extension_modules': os.path.join(salt.syspaths.CACHE_DIR, 'master', 'extmods'),
'module_dirs': [],
'file_recv': False,
'file_recv_max_size': 100,
'file_buffer_size': 1048576,
'file_ignore_regex': [],
'file_ignore_glob': [],
'fileserver_backend': ['roots'],
'fileserver_followsymlinks': True,
'fileserver_ignoresymlinks': False,
'fileserver_limit_traversal': False,
'fileserver_verify_config': True,
'max_open_files': 100000,
'hash_type': 'sha256',
'conf_file': os.path.join(salt.syspaths.CONFIG_DIR, 'master'),
'open_mode': False,
'auto_accept': False,
'renderer': 'yaml_jinja',
'renderer_whitelist': [],
'renderer_blacklist': [],
'failhard': False,
'state_top': 'top.sls',
'state_top_saltenv': None,
'master_tops': {},
'order_masters': False,
'job_cache': True,
'ext_job_cache': '',
'master_job_cache': 'local_cache',
'job_cache_store_endtime': False,
'minion_data_cache': True,
'enforce_mine_cache': False,
'ipc_mode': _DFLT_IPC_MODE,
'ipc_write_buffer': _DFLT_IPC_WBUFFER,
'ipv6': False,
'tcp_master_pub_port': 4512,
'tcp_master_pull_port': 4513,
'tcp_master_publish_pull': 4514,
'tcp_master_workers': 4515,
'log_file': os.path.join(salt.syspaths.LOGS_DIR, 'master'),
'log_level': 'warning',
'log_level_logfile': None,
'log_datefmt': _DFLT_LOG_DATEFMT,
'log_datefmt_logfile': _DFLT_LOG_DATEFMT_LOGFILE,
'log_fmt_console': _DFLT_LOG_FMT_CONSOLE,
'log_fmt_logfile': _DFLT_LOG_FMT_LOGFILE,
'log_granular_levels': {},
'log_rotate_max_bytes': 0,
'log_rotate_backup_count': 0,
'pidfile': os.path.join(salt.syspaths.PIDFILE_DIR, 'salt-master.pid'),
'publish_session': 86400,
'range_server': 'range:80',
'reactor': [],
'reactor_refresh_interval': 60,
'reactor_worker_threads': 10,
'reactor_worker_hwm': 10000,
'engines': [],
'event_return': '',
'event_return_queue': 0,
'event_return_whitelist': [],
'event_return_blacklist': [],
'event_match_type': 'startswith',
'runner_returns': True,
'serial': 'msgpack',
'test': False,
'state_verbose': True,
'state_output': 'full',
'state_output_diff': False,
'state_auto_order': True,
'state_events': False,
'state_aggregate': False,
'search': '',
'loop_interval': 60,
'nodegroups': {},
'ssh_list_nodegroups': {},
'ssh_use_home_key': False,
'cython_enable': False,
'enable_gpu_grains': False,
# XXX: Remove 'key_logfile' support in 2014.1.0
'key_logfile': os.path.join(salt.syspaths.LOGS_DIR, 'key'),
'verify_env': True,
'permissive_pki_access': False,
'key_pass': None,
'signing_key_pass': None,
'default_include': 'master.d/*.conf',
'winrepo_dir': os.path.join(salt.syspaths.BASE_FILE_ROOTS_DIR, 'win', 'repo'),
'winrepo_dir_ng': os.path.join(salt.syspaths.BASE_FILE_ROOTS_DIR, 'win', 'repo-ng'),
'winrepo_cachefile': 'winrepo.p',
'winrepo_remotes': ['https://github.com/saltstack/salt-winrepo.git'],
'winrepo_remotes_ng': ['https://github.com/saltstack/salt-winrepo-ng.git'],
'winrepo_branch': 'master',
'winrepo_ssl_verify': True,
'winrepo_user': '',
'winrepo_password': '',
'winrepo_insecure_auth': False,
'winrepo_privkey': '',
'winrepo_pubkey': '',
'winrepo_passphrase': '',
'winrepo_refspecs': _DFLT_REFSPECS,
'syndic_wait': 5,
'jinja_env': {},
'jinja_sls_env': {},
'jinja_lstrip_blocks': False,
'jinja_trim_blocks': False,
'tcp_keepalive': True,
'tcp_keepalive_idle': 300,
'tcp_keepalive_cnt': -1,
'tcp_keepalive_intvl': -1,
'sign_pub_messages': True,
'keysize': 2048,
'transport': 'zeromq',
'gather_job_timeout': 10,
'syndic_event_forward_timeout': 0.5,
'syndic_jid_forward_cache_hwm': 100,
'regen_thin': False,
'ssh_passwd': '',
'ssh_port': '22',
'ssh_sudo': False,
'ssh_sudo_user': '',
'ssh_timeout': 60,
'ssh_user': 'root',
'ssh_scan_ports': '22',
'ssh_scan_timeout': 0.01,
'ssh_identities_only': False,
'ssh_log_file': os.path.join(salt.syspaths.LOGS_DIR, 'ssh'),
'ssh_config_file': os.path.join(salt.syspaths.HOME_DIR, '.ssh', 'config'),
'master_floscript': os.path.join(FLO_DIR, 'master.flo'),
'worker_floscript': os.path.join(FLO_DIR, 'worker.flo'),
'maintenance_floscript': os.path.join(FLO_DIR, 'maint.flo'),
'ioflo_verbose': 0,
'ioflo_period': 0.01,
'ioflo_realtime': True,
'ioflo_console_logdir': '',
'raet_port': 4506,
'raet_alt_port': 4511,
'raet_mutable': False,
'raet_main': True,
'raet_clear_remotes': False,
'raet_clear_remote_masters': True,
'raet_road_bufcnt': 2,
'raet_lane_bufcnt': 100,
'cluster_mode': False,
'cluster_masters': [],
'sqlite_queue_dir': os.path.join(salt.syspaths.CACHE_DIR, 'master', 'queues'),
'queue_dirs': [],
'cli_summary': False,
'max_minions': 0,
'master_sign_key_name': 'master_sign',
'master_sign_pubkey': False,
'master_pubkey_signature': 'master_pubkey_signature',
'master_use_pubkey_signature': False,
'zmq_filtering': False,
'zmq_monitor': False,
'con_cache': False,
'rotate_aes_key': True,
'cache_sreqs': True,
'dummy_pub': False,
'http_connect_timeout': 20.0, # tornado default - 20 seconds
'http_request_timeout': 1 * 60 * 60.0, # 1 hour
'http_max_body': 100 * 1024 * 1024 * 1024, # 100GB
'python2_bin': 'python2',
'python3_bin': 'python3',
'cache': 'localfs',
'memcache_expire_seconds': 0,
'memcache_max_items': 1024,
'memcache_full_cleanup': False,
'memcache_debug': False,
'thin_extra_mods': '',
'min_extra_mods': '',
'ssl': None,
'extmod_whitelist': {},
'extmod_blacklist': {},
'clean_dynamic_modules': True,
'django_auth_path': '',
'django_auth_settings': '',
'allow_minion_key_revoke': True,
'salt_cp_chunk_size': 98304,
'require_minion_sign_messages': False,
'drop_messages_signature_fail': False,
'discovery': False,
'schedule': {},
'auth_events': True,
'minion_data_cache_events': True,
'enable_ssh_minions': False,
}
# ----- Salt Proxy Minion Configuration Defaults ----------------------------------->
# These are merged with DEFAULT_MINION_OPTS since many of them also apply here.
DEFAULT_PROXY_MINION_OPTS = {
'conf_file': os.path.join(salt.syspaths.CONFIG_DIR, 'proxy'),
'log_file': os.path.join(salt.syspaths.LOGS_DIR, 'proxy'),
'add_proxymodule_to_opts': False,
'proxy_merge_grains_in_module': True,
'extension_modules': os.path.join(salt.syspaths.CACHE_DIR, 'proxy', 'extmods'),
'append_minionid_config_dirs': ['cachedir', 'pidfile', 'default_include', 'extension_modules'],
'default_include': 'proxy.d/*.conf',
'proxy_merge_pillar_in_opts': False,
'proxy_deep_merge_pillar_in_opts': False,
'proxy_merge_pillar_in_opts_strategy': 'smart',
'proxy_mines_pillar': True,
# By default, proxies will preserve the connection.
# If this option is set to False,
# the connection with the remote dumb device
# is closed after each command request.
'proxy_always_alive': True,
'proxy_keep_alive': True, # by default will try to keep alive the connection
'proxy_keep_alive_interval': 1, # frequency of the proxy keepalive in minutes
'pki_dir': os.path.join(salt.syspaths.CONFIG_DIR, 'pki', 'proxy'),
'cachedir': os.path.join(salt.syspaths.CACHE_DIR, 'proxy'),
'sock_dir': os.path.join(salt.syspaths.SOCK_DIR, 'proxy'),
}
# ----- Salt Cloud Configuration Defaults ----------------------------------->
DEFAULT_CLOUD_OPTS = {
'verify_env': True,
'default_include': 'cloud.conf.d/*.conf',
# Global defaults
'ssh_auth': '',
'cachedir': os.path.join(salt.syspaths.CACHE_DIR, 'cloud'),
'keysize': 4096,
'os': '',
'script': 'bootstrap-salt',
'start_action': None,
'enable_hard_maps': False,
'delete_sshkeys': False,
# Custom deploy scripts
'deploy_scripts_search_path': 'cloud.deploy.d',
# Logging defaults
'log_file': os.path.join(salt.syspaths.LOGS_DIR, 'cloud'),
'log_level': 'warning',
'log_level_logfile': None,
'log_datefmt': _DFLT_LOG_DATEFMT,
'log_datefmt_logfile': _DFLT_LOG_DATEFMT_LOGFILE,
'log_fmt_console': _DFLT_LOG_FMT_CONSOLE,
'log_fmt_logfile': _DFLT_LOG_FMT_LOGFILE,
'log_granular_levels': {},
'log_rotate_max_bytes': 0,
'log_rotate_backup_count': 0,
'bootstrap_delay': None,
'cache': 'localfs',
}
DEFAULT_API_OPTS = {
# ----- Salt master settings overridden by Salt-API --------------------->
'api_pidfile': os.path.join(salt.syspaths.PIDFILE_DIR, 'salt-api.pid'),
'api_logfile': os.path.join(salt.syspaths.LOGS_DIR, 'api'),
'rest_timeout': 300,
# <---- Salt master settings overridden by Salt-API ----------------------
}
DEFAULT_SPM_OPTS = {
# ----- Salt master settings overridden by SPM --------------------->
'spm_conf_file': os.path.join(salt.syspaths.CONFIG_DIR, 'spm'),
'formula_path': '/srv/spm/salt',
'pillar_path': '/srv/spm/pillar',
'reactor_path': '/srv/spm/reactor',
'spm_logfile': os.path.join(salt.syspaths.LOGS_DIR, 'spm'),
'spm_default_include': 'spm.d/*.conf',
# spm_repos_config also includes a .d/ directory
'spm_repos_config': '/etc/salt/spm.repos',
'spm_cache_dir': os.path.join(salt.syspaths.CACHE_DIR, 'spm'),
'spm_build_dir': '/srv/spm_build',
'spm_build_exclude': ['CVS', '.hg', '.git', '.svn'],
'spm_db': os.path.join(salt.syspaths.CACHE_DIR, 'spm', 'packages.db'),
'cache': 'localfs',
'spm_repo_dups': 'ignore',
# If set, spm_node_type will be either master or minion, but they should
# NOT be a default
'spm_node_type': '',
'spm_share_dir': os.path.join(salt.syspaths.SHARE_DIR, 'spm'),
# <---- Salt master settings overridden by SPM ----------------------
}
VM_CONFIG_DEFAULTS = {
'default_include': 'cloud.profiles.d/*.conf',
}
PROVIDER_CONFIG_DEFAULTS = {
'default_include': 'cloud.providers.d/*.conf',
}
# <---- Salt Cloud Configuration Defaults ------------------------------------
def _validate_file_roots(file_roots):
'''
If the file_roots option has a key that is None then we will error out,
just replace it with an empty list
'''
if not isinstance(file_roots, dict):
log.warning('The file_roots parameter is not properly formatted,'
' using defaults')
return {'base': _expand_glob_path([salt.syspaths.BASE_FILE_ROOTS_DIR])}
for saltenv, dirs in six.iteritems(file_roots):
normalized_saltenv = six.text_type(saltenv)
if normalized_saltenv != saltenv:
file_roots[normalized_saltenv] = file_roots.pop(saltenv)
if not isinstance(dirs, (list, tuple)):
file_roots[normalized_saltenv] = []
file_roots[normalized_saltenv] = \
_expand_glob_path(file_roots[normalized_saltenv])
return file_roots
def _expand_glob_path(file_roots):
'''
Applies shell globbing to a set of directories and returns
the expanded paths
'''
unglobbed_path = []
for path in file_roots:
try:
if glob.has_magic(path):
unglobbed_path.extend(glob.glob(path))
else:
unglobbed_path.append(path)
except Exception:
unglobbed_path.append(path)
return unglobbed_path
def _validate_opts(opts):
'''
Check that all of the types of values passed into the config are
of the right types
'''
def format_multi_opt(valid_type):
try:
num_types = len(valid_type)
except TypeError:
# Bare type name won't have a length, return the name of the type
# passed.
return valid_type.__name__
else:
def get_types(types, type_tuple):
for item in type_tuple:
if isinstance(item, tuple):
get_types(types, item)
else:
try:
types.append(item.__name__)
except AttributeError:
log.warning(
'Unable to interpret type %s while validating '
'configuration', item
)
types = []
get_types(types, valid_type)
ret = ', '.join(types[:-1])
ret += ' or ' + types[-1]
return ret
errors = []
err = (
'Config option \'{0}\' with value {1} has an invalid type of {2}, a '
'{3} is required for this option'
)
for key, val in six.iteritems(opts):
if key in VALID_OPTS:
if val is None:
if VALID_OPTS[key] is None:
continue
else:
try:
if None in VALID_OPTS[key]:
continue
except TypeError:
# VALID_OPTS[key] is not iterable and not None
pass
if isinstance(val, VALID_OPTS[key]):
continue
# We don't know what data type sdb will return at run-time so we
# simply cannot check it for correctness here at start-time.
if isinstance(val, six.string_types) and val.startswith('sdb://'):
continue
if hasattr(VALID_OPTS[key], '__call__'):
try:
VALID_OPTS[key](val)
if isinstance(val, (list, dict)):
# We'll only get here if VALID_OPTS[key] is str or
# bool, and the passed value is a list/dict. Attempting
# to run int() or float() on a list/dict will raise an
# exception, but running str() or bool() on it will
# pass despite not being the correct type.
errors.append(
err.format(
key,
val,
type(val).__name__,
VALID_OPTS[key].__name__
)
)
except (TypeError, ValueError):
errors.append(
err.format(key,
val,
type(val).__name__,
VALID_OPTS[key].__name__)
)
continue
errors.append(
err.format(key,
val,
type(val).__name__,
format_multi_opt(VALID_OPTS[key]))
)
# Convert list to comma-delimited string for 'return' config option
if isinstance(opts.get('return'), list):
opts['return'] = ','.join(opts['return'])
# RAET on Windows uses 'win32file.CreateMailslot()' for IPC. Due to this,
# sock_dirs must start with '\\.\mailslot\' and not contain any colons.
# We don't expect the user to know this, so we will fix up their path for
# them if it isn't compliant.
if (salt.utils.platform.is_windows() and opts.get('transport') == 'raet' and
'sock_dir' in opts and
not opts['sock_dir'].startswith('\\\\.\\mailslot\\')):
opts['sock_dir'] = (
'\\\\.\\mailslot\\' + opts['sock_dir'].replace(':', ''))
for error in errors:
log.warning(error)
if errors:
return False
return True
def _validate_ssh_minion_opts(opts):
'''
Ensure we're not using any invalid ssh_minion_opts. We want to make sure
that the ssh_minion_opts does not override any pillar or fileserver options
inherited from the master config. To add other items, modify the if
statement in the for loop below.
'''
ssh_minion_opts = opts.get('ssh_minion_opts', {})
if not isinstance(ssh_minion_opts, dict):
log.error('Invalidly-formatted ssh_minion_opts')
opts.pop('ssh_minion_opts')
for opt_name in list(ssh_minion_opts):
if re.match('^[a-z0-9]+fs_', opt_name, flags=re.IGNORECASE) \
or 'pillar' in opt_name \
or opt_name in ('fileserver_backend',):
log.warning(
'\'%s\' is not a valid ssh_minion_opts parameter, ignoring',
opt_name
)
ssh_minion_opts.pop(opt_name)
def _append_domain(opts):
'''
Append a domain to the existing id if it doesn't already exist
'''
# Domain already exists
if opts['id'].endswith(opts['append_domain']):
return opts['id']
# Trailing dot should mean an FQDN that is terminated, leave it alone.
if opts['id'].endswith('.'):
return opts['id']
return '{0[id]}.{0[append_domain]}'.format(opts)
def _read_conf_file(path):
'''
Read in a config file from a given path and process it into a dictionary
'''
log.debug('Reading configuration from %s', path)
with salt.utils.files.fopen(path, 'r') as conf_file:
try:
conf_opts = salt.utils.yaml.safe_load(conf_file) or {}
except salt.utils.yaml.YAMLError as err:
message = 'Error parsing configuration file: {0} - {1}'.format(path, err)
log.error(message)
raise salt.exceptions.SaltConfigurationError(message)
# only interpret documents as a valid conf, not things like strings,
# which might have been caused by invalid yaml syntax
if not isinstance(conf_opts, dict):
message = 'Error parsing configuration file: {0} - conf ' \
'should be a document, not {1}.'.format(path, type(conf_opts))
log.error(message)
raise salt.exceptions.SaltConfigurationError(message)
# allow using numeric ids: convert int to string
if 'id' in conf_opts:
if not isinstance(conf_opts['id'], six.string_types):
conf_opts['id'] = six.text_type(conf_opts['id'])
else:
conf_opts['id'] = sdecode(conf_opts['id'])
return conf_opts
def _absolute_path(path, relative_to=None):
'''
Return an absolute path. In case ``relative_to`` is passed and ``path`` is
not an absolute path, we try to prepend ``relative_to`` to ``path``and if
that path exists, return that one
'''
if path and os.path.isabs(path):
return path
if path and relative_to is not None:
_abspath = os.path.join(relative_to, path)
if os.path.isfile(_abspath):
log.debug(
'Relative path \'%s\' converted to existing absolute path '
'\'%s\'', path, _abspath
)
return _abspath
return path
def load_config(path, env_var, default_path=None, exit_on_config_errors=True):
'''
Returns configuration dict from parsing either the file described by
``path`` or the environment variable described by ``env_var`` as YAML.
'''
if path is None:
# When the passed path is None, we just want the configuration
# defaults, not actually loading the whole configuration.
return {}
if default_path is None:
# This is most likely not being used from salt, i.e., could be salt-cloud
# or salt-api which have not yet migrated to the new default_path
# argument. Let's issue a warning message that the environ vars won't
# work.
import inspect
previous_frame = inspect.getframeinfo(inspect.currentframe().f_back)
log.warning(
"The function '%s()' defined in '%s' is not yet using the "
"new 'default_path' argument to `salt.config.load_config()`. "
"As such, the '%s' environment variable will be ignored",
previous_frame.function, previous_frame.filename, env_var
)
# In this case, maintain old behavior
default_path = DEFAULT_MASTER_OPTS['conf_file']
# Default to the environment variable path, if it exists
env_path = os.environ.get(env_var, path)
if not env_path or not os.path.isfile(env_path):
env_path = path
# If non-default path from `-c`, use that over the env variable
if path != default_path:
env_path = path
path = env_path
# If the configuration file is missing, attempt to copy the template,
# after removing the first header line.
if not os.path.isfile(path):
template = '{0}.template'.format(path)
if os.path.isfile(template):
log.debug('Writing %s based on %s', path, template)
with salt.utils.files.fopen(path, 'w') as out:
with salt.utils.files.fopen(template, 'r') as ifile:
ifile.readline() # skip first line
out.write(ifile.read())
opts = {}
if salt.utils.validate.path.is_readable(path):
try:
opts = _read_conf_file(path)
opts['conf_file'] = path
except salt.exceptions.SaltConfigurationError as error:
log.error(error)
if exit_on_config_errors:
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
else:
log.debug('Missing configuration file: %s', path)
return opts
def include_config(include, orig_path, verbose, exit_on_config_errors=False):
'''
Parses extra configuration file(s) specified in an include list in the
main config file.
'''
# Protect against empty option
if not include:
return {}
if orig_path is None:
# When the passed path is None, we just want the configuration
# defaults, not actually loading the whole configuration.
return {}
if isinstance(include, six.string_types):
include = [include]
configuration = {}
for path in include:
# Allow for includes like ~/foo
path = os.path.expanduser(path)
if not os.path.isabs(path):
path = os.path.join(os.path.dirname(orig_path), path)
# Catch situation where user typos path in configuration; also warns
# for empty include directory (which might be by design)
if len(glob.glob(path)) == 0:
if verbose:
log.warning(
'Warning parsing configuration file: "include" path/glob '
"'%s' matches no files", path
)
for fn_ in sorted(glob.glob(path)):
log.debug('Including configuration from \'%s\'', fn_)
try:
opts = _read_conf_file(fn_)
except salt.exceptions.SaltConfigurationError as error:
log.error(error)
if exit_on_config_errors:
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
else:
# Initialize default config if we wish to skip config errors
opts = {}
schedule = opts.get('schedule', {})
if schedule and 'schedule' in configuration:
configuration['schedule'].update(schedule)
include = opts.get('include', [])
if include:
opts.update(include_config(include, fn_, verbose))
salt.utils.dictupdate.update(configuration, opts, True, True)
return configuration
def prepend_root_dir(opts, path_options):
'''
Prepends the options that represent filesystem paths with value of the
'root_dir' option.
'''
root_dir = os.path.abspath(opts['root_dir'])
def_root_dir = salt.syspaths.ROOT_DIR.rstrip(os.sep)
for path_option in path_options:
if path_option in opts:
path = opts[path_option]
tmp_path_def_root_dir = None
tmp_path_root_dir = None
# When running testsuite, salt.syspaths.ROOT_DIR is often empty
if path == def_root_dir or path.startswith(def_root_dir + os.sep):
# Remove the default root dir prefix
tmp_path_def_root_dir = path[len(def_root_dir):]
if root_dir and (path == root_dir or
path.startswith(root_dir + os.sep)):
# Remove the root dir prefix
tmp_path_root_dir = path[len(root_dir):]
if tmp_path_def_root_dir and not tmp_path_root_dir:
# Just the default root dir matched
path = tmp_path_def_root_dir
elif tmp_path_root_dir and not tmp_path_def_root_dir:
# Just the root dir matched
path = tmp_path_root_dir
elif tmp_path_def_root_dir and tmp_path_root_dir:
# In this case both the default root dir and the override root
# dir matched; this means that either
# def_root_dir is a substring of root_dir or vice versa
# We must choose the most specific path
if def_root_dir in root_dir:
path = tmp_path_root_dir
else:
path = tmp_path_def_root_dir
elif salt.utils.platform.is_windows() and not os.path.splitdrive(path)[0]:
# In windows, os.path.isabs resolves '/' to 'C:\\' or whatever
# the root drive is. This elif prevents the next from being
# hit, so that the root_dir is prefixed in cases where the
# drive is not prefixed on a config option
pass
elif os.path.isabs(path):
# Absolute path (not default or overridden root_dir)
# No prepending required
continue
# Prepending the root dir
opts[path_option] = salt.utils.path.join(root_dir, path)
def insert_system_path(opts, paths):
'''
Inserts path into python path taking into consideration 'root_dir' option.
'''
if isinstance(paths, six.string_types):
paths = [paths]
for path in paths:
path_options = {'path': path, 'root_dir': opts['root_dir']}
prepend_root_dir(path_options, path_options)
if (os.path.isdir(path_options['path'])
and path_options['path'] not in sys.path):
sys.path.insert(0, path_options['path'])
def minion_config(path,
env_var='SALT_MINION_CONFIG',
defaults=None,
cache_minion_id=False,
ignore_config_errors=True,
minion_id=None,
role='minion'):
'''
Reads in the minion configuration file and sets up special options
This is useful for Minion-side operations, such as the
:py:class:`~salt.client.Caller` class, and manually running the loader
interface.
.. code-block:: python
import salt.config
minion_opts = salt.config.minion_config('/etc/salt/minion')
'''
if defaults is None:
defaults = DEFAULT_MINION_OPTS.copy()
if not os.environ.get(env_var, None):
# No valid setting was given using the configuration variable.
# Lets see is SALT_CONFIG_DIR is of any use
salt_config_dir = os.environ.get('SALT_CONFIG_DIR', None)
if salt_config_dir:
env_config_file_path = os.path.join(salt_config_dir, 'minion')
if salt_config_dir and os.path.isfile(env_config_file_path):
# We can get a configuration file using SALT_CONFIG_DIR, let's
# update the environment with this information
os.environ[env_var] = env_config_file_path
overrides = load_config(path, env_var, DEFAULT_MINION_OPTS['conf_file'])
default_include = overrides.get('default_include',
defaults['default_include'])
include = overrides.get('include', [])
overrides.update(include_config(default_include, path, verbose=False,
exit_on_config_errors=not ignore_config_errors))
overrides.update(include_config(include, path, verbose=True,
exit_on_config_errors=not ignore_config_errors))
opts = apply_minion_config(overrides, defaults,
cache_minion_id=cache_minion_id,
minion_id=minion_id)
opts['__role'] = role
apply_sdb(opts)
_validate_opts(opts)
return opts
def proxy_config(path,
env_var='SALT_PROXY_CONFIG',
defaults=None,
cache_minion_id=False,
ignore_config_errors=True,
minion_id=None):
'''
Reads in the proxy minion configuration file and sets up special options
This is useful for Minion-side operations, such as the
:py:class:`~salt.client.Caller` class, and manually running the loader
interface.
.. code-block:: python
import salt.config
proxy_opts = salt.config.proxy_config('/etc/salt/proxy')
'''
if defaults is None:
defaults = DEFAULT_MINION_OPTS.copy()
defaults.update(DEFAULT_PROXY_MINION_OPTS)
if not os.environ.get(env_var, None):
# No valid setting was given using the configuration variable.
# Lets see is SALT_CONFIG_DIR is of any use
salt_config_dir = os.environ.get('SALT_CONFIG_DIR', None)
if salt_config_dir:
env_config_file_path = os.path.join(salt_config_dir, 'proxy')
if salt_config_dir and os.path.isfile(env_config_file_path):
# We can get a configuration file using SALT_CONFIG_DIR, let's
# update the environment with this information
os.environ[env_var] = env_config_file_path
overrides = load_config(path, env_var, DEFAULT_PROXY_MINION_OPTS['conf_file'])
default_include = overrides.get('default_include',
defaults['default_include'])
include = overrides.get('include', [])
overrides.update(include_config(default_include, path, verbose=False,
exit_on_config_errors=not ignore_config_errors))
overrides.update(include_config(include, path, verbose=True,
exit_on_config_errors=not ignore_config_errors))
opts = apply_minion_config(overrides, defaults,
cache_minion_id=cache_minion_id,
minion_id=minion_id)
apply_sdb(opts)
_validate_opts(opts)
return opts
def syndic_config(master_config_path,
minion_config_path,
master_env_var='SALT_MASTER_CONFIG',
minion_env_var='SALT_MINION_CONFIG',
minion_defaults=None,
master_defaults=None):
if minion_defaults is None:
minion_defaults = DEFAULT_MINION_OPTS
if master_defaults is None:
master_defaults = DEFAULT_MASTER_OPTS
opts = {}
master_opts = master_config(
master_config_path, master_env_var, master_defaults
)
minion_opts = minion_config(
minion_config_path, minion_env_var, minion_defaults
)
opts['_minion_conf_file'] = master_opts['conf_file']
opts['_master_conf_file'] = minion_opts['conf_file']
opts.update(master_opts)
opts.update(minion_opts)
syndic_opts = {
'__role': 'syndic',
'root_dir': opts.get('root_dir', salt.syspaths.ROOT_DIR),
'pidfile': opts.get('syndic_pidfile', 'salt-syndic.pid'),
'log_file': opts.get('syndic_log_file', 'salt-syndic.log'),
'log_level': master_opts['log_level'],
'id': minion_opts['id'],
'pki_dir': minion_opts['pki_dir'],
'master': opts['syndic_master'],
'interface': master_opts['interface'],
'master_port': int(
opts.get(
# The user has explicitly defined the syndic master port
'syndic_master_port',
opts.get(
# No syndic_master_port, grab master_port from opts
'master_port',
# No master_opts, grab from the provided minion defaults
minion_defaults.get(
'master_port',
# Not on the provided minion defaults, load from the
# static minion defaults
DEFAULT_MINION_OPTS['master_port']
)
)
)
),
'user': opts.get('syndic_user', opts['user']),
'sock_dir': os.path.join(
opts['cachedir'], opts.get('syndic_sock_dir', opts['sock_dir'])
),
'sock_pool_size': master_opts['sock_pool_size'],
'cachedir': master_opts['cachedir'],
}
opts.update(syndic_opts)
# Prepend root_dir to other paths
prepend_root_dirs = [
'pki_dir', 'cachedir', 'pidfile', 'sock_dir', 'extension_modules',
'autosign_file', 'autoreject_file', 'token_dir', 'autosign_grains_dir'
]
for config_key in ('log_file', 'key_logfile', 'syndic_log_file'):
# If this is not a URI and instead a local path
if urlparse(opts.get(config_key, '')).scheme == '':
prepend_root_dirs.append(config_key)
prepend_root_dir(opts, prepend_root_dirs)
return opts
def apply_sdb(opts, sdb_opts=None):
'''
Recurse for sdb:// links for opts
'''
# Late load of SDB to keep CLI light
import salt.utils.sdb
if sdb_opts is None:
sdb_opts = opts
if isinstance(sdb_opts, six.string_types) and sdb_opts.startswith('sdb://'):
return salt.utils.sdb.sdb_get(sdb_opts, opts)
elif isinstance(sdb_opts, dict):
for key, value in six.iteritems(sdb_opts):
if value is None:
continue
sdb_opts[key] = apply_sdb(opts, value)
elif isinstance(sdb_opts, list):
for key, value in enumerate(sdb_opts):
if value is None:
continue
sdb_opts[key] = apply_sdb(opts, value)
return sdb_opts
# ----- Salt Cloud Configuration Functions ---------------------------------->
def cloud_config(path, env_var='SALT_CLOUD_CONFIG', defaults=None,
master_config_path=None, master_config=None,
providers_config_path=None, providers_config=None,
profiles_config_path=None, profiles_config=None):
'''
Read in the Salt Cloud config and return the dict
'''
if path:
config_dir = os.path.dirname(path)
else:
config_dir = salt.syspaths.CONFIG_DIR
# Load the cloud configuration
overrides = load_config(
path,
env_var,
os.path.join(config_dir, 'cloud')
)
if defaults is None:
defaults = DEFAULT_CLOUD_OPTS.copy()
# Set defaults early to override Salt Master's default config values later
defaults.update(overrides)
overrides = defaults
# Load cloud configuration from any default or provided includes
overrides.update(
salt.config.include_config(overrides['default_include'], path, verbose=False)
)
include = overrides.get('include', [])
overrides.update(
salt.config.include_config(include, path, verbose=True)
)
# The includes have been evaluated, let's see if master, providers and
# profiles configuration settings have been included and if not, set the
# default value
if 'master_config' in overrides and master_config_path is None:
# The configuration setting is being specified in the main cloud
# configuration file
master_config_path = overrides['master_config']
elif 'master_config' not in overrides and not master_config \
and not master_config_path:
# The configuration setting is not being provided in the main cloud
# configuration file, and
master_config_path = os.path.join(config_dir, 'master')
# Convert relative to absolute paths if necessary
master_config_path = _absolute_path(master_config_path, config_dir)
if 'providers_config' in overrides and providers_config_path is None:
# The configuration setting is being specified in the main cloud
# configuration file
providers_config_path = overrides['providers_config']
elif 'providers_config' not in overrides and not providers_config \
and not providers_config_path:
providers_config_path = os.path.join(config_dir, 'cloud.providers')
# Convert relative to absolute paths if necessary
providers_config_path = _absolute_path(providers_config_path, config_dir)
if 'profiles_config' in overrides and profiles_config_path is None:
# The configuration setting is being specified in the main cloud
# configuration file
profiles_config_path = overrides['profiles_config']
elif 'profiles_config' not in overrides and not profiles_config \
and not profiles_config_path:
profiles_config_path = os.path.join(config_dir, 'cloud.profiles')
# Convert relative to absolute paths if necessary
profiles_config_path = _absolute_path(profiles_config_path, config_dir)
# Prepare the deploy scripts search path
deploy_scripts_search_path = overrides.get(
'deploy_scripts_search_path',
defaults.get('deploy_scripts_search_path', 'cloud.deploy.d')
)
if isinstance(deploy_scripts_search_path, six.string_types):
deploy_scripts_search_path = [deploy_scripts_search_path]
# Check the provided deploy scripts search path removing any non existing
# entries.
for idx, entry in enumerate(deploy_scripts_search_path[:]):
if not os.path.isabs(entry):
# Let's try adding the provided path's directory name turns the
# entry into a proper directory
entry = os.path.join(os.path.dirname(path), entry)
if os.path.isdir(entry):
# Path exists, let's update the entry (its path might have been
# made absolute)
deploy_scripts_search_path[idx] = entry
continue
# It's not a directory? Remove it from the search path
deploy_scripts_search_path.pop(idx)
# Add the built-in scripts directory to the search path (last resort)
deploy_scripts_search_path.append(
os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'..',
'cloud',
'deploy'
)
)
)
# Let's make the search path a tuple and add it to the overrides.
overrides.update(
deploy_scripts_search_path=tuple(deploy_scripts_search_path)
)
# Grab data from the 4 sources
# 1st - Master config
if master_config_path is not None and master_config is not None:
raise salt.exceptions.SaltCloudConfigError(
'Only pass `master_config` or `master_config_path`, not both.'
)
elif master_config_path is None and master_config is None:
master_config = salt.config.master_config(
overrides.get(
# use the value from the cloud config file
'master_config',
# if not found, use the default path
os.path.join(salt.syspaths.CONFIG_DIR, 'master')
)
)
elif master_config_path is not None and master_config is None:
master_config = salt.config.master_config(master_config_path)
# cloud config has a separate cachedir
del master_config['cachedir']
# 2nd - salt-cloud configuration which was loaded before so we could
# extract the master configuration file if needed.
# Override master configuration with the salt cloud(current overrides)
master_config.update(overrides)
# We now set the overridden master_config as the overrides
overrides = master_config
if providers_config_path is not None and providers_config is not None:
raise salt.exceptions.SaltCloudConfigError(
'Only pass `providers_config` or `providers_config_path`, '
'not both.'
)
elif providers_config_path is None and providers_config is None:
providers_config_path = overrides.get(
# use the value from the cloud config file
'providers_config',
# if not found, use the default path
os.path.join(salt.syspaths.CONFIG_DIR, 'cloud.providers')
)
if profiles_config_path is not None and profiles_config is not None:
raise salt.exceptions.SaltCloudConfigError(
'Only pass `profiles_config` or `profiles_config_path`, not both.'
)
elif profiles_config_path is None and profiles_config is None:
profiles_config_path = overrides.get(
# use the value from the cloud config file
'profiles_config',
# if not found, use the default path
os.path.join(salt.syspaths.CONFIG_DIR, 'cloud.profiles')
)
# Apply the salt-cloud configuration
opts = apply_cloud_config(overrides, defaults)
# 3rd - Include Cloud Providers
if 'providers' in opts:
if providers_config is not None:
raise salt.exceptions.SaltCloudConfigError(
'Do not mix the old cloud providers configuration with '
'the passing a pre-configured providers configuration '
'dictionary.'
)
if providers_config_path is not None:
providers_confd = os.path.join(
os.path.dirname(providers_config_path),
'cloud.providers.d', '*'
)
if (os.path.isfile(providers_config_path) or
glob.glob(providers_confd)):
raise salt.exceptions.SaltCloudConfigError(
'Do not mix the old cloud providers configuration with '
'the new one. The providers configuration should now go '
'in the file `{0}` or a separate `*.conf` file within '
'`cloud.providers.d/` which is relative to `{0}`.'.format(
os.path.join(salt.syspaths.CONFIG_DIR, 'cloud.providers')
)
)
# No exception was raised? It's the old configuration alone
providers_config = opts['providers']
elif providers_config_path is not None:
# Load from configuration file, even if that files does not exist since
# it will be populated with defaults.
providers_config = cloud_providers_config(providers_config_path)
# Let's assign back the computed providers configuration
opts['providers'] = providers_config
# 4th - Include VM profiles config
if profiles_config is None:
# Load profiles configuration from the provided file
profiles_config = vm_profiles_config(profiles_config_path,
providers_config)
opts['profiles'] = profiles_config
# recurse opts for sdb configs
apply_sdb(opts)
# prepend root_dir
prepend_root_dirs = ['cachedir']
prepend_root_dir(opts, prepend_root_dirs)
# Return the final options
return opts
def apply_cloud_config(overrides, defaults=None):
'''
Return a cloud config
'''
if defaults is None:
defaults = DEFAULT_CLOUD_OPTS
config = defaults.copy()
if overrides:
config.update(overrides)
# If the user defined providers in salt cloud's main configuration file, we
# need to take care for proper and expected format.
if 'providers' in config:
# Keep a copy of the defined providers
providers = config['providers'].copy()
# Reset the providers dictionary
config['providers'] = {}
# Populate the providers dictionary
for alias, details in six.iteritems(providers):
if isinstance(details, list):
for detail in details:
if 'driver' not in detail:
raise salt.exceptions.SaltCloudConfigError(
'The cloud provider alias \'{0}\' has an entry '
'missing the required setting of \'driver\'.'.format(
alias
)
)
driver = detail['driver']
if ':' in driver:
# Weird, but...
alias, driver = driver.split(':')
if alias not in config['providers']:
config['providers'][alias] = {}
detail['provider'] = '{0}:{1}'.format(alias, driver)
config['providers'][alias][driver] = detail
elif isinstance(details, dict):
if 'driver' not in details:
raise salt.exceptions.SaltCloudConfigError(
'The cloud provider alias \'{0}\' has an entry '
'missing the required setting of \'driver\''.format(
alias
)
)
driver = details['driver']
if ':' in driver:
# Weird, but...
alias, driver = driver.split(':')
if alias not in config['providers']:
config['providers'][alias] = {}
details['provider'] = '{0}:{1}'.format(alias, driver)
config['providers'][alias][driver] = details
# Migrate old configuration
config = old_to_new(config)
return config
def old_to_new(opts):
providers = (
'AWS',
'CLOUDSTACK',
'DIGITALOCEAN',
'EC2',
'GOGRID',
'IBMSCE',
'JOYENT',
'LINODE',
'OPENSTACK',
'PARALLELS'
'RACKSPACE',
'SALTIFY'
)
for provider in providers:
provider_config = {}
for opt, val in opts.items():
if provider in opt:
value = val
name = opt.split('.', 1)[1]
provider_config[name] = value
lprovider = provider.lower()
if provider_config:
provider_config['provider'] = lprovider
opts.setdefault('providers', {})
# provider alias
opts['providers'][lprovider] = {}
# provider alias, provider driver
opts['providers'][lprovider][lprovider] = provider_config
return opts
def vm_profiles_config(path,
providers,
env_var='SALT_CLOUDVM_CONFIG',
defaults=None):
'''
Read in the salt cloud VM config file
'''
if defaults is None:
defaults = VM_CONFIG_DEFAULTS
overrides = salt.config.load_config(
path, env_var, os.path.join(salt.syspaths.CONFIG_DIR, 'cloud.profiles')
)
default_include = overrides.get(
'default_include', defaults['default_include']
)
include = overrides.get('include', [])
overrides.update(
salt.config.include_config(default_include, path, verbose=False)
)
overrides.update(
salt.config.include_config(include, path, verbose=True)
)
return apply_vm_profiles_config(providers, overrides, defaults)
def apply_vm_profiles_config(providers, overrides, defaults=None):
if defaults is None:
defaults = VM_CONFIG_DEFAULTS
config = defaults.copy()
if overrides:
config.update(overrides)
vms = {}
for key, val in six.iteritems(config):
if key in ('conf_file', 'include', 'default_include', 'user'):
continue
if not isinstance(val, dict):
raise salt.exceptions.SaltCloudConfigError(
'The VM profiles configuration found in \'{0[conf_file]}\' is '
'not in the proper format'.format(config)
)
val['profile'] = key
vms[key] = val
# Is any VM profile extending data!?
for profile, details in six.iteritems(vms.copy()):
if 'extends' not in details:
if ':' in details['provider']:
alias, driver = details['provider'].split(':')
if alias not in providers or driver not in providers[alias]:
log.trace(
'The profile \'%s\' is defining \'%s\' '
'as the provider. Since there is no valid '
'configuration for that provider, the profile will be '
'removed from the available listing',
profile, details['provider']
)
vms.pop(profile)
continue
if 'profiles' not in providers[alias][driver]:
providers[alias][driver]['profiles'] = {}
providers[alias][driver]['profiles'][profile] = details
if details['provider'] not in providers:
log.trace(
'The profile \'%s\' is defining \'%s\' as the '
'provider. Since there is no valid configuration for '
'that provider, the profile will be removed from the '
'available listing', profile, details['provider']
)
vms.pop(profile)
continue
driver = next(iter(list(providers[details['provider']].keys())))
providers[details['provider']][driver].setdefault(
'profiles', {}).update({profile: details})
details['provider'] = '{0[provider]}:{1}'.format(details, driver)
vms[profile] = details
continue
extends = details.pop('extends')
if extends not in vms:
log.error(
'The \'%s\' profile is trying to extend data from \'%s\' '
'though \'%s\' is not defined in the salt profiles loaded '
'data. Not extending and removing from listing!',
profile, extends, extends
)
vms.pop(profile)
continue
extended = deepcopy(vms.get(extends))
extended.pop('profile')
# Merge extended configuration with base profile
extended = salt.utils.dictupdate.update(extended, details)
if ':' not in extended['provider']:
if extended['provider'] not in providers:
log.trace(
'The profile \'%s\' is defining \'%s\' as the '
'provider. Since there is no valid configuration for '
'that provider, the profile will be removed from the '
'available listing', profile, extended['provider']
)
vms.pop(profile)
continue
driver = next(iter(list(providers[extended['provider']].keys())))
providers[extended['provider']][driver].setdefault(
'profiles', {}).update({profile: extended})
extended['provider'] = '{0[provider]}:{1}'.format(extended, driver)
else:
alias, driver = extended['provider'].split(':')
if alias not in providers or driver not in providers[alias]:
log.trace(
'The profile \'%s\' is defining \'%s\' as '
'the provider. Since there is no valid configuration '
'for that provider, the profile will be removed from '
'the available listing', profile, extended['provider']
)
vms.pop(profile)
continue
providers[alias][driver].setdefault('profiles', {}).update(
{profile: extended}
)
# Update the profile's entry with the extended data
vms[profile] = extended
return vms
def cloud_providers_config(path,
env_var='SALT_CLOUD_PROVIDERS_CONFIG',
defaults=None):
'''
Read in the salt cloud providers configuration file
'''
if defaults is None:
defaults = PROVIDER_CONFIG_DEFAULTS
overrides = salt.config.load_config(
path, env_var, os.path.join(salt.syspaths.CONFIG_DIR, 'cloud.providers')
)
default_include = overrides.get(
'default_include', defaults['default_include']
)
include = overrides.get('include', [])
overrides.update(
salt.config.include_config(default_include, path, verbose=False)
)
overrides.update(
salt.config.include_config(include, path, verbose=True)
)
return apply_cloud_providers_config(overrides, defaults)
def apply_cloud_providers_config(overrides, defaults=None):
'''
Apply the loaded cloud providers configuration.
'''
if defaults is None:
defaults = PROVIDER_CONFIG_DEFAULTS
config = defaults.copy()
if overrides:
config.update(overrides)
# Is the user still using the old format in the new configuration file?!
for name, settings in six.iteritems(config.copy()):
if '.' in name:
log.warning(
'Please switch to the new providers configuration syntax'
)
# Let's help out and migrate the data
config = old_to_new(config)
# old_to_new will migrate the old data into the 'providers' key of
# the config dictionary. Let's map it correctly
for prov_name, prov_settings in six.iteritems(config.pop('providers')):
config[prov_name] = prov_settings
break
providers = {}
ext_count = 0
for key, val in six.iteritems(config):
if key in ('conf_file', 'include', 'default_include', 'user'):
continue
if not isinstance(val, (list, tuple)):
val = [val]
else:
# Need to check for duplicate cloud provider entries per "alias" or
# we won't be able to properly reference it.
handled_providers = set()
for details in val:
if 'driver' not in details:
if 'extends' not in details:
log.error(
'Please check your cloud providers configuration. '
'There\'s no \'driver\' nor \'extends\' definition '
'referenced.'
)
continue
if details['driver'] in handled_providers:
log.error(
'You can only have one entry per cloud provider. For '
'example, if you have a cloud provider configuration '
'section named, \'production\', you can only have a '
'single entry for EC2, Joyent, Openstack, and so '
'forth.'
)
raise salt.exceptions.SaltCloudConfigError(
'The cloud provider alias \'{0}\' has multiple entries '
'for the \'{1[driver]}\' driver.'.format(key, details)
)
handled_providers.add(details['driver'])
for entry in val:
if 'driver' not in entry:
entry['driver'] = '-only-extendable-{0}'.format(ext_count)
ext_count += 1
if key not in providers:
providers[key] = {}
provider = entry['driver']
if provider not in providers[key]:
providers[key][provider] = entry
# Is any provider extending data!?
while True:
keep_looping = False
for provider_alias, entries in six.iteritems(providers.copy()):
for driver, details in six.iteritems(entries):
# Set a holder for the defined profiles
providers[provider_alias][driver]['profiles'] = {}
if 'extends' not in details:
continue
extends = details.pop('extends')
if ':' in extends:
alias, provider = extends.split(':')
if alias not in providers:
raise salt.exceptions.SaltCloudConfigError(
'The \'{0}\' cloud provider entry in \'{1}\' is '
'trying to extend data from \'{2}\' though '
'\'{2}\' is not defined in the salt cloud '
'providers loaded data.'.format(
details['driver'],
provider_alias,
alias
)
)
if provider not in providers.get(alias):
raise salt.exceptions.SaltCloudConfigError(
'The \'{0}\' cloud provider entry in \'{1}\' is '
'trying to extend data from \'{2}:{3}\' though '
'\'{3}\' is not defined in \'{1}\''.format(
details['driver'],
provider_alias,
alias,
provider
)
)
details['extends'] = '{0}:{1}'.format(alias, provider)
# change provider details '-only-extendable-' to extended
# provider name
details['driver'] = provider
elif providers.get(extends):
raise salt.exceptions.SaltCloudConfigError(
'The \'{0}\' cloud provider entry in \'{1}\' is '
'trying to extend from \'{2}\' and no provider was '
'specified. Not extending!'.format(
details['driver'], provider_alias, extends
)
)
elif extends not in providers:
raise salt.exceptions.SaltCloudConfigError(
'The \'{0}\' cloud provider entry in \'{1}\' is '
'trying to extend data from \'{2}\' though \'{2}\' '
'is not defined in the salt cloud providers loaded '
'data.'.format(
details['driver'], provider_alias, extends
)
)
else:
if driver in providers.get(extends):
details['extends'] = '{0}:{1}'.format(extends, driver)
elif '-only-extendable-' in providers.get(extends):
details['extends'] = '{0}:{1}'.format(
extends, '-only-extendable-{0}'.format(ext_count)
)
else:
# We're still not aware of what we're trying to extend
# from. Let's try on next iteration
details['extends'] = extends
keep_looping = True
if not keep_looping:
break
while True:
# Merge provided extends
keep_looping = False
for alias, entries in six.iteritems(providers.copy()):
for driver, details in six.iteritems(entries):
if 'extends' not in details:
# Extends resolved or non existing, continue!
continue
if 'extends' in details['extends']:
# Since there's a nested extends, resolve this one in the
# next iteration
keep_looping = True
continue
# Let's get a reference to what we're supposed to extend
extends = details.pop('extends')
# Split the setting in (alias, driver)
ext_alias, ext_driver = extends.split(':')
# Grab a copy of what should be extended
extended = providers.get(ext_alias).get(ext_driver).copy()
# Merge the data to extend with the details
extended = salt.utils.dictupdate.update(extended, details)
# Update the providers dictionary with the merged data
providers[alias][driver] = extended
# Update name of the driver, now that it's populated with extended information
if driver.startswith('-only-extendable-'):
providers[alias][ext_driver] = providers[alias][driver]
# Delete driver with old name to maintain dictionary size
del providers[alias][driver]
if not keep_looping:
break
# Now clean up any providers entry that was just used to be a data tree to
# extend from
for provider_alias, entries in six.iteritems(providers.copy()):
for driver, details in six.iteritems(entries.copy()):
if not driver.startswith('-only-extendable-'):
continue
log.info(
"There's at least one cloud driver under the '%s' "
'cloud provider alias which does not have the required '
"'driver' setting. Removing it from the available "
'providers listing.', provider_alias
)
providers[provider_alias].pop(driver)
if not providers[provider_alias]:
providers.pop(provider_alias)
return providers
def get_cloud_config_value(name, vm_, opts, default=None, search_global=True):
'''
Search and return a setting in a known order:
1. In the virtual machine's configuration
2. In the virtual machine's profile configuration
3. In the virtual machine's provider configuration
4. In the salt cloud configuration if global searching is enabled
5. Return the provided default
'''
# As a last resort, return the default
value = default
if search_global is True and opts.get(name, None) is not None:
# The setting name exists in the cloud(global) configuration
value = deepcopy(opts[name])
if vm_ and name:
# Let's get the value from the profile, if present
if 'profile' in vm_ and vm_['profile'] is not None:
if name in opts['profiles'][vm_['profile']]:
if isinstance(value, dict):
value.update(opts['profiles'][vm_['profile']][name].copy())
else:
value = deepcopy(opts['profiles'][vm_['profile']][name])
# Let's get the value from the provider, if present.
if ':' in vm_['driver']:
# The provider is defined as <provider-alias>:<driver-name>
alias, driver = vm_['driver'].split(':')
if alias in opts['providers'] and \
driver in opts['providers'][alias]:
details = opts['providers'][alias][driver]
if name in details:
if isinstance(value, dict):
value.update(details[name].copy())
else:
value = deepcopy(details[name])
elif len(opts['providers'].get(vm_['driver'], ())) > 1:
# The provider is NOT defined as <provider-alias>:<driver-name>
# and there's more than one entry under the alias.
# WARN the user!!!!
log.error(
"The '%s' cloud provider definition has more than one "
'entry. Your VM configuration should be specifying the '
"provider as 'driver: %s:<driver-engine>'. Since "
"it's not, we're returning the first definition which "
'might not be what you intended.',
vm_['driver'], vm_['driver']
)
if vm_['driver'] in opts['providers']:
# There's only one driver defined for this provider. This is safe.
alias_defs = opts['providers'].get(vm_['driver'])
provider_driver_defs = alias_defs[next(iter(list(alias_defs.keys())))]
if name in provider_driver_defs:
# The setting name exists in the VM's provider configuration.
# Return it!
if isinstance(value, dict):
value.update(provider_driver_defs[name].copy())
else:
value = deepcopy(provider_driver_defs[name])
if name and vm_ and name in vm_:
# The setting name exists in VM configuration.
if isinstance(vm_[name], types.GeneratorType):
value = next(vm_[name], '')
else:
if isinstance(value, dict):
value.update(vm_[name].copy())
else:
value = deepcopy(vm_[name])
return value
def is_provider_configured(opts, provider, required_keys=(), log_message=True):
'''
Check and return the first matching and fully configured cloud provider
configuration.
'''
if ':' in provider:
alias, driver = provider.split(':')
if alias not in opts['providers']:
return False
if driver not in opts['providers'][alias]:
return False
for key in required_keys:
if opts['providers'][alias][driver].get(key, None) is None:
if log_message is True:
# There's at least one require configuration key which is not
# set.
log.warning(
"The required '%s' configuration setting is missing "
"from the '%s' driver, which is configured under the "
"'%s' alias.", key, provider, alias
)
return False
# If we reached this far, there's a properly configured provider.
# Return it!
return opts['providers'][alias][driver]
for alias, drivers in six.iteritems(opts['providers']):
for driver, provider_details in six.iteritems(drivers):
if driver != provider:
continue
# If we reached this far, we have a matching provider, let's see if
# all required configuration keys are present and not None.
skip_provider = False
for key in required_keys:
if provider_details.get(key, None) is None:
if log_message is True:
# This provider does not include all necessary keys,
# continue to next one.
log.warning(
"The required '%s' configuration setting is "
"missing from the '%s' driver, which is configured "
"under the '%s' alias.", key, provider, alias
)
skip_provider = True
break
if skip_provider:
continue
# If we reached this far, the provider included all required keys
return provider_details
# If we reached this point, the provider is not configured.
return False
def is_profile_configured(opts, provider, profile_name, vm_=None):
'''
Check if the requested profile contains the minimum required parameters for
a profile.
Required parameters include image and provider for all drivers, while some
drivers also require size keys.
.. versionadded:: 2015.8.0
'''
# Standard dict keys required by all drivers.
required_keys = ['provider']
alias, driver = provider.split(':')
# Most drivers need an image to be specified, but some do not.
non_image_drivers = ['nova', 'virtualbox', 'libvirt', 'softlayer', 'oneandone', 'profitbricks']
# Most drivers need a size, but some do not.
non_size_drivers = ['opennebula', 'parallels', 'proxmox', 'scaleway',
'softlayer', 'softlayer_hw', 'vmware', 'vsphere',
'virtualbox', 'libvirt', 'oneandone']
provider_key = opts['providers'][alias][driver]
profile_key = opts['providers'][alias][driver]['profiles'][profile_name]
# If cloning on Linode, size and image are not necessary.
# They are obtained from the to-be-cloned VM.
if driver == 'linode' and profile_key.get('clonefrom', False):
non_image_drivers.append('linode')
non_size_drivers.append('linode')
elif driver == 'gce' and 'sourceImage' in six.text_type(vm_.get('ex_disks_gce_struct')):
non_image_drivers.append('gce')
# If cloning on VMware, specifying image is not necessary.
if driver == 'vmware' and 'image' not in list(profile_key.keys()):
non_image_drivers.append('vmware')
if driver not in non_image_drivers:
required_keys.append('image')
if driver == 'vmware':
required_keys.append('datastore')
elif driver in ['linode', 'virtualbox']:
required_keys.append('clonefrom')
elif driver == 'nova':
nova_image_keys = ['image', 'block_device_mapping', 'block_device', 'boot_volume']
if not any([key in provider_key for key in nova_image_keys]) and not any([key in profile_key for key in nova_image_keys]):
required_keys.extend(nova_image_keys)
if driver not in non_size_drivers:
required_keys.append('size')
# Check if required fields are supplied in the provider config. If they
# are present, remove it from the required_keys list.
for item in list(required_keys):
if item in provider_key:
required_keys.remove(item)
# If a vm_ dict was passed in, use that information to get any other configs
# that we might have missed thus far, such as a option provided in a map file.
if vm_:
for item in list(required_keys):
if item in vm_:
required_keys.remove(item)
# Check for remaining required parameters in the profile config.
for item in required_keys:
if profile_key.get(item, None) is None:
# There's at least one required configuration item which is not set.
log.error(
"The required '%s' configuration setting is missing from "
"the '%s' profile, which is configured under the '%s' alias.",
item, profile_name, alias
)
return False
return True
def check_driver_dependencies(driver, dependencies):
'''
Check if the driver's dependencies are available.
.. versionadded:: 2015.8.0
driver
The name of the driver.
dependencies
The dictionary of dependencies to check.
'''
ret = True
for key, value in six.iteritems(dependencies):
if value is False:
log.warning(
"Missing dependency: '%s'. The %s driver requires "
"'%s' to be installed.", key, key, driver
)
ret = False
return ret
# <---- Salt Cloud Configuration Functions -----------------------------------
def _cache_id(minion_id, cache_file):
'''
Helper function, writes minion id to a cache file.
'''
path = os.path.dirname(cache_file)
try:
if not os.path.isdir(path):
os.makedirs(path)
except OSError as exc:
# Handle race condition where dir is created after os.path.isdir check
if os.path.isdir(path):
pass
else:
log.error('Failed to create dirs to minion_id file: %s', exc)
try:
with salt.utils.files.fopen(cache_file, 'w') as idf:
idf.write(minion_id)
except (IOError, OSError) as exc:
log.error('Could not cache minion ID: %s', exc)
def call_id_function(opts):
'''
Evaluate the function that determines the ID if the 'id_function'
option is set and return the result
'''
if opts.get('id'):
return opts['id']
# Import 'salt.loader' here to avoid a circular dependency
import salt.loader as loader
if isinstance(opts['id_function'], six.string_types):
mod_fun = opts['id_function']
fun_kwargs = {}
elif isinstance(opts['id_function'], dict):
mod_fun, fun_kwargs = six.next(six.iteritems(opts['id_function']))
if fun_kwargs is None:
fun_kwargs = {}
else:
log.error('\'id_function\' option is neither a string nor a dictionary')
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
# split module and function and try loading the module
mod, fun = mod_fun.split('.')
if not opts.get('grains'):
# Get grains for use by the module
opts['grains'] = loader.grains(opts)
try:
id_mod = loader.raw_mod(opts, mod, fun)
if not id_mod:
raise KeyError
# we take whatever the module returns as the minion ID
newid = id_mod[mod_fun](**fun_kwargs)
if not isinstance(newid, six.string_types) or not newid:
log.error(
'Function %s returned value "%s" of type %s instead of string',
mod_fun, newid, type(newid)
)
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
log.info('Evaluated minion ID from module: %s', mod_fun)
return newid
except TypeError:
log.error(
'Function arguments %s are incorrect for function %s',
fun_kwargs, mod_fun
)
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
except KeyError:
log.error('Failed to load module %s', mod_fun)
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
def get_id(opts, cache_minion_id=False):
'''
Guess the id of the minion.
If CONFIG_DIR/minion_id exists, use the cached minion ID from that file.
If no minion id is configured, use multiple sources to find a FQDN.
If no FQDN is found you may get an ip address.
Returns two values: the detected ID, and a boolean value noting whether or
not an IP address is being used for the ID.
'''
if opts['root_dir'] is None:
root_dir = salt.syspaths.ROOT_DIR
else:
root_dir = opts['root_dir']
config_dir = salt.syspaths.CONFIG_DIR
if config_dir.startswith(salt.syspaths.ROOT_DIR):
config_dir = config_dir.split(salt.syspaths.ROOT_DIR, 1)[-1]
# Check for cached minion ID
id_cache = os.path.join(root_dir,
config_dir.lstrip(os.path.sep),
'minion_id')
if opts.get('minion_id_caching', True):
try:
with salt.utils.files.fopen(id_cache) as idf:
name = salt.utils.stringutils.to_unicode(idf.readline().strip())
bname = salt.utils.stringutils.to_bytes(name)
if bname.startswith(codecs.BOM): # Remove BOM if exists
name = salt.utils.stringutils.to_str(bname.replace(codecs.BOM, '', 1))
if name and name != 'localhost':
log.debug('Using cached minion ID from %s: %s', id_cache, name)
return name, False
except (IOError, OSError):
pass
if '__role' in opts and opts.get('__role') == 'minion':
log.debug(
'Guessing ID. The id can be explicitly set in %s',
os.path.join(salt.syspaths.CONFIG_DIR, 'minion')
)
if opts.get('id_function'):
newid = call_id_function(opts)
else:
newid = salt.utils.network.generate_minion_id()
if opts.get('minion_id_lowercase'):
newid = newid.lower()
log.debug('Changed minion id %s to lowercase.', newid)
if '__role' in opts and opts.get('__role') == 'minion':
if opts.get('id_function'):
log.debug(
'Found minion id from external function %s: %s',
opts['id_function'], newid
)
else:
log.debug('Found minion id from generate_minion_id(): %s', newid)
if cache_minion_id and opts.get('minion_id_caching', True):
_cache_id(newid, id_cache)
is_ipv4 = salt.utils.network.is_ipv4(newid)
return newid, is_ipv4
def _update_ssl_config(opts):
'''
Resolves string names to integer constant in ssl configuration.
'''
if opts['ssl'] in (None, False):
opts['ssl'] = None
return
if opts['ssl'] is True:
opts['ssl'] = {}
return
import ssl
for key, prefix in (('cert_reqs', 'CERT_'),
('ssl_version', 'PROTOCOL_')):
val = opts['ssl'].get(key)
if val is None:
continue
if not isinstance(val, six.string_types) or not val.startswith(prefix) or not hasattr(ssl, val):
message = 'SSL option \'{0}\' must be set to one of the following values: \'{1}\'.' \
.format(key, '\', \''.join([val for val in dir(ssl) if val.startswith(prefix)]))
log.error(message)
raise salt.exceptions.SaltConfigurationError(message)
opts['ssl'][key] = getattr(ssl, val)
def _adjust_log_file_override(overrides, default_log_file):
'''
Adjusts the log_file based on the log_dir override
'''
if overrides.get('log_dir'):
# Adjust log_file if a log_dir override is introduced
if overrides.get('log_file'):
if not os.path.isabs(overrides['log_file']):
# Prepend log_dir if log_file is relative
overrides['log_file'] = os.path.join(overrides['log_dir'],
overrides['log_file'])
else:
# Create the log_file override
overrides['log_file'] = \
os.path.join(overrides['log_dir'],
os.path.basename(default_log_file))
def apply_minion_config(overrides=None,
defaults=None,
cache_minion_id=False,
minion_id=None):
'''
Returns minion configurations dict.
'''
if defaults is None:
defaults = DEFAULT_MINION_OPTS
opts = defaults.copy()
opts['__role'] = 'minion'
_adjust_log_file_override(overrides, defaults['log_file'])
if overrides:
opts.update(overrides)
if 'environment' in opts:
if 'saltenv' in opts:
log.warning(
'The \'saltenv\' and \'environment\' minion config options '
'cannot both be used. Ignoring \'environment\' in favor of '
'\'saltenv\'.',
)
# Set environment to saltenv in case someone's custom module is
# refrencing __opts__['environment']
opts['environment'] = opts['saltenv']
else:
log.warning(
'The \'environment\' minion config option has been renamed '
'to \'saltenv\'. Using %s as the \'saltenv\' config value.',
opts['environment']
)
opts['saltenv'] = opts['environment']
for idx, val in enumerate(opts['fileserver_backend']):
if val in ('git', 'hg', 'svn', 'minion'):
new_val = val + 'fs'
log.debug(
'Changed %s to %s in minion opts\' fileserver_backend list',
val, new_val
)
opts['fileserver_backend'][idx] = new_val
opts['__cli'] = salt.utils.stringutils.to_unicode(
os.path.basename(sys.argv[0])
)
# No ID provided. Will getfqdn save us?
using_ip_for_id = False
if not opts.get('id'):
if minion_id:
opts['id'] = minion_id
else:
opts['id'], using_ip_for_id = get_id(
opts,
cache_minion_id=cache_minion_id)
# it does not make sense to append a domain to an IP based id
if not using_ip_for_id and 'append_domain' in opts:
opts['id'] = _append_domain(opts)
for directory in opts.get('append_minionid_config_dirs', []):
if directory in ('pki_dir', 'cachedir', 'extension_modules'):
newdirectory = os.path.join(opts[directory], opts['id'])
opts[directory] = newdirectory
elif directory == 'default_include' and directory in opts:
include_dir = os.path.dirname(opts[directory])
new_include_dir = os.path.join(include_dir,
opts['id'],
os.path.basename(opts[directory]))
opts[directory] = new_include_dir
# pidfile can be in the list of append_minionid_config_dirs, but pidfile
# is the actual path with the filename, not a directory.
if 'pidfile' in opts.get('append_minionid_config_dirs', []):
newpath_list = os.path.split(opts['pidfile'])
opts['pidfile'] = os.path.join(newpath_list[0], 'salt', opts['id'], newpath_list[1])
if len(opts['sock_dir']) > len(opts['cachedir']) + 10:
opts['sock_dir'] = os.path.join(opts['cachedir'], '.salt-unix')
# Enabling open mode requires that the value be set to True, and
# nothing else!
opts['open_mode'] = opts['open_mode'] is True
opts['file_roots'] = _validate_file_roots(opts['file_roots'])
opts['pillar_roots'] = _validate_file_roots(opts['pillar_roots'])
# Make sure ext_mods gets set if it is an untrue value
# (here to catch older bad configs)
opts['extension_modules'] = (
opts.get('extension_modules') or
os.path.join(opts['cachedir'], 'extmods')
)
# Set up the utils_dirs location from the extension_modules location
opts['utils_dirs'] = (
opts.get('utils_dirs') or
[os.path.join(opts['extension_modules'], 'utils')]
)
# Insert all 'utils_dirs' directories to the system path
insert_system_path(opts, opts['utils_dirs'])
# Prepend root_dir to other paths
prepend_root_dirs = [
'pki_dir', 'cachedir', 'sock_dir', 'extension_modules', 'pidfile',
]
# These can be set to syslog, so, not actual paths on the system
for config_key in ('log_file', 'key_logfile'):
if urlparse(opts.get(config_key, '')).scheme == '':
prepend_root_dirs.append(config_key)
prepend_root_dir(opts, prepend_root_dirs)
# if there is no beacons option yet, add an empty beacons dict
if 'beacons' not in opts:
opts['beacons'] = {}
if (overrides or {}).get('ipc_write_buffer', '') == 'dynamic':
opts['ipc_write_buffer'] = _DFLT_IPC_WBUFFER
if 'ipc_write_buffer' not in overrides:
opts['ipc_write_buffer'] = 0
# Make sure hash_type is lowercase
opts['hash_type'] = opts['hash_type'].lower()
# Check and update TLS/SSL configuration
_update_ssl_config(opts)
_update_discovery_config(opts)
return opts
def _update_discovery_config(opts):
'''
Update discovery config for all instances.
:param opts:
:return:
'''
if opts.get('discovery') not in (None, False):
if opts['discovery'] is True:
opts['discovery'] = {}
discovery_config = {'attempts': 3, 'pause': 5, 'port': 4520, 'match': 'any', 'mapping': {}}
for key in opts['discovery']:
if key not in discovery_config:
raise salt.exceptions.SaltConfigurationError('Unknown discovery option: {0}'.format(key))
if opts.get('__role') != 'minion':
for key in ['attempts', 'pause', 'match']:
del discovery_config[key]
opts['discovery'] = salt.utils.dictupdate.update(discovery_config, opts['discovery'], True, True)
def master_config(path, env_var='SALT_MASTER_CONFIG', defaults=None, exit_on_config_errors=False):
'''
Reads in the master configuration file and sets up default options
This is useful for running the actual master daemon. For running
Master-side client interfaces that need the master opts see
:py:func:`salt.client.client_config`.
'''
if defaults is None:
defaults = DEFAULT_MASTER_OPTS
if not os.environ.get(env_var, None):
# No valid setting was given using the configuration variable.
# Lets see is SALT_CONFIG_DIR is of any use
salt_config_dir = os.environ.get('SALT_CONFIG_DIR', None)
if salt_config_dir:
env_config_file_path = os.path.join(salt_config_dir, 'master')
if salt_config_dir and os.path.isfile(env_config_file_path):
# We can get a configuration file using SALT_CONFIG_DIR, let's
# update the environment with this information
os.environ[env_var] = env_config_file_path
overrides = load_config(path, env_var, DEFAULT_MASTER_OPTS['conf_file'])
default_include = overrides.get('default_include',
defaults['default_include'])
include = overrides.get('include', [])
overrides.update(include_config(default_include, path, verbose=False,
exit_on_config_errors=exit_on_config_errors))
overrides.update(include_config(include, path, verbose=True,
exit_on_config_errors=exit_on_config_errors))
opts = apply_master_config(overrides, defaults)
_validate_ssh_minion_opts(opts)
_validate_opts(opts)
# If 'nodegroups:' is uncommented in the master config file, and there are
# no nodegroups defined, opts['nodegroups'] will be None. Fix this by
# reverting this value to the default, as if 'nodegroups:' was commented
# out or not present.
if opts.get('nodegroups') is None:
opts['nodegroups'] = DEFAULT_MASTER_OPTS.get('nodegroups', {})
if salt.utils.data.is_dictlist(opts['nodegroups']):
opts['nodegroups'] = salt.utils.data.repack_dictlist(opts['nodegroups'])
if opts.get('transport') == 'raet' and 'aes' in opts:
opts.pop('aes')
apply_sdb(opts)
return opts
def apply_master_config(overrides=None, defaults=None):
'''
Returns master configurations dict.
'''
if defaults is None:
defaults = DEFAULT_MASTER_OPTS
opts = defaults.copy()
opts['__role'] = 'master'
_adjust_log_file_override(overrides, defaults['log_file'])
if overrides:
opts.update(overrides)
if 'environment' in opts:
if 'saltenv' in opts:
log.warning(
'The \'saltenv\' and \'environment\' master config options '
'cannot both be used. Ignoring \'environment\' in favor of '
'\'saltenv\'.',
)
# Set environment to saltenv in case someone's custom runner is
# refrencing __opts__['environment']
opts['environment'] = opts['saltenv']
else:
log.warning(
'The \'environment\' master config option has been renamed '
'to \'saltenv\'. Using %s as the \'saltenv\' config value.',
opts['environment']
)
opts['saltenv'] = opts['environment']
if six.PY2 and 'rest_cherrypy' in opts:
# CherryPy is not unicode-compatible
opts['rest_cherrypy'] = salt.utils.data.encode(opts['rest_cherrypy'])
for idx, val in enumerate(opts['fileserver_backend']):
if val in ('git', 'hg', 'svn', 'minion'):
new_val = val + 'fs'
log.debug(
'Changed %s to %s in master opts\' fileserver_backend list',
val, new_val
)
opts['fileserver_backend'][idx] = new_val
if len(opts['sock_dir']) > len(opts['cachedir']) + 10:
opts['sock_dir'] = os.path.join(opts['cachedir'], '.salt-unix')
opts['token_dir'] = os.path.join(opts['cachedir'], 'tokens')
opts['syndic_dir'] = os.path.join(opts['cachedir'], 'syndics')
# Make sure ext_mods gets set if it is an untrue value
# (here to catch older bad configs)
opts['extension_modules'] = (
opts.get('extension_modules') or
os.path.join(opts['cachedir'], 'extmods')
)
# Set up the utils_dirs location from the extension_modules location
opts['utils_dirs'] = (
opts.get('utils_dirs') or
[os.path.join(opts['extension_modules'], 'utils')]
)
# Insert all 'utils_dirs' directories to the system path
insert_system_path(opts, opts['utils_dirs'])
if (overrides or {}).get('ipc_write_buffer', '') == 'dynamic':
opts['ipc_write_buffer'] = _DFLT_IPC_WBUFFER
if 'ipc_write_buffer' not in overrides:
opts['ipc_write_buffer'] = 0
using_ip_for_id = False
append_master = False
if not opts.get('id'):
opts['id'], using_ip_for_id = get_id(
opts,
cache_minion_id=None)
append_master = True
# it does not make sense to append a domain to an IP based id
if not using_ip_for_id and 'append_domain' in opts:
opts['id'] = _append_domain(opts)
if append_master:
opts['id'] += '_master'
# Prepend root_dir to other paths
prepend_root_dirs = [
'pki_dir', 'cachedir', 'pidfile', 'sock_dir', 'extension_modules',
'autosign_file', 'autoreject_file', 'token_dir', 'syndic_dir',
'sqlite_queue_dir', 'autosign_grains_dir'
]
# These can be set to syslog, so, not actual paths on the system
for config_key in ('log_file', 'key_logfile', 'ssh_log_file'):
log_setting = opts.get(config_key, '')
if log_setting is None:
continue
if urlparse(log_setting).scheme == '':
prepend_root_dirs.append(config_key)
prepend_root_dir(opts, prepend_root_dirs)
# Enabling open mode requires that the value be set to True, and
# nothing else!
opts['open_mode'] = opts['open_mode'] is True
opts['auto_accept'] = opts['auto_accept'] is True
opts['file_roots'] = _validate_file_roots(opts['file_roots'])
opts['pillar_roots'] = _validate_file_roots(opts['pillar_roots'])
if opts['file_ignore_regex']:
# If file_ignore_regex was given, make sure it's wrapped in a list.
# Only keep valid regex entries for improved performance later on.
if isinstance(opts['file_ignore_regex'], six.string_types):
ignore_regex = [opts['file_ignore_regex']]
elif isinstance(opts['file_ignore_regex'], list):
ignore_regex = opts['file_ignore_regex']
opts['file_ignore_regex'] = []
for regex in ignore_regex:
try:
# Can't store compiled regex itself in opts (breaks
# serialization)
re.compile(regex)
opts['file_ignore_regex'].append(regex)
except Exception:
log.warning(
'Unable to parse file_ignore_regex. Skipping: %s',
regex
)
if opts['file_ignore_glob']:
# If file_ignore_glob was given, make sure it's wrapped in a list.
if isinstance(opts['file_ignore_glob'], six.string_types):
opts['file_ignore_glob'] = [opts['file_ignore_glob']]
# Let's make sure `worker_threads` does not drop below 3 which has proven
# to make `salt.modules.publish` not work under the test-suite.
if opts['worker_threads'] < 3 and opts.get('peer', None):
log.warning(
"The 'worker_threads' setting in '%s' cannot be lower than "
'3. Resetting it to the default value of 3.', opts['conf_file']
)
opts['worker_threads'] = 3
opts.setdefault('pillar_source_merging_strategy', 'smart')
# Make sure hash_type is lowercase
opts['hash_type'] = opts['hash_type'].lower()
# Check and update TLS/SSL configuration
_update_ssl_config(opts)
_update_discovery_config(opts)
return opts
def client_config(path, env_var='SALT_CLIENT_CONFIG', defaults=None):
'''
Load Master configuration data
Usage:
.. code-block:: python
import salt.config
master_opts = salt.config.client_config('/etc/salt/master')
Returns a dictionary of the Salt Master configuration file with necessary
options needed to communicate with a locally-running Salt Master daemon.
This function searches for client specific configurations and adds them to
the data from the master configuration.
This is useful for master-side operations like
:py:class:`~salt.client.LocalClient`.
'''
if defaults is None:
defaults = DEFAULT_MASTER_OPTS
xdg_dir = salt.utils.xdg.xdg_config_dir()
if os.path.isdir(xdg_dir):
client_config_dir = xdg_dir
saltrc_config_file = 'saltrc'
else:
client_config_dir = os.path.expanduser('~')
saltrc_config_file = '.saltrc'
# Get the token file path from the provided defaults. If not found, specify
# our own, sane, default
opts = {
'token_file': defaults.get(
'token_file',
os.path.join(client_config_dir, 'salt_token')
)
}
# Update options with the master configuration, either from the provided
# path, salt's defaults or provided defaults
opts.update(
master_config(path, defaults=defaults)
)
# Update with the users salt dot file or with the environment variable
saltrc_config = os.path.join(client_config_dir, saltrc_config_file)
opts.update(
load_config(
saltrc_config,
env_var,
saltrc_config
)
)
# Make sure we have a proper and absolute path to the token file
if 'token_file' in opts:
opts['token_file'] = os.path.abspath(
os.path.expanduser(
opts['token_file']
)
)
# If the token file exists, read and store the contained token
if os.path.isfile(opts['token_file']):
# Make sure token is still valid
expire = opts.get('token_expire', 43200)
if os.stat(opts['token_file']).st_mtime + expire > time.mktime(time.localtime()):
with salt.utils.files.fopen(opts['token_file']) as fp_:
opts['token'] = fp_.read().strip()
# On some platforms, like OpenBSD, 0.0.0.0 won't catch a master running on localhost
if opts['interface'] == '0.0.0.0':
opts['interface'] = '127.0.0.1'
# Make sure the master_uri is set
if 'master_uri' not in opts:
opts['master_uri'] = 'tcp://{ip}:{port}'.format(
ip=salt.utils.zeromq.ip_bracket(opts['interface']),
port=opts['ret_port']
)
# Return the client options
_validate_opts(opts)
return opts
def api_config(path):
'''
Read in the Salt Master config file and add additional configs that
need to be stubbed out for salt-api
'''
# Let's grab a copy of salt-api's required defaults
opts = DEFAULT_API_OPTS
# Let's override them with salt's master opts
opts.update(client_config(path, defaults=DEFAULT_MASTER_OPTS))
# Let's set the pidfile and log_file values in opts to api settings
opts.update({
'pidfile': opts.get('api_pidfile', DEFAULT_API_OPTS['api_pidfile']),
'log_file': opts.get('api_logfile', DEFAULT_API_OPTS['api_logfile']),
})
prepend_root_dir(opts, [
'api_pidfile',
'api_logfile',
'log_file',
'pidfile'
])
return opts
def spm_config(path):
'''
Read in the salt master config file and add additional configs that
need to be stubbed out for spm
.. versionadded:: 2015.8.0
'''
# Let's grab a copy of salt's master default opts
defaults = DEFAULT_MASTER_OPTS.copy()
# Let's override them with spm's required defaults
defaults.update(DEFAULT_SPM_OPTS)
overrides = load_config(path, 'SPM_CONFIG', DEFAULT_SPM_OPTS['spm_conf_file'])
default_include = overrides.get('spm_default_include',
defaults['spm_default_include'])
include = overrides.get('include', [])
overrides.update(include_config(default_include, path, verbose=False))
overrides.update(include_config(include, path, verbose=True))
defaults = apply_master_config(overrides, defaults)
defaults = apply_spm_config(overrides, defaults)
return client_config(path, env_var='SPM_CONFIG', defaults=defaults)
def apply_spm_config(overrides, defaults):
'''
Returns the spm configurations dict.
.. versionadded:: 2015.8.1
'''
opts = defaults.copy()
_adjust_log_file_override(overrides, defaults['log_file'])
if overrides:
opts.update(overrides)
# Prepend root_dir to other paths
prepend_root_dirs = [
'formula_path', 'pillar_path', 'reactor_path',
'spm_cache_dir', 'spm_build_dir'
]
# These can be set to syslog, so, not actual paths on the system
for config_key in ('spm_logfile',):
log_setting = opts.get(config_key, '')
if log_setting is None:
continue
if urlparse(log_setting).scheme == '':
prepend_root_dirs.append(config_key)
prepend_root_dir(opts, prepend_root_dirs)
return opts
| 37.425233 | 130 | 0.631777 |
ceeefd5c05add6402c025389eb298a9cc60347e9 | 9,573 | py | Python | src/m7_loops_within_loops_graphics.py | chenx16/20-Exam3Practice | f4652d66a12219980b8f740358ecc0d20b10c129 | [
"MIT"
] | null | null | null | src/m7_loops_within_loops_graphics.py | chenx16/20-Exam3Practice | f4652d66a12219980b8f740358ecc0d20b10c129 | [
"MIT"
] | null | null | null | src/m7_loops_within_loops_graphics.py | chenx16/20-Exam3Practice | f4652d66a12219980b8f740358ecc0d20b10c129 | [
"MIT"
] | null | null | null | """
PRACTICE Exam 3.
This problem provides practice at:
*** LOOPS WITHIN LOOPS in 2D GRAPHICS problems. ***
Authors: David Mutchler, Vibha Alangar, Matt Boutell, Dave Fisher,
Mark Hays, Amanda Stouder, Aaron Wilkin, their colleagues,
and Xinlai Chen.
""" # done: 1. PUT YOUR NAME IN THE ABOVE LINE.
###############################################################################
# Students:
#
# These problems have DIFFICULTY and TIME ratings:
# DIFFICULTY rating: 1 to 10, where:
# 1 is very easy
# 3 is an "easy" Test 2 question.
# 5 is a "typical" Test 2 question.
# 7 is a "hard" Test 2 question.
# 10 is an EXTREMELY hard problem (too hard for a Test 2 question)
#
# TIME ratings: A ROUGH estimate of the number of minutes that we
# would expect a well-prepared student to take on the problem.
#
# IMPORTANT: For ALL the problems in this module,
# if you reach the time estimate and are NOT close to a solution,
# STOP working on that problem and ASK YOUR INSTRUCTOR FOR HELP
# on it, in class or via Piazza.
###############################################################################
import rosegraphics as rg
import math
def main():
""" Calls the TEST functions in this module. """
run_test_hourglass()
run_test_many_hourglasses()
def run_test_hourglass():
""" Tests the hourglass function. """
print()
print('--------------------------------------------------')
print('Testing the hourglass function:')
print('--------------------------------------------------')
test1 = '(n = 3, radius = 40, blue)'
test2 = '(n = 8, radius = 15, green)'
title1 = 'Hourglass, two tests: {} and {}'.format(test1, test2)
window1 = rg.RoseWindow(600, 500, title1)
hourglass(window1, 3, rg.Point(150, 200), 40, 'blue')
hourglass(window1, 8, rg.Point(450, 250), 15, 'green')
window1.close_on_mouse_click()
test3 = '(n = 6, radius = 30, red)'
title2 = 'Hourglass, one more test: {}'.format(test3)
window2 = rg.RoseWindow(400, 700, title2)
hourglass(window2, 6, rg.Point(200, 350), 30, 'red')
window2.close_on_mouse_click()
def hourglass(window, n, point, radius, color):
"""
See hourglass_picture.pdf in this project for pictures that may
help you better understand the following specification:
Displays an "hourglass" shape of circles in the given window.
-- Each circle has the given radius and given color.
-- Each circle has a horizontal line drawn through it.
-- The middlemost of the circles is centered at the given point.
-- There is a single circle in that middlemost row.
-- There are n rows (including the middlemost row)
of circles going UP from the middlemost circle.
-- There are n rows (including the middlemost row)
of circles going DOWN from the middlemost circle.
-- Each circle barely touches its neighbor circles.
Preconditions:
:type window: rg.RoseWindow
:type n: int
:type point: rg.Point
:type radius: int
:type color: str
where n and radius are positive and color is a string that denotes
a color that rosegraphics understands.
"""
x=point.x
y=point.y
r=radius
d=math.sqrt(3*(r**2))
for k in range(n):
x1=x-r*k
y1=y-d*k
c = rg.Circle(rg.Point(x1, y1), r)
c.fill_color = color
c.attach_to(window)
for i in range(k):
x1=x1+2*r
c=rg.Circle(rg.Point(x1,y1),r)
c.fill_color=color
c.attach_to(window)
for k in range(n):
x1=x-r*k
y1=y+d*k
c = rg.Circle(rg.Point(x1, y1), r)
c.fill_color = color
c.attach_to(window)
for i in range(k):
x1=x1+2*r
c=rg.Circle(rg.Point(x1,y1),r)
c.fill_color=color
c.attach_to(window)
a=x-r
b=y
l=2*r
for k in range(n):
x2=a-r*k
e=x2
y2=b+d*k
for i in range(k+1):
e=e+l
li=rg.Line(rg.Point(e,y2),rg.Point(x2,y2))
li.attach_to(window)
for k in range(n):
x2 = a - r * k
e = x2
y2 = b - d * k
for i in range(k + 1):
e = e + l
li = rg.Line(rg.Point(e, y2), rg.Point(x2, y2))
li.attach_to(window)
window.render()
# -------------------------------------------------------------------------
# done: 2. Implement and test this function.
# We provided some tests for you (above).
# -------------------------------------------------------------------------
###########################################################################
# BONUS: Avoid replicated code if you can. Hint: You are allowed
# to define an additional function(s) if you wish.
###########################################################################
# -------------------------------------------------------------------------
# DIFFICULTY AND TIME RATINGS (see top of this file for explanation)
# DIFFICULTY: 8
# TIME ESTIMATE: 25 minutes (warning: this problem is challenging)
# -------------------------------------------------------------------------
def run_test_many_hourglasses():
""" Tests the many_hourglasses function. """
print()
print('--------------------------------------------------')
print('Testing the many_hourglasses function:')
print('--------------------------------------------------')
test1 = '(n = 4, radius = 30, red-blue-black-green)'
test2 = '(n = 3, radius = 70, brown-cyan-yellow-green)'
title1 = 'Many hourglasses, two tests: {} and {}'.format(test1,
test2)
window1 = rg.RoseWindow(800, 400, title1)
square1 = rg.Square(rg.Point(50, 150), 30)
square2 = rg.Square(rg.Point(400, 200), 70)
many_hourglasses(window1, square1, 4,
('red', 'blue', 'black', 'green'))
many_hourglasses(window1, square2, 3,
('brown', 'cyan', 'yellow', 'green'))
window1.close_on_mouse_click()
test3 = '(n = 7, radius = 40, red-black-blue)'
title2 = 'Many hourglasses, one more test: {}'.format(test3)
window2 = rg.RoseWindow(1200, 500, title2)
square3 = rg.Square(rg.Point(50, 250), 40)
many_hourglasses(window2, square3, 7, ('red', 'black', 'blue'))
window2.close_on_mouse_click()
def many_hourglasses(window, square, m, colors):
"""
See many_hourglasses_picture.pdf in this project for pictures that may
help you better understand the following specification:
Displays m rectangles, where:
-- Each rectangle has an hourglass of circles inside it,
per the hourglass function above.
-- The circles in the hourglasses are all the same size.
-- The leftmost rectangle is the given square, and it contains
an hourglass with a single circle that fills the square.
-- Each successive rectangle is immediately to the right of the
previous rectangle, and each contains an hourglass with
the hourglass' n being one greater than the n used
for the previous rectangle.
-- The colors for the hourglass figures use the given sequence of
colors, "wrapping" if m exceeds the length of the sequence.
Preconditions:
:type window: rg.RoseWindow
:type square: rg.Square
:type m: int
:type colors: (list | tuple) of str
where m is positive and colors is a sequence of strings,
each of which denotes a color that rosegraphics understands.
"""
r=square.length_of_each_side//2
c=square.center
d = math.sqrt(3 * (r ** 2))
for k in range(m):
c.x = c.x + (1 + 2 * k) * r
xl=c.x-r*(k+1)
yl=c.y-k*d-r
xr = c.x + r * (1+k)
yr = c.y + k * d + r
re=rg.Rectangle(rg.Point(xl,yl),rg.Point(xr,yr))
re.attach_to(window)
for i in range(k+1):
i = i%len(colors)
co = colors[i]
hourglass(window, k+1 , rg.Point(c.x, c.y), r, co)
window.render()
# -------------------------------------------------------------------------
# done: 3. Implement and test this function.
# We provided some tests for you (above).
# -------------------------------------------------------------------------
###########################################################################
# IMPORTANT:
# 1. Partial credit if you draw JUST the rectangles.
# 2. No additional credit unless you CALL the hourglass function
# in the PREVIOUS problem appropriately
# to draw the hourglass figures.
###########################################################################
# -------------------------------------------------------------------------
# DIFFICULTY AND TIME RATINGS (see top of this file for explanation)
# DIFFICULTY: 7 (assuming that you already have
# a correct "hourglass" function above)
# TIME ESTIMATE: 20 minutes (warning: this problem is challenging)
# -------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# Calls main to start the ball rolling.
# -----------------------------------------------------------------------------
main()
| 37.837945 | 79 | 0.510603 |
b5a8bf7dd3d5e6764fa22fa5a9e7dbd6b4039b76 | 4,192 | py | Python | src/postprocessing/review.py | ryanlayerlab/hypedsearch | 7d3421cbbf8d9eea3ca334c4ec72b35bd0d3976c | [
"MIT"
] | null | null | null | src/postprocessing/review.py | ryanlayerlab/hypedsearch | 7d3421cbbf8d9eea3ca334c4ec72b35bd0d3976c | [
"MIT"
] | null | null | null | src/postprocessing/review.py | ryanlayerlab/hypedsearch | 7d3421cbbf8d9eea3ca334c4ec72b35bd0d3976c | [
"MIT"
] | 1 | 2021-07-10T13:19:07.000Z | 2021-07-10T13:19:07.000Z | from objects import SequenceAlignment, Alignments
def __is_swap_up_to_dist(a: str, b: str, i: int, j: int, dist: int, d: list) -> list:
'''
Helper function to edit distance long swaps. Helps identify potential swaps
and if two chars could be swapped then the entry to d is returned
'''
# first index not worth looking at
if i == 0 or j == 0:
return []
# iterate up to our current index or the max swap distance
iter_len = min([i, j, dist])
# keep track of swaps we find
swaps = []
for k in range(1, iter_len + 1):
# if it is a swap then keep track of it
if a[i] == b[j - k] and a[i - k] == b[j]:
swaps.append((i-k, j-k))
return swaps
def __edit_distance_long_swaps(a: str, b: str, dist: int = 0) -> int:
'''
Find the edit distance between two strings allowing for swaps up to a distance dist.
Example:
a: 'ABCDE', b: 'ADCBE'
edit distance with swap dist of 0: 2 (substitution of B for D and D for B)
edit distance with swap dist of 1: 1 (swap of B and D)
Limitations: if a position has 2 swaps, it will not be picked up. Example:
a: 'CBEDA', b: 'ABCDE'
A has been swapped with C then with E, but the final output will be edit distance of 3
for all of the swaps
Inputs:
a: (str) the first string
b: (str) the second string
dist:(int) the swapping distance allowed. Default=0
Outputs:
(int) the minimum edit distance
'''
d = [[0 for _ in range(len(b))] for _ in range(len(a))]
for i in range(len(a)):
d[i][0] = i
for j in range(len(b)):
d[0][1] = j
for i in range(len(a)):
for j in range(len(b)):
# look for swaps
swaps = __is_swap_up_to_dist(a, b, i, j, dist, d)
if a[i] == b[j]:
d[i][j] = d[i-1][j-1]
elif len(swaps):
# get all swaps possible
swap_values = [d[x][y] for x, y in swaps]
d[i][j] = min(swap_values + [
d[i-1][j] + 1, # deletion
d[i][j-1] + 1, # substitution
])
else:
d[i][j] = min([
d[i-1][j] + 1, # deletion
d[i][j-1] + 1, # insertion
d[i-1][j-1] + 1,# substitution
])
return d[len(a)-1][len(b)-1]
# def prioritize_non_hybrids(results: dict) -> dict:
# '''
# Look through all of the results and try to prioritize non hybrid results
# Inputs:
# results: (dict) a value is a list of SequenceAlignments
# Outputs:
# (dict) updated sorted
# '''
# for _id, sas in results.itesm():
# # sas is a list of sequence alignments
def __digest_score(sequence: str, digest_type: str) -> int:
'''
The additional points a sequence gets if it follows the digest rule.
'''
pass
def tie_breaker(results: dict, digest_type: str, n: int) -> dict:
'''
Look through all the results and try to break as many ties as possible
Inputs:
results: (dict) values are Alignments
digest_type: (str) the digest type used
n: (int) return only the top n results
Outputs:
(dict) updated results
'''
for _id, alignments in results.items():
# go through every sequence and see if they follow the digest rule
# hard code for test rn
new_sas = []
for sa in alignments.alignments:
if sa.sequence[0] == 'D':
new_sas.append(sa._replace(total_score=sa.total_score + 1))
else:
new_sas.append(sa)
a = Alignments(
alignments.spectrum,
sorted(new_sas, key=lambda x: (x.total_score, 1/x.precursor_distance), reverse=True)[:n]
)
results[_id] = a
return results | 29.521127 | 100 | 0.515506 |
63333853a38c9cd2f7f6d6af5e0e8549e8b836b0 | 12,754 | py | Python | tests/test_client_session.py | MarcoSulla/aiohttp | 889b7f1450d0e604bcf5dabce1932e2f0f72fb6b | [
"Apache-2.0"
] | 1 | 2021-07-07T06:36:57.000Z | 2021-07-07T06:36:57.000Z | tests/test_client_session.py | MarcoSulla/aiohttp | 889b7f1450d0e604bcf5dabce1932e2f0f72fb6b | [
"Apache-2.0"
] | null | null | null | tests/test_client_session.py | MarcoSulla/aiohttp | 889b7f1450d0e604bcf5dabce1932e2f0f72fb6b | [
"Apache-2.0"
] | 1 | 2021-02-09T10:05:59.000Z | 2021-02-09T10:05:59.000Z | import asyncio
import contextlib
import gc
import re
import types
from unittest import mock
from multidict import CIMultiDict, MultiDict
import aiohttp
import pytest
from aiohttp.client import ClientSession
from aiohttp.connector import BaseConnector, TCPConnector
@pytest.fixture
def connector(loop):
conn = BaseConnector(loop=loop)
transp = mock.Mock()
conn._conns['a'] = [(transp, 'proto', 123)]
return conn
@pytest.yield_fixture
def create_session(loop):
session = None
def maker(*args, **kwargs):
nonlocal session
session = ClientSession(*args, loop=loop, **kwargs)
return session
yield maker
if session is not None:
session.close()
@pytest.fixture
def session(create_session):
return create_session()
@pytest.fixture
def params():
return dict(
headers={"Authorization": "Basic ..."},
max_redirects=2,
encoding="latin1",
version=aiohttp.HttpVersion10,
compress="deflate",
chunked=True,
expect100=True,
read_until_eof=False)
def test_init_headers_simple_dict(create_session):
session = create_session(headers={"h1": "header1",
"h2": "header2"})
assert (sorted(session._default_headers.items()) ==
([("H1", "header1"), ("H2", "header2")]))
def test_init_headers_list_of_tuples(create_session):
session = create_session(headers=[("h1", "header1"),
("h2", "header2"),
("h3", "header3")])
assert (session._default_headers ==
CIMultiDict([("h1", "header1"),
("h2", "header2"),
("h3", "header3")]))
def test_init_headers_MultiDict(create_session):
session = create_session(headers=MultiDict([("h1", "header1"),
("h2", "header2"),
("h3", "header3")]))
assert (session._default_headers ==
CIMultiDict([("H1", "header1"),
("H2", "header2"),
("H3", "header3")]))
def test_init_headers_list_of_tuples_with_duplicates(create_session):
session = create_session(headers=[("h1", "header11"),
("h2", "header21"),
("h1", "header12")])
assert (session._default_headers ==
CIMultiDict([("H1", "header11"),
("H2", "header21"),
("H1", "header12")]))
def test_init_cookies_with_simple_dict(create_session):
session = create_session(cookies={"c1": "cookie1",
"c2": "cookie2"})
assert set(session.cookies) == {'c1', 'c2'}
assert session.cookies['c1'].value == 'cookie1'
assert session.cookies['c2'].value == 'cookie2'
def test_init_cookies_with_list_of_tuples(create_session):
session = create_session(cookies=[("c1", "cookie1"),
("c2", "cookie2")])
assert set(session.cookies) == {'c1', 'c2'}
assert session.cookies['c1'].value == 'cookie1'
assert session.cookies['c2'].value == 'cookie2'
def test_merge_headers(create_session):
# Check incoming simple dict
session = create_session(headers={"h1": "header1",
"h2": "header2"})
headers = session._prepare_headers({"h1": "h1"})
assert isinstance(headers, CIMultiDict)
assert headers == CIMultiDict([("h2", "header2"),
("h1", "h1")])
def test_merge_headers_with_multi_dict(create_session):
session = create_session(headers={"h1": "header1",
"h2": "header2"})
headers = session._prepare_headers(MultiDict([("h1", "h1")]))
assert isinstance(headers, CIMultiDict)
assert headers == CIMultiDict([("h2", "header2"),
("h1", "h1")])
def test_merge_headers_with_list_of_tuples(create_session):
session = create_session(headers={"h1": "header1",
"h2": "header2"})
headers = session._prepare_headers([("h1", "h1")])
assert isinstance(headers, CIMultiDict)
assert headers == CIMultiDict([("h2", "header2"),
("h1", "h1")])
def test_merge_headers_with_list_of_tuples_duplicated_names(create_session):
session = create_session(headers={"h1": "header1",
"h2": "header2"})
headers = session._prepare_headers([("h1", "v1"),
("h1", "v2")])
assert isinstance(headers, CIMultiDict)
assert headers == CIMultiDict([("H2", "header2"),
("H1", "v1"),
("H1", "v2")])
def test_http_GET(session, params):
with mock.patch("aiohttp.client.ClientSession._request") as patched:
session.get("http://test.example.com",
params={"x": 1},
**params)
assert patched.called, "`ClientSession._request` not called"
assert list(patched.call_args) == [("GET", "http://test.example.com",),
dict(
params={"x": 1},
allow_redirects=True,
**params)]
def test_http_OPTIONS(session, params):
with mock.patch("aiohttp.client.ClientSession._request") as patched:
session.options("http://opt.example.com",
params={"x": 2},
**params)
assert patched.called, "`ClientSession._request` not called"
assert list(patched.call_args) == [("OPTIONS", "http://opt.example.com",),
dict(
params={"x": 2},
allow_redirects=True,
**params)]
def test_http_HEAD(session, params):
with mock.patch("aiohttp.client.ClientSession._request") as patched:
session.head("http://head.example.com",
params={"x": 2},
**params)
assert patched.called, "`ClientSession._request` not called"
assert list(patched.call_args) == [("HEAD", "http://head.example.com",),
dict(
params={"x": 2},
allow_redirects=False,
**params)]
def test_http_POST(session, params):
with mock.patch("aiohttp.client.ClientSession._request") as patched:
session.post("http://post.example.com",
params={"x": 2},
data="Some_data",
**params)
assert patched.called, "`ClientSession._request` not called"
assert list(patched.call_args) == [("POST", "http://post.example.com",),
dict(
params={"x": 2},
data="Some_data",
**params)]
def test_http_PUT(session, params):
with mock.patch("aiohttp.client.ClientSession._request") as patched:
session.put("http://put.example.com",
params={"x": 2},
data="Some_data",
**params)
assert patched.called, "`ClientSession._request` not called"
assert list(patched.call_args) == [("PUT", "http://put.example.com",),
dict(
params={"x": 2},
data="Some_data",
**params)]
def test_http_PATCH(session, params):
with mock.patch("aiohttp.client.ClientSession._request") as patched:
session.patch("http://patch.example.com",
params={"x": 2},
data="Some_data",
**params)
assert patched.called, "`ClientSession._request` not called"
assert list(patched.call_args) == [("PATCH", "http://patch.example.com",),
dict(
params={"x": 2},
data="Some_data",
**params)]
def test_http_DELETE(session, params):
with mock.patch("aiohttp.client.ClientSession._request") as patched:
session.delete("http://delete.example.com",
params={"x": 2},
**params)
assert patched.called, "`ClientSession._request` not called"
assert list(patched.call_args) == [("DELETE",
"http://delete.example.com",),
dict(
params={"x": 2},
**params)]
def test_close(create_session, connector):
session = create_session(connector=connector)
session.close()
assert session.connector is None
assert connector.closed
def test_closed(session):
assert not session.closed
session.close()
assert session.closed
def test_connector(create_session, loop):
connector = TCPConnector(loop=loop)
session = create_session(connector=connector)
assert session.connector is connector
def test_connector_loop(loop):
with contextlib.ExitStack() as stack:
another_loop = asyncio.new_event_loop()
stack.enter_context(contextlib.closing(another_loop))
connector = TCPConnector(loop=another_loop)
stack.enter_context(contextlib.closing(connector))
with pytest.raises(ValueError) as ctx:
ClientSession(connector=connector, loop=loop)
assert re.match("loop argument must agree with connector",
str(ctx.value))
def test_cookies_are_readonly(session):
with pytest.raises(AttributeError):
session.cookies = 123
def test_detach(session):
conn = session.connector
try:
assert not conn.closed
session.detach()
assert session.connector is None
assert session.closed
assert not conn.closed
finally:
conn.close()
@pytest.mark.run_loop
def test_request_closed_session(session):
session.close()
with pytest.raises(RuntimeError):
yield from session.request('get', '/')
def test_close_flag_for_closed_connector(session):
conn = session.connector
assert not session.closed
conn.close()
assert session.closed
def test_double_close(connector, create_session):
session = create_session(connector=connector)
session.close()
assert session.connector is None
session.close()
assert session.closed
assert connector.closed
def test_del(connector, loop, warning):
# N.B. don't use session fixture, it stores extra reference internally
session = ClientSession(connector=connector, loop=loop)
loop.set_exception_handler(lambda loop, ctx: None)
with warning(ResourceWarning):
del session
gc.collect()
def test_context_manager(connector, loop):
with ClientSession(loop=loop, connector=connector) as session:
pass
assert session.closed
def test_borrow_connector_loop(connector, create_session, loop):
session = ClientSession(connector=connector, loop=None)
try:
assert session._loop, loop
finally:
session.close()
@pytest.mark.run_loop
def test_reraise_os_error(create_session):
err = OSError(1, "permission error")
req = mock.Mock()
req_factory = mock.Mock(return_value=req)
req.send = mock.Mock(side_effect=err)
session = create_session(request_class=req_factory)
@asyncio.coroutine
def create_connection(req):
# return self.transport, self.protocol
return mock.Mock(), mock.Mock()
session._connector._create_connection = create_connection
with pytest.raises(aiohttp.ClientOSError) as ctx:
yield from session.request('get', 'http://example.com')
e = ctx.value
assert e.errno == err.errno
assert e.strerror == err.strerror
def test_request_ctx_manager_props(loop):
with aiohttp.ClientSession(loop=loop) as client:
ctx_mgr = client.get('http://example.com')
next(ctx_mgr)
assert isinstance(ctx_mgr.gi_frame, types.FrameType)
assert not ctx_mgr.gi_running
assert isinstance(ctx_mgr.gi_code, types.CodeType)
| 34.284946 | 78 | 0.553238 |
ea11abdd753e3cc5cc36821cff20e94030356b2d | 1,889 | py | Python | integrationtest/vm/installation/upgrade/test_zs_degd_latest_1.2_on_cos7.py | sherry546/zstack-woodpecker | 54a37459f2d72ce6820974feaa6eb55772c3d2ce | [
"Apache-2.0"
] | 1 | 2021-03-21T12:41:11.000Z | 2021-03-21T12:41:11.000Z | integrationtest/vm/installation/upgrade/test_zs_degd_latest_1.2_on_cos7.py | sherry546/zstack-woodpecker | 54a37459f2d72ce6820974feaa6eb55772c3d2ce | [
"Apache-2.0"
] | null | null | null | integrationtest/vm/installation/upgrade/test_zs_degd_latest_1.2_on_cos7.py | sherry546/zstack-woodpecker | 54a37459f2d72ce6820974feaa6eb55772c3d2ce | [
"Apache-2.0"
] | 1 | 2017-05-19T06:40:40.000Z | 2017-05-19T06:40:40.000Z | '''
@author: MengLai
'''
import os
import tempfile
import uuid
import time
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
tmp_file = '/tmp/%s' % uuid.uuid1().get_hex()
def test():
test_util.test_dsc('Create test vm to test zstack upgrade by -u.')
image_name = os.environ.get('imageName_i_c7')
vm = test_stub.create_vlan_vm(image_name)
test_obj_dict.add_vm(vm)
if os.environ.get('zstackManagementIp') == None:
vm.check()
else:
time.sleep(60)
vm_inv = vm.get_vm()
vm_ip = vm_inv.vmNics[0].ip
test_util.test_dsc('Install latest zstack')
target_file = '/root/zstack-all-in-one.tgz'
test_stub.prepare_test_env(vm_inv, target_file)
ssh_cmd = 'ssh -oStrictHostKeyChecking=no -oCheckHostIP=no -oUserKnownHostsFile=/dev/null %s' % vm_ip
test_stub.copy_id_dsa(vm_inv, ssh_cmd, tmp_file)
test_stub.copy_id_dsa_pub(vm_inv)
test_stub.execute_all_install(ssh_cmd, target_file, tmp_file)
test_stub.check_installation(ssh_cmd, tmp_file, vm_inv)
test_util.test_dsc('Degrade zstack to 1.2')
degrade_target_file = '/root/zstack-degrade-all-in-one.tgz'
install_pkg = os.environ.get('zstackPkg_1.2')
test_stub.prepare_upgrade_test_env(vm_inv, degrade_target_file, install_pkg)
test_stub.upgrade_zstack(ssh_cmd, degrade_target_file, tmp_file)
test_stub.check_installation(ssh_cmd, tmp_file, vm_inv)
os.system('rm -f %s' % tmp_file)
vm.destroy()
test_util.test_pass('ZStack upgrade Test Success')
#Will be called only if exception happens in test().
def error_cleanup():
os.system('rm -f %s' % tmp_file)
test_lib.lib_error_cleanup(test_obj_dict)
| 34.345455 | 107 | 0.719428 |
d4fe37f1f7ba1233c145b43420b8ab947e3b7681 | 29,722 | py | Python | pysnmp/CISCO-GSLB-SYSTEM-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/CISCO-GSLB-SYSTEM-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/CISCO-GSLB-SYSTEM-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module CISCO-GSLB-SYSTEM-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-GSLB-SYSTEM-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:42:14 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueSizeConstraint, ConstraintsIntersection, ConstraintsUnion, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueRangeConstraint")
CiscoGslbPeerStatus, CiscoGslbNodeServices = mibBuilder.importSymbols("CISCO-GSLB-TC-MIB", "CiscoGslbPeerStatus", "CiscoGslbNodeServices")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
InterfaceIndexOrZero, = mibBuilder.importSymbols("IF-MIB", "InterfaceIndexOrZero")
InetAddressType, InetAddress, InetAddressDNS = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType", "InetAddress", "InetAddressDNS")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
sysName, = mibBuilder.importSymbols("SNMPv2-MIB", "sysName")
iso, NotificationType, Counter32, Counter64, ObjectIdentity, ModuleIdentity, IpAddress, Bits, TimeTicks, Unsigned32, MibScalar, MibTable, MibTableRow, MibTableColumn, Integer32, Gauge32, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "NotificationType", "Counter32", "Counter64", "ObjectIdentity", "ModuleIdentity", "IpAddress", "Bits", "TimeTicks", "Unsigned32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Integer32", "Gauge32", "MibIdentifier")
TruthValue, TextualConvention, StorageType, RowStatus, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "TextualConvention", "StorageType", "RowStatus", "DisplayString")
ciscoGslbSystemMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 589))
ciscoGslbSystemMIB.setRevisions(('2011-06-06 00:00', '2006-12-04 00:00',))
if mibBuilder.loadTexts: ciscoGslbSystemMIB.setLastUpdated('201106060000Z')
if mibBuilder.loadTexts: ciscoGslbSystemMIB.setOrganization('Cisco Systems, Inc.')
ciscoGslbSystemMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 589, 0))
ciscoGslbSystemMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 589, 1))
ciscoGslbSystemMIBConform = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 589, 2))
cgsNotifControl = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 1))
cgsNotifObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 2))
cgsGeneral = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 3))
cgsPeer = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 4))
cgsProxZoneStats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5))
cgsResources = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6))
cgsNodeService = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 3, 1), CiscoGslbNodeServices()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cgsNodeService.setStatus('current')
cgsNodeCommIfName = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 3, 2), SnmpAdminString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cgsNodeCommIfName.setStatus('current')
cgsNodeCommIfIndex = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 3, 3), InterfaceIndexOrZero()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsNodeCommIfIndex.setStatus('current')
cgsNodeStatus = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 3, 4), CiscoGslbPeerStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsNodeStatus.setStatus('current')
cgsNodeLocation = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 3, 5), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cgsNodeLocation.setStatus('current')
cgsNodeRegion = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 3, 6), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsNodeRegion.setStatus('current')
cgsPeerTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 4, 1), )
if mibBuilder.loadTexts: cgsPeerTable.setStatus('current')
cgsPeerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 4, 1, 1), ).setIndexNames((0, "CISCO-GSLB-SYSTEM-MIB", "cgsPeerAddressType"), (0, "CISCO-GSLB-SYSTEM-MIB", "cgsPeerAddress"))
if mibBuilder.loadTexts: cgsPeerEntry.setStatus('current')
cgsPeerAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 4, 1, 1, 1), InetAddressType())
if mibBuilder.loadTexts: cgsPeerAddressType.setStatus('current')
cgsPeerAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 4, 1, 1, 2), InetAddress().subtype(subtypeSpec=ValueSizeConstraint(1, 16)))
if mibBuilder.loadTexts: cgsPeerAddress.setStatus('current')
cgsPeerLocation = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 4, 1, 1, 3), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cgsPeerLocation.setStatus('current')
cgsPeerDnsName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 4, 1, 1, 4), InetAddressDNS()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsPeerDnsName.setStatus('current')
cgsPeerService = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 4, 1, 1, 5), CiscoGslbNodeServices()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsPeerService.setStatus('current')
cgsPeerStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 4, 1, 1, 6), CiscoGslbPeerStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsPeerStatus.setStatus('current')
cgsPeerVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 4, 1, 1, 7), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsPeerVersion.setStatus('current')
cgsProxZoneTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1), )
if mibBuilder.loadTexts: cgsProxZoneTable.setStatus('current')
cgsProxZoneEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1), ).setIndexNames((0, "CISCO-GSLB-SYSTEM-MIB", "cgsProxZoneName"))
if mibBuilder.loadTexts: cgsProxZoneEntry.setStatus('current')
cgsProxZoneName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 80)))
if mibBuilder.loadTexts: cgsProxZoneName.setStatus('current')
cgsProxPrimaryAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 2), InetAddressType().clone('ipv4')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsProxPrimaryAddressType.setStatus('current')
cgsProxPrimaryAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 3), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsProxPrimaryAddress.setStatus('current')
cgsProxSecondaryAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 4), InetAddressType().clone('ipv4')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsProxSecondaryAddressType.setStatus('current')
cgsProxSecondaryAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 5), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsProxSecondaryAddress.setStatus('current')
cgsProxEchoSentReqs = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 6), Counter32()).setUnits('requests').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsProxEchoSentReqs.setStatus('current')
cgsProxEchoRcvdResps = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 7), Counter32()).setUnits('responses').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsProxEchoRcvdResps.setStatus('current')
cgsProxSentMeasureReqs = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 8), Counter32()).setUnits('requests').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsProxSentMeasureReqs.setStatus('current')
cgsProxRcvdMeasureResps = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 9), Counter32()).setUnits('responses').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsProxRcvdMeasureResps.setStatus('current')
cgsProxTotalSentReqs = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 10), Counter32()).setUnits('requests').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsProxTotalSentReqs.setStatus('current')
cgsProxTotalRcvdResps = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 11), Counter32()).setUnits('responses').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsProxTotalRcvdResps.setStatus('current')
cgsProxSendRate = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 12), Unsigned32()).setUnits('rate per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsProxSendRate.setStatus('current')
cgsProxRcvdRate = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 13), Unsigned32()).setUnits('rate per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsProxRcvdRate.setStatus('current')
cgsProxPeakSendRate = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 14), Unsigned32()).setUnits('rate per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsProxPeakSendRate.setStatus('current')
cgsProxPeakRcvdRate = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 15), Unsigned32()).setUnits('rate per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsProxPeakRcvdRate.setStatus('current')
cgsProxStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 16), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsProxStorageType.setStatus('current')
cgsProxRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 5, 1, 1, 17), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsProxRowStatus.setStatus('current')
cgsRegionTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 1), )
if mibBuilder.loadTexts: cgsRegionTable.setStatus('deprecated')
cgsRegionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 1, 1), ).setIndexNames((0, "CISCO-GSLB-SYSTEM-MIB", "cgsRegionName"))
if mibBuilder.loadTexts: cgsRegionEntry.setStatus('deprecated')
cgsRegionName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 1, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 80)))
if mibBuilder.loadTexts: cgsRegionName.setStatus('deprecated')
cgsRegionComments = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 1, 1, 2), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsRegionComments.setStatus('deprecated')
cgsRegionStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 1, 1, 3), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsRegionStorageType.setStatus('deprecated')
cgsRegionRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 1, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsRegionRowStatus.setStatus('deprecated')
cgsLocationTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 2), )
if mibBuilder.loadTexts: cgsLocationTable.setStatus('current')
cgsLocationEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 2, 1), ).setIndexNames((0, "CISCO-GSLB-SYSTEM-MIB", "cgsLocationName"))
if mibBuilder.loadTexts: cgsLocationEntry.setStatus('current')
cgsLocationName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 2, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 80)))
if mibBuilder.loadTexts: cgsLocationName.setStatus('current')
cgsLocationRegion = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 2, 1, 2), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsLocationRegion.setStatus('current')
cgsLocationZone = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 2, 1, 3), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsLocationZone.setStatus('current')
cgsLocationComments = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 2, 1, 4), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsLocationComments.setStatus('current')
cgsLocationStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 2, 1, 5), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsLocationStorageType.setStatus('current')
cgsLocationRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 2, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsLocationRowStatus.setStatus('current')
cgsRegionIdTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3), )
if mibBuilder.loadTexts: cgsRegionIdTable.setStatus('current')
cgsRegionIdEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1), ).setIndexNames((0, "CISCO-GSLB-SYSTEM-MIB", "cgsRegionId"))
if mibBuilder.loadTexts: cgsRegionIdEntry.setStatus('current')
cgsRegionId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cgsRegionId.setStatus('current')
cgsRegionIdName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 2), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsRegionIdName.setStatus('current')
cgsRegionIdComments = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 3), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsRegionIdComments.setStatus('current')
cgsReqCountPerRegionId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 4), Counter32()).setUnits('number of hits').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsReqCountPerRegionId.setStatus('current')
cgsReqCountRatePerRegionId1Min = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 5), Gauge32()).setUnits('hits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsReqCountRatePerRegionId1Min.setStatus('current')
cgsReqCountRatePerRegionId5Min = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 6), Gauge32()).setUnits('hits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsReqCountRatePerRegionId5Min.setStatus('current')
cgsReqCountRatePerRegionId30Min = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 7), Gauge32()).setUnits('hits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsReqCountRatePerRegionId30Min.setStatus('current')
cgsReqCountRatePerRegionId4Hr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 8), Gauge32()).setUnits('hits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsReqCountRatePerRegionId4Hr.setStatus('current')
cgsAnswerCountPerRegionId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 9), Counter32()).setUnits('number of hits').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsAnswerCountPerRegionId.setStatus('current')
cgsAnswerCountRatePerRegionId1Min = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 10), Gauge32()).setUnits('hits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsAnswerCountRatePerRegionId1Min.setStatus('current')
cgsAnswerCountRatePerRegionId5Min = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 11), Gauge32()).setUnits('hits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsAnswerCountRatePerRegionId5Min.setStatus('current')
cgsAnswerCountRatePerRegionId30Min = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 12), Gauge32()).setUnits('hits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsAnswerCountRatePerRegionId30Min.setStatus('current')
cgsAnswerCountRatePerRegionId4Hr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 13), Gauge32()).setUnits('hits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsAnswerCountRatePerRegionId4Hr.setStatus('current')
cgsUnAnswerCountPerRegionId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 14), Counter32()).setUnits('number of hits').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsUnAnswerCountPerRegionId.setStatus('current')
cgsUnAnswerCountRatePerRegionId1Min = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 15), Gauge32()).setUnits('hits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsUnAnswerCountRatePerRegionId1Min.setStatus('current')
cgsUnAnswerCountRatePerRegionId5Min = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 16), Gauge32()).setUnits('hits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsUnAnswerCountRatePerRegionId5Min.setStatus('current')
cgsUnAnswerCountRatePerRegionId30Min = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 17), Gauge32()).setUnits('hits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsUnAnswerCountRatePerRegionId30Min.setStatus('current')
cgsUnAnswerCountRatePerRegionId4Hr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 18), Gauge32()).setUnits('hits per second').setMaxAccess("readonly")
if mibBuilder.loadTexts: cgsUnAnswerCountRatePerRegionId4Hr.setStatus('current')
cgsRegionIdStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 19), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsRegionIdStorageType.setStatus('current')
cgsRegionIdRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 6, 3, 1, 20), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cgsRegionIdRowStatus.setStatus('current')
cgsPeerEventNotifEnable = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 1, 1), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cgsPeerEventNotifEnable.setStatus('current')
cgsPeerPrevStatus = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 589, 1, 2, 1), CiscoGslbPeerStatus()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: cgsPeerPrevStatus.setStatus('current')
ciscoGslbSystemPeerEventStatus = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 589, 0, 1)).setObjects(("SNMPv2-MIB", "sysName"), ("CISCO-GSLB-SYSTEM-MIB", "cgsPeerDnsName"), ("CISCO-GSLB-SYSTEM-MIB", "cgsPeerService"), ("CISCO-GSLB-SYSTEM-MIB", "cgsPeerPrevStatus"), ("CISCO-GSLB-SYSTEM-MIB", "cgsPeerStatus"))
if mibBuilder.loadTexts: ciscoGslbSystemPeerEventStatus.setStatus('current')
ciscoGslbSystemMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 589, 2, 1))
ciscoGslbSystemMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 589, 2, 2))
ciscoGslbSystemMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 589, 2, 1, 1)).setObjects(("CISCO-GSLB-SYSTEM-MIB", "ciscoGslbSystemGeneralGroup"), ("CISCO-GSLB-SYSTEM-MIB", "ciscoGslbSystemPeerGroup"), ("CISCO-GSLB-SYSTEM-MIB", "ciscoGslbSystemProxZoneGroup"), ("CISCO-GSLB-SYSTEM-MIB", "ciscoGslbSystemResourceGroup"), ("CISCO-GSLB-SYSTEM-MIB", "ciscoGslbSystemNotifControlGroup"), ("CISCO-GSLB-SYSTEM-MIB", "ciscoGslbSystemNotifObjectsGroup"), ("CISCO-GSLB-SYSTEM-MIB", "ciscoGslbSystemNotifGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoGslbSystemMIBCompliance = ciscoGslbSystemMIBCompliance.setStatus('deprecated')
ciscoGslbSystemMIBComplianceRev1 = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 589, 2, 1, 2)).setObjects(("CISCO-GSLB-SYSTEM-MIB", "ciscoGslbSystemGeneralGroup"), ("CISCO-GSLB-SYSTEM-MIB", "ciscoGslbSystemPeerGroup"), ("CISCO-GSLB-SYSTEM-MIB", "ciscoGslbSystemProxZoneGroup"), ("CISCO-GSLB-SYSTEM-MIB", "ciscoGslbSystemNotifControlGroup"), ("CISCO-GSLB-SYSTEM-MIB", "ciscoGslbSystemNotifObjectsGroup"), ("CISCO-GSLB-SYSTEM-MIB", "ciscoGslbSystemNotifGroup"), ("CISCO-GSLB-SYSTEM-MIB", "ciscoGslbSystemResourceLocationGroup"), ("CISCO-GSLB-SYSTEM-MIB", "ciscoGslbSystemResourceRegionGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoGslbSystemMIBComplianceRev1 = ciscoGslbSystemMIBComplianceRev1.setStatus('current')
ciscoGslbSystemGeneralGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 589, 2, 2, 1)).setObjects(("CISCO-GSLB-SYSTEM-MIB", "cgsNodeService"), ("CISCO-GSLB-SYSTEM-MIB", "cgsNodeCommIfName"), ("CISCO-GSLB-SYSTEM-MIB", "cgsNodeCommIfIndex"), ("CISCO-GSLB-SYSTEM-MIB", "cgsNodeStatus"), ("CISCO-GSLB-SYSTEM-MIB", "cgsNodeLocation"), ("CISCO-GSLB-SYSTEM-MIB", "cgsNodeRegion"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoGslbSystemGeneralGroup = ciscoGslbSystemGeneralGroup.setStatus('current')
ciscoGslbSystemPeerGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 589, 2, 2, 2)).setObjects(("CISCO-GSLB-SYSTEM-MIB", "cgsPeerLocation"), ("CISCO-GSLB-SYSTEM-MIB", "cgsPeerDnsName"), ("CISCO-GSLB-SYSTEM-MIB", "cgsPeerService"), ("CISCO-GSLB-SYSTEM-MIB", "cgsPeerStatus"), ("CISCO-GSLB-SYSTEM-MIB", "cgsPeerVersion"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoGslbSystemPeerGroup = ciscoGslbSystemPeerGroup.setStatus('current')
ciscoGslbSystemProxZoneGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 589, 2, 2, 3)).setObjects(("CISCO-GSLB-SYSTEM-MIB", "cgsProxPrimaryAddressType"), ("CISCO-GSLB-SYSTEM-MIB", "cgsProxPrimaryAddress"), ("CISCO-GSLB-SYSTEM-MIB", "cgsProxSecondaryAddressType"), ("CISCO-GSLB-SYSTEM-MIB", "cgsProxSecondaryAddress"), ("CISCO-GSLB-SYSTEM-MIB", "cgsProxEchoSentReqs"), ("CISCO-GSLB-SYSTEM-MIB", "cgsProxEchoRcvdResps"), ("CISCO-GSLB-SYSTEM-MIB", "cgsProxSentMeasureReqs"), ("CISCO-GSLB-SYSTEM-MIB", "cgsProxRcvdMeasureResps"), ("CISCO-GSLB-SYSTEM-MIB", "cgsProxTotalSentReqs"), ("CISCO-GSLB-SYSTEM-MIB", "cgsProxTotalRcvdResps"), ("CISCO-GSLB-SYSTEM-MIB", "cgsProxSendRate"), ("CISCO-GSLB-SYSTEM-MIB", "cgsProxRcvdRate"), ("CISCO-GSLB-SYSTEM-MIB", "cgsProxPeakSendRate"), ("CISCO-GSLB-SYSTEM-MIB", "cgsProxPeakRcvdRate"), ("CISCO-GSLB-SYSTEM-MIB", "cgsProxStorageType"), ("CISCO-GSLB-SYSTEM-MIB", "cgsProxRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoGslbSystemProxZoneGroup = ciscoGslbSystemProxZoneGroup.setStatus('current')
ciscoGslbSystemResourceGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 589, 2, 2, 4)).setObjects(("CISCO-GSLB-SYSTEM-MIB", "cgsRegionComments"), ("CISCO-GSLB-SYSTEM-MIB", "cgsRegionStorageType"), ("CISCO-GSLB-SYSTEM-MIB", "cgsRegionRowStatus"), ("CISCO-GSLB-SYSTEM-MIB", "cgsLocationRegion"), ("CISCO-GSLB-SYSTEM-MIB", "cgsLocationZone"), ("CISCO-GSLB-SYSTEM-MIB", "cgsLocationComments"), ("CISCO-GSLB-SYSTEM-MIB", "cgsLocationStorageType"), ("CISCO-GSLB-SYSTEM-MIB", "cgsLocationRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoGslbSystemResourceGroup = ciscoGslbSystemResourceGroup.setStatus('deprecated')
ciscoGslbSystemNotifControlGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 589, 2, 2, 5)).setObjects(("CISCO-GSLB-SYSTEM-MIB", "cgsPeerEventNotifEnable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoGslbSystemNotifControlGroup = ciscoGslbSystemNotifControlGroup.setStatus('current')
ciscoGslbSystemNotifObjectsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 589, 2, 2, 6)).setObjects(("CISCO-GSLB-SYSTEM-MIB", "cgsPeerPrevStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoGslbSystemNotifObjectsGroup = ciscoGslbSystemNotifObjectsGroup.setStatus('current')
ciscoGslbSystemNotifGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 9, 589, 2, 2, 7)).setObjects(("CISCO-GSLB-SYSTEM-MIB", "ciscoGslbSystemPeerEventStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoGslbSystemNotifGroup = ciscoGslbSystemNotifGroup.setStatus('current')
ciscoGslbSystemResourceLocationGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 589, 2, 2, 8)).setObjects(("CISCO-GSLB-SYSTEM-MIB", "cgsLocationRegion"), ("CISCO-GSLB-SYSTEM-MIB", "cgsLocationZone"), ("CISCO-GSLB-SYSTEM-MIB", "cgsLocationComments"), ("CISCO-GSLB-SYSTEM-MIB", "cgsLocationStorageType"), ("CISCO-GSLB-SYSTEM-MIB", "cgsLocationRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoGslbSystemResourceLocationGroup = ciscoGslbSystemResourceLocationGroup.setStatus('current')
ciscoGslbSystemResourceRegionGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 589, 2, 2, 9)).setObjects(("CISCO-GSLB-SYSTEM-MIB", "cgsRegionIdName"), ("CISCO-GSLB-SYSTEM-MIB", "cgsRegionIdComments"), ("CISCO-GSLB-SYSTEM-MIB", "cgsReqCountPerRegionId"), ("CISCO-GSLB-SYSTEM-MIB", "cgsReqCountRatePerRegionId1Min"), ("CISCO-GSLB-SYSTEM-MIB", "cgsReqCountRatePerRegionId5Min"), ("CISCO-GSLB-SYSTEM-MIB", "cgsReqCountRatePerRegionId30Min"), ("CISCO-GSLB-SYSTEM-MIB", "cgsReqCountRatePerRegionId4Hr"), ("CISCO-GSLB-SYSTEM-MIB", "cgsAnswerCountPerRegionId"), ("CISCO-GSLB-SYSTEM-MIB", "cgsAnswerCountRatePerRegionId1Min"), ("CISCO-GSLB-SYSTEM-MIB", "cgsAnswerCountRatePerRegionId5Min"), ("CISCO-GSLB-SYSTEM-MIB", "cgsAnswerCountRatePerRegionId30Min"), ("CISCO-GSLB-SYSTEM-MIB", "cgsAnswerCountRatePerRegionId4Hr"), ("CISCO-GSLB-SYSTEM-MIB", "cgsUnAnswerCountPerRegionId"), ("CISCO-GSLB-SYSTEM-MIB", "cgsUnAnswerCountRatePerRegionId1Min"), ("CISCO-GSLB-SYSTEM-MIB", "cgsUnAnswerCountRatePerRegionId5Min"), ("CISCO-GSLB-SYSTEM-MIB", "cgsUnAnswerCountRatePerRegionId30Min"), ("CISCO-GSLB-SYSTEM-MIB", "cgsUnAnswerCountRatePerRegionId4Hr"), ("CISCO-GSLB-SYSTEM-MIB", "cgsRegionIdStorageType"), ("CISCO-GSLB-SYSTEM-MIB", "cgsRegionIdRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoGslbSystemResourceRegionGroup = ciscoGslbSystemResourceRegionGroup.setStatus('current')
mibBuilder.exportSymbols("CISCO-GSLB-SYSTEM-MIB", cgsAnswerCountRatePerRegionId5Min=cgsAnswerCountRatePerRegionId5Min, cgsLocationComments=cgsLocationComments, PYSNMP_MODULE_ID=ciscoGslbSystemMIB, cgsReqCountRatePerRegionId30Min=cgsReqCountRatePerRegionId30Min, cgsResources=cgsResources, cgsProxRcvdMeasureResps=cgsProxRcvdMeasureResps, cgsLocationName=cgsLocationName, cgsProxSendRate=cgsProxSendRate, cgsPeer=cgsPeer, ciscoGslbSystemResourceRegionGroup=ciscoGslbSystemResourceRegionGroup, cgsRegionIdName=cgsRegionIdName, cgsUnAnswerCountRatePerRegionId4Hr=cgsUnAnswerCountRatePerRegionId4Hr, ciscoGslbSystemResourceGroup=ciscoGslbSystemResourceGroup, cgsRegionRowStatus=cgsRegionRowStatus, cgsPeerEventNotifEnable=cgsPeerEventNotifEnable, cgsRegionName=cgsRegionName, ciscoGslbSystemMIBCompliance=ciscoGslbSystemMIBCompliance, cgsProxEchoSentReqs=cgsProxEchoSentReqs, cgsAnswerCountRatePerRegionId4Hr=cgsAnswerCountRatePerRegionId4Hr, cgsPeerDnsName=cgsPeerDnsName, cgsProxPeakSendRate=cgsProxPeakSendRate, cgsReqCountRatePerRegionId5Min=cgsReqCountRatePerRegionId5Min, ciscoGslbSystemMIBNotifs=ciscoGslbSystemMIBNotifs, cgsRegionIdEntry=cgsRegionIdEntry, cgsRegionIdTable=cgsRegionIdTable, cgsPeerPrevStatus=cgsPeerPrevStatus, cgsNodeStatus=cgsNodeStatus, cgsAnswerCountPerRegionId=cgsAnswerCountPerRegionId, cgsProxZoneStats=cgsProxZoneStats, cgsPeerService=cgsPeerService, cgsGeneral=cgsGeneral, cgsNodeLocation=cgsNodeLocation, cgsNodeCommIfName=cgsNodeCommIfName, cgsProxZoneTable=cgsProxZoneTable, cgsPeerTable=cgsPeerTable, cgsProxPeakRcvdRate=cgsProxPeakRcvdRate, cgsNodeRegion=cgsNodeRegion, cgsLocationTable=cgsLocationTable, cgsUnAnswerCountPerRegionId=cgsUnAnswerCountPerRegionId, ciscoGslbSystemMIB=ciscoGslbSystemMIB, cgsProxStorageType=cgsProxStorageType, cgsProxRowStatus=cgsProxRowStatus, ciscoGslbSystemGeneralGroup=ciscoGslbSystemGeneralGroup, cgsProxSentMeasureReqs=cgsProxSentMeasureReqs, cgsUnAnswerCountRatePerRegionId1Min=cgsUnAnswerCountRatePerRegionId1Min, cgsProxZoneName=cgsProxZoneName, cgsPeerStatus=cgsPeerStatus, cgsNotifControl=cgsNotifControl, cgsReqCountRatePerRegionId4Hr=cgsReqCountRatePerRegionId4Hr, cgsRegionIdRowStatus=cgsRegionIdRowStatus, cgsProxEchoRcvdResps=cgsProxEchoRcvdResps, ciscoGslbSystemMIBComplianceRev1=ciscoGslbSystemMIBComplianceRev1, cgsProxPrimaryAddress=cgsProxPrimaryAddress, ciscoGslbSystemMIBConform=ciscoGslbSystemMIBConform, cgsLocationRegion=cgsLocationRegion, cgsPeerEntry=cgsPeerEntry, cgsLocationEntry=cgsLocationEntry, cgsLocationRowStatus=cgsLocationRowStatus, ciscoGslbSystemPeerGroup=ciscoGslbSystemPeerGroup, ciscoGslbSystemNotifObjectsGroup=ciscoGslbSystemNotifObjectsGroup, cgsProxSecondaryAddress=cgsProxSecondaryAddress, cgsAnswerCountRatePerRegionId1Min=cgsAnswerCountRatePerRegionId1Min, ciscoGslbSystemPeerEventStatus=ciscoGslbSystemPeerEventStatus, cgsPeerAddress=cgsPeerAddress, cgsRegionTable=cgsRegionTable, cgsPeerVersion=cgsPeerVersion, cgsPeerAddressType=cgsPeerAddressType, cgsRegionId=cgsRegionId, cgsProxTotalRcvdResps=cgsProxTotalRcvdResps, cgsProxRcvdRate=cgsProxRcvdRate, ciscoGslbSystemNotifControlGroup=ciscoGslbSystemNotifControlGroup, cgsRegionEntry=cgsRegionEntry, ciscoGslbSystemMIBGroups=ciscoGslbSystemMIBGroups, ciscoGslbSystemProxZoneGroup=ciscoGslbSystemProxZoneGroup, cgsUnAnswerCountRatePerRegionId30Min=cgsUnAnswerCountRatePerRegionId30Min, ciscoGslbSystemMIBObjects=ciscoGslbSystemMIBObjects, cgsProxZoneEntry=cgsProxZoneEntry, cgsUnAnswerCountRatePerRegionId5Min=cgsUnAnswerCountRatePerRegionId5Min, cgsPeerLocation=cgsPeerLocation, ciscoGslbSystemMIBCompliances=ciscoGslbSystemMIBCompliances, cgsNodeService=cgsNodeService, cgsRegionComments=cgsRegionComments, cgsProxPrimaryAddressType=cgsProxPrimaryAddressType, cgsNodeCommIfIndex=cgsNodeCommIfIndex, ciscoGslbSystemNotifGroup=ciscoGslbSystemNotifGroup, cgsProxSecondaryAddressType=cgsProxSecondaryAddressType, cgsRegionStorageType=cgsRegionStorageType, cgsReqCountRatePerRegionId1Min=cgsReqCountRatePerRegionId1Min, cgsProxTotalSentReqs=cgsProxTotalSentReqs, cgsReqCountPerRegionId=cgsReqCountPerRegionId, cgsNotifObjects=cgsNotifObjects, ciscoGslbSystemResourceLocationGroup=ciscoGslbSystemResourceLocationGroup, cgsLocationZone=cgsLocationZone, cgsLocationStorageType=cgsLocationStorageType, cgsRegionIdComments=cgsRegionIdComments, cgsAnswerCountRatePerRegionId30Min=cgsAnswerCountRatePerRegionId30Min, cgsRegionIdStorageType=cgsRegionIdStorageType)
| 136.967742 | 4,420 | 0.763811 |
fa02ec48c79fd33e60017502c3eecba6a950fe2d | 2,879 | py | Python | machine_learning/project5/binary_classification.py | buivn/Learning | 27cd468512c9d6640b5dcfecb00cd76ee6686847 | [
"BSD-2-Clause"
] | null | null | null | machine_learning/project5/binary_classification.py | buivn/Learning | 27cd468512c9d6640b5dcfecb00cd76ee6686847 | [
"BSD-2-Clause"
] | null | null | null | machine_learning/project5/binary_classification.py | buivn/Learning | 27cd468512c9d6640b5dcfecb00cd76ee6686847 | [
"BSD-2-Clause"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
import helpers
def svm_train_brute(training_data):
# w for data 1,2,3,4
w = np.array([[1,0], [0,1], [1,-1], [1,1]])
# w1 = np.array([1,0]) # vertical line
# for data 2
# w2 = np.array([0,1]) # horizontal line
# for data 3
# w3 = np.array([1,-1])
# for data 4
# w4 = np.array([1,1])
b = np.array([-10,-9,-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8,9,10])
sup_vector_neg = np.zeros
margin = np.zeros((4,21)).astype(int)
for i in range(len(w)):
for j in range(len(b)):
margin[i][j] = compute_margin(training_data, w[i],b[j])
return 0
def distance_point_to_hyperplan(pt, w, b):
distance = 0
# assuming the line is in the form: w1x + w2y + b = 0
numerator = np.absolute(w[0]*pt[0] + w[1]*pt[1] + b)
denominator = np.sqrt(np.square(w[0])+np.square(w[1]))
distance = numerator/denominator
return distance
def compute_margin(data, w, b):
# the margin from the point to the boundary
inputData = np.array([0,0])
sup_vector_pos = np.array([0,0])
sup_vector_neg = np.array([0,0])
min_negative = 0
min_positive = 0
check_side = 0
for i in range(len(data)):
inputData[0] = data[i][0]
inputData[1] = data[i][1]
distance = distance_point_to_hyperplan(inputData, w, b)
check_side = w[0]*data[i][0]+w[1]*data[i][1]+b
if (data[i][2] == 1):
if (check_side < 0):
print("The boundary does not classified the data")
return 0
if ((distance < min_positive) or (min_positive==0)):
min_positive = distance
sup_vector_pos[0] = data[i][0]
sup_vector_pos[1] = data[i][1]
if (data[i][2] == -1):
if (check_side > 0):
print("The boundary does not classified the data")
return 0
if ((distance < min_negative) or (min_negative == 0)):
min_negative = distance
sup_vector_neg[0] = data[i][0]
sup_vector_neg[1] = data[i][1]
return min_negative+min_positive, sup_vector_neg, sup_vector_pos
def svm_test_brute(w,b,x):
classified = w[0]*x[0] + w[1]x[1] + b
if classified > 0:
return 1
if classified < 0:
return -1
if __name__ == '__main__':
# pt = np.array([0,-1])
# w = np.array([1,1])
# b = 1
# dis = distance_point_to_hyperplan(pt,w,b)
# print(dis)
data1 = helpers.generate_training_data_binary(1)
# for data 1 - > w = [1,0] # vertical line
# for data 2 - > w = [0,1] # horizontal line
# for data 3 - > w = [1,-1]
# for data 4 - > w = [1,1]
draw = helpers.plot_training_data_binary(data1)
| 33.091954 | 74 | 0.533866 |
2281d58cf1075a6624a122f4243fa40f7605c415 | 2,250 | py | Python | finder.py | Andrey9kin/s3-access-logs-parser | 0e1df73a0602c0e2191f0cc1fb3f3767bf996bf2 | [
"Apache-2.0"
] | 1 | 2021-01-17T14:20:16.000Z | 2021-01-17T14:20:16.000Z | finder.py | Andrey9kin/s3-access-logs-parser | 0e1df73a0602c0e2191f0cc1fb3f3767bf996bf2 | [
"Apache-2.0"
] | null | null | null | finder.py | Andrey9kin/s3-access-logs-parser | 0e1df73a0602c0e2191f0cc1fb3f3767bf996bf2 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import os
import re
import sys
import argparse
import logging
from boto3 import resource, client
from botocore.exceptions import ClientError
import pandas
def get_list_of_s3_buckets():
result = dict()
s3 = resource('s3')
s3_client = client('s3')
logging.info('Listing all buckets...')
# pylint: disable=E1101
buckets = s3.buckets.all()
for bucket in buckets:
try:
logging.info('Checking tags for bucket {}'.format(bucket.name))
response = s3_client.get_bucket_tagging(Bucket=bucket.name)
result[bucket.name] = response['TagSet']
except ClientError:
logging.debug(bucket.name, "does not have tags, adding tags")
result[bucket.name] = list()
return result
def buckets_list_to_dataframe(buckets_list):
data = list()
columns = list()
for bucket_name in buckets_list.keys():
bucket = dict()
bucket['BucketName'] = bucket_name
for tag in buckets_list[bucket_name]:
bucket[tag['Key']] = tag['Value']
data.append(bucket)
columns = list(set().union(columns, bucket.keys()))
df = pandas.DataFrame(data, columns=columns)
# Sort columns by name
df = df.reindex(sorted(df.columns), axis=1)
return df
def parse_args(args):
parser = argparse.ArgumentParser(description='Discover buckets and tags that they have')
parser.add_argument('--debug', action='store_true', default=False,
help="enable debug printouts")
return parser.parse_args()
def main():
args = parse_args(sys.argv[1:])
logging_level = logging.INFO if not args.debug else logging.DEBUG
logging.basicConfig(format='%(levelname)s: %(message)s', level=logging_level)
logging.info('Collecting S3 tags data...')
buckets = get_list_of_s3_buckets()
logging.info('Collected tags from {} buckets'.format(len(buckets)))
logging.info('Parsing tags data...')
df = buckets_list_to_dataframe(buckets)
report_filename = './s3-buckets.xls'
logging.info('Writing report to {}...'.format(report_filename))
df.to_excel(report_filename, index=False)
logging.info('Done')
if __name__ == '__main__':
main() | 28.481013 | 92 | 0.662667 |
3d5cd1247e0a59eb6f32bad6ce48c8aac4ffaa88 | 7,397 | py | Python | tests/search/test_indexes.py | mattrobenolt/warehouse | 3ae010f8bcac6f8fd948096bb8925353c2e00ff4 | [
"Apache-2.0"
] | null | null | null | tests/search/test_indexes.py | mattrobenolt/warehouse | 3ae010f8bcac6f8fd948096bb8925353c2e00ff4 | [
"Apache-2.0"
] | null | null | null | tests/search/test_indexes.py | mattrobenolt/warehouse | 3ae010f8bcac6f8fd948096bb8925353c2e00ff4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import os
import pretend
import pytest
from elasticsearch import TransportError
from warehouse.search import indexes
from warehouse.search.indexes import Index, BaseMapping
class TestIndex:
def test_reindex(self, monkeypatch):
urandom = pretend.call_recorder(lambda s: b"0" * s)
monkeypatch.setattr(os, "urandom", urandom)
models = pretend.stub()
config = pretend.stub(hosts=[], get=lambda *a: {})
index = Index(models, config)
index.es = pretend.stub(
indices=pretend.stub(
create=pretend.call_recorder(lambda idx, body: None),
),
)
index.types = {
"fake": pretend.stub(
_type="fake",
get_mapping=pretend.call_recorder(lambda: {"foo": "bar"}),
index_all=pretend.call_recorder(lambda index: None),
),
}
index.update_alias = pretend.call_recorder(lambda *a, **kw: None)
index.reindex()
assert index.es.indices.create.calls == [
pretend.call(
"warehouse1e4a1b03",
{"mappings": {"fake": {"foo": "bar"}}},
),
]
assert index.update_alias.calls == [
pretend.call("warehouse", "warehouse1e4a1b03", keep_old=False),
]
assert index.types["fake"].index_all.calls == [
pretend.call(index="warehouse1e4a1b03"),
]
def test_reindex_no_alias(self, monkeypatch):
urandom = pretend.call_recorder(lambda s: b"0" * s)
monkeypatch.setattr(os, "urandom", urandom)
models = pretend.stub()
config = pretend.stub(hosts=[], get=lambda *a: {})
index = Index(models, config)
index.es = pretend.stub(
indices=pretend.stub(
create=pretend.call_recorder(lambda idx, body: None),
),
)
index.types = {
"fake": pretend.stub(
_type="fake",
get_mapping=pretend.call_recorder(lambda: {"foo": "bar"}),
index_all=pretend.call_recorder(lambda index: None),
),
}
index.update_alias = pretend.call_recorder(lambda *a, **kw: None)
index.reindex(alias=False)
assert index.es.indices.create.calls == [
pretend.call(
"warehouse1e4a1b03",
{"mappings": {"fake": {"foo": "bar"}}},
),
]
assert index.update_alias.calls == []
assert index.types["fake"].index_all.calls == [
pretend.call(index="warehouse1e4a1b03"),
]
def test_update_alias(self):
models = pretend.stub()
config = pretend.stub(hosts=[], get=lambda *a: {})
index = Index(models, config)
index.es = pretend.stub(
indices=pretend.stub(
get_alias=pretend.call_recorder(
lambda idx: {"warehouse1234567": "warehouse"},
),
update_aliases=pretend.call_recorder(lambda actions: None),
delete=pretend.call_recorder(lambda idx: None)
),
)
index.update_alias("warehouse", "warehouse7654321")
assert index.es.indices.get_alias.calls == [pretend.call("warehouse")]
assert index.es.indices.update_aliases.calls == [
pretend.call({"actions": [
{
"remove": {
"index": "warehouse1234567",
"alias": "warehouse",
},
},
{"add": {"index": "warehouse7654321", "alias": "warehouse"}}
]}),
]
assert index.es.indices.delete.calls == [
pretend.call("warehouse1234567"),
]
def test_update_alias_no_old_index(self):
models = pretend.stub()
config = pretend.stub(hosts=[], get=lambda *a: {})
def _get_alias(idx):
raise TransportError(404, "Fake 404")
index = Index(models, config)
index.es = pretend.stub(
indices=pretend.stub(
get_alias=pretend.call_recorder(_get_alias),
update_aliases=pretend.call_recorder(lambda actions: None),
delete=pretend.call_recorder(lambda idx: None)
),
)
index.update_alias("warehouse", "warehouse7654321")
assert index.es.indices.get_alias.calls == [pretend.call("warehouse")]
assert index.es.indices.update_aliases.calls == [
pretend.call({"actions": [
{"add": {"index": "warehouse7654321", "alias": "warehouse"}}
]}),
]
assert index.es.indices.delete.calls == []
def test_update_alias_exception(self):
models = pretend.stub()
config = pretend.stub(hosts=[], get=lambda *a: {})
def _get_alias(idx):
raise TransportError(500, "Fake 500")
index = Index(models, config)
index.es = pretend.stub(
indices=pretend.stub(
get_alias=pretend.call_recorder(_get_alias),
),
)
with pytest.raises(TransportError):
index.update_alias("warehouse", "warehouse7654321")
assert index.es.indices.get_alias.calls == [pretend.call("warehouse")]
class TestBaseMapping:
def test_get_mapping(self):
bmap = BaseMapping(index=pretend.stub())
with pytest.raises(NotImplementedError):
bmap.get_mapping()
def test_get_indexable(self):
bmap = BaseMapping(index=pretend.stub())
with pytest.raises(NotImplementedError):
bmap.get_indexable()
def test_extract_id(self):
bmap = BaseMapping(index=pretend.stub())
with pytest.raises(NotImplementedError):
bmap.extract_id(None)
def test_extract_document(self):
bmap = BaseMapping(index=pretend.stub())
with pytest.raises(NotImplementedError):
bmap.extract_document(None)
def test_search(self):
bmap = BaseMapping(index=pretend.stub())
with pytest.raises(NotImplementedError):
bmap.search(None)
def test_index_all(self, monkeypatch):
bulk_index = pretend.call_recorder(lambda es, docs: None)
monkeypatch.setattr(indexes, "bulk_index", bulk_index)
index = pretend.stub(
_index="warehouse",
es=pretend.stub(),
)
bmap = BaseMapping(index=index)
bmap.get_indexable = pretend.call_recorder(lambda: [])
bmap.index_all()
assert bulk_index.calls == [pretend.call(index.es, [])]
assert bmap.get_indexable.calls == [pretend.call()]
| 32.442982 | 78 | 0.584561 |
ddd0c15e226b0436048fee4469341e3fb653c71b | 21,879 | py | Python | training/training_loop.py | benckx/stylegan3 | cafcb6e9e4f84d19e8651c6c1f8606e3ac2f7d6a | [
"BSD-Source-Code"
] | 79 | 2021-11-06T22:38:35.000Z | 2022-03-17T18:41:13.000Z | training/training_loop.py | benckx/stylegan3 | cafcb6e9e4f84d19e8651c6c1f8606e3ac2f7d6a | [
"BSD-Source-Code"
] | null | null | null | training/training_loop.py | benckx/stylegan3 | cafcb6e9e4f84d19e8651c6c1f8606e3ac2f7d6a | [
"BSD-Source-Code"
] | 5 | 2022-01-21T14:30:30.000Z | 2022-02-15T07:14:38.000Z | # Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
"""Main training loop."""
import os
import time
import copy
import json
import pickle
import psutil
import PIL.Image
import numpy as np
import torch
import dnnlib
from torch_utils import misc
from torch_utils import training_stats
from torch_utils.ops import conv2d_gradfix
from torch_utils.ops import grid_sample_gradfix
import legacy
from metrics import metric_main
#----------------------------------------------------------------------------
def setup_snapshot_image_grid(training_set, random_seed=0):
rnd = np.random.RandomState(random_seed)
gw = np.clip(7680 // training_set.image_shape[2], 7, 32)
gh = np.clip(4320 // training_set.image_shape[1], 4, 32)
# No labels => show random subset of training samples.
if not training_set.has_labels:
all_indices = list(range(len(training_set)))
rnd.shuffle(all_indices)
grid_indices = [all_indices[i % len(all_indices)] for i in range(gw * gh)]
else:
# Group training samples by label.
label_groups = dict() # label => [idx, ...]
for idx in range(len(training_set)):
label = tuple(training_set.get_details(idx).raw_label.flat[::-1])
if label not in label_groups:
label_groups[label] = []
label_groups[label].append(idx)
# Reorder.
label_order = sorted(label_groups.keys())
for label in label_order:
rnd.shuffle(label_groups[label])
# Organize into grid.
grid_indices = []
for y in range(gh):
label = label_order[y % len(label_order)]
indices = label_groups[label]
grid_indices += [indices[x % len(indices)] for x in range(gw)]
label_groups[label] = [indices[(i + gw) % len(indices)] for i in range(len(indices))]
# Load data.
images, labels = zip(*[training_set[i] for i in grid_indices])
return (gw, gh), np.stack(images), np.stack(labels)
#----------------------------------------------------------------------------
def save_image_grid(img, fname, drange, grid_size):
lo, hi = drange
img = np.asarray(img, dtype=np.float32)
img = (img - lo) * (255 / (hi - lo))
img = np.rint(img).clip(0, 255).astype(np.uint8)
gw, gh = grid_size
_N, C, H, W = img.shape
img = img.reshape([gh, gw, C, H, W])
img = img.transpose(0, 3, 1, 4, 2)
img = img.reshape([gh * H, gw * W, C])
assert C in [1, 3]
if C == 1:
PIL.Image.fromarray(img[:, :, 0], 'L').save(fname)
if C == 3:
PIL.Image.fromarray(img, 'RGB').save(fname)
#----------------------------------------------------------------------------
def training_loop(
run_dir = '.', # Output directory.
training_set_kwargs = {}, # Options for training set.
data_loader_kwargs = {}, # Options for torch.utils.data.DataLoader.
G_kwargs = {}, # Options for generator network.
D_kwargs = {}, # Options for discriminator network.
G_opt_kwargs = {}, # Options for generator optimizer.
D_opt_kwargs = {}, # Options for discriminator optimizer.
augment_kwargs = None, # Options for augmentation pipeline. None = disable.
loss_kwargs = {}, # Options for loss function.
metrics = [], # Metrics to evaluate during training.
random_seed = 0, # Global random seed.
num_gpus = 1, # Number of GPUs participating in the training.
rank = 0, # Rank of the current process in [0, num_gpus[.
batch_size = 4, # Total batch size for one training iteration. Can be larger than batch_gpu * num_gpus.
batch_gpu = 4, # Number of samples processed at a time by one GPU.
ema_kimg = 10, # Half-life of the exponential moving average (EMA) of generator weights.
ema_rampup = 0.05, # EMA ramp-up coefficient. None = no rampup.
G_reg_interval = None, # How often to perform regularization for G? None = disable lazy regularization.
D_reg_interval = 16, # How often to perform regularization for D? None = disable lazy regularization.
augment_p = 0, # Initial value of augmentation probability.
ada_target = None, # ADA target value. None = fixed p.
ada_interval = 4, # How often to perform ADA adjustment?
ada_kimg = 500, # ADA adjustment speed, measured in how many kimg it takes for p to increase/decrease by one unit.
total_kimg = 25000, # Total length of the training, measured in thousands of real images.
kimg_per_tick = 4, # Progress snapshot interval.
image_snapshot_ticks = 50, # How often to save image snapshots? None = disable.
network_snapshot_ticks = 50, # How often to save network snapshots? None = disable.
resume_pkl = None, # Network pickle to resume training from.
resume_kimg = 0, # First kimg to report when resuming training.
cudnn_benchmark = True, # Enable torch.backends.cudnn.benchmark?
abort_fn = None, # Callback function for determining whether to abort training. Must return consistent results across ranks.
progress_fn = None, # Callback function for updating training progress. Called for all ranks.
):
# Initialize.
start_time = time.time()
device = torch.device('cuda', rank)
np.random.seed(random_seed * num_gpus + rank)
torch.manual_seed(random_seed * num_gpus + rank)
torch.backends.cudnn.benchmark = cudnn_benchmark # Improves training speed.
torch.backends.cuda.matmul.allow_tf32 = False # Improves numerical accuracy.
torch.backends.cudnn.allow_tf32 = False # Improves numerical accuracy.
conv2d_gradfix.enabled = True # Improves training speed.
grid_sample_gradfix.enabled = True # Avoids errors with the augmentation pipe.
# Load training set.
if rank == 0:
print('Loading training set...')
training_set = dnnlib.util.construct_class_by_name(**training_set_kwargs) # subclass of training.dataset.Dataset
training_set_sampler = misc.InfiniteSampler(dataset=training_set, rank=rank, num_replicas=num_gpus, seed=random_seed)
training_set_iterator = iter(torch.utils.data.DataLoader(dataset=training_set, sampler=training_set_sampler, batch_size=batch_size//num_gpus, **data_loader_kwargs))
if rank == 0:
print()
print('Num images: ', len(training_set))
print('Image shape:', training_set.image_shape)
print('Label shape:', training_set.label_shape)
print()
# Construct networks.
if rank == 0:
print('Constructing networks...')
common_kwargs = dict(c_dim=training_set.label_dim, img_resolution=training_set.resolution, img_channels=training_set.num_channels)
G = dnnlib.util.construct_class_by_name(**G_kwargs, **common_kwargs).train().requires_grad_(False).to(device) # subclass of torch.nn.Module
D = dnnlib.util.construct_class_by_name(**D_kwargs, **common_kwargs).train().requires_grad_(False).to(device) # subclass of torch.nn.Module
G_ema = copy.deepcopy(G).eval()
# Resume from existing pickle.
if (resume_pkl is not None) and (rank == 0):
print(f'Resuming from "{resume_pkl}"')
with dnnlib.util.open_url(resume_pkl) as f:
resume_data = legacy.load_network_pkl(f)
for name, module in [('G', G), ('D', D), ('G_ema', G_ema)]:
misc.copy_params_and_buffers(resume_data[name], module, require_all=False)
# Print network summary tables.
if rank == 0:
z = torch.empty([batch_gpu, G.z_dim], device=device)
c = torch.empty([batch_gpu, G.c_dim], device=device)
img = misc.print_module_summary(G, [z, c])
misc.print_module_summary(D, [img, c])
# Setup augmentation.
if rank == 0:
print('Setting up augmentation...')
augment_pipe = None
ada_stats = None
if (augment_kwargs is not None) and (augment_p > 0 or ada_target is not None):
augment_pipe = dnnlib.util.construct_class_by_name(**augment_kwargs).train().requires_grad_(False).to(device) # subclass of torch.nn.Module
augment_pipe.p.copy_(torch.as_tensor(augment_p))
if ada_target is not None:
ada_stats = training_stats.Collector(regex='Loss/signs/real')
# Distribute across GPUs.
if rank == 0:
print(f'Distributing across {num_gpus} GPUs...')
for module in [G, D, G_ema, augment_pipe]:
if module is not None and num_gpus > 1:
for param in misc.params_and_buffers(module):
torch.distributed.broadcast(param, src=0)
# Setup training phases.
if rank == 0:
print('Setting up training phases...')
loss = dnnlib.util.construct_class_by_name(device=device, G=G, D=D, augment_pipe=augment_pipe, **loss_kwargs) # subclass of training.loss.Loss
phases = []
for name, module, opt_kwargs, reg_interval in [('G', G, G_opt_kwargs, G_reg_interval), ('D', D, D_opt_kwargs, D_reg_interval)]:
if reg_interval is None:
opt = dnnlib.util.construct_class_by_name(params=module.parameters(), **opt_kwargs) # subclass of torch.optim.Optimizer
phases += [dnnlib.EasyDict(name=name+'both', module=module, opt=opt, interval=1)]
else: # Lazy regularization.
mb_ratio = reg_interval / (reg_interval + 1)
opt_kwargs = dnnlib.EasyDict(opt_kwargs)
opt_kwargs.lr = opt_kwargs.lr * mb_ratio
opt_kwargs.betas = [beta ** mb_ratio for beta in opt_kwargs.betas]
opt = dnnlib.util.construct_class_by_name(module.parameters(), **opt_kwargs) # subclass of torch.optim.Optimizer
phases += [dnnlib.EasyDict(name=name+'main', module=module, opt=opt, interval=1)]
phases += [dnnlib.EasyDict(name=name+'reg', module=module, opt=opt, interval=reg_interval)]
for phase in phases:
phase.start_event = None
phase.end_event = None
if rank == 0:
phase.start_event = torch.cuda.Event(enable_timing=True)
phase.end_event = torch.cuda.Event(enable_timing=True)
# Export sample images.
grid_size = None
grid_z = None
grid_c = None
if rank == 0:
print('Exporting sample images...')
grid_size, images, labels = setup_snapshot_image_grid(training_set=training_set)
save_image_grid(images, os.path.join(run_dir, 'reals.png'), drange=[0,255], grid_size=grid_size)
grid_z = torch.randn([labels.shape[0], G.z_dim], device=device).split(batch_gpu)
grid_c = torch.from_numpy(labels).to(device).split(batch_gpu)
images = torch.cat([G_ema(z=z, c=c, noise_mode='const').cpu() for z, c in zip(grid_z, grid_c)]).numpy()
save_image_grid(images, os.path.join(run_dir, 'fakes_init.png'), drange=[-1,1], grid_size=grid_size)
# Initialize logs.
if rank == 0:
print('Initializing logs...')
stats_collector = training_stats.Collector(regex='.*')
stats_metrics = dict()
stats_jsonl = None
stats_tfevents = None
if rank == 0:
stats_jsonl = open(os.path.join(run_dir, 'stats.jsonl'), 'wt')
try:
import torch.utils.tensorboard as tensorboard
stats_tfevents = tensorboard.SummaryWriter(run_dir)
except ImportError as err:
print('Skipping tfevents export:', err)
# Train.
if rank == 0:
print(f'Training for {total_kimg} kimg...')
print()
cur_nimg = resume_kimg * 1000
cur_tick = 0
tick_start_nimg = cur_nimg
tick_start_time = time.time()
maintenance_time = tick_start_time - start_time
batch_idx = 0
if progress_fn is not None:
progress_fn(0, total_kimg)
while True:
# Fetch training data.
with torch.autograd.profiler.record_function('data_fetch'):
phase_real_img, phase_real_c = next(training_set_iterator)
phase_real_img = (phase_real_img.to(device).to(torch.float32) / 127.5 - 1).split(batch_gpu)
phase_real_c = phase_real_c.to(device).split(batch_gpu)
all_gen_z = torch.randn([len(phases) * batch_size, G.z_dim], device=device)
all_gen_z = [phase_gen_z.split(batch_gpu) for phase_gen_z in all_gen_z.split(batch_size)]
all_gen_c = [training_set.get_label(np.random.randint(len(training_set))) for _ in range(len(phases) * batch_size)]
all_gen_c = torch.from_numpy(np.stack(all_gen_c)).pin_memory().to(device)
all_gen_c = [phase_gen_c.split(batch_gpu) for phase_gen_c in all_gen_c.split(batch_size)]
# Execute training phases.
for phase, phase_gen_z, phase_gen_c in zip(phases, all_gen_z, all_gen_c):
if batch_idx % phase.interval != 0:
continue
if phase.start_event is not None:
phase.start_event.record(torch.cuda.current_stream(device))
# Accumulate gradients.
phase.opt.zero_grad(set_to_none=True)
phase.module.requires_grad_(True)
for real_img, real_c, gen_z, gen_c in zip(phase_real_img, phase_real_c, phase_gen_z, phase_gen_c):
loss.accumulate_gradients(phase=phase.name, real_img=real_img, real_c=real_c, gen_z=gen_z, gen_c=gen_c, gain=phase.interval, cur_nimg=cur_nimg)
phase.module.requires_grad_(False)
# Update weights.
with torch.autograd.profiler.record_function(phase.name + '_opt'):
params = [param for param in phase.module.parameters() if param.grad is not None]
if len(params) > 0:
flat = torch.cat([param.grad.flatten() for param in params])
if num_gpus > 1:
torch.distributed.all_reduce(flat)
flat /= num_gpus
misc.nan_to_num(flat, nan=0, posinf=1e5, neginf=-1e5, out=flat)
grads = flat.split([param.numel() for param in params])
for param, grad in zip(params, grads):
param.grad = grad.reshape(param.shape)
phase.opt.step()
# Phase done.
if phase.end_event is not None:
phase.end_event.record(torch.cuda.current_stream(device))
# Update G_ema.
with torch.autograd.profiler.record_function('Gema'):
ema_nimg = ema_kimg * 1000
if ema_rampup is not None:
ema_nimg = min(ema_nimg, cur_nimg * ema_rampup)
ema_beta = 0.5 ** (batch_size / max(ema_nimg, 1e-8))
for p_ema, p in zip(G_ema.parameters(), G.parameters()):
p_ema.copy_(p.lerp(p_ema, ema_beta))
for b_ema, b in zip(G_ema.buffers(), G.buffers()):
b_ema.copy_(b)
# Update state.
cur_nimg += batch_size
batch_idx += 1
# Execute ADA heuristic.
if (ada_stats is not None) and (batch_idx % ada_interval == 0):
ada_stats.update()
adjust = np.sign(ada_stats['Loss/signs/real'] - ada_target) * (batch_size * ada_interval) / (ada_kimg * 1000)
augment_pipe.p.copy_((augment_pipe.p + adjust).max(misc.constant(0, device=device)))
# Perform maintenance tasks once per tick.
done = (cur_nimg >= total_kimg * 1000)
if (not done) and (cur_tick != 0) and (cur_nimg < tick_start_nimg + kimg_per_tick * 1000):
continue
# Print status line, accumulating the same information in training_stats.
tick_end_time = time.time()
fields = []
fields += [f"tick {training_stats.report0('Progress/tick', cur_tick):<5d}"]
fields += [f"kimg {training_stats.report0('Progress/kimg', cur_nimg / 1e3):<8.1f}"]
fields += [f"time {dnnlib.util.format_time(training_stats.report0('Timing/total_sec', tick_end_time - start_time)):<12s}"]
fields += [f"sec/tick {training_stats.report0('Timing/sec_per_tick', tick_end_time - tick_start_time):<7.1f}"]
fields += [f"sec/kimg {training_stats.report0('Timing/sec_per_kimg', (tick_end_time - tick_start_time) / (cur_nimg - tick_start_nimg) * 1e3):<7.2f}"]
fields += [f"maintenance {training_stats.report0('Timing/maintenance_sec', maintenance_time):<6.1f}"]
fields += [f"cpumem {training_stats.report0('Resources/cpu_mem_gb', psutil.Process(os.getpid()).memory_info().rss / 2**30):<6.2f}"]
fields += [f"gpumem {training_stats.report0('Resources/peak_gpu_mem_gb', torch.cuda.max_memory_allocated(device) / 2**30):<6.2f}"]
fields += [f"reserved {training_stats.report0('Resources/peak_gpu_mem_reserved_gb', torch.cuda.max_memory_reserved(device) / 2**30):<6.2f}"]
torch.cuda.reset_peak_memory_stats()
fields += [f"augment {training_stats.report0('Progress/augment', float(augment_pipe.p.cpu()) if augment_pipe is not None else 0):.3f}"]
training_stats.report0('Timing/total_hours', (tick_end_time - start_time) / (60 * 60))
training_stats.report0('Timing/total_days', (tick_end_time - start_time) / (24 * 60 * 60))
if rank == 0:
print(' '.join(fields))
# Check for abort.
if (not done) and (abort_fn is not None) and abort_fn():
done = True
if rank == 0:
print()
print('Aborting...')
# Save image snapshot.
if (rank == 0) and (image_snapshot_ticks is not None) and (done or cur_tick % image_snapshot_ticks == 0):
images = torch.cat([G_ema(z=z, c=c, noise_mode='const').cpu() for z, c in zip(grid_z, grid_c)]).numpy()
save_image_grid(images, os.path.join(run_dir, f'fakes{cur_nimg//1000:06d}.png'), drange=[-1,1], grid_size=grid_size)
# Save network snapshot.
snapshot_pkl = None
snapshot_data = None
if (network_snapshot_ticks is not None) and (done or cur_tick % network_snapshot_ticks == 0):
snapshot_data = dict(G=G, D=D, G_ema=G_ema, augment_pipe=augment_pipe, training_set_kwargs=dict(training_set_kwargs))
for key, value in snapshot_data.items():
if isinstance(value, torch.nn.Module):
value = copy.deepcopy(value).eval().requires_grad_(False)
if num_gpus > 1:
misc.check_ddp_consistency(value, ignore_regex=r'.*\.[^.]+_(avg|ema)')
for param in misc.params_and_buffers(value):
torch.distributed.broadcast(param, src=0)
snapshot_data[key] = value.cpu()
del value # conserve memory
snapshot_pkl = os.path.join(run_dir, f'network-snapshot-{cur_nimg//1000:06d}.pkl')
if rank == 0:
with open(snapshot_pkl, 'wb') as f:
pickle.dump(snapshot_data, f)
# Evaluate metrics.
if (snapshot_data is not None) and (len(metrics) > 0):
if rank == 0:
print('Evaluating metrics...')
for metric in metrics:
result_dict = metric_main.calc_metric(metric=metric, G=snapshot_data['G_ema'],
dataset_kwargs=training_set_kwargs, num_gpus=num_gpus, rank=rank, device=device)
if rank == 0:
metric_main.report_metric(result_dict, run_dir=run_dir, snapshot_pkl=snapshot_pkl)
stats_metrics.update(result_dict.results)
del snapshot_data # conserve memory
# Collect statistics.
for phase in phases:
value = []
if (phase.start_event is not None) and (phase.end_event is not None):
phase.end_event.synchronize()
value = phase.start_event.elapsed_time(phase.end_event)
training_stats.report0('Timing/' + phase.name, value)
stats_collector.update()
stats_dict = stats_collector.as_dict()
# Update logs.
timestamp = time.time()
if stats_jsonl is not None:
fields = dict(stats_dict, timestamp=timestamp)
stats_jsonl.write(json.dumps(fields) + '\n')
stats_jsonl.flush()
if stats_tfevents is not None:
global_step = int(cur_nimg / 1e3)
walltime = timestamp - start_time
for name, value in stats_dict.items():
stats_tfevents.add_scalar(name, value.mean, global_step=global_step, walltime=walltime)
for name, value in stats_metrics.items():
stats_tfevents.add_scalar(f'Metrics/{name}', value, global_step=global_step, walltime=walltime)
stats_tfevents.flush()
if progress_fn is not None:
progress_fn(cur_nimg // 1000, total_kimg)
# Update state.
cur_tick += 1
tick_start_nimg = cur_nimg
tick_start_time = time.time()
maintenance_time = tick_start_time - tick_end_time
if done:
break
# Done.
if rank == 0:
print()
print('Exiting...')
#----------------------------------------------------------------------------
| 51.119159 | 168 | 0.619361 |
f3b552d5d08c1327ceecb0f45ad67b7beb5bc976 | 1,686 | py | Python | Django/Video_Project/Day04/DjangoView/App/views.py | pyforspider/LearningLog | ac5988d7fbb0d07d6e7485f9050250af5bcba089 | [
"MIT"
] | null | null | null | Django/Video_Project/Day04/DjangoView/App/views.py | pyforspider/LearningLog | ac5988d7fbb0d07d6e7485f9050250af5bcba089 | [
"MIT"
] | 18 | 2020-02-12T01:18:12.000Z | 2022-03-12T00:42:15.000Z | Django/Video_Project/Day04/DjangoView/App/views.py | pyforspider/LearningLog | ac5988d7fbb0d07d6e7485f9050250af5bcba089 | [
"MIT"
] | null | null | null | from django.http import HttpResponse, HttpResponseRedirect, JsonResponse
from django.shortcuts import render, redirect
from django.urls import reverse
def hello(request):
response = HttpResponse()
response.content = "德玛西亚"
# response.status_code = 404
response.write('该刷马桶了')
response.flush()
response.content_type = 'image/apng'
return response
# return HttpResponse('该刷马桶了')
def get_ticket(request):
# redirect(reverse('url get_ticket'))
url = reverse('hello')
# return HttpResponseRedirect('/app/hello/')
# return HttpResponseRedirect(url)
return redirect(url)
def get_info(request):
info = {
'status': 200,
'msg': 'ok',
}
return JsonResponse(info)
def set_cookie(request):
response = HttpResponse('设置Cookie')
response.set_cookie('hobby', 'gaming', max_age=60)
return response
def get_cookie(request):
hobby = request.COOKIES.get('hobby')
return HttpResponse(hobby)
def login(request):
if request.method == 'GET':
return render(request, 'login.html')
elif request.method == 'POST':
response = HttpResponseRedirect(reverse('mine'))
username = request.POST.get('user')
# response.set_cookie('username', username, max_age=6) # 注意这里的response一定要返回,否则cookie设置失败,因为没有返回
response.set_signed_cookie('content', username, 'rock')
return response
# return redirect(reverse('mine'))
def mine(request):
try:
username = request.get_signed_cookie('content', salt='rock')
if username:
return render(request, 'mine.html', context={'username':username})
except:
print('获取失败')
return redirect(reverse('login'))
def logout(request):
response = redirect(reverse('login'))
response.delete_cookie('content')
return response
| 20.814815 | 99 | 0.731317 |
ff823ccb104954c4e6e879783fb81fe9c801e736 | 3,559 | py | Python | api/install.py | vontell/dynabench | 5d88346cbbfe5868395cb303eee591487cafbf0f | [
"MIT"
] | null | null | null | api/install.py | vontell/dynabench | 5d88346cbbfe5868395cb303eee591487cafbf0f | [
"MIT"
] | null | null | null | api/install.py | vontell/dynabench | 5d88346cbbfe5868395cb303eee591487cafbf0f | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import importlib
import json
import os
import sqlalchemy as db
from werkzeug.security import generate_password_hash
from models.base import Base
def get_cls_name_helper(ss):
ret, ii = "", 0
while ii < len(ss):
if ii == 0:
ret += ss[ii].upper()
elif ss[ii] == "_":
ret += ss[ii + 1].upper()
ii += 1
else:
ret += ss[ii]
ii += 1
return ret
if __name__ == "__main__":
##
# Create a config.py if it doesn't exist
##
if not os.path.exists("common/config.py"):
print("Config does not exist yet, let's create it.")
print("NOTE: Use absolute paths where applicable!")
example_config_str = open("common/config.py.example").read()
config = {}
exec(example_config_str)
required_fields = [
"db_name",
"db_user",
"db_password",
"ssl_cert_file_path",
"ssl_org_pem_file_path",
]
for field in required_fields:
tmp = input(f"Please enter your {field}: ")
config[field] = tmp
with open("common/config.py", "w") as fw:
fw.write("config = " + json.dumps(config, indent=4, sort_keys=True))
print("Wrote config to common/config.py - feel free to edit.")
else:
print("Config already exists.")
from common.config import config
##
# Mark all existing migrations done
##
from common.migrator import first_time_migrations
first_time_migrations()
##
# Create all tables
##
engine = db.create_engine(
"mysql+pymysql://{}:{}@localhost:3306/{}".format(
config["db_user"], config["db_password"], config["db_name"]
),
# in case you want to debug:
# echo="debug",
# echo_pool=True,
)
connection = engine.connect()
Base.metadata.bind = engine
mods = {}
for m in os.listdir("models/"):
if m.endswith(".py") and not m.startswith("__"):
name = m[:-3]
mod = importlib.import_module("models." + name)
cls = get_cls_name_helper(name)
constructor = getattr(mod, cls)
mods[cls] = constructor()
Base.metadata.create_all(engine)
##
# Create one admin user and one task with one round
##
from models.base import DBSession as dbs
from models.user import User
from models.task import Task
from models.task_user_permission import TaskUserPermission
from models.round import Round
import getpass
dbs.flush()
u = User(
admin=True,
username=input("Enter admin username: "),
email=input("Enter admin email: "),
password=generate_password_hash(
getpass.getpass(prompt="Enter admin password (remains hidden): ")
),
)
dbs.add(u)
dbs.flush()
t = Task(
name="Test",
task_code="test",
desc="Your test task",
annotation_config_json=json.dumps({}),
cur_round=1,
)
dbs.add(t)
dbs.flush()
tup = TaskUserPermission(user=u, task=t, type="owner")
dbs.add(tup)
dbs.flush()
r = Round(task=t, rid=1, desc="Your test round", secret="TBD", url="https://TBD")
dbs.add(r)
dbs.flush()
t.cur_round = r.rid
dbs.commit()
dbs.close()
| 26.962121 | 85 | 0.5805 |
b24d1243f7dc18449662e837876ccd9fef1a2ec6 | 160 | py | Python | chapter 8/sampleCode14.py | DTAIEB/Thoughtful-Data-Science | 8b80e8f3e33b6fdc6672ecee1f27e0b983b28241 | [
"Apache-2.0"
] | 15 | 2018-06-01T19:18:32.000Z | 2021-11-28T03:31:35.000Z | chapter 8/sampleCode14.py | chshychen/Thoughtful-Data-Science | 8b80e8f3e33b6fdc6672ecee1f27e0b983b28241 | [
"Apache-2.0"
] | 1 | 2018-12-17T02:01:42.000Z | 2018-12-17T02:01:42.000Z | chapter 8/sampleCode14.py | chshychen/Thoughtful-Data-Science | 8b80e8f3e33b6fdc6672ecee1f27e0b983b28241 | [
"Apache-2.0"
] | 10 | 2018-09-16T06:06:57.000Z | 2021-06-29T05:49:18.000Z | @route(show_analytic="*")
def show_analytic_screen(self, show_analytic):
return """
<div pd_app="{{show_analytic}}" pd_render_onload></div>
"""
| 26.666667 | 55 | 0.65625 |
54a0062ca1d0730f110ad4b6687f76ac1eaf6d43 | 31,519 | py | Python | run_classifier.py | xiaonan-tx/EPI-xlnet | 9d1606990d930a1a1b5a78777d06f5abdf9e55bc | [
"Apache-2.0"
] | null | null | null | run_classifier.py | xiaonan-tx/EPI-xlnet | 9d1606990d930a1a1b5a78777d06f5abdf9e55bc | [
"Apache-2.0"
] | null | null | null | run_classifier.py | xiaonan-tx/EPI-xlnet | 9d1606990d930a1a1b5a78777d06f5abdf9e55bc | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from os.path import join
from absl import flags
import os
import sys
import csv
import collections
import numpy as np
import time
import math
import json
import random
from copy import copy
from collections import defaultdict as dd
import absl.logging as _logging # pylint: disable=unused-import
import tensorflow as tf
import sentencepiece as spm
from data_utils import SEP_ID, VOCAB_SIZE, CLS_ID
import model_utils
import function_builder
from classifier_utils import PaddingInputExample
from classifier_utils import convert_single_example
from prepro_utils import preprocess_text, encode_ids
# Model
flags.DEFINE_string("model_config_path", default=None,
help="Model config path.")
flags.DEFINE_float("dropout", default=0.1,
help="Dropout rate.")
flags.DEFINE_float("dropatt", default=0.1,
help="Attention dropout rate.")
flags.DEFINE_integer("clamp_len", default=-1,
help="Clamp length")
flags.DEFINE_string("summary_type", default="last",
help="Method used to summarize a sequence into a compact vector.")
flags.DEFINE_bool("use_summ_proj", default=True,
help="Whether to use projection for summarizing sequences.")
flags.DEFINE_bool("use_bfloat16", False,
help="Whether to use bfloat16.")
# Parameter initialization
flags.DEFINE_enum("init", default="normal",
enum_values=["normal", "uniform"],
help="Initialization method.")
flags.DEFINE_float("init_std", default=0.02,
help="Initialization std when init is normal.")
flags.DEFINE_float("init_range", default=0.1,
help="Initialization std when init is uniform.")
# I/O paths
flags.DEFINE_bool("overwrite_data", default=False,
help="If False, will use cached data if available.")
flags.DEFINE_string("init_checkpoint", default=None,
help="checkpoint path for initializing the model. "
"Could be a pretrained model or a finetuned model.")
flags.DEFINE_string("output_dir", default="",
help="Output dir for TF records.")
flags.DEFINE_string("spiece_model_file", default="",
help="Sentence Piece model path.")
flags.DEFINE_string("model_dir", default="",
help="Directory for saving the finetuned model.")
flags.DEFINE_string("data_dir", default="",
help="Directory for input data.")
# TPUs and machines
flags.DEFINE_bool("use_tpu", default=False, help="whether to use TPU.")
flags.DEFINE_integer("num_hosts", default=1, help="How many TPU hosts.")
flags.DEFINE_integer("num_core_per_host", default=8,
help="8 for TPU v2 and v3-8, 16 for larger TPU v3 pod. In the context "
"of GPU training, it refers to the number of GPUs used.")
flags.DEFINE_string("tpu_job_name", default=None, help="TPU worker job name.")
flags.DEFINE_string("tpu", default=None, help="TPU name.")
flags.DEFINE_string("tpu_zone", default=None, help="TPU zone.")
flags.DEFINE_string("gcp_project", default=None, help="gcp project.")
flags.DEFINE_string("master", default=None, help="master")
flags.DEFINE_integer("iterations", default=1000,
help="number of iterations per TPU training loop.")
# training
flags.DEFINE_bool("do_train", default=False, help="whether to do training")
flags.DEFINE_integer("train_steps", default=1000,
help="Number of training steps")
flags.DEFINE_integer("warmup_steps", default=0, help="number of warmup steps")
flags.DEFINE_float("learning_rate", default=1e-5, help="initial learning rate")
flags.DEFINE_float("lr_layer_decay_rate", 1.0,
"Top layer: lr[L] = FLAGS.learning_rate."
"Low layer: lr[l-1] = lr[l] * lr_layer_decay_rate.")
flags.DEFINE_float("min_lr_ratio", default=0.0,
help="min lr ratio for cos decay.")
flags.DEFINE_float("clip", default=1.0, help="Gradient clipping")
flags.DEFINE_integer("max_save", default=0,
help="Max number of checkpoints to save. Use 0 to save all.")
flags.DEFINE_integer("save_steps", default=None,
help="Save the model for every save_steps. "
"If None, not to save any model.")
flags.DEFINE_integer("train_batch_size", default=8,
help="Batch size for training")
flags.DEFINE_float("weight_decay", default=0.00, help="Weight decay rate")
flags.DEFINE_float("adam_epsilon", default=1e-8, help="Adam epsilon")
flags.DEFINE_string("decay_method", default="poly", help="poly or cos")
# evaluation
flags.DEFINE_bool("do_eval", default=False, help="whether to do eval")
flags.DEFINE_bool("do_predict", default=False, help="whether to do prediction")
flags.DEFINE_float("predict_threshold", default=0,
help="Threshold for binary prediction.")
flags.DEFINE_string("eval_split", default="dev", help="could be dev or test")
flags.DEFINE_integer("eval_batch_size", default=128,
help="batch size for evaluation")
flags.DEFINE_integer("predict_batch_size", default=128,
help="batch size for prediction.")
flags.DEFINE_string("predict_dir", default=None,
help="Dir for saving prediction files.")
flags.DEFINE_bool("eval_all_ckpt", default=False,
help="Eval all ckpts. If False, only evaluate the last one.")
flags.DEFINE_string("predict_ckpt", default=None,
help="Ckpt path for do_predict. If None, use the last one.")
# task specific
flags.DEFINE_string("task_name", default=None, help="Task name")
flags.DEFINE_integer("max_seq_length", default=128, help="Max sequence length")
flags.DEFINE_integer("shuffle_buffer", default=2048,
help="Buffer size used for shuffle.")
flags.DEFINE_integer("num_passes", default=1,
help="Num passes for processing training data. "
"This is use to batch data without loss for TPUs.")
flags.DEFINE_bool("uncased", default=False,
help="Use uncased.")
flags.DEFINE_string("cls_scope", default=None,
help="Classifier layer scope.")
flags.DEFINE_bool("is_regression", default=False,
help="Whether it's a regression task.")
FLAGS = flags.FLAGS
class InputExample(object):
"""A single training/test example for simple sequence classification."""
def __init__(self, guid, text_a, text_b=None, label=None):
"""Constructs a InputExample.
Args:
guid: Unique id for the example.
text_a: string. The untokenized text of the first sequence. For single
sequence tasks, only this sequence must be specified.
text_b: (Optional) string. The untokenized text of the second sequence.
Only must be specified for sequence pair tasks.
label: (Optional) string. The label of the example. This should be
specified for train and dev examples, but not for test examples.
"""
self.guid = guid
self.text_a = text_a
self.text_b = text_b
self.label = label
class DataProcessor(object):
"""Base class for data converters for sequence classification data sets."""
def get_train_examples(self, data_dir):
"""Gets a collection of `InputExample`s for the train set."""
raise NotImplementedError()
def get_dev_examples(self, data_dir):
"""Gets a collection of `InputExample`s for the dev set."""
raise NotImplementedError()
def get_test_examples(self, data_dir):
"""Gets a collection of `InputExample`s for prediction."""
raise NotImplementedError()
def get_labels(self):
"""Gets the list of labels for this data set."""
raise NotImplementedError()
@classmethod
def _read_tsv(cls, input_file, quotechar=None):
"""Reads a tab separated value file."""
with tf.gfile.Open(input_file, "r") as f:
reader = csv.reader(f, delimiter="\t", quotechar=quotechar)
lines = []
for line in reader:
if len(line) == 0: continue
lines.append(line)
return lines
class GLUEProcessor(DataProcessor):
def __init__(self):
self.train_file = "train.tsv"
self.dev_file = "dev.tsv"
self.test_file = "test.tsv"
self.label_column = None
self.text_a_column = None
self.text_b_column = None
self.contains_header = True
self.test_text_a_column = None
self.test_text_b_column = None
self.test_contains_header = True
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, self.train_file)), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, self.dev_file)), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
if self.test_text_a_column is None:
self.test_text_a_column = self.text_a_column
if self.test_text_b_column is None:
self.test_text_b_column = self.text_b_column
return self._create_examples(
self._read_tsv(os.path.join(data_dir, self.test_file)), "test")
def get_labels(self):
"""See base class."""
return ["0", "1"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0 and self.contains_header and set_type != "test":
continue
if i == 0 and self.test_contains_header and set_type == "test":
continue
guid = "%s-%s" % (set_type, i)
a_column = (self.text_a_column if set_type != "test" else
self.test_text_a_column)
b_column = (self.text_b_column if set_type != "test" else
self.test_text_b_column)
# there are some incomplete lines in QNLI
if len(line) <= a_column:
tf.logging.warning('Incomplete line, ignored.')
continue
text_a = line[a_column]
if b_column is not None:
if len(line) <= b_column:
tf.logging.warning('Incomplete line, ignored.')
continue
text_b = line[b_column]
else:
text_b = None
if set_type == "test":
label = self.get_labels()[0]
else:
if len(line) <= self.label_column:
tf.logging.warning('Incomplete line, ignored.')
continue
label = line[self.label_column]
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class Yelp5Processor(DataProcessor):
def get_train_examples(self, data_dir):
return self._create_examples(os.path.join(data_dir, "train.csv"))
def get_dev_examples(self, data_dir):
return self._create_examples(os.path.join(data_dir, "test.csv"))
def get_labels(self):
"""See base class."""
return ["1", "2", "3", "4", "5"]
def _create_examples(self, input_file):
"""Creates examples for the training and dev sets."""
examples = []
with tf.gfile.Open(input_file) as f:
reader = csv.reader(f)
for i, line in enumerate(reader):
label = line[0]
text_a = line[1].replace('""', '"').replace('\\"', '"')
examples.append(
InputExample(guid=str(i), text_a=text_a, text_b=None, label=label))
return examples
class ImdbProcessor(DataProcessor):
def get_labels(self):
return ["neg", "pos"]
def get_train_examples(self, data_dir):
return self._create_examples(os.path.join(data_dir, "train"))
def get_dev_examples(self, data_dir):
return self._create_examples(os.path.join(data_dir, "test"))
def _create_examples(self, data_dir):
examples = []
for label in ["neg", "pos"]:
cur_dir = os.path.join(data_dir, label)
for filename in tf.gfile.ListDirectory(cur_dir):
if not filename.endswith("txt"): continue
path = os.path.join(cur_dir, filename)
with tf.gfile.Open(path) as f:
text = f.read().strip().replace("<br />", " ")
examples.append(InputExample(
guid="unused_id", text_a=text, text_b=None, label=label))
return examples
class MnliMatchedProcessor(GLUEProcessor):
def __init__(self):
super(MnliMatchedProcessor, self).__init__()
self.dev_file = "dev_matched.tsv"
self.test_file = "test_matched.tsv"
self.label_column = -1
self.text_a_column = 8
self.text_b_column = 9
def get_labels(self):
return ["contradiction", "entailment", "neutral"]
class MnliMismatchedProcessor(MnliMatchedProcessor):
def __init__(self):
super(MnliMismatchedProcessor, self).__init__()
self.dev_file = "dev_mismatched.tsv"
self.test_file = "test_mismatched.tsv"
class StsbProcessor(GLUEProcessor):
def __init__(self):
super(StsbProcessor, self).__init__()
self.label_column = 9
self.text_a_column = 7
self.text_b_column = 8
def get_labels(self):
return [0.0]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0 and self.contains_header and set_type != "test":
continue
if i == 0 and self.test_contains_header and set_type == "test":
continue
guid = "%s-%s" % (set_type, i)
a_column = (self.text_a_column if set_type != "test" else
self.test_text_a_column)
b_column = (self.text_b_column if set_type != "test" else
self.test_text_b_column)
# there are some incomplete lines in QNLI
if len(line) <= a_column:
tf.logging.warning('Incomplete line, ignored.')
continue
text_a = line[a_column]
if b_column is not None:
if len(line) <= b_column:
tf.logging.warning('Incomplete line, ignored.')
continue
text_b = line[b_column]
else:
text_b = None
if set_type == "test":
label = self.get_labels()[0]
else:
if len(line) <= self.label_column:
tf.logging.warning('Incomplete line, ignored.')
continue
label = float(line[self.label_column])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class EPIProcessor(GLUEProcessor):
def __init__(self):
super(StsbProcessor, self).__init__()
self.label_column = 1
self.text_a_column = 2
self.text_b_column = 3
def get_labels(self):
return ["0", "1"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0 and self.contains_header and set_type != "test":
continue
if i == 0 and self.test_contains_header and set_type == "test":
continue
guid = "%s-%s" % (set_type, i)
a_column = (self.text_a_column if set_type != "test" else
self.test_text_a_column)
b_column = (self.text_b_column if set_type != "test" else
self.test_text_b_column)
# there are some incomplete lines in QNLI
if len(line) <= a_column:
tf.logging.warning('Incomplete line, ignored.')
continue
text_a = line[a_column]
if b_column is not None:
if len(line) <= b_column:
tf.logging.warning('Incomplete line, ignored.')
continue
text_b = line[b_column]
else:
text_b = None
if set_type == "test":
label = self.get_labels()[0]
else:
if len(line) <= self.label_column:
tf.logging.warning('Incomplete line, ignored.')
continue
label = float(line[self.label_column])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
def file_based_convert_examples_to_features(
examples, label_list, max_seq_length, tokenize_fn, output_file,
num_passes=1):
"""Convert a set of `InputExample`s to a TFRecord file."""
# do not create duplicated records
if tf.gfile.Exists(output_file) and not FLAGS.overwrite_data:
tf.logging.info("Do not overwrite tfrecord {} exists.".format(output_file))
return
tf.logging.info("Create new tfrecord {}.".format(output_file))
writer = tf.python_io.TFRecordWriter(output_file)
if num_passes > 1:
examples *= num_passes
for (ex_index, example) in enumerate(examples):
if ex_index % 10000 == 0:
tf.logging.info("Writing example {} of {}".format(ex_index,
len(examples)))
feature = convert_single_example(ex_index, example, label_list,
max_seq_length, tokenize_fn)
def create_int_feature(values):
f = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values)))
return f
def create_float_feature(values):
f = tf.train.Feature(float_list=tf.train.FloatList(value=list(values)))
return f
features = collections.OrderedDict()
features["input_ids"] = create_int_feature(feature.input_ids)
features["input_mask"] = create_float_feature(feature.input_mask)
features["segment_ids"] = create_int_feature(feature.segment_ids)
if label_list is not None:
features["label_ids"] = create_int_feature([feature.label_id])
else:
features["label_ids"] = create_float_feature([float(feature.label_id)])
features["is_real_example"] = create_int_feature(
[int(feature.is_real_example)])
tf_example = tf.train.Example(features=tf.train.Features(feature=features))
writer.write(tf_example.SerializeToString())
writer.close()
def file_based_input_fn_builder(input_file, seq_length, is_training,
drop_remainder):
"""Creates an `input_fn` closure to be passed to TPUEstimator."""
name_to_features = {
"input_ids": tf.FixedLenFeature([seq_length], tf.int64),
"input_mask": tf.FixedLenFeature([seq_length], tf.float32),
"segment_ids": tf.FixedLenFeature([seq_length], tf.int64),
"label_ids": tf.FixedLenFeature([], tf.int64),
"is_real_example": tf.FixedLenFeature([], tf.int64),
}
if FLAGS.is_regression:
name_to_features["label_ids"] = tf.FixedLenFeature([], tf.float32)
tf.logging.info("Input tfrecord file {}".format(input_file))
def _decode_record(record, name_to_features):
"""Decodes a record to a TensorFlow example."""
example = tf.parse_single_example(record, name_to_features)
# tf.Example only supports tf.int64, but the TPU only supports tf.int32.
# So cast all int64 to int32.
for name in list(example.keys()):
t = example[name]
if t.dtype == tf.int64:
t = tf.cast(t, tf.int32)
example[name] = t
return example
def input_fn(params, input_context=None):
"""The actual input function."""
if FLAGS.use_tpu:
batch_size = params["batch_size"]
elif is_training:
batch_size = FLAGS.train_batch_size
elif FLAGS.do_eval:
batch_size = FLAGS.eval_batch_size
else:
batch_size = FLAGS.predict_batch_size
d = tf.data.TFRecordDataset(input_file)
# Shard the dataset to difference devices
if input_context is not None:
tf.logging.info("Input pipeline id %d out of %d",
input_context.input_pipeline_id, input_context.num_replicas_in_sync)
d = d.shard(input_context.num_input_pipelines,
input_context.input_pipeline_id)
# For training, we want a lot of parallel reading and shuffling.
# For eval, we want no shuffling and parallel reading doesn't matter.
if is_training:
d = d.shuffle(buffer_size=FLAGS.shuffle_buffer)
d = d.repeat()
d = d.apply(
tf.contrib.data.map_and_batch(
lambda record: _decode_record(record, name_to_features),
batch_size=batch_size,
drop_remainder=drop_remainder))
return d
return input_fn
def get_model_fn(n_class):
def model_fn(features, labels, mode, params):
#### Training or Evaluation
is_training = (mode == tf.estimator.ModeKeys.TRAIN)
#### Get loss from inputs
if FLAGS.is_regression:
(total_loss, per_example_loss, logits
) = function_builder.get_regression_loss(FLAGS, features, is_training)
else:
(total_loss, per_example_loss, logits
) = function_builder.get_classification_loss(
FLAGS, features, n_class, is_training)
#### Check model parameters
num_params = sum([np.prod(v.shape) for v in tf.trainable_variables()])
tf.logging.info('#params: {}'.format(num_params))
#### load pretrained models
scaffold_fn = model_utils.init_from_checkpoint(FLAGS)
#### Evaluation mode
if mode == tf.estimator.ModeKeys.EVAL:
assert FLAGS.num_hosts == 1
def metric_fn(per_example_loss, label_ids, logits, is_real_example):
predictions = tf.argmax(logits, axis=-1, output_type=tf.int32)
eval_input_dict = {
'labels': label_ids,
'predictions': predictions,
'weights': is_real_example
}
accuracy = tf.metrics.accuracy(**eval_input_dict)
loss = tf.metrics.mean(values=per_example_loss, weights=is_real_example)
return {
'eval_accuracy': accuracy,
'eval_loss': loss}
def regression_metric_fn(
per_example_loss, label_ids, logits, is_real_example):
loss = tf.metrics.mean(values=per_example_loss, weights=is_real_example)
pearsonr = tf.contrib.metrics.streaming_pearson_correlation(
logits, label_ids, weights=is_real_example)
return {'eval_loss': loss, 'eval_pearsonr': pearsonr}
is_real_example = tf.cast(features["is_real_example"], dtype=tf.float32)
#### Constucting evaluation TPUEstimatorSpec with new cache.
label_ids = tf.reshape(features['label_ids'], [-1])
if FLAGS.is_regression:
metric_fn = regression_metric_fn
else:
metric_fn = metric_fn
metric_args = [per_example_loss, label_ids, logits, is_real_example]
if FLAGS.use_tpu:
eval_spec = tf.contrib.tpu.TPUEstimatorSpec(
mode=mode,
loss=total_loss,
eval_metrics=(metric_fn, metric_args),
scaffold_fn=scaffold_fn)
else:
eval_spec = tf.estimator.EstimatorSpec(
mode=mode,
loss=total_loss,
eval_metric_ops=metric_fn(*metric_args))
return eval_spec
elif mode == tf.estimator.ModeKeys.PREDICT:
label_ids = tf.reshape(features["label_ids"], [-1])
predictions = {
"logits": logits,
"labels": label_ids,
"is_real": features["is_real_example"]
}
if FLAGS.use_tpu:
output_spec = tf.contrib.tpu.TPUEstimatorSpec(
mode=mode, predictions=predictions, scaffold_fn=scaffold_fn)
else:
output_spec = tf.estimator.EstimatorSpec(
mode=mode, predictions=predictions)
return output_spec
#### Configuring the optimizer
train_op, learning_rate, _ = model_utils.get_train_op(FLAGS, total_loss)
monitor_dict = {}
monitor_dict["lr"] = learning_rate
#### Constucting training TPUEstimatorSpec with new cache.
if FLAGS.use_tpu:
#### Creating host calls
if not FLAGS.is_regression:
label_ids = tf.reshape(features['label_ids'], [-1])
predictions = tf.argmax(logits, axis=-1, output_type=label_ids.dtype)
is_correct = tf.equal(predictions, label_ids)
accuracy = tf.reduce_mean(tf.cast(is_correct, tf.float32))
monitor_dict["accuracy"] = accuracy
host_call = function_builder.construct_scalar_host_call(
monitor_dict=monitor_dict,
model_dir=FLAGS.model_dir,
prefix="train/",
reduce_fn=tf.reduce_mean)
else:
host_call = None
train_spec = tf.contrib.tpu.TPUEstimatorSpec(
mode=mode, loss=total_loss, train_op=train_op, host_call=host_call,
scaffold_fn=scaffold_fn)
else:
train_spec = tf.estimator.EstimatorSpec(
mode=mode, loss=total_loss, train_op=train_op)
return train_spec
return model_fn
def main(_):
tf.logging.set_verbosity(tf.logging.INFO)
#### Validate flags
if FLAGS.save_steps is not None:
FLAGS.iterations = min(FLAGS.iterations, FLAGS.save_steps)
if FLAGS.do_predict:
predict_dir = FLAGS.predict_dir
if not tf.gfile.Exists(predict_dir):
tf.gfile.MakeDirs(predict_dir)
processors = {
"mnli_matched": MnliMatchedProcessor,
"mnli_mismatched": MnliMismatchedProcessor,
'sts-b': StsbProcessor,
'imdb': ImdbProcessor,
"yelp5": Yelp5Processor
}
if not FLAGS.do_train and not FLAGS.do_eval and not FLAGS.do_predict:
raise ValueError(
"At least one of `do_train`, `do_eval, `do_predict` or "
"`do_submit` must be True.")
if not tf.gfile.Exists(FLAGS.output_dir):
tf.gfile.MakeDirs(FLAGS.output_dir)
task_name = FLAGS.task_name.lower()
if task_name not in processors:
raise ValueError("Task not found: %s" % (task_name))
processor = processors[task_name]()
label_list = processor.get_labels() if not FLAGS.is_regression else None
sp = spm.SentencePieceProcessor()
sp.Load(FLAGS.spiece_model_file)
def tokenize_fn(text):
text = preprocess_text(text, lower=FLAGS.uncased)
return encode_ids(sp, text)
run_config = model_utils.configure_tpu(FLAGS)
model_fn = get_model_fn(len(label_list) if label_list is not None else None)
spm_basename = os.path.basename(FLAGS.spiece_model_file)
# If TPU is not available, this will fall back to normal Estimator on CPU
# or GPU.
if FLAGS.use_tpu:
estimator = tf.contrib.tpu.TPUEstimator(
use_tpu=FLAGS.use_tpu,
model_fn=model_fn,
config=run_config,
train_batch_size=FLAGS.train_batch_size,
predict_batch_size=FLAGS.predict_batch_size,
eval_batch_size=FLAGS.eval_batch_size)
else:
estimator = tf.estimator.Estimator(
model_fn=model_fn,
config=run_config)
if FLAGS.do_train:
train_file_base = "{}.len-{}.train.tf_record".format(
spm_basename, FLAGS.max_seq_length)
train_file = os.path.join(FLAGS.output_dir, train_file_base)
tf.logging.info("Use tfrecord file {}".format(train_file))
train_examples = processor.get_train_examples(FLAGS.data_dir)
np.random.shuffle(train_examples)
tf.logging.info("Num of train samples: {}".format(len(train_examples)))
file_based_convert_examples_to_features(
train_examples, label_list, FLAGS.max_seq_length, tokenize_fn,
train_file, FLAGS.num_passes)
train_input_fn = file_based_input_fn_builder(
input_file=train_file,
seq_length=FLAGS.max_seq_length,
is_training=True,
drop_remainder=True)
estimator.train(input_fn=train_input_fn, max_steps=FLAGS.train_steps)
if FLAGS.do_eval or FLAGS.do_predict:
if FLAGS.eval_split == "dev":
eval_examples = processor.get_dev_examples(FLAGS.data_dir)
else:
eval_examples = processor.get_test_examples(FLAGS.data_dir)
tf.logging.info("Num of eval samples: {}".format(len(eval_examples)))
if FLAGS.do_eval:
# TPU requires a fixed batch size for all batches, therefore the number
# of examples must be a multiple of the batch size, or else examples
# will get dropped. So we pad with fake examples which are ignored
# later on. These do NOT count towards the metric (all tf.metrics
# support a per-instance weight, and these get a weight of 0.0).
#
# Modified in XL: We also adopt the same mechanism for GPUs.
while len(eval_examples) % FLAGS.eval_batch_size != 0:
eval_examples.append(PaddingInputExample())
eval_file_base = "{}.len-{}.{}.eval.tf_record".format(
spm_basename, FLAGS.max_seq_length, FLAGS.eval_split)
eval_file = os.path.join(FLAGS.output_dir, eval_file_base)
file_based_convert_examples_to_features(
eval_examples, label_list, FLAGS.max_seq_length, tokenize_fn,
eval_file)
assert len(eval_examples) % FLAGS.eval_batch_size == 0
eval_steps = int(len(eval_examples) // FLAGS.eval_batch_size)
eval_input_fn = file_based_input_fn_builder(
input_file=eval_file,
seq_length=FLAGS.max_seq_length,
is_training=False,
drop_remainder=True)
# Filter out all checkpoints in the directory
steps_and_files = []
filenames = tf.gfile.ListDirectory(FLAGS.model_dir)
for filename in filenames:
if filename.endswith(".index"):
ckpt_name = filename[:-6]
cur_filename = join(FLAGS.model_dir, ckpt_name)
global_step = int(cur_filename.split("-")[-1])
tf.logging.info("Add {} to eval list.".format(cur_filename))
steps_and_files.append([global_step, cur_filename])
steps_and_files = sorted(steps_and_files, key=lambda x: x[0])
# Decide whether to evaluate all ckpts
if not FLAGS.eval_all_ckpt:
steps_and_files = steps_and_files[-1:]
eval_results = []
for global_step, filename in sorted(steps_and_files, key=lambda x: x[0]):
ret = estimator.evaluate(
input_fn=eval_input_fn,
steps=eval_steps,
checkpoint_path=filename)
ret["step"] = global_step
ret["path"] = filename
eval_results.append(ret)
tf.logging.info("=" * 80)
log_str = "Eval result | "
for key, val in sorted(ret.items(), key=lambda x: x[0]):
log_str += "{} {} | ".format(key, val)
tf.logging.info(log_str)
key_name = "eval_pearsonr" if FLAGS.is_regression else "eval_accuracy"
eval_results.sort(key=lambda x: x[key_name], reverse=True)
tf.logging.info("=" * 80)
log_str = "Best result | "
for key, val in sorted(eval_results[0].items(), key=lambda x: x[0]):
log_str += "{} {} | ".format(key, val)
tf.logging.info(log_str)
if FLAGS.do_predict:
eval_file_base = "{}.len-{}.{}.predict.tf_record".format(
spm_basename, FLAGS.max_seq_length, FLAGS.eval_split)
eval_file = os.path.join(FLAGS.output_dir, eval_file_base)
file_based_convert_examples_to_features(
eval_examples, label_list, FLAGS.max_seq_length, tokenize_fn,
eval_file)
pred_input_fn = file_based_input_fn_builder(
input_file=eval_file,
seq_length=FLAGS.max_seq_length,
is_training=False,
drop_remainder=False)
predict_results = []
with tf.gfile.Open(os.path.join(predict_dir, "{}.tsv".format(
task_name)), "w") as fout:
fout.write("index\tprediction\n")
for pred_cnt, result in enumerate(estimator.predict(
input_fn=pred_input_fn,
yield_single_examples=True,
checkpoint_path=FLAGS.predict_ckpt)):
if pred_cnt % 1000 == 0:
tf.logging.info("Predicting submission for example: {}".format(
pred_cnt))
logits = [float(x) for x in result["logits"].flat]
predict_results.append(logits)
if len(logits) == 1:
label_out = logits[0]
elif len(logits) == 2:
if logits[1] - logits[0] > FLAGS.predict_threshold:
label_out = label_list[1]
else:
label_out = label_list[0]
elif len(logits) > 2:
max_index = np.argmax(np.array(logits, dtype=np.float32))
label_out = label_list[max_index]
else:
raise NotImplementedError
fout.write("{}\t{}\n".format(pred_cnt, label_out))
predict_json_path = os.path.join(predict_dir, "{}.logits.json".format(
task_name))
with tf.gfile.Open(predict_json_path, "w") as fp:
json.dump(predict_results, fp, indent=4)
if __name__ == "__main__":
tf.app.run()
| 34.750827 | 80 | 0.678067 |
657f164824554f0eb83eccdac5c95d3e6e683765 | 216 | py | Python | CLCC/ex4.py | adstr123/LPTHW | 1a331ef173ffd6122b5c5ed13d8fdcc73ab7ce66 | [
"Zed"
] | null | null | null | CLCC/ex4.py | adstr123/LPTHW | 1a331ef173ffd6122b5c5ed13d8fdcc73ab7ce66 | [
"Zed"
] | null | null | null | CLCC/ex4.py | adstr123/LPTHW | 1a331ef173ffd6122b5c5ed13d8fdcc73ab7ce66 | [
"Zed"
] | null | null | null | # mkdir example
# You can type multiple non-existent levels of a directory to create them all simultaneously
# cd ~
# mkdir -p temp/hello/how/are/you # will work fine with -p (parents) & create all those folders | 43.2 | 95 | 0.736111 |
b99a79aaa22be233d14b7647583cc9160d5c10a9 | 8,561 | py | Python | rdc/core.py | akkaze/rdc | 76e85e3fe441e3ba968a190d9496f467b9d4d2e6 | [
"BSD-3-Clause"
] | 52 | 2018-10-08T01:56:15.000Z | 2021-03-14T12:19:51.000Z | rdc/core.py | akkaze/rdc | 76e85e3fe441e3ba968a190d9496f467b9d4d2e6 | [
"BSD-3-Clause"
] | null | null | null | rdc/core.py | akkaze/rdc | 76e85e3fe441e3ba968a190d9496f467b9d4d2e6 | [
"BSD-3-Clause"
] | 3 | 2019-01-02T05:17:28.000Z | 2020-01-06T03:53:12.000Z | """
Reliable Distribution Communication.
Author: Ankun Zheng
"""
import _pickle as cPickle
import ctypes
import os
import sys
import warnings
import numpy as np
from enum import Enum
from rdc import _LIB
# reduction operators
class Op(Enum):
MAX = 0
MIN = 1
SUM = 2
BITOR = 3
def __new__(cls, value):
member = object.__new__(cls)
member._value_ = value
return member
def __int__(self):
return self.value
def init(args=None, lib='standard', lib_dll=None):
"""Intialize the rdc module, call this once before using anything.
Parameters
----------
args: list of str, optional
The list of arguments used to initialized the rdc
usually you need to pass in sys.argv.
Defaults to sys.argv when it is None.
lib: {'standard', 'mock', 'mpi'}, optional
Type of library we want to load
When cdll is specified
lib_dll: ctypes.DLL, optional
The DLL object used as lib.
When this is presented argument lib will be ignored.
"""
if args is None:
args = [arg.encode() for arg in sys.argv]
arr = (ctypes.c_char_p * len(args))()
arr[:] = args
_LIB.RdcInit(len(args), arr)
def finalize():
"""Finalize the rdc engine.
Call this function after you finished all jobs.
"""
_LIB.RdcFinalize()
def new_communicator(name):
comm = Comm()
comm.handle = _LIB.RdcNewCommunicator(name)
return comm
def get_rank():
"""Get rank of current process.
Returns
-------
rank : int
Rank of current process.
"""
ret = _LIB.RdcGetRank()
return ret
def get_world_size():
"""Get total number workers.
Returns
-------
n : int
Total number of process.
"""
ret = _LIB.RdcGetWorldSize()
return ret
def tracker_print(msg):
"""Print message to the tracker.
This function can be used to communicate the information of
the progress to the tracker
Parameters
----------
msg : str
The message to be printed to tracker.
"""
if not isinstance(msg, str):
msg = str(msg)
_LIB.RdcTrackerPrint(ctypes.c_char_p(msg).encode('utf-8'))
def get_processor_name():
"""Get the processor name.
Returns
-------
name : str
the name of processor(host)
"""
mxlen = 256
length = ctypes.c_ulong()
buf = ctypes.create_string_buffer(mxlen)
_LIB.RdcGetProcessorName(buf, ctypes.byref(length), mxlen)
return buf.value
def broadcast(data, root):
"""Broadcast object from one node to all other nodes.
Parameters
----------
data : any type that can be pickled
Input data, if current rank does not equal root, this can be None
root : int
Rank of the node to broadcast data from.
Returns
-------
object : int
the result of broadcast.
"""
rank = get_rank()
length = ctypes.c_ulong()
if root == rank:
assert data is not None, 'need to pass in data when broadcasting'
s = pickle.dumps(data, protocol=pickle.HIGHEST_PROTOCOL)
length.value = len(s)
# run first broadcast
_LIB.RdcBroadcast(ctypes.byref(length), ctypes.sizeof(ctypes.c_ulong), root)
if root != rank:
dptr = (ctypes.c_char * length.value)()
# run second
_LIB.RdcBroadcast(
ctypes.cast(dptr, ctypes.c_void_p), length.value, root)
data = pickle.loads(dptr.raw)
del dptr
else:
_LIB.RdcBroadcast(
ctypes.cast(ctypes.c_char_p(s), ctypes.c_void_p), length.value,
root)
del s
return data
# enumeration of dtypes
DTYPE_ENUM__ = {
np.dtype('int8'): 0,
np.dtype('uint8'): 1,
np.dtype('int32'): 2,
np.dtype('uint32'): 3,
np.dtype('int64'): 4,
np.dtype('uint64'): 5,
np.dtype('float32'): 6,
np.dtype('float64'): 7
}
def allreduce(data, op, prepare_fun=None):
"""Perform allreduce, return the result.
Parameters
----------
data: numpy array
Input data.
op: int
Reduction operators, can be MIN, MAX, SUM, BITOR
prepare_fun: function
Lazy preprocessing function, if it is not None, prepare_fun(data)
will be called by the function before performing allreduce, to intialize the data
If the result of Allreduce can be recovered directly,
then prepare_fun will NOT be called
Returns
-------
result : array_like
The result of allreduce, have same shape as data
Notes
-----
This function is not thread-safe.
"""
if not isinstance(data, np.ndarray):
raise TypeError('allreduce only takes in numpy.ndarray')
buf = data.ravel()
if buf.base is data.base:
buf = buf.copy()
if buf.dtype not in DTYPE_ENUM__:
raise TypeError('data type %s not supported' % str(buf.dtype))
if prepare_fun is None:
_LIB.RdcAllreduce(
buf.ctypes.data_as(ctypes.c_void_p), buf.size,
DTYPE_ENUM__[buf.dtype], int(op), None, None)
else:
func_ptr = ctypes.CFUNCTYPE(None, ctypes.c_void_p)
def pfunc(args):
"""prepare function."""
prepare_fun(data)
_LIB.RdcAllreduce(
buf.ctypes.data_as(ctypes.c_void_p), buf.size,
DTYPE_ENUM__[buf.dtype], int(op), func_ptr(pfunc), None)
return buf
def _load_model(ptr, length):
"""
Internal function used by the module,
unpickle a model from a buffer specified by ptr, length
Arguments:
ptr: ctypes.POINTER(ctypes._char)
pointer to the memory region of buffer
length: int
the length of buffer
"""
data = (ctypes.c_char * length).from_address(ctypes.addressof(ptr.contents))
return pickle.loads(data.raw)
def load_checkpoint(with_local=False):
"""Load latest check point.
Parameters
----------
with_local: bool, optional
whether the checkpoint contains local model
Returns
-------
tuple : tuple
if with_local: return (version, gobal_model, local_model)
else return (version, gobal_model)
if returned version == 0, this means no model has been CheckPointed
and global_model, local_model returned will be None
"""
gptr = ctypes.POINTER(ctypes.c_char)()
global_len = ctypes.c_ulong()
if with_local:
lptr = ctypes.POINTER(ctypes.c_char)()
local_len = ctypes.c_ulong()
version = _LIB.RdcLoadCheckPoint(
ctypes.byref(gptr), ctypes.byref(global_len), ctypes.byref(lptr),
ctypes.byref(local_len))
if version == 0:
return (version, None, None)
return (version, _load_model(gptr, global_len.value),
_load_model(lptr, local_len.value))
else:
version = _LIB.RdcLoadCheckPoint(
ctypes.byref(gptr), ctypes.byref(global_len), None, None)
if version == 0:
return (version, None)
return (version, _load_model(gptr, global_len.value))
def checkpoint(global_model, local_model=None):
"""Checkpoint the model.
This means we finished a stage of execution.
Every time we call check point, there is a version number which will increase by one.
Parameters
----------
global_model: anytype that can be pickled
globally shared model/state when calling this function,
the caller need to gauranttees that global_model is the same in all nodes
local_model: anytype that can be pickled
Local model, that is specific to current node/rank.
This can be None when no local state is needed.
Notes
-----
local_model requires explicit replication of the model for fault-tolerance.
This will bring replication cost in checkpoint function.
while global_model do not need explicit replication.
It is recommended to use global_model if possible.
"""
sglobal = pickle.dumps(global_model)
if local_model is None:
_LIB.RdcCheckPoint(sglobal, len(sglobal), None, 0)
del sglobal
else:
slocal = pickle.dumps(local_model)
_LIB.RdcCheckPoint(sglobal, len(sglobal), slocal, len(slocal))
del slocal
del sglobal
def version_number():
"""Returns version number of current stored model.
This means how many calls to CheckPoint we made so far.
Returns
-------
version : int
Version number of currently stored model
"""
ret = _LIB.RdcVersionNumber()
return ret
| 27.091772 | 89 | 0.629366 |
90d929b8b2cdfcbc8fd97c92d58435e0becd5d1c | 7,807 | py | Python | docs/conf.py | santimaldonado2/tfm | 7fb93149ab9823602c175991e777ff699f0928b9 | [
"MIT"
] | null | null | null | docs/conf.py | santimaldonado2/tfm | 7fb93149ab9823602c175991e777ff699f0928b9 | [
"MIT"
] | null | null | null | docs/conf.py | santimaldonado2/tfm | 7fb93149ab9823602c175991e777ff699f0928b9 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# TFM documentation build configuration file, created by
# sphinx-quickstart.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'TFM'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'tfmdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index',
'tfm.tex',
u'TFM Documentation',
u"Santiago José Maldonado", 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'tfm', u'TFM Documentation',
[u"Santiago José Maldonado"], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'tfm', u'TFM Documentation',
u"Santiago José Maldonado", 'TFM',
'This is the final project for the Master in Data Science & Big Data - AFI, Escuela de Finanzas', 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
| 31.865306 | 120 | 0.705649 |
23776dd749a97c30ab68bd7df9e004925466ea53 | 1,831 | py | Python | backend/animal_adoption/apps/core/admin.py | PedroHenriqueDevBR/aplicacao-para-adocao-de-animais | 041f041ebbc86147b0192734fd9651fb317fbe36 | [
"MIT"
] | null | null | null | backend/animal_adoption/apps/core/admin.py | PedroHenriqueDevBR/aplicacao-para-adocao-de-animais | 041f041ebbc86147b0192734fd9651fb317fbe36 | [
"MIT"
] | null | null | null | backend/animal_adoption/apps/core/admin.py | PedroHenriqueDevBR/aplicacao-para-adocao-de-animais | 041f041ebbc86147b0192734fd9651fb317fbe36 | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import (
Person,
State,
City,
AnimalType,
Animal,
AnimalPhoto,
VaccineBook,
BlockedReason,
AdoptionRequest,
)
@admin.register(Person)
class PersonAdmin(admin.ModelAdmin):
list_display = (
"id",
"image",
"contact",
"latitude",
"longitude",
"is_moderator",
"is_sponsor",
"city",
"user",
)
@admin.register(State)
class StateAdmin(admin.ModelAdmin):
list_display = (
"id",
"name",
)
@admin.register(City)
class CityAdmin(admin.ModelAdmin):
list_display = (
"id",
"name",
"state",
)
@admin.register(AnimalType)
class AnimalTypeAdmin(admin.ModelAdmin):
list_display = (
"id",
"name",
)
@admin.register(Animal)
class AnimalAdmin(admin.ModelAdmin):
list_display = (
"id",
"name",
"breed",
"age",
"sex",
"adopted",
"blocked",
"create_at",
"owner",
"animal_type",
)
@admin.register(AnimalPhoto)
class AnimalPhotoAdmin(admin.ModelAdmin):
list_display = (
"id",
"photo",
"animal",
)
@admin.register(VaccineBook)
class VaccineBookAdmin(admin.ModelAdmin):
list_display = (
"id",
"vaccine_name",
"date",
"animal",
)
@admin.register(BlockedReason)
class BlockedReasonAdmin(admin.ModelAdmin):
list_display = (
"id",
"create_at",
"reason",
"person_requester",
"blocked_animal",
)
@admin.register(AdoptionRequest)
class AdoptionRequestAdmi(admin.ModelAdmin):
list_display = (
"id",
"create_at",
"is_acepted",
"requester",
"animal",
)
| 16.645455 | 44 | 0.543965 |
2105b38400e760342a1eb121c1b701ba5023dde7 | 67,296 | py | Python | wagtailmenus/tests/test_menu_rendering.py | cazgp/wagtailmenus | b0a6acb281227c93b3b4f11265366da0dada4248 | [
"MIT"
] | 329 | 2016-01-28T16:20:16.000Z | 2022-01-31T03:43:54.000Z | wagtailmenus/tests/test_menu_rendering.py | cazgp/wagtailmenus | b0a6acb281227c93b3b4f11265366da0dada4248 | [
"MIT"
] | 337 | 2016-04-15T11:09:44.000Z | 2022-01-31T10:01:32.000Z | wagtailmenus/tests/test_menu_rendering.py | cazgp/wagtailmenus | b0a6acb281227c93b3b4f11265366da0dada4248 | [
"MIT"
] | 105 | 2016-06-17T15:45:07.000Z | 2022-01-21T21:23:56.000Z | from bs4 import BeautifulSoup
from django.test import TestCase, override_settings
from wagtail.core.models import Site
from wagtailmenus.errors import SubMenuUsageError
from wagtailmenus.models import MainMenu, FlatMenu
from wagtailmenus.templatetags.menu_tags import validate_supplied_values
class TestTemplateTags(TestCase):
fixtures = ['test.json']
maxDiff = None
def test_main_menu_created_when_not_exists(self):
menu = MainMenu.objects.get(pk=1)
self.assertEqual(menu.__str__(), 'Main menu for wagtailmenus (co.uk)')
menu.delete()
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
menu = MainMenu.objects.first()
self.assertTrue(menu)
self.assertEqual(menu.__str__(), 'Main menu for wagtailmenus (co.uk)')
def test_flat_menu_get_for_site_with_default_fallback(self):
site_one = Site.objects.get(pk=1)
site_two = Site.objects.get(pk=2)
# Site one (default) definitiely has a menu defined with the handle
# `footer`
menu = FlatMenu.get_for_site('footer', site_one)
site_one_menu_pk = menu.pk
self.assertIsNotNone(menu)
# Site two doesn't have any menus defined, so this should return None
menu = FlatMenu.get_for_site('footer', site_two)
self.assertIsNone(menu)
# But if we use the `use_default_site_menu_as_fallback` flag to fetch
# from the default site, we should get the one defined for site_one
menu = FlatMenu.get_for_site('footer', site_two, True)
self.assertIsNotNone(menu)
self.assertEqual(menu.pk, site_one_menu_pk)
def test_validate_supplied_values(self):
with self.assertRaisesMessage(ValueError, 'The `main_menu` tag expects `max_levels` to be an integer value between 1 and 5. Please review your template.'):
validate_supplied_values(tag='main_menu', max_levels=9)
with self.assertRaisesMessage(ValueError, 'The `main_menu` tag expects `max_levels` to be an integer value between 1 and 5. Please review your template.'):
validate_supplied_values(tag='main_menu', max_levels='1')
with self.assertRaises(ValueError):
validate_supplied_values(tag='main_menu', parent_page=False)
with self.assertRaises(ValueError):
validate_supplied_values(tag='main_menu', menuitem_or_page=5)
def test_homepage(self):
"""
Test that homepage (based on `MenuPage`) renders without errors.
"""
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
@override_settings(WAGTAILMENUS_SITE_SPECIFIC_TEMPLATE_DIRS=True,)
def test_about_us(self):
"""
Test that 'About us' page (based on `MenuPage`), with
`repeat_in_subnav=True`, renders without errors.
The `WAGTAILMENUS_SITE_SPECIFIC_TEMPLATE_DIRS` setting is also
applied to increase coverage in get_template() and
get_sub_menu_template() methods.
"""
response = self.client.get('/about-us/')
self.assertEqual(response.status_code, 200)
def test_meet_the_team(self):
"""
Test that 'Meet the team' page (based on `Page`), and within a
section with subnav, renders without errors.
"""
response = self.client.get('/about-us/meet-the-team/')
self.assertEqual(response.status_code, 200)
def test_marvel_comics(self):
"""
Test that 'Marvel comics' page (based on `Page`), and within a
section with subnav, renders without errors.
"""
response = self.client.get('/superheroes/marvel-comics/')
self.assertEqual(response.status_code, 200)
def test_staff_vacancies(self):
"""
Test that 'Staff vacancies' page (based on `Page`), with
`show_in_menus=False`, and within a section with subnav, renders
without errors.
"""
response = self.client.get('/about-us/staff-vacancies/')
self.assertEqual(response.status_code, 200)
def test_non_page(self):
"""
Test that there are no errors when rendering page template without
the `wagtailmenus.wagtail_hooks.wagtailmenu_params_helper()` method
having run to add helpful bits to the context.
"""
response = self.client.get('/custom-url/')
self.assertEqual(response.status_code, 200)
def test_homepage_main_menu_two_levels(self):
"""
Test '{{ main_menu }}' output for homepage
"""
response = self.client.get('/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='main-menu-two-levels').decode()
expected_menu_html = """
<div id="main-menu-two-levels">
<ul class="nav navbar-nav">
<li class="active"><a href="/">Home</a></li>
<li class=" dropdown">
<a href="/about-us/" class="dropdown-toggle" id="ddtoggle_6" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">About <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_6">
<li class=""><a href="/about-us/">Section home</a></li>
<li class=""><a href="/about-us/meet-the-team/">Meet the team</a></li>
<li class=""><a href="/about-us/our-heritage/">Our heritage</a></li>
<li class=""><a href="/about-us/mission-and-values/">Our mission and values</a></li>
</ul>
</li>
<li class=" dropdown">
<a href="/news-and-events/" class="dropdown-toggle" id="ddtoggle_14" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">News & events <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_14">
<li class=""><a href="/news-and-events/latest-news/">Latest news</a></li>
<li class=""><a href="/news-and-events/upcoming-events/">Upcoming events</a></li>
<li class=""><a href="/news-and-events/press/">In the press</a></li>
</ul>
</li>
<li class=""><a href="http://google.co.uk">Google</a></li>
<li class=" dropdown">
<a href="/contact-us/" class="dropdown-toggle" id="ddtoggle_18" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Contact us <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_18">
<li class="support"><a href="/contact-us/#support">Get support</a></li>
<li class="call"><a href="/contact-us/#call">Speak to someone</a></li>
<li class="map"><a href="/contact-us/#map">Map & directions</a></li>
</ul>
</li>
</ul>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_homepage_main_menu_three_levels(self):
"""
Test '{{ main_menu max_levels=3 }}' output for homepage
"""
response = self.client.get('/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='main-menu-three-levels').decode()
expected_menu_html = """
<div id="main-menu-three-levels">
<ul class="nav navbar-nav">
<li class="active"><a href="/">Home</a></li>
<li class=" dropdown">
<a href="/about-us/" class="dropdown-toggle" id="ddtoggle_6" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">About <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_6">
<li class=""><a href="/about-us/">Section home</a></li>
<li class=" dropdown">
<a href="/about-us/meet-the-team/" class="dropdown-toggle" id="ddtoggle_7" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Meet the team <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_7">
<li class=""><a href="/about-us/meet-the-team/staff-member-one/">Staff member one</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-two/">Staff member two</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-three/">Staff member three</a></li>
</ul>
</li>
<li class=""><a href="/about-us/our-heritage/">Our heritage</a></li>
<li class=""><a href="/about-us/mission-and-values/">Our mission and values</a></li>
</ul>
</li>
<li class=" dropdown">
<a href="/news-and-events/" class="dropdown-toggle" id="ddtoggle_14" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">News & events <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_14">
<li class=""><a href="/news-and-events/latest-news/">Latest news</a></li>
<li class=""><a href="/news-and-events/upcoming-events/">Upcoming events</a></li>
<li class=""><a href="/news-and-events/press/">In the press</a></li>
</ul>
</li>
<li class=""><a href="http://google.co.uk">Google</a></li>
<li class=" dropdown">
<a href="/contact-us/" class="dropdown-toggle" id="ddtoggle_18" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Contact us <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_18">
<li class="support"><a href="/contact-us/#support">Get support</a></li>
<li class="call"><a href="/contact-us/#call">Speak to someone</a></li>
<li class="map"><a href="/contact-us/#map">Map & directions</a></li>
</ul>
</li>
</ul>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_homepage_main_menu_absolute_urls(self):
"""
Test '{{ main_menu use_absolute_page_urls=True }}' output for homepage
"""
response = self.client.get('/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='main-menu-absolute-url').decode()
expected_menu_html = """
<div id="main-menu-absolute-url">
<ul class="nav navbar-nav">
<li class="active">
<a href="http://www.wagtailmenus.co.uk:8000/">Home</a>
</li>
<li class=" dropdown">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/" class="dropdown-toggle" id="ddtoggle_6" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">About <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_6">
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/">Section home</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/meet-the-team/">Meet the team</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/our-heritage/">Our heritage</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/mission-and-values/">Our mission and values</a>
</li>
</ul>
</li>
<li class=" dropdown">
<a href="http://www.wagtailmenus.co.uk:8000/news-and-events/" class="dropdown-toggle" id="ddtoggle_14" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">News & events <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_14">
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/news-and-events/latest-news/">Latest news</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/news-and-events/upcoming-events/">Upcoming events</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/news-and-events/press/">In the press</a>
</li>
</ul>
</li>
<li class="">
<a href="http://google.co.uk">Google</a>
</li>
<li class=" dropdown">
<a href="http://www.wagtailmenus.co.uk:8000/contact-us/" class="dropdown-toggle" id="ddtoggle_18" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Contact us <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_18">
<li class="support">
<a href="/contact-us/#support">Get support</a>
</li>
<li class="call">
<a href="/contact-us/#call">Speak to someone</a>
</li>
<li class="map">
<a href="/contact-us/#map">Map & directions</a>
</li>
</ul>
</li>
</ul>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_homepage_children_menu_one_level(self):
"""
Test '{% children_menu %}' output for homepage
"""
response = self.client.get('/')
soup = BeautifulSoup(response.content, 'html5lib')
menu_html = soup.find(id='children-menu-one-level').decode()
expected_menu_html = """
<div id="children-menu-one-level">
<ul>
<li class=""><a href="/about-us/">About us</a></li>
<li class=""><a href="/news-and-events/">News & events</a></li>
<li class=""><a href="/contact-us/">Contact us</a></li>
<li class=""><a href="/legal/">Legal</a></li>
</ul>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_homepage_children_menu_three_levels(self):
"""
Test '{% children_menu max_levels=3 allow_repeating_parents=False %}' output for homepage
"""
response = self.client.get('/')
soup = BeautifulSoup(response.content, 'html5lib')
menu_html = soup.find(id='children-menu-three-levels').decode()
expected_menu_html = """
<div id="children-menu-three-levels">
<ul>
<li class=""><a href="/about-us/">About us</a>
<ul>
<li class="">
<a href="/about-us/meet-the-team/">Meet the team</a>
<ul>
<li class=""><a href="/about-us/meet-the-team/staff-member-one/">Staff member one</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-two/">Staff member two</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-three/">Staff member three</a></li>
</ul>
</li>
<li class=""><a href="/about-us/our-heritage/">Our heritage</a></li>
<li class=""><a href="/about-us/mission-and-values/">Our mission and values</a></li>
</ul>
</li>
<li class="">
<a href="/news-and-events/">News & events</a>
<ul>
<li class=""><a href="/news-and-events/latest-news/">Latest news</a></li>
<li class=""><a href="/news-and-events/upcoming-events/">Upcoming events</a></li>
<li class=""><a href="/news-and-events/press/">In the press</a></li>
</ul>
</li>
<li class=""><a href="/contact-us/">Contact us</a></li>
<li class="">
<a href="/legal/">Legal</a>
<ul>
<li class=""><a href="/legal/accessibility/">Accessibility</a></li>
<li class=""><a href="/legal/privacy-policy/">Privacy policy</a></li>
<li class=""><a href="/legal/terms-and-conditions/">Terms and conditions</a></li>
</ul>
</li>
</ul>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_homepage_children_absolute_urls(self):
"""
Test '{% children_menu use_absolute_page_urls=True %}' output for homepage
"""
response = self.client.get('/')
soup = BeautifulSoup(response.content, 'html5lib')
menu_html = soup.find(id='children-menu-absolute-url').decode()
expected_menu_html = """
<div id="children-menu-absolute-url">
<ul>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/">About us</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/news-and-events/">News & events</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/contact-us/">Contact us</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/legal/">Legal</a>
</li>
</ul>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_about_us_main_menu_two_levels(self):
"""
Test '{% main_menu %}' output for 'About us' page
"""
response = self.client.get('/about-us/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='main-menu-two-levels').decode()
expected_menu_html = """
<div id="main-menu-two-levels">
<ul class="nav navbar-nav">
<li class=""><a href="/">Home</a></li>
<li class="ancestor dropdown">
<a href="/about-us/" class="dropdown-toggle" id="ddtoggle_6" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">About <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_6">
<li class="active"><a href="/about-us/">Section home</a></li>
<li class=""><a href="/about-us/meet-the-team/">Meet the team</a></li>
<li class=""><a href="/about-us/our-heritage/">Our heritage</a></li>
<li class=""><a href="/about-us/mission-and-values/">Our mission and values</a></li>
</ul>
</li>
<li class=" dropdown">
<a href="/news-and-events/" class="dropdown-toggle" id="ddtoggle_14" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">News & events <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_14">
<li class=""><a href="/news-and-events/latest-news/">Latest news</a></li>
<li class=""><a href="/news-and-events/upcoming-events/">Upcoming events</a></li>
<li class=""><a href="/news-and-events/press/">In the press</a></li>
</ul>
</li>
<li class=""><a href="http://google.co.uk">Google</a></li>
<li class=" dropdown">
<a href="/contact-us/" class="dropdown-toggle" id="ddtoggle_18" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Contact us <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_18">
<li class="support"><a href="/contact-us/#support">Get support</a></li>
<li class="call"><a href="/contact-us/#call">Speak to someone</a></li>
<li class="map"><a href="/contact-us/#map">Map & directions</a></li>
</ul>
</li>
</ul>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_about_us_main_menu_three_levels(self):
"""
Test '{% main_menu max_levels=3 %}' output for 'About us' page
"""
response = self.client.get('/about-us/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='main-menu-three-levels').decode()
expected_menu_html = """
<div id="main-menu-three-levels">
<ul class="nav navbar-nav">
<li class=""><a href="/">Home</a></li>
<li class="ancestor dropdown">
<a href="/about-us/" class="dropdown-toggle" id="ddtoggle_6" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">About <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_6">
<li class="active">
<a href="/about-us/">Section home</a>
</li>
<li class=" dropdown">
<a href="/about-us/meet-the-team/" class="dropdown-toggle" id="ddtoggle_7" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Meet the team <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_7">
<li class=""><a href="/about-us/meet-the-team/staff-member-one/">Staff member one</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-two/">Staff member two</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-three/">Staff member three</a></li>
</ul>
</li>
<li class=""><a href="/about-us/our-heritage/">Our heritage</a></li>
<li class=""><a href="/about-us/mission-and-values/">Our mission and values</a></li>
</ul>
</li>
<li class=" dropdown">
<a href="/news-and-events/" class="dropdown-toggle" id="ddtoggle_14" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">News & events <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_14">
<li class=""><a href="/news-and-events/latest-news/">Latest news</a></li>
<li class=""><a href="/news-and-events/upcoming-events/">Upcoming events</a></li>
<li class=""><a href="/news-and-events/press/">In the press</a></li>
</ul>
</li>
<li class=""><a href="http://google.co.uk">Google</a></li>
<li class=" dropdown">
<a href="/contact-us/" class="dropdown-toggle" id="ddtoggle_18" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Contact us <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_18">
<li class="support"><a href="/contact-us/#support">Get support</a></li>
<li class="call"><a href="/contact-us/#call">Speak to someone</a></li>
<li class="map"><a href="/contact-us/#map">Map & directions</a></li>
</ul>
</li>
</ul>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_about_us_main_menu_absolute_urls(self):
"""
Test '{{ main_menu use_absolute_page_urls=True }}' output for homepage
"""
response = self.client.get('/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='main-menu-absolute-url').decode()
expected_menu_html = """
<div id="main-menu-absolute-url">
<ul class="nav navbar-nav">
<li class="active">
<a href="http://www.wagtailmenus.co.uk:8000/">Home</a>
</li>
<li class=" dropdown">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/" class="dropdown-toggle" id="ddtoggle_6" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">About <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_6">
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/">Section home</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/meet-the-team/">Meet the team</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/our-heritage/">Our heritage</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/mission-and-values/">Our mission and values</a>
</li>
</ul>
</li>
<li class=" dropdown">
<a href="http://www.wagtailmenus.co.uk:8000/news-and-events/" class="dropdown-toggle" id="ddtoggle_14" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">News & events <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_14">
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/news-and-events/latest-news/">Latest news</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/news-and-events/upcoming-events/">Upcoming events</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/news-and-events/press/">In the press</a>
</li>
</ul>
</li>
<li class="">
<a href="http://google.co.uk">Google</a>
</li>
<li class=" dropdown">
<a href="http://www.wagtailmenus.co.uk:8000/contact-us/" class="dropdown-toggle" id="ddtoggle_18" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Contact us <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_18">
<li class="support">
<a href="/contact-us/#support">Get support</a>
</li>
<li class="call">
<a href="/contact-us/#call">Speak to someone</a>
</li>
<li class="map">
<a href="/contact-us/#map">Map & directions</a>
</li>
</ul>
</li>
</ul>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_about_us_section_menu_two_levels(self):
"""
Test '{% section_menu %}' output for 'About us' page
"""
response = self.client.get('/about-us/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='section-menu-two-levels').decode()
expected_menu_html = """
<div id="section-menu-two-levels">
<nav class="nav-section" role="navigation">
<a href="/about-us/" class="ancestor section_root">About us</a>
<ul>
<li class="active"><a href="/about-us/">Section home</a></li>
<li class="">
<a href="/about-us/meet-the-team/">Meet the team</a>
<ul>
<li class=""><a href="/about-us/meet-the-team/staff-member-one/">Staff member one</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-two/">Staff member two</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-three/">Staff member three</a></li>
</ul>
</li>
<li class=""><a href="/about-us/our-heritage/">Our heritage</a></li>
<li class=""><a href="/about-us/mission-and-values/">Our mission and values</a></li>
</ul>
</nav>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_about_us_section_menu_one_level(self):
"""
Test '{% section_menu max_levels=1 %}' output for 'About us' page
"""
response = self.client.get('/about-us/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='section-menu-one-level').decode()
expected_menu_html = """
<div id="section-menu-one-level">
<nav class="nav-section" role="navigation">
<a href="/about-us/" class="ancestor section_root">About us</a>
<ul>
<li class="active"><a href="/about-us/">Section home</a></li>
<li class=""><a href="/about-us/meet-the-team/">Meet the team</a></li>
<li class=""><a href="/about-us/our-heritage/">Our heritage</a></li>
<li class=""><a href="/about-us/mission-and-values/">Our mission and values</a></li>
</ul>
</nav>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_about_us_section_menu_absolute_urls(self):
"""
Test '{% section_menu use_absolute_page_urls=True %}' output for 'About us' page
"""
response = self.client.get('/about-us/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='section-menu-absolute-url').decode()
expected_menu_html = """
<div id="section-menu-absolute-url">
<nav class="nav-section" role="navigation">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/" class="ancestor section_root">About us</a>
<ul>
<li class="active">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/">Section home</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/meet-the-team/">Meet the team</a>
<ul>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/meet-the-team/staff-member-one/">Staff member one</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/meet-the-team/staff-member-two/">Staff member two</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/meet-the-team/staff-member-three/">Staff member three</a>
</li>
</ul>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/our-heritage/">Our heritage</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/mission-and-values/">Our mission and values</a>
</li>
</ul>
</nav>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_about_us_children_menu_one_level(self):
"""
Test '{{ sub_menu self }}' output for 'About us' page
"""
response = self.client.get('/about-us/')
soup = BeautifulSoup(response.content, 'html5lib')
menu_html = soup.find(id='children-menu-one-level').decode()
expected_menu_html = """
<div id="children-menu-one-level">
<ul>
<li class=""><a href="/about-us/">Section home</a></li>
<li class=""><a href="/about-us/meet-the-team/">Meet the team</a></li>
<li class=""><a href="/about-us/our-heritage/">Our heritage</a></li>
<li class=""><a href="/about-us/mission-and-values/">Our mission and values</a></li>
</ul>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_about_us_children_menu_three_levels(self):
"""
Test '{% children_menu max_levels=3 allow_repeating_parents=False %}' output for 'About us' page
"""
response = self.client.get('/about-us/')
soup = BeautifulSoup(response.content, 'html5lib')
menu_html = soup.find(id='children-menu-three-levels').decode()
expected_menu_html = """
<div id="children-menu-three-levels">
<ul>
<li class="">
<a href="/about-us/meet-the-team/">Meet the team</a>
<ul>
<li class=""><a href="/about-us/meet-the-team/staff-member-one/">Staff member one</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-two/">Staff member two</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-three/">Staff member three</a></li>
</ul>
</li>
<li class=""><a href="/about-us/our-heritage/">Our heritage</a></li>
<li class=""><a href="/about-us/mission-and-values/">Our mission and values</a></li>
</ul>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_about_us_children_absolute_urls(self):
"""
Test '{{ sub_menu self }}' output for 'About us' page
"""
response = self.client.get('/about-us/')
soup = BeautifulSoup(response.content, 'html5lib')
menu_html = soup.find(id='children-menu-absolute-urls').decode()
expected_menu_html = """
<div id="children-menu-absolute-urls">
<ul>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/">Section home</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/meet-the-team/">Meet the team</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/our-heritage/">Our heritage</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/about-us/mission-and-values/">Our mission and values</a>
</li>
</ul>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_marvel_comics_section_menu_two_levels(self):
"""
Test '{% section_menu %}' output for 'Marvel comics' page
"""
response = self.client.get('/superheroes/marvel-comics/')
soup = BeautifulSoup(response.content, 'html5lib')
menu_html = soup.find(id='section-menu-two-levels').decode()
expected_menu_html = """
<div id="section-menu-two-levels">
<nav class="nav-section" role="navigation">
<a href="/superheroes/" class="ancestor section_root">Superheroes</a>
<ul>
<li class="active">
<a href="/superheroes/marvel-comics/">Marvel Comics</a>
<ul>
<li class=""><a href="/superheroes/marvel-comics/iron-man/">Iron Man</a></li>
<li class=""><a href="/superheroes/marvel-comics/spiderman/">Spiderman</a></li>
</ul>
</li>
<li class="">
<a href="/superheroes/dc-comics/">D.C. Comics</a>
<ul>
<li class=""><a href="/superheroes/dc-comics/batman/">Batman</a></li>
<li class="">
<a href="/superheroes/dc-comics/wonder-woman/">Wonder Woman</a>
</li>
</ul>
</li>
</ul>
</nav>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_marvel_comics_section_menu_one_level(self):
"""
Test '{% section_menu max_levels=1 %}' output for 'Marvel comics' page
"""
response = self.client.get('/superheroes/marvel-comics/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='section-menu-one-level').decode()
expected_menu_html = """
<div id="section-menu-one-level">
<nav class="nav-section" role="navigation">
<a href="/superheroes/" class="ancestor section_root">Superheroes</a>
<ul>
<li class="active"><a href="/superheroes/marvel-comics/">Marvel Comics</a></li>
<li class=""><a href="/superheroes/dc-comics/">D.C. Comics</a></li>
</ul>
</nav>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_marvel_comics_section_absolute_urls(self):
"""
Test '{% section_menu use_absolute_page_urls=True %}' output for 'Marvel comics' page
"""
response = self.client.get('/superheroes/marvel-comics/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='section-menu-absolute-url').decode()
expected_menu_html = """
<div id="section-menu-absolute-url">
<nav class="nav-section" role="navigation">
<a href="http://www.wagtailmenus.co.uk:8000/superheroes/" class="ancestor section_root">Superheroes</a>
<ul>
<li class="active">
<a href="http://www.wagtailmenus.co.uk:8000/superheroes/marvel-comics/">Marvel Comics</a>
<ul>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/superheroes/marvel-comics/iron-man/">Iron Man</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/superheroes/marvel-comics/spiderman/">Spiderman</a>
</li>
</ul>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/superheroes/dc-comics/">D.C. Comics</a>
<ul>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/superheroes/dc-comics/batman/">Batman</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/superheroes/dc-comics/wonder-woman/">Wonder Woman</a>
</li>
</ul>
</li>
</ul>
</nav>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_contact_flat_menu_output(self):
"""
Test that the HTML output by the 'flat_menu' tag (when using the handle 'contact') renders as expected.
"""
response = self.client.get('/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='nav-contact').decode()
expected_menu_html = """<div id="nav-contact"><div class="flat-menu contact no_heading"><ul><li class=""><a href="/contact-us/#offices">Call us</a></li><li class=""><a href="#advisor-chat">Chat to an advisor</a></li><li class=""><a href="#request-callback">Request a callback</a></li></ul></div></div>"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_footer_flat_menu_output(self):
"""
Test that the HTML output by the 'flat_menu' tag (when using the handle 'footer') renders as expected.
"""
response = self.client.get('/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='nav-footer').decode()
expected_menu_html = """
<div id="nav-footer">
<div class="flat-menu footer with_heading">
<h4>Important links</h4>
<ul>
<li class=""><a href="/legal/accessibility/">Accessibility</a></li>
<li class=""><a href="/legal/privacy-policy/">Privacy policy</a></li>
<li class=""><a href="/legal/terms-and-conditions/">Terms and conditions</a></li>
<li class=""><a href="/about-us/meet-the-team/custom-url/">Meet the team's pets</a></li>
</ul>
</div>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
response = self.client.get('/legal/privacy-policy/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='nav-footer').decode()
expected_menu_html = """
<div id="nav-footer">
<div class="flat-menu footer with_heading">
<h4>Important links</h4>
<ul>
<li class=""><a href="/legal/accessibility/">Accessibility</a></li>
<li class="active"><a href="/legal/privacy-policy/">Privacy policy</a></li>
<li class=""><a href="/legal/terms-and-conditions/">Terms and conditions</a></li>
<li class=""><a href="/about-us/meet-the-team/custom-url/">Meet the team's pets</a></li>
</ul>
</div>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
response = self.client.get('/about-us/meet-the-team/custom-url/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='nav-footer').decode()
expected_menu_html = """
<div id="nav-footer">
<div class="flat-menu footer with_heading">
<h4>Important links</h4>
<ul>
<li class=""><a href="/legal/accessibility/">Accessibility</a></li>
<li class=""><a href="/legal/privacy-policy/">Privacy policy</a></li>
<li class=""><a href="/legal/terms-and-conditions/">Terms and conditions</a></li>
<li class="active"><a href="/about-us/meet-the-team/custom-url/">Meet the team's pets</a></li>
</ul>
</div>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
response = self.client.get('/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='nav-footer-absolute-urls').decode()
expected_menu_html = """
<div id="nav-footer-absolute-urls">
<div class="flat-menu footer with_heading">
<h4>Important links</h4>
<ul>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/legal/accessibility/">Accessibility</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/legal/privacy-policy/">Privacy policy</a>
</li>
<li class="">
<a href="http://www.wagtailmenus.co.uk:8000/legal/terms-and-conditions/">Terms and conditions</a>
</li>
<li class="">
<a href="/about-us/meet-the-team/custom-url/">Meet the team's pets</a>
</li>
</ul>
</div>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_custom_page_menu_output(self):
response = self.client.get('/custom-url/')
soup = BeautifulSoup(response.content, 'html5lib')
main_menu_html = soup.find(id='main-menu-two-levels').decode()
expected_menu_html = """
<div id="main-menu-two-levels">
<ul class="nav navbar-nav">
<li class=""><a href="/">Home</a></li>
<li class=" dropdown">
<a href="/about-us/" class="dropdown-toggle" id="ddtoggle_6" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">About <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_6">
<li class=""><a href="/about-us/">Section home</a></li>
<li class=""><a href="/about-us/meet-the-team/">Meet the team</a></li>
<li class=""><a href="/about-us/our-heritage/">Our heritage</a></li>
<li class=""><a href="/about-us/mission-and-values/">Our mission and values</a></li>
</ul>
</li>
<li class=" dropdown">
<a href="/news-and-events/" class="dropdown-toggle" id="ddtoggle_14" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">News & events <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_14">
<li class=""><a href="/news-and-events/latest-news/">Latest news</a></li>
<li class=""><a href="/news-and-events/upcoming-events/">Upcoming events</a></li>
<li class=""><a href="/news-and-events/press/">In the press</a></li>
</ul>
</li>
<li class=""><a href="http://google.co.uk">Google</a></li>
<li class=" dropdown">
<a href="/contact-us/" class="dropdown-toggle" id="ddtoggle_18" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Contact us <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_18">
<li class="support"><a href="/contact-us/#support">Get support</a></li>
<li class="call"><a href="/contact-us/#call">Speak to someone</a></li>
<li class="map"><a href="/contact-us/#map">Map & directions</a></li>
</ul>
</li>
</ul>
</div>
"""
self.assertHTMLEqual(main_menu_html, expected_menu_html)
section_menu_html = soup.find(id='section-menu-two-levels').decode()
expected_menu_html = """<div id="section-menu-two-levels"></div>"""
self.assertHTMLEqual(section_menu_html, expected_menu_html)
def test_custom_about_us_url_section_menu_two_levels(self):
"""
Test '{% section_menu max_levels=2 %}' output for a custom url that
looks like a page from the 'about us' section, but isn't.
'about-us' and 'meet-the-team' items should be identified as
'ancestors', as indicated by the request path.
"""
response = self.client.get('/about-us/meet-the-team/custom-url/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='section-menu-two-levels').decode()
expected_menu_html = """
<div id="section-menu-two-levels">
<nav class="nav-section" role="navigation">
<a href="/about-us/" class="ancestor section_root">About us</a>
<ul>
<li class=""><a href="/about-us/">Section home</a></li>
<li class="ancestor">
<a href="/about-us/meet-the-team/">Meet the team</a>
<ul>
<li class=""><a href="/about-us/meet-the-team/staff-member-one/">Staff member one</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-two/">Staff member two</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-three/">Staff member three</a></li>
</ul>
</li>
<li class=""><a href="/about-us/our-heritage/">Our heritage</a></li>
<li class=""><a href="/about-us/mission-and-values/">Our mission and values</a></li>
</ul>
</nav>
</div>
"""
self.assertEqual(response.status_code, 200)
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_custom_about_us_url_main_menu_two_levels(self):
"""
Test '{% main_menu max_levels=2 %}' output for a custom url that
looks like a page from the 'about us' section, but isn't.
'about-us' and 'meet-the-team' items should be identified as
'ancestors', as indicated by the request path.
"""
response = self.client.get('/about-us/meet-the-team/custom-url/')
self.assertEqual(response.status_code, 200)
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='main-menu-two-levels').decode()
expected_menu_html = """
<div id="main-menu-two-levels">
<ul class="nav navbar-nav">
<li class=""><a href="/">Home</a></li>
<li class="ancestor dropdown">
<a href="/about-us/" class="dropdown-toggle" id="ddtoggle_6" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">About <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_6">
<li class=""><a href="/about-us/">Section home</a></li>
<li class="ancestor"><a href="/about-us/meet-the-team/">Meet the team</a></li>
<li class=""><a href="/about-us/our-heritage/">Our heritage</a></li>
<li class=""><a href="/about-us/mission-and-values/">Our mission and values</a></li>
</ul>
</li>
<li class=" dropdown">
<a href="/news-and-events/" class="dropdown-toggle" id="ddtoggle_14" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">News & events <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_14">
<li class=""><a href="/news-and-events/latest-news/">Latest news</a></li>
<li class=""><a href="/news-and-events/upcoming-events/">Upcoming events</a></li>
<li class=""><a href="/news-and-events/press/">In the press</a></li>
</ul>
</li>
<li class=""><a href="http://google.co.uk">Google</a></li>
<li class=" dropdown">
<a href="/contact-us/" class="dropdown-toggle" id="ddtoggle_18" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Contact us <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_18">
<li class="support"><a href="/contact-us/#support">Get support</a></li>
<li class="call"><a href="/contact-us/#call">Speak to someone</a></li>
<li class="map"><a href="/contact-us/#map">Map & directions</a></li>
</ul>
</li>
</ul>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_custom_superheroes_url_section_menu_two_levels(self):
"""
Test '{% section_menu max_levels=2 %}' output for a custom url that
looks like a page from the superheroes section, but isn't.
'superheroes' and 'marvel-comics' items should be identified as
'ancestors', as indicated by the request path.
"""
response = self.client.get('/superheroes/marvel-comics/custom-man/about/')
self.assertEqual(response.status_code, 200)
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='section-menu-two-levels').decode()
expected_menu_html = """
<div id="section-menu-two-levels">
<nav class="nav-section" role="navigation">
<a href="/superheroes/" class="ancestor section_root">Superheroes</a>
<ul>
<li class="ancestor">
<a href="/superheroes/marvel-comics/">Marvel Comics</a>
<ul>
<li class=""><a href="/superheroes/marvel-comics/iron-man/">Iron Man</a></li>
<li class=""><a href="/superheroes/marvel-comics/spiderman/">Spiderman</a></li>
</ul>
</li>
<li class="">
<a href="/superheroes/dc-comics/">D.C. Comics</a>
<ul>
<li class=""><a href="/superheroes/dc-comics/batman/">Batman</a></li>
<li class="">
<a href="/superheroes/dc-comics/wonder-woman/">Wonder Woman</a>
</li>
</ul>
</li>
</ul>
</nav>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_staffmember_direct_url_main_menu(self):
"""
Test '{% main_menu max_levels=3 %}' when serving the following URL:
/about-us/meet-the-team/staff-member-one/
It's a real page in the tree, so we want to identify it and highlight
it as active, but it's not being served via Wagtail's `serve_page`, so
the page is identified using the request path.
"""
response = self.client.get('/about-us/meet-the-team/staff-member-one/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='main-menu-three-levels').decode()
expected_menu_html = """
<div id="main-menu-three-levels">
<ul class="nav navbar-nav">
<li class=""><a href="/">Home</a></li>
<li class="ancestor dropdown">
<a href="/about-us/" class="dropdown-toggle" id="ddtoggle_6" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">About <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_6">
<li class="">
<a href="/about-us/">Section home</a>
</li>
<li class="ancestor dropdown">
<a href="/about-us/meet-the-team/" class="dropdown-toggle" id="ddtoggle_7" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Meet the team <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_7">
<li class="active"><a href="/about-us/meet-the-team/staff-member-one/">Staff member one</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-two/">Staff member two</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-three/">Staff member three</a></li>
</ul>
</li>
<li class=""><a href="/about-us/our-heritage/">Our heritage</a></li>
<li class=""><a href="/about-us/mission-and-values/">Our mission and values</a></li>
</ul>
</li>
<li class=" dropdown">
<a href="/news-and-events/" class="dropdown-toggle" id="ddtoggle_14" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">News & events <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_14">
<li class=""><a href="/news-and-events/latest-news/">Latest news</a></li>
<li class=""><a href="/news-and-events/upcoming-events/">Upcoming events</a></li>
<li class=""><a href="/news-and-events/press/">In the press</a></li>
</ul>
</li>
<li class=""><a href="http://google.co.uk">Google</a></li>
<li class=" dropdown">
<a href="/contact-us/" class="dropdown-toggle" id="ddtoggle_18" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Contact us <span class="caret"></span></a>
<ul class="dropdown-menu" aria-labelledby="ddtoggle_18">
<li class="support"><a href="/contact-us/#support">Get support</a></li>
<li class="call"><a href="/contact-us/#call">Speak to someone</a></li>
<li class="map"><a href="/contact-us/#map">Map & directions</a></li>
</ul>
</li>
</ul>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_staffmember_direct_url_section_menu(self):
"""
Test '{% section_menu max_levels=2 %}' when serving the following URL:
/about-us/meet-the-team/staff-member-one/
It's a real page in the tree, so we want to identify it and highlight
it as active, but it's not being served via Wagtail's `serve_page`, so
the page is identified using the request path.
"""
response = self.client.get('/about-us/meet-the-team/staff-member-one/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='section-menu-two-levels').decode()
expected_menu_html = """
<div id="section-menu-two-levels">
<nav class="nav-section" role="navigation">
<a href="/about-us/" class="ancestor section_root">About us</a>
<ul>
<li class=""><a href="/about-us/">Section home</a></li>
<li class="ancestor">
<a href="/about-us/meet-the-team/">Meet the team</a>
<ul>
<li class="active"><a href="/about-us/meet-the-team/staff-member-one/">Staff member one</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-two/">Staff member two</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-three/">Staff member three</a></li>
</ul>
</li>
<li class=""><a href="/about-us/our-heritage/">Our heritage</a></li>
<li class=""><a href="/about-us/mission-and-values/">Our mission and values</a></li>
</ul>
</nav>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_news_and_events_section_menu(self):
"""
Test '{% section_menu max_levels=2 %}' when serving the following URL:
/news-and-events/
It's a real page in the tree, so we want to identify it and highlight
it as active, but it's not being served via Wagtail's `serve_page`, so
the page is identified using the request path.
"""
response = self.client.get('/news-and-events/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='section-menu-two-levels').decode()
expected_menu_html = """
<div id="section-menu-two-levels">
<nav class="nav-section" role="navigation">
<a href="/news-and-events/" class="active section_root">News & events</a>
<ul>
<li class=""><a href="/news-and-events/latest-news/">Latest news</a></li>
<li class=""><a href="/news-and-events/upcoming-events/">Upcoming events</a></li>
<li class=""><a href="/news-and-events/press/">In the press</a></li>
</ul>
</nav>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
def test_sub_menu_tag_usage_in_non_menu_template_raises_submenuusageerror(self):
"""
The 'sub_menu' tag should raise an error if used directly (not from
within another menu template)
"""
with self.assertRaises(SubMenuUsageError):
self.client.get('/sub_menu-tag-used-directly/')
def test_main_menu_with_sub_menu_templates(self):
"""
Test '{% main_menu %}' output for 'Home' page when 'sub_menu_templates'
is used to specify different templates for each level
"""
response = self.client.get('/')
soup = BeautifulSoup(response.content, 'html5lib')
# Assertions to compare rendered HTML against expected HTML
menu_html = soup.find(id='main-menu-sub-menu-templates').decode()
expected_menu_html = """
<div id="main-menu-sub-menu-templates">
<ul class="nav navbar-nav">
<li class="active"><a href="/">Home</a></li>
<li class=" dropdown">
<a href="/about-us/" class="dropdown-toggle" id="ddtoggle_6" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">About <span class="caret"></span></a>
<ul class="sub-menu-level-2" data-level="2">
<li class=""><a href="/about-us/">Section home</a></li>
<li class="">
<a href="/about-us/meet-the-team/">Meet the team</a>
<ul class="sub-menu-level-3" data-level="3">
<li class=""><a href="/about-us/meet-the-team/staff-member-one/">Staff member one</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-two/">Staff member two</a></li>
<li class=""><a href="/about-us/meet-the-team/staff-member-three/">Staff member three</a></li>
</ul>
</li>
<li class=""><a href="/about-us/our-heritage/">Our heritage</a></li>
<li class=""><a href="/about-us/mission-and-values/">Our mission and values</a></li>
</ul>
</li>
<li class=" dropdown">
<a href="/news-and-events/" class="dropdown-toggle" id="ddtoggle_14" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">News & events <span class="caret"></span></a>
<ul class="sub-menu-level-2" data-level="2">
<li class=""><a href="/news-and-events/latest-news/">Latest news</a></li>
<li class=""><a href="/news-and-events/upcoming-events/">Upcoming events</a></li>
<li class=""><a href="/news-and-events/press/">In the press</a></li>
</ul>
</li>
<li class=""><a href="http://google.co.uk">Google</a></li>
<li class=" dropdown">
<a href="/contact-us/" class="dropdown-toggle" id="ddtoggle_18" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">Contact us <span class="caret"></span></a>
<ul class="sub-menu-level-2" data-level="2">
<li class="support"><a href="/contact-us/#support">Get support</a></li>
<li class="call"><a href="/contact-us/#call">Speak to someone</a></li>
<li class="map"><a href="/contact-us/#map">Map & directions</a></li>
</ul>
</li>
</ul>
</div>
"""
self.assertHTMLEqual(menu_html, expected_menu_html)
| 53.622311 | 312 | 0.520492 |
ac4a2993270860545375985cc80a70e8a210a675 | 182 | py | Python | tests/test_number.py | ohyecloudy/oh-python | 5bb2c4b91f79f83169bd129c32b82c086d4221d9 | [
"MIT"
] | null | null | null | tests/test_number.py | ohyecloudy/oh-python | 5bb2c4b91f79f83169bd129c32b82c086d4221d9 | [
"MIT"
] | null | null | null | tests/test_number.py | ohyecloudy/oh-python | 5bb2c4b91f79f83169bd129c32b82c086d4221d9 | [
"MIT"
] | null | null | null | import unittest
class TestNumberMethods(unittest.TestCase):
def test_power_operator(self):
self.assertEqual(5 ** 2, 25)
if __name__ == '__main__':
unittest.main()
| 18.2 | 43 | 0.697802 |
6f5cda8f9918d326b9b4ae73813231b5b1e2b9f6 | 1,469 | py | Python | tests/functional/docs/test_ec2.py | dtrimm/boto3 | 64ed0b140329e77d61571703ad8f6ff5cbf61122 | [
"Apache-2.0"
] | 1 | 2021-03-02T20:31:19.000Z | 2021-03-02T20:31:19.000Z | tests/functional/docs/test_ec2.py | dtrimm/boto3 | 64ed0b140329e77d61571703ad8f6ff5cbf61122 | [
"Apache-2.0"
] | null | null | null | tests/functional/docs/test_ec2.py | dtrimm/boto3 | 64ed0b140329e77d61571703ad8f6ff5cbf61122 | [
"Apache-2.0"
] | null | null | null | # Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests.functional.docs import BaseDocsFunctionalTests
from boto3.session import Session
from boto3.docs.service import ServiceDocumenter
class TestInstanceDeleteTags(BaseDocsFunctionalTests):
def setUp(self):
self.documenter = ServiceDocumenter(
'ec2', session=Session(region_name='us-east-1'))
self.generated_contents = self.documenter.document_service()
self.generated_contents = self.generated_contents.decode('utf-8')
def test_delete_tags_method_is_documented(self):
contents = self.get_class_document_block(
'EC2.Instance', self.generated_contents)
method_contents = self.get_method_document_block(
'delete_tags', contents)
self.assert_contains_lines_in_order([
'response = instance.delete_tags(',
'DryRun=True|False,',
'Tags=[',
], method_contents)
| 40.805556 | 73 | 0.718176 |
261c1f0a1b331f39c6376f06518a45d80f9c130c | 30,141 | py | Python | test/functional/test_runner.py | susheel2335/bitcoin | 3b6d1b61d31674e033e821046baa95c66dedf623 | [
"MIT"
] | 2 | 2021-01-30T08:34:39.000Z | 2022-02-16T06:50:37.000Z | test/functional/test_runner.py | whiteslack/bitcoin | f1f26b8d5baec4a45a3a9ba0440cd4eff7af8407 | [
"MIT"
] | 5 | 2022-02-15T13:18:30.000Z | 2022-03-11T03:25:18.000Z | test/functional/test_runner.py | edmont87/albcoin | 105e85f0bf13d5836f24ffa8a462f581f9f9f8cc | [
"MIT"
] | 1 | 2022-03-16T15:03:37.000Z | 2022-03-16T15:03:37.000Z | #!/usr/bin/env python3
# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Run regression test suite.
This module calls down into individual test cases via subprocess. It will
forward all unrecognized arguments onto the individual test scripts.
For a description of arguments recognized by test scripts, see
`test/functional/test_framework/test_framework.py:BitcoinTestFramework.main`.
"""
import argparse
from collections import deque
import configparser
import datetime
import os
import time
import shutil
import subprocess
import sys
import tempfile
import re
import logging
import unittest
# Formatting. Default colors to empty strings.
BOLD, GREEN, RED, GREY = ("", ""), ("", ""), ("", ""), ("", "")
try:
# Make sure python thinks it can write unicode to its stdout
"\u2713".encode("utf_8").decode(sys.stdout.encoding)
TICK = "✓ "
CROSS = "✖ "
CIRCLE = "○ "
except UnicodeDecodeError:
TICK = "P "
CROSS = "x "
CIRCLE = "o "
if os.name != 'nt' or sys.getwindowsversion() >= (10, 0, 14393):
if os.name == 'nt':
import ctypes
kernel32 = ctypes.windll.kernel32 # type: ignore
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4
STD_OUTPUT_HANDLE = -11
STD_ERROR_HANDLE = -12
# Enable ascii color control to stdout
stdout = kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
stdout_mode = ctypes.c_int32()
kernel32.GetConsoleMode(stdout, ctypes.byref(stdout_mode))
kernel32.SetConsoleMode(stdout, stdout_mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
# Enable ascii color control to stderr
stderr = kernel32.GetStdHandle(STD_ERROR_HANDLE)
stderr_mode = ctypes.c_int32()
kernel32.GetConsoleMode(stderr, ctypes.byref(stderr_mode))
kernel32.SetConsoleMode(stderr, stderr_mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
# primitive formatting on supported
# terminal via ANSI escape sequences:
BOLD = ('\033[0m', '\033[1m')
GREEN = ('\033[0m', '\033[0;32m')
RED = ('\033[0m', '\033[0;31m')
GREY = ('\033[0m', '\033[1;30m')
TEST_EXIT_PASSED = 0
TEST_EXIT_SKIPPED = 77
TEST_FRAMEWORK_MODULES = [
"address",
"blocktools",
"muhash",
"key",
"script",
"segwit_addr",
"util",
]
EXTENDED_SCRIPTS = [
# These tests are not run by default.
# Longest test should go first, to favor running tests in parallel
'feature_pruning.py',
'feature_dbcrash.py',
]
BASE_SCRIPTS = [
# Scripts that are run by default.
# Longest test should go first, to favor running tests in parallel
'wallet_hd.py',
'wallet_hd.py --descriptors',
'wallet_backup.py',
'wallet_backup.py --descriptors',
# vv Tests less than 5m vv
'mining_getblocktemplate_longpoll.py',
'feature_maxuploadtarget.py',
'feature_block.py',
'rpc_fundrawtransaction.py',
'rpc_fundrawtransaction.py --descriptors',
'p2p_compactblocks.py',
'feature_segwit.py --legacy-wallet',
# vv Tests less than 2m vv
'wallet_basic.py',
'wallet_basic.py --descriptors',
'wallet_labels.py',
'wallet_labels.py --descriptors',
'p2p_segwit.py',
'p2p_timeouts.py',
'p2p_tx_download.py',
'mempool_updatefromblock.py',
'wallet_dump.py --legacy-wallet',
'wallet_listtransactions.py',
'wallet_listtransactions.py --descriptors',
'feature_taproot.py',
# vv Tests less than 60s vv
'p2p_sendheaders.py',
'wallet_importmulti.py --legacy-wallet',
'mempool_limit.py',
'rpc_txoutproof.py',
'wallet_listreceivedby.py',
'wallet_listreceivedby.py --descriptors',
'wallet_abandonconflict.py',
'wallet_abandonconflict.py --descriptors',
'feature_csv_activation.py',
'rpc_rawtransaction.py',
'rpc_rawtransaction.py --descriptors',
'wallet_address_types.py',
'wallet_address_types.py --descriptors',
'feature_bip68_sequence.py',
'p2p_feefilter.py',
'feature_reindex.py',
'feature_abortnode.py',
# vv Tests less than 30s vv
'wallet_keypool_topup.py',
'wallet_keypool_topup.py --descriptors',
'feature_fee_estimation.py',
'interface_zmq.py',
'interface_bitcoin_cli.py',
'mempool_resurrect.py',
'wallet_txn_doublespend.py --mineblock',
'tool_wallet.py',
'tool_wallet.py --descriptors',
'wallet_txn_clone.py',
'wallet_txn_clone.py --segwit',
'rpc_getchaintips.py',
'rpc_misc.py',
'interface_rest.py',
'mempool_spend_coinbase.py',
'wallet_avoidreuse.py',
'wallet_avoidreuse.py --descriptors',
'mempool_reorg.py',
'mempool_persist.py',
'wallet_multiwallet.py',
'wallet_multiwallet.py --descriptors',
'wallet_multiwallet.py --usecli',
'wallet_createwallet.py',
'wallet_createwallet.py --usecli',
'wallet_createwallet.py --descriptors',
'wallet_watchonly.py --legacy-wallet',
'wallet_watchonly.py --usecli --legacy-wallet',
'wallet_reorgsrestore.py',
'interface_http.py',
'interface_rpc.py',
'rpc_psbt.py',
'rpc_psbt.py --descriptors',
'rpc_users.py',
'rpc_whitelist.py',
'feature_proxy.py',
'rpc_signrawtransaction.py',
'rpc_signrawtransaction.py --descriptors',
'wallet_groups.py',
'p2p_addrv2_relay.py',
'wallet_groups.py --descriptors',
'p2p_disconnect_ban.py',
'rpc_decodescript.py',
'rpc_blockchain.py',
'rpc_deprecated.py',
'wallet_disable.py',
'wallet_disable.py --descriptors',
'p2p_addr_relay.py',
'p2p_getaddr_caching.py',
'p2p_getdata.py',
'rpc_net.py',
'wallet_keypool.py',
'wallet_keypool.py --descriptors',
'wallet_descriptor.py --descriptors',
'p2p_nobloomfilter_messages.py',
'p2p_filter.py',
'rpc_setban.py',
'p2p_blocksonly.py',
'mining_prioritisetransaction.py',
'p2p_invalid_locator.py',
'p2p_invalid_block.py',
'p2p_invalid_messages.py',
'p2p_invalid_tx.py',
'feature_assumevalid.py',
'example_test.py',
'wallet_txn_doublespend.py',
'wallet_txn_doublespend.py --descriptors',
'feature_backwards_compatibility.py',
'feature_backwards_compatibility.py --descriptors',
'wallet_txn_clone.py --mineblock',
'feature_notifications.py',
'rpc_getblockfilter.py',
'rpc_invalidateblock.py',
'feature_rbf.py',
'mempool_packages.py',
'mempool_package_onemore.py',
'rpc_createmultisig.py',
'rpc_createmultisig.py --descriptors',
'feature_versionbits_warning.py',
'rpc_preciousblock.py',
'wallet_importprunedfunds.py',
'wallet_importprunedfunds.py --descriptors',
'p2p_leak_tx.py',
'p2p_eviction.py',
'rpc_signmessage.py',
'rpc_generateblock.py',
'rpc_generate.py',
'wallet_balance.py',
'wallet_balance.py --descriptors',
'feature_nulldummy.py',
'feature_nulldummy.py --descriptors',
'mempool_accept.py',
'mempool_expiry.py',
'wallet_import_rescan.py --legacy-wallet',
'wallet_import_with_label.py --legacy-wallet',
'wallet_importdescriptors.py --descriptors',
'wallet_upgradewallet.py',
'rpc_bind.py --ipv4',
'rpc_bind.py --ipv6',
'rpc_bind.py --nonloopback',
'mining_basic.py',
'feature_signet.py',
'wallet_bumpfee.py',
'wallet_bumpfee.py --descriptors',
'wallet_implicitsegwit.py --legacy-wallet',
'rpc_named_arguments.py',
'wallet_listsinceblock.py',
'wallet_listsinceblock.py --descriptors',
'p2p_leak.py',
'wallet_encryption.py',
'wallet_encryption.py --descriptors',
'feature_dersig.py',
'feature_cltv.py',
'rpc_uptime.py',
'wallet_resendwallettransactions.py',
'wallet_resendwallettransactions.py --descriptors',
'wallet_fallbackfee.py',
'wallet_fallbackfee.py --descriptors',
'rpc_dumptxoutset.py',
'feature_minchainwork.py',
'rpc_estimatefee.py',
'rpc_getblockstats.py',
'wallet_create_tx.py',
'wallet_send.py',
'wallet_create_tx.py --descriptors',
'p2p_fingerprint.py',
'feature_uacomment.py',
'wallet_coinbase_category.py',
'wallet_coinbase_category.py --descriptors',
'feature_filelock.py',
'feature_loadblock.py',
'p2p_dos_header_tree.py',
'p2p_unrequested_blocks.py',
'p2p_blockfilters.py',
'feature_includeconf.py',
'feature_asmap.py',
'mempool_unbroadcast.py',
'mempool_compatibility.py',
'rpc_deriveaddresses.py',
'rpc_deriveaddresses.py --usecli',
'p2p_ping.py',
'rpc_scantxoutset.py',
'feature_logging.py',
'p2p_node_network_limited.py',
'p2p_permissions.py',
'feature_blocksdir.py',
'wallet_startup.py',
'feature_config_args.py',
'feature_settings.py',
'rpc_getdescriptorinfo.py',
'rpc_help.py',
'feature_help.py',
'feature_shutdown.py',
'p2p_ibd_txrelay.py',
# Don't append tests at the end to avoid merge conflicts
# Put them in a random line within the section that fits their approximate run-time
]
# Place EXTENDED_SCRIPTS first since it has the 3 longest running tests
ALL_SCRIPTS = EXTENDED_SCRIPTS + BASE_SCRIPTS
NON_SCRIPTS = [
# These are python files that live in the functional tests directory, but are not test scripts.
"combine_logs.py",
"create_cache.py",
"test_runner.py",
]
def main():
# Parse arguments and pass through unrecognised args
parser = argparse.ArgumentParser(add_help=False,
usage='%(prog)s [test_runner.py options] [script options] [scripts]',
description=__doc__,
epilog='''
Help text and arguments for individual test script:''',
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('--ansi', action='store_true', default=sys.stdout.isatty(), help="Use ANSI colors and dots in output (enabled by default when standard output is a TTY)")
parser.add_argument('--combinedlogslen', '-c', type=int, default=0, metavar='n', help='On failure, print a log (of length n lines) to the console, combined from the test framework and all test nodes.')
parser.add_argument('--coverage', action='store_true', help='generate a basic coverage report for the RPC interface')
parser.add_argument('--ci', action='store_true', help='Run checks and code that are usually only enabled in a continuous integration environment')
parser.add_argument('--exclude', '-x', help='specify a comma-separated-list of scripts to exclude.')
parser.add_argument('--extended', action='store_true', help='run the extended test suite in addition to the basic tests')
parser.add_argument('--help', '-h', '-?', action='store_true', help='print help text and exit')
parser.add_argument('--jobs', '-j', type=int, default=4, help='how many test scripts to run in parallel. Default=4.')
parser.add_argument('--keepcache', '-k', action='store_true', help='the default behavior is to flush the cache directory on startup. --keepcache retains the cache from the previous testrun.')
parser.add_argument('--quiet', '-q', action='store_true', help='only print dots, results summary and failure logs')
parser.add_argument('--tmpdirprefix', '-t', default=tempfile.gettempdir(), help="Root directory for datadirs")
parser.add_argument('--failfast', action='store_true', help='stop execution after the first test failure')
parser.add_argument('--filter', help='filter scripts to run by regular expression')
args, unknown_args = parser.parse_known_args()
if not args.ansi:
global BOLD, GREEN, RED, GREY
BOLD = ("", "")
GREEN = ("", "")
RED = ("", "")
GREY = ("", "")
# args to be passed on always start with two dashes; tests are the remaining unknown args
tests = [arg for arg in unknown_args if arg[:2] != "--"]
passon_args = [arg for arg in unknown_args if arg[:2] == "--"]
# Read config generated by configure.
config = configparser.ConfigParser()
configfile = os.path.abspath(os.path.dirname(__file__)) + "/../config.ini"
config.read_file(open(configfile, encoding="utf8"))
passon_args.append("--configfile=%s" % configfile)
# Set up logging
logging_level = logging.INFO if args.quiet else logging.DEBUG
logging.basicConfig(format='%(message)s', level=logging_level)
# Create base test directory
tmpdir = "%s/test_runner_₿_🏃_%s" % (args.tmpdirprefix, datetime.datetime.now().strftime("%Y%m%d_%H%M%S"))
os.makedirs(tmpdir)
logging.debug("Temporary test directory at %s" % tmpdir)
enable_bitcoind = config["components"].getboolean("ENABLE_BITCOIND")
if not enable_bitcoind:
print("No functional tests to run.")
print("Rerun ./configure with --with-daemon and then make")
sys.exit(0)
# Build list of tests
test_list = []
if tests:
# Individual tests have been specified. Run specified tests that exist
# in the ALL_SCRIPTS list. Accept names with or without a .py extension.
# Specified tests can contain wildcards, but in that case the supplied
# paths should be coherent, e.g. the same path as that provided to call
# test_runner.py. Examples:
# `test/functional/test_runner.py test/functional/wallet*`
# `test/functional/test_runner.py ./test/functional/wallet*`
# `test_runner.py wallet*`
# but not:
# `test/functional/test_runner.py wallet*`
# Multiple wildcards can be passed:
# `test_runner.py tool* mempool*`
for test in tests:
script = test.split("/")[-1]
script = script + ".py" if ".py" not in script else script
if script in ALL_SCRIPTS:
test_list.append(script)
else:
print("{}WARNING!{} Test '{}' not found in full test list.".format(BOLD[1], BOLD[0], test))
elif args.extended:
# Include extended tests
test_list += ALL_SCRIPTS
else:
# Run base tests only
test_list += BASE_SCRIPTS
# Remove the test cases that the user has explicitly asked to exclude.
if args.exclude:
exclude_tests = [test.split('.py')[0] for test in args.exclude.split(',')]
for exclude_test in exclude_tests:
# Remove <test_name>.py and <test_name>.py --arg from the test list
exclude_list = [test for test in test_list if test.split('.py')[0] == exclude_test]
for exclude_item in exclude_list:
test_list.remove(exclude_item)
if not exclude_list:
print("{}WARNING!{} Test '{}' not found in current test list.".format(BOLD[1], BOLD[0], exclude_test))
if args.filter:
test_list = list(filter(re.compile(args.filter).search, test_list))
if not test_list:
print("No valid test scripts specified. Check that your test is in one "
"of the test lists in test_runner.py, or run test_runner.py with no arguments to run all tests")
sys.exit(0)
if args.help:
# Print help for test_runner.py, then print help of the first script (with args removed) and exit.
parser.print_help()
subprocess.check_call([sys.executable, os.path.join(config["environment"]["SRCDIR"], 'test', 'functional', test_list[0].split()[0]), '-h'])
sys.exit(0)
check_script_list(src_dir=config["environment"]["SRCDIR"], fail_on_warn=args.ci)
check_script_prefixes()
if not args.keepcache:
shutil.rmtree("%s/test/cache" % config["environment"]["BUILDDIR"], ignore_errors=True)
run_tests(
test_list=test_list,
src_dir=config["environment"]["SRCDIR"],
build_dir=config["environment"]["BUILDDIR"],
tmpdir=tmpdir,
jobs=args.jobs,
enable_coverage=args.coverage,
args=passon_args,
combined_logs_len=args.combinedlogslen,
failfast=args.failfast,
use_term_control=args.ansi,
)
def run_tests(*, test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=False, args=None, combined_logs_len=0, failfast=False, use_term_control):
args = args or []
# Warn if bitcoind is already running
try:
# pgrep exits with code zero when one or more matching processes found
if subprocess.run(["pgrep", "-x", "bitcoind"], stdout=subprocess.DEVNULL).returncode == 0:
print("%sWARNING!%s There is already a bitcoind process running on this system. Tests may fail unexpectedly due to resource contention!" % (BOLD[1], BOLD[0]))
except OSError:
# pgrep not supported
pass
# Warn if there is a cache directory
cache_dir = "%s/test/cache" % build_dir
if os.path.isdir(cache_dir):
print("%sWARNING!%s There is a cache directory here: %s. If tests fail unexpectedly, try deleting the cache directory." % (BOLD[1], BOLD[0], cache_dir))
# Test Framework Tests
print("Running Unit Tests for Test Framework Modules")
test_framework_tests = unittest.TestSuite()
for module in TEST_FRAMEWORK_MODULES:
test_framework_tests.addTest(unittest.TestLoader().loadTestsFromName("test_framework.{}".format(module)))
result = unittest.TextTestRunner(verbosity=1, failfast=True).run(test_framework_tests)
if not result.wasSuccessful():
logging.debug("Early exiting after failure in TestFramework unit tests")
sys.exit(False)
tests_dir = src_dir + '/test/functional/'
flags = ['--cachedir={}'.format(cache_dir)] + args
if enable_coverage:
coverage = RPCCoverage()
flags.append(coverage.flag)
logging.debug("Initializing coverage directory at %s" % coverage.dir)
else:
coverage = None
if len(test_list) > 1 and jobs > 1:
# Populate cache
try:
subprocess.check_output([sys.executable, tests_dir + 'create_cache.py'] + flags + ["--tmpdir=%s/cache" % tmpdir])
except subprocess.CalledProcessError as e:
sys.stdout.buffer.write(e.output)
raise
#Run Tests
job_queue = TestHandler(
num_tests_parallel=jobs,
tests_dir=tests_dir,
tmpdir=tmpdir,
test_list=test_list,
flags=flags,
use_term_control=use_term_control,
)
start_time = time.time()
test_results = []
max_len_name = len(max(test_list, key=len))
test_count = len(test_list)
for i in range(test_count):
test_result, testdir, stdout, stderr = job_queue.get_next()
test_results.append(test_result)
done_str = "{}/{} - {}{}{}".format(i + 1, test_count, BOLD[1], test_result.name, BOLD[0])
if test_result.status == "Passed":
logging.debug("%s passed, Duration: %s s" % (done_str, test_result.time))
elif test_result.status == "Skipped":
logging.debug("%s skipped" % (done_str))
else:
print("%s failed, Duration: %s s\n" % (done_str, test_result.time))
print(BOLD[1] + 'stdout:\n' + BOLD[0] + stdout + '\n')
print(BOLD[1] + 'stderr:\n' + BOLD[0] + stderr + '\n')
if combined_logs_len and os.path.isdir(testdir):
# Print the final `combinedlogslen` lines of the combined logs
print('{}Combine the logs and print the last {} lines ...{}'.format(BOLD[1], combined_logs_len, BOLD[0]))
print('\n============')
print('{}Combined log for {}:{}'.format(BOLD[1], testdir, BOLD[0]))
print('============\n')
combined_logs_args = [sys.executable, os.path.join(tests_dir, 'combine_logs.py'), testdir]
if BOLD[0]:
combined_logs_args += ['--color']
combined_logs, _ = subprocess.Popen(combined_logs_args, universal_newlines=True, stdout=subprocess.PIPE).communicate()
print("\n".join(deque(combined_logs.splitlines(), combined_logs_len)))
if failfast:
logging.debug("Early exiting after test failure")
break
print_results(test_results, max_len_name, (int(time.time() - start_time)))
if coverage:
coverage_passed = coverage.report_rpc_coverage()
logging.debug("Cleaning up coverage data")
coverage.cleanup()
else:
coverage_passed = True
# Clear up the temp directory if all subdirectories are gone
if not os.listdir(tmpdir):
os.rmdir(tmpdir)
all_passed = all(map(lambda test_result: test_result.was_successful, test_results)) and coverage_passed
# This will be a no-op unless failfast is True in which case there may be dangling
# processes which need to be killed.
job_queue.kill_and_join()
sys.exit(not all_passed)
def print_results(test_results, max_len_name, runtime):
results = "\n" + BOLD[1] + "%s | %s | %s\n\n" % ("TEST".ljust(max_len_name), "STATUS ", "DURATION") + BOLD[0]
test_results.sort(key=TestResult.sort_key)
all_passed = True
time_sum = 0
for test_result in test_results:
all_passed = all_passed and test_result.was_successful
time_sum += test_result.time
test_result.padding = max_len_name
results += str(test_result)
status = TICK + "Passed" if all_passed else CROSS + "Failed"
if not all_passed:
results += RED[1]
results += BOLD[1] + "\n%s | %s | %s s (accumulated) \n" % ("ALL".ljust(max_len_name), status.ljust(9), time_sum) + BOLD[0]
if not all_passed:
results += RED[0]
results += "Runtime: %s s\n" % (runtime)
print(results)
class TestHandler:
"""
Trigger the test scripts passed in via the list.
"""
def __init__(self, *, num_tests_parallel, tests_dir, tmpdir, test_list, flags, use_term_control):
assert num_tests_parallel >= 1
self.num_jobs = num_tests_parallel
self.tests_dir = tests_dir
self.tmpdir = tmpdir
self.test_list = test_list
self.flags = flags
self.num_running = 0
self.jobs = []
self.use_term_control = use_term_control
def get_next(self):
while self.num_running < self.num_jobs and self.test_list:
# Add tests
self.num_running += 1
test = self.test_list.pop(0)
portseed = len(self.test_list)
portseed_arg = ["--portseed={}".format(portseed)]
log_stdout = tempfile.SpooledTemporaryFile(max_size=2**16)
log_stderr = tempfile.SpooledTemporaryFile(max_size=2**16)
test_argv = test.split()
testdir = "{}/{}_{}".format(self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed)
tmpdir_arg = ["--tmpdir={}".format(testdir)]
self.jobs.append((test,
time.time(),
subprocess.Popen([sys.executable, self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir_arg,
universal_newlines=True,
stdout=log_stdout,
stderr=log_stderr),
testdir,
log_stdout,
log_stderr))
if not self.jobs:
raise IndexError('pop from empty list')
# Print remaining running jobs when all jobs have been started.
if not self.test_list:
print("Remaining jobs: [{}]".format(", ".join(j[0] for j in self.jobs)))
dot_count = 0
while True:
# Return first proc that finishes
time.sleep(.5)
for job in self.jobs:
(name, start_time, proc, testdir, log_out, log_err) = job
if proc.poll() is not None:
log_out.seek(0), log_err.seek(0)
[stdout, stderr] = [log_file.read().decode('utf-8') for log_file in (log_out, log_err)]
log_out.close(), log_err.close()
if proc.returncode == TEST_EXIT_PASSED and stderr == "":
status = "Passed"
elif proc.returncode == TEST_EXIT_SKIPPED:
status = "Skipped"
else:
status = "Failed"
self.num_running -= 1
self.jobs.remove(job)
if self.use_term_control:
clearline = '\r' + (' ' * dot_count) + '\r'
print(clearline, end='', flush=True)
dot_count = 0
return TestResult(name, status, int(time.time() - start_time)), testdir, stdout, stderr
if self.use_term_control:
print('.', end='', flush=True)
dot_count += 1
def kill_and_join(self):
"""Send SIGKILL to all jobs and block until all have ended."""
procs = [i[2] for i in self.jobs]
for proc in procs:
proc.kill()
for proc in procs:
proc.wait()
class TestResult():
def __init__(self, name, status, time):
self.name = name
self.status = status
self.time = time
self.padding = 0
def sort_key(self):
if self.status == "Passed":
return 0, self.name.lower()
elif self.status == "Failed":
return 2, self.name.lower()
elif self.status == "Skipped":
return 1, self.name.lower()
def __repr__(self):
if self.status == "Passed":
color = GREEN
glyph = TICK
elif self.status == "Failed":
color = RED
glyph = CROSS
elif self.status == "Skipped":
color = GREY
glyph = CIRCLE
return color[1] + "%s | %s%s | %s s\n" % (self.name.ljust(self.padding), glyph, self.status.ljust(7), self.time) + color[0]
@property
def was_successful(self):
return self.status != "Failed"
def check_script_prefixes():
"""Check that test scripts start with one of the allowed name prefixes."""
good_prefixes_re = re.compile("^(example|feature|interface|mempool|mining|p2p|rpc|wallet|tool)_")
bad_script_names = [script for script in ALL_SCRIPTS if good_prefixes_re.match(script) is None]
if bad_script_names:
print("%sERROR:%s %d tests not meeting naming conventions:" % (BOLD[1], BOLD[0], len(bad_script_names)))
print(" %s" % ("\n ".join(sorted(bad_script_names))))
raise AssertionError("Some tests are not following naming convention!")
def check_script_list(*, src_dir, fail_on_warn):
"""Check scripts directory.
Check that there are no scripts in the functional tests directory which are
not being run by pull-tester.py."""
script_dir = src_dir + '/test/functional/'
python_files = set([test_file for test_file in os.listdir(script_dir) if test_file.endswith(".py")])
missed_tests = list(python_files - set(map(lambda x: x.split()[0], ALL_SCRIPTS + NON_SCRIPTS)))
if len(missed_tests) != 0:
print("%sWARNING!%s The following scripts are not being run: %s. Check the test lists in test_runner.py." % (BOLD[1], BOLD[0], str(missed_tests)))
if fail_on_warn:
# On CI this warning is an error to prevent merging incomplete commits into master
sys.exit(1)
class RPCCoverage():
"""
Coverage reporting utilities for test_runner.
Coverage calculation works by having each test script subprocess write
coverage files into a particular directory. These files contain the RPC
commands invoked during testing, as well as a complete listing of RPC
commands per `bitcoin-cli help` (`rpc_interface.txt`).
After all tests complete, the commands run are combined and diff'd against
the complete list to calculate uncovered RPC commands.
See also: test/functional/test_framework/coverage.py
"""
def __init__(self):
self.dir = tempfile.mkdtemp(prefix="coverage")
self.flag = '--coveragedir=%s' % self.dir
def report_rpc_coverage(self):
"""
Print out RPC commands that were unexercised by tests.
"""
uncovered = self._get_uncovered_rpc_commands()
if uncovered:
print("Uncovered RPC commands:")
print("".join((" - %s\n" % command) for command in sorted(uncovered)))
return False
else:
print("All RPC commands covered.")
return True
def cleanup(self):
return shutil.rmtree(self.dir)
def _get_uncovered_rpc_commands(self):
"""
Return a set of currently untested RPC commands.
"""
# This is shared from `test/functional/test_framework/coverage.py`
reference_filename = 'rpc_interface.txt'
coverage_file_prefix = 'coverage.'
coverage_ref_filename = os.path.join(self.dir, reference_filename)
coverage_filenames = set()
all_cmds = set()
# Consider RPC generate covered, because it is overloaded in
# test_framework/test_node.py and not seen by the coverage check.
covered_cmds = set({'generate'})
if not os.path.isfile(coverage_ref_filename):
raise RuntimeError("No coverage reference found")
with open(coverage_ref_filename, 'r', encoding="utf8") as coverage_ref_file:
all_cmds.update([line.strip() for line in coverage_ref_file.readlines()])
for root, _, files in os.walk(self.dir):
for filename in files:
if filename.startswith(coverage_file_prefix):
coverage_filenames.add(os.path.join(root, filename))
for filename in coverage_filenames:
with open(filename, 'r', encoding="utf8") as coverage_file:
covered_cmds.update([line.strip() for line in coverage_file.readlines()])
return all_cmds - covered_cmds
if __name__ == '__main__':
main()
| 38.691913 | 205 | 0.638964 |
a8e8a32992c21b844757ceae9b2708e94e85c2c5 | 768 | py | Python | maze/cif_download.py | kul-group/MAZE-sim | 0f85e74bf93f9242a73bcfaa20a593ae966f38fa | [
"MIT"
] | 13 | 2021-03-10T18:40:32.000Z | 2022-03-21T20:40:57.000Z | maze/cif_download.py | kul-group/MAZE-sim | 0f85e74bf93f9242a73bcfaa20a593ae966f38fa | [
"MIT"
] | 27 | 2021-01-28T23:18:44.000Z | 2021-05-06T19:33:09.000Z | maze/cif_download.py | kul-group/MAZE-sim | 0f85e74bf93f9242a73bcfaa20a593ae966f38fa | [
"MIT"
] | 4 | 2021-03-19T20:46:15.000Z | 2022-03-21T20:40:59.000Z | import os
import urllib.request
from pathlib import Path
from urllib.error import HTTPError
def download_cif(code: str, data_dir='data'):
"""
Args:
code (str):
data_dir:
"""
Path(data_dir).mkdir(parents=True, exist_ok=True) # create output directory if it doesn't exist
output_path = os.path.join(data_dir, code + '.cif') # sets ouput path to data_dir/code.cif
root_url = "https://europe.iza-structure.org/IZA-SC/cif/" # sets root URL for the
url = root_url + code + '.cif'
try:
urllib.request.urlretrieve(url, output_path)
except HTTPError as err:
if err.code == 404:
print("error code 404: Specified Zeolite Framework not found")
raise
else:
raise
| 29.538462 | 100 | 0.634115 |
26ca9234ab4820b66c78981a53cbe7c2cdd6754b | 398 | py | Python | tests/test_vault.py | lepy/sdata | 5cd8b8e65f8aebb238d683daa93bbf620b82863b | [
"MIT"
] | 1 | 2017-04-04T07:54:11.000Z | 2017-04-04T07:54:11.000Z | tests/test_vault.py | lepy/sdata | 5cd8b8e65f8aebb238d683daa93bbf620b82863b | [
"MIT"
] | 1 | 2020-12-21T15:43:13.000Z | 2020-12-21T15:43:13.000Z | tests/test_vault.py | lepy/sdata | 5cd8b8e65f8aebb238d683daa93bbf620b82863b | [
"MIT"
] | 2 | 2017-05-12T07:17:42.000Z | 2018-06-05T11:16:21.000Z | # -*- coding: utf-8 -*-
from sdata.io.vault import FileSystemVault, VaultIndex
from sdata import Data
import pandas as pd
import numpy as np
def test_vault_index():
vi = VaultIndex()
assert isinstance(vi.df, pd.DataFrame)
print(vi.df.columns)
print(sorted(vi.df.columns)==sorted(Data.SDATA_ATTRIBUTES))
assert sorted(vi.df.columns)==sorted(Data.SDATA_ATTRIBUTES)
assert 1
| 26.533333 | 63 | 0.726131 |
87b2010e71e730660638eb597227aae7032d0197 | 8,823 | py | Python | projector.py | JCBrouwer/stylegan2-ada-pytorch | 10058b3c37d36f85112d72808fb616983cb4165b | [
"BSD-Source-Code"
] | 1 | 2021-05-06T20:24:28.000Z | 2021-05-06T20:24:28.000Z | projector.py | JCBrouwer/stylegan2-ada-pytorch | 10058b3c37d36f85112d72808fb616983cb4165b | [
"BSD-Source-Code"
] | null | null | null | projector.py | JCBrouwer/stylegan2-ada-pytorch | 10058b3c37d36f85112d72808fb616983cb4165b | [
"BSD-Source-Code"
] | 1 | 2021-08-19T10:19:13.000Z | 2021-08-19T10:19:13.000Z | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
"""Project given image to the latent space of pretrained network pickle."""
import copy
import os
from time import perf_counter
import click
import imageio
import numpy as np
import PIL.Image
import torch
import torch.nn.functional as F
import dnnlib
import legacy
def project(
G,
target: torch.Tensor, # [C,H,W] and dynamic range [0,255], W & H must match G output resolution
*,
num_steps=1000,
w_avg_samples=10000,
initial_learning_rate=0.1,
initial_noise_factor=0.05,
lr_rampdown_length=0.25,
lr_rampup_length=0.05,
noise_ramp_length=0.75,
regularize_noise_weight=1e5,
verbose=False,
device: torch.device,
):
assert target.shape == (G.img_channels, G.img_resolution, G.img_resolution)
def logprint(*args):
if verbose:
print(*args)
G = copy.deepcopy(G).eval().requires_grad_(False).to(device) # type: ignore
# Compute w stats.
logprint(f"Computing W midpoint and stddev using {w_avg_samples} samples...")
z_samples = np.random.RandomState(123).randn(w_avg_samples, G.z_dim)
w_samples = G.mapping(torch.from_numpy(z_samples).to(device), None) # [N, L, C]
w_samples = w_samples[:, :1, :].cpu().numpy().astype(np.float32) # [N, 1, C]
w_avg = np.mean(w_samples, axis=0, keepdims=True) # [1, 1, C]
w_std = (np.sum((w_samples - w_avg) ** 2) / w_avg_samples) ** 0.5
# Setup noise inputs.
noise_bufs = {name: buf for (name, buf) in G.synthesis.named_buffers() if "noise_const" in name}
# Load VGG16 feature detector.
url = "https://nvlabs-fi-cdn.nvidia.com/stylegan2-ada-pytorch/pretrained/metrics/vgg16.pt"
with dnnlib.util.open_url(url) as f:
vgg16 = torch.jit.load(f).eval().to(device)
# Features for target image.
target_images = target.unsqueeze(0).to(device).to(torch.float32)
if target_images.shape[2] > 256:
target_images = F.interpolate(target_images, size=(256, 256), mode="area")
target_features = vgg16(target_images, resize_images=False, return_lpips=True)
w_opt = torch.tensor(w_avg, dtype=torch.float32, device=device, requires_grad=True) # pylint: disable=not-callable
w_out = torch.zeros([num_steps] + list(w_opt.shape[1:]), dtype=torch.float32, device=device)
optimizer = torch.optim.Adam([w_opt] + list(noise_bufs.values()), betas=(0.9, 0.999), lr=initial_learning_rate)
# Init noise.
for buf in noise_bufs.values():
buf[:] = torch.randn_like(buf)
buf.requires_grad = True
for step in range(num_steps):
# Learning rate schedule.
t = step / num_steps
w_noise_scale = w_std * initial_noise_factor * max(0.0, 1.0 - t / noise_ramp_length) ** 2
lr_ramp = min(1.0, (1.0 - t) / lr_rampdown_length)
lr_ramp = 0.5 - 0.5 * np.cos(lr_ramp * np.pi)
lr_ramp = lr_ramp * min(1.0, t / lr_rampup_length)
lr = initial_learning_rate * lr_ramp
for param_group in optimizer.param_groups:
param_group["lr"] = lr
# Synth images from opt_w.
w_noise = torch.randn_like(w_opt) * w_noise_scale
ws = (w_opt + w_noise).repeat([1, G.mapping.num_ws, 1])
synth_images = G.synthesis(ws, noise_mode="const")
# Downsample image to 256x256 if it's larger than that. VGG was built for 224x224 images.
synth_images = (synth_images + 1) * (255 / 2)
if synth_images.shape[2] > 256:
synth_images = F.interpolate(synth_images, size=(256, 256), mode="area")
# Features for synth images.
synth_features = vgg16(synth_images, resize_images=False, return_lpips=True)
dist = (target_features - synth_features).square().sum()
# Noise regularization.
reg_loss = 0.0
for v in noise_bufs.values():
noise = v[None, None, :, :] # must be [1,1,H,W] for F.avg_pool2d()
while True:
reg_loss += (noise * torch.roll(noise, shifts=1, dims=3)).mean() ** 2
reg_loss += (noise * torch.roll(noise, shifts=1, dims=2)).mean() ** 2
if noise.shape[2] <= 8:
break
noise = F.avg_pool2d(noise, kernel_size=2)
loss = dist + reg_loss * regularize_noise_weight
# Step
optimizer.zero_grad(set_to_none=True)
loss.backward()
optimizer.step()
logprint(f"step {step+1:>4d}/{num_steps}: dist {dist:<4.2f} loss {float(loss):<5.2f}")
# Save projected W for each optimization step.
w_out[step] = w_opt.detach()[0]
# Normalize noise.
with torch.no_grad():
for buf in noise_bufs.values():
buf -= buf.mean()
buf *= buf.square().mean().rsqrt()
return w_out.repeat([1, G.mapping.num_ws, 1])
# ----------------------------------------------------------------------------
@click.command()
@click.option("--network", "network_pkl", help="Network pickle filename", required=True)
@click.option("--target", "target_fname", help="Target image file to project to", required=True, metavar="FILE")
@click.option("--num-steps", help="Number of optimization steps", type=int, default=1000, show_default=True)
@click.option("--seed", help="Random seed", type=int, default=303, show_default=True)
@click.option(
"--save-video", help="Save an mp4 video of optimization progress", type=bool, default=True, show_default=True
)
@click.option("--outdir", help="Where to save the output images", required=True, metavar="DIR")
def run_projection(network_pkl: str, target_fname: str, outdir: str, save_video: bool, seed: int, num_steps: int):
"""Project given image to the latent space of pretrained network pickle.
Examples:
\b
python projector.py --outdir=out --target=~/mytargetimg.png \\
--network=https://nvlabs-fi-cdn.nvidia.com/stylegan2-ada-pytorch/pretrained/ffhq.pkl
"""
np.random.seed(seed)
torch.manual_seed(seed)
# Load networks.
print('Loading networks from "%s"...' % network_pkl)
device = torch.device("cuda")
with dnnlib.util.open_url(network_pkl) as fp:
G = legacy.load_network_pkl(fp)["G_ema"].requires_grad_(False).to(device) # type: ignore
# Load target image.
target_pil = PIL.Image.open(target_fname).convert("RGB")
w, h = target_pil.size
s = min(w, h)
target_pil = target_pil.crop(((w - s) // 2, (h - s) // 2, (w + s) // 2, (h + s) // 2))
target_pil = target_pil.resize((G.img_resolution, G.img_resolution), PIL.Image.LANCZOS)
target_uint8 = np.array(target_pil, dtype=np.uint8)
# Optimize projection.
start_time = perf_counter()
projected_w_steps = project(
G,
target=torch.tensor(target_uint8.transpose([2, 0, 1]), device=device), # pylint: disable=not-callable
num_steps=num_steps,
device=device,
verbose=True,
)
print(f"Elapsed: {(perf_counter()-start_time):.1f} s")
# Render debug output: optional video and projected image and W vector.
os.makedirs(outdir, exist_ok=True)
if save_video:
video = imageio.get_writer(f"{outdir}/proj.mp4", mode="I", fps=10, codec="libx264", bitrate="16M")
print(f'Saving optimization progress video "{outdir}/proj.mp4"')
for projected_w in projected_w_steps:
synth_image = G.synthesis(projected_w.unsqueeze(0), noise_mode="const")
synth_image = (synth_image + 1) * (255 / 2)
synth_image = synth_image.permute(0, 2, 3, 1).clamp(0, 255).to(torch.uint8)[0].cpu().numpy()
video.append_data(np.concatenate([target_uint8, synth_image], axis=1))
video.close()
# Save final projected frame and W vector.
target_pil.save(f"{outdir}/target.png")
projected_w = projected_w_steps[-1]
synth_image = G.synthesis(projected_w.unsqueeze(0), noise_mode="const")
synth_image = (synth_image + 1) * (255 / 2)
synth_image = synth_image.permute(0, 2, 3, 1).clamp(0, 255).to(torch.uint8)[0].cpu().numpy()
PIL.Image.fromarray(synth_image, "RGB").save(f"{outdir}/proj.png")
np.savez(f"{outdir}/projected_w.npz", w=projected_w.unsqueeze(0).cpu().numpy())
# ----------------------------------------------------------------------------
if __name__ == "__main__":
run_projection() # pylint: disable=no-value-for-parameter
# ----------------------------------------------------------------------------
| 41.617925 | 119 | 0.639578 |
0ecec1aefa754e0b51544dc33d3f15980c3b7345 | 1,885 | py | Python | library/python/coredump_filter/tests/test_completed_stack.py | fibersel/catboost | 585e63b766867989acd2100fb2fd2e6ff81e7cc0 | [
"Apache-2.0"
] | null | null | null | library/python/coredump_filter/tests/test_completed_stack.py | fibersel/catboost | 585e63b766867989acd2100fb2fd2e6ff81e7cc0 | [
"Apache-2.0"
] | null | null | null | library/python/coredump_filter/tests/test_completed_stack.py | fibersel/catboost | 585e63b766867989acd2100fb2fd2e6ff81e7cc0 | [
"Apache-2.0"
] | 1 | 2021-04-27T23:40:09.000Z | 2021-04-27T23:40:09.000Z | import unittest
import yatest.common
import os
from library.python.coredump_filter import core_proc, const
AMOUNT_TEST_CASES = 7
class MockStream:
def __init__(self):
self._store = []
def write(self, payload):
self._store.append(payload)
@property
def get(self):
return '\n'.join(self._store)
class TestFingerprint(unittest.TestCase):
def setUp(self):
self.fingerprints = {}
self.test_cases = {}
self.data_dir = yatest.common.source_path(const.TEST_DATA_SOURCE_PATH)
for test_case_id in range(1, AMOUNT_TEST_CASES + 1):
filename = 'test{}.txt'.format(test_case_id)
with open(os.path.join(self.data_dir, filename)) as fd:
stream = MockStream()
parsed_lines = fd.readlines()
core_type = core_proc.detect_coredump_type(''.join(parsed_lines))
if core_type == const.CoredumpType.GDB:
core_proc.filter_stackdump(
file_lines=parsed_lines,
use_fingerprint=True,
stream=stream,
)
else:
core_proc.filter_stackdump_lldb(
file_lines=parsed_lines,
use_fingerprint=True,
stream=stream,
)
self.fingerprints[test_case_id] = stream.get
with open(os.path.join(self.data_dir, 'test{}.txt.fp'.format(test_case_id))) as fd:
parsed_lines = fd.read()
self.test_cases[test_case_id] = parsed_lines
def test_fingerprint(self):
for test_case_id, test_fingerprints in self.test_cases.items():
for fg in test_fingerprints:
self.assertIn(fg, self.fingerprints[test_case_id], 'Fingerprint not found.')
| 34.272727 | 95 | 0.578249 |
11c9b9fda1b4ff6600c798e297f316b5c8129ebf | 4,843 | py | Python | coffe_price_brazil_es/coffee.py | VitorBonella/ProjetoIntegrado1-2021-1 | 3193dd1967b1b8ea4fd3a46517e6de054da47d28 | [
"MIT"
] | 1 | 2021-09-02T16:24:12.000Z | 2021-09-02T16:24:12.000Z | coffe_price_brazil_es/coffee.py | VitorBonella/ProjetoIntegrado1-2021-1 | 3193dd1967b1b8ea4fd3a46517e6de054da47d28 | [
"MIT"
] | null | null | null | coffe_price_brazil_es/coffee.py | VitorBonella/ProjetoIntegrado1-2021-1 | 3193dd1967b1b8ea4fd3a46517e6de054da47d28 | [
"MIT"
] | null | null | null | from coffe_price_brazil_es.get_coffee_csv_file import get_csv_file
import pandas as pd
class Coffee:
"""
Classe para armazenar as informações sobre o café
"""
def __init__(self):
try:
self.table, self.info = get_csv_file()
except AttributeError:
print("CSV COFFEE FILES ARE OFFLINE")
def update_prices(self, table):
""" Atualiza a tabela de preços
Essa função atualiza a tabela de preços mediante o acesso do usuário;
:param table: Tabela de preços
:type table: Matrix
:return:
:rtype:
"""
self.table = get_csv_file()
def get_table(self):
""" Retorna a tabela de preços
:return: Tabela de preços
:rtype: Matrix
"""
return self.table
# Type vai de "ARABICA RUIM","ARABICA BOM", "CONILLON"
def get_prices_by_type(self, coffee_type):
""" Retorna um vetor com os preços de um tipo de café específico
:param coffee_type: Um tipo de café
:type coffee_type: ["ARABICA RUIM","ARABICA BOM", "CONILLON"]
:return: Valores de um tipo de café específico
:rtype: Vetor de valores de um tipo específico
"""
try:
return self.table[coffee_type].index, self.table[coffee_type].values
except KeyError:
print("Type {} doesn't exist. Allowed types are: {}, {} and {}".format(coffee_type, self.table.columns[0], self.table.columns[1], self.table.columns[2]))
return None
def get_prices_by_range(self, start, end, coffee_type=None):
""" Valores do preço do café em um determinado periodo, se coffee_type for passado, retorna os valores de um único tipo de café em um período;
:param start: Dia inicial
:type start: String
:param end: Dia final
:type end: String
:param coffee_type: Tipos de café
:type coffee_type: ["ARABICA RUIM","ARABICA BOM", "CONILLON"]
:return: Vetor com preços da sacas em determinado periodo
:rtype: Vetor
"""
datetime_data = pd.to_datetime(self.table.index, format="%d/%m/%Y")
mask = (datetime_data > start) & (datetime_data <= end)
data = self.table.loc[mask]
if(coffee_type == None):
return data.iloc[:, 0].index, data.iloc[:, 0].values, data.iloc[:, 1].values, data.iloc[:, 2].values
else:
return data[coffee_type].index, data[coffee_type].values
def get_prices_by_year(self, year, coffee_type=None):
""" Valores do preço do café em um determinado ano, se coffee_type for passado, retorna os valores de um único tipo de café em um ano;
:param year: Ano
:type year: String
:param coffee_type: Tipo de café
:type coffee_type: ["ARABICA RUIM","ARABICA BOM", "CONILLON"]
:return: Retorna a tabela de preços de um ano específico
:rtype: Vetor
"""
data = self.table[self.table.index.str.contains(str(year))]
if(coffee_type == None):
return data.iloc[:, 0].index, data.iloc[:, 0].values, data.iloc[:, 1].values, data.iloc[:, 2].values
else:
coffee_type = str(coffee_type)
return data[coffee_type].index, data[coffee_type].values
# Função que printa os tipos de grãos na tabela
def print_info(self):
""" Mostra as informações sobre cada tipo de café
:return: None
:rtype: None
"""
for grao_info in self.info:
print(grao_info)
def get_price(self, coffee_type, day=None, recent=False):
""" Retorna o ultimo preço de um determinado dia, se recent for passado retorna o ultimo dia de coleta;
:param coffee_type: Tipo de café
:type coffee_type: ["ARABICA RUIM","ARABICA BOM", "CONILLON"]
:param day: Dia
:type day: String
:param recent: Preço atual
:type recent: Boolean
:return: Preço de um tipo de café em um determinado dia
:rtype: Float
"""
if recent:
try:
return self.table[coffee_type].iloc[-1]
except KeyError:
print("Type {} doesn't exist. Allowed types are: {}, {} and {}".format(coffee_type, self.table.columns[0], self.table.columns[1], self.table.columns[2]))
return None
else:
try:
return self.table.loc[day, coffee_type]
except KeyError:
if coffee_type not in self.table.columns:
print("Type {} doesn't exist. Allowed types are: {}, {} and {}".format(coffee_type, self.table.columns[0], self.table.columns[1], self.table.columns[2]))
else:
print("Day {} not in the Price Table.".format(day))
return None
| 37.835938 | 173 | 0.600867 |
7dee865de0561262b9b90e98958e61c9c70aed26 | 2,895 | py | Python | mbdata/api/tests/__init__.py | markweaversonos/mbdata | 9f418be4851d5608f67873a6f72dd9427e255a52 | [
"MIT"
] | 51 | 2017-10-25T16:24:20.000Z | 2021-12-21T06:20:30.000Z | mbdata/api/tests/__init__.py | markweaversonos/mbdata | 9f418be4851d5608f67873a6f72dd9427e255a52 | [
"MIT"
] | 29 | 2019-05-14T10:16:27.000Z | 2021-12-10T04:37:20.000Z | mbdata/api/tests/__init__.py | markweaversonos/mbdata | 9f418be4851d5608f67873a6f72dd9427e255a52 | [
"MIT"
] | 19 | 2017-12-23T04:34:23.000Z | 2021-11-29T19:12:28.000Z | # -*- coding: utf8 -*-
from __future__ import print_function
import os
import tempfile
import logging
import json
import functools
import pprint
import difflib
from unittest import TestCase
from unittest.util import safe_repr
from nose.tools import *
from flask import g
os.environ['MBDATA_API_SETTINGS'] = os.path.join(os.path.dirname(__file__), 'settings.py')
from mbdata.api import app
from mbdata.models import Base
from mbdata.utils import patch_model_schemas, NO_SCHEMAS
from mbdata.sample_data import create_sample_data
use_file_db = False
db_fd = db_name = None
def setup_package():
global db_fd, db_name
app.app.config['TESTING'] = True
if use_file_db:
db_fd, db_name = tempfile.mkstemp()
app.app.config['DATABASE_URI'] = 'sqlite:///{0}'.format(db_name)
else:
app.app.config['DATABASE_URI'] = 'sqlite:///:memory:'
app.setup_db()
patch_model_schemas(NO_SCHEMAS)
Base.metadata.create_all(app.engine)
session = app.Session()
create_sample_data(session)
session.close()
if os.environ.get('MBDATA_DATABASE_ECHO'):
app.app.config['DATABASE_ECHO'] = True
app.setup_db()
def teardown_package():
if use_file_db:
os.close(db_fd)
os.unlink(db_name)
def with_client(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
with app.app.test_client() as client:
return func(client, *args, **kwargs)
return wrapper
def with_database(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
with app.app.test_request_context('/'):
app.app.preprocess_request()
return func(g.db, *args, **kwargs)
return wrapper
class DummyTestCase(TestCase):
def nop(self):
pass
t = DummyTestCase('nop')
def assert_dict_equal(d1, d2):
assert_is_instance(d1, dict, 'First argument is not a dictionary')
assert_is_instance(d2, dict, 'Second argument is not a dictionary')
if d1 != d2:
standard_msg = '%s != %s' % (safe_repr(d1, True), safe_repr(d2, True))
standard_msg += '\n'
standard_msg += '\n'.join(difflib.unified_diff(
json.dumps(d2, indent=4, ensure_ascii=False, sort_keys=True).splitlines(),
json.dumps(d1, indent=4, ensure_ascii=False, sort_keys=True).splitlines(),
'(expected)', '(actual)', lineterm=''
))
t.fail(standard_msg)
def assert_json_response_equal(rv, expected):
assert_equal(rv.status_code, 200, 'expected 200, got {0} with data {1!r}'.format(rv.status_code, rv.data))
assert_equal(rv.content_type, 'application/json; charset=UTF-8')
actual = json.loads(rv.data.decode('utf8'))
try:
assert_dict_equal(actual, expected)
except AssertionError:
print('Complete response:')
print(json.dumps(actual, indent=4, ensure_ascii=False))
raise
| 26.081081 | 110 | 0.670812 |
79c9e5e93c782ff15f21fecc8c94f604a78fa47f | 3,265 | py | Python | fixtures/service_group.py | vkolli/5.0_contrail-test | 1793f169a94100400a1b2fafbad21daf5aa4d48a | [
"Apache-2.0"
] | null | null | null | fixtures/service_group.py | vkolli/5.0_contrail-test | 1793f169a94100400a1b2fafbad21daf5aa4d48a | [
"Apache-2.0"
] | 1 | 2021-06-01T22:18:29.000Z | 2021-06-01T22:18:29.000Z | fixtures/service_group.py | lmadhusudhanan/contrail-test | bd39ff19da06a20bd79af8c25e3cde07375577cf | [
"Apache-2.0"
] | null | null | null | import vnc_api_test
from cfgm_common.exceptions import NoIdError
from tcutils.util import get_random_name, retry
class ServiceGroupFixture(vnc_api_test.VncLibFixture):
'''Fixture to handle service group object
Optional:
:param name : name of the service group
:param uuid : UUID of the service group
:param scope : global or local scope, default local
:param services : list of services tuples
eg: [(<protocol>, (<sp_start, sp_end>), (<dp_start, dp_end>))]
'''
def __init__(self, *args, **kwargs):
super(ServiceGroupFixture, self).__init__(*args, **kwargs)
self.name = kwargs.get('name')
self.uuid = kwargs.get('uuid')
self.scope = kwargs.get('scope') or 'local'
self.services = kwargs.get('services') or list()
self.created = False
self.verify_is_run = False
def setUp(self):
super(ServiceGroupFixture, self).setUp()
self.name = self.name or get_random_name(self.project_name)
if self.scope == 'local':
self.parent_type = 'project'
self.fq_name = [self.domain, self.project_name, self.name]
else:
self.parent_type = 'policy-management'
self.fq_name = ['default-policy-management', self.name]
self.create()
def cleanUp(self):
super(ServiceGroupFixture, self).cleanUp()
if self.created == False and self.inputs.fixture_cleanup != 'force':
self.logger.info('Skipping deletion of service group %s:'
%(self.fq_name))
else:
return self.delete()
def read(self):
obj = self.vnc_h.read_service_group(id=self.uuid)
self.name = obj.name
self.fq_name = obj.get_fq_name()
self.parent_type = obj.parent_type
self.scope = 'local' if obj.parent_type == 'project' else 'global'
self.services = list()
for service in obj.get_service_group_firewall_service_list() or []:
proto = service.protocol
sports = (service.src_ports.start_port, service.src_ports.end_port)
dports = (service.dst_ports.start_port, service.dst_ports.end_port)
self.services.append((protocol, sports, dports))
def create(self):
if not self.uuid:
try:
obj = self.vnc_h.read_service_group(fq_name=self.fq_name)
self.uuid = obj.uuid
except NoIdError:
self.uuid = self.vnc_h.create_service_group(
fq_name=self.fq_name,
parent_type=self.parent_type,
services=self.services)
self.created = True
if not self.created:
self.read()
def add_services(self, services):
self.vnc_h.update_service_group(self.uuid, services)
self.services.extend(services)
def delete_services(self, services):
self.vnc_h.update_service_group(self.uuid, services, delete=True)
self.services = list(set(self.services) - set(services))
def delete(self):
self.logger.info('Deleting Service Group %s(%s)'%(self.name, self.uuid))
self.vnc_h.delete_service_group(id=self.uuid)
| 40.308642 | 80 | 0.612864 |
6034f82fc4da8288abc4c7af833b2d41d4294410 | 4,879 | py | Python | stereo/io/gef.py | nilsmechtel/stereopy | cd8f34b45ae6667043611e5d02ea46f8ca3f0c7b | [
"MIT"
] | 61 | 2021-04-09T01:00:41.000Z | 2022-03-07T06:19:41.000Z | stereo/io/gef.py | nilsmechtel/stereopy | cd8f34b45ae6667043611e5d02ea46f8ca3f0c7b | [
"MIT"
] | 20 | 2021-04-13T10:31:04.000Z | 2022-03-29T11:07:30.000Z | stereo/io/gef.py | nilsmechtel/stereopy | cd8f34b45ae6667043611e5d02ea46f8ca3f0c7b | [
"MIT"
] | 13 | 2021-04-16T08:36:08.000Z | 2022-02-21T20:44:00.000Z | # coding: utf-8
import gc
import h5py
import pandas as pd
from scipy.sparse import csr_matrix
import numpy as np
from ..core.cell import Cell
from ..core.gene import Gene
from ..core.stereo_exp_data import StereoExpData
from ..log_manager import logger
class GEF(object):
def __init__(self, file_path: str, bin_size: int = 100, is_sparse: bool = True):
self.file_path = file_path
self.bin_size = bin_size
self.is_sparse = is_sparse
self.df_exp = None
self.df_gene = None
self.genes = None
self.cells = None
self.cell_num = 0
self.gene_num = 0
self._init()
def _init(self):
with h5py.File(self.file_path, mode='r') as h5f:
bin_tag = 'bin{}'.format(self.bin_size)
if bin_tag not in h5f['geneExp'].keys():
raise Exception('The bin size {} info is not in the GEF file'.format(self.bin_size))
h5exp = h5f['geneExp'][bin_tag]['expression']
h5gene = h5f['geneExp'][bin_tag]['gene']
self.df_gene = pd.DataFrame(h5gene['gene', 'offset', 'count'])
self.df_exp = pd.DataFrame(h5exp['x', 'y', 'count'])
def build(self, gene_lst: list = None, region: list = None):
if gene_lst is not None:
self._restrict_to_genes(gene_lst)
if region is not None:
self._restrict_to_region(region)
if gene_lst is None and region is None:
self.genes = self.df_gene['gene'].values
self.gene_num = len(self.genes)
cols = np.zeros((self.df_exp.shape[0],), dtype='uint32')
gene_index = 0
exp_index = 0
for count in self.df_gene['count']:
for i in range(count):
cols[exp_index] = gene_index
exp_index += 1
gene_index += 1
self.df_exp['gene_index'] = cols
self.df_exp['cell_id'] = np.bitwise_or(
np.left_shift(self.df_exp['x'].astype('uint64'), 32), self.df_exp['y'])
self.cells = self.df_exp['cell_id'].unique()
self.cell_num = len(self.cells)
rows = np.zeros((self.df_exp.shape[0],), dtype='uint32')
grp = self.df_exp.groupby('cell_id').groups
i = 0
for cell_id in self.cells:
for j in grp[cell_id]:
rows[j] = i
i += 1
self.df_exp['cell_index'] = rows
del grp
gc.collect()
def _restrict_to_region(self, region):
logger.info(f'restrict to region [{region[0]} <= x <= {region[1]}] and [{region[2]} <= y <= {region[3]}]')
gene_col = []
for row in self.df_gene.itertuples():
for i in range(getattr(row, 'count')):
gene_col.append(getattr(row, 'gene'))
self.df_exp['gene'] = gene_col
self.df_exp = self.df_exp.query(f'{region[0]} <= x <= {region[1]} and {region[2]} <= y <= {region[3]}')
self.genes = self.df_exp['gene'].unique()
self.df_gene = None
self.gene_num = len(self.genes)
genes_dict = dict(zip(self.genes, range(0, self.gene_num)))
self.df_exp['gene_index'] = self.df_exp['gene'].map(genes_dict)
self.df_exp.drop(columns=['gene'])
self.df_exp = self.df_exp.reset_index(drop=True)
def _restrict_to_genes(self, gene_lst):
logger.info('restrict to gene_lst')
cols = np.zeros((self.df_exp.shape[0],), dtype='uint32')
offset_indexes = np.zeros((self.df_exp.shape[0],), dtype='uint32')
self.df_gene = self.df_gene.set_index('gene').loc[gene_lst].reset_index()
self.genes = self.df_gene['gene'].values
self.gene_num = len(self.genes)
gene_index = 0
exp_index = 0
for row in self.df_gene.itertuples():
for i in range(getattr(row, 'count')):
cols[exp_index] = gene_index
offset_indexes[exp_index] = getattr(row, 'offset') + i
exp_index += 1
gene_index += 1
self.df_exp = self.df_exp.loc[offset_indexes[:exp_index]]
self.df_exp['gene_index'] = cols[:exp_index]
self.df_exp = self.df_exp.reset_index(drop=True)
def to_stereo_exp_data(self) -> StereoExpData:
data = StereoExpData(file_path=self.file_path)
logger.info(f'the martrix has {self.cell_num} cells, and {self.gene_num} genes.')
data.position = self.df_exp.loc[:, ['x', 'y']].drop_duplicates().values
exp_matrix = csr_matrix((self.df_exp['count'], (self.df_exp['cell_index'], self.df_exp['gene_index'])),
shape=(self.cell_num, self.gene_num), dtype=np.int32)
data.cells = Cell(cell_name=self.cells)
data.genes = Gene(gene_name=self.genes)
data.exp_matrix = exp_matrix if self.is_sparse else exp_matrix.toarray()
return data
| 40.322314 | 114 | 0.587825 |
4a0502d78ce2411de6a34f48157e656757a53ccf | 1,232 | py | Python | day-04/part-2/ludoge.py | badouralix/adventofcode-2018 | 543ce39d4eeb7d9d695459ffadca001a8c56386d | [
"MIT"
] | 31 | 2018-12-01T00:43:40.000Z | 2020-05-30T05:18:59.000Z | day-04/part-2/ludoge.py | badouralix/adventofcode-2018 | 543ce39d4eeb7d9d695459ffadca001a8c56386d | [
"MIT"
] | 14 | 2018-12-01T12:14:26.000Z | 2021-05-07T22:41:47.000Z | day-04/part-2/ludoge.py | badouralix/adventofcode-2018 | 543ce39d4eeb7d9d695459ffadca001a8c56386d | [
"MIT"
] | 10 | 2018-12-01T23:38:34.000Z | 2020-12-28T13:36:10.000Z | from tool.runners.python import SubmissionPy
from collections import Counter
class LudogeSubmission(SubmissionPy):
def run(self, s):
# :param s: input in string format
# :return: solution flag
# Your code goes here
lines = sorted(s.splitlines())
guards = {}
for line in lines:
minute = int(line.split()[1][3:5])
if "#" in line:
guard_id = int(line.split()[3][1:])
if guard_id not in guards:
guards[guard_id] = []
wakes_up = minute
if "falls asleep" in line:
falls_asleep = minute
if "wakes up" in line:
wakes_up = minute
guards[guard_id] += list(range(falls_asleep, wakes_up))
guard_minute_frequency = {k: dict(Counter(v)) for k, v in guards.items() if Counter(v)}
guard_minute_max = {k: max(v.values()) for k, v in guard_minute_frequency.items()}
max_guard = max(guard_minute_frequency, key=guard_minute_max.get)
max_guard_sleeps = guards[max_guard]
max_minute = max(set(max_guard_sleeps), key=max_guard_sleeps.count)
return max_minute * max_guard
pass
| 35.2 | 95 | 0.581981 |
78681353f18b1fa0f6699e930fc2842f8cdb2b35 | 1,718 | py | Python | code/config.py | mbasart/ORM-cookies | 7837a92e56f0e694778e28a8961aec02b32a5f7a | [
"Apache-2.0"
] | null | null | null | code/config.py | mbasart/ORM-cookies | 7837a92e56f0e694778e28a8961aec02b32a5f7a | [
"Apache-2.0"
] | null | null | null | code/config.py | mbasart/ORM-cookies | 7837a92e56f0e694778e28a8961aec02b32a5f7a | [
"Apache-2.0"
] | null | null | null | import gzip
import zipfile
from contextlib import contextmanager
from os.path import abspath, dirname, join, splitext
PROJECT_DIR = abspath(dirname(__file__))
ALEXA_FILE_PATH = join(PROJECT_DIR, '../assets/alexa/top-1m.csv.zip')
CLEANER_PLUGIN_PATH = join(PROJECT_DIR, '../assets/plugin/cache_killer/cache_killer_1_0_14_6.crx')
MYSQL_HOST = 'XXXdatabaseXXX'
MYSQL_DB = 'ORM'
MYSQL_USER = 'XXXuserXXX'
MYSQL_PASSWORD = 'XXXpasswordXXX'
def load_csv(filename, column):
@contextmanager
def open_zip(filename, *args):
with zipfile.ZipFile(filename) as zf:
name = zf.namelist()[0]
uncompressed = zf.open(name)
try:
yield uncompressed
finally:
uncompressed.close()
def domain(line):
# if csv split by comma
if ',' in line:
# domains don't contain commas, so it should be save
return line.split(',')[column]
# To allow users passing a file with a domain per line
return line
ext = splitext(filename)[1]
opener = {
'.gz': gzip.open,
'.zip': open_zip,
}.get(ext, open)
with opener(filename, 'rb') as f:
# read whole and decode in one pass (not line by line) for speed reasons (15x)
# the top-1m is 20MB, should be handled good enough.
# the memory efficient option is [line.decode() for line in f]
lines = [line for line in f.read().decode('utf8').split('\n')]
# domains don't contain commas, so it should be safe
sites = [domain(line) for line in lines if line]
return sites[column - 1:]
def load_list(path):
with open(path, 'r') as f:
return f.readlines()
| 30.140351 | 98 | 0.62631 |
dddd03b31bf5574b3ad3b0ff5221e87933fabe5a | 10,667 | py | Python | Chapter-07/collections/ansible_collections/amazon/aws/plugins/modules/ec2_snapshot_info.py | PacktPublishing/Ansible-for-Real-life-Automation | 35c0d92ea08a5dbf3bea749e1971cffabd5e6de4 | [
"MIT"
] | 7 | 2021-11-16T04:05:42.000Z | 2022-02-19T21:14:29.000Z | Chapter-07/collections/ansible_collections/amazon/aws/plugins/modules/ec2_snapshot_info.py | PacktPublishing/Ansible-for-Real-life-Automation | 35c0d92ea08a5dbf3bea749e1971cffabd5e6de4 | [
"MIT"
] | 1 | 2022-03-12T02:25:26.000Z | 2022-03-12T02:25:26.000Z | Chapter-05/collections/ansible_collections/amazon/aws/plugins/modules/ec2_snapshot_info.py | PacktPublishing/Ansible-for-Real-life-Automation | 35c0d92ea08a5dbf3bea749e1971cffabd5e6de4 | [
"MIT"
] | 1 | 2022-03-01T05:43:07.000Z | 2022-03-01T05:43:07.000Z | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: ec2_snapshot_info
version_added: 1.0.0
short_description: Gather information about ec2 volume snapshots in AWS
description:
- Gather information about ec2 volume snapshots in AWS.
author:
- "Rob White (@wimnat)"
- Aubin Bikouo (@abikouo)
options:
snapshot_ids:
description:
- If you specify one or more snapshot IDs, only snapshots that have the specified IDs are returned.
required: false
default: []
type: list
elements: str
owner_ids:
description:
- If you specify one or more snapshot owners, only snapshots from the specified owners and for which you have
access are returned.
required: false
default: []
type: list
elements: str
restorable_by_user_ids:
description:
- If you specify a list of restorable users, only snapshots with create snapshot permissions for those users are
returned.
required: false
default: []
type: list
elements: str
filters:
description:
- A dict of filters to apply. Each dict item consists of a filter key and a filter value. See
U(https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSnapshots.html) for possible filters. Filter
names and values are case sensitive.
required: false
type: dict
default: {}
max_results:
description:
- The maximum number of snapshot results returned in paginated output.
- When used only a single page along with a C(next_token_id) response element will be returned.
- The remaining results of the initial request can be seen by sending another request with the returned C(next_token_id) value.
- This value can be between 5 and 1000; if I(next_token_id) is given a value larger than 1000, only 1000 results are returned.
- If this parameter is not used, then DescribeSnapshots returns all results.
- This parameter is mutually exclusive with I(snapshot_ids).
required: False
type: int
next_token_id:
description:
- Contains the value returned from a previous paginated request where I(max_results) was used and the results exceeded the value of that parameter.
- Pagination continues from the end of the previous results that returned the I(next_token_id) value.
- This parameter is mutually exclusive with I(snapshot_ids)
required: false
type: str
notes:
- By default, the module will return all snapshots, including public ones. To limit results to snapshots owned by
the account use the filter 'owner-id'.
extends_documentation_fragment:
- amazon.aws.aws
- amazon.aws.ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Gather information about all snapshots, including public ones
- amazon.aws.ec2_snapshot_info:
# Gather information about all snapshots owned by the account 0123456789
- amazon.aws.ec2_snapshot_info:
filters:
owner-id: 0123456789
# Or alternatively...
- amazon.aws.ec2_snapshot_info:
owner_ids:
- 0123456789
# Gather information about a particular snapshot using ID
- amazon.aws.ec2_snapshot_info:
filters:
snapshot-id: snap-00112233
# Or alternatively...
- amazon.aws.ec2_snapshot_info:
snapshot_ids:
- snap-00112233
# Gather information about any snapshot with a tag key Name and value Example
- amazon.aws.ec2_snapshot_info:
filters:
"tag:Name": Example
# Gather information about any snapshot with an error status
- amazon.aws.ec2_snapshot_info:
filters:
status: error
'''
RETURN = '''
snapshots:
description: snapshots retrieved
type: list
returned: success
elements: dict
contains:
snapshot_id:
description: The ID of the snapshot. Each snapshot receives a unique identifier when it is created.
type: str
returned: always
sample: snap-01234567
volume_id:
description: The ID of the volume that was used to create the snapshot.
type: str
returned: always
sample: vol-01234567
state:
description: The snapshot state (completed, pending or error).
type: str
returned: always
sample: completed
state_message:
description: Encrypted Amazon EBS snapshots are copied asynchronously. If a snapshot copy operation fails (for example, if the proper
AWS Key Management Service (AWS KMS) permissions are not obtained) this field displays error state details to help you diagnose why the
error occurred.
type: str
returned: always
sample:
start_time:
description: The time stamp when the snapshot was initiated.
type: str
returned: always
sample: "2015-02-12T02:14:02+00:00"
progress:
description: The progress of the snapshot, as a percentage.
type: str
returned: always
sample: "100%"
owner_id:
description: The AWS account ID of the EBS snapshot owner.
type: str
returned: always
sample: "099720109477"
description:
description: The description for the snapshot.
type: str
returned: always
sample: "My important backup"
volume_size:
description: The size of the volume, in GiB.
type: int
returned: always
sample: 8
owner_alias:
description: The AWS account alias (for example, amazon, self) or AWS account ID that owns the snapshot.
type: str
returned: always
sample: "033440102211"
tags:
description: Any tags assigned to the snapshot.
type: dict
returned: always
sample: "{ 'my_tag_key': 'my_tag_value' }"
encrypted:
description: Indicates whether the snapshot is encrypted.
type: bool
returned: always
sample: "True"
kms_key_id:
description: The full ARN of the AWS Key Management Service (AWS KMS) customer master key (CMK) that was used to \
protect the volume encryption key for the parent volume.
type: str
returned: always
sample: "74c9742a-a1b2-45cb-b3fe-abcdef123456"
data_encryption_key_id:
description: The data encryption key identifier for the snapshot. This value is a unique identifier that \
corresponds to the data encryption key that was used to encrypt the original volume or snapshot copy.
type: str
returned: always
sample: "arn:aws:kms:ap-southeast-2:012345678900:key/74c9742a-a1b2-45cb-b3fe-abcdef123456"
next_token_id:
description:
- Contains the value returned from a previous paginated request where C(max_results) was used and the results exceeded the value of that parameter.
- This value is null when there are no more results to return.
type: str
returned: when option C(max_results) is set in input
'''
try:
from botocore.exceptions import ClientError
except ImportError:
pass # Handled by AnsibleAWSModule
from ansible.module_utils.common.dict_transformations import camel_dict_to_snake_dict
from ..module_utils.core import AnsibleAWSModule
from ..module_utils.core import is_boto3_error_code
from ..module_utils.ec2 import AWSRetry
from ..module_utils.ec2 import ansible_dict_to_boto3_filter_list
from ..module_utils.ec2 import boto3_tag_list_to_ansible_dict
def list_ec2_snapshots(connection, module):
snapshot_ids = module.params.get("snapshot_ids")
owner_ids = [str(owner_id) for owner_id in module.params.get("owner_ids")]
restorable_by_user_ids = [str(user_id) for user_id in module.params.get("restorable_by_user_ids")]
filters = ansible_dict_to_boto3_filter_list(module.params.get("filters"))
max_results = module.params.get('max_results')
next_token = module.params.get('next_token_id')
optional_param = {}
if max_results:
optional_param['MaxResults'] = max_results
if next_token:
optional_param['NextToken'] = next_token
try:
snapshots = connection.describe_snapshots(
aws_retry=True,
SnapshotIds=snapshot_ids, OwnerIds=owner_ids,
RestorableByUserIds=restorable_by_user_ids, Filters=filters,
**optional_param)
except is_boto3_error_code('InvalidSnapshot.NotFound') as e:
if len(snapshot_ids) > 1:
module.warn("Some of your snapshots may exist, but %s" % str(e))
snapshots = {'Snapshots': []}
except ClientError as e: # pylint: disable=duplicate-except
module.fail_json_aws(e, msg='Failed to describe snapshots')
result = {}
# Turn the boto3 result in to ansible_friendly_snaked_names
snaked_snapshots = []
for snapshot in snapshots['Snapshots']:
snaked_snapshots.append(camel_dict_to_snake_dict(snapshot))
# Turn the boto3 result in to ansible friendly tag dictionary
for snapshot in snaked_snapshots:
if 'tags' in snapshot:
snapshot['tags'] = boto3_tag_list_to_ansible_dict(snapshot['tags'], 'key', 'value')
result['snapshots'] = snaked_snapshots
if snapshots.get('NextToken'):
result.update(camel_dict_to_snake_dict({'NextTokenId': snapshots.get('NextToken')}))
module.exit_json(**result)
def main():
argument_spec = dict(
snapshot_ids=dict(default=[], type='list', elements='str'),
owner_ids=dict(default=[], type='list', elements='str'),
restorable_by_user_ids=dict(default=[], type='list', elements='str'),
filters=dict(default={}, type='dict'),
max_results=dict(type='int'),
next_token_id=dict(type='str')
)
module = AnsibleAWSModule(
argument_spec=argument_spec,
mutually_exclusive=[
['snapshot_ids', 'owner_ids', 'restorable_by_user_ids', 'filters'],
['snapshot_ids', 'max_results'],
['snapshot_ids', 'next_token_id']
],
supports_check_mode=True
)
connection = module.client('ec2', retry_decorator=AWSRetry.jittered_backoff())
list_ec2_snapshots(connection, module)
if __name__ == '__main__':
main()
| 36.406143 | 160 | 0.675729 |
0e36b1c695442f1b6e41d9aec8027a73b7f069fe | 1,482 | py | Python | src/oemof/tabular/examples/scripting/postprocessing.py | jnnr/oemof-tabular | ab58d8c3035b6e97d9d45169832745de11e5bb36 | [
"BSD-3-Clause"
] | 2 | 2019-12-09T17:34:31.000Z | 2022-02-04T12:55:15.000Z | src/oemof/tabular/examples/scripting/postprocessing.py | jnnr/oemof-tabular | ab58d8c3035b6e97d9d45169832745de11e5bb36 | [
"BSD-3-Clause"
] | 28 | 2018-11-24T16:56:55.000Z | 2022-03-25T12:19:41.000Z | src/oemof/tabular/examples/scripting/postprocessing.py | jnnr/oemof-tabular | ab58d8c3035b6e97d9d45169832745de11e5bb36 | [
"BSD-3-Clause"
] | 7 | 2018-12-19T13:42:52.000Z | 2021-11-21T18:43:45.000Z | import pkg_resources as pkg
from oemof.outputlib import views
from oemof.solph import EnergySystem, Model
from oemof.tabular.facades import TYPEMAP
import oemof.tabular.datapackage # noqa
import oemof.tabular.tools.postprocessing as pp
examples = ["dispatch", "investment", "foreignkeys"]
for example in examples:
print("Runnig postprocessing example with datapackage {}".format(example))
es = EnergySystem.from_datapackage(
pkg.resource_filename(
"oemof.tabular",
"examples/datapackages/{}/datapackage.json".format(example),
),
attributemap={},
typemap=TYPEMAP,
)
es.timeindex = es.timeindex[0:5]
m = Model(es)
m.solve(solver="cbc")
# skip foreignkeys example as not all buses are present
if example != "foreignkeys":
br = pp.bus_results(es, m.results(), select="scalars")
if example == "investment":
br["bus0"].xs([es.groups["bus0"], "invest"], level=[1, 2])
pp.supply_results(results=m.results(), es=es, bus=["heat-bus"])
pp.supply_results(results=m.results(), es=es, bus=["bus0", "bus1"])
pp.demand_results(results=m.results(), es=es, bus=["bus0", "bus1"])
pp.component_results(results=m.results(), es=es, select="sequences")
pp.component_results(results=m.results(), es=es, select="scalars")
views.node_input_by_type(
m.results(), node_type=TYPEMAP["storage"], droplevel=[2]
)
| 30.244898 | 78 | 0.647773 |
3e9a6c1a0ef28616fe4c759681cb2ff6cdb5b994 | 11,344 | py | Python | airflow/gcp/utils/mlengine_operator_utils.py | ankit-shrivastava/airflow | 77b1bdc12ca5ddf043d4550d36948766b59f60ce | [
"Apache-2.0"
] | 2 | 2021-07-30T17:37:15.000Z | 2021-08-03T13:50:56.000Z | airflow/gcp/utils/mlengine_operator_utils.py | larryzhu2018/airflow-1 | 1bb12f31585c36661fe30c11c9b3e0f67586a93a | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | airflow/gcp/utils/mlengine_operator_utils.py | larryzhu2018/airflow-1 | 1bb12f31585c36661fe30c11c9b3e0f67586a93a | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""
This module contains helper functions for MLEngine operators.
"""
import base64
import json
import os
import re
from urllib.parse import urlsplit
import dill
from airflow.exceptions import AirflowException
from airflow.gcp.hooks.gcs import GoogleCloudStorageHook
from airflow.gcp.operators.dataflow import DataflowCreatePythonJobOperator
from airflow.gcp.operators.mlengine import MLEngineStartBatchPredictionJobOperator
from airflow.operators.python_operator import PythonOperator
def create_evaluate_ops(task_prefix, # pylint: disable=too-many-arguments
data_format,
input_paths,
prediction_path,
metric_fn_and_keys,
validate_fn,
batch_prediction_job_id=None,
project_id=None,
region=None,
dataflow_options=None,
model_uri=None,
model_name=None,
version_name=None,
dag=None,
py_interpreter="python3"):
"""
Creates Operators needed for model evaluation and returns.
It gets prediction over inputs via Cloud ML Engine BatchPrediction API by
calling MLEngineBatchPredictionOperator, then summarize and validate
the result via Cloud Dataflow using DataFlowPythonOperator.
For details and pricing about Batch prediction, please refer to the website
https://cloud.google.com/ml-engine/docs/how-tos/batch-predict
and for Cloud Dataflow, https://cloud.google.com/dataflow/docs/
It returns three chained operators for prediction, summary, and validation,
named as <prefix>-prediction, <prefix>-summary, and <prefix>-validation,
respectively.
(<prefix> should contain only alphanumeric characters or hyphen.)
The upstream and downstream can be set accordingly like:
pred, _, val = create_evaluate_ops(...)
pred.set_upstream(upstream_op)
...
downstream_op.set_upstream(val)
Callers will provide two python callables, metric_fn and validate_fn, in
order to customize the evaluation behavior as they wish.
- metric_fn receives a dictionary per instance derived from json in the
batch prediction result. The keys might vary depending on the model.
It should return a tuple of metrics.
- validation_fn receives a dictionary of the averaged metrics that metric_fn
generated over all instances.
The key/value of the dictionary matches to what's given by
metric_fn_and_keys arg.
The dictionary contains an additional metric, 'count' to represent the
total number of instances received for evaluation.
The function would raise an exception to mark the task as failed, in a
case the validation result is not okay to proceed (i.e. to set the trained
version as default).
Typical examples are like this:
def get_metric_fn_and_keys():
import math # imports should be outside of the metric_fn below.
def error_and_squared_error(inst):
label = float(inst['input_label'])
classes = float(inst['classes']) # 0 or 1
err = abs(classes-label)
squared_err = math.pow(classes-label, 2)
return (err, squared_err) # returns a tuple.
return error_and_squared_error, ['err', 'mse'] # key order must match.
def validate_err_and_count(summary):
if summary['err'] > 0.2:
raise ValueError('Too high err>0.2; summary=%s' % summary)
if summary['mse'] > 0.05:
raise ValueError('Too high mse>0.05; summary=%s' % summary)
if summary['count'] < 1000:
raise ValueError('Too few instances<1000; summary=%s' % summary)
return summary
For the details on the other BatchPrediction-related arguments (project_id,
job_id, region, data_format, input_paths, prediction_path, model_uri),
please refer to MLEngineBatchPredictionOperator too.
:param task_prefix: a prefix for the tasks. Only alphanumeric characters and
hyphen are allowed (no underscores), since this will be used as dataflow
job name, which doesn't allow other characters.
:type task_prefix: str
:param data_format: either of 'TEXT', 'TF_RECORD', 'TF_RECORD_GZIP'
:type data_format: str
:param input_paths: a list of input paths to be sent to BatchPrediction.
:type input_paths: list[str]
:param prediction_path: GCS path to put the prediction results in.
:type prediction_path: str
:param metric_fn_and_keys: a tuple of metric_fn and metric_keys:
- metric_fn is a function that accepts a dictionary (for an instance),
and returns a tuple of metric(s) that it calculates.
- metric_keys is a list of strings to denote the key of each metric.
:type metric_fn_and_keys: tuple of a function and a list[str]
:param validate_fn: a function to validate whether the averaged metric(s) is
good enough to push the model.
:type validate_fn: function
:param batch_prediction_job_id: the id to use for the Cloud ML Batch
prediction job. Passed directly to the MLEngineBatchPredictionOperator as
the job_id argument.
:type batch_prediction_job_id: str
:param project_id: the Google Cloud Platform project id in which to execute
Cloud ML Batch Prediction and Dataflow jobs. If None, then the `dag`'s
`default_args['project_id']` will be used.
:type project_id: str
:param region: the Google Cloud Platform region in which to execute Cloud ML
Batch Prediction and Dataflow jobs. If None, then the `dag`'s
`default_args['region']` will be used.
:type region: str
:param dataflow_options: options to run Dataflow jobs. If None, then the
`dag`'s `default_args['dataflow_default_options']` will be used.
:type dataflow_options: dictionary
:param model_uri: GCS path of the model exported by Tensorflow using
tensorflow.estimator.export_savedmodel(). It cannot be used with
model_name or version_name below. See MLEngineBatchPredictionOperator for
more detail.
:type model_uri: str
:param model_name: Used to indicate a model to use for prediction. Can be
used in combination with version_name, but cannot be used together with
model_uri. See MLEngineBatchPredictionOperator for more detail. If None,
then the `dag`'s `default_args['model_name']` will be used.
:type model_name: str
:param version_name: Used to indicate a model version to use for prediction,
in combination with model_name. Cannot be used together with model_uri.
See MLEngineBatchPredictionOperator for more detail. If None, then the
`dag`'s `default_args['version_name']` will be used.
:type version_name: str
:param dag: The `DAG` to use for all Operators.
:type dag: airflow.models.DAG
:param py_interpreter: Python version of the beam pipeline.
If None, this defaults to the python3.
To track python versions supported by beam and related
issues check: https://issues.apache.org/jira/browse/BEAM-1251
:type py_interpreter: str
:returns: a tuple of three operators, (prediction, summary, validation)
:rtype: tuple(DataFlowPythonOperator, DataFlowPythonOperator,
PythonOperator)
"""
# Verify that task_prefix doesn't have any special characters except hyphen
# '-', which is the only allowed non-alphanumeric character by Dataflow.
if not re.match(r"^[a-zA-Z][-A-Za-z0-9]*$", task_prefix):
raise AirflowException(
"Malformed task_id for DataFlowPythonOperator (only alphanumeric "
"and hyphens are allowed but got: " + task_prefix)
metric_fn, metric_keys = metric_fn_and_keys
if not callable(metric_fn):
raise AirflowException("`metric_fn` param must be callable.")
if not callable(validate_fn):
raise AirflowException("`validate_fn` param must be callable.")
if dag is not None and dag.default_args is not None:
default_args = dag.default_args
project_id = project_id or default_args.get('project_id')
region = region or default_args.get('region')
model_name = model_name or default_args.get('model_name')
version_name = version_name or default_args.get('version_name')
dataflow_options = dataflow_options or \
default_args.get('dataflow_default_options')
evaluate_prediction = MLEngineStartBatchPredictionJobOperator(
task_id=(task_prefix + "-prediction"),
project_id=project_id,
job_id=batch_prediction_job_id,
region=region,
data_format=data_format,
input_paths=input_paths,
output_path=prediction_path,
uri=model_uri,
model_name=model_name,
version_name=version_name,
dag=dag)
metric_fn_encoded = base64.b64encode(dill.dumps(metric_fn, recurse=True)).decode()
evaluate_summary = DataflowCreatePythonJobOperator(
task_id=(task_prefix + "-summary"),
py_options=["-m"],
py_file="airflow.gcp.utils.mlengine_prediction_summary",
dataflow_default_options=dataflow_options,
options={
"prediction_path": prediction_path,
"metric_fn_encoded": metric_fn_encoded,
"metric_keys": ','.join(metric_keys)
},
py_interpreter=py_interpreter,
dag=dag)
evaluate_summary.set_upstream(evaluate_prediction)
def apply_validate_fn(*args, **kwargs):
prediction_path = kwargs["templates_dict"]["prediction_path"]
scheme, bucket, obj, _, _ = urlsplit(prediction_path)
if scheme != "gs" or not bucket or not obj:
raise ValueError("Wrong format prediction_path: {}".format(prediction_path))
summary = os.path.join(obj.strip("/"),
"prediction.summary.json")
gcs_hook = GoogleCloudStorageHook()
summary = json.loads(gcs_hook.download(bucket, summary))
return validate_fn(summary)
evaluate_validation = PythonOperator(
task_id=(task_prefix + "-validation"),
python_callable=apply_validate_fn,
templates_dict={"prediction_path": prediction_path},
dag=dag)
evaluate_validation.set_upstream(evaluate_summary)
return evaluate_prediction, evaluate_summary, evaluate_validation
| 43.799228 | 88 | 0.693759 |
b83d46bbfb0b3d9e22de0e11f76a7a36026318ae | 6,426 | py | Python | test/functional/rpc_bind.py | neemcoin-project/neemcoin | 522e8f8f5b7abf582efa42e00ae1d156281ef899 | [
"MIT"
] | null | null | null | test/functional/rpc_bind.py | neemcoin-project/neemcoin | 522e8f8f5b7abf582efa42e00ae1d156281ef899 | [
"MIT"
] | null | null | null | test/functional/rpc_bind.py | neemcoin-project/neemcoin | 522e8f8f5b7abf582efa42e00ae1d156281ef899 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test running neemcoind with the -rpcbind and -rpcallowip options."""
import sys
from test_framework.netutil import all_interfaces, addr_to_hex, get_bind_addrs, test_ipv6_local
from test_framework.test_framework import BitcoinTestFramework, SkipTest
from test_framework.util import assert_equal, assert_raises_rpc_error, get_rpc_proxy, rpc_port, rpc_url
class RPCBindTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.bind_to_localhost_only = False
self.num_nodes = 1
def setup_network(self):
self.add_nodes(self.num_nodes, None)
def add_options(self, parser):
parser.add_argument("--ipv4", action='store_true', dest="run_ipv4", help="Run ipv4 tests only", default=False)
parser.add_argument("--ipv6", action='store_true', dest="run_ipv6", help="Run ipv6 tests only", default=False)
parser.add_argument("--nonloopback", action='store_true', dest="run_nonloopback", help="Run non-loopback tests only", default=False)
def run_bind_test(self, allow_ips, connect_to, addresses, expected):
'''
Start a node with requested rpcallowip and rpcbind parameters,
then try to connect, and check if the set of bound addresses
matches the expected set.
'''
self.log.info("Bind test for %s" % str(addresses))
expected = [(addr_to_hex(addr), port) for (addr, port) in expected]
base_args = ['-disablewallet', '-nolisten']
if allow_ips:
base_args += ['-rpcallowip=' + x for x in allow_ips]
binds = ['-rpcbind='+addr for addr in addresses]
self.nodes[0].rpchost = connect_to
self.start_node(0, base_args + binds)
pid = self.nodes[0].process.pid
assert_equal(set(get_bind_addrs(pid)), set(expected))
self.stop_nodes()
def run_allowip_test(self, allow_ips, rpchost, rpcport):
'''
Start a node with rpcallow IP, and request getnetworkinfo
at a non-localhost IP.
'''
self.log.info("Allow IP test for %s:%d" % (rpchost, rpcport))
node_args = \
['-disablewallet', '-nolisten'] + \
['-rpcallowip='+x for x in allow_ips] + \
['-rpcbind='+addr for addr in ['127.0.0.1', "%s:%d" % (rpchost, rpcport)]] # Bind to localhost as well so start_nodes doesn't hang
self.nodes[0].rpchost = None
self.start_nodes([node_args])
# connect to node through non-loopback interface
node = get_rpc_proxy(rpc_url(self.nodes[0].datadir, 0, "%s:%d" % (rpchost, rpcport)), 0, coveragedir=self.options.coveragedir)
node.getnetworkinfo()
self.stop_nodes()
def run_test(self):
# due to OS-specific network stats queries, this test works only on Linux
if sum([self.options.run_ipv4, self.options.run_ipv6, self.options.run_nonloopback]) > 1:
raise AssertionError("Only one of --ipv4, --ipv6 and --nonloopback can be set")
self.log.info("Check for linux")
if not sys.platform.startswith('linux'):
raise SkipTest("This test can only be run on linux.")
self.log.info("Check for ipv6")
have_ipv6 = test_ipv6_local()
if not have_ipv6 and not (self.options.run_ipv4 or self.options.run_nonloopback):
raise SkipTest("This test requires ipv6 support.")
self.log.info("Check for non-loopback interface")
self.non_loopback_ip = None
for name,ip in all_interfaces():
if ip != '127.0.0.1':
self.non_loopback_ip = ip
break
if self.non_loopback_ip is None and self.options.run_nonloopback:
raise SkipTest("This test requires a non-loopback ip address.")
self.defaultport = rpc_port(0)
if not self.options.run_nonloopback:
self._run_loopback_tests()
if not self.options.run_ipv4 and not self.options.run_ipv6:
self._run_nonloopback_tests()
def _run_loopback_tests(self):
if self.options.run_ipv4:
# check only IPv4 localhost (explicit)
self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1'],
[('127.0.0.1', self.defaultport)])
# check only IPv4 localhost (explicit) with alternative port
self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171'],
[('127.0.0.1', 32171)])
# check only IPv4 localhost (explicit) with multiple alternative ports on same host
self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171', '127.0.0.1:32172'],
[('127.0.0.1', 32171), ('127.0.0.1', 32172)])
else:
# check default without rpcallowip (IPv4 and IPv6 localhost)
self.run_bind_test(None, '127.0.0.1', [],
[('127.0.0.1', self.defaultport), ('::1', self.defaultport)])
# check default with rpcallowip (IPv4 and IPv6 localhost)
self.run_bind_test(['127.0.0.1'], '127.0.0.1', [],
[('127.0.0.1', self.defaultport), ('::1', self.defaultport)])
# check only IPv6 localhost (explicit)
self.run_bind_test(['[::1]'], '[::1]', ['[::1]'],
[('::1', self.defaultport)])
# check both IPv4 and IPv6 localhost (explicit)
self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1', '[::1]'],
[('127.0.0.1', self.defaultport), ('::1', self.defaultport)])
def _run_nonloopback_tests(self):
self.log.info("Using interface %s for testing" % self.non_loopback_ip)
# check only non-loopback interface
self.run_bind_test([self.non_loopback_ip], self.non_loopback_ip, [self.non_loopback_ip],
[(self.non_loopback_ip, self.defaultport)])
# Check that with invalid rpcallowip, we are denied
self.run_allowip_test([self.non_loopback_ip], self.non_loopback_ip, self.defaultport)
assert_raises_rpc_error(-342, "non-JSON HTTP response with '403 Forbidden' from server", self.run_allowip_test, ['1.1.1.1'], self.non_loopback_ip, self.defaultport)
if __name__ == '__main__':
RPCBindTest().main()
| 49.430769 | 172 | 0.633209 |
a04e649b7a89f60fc58c0dddf0fbbf30fb643405 | 543 | py | Python | manage.py | onlywade/airtng-flask | 034d503cc311d5bbd25d829afda865845d11ca33 | [
"MIT"
] | null | null | null | manage.py | onlywade/airtng-flask | 034d503cc311d5bbd25d829afda865845d11ca33 | [
"MIT"
] | null | null | null | manage.py | onlywade/airtng-flask | 034d503cc311d5bbd25d829afda865845d11ca33 | [
"MIT"
] | null | null | null | from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from airtng_flask import app, db
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.command
def test():
"""Run the unit tests."""
import sys, unittest
tests = unittest.TestLoader().discover('.', pattern="*_tests.py")
result = unittest.TextTestRunner(verbosity=2).run(tests)
if not result.wasSuccessful():
sys.exit(1)
if __name__ == "__main__":
manager.run()
| 21.72 | 69 | 0.697974 |
3c9518c49539297e1db3e8f848ccd7244551f744 | 5,797 | py | Python | biggraphite/cli/command_stats.py | crto-es-vyg/biggraphite | 6faf66ea6fe77b9f6d538f3397b3d4d7b58b0a76 | [
"Apache-2.0"
] | 1 | 2018-06-19T13:10:19.000Z | 2018-06-19T13:10:19.000Z | biggraphite/cli/command_stats.py | crto-es-vyg/biggraphite | 6faf66ea6fe77b9f6d538f3397b3d4d7b58b0a76 | [
"Apache-2.0"
] | 2 | 2022-02-09T01:02:22.000Z | 2022-02-26T00:46:39.000Z | biggraphite/cli/command_stats.py | crto-es-vyg/biggraphite | 6faf66ea6fe77b9f6d538f3397b3d4d7b58b0a76 | [
"Apache-2.0"
] | 1 | 2019-11-19T16:21:32.000Z | 2019-11-19T16:21:32.000Z | #!/usr/bin/env python
# Copyright 2016 Criteo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Stats Command."""
from __future__ import print_function
import collections
import re
import time
import socket
import logging
import tabulate
from six.moves.configparser import ConfigParser
from biggraphite.cli import command
# Hack to add some more formats.
# TODO: Add Graphite support.
# TODO: Remove padding.
tabulate._table_formats["csv"] = tabulate.TableFormat(
lineabove=None,
linebelowheader=None,
linebetweenrows=None,
linebelow=None,
headerrow=tabulate.DataRow("", ";", ""),
datarow=tabulate.DataRow("", ";", ""),
padding=0,
with_header_hide=None,
)
tabulate.tabulate_formats = list(sorted(tabulate._table_formats.keys()))
class Namespaces(object):
r"""Helper for namespaces.
The config file would look like:
```
[carbon-relay]
pattern = carbon\.relay\.*
[carbon-cache]
pattern = carbon\.agents\.*
[carbon-aggregator]
pattern = carbon\.aggregator\.*
[prometheus]
pattern = prometheus\.*
```
"""
def __init__(self, filename=None):
"""Initializer."""
self.config = ConfigParser({}, collections.OrderedDict)
self.patterns = collections.OrderedDict()
if not filename:
self.patterns[re.compile(".*")] = "total"
self.config.add_section("total")
return
self.config.read(filename)
for section in self.config.sections():
pattern = re.compile(self.config.get(section, "pattern"))
self.patterns[pattern] = section
def lookup(self, metric_name):
"""Return the namespace corresponding to the metric."""
for pattern, section in self.patterns.items():
if pattern.match(metric_name):
return section, self.config.items(section)
return "none", None
class CommandStats(command.BaseCommand):
"""Stats for metrics."""
NAME = "stats"
HELP = "disk usage if one or several specific metrics."
def __init__(self, *args, **kwargs):
"""Initialize."""
super(CommandStats, self).__init__(*args, **kwargs)
self.ns = None
self._n_metrics = collections.defaultdict(int)
self._n_points = collections.defaultdict(int)
def add_arguments(self, parser):
"""Add custom arguments.
See command.CommandBase.
"""
command.add_sharding_arguments(parser)
parser.add_argument(
"-c", "--conf", help="Configuration file for namespaces", dest="conf"
)
formats = tabulate.tabulate_formats
formats.append("graphite")
parser.add_argument(
"-f", "--format", help="Format: %s" % ", ".join(formats), dest="fmt"
)
parser.add_argument(
"--carbon",
help="Carbon host:port to send points to when using graphite output."
)
parser.add_argument(
"--prefix",
help="Prefix to add to every section name.",
default='',
)
self._n_metrics = collections.defaultdict(int)
self._n_points = collections.defaultdict(int)
def run(self, accessor, opts):
"""Disk usage of metrics.
See command.CommandBase.
"""
self.ns = Namespaces(opts.conf)
accessor.connect()
if accessor.TYPE.startswith("elasticsearch+"):
accessor = accessor._metadata_accessor
if accessor.TYPE == "elasticsearch":
# Elasticsearch has a better implementation.
self._n_metrics, self._n_points = accessor.metric_stats(self.ns)
else:
accessor.map(
self.stats,
start_key=opts.start_key,
end_key=opts.end_key,
shard=opts.shard,
nshards=opts.nshards,
)
columns = ("Namespace", "Metrics", "Points")
rows = [columns]
if opts.fmt == "graphite":
now = int(time.time())
output = ""
for k, v in self._n_metrics.items():
output += "%smetrics.%s %s %s\n" % (opts.prefix, k, v, now)
for k, v in self._n_points.items():
output += "%spoints.%s %s %s\n" % (opts.prefix, k, v, now)
if not opts.carbon:
print(output)
else:
# This is a very-very cheap implementation of a carbon client.
host, port = opts.carbon.split(':')
logging.info("Sending data to %s:%s" % (host, port))
sock = socket.socket()
sock.connect((host, int(port)))
sock.sendall(output)
sock.close()
return
for k in self._n_metrics.keys():
data = (
'%s%s' % (opts.prefix, k),
self._n_metrics.get(k),
self._n_points.get(k)
)
rows.append(data)
print(tabulate.tabulate(rows, headers="firstrow", tablefmt=opts.fmt))
def stats(self, metric, done, total):
"""Compute stats."""
ns, _ = self.ns.lookup(metric.name)
self._n_metrics[ns] += 1
self._n_points[ns] += metric.metadata.retention.points
| 30.510526 | 81 | 0.591685 |
ce12fc9d90bd21db076bb2cba408cdbfbc12e746 | 2,532 | py | Python | pset6/caesar.py | DInnaD/CS50 | 0ff04093a11893a6c2b7577bea1c962d4675317d | [
"Apache-2.0"
] | null | null | null | pset6/caesar.py | DInnaD/CS50 | 0ff04093a11893a6c2b7577bea1c962d4675317d | [
"Apache-2.0"
] | null | null | null | pset6/caesar.py | DInnaD/CS50 | 0ff04093a11893a6c2b7577bea1c962d4675317d | [
"Apache-2.0"
] | null | null | null | import cs50
import sys
def main():
if len(sys.argv) != 2:
print("You should provide cmd line arguments!")
exit(1)
#if sys.argv[1].isalpha() == False:
#print("You should provide valid key!")
#exit(1)
kplainText = int(sys.argv[1])
cipher = []
plainText = cs50.get_string()
for symbol in plainText:
if symbol.isalpha():
cipher.append(caesar(symbol, kplainText))
else:
cipher.append(symbol)
print("".join(cipher))
exit(0)
def caesar(char, kplainText):
if char.isupper():
return chr(((ord(char) - 65 + kplainText) % 26) + 65)
else:
return chr(((ord(char) - 97 + kplainText) % 26) + 97)
if __name__ == "__main__":
main()
# #include <ctype.h>
# #include <string.h>
# #include <cs50.h>
# #include <stdio.h>
# #include <stdlib.h>
# //define my caesarCipher
# void caesarCipher(char* plainText,int key);
# def int main(int argc, char* argv[]): # //{//????????????????/char*
# if argc is not 2:
# # {
# print("Usage: ./caesar k\n")
# #return 1
# #}
# #//printf(" %s\n", argv[1]);
# int key = atoi(sys.argv[1])
# char plainText[101]
# print("plaintext: ")#;//ask user
# fgets(plainText, sizeof(plainText), stdin);//get user input & store it in planText var++++++++
# print("ciphertext: ")#;//print the ciphered text
# caesarCipher(plainText,key)
# //system(pause);//connect out if not use wind---------------------------???????????????
# # return 0;
# #}
# void caesarCipher(char* plainText, int key){//key pomen mestami on first plaiiiiiiiiiin
# int i = 0
# char cipher
# int cipherValue
# while plainText[i] != '\0' and strlen(plainText) -1 > i :break#// for(int i=1,len=strlen(name);i<len;i++)
# if isalpha(plainText[i]) and islower(plainText[i]):
# cipherValue = ((int)((plainText[i]) - 97 + key) % 26 + 97)
# cipher = (char)(cipherValue);printf("%c", cipher)
# i++
# else:
# if isalpha(plainText[i]) and isupper(plainText[i]):# // if isaph char
# cipherValue = ((int)(plainText[i] - 65 + key) % 26 + 65)
# cipher = (char)(cipherValue)
# print("%c", cipher)
# i++
# else: #//if not isaplha low or up
# print("%c", plainText[i])
# i++
# print("\n")
#} | 31.259259 | 112 | 0.507504 |
c16d1c105b971e3e06cfa2d91adefc8c9acd2297 | 1,253 | py | Python | Raspberry/focuscontrol.py | kevin-ch-day/MajesticMagicians-Master | bf71e0f2faa2c6a3ed52f509c0beb162d377371e | [
"MIT"
] | null | null | null | Raspberry/focuscontrol.py | kevin-ch-day/MajesticMagicians-Master | bf71e0f2faa2c6a3ed52f509c0beb162d377371e | [
"MIT"
] | null | null | null | Raspberry/focuscontrol.py | kevin-ch-day/MajesticMagicians-Master | bf71e0f2faa2c6a3ed52f509c0beb162d377371e | [
"MIT"
] | null | null | null | import threading
#Single controller for focus. Executes one focus adjustment request at a time. Drops requests while adjusting focus
class FocusControl():
lock = threading.Lock()
def request_focus_adjust(amount):
if not lock.acquire(False):
return 'The focus is currently adjusting. This adjustment will not be processed.'
else:
is_adjust_correct = False
try:
is_adjust_correct = focus_adjust(amount)
except:
pass
finally:
result_str = ''
if is_adjust_correct:
result_str = 'The focus has been adjusted by ' + str(amount) + '.'
else:
result_str = 'The focus stopped sometime while adjusting by ' + str(amount) + '.'
lock.release()
return result_str
def _focus_adjust(amount):
lock.acquire()
try:
#Code to interface with motor
#Wait while motor moving (depends on if this will be controlled by a timer or feedback from the motor)
#If any errors return false
pass
except:
pass
finally:
lock.release()
| 32.973684 | 115 | 0.557861 |
984ece1f57921e9838aac27bca23dad642f6d150 | 419 | py | Python | mnelab/dialogs/errormessagebox.py | yop0/mnelab | 12b62d0611ebc63bc23f7c9101d7eabdc1175055 | [
"BSD-3-Clause"
] | null | null | null | mnelab/dialogs/errormessagebox.py | yop0/mnelab | 12b62d0611ebc63bc23f7c9101d7eabdc1175055 | [
"BSD-3-Clause"
] | null | null | null | mnelab/dialogs/errormessagebox.py | yop0/mnelab | 12b62d0611ebc63bc23f7c9101d7eabdc1175055 | [
"BSD-3-Clause"
] | null | null | null | # Authors: Clemens Brunner <clemens.brunner@gmail.com>
#
# License: BSD (3-clause)
from PySide6.QtWidgets import QMessageBox
class ErrorMessageBox(QMessageBox):
def __init__(self, parent, text, informative, detailed):
super().__init__(parent=parent)
self.setText(text)
self.setInformativeText(informative)
self.setDetailedText(detailed)
self.setIcon(QMessageBox.Critical)
| 27.933333 | 60 | 0.720764 |
26c7882fdb3ff2c1d68b20885739dcfd62c56981 | 370 | py | Python | btk/__init__.py | mpaillassa/BlendingToolKit | 3dfb8bc36d6c7d944c8ef353f2c70623d882fcd1 | [
"MIT"
] | null | null | null | btk/__init__.py | mpaillassa/BlendingToolKit | 3dfb8bc36d6c7d944c8ef353f2c70623d882fcd1 | [
"MIT"
] | null | null | null | btk/__init__.py | mpaillassa/BlendingToolKit | 3dfb8bc36d6c7d944c8ef353f2c70623d882fcd1 | [
"MIT"
] | null | null | null | """Toolkit for fast simulation and analysis of overlapping objects for the
LSST Dark Energy Science Collaboration.
The code generates on the fly images of overlapping parametric galaxies, while
providing a framework to test user defined detection/deblending/measurement
algorithms.
"""
__author__ = "btk developers"
__email__ = "imendoza@umich.edu"
__version__ = "0.1"
| 33.636364 | 78 | 0.805405 |
a36c8b6092cfee16a9dc156dc300ba3592ed8399 | 347 | py | Python | videos/migrations/0002_rename_title_video_name.py | KibetRonoh/Movie_Zone_-Django | 901b211540943a98b531db2bc77d6b3a483a694a | [
"MIT"
] | 58 | 2021-03-13T21:00:01.000Z | 2022-03-29T05:59:39.000Z | videos/migrations/0002_rename_title_video_name.py | KibetRonoh/Movie_Zone_-Django | 901b211540943a98b531db2bc77d6b3a483a694a | [
"MIT"
] | 1 | 2021-05-21T16:40:00.000Z | 2021-05-21T16:40:00.000Z | videos/migrations/0002_rename_title_video_name.py | KibetRonoh/Movie_Zone_-Django | 901b211540943a98b531db2bc77d6b3a483a694a | [
"MIT"
] | 35 | 2021-03-17T12:04:30.000Z | 2022-03-18T02:06:31.000Z | # Generated by Django 3.2b1 on 2021-03-14 20:34
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('videos', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='video',
old_name='title',
new_name='name',
),
]
| 18.263158 | 47 | 0.567723 |
5ba4f41b00b941ea9a28ab9be776e6bb77e581a1 | 13,670 | py | Python | tests/test_grammar_extraction_python.py | hbrodin/polytracker | cb5733dac7d342419d5ca1dc3bea1d29f4f41edb | [
"Apache-2.0"
] | 304 | 2019-11-01T04:10:13.000Z | 2022-03-31T15:44:06.000Z | tests/test_grammar_extraction_python.py | hbrodin/polytracker | cb5733dac7d342419d5ca1dc3bea1d29f4f41edb | [
"Apache-2.0"
] | 6,091 | 2019-12-04T21:58:22.000Z | 2022-03-14T13:19:50.000Z | tests/test_grammar_extraction_python.py | hbrodin/polytracker | cb5733dac7d342419d5ca1dc3bea1d29f4f41edb | [
"Apache-2.0"
] | 26 | 2019-11-01T21:02:46.000Z | 2022-03-16T04:09:40.000Z | from abc import ABC
from collections import defaultdict
from typing import Dict, Iterable, Iterator, List, Optional, Set, Tuple, Union
import pytest
from polytracker import BasicBlock, ByteOffset, Function, TaintForest, TaintAccess, Taints
from polytracker.grammars import Grammar, parse_tree_to_grammar
from polytracker.inputs import Input
from polytracker.parsing import NonGeneralizedParseTree, trace_to_non_generalized_tree
from polytracker.tracing import BasicBlockEntry, FunctionEntry, FunctionReturn, ProgramTrace, TraceEvent
class Counter:
def __init__(self):
self.n = 0
def increment(self):
self.n += 1
def __int__(self):
ret = self.n
self.increment()
return ret
class BasicBlockMock(BasicBlock):
def taints(self) -> Taints:
raise NotImplementedError("TODO: Implement this function when needed")
def entries(self) -> Iterator["BasicBlockEntry"]:
raise NotImplementedError("TODO: Implement this function when needed")
class FunctionMock(Function):
def taints(self) -> Taints:
raise NotImplementedError("TODO: Implement this function when needed")
def calls_to(self) -> Set["Function"]:
raise NotImplementedError("TODO: Implement this function when needed")
def called_from(self) -> Set["Function"]:
raise NotImplementedError("TODO: Implement this function when needed")
class TracedEvent(ABC, TraceEvent):
def __init__(self, tracer: "Tracer"):
super().__init__(len(tracer.events))
tracer.events[self.uid] = self
self.tracer: Tracer = tracer
entry = tracer.call_stack[-1]
f_name = entry.function.name
self._function: Function = tracer.functions_by_name[f_name]
self._function_entry: TracedFunctionEntry = entry
@property
def function(self) -> Function:
return self._function
@property
def previous_event(self) -> Optional["TraceEvent"]:
if self.uid == 0:
return None
return self.tracer.events[self.uid - 1]
@property
def next_event(self) -> Optional["TraceEvent"]:
if self.uid >= len(self.tracer.events) - 1:
return None
return self.tracer.events[self.uid + 1]
@property
def next_global_event(self) -> Optional["TraceEvent"]:
return self.next_event
@property
def previous_global_event(self) -> Optional["TraceEvent"]:
return self.previous_event
@property
def function_entry(self) -> Optional["TracedFunctionEntry"]:
return self._function_entry
class TracedBasicBlockEntry(TracedEvent, BasicBlockEntry):
def __init__(self, tracer: "Tracer", bb_name: str):
super().__init__(tracer)
self.name: str = bb_name
self.consumed: List[int] = []
tracer.bb_stack[-1].append(self)
f_name = self.function.name
bbs = tracer.bbs[f_name]
if bb_name not in bbs:
bbs[bb_name] = BasicBlockMock(self.function, len(bbs))
self._basic_block: BasicBlock = bbs[bb_name]
@property
def basic_block(self) -> BasicBlock:
return self._basic_block
def taints(self) -> Taints:
return Taints((ByteOffset(source=self.tracer.source, offset=i) for i in self.consumed))
class TracedFunctionEntry(TracedEvent, FunctionEntry):
def __init__(self, tracer: "Tracer", func_name: str):
if func_name not in tracer.functions_by_name:
func: Function = FunctionMock(func_name, len(tracer.functions_by_name))
tracer.functions_by_name[func_name] = func
else:
func = tracer.functions_by_name[func_name]
self._function: Function = func
tracer.call_stack.append(self)
super().__init__(tracer)
self.name: str = func_name
self._function_return: Optional[FunctionReturn] = None
tracer.bb_stack.append([])
@property
def function_return(self) -> Optional[FunctionReturn]:
return self._function_return
@function_return.setter
def function_return(self, new_value: FunctionReturn):
if self._function_return is not None and new_value is not self._function_return:
raise ValueError(f"{self!r} is already set to return to {self._function_return!r}, not {new_value!r}")
self._function_return = new_value
def taints(self) -> Taints:
return Taints(())
class TracedFunctionReturn(TracedEvent, FunctionReturn):
def __init__(self, tracer: "Tracer"):
super().__init__(tracer)
self._basic_block: BasicBlock = tracer.current_bb.basic_block
@property
def basic_block(self) -> BasicBlock:
return self._basic_block
def taints(self) -> Taints:
return Taints(())
class Tracer(ProgramTrace):
def __init__(self, inputstr: bytes):
self.source: Input = Input(uid=1, path="test.data", size=len(inputstr), content=inputstr)
self.call_stack: List[TracedFunctionEntry] = []
self.bb_stack: List[List[TracedBasicBlockEntry]] = []
self.events: Dict[int, TraceEvent] = {}
self.functions_by_name: Dict[str, Function] = {}
self.bbs: Dict[str, Dict[str, BasicBlock]] = defaultdict(dict)
self.inputstr: bytes = inputstr
self.input_offset: int = 0
def __len__(self) -> int:
return len(self.events)
def __iter__(self) -> Iterator[TraceEvent]:
return iter(self.events.values())
@property
def functions(self) -> Iterable[Function]:
return self.functions_by_name.values()
@property
def basic_blocks(self) -> Iterable[BasicBlock]:
bbs: List[BasicBlock] = []
for blocks in self.bbs.values():
bbs.extend(blocks.values())
return bbs
def has_event(self, uid: int) -> bool:
return uid in self.events
def get_event(self, uid: int) -> TraceEvent:
return self.events[uid]
def get_function(self, name: str) -> Function:
return self.functions_by_name[name]
def has_function(self, name: str) -> bool:
return name in self.functions_by_name
def access_sequence(self) -> Iterator[TaintAccess]:
raise NotImplementedError("TODO: Implement this later if we need it")
@property
def num_accesses(self) -> int:
return sum(len(bb.consumed) for bb in self.events if isinstance(bb, TracedBasicBlockEntry))
@property
def inputs(self) -> Iterable[Input]:
return (self.source,)
@property
def taint_forest(self) -> TaintForest:
raise NotImplementedError()
def __getitem__(self, uid: int) -> TraceEvent:
return self.events[uid]
def __contains__(self, uid: int):
return uid in self.events
@property
def last_event(self) -> Optional[TraceEvent]:
if self.events:
return self.events[-1]
else:
return None
@property
def current_bb(self) -> TracedBasicBlockEntry:
return self.bb_stack[-1][-1]
@property
def current_bb_name(self) -> str:
return self.bb_stack[-1][-1].name
def peek(self, num_bytes: int) -> bytes:
bytes_read = self.inputstr[self.input_offset: self.input_offset + num_bytes]
self.current_bb.consumed.extend(range(self.input_offset, self.input_offset + len(bytes_read)))
return bytes_read
def read(self, num_bytes: int) -> bytes:
bytes_read = self.peek(num_bytes)
self.input_offset += len(bytes_read)
return bytes_read
def seek(self, input_offset: int):
self.input_offset = input_offset
def function_call(self, name: str) -> TracedFunctionEntry:
return TracedFunctionEntry(self, name)
def function_return(self, name) -> FunctionReturn:
f = TracedFunctionReturn(self)
if self.call_stack:
self.call_stack[-1].function_return = f
self.call_stack.pop()
self.bb_stack[-1].pop()
if self.call_stack:
self.bb_entry(f"{self.current_bb_name}_after_call_to_{name}")
return f
def bb_entry(self, name: str) -> TracedBasicBlockEntry:
return TracedBasicBlockEntry(self, name)
def traced(func):
def wrapped(tracer: Tracer, *args, **kwargs):
tracer.function_call(func.__name__)
tracer.bb_entry("entry")
ret = func(tracer, *args, **kwargs)
tracer.function_return(func.__name__)
return ret
return wrapped
@traced
def skip_whitespace(tracer: Tracer):
while True:
tracer.bb_entry("while_whitespace")
next_byte = tracer.peek(1)
if next_byte == b" " or next_byte == b"\t" or next_byte == "\n":
tracer.bb_entry("is_whitespace")
tracer.input_offset += 1
else:
tracer.bb_entry("not_whitespace")
break
@traced
def parse_string(tracer: Tracer) -> str:
first_byte = tracer.read(1)
assert first_byte == b'"'
ret = bytearray()
while True:
tracer.bb_entry("while_in_string")
next_byte = tracer.read(1)
if len(next_byte) == 0:
raise ValueError()
elif next_byte == b'"':
tracer.bb_entry("string_finished")
break
tracer.bb_entry("string_not_finished")
ret.extend(next_byte)
return ret.decode("utf-8")
@traced
def parse_int(tracer: Tracer) -> int:
number = bytearray()
while True:
tracer.bb_entry("while_in_int")
next_byte = tracer.peek(1)
if len(next_byte) == 0 or next_byte[0] < ord("0") or next_byte[0] > ord("9"):
tracer.bb_entry("int_finished")
break
tracer.bb_entry("int_not_finished")
number.extend(next_byte)
tracer.input_offset += 1
return int(number.decode("utf-8"))
@traced
def parse_terminal(tracer: Tracer) -> Union[int, str]:
next_byte = tracer.peek(1)
if next_byte == b'"':
tracer.bb_entry("terminal_is_string")
return parse_string(tracer)
else:
tracer.bb_entry("terminal_is_int")
return parse_int(tracer)
@traced
def parse_list(tracer: Tracer) -> List[Union[int, str]]:
ret = []
while True:
tracer.bb_entry("while_list_item")
skip_whitespace(tracer)
first_byte = tracer.peek(1)
if first_byte == b"(":
tracer.bb_entry("found_paren")
ret.append(parse_parens(tracer))
elif first_byte == b")":
tracer.bb_entry("found_close_paren")
break
else:
tracer.bb_entry("no_paren")
ret.append(parse_terminal(tracer))
skip_whitespace(tracer)
if tracer.peek(1) != b",":
tracer.bb_entry("no_comma")
break
tracer.bb_entry("found_comma")
tracer.input_offset += 1
return ret
@traced
def parse_parens(tracer: Tracer) -> List[Union[int, str]]:
skip_whitespace(tracer)
b = tracer.read(1)
assert b == b"("
ret = parse_list(tracer)
b = tracer.read(1)
assert b == b")"
return ret
def make_trace(inputstr: bytes) -> Tuple[List[Union[int, str]], Tracer]:
tracer = Tracer(inputstr)
result = parse_parens(tracer)
return result, tracer
class GrammarTestCase:
def __init__(self, input_string: bytes, trace: Tracer):
self.input_string: bytes = input_string
self.trace: Tracer = trace
self._tree: Optional[NonGeneralizedParseTree] = None
self._grammar: Optional[Grammar] = None
self._simplified_grammar: Optional[Grammar] = None
@property
def tree(self) -> NonGeneralizedParseTree:
if self._tree is None:
self._tree = trace_to_non_generalized_tree(self.trace)
return self._tree
@property
def grammar(self) -> Grammar:
if self._grammar is None:
self._grammar = parse_tree_to_grammar(self.tree)
return self._grammar
@property
def simplified_grammar(self) -> Grammar:
if self._simplified_grammar is None:
self._simplified_grammar = parse_tree_to_grammar(self.tree)
self._simplified_grammar.simplify()
return self._simplified_grammar
@pytest.fixture
def simple_grammar() -> GrammarTestCase:
input_str = b'(1, 2, ("foo", 5, "bar"), 3, 4)'
result, trace = make_trace(input_str)
assert result == [1, 2, ["foo", 5, "bar"], 3, 4]
return GrammarTestCase(input_str, trace)
def test_parse_tree_generation(simple_grammar: GrammarTestCase):
"""Tests the generation of a non-generalized parse tree from a program trace"""
assert simple_grammar.tree.matches() == simple_grammar.trace.inputstr
# print(simple_grammar.tree.to_dag().to_dot(labeler=lambda n: repr(str(n.value))))
def test_parse_tree_simplification(simple_grammar: GrammarTestCase):
"""Tests the generation of a non-generalized parse tree from a program trace"""
tree = simple_grammar.tree.clone()
tree.simplify()
assert tree.matches() == simple_grammar.trace.inputstr
def test_grammar_extraction(simple_grammar: GrammarTestCase):
simple_grammar.grammar.verify(True)
def test_grammar_matching(simple_grammar: GrammarTestCase):
# print(simple_grammar.grammar)
m = simple_grammar.grammar.match(simple_grammar.input_string)
assert bool(m)
# print(m.parse_tree.to_dag().to_dot(labeler=lambda t: repr(str(t.value))))
def test_grammar_simplification(simple_grammar: GrammarTestCase):
# print(simple_grammar.simplified_grammar)
m = simple_grammar.simplified_grammar.match(simple_grammar.input_string)
assert bool(m)
# print(m.parse_tree.to_dag().to_dot(labeler=lambda t: repr(str(t.value))))
| 32.014052 | 114 | 0.658815 |
9ccc7520152fea76b22a810c5e91da7a0a3d7865 | 367 | py | Python | tests/bots/economy/test_futures.py | swipswaps/OpenBBTerminal | 9d33f638f10fdbc77ae461b1838462571faee138 | [
"MIT"
] | null | null | null | tests/bots/economy/test_futures.py | swipswaps/OpenBBTerminal | 9d33f638f10fdbc77ae461b1838462571faee138 | [
"MIT"
] | null | null | null | tests/bots/economy/test_futures.py | swipswaps/OpenBBTerminal | 9d33f638f10fdbc77ae461b1838462571faee138 | [
"MIT"
] | null | null | null | import pytest
from bots.economy.futures import futures_command
@pytest.mark.bots
@pytest.mark.vcr
def test_futures_command(mocker, recorder):
mocker.patch("bots.helpers.uuid_get", return_value="1")
value = futures_command()
value.pop("embed")
for x in ["view", "choices", "embeds_img"]:
value[x] = str(value[x])
recorder.capture(value)
| 24.466667 | 59 | 0.700272 |
c3e6b56ce10e0a4bc55324e72f5425ca6a2519a1 | 3,338 | py | Python | kubeasy_sdk/__init__.py | dylanturn/kubeasy | 7c9fd62e22ecd89632f5aa7a7a17fda24ebe7490 | [
"Apache-2.0"
] | null | null | null | kubeasy_sdk/__init__.py | dylanturn/kubeasy | 7c9fd62e22ecd89632f5aa7a7a17fda24ebe7490 | [
"Apache-2.0"
] | null | null | null | kubeasy_sdk/__init__.py | dylanturn/kubeasy | 7c9fd62e22ecd89632f5aa7a7a17fda24ebe7490 | [
"Apache-2.0"
] | null | null | null | from __future__ import annotations
import yaml
import json
from cdk8s import App, Chart
from constructs import Construct
from kubeasy_sdk.utils.collections.chart_resource_collection import ChartResourceCollection
from kubeasy_sdk.utils.resource import Rendered
from kubeasy_sdk.deployment import Deployment
from kubeasy_sdk.container import Container
from kubeasy_sdk.service import Service, ServicePort, ServiceType
from kubeasy_sdk.ingress import Ingress, IngressPath
from kubeasy_sdk.volume import Volume, ConfigMap, EmptyDir
from typing import Mapping
class EasyChart(object):
def __init__(self, name: str, namespace: str, environment: str, release: str):
self.name = name
self.namespace = namespace
self.environment = environment
self.release = release
self.deployment = Deployment(name=self.name,
namespace=self.namespace,
environment=self.environment)
self.service_collection = ChartResourceCollection()
self.ingress_collection = ChartResourceCollection()
def add_init_container(self, name: str, image: str, tag: str) -> Container:
new_init_container = Container(name, image, tag)
self.deployment.add_init_container(new_init_container)
return new_init_container
def add_container(self, name: str, image: str, tag: str) -> Container:
new_container = Container(name, image, tag)
self.deployment.add_container(new_container)
return new_container
def include_volume(self, name: str, labels: Mapping[str, str]) -> Volume:
new_volume = Volume(name, labels)
self.deployment.include_volume(new_volume)
return new_volume
def include_config_map(self, name: str, config_name: str) -> ConfigMap:
new_config_map = ConfigMap(name=name, config_name=config_name)
self.deployment.include_volume(volume=new_config_map)
return new_config_map
def add_empty_dir(self, name: str, size_limit: str, use_memory: bool = False) -> EmptyDir:
new_empty_dir = EmptyDir(name=name, size_limit=size_limit, use_memory=use_memory)
self.deployment.include_volume(volume=new_empty_dir)
return new_empty_dir
def add_service(self, service_name) -> Service:
new_service = Service(name=service_name, deployment=self.deployment)
self.service_collection.add_resource(new_service)
return new_service
def add_ingress(self, name: str, tls: bool = False, labels: dict = None) -> Ingress:
new_ingress = Ingress(name, tls, labels)
self.ingress_collection.add_resource(new_ingress)
return new_ingress
def render(self) -> list:
app = App()
combined_resource_collection = ChartResourceCollection.combine([self.service_collection, self.ingress_collection])
chart_json = self.__EasyChart(app, self.deployment, combined_resource_collection).to_json()
yaml_manifest = ""
for manifest in chart_json:
yaml_manifest += f"---\n{yaml.dump(yaml.load(json.dumps(manifest), Loader=yaml.FullLoader))}"
return yaml_manifest
class __EasyChart(Chart):
def __init__(self, scope: Construct, chart_deployment: Deployment, chart_resources: ChartResourceCollection):
super().__init__(scope, chart_deployment.name)
self.name = chart_deployment.name
self.scope = scope
chart_deployment.render(self)
chart_resources.render(self) | 39.738095 | 118 | 0.753745 |
be0fa2637fe3d8da43605b3154dc9c3aea694efb | 4,296 | py | Python | tests/pruning/test_utils.py | krodyush/nncf | 476a274a90a3f2f1ace7a4cb0c9d90d1ddeb7f6a | [
"Apache-2.0"
] | null | null | null | tests/pruning/test_utils.py | krodyush/nncf | 476a274a90a3f2f1ace7a4cb0c9d90d1ddeb7f6a | [
"Apache-2.0"
] | null | null | null | tests/pruning/test_utils.py | krodyush/nncf | 476a274a90a3f2f1ace7a4cb0c9d90d1ddeb7f6a | [
"Apache-2.0"
] | 1 | 2021-04-05T09:33:51.000Z | 2021-04-05T09:33:51.000Z | """
Copyright (c) 2020 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import pytest
from nncf.dynamic_graph.context import Scope
from nncf.pruning.filter_pruning.algo import FilterPruningBuilder
from nncf.pruning.utils import get_rounded_pruned_element_number, get_bn_for_module_scope, \
get_first_pruned_modules, get_last_pruned_modules
from tests.pruning.helpers import get_basic_pruning_config, BigPruningTestModel, \
PruningTestModelBranching
from tests.helpers import create_compressed_model_and_algo_for_test
# pylint: disable=protected-access
@pytest.mark.parametrize("total,sparsity_rate,multiple_of,ref",
[(20, 0.2, None, 4),
(20, 0.2, 8, 4),
(20, 0.1, 2, 2),
(20, 0.1, 5, 0),
(20, 0.5, None, 4)
])
def test_get_rounded_pruned_element_number(total, sparsity_rate, multiple_of, ref):
if multiple_of is not None:
result = get_rounded_pruned_element_number(total, sparsity_rate, multiple_of)
else:
result = get_rounded_pruned_element_number(total, sparsity_rate)
assert ref == result
if multiple_of is not None:
assert (total - result) % multiple_of == 0
def test_get_bn_for_module_scope():
config = get_basic_pruning_config(input_sample_size=[1, 1, 8, 8])
config['compression']['algorithm'] = 'filter_pruning'
pruned_model, _ = create_compressed_model_and_algo_for_test(BigPruningTestModel(), config)
conv1_scope = Scope.from_str('BigPruningTestModel/NNCFConv2d[conv1]')
bn = get_bn_for_module_scope(pruned_model, conv1_scope)
assert bn is None
conv2_scope = Scope.from_str('BigPruningTestModel/NNCFConv2d[conv2]')
bn = get_bn_for_module_scope(pruned_model, conv2_scope)
assert bn == pruned_model.bn
conv3_scope = Scope.from_str('BigPruningTestModel/NNCFConv2d[conv3]')
bn = get_bn_for_module_scope(pruned_model, conv3_scope)
assert bn is None
@pytest.mark.parametrize(('model', 'ref_first_module_names'),
[(BigPruningTestModel, ['conv1']),
(PruningTestModelBranching, ['conv1', 'conv2', 'conv3']),
],
)
def test_get_first_pruned_layers(model, ref_first_module_names):
config = get_basic_pruning_config(input_sample_size=[1, 1, 8, 8])
config['compression']['algorithm'] = 'filter_pruning'
pruned_model, _ = create_compressed_model_and_algo_for_test(model(), config)
first_pruned_modules = get_first_pruned_modules(pruned_model,
FilterPruningBuilder(config).get_types_of_pruned_modules())
ref_first_modules = [getattr(pruned_model, module_name) for module_name in ref_first_module_names]
assert set(first_pruned_modules) == set(ref_first_modules)
@pytest.mark.parametrize(('model', 'ref_last_module_names'),
[(BigPruningTestModel, ['conv3']),
(PruningTestModelBranching, ['conv4', 'conv5']
),
],
)
def test_get_last_pruned_layers(model, ref_last_module_names):
config = get_basic_pruning_config(input_sample_size=[1, 1, 8, 8])
config['compression']['algorithm'] = 'filter_pruning'
pruned_model, _ = create_compressed_model_and_algo_for_test(model(), config)
first_pruned_modules = get_last_pruned_modules(pruned_model,
FilterPruningBuilder(config).get_types_of_pruned_modules())
ref_last_modules = [getattr(pruned_model, module_name) for module_name in ref_last_module_names]
assert set(first_pruned_modules) == set(ref_last_modules)
| 46.695652 | 111 | 0.685987 |
9b19b628187101a1820fc3f7f813d77272bd6754 | 5,659 | py | Python | extractFastaByName.py | Tong-Chen/easyTranscriptome | f03c9f708faf63735d53f1e2314c8f5ef233b3cd | [
"Apache-2.0"
] | 1 | 2021-08-16T03:16:11.000Z | 2021-08-16T03:16:11.000Z | extractFastaByName.py | Tong-Chen/easyTranscriptome | f03c9f708faf63735d53f1e2314c8f5ef233b3cd | [
"Apache-2.0"
] | null | null | null | extractFastaByName.py | Tong-Chen/easyTranscriptome | f03c9f708faf63735d53f1e2314c8f5ef233b3cd | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#from __future__ import division, with_statement
'''
Copyright 2013, 陈同 (chentong_biology@163.com).
===========================================================
'''
__author__ = 'chentong & ct586[9]'
__author_email__ = 'chentong_biology@163.com'
#=========================================================
desc = '''
Functional description:
This is designed to extract FASTA seq by names.
'''
import sys
import os
from json import dumps as json_dumps
from time import localtime, strftime
timeformat = "%Y-%m-%d %H:%M:%S"
from optparse import OptionParser as OP
#from multiprocessing.dummy import Pool as ThreadPool
debug = 0
def fprint(content):
print(json_dumps(content,indent=1))
def cmdparameter(argv):
if len(argv) == 1:
global desc
print(desc, file=sys.stderr)
cmd = 'python ' + argv[0] + ' -h'
os.system(cmd)
sys.exit(1)
usages = "%prog -i file"
parser = OP(usage=usages)
parser.add_option("-i", "--input-file", dest="filein",
metavar="FILEIN", help="The FASTA file")
parser.add_option("-s", "--separator", dest="sep",
default="IdoNotThinkThisWillAppear, DoyouThinkSo",
metavar="SEPARATOR", help="The separator used to get ID names. \
Default full line (no splitting) except leading > and trailing '\\n' is \
used as ID names. Please use <tab> to specify '\\t' as separtor.")
parser.add_option("-F", "--first-x-words", dest="count",
default=1, help="Default 1 means extracting the first \
word before separator. Accept other number (x) to extract the \
first x words.")
parser.add_option("-n", "--name-list", dest="name",
help="One or several columns file containing ID lists in one column.")
parser.add_option("-c", "--name-col-index", dest="name_col_ix",
default=1, type='int',
help="Specify the columns containing IDs. Default 1 representing the first column.")
parser.add_option("-r", "--rename-col-index", dest="rename_col_ix",
default=0, type='int',
help="Specify the columns containing IDs. Default 0 representing no rename.")
parser.add_option("-v", "--verbose", dest="verbose",
default=0, help="Show process information")
parser.add_option("-d", "--debug", dest="debug",
default=False, action="store_true", help="Debug the program")
(options, args) = parser.parse_args(argv[1:])
assert options.filein != None, "A filename needed for -i"
return (options, args)
#--------------------------------------------------------------------
def main():
options, args = cmdparameter(sys.argv)
#-----------------------------------
file = options.filein
global debug
debug = options.debug
sep = options.sep
#print "*%s*" % sep
if sep == 'tab':
sep = "\t"
count = int(options.count)
nameF = options.name
nameC = options.name_col_ix-1
rename_col_ix = options.rename_col_ix
if rename_col_ix:
rename_col_ix = rename_col_ix - 1
else:
rename_col_ix = nameC
nameD = dict([[line.strip().split()[nameC], line.strip().split()[rename_col_ix]] \
for line in open(nameF)])
if debug:
print(list(nameD.keys()), file=sys.stderr)
#print nameD.keys()[:5]
verbose = options.verbose
#-----------------------------------
if file == '-':
fh = sys.stdin
else:
fh = open(file)
#--------------------------------
output = 0
tmpL = []
oldline = ''
for line in fh:
#print(line)
if line[0] == '>':
if output and tmpL and oldline:
print(oldline.strip())
print(''.join(tmpL))
tmpL = []
output = 0
key = sep.join(line[1:].strip().split(sep)[:count])
#print key
#break
oldline = line
#if key in nameD:
# nameD.pop(key)
# output = 1
output = nameD.pop(key, 0)
if output and rename_col_ix != nameC:
oldline = oldline.strip() + ' ' + output
if debug:
print(key, output, file=sys.stderr)
else:
if output:
tmpL.append(line.strip())
#-------------END reading file----------
if output and tmpL and oldline:
print(oldline, end=' ')
print(''.join(tmpL))
tmpL = []
output = 0
#----close file handle for files-----
if file != '-':
fh.close()
#-----------end close fh-----------
if nameD:
print("The following IDs have no sequences found", file=sys.stderr)
print('\n'.join(list(nameD.keys())), file=sys.stderr)
###--------multi-process------------------
#pool = ThreadPool(5) # 5 represents thread_num
#result = pool.map(func, iterable_object)
#pool.close()
#pool.join()
###--------multi-process------------------
if verbose:
print("--Successful %s" % strftime(timeformat, localtime()), file=sys.stderr)
if __name__ == '__main__':
startTime = strftime(timeformat, localtime())
main()
endTime = strftime(timeformat, localtime())
fh = open('python.log', 'a')
print("%s\n\tRun time : %s - %s " % \
(' '.join(sys.argv), startTime, endTime), file=fh)
fh.close()
###---------profile the program---------
#import profile
#profile_output = sys.argv[0]+".prof.txt")
#profile.run("main()", profile_output)
#import pstats
#p = pstats.Stats(profile_output)
#p.sort_stats("time").print_stats()
###---------profile the program---------
| 34.506098 | 92 | 0.548153 |
22480fd6876e3a754f10f2615b86ab53efee00b3 | 3,250 | py | Python | Lib/ufo2ft/filters/explodeColorLayerGlyphs.py | JeremieHornus/ufo2ft | 6df17f97ba0730b79fb6bb5a0a51c3afaf7338fc | [
"MIT"
] | null | null | null | Lib/ufo2ft/filters/explodeColorLayerGlyphs.py | JeremieHornus/ufo2ft | 6df17f97ba0730b79fb6bb5a0a51c3afaf7338fc | [
"MIT"
] | null | null | null | Lib/ufo2ft/filters/explodeColorLayerGlyphs.py | JeremieHornus/ufo2ft | 6df17f97ba0730b79fb6bb5a0a51c3afaf7338fc | [
"MIT"
] | null | null | null | from ufo2ft.filters import BaseFilter
from ufo2ft.util import _GlyphSet
from ufo2ft.constants import COLOR_LAYERS_KEY, COLOR_LAYER_MAPPING_KEY
class ExplodeColorLayerGlyphsFilter(BaseFilter):
""" This filter doesn't really filter glyphs, but copies glyphs
from UFO layers to alternate glyphs in the default layer, for use
in the COLR table.
"""
def set_context(self, font, glyphSet):
context = super().set_context(font, glyphSet)
context.globalColorLayerMapping = font.lib.get(COLOR_LAYER_MAPPING_KEY)
context.layerGlyphSets = {}
context.colorLayerGlyphNames = set() # glyph names that we added
if COLOR_LAYERS_KEY not in font.lib:
font.lib[COLOR_LAYERS_KEY] = {}
else:
# if the font already contains an explicit COLOR_LAYERS_KEY, we
# assume the color layers have already been 'exploded' once.
context.skipCurrentFont = True
return context
def _getLayer(self, font, layerName):
layer = self.context.layerGlyphSets.get(layerName)
if layer is None:
layer = _GlyphSet.from_layer(font, layerName)
self.context.layerGlyphSets[layerName] = layer
return layer
def _copyGlyph(self, layerGlyphSet, glyphSet, glyphName, layerName):
layerGlyph = layerGlyphSet[glyphName]
layerGlyphName = f"{glyphName}.{layerName}"
if layerGlyphName in glyphSet:
if layerGlyphName in self.context.colorLayerGlyphNames:
# We've added this glyph already, so we're done
return layerGlyphName
from ufo2ft.errors import InvalidFontData
raise InvalidFontData(
f"a glyph named {layerGlyphName} already exists, "
"conflicting with a requested color layer glyph."
)
for component in layerGlyph.components:
baseLayerGlyphName = self._copyGlyph(
layerGlyphSet, glyphSet, component.baseGlyph, layerName
)
component.baseGlyph = baseLayerGlyphName
glyphSet[layerGlyphName] = layerGlyph
self.context.colorLayerGlyphNames.add(layerGlyphName)
return layerGlyphName
def filter(self, glyph):
if getattr(self.context, "skipCurrentFont", False):
return False
font = self.context.font
glyphSet = self.context.glyphSet
colorLayers = font.lib[COLOR_LAYERS_KEY]
colorLayerMapping = glyph.lib.get(COLOR_LAYER_MAPPING_KEY)
if colorLayerMapping is None:
colorLayerMapping = self.context.globalColorLayerMapping
if colorLayerMapping is None:
# No color layer info for this glyph
return False
layers = []
for layerName, colorID in colorLayerMapping:
layerGlyphSet = self._getLayer(font, layerName)
if glyph.name in layerGlyphSet:
layerGlyphName = self._copyGlyph(
layerGlyphSet, glyphSet, glyph.name, layerName
)
layers.append((layerGlyphName, colorID))
if layers:
colorLayers[glyph.name] = layers
return True
else:
return False
| 40.123457 | 79 | 0.645231 |
d8e19368f55f5ad61249fbcca51be9dcbe58ad06 | 866 | py | Python | posts/migrations/0006_pollfile.py | TomerNewmanPrograms/ResuMe | 9d9b7369625fcb044f91cde86c85c91fdaf44ddd | [
"MIT"
] | null | null | null | posts/migrations/0006_pollfile.py | TomerNewmanPrograms/ResuMe | 9d9b7369625fcb044f91cde86c85c91fdaf44ddd | [
"MIT"
] | null | null | null | posts/migrations/0006_pollfile.py | TomerNewmanPrograms/ResuMe | 9d9b7369625fcb044f91cde86c85c91fdaf44ddd | [
"MIT"
] | null | null | null | # Generated by Django 4.0.3 on 2022-03-31 11:01
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('posts', '0005_poll'),
]
operations = [
migrations.CreateModel(
name='PollFile',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('file', models.FileField(
default=None, upload_to='files',
validators=[django.core.validators.FileExtensionValidator(
allowed_extensions=['pdf', 'png', 'jpg', 'jpeg'])])),
('poll', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='posts.poll')),
],
),
]
| 32.074074 | 117 | 0.583141 |
d4d8db7183ad4377fce14d182399a2232e35a69b | 12,797 | py | Python | src/silx/gui/plot3d/test/testSceneWidgetPicking.py | rnwatanabe/silx | b0395f4a06c048b7778dc04ada828edd195ef02d | [
"CC0-1.0",
"MIT"
] | 94 | 2016-03-04T17:25:53.000Z | 2022-03-18T18:05:23.000Z | src/silx/gui/plot3d/test/testSceneWidgetPicking.py | rnwatanabe/silx | b0395f4a06c048b7778dc04ada828edd195ef02d | [
"CC0-1.0",
"MIT"
] | 2,841 | 2016-01-21T09:06:49.000Z | 2022-03-18T14:53:56.000Z | src/silx/gui/plot3d/test/testSceneWidgetPicking.py | rnwatanabe/silx | b0395f4a06c048b7778dc04ada828edd195ef02d | [
"CC0-1.0",
"MIT"
] | 71 | 2015-09-30T08:35:35.000Z | 2022-03-16T07:16:28.000Z | # coding: utf-8
# /*##########################################################################
#
# Copyright (c) 2018-2019 European Synchrotron Radiation Facility
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ###########################################################################*/
"""Test SceneWidget picking feature"""
__authors__ = ["T. Vincent"]
__license__ = "MIT"
__date__ = "03/10/2018"
import unittest
import numpy
from silx.utils.testutils import ParametricTestCase
from silx.gui.utils.testutils import TestCaseQt
from silx.gui import qt
from silx.gui.plot3d.SceneWidget import SceneWidget, items
class TestSceneWidgetPicking(TestCaseQt, ParametricTestCase):
"""Tests SceneWidget picking feature"""
def setUp(self):
super(TestSceneWidgetPicking, self).setUp()
self.widget = SceneWidget()
self.widget.resize(300, 300)
self.widget.show()
# self.qWaitForWindowExposed(self.widget)
def tearDown(self):
self.qapp.processEvents()
self.widget.setAttribute(qt.Qt.WA_DeleteOnClose)
self.widget.close()
del self.widget
super(TestSceneWidgetPicking, self).tearDown()
def _widgetCenter(self):
"""Returns widget center"""
size = self.widget.size()
return size.width() // 2, size.height() // 2
def testPickImage(self):
"""Test picking of ImageData and ImageRgba items"""
imageData = items.ImageData()
imageData.setData(numpy.arange(100).reshape(10, 10))
imageRgba = items.ImageRgba()
imageRgba.setData(
numpy.arange(300, dtype=numpy.uint8).reshape(10, 10, 3))
for item in (imageData, imageRgba):
with self.subTest(item=item.__class__.__name__):
# Add item
self.widget.clearItems()
self.widget.addItem(item)
self.widget.resetZoom('front')
self.qapp.processEvents()
# Picking on data (at widget center)
picking = list(self.widget.pickItems(*self._widgetCenter()))
self.assertEqual(len(picking), 1)
self.assertIs(picking[0].getItem(), item)
self.assertEqual(picking[0].getPositions('ndc').shape, (1, 3))
data = picking[0].getData()
self.assertEqual(len(data), 1)
self.assertTrue(numpy.array_equal(
data,
item.getData()[picking[0].getIndices()]))
# Picking outside data
picking = list(self.widget.pickItems(1, 1))
self.assertEqual(len(picking), 0)
def testPickScatter(self):
"""Test picking of Scatter2D and Scatter3D items"""
data = numpy.arange(100)
scatter2d = items.Scatter2D()
scatter2d.setData(x=data, y=data, value=data)
scatter3d = items.Scatter3D()
scatter3d.setData(x=data, y=data, z=data, value=data)
for item in (scatter2d, scatter3d):
with self.subTest(item=item.__class__.__name__):
# Add item
self.widget.clearItems()
self.widget.addItem(item)
self.widget.resetZoom('front')
self.qapp.processEvents()
# Picking on data (at widget center)
picking = list(self.widget.pickItems(*self._widgetCenter()))
self.assertEqual(len(picking), 1)
self.assertIs(picking[0].getItem(), item)
nbPos = len(picking[0].getPositions('ndc'))
data = picking[0].getData()
self.assertEqual(nbPos, len(data))
self.assertTrue(numpy.array_equal(
data,
item.getValueData()[picking[0].getIndices()]))
# Picking outside data
picking = list(self.widget.pickItems(1, 1))
self.assertEqual(len(picking), 0)
def testPickVolume(self):
"""Test picking of volume CutPlane and Isosurface items"""
for dtype in (numpy.float32, numpy.complex64):
with self.subTest(dtype=dtype):
refData = numpy.arange(10**3, dtype=dtype).reshape(10, 10, 10)
volume = self.widget.addVolume(refData)
if dtype == numpy.complex64:
volume.setComplexMode(volume.ComplexMode.REAL)
refData = numpy.real(refData)
self.widget.resetZoom('front')
cutplane = volume.getCutPlanes()[0]
if dtype == numpy.complex64:
cutplane.setComplexMode(volume.ComplexMode.REAL)
cutplane.getColormap().setVRange(0, 100)
cutplane.setNormal((0, 0, 1))
# Picking on data without anything displayed
cutplane.setVisible(False)
picking = list(self.widget.pickItems(*self._widgetCenter()))
self.assertEqual(len(picking), 0)
# Picking on data with the cut plane
cutplane.setVisible(True)
picking = list(self.widget.pickItems(*self._widgetCenter()))
self.assertEqual(len(picking), 1)
self.assertIs(picking[0].getItem(), cutplane)
data = picking[0].getData()
self.assertEqual(len(data), 1)
self.assertEqual(picking[0].getPositions().shape, (1, 3))
self.assertTrue(numpy.array_equal(
data,
refData[picking[0].getIndices()]))
# Picking on data with an isosurface
isosurface = volume.addIsosurface(
level=500, color=(1., 0., 0., .5))
picking = list(self.widget.pickItems(*self._widgetCenter()))
self.assertEqual(len(picking), 2)
self.assertIs(picking[0].getItem(), cutplane)
self.assertIs(picking[1].getItem(), isosurface)
self.assertEqual(picking[1].getPositions().shape, (1, 3))
data = picking[1].getData()
self.assertEqual(len(data), 1)
self.assertTrue(numpy.array_equal(
data,
refData[picking[1].getIndices()]))
# Picking outside data
picking = list(self.widget.pickItems(1, 1))
self.assertEqual(len(picking), 0)
self.widget.clearItems()
def testPickMesh(self):
"""Test picking of Mesh items"""
triangles = items.Mesh()
triangles.setData(
position=((0, 0, 0), (1, 0, 0), (1, 1, 0),
(0, 0, 0), (1, 1, 0), (0, 1, 0)),
color=(1, 0, 0, 1),
mode='triangles')
triangleStrip = items.Mesh()
triangleStrip.setData(
position=(((1, 0, 0), (0, 0, 0), (1, 1, 0), (0, 1, 0))),
color=(0, 1, 0, 1),
mode='triangle_strip')
triangleFan = items.Mesh()
triangleFan.setData(
position=((0, 0, 0), (1, 0, 0), (1, 1, 0), (0, 1, 0)),
color=(0, 0, 1, 1),
mode='fan')
for item in (triangles, triangleStrip, triangleFan):
with self.subTest(mode=item.getDrawMode()):
# Add item
self.widget.clearItems()
self.widget.addItem(item)
self.widget.resetZoom('front')
self.qapp.processEvents()
# Picking on data (at widget center)
picking = list(self.widget.pickItems(*self._widgetCenter()))
self.assertEqual(len(picking), 1)
self.assertIs(picking[0].getItem(), item)
nbPos = len(picking[0].getPositions())
data = picking[0].getData()
self.assertEqual(nbPos, len(data))
self.assertTrue(numpy.array_equal(
data,
item.getPositionData()[picking[0].getIndices()]))
# Picking outside data
picking = list(self.widget.pickItems(1, 1))
self.assertEqual(len(picking), 0)
def testPickMeshWithIndices(self):
"""Test picking of Mesh items defined by indices"""
triangles = items.Mesh()
triangles.setData(
position=((0, 0, 0), (1, 0, 0), (0, 1, 0), (1, 1, 0)),
color=(1, 0, 0, 1),
indices=numpy.array( # dummy triangles and square
(0, 0, 1, 0, 1, 2, 1, 2, 3), dtype=numpy.uint8),
mode='triangles')
triangleStrip = items.Mesh()
triangleStrip.setData(
position=((0, 0, 0), (1, 0, 0), (0, 1, 0), (1, 1, 0)),
color=(0, 1, 0, 1),
indices=numpy.array( # dummy triangles and square
(1, 0, 0, 1, 2, 3), dtype=numpy.uint8),
mode='triangle_strip')
triangleFan = items.Mesh()
triangleFan.setData(
position=((0, 0, 0), (1, 0, 0), (0, 1, 0), (1, 1, 0)),
color=(0, 0, 1, 1),
indices=numpy.array( # dummy triangle, square, dummy
(1, 1, 0, 2, 3, 3), dtype=numpy.uint8),
mode='fan')
for item in (triangles, triangleStrip, triangleFan):
with self.subTest(mode=item.getDrawMode()):
# Add item
self.widget.clearItems()
self.widget.addItem(item)
self.widget.resetZoom('front')
self.qapp.processEvents()
# Picking on data (at widget center)
picking = list(self.widget.pickItems(*self._widgetCenter()))
self.assertEqual(len(picking), 1)
self.assertIs(picking[0].getItem(), item)
nbPos = len(picking[0].getPositions())
data = picking[0].getData()
self.assertEqual(nbPos, len(data))
self.assertTrue(numpy.array_equal(
data,
item.getPositionData()[picking[0].getIndices()]))
# Picking outside data
picking = list(self.widget.pickItems(1, 1))
self.assertEqual(len(picking), 0)
def testPickCylindricalMesh(self):
"""Test picking of Box, Cylinder and Hexagon items"""
positions = numpy.array(((0., 0., 0.), (1., 1., 0.), (2., 2., 0.)))
box = items.Box()
box.setData(position=positions)
cylinder = items.Cylinder()
cylinder.setData(position=positions)
hexagon = items.Hexagon()
hexagon.setData(position=positions)
for item in (box, cylinder, hexagon):
with self.subTest(item=item.__class__.__name__):
# Add item
self.widget.clearItems()
self.widget.addItem(item)
self.widget.resetZoom('front')
self.qapp.processEvents()
# Picking on data (at widget center)
picking = list(self.widget.pickItems(*self._widgetCenter()))
self.assertEqual(len(picking), 1)
self.assertIs(picking[0].getItem(), item)
nbPos = len(picking[0].getPositions())
data = picking[0].getData()
print(item.__class__.__name__, [positions[1]], data)
self.assertTrue(numpy.all(numpy.equal(positions[1], data)))
self.assertEqual(nbPos, len(data))
self.assertTrue(numpy.array_equal(
data,
item.getPosition()[picking[0].getIndices()]))
# Picking outside data
picking = list(self.widget.pickItems(1, 1))
self.assertEqual(len(picking), 0)
| 40.625397 | 79 | 0.549895 |
45c05ed74627650c41e236aa20d5b1b51639252d | 12,518 | py | Python | mindspore/_check_version.py | dongkcs/mindspore | cd7df6dbf463ff3128e9181e9d0c779cecb81320 | [
"Apache-2.0"
] | 2 | 2020-11-23T13:46:37.000Z | 2020-12-20T02:02:38.000Z | mindspore/_check_version.py | dongkcs/mindspore | cd7df6dbf463ff3128e9181e9d0c779cecb81320 | [
"Apache-2.0"
] | null | null | null | mindspore/_check_version.py | dongkcs/mindspore | cd7df6dbf463ff3128e9181e9d0c779cecb81320 | [
"Apache-2.0"
] | 1 | 2021-01-01T08:35:01.000Z | 2021-01-01T08:35:01.000Z | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""version and config check"""
import os
import sys
from pathlib import Path
from abc import abstractmethod, ABCMeta
from packaging import version
from . import log as logger
from .version import __version__
from .default_config import __package_name__
class EnvChecker(metaclass=ABCMeta):
"""basic class for environment check"""
@abstractmethod
def check_env(self, e):
pass
@abstractmethod
def set_env(self):
pass
@abstractmethod
def check_version(self):
pass
class GPUEnvChecker(EnvChecker):
"""gpu environment check"""
def __init__(self):
self.version = ["10.1"]
self.cuda_path = "/usr/local/cuda"
if os.path.exists(self.cuda_path):
# cuda default path
self.cuda_bin = self.cuda_path + "/bin"
self.cuda_lib = self.cuda_path + "/lib64"
self.cuda_version = self.cuda_path + "/version.txt"
else:
# custom or unknown environment
self.cuda_path = ""
self.cuda_bin = ""
self.cuda_lib = ""
self.cuda_version = ""
# env
self.path = os.getenv("PATH")
self.ld_lib_path = os.getenv("LD_LIBRARY_PATH")
# check
self.path_check = "/cuda"
self.ld_lib_path_check = "/cuda"
self.v = "0"
def check_env(self, e):
self._check_env()
raise e
def set_env(self):
if not self.cuda_bin:
self._check_env()
return
if Path(self.cuda_bin).is_dir():
os.environ['PATH'] = self.cuda_bin + ":" + os.environ['PATH']
else:
raise EnvironmentError(
f"No such directory: {self.cuda_bin}, please check if cuda is installed correctly.")
def check_version(self):
if not Path(self.cuda_version).is_file():
logger.warning("Using custom cuda path, cuda version checking is skiped, please make sure "
"cuda version is supported, you can reference to the installation guidelines "
"https://www.mindspore.cn/install")
return
v = self._read_version(self.cuda_version)
v = version.parse(v)
v_str = str(v.major) + "." + str(v.minor)
if v_str not in self.version:
logger.warning(f"MindSpore version {__version__} and cuda version {v_str} does not match, "
"reference to the match info on: https://www.mindspore.cn/install")
def _check_env(self):
"""gpu cuda path check"""
if self.path is None or self.path_check not in self.path:
logger.warning("Can not find nvcc compiler(need by mindspore-gpu), please check if you have set env "
"PATH, you can reference to the installation guidelines https://www.mindspore.cn/install")
if self.ld_lib_path is None or self.ld_lib_path_check not in self.ld_lib_path:
logger.warning("Can not find cuda so(need by mindspore-gpu), please check if you have set env "
"LD_LIBRARY_PATH, you can reference to the installation guidelines "
"https://www.mindspore.cn/install")
def _read_version(self, file_path):
"""get gpu version info"""
with open(file_path, 'r') as f:
all_info = f.readlines()
for line in all_info:
if line.startswith("CUDA Version"):
self.v = line.strip().split("CUDA Version")[1]
return self.v
return self.v
class AscendEnvChecker(EnvChecker):
"""ascend environment check"""
def __init__(self):
self.version = ["1.75.22.0.220"]
atlas_nnae_version = "/usr/local/Ascend/nnae/latest/fwkacllib/version.info"
atlas_toolkit_version = "/usr/local/Ascend/ascend-toolkit/latest/fwkacllib/version.info"
hisi_fwk_version = "/usr/local/Ascend/fwkacllib/version.info"
if os.path.exists(atlas_nnae_version):
# atlas default path
self.fwk_path = "/usr/local/Ascend/nnae/latest/fwkacllib"
self.op_impl_path = "/usr/local/Ascend/nnae/latest/opp/op_impl/built-in/ai_core/tbe"
self.tbe_path = self.fwk_path + "/lib64"
self.cce_path = self.fwk_path + "/ccec_compiler/bin"
self.fwk_version = atlas_nnae_version
self.op_path = "/usr/local/Ascend/nnae/latest/opp"
elif os.path.exists(atlas_toolkit_version):
# atlas default path
self.fwk_path = "/usr/local/Ascend/ascend-toolkit/latest/fwkacllib"
self.op_impl_path = "/usr/local/Ascend/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe"
self.tbe_path = self.fwk_path + "/lib64"
self.cce_path = self.fwk_path + "/ccec_compiler/bin"
self.fwk_version = atlas_toolkit_version
self.op_path = "/usr/local/Ascend/ascend-toolkit/latest/opp"
elif os.path.exists(hisi_fwk_version):
# hisi default path
self.fwk_path = "/usr/local/Ascend/fwkacllib"
self.op_impl_path = "/usr/local/Ascend/opp/op_impl/built-in/ai_core/tbe"
self.tbe_path = self.fwk_path + "/lib64"
self.cce_path = self.fwk_path + "/ccec_compiler/bin"
self.fwk_version = hisi_fwk_version
self.op_path = ""
else:
# custom or unknown environment
self.fwk_path = ""
self.op_impl_path = ""
self.tbe_path = ""
self.cce_path = ""
self.fwk_version = ""
self.op_path = ""
# env
self.path = os.getenv("PATH")
self.python_path = os.getenv("PYTHONPATH")
self.ld_lib_path = os.getenv("LD_LIBRARY_PATH")
self.ascend_opp_path = os.getenv("ASCEND_OPP_PATH")
# check content
self.path_check = "/fwkacllib/ccec_compiler/bin/"
self.python_path_check = "opp/op_impl/built-in/ai_core/tbe/"
self.ld_lib_path_check_fwk = "/fwkacllib/lib64/"
self.ld_lib_path_check_addons = "/add-ons/"
self.ascend_opp_path_check = "/op"
self.v = ""
def check_env(self, e):
self._check_env()
raise e
def check_version(self):
if not Path(self.fwk_version).is_file():
logger.warning("Using custom Ascend 910 AI software package path, package version checking is skiped, "
"please make sure Ascend 910 AI software package version is supported, you can reference to "
"the installation guidelines https://www.mindspore.cn/install")
return
v = self._read_version(self.fwk_version)
if v not in self.version:
logger.warning(f"MindSpore version {__version__} and Ascend 910 AI software package version {v} does not "
"match, reference to the match info on: https://www.mindspore.cn/install")
def set_env(self):
if not self.tbe_path:
self._check_env()
return
try:
# pylint: disable=unused-import
import te
except RuntimeError:
if Path(self.tbe_path).is_dir():
os.environ['LD_LIBRARY_PATH'] = self.tbe_path
else:
raise EnvironmentError(
f"No such directory: {self.tbe_path}, Please check if Ascend 910 AI software package is "
"installed correctly.")
if Path(self.op_impl_path).is_dir():
sys.path.append(self.op_impl_path)
else:
raise EnvironmentError(
f"No such directory: {self.op_impl_path}, Please check if Ascend 910 AI software package is "
"installed correctly.")
if Path(self.cce_path).is_dir():
os.environ['PATH'] = self.cce_path + ":" + os.environ['PATH']
else:
raise EnvironmentError(
f"No such directory: {self.cce_path}, Please check if Ascend 910 AI software package is "
"installed correctly.")
if self.op_path is None:
pass
elif Path(self.op_path).is_dir():
os.environ['ASCEND_OPP_PATH'] = self.op_path
else:
raise EnvironmentError(
f"No such directory: {self.op_path}, Please check if Ascend 910 AI software package is "
"installed correctly.")
def _check_env(self):
"""ascend dependence path check"""
if self.path is None or self.path_check not in self.path:
logger.warning("Can not find ccec_compiler(need by mindspore-ascend), please check if you have set env "
"PATH, you can reference to the installation guidelines https://www.mindspore.cn/install")
if self.python_path is None or self.python_path_check not in self.python_path:
logger.warning(
"Can not find tbe op implement(need by mindspore-ascend), please check if you have set env "
"PYTHONPATH, you can reference to the installation guidelines "
"https://www.mindspore.cn/install")
if self.ld_lib_path is None or not (self.ld_lib_path_check_fwk in self.ld_lib_path and
self.ld_lib_path_check_addons in self.ld_lib_path):
logger.warning("Can not find driver so(need by mindspore-ascend), please check if you have set env "
"LD_LIBRARY_PATH, you can reference to the installation guidelines "
"https://www.mindspore.cn/install")
if self.ascend_opp_path is None or self.ascend_opp_path_check not in self.ascend_opp_path:
logger.warning(
"Can not find opp path (need by mindspore-ascend), please check if you have set env ASCEND_OPP_PATH, "
"you can reference to the installation guidelines https://www.mindspore.cn/install")
def _read_version(self, file_path):
"""get ascend version info"""
with open(file_path, 'r') as f:
all_info = f.readlines()
for line in all_info:
if line.startswith("Version="):
self.v = line.strip().split("=")[1]
return self.v
return self.v
def check_version_and_env_config():
"""check version and env config"""
if __package_name__.lower() == "mindspore-ascend":
env_checker = AscendEnvChecker()
elif __package_name__.lower() == "mindspore-gpu":
env_checker = GPUEnvChecker()
else:
logger.info(f"Package version {__package_name__} does not need to check any environment variable, skipping.")
return
try:
# pylint: disable=unused-import
from . import _c_expression
# check version of ascend site or cuda
env_checker.check_version()
env_checker.set_env()
except ImportError as e:
env_checker.check_env(e)
def _set_pb_env():
"""Set env variable `PROTOCOL_BUFFERS` to prevent memory overflow."""
if os.getenv("PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION") == "cpp":
logger.warning("Current env variable `PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=cpp`. "
"When the checkpoint file is too large, "
"it may cause memory limit error durning load checkpoint file. "
"This can be solved by set env `PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python`.")
elif os.getenv("PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION") is None:
logger.warning("Set the env `PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python` to prevent memory overflow.")
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"
check_version_and_env_config()
_set_pb_env()
| 42.006711 | 120 | 0.612957 |
dbe4cce127e9f102ad6648709b2a0aeba2c89018 | 532 | py | Python | docs/examples/point_120cells/plot_output.py | aaberbach/bmtk | 42aa70ce2003227a32df6ce5a95420dbf4bdfbd4 | [
"BSD-3-Clause"
] | 216 | 2017-10-03T17:02:42.000Z | 2022-03-20T03:35:48.000Z | docs/examples/point_120cells/plot_output.py | moekay/bmtk | 6efdf6387d2a6badf276b917ee15d238daeae883 | [
"BSD-3-Clause"
] | 70 | 2017-10-05T00:50:41.000Z | 2022-03-30T18:55:01.000Z | docs/examples/point_120cells/plot_output.py | moekay/bmtk | 6efdf6387d2a6badf276b917ee15d238daeae883 | [
"BSD-3-Clause"
] | 97 | 2017-10-03T22:15:06.000Z | 2022-03-23T21:03:26.000Z | import matplotlib.pyplot as plt
from bmtk.analyzer.compartment import plot_traces
from bmtk.analyzer.spike_trains import plot_raster, plot_rates_boxplot
# Setting show to False so we can display all the plots at the same time
plot_raster(config_file='config.json', group_by='pop_name', show=False)
plot_rates_boxplot(config_file='config.json', group_by='pop_name', show=False)
plot_traces(config_file='config.json', report_name='membrane_potential', group_by='pop_name',
times=(0.0, 200.0), show=False)
plt.show()
| 33.25 | 93 | 0.780075 |
1a65f84e12e48119ee312de9916e56f84ffac043 | 4,902 | py | Python | openbook_connections/views.py | TamaraAbells/okuna-api | f87d8e80d2f182c01dbce68155ded0078ee707e4 | [
"MIT"
] | 164 | 2019-07-29T17:59:06.000Z | 2022-03-19T21:36:01.000Z | openbook_connections/views.py | TamaraAbells/okuna-api | f87d8e80d2f182c01dbce68155ded0078ee707e4 | [
"MIT"
] | 188 | 2019-03-16T09:53:25.000Z | 2019-07-25T14:57:24.000Z | openbook_connections/views.py | TamaraAbells/okuna-api | f87d8e80d2f182c01dbce68155ded0078ee707e4 | [
"MIT"
] | 80 | 2019-08-03T17:49:08.000Z | 2022-02-28T16:56:33.000Z | # Create your views here.
from django.contrib.auth import get_user_model
from django.db import transaction
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from openbook_moderation.permissions import IsNotSuspended
from openbook_common.utils.helpers import normalise_request_data
from openbook_connections.serializers import ConnectWithUserSerializer, ConnectionSerializer, \
DisconnectFromUserSerializer, UpdateConnectionSerializer, ConfirmConnectionSerializer, ConnectionUserSerializer
class Connections(APIView):
permission_classes = (IsAuthenticated,)
def get(self, request):
user = request.user
response_serializer = ConnectionSerializer(user.connections, many=True, context={"request": request})
return Response(response_serializer.data, status=status.HTTP_200_OK)
class ConnectWithUser(APIView):
permission_classes = (IsAuthenticated, IsNotSuspended)
def post(self, request):
request_data = _prepare_request_data_for_validation(request.data)
serializer = ConnectWithUserSerializer(data=request_data)
serializer.is_valid(raise_exception=True)
data = serializer.validated_data
username = data.get('username')
circles_ids = data.get('circles_ids')
user = request.user
User = get_user_model()
user_to_connect_with = User.objects.get(username=username)
with transaction.atomic():
connection = user.connect_with_user_with_id(user_to_connect_with.pk, circles_ids=circles_ids)
response_serializer = ConnectionSerializer(connection, context={"request": request})
return Response(response_serializer.data, status=status.HTTP_201_CREATED)
class DisconnectFromUser(APIView):
permission_classes = (IsAuthenticated, IsNotSuspended)
def post(self, request):
serializer = DisconnectFromUserSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
data = serializer.validated_data
username = data.get('username')
user = request.user
User = get_user_model()
user_to_disconnect_from = User.objects.get(username=username)
with transaction.atomic():
user.disconnect_from_user_with_id(user_to_disconnect_from.pk)
response_serializer = ConnectionUserSerializer(user_to_disconnect_from, context={'request': request})
return Response(response_serializer.data, status=status.HTTP_200_OK)
class UpdateConnection(APIView):
permission_classes = (IsAuthenticated, IsNotSuspended)
def post(self, request):
request_data = _prepare_request_data_for_validation(request.data)
serializer = UpdateConnectionSerializer(data=request_data)
serializer.is_valid(raise_exception=True)
data = serializer.validated_data
username = data.get('username')
circles_ids = data.get('circles_ids')
user = request.user
User = get_user_model()
user_to_update_connection_from = User.objects.get(username=username)
with transaction.atomic():
connection = user.update_connection_with_user_with_id(user_to_update_connection_from.pk,
circles_ids=circles_ids)
response_serializer = ConnectionSerializer(connection, context={'request': request})
return Response(response_serializer.data, status=status.HTTP_200_OK)
class ConfirmConnection(APIView):
permission_classes = (IsAuthenticated, IsNotSuspended)
def post(self, request):
request_data = _prepare_request_data_for_validation(request.data)
serializer = ConfirmConnectionSerializer(data=request_data)
serializer.is_valid(raise_exception=True)
data = serializer.validated_data
username = data.get('username')
circles_ids = data.get('circles_ids')
user = request.user
User = get_user_model()
user_to_confirm_connection_with = User.objects.get(username=username)
with transaction.atomic():
connection = user.confirm_connection_with_user_with_id(user_to_confirm_connection_with.pk,
circles_ids=circles_ids)
response_serializer = ConnectionSerializer(connection, context={'request': request})
return Response(response_serializer.data, status=status.HTTP_200_OK)
def _prepare_request_data_for_validation(request_data):
request_data_copy = normalise_request_data(request_data)
circles_ids = request_data_copy.get('circles_ids', None)
if isinstance(circles_ids, str):
circles_ids = circles_ids.split(',')
request_data_copy['circles_ids'] = circles_ids
return request_data_copy
| 36.857143 | 115 | 0.728682 |
fce65c0f670872f7d5d54e81c696fce662b0ff88 | 14,777 | py | Python | shadowsocks/crypto/sodium.py | rc452860/shadowsocks-mod | 71a31b5e19bedeb9024f99c1224764efa2c37b5e | [
"Apache-2.0"
] | null | null | null | shadowsocks/crypto/sodium.py | rc452860/shadowsocks-mod | 71a31b5e19bedeb9024f99c1224764efa2c37b5e | [
"Apache-2.0"
] | null | null | null | shadowsocks/crypto/sodium.py | rc452860/shadowsocks-mod | 71a31b5e19bedeb9024f99c1224764efa2c37b5e | [
"Apache-2.0"
] | 1 | 2018-05-31T02:05:04.000Z | 2018-05-31T02:05:04.000Z | #!/usr/bin/env python
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
from ctypes import c_char_p, c_int, c_uint, c_ulonglong, byref, \
create_string_buffer, c_void_p
from shadowsocks.crypto import util
from shadowsocks.crypto import aead
from shadowsocks.crypto.aead import AeadCryptoBase
__all__ = ['ciphers']
libsodium = None
loaded = False
buf = None
buf_size = 2048
# for salsa20 and chacha20 and chacha20-ietf
BLOCK_SIZE = 64
def load_libsodium(crypto_path=None):
global loaded, libsodium, buf
crypto_path = dict(crypto_path) if crypto_path else dict()
path = crypto_path.get('sodium', None)
if not aead.sodium_loaded:
aead.load_sodium(path)
if aead.sodium_loaded:
libsodium = aead.libsodium
else:
print('load libsodium again with path %s' % path)
libsodium = util.find_library('sodium', 'crypto_stream_salsa20_xor_ic',
'libsodium', path)
if libsodium is None:
raise Exception('libsodium not found')
if libsodium.sodium_init() < 0:
raise Exception('libsodium init failed')
libsodium.crypto_stream_salsa20_xor_ic.restype = c_int
libsodium.crypto_stream_salsa20_xor_ic.argtypes = (
c_void_p, c_char_p, # cipher output, msg
c_ulonglong, # msg len
c_char_p, c_ulonglong, # nonce, uint64_t initial block counter
c_char_p # key
)
libsodium.crypto_stream_chacha20_xor_ic.restype = c_int
libsodium.crypto_stream_chacha20_xor_ic.argtypes = (
c_void_p, c_char_p,
c_ulonglong,
c_char_p, c_ulonglong,
c_char_p
)
if hasattr(libsodium, 'crypto_stream_xchacha20_xor_ic'):
libsodium.crypto_stream_xchacha20_xor_ic.restype = c_int
libsodium.crypto_stream_xchacha20_xor_ic.argtypes = (
c_void_p, c_char_p,
c_ulonglong,
c_char_p, c_ulonglong,
c_char_p
)
libsodium.crypto_stream_chacha20_ietf_xor_ic.restype = c_int
libsodium.crypto_stream_chacha20_ietf_xor_ic.argtypes = (
c_void_p, c_char_p,
c_ulonglong,
c_char_p,
c_uint, # uint32_t initial counter
c_char_p
)
# chacha20-poly1305
libsodium.crypto_aead_chacha20poly1305_encrypt.restype = c_int
libsodium.crypto_aead_chacha20poly1305_encrypt.argtypes = (
c_void_p, c_void_p, # c, clen
c_char_p, c_ulonglong, # m, mlen
c_char_p, c_ulonglong, # ad, adlen
c_char_p, # nsec, not used
c_char_p, c_char_p # npub, k
)
libsodium.crypto_aead_chacha20poly1305_decrypt.restype = c_int
libsodium.crypto_aead_chacha20poly1305_decrypt.argtypes = (
c_void_p, c_void_p, # m, mlen
c_char_p, # nsec, not used
c_char_p, c_ulonglong, # c, clen
c_char_p, c_ulonglong, # ad, adlen
c_char_p, c_char_p # npub, k
)
# chacha20-ietf-poly1305, same api structure as above
libsodium.crypto_aead_chacha20poly1305_ietf_encrypt.restype = c_int
libsodium.crypto_aead_chacha20poly1305_ietf_encrypt.argtypes = (
c_void_p, c_void_p,
c_char_p, c_ulonglong,
c_char_p, c_ulonglong,
c_char_p,
c_char_p, c_char_p
)
libsodium.crypto_aead_chacha20poly1305_ietf_decrypt.restype = c_int
libsodium.crypto_aead_chacha20poly1305_ietf_decrypt.argtypes = (
c_void_p, c_void_p,
c_char_p,
c_char_p, c_ulonglong,
c_char_p, c_ulonglong,
c_char_p, c_char_p
)
# xchacha20-ietf-poly1305, same api structure as above
if hasattr(libsodium, 'crypto_aead_xchacha20poly1305_ietf_encrypt'):
libsodium.crypto_aead_xchacha20poly1305_ietf_encrypt.restype = c_int
libsodium.crypto_aead_xchacha20poly1305_ietf_encrypt.argtypes = (
c_void_p, c_void_p,
c_char_p, c_ulonglong,
c_char_p, c_ulonglong,
c_char_p,
c_char_p, c_char_p
)
libsodium.crypto_aead_xchacha20poly1305_ietf_decrypt.restype = c_int
libsodium.crypto_aead_xchacha20poly1305_ietf_decrypt.argtypes = (
c_void_p, c_void_p,
c_char_p,
c_char_p, c_ulonglong,
c_char_p, c_ulonglong,
c_char_p, c_char_p
)
# aes-256-gcm, same api structure as above
libsodium.crypto_aead_aes256gcm_is_available.restype = c_int
if libsodium.crypto_aead_aes256gcm_is_available():
libsodium.crypto_aead_aes256gcm_encrypt.restype = c_int
libsodium.crypto_aead_aes256gcm_encrypt.argtypes = (
c_void_p, c_void_p,
c_char_p, c_ulonglong,
c_char_p, c_ulonglong,
c_char_p,
c_char_p, c_char_p
)
libsodium.crypto_aead_aes256gcm_decrypt.restype = c_int
libsodium.crypto_aead_aes256gcm_decrypt.argtypes = (
c_void_p, c_void_p,
c_char_p,
c_char_p, c_ulonglong,
c_char_p, c_ulonglong,
c_char_p, c_char_p
)
buf = create_string_buffer(buf_size)
loaded = True
class SodiumCrypto(object):
def __init__(self, cipher_name, key, iv, op, crypto_path=None):
if not loaded:
load_libsodium(crypto_path)
self.key = key
self.iv = iv
self.key_ptr = c_char_p(key)
self.iv_ptr = c_char_p(iv)
if cipher_name == 'salsa20':
self.cipher = libsodium.crypto_stream_salsa20_xor_ic
elif cipher_name == 'chacha20':
self.cipher = libsodium.crypto_stream_chacha20_xor_ic
elif cipher_name == 'xchacha20':
if hasattr(libsodium, 'crypto_stream_xchacha20_xor_ic'):
self.cipher = libsodium.crypto_stream_xchacha20_xor_ic
else:
raise Exception('Unsupported cipher')
elif cipher_name == 'chacha20-ietf':
self.cipher = libsodium.crypto_stream_chacha20_ietf_xor_ic
else:
raise Exception('Unknown cipher')
# byte counter, not block counter
self.counter = 0
self.encrypt = self.update
self.decrypt = self.update
self.encrypt_once = self.update
self.decrypt_once = self.update
def update(self, data):
global buf_size, buf
l = len(data)
# we can only prepend some padding to make the encryption align to
# blocks
padding = self.counter % BLOCK_SIZE
if buf_size < padding + l:
buf_size = (padding + l) * 2
buf = create_string_buffer(buf_size)
if padding:
data = (b'\0' * padding) + data
self.cipher(byref(buf), c_char_p(data), padding + l,
self.iv_ptr, int(self.counter / BLOCK_SIZE), self.key_ptr)
self.counter += l
# buf is copied to a str object when we access buf.raw
# strip off the padding
return buf.raw[padding:padding + l]
def clean(self):
pass
class SodiumAeadCrypto(AeadCryptoBase):
def __init__(self, cipher_name, key, iv, op, crypto_path=None):
if not loaded:
load_libsodium(crypto_path)
AeadCryptoBase.__init__(self, cipher_name, key, iv, op, crypto_path)
if cipher_name == 'chacha20-poly1305':
self.encryptor = libsodium.crypto_aead_chacha20poly1305_encrypt
self.decryptor = libsodium.crypto_aead_chacha20poly1305_decrypt
elif cipher_name == 'chacha20-ietf-poly1305':
self.encryptor = libsodium. \
crypto_aead_chacha20poly1305_ietf_encrypt
self.decryptor = libsodium. \
crypto_aead_chacha20poly1305_ietf_decrypt
elif cipher_name == 'xchacha20-ietf-poly1305':
if hasattr(libsodium,
'crypto_aead_xchacha20poly1305_ietf_encrypt'):
self.encryptor = libsodium. \
crypto_aead_xchacha20poly1305_ietf_encrypt
self.decryptor = libsodium. \
crypto_aead_xchacha20poly1305_ietf_decrypt
else:
raise Exception('Unsupported cipher')
elif cipher_name == 'sodium:aes-256-gcm':
if hasattr(libsodium, 'crypto_aead_aes256gcm_encrypt'):
self.encryptor = libsodium.crypto_aead_aes256gcm_encrypt
self.decryptor = libsodium.crypto_aead_aes256gcm_decrypt
else:
raise Exception('Unsupported cipher')
else:
raise Exception('Unknown cipher')
def cipher_ctx_init(self):
global libsodium
libsodium.sodium_increment(byref(self._nonce), c_int(self._nlen))
# print("".join("%02x" % ord(b) for b in self._nonce))
def aead_encrypt(self, data):
global buf, buf_size
plen = len(data)
if buf_size < plen + self._tlen:
buf_size = (plen + self._tlen) * 2
buf = create_string_buffer(buf_size)
cipher_out_len = c_ulonglong(0)
self.encryptor(
byref(buf), byref(cipher_out_len),
c_char_p(data), c_ulonglong(plen),
None, c_ulonglong(0), None,
c_char_p(self._nonce.raw), c_char_p(self._skey)
)
if cipher_out_len.value != plen + self._tlen:
raise Exception("Encrypt failed")
self.cipher_ctx_init()
return buf.raw[:cipher_out_len.value]
def aead_decrypt(self, data):
global buf, buf_size
clen = len(data)
if buf_size < clen:
buf_size = clen * 2
buf = create_string_buffer(buf_size)
cipher_out_len = c_ulonglong(0)
r = self.decryptor(
byref(buf), byref(cipher_out_len),
None,
c_char_p(data), c_ulonglong(clen),
None, c_ulonglong(0),
c_char_p(self._nonce.raw), c_char_p(self._skey)
)
if r != 0:
raise Exception("Decrypt failed")
if cipher_out_len.value != clen - self._tlen:
raise Exception("Decrypt failed")
self.cipher_ctx_init()
return buf.raw[:cipher_out_len.value]
ciphers = {
'salsa20': (32, 8, SodiumCrypto),
'chacha20': (32, 8, SodiumCrypto),
'xchacha20': (32, 24, SodiumCrypto),
'chacha20-ietf': (32, 12, SodiumCrypto),
# AEAD: iv_len = salt_len = key_len
'chacha20-poly1305': (32, 32, SodiumAeadCrypto),
'chacha20-ietf-poly1305': (32, 32, SodiumAeadCrypto),
'xchacha20-ietf-poly1305': (32, 32, SodiumAeadCrypto),
'sodium:aes-256-gcm': (32, 32, SodiumAeadCrypto),
}
def test_chacha20():
print("Test chacha20")
cipher = SodiumCrypto('chacha20', b'k' * 32, b'i' * 16, 1)
decipher = SodiumCrypto('chacha20', b'k' * 32, b'i' * 16, 0)
util.run_cipher(cipher, decipher)
def test_xchacha20():
print("Test xchacha20")
cipher = SodiumCrypto('xchacha20', b'k' * 32, b'i' * 24, 1)
decipher = SodiumCrypto('xchacha20', b'k' * 32, b'i' * 24, 0)
util.run_cipher(cipher, decipher)
def test_salsa20():
print("Test salsa20")
cipher = SodiumCrypto('salsa20', b'k' * 32, b'i' * 16, 1)
decipher = SodiumCrypto('salsa20', b'k' * 32, b'i' * 16, 0)
util.run_cipher(cipher, decipher)
def test_chacha20_ietf():
print("Test chacha20-ietf")
cipher = SodiumCrypto('chacha20-ietf', b'k' * 32, b'i' * 16, 1)
decipher = SodiumCrypto('chacha20-ietf', b'k' * 32, b'i' * 16, 0)
util.run_cipher(cipher, decipher)
def test_chacha20_poly1305():
print("Test chacha20-poly1305 [payload][tag]")
cipher = SodiumAeadCrypto('chacha20-poly1305',
b'k' * 32, b'i' * 32, 1)
decipher = SodiumAeadCrypto('chacha20-poly1305',
b'k' * 32, b'i' * 32, 0)
util.run_cipher(cipher, decipher)
def test_chacha20_poly1305_chunk():
print("Test chacha20-poly1305 chunk [size][tag][payload][tag]")
cipher = SodiumAeadCrypto('chacha20-poly1305',
b'k' * 32, b'i' * 32, 1)
decipher = SodiumAeadCrypto('chacha20-poly1305',
b'k' * 32, b'i' * 32, 0)
cipher.encrypt_once = cipher.encrypt
decipher.decrypt_once = decipher.decrypt
util.run_cipher(cipher, decipher)
def test_chacha20_ietf_poly1305():
print("Test chacha20-ietf-poly1305 [payload][tag]")
cipher = SodiumAeadCrypto('chacha20-ietf-poly1305',
b'k' * 32, b'i' * 32, 1)
decipher = SodiumAeadCrypto('chacha20-ietf-poly1305',
b'k' * 32, b'i' * 32, 0)
util.run_cipher(cipher, decipher)
def test_chacha20_ietf_poly1305_chunk():
print("Test chacha20-ietf-poly1305 chunk [size][tag][payload][tag]")
cipher = SodiumAeadCrypto('chacha20-ietf-poly1305',
b'k' * 32, b'i' * 32, 1)
decipher = SodiumAeadCrypto('chacha20-ietf-poly1305',
b'k' * 32, b'i' * 32, 0)
cipher.encrypt_once = cipher.encrypt
decipher.decrypt_once = decipher.decrypt
util.run_cipher(cipher, decipher)
def test_aes_256_gcm():
print("Test sodium:aes-256-gcm [payload][tag]")
cipher = SodiumAeadCrypto('sodium:aes-256-gcm',
b'k' * 32, b'i' * 32, 1)
decipher = SodiumAeadCrypto('sodium:aes-256-gcm',
b'k' * 32, b'i' * 32, 0)
util.run_cipher(cipher, decipher)
def test_aes_256_gcm_chunk():
print("Test sodium:aes-256-gcm chunk [size][tag][payload][tag]")
cipher = SodiumAeadCrypto('sodium:aes-256-gcm',
b'k' * 32, b'i' * 32, 1)
decipher = SodiumAeadCrypto('sodium:aes-256-gcm',
b'k' * 32, b'i' * 32, 0)
cipher.encrypt_once = cipher.encrypt
decipher.decrypt_once = decipher.decrypt
util.run_cipher(cipher, decipher)
if __name__ == '__main__':
test_chacha20()
test_xchacha20()
test_salsa20()
test_chacha20_ietf()
test_chacha20_poly1305()
test_chacha20_poly1305_chunk()
test_chacha20_ietf_poly1305()
test_chacha20_ietf_poly1305_chunk()
test_aes_256_gcm()
test_aes_256_gcm_chunk()
| 34.445221 | 79 | 0.632334 |
23059cf393369c42ad899ba90fc871abdb472803 | 34,164 | py | Python | src/sage/rings/padics/generic_nodes.py | hsm207/sage | 020bd59ec28717bfab9af44d2231c53da1ff99f1 | [
"BSL-1.0"
] | 1 | 2021-10-18T01:24:04.000Z | 2021-10-18T01:24:04.000Z | src/sage/rings/padics/generic_nodes.py | hsm207/sage | 020bd59ec28717bfab9af44d2231c53da1ff99f1 | [
"BSL-1.0"
] | 1 | 2020-04-18T16:30:43.000Z | 2020-04-18T16:30:43.000Z | src/sage/rings/padics/generic_nodes.py | dimpase/sage | 468f23815ade42a2192b0a9cd378de8fdc594dcd | [
"BSL-1.0"
] | null | null | null | """
`p`-Adic Generic Nodes
This file contains a bunch of intermediate classes for the `p`-adic
parents, allowing a function to be implemented at the right level of
generality.
AUTHORS:
- David Roe
"""
# ****************************************************************************
# Copyright (C) 2007-2013 David Roe <roed.math@gmail.com>
# William Stein <wstein@gmail.com>
#
# Distributed under the terms of the GNU General Public License (GPL)
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
# http://www.gnu.org/licenses/
#*****************************************************************************
from sage.rings.padics.local_generic import LocalGeneric
from sage.rings.padics.padic_generic import pAdicGeneric
from sage.rings.ring import EuclideanDomain, Field
from sage.rings.padics.padic_base_generic import pAdicBaseGeneric
from sage.rings.integer_ring import ZZ
from sage.rings.rational_field import QQ
from sage.rings.infinity import infinity, SignError
from .lattice_precision import PrecisionLattice, PrecisionModule
from sage.rings.padics.precision_error import PrecisionError
from .padic_lattice_element import pAdicLatticeElement, pAdicLatticeCapElement, pAdicLatticeFloatElement
class CappedAbsoluteGeneric(LocalGeneric):
def is_capped_absolute(self):
"""
Returns whether this `p`-adic ring bounds precision in a
capped absolute fashion.
The absolute precision of an element is the power of `p` modulo
which that element is defined. In a capped absolute ring, the
absolute precision of elements are bounded by a constant
depending on the ring.
EXAMPLES::
sage: R = ZpCA(5, 15)
sage: R.is_capped_absolute()
True
sage: R(5^7)
5^7 + O(5^15)
sage: S = Zp(5, 15)
sage: S.is_capped_absolute()
False
sage: S(5^7)
5^7 + O(5^22)
"""
return True
def _prec_type(self):
"""
Returns the precision handling type.
EXAMPLES::
sage: ZpCA(5)._prec_type()
'capped-abs'
"""
return 'capped-abs'
class CappedRelativeGeneric(LocalGeneric):
def is_capped_relative(self):
"""
Returns whether this `p`-adic ring bounds precision in a capped
relative fashion.
The relative precision of an element is the power of p modulo
which the unit part of that element is defined. In a capped
relative ring, the relative precision of elements are bounded
by a constant depending on the ring.
EXAMPLES::
sage: R = ZpCA(5, 15)
sage: R.is_capped_relative()
False
sage: R(5^7)
5^7 + O(5^15)
sage: S = Zp(5, 15)
sage: S.is_capped_relative()
True
sage: S(5^7)
5^7 + O(5^22)
"""
return True
def _prec_type(self):
"""
Returns the precision handling type.
EXAMPLES::
sage: Zp(5)._prec_type()
'capped-rel'
"""
return 'capped-rel'
class FixedModGeneric(LocalGeneric):
def is_fixed_mod(self):
"""
Returns whether this `p`-adic ring bounds precision in a fixed
modulus fashion.
The absolute precision of an element is the power of p modulo
which that element is defined. In a fixed modulus ring, the
absolute precision of every element is defined to be the
precision cap of the parent. This means that some operations,
such as division by `p`, don't return a well defined answer.
EXAMPLES::
sage: R = ZpFM(5,15)
sage: R.is_fixed_mod()
True
sage: R(5^7,absprec=9)
5^7
sage: S = ZpCA(5, 15)
sage: S.is_fixed_mod()
False
sage: S(5^7,absprec=9)
5^7 + O(5^9)
"""
return True
def _prec_type(self):
"""
Returns the precision handling type.
EXAMPLES::
sage: ZpFM(5)._prec_type()
'fixed-mod'
"""
return 'fixed-mod'
class FloatingPointGeneric(LocalGeneric):
def is_floating_point(self):
"""
Returns whether this `p`-adic ring uses a floating point precision model.
Elements in the floating point model are stored by giving a
valuation and a unit part. Arithmetic is done where the unit
part is truncated modulo a fixed power of the uniformizer,
stored in the precision cap of the parent.
EXAMPLES::
sage: R = ZpFP(5,15)
sage: R.is_floating_point()
True
sage: R(5^7,absprec=9)
5^7
sage: S = ZpCR(5,15)
sage: S.is_floating_point()
False
sage: S(5^7,absprec=9)
5^7 + O(5^9)
"""
return True
def _prec_type(self):
"""
Returns the precision handling type.
EXAMPLES::
sage: ZpFP(5)._prec_type()
'floating-point'
"""
return 'floating-point'
def _test_distributivity(self, **options):
r"""
Test the distributivity of `*` on `+` on (not necessarily
all) elements of this set.
p-adic floating point rings only satisfy distributivity
up to a precision that depends on the elements.
INPUT:
- ``options`` -- any keyword arguments accepted by :meth:`_tester`
EXAMPLES:
By default, this method runs the tests only on the
elements returned by ``self.some_elements()``::
sage: R = ZpFP(5,3)
sage: R.some_elements()
[0, 1, 5, 1 + 3*5 + 3*5^2, 5 + 4*5^2 + 4*5^3]
sage: R._test_distributivity()
However, the elements tested can be customized with the
``elements`` keyword argument::
sage: R._test_distributivity(elements=[R(0),~R(0),R(42)])
See the documentation for :class:`TestSuite` for more information.
"""
tester = self._tester(**options)
S = tester.some_elements()
from sage.misc.misc import some_tuples
for x,y,z in some_tuples(S, 3, tester._max_runs):
yz_prec = min(y.precision_absolute(), z.precision_absolute())
yz_val = (y + z).valuation()
try:
prec = min(x.valuation() + yz_val + min(x.precision_relative(), yz_prec - yz_val),
x.valuation() + y.valuation() + (x * y).precision_relative(),
x.valuation() + z.valuation() + (x * z).precision_relative())
except SignError:
pass
else:
if prec > -infinity:
# only check left distributivity, since multiplication commutative
tester.assertTrue((x * (y + z)).is_equal_to((x * y) + (x * z),prec))
def _test_additive_associativity(self, **options):
r"""
Test associativity for (not necessarily all) elements of this
additive semigroup.
INPUT:
- ``options`` -- any keyword arguments accepted by :meth:`_tester`
EXAMPLES:
By default, this method tests only the elements returned by
``self.some_elements()``::
sage: R = QpFP(7,3)
sage: R._test_additive_associativity()
However, the elements tested can be customized with the
``elements`` keyword argument::
sage: R._test_additive_associativity(elements = [R(0), ~R(0), R(42)])
See the documentation for :class:`TestSuite` for more information.
"""
tester = self._tester(**options)
S = tester.some_elements()
from sage.misc.misc import some_tuples
for x,y,z in some_tuples(S, 3, tester._max_runs):
tester.assertTrue(((x + y) + z).is_equal_to(x + (y + z), min(x.precision_absolute(), y.precision_absolute(), z.precision_absolute())))
class FloatingPointRingGeneric(FloatingPointGeneric):
pass
class FloatingPointFieldGeneric(FloatingPointGeneric):#, sage.rings.ring.Field):
pass
class CappedRelativeRingGeneric(CappedRelativeGeneric):
pass
class CappedRelativeFieldGeneric(CappedRelativeGeneric):#, sage.rings.ring.Field):
pass
class pAdicLatticeGeneric(pAdicGeneric):
r"""
An implementation of the `p`-adic rationals with lattice precision.
INPUT:
- `p` -- the underlying prime number
- ``prec`` -- the precision
- ``subtype`` -- either ``"cap"`` or ``"float"``,
specifying the precision model used for tracking precision
- ``label`` -- a string or ``None`` (default: ``None``)
TESTS::
sage: R = ZpLC(17) # indirect doctest
doctest:...: FutureWarning: This class/method/function is marked as experimental. It, its functionality or its interface might change without a formal deprecation.
See http://trac.sagemath.org/23505 for details.
sage: R._prec_type()
'lattice-cap'
sage: R = ZpLF(17) # indirect doctest
sage: R._prec_type()
'lattice-float'
sage: R = QpLC(17) # indirect doctest
sage: R._prec_type()
'lattice-cap'
sage: R = QpLF(17) # indirect doctest
sage: R._prec_type()
'lattice-float'
"""
def __init__(self, p, prec, print_mode, names, label=None):
"""
Initialization.
TESTS::
sage: R = ZpLC(17) # indirect doctest
sage: R._prec_type()
'lattice-cap'
sage: R._subtype
'cap'
sage: R = ZpLF(17) # indirect doctest
sage: R._prec_type()
'lattice-float'
sage: R._subtype
'float'
"""
from sage.rings.padics.lattice_precision import pRational
self._approx_zero = pRational(p, 0)
self._approx_one = pRational(p, 1)
self._approx_minusone = pRational(p, -1)
if label is None:
self._label = None
else:
self._label = str(label)
# We do not use the standard attribute element_class
# because we need to be careful with precision
# Instead we implement _element_constructor_ (cf below)
if self._subtype == 'cap':
(self._prec_cap_relative, self._prec_cap_absolute) = prec
self._zero_cap = None
self._precision = PrecisionLattice(p, label)
element_class = pAdicLatticeCapElement
elif self._subtype == 'float':
self._prec_cap_relative = prec
self._prec_cap_absolute = infinity
self._zero_cap = prec
self._precision = PrecisionModule(p, label, prec)
element_class = pAdicLatticeFloatElement
else:
raise ValueError("subtype must be either 'cap' or 'float'")
self._element_class = self.__make_element_class__(element_class)
pAdicGeneric.__init__(self, self, p, prec, print_mode, names, None)
def _prec_type(self):
"""
Return the precision handling type.
EXAMPLES::
sage: ZpLC(5)._prec_type()
'lattice-cap'
"""
return 'lattice-' + self._subtype
def is_lattice_prec(self):
"""
Returns whether this `p`-adic ring bounds precision using
a lattice model.
In lattice precision, relationships between elements
are stored in a precision object of the parent, which
allows for optimal precision tracking at the cost of
increased memory usage and runtime.
EXAMPLES::
sage: R = ZpCR(5, 15)
sage: R.is_lattice_prec()
False
sage: x = R(25, 8)
sage: x - x
O(5^8)
sage: S = ZpLC(5, 15)
sage: S.is_lattice_prec()
True
sage: x = S(25, 8)
sage: x - x
O(5^30)
"""
return True
def precision_cap(self):
"""
Return the relative precision cap for this ring if it is finite.
Otherwise return the absolute precision cap.
EXAMPLES::
sage: R = ZpLC(3)
sage: R.precision_cap()
20
sage: R.precision_cap_relative()
20
sage: R = ZpLC(3, prec=(infinity,20))
sage: R.precision_cap()
20
sage: R.precision_cap_relative()
+Infinity
sage: R.precision_cap_absolute()
20
.. SEEALSO::
:meth:`precision_cap_relative`, :meth:`precision_cap_absolute`
"""
if self._prec_cap_relative is not infinity:
return self._prec_cap_relative
else:
return self._prec_cap_absolute
def _precision_cap(self):
"""
Return the pair of precisions (for ``lattice-cap``)
or the relative precision cap (for ``lattice-float``).
EXAMPLES::
sage: R = ZpLC(11, (27,37))
sage: R._precision_cap()
(27, 37)
sage: R = ZpLF(11, 14)
sage: R._precision_cap()
14
"""
if self._subtype == 'cap':
return (self._prec_cap_relative, self._prec_cap_absolute)
else:
return self._prec_cap_relative
def precision_cap_relative(self):
"""
Return the relative precision cap for this ring.
EXAMPLES::
sage: R = ZpLC(3)
sage: R.precision_cap_relative()
20
sage: R = ZpLC(3, prec=(infinity,20))
sage: R.precision_cap_relative()
+Infinity
.. SEEALSO::
:meth:`precision_cap`, :meth:`precision_cap_absolute`
"""
return self._prec_cap_relative
def precision_cap_absolute(self):
"""
Return the absolute precision cap for this ring.
EXAMPLES::
sage: R = ZpLC(3)
sage: R.precision_cap_absolute()
40
sage: R = ZpLC(3, prec=(infinity,20))
sage: R.precision_cap_absolute()
20
.. SEEALSO::
:meth:`precision_cap`, :meth:`precision_cap_relative`
"""
return self._prec_cap_absolute
def precision(self):
"""
Return the lattice precision object attached to this parent.
EXAMPLES::
sage: R = ZpLC(5, label='precision')
sage: R.precision()
Precision lattice on 0 objects (label: precision)
sage: x = R(1, 10); y = R(1, 5)
sage: R.precision()
Precision lattice on 2 objects (label: precision)
.. SEEALSO::
:class:`sage.rings.padics.lattice_precision.PrecisionLattice`
"""
return self._precision
def label(self):
"""
Return the label of this parent.
NOTE:
Labels can be used to distinguish between parents with
the same defining data.
They are useful in the lattice precision framework in order
to limit the size of the lattice modeling the precision (which
is roughly the number of elements having this parent).
Elements of a parent with some label do not coerce to a parent
with a different label. However conversions are allowed.
EXAMPLES::
sage: R = ZpLC(5)
sage: R.label() # no label by default
sage: R = ZpLC(5, label='mylabel')
sage: R.label()
'mylabel'
Labels are typically useful to isolate computations.
For example, assume that we first want to do some calculations
with matrices::
sage: R = ZpLC(5, label='matrices')
sage: M = random_matrix(R, 4, 4)
sage: d = M.determinant()
Now, if we want to do another unrelated computation, we can
use a different label::
sage: R = ZpLC(5, label='polynomials')
sage: S.<x> = PolynomialRing(R)
sage: P = (x-1)*(x-2)*(x-3)*(x-4)*(x-5)
Without labels, the software would have modeled the
precision on the matrices and on the polynomials using the same
lattice (manipulating a lattice of higher
dimension can have a significant impact on performance).
"""
return self._label
def _element_constructor_(self, x, prec=None):
"""
Create an element of this parent.
INPUT:
- ``x``: the datum from which the element is created
- ``prec`` -- an integer or ``None`` (the default); the
absolute precision of the created element
NOTE:
This function tries to be sharp on precision as much as
possible.
For instance, if the datum ``x`` is itself an element of the
same parent, the software remembers that the created element
is actually equal to ``x`` (at infinite precision)::
sage: R = ZpLC(2, prec=(infinity,50))
sage: x = R(1, 10); x
1 + O(2^10)
sage: y = R(x) # indirect doctest
sage: y
1 + O(2^10)
sage: x - y
O(2^50)
TESTS::
sage: R(x, prec=5)
1 + O(2^5)
"""
# We first try the _copy method which is sharp on precision
try:
if prec is None:
return x._copy(parent=self)
elif x.parent() is self:
return x.add_bigoh(prec)
else:
return x._copy(parent=self).add_bigoh(prec)
except (TypeError, ValueError, AttributeError):
pass
return self._element_class(self, x, prec)
def convert_multiple(self, *elts):
"""
Convert a list of elements to this parent.
NOTE:
This function tries to be sharp on precision as much as
possible.
In particular, if the precision of the input elements are
handled by a lattice, diffused digits of precision are
preserved during the conversion.
EXAMPLES::
sage: R = ZpLC(2)
sage: x = R(1, 10); y = R(1, 5)
sage: x,y = x+y, x-y
Remark that the pair `(x,y)` has diffused digits of precision::
sage: x
2 + O(2^5)
sage: y
O(2^5)
sage: x + y
2 + O(2^11)
sage: R.precision().diffused_digits([x,y])
6
As a consequence, if we convert ``x`` and ``y`` separately, we
loose some precision::
sage: R2 = ZpLC(2, label='copy')
sage: x2 = R2(x); y2 = R2(y)
sage: x2
2 + O(2^5)
sage: y2
O(2^5)
sage: x2 + y2
2 + O(2^5)
sage: R2.precision().diffused_digits([x2,y2])
0
On the other hand, this issue disappears when we use multiple
conversion::
sage: x2,y2 = R2.convert_multiple(x,y)
sage: x2 + y2
2 + O(2^11)
sage: R2.precision().diffused_digits([x2,y2])
6
"""
p = self.prime()
# We sort elements by precision lattice
elt_by_prec = { }
elt_other = [ ]
indices = { }
for i in range(len(elts)):
x = elts[i]
idx = id(x)
if idx in indices:
indices[idx].append(i)
else:
indices[idx] = [i]
if isinstance(x, pAdicLatticeElement):
prec = x.parent().precision()
if prec.prime() != p:
raise TypeError("conversion between different p-adic rings not supported")
if prec in elt_by_prec:
elt_by_prec[prec].append(x)
else:
elt_by_prec[prec] = [x]
else:
elt_other.append(x)
# We create the elements
ans = len(elts)*[None]
selfprec = self._precision
# First the elements with precision lattice
for (prec, L) in elt_by_prec.items():
if prec is selfprec:
# Here, we use the _copy method in order
# to be sharp on precision
for x in L:
y = x._copy(parent=self)
for i in indices[id(x)]:
ans[i] = y
else:
try:
lattice = prec.precision_lattice(L)
except PrecisionError:
raise NotImplementedError("multiple conversion of a set of variables for which the module precision is not a lattice is not implemented yet")
for j in range(len(L)):
x = L[j]; dx = [ ]
for i in range(j):
dx.append([L[i], lattice[i,j]])
prec = lattice[j,j].valuation(p)
y = self._element_class(self, x.value(), prec, dx=dx, dx_mode='values', check=False, reduce=False)
for i in indices[id(x)]:
ans[i] = y
L[j] = y
# Now the other elements
for x in elt_other:
y = self._element_class(self, x)
for i in indices[id(x)]:
ans[i] = y
# We return the created elements
return ans
def is_pAdicRing(R):
"""
Returns ``True`` if and only if ``R`` is a `p`-adic ring (not a
field).
EXAMPLES::
sage: is_pAdicRing(Zp(5))
True
sage: is_pAdicRing(RR)
False
"""
return isinstance(R, pAdicRingGeneric)
class pAdicRingGeneric(pAdicGeneric, EuclideanDomain):
def is_field(self, proof = True):
"""
Returns whether this ring is actually a field, ie ``False``.
EXAMPLES::
sage: Zp(5).is_field()
False
"""
return False
def krull_dimension(self):
r"""
Returns the Krull dimension of self, i.e. 1
INPUT:
- self -- a `p`-adic ring
OUTPUT:
- the Krull dimension of self. Since self is a `p`-adic ring,
this is 1.
EXAMPLES::
sage: Zp(5).krull_dimension()
1
"""
return 1
def _xgcd_univariate_polynomial(self, f, g):
"""
Extended gcd for univariate polynomial rings over self.
Should not be called directly. Use f.xgcd(g) instead.
INPUT:
- ``f``, ``g`` - the polynomials of which to take the xgcd
OUTPUT:
- A tuple (a, b, c) which satisfies `a = b*f + c*g`. There
is not guarentee that a, b, and c are minimal.
.. WARNING::
The computations are performed using the standard Euclidean
algorithm which might produce mathematically incorrect results in
some cases. See :trac:`13439`.
EXAMPLES::
sage: R.<x> = Zp(3,3)[]
sage: f = x + 1
sage: f.xgcd(f^2)
((1 + O(3^3))*x + 1 + O(3^3), 1 + O(3^3), 0)
We check that :trac:`13439` has been fixed::
sage: R.<x> = Zp(3,3)[]
sage: f = 3*x + 7
sage: g = 5*x + 9
sage: f.xgcd(f*g)
((3 + O(3^4))*x + 1 + 2*3 + O(3^3), 1 + O(3^3), 0)
sage: R.<x> = Zp(3)[]
sage: f = 357555295953*x + 257392844
sage: g = 225227399*x - 511940255230575
sage: f.xgcd(f*g)
((3^9 + O(3^29))*x + 2 + 2*3 + 3^2 + 2*3^5 + 3^6 + 3^7
+ 3^8 + 3^10 + 3^11 + 2*3^13 + 3^14 + 3^16 + 2*3^19 +
O(3^20), 1 + 2*3^2 + 3^4 + 2*3^5 + 3^6 + 3^7 +
2*3^8 + 2*3^10 + 2*3^12 + 3^13 + 3^14 + 3^15 + 2*3^17
+ 3^18 + O(3^20), 0)
We check low precision computations::
sage: R.<x> = Zp(3,1)[]
sage: h = 3*x + 7
sage: i = 4*x + 9
sage: h.xgcd(h*i)
((3 + O(3^2))*x + 1 + O(3), 1 + O(3), 0)
"""
from sage.misc.stopgap import stopgap
stopgap("Extended gcd computations over p-adic fields are performed using the standard Euclidean algorithm which might produce mathematically incorrect results in some cases.", 13439)
base_ring = f.base_ring()
fracfield = base_ring.fraction_field()
f_field = f.change_ring(fracfield)
g_field = g.change_ring(fracfield)
xgcd = fracfield._xgcd_univariate_polynomial(f_field,g_field)
lcm = base_ring(1)
for f in xgcd:
for i in f:
lcm = (i.denominator()).lcm(lcm)
returnlst = []
for f in xgcd:
f *= lcm
returnlst.append(f.change_ring(base_ring))
return tuple(returnlst)
def _gcd_univariate_polynomial(self, f, g):
"""
gcd for univariate polynomial rings over self.
INPUT:
- ``f``, ``g`` - the polynomials of which to take the gcd
OUTPUT: A polynomial
EXAMPLES::
sage: R.<a> = Zq(27)
sage: K.<x> = R[]
sage: h = 3*x + a
sage: i = 4*x + 2
sage: h.gcd(h*i)
(3 + O(3^21))*x + a + O(3^20)
"""
return self._xgcd_univariate_polynomial(f , g)[0]
def is_pAdicField(R):
"""
Returns ``True`` if and only if ``R`` is a `p`-adic field.
EXAMPLES::
sage: is_pAdicField(Zp(17))
False
sage: is_pAdicField(Qp(17))
True
"""
return isinstance(R, pAdicFieldGeneric)
class pAdicFieldGeneric(pAdicGeneric, Field):
pass
#def class_field(self, group=None, map=None, generators=None):
# raise NotImplementedError
#def composite(self, subfield1, subfield2):
# raise NotImplementedError
#def norm_equation(self):
# raise NotImplementedError
#def norm_group(self):
# raise NotImplementedError
#def norm_group_discriminant(self, group=None, map=None, generators=None):
# raise NotImplementedError
#def number_of_extensions(self, degree, discriminant=None, e=None, f=None):
# raise NotImplementedError
#def list_of_extensions(self, degree, discriminant=None, e=None, f=None):
# raise NotImplementedError
#def subfield(self, list):
# raise NotImplementedError
#def subfield_lattice(self):
# raise NotImplementedError
#def subfields_of_degree(self, n):
# raise NotImplementedError
class pAdicFixedModRingGeneric(pAdicRingGeneric, FixedModGeneric):
pass
class pAdicCappedAbsoluteRingGeneric(pAdicRingGeneric, CappedAbsoluteGeneric):
pass
class pAdicCappedRelativeRingGeneric(pAdicRingGeneric, CappedRelativeRingGeneric):
pass
class pAdicCappedRelativeFieldGeneric(pAdicFieldGeneric, CappedRelativeFieldGeneric):
pass
class pAdicFloatingPointRingGeneric(pAdicRingGeneric, FloatingPointRingGeneric):
pass
class pAdicFloatingPointFieldGeneric(pAdicFieldGeneric, FloatingPointFieldGeneric):
pass
class pAdicRingBaseGeneric(pAdicBaseGeneric, pAdicRingGeneric):
def construction(self, forbid_frac_field=False):
"""
Returns the functorial construction of self, namely,
completion of the rational numbers with respect a given prime.
Also preserves other information that makes this field unique
(e.g. precision, rounding, print mode).
INPUT:
- ``forbid_frac_field`` -- ignored, for compatibility with other p-adic types.
EXAMPLES::
sage: K = Zp(17, 8, print_mode='val-unit', print_sep='&')
sage: c, L = K.construction(); L
Integer Ring
sage: c(L)
17-adic Ring with capped relative precision 8
sage: K == c(L)
True
TESTS::
sage: R = ZpLC(13,(31,41))
sage: R._precision_cap()
(31, 41)
sage: F, Z = R.construction()
sage: S = F(Z)
sage: S._precision_cap()
(31, 41)
"""
from sage.categories.pushout import CompletionFunctor
extras = {'print_mode':self._printer.dict(), 'type':self._prec_type(), 'names':self._names}
if hasattr(self, '_label'):
extras['label'] = self._label
return (CompletionFunctor(self.prime(), self._precision_cap(), extras), ZZ)
def random_element(self, algorithm='default'):
r"""
Returns a random element of self, optionally using the
algorithm argument to decide how it generates the
element. Algorithms currently implemented:
- default: Choose `a_i`, `i >= 0`, randomly between `0` and
`p-1` until a nonzero choice is made. Then continue choosing
`a_i` randomly between `0` and `p-1` until we reach
precision_cap, and return `\sum a_i p^i`.
EXAMPLES::
sage: Zp(5,6).random_element()
3 + 3*5 + 2*5^2 + 3*5^3 + 2*5^4 + 5^5 + O(5^6)
sage: ZpCA(5,6).random_element()
4*5^2 + 5^3 + O(5^6)
sage: ZpFM(5,6).random_element()
2 + 4*5^2 + 2*5^4 + 5^5
"""
if (algorithm == 'default'):
if self.is_capped_relative():
i = 0
a_i = ZZ.random_element(self.prime())
while a_i.is_zero():
i += 1
a_i = ZZ.random_element(self.prime())
return self((self.prime()**i)*(a_i + self.prime()*ZZ.random_element(self.prime_pow.pow_Integer_Integer(self.precision_cap()-1))))
else:
return self(ZZ.random_element(self.prime_pow.pow_Integer_Integer(self.precision_cap())))
else:
raise NotImplementedError("Don't know %s algorithm"%algorithm)
#def unit_group(self):
# raise NotImplementedError
#def unit_group_gens(self):
# raise NotImplementedError
#def principal_unit_group(self):
# raise NotImplementedError
class pAdicFieldBaseGeneric(pAdicBaseGeneric, pAdicFieldGeneric):
def composite(self, subfield1, subfield2):
r"""
Returns the composite of two subfields of self, i.e., the
largest subfield containing both
INPUT:
- ``self`` -- a `p`-adic field
- ``subfield1`` -- a subfield
- ``subfield2`` -- a subfield
OUTPUT:
- the composite of subfield1 and subfield2
EXAMPLES::
sage: K = Qp(17); K.composite(K, K) is K
True
"""
#should be overridden for extension fields
if (subfield1 is self) and (subfield2 is self):
return self
raise ValueError("Arguments must be subfields of self.")
def subfields_of_degree(self, n):
r"""
Returns the number of subfields of self of degree `n`
INPUT:
- ``self`` -- a `p`-adic field
- ``n`` -- an integer
OUTPUT:
- integer -- the number of subfields of degree ``n`` over self.base_ring()
EXAMPLES::
sage: K = Qp(17)
sage: K.subfields_of_degree(1)
1
"""
if n == 1:
return 1
else:
return 0
def subfield(self, list):
r"""
Returns the subfield generated by the elements in list
INPUT:
- ``self`` -- a `p`-adic field
- ``list`` -- a list of elements of ``self``
OUTPUT:
- the subfield of ``self`` generated by the elements of list
EXAMPLES::
sage: K = Qp(17); K.subfield([K(17), K(1827)]) is K
True
"""
for x in list:
if x not in self:
raise TypeError("Members of the list of generators must be elements of self.")
return self
def construction(self, forbid_frac_field=False):
"""
Returns the functorial construction of ``self``, namely,
completion of the rational numbers with respect a given prime.
Also preserves other information that makes this field unique
(e.g. precision, rounding, print mode).
INPUT:
- ``forbid_frac_field`` -- require a completion functor rather
than a fraction field functor. This is used in the
:meth:`sage.rings.padics.local_generic.LocalGeneric.change` method.
EXAMPLES::
sage: K = Qp(17, 8, print_mode='val-unit', print_sep='&')
sage: c, L = K.construction(); L
17-adic Ring with capped relative precision 8
sage: c
FractionField
sage: c(L)
17-adic Field with capped relative precision 8
sage: K == c(L)
True
We can get a completion functor by forbidding the fraction field::
sage: c, L = K.construction(forbid_frac_field=True); L
Rational Field
sage: c
Completion[17, prec=8]
sage: c(L)
17-adic Field with capped relative precision 8
sage: K == c(L)
True
TESTS::
sage: R = QpLC(13,(31,41))
sage: R._precision_cap()
(31, 41)
sage: F, Z = R.construction()
sage: S = F(Z)
sage: S._precision_cap()
(31, 41)
"""
from sage.categories.pushout import FractionField, CompletionFunctor
if forbid_frac_field:
extras = {'print_mode':self._printer.dict(), 'type':self._prec_type(), 'names':self._names}
if hasattr(self, '_label'):
extras['label'] = self._label
return (CompletionFunctor(self.prime(), self._precision_cap(), extras), QQ)
else:
return FractionField(), self.integer_ring()
| 30.750675 | 191 | 0.552043 |
75ff987606fd5ce7f36f3026dde32259e4e0cc50 | 6,849 | py | Python | dcipipeline/test_main.py | nsilla/dci-pipeline | 5395a715677b5e1c305967b2fd1ba4d699261c93 | [
"Apache-2.0"
] | null | null | null | dcipipeline/test_main.py | nsilla/dci-pipeline | 5395a715677b5e1c305967b2fd1ba4d699261c93 | [
"Apache-2.0"
] | null | null | null | dcipipeline/test_main.py | nsilla/dci-pipeline | 5395a715677b5e1c305967b2fd1ba4d699261c93 | [
"Apache-2.0"
] | 2 | 2021-09-20T10:48:55.000Z | 2022-02-28T19:47:55.000Z | #
# Copyright (C) 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import unittest
import os
from dcipipeline.main import (
process_args,
overload_dicts,
get_prev_stages,
pre_process_stage,
post_process_stage,
upload_junit_files_from_dir,
)
class TestMain(unittest.TestCase):
def test_process_args_empty(self):
args = ["dci-pipeline"]
result, args = process_args(args)
self.assertEqual(args, [])
self.assertEqual(result, [])
def test_process_args_single(self):
args = ["dci-pipeline", "stage:key=value"]
result, args = process_args(args)
self.assertEqual(args, [])
self.assertEqual(result, [{"stage": {"key": "value"}}])
def test_process_args_list(self):
args = ["dci-pipeline", "stage:key=value=toto,value2"]
result, args = process_args(args)
self.assertEqual(args, [])
self.assertEqual(result, [{"stage": {"key": ["value=toto", "value2"]}}])
def test_process_args_dict(self):
args = ["dci-pipeline", "stage:key=subkey:value", "stage:key=subkey2:value2"]
result, args = process_args(args)
self.assertEqual(args, [])
self.assertEqual(
result,
[
{"stage": {"key": {"subkey": "value"}}},
{"stage": {"key": {"subkey2": "value2"}}},
],
)
def test_process_args_dict_list(self):
args = ["dci-pipeline", "stage:key=subkey:value,value2"]
result, args = process_args(args)
self.assertEqual(args, [])
self.assertEqual(result, [{"stage": {"key": {"subkey": ["value", "value2"]}}}])
def test_process_args_list1(self):
args = ["dci-pipeline", "stage:key=value=toto,"]
result, args = process_args(args)
self.assertEqual(args, [])
self.assertEqual(result, [{"stage": {"key": ["value=toto"]}}])
def test_process_args_only_files(self):
args = ["dci-pipeline", "file1", "file2"]
result, args = process_args(args)
self.assertEqual(args, ["file1", "file2"])
self.assertEqual(result, [])
def test_process_args_http(self):
args = ["dci-pipeline", "stage:key=http://lwn.net/"]
result, args = process_args(args)
self.assertEqual(args, [])
self.assertEqual(result, [{"stage": {"key": "http://lwn.net/"}}])
def test_process_args_https(self):
args = ["dci-pipeline", "stage:key=https://lwn.net/"]
result, args = process_args(args)
self.assertEqual(args, [])
self.assertEqual(result, [{"stage": {"key": "https://lwn.net/"}}])
def test_overload_dicts_add(self):
stage = {"first": "value"}
overload = {"key": ["value=toto", "value2"]}
self.assertEqual(
overload_dicts(overload, stage),
{"first": "value", "key": ["value=toto", "value2"]},
)
def test_overload_dicts_replace_list(self):
overload = {"components": ["ocp=12", "ose-tests"]}
stage = {"components": ["ocp", "cnf-tests"], "topic": "OCP-4.4"}
self.assertEqual(
overload_dicts(overload, stage),
{"components": ["ocp=12", "cnf-tests", "ose-tests"], "topic": "OCP-4.4"},
)
def test_overload_dicts_add_dict(self):
overload = {"ansible_extravars": {"dci_comment": "universal answer"}}
stage = {"ansible_extravars": {"answer": 42}}
self.assertEqual(
overload_dicts(overload, stage),
{"ansible_extravars": {"answer": 42, "dci_comment": "universal answer"}},
)
def test_overload_dicts_add_list_in_dict(self):
overload = {"ansible_extravars": {"dci_comment": "universal answer"}}
stage = {"ansible_extravars": {"answer": 42}}
self.assertEqual(
overload_dicts(overload, stage),
{"ansible_extravars": {"answer": 42, "dci_comment": "universal answer"}},
)
def test_prev_stages(self):
stage1 = {"name": "1", "type": "ocp"}
stage2 = {
"name": "2",
"type": "ocp-upgrade",
"prev_stages": ["ocp-upgrade", "ocp"],
}
stage3 = {
"name": "3",
"type": "ocp-upgrade2",
"prev_stages": ["ocp-upgrade", "ocp"],
}
stage4 = {"name": "4", "type": "cnf2"}
pipeline = [stage1, stage2, stage3, stage4]
prev_stages = get_prev_stages(stage3, pipeline)
self.assertEqual(prev_stages, [stage2, stage1])
@mock.patch("dcipipeline.main.tempfile.mkdtemp")
def test_pre_process_stage(self, m):
stage = {"ansible_envvars": {"envvar": "/@tmpdir"}}
m.return_value = "/tmp/tmppath"
stage_metas, stage = pre_process_stage(stage)
self.assertEqual(stage_metas["tmpdirs"][0]["path"], "/tmp/tmppath")
@mock.patch("dcipipeline.main.shutil.rmtree")
@mock.patch("dcipipeline.main.upload_junit_files_from_dir")
def test_post_process_stage(self, m_upload_junit, m_rmtree):
metas = {
"tmpdirs": [{"name": "JUNIT_OUTPUT_DIR", "path": "/tmp/junit_tmppath"}]
}
post_process_stage("context", "stage", metas)
m_upload_junit.assert_called_with("context", "stage", "/tmp/junit_tmppath")
m_rmtree.assert_called_with("/tmp/junit_tmppath")
m_upload_junit.reset_mock()
m_rmtree.reset_mock()
metas = {"tmpdirs": [{"name": "envvar1", "path": "/tmp/tmppath"}]}
post_process_stage("context", "stage", metas)
self.assertTrue(not m_upload_junit.called)
m_rmtree.assert_called_with("/tmp/tmppath")
@mock.patch("dcipipeline.main.dci_file.create")
def test_upload_junit_files_from_dir(self, m):
try:
os.makedirs("/tmp/junit-tmppath")
except Exception:
pass
open("/tmp/junit-tmppath/junit-tests.xml", "a+").close()
stage = {"job_info": {"job": {"id": "1"}}}
upload_junit_files_from_dir("context", stage, "/tmp/junit-tmppath")
m.assert_called_with(
"context",
"junit-tests",
file_path="/tmp/junit-tmppath/junit-tests.xml",
mime="application/junit",
job_id="1",
)
if __name__ == "__main__":
unittest.main()
# test_main.py ends here
| 36.822581 | 87 | 0.598482 |
6af7beb362b09d9e78b894d9cee63947a26e28ac | 779 | py | Python | tests/test_Circle.py | kat0lia/python_qa_oop | ca11ccae990b85e7dadd31ffbd4e976f544d1527 | [
"MIT"
] | null | null | null | tests/test_Circle.py | kat0lia/python_qa_oop | ca11ccae990b85e7dadd31ffbd4e976f544d1527 | [
"MIT"
] | null | null | null | tests/test_Circle.py | kat0lia/python_qa_oop | ca11ccae990b85e7dadd31ffbd4e976f544d1527 | [
"MIT"
] | null | null | null | from source.Circle import Circle
import math
import pytest
name = "Cr1"
def test_create_class():
circle = Circle(name, 2)
assert isinstance(circle, Circle)
assert circle.radius == 2
assert circle.angles == 0
assert circle.name == name
def test_perimeter():
circle = Circle(name, 2)
assert circle.perimeter() == 2 * math.pi * 2
def test_area():
circle = Circle(name, 2)
assert circle.area() == math.pi * 2 * 2
def test_add_area():
circle1 = Circle(name, 2)
circle2 = Circle(name, 4)
assert circle1.add_area(circle2) == (math.pi * 2 * 2) + (math.pi * 4 * 4)
def test_exception_in_method_add_area():
foo_example = 0
circle = Circle(name, 2)
with pytest.raises(ValueError):
circle.add_area(foo_example)
| 20.5 | 77 | 0.652118 |
32d4d7bd886d3256af6874eb2ee73d9fff50b40e | 27,442 | py | Python | fedenhance/losses/sisdr.py | dbisk/fedenhance | e77cf9b1159c53db594f0d8fefb17777d7c8b58f | [
"MIT"
] | 24 | 2021-05-11T04:57:07.000Z | 2022-03-08T10:32:37.000Z | fedenhance/losses/sisdr.py | dbisk/fedenhance | e77cf9b1159c53db594f0d8fefb17777d7c8b58f | [
"MIT"
] | null | null | null | fedenhance/losses/sisdr.py | dbisk/fedenhance | e77cf9b1159c53db594f0d8fefb17777d7c8b58f | [
"MIT"
] | 3 | 2021-08-10T09:42:15.000Z | 2021-11-02T00:15:16.000Z | """!
@brief SISNR very efficient computation in Torch. PArts have been used from
asteroid for values cross checking.
@author Efthymios Tzinis {etzinis2@illinois.edu}
@copyright University of illinois at Urbana Champaign
"""
import torch
import torch.nn as nn
import itertools
from torch.nn.modules.loss import _Loss
def _sdr( y, z, SI=False):
if SI:
a = ((z*y).mean(-1) / (y*y).mean(-1)).unsqueeze(-1) * y
return 10*torch.log10( (a**2).mean(-1) / ((a-z)**2).mean(-1))
else:
return 10*torch.log10( (y*y).mean(-1) / ((y-z)**2).mean(-1))
# Negative SDRi loss
def sdri_loss( y, z, of=0):
# Add a batch dimension if it's missing
if len( y.shape) < 3:
y = y.unsqueeze(0)
if len( z.shape) < 3:
z = z.unsqueeze(0)
s = _sdr( y, z, SI=False) - of
return -s.mean()
# Negative SI-SDRi loss
def sisdr_loss( y, z, of=0):
# Add a batch dimension if it's missing
if len( y.shape) < 3:
y = y.unsqueeze(0)
if len( z.shape) < 3:
z = z.unsqueeze(0)
s = _sdr( y, z, SI=True) - of
return -s.mean()
# Negative PIT loss
def pit_loss( y, z, of=0, SI=False):
# Add a batch dimension if it's missing
if len( y.shape) < 3:
y = y.unsqueeze(0)
if len( z.shape) < 3:
z = z.unsqueeze(0)
# Get all possible target source permutation SDRs and stack them
p = list( itertools.permutations( range( y.shape[-2])))
s = torch.stack( [_sdr( y[:,j,:], z, SI) for j in p], dim=2)
# Get source-average SDRi
# s = (s - of.unsqueeze(2)).mean(1)
s = s.mean(1)
# Find and return permutation with highest SDRi (negate since we are minimizing)
i = s.argmax(-1)
j = torch.arange( s.shape[0], dtype=torch.long, device=i.device)
return -s[j,i].mean()
class PermInvariantSISDR(nn.Module):
"""!
Class for SISDR computation between reconstructed signals and
target wavs by also regulating it with learned target masks."""
def __init__(self,
batch_size=None,
zero_mean=False,
n_sources=None,
backward_loss=True,
improvement=False,
return_individual_results=False):
"""
Initialization for the results and torch tensors that might
be used afterwards
:param batch_size: The number of the samples in each batch
:param zero_mean: If you want to perform zero-mean across
last dimension (time dim) of the signals before SDR computation
"""
super().__init__()
self.bs = batch_size
self.perform_zero_mean = zero_mean
self.backward_loss = backward_loss
self.permutations = list(itertools.permutations(
torch.arange(n_sources)))
self.permutations_tensor = torch.LongTensor(self.permutations)
self.improvement = improvement
self.n_sources = n_sources
self.return_individual_results = return_individual_results
def normalize_input(self, pr_batch, t_batch, initial_mixtures=None):
min_len = min(pr_batch.shape[-1],
t_batch.shape[-1])
if initial_mixtures is not None:
min_len = min(min_len, initial_mixtures.shape[-1])
initial_mixtures = initial_mixtures[:, :, :min_len]
pr_batch = pr_batch[:, :, :min_len]
t_batch = t_batch[:, :, :min_len]
if self.perform_zero_mean:
pr_batch = pr_batch - torch.mean(
pr_batch, dim=-1, keepdim=True)
t_batch = t_batch - torch.mean(
t_batch, dim=-1, keepdim=True)
if initial_mixtures is not None:
initial_mixtures = initial_mixtures - torch.mean(
initial_mixtures, dim=-1, keepdim=True)
return pr_batch, t_batch, initial_mixtures
@staticmethod
def dot(x, y):
return torch.sum(x * y, dim=-1, keepdim=True)
def compute_permuted_sisnrs(self,
permuted_pr_batch,
t_batch,
t_t_diag, eps=10e-8):
s_t = (self.dot(permuted_pr_batch, t_batch) /
(t_t_diag + eps) * t_batch)
e_t = permuted_pr_batch - s_t
sisnrs = 10 * torch.log10(self.dot(s_t, s_t) /
(self.dot(e_t, e_t) + eps))
return sisnrs
def compute_sisnr(self,
pr_batch,
t_batch,
initial_mixtures=None,
eps=10e-8):
t_t_diag = self.dot(t_batch, t_batch)
sisnr_l = []
for perm in self.permutations:
permuted_pr_batch = pr_batch[:, perm, :]
sisnr = self.compute_permuted_sisnrs(permuted_pr_batch,
t_batch,
t_t_diag, eps=eps)
sisnr_l.append(sisnr)
all_sisnrs = torch.cat(sisnr_l, -1)
best_sisdr, best_perm_ind = torch.max(all_sisnrs.mean(-2), -1)
if self.improvement:
initial_mix = initial_mixtures.repeat(1, self.n_sources, 1)
base_sisdr = self.compute_permuted_sisnrs(initial_mix,
t_batch,
t_t_diag, eps=eps)
best_sisdr -= base_sisdr.mean()
if not self.return_individual_results:
best_sisdr = best_sisdr.mean()
if self.backward_loss:
return -best_sisdr, best_perm_ind
return best_sisdr, best_perm_ind
def forward(self,
pr_batch,
t_batch,
eps=1e-9,
initial_mixtures=None,
return_best_permutation=False):
"""!
:param pr_batch: Reconstructed wavs: Torch Tensors of size:
batch_size x self.n_sources x length_of_wavs
:param t_batch: Target wavs: Torch Tensors of size:
batch_size x self.n_sources x length_of_wavs
:param eps: Numerical stability constant.
:param initial_mixtures: Initial Mixtures for SISDRi: Torch Tensor
of size: batch_size x 1 x length_of_wavs
:returns results_buffer Buffer for loading the results directly
to gpu and not having to reconstruct the results matrix: Torch
Tensor of size: batch_size x 1
"""
pr_batch, t_batch, initial_mixtures = self.normalize_input(
pr_batch, t_batch, initial_mixtures=initial_mixtures)
sisnr_l, best_perm_ind = self.compute_sisnr(
pr_batch, t_batch, eps=eps,
initial_mixtures=initial_mixtures)
if return_best_permutation:
best_permutations = self.permutations_tensor[best_perm_ind]
return sisnr_l, best_permutations
else:
return sisnr_l
# The following is copied from:
# https://github.com/mpariente/asteroid/blob/master/asteroid/losses
class PITLossWrapper(nn.Module):
""" Permutation invariant loss wrapper.
Args:
loss_func: function with signature (targets, est_targets, **kwargs).
pit_from (str): Determines how PIT is applied.
* ``'pw_mtx'`` (pairwise matrix): `loss_func` computes pairwise
losses and returns a torch.Tensor of shape
:math:`(batch, n\_src, n\_src)`. Each element
:math:`[batch, i, j]` corresponds to the loss between
:math:`targets[:, i]` and :math:`est\_targets[:, j]`
* ``'pw_pt'`` (pairwise point): `loss_func` computes the loss for
a batch of single source and single estimates (tensors won't
have the source axis). Output shape : :math:`(batch)`.
See :meth:`~PITLossWrapper.get_pw_losses`.
* ``'perm_avg'``(permutation average): `loss_func` computes the
average loss for a given permutations of the sources and
estimates. Output shape : :math:`(batch)`.
See :meth:`~PITLossWrapper.best_perm_from_perm_avg_loss`.
In terms of efficiency, ``'perm_avg'`` is the least efficicient.
perm_reduce (Callable): torch function to reduce permutation losses.
Defaults to None (equivalent to mean). Signature of the func
(pwl_set, **kwargs) : (B, n_src!, n_src) --> (B, n_src!).
`perm_reduce` can receive **kwargs during forward using the
`reduce_kwargs` argument (dict). If those argument are static,
consider defining a small function or using `functools.partial`.
Only used in `'pw_mtx'` and `'pw_pt'` `pit_from` modes.
For each of these modes, the best permutation and reordering will be
automatically computed.
Examples:
>>> import torch
>>> from asteroid.losses import pairwise_neg_sisdr
>>> sources = torch.randn(10, 3, 16000)
>>> est_sources = torch.randn(10, 3, 16000)
>>> # Compute PIT loss based on pairwise losses
>>> loss_func = PITLossWrapper(pairwise_neg_sisdr, pit_from='pw_mtx')
>>> loss_val = loss_func(est_sources, sources)
>>>
>>> # Using reduce
>>> def reduce(perm_loss, src):
>>> weighted = perm_loss * src.norm(dim=-1, keepdim=True)
>>> return torch.mean(weighted, dim=-1)
>>>
>>> loss_func = PITLossWrapper(pairwise_neg_sisdr, pit_from='pw_mtx',
>>> perm_reduce=reduce)
>>> reduce_kwargs = {'src': sources}
>>> loss_val = loss_func(est_sources, sources,
>>> reduce_kwargs=reduce_kwargs)
"""
def __init__(self, loss_func, pit_from='pw_mtx', perm_reduce=None):
super().__init__()
self.loss_func = loss_func
self.pit_from = pit_from
self.perm_reduce = perm_reduce
if self.pit_from not in ['pw_mtx', 'pw_pt', 'perm_avg']:
raise ValueError('Unsupported loss function type for now. Expected'
'one of [`pw_mtx`, `pw_pt`, `perm_avg`]')
def forward(self, est_targets, targets, return_est=False,
reduce_kwargs=None, **kwargs):
""" Find the best permutation and return the loss.
Args:
est_targets: torch.Tensor. Expected shape [batch, nsrc, *].
The batch of target estimates.
targets: torch.Tensor. Expected shape [batch, nsrc, *].
The batch of training targets
return_est: Boolean. Whether to return the reordered targets
estimates (To compute metrics or to save example).
reduce_kwargs (dict or None): kwargs that will be passed to the
pairwise losses reduce function (`perm_reduce`).
**kwargs: additional keyword argument that will be passed to the
loss function.
Returns:
- Best permutation loss for each batch sample, average over
the batch. torch.Tensor(loss_value)
- The reordered targets estimates if return_est is True.
torch.Tensor of shape [batch, nsrc, *].
"""
n_src = targets.shape[1]
assert n_src < 10, f"Expected source axis along dim 1, found {n_src}"
if self.pit_from == 'pw_mtx':
# Loss function already returns pairwise losses
pw_losses = self.loss_func(est_targets, targets, **kwargs)
elif self.pit_from == 'pw_pt':
# Compute pairwise losses with a for loop.
pw_losses = self.get_pw_losses(self.loss_func, est_targets,
targets, **kwargs)
elif self.pit_from == 'perm_avg':
# Cannot get pairwise losses from this type of loss.
# Find best permutation directly.
min_loss, min_loss_idx = self.best_perm_from_perm_avg_loss(
self.loss_func, est_targets, targets, **kwargs
)
# Take the mean over the batch
mean_loss = torch.mean(min_loss)
if not return_est:
return mean_loss
reordered = self.reorder_source(est_targets, n_src, min_loss_idx)
return mean_loss, reordered
else:
return
assert pw_losses.ndim == 3, ("Something went wrong with the loss "
"function, please read the docs.")
assert (pw_losses.shape[0] ==
targets.shape[0]), "PIT loss needs same batch dim as input"
reduce_kwargs = reduce_kwargs if reduce_kwargs is not None else dict()
min_loss, min_loss_idx = self.find_best_perm(
pw_losses, n_src, perm_reduce=self.perm_reduce, **reduce_kwargs
)
mean_loss = torch.mean(min_loss)
if not return_est:
return mean_loss
reordered = self.reorder_source(est_targets, n_src, min_loss_idx)
return mean_loss, reordered
@staticmethod
def get_pw_losses(loss_func, est_targets, targets, **kwargs):
""" Get pair-wise losses between the training targets and its estimate
for a given loss function.
Args:
loss_func: function with signature (targets, est_targets, **kwargs)
The loss function to get pair-wise losses from.
est_targets: torch.Tensor. Expected shape [batch, nsrc, *].
The batch of target estimates.
targets: torch.Tensor. Expected shape [batch, nsrc, *].
The batch of training targets.
**kwargs: additional keyword argument that will be passed to the
loss function.
Returns:
torch.Tensor or size [batch, nsrc, nsrc], losses computed for
all permutations of the targets and est_targets.
This function can be called on a loss function which returns a tensor
of size [batch]. There are more efficient ways to compute pair-wise
losses using broadcasting.
"""
batch_size, n_src, *_ = targets.shape
pair_wise_losses = targets.new_empty(batch_size, n_src, n_src)
for est_idx, est_src in enumerate(est_targets.transpose(0, 1)):
for target_idx, target_src in enumerate(targets.transpose(0, 1)):
pair_wise_losses[:, est_idx, target_idx] = loss_func(
est_src, target_src, **kwargs)
return pair_wise_losses
@staticmethod
def find_best_perm(pair_wise_losses, n_src, perm_reduce=None, **kwargs):
"""Find the best permutation, given the pair-wise losses.
Args:
pair_wise_losses (:class:`torch.Tensor`):
Tensor of shape [batch, n_src, n_src]. Pairwise losses.
n_src (int): Number of sources.
perm_reduce (Callable): torch function to reduce permutation losses.
Defaults to None (equivalent to mean). Signature of the func
(pwl_set, **kwargs) : (B, n_src!, n_src) --> (B, n_src!)
**kwargs: additional keyword argument that will be passed to the
permutation reduce function.
Returns:
tuple:
:class:`torch.Tensor`: The loss corresponding to the best
permutation of size (batch,).
:class:`torch.LongTensor`: The indexes of the best permutations.
MIT Copyright (c) 2018 Kaituo XU.
See `Original code
<https://github.com/kaituoxu/Conv-TasNet/blob/master>`__ and `License
<https://github.com/kaituoxu/Conv-TasNet/blob/master/LICENSE>`__.
"""
# After transposition, dim 1 corresp. to sources and dim 2 to estimates
pwl = pair_wise_losses.transpose(-1, -2)
perms = pwl.new_tensor(list(itertools.permutations(range(n_src))),
dtype=torch.long)
# Column permutation indices
idx = torch.unsqueeze(perms, 2)
# Loss mean of each permutation
if perm_reduce is None:
# one-hot, [n_src!, n_src, n_src]
perms_one_hot = pwl.new_zeros((*perms.size(), n_src)).scatter_(2,
idx,
1)
loss_set = torch.einsum('bij,pij->bp', [pwl, perms_one_hot])
loss_set /= n_src
else:
batch = pwl.shape[0]
n_perm = idx.shape[0]
# [batch, n_src!, n_src] : Pairwise losses for each permutation.
pwl_set = pwl[:, torch.arange(n_src), idx.squeeze(-1)]
# Apply reduce [batch, n_src!, n_src] --> [batch, n_src!]
loss_set = perm_reduce(pwl_set, **kwargs)
# Indexes and values of min losses for each batch element
min_loss_idx = torch.argmin(loss_set, dim=1)
min_loss, _ = torch.min(loss_set, dim=1, keepdim=True)
return min_loss, min_loss_idx
class PairwiseNegSDR(_Loss):
""" Base class for pairwise negative SI-SDR, SD-SDR and SNR on a batch.
Args:
sdr_type (str): choose between "snr" for plain SNR, "sisdr" for
SI-SDR and "sdsdr" for SD-SDR [1].
zero_mean (bool, optional): by default it zero mean the target
and estimate before computing the loss.
take_log (bool, optional): by default the log10 of sdr is returned.
Shape:
est_targets (:class:`torch.Tensor`): Expected shape
[batch, n_src, time]. Batch of target estimates.
targets (:class:`torch.Tensor`): Expected shape
[batch, n_src, time]. Batch of training targets.
Returns:
:class:`torch.Tensor`: with shape [batch, n_src, n_src].
Pairwise losses.
Examples:
>>> import torch
>>> from asteroid.losses import PITLossWrapper
>>> targets = torch.randn(10, 2, 32000)
>>> est_targets = torch.randn(10, 2, 32000)
>>> loss_func = PITLossWrapper(PairwiseNegSDR("sisdr"),
>>> pit_from='pairwise')
>>> loss = loss_func(est_targets, targets)
References:
[1] Le Roux, Jonathan, et al. "SDR half-baked or well done." IEEE
International Conference on Acoustics, Speech and Signal
Processing (ICASSP) 2019.
"""
def __init__(self, sdr_type, zero_mean=True, take_log=True):
super(PairwiseNegSDR, self).__init__()
assert sdr_type in ["snr", "sisdr", "sdsdr"]
self.sdr_type = sdr_type
self.zero_mean = zero_mean
self.take_log = take_log
def forward(self, est_targets, targets):
assert targets.size() == est_targets.size()
# Step 1. Zero-mean norm
if self.zero_mean:
mean_source = torch.mean(targets, dim=2, keepdim=True)
mean_estimate = torch.mean(est_targets, dim=2, keepdim=True)
targets = targets - mean_source
est_targets = est_targets - mean_estimate
# Step 2. Pair-wise SI-SDR. (Reshape to use broadcast)
s_target = torch.unsqueeze(targets, dim=1)
s_estimate = torch.unsqueeze(est_targets, dim=2)
if self.sdr_type in ["sisdr", "sdsdr"]:
# [batch, n_src, n_src, 1]
pair_wise_dot = torch.sum(s_estimate * s_target, dim=3,
keepdim=True)
# [batch, 1, n_src, 1]
s_target_energy = torch.sum(s_target**2, dim=3, keepdim=True) + 1e-8
# [batch, n_src, n_src, time]
pair_wise_proj = pair_wise_dot * s_target / s_target_energy
else:
# [batch, n_src, n_src, time]
pair_wise_proj = s_target.repeat(1, s_target.shape[2], 1, 1)
if self.sdr_type in ["sdsdr", "snr"]:
e_noise = s_estimate - s_target
else:
e_noise = s_estimate - pair_wise_proj
# [batch, n_src, n_src]
pair_wise_sdr = torch.sum(pair_wise_proj ** 2, dim=3) / (
torch.sum(e_noise ** 2, dim=3) + 1e-8)
if self.take_log:
pair_wise_sdr = 10 * torch.log10(pair_wise_sdr + 1e-8)
return - pair_wise_sdr
class StabilizedPermInvSISDRMetric(nn.Module):
"""!
Class for SISDR computation between reconstructed and target signals."""
def __init__(self,
zero_mean=False,
single_source=False,
n_estimated_sources=None,
n_actual_sources=None,
backward_loss=True,
improvement=False,
return_individual_results=False):
"""
Initialization for the results and torch tensors that might
be used afterwards
:param batch_size: The number of the samples in each batch
:param zero_mean: If you want to perform zero-mean across
last dimension (time dim) of the signals before SDR computation
"""
super().__init__()
self.perform_zero_mean = zero_mean
self.backward_loss = backward_loss
self.improvement = improvement
self.n_estimated_sources = n_estimated_sources
self.n_actual_sources = n_actual_sources
assert self.n_estimated_sources >= self.n_actual_sources, (
'Estimates need to be at least: {} but got: {}'.format(
self.n_actual_sources, self.n_estimated_sources))
self.permutations = list(itertools.permutations(
torch.arange(self.n_estimated_sources),
r=self.n_actual_sources))
self.permutations_tensor = torch.LongTensor(self.permutations)
self.return_individual_results = return_individual_results
self.single_source = single_source
if self.single_source:
assert self.n_actual_sources == 1
def normalize_input(self, input_tensor):
if self.perform_zero_mean:
return input_tensor - torch.mean(input_tensor, dim=-1, keepdim=True)
else:
return input_tensor
@staticmethod
def dot(x, y):
return torch.sum(x * y, dim=-1, keepdim=True)
def compute_stabilized_sisnr(self,
permuted_pr_batch,
t_batch,
t_signal_powers, eps=1e-8):
pr_signal_powers = self.dot(permuted_pr_batch, permuted_pr_batch)
inner_prod_sq = self.dot(permuted_pr_batch, t_batch) ** 2
rho_sq = inner_prod_sq / (pr_signal_powers * t_signal_powers + eps)
return 10 * torch.log10((rho_sq + eps) / (1. - rho_sq + eps))
def compute_sisnr(self,
pr_batch,
t_batch,
initial_mixtures=None,
eps=1e-8):
assert t_batch.shape[-2] == self.n_actual_sources
# The actual number of sources might be less than the estimated ones
t_signal_powers = self.dot(t_batch, t_batch)
sisnr_l = []
for perm in self.permutations:
permuted_pr_batch = pr_batch[:, perm, :]
sisnr = self.compute_stabilized_sisnr(
permuted_pr_batch, t_batch, t_signal_powers, eps=eps)
sisnr_l.append(sisnr)
all_sisnrs = torch.cat(sisnr_l, -1)
best_sisdr, best_perm_ind = torch.max(all_sisnrs.mean(-2), -1)
if self.improvement:
if initial_mixtures is not None:
initial_mix = initial_mixtures.repeat(1, self.n_actual_sources, 1)
else:
initial_mixture = torch.sum(t_batch, -2, keepdim=True)
initial_mixture = self.normalize_input(initial_mixture)
initial_mix = initial_mixture.repeat(1, self.n_actual_sources, 1)
base_sisdr = self.compute_stabilized_sisnr(
initial_mix, t_batch, t_signal_powers, eps=eps)
best_sisdr -= base_sisdr.mean([-2, -1])
if not self.return_individual_results:
best_sisdr = best_sisdr.mean()
if self.backward_loss:
return -best_sisdr, best_perm_ind
return best_sisdr, best_perm_ind
def forward(self,
pr_batch,
t_batch,
eps=1e-9,
initial_mixtures=None,
return_best_permutation=False):
"""!
:param pr_batch: Reconstructed wavs: Torch Tensors of size:
batch_size x self.n_sources x length_of_wavs
:param t_batch: Target wavs: Torch Tensors of size:
batch_size x self.n_sources x length_of_wavs
:param eps: Numerical stability constant.
:returns results_buffer Buffer for loading the results directly
to gpu and not having to reconstruct the results matrix: Torch
Tensor of size: batch_size x 1
"""
if self.single_source:
pr_batch = torch.sum(pr_batch, -2, keepdim=True)
pr_batch = self.normalize_input(pr_batch)
t_batch = self.normalize_input(t_batch)
if initial_mixtures is not None:
initial_mixtures = self.normalize_input(initial_mixtures)
sisnr_l, best_perm_ind = self.compute_sisnr(
pr_batch, t_batch, initial_mixtures=initial_mixtures, eps=eps)
if return_best_permutation:
best_permutations = self.permutations_tensor[best_perm_ind]
return sisnr_l, best_permutations
else:
return sisnr_l
def test_StabilizedPermInvSISDRMetric():
bs = 3
n_secs = 4
n_samples = n_secs * 16000
zero_mean = True
improvement = False
n_estimated_sources = 1
n_actual_sources = 1
pr_batch = torch.rand((bs, n_estimated_sources, n_samples),
dtype=torch.float32)
t_batch = torch.rand((bs, n_actual_sources, n_samples),
dtype=torch.float32)
pr_batch[:, 0] = 4*t_batch[:, 0] + 0.00001 * t_batch[:, 0] ** 3.
pr_batch[:, 0] = 0.00001 * t_batch[:, 0] ** 3.
# # t_batch[:, 1] = 2. *
initial_mixture = torch.sum(t_batch, -2, keepdim=True)
metric = StabilizedPermInvSISDRMetric(
n_estimated_sources=n_estimated_sources,
n_actual_sources=n_actual_sources,
zero_mean=zero_mean,
backward_loss=False,
improvement=improvement,
return_individual_results=True)
metric_result, best_perm = metric(pr_batch, t_batch,
return_best_permutation=True)
print(metric_result, best_perm)
old_me = PermInvariantSISDR(batch_size=None,
zero_mean=zero_mean,
n_sources=n_estimated_sources,
backward_loss=False,
improvement=improvement,
return_individual_results=True)
old_me_result, best_perm = old_me(pr_batch, t_batch,
initial_mixtures=torch.sum(t_batch, -2,
keepdim=True),
return_best_permutation=True)
print(old_me_result, best_perm)
if __name__ == "__main__":
test_StabilizedPermInvSISDRMetric() | 43.283912 | 84 | 0.586911 |
f1022f6b471a36dad34b862a6eafe5c38e4f401d | 5,412 | py | Python | contactmps/admin.py | OpenUpSA/contact-mps | 63d7f86e1b6c9319a4d0344a6125cd22770f34c7 | [
"MIT"
] | null | null | null | contactmps/admin.py | OpenUpSA/contact-mps | 63d7f86e1b6c9319a4d0344a6125cd22770f34c7 | [
"MIT"
] | 12 | 2017-06-08T09:36:49.000Z | 2021-06-10T18:48:08.000Z | contactmps/admin.py | OpenUpSA/contact-mps | 63d7f86e1b6c9319a4d0344a6125cd22770f34c7 | [
"MIT"
] | 2 | 2017-07-17T15:14:25.000Z | 2020-06-22T02:08:58.000Z | import xlwt
import datetime
from django.contrib.sites.shortcuts import get_current_site
from django.http import HttpResponse
from django.contrib import admin
from django.utils.translation import gettext_lazy as _
from .models import (
Campaign,
Committee,
ContactDetail,
Email,
Entity,
SenderQA
)
class ContactDetailInline(admin.TabularInline):
model = ContactDetail
class EntityAdmin(admin.ModelAdmin):
readonly_fields = ['created_at', 'updated_at']
inlines = (ContactDetailInline, )
class ContactDetailAdmin(admin.ModelAdmin):
readonly_fields = ['created_at', 'updated_at']
class SenderQAQuestionFilter(admin.SimpleListFilter):
title = _('Question')
parameter_name = 'question'
def lookups(self, request, model_admin):
return (
('make_contact', _('Can be Contacted')),
)
def queryset(self, request, queryset):
if self.value() == 'make_contact':
return queryset.filter(answer='Yes',
question='Are you willing to be contacted by a journalist to elaborate on your answers?')
class SenderQAAdmin(admin.ModelAdmin):
readonly_fields = ['question', 'question']
list_filter = ('email__campaign',
'email__moderation_passed',
SenderQAQuestionFilter)
list_display = ('from_email', 'from_name', 'question', 'answer')
list_select_related = ('email', )
actions = ['export_as_excel']
def from_email(self, obj):
return obj.email.from_email
def from_name(self, obj):
return obj.email.from_name
def export_as_excel(self, request, queryset):
field_names = ['Email', 'Name', 'Question', 'Answer']
response = HttpResponse(content_type='application/ms-excel')
response['Content-Disposition'] = 'attachment; filename="emails.xls"'
work_book = xlwt.Workbook(encoding='utf-8')
work_sheet = work_book.add_sheet("Extra Submission Answers")
row_num = 0
for col_num in range(len(field_names)):
work_sheet.write(row_num, col_num, field_names[col_num])
for obj in queryset.select_related('email'):
row_num += 1
work_sheet.write(row_num, 0, obj.email.from_email)
work_sheet.write(row_num, 1, obj.email.from_name)
work_sheet.write(row_num, 2, obj.question)
work_sheet.write(row_num, 3, obj.answer)
work_book.save(response)
return response
export_as_excel.short_description = 'Export as Excel'
class EmailAdmin(admin.ModelAdmin):
fieldsets = (
(None, {
'fields': ('subject', 'body_txt', 'is_moderated',
'moderation_passed')
}),
('Advanced options', {
'classes': ('collapse',),
'fields': ('to_entity', 'to_addresses', 'remote_ip',
'user_agent', 'from_name', 'from_email',
'created_at', 'updated_at', 'secure_id',
'sender_secret', 'any_data', 'campaign',
'is_sent')
})
)
readonly_fields = ['created_at', 'updated_at']
list_filter = ('created_at', 'campaign', 'moderation_passed',
'is_moderated', 'is_sent')
list_display = ('from_email', 'to_addresses', 'created_at',
'is_sent', 'is_moderated', 'moderation_passed')
actions = ['export_as_excel', 'send_email']
def send_email(self, request, queryset):
"""
Send all the emails
"""
site = get_current_site(request)
for obj in queryset.filter(is_sent=False):
obj.send(site)
send_email.short_description = 'Send Emails'
def export_as_excel(self, request, queryset):
meta = self.model._meta
field_names = [field.name for field in meta.fields]
response = HttpResponse(content_type='application/ms-excel')
response['Content-Disposition'] = 'attachment; filename="emails.xls"'
work_book = xlwt.Workbook(encoding='utf-8')
work_sheet = work_book.add_sheet("Emails Submissions")
row_num = 0
for col_num in range(len(field_names)):
work_sheet.write(row_num, col_num, field_names[col_num])
for obj in queryset.values_list():
row_num += 1
for col_num in range(len(obj)):
if isinstance(obj[col_num], datetime.datetime):
work_sheet.write(row_num,
col_num,
str(obj[col_num]))
elif isinstance(obj[col_num], dict):
work_sheet.write(row_num,
col_num,
str(obj[col_num]))
else:
work_sheet.write(row_num,
col_num,
obj[col_num])
work_book.save(response)
return response
export_as_excel.short_description = 'Export as excel'
admin.site.site_header = 'Contact Parliament administration'
admin.site.register(Campaign)
admin.site.register(Committee, admin.ModelAdmin)
admin.site.register(ContactDetail, ContactDetailAdmin)
admin.site.register(Email, EmailAdmin)
admin.site.register(Entity, EntityAdmin)
admin.site.register(SenderQA, SenderQAAdmin)
| 34.692308 | 124 | 0.607169 |
71607b876976356a5d291267e33766c03ed4ef07 | 535 | py | Python | Lib/site-packages/PyInstaller/hooks/hook-babel.py | fhqjgd/python2.7 | 6533019b8b2fbe113aa552e44247c054bdd8a75b | [
"bzip2-1.0.6"
] | null | null | null | Lib/site-packages/PyInstaller/hooks/hook-babel.py | fhqjgd/python2.7 | 6533019b8b2fbe113aa552e44247c054bdd8a75b | [
"bzip2-1.0.6"
] | null | null | null | Lib/site-packages/PyInstaller/hooks/hook-babel.py | fhqjgd/python2.7 | 6533019b8b2fbe113aa552e44247c054bdd8a75b | [
"bzip2-1.0.6"
] | null | null | null | #-----------------------------------------------------------------------------
# Copyright (c) 2013-2016, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from PyInstaller.utils.hooks import collect_data_files
hiddenimports = ["babel.dates"]
datas = collect_data_files('babel')
| 33.4375 | 78 | 0.553271 |
f159d8a79c247690d6141f09f6af3e93f6b55c91 | 2,627 | py | Python | core/views/reports.py | Egorka96/med-org-portal | 3f55fb59daea03684b3bc6b5c394cd06cfd6e2df | [
"MIT"
] | 2 | 2020-03-02T17:29:14.000Z | 2020-05-28T13:19:49.000Z | core/views/reports.py | Egorka96/med-org-portal | 3f55fb59daea03684b3bc6b5c394cd06cfd6e2df | [
"MIT"
] | 88 | 2020-02-17T09:46:57.000Z | 2022-03-12T00:24:32.000Z | core/views/reports.py | Egorka96/med-org-portal | 3f55fb59daea03684b3bc6b5c394cd06cfd6e2df | [
"MIT"
] | 1 | 2020-04-17T15:56:51.000Z | 2020-04-17T15:56:51.000Z | from django.conf import settings
from django.contrib.auth.mixins import PermissionRequiredMixin
from django.urls import reverse
import core.generic.mixins
import core.generic.views
from core import forms
from core.datatools.report import get_report_period
from core.excel.reports import WorkersDoneExcel
from mis.service_client import Mis
class WorkersDoneReport(PermissionRequiredMixin, core.generic.mixins.FormMixin, core.generic.mixins.RestListMixin,
core.generic.views.ListView):
title = 'Отчет по прошедшим'
form_class = forms.WorkersPastReport
paginate_by = 50
permission_required = 'core.view_workers_done_report'
excel_workbook_maker = WorkersDoneExcel
mis_request_path = Mis.WORKERS_DONE_REPORT_URL
template_name = settings.TEMPLATES_DICT.get("workers_done_report")
def get_breadcrumbs(self):
return [
('Главная', reverse('core:index')),
(self.title, ''),
]
def get_workbook_maker_kwargs(self, **kwargs):
kwargs = super().get_workbook_maker_kwargs(**kwargs)
user_orgs = self.request.user.core.get_orgs()
kwargs['show_orgs'] = False if user_orgs and len(user_orgs) < 2 else True
kwargs['show_cost'] = True if self.request.user.has_perm('core.view_money') else False
kwargs['mis_request_path'] = self.mis_request_path
kwargs['filter_params'] = self.get_filter_params()
return kwargs
def get_excel_title(self):
title = self.get_title()
form = self.get_form()
if form.is_valid():
title += get_report_period(
date_from=form.cleaned_data.get('date_from'),
date_to=form.cleaned_data.get('date_to')
)
if orgs := form.cleaned_data.get('orgs'):
title += f'. Организации: {", ".join(str(org) for org in orgs)}'
return title
def get_filter_params(self):
filter_params = super().get_filter_params()
if self.request.GET:
filter_params['group_clients'] = True
return filter_params
def get_objects(self):
self.object_list = super().get_objects()
self.object_list = self.update_object_list(self.object_list)
return self.object_list
def update_object_list(self, objects):
for obj in objects:
obj['main_services'] = []
for app in ['prof', 'lmk', 'certificate', 'heal']:
app_orders = obj[app]
for o in app_orders:
obj['main_services'].append(o.get('main_services'))
return objects
| 34.116883 | 114 | 0.655501 |
66f209c65d581f8778ca1ba0f4548d51aa3738f7 | 970 | py | Python | playwave.py | tebeka/pythonwise | 56f8cb7aa792c3ad6a3dc754e15f6a04890d694a | [
"BSD-3-Clause"
] | 21 | 2016-11-16T20:08:56.000Z | 2021-12-11T23:13:05.000Z | playwave.py | tebeka/pythonwise | 56f8cb7aa792c3ad6a3dc754e15f6a04890d694a | [
"BSD-3-Clause"
] | 1 | 2020-10-05T08:35:31.000Z | 2020-10-05T08:35:31.000Z | playwave.py | tebeka/pythonwise | 56f8cb7aa792c3ad6a3dc754e15f6a04890d694a | [
"BSD-3-Clause"
] | 8 | 2016-11-12T22:54:55.000Z | 2021-02-10T10:46:23.000Z | #!/usr/bin/env python
'''Play a wav file on Linux
Freely copied from
http://www.velocityreviews.com/forums/t337346-how-to-play-sound-in-python.html
'''
import wave
import ossaudiodev
def playwave(wavefile):
fo = wave.open(wavefile, "rb")
(nc, sw, fr, nf, comptype, compname) = fo.getparams()
dsp = ossaudiodev.open("/dev/dsp", "w")
dsp.setparameters(ossaudiodev.AFMT_S16_NE, nc, fr)
data = fo.readframes(nf)
fo.close()
dsp.write(data)
dsp.close()
def main(argv=None):
import sys
from optparse import OptionParser
from os.path import isfile
argv = argv or sys.argv
parser = OptionParser("%prog WAVEFILE")
opts, args = parser.parse_args(argv[1:])
if len(args) != 1:
parser.error("wrong number of arguments") # Will exit
wavefile = args[0]
if not isfile(wavefile):
raise SystemExit("error: can't find %s" % wavefile)
playwave(wavefile)
if __name__ == "__main__":
main()
| 22.55814 | 78 | 0.656701 |
45e438abde0556ed144683763c659906fe73b7aa | 6,423 | py | Python | utils/plot_tasks.py | SamuelePilleri/plaso | f5687f12a89c7309797ccc285da78e855c120579 | [
"Apache-2.0"
] | null | null | null | utils/plot_tasks.py | SamuelePilleri/plaso | f5687f12a89c7309797ccc285da78e855c120579 | [
"Apache-2.0"
] | null | null | null | utils/plot_tasks.py | SamuelePilleri/plaso | f5687f12a89c7309797ccc285da78e855c120579 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Script to plot tasks from profiling data.
This script requires the matplotlib and numpy Python modules.
"""
from __future__ import print_function
# mathplotlib does not support Unicode strings a column names.
# from __future__ import unicode_literals
import argparse
import glob
import os
import sys
import numpy # pylint: disable=import-error
from matplotlib import pyplot # pylint: disable=import-error
class TaskMeasurements(object):
"""Measurements of a task.
Attributes:
completed_time (float): time when the task was completed by the foreman.
created_time (float): time when the task was created by the foreman.
merging_duration (float): time it took the foreman to merge the task.
merging_time (float): time when the task was started to be merged by
the foreman.
pending_merge (float): time when the task was scheduled to be merged by
the foreman.
processed_time (float): time when the task was processed according to
the foreman.
processing_duration (float): time it took the worker to process the task.
processing_time (float): time when the task started to be processed by
the worker.
scheduled_time (float): time when the task was scheduled onto the task
queue by the foreman.
"""
def __init__(self):
"""Initializes a task measurement."""
super(TaskMeasurements, self).__init__()
self.completed_time = None
self.created_time = None
self.merging_duration = None
self.merging_time = None
self.pending_merge_time = None
self.processed_time = None
self.processing_duration = None
self.processing_time = None
self.scheduled_time = None
def Main():
"""The main program function.
Returns:
bool: True if successful or False if not.
"""
argument_parser = argparse.ArgumentParser(description=(
'Plots memory usage from profiling data.'))
argument_parser.add_argument(
'--output', dest='output_file', type=str, help=(
'path of the output file to write the graph to instead of using '
'interactive mode. The output format deduced from the extension '
'of the filename.'))
argument_parser.add_argument(
'profile_path', type=str, help=(
'path to the directory containing the profiling data.'))
options = argument_parser.parse_args()
if not os.path.isdir(options.profile_path):
print('No such directory: {0:s}'.format(options.profile_path))
return False
names = ['time', 'identifier', 'status']
measurements = {}
glob_expression = os.path.join(options.profile_path, 'tasks-*.csv.gz')
for csv_file_name in glob.glob(glob_expression):
data = numpy.genfromtxt(
csv_file_name, delimiter='\t', dtype=None, encoding='utf-8',
names=names, skip_header=1)
label = os.path.basename(csv_file_name)
label = label.replace('tasks-', '').replace('.csv.gz', '')
for time, identifier, status in data:
if identifier not in measurements:
measurements[identifier] = TaskMeasurements()
task_measurement = measurements[identifier]
if status == 'completed':
task_measurement.completed_time = time
task_measurement.merging_duration = time - task_measurement.merging_time
elif status == 'created':
task_measurement.created_time = time
# TODO: add support for:
# elif status == 'merge_on_hold':
# elif status == 'merge_resumed':
elif status == 'merge_started':
task_measurement.merging_time = time
elif status == 'pending_merge':
task_measurement.pending_merge_time = time
elif status == 'processed':
task_measurement.processed_time = time
elif status == 'processing_started':
task_measurement.processing_time = time
elif status == 'processing_completed':
task_measurement.processing_duration = (
time - task_measurement.processing_time)
elif status == 'scheduled':
task_measurement.scheduled_time = time
before_pending_merge_duration = {}
before_queued_duration = {}
merging_duration = {}
pending_merge_duration = {}
processing_duration = {}
queued_duration = {}
for identifier, task_measurement in measurements.items():
before_pending_merge_duration[task_measurement.scheduled_time] = (
task_measurement.pending_merge_time - (
task_measurement.processing_time +
task_measurement.processing_duration))
before_queued_duration[task_measurement.scheduled_time] = (
task_measurement.scheduled_time - task_measurement.created_time)
merging_duration[task_measurement.merging_time] = (
task_measurement.merging_duration)
pending_merge_duration[task_measurement.processing_time] = (
task_measurement.merging_time - task_measurement.pending_merge_time)
processing_duration[task_measurement.processing_time] = (
task_measurement.processing_duration)
queued_duration[task_measurement.scheduled_time] = (
task_measurement.processing_time - task_measurement.scheduled_time)
keys = sorted(before_pending_merge_duration.keys())
values = [before_pending_merge_duration[key] for key in keys]
pyplot.plot(keys, values, label='Before pending merge')
keys = sorted(before_queued_duration.keys())
values = [before_queued_duration[key] for key in keys]
pyplot.plot(keys, values, label='Before queued')
keys = sorted(merging_duration.keys())
values = [merging_duration[key] for key in keys]
pyplot.plot(keys, values, label='Merging')
keys = sorted(pending_merge_duration.keys())
values = [pending_merge_duration[key] for key in keys]
pyplot.plot(keys, values, label='Pending merge')
keys = sorted(processing_duration.keys())
values = [processing_duration[key] for key in keys]
pyplot.plot(keys, values, label='Processing')
keys = sorted(queued_duration.keys())
values = [queued_duration[key] for key in keys]
pyplot.plot(keys, values, label='Queued')
pyplot.title('Task status duration')
pyplot.xlabel('Time')
pyplot.xscale('linear')
pyplot.ylabel('Duration')
pyplot.yscale('linear')
pyplot.legend()
if options.output_file:
pyplot.savefig(options.output_file)
else:
pyplot.show()
return True
if __name__ == '__main__':
if not Main():
sys.exit(1)
else:
sys.exit(0)
| 31.331707 | 80 | 0.709793 |
d749831e45681d9ff125c2c4cff6b5d864c69758 | 2,624 | py | Python | torchmetrics/functional/retrieval/fall_out.py | niberger/metrics | c537c9b3e8e801772a7e5670ff273321ed26e77b | [
"Apache-2.0"
] | null | null | null | torchmetrics/functional/retrieval/fall_out.py | niberger/metrics | c537c9b3e8e801772a7e5670ff273321ed26e77b | [
"Apache-2.0"
] | null | null | null | torchmetrics/functional/retrieval/fall_out.py | niberger/metrics | c537c9b3e8e801772a7e5670ff273321ed26e77b | [
"Apache-2.0"
] | null | null | null | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
import torch
from torch import Tensor, tensor
from torchmetrics.utilities.checks import _check_retrieval_functional_inputs
def retrieval_fall_out(preds: Tensor, target: Tensor, k: Optional[int] = None) -> Tensor:
"""Computes the Fall-out (for information retrieval), as explained in `IR Fall-out`_ Fall-out is the fraction
of non-relevant documents retrieved among all the non-relevant documents.
``preds`` and ``target`` should be of the same shape and live on the same device. If no ``target`` is ``True``,
``0`` is returned. ``target`` must be either `bool` or `integers` and ``preds`` must be `float`,
otherwise an error is raised. If you want to measure Fall-out@K, ``k`` must be a positive integer.
Args:
preds: estimated probabilities of each document to be relevant.
target: ground truth about each document being relevant or not.
k: consider only the top k elements (default: `None`, which considers them all)
Returns:
a single-value tensor with the fall-out (at ``k``) of the predictions ``preds`` w.r.t. the labels ``target``.
Raises:
ValueError:
If ``k`` parameter is not `None` or an integer larger than 0
Example:
>>> from torchmetrics.functional import retrieval_fall_out
>>> preds = tensor([0.2, 0.3, 0.5])
>>> target = tensor([True, False, True])
>>> retrieval_fall_out(preds, target, k=2)
tensor(1.)
"""
preds, target = _check_retrieval_functional_inputs(preds, target)
k = preds.shape[-1] if k is None else k
if not (isinstance(k, int) and k > 0):
raise ValueError("`k` has to be a positive integer or None")
target = 1 - target # we want to compute the probability of getting a non-relevant doc among all non-relevant docs
if not target.sum():
return tensor(0.0, device=preds.device)
relevant = target[torch.argsort(preds, dim=-1, descending=True)][:k].sum().float()
return relevant / target.sum()
| 41.650794 | 119 | 0.691311 |
389c4f09d94b853d26a91486d518694d3d1ca38e | 7,380 | py | Python | pywikibot/families/wikibooks_family.py | valhallasw/pywikibot-core | 32a8c3c1298a5cb077381fe202daefde82c1c5d3 | [
"MIT"
] | null | null | null | pywikibot/families/wikibooks_family.py | valhallasw/pywikibot-core | 32a8c3c1298a5cb077381fe202daefde82c1c5d3 | [
"MIT"
] | null | null | null | pywikibot/families/wikibooks_family.py | valhallasw/pywikibot-core | 32a8c3c1298a5cb077381fe202daefde82c1c5d3 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Family module for Wikibooks."""
from __future__ import unicode_literals
from pywikibot import family
__version__ = '$Id: 389c4f09d94b853d26a91486d518694d3d1ca38e $'
# The Wikimedia family that is known as Wikibooks
class Family(family.WikimediaFamily):
"""Family class for Wikibooks."""
closed_wikis = [
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Afar_Wikibooks
'aa',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Akan_Wikibooks
'ak',
# https://als.wikipedia.org/wiki/Wikipedia:Stammtisch/Archiv_2008-1#Afterwards.2C_closure_and_deletion_of_Wiktionary.2C_Wikibooks_and_Wikiquote_sites
'als',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Assamese_Wikibooks
'as',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Asturianu_Wikibooks
'ast',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Aymar_Wikibooks
'ay',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Bashkir_Wikibooks
'ba',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Bislama_Wikibooks
'bi',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Bambara_Wikibooks
'bm',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Tibetan_Wikibooks
'bo',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Chamorro_Wikibooks
'ch',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Corsu_Wikibooks
'co',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Gaeilge_Wikibooks
'ga',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Gothic_Wikibooks
'got',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Guarani_Wikibooks
'gn',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Gujarati_Wikibooks
'gu',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Kannada_Wikibooks
'kn',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Kashmiri_Wikibooks
'ks',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_L%C3%ABtzebuergesch_Wikibooks
'lb',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Lingala_Wikibooks
'ln',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Latvian_Wikibooks
'lv',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Maori_Wikibooks
'mi',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Mongolian_Wikibooks
'mn',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Burmese_Wikibooks
'my',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Nauruan_Wikibooks
'na',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Nahuatl_Wikibooks
'nah',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Plattd%C3%BC%C3%BCtsch_Wikibooks
'nds',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Pashto_Wikibooks
'ps',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Quechua_Wikibooks
'qu',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Rumantsch_Wikibooks
'rm',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Sami_Wikibooks
'se',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Simple_English_Wikibooks_(3)
'simple',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Basa_Sunda_Wikibooks_(2)
'su',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Swahili_Wikibooks
'sw',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Turkmen_Wikibooks
'tk',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Uyghur_Wikibooks
'ug',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Volap%C3%BCk_Wikibooks
'vo',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Walon_Wikibooks
'wa',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Xhosa_Wikibooks
'xh',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Yoruba_Wikibooks
'yo',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Zhuang_Wikibooks
'za',
# https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Zulu_Wikibooks
'zu',
]
removed_wikis = [
'tokipona',
]
def __init__(self):
"""Constructor."""
super(Family, self).__init__()
self.name = 'wikibooks'
self.languages_by_size = [
'en', 'de', 'fr', 'hu', 'ja', 'it', 'es', 'pt', 'nl', 'pl', 'he',
'vi', 'ca', 'id', 'sq', 'fi', 'ru', 'fa', 'cs', 'zh', 'sv', 'hr',
'tr', 'ro', 'sr', 'ar', 'no', 'th', 'ko', 'gl', 'da', 'ta', 'mk',
'az', 'tl', 'is', 'ka', 'lt', 'tt', 'uk', 'eo', 'bg', 'sk', 'sl',
'el', 'hy', 'ms', 'sa', 'si', 'li', 'la', 'ml', 'ur', 'bn', 'ang',
'ia', 'cv', 'et', 'hi', 'km', 'mr', 'eu', 'oc', 'kk', 'fy', 'ne',
'ie', 'te', 'af', 'tg', 'ky', 'bs', 'pa', 'be', 'mg', 'cy',
'zh-min-nan', 'ku', 'uz',
]
self.langs = dict([(lang, '%s.wikibooks.org' % lang)
for lang in self.languages_by_size])
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
self.cross_allowed = [
'af', 'ang', 'ca', 'fa', 'fy', 'it', 'nl', 'ru', 'th', 'zh',
]
# Which languages have a special order for putting interlanguage links,
# and what order is it? If a language is not in interwiki_putfirst,
# alphabetical order on language code is used. For languages that are in
# interwiki_putfirst, interwiki_putfirst is checked first, and
# languages are put in the order given there. All other languages are
# put after those, in code-alphabetical order.
self.interwiki_putfirst = {
'en': self.alphabetic,
'fi': self.alphabetic,
'fr': self.alphabetic,
'he': ['en'],
'hu': ['en'],
'pl': self.alphabetic,
'simple': self.alphabetic
}
def shared_data_repository(self, code, transcluded=False):
"""Return the shared data repository for this family."""
return ('wikidata', 'wikidata')
| 49.2 | 157 | 0.666531 |
575193252d49d2a71ea2d36b2f396d37c02dcd50 | 1,894 | py | Python | oslo/concurrency/fixture/lockutils.py | citrix-openstack-build/oslo.concurrency | ca01f962c940927438820e7d14f925bcce1a9895 | [
"Apache-2.0"
] | null | null | null | oslo/concurrency/fixture/lockutils.py | citrix-openstack-build/oslo.concurrency | ca01f962c940927438820e7d14f925bcce1a9895 | [
"Apache-2.0"
] | null | null | null | oslo/concurrency/fixture/lockutils.py | citrix-openstack-build/oslo.concurrency | ca01f962c940927438820e7d14f925bcce1a9895 | [
"Apache-2.0"
] | null | null | null | # Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
from oslo.concurrency import lockutils
class LockFixture(fixtures.Fixture):
"""External locking fixture.
This fixture is basically an alternative to the synchronized decorator with
the external flag so that tearDowns and addCleanups will be included in
the lock context for locking between tests. The fixture is recommended to
be the first line in a test method, like so::
def test_method(self):
self.useFixture(LockFixture)
...
or the first line in setUp if all the test methods in the class are
required to be serialized. Something like::
class TestCase(testtools.testcase):
def setUp(self):
self.useFixture(LockFixture)
super(TestCase, self).setUp()
...
This is because addCleanups are put on a LIFO queue that gets run after the
test method exits. (either by completing or raising an exception)
"""
def __init__(self, name, lock_file_prefix=None):
self.mgr = lockutils.lock(name, lock_file_prefix, True)
def setUp(self):
super(LockFixture, self).setUp()
self.addCleanup(self.mgr.__exit__, None, None, None)
self.lock = self.mgr.__enter__()
| 36.423077 | 79 | 0.68849 |
008f49efd7db1a926a73c76cc44809b557ed3a24 | 4,129 | py | Python | L17/sql_mini_solve/flask_example.py | thebestday/python | 2efb7fbd5c4ee40c03233875c1989ce68aa0fe18 | [
"MIT"
] | null | null | null | L17/sql_mini_solve/flask_example.py | thebestday/python | 2efb7fbd5c4ee40c03233875c1989ce68aa0fe18 | [
"MIT"
] | null | null | null | L17/sql_mini_solve/flask_example.py | thebestday/python | 2efb7fbd5c4ee40c03233875c1989ce68aa0fe18 | [
"MIT"
] | null | null | null | from flask import Flask, request, render_template
import requests
from average_wage import sallaryfunction
import sqlite3 as lite
import sys
app = Flask(__name__)
@app.route('/')
@app.route('/main')
def hello():
return render_template('main.html')
@app.route('/contacts')
def contact():
return render_template('contacts.html')
@app.route('/search', methods=['POST', 'GET'])
def search_form():
spec = request.form['specialisation']
sallary = request.form['sallary']
location = request.form['location']
work_place = request.form['work_place']
comment = request.form['comment']
commentcity = request.form['commentcity']
if comment != '':
spec = comment
if commentcity != '':
location = commentcity
mid_sal_from, mid_sal_to, all_found = sallaryfunction(sallary, spec, location)
data = {
'spec': spec,
'sallary': sallary,
'location': location,
'work_place': work_place,
'comment': comment,
'commentcity': commentcity,
'mid_sal_from': mid_sal_from,
'mid_sal_to': mid_sal_to,
'all_found': all_found
}
print("зарплата {}-разработчика в {} для выбранной зарплаты от {} составляет в среднем от {}руб. до {}руб.".format(spec, location, sallary, mid_sal_from, mid_sal_to))
return render_template('search.html', data=data)
@app.route('/new_data', methods=['POST', 'GET'])
def search_form2():
spec = request.form['specialisation']
sallary = request.form['sallary']
location = request.form['location']
work_place = request.form['work_place']
comment = request.form['comment']
commentcity = request.form['commentcity']
if comment != '':
spec = comment
if commentcity != '':
location = commentcity
mid_sal_from, mid_sal_to, all_found = sallaryfunction(sallary, spec, location)
data = {
'spec': spec,
'sallary': sallary,
'location': location,
'work_place': work_place,
'comment': comment,
'commentcity': commentcity,
'mid_sal_from': mid_sal_from,
'mid_sal_to': mid_sal_to,
'all_found': all_found
}
print("зарплата {}-разработчика в {} для выбранной зарплаты от {} составляет в среднем от {}руб. до {}руб.".format(spec, location, sallary, mid_sal_from, mid_sal_to))
# print(spec, type(spec))
# print(sallary, type(sallary))
# print(location, type(location))
# print(comment, type(comment))
# print(commentcity, type(commentcity))
# print(mid_sal_from, type(mid_sal_from))
# print(mid_sal_to, type(mid_sal_to))
# print(all_found, type(all_found))
print('-----------DB---------------')
connect = None
try:
connect = lite.connect('test.db')
cur = connect.cursor()
cur.execute('SELECT SQLITE_VERSION()')
# берем первый объект из полученных данных и помещяем его в переменную data
datadb = cur.fetchone()[0]
print(f'SQLite version: {datadb}') # SQLite version: 3.31.1
except lite.Error as e:
print(f'Error {e.args[0]}:')
sys.exit()
# что бы создать таблицу - необходимо выполнит такой sql запрос
# поэтому закоментируем код создания таблицы
# cur.execute('CREATE TABLE cities(id INT, spec TEXT, sallary TEXT, location TEXT, comment TEXT, commentcity TEXT, mid_sal_from INT, mid_sal_to INT, all_found INT)')
sqlite_select_query = """SELECT * from cities"""
cur.execute(sqlite_select_query)
records = cur.fetchall()
with connect:
cur.execute("SELECT Count() FROM cities")
id = cur.fetchone()[0]
cur.execute("INSERT INTO cities VALUES(?,?,?,?,?,?,?,?,?)", (id+1, f'{spec}', f'{sallary}', f'{location}', f'{comment}', f'{commentcity}', mid_sal_from, mid_sal_to, all_found))
connect.commit()
for row in records:
print(row)
print(len(records))
print(f'rows updated: {cur.rowcount}')
print(records[0][0])
connect.close()
return render_template('new_data.html', data=data)
if __name__ == "__main__":
app.run(debug=True)
| 29.92029 | 184 | 0.635747 |
e0f877327dfcadd5a67ef4c7a49a5ed6168ca921 | 603 | py | Python | testproject/testproject/settings/base.py | motius/channels | f46ed6ec9865c5e995d4e4de1dee4b097b855ee3 | [
"BSD-3-Clause"
] | 3 | 2019-02-07T09:43:03.000Z | 2020-01-18T08:44:47.000Z | testproject/testproject/settings/base.py | motius/channels | f46ed6ec9865c5e995d4e4de1dee4b097b855ee3 | [
"BSD-3-Clause"
] | 1 | 2022-02-11T03:46:32.000Z | 2022-02-11T03:46:32.000Z | testproject/testproject/settings/base.py | zsjohny/channels | 6d71106c3c6a8924f75e5058cd6c54e765af3b94 | [
"BSD-3-Clause"
] | 1 | 2020-01-16T12:05:52.000Z | 2020-01-16T12:05:52.000Z | # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = '-3yt98bfvxe)7+^h#(@8k#1(1m_fpd9x3q2wolfbf^!r5ma62u'
DEBUG = True
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
ROOT_URLCONF = 'testproject.urls'
WSGI_APPLICATION = 'testproject.wsgi.application'
STATIC_URL = "/static/"
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
| 21.535714 | 71 | 0.681592 |
5b84ee8a28e1afa705949fc374f7ea13be750ad2 | 4,163 | py | Python | Scripts/Image_fetch.py | edoriggio/InstagramDuplicatesBot | 989ef33479ed0fda7be947dd278e716357a927be | [
"Apache-2.0"
] | null | null | null | Scripts/Image_fetch.py | edoriggio/InstagramDuplicatesBot | 989ef33479ed0fda7be947dd278e716357a927be | [
"Apache-2.0"
] | null | null | null | Scripts/Image_fetch.py | edoriggio/InstagramDuplicatesBot | 989ef33479ed0fda7be947dd278e716357a927be | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Edoardo Riggio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from time import sleep
# Selenium modules
from selenium import webdriver
from selenium.webdriver.firefox.options import Options
# Selenium driver init
op = Options()
op.headless = True
# Comment the next line and uncomment the following if you have
# authentication issues. Comment the line after the next to
# make the script run properly after you inserted your 2FA code
driver = webdriver.Firefox(options = op)
# driver = webdriver.Firefox()
driver.implicitly_wait(20)
# Variables
json_file = 'Json/credentials.json'
insta_url = 'https://www.instagram.com/'
# Function for reading and writing data froma a JSON file
def read_write_to_json(file: str, data: dict, readwrite: str = 'r'):
"""
Given a file path, some data and a sigle character string, return the
contents of the given json file. The readwrite string is used to let
the function know whether it should read (r) a file or write (w) to it
file -- string
data -- dictionary
readwrite -- string
return -- dictionary
"""
if readwrite == 'r':
with open(file, 'r') as file_to_read:
return json.load(file_to_read)
elif readwrite == 'w':
with open(file, 'w') as file_to_write:
json.dump(data, file_to_write)
else:
raise Exception("readwrite must be either 'r' or 'w'")
def login_to_instagram():
"""
It performs a login into instagram.com and takes a screenshot of the
user's profile page. It saves the image in a .png format in the Assets
folder
"""
print('\nInitiating login into Instagram page\
\n------------------------------------')
json_data = read_write_to_json(json_file, {})
username = json_data['username']
password = json_data['password']
driver.maximize_window()
driver.get(insta_url)
driver.find_element_by_xpath("//a[contains(text(), 'Log in')]").click()
print('instagram.com loaded')
sleep(3)
print('Typing username and password')
driver.find_element_by_xpath("//input[@name=\"username\"]").send_keys(username)
driver.find_element_by_xpath("//input[@name=\"password\"]").send_keys(password)
driver.find_element_by_xpath("//button[@type='submit']").click()
sleep(3)
driver.find_element_by_xpath("//button[contains(text(), 'Not Now')]").click()
print('Logged in')
sleep(3)
driver.find_element_by_xpath('/html/body/div[1]/section/nav/div[2]/div/div/div[3]/div/div[3]/a').click()
print('About to take screenshot')
sleep(5)
size = lambda x: driver.execute_script('return document.body.parentNode.scroll' + x)
driver.set_window_size(size('Width'), size('Height'))
driver.find_element_by_tag_name('body').screenshot('Assets/screenshot.png')
print('Done, screenshot saved')
driver.close()
def register_new_user():
"""
It makes the user input their Instagram username and password, in order
to login into the platform later on. The data is stored in a json file
"""
username = input('Write down your Instagram username:\n>> ')
password = input('Write down your instagram password:\n>> ')
read_write_to_json(json_file, {'username': username, 'password': password}, 'w')
def check_if_registered():
"""
It checks if the user credentials are stored in a default json file, it returns
True if the user credentials are saved, false otherwise
return -- boolean
"""
with open(json_file, 'r') as file_to_read:
data = json.load(file_to_read)
return data['username'] == '' or data['password'] == '' | 33.845528 | 108 | 0.689166 |
4a41a4195f81e276767804e072be2db8a93c61fd | 1,034 | py | Python | utils/errors.py | iBlackDev/django-project-template | 44de354c560f3a2b22f3c753c1278a2f49aec75e | [
"MIT"
] | null | null | null | utils/errors.py | iBlackDev/django-project-template | 44de354c560f3a2b22f3c753c1278a2f49aec75e | [
"MIT"
] | null | null | null | utils/errors.py | iBlackDev/django-project-template | 44de354c560f3a2b22f3c753c1278a2f49aec75e | [
"MIT"
] | null | null | null | # coding:utf-8
ERROR_0_OK = 0
ERROR_UNKOWN = -100
ERROR_PERMISSION_DENIED = -210
ERROR_TOKEN_ERROR = -300
ERROR_TOKEN_EXPIRE = -310
# 错误号 英文代码 中文描述 备注
CONSTANT = {
# 公用模块
"100000": ["Request type error", u"请求类型错误"],
"100001": ["Required field can not be empty", u"必填字段不能为空"],
"100002": ["Parameter type error", u"参数类型错误"],
"100003": ["param value error", u"参数取值错误"],
"100004": ["Request throw exception", u"请求出现错误."],
"100005":
["You don't have enought permissions to this action!", u"您无权访问该页面."],
"100006": ["Param value out of range!", u"参数取值超出范围."],
"100007": ["The length of string value out of range!", u"字符串长度值超出范围."],
"100008": ["This is the necessary option,please choose.", u"必选项,请选择!"],
"100009": ["Params format error.", u"参数格式错误"],
}
def get_error_msg(k='100100000', en='ch'):
k = str(k)
if not k:
return ""
if en == 'ch':
return CONSTANT.get(k)[1]
elif en == 'en':
return CONSTANT.get(k)[0]
return CONSTANT.get(k)[1]
| 29.542857 | 75 | 0.60735 |
de03e01870134a7cf2b3846ff294bd3a3f6c4f68 | 138 | py | Python | tasks/R2R-judy/src/utils/__init__.py | IMNearth/Curriculum-Learning-For-VLN | d2fe1286eb295dc8c63a0c886b35883f32481d85 | [
"MIT"
] | 8 | 2021-11-09T13:29:19.000Z | 2022-03-30T04:01:42.000Z | tasks/R2R-judy/src/utils/__init__.py | IMNearth/Curriculum-Learning-For-VLN | d2fe1286eb295dc8c63a0c886b35883f32481d85 | [
"MIT"
] | 1 | 2022-03-17T14:16:44.000Z | 2022-03-29T03:16:32.000Z | tasks/R2R-judy/src/utils/__init__.py | IMNearth/Curriculum-Learning-For-VLN | d2fe1286eb295dc8c63a0c886b35883f32481d85 | [
"MIT"
] | null | null | null | from .misc import *
from .dtw import DTW
from .cls import CLS
from .config import get_cfg_defaults
from .region_label import REGION_LABELS | 27.6 | 39 | 0.818841 |
bcf6ca3d6d1b5e368d70ff619ed47470a9e58580 | 1,818 | py | Python | taffrail/heapster_api_source.py | taffrailmetrics/taffrail | 274023855dff0e20053105b7562c73324203f3b3 | [
"MIT"
] | 2 | 2018-01-30T02:44:50.000Z | 2018-02-16T13:02:44.000Z | taffrail/heapster_api_source.py | taffrailmetrics/taffrail | 274023855dff0e20053105b7562c73324203f3b3 | [
"MIT"
] | null | null | null | taffrail/heapster_api_source.py | taffrailmetrics/taffrail | 274023855dff0e20053105b7562c73324203f3b3 | [
"MIT"
] | null | null | null | from kubernetes import client
from prometheus_client.parser import text_string_to_metric_families
import json
from .metrics import MetricsUtility
import os
class HeapsterApiSource(object):
enabled = False
endpoint = {"HEAPSTER_NAME": "heapster", "HEAPSTER_NAMESPACE": "kube-system", "HEAPSTER_PATH": "/metrics"}
def __init__(self, kubernetes_client):
self.name = "heapster"
self.client = kubernetes_client.CoreV1Api()
self.__discover()
def __discover(self):
for env_var in self.endpoint:
if os.environ.get(env_var) is not None:
HeapsterApiSource.endpoint[env_var] = os.environ.get(env_var)
try:
self.client.connect_get_namespaced_service_proxy_with_path(HeapsterApiSource.endpoint['HEAPSTER_NAME'],
HeapsterApiSource.endpoint['HEAPSTER_NAMESPACE'], HeapsterApiSource.endpoint['HEAPSTER_PATH'])
except Exception as err:
return
self.enabled = True
def get_metrics(self):
metrics_list = []
metrics_response = self.client.connect_get_namespaced_service_proxy_with_path(HeapsterApiSource.endpoint['HEAPSTER_NAME'],
HeapsterApiSource.endpoint['HEAPSTER_NAMESPACE'], HeapsterApiSource.endpoint['HEAPSTER_PATH'])
if metrics_response:
for family in text_string_to_metric_families(metrics_response):
metrics_obj = MetricsUtility().to_object(family)
metrics_list.append(metrics_obj)
return HeapsterMetrics(self.name, metrics_list)
class HeapsterMetrics(object):
def __init__(self, name, items):
self.name = name
self.items = items
def to_dict(self):
return HeapsterMetrics(self.name, [ob.__dict__ for ob in self.items]).__dict__
| 36.36 | 130 | 0.689219 |
eef607feb70c93a09a57d76c600c6bc699bc94fc | 7,664 | py | Python | arrested/mixins.py | haskiindahouse/promosales-and-prizes-flask | 5b9ae3e0506d847463e367180f4e784fe835c393 | [
"MIT"
] | 46 | 2016-06-28T10:25:07.000Z | 2019-12-10T20:53:47.000Z | arrested/mixins.py | haskiindahouse/promosales-and-prizes-flask | 5b9ae3e0506d847463e367180f4e784fe835c393 | [
"MIT"
] | 4 | 2018-02-10T10:53:08.000Z | 2018-11-07T08:11:06.000Z | arrested/mixins.py | haskiindahouse/promosales-and-prizes-flask | 5b9ae3e0506d847463e367180f4e784fe835c393 | [
"MIT"
] | 9 | 2016-07-20T17:05:46.000Z | 2022-02-15T18:40:17.000Z |
__all__ = [
'GetListMixin', 'CreateMixin', 'GetObjectMixin', 'PutObjectMixin',
'PatchObjectMixin', 'DeleteObjectMixin', 'ObjectMixin'
]
class HTTPMixin(object):
"""
"""
def _response(self, body, status):
"""
"""
return (self.make_response(body, status=status))
class GetListMixin(HTTPMixin):
"""Base ListMixin class that defines the expected API for all ListMixins
"""
def get_objects(self):
"""
"""
raise NotImplementedError()
def list_response(self, status=200):
"""Pull the processed data from the response_handler and return a response.
:param status: The HTTP status code returned with the response
.. seealso:
:meth:`Endpoint.make_response`
:meth:`Endpoint.handle_get_request`
"""
return self._response(self.response.get_response_data(), status=status)
def handle_get_request(self):
"""Handle incoming GET request to an Endpoint and return an
array of results by calling :meth:`.GetListMixin.get_objects`.
.. seealso::
:meth:`GetListMixin.get_objects`
:meth:`Endpoint.get`
"""
self.objects = self.get_objects()
self.response = self.get_response_handler()
self.response.process(self.objects)
return self.list_response()
class CreateMixin(HTTPMixin):
"""Base CreateMixin class that defines the expected API for all CreateMixins
"""
def save_object(self, obj):
"""Called by :meth:`CreateMixin.handle_post_request` ater the incoming data has
been marshalled by the RequestHandler.
:param obj: The marhsaled object from RequestHandler.
"""
return obj
def create_response(self, status=201):
"""Generate a Response object for a POST request. By default, the newly created
object will be passed to the specified ResponseHandler and will be serialized
as the response body.
"""
self.response = self.get_response_handler()
self.response.process(self.obj)
return self._response(self.response.get_response_data(), status=status)
def handle_post_request(self):
"""Handle incoming POST request to an Endpoint and marshal the request data
via the specified RequestHandler. :meth:`.CreateMixin.save_object`. is then
called and must be implemented by mixins implementing this interfce.
.. seealso::
:meth:`CreateMixin.save_object`
:meth:`Endpoint.post`
"""
self.request = self.get_request_handler()
self.obj = self.request.process().data
self.save_object(self.obj)
return self.create_response()
class ObjectMixin(object):
"""Mixin that provides an interface for working with single data objects
"""
allow_none = False
def get_object(self):
"""Called by :meth:`GetObjectMixin.handle_get_request`. Concrete classes should
implement this method and return object typically by id.
:raises: NotImplementedError
"""
raise NotImplementedError()
def object_response(self, status=200):
"""Generic response generation for Endpoints that return a single
serialized object.
:param status: The HTTP status code returned with the response
:returns: Response object
"""
self.response = self.get_response_handler()
self.response.process(self.obj)
return self._response(self.response.get_response_data(), status=status)
@property
def obj(self):
"""Returns the value of :meth:`ObjectMixin.get_object` and sets a private
property called _obj. This property ensures the logic around allow_none
is enforced across Endpoints using the Object interface.
:raises: :class:`werkzeug.exceptions.BadRequest`
:returns: The result of :meth:ObjectMixin.get_object`
"""
if not getattr(self, '_obj', None):
self._obj = self.get_object()
if self._obj is None and not self.allow_none:
self.return_error(404)
return self._obj
@obj.setter
def obj(self, value):
"""Sets the value of the private _obj property.
"""
self._obj = value
class GetObjectMixin(HTTPMixin, ObjectMixin):
"""Base GetObjectMixins class that defines the expected API for all GetObjectMixins
"""
def handle_get_request(self):
"""Handle incoming GET request to an Endpoint and return a
single object by calling :meth:`.GetListMixin.get_object`.
.. seealso::
:meth:`GetListMixin.get_objects`
:meth:`Endpoint.get`
"""
return self.object_response()
class PutObjectMixin(HTTPMixin, ObjectMixin):
"""Base PutObjectMixins class that defines the expected API for all PutObjectMixin
"""
def put_request_response(self, status=200):
"""Pull the processed data from the response_handler and return a response.
:param status: The HTTP status code returned with the response
.. seealso:
:meth:`ObjectMixin.object_response`
:meth:`Endpoint.handle_put_request`
"""
return self.object_response(status=status)
def handle_put_request(self):
"""
"""
obj = self.obj
self.request = self.get_request_handler()
self.request.process()
self.update_object(obj)
return self.put_request_response()
def update_object(self, obj):
"""Called by :meth:`PutObjectMixin.handle_put_request` ater the incoming data has
been marshalled by the RequestHandler.
:param obj: The marhsaled object from RequestHandler.
"""
return obj
class PatchObjectMixin(HTTPMixin, ObjectMixin):
"""Base PatchObjectMixin class that defines the expected API for all PatchObjectMixin
"""
def patch_request_response(self, status=200):
"""Pull the processed data from the response_handler and return a response.
:param status: The HTTP status code returned with the response
.. seealso:
:meth:`ObjectMixin.object_response`
:meth:`Endpoint.handle_put_request`
"""
return self.object_response(status=status)
def handle_patch_request(self):
"""
"""
obj = self.obj
self.request = self.get_request_handler()
self.request.process()
self.patch_object(obj)
return self.patch_request_response()
def patch_object(self, obj):
"""Called by :meth:`PatchObjectMixin.handle_patch_request` ater the
incoming data has been marshalled by the RequestHandler.
:param obj: The marhsaled object from RequestHandler.
"""
return obj
class DeleteObjectMixin(HTTPMixin, ObjectMixin):
"""Base DeletehObjecttMixin class that defines the expected API for
all DeletehObjecttMixins.
"""
def delete_request_response(self, status=204):
"""Pull the processed data from the response_handler and return a response.
:param status: The HTTP status code returned with the response
"""
return (self.make_response('', status=status))
def handle_delete_request(self):
"""
"""
self.delete_object(self.obj)
return self.delete_request_response()
def delete_object(self, obj):
"""Called by :meth:`DeleteObjectMixin.handle_delete_request`.
:param obj: The marhsaled object being deleted.
"""
return obj
| 30.173228 | 89 | 0.64953 |
897ffd7eedddf610bfd85fd4c105ac898ffa7a5e | 4,531 | py | Python | wagtail/admin/views/pages/copy.py | stevedya/wagtail | 52e5abfe62547cdfd90ea7dfeb8bf5a52f16324c | [
"BSD-3-Clause"
] | 1 | 2022-02-09T05:25:30.000Z | 2022-02-09T05:25:30.000Z | wagtail/admin/views/pages/copy.py | stevedya/wagtail | 52e5abfe62547cdfd90ea7dfeb8bf5a52f16324c | [
"BSD-3-Clause"
] | null | null | null | wagtail/admin/views/pages/copy.py | stevedya/wagtail | 52e5abfe62547cdfd90ea7dfeb8bf5a52f16324c | [
"BSD-3-Clause"
] | null | null | null | from django.shortcuts import redirect
from django.template.response import TemplateResponse
from django.utils.translation import gettext as _
from wagtail import hooks
from wagtail.actions.copy_page import CopyPageAction
from wagtail.actions.create_alias import CreatePageAliasAction
from wagtail.admin import messages
from wagtail.admin.auth import user_has_any_page_permission, user_passes_test
from wagtail.admin.forms.pages import CopyForm
from wagtail.admin.views.pages.utils import get_valid_next_url_from_request
from wagtail.models import Page
@user_passes_test(user_has_any_page_permission)
def copy(request, page_id):
page = Page.objects.get(id=page_id)
# Parent page defaults to parent of source page
parent_page = page.get_parent()
# Check if the user has permission to publish subpages on the parent
can_publish = parent_page.permissions_for_user(request.user).can_publish_subpage()
# Create the form
form = CopyForm(
request.POST or None, user=request.user, page=page, can_publish=can_publish
)
next_url = get_valid_next_url_from_request(request)
for fn in hooks.get_hooks("before_copy_page"):
result = fn(request, page)
if hasattr(result, "status_code"):
return result
# Check if user is submitting
if request.method == "POST":
# Prefill parent_page in case the form is invalid (as prepopulated value for the form field,
# because ModelChoiceField seems to not fall back to the user given value)
parent_page = Page.objects.get(id=request.POST["new_parent_page"])
if form.is_valid():
# Receive the parent page (this should never be empty)
if form.cleaned_data["new_parent_page"]:
parent_page = form.cleaned_data["new_parent_page"]
# Re-check if the user has permission to publish subpages on the new parent
can_publish = parent_page.permissions_for_user(
request.user
).can_publish_subpage()
keep_live = can_publish and form.cleaned_data.get("publish_copies")
# Copy the page
# Note that only users who can publish in the new parent page can create an alias.
# This is because alias pages must always match their original page's state.
if can_publish and form.cleaned_data.get("alias"):
action = CreatePageAliasAction(
page.specific,
recursive=form.cleaned_data.get("copy_subpages"),
parent=parent_page,
update_slug=form.cleaned_data["new_slug"],
user=request.user,
)
new_page = action.execute(skip_permission_checks=True)
else:
action = CopyPageAction(
page=page,
recursive=form.cleaned_data.get("copy_subpages"),
to=parent_page,
update_attrs={
"title": form.cleaned_data["new_title"],
"slug": form.cleaned_data["new_slug"],
},
keep_live=keep_live,
user=request.user,
)
new_page = action.execute()
# Give a success message back to the user
if form.cleaned_data.get("copy_subpages"):
messages.success(
request,
_("Page '{0}' and {1} subpages copied.").format(
page.specific_deferred.get_admin_display_title(),
new_page.get_descendants().count(),
),
)
else:
messages.success(
request,
_("Page '{0}' copied.").format(
page.specific_deferred.get_admin_display_title()
),
)
for fn in hooks.get_hooks("after_copy_page"):
result = fn(request, page, new_page)
if hasattr(result, "status_code"):
return result
# Redirect to explore of parent page
if next_url:
return redirect(next_url)
return redirect("wagtailadmin_explore", parent_page.id)
return TemplateResponse(
request,
"wagtailadmin/pages/copy.html",
{
"page": page,
"form": form,
"next": next_url,
},
)
| 39.060345 | 100 | 0.593688 |
2ea3a782db8eca929b01773b1597528b441813e2 | 1,704 | py | Python | setup.py | wmayner/umap | 224a0125cc3fac7a3720875f001ad6e25eed1cbd | [
"BSD-3-Clause"
] | null | null | null | setup.py | wmayner/umap | 224a0125cc3fac7a3720875f001ad6e25eed1cbd | [
"BSD-3-Clause"
] | null | null | null | setup.py | wmayner/umap | 224a0125cc3fac7a3720875f001ad6e25eed1cbd | [
"BSD-3-Clause"
] | null | null | null | from setuptools import setup
def readme():
try:
with open("README.rst", encoding="UTF-8") as readme_file:
return readme_file.read()
except TypeError:
# Python 2.7 doesn't support encoding argument in builtin open
import io
with io.open("README.rst", encoding="UTF-8") as readme_file:
return readme_file.read()
configuration = {
"name": "umap-learn",
"version": "0.3.10",
"description": "Uniform Manifold Approximation and Projection",
"long_description": readme(),
"classifiers": [
"Development Status :: 3 - Alpha",
"Intended Audience :: Science/Research",
"Intended Audience :: Developers",
"License :: OSI Approved",
"Programming Language :: C",
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Scientific/Engineering",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX",
"Operating System :: Unix",
"Operating System :: MacOS",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.6",
],
"keywords": "dimension reduction t-sne manifold",
"url": "http://github.com/lmcinnes/umap",
"maintainer": "Leland McInnes",
"maintainer_email": "leland.mcinnes@gmail.com",
"license": "BSD",
"packages": ["umap"],
"install_requires": [
"numpy >= 1.13",
"scikit-learn >= 0.16",
"scipy >= 0.19",
"numba >= 0.37",
],
"ext_modules": [],
"cmdclass": {},
"test_suite": "nose.collector",
"tests_require": ["nose"],
"data_files": (),
}
setup(**configuration)
| 30.428571 | 70 | 0.578638 |
820b3c8c1adc3ce1e0765efce202ce4cde9c5e45 | 449 | py | Python | cluster_tools/autoencoderpreprocessor.py | ninastijepovic/MasterThesis | 2579f1e74c0ce404f350a6d441e273b6aef4eadc | [
"MIT"
] | null | null | null | cluster_tools/autoencoderpreprocessor.py | ninastijepovic/MasterThesis | 2579f1e74c0ce404f350a6d441e273b6aef4eadc | [
"MIT"
] | null | null | null | cluster_tools/autoencoderpreprocessor.py | ninastijepovic/MasterThesis | 2579f1e74c0ce404f350a6d441e273b6aef4eadc | [
"MIT"
] | null | null | null | #Remove this later
import sys
sys.path.insert(0, '/Users/mishamesarcik/Workspace/phd/Workspace/lofar-dev')
import preprocessor
def preprocess(observation):
if observation is None:
raise ValueError('No data to preprocess.')
if observation is None or 'visibilities' not in observation:
raise ValueError('No visibilities in observation.')
result = observation['visibilities'].transpose([0, 2, 1, 3])
return result
| 24.944444 | 76 | 0.721604 |
cb9d6c5d18a1c28b875447690d67381ea9d144dd | 7,493 | py | Python | tools/torch_tools.py | gaoliyao/Replica_Exchange_Stochastic_Gradient_MCMC | 609f803ca334b21820dc020c16ad8113363a03e2 | [
"MIT"
] | 21 | 2020-06-30T01:14:45.000Z | 2022-03-31T08:03:17.000Z | tools/torch_tools.py | gaoliyao/Replica_Exchange_Stochastic_Gradient_MCMC | 609f803ca334b21820dc020c16ad8113363a03e2 | [
"MIT"
] | null | null | null | tools/torch_tools.py | gaoliyao/Replica_Exchange_Stochastic_Gradient_MCMC | 609f803ca334b21820dc020c16ad8113363a03e2 | [
"MIT"
] | 2 | 2020-11-02T22:06:10.000Z | 2021-07-17T23:35:21.000Z | import math
import numpy as np
import copy
import sys
import os
import timeit
import csv
import dill
from tqdm import tqdm ## better progressbar
from math import exp
from utils import find_classes
import random
import pickle
import numpy as np
from numpy import genfromtxt
## import pytorch modules
import torch
from torch.autograd import Variable
import torch.nn.functional as Func
import torch.nn as nn
from torchvision import datasets #, transforms
from torchvision.datasets import ImageFolder
from torchvision.transforms import ToTensor
from torch.utils.data import DataLoader
import torch.utils.data as data
import torchvision.datasets as datasets
import transforms
from copy import deepcopy
from sys import getsizeof
def imageNet_loader(train_size, valid_size, test_size, crop_size):
# http://blog.outcome.io/pytorch-quick-start-classifying-an-image/
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
train_loader = torch.utils.data.DataLoader(datasets.ImageFolder('./data/kaggle/train', transforms.Compose([transforms.RandomResizedCrop(crop_size),
transforms.RandomHorizontalFlip(), transforms.ToTensor(), normalize, ])), batch_size=train_size, shuffle=True, pin_memory=True, drop_last=True)
valid_loader = torch.utils.data.DataLoader(datasets.ImageFolder('./data/kaggle/valid', transforms.Compose([transforms.CenterCrop(crop_size),
transforms.RandomHorizontalFlip(), transforms.ToTensor(), normalize,])), batch_size=valid_size, shuffle=True, pin_memory=True, drop_last=True)
test_loader = torch.utils.data.DataLoader(datasets.ImageFolder('./data/image_net/small_classes/', transforms.Compose([transforms.CenterCrop(crop_size),
transforms.ToTensor(), normalize,])), batch_size=test_size, shuffle=False)
return train_loader, valid_loader, test_loader
def loader(train_size, test_size, args):
if args.data.startswith('cifar'):
if args.data == 'cifar10':
dataloader = datasets.CIFAR10
else:
dataloader = datasets.CIFAR100
transform_train = transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
transforms.RandomErasing(probability = 0.5, sh = 0.4, r1 = 0.3, ),
])
transform_test = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
elif args.data == 'mnist':
dataloader = datasets.MNIST
transform_train = transforms.Compose([
# https://github.com/hwalsuklee/tensorflow-mnist-cnn/blob/master/mnist_data.py
#transforms.RandomAffine(translate=0.12),
transforms.RandomCrop(28, padding=4),
transforms.RandomRotation((-15, 15)),
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,)),
])
transform_test = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,)),
])
elif args.data == 'fmnist':
dataloader = datasets.FashionMNIST
transform_train = transforms.Compose([
transforms.RandomCrop(28, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,)),
transforms.RandomErasing(probability=0.5, sh=0.4, r1=0.3, mean=[0.4914]),
])
transform_test = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,)),
])
else:
exit('Unknown dataset')
if args.aug == 0:
transform_train = transforms.ToTensor()
transform_test = transforms.ToTensor()
trainset = dataloader('./data/' + args.data.upper(), train=True, download=True, transform=transform_train)
train_loader = data.DataLoader(trainset, batch_size=train_size, shuffle=True, num_workers=0) # num_workers=0 is crucial for seed
testset = dataloader(root='./data/' + args.data.upper(), train=False, download=False, transform=transform_test)
test_loader = data.DataLoader(testset, batch_size=test_size, shuffle=False, num_workers=0)
return train_loader, test_loader, dataloader
def swap(model1, model2):
temperory = pickle.loads(pickle.dumps(model1))
model1 = model2
model2 = temperory
return(model1, model2)
# Take as many D0 as we want
def process_d0(batches, pars):
full_data = []
for cnt, (data, y) in enumerate(batches):
data_g, y_g = Variable(data).cuda(), Variable(y).cuda()
full_data.append((data_g, y_g))
if cnt >= pars.d0 - 1:
break
return(full_data)
class BayesEval:
def __init__(self):
self.counter = 0
self.bma = []
self.cur_acc = 0
self.bma_acc = 0
self.best_cur_acc = 0
self.best_bma_acc = 0
def eval(self, net, data_loader, weight=1, bma=False, burnIn=100):
net.eval()
one_correct, bma_correct = 0, 0
self.counter += 1
for cnt, (images, labels) in enumerate(data_loader):
images, labels = Variable(images).cuda(), Variable(labels).cuda()
outputs = net.forward(images).data
one_correct += outputs.max(1)[1].eq(labels.data).sum().item()
if bma == True and self.counter >= burnIn:
outputs = outputs * weight
if self.counter == burnIn:
self.bma.append(outputs)
else:
self.bma[cnt] += outputs
prediction = self.bma[cnt].max(1)[1]
bma_correct += prediction.eq(labels.data).sum().item()
self.cur_acc = 100.0 * one_correct / len(data_loader.dataset)
self.bma_acc = 100.0 * bma_correct / len(data_loader.dataset)
self.best_cur_acc = max(self.best_cur_acc, self.cur_acc)
self.best_bma_acc = max(self.best_bma_acc, self.bma_acc)
def model_eval(net, data_loader, epoch=0, if_print=1):
net.eval()
correct = 0
total = 0
for cnt, (images, labels) in enumerate(data_loader):
images, labels = Variable(images).cuda(), Variable(labels).cuda()
outputs = net.forward(images)
prediction = outputs.data.max(1)[1]
correct += prediction.eq(labels.data).sum().item()
if if_print:
print 'Epoch {} Test set accuracy: {:0.2f}%'.format(\
epoch, 100.0 * correct / len(data_loader.dataset))
return(100.0 * correct / len(data_loader.dataset))
def bayes_mv(net, data_loader, bmv, epoch, counter=1):
net.eval()
for cnt, (images, labels) in enumerate(data_loader):
images, labels = Variable(images).cuda(), Variable(labels).cuda()
outputs = (torch.exp(net.forward(images))).data * net.ensemble_w
if counter == 1:
bmv.append(outputs)
else:
bmv[cnt] += outputs
def save_or_pretrain(net, num_epochs, model_name):
if num_epochs > 0:
print('Save model')
#torch.save(net, model_name, pickle_module=dill)
torch.save(net.state_dict(), model_name)
else:
print('Use preTrained model')
#net = torch.load(model_name)
net.load_state_dict(torch.load(model_name))
return net
| 38.229592 | 155 | 0.648338 |
20fcf0fe2f6819adfcb795e5274f2c17d93198ea | 353 | py | Python | syntax/exception/helloCatchException.py | Dev-Learn/LearnPython | a601f5eeeb05236a3e179bf8c34425a95cb0c919 | [
"Apache-2.0"
] | null | null | null | syntax/exception/helloCatchException.py | Dev-Learn/LearnPython | a601f5eeeb05236a3e179bf8c34425a95cb0c919 | [
"Apache-2.0"
] | null | null | null | syntax/exception/helloCatchException.py | Dev-Learn/LearnPython | a601f5eeeb05236a3e179bf8c34425a95cb0c919 | [
"Apache-2.0"
] | null | null | null | print("Three")
value = 10 / 2
print("Two")
value = 10 / 1
print("One")
d = 0
try:
# Phép chia này có vấn đề, chia cho 0.
# Một lỗi được phát ra tại đây (ZeroDivisionError).
value = 10 / d
print("value = ", value)
except ZeroDivisionError as e:
print("Error: ", str(e))
print("Ignore to continue ...")
print("Let's go!") | 14.12 | 55 | 0.586402 |
b3d320abbb67b0d5a687421e22bfcec2b4ee708a | 1,907 | py | Python | autostat/sklearn/tests/custom_periodic_kernel_test.py | bcolloran/autostat | 1653642e42d196223cb0e5f79408100b8e590601 | [
"MIT"
] | null | null | null | autostat/sklearn/tests/custom_periodic_kernel_test.py | bcolloran/autostat | 1653642e42d196223cb0e5f79408100b8e590601 | [
"MIT"
] | null | null | null | autostat/sklearn/tests/custom_periodic_kernel_test.py | bcolloran/autostat | 1653642e42d196223cb0e5f79408100b8e590601 | [
"MIT"
] | null | null | null | import numpy as np
import pytest
from ..custom_periodic_kernel import PeriodicKernelNoConstant
class TestGradients:
@pytest.mark.parametrize("param_init_val", [0.001, 0.04, 0.3, 0.8, 1.5, 10.5, 15])
def test_grad_length_scale(self, param_init_val):
param = "length_scale"
mat_slice = 0
N = 50
max_entry_diff = 0.01
max_mse = max_entry_diff ** 2
X = np.linspace([-1], [1], N)
delta = 1e-7
param_init = param_init_val
kwargs1 = {param: param_init}
k1 = PeriodicKernelNoConstant(**kwargs1)
kMat1, grads = k1(X, eval_gradient=True)
kwargs2 = {param: param_init + delta}
k2 = PeriodicKernelNoConstant(**kwargs2)
kMat2 = k2(X, eval_gradient=False)
grad = grads[:, :, mat_slice]
finite_diff_grad = (kMat2 - kMat1) / delta
assert np.mean((grad - finite_diff_grad) ** 2) < max_mse
@pytest.mark.parametrize(
"param_init_val", [0.012, 0.04, 0.115, 0.15, 0.3, 0.8, 1.5, 10.5, 15]
)
def test_grad_periodicity(self, param_init_val):
param = "periodicity"
mat_slice = 1
N = 50
max_entry_diff = 0.01
max_mse = max_entry_diff ** 2
X = np.linspace([-1], [1], N)
# NOTE: we're at the limit of what finite differences can do...
# going from 1e-11 to 1e-12 makes the test _fail_ on th first case
delta = 1e-11
param_init = param_init_val
kwargs1 = {param: param_init}
k1 = PeriodicKernelNoConstant(**kwargs1)
kMat1, grads = k1(X, eval_gradient=True)
kwargs2 = {param: param_init + delta}
k2 = PeriodicKernelNoConstant(**kwargs2)
kMat2 = k2(X, eval_gradient=False)
grad = grads[:, :, mat_slice]
finite_diff_grad = (kMat2 - kMat1) / delta
assert np.mean((grad - finite_diff_grad) ** 2) < max_mse
| 29.338462 | 86 | 0.60042 |
bc35bd5a31628d48174cc566001f24706ce4c7d3 | 5,586 | py | Python | pagarmeapisdk/models/get_bank_account_response.py | pagarme/pagarme-python-sdk | 5a709ce54d46fc7326f73242700602c1c5a6bd26 | [
"MIT"
] | null | null | null | pagarmeapisdk/models/get_bank_account_response.py | pagarme/pagarme-python-sdk | 5a709ce54d46fc7326f73242700602c1c5a6bd26 | [
"MIT"
] | null | null | null | pagarmeapisdk/models/get_bank_account_response.py | pagarme/pagarme-python-sdk | 5a709ce54d46fc7326f73242700602c1c5a6bd26 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
pagarmeapisdk
This file was automatically generated by APIMATIC v3.0 (
https://www.apimatic.io ).
"""
from pagarmeapisdk.api_helper import APIHelper
from pagarmeapisdk.models.get_recipient_response import GetRecipientResponse
class GetBankAccountResponse(object):
"""Implementation of the 'GetBankAccountResponse' model.
TODO: type model description here.
Attributes:
id (string): Id
holder_name (string): Holder name
holder_type (string): Holder type
bank (string): Bank
branch_number (string): Branch number
branch_check_digit (string): Branch check digit
account_number (string): Account number
account_check_digit (string): Account check digit
mtype (string): Bank account type
status (string): Bank account status
created_at (datetime): Creation date
updated_at (datetime): Last update date
deleted_at (datetime): Deletion date
recipient (GetRecipientResponse): Recipient
metadata (dict): Metadata
pix_key (string): Pix Key
"""
# Create a mapping from Model property names to API property names
_names = {
"id": 'id',
"holder_name": 'holder_name',
"holder_type": 'holder_type',
"bank": 'bank',
"branch_number": 'branch_number',
"branch_check_digit": 'branch_check_digit',
"account_number": 'account_number',
"account_check_digit": 'account_check_digit',
"mtype": 'type',
"status": 'status',
"created_at": 'created_at',
"updated_at": 'updated_at',
"deleted_at": 'deleted_at',
"metadata": 'metadata',
"pix_key": 'pix_key',
"recipient": 'recipient'
}
def __init__(self,
id=None,
holder_name=None,
holder_type=None,
bank=None,
branch_number=None,
branch_check_digit=None,
account_number=None,
account_check_digit=None,
mtype=None,
status=None,
created_at=None,
updated_at=None,
deleted_at=None,
metadata=None,
pix_key=None,
recipient=None):
"""Constructor for the GetBankAccountResponse class"""
# Initialize members of the class
self.id = id
self.holder_name = holder_name
self.holder_type = holder_type
self.bank = bank
self.branch_number = branch_number
self.branch_check_digit = branch_check_digit
self.account_number = account_number
self.account_check_digit = account_check_digit
self.mtype = mtype
self.status = status
self.created_at = APIHelper.RFC3339DateTime(created_at) if created_at else None
self.updated_at = APIHelper.RFC3339DateTime(updated_at) if updated_at else None
self.deleted_at = APIHelper.RFC3339DateTime(deleted_at) if deleted_at else None
self.recipient = recipient
self.metadata = metadata
self.pix_key = pix_key
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object
as obtained from the deserialization of the server's response. The
keys MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
id = dictionary.get('id')
holder_name = dictionary.get('holder_name')
holder_type = dictionary.get('holder_type')
bank = dictionary.get('bank')
branch_number = dictionary.get('branch_number')
branch_check_digit = dictionary.get('branch_check_digit')
account_number = dictionary.get('account_number')
account_check_digit = dictionary.get('account_check_digit')
mtype = dictionary.get('type')
status = dictionary.get('status')
created_at = APIHelper.RFC3339DateTime.from_value(dictionary.get("created_at")).datetime if dictionary.get("created_at") else None
updated_at = APIHelper.RFC3339DateTime.from_value(dictionary.get("updated_at")).datetime if dictionary.get("updated_at") else None
deleted_at = APIHelper.RFC3339DateTime.from_value(dictionary.get("deleted_at")).datetime if dictionary.get("deleted_at") else None
metadata = dictionary.get('metadata')
pix_key = dictionary.get('pix_key')
recipient = GetRecipientResponse.from_dictionary(dictionary.get('recipient')) if dictionary.get('recipient') else None
# Return an object of this model
return cls(id,
holder_name,
holder_type,
bank,
branch_number,
branch_check_digit,
account_number,
account_check_digit,
mtype,
status,
created_at,
updated_at,
deleted_at,
metadata,
pix_key,
recipient)
| 37.743243 | 139 | 0.592195 |
34162ff692f75ac72cc8588917578293295c5d65 | 32,328 | py | Python | se/spelling.py | bryanwills/tools | ef60cd4fc81144697610df1597cdf8decb682425 | [
"CC0-1.0"
] | null | null | null | se/spelling.py | bryanwills/tools | ef60cd4fc81144697610df1597cdf8decb682425 | [
"CC0-1.0"
] | null | null | null | se/spelling.py | bryanwills/tools | ef60cd4fc81144697610df1597cdf8decb682425 | [
"CC0-1.0"
] | null | null | null | #!/usr/bin/env python3
"""
Defines various spelling-related helper functions.
"""
from typing import Set
import importlib_resources
import regex
import se
DICTIONARY: Set[str] = set() # Store our hyphenation dictionary so we don't re-read the file on every pass
def get_xhtml_language(xhtml: str) -> str:
"""
Try to get the IETF lang tag for a complete XHTML document
"""
supported_languages = ["en-US", "en-GB", "en-AU", "en-CA"]
match = regex.search(r"<html[^>]+?xml:lang=\"([^\"]+)\"", xhtml)
if match:
language = match.group(1)
else:
language = None
if language not in supported_languages:
raise se.InvalidLanguageException(f"No valid [attr]xml:lang[/] attribute in [xhtml]<html>[/] element. Only [text]{'[/], [text]'.join(supported_languages[:-1])}[/], and [text]{supported_languages[-1]}[/] are supported.")
return language
def modernize_hyphenation(xhtml: str) -> str:
"""
Convert old-timey hyphenated compounds into single words based on the passed DICTIONARY.
INPUTS
xhtml: A string of XHTML to modernize
OUTPUTS
A string representing the XHTML with its hyphenation modernized
"""
# First, initialize our dictionary if we haven't already
if not se.spelling.DICTIONARY:
with importlib_resources.open_text("se.data", "words") as dictionary:
se.spelling.DICTIONARY = {line.strip().lower() for line in dictionary}
# Easy fix for a common case
xhtml = regex.sub(r"\b([Nn])ow-a-days\b", r"\1owadays", xhtml) # now-a-days -> nowadays
# The non-capturing group at the beginning tries to prevent
# bad matches like stag's-horn -> stag'shorn or dog's-eared -> dog'seared
result = regex.findall(r"(?<![’\'])\b[^\W\d_]+\-[^\W\d_]+\b", xhtml)
for word in set(result): # set() removes duplicates
new_word = word.replace("-", "").lower()
if new_word in se.spelling.DICTIONARY:
# To preserve capitalization of the first word, we get the individual parts
# then replace the original match with them joined together and titlecased.
lhs = regex.sub(r"\-.+$", r"", word)
rhs = regex.sub(r"^.+?\-", r"", word)
xhtml = regex.sub(fr"{lhs}-{rhs}", lhs + rhs.lower(), xhtml)
# Quick fix for a common error cases
xhtml = xhtml.replace("z3998:nonfiction", "z3998:non-fiction")
xhtml = regex.sub(r"\b([Mm])anat-arms", r"\1an-at-arms", xhtml)
xhtml = regex.sub(r"\b([Tt])abled’hôte", r"\1able-d’hôte", xhtml)
xhtml = regex.sub(r"\b([Pp])ita-pat", r"\1it-a-pat", xhtml)
return xhtml
def detect_problem_spellings(xhtml: str) -> list:
"""
Return a list of potential problem spellings, that cannot be scripted due to a
word having various meanings.
For example, "staid" can be an archaic spelling of "stayed",
or as an adjective it could mean "marked by settled sedateness
and often prim self-restraint".
INPUTS
xhtml: A string of XHTML to inspect
OUTPUTS
A list of strings representing potential words to manually inspect
"""
# Uncomment if we eventually need the document language
# language = get_xhtml_language(xhtml)
output = []
if regex.search(r"\bstaid\b", xhtml):
output.append("“staid” detected. This should be modernized if it is the past tense of “stay,” but not if used as an adjective meaning “sedate or prim.”")
if regex.search(r"\bcozen\b", xhtml):
output.append("“cozen” detected. This should be modernized if it means “cousin,” but not if used to mean “to deceive or win over.”")
if regex.search(r"\bgrown-?up\b", xhtml):
output.append("“grownup” or “grown-up” detected. Confirm that “grownup” is strictly a noun, and “grown-up” is strictly an adjective.")
if regex.search(r"\bcommon[\-\s]?sense\b", xhtml):
output.append("“commonsense” or “common sense” or “common-sense” detected. Confirm that “common sense” and “common-sense” are strictly nouns, and that “commonsense” is strictly an adjective.")
if regex.search(r"\bmann?ikin\b", xhtml):
output.append("“mannikin” or “manikin” detected. Confirm that “mannikin” is used in the sense of a small person, and “mannequin” is used in the sense of a dummy or figure.")
if regex.search(r"\bgripe", xhtml):
output.append("“gripe” or “griped” detected. Confirm that “gripe” is used in the sense of illness or complaint, not in the sense of “grip” or “gripped.”")
if regex.search(r"\bmay[\-\s]?day", xhtml):
output.append("“mayday” or “may day” or “may-day” detected. Confirm that “may day” and “may-day” refer to the day, and that “mayday” is used in the sense of a distress signal.")
if regex.search(r"\bfree[\-\s]?will", xhtml):
output.append("“freewill” or “free will” or “free-will” detected. Confirm that “free will” and “free-will” are strictly nouns, and that “freewill” is strictly an adjective.")
return output
def modernize_spelling(xhtml: str) -> str:
"""
Convert old-timey spelling on a case-by-case basis.
INPUTS
xhtml: A string of XHTML to modernize
OUTPUTS
A string representing the XHTML with its spelling modernized
"""
language = get_xhtml_language(xhtml)
# ADDING NEW WORDS TO THIS LIST:
# A good way to check if a word is "archaic" is to do a Google N-Gram search: https://books.google.com/ngrams/graph?case_insensitive=on&year_start=1800&year_end=2000&smoothing=3
# Remember that en-US and en-GB differ significantly, and just because a word might seem strange to you, doesn't mean it's not the common case in the other variant.
# If Google N-Gram shows that a word has declined significantly in usage in BOTH en-US and en-GB (or the SE editor-in-chief makes an exception) then it may be a good candidate to add to this list.
xhtml = regex.sub(r"\b([Dd])evelope\b", r"\1evelop", xhtml) # develope -> develop
xhtml = regex.sub(r"\b([Oo])ker\b", r"\1cher", xhtml) # oker -> ocher
xhtml = regex.sub(r"\b([Ww])ellnigh\b", r"\1ell-nigh", xhtml) # wellnigh -> well-nigh
xhtml = regex.sub(r"\b([Tt]he|[Aa]nd|[Oo]r) what not(?! to)\b", r"\1 whatnot", xhtml) # what not -> whatnot
xhtml = regex.sub(r"\b([Gg])ood[\-]?bye?\b", r"\1oodbye", xhtml) # good-by -> goodbye
xhtml = regex.sub(r"\b([Gg])ood\sbye\b", r"\1oodbye", xhtml) # good bye -> goodbye (Note that we can't do `good by` -> `goodby` because one might do good by someone.
xhtml = regex.sub(r"\b([Gg])ood[\-\s]?bye?s\b", r"\1oodbyes", xhtml) # good bys -> goodbyes
xhtml = regex.sub(r"\b([Hh])ind(u|oo)stanee", r"\1industani", xhtml) # hindoostanee -> hindustani
xhtml = regex.sub(r"\b([Hh])indoo", r"\1indu", xhtml) # hindoo -> hindu
xhtml = regex.sub(r"\b([Ee])xpence", r"\1xpense", xhtml) # expence -> expense
xhtml = regex.sub(r"\b([Ll])otos", r"\1otus", xhtml) # lotos -> lotus
xhtml = regex.sub(r"\b([Ss])collop", r"\1callop", xhtml) # scollop -> scallop
xhtml = regex.sub(r"\b([Ss])ubtile?(?!(ize|izing))", r"\1ubtle", xhtml) # subtil -> subtle (but "subtilize" and "subtilizing")
xhtml = regex.sub(r"\bQuoiff", r"Coif", xhtml) # quoiff -> coif
xhtml = regex.sub(r"\bquoiff", r"coif", xhtml) # quoiff -> coif
xhtml = regex.sub(r"\bIndorse", r"Endorse", xhtml) # Indorse -> Endorse
xhtml = regex.sub(r"\bindorse", r"endorse", xhtml) # indorse -> endorse
xhtml = regex.sub(r"\bIntrust", r"Entrust", xhtml) # Intrust -> Entrust
xhtml = regex.sub(r"\bintrust", r"entrust", xhtml) # intrust -> entrust
xhtml = regex.sub(r"\bPhantasies", r"Fantasies", xhtml) # Phantasies -> Fantasies
xhtml = regex.sub(r"\bphantasies", r"fantasies", xhtml) # phantasies -> fantasies
xhtml = regex.sub(r"\bPhantas(y|ie)", r"Fantasy", xhtml) # Phantasie -> Fantasy
xhtml = regex.sub(r"\bphantas(y|ie)", r"fantasy", xhtml) # phantasie -> fantasy
xhtml = regex.sub(r"\bPhantastic", r"Fantastic", xhtml) # Phantastic -> Fantastic
xhtml = regex.sub(r"\bphantastic", r"fantastic", xhtml) # phantastic -> fantastic
xhtml = regex.sub(r"\bPhren[sz]y", r"Frenzy", xhtml) # Phrensy -> Frenzy
xhtml = regex.sub(r"\bphren[sz]y", r"frenzy", xhtml) # phrensy -> frenzy
xhtml = regex.sub(r"\b([Mm])enage\b", r"\1énage", xhtml) # menage -> ménage
xhtml = regex.sub(r"([Hh])ypothenuse", r"\1ypotenuse", xhtml) # hypothenuse -> hypotenuse
xhtml = regex.sub(r"[‘’]([Bb])us\b", r"\1us", xhtml) # ’bus -> bus
xhtml = regex.sub(r"([Nn])aïve", r"\1aive", xhtml) # naïve -> naive
xhtml = regex.sub(r"([Nn])a[ïi]vet[ée]", r"\1aivete", xhtml) # naïveté -> naivete
xhtml = regex.sub(r"&c\.", r"etc.", xhtml) # &c. -> etc.
xhtml = regex.sub(r"([Pp])rot[ée]g[ée]", r"\1rotégé", xhtml) # protege -> protégé
xhtml = regex.sub(r"([Tt])ete-a-tete", r"\1ête-à-tête", xhtml) # tete-a-tete -> tête-à-tête
xhtml = regex.sub(r"([Vv])is-a-vis", r"\1is-à-vis", xhtml) # vis-a-vis _> vis-à-vis
xhtml = regex.sub(r"([Ff])acade", r"\1açade", xhtml) # facade -> façade
xhtml = regex.sub(r"([Cc])h?ateau([sx]?\b)", r"\1hâteau\2", xhtml) # chateau -> château
xhtml = regex.sub(r"([Hh])abitue", r"\1abitué", xhtml) # habitue -> habitué
xhtml = regex.sub(r"\b([Bb])lase\b", r"\1lasé", xhtml) # blase -> blasé
xhtml = regex.sub(r"\b([Bb])bee[’']s[ \-]wax\b", r"\1eeswax", xhtml) # bee’s-wax -> beeswax
xhtml = regex.sub(r"\b([Cc])afe\b", r"\1afé", xhtml) # cafe -> café
xhtml = regex.sub(r"\b([Cc])afes\b", r"\1afés", xhtml) # cafes -> cafés; We break up cafe so that we don't catch 'cafeteria'
xhtml = regex.sub(r"([Mm])êlée", r"\1elee", xhtml) # mêlée -> melee
xhtml = regex.sub(r"\b([Ff])ete([sd])?\b", r"\1ête\2", xhtml) # fete -> fête
xhtml = regex.sub(r"\b([Rr])ôle(s?)\b", r"\1ole\2", xhtml) # rôle -> role
xhtml = regex.sub(r"\b([Cc])oö", r"\1oo", xhtml) # coö -> coo (as in coöperate)
xhtml = regex.sub(r"\b([Rr])eë", r"\1ee", xhtml) # reë -> ree (as in reëvaluate)
xhtml = regex.sub(r"\b([Pp])reë", r"\1ree", xhtml) # preë -> pree (as in preëmpt)
xhtml = regex.sub(r"\b([Cc])oërc", r"\1oerc", xhtml) # coërc -> coerc (as in coërcion)
xhtml = regex.sub(r"\b([Cc])oëd", r"\1oed", xhtml) # coëd -> coed (as in coëducation)
xhtml = regex.sub(r"\b([Dd])aïs\b", r"\1ais", xhtml) # daïs -> dais
xhtml = regex.sub(r"\b([Cc])oup\-de\-grace", r"\1oup-de-grâce", xhtml) # coup-de-grace -> coup-de-grâce
xhtml = regex.sub(r"\b([Cc])anape", r"\1anapé", xhtml) # canape -> canapé
xhtml = regex.sub(r"\b([Pp])recis\b", r"\1récis", xhtml) # precis -> précis
xhtml = regex.sub(r"\b([Gg])ood\-night", r"\1ood night", xhtml) # good-night -> good night
xhtml = regex.sub(r"\b([Gg])ood\-morning", r"\1ood morning", xhtml) # good-morning -> good morning
xhtml = regex.sub(r"\b([Gg])ood\-evening", r"\1ood evening", xhtml) # good-evening -> good evening
xhtml = regex.sub(r"\b([Gg])ood\-day", r"\1ood day", xhtml) # good-day -> good day
xhtml = regex.sub(r"\b([Gg])ood\-afternoon", r"\1ood afternoon", xhtml) # good-afternoon -> good afternoon
xhtml = regex.sub(r"\b([Bb])ete noir", r"\1ête noir", xhtml) # bete noir -> bête noir
xhtml = regex.sub(r"\bEclat\b", r"Éclat", xhtml) # Eclat -> Éclat
xhtml = regex.sub(r"\beclat\b", r"éclat", xhtml) # eclat -> éclat
xhtml = regex.sub(r"\ba la\b", r"à la", xhtml) # a la -> à la
xhtml = regex.sub(r"\ba propos\b", r"apropos", xhtml) # a propos -> apropos
xhtml = regex.sub(r"\bper cent(s|ages?)?\b", r"percent\1", xhtml) # per cent -> percent
xhtml = regex.sub(r"\bpercent\.(\s+[\p{Lowercase_Letter}])", r"percent\1", xhtml) # percent. followed by lowercase -> percent
xhtml = regex.sub(r"\bpercent\.[,;:\!\?]", r"percent,", xhtml) # per cent. -> percent
xhtml = regex.sub(r"\b([Ee])ntree(s?)\b", r"\1ntrée\2", xhtml) # entree -> entrée
xhtml = regex.sub(r"\b([Ff])iance", r"\1iancé", xhtml) # fiance -> fiancé
xhtml = regex.sub(r"\b([Oo])utre\b", r"\1utré", xhtml) # outre -> outré
xhtml = regex.sub(r"\b([Ff])etich", r"\1etish", xhtml) # fetich -> fetish
xhtml = regex.sub(r"\b([Pp])igstye\b", r"\1igsty", xhtml) # pigstye -> pigsty
xhtml = regex.sub(r"\b([Pp])igstyes\b", r"\1igsties", xhtml) # pigstyes -> pigsties
xhtml = regex.sub(r"\b([Cc])lew(s?)\b", r"\1lue\2", xhtml) # clew -> clue
xhtml = regex.sub(r"\b[ÀA]\s?propos\b", r"Apropos", xhtml) # à propos -> apropos
xhtml = regex.sub(r"\b[àa]\s?propos\b", r"apropos", xhtml) # à propos -> apropos
xhtml = regex.sub(r"\b([Nn])ew comer(s?)\b", r"\1ewcomer\2", xhtml) # new comer -> newcomer
xhtml = regex.sub(r"\b([Pp])ease\b(?![ \-]pudding)", r"\1eas", xhtml) # pease -> peas (but "pease pudding")
xhtml = regex.sub(r"\b([Ss])uch like\b", r"\1uchlike", xhtml) # such like -> suchlike
xhtml = regex.sub(r"\b([Ee])mployé", r"\1mployee", xhtml) # employé -> employee
xhtml = regex.sub(r"\b(?<!ancien )([Rr])égime", r"\1egime", xhtml) # régime -> regime (but "ancien régime")
xhtml = regex.sub(r"\b([Bb])urthen", r"\1urden", xhtml) # burthen -> burden
xhtml = regex.sub(r"\b([Dd])isburthen", r"\1isburden", xhtml) # disburthen -> disburden
xhtml = regex.sub(r"\b([Uu])nburthen", r"\1nburden", xhtml) # unburthen -> unburden
xhtml = regex.sub(r"\b[EÉ]lys[eé]e", r"Élysée", xhtml) # Elysee -> Élysée
xhtml = regex.sub(r"\b([Ll])aw suit", r"\1awsuit", xhtml) # law suit -> lawsuit
xhtml = regex.sub(r"\bIncase", r"Encase", xhtml) # Incase -> Encase
xhtml = regex.sub(r"\bincase", r"encase", xhtml) # incase -> encase
xhtml = regex.sub(r"\bInclose", r"Enclose", xhtml) # Inclose -> Enclose
xhtml = regex.sub(r"\binclose", r"enclose", xhtml) # inclose -> enclose
xhtml = regex.sub(r"\b([Cc])ocoa-?nut", r"\1oconut", xhtml) # cocoanut / cocoa-nut -> coconut
xhtml = regex.sub(r"\b([Ww])aggon", r"\1agon", xhtml) # waggon -> wagon
xhtml = regex.sub(r"\b([Ss])wop", r"\1wap", xhtml) # swop -> swap
xhtml = regex.sub(r"\b([Ll])acquey", r"\1ackey", xhtml) # lacquey -> lackey
xhtml = regex.sub(r"\b([Bb])ric-à-brac", r"\1ric-a-brac", xhtml) # bric-à-brac -> bric-a-brac
xhtml = regex.sub(r"\b([Kk])iosque", r"\1iosk", xhtml) # kiosque -> kiosk
xhtml = regex.sub(r"\b([Dd])epôt", r"\1epot", xhtml) # depôt -> depot
xhtml = regex.sub(r"(?<![Cc]ompl)exion", r"ection", xhtml) # -extion -> -exction (connexion, reflexion, etc., but "complexion")
xhtml = regex.sub(r"\b([Dd])ulness", r"\1ullness", xhtml) # dulness -> dullness
xhtml = regex.sub(r"\b([Ff])iord", r"\1jord", xhtml) # fiord -> fjord
xhtml = regex.sub(r"\b([Ff])ulness\b", r"\1ullness", xhtml) # fulness -> fullness (but not for ex. thoughtfulness)
xhtml = regex.sub(r"['’]([Pp])hone", r"\1hone", xhtml) # ’phone -> phone; note that we can't use \b on the left because it won't match for some reason
xhtml = regex.sub(r"\b([Ss])hew", r"\1how", xhtml) # shew -> show
xhtml = regex.sub(r"\b([Tt])rowsers", r"\1rousers", xhtml) # trowsers -> trousers
xhtml = regex.sub(r"([Bb])iass", r"\1ias", xhtml) # (un)biass(ed) -> (un)bias(ed)
xhtml = regex.sub(r"\b([Cc])huse", r"\1hoose", xhtml) # chuse -> choose
xhtml = regex.sub(r"\b([Cc])husing", r"\1hoosing", xhtml) # chusing -> choosing
xhtml = regex.sub(r"\b([Cc])ontroul(s?)\b", r"\1ontrol\2", xhtml) # controul -> control
xhtml = regex.sub(r"\b([Cc])ontroul(ing|ed)", r"\1ontroll\2", xhtml) # controuling/ed -> controlling/ed
xhtml = regex.sub(r"\b([Ss])urpriz(e|ing)", r"\1urpris\2", xhtml) # surprize->surprise, surprizing->surprising
xhtml = regex.sub(r"\b([Dd])oat\b", r"\1ote", xhtml) # doat -> dote
xhtml = regex.sub(r"\b([Dd])oat(ed|ing)", r"\1ot\2", xhtml) # doating -> doting
xhtml = regex.sub(r"\b([Ss])topt", r"\1topped", xhtml) # stopt -> stopped
xhtml = regex.sub(r"\b([Ss])tept", r"\1tepped", xhtml) # stept -> stepped
xhtml = regex.sub(r"\b([Ss])ecresy", r"\1ecrecy", xhtml) # secresy -> secrecy
xhtml = regex.sub(r"\b([Mm])esalliance", r"\1ésalliance", xhtml) # mesalliance -> mésalliance
xhtml = regex.sub(r"\b([Ss])ate\b", r"\1at", xhtml) # sate -> sat
xhtml = regex.sub(r"\b([Aa])ttache\b", r"\1ttaché", xhtml) # attache -> attaché
xhtml = regex.sub(r"\b([Pp])orte[\- ]coch[eè]re\b", r"\1orte-cochère", xhtml) # porte-cochere -> porte-cochère
xhtml = regex.sub(r"\b([Nn])[eé]glig[eé]e?(s?)\b", r"\1egligee\2", xhtml) # négligée -> negligee
xhtml = regex.sub(r"\b([Ss])hort cut(s?)\b", r"\1hortcut\2", xhtml) # short cut -> shortcut
xhtml = regex.sub(r"\b([Ff])ocuss", r"\1ocus", xhtml) # focuss -> focus
xhtml = regex.sub(r"\b([Mm])ise[ \-]en[ \-]sc[eè]ne", r"\1ise-en-scène", xhtml) # mise en scene -> mise-en-scène
xhtml = regex.sub(r"\b([Nn])ee\b", r"\1ée", xhtml) # nee -> née
xhtml = regex.sub(r"\b([Ee])au[ \-]de[ \-]Cologne\b", r"\1au de cologne", xhtml) # eau de Cologne -> eau de cologne
xhtml = regex.sub(r"\b([Ss])enor", r"\1eñor", xhtml) # senor -> señor (senores, senorita/s, etc.)
xhtml = regex.sub(r"\b([Gg])ramme?(s)?\b", r"\1ram\2", xhtml) # gramm/grammes -> gram/grams
xhtml = regex.sub(r"\b([Aa])larum\b", r"\1larm", xhtml) # alarum -> alarm
xhtml = regex.sub(r"\b([Bb])owlder(s?)\b", r"\1oulder\2", xhtml) # bowlder/bowlders -> boulder/boulders
xhtml = regex.sub(r"\b([Dd])istingue\b", r"\1istingué", xhtml) # distingue -> distingué
xhtml = regex.sub(r"\b[EÉ]cart[eé]\b", r"Écarté", xhtml) # Ecarte -> Écarté
xhtml = regex.sub(r"\b[eé]cart[eé]\b", r"écarté", xhtml) # ecarte -> écarté
xhtml = regex.sub(r"\b([Pp])ere\b", r"\1ère", xhtml) # pere -> père (e.g. père la chaise)
xhtml = regex.sub(r"\b([Tt])able(s?) d’hote\b", r"\1able\2 d’hôte", xhtml) # table d'hote -> table d'hôte
xhtml = regex.sub(r"\b([Ee])au(x?)[ \-]de[ \-]vie\b", r"\1au\2-de-vie", xhtml) # eau de vie -> eau-de-vie
xhtml = regex.sub(r"\b3d\b", r"3rd", xhtml) # 3d -> 3rd (warning: check that we don't convert 3d in the "3 pence" sense!)
xhtml = regex.sub(r"\b2d\b", r"2nd", xhtml) # 2d -> 2nd (warning: check that we don't convert 2d in the "2 pence" sense!)
xhtml = regex.sub(r"\b([Mm])ia[uo]w", r"\1eow", xhtml) # miauw, miaow -> meow
xhtml = regex.sub(r"\b([Cc])aviare", r"\1aviar", xhtml) # caviare -> caviar
xhtml = regex.sub(r"\b([Ss])ha’n’t", r"\1han’t", xhtml) # sha'n't -> shan't (see https://english.stackexchange.com/questions/71414/apostrophes-in-contractions-shant-shant-or-shant)
xhtml = regex.sub(r"\b([Ss])[uû]ret[eé]", r"\1ûreté", xhtml) # Surete -> Sûreté
xhtml = regex.sub(r"\b([Ss])eance", r"\1éance", xhtml) # seance -> séance
xhtml = regex.sub(r"\b([Ff])in[\- ]de[\- ]siecle", r"\1in de siècle", xhtml) # fin de siecle -> fin de siècle
xhtml = regex.sub(r"\bEmpale", r"Impale", xhtml) # Empale -> Impale
xhtml = regex.sub(r"\bempale", r"impale", xhtml) # empale -> impale
xhtml = regex.sub(r"\b([Tt])abu(s?)\b", r"\1aboo\2", xhtml) # tabu -> taboo
xhtml = regex.sub(r"\b([Kk])idnaping\b", r"\1idnapping", xhtml) # kidnaping -> kidnapping
xhtml = regex.sub(r"([,;a-z]\s)Quixotic\b", r"\1quixotic", xhtml) # Quixotic -> quixotic but not at the start of a clause
xhtml = regex.sub(r"([^\p{Lowercase_Letter}]’[Tt])\s(is|were|was|isn’t)\b", r"\1\2", xhtml) # 't is, 't was, 't were 't isn't -> 'tis, 'twas, 'twere, 't isn't
xhtml = regex.sub(r"\b([Uu])p stairs\b", r"\1pstairs", xhtml) # up stairs -> upstairs
xhtml = regex.sub(r"(?<!up and )(?<!up or )\b([Dd])own stairs\b", r"\1ownstairs", xhtml) # down stairs -> downstairs, but not "up (or|and) down stairs"
xhtml = regex.sub(r"([Pp])artizan", r"\1artisan", xhtml) # partizan -> partisan
xhtml = regex.sub(r"([Nn])onplused", r"\1onplussed", xhtml) # nonplused -> nonplussed
xhtml = regex.sub(r"\b([Rr])eärrangement", r"\1earrangement", xhtml) # reärrangement -> rearrangement
xhtml = regex.sub(r"\b([Mm])untru(s?)\b", r"\1antra\2", xhtml) # muntru -> mantra
xhtml = regex.sub(r"\b([Hh])uzz(y|ies)\b", r"\1uss\2", xhtml) # huzzy -> hussy
xhtml = regex.sub(r"\b([Hh])iccough", r"\1iccup", xhtml) # hiccough -> hiccup
xhtml = regex.sub(r"\b([Rr])oue(s?)\b", r"\1oué\2", xhtml) # roue -> roué
xhtml = regex.sub(r"\b([Ii])dee fixe\b", r"\1dée fixe\2", xhtml) # idee fixe -> idée fixe
xhtml = regex.sub(r"\b([Ss])treet[\s\-]arab\b", r"\1treet Arab", xhtml) # street-arab -> street Arab
xhtml = regex.sub(r"\b[EÉ]migr[eé](?!e)", r"Émigré", xhtml) # Emigre -> Émigré (but not emigrée, which is French)
xhtml = regex.sub(r"\b[eé]migr[eé](?!e)", r"émigré", xhtml) # emigre -> émigré (but not emigrée, which is French)
xhtml = regex.sub(r"\b([Cc])ourtezan", r"\1ourtesan", xhtml) # courtezan -> courtesan
xhtml = regex.sub(r"\b([Cc])ompleate?", r"\1omplete", xhtml) # compleat -> complete
xhtml = regex.sub(r"\b([Dd])umfound", r"\1umbfound", xhtml) # dumfound -> dumbfound
xhtml = regex.sub(r"\b’([Cc])ello(s?)\b", r"\1ello\2", xhtml) # 'cello -> cello
xhtml = regex.sub(r"\bwelsh (rarebit|rabbit)\b", r"Welsh \1", xhtml) # welsh rarebit/rabbit -> Welsh rarebit/rabbit
xhtml = regex.sub(r"\b([Yy])our self\b(?!-)", r"\1ourself", xhtml) # your self -> your self, but ignore constructs like `your self-determination` or `your selfish sister`.
xhtml = regex.sub(r"\b([Aa])ny how\b", r"\1nyhow", xhtml) # any how -> anyhow
xhtml = regex.sub(r"\b([Aa])ny body\b", r"\1nybody", xhtml) # any body -> anybody
xhtml = regex.sub(r"\b([Ee])very body\b", r"\1verybody", xhtml) # every body -> everybody
xhtml = regex.sub(r"\bfrench window\b", r"French window", xhtml) # french window -> French window
xhtml = regex.sub(r"\b([Aa])n European", r"\1 European", xhtml) # an European -> a European
xhtml = regex.sub(r"\bProvencal", r"Provençal", xhtml) # Provencal -> Provençal
xhtml = regex.sub(r"\b([Rr])aison ([Dd])’etre", r"\1aison \2’être", xhtml) # raison d'etre -> raison d'être
xhtml = regex.sub(r"\b([Gg])arcon", r"\1arçon", xhtml) # garcon -> garçon
xhtml = regex.sub(r"\b([Cc])uracao", r"\1uraçao", xhtml) # curacao -> curaçao
xhtml = regex.sub(r"\b([Ss])oupcon", r"\1oupçon", xhtml) # soupcon -> soupçon
xhtml = regex.sub(r"\b([Tt])ouzle", r"\1ousle", xhtml) # touzle(d) -> tousle(d)
xhtml = regex.sub(r"\b([Cc])lientèle", r"\1lientele", xhtml) # clientèle -> clientele
xhtml = regex.sub(r"\b([Cc])ardamum", r"\1ardamom", xhtml) # cardamum -> cardamom
xhtml = regex.sub(r"\b([Ff])idgetted", r"\1idgeted", xhtml) # fidgetted -> fidgeted
xhtml = regex.sub(r"\b([Pp])ublick", r"\1ublic", xhtml) # publick -> public
xhtml = regex.sub(r"\b([Pp])rophane", r"\1rofane", xhtml) # prophane -> profane
xhtml = regex.sub(r"\b([Nn])o where", r"\1owhere", xhtml) # no where -> nowhere
xhtml = regex.sub(r"\b([Tt])yth", r"\1ith", xhtml) # tythe -> tithe
xhtml = regex.sub(r"\b([Ss])lily", r"\1lyly", xhtml) # slily -> slyly
xhtml = regex.sub(r"\b([Ff])oretel\b", r"\1oretell", xhtml) # foretel -> foretell
xhtml = regex.sub(r"\b([Cc])ypher", r"\1ipher", xhtml) # cypher -> cipher
xhtml = regex.sub(r"\b([Dd])ivers\b", r"\1iverse", xhtml) # divers -> diverse
xhtml = regex.sub(r"\b([Ll])anthorn", r"\1antern", xhtml) # lanthorn -> lantern
xhtml = regex.sub(r"\b([Oo])rgie\b", r"\1rgy", xhtml) # orgie -> orgy
xhtml = regex.sub(r"\b([Oo])u?rang-[Oo]utang?", r"\1rangutan", xhtml) # ourang-outang -> orangutan
xhtml = regex.sub(r"(?<!-)\b([Ss])o\sand\s([Ss])o\b(?!-)", r"\1o-and-\2o", xhtml) # so and so -> so-and-so; ignore `so-and-so and so-and-so`
xhtml = regex.sub(r"\b([Cc])añon", r"\1anyon", xhtml) # cañon -> canyon
xhtml = regex.sub(r"\b([Kk])vas\b", r"\1vass", xhtml) # kvas -> kvass
xhtml = regex.sub(r"\b([Pp])apier[-\s]mache\b", r"\1apier-mâché", xhtml) # papier-mache -> papier-mâché
xhtml = regex.sub(r"\b([Cc])yder\b", r"\1ider", xhtml) # cyder -> cider
xhtml = regex.sub(r"\b([Cc])onsomme", r"\1onsommé", xhtml) # consomme -> consommé
xhtml = regex.sub(r"\b([Cc])loath(s?)\b", r"\1lothe\2", xhtml) # cloath(s) -> clothe(s)
xhtml = regex.sub(r"\b([Cc])loath", r"\1loth", xhtml) # cloath -> cloth(ed|ing|...)
xhtml = regex.sub(r"\b([Pp])aultry", r"\1altry", xhtml) # paultry -> paltry
xhtml = regex.sub(r"\b([Bb])ye-?(word|law)", r"\1y\2", xhtml) # bye-(word|law) -> by(word|law)
xhtml = regex.sub(r"\btaylor", r"tailor", xhtml) # taylor -> tailor (but not uppercase as it might be a last name
xhtml = regex.sub(r"\b([Gg])ulph", r"\1ulf", xhtml) # gulph -> gulf
xhtml = regex.sub(r"\b([Mm])usicke?\b", r"\1usic", xhtml) # musick -> music
xhtml = regex.sub(r"\b([Ee])very where\b", r"\1verywhere", xhtml) # every where -> everywhere
xhtml = regex.sub(r"\b([Aa])ny where\b", r"\1nywhere", xhtml) # any where -> anywhere
xhtml = regex.sub(r"\b([Ee])very thing\b", r"\1verything", xhtml) # every thing -> everything
xhtml = regex.sub(r"\b([Aa])ny thing\b", r"\1nything", xhtml) # any thing -> anything
xhtml = regex.sub(r"\b([Rr])e-?enforce", r"\1einforce", xhtml) # re-enforce -> reinforce
xhtml = regex.sub(r"\b([Ll])uny", r"\1oony", xhtml) # luny -> loony
xhtml = regex.sub(r"\b([Vv])icuna", r"\1icuña", xhtml) # vicuna -> vicuña
xhtml = regex.sub(r"\b([Cc])larionet", r"\1larinet", xhtml) # clarionet -> clarinet
xhtml = regex.sub(r"\b([Bb])ye?[\- ]the[\- ]bye?\b", r"\1y the by", xhtml) # by-the-bye -> by the by
xhtml = regex.sub(r"\b([Ss])pung", r"\1pong", xhtml) # spung(e|ing|y) -> sponge
xhtml = regex.sub(r"\b([Ww])oful", r"\1oeful", xhtml) # woful -> woeful
xhtml = regex.sub(r"\b([Hh]e|[Ss]he|[Yy]ou|[Tt]hey)’ld", r"\1’d", xhtml) # he'ld, she'ld, you'ld, they'ld -> he'd, she'd, you'd, they'd
xhtml = regex.sub(r"\b([Ii])n the mean time", r"\1n the meantime", xhtml) # in the mean time -> in the meantime
xhtml = regex.sub(r"\b([Ch])huck[\- ]full\b", r"\1ock-full", xhtml) # chuck-full -> chock-full
xhtml = regex.sub(r"\b([Pp])rythee\b", r"\1rithee", xhtml) # prythee -> prithee
xhtml = regex.sub(r"\b([Hh])av’n’t", r"\1aven’t", xhtml) # hav’n’t -> haven’t
xhtml = regex.sub(r"\b([Bb])awble", r"\1auble", xhtml) # bawble -> bauble
xhtml = regex.sub(r"\b([Pp])iny?on(s?)\b", r"\1iñon\2", xhtml) # pinyon -> piñon
xhtml = regex.sub(r"\b([Ii])kon(s?)\b", r"\1con\2", xhtml) # ikon -> icon
xhtml = regex.sub(r"\b([Pp])remiss\b", r"\1remise", xhtml) # premiss -> premise
xhtml = regex.sub(r"\b([Pp])remisses", r"\1remises", xhtml) # premisses -> premises
# Normalize some names
xhtml = regex.sub(r"Moliere", r"Molière", xhtml) # Moliere -> Molière
xhtml = regex.sub(r"Tolstoi", r"Tolstoy", xhtml) # Tolstoi -> Tolstoy
xhtml = regex.sub(r"Buonaparte", r"Bonaparte", xhtml) # Buonaparte -> Bonaparte
xhtml = regex.sub(r"Shake?spea?r([^ie])", r"Shakespeare\1", xhtml) # Shakespear/Shakspeare -> Shakespeare
xhtml = regex.sub(r"Shake?spea?re", r"Shakespeare", xhtml) # Shakespear/Shakspeare -> Shakespeare
xhtml = regex.sub(r"Shakspea?rean", r"Shakespearean", xhtml) # Shaksperean -> Shakespearean
xhtml = regex.sub(r"Shakspea?re?’s", r"Shakespeare’s", xhtml) # Shakspere’s -> Shakespeare’s
xhtml = regex.sub(r"Raffaelle", r"Raphael", xhtml) # Raffaelle -> Raphael
xhtml = regex.sub(r"Michael Angelo", r"Michaelangelo", xhtml) # Michael Angelo -> Michaelangelo
xhtml = regex.sub(r"\bVergil", r"Virgil", xhtml) # Vergil -> Virgil
xhtml = regex.sub(r"\bVishnoo", r"Vishnu", xhtml) # Vishnoo -> Vishnu
xhtml = regex.sub(r"\bPekin\b", r"Peking", xhtml) # Pekin -> Peking
xhtml = regex.sub(r"\bBuenos Ayres\b", r"Buenos Aires", xhtml) # Buenos Ayres -> Buenos Aires
xhtml = regex.sub(r"\bCracow", r"Krakow", xhtml) # Cracow -> Krakow
xhtml = regex.sub(r"\bKieff?\b", r"Kiev", xhtml) # Kief -> Kiev
xhtml = regex.sub(r"\bRo?umania", r"Romania", xhtml) # Roumania(n) -> Romania(n)
xhtml = regex.sub(r"\b([Rr])enascence", r"\1enaissance", xhtml) # renascence -> renaissance
xhtml = regex.sub(r"\bThibet", r"Tibet", xhtml) # Thibet -> Tibet
xhtml = regex.sub(r"\bTimbuctoo", r"Timbuktu", xhtml) # Timbuctoo -> Timbuktu
xhtml = regex.sub(r"\bTokio", r"Tokyo", xhtml) # Tokio -> Tokyo
xhtml = regex.sub(r"\bTchekh?ov", r"Chekhov", xhtml) # Tchekhov -> Chekhov
xhtml = regex.sub(r"\bVereshtchagin", r"Vereshchagin", xhtml) # Vereshtchagin -> Vereshchagin
xhtml = regex.sub(r"\bSoudan", "Sudan", xhtml) # Soudan -> Sudan
xhtml = regex.sub(r"\bJack-in-the-box", "jack-in-the-box", xhtml) # Jack-in-the-box -> jack-in-the-box
xhtml = regex.sub(r"\bServia", r"Serbia", xhtml) # Servia(n) -> Serbia(n)
xhtml = regex.sub(r"\bEsquimaux?\b", r"Eskimo", xhtml) # Esquimau -> Eskimo
xhtml = regex.sub(r"\bLaocoon", r"Laocoön", xhtml) # Lacoon -> Laocoön
xhtml = regex.sub(r"Porto Rico", "Puerto Rico", xhtml) # Porto Rico -> Puerto Rico
xhtml = regex.sub(r"Mahomet", "Muhammad", xhtml) # Mahomet -> Muhammad
xhtml = regex.sub(r"M[ao]hommed", "Muhammad", xhtml) # Mahommed -> Muhammad
xhtml = regex.sub(r"Esthonian", "Estonian", xhtml) # Esthonian -> Estonian
xhtml = regex.sub(r"\b([Ss])anscrit\b", r"\1anskrit", xhtml) # Sanscrit -> Sanskrit
xhtml = regex.sub(r"Francois", r"François", xhtml) # Francois -> François
xhtml = regex.sub(r"Hayti(\b|an\b)", r"Haiti\1", xhtml) # Hayti -> Haiti
xhtml = regex.sub(r"Zymbabwe", r"Zimbabwe", xhtml) # Zymbabwe -> Zimbabwe
xhtml = regex.sub(r"Moslem(s?)\b", r"Muslim\1", xhtml) # Moslem -> Muslim, but stop at a word break for `Moslemin`, a rare word that has no modern spelling equivalent
xhtml = regex.sub(r"Bronte\b", r"Brontë", xhtml) # Bronte -> Brontë
xhtml = regex.sub(r"Leipsick?\b", r"Leipzig", xhtml) # Leipsic -> Leipzig; note that there are some US cities actually named `Leipsic`!
xhtml = regex.sub(r"Gengis", r"Genghis", xhtml) # Gengis -> Genghis
xhtml = regex.sub(r"Hamburgh", r"Hamburg", xhtml) # Hamburgh -> Hamburg
xhtml = regex.sub(r"Dant[sz]ick?", r"Danzig", xhtml) # Dantsic -> Danzig
xhtml = regex.sub(r"Barbadoes", r"Barbados", xhtml) # Barbadoes -> Barbados
xhtml = regex.sub(r"jesuit", r"Jesuit", xhtml) # jesuit -> Jesuit
xhtml = regex.sub(r"Roman catholic", r"Roman Catholic", xhtml) # Roman catholic -> Roman Catholic; Note that we can't uppercase `catholic` in the generic sense because `catholic` can mean "worldly"
xhtml = regex.sub(r"Burmah", r"Burma", xhtml) # Burmah -> Burma
# Remove archaic diphthongs
xhtml = regex.sub(r"\b([Mm])edi(æ|ae)val", r"\1edieval", xhtml)
xhtml = xhtml.replace("Cæsar", "Caesar")
xhtml = xhtml.replace("Crœsus", "Croesus")
xhtml = xhtml.replace("\bæon\b", "aeon")
xhtml = xhtml.replace("\bÆon\b", "Aeon")
xhtml = xhtml.replace("Æneas", "Aeneas")
xhtml = xhtml.replace("Æneid", "Aeneid")
xhtml = xhtml.replace("Æschylus", "Aeschylus")
xhtml = xhtml.replace("æsthet", "aesthet") # aesthetic, aesthete, etc.
xhtml = xhtml.replace("Æsthet", "Aesthet") # Aesthetic, Aesthete, etc.
xhtml = regex.sub(r"\b([Hh])yæna", r"\1yena", xhtml)
xhtml = regex.sub(r"\b([Ll])arvæ", r"\1arvae", xhtml)
xhtml = xhtml.replace("Œdip", "Oedip") # Oedipus, Oedipal
xhtml = regex.sub(r"\b([Pp])æan", r"\1aean", xhtml)
xhtml = regex.sub(r"\b([Vv])ertebræ", r"\1ertebrae", xhtml)
# Remove spaces before contractions like `n’t` e.g. `is n’t` -> `isn’t`
xhtml = regex.sub(r" n’t\b", "n’t", xhtml)
# Remove spaces before contractions like `it 'll`
xhtml = regex.sub(r"([\p{Letter}])\s[‘’]ll\b", r"\1’ll", xhtml)
# Remove roman ordinals
xhtml = regex.sub(r"<span epub:type=\"z3998:roman\">(.*?)</span>(st|nd|rd|th)\b", r'<span epub:type="z3998:roman">\1</span>', xhtml)
# X-ray is always capitalized. Match a preceding space so that we don't catch it in an ID attribute.
xhtml = regex.sub(r"([\p{Punctuation}\s])x-ray", r"\1X-ray", xhtml)
# Replace 2d with 2nd and 3d with 3rd
# Check for a following abbr because `3<abbr>d.</abbr>` could mean `3 pence`
xhtml = regex.sub(r"\b([0-9]*2)d(?!</abbr>)", r"\1nd", xhtml)
xhtml = regex.sub(r"\b([0-9]*3)d(?!</abbr>)", r"\1rd", xhtml)
# Canadian spelling follows US
if language in ["en-US", "en-CA"]:
xhtml = regex.sub(r"\b([Cc])osey", r"\1ozy", xhtml)
# Australian spelling follows GB
if language in ["en-GB", "en-AU"]:
xhtml = regex.sub(r"\b([Cc])osey", r"\1osy", xhtml)
# US spelling is unique
if language == "en-US":
xhtml = regex.sub(r"\b([Mm])anœuv(?:er|re)", r"\1aneuver", xhtml) # Omit last letter to catch both maneuverS and maneuverING
xhtml = regex.sub(r"\b([Mm])anœuvering", r"\1aneuvering", xhtml)
else:
xhtml = regex.sub(r"\b([Mm])anœuv(?:er|re)", r"\1anoeuvre", xhtml)
xhtml = regex.sub(r"\b([Mm])anœuvring", r"\1anoeuvring", xhtml)
xhtml = regex.sub(r"\b([Mm])anoeuvreing", r"\1anoeuvring", xhtml)
return xhtml
| 69.672414 | 221 | 0.63496 |
640bfefb859aa35de769fc5572187db2ada7fa4d | 5,712 | py | Python | isi_sdk_9_0_0/isi_sdk_9_0_0/models/create_hardware_tape_name_response.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 24 | 2018-06-22T14:13:23.000Z | 2022-03-23T01:21:26.000Z | isi_sdk_9_0_0/isi_sdk_9_0_0/models/create_hardware_tape_name_response.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 46 | 2018-04-30T13:28:22.000Z | 2022-03-21T21:11:07.000Z | isi_sdk_9_0_0/isi_sdk_9_0_0/models/create_hardware_tape_name_response.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 29 | 2018-06-19T00:14:04.000Z | 2022-02-08T17:51:19.000Z | # coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 10
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from isi_sdk_9_0_0.models.create_hardware_tape_name_response_node import CreateHardwareTapeNameResponseNode # noqa: F401,E501
from isi_sdk_9_0_0.models.node_status_cpu_error import NodeStatusCpuError # noqa: F401,E501
class CreateHardwareTapeNameResponse(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'errors': 'list[NodeStatusCpuError]',
'nodes': 'list[CreateHardwareTapeNameResponseNode]',
'total': 'int'
}
attribute_map = {
'errors': 'errors',
'nodes': 'nodes',
'total': 'total'
}
def __init__(self, errors=None, nodes=None, total=None): # noqa: E501
"""CreateHardwareTapeNameResponse - a model defined in Swagger""" # noqa: E501
self._errors = None
self._nodes = None
self._total = None
self.discriminator = None
if errors is not None:
self.errors = errors
if nodes is not None:
self.nodes = nodes
if total is not None:
self.total = total
@property
def errors(self):
"""Gets the errors of this CreateHardwareTapeNameResponse. # noqa: E501
A list of errors encountered by the individual nodes involved in this request, or an empty list if there were no errors. # noqa: E501
:return: The errors of this CreateHardwareTapeNameResponse. # noqa: E501
:rtype: list[NodeStatusCpuError]
"""
return self._errors
@errors.setter
def errors(self, errors):
"""Sets the errors of this CreateHardwareTapeNameResponse.
A list of errors encountered by the individual nodes involved in this request, or an empty list if there were no errors. # noqa: E501
:param errors: The errors of this CreateHardwareTapeNameResponse. # noqa: E501
:type: list[NodeStatusCpuError]
"""
self._errors = errors
@property
def nodes(self):
"""Gets the nodes of this CreateHardwareTapeNameResponse. # noqa: E501
The responses from the individual nodes involved in this request. # noqa: E501
:return: The nodes of this CreateHardwareTapeNameResponse. # noqa: E501
:rtype: list[CreateHardwareTapeNameResponseNode]
"""
return self._nodes
@nodes.setter
def nodes(self, nodes):
"""Sets the nodes of this CreateHardwareTapeNameResponse.
The responses from the individual nodes involved in this request. # noqa: E501
:param nodes: The nodes of this CreateHardwareTapeNameResponse. # noqa: E501
:type: list[CreateHardwareTapeNameResponseNode]
"""
self._nodes = nodes
@property
def total(self):
"""Gets the total of this CreateHardwareTapeNameResponse. # noqa: E501
The total number of nodes responding. # noqa: E501
:return: The total of this CreateHardwareTapeNameResponse. # noqa: E501
:rtype: int
"""
return self._total
@total.setter
def total(self, total):
"""Sets the total of this CreateHardwareTapeNameResponse.
The total number of nodes responding. # noqa: E501
:param total: The total of this CreateHardwareTapeNameResponse. # noqa: E501
:type: int
"""
if total is not None and total > 2147483647: # noqa: E501
raise ValueError("Invalid value for `total`, must be a value less than or equal to `2147483647`") # noqa: E501
if total is not None and total < 0: # noqa: E501
raise ValueError("Invalid value for `total`, must be a value greater than or equal to `0`") # noqa: E501
self._total = total
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CreateHardwareTapeNameResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 32.089888 | 142 | 0.61292 |
1a96b1d978f6593ae87fde022a7f5afaa97c852a | 2,157 | py | Python | AyiinXd/modules/yinsban.py | AyiinXd/Ayiin-Userbot | 6e2fcbf087c14282eb9f4313954552c06eb52f47 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 3 | 2022-02-05T09:45:49.000Z | 2022-03-25T04:52:11.000Z | AyiinXd/modules/yinsban.py | AyiinXd/Ayiin-Userbot | 6e2fcbf087c14282eb9f4313954552c06eb52f47 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 3 | 2022-03-03T22:35:03.000Z | 2022-03-07T02:40:10.000Z | AyiinXd/modules/yinsban.py | AyiinXd/Ayiin-Userbot | 6e2fcbf087c14282eb9f4313954552c06eb52f47 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 20 | 2022-01-25T05:19:18.000Z | 2022-03-31T08:23:23.000Z | # Port By @VckyouuBitch From GeezProject
# Perkontolan Dengan Hapus Credits
# Recode By : @AyiinXd
from asyncio import sleep
from telethon.tl.types import ChatBannedRights
from telethon.tl.functions.channels import EditBannedRequest
from telethon.tl.types import ChannelParticipantsKicked
from AyiinXd import CMD_HELP
from AyiinXd import CMD_HANDLER as cmd
from AyiinXd.ayiin import ayiin_cmd, eod, eor
from Stringyins import get_string
@ayiin_cmd(pattern="banall(?: |$)(.*)")
async def testing(ayiinxd):
ayiin = await ayiinxd.get_chat()
yins = await ayiinxd.client.get_me()
admin = ayiin.admin_rights
creator = ayiin.creator
if not admin and not creator:
await eod(ayiinxd, get_string("stvc_1").format(yins.first_name))
return
xnxx = await eor(ayiinxd, get_string("yiban_1"))
# Thank for Dark_Cobra
ayiinkontol = await ayiinxd.client.get_participants(ayiinxd.chat_id)
for user in ayiinkontol:
if user.id == yins.id:
pass
try:
xx = await ayiinxd.client(EditBannedRequest(ayiinxd.chat_id, int(user.id), ChatBannedRights(until_date=None, view_messages=True)))
except Exception as e:
await eod(xnxx, get_string("error_1").format(str(e)))
await sleep(.5)
await xnxx.edit(get_string("yiban_2"))
@ayiin_cmd(pattern="unbanall(?: |$)(.*)")
async def _(ayiin):
yins = await eor(ayiin, get_string("yiban_3"))
p = 0
(await ayiin.get_chat()).title
async for i in ayiin.client.iter_participants(
ayiin.chat_id,
filter=ChannelParticipantsKicked,
aggressive=True,
):
try:
await ayiin.client.edit_permissions(ayiin.chat_id, i, view_messages=True)
p += 1
except BaseException:
pass
await yins.edit(get_string("yiban_4").format(p))
CMD_HELP.update(
{
"yinsban": f"**Plugin : **`yinsban`\
\n\n » **Perintah :** `{cmd}banall`\
\n » **Kegunaan :** Banned Semua Member Dalam Satu Ketikan.\
\n\n » **Perintah :** `{cmd}unbanall`\
\n » **Kegunaan :** Membatalkan Banned Anggota Group.\
"
}
)
| 31.720588 | 142 | 0.658785 |
8b089206140cec00916fde72173e860a5876aaa0 | 1,909 | py | Python | src/PriceComparer/middlewares.py | bugsancho/PriceComparer | 21d1c1b38c1d570c914f5d870ed3f3dedb15a886 | [
"MIT"
] | null | null | null | src/PriceComparer/middlewares.py | bugsancho/PriceComparer | 21d1c1b38c1d570c914f5d870ed3f3dedb15a886 | [
"MIT"
] | null | null | null | src/PriceComparer/middlewares.py | bugsancho/PriceComparer | 21d1c1b38c1d570c914f5d870ed3f3dedb15a886 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
class PricecomparerSpiderMiddleware(object):
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the spider middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_spider_input(self, response, spider):
# Called for each response that goes through the spider
# middleware and into the spider.
# Should return None or raise an exception.
return None
def process_spider_output(self, response, result, spider):
# Called with the results returned from the Spider, after
# it has processed the response.
# Must return an iterable of Request, dict or Item objects.
for i in result:
yield i
def process_spider_exception(self, response, exception, spider):
# Called when a spider or process_spider_input() method
# (from other spider middleware) raises an exception.
# Should return either None or an iterable of Response, dict
# or Item objects.
pass
def process_start_requests(self, start_requests, spider):
# Called with the start requests of the spider, and works
# similarly to the process_spider_output() method, except
# that it doesn’t have a response associated.
# Must return only requests (not items).
for r in start_requests:
yield r
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
| 33.491228 | 78 | 0.674175 |
45d00c1e0d73f1faec1dbf2cdac7fbfb4f4154eb | 4,393 | py | Python | src/unity/python/turicreate/test/test_sframe_builder.py | fossabot/turicreate | a500d5e52143ad15ebdf771d9f74198982c7c45c | [
"BSD-3-Clause"
] | 1 | 2019-04-16T19:51:18.000Z | 2019-04-16T19:51:18.000Z | src/unity/python/turicreate/test/test_sframe_builder.py | tashby/turicreate | 7f07ce795833d0c56c72b3a1fb9339bed6d178d1 | [
"BSD-3-Clause"
] | 3 | 2021-09-08T02:18:00.000Z | 2022-03-12T00:39:44.000Z | src/unity/python/turicreate/test/test_sframe_builder.py | tashby/turicreate | 7f07ce795833d0c56c72b3a1fb9339bed6d178d1 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright © 2017 Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can
# be found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
from __future__ import print_function as _
from __future__ import division as _
from __future__ import absolute_import as _
from ..data_structures.sframe import SFrame
import unittest
import array
import datetime as dt
from ..util.timezone import GMT
from ..util import _assert_sframe_equal
class SFrameBuilderTest(unittest.TestCase):
def setUp(self):
self.int_data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
self.float_data = [1., 2., 3., 4., 5., 6., 7., 8., 9., 10.]
self.string_data = ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10"]
self.vec_data = [array.array('d', [i, i+1]) for i in self.int_data]
self.list_data = [[i, str(i), i * 1.0] for i in self.int_data]
self.dict_data = [{str(i): i, i : float(i)} for i in self.int_data]
self.datetime_data = [dt.datetime(2013, 5, 7, 10, 4, 10),
dt.datetime(1902, 10, 21, 10, 34, 10).replace(tzinfo=GMT(0.0))]
self.all_type_cols = [self.int_data,
self.float_data,
self.string_data,
self.vec_data,
self.list_data,
self.dict_data,
self.datetime_data*5]
self.sf_all_types = SFrame({"X"+str(i[0]):i[1] for i in zip(range(1,8),
self.all_type_cols)})
self.all_types = [int,float,str,array.array,list,dict,dt.datetime]
def test_basic(self):
from ..data_structures.sframe_builder import SFrameBuilder
sf_data = list(zip(*self.all_type_cols))
sb = SFrameBuilder(self.all_types)
for i in sf_data:
sb.append(i)
sf = sb.close()
_assert_sframe_equal(sf, self.sf_all_types)
sb = SFrameBuilder(self.all_types)
sb.append_multiple(sf_data)
sf = sb.close()
_assert_sframe_equal(sf, self.sf_all_types)
def test_history(self):
from ..data_structures.sframe_builder import SFrameBuilder
sb = SFrameBuilder([int,float], history_size=10)
sb.append_multiple(([i,i+0.0] for i in range(8)))
hist = sb.read_history(3)
self.assertEquals(hist,[[5,5.0],[6,6.0],[7,7.0]])
hist = sb.read_history(20)
self.assertEquals(hist, [[i,i+0.0] for i in range(8)])
hist = sb.read_history()
self.assertEquals(hist, [[i,i+0.0] for i in range(8)])
sb.append_multiple(([i,i+0.0] for i in range(5)))
hist = sb.read_history(10)
self.assertEquals(hist, [[i,i+0.0] for i in [3,4,5,6,7,0,1,2,3,4]])
sb.append([50,50.0])
hist = sb.read_history(10)
self.assertEquals(hist, [[i,i+0.0] for i in [4,5,6,7,0,1,2,3,4,50]])
hist = sb.read_history(-1)
self.assertEquals(hist, [])
hist = sb.read_history(0)
self.assertEquals(hist, [])
expected_data = [[i,i+0.0] for i in range(8)] + [[i,i+0.0] for i in range(5)] + [[50,50.0]]
cols = [[],[]]
for i in expected_data:
cols[0].append(i[0])
cols[1].append(i[1])
expected_sf = SFrame({'X1':cols[0],'X2':cols[1]})
sf = sb.close()
_assert_sframe_equal(sf,expected_sf)
def test_segments(self):
from ..data_structures.sframe_builder import SFrameBuilder
sb = SFrameBuilder([int],num_segments=4)
sb.append_multiple(([i] for i in range(20,30)), segment=2)
sb.append_multiple(([i] for i in range(10,20)), segment=1)
sb.append_multiple(([i] for i in range(30,40)), segment=3)
sb.append_multiple(([i] for i in range(0,10)), segment=0)
hist = sb.read_history(3, segment=0)
self.assertSequenceEqual(hist, [[7],[8],[9]])
hist = sb.read_history(3, segment=1)
self.assertSequenceEqual(hist, [[17],[18],[19]])
hist = sb.read_history(3, segment=2)
self.assertSequenceEqual(hist, [[27],[28],[29]])
hist = sb.read_history(3, segment=3)
self.assertSequenceEqual(hist, [[37],[38],[39]])
sf = sb.close()
expected_sf = SFrame({'X1':range(40)})
_assert_sframe_equal(sf, expected_sf)
| 40.302752 | 99 | 0.582062 |
4e62c4548f59bd9bbb7245535174e7cc111eab96 | 5,105 | py | Python | orquesta/expressions/base.py | igcherkaev/orquesta | 2baa66d33f53cb04b660b3ce284a52d478ecc528 | [
"Apache-2.0"
] | 85 | 2018-07-26T04:29:49.000Z | 2022-03-31T10:47:50.000Z | orquesta/expressions/base.py | igcherkaev/orquesta | 2baa66d33f53cb04b660b3ce284a52d478ecc528 | [
"Apache-2.0"
] | 149 | 2018-07-27T22:36:45.000Z | 2022-03-31T10:54:32.000Z | orquesta/expressions/base.py | igcherkaev/orquesta | 2baa66d33f53cb04b660b3ce284a52d478ecc528 | [
"Apache-2.0"
] | 24 | 2018-08-07T13:37:41.000Z | 2021-12-16T18:12:43.000Z | # Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import inspect
import logging
import re
import six
import threading
from stevedore import extension
from orquesta.utils import expression as expr_util
from orquesta.utils import plugin as plugin_util
LOG = logging.getLogger(__name__)
_EXP_EVALUATORS = None
_EXP_EVALUATORS_LOCK = threading.Lock()
_EXP_EVALUATOR_NAMESPACE = "orquesta.expressions.evaluators"
@six.add_metaclass(abc.ABCMeta)
class Evaluator(object):
_type = "unspecified"
_delimiter = None
@classmethod
def get_type(cls):
return cls._type
@classmethod
def strip_delimiter(cls, expr):
return expr.strip(cls._delimiter).strip()
@classmethod
def get_statement_regex(cls):
raise NotImplementedError()
@classmethod
def has_expressions(cls, text):
raise NotImplementedError()
@classmethod
@abc.abstractmethod
def validate(cls, statement):
raise NotImplementedError()
@classmethod
@abc.abstractmethod
def evaluate(cls, text, data=None):
raise NotImplementedError()
@classmethod
@abc.abstractmethod
def extract_vars(cls, statement):
raise NotImplementedError()
def get_evaluator(language):
return plugin_util.get_module(_EXP_EVALUATOR_NAMESPACE, language)
def get_evaluators():
global _EXP_EVALUATORS
global _EXP_EVALUATORS_LOCK
with _EXP_EVALUATORS_LOCK:
if _EXP_EVALUATORS is None:
_EXP_EVALUATORS = {}
mgr = extension.ExtensionManager(
namespace=_EXP_EVALUATOR_NAMESPACE, invoke_on_load=False
)
for name in mgr.names():
_EXP_EVALUATORS[name] = get_evaluator(name)
return _EXP_EVALUATORS
def get_statement_regexes():
return {t: e.get_statement_regex() for t, e in six.iteritems(get_evaluators())}
def has_expressions(text):
result = {t: e.has_expressions(text) for t, e in six.iteritems(get_evaluators())}
return any(result.values())
def validate(statement):
errors = []
if isinstance(statement, dict):
for k, v in six.iteritems(statement):
errors.extend(validate(k)["errors"])
errors.extend(validate(v)["errors"])
elif isinstance(statement, list):
for item in statement:
errors.extend(validate(item)["errors"])
elif isinstance(statement, six.string_types):
evaluators = [
evaluator
for name, evaluator in six.iteritems(get_evaluators())
if evaluator.has_expressions(statement)
]
if len(evaluators) == 1:
errors.extend(evaluators[0].validate(statement))
elif len(evaluators) > 1:
message = "Expression with multiple types is not supported."
errors.append(expr_util.format_error(None, statement, message))
return {"errors": errors}
def evaluate(statement, data=None):
if isinstance(statement, dict):
return {evaluate(k, data=data): evaluate(v, data=data) for k, v in six.iteritems(statement)}
elif isinstance(statement, list):
return [evaluate(item, data=data) for item in statement]
elif isinstance(statement, six.string_types):
for name, evaluator in six.iteritems(get_evaluators()):
if evaluator.has_expressions(statement):
return evaluator.evaluate(statement, data=data)
return statement
def extract_vars(statement):
variables = []
if isinstance(statement, dict):
for k, v in six.iteritems(statement):
variables.extend(extract_vars(k))
variables.extend(extract_vars(v))
elif isinstance(statement, list):
for item in statement:
variables.extend(extract_vars(item))
elif isinstance(statement, six.string_types):
for name, evaluator in six.iteritems(get_evaluators()):
for var_ref in evaluator.extract_vars(statement):
for regex_var_extract in evaluator.get_var_extraction_regexes():
result = re.search(regex_var_extract, var_ref)
var = result.group(1) if result else ""
variables.append((evaluator.get_type(), statement, var))
variables = [v for v in variables if v[2] != ""]
return sorted(list(set(variables)), key=lambda var: var[2])
def func_has_ctx_arg(func):
getargspec = (
inspect.getargspec if six.PY2 else inspect.getfullargspec # pylint: disable=no-member
)
return "context" in getargspec(func).args
| 28.679775 | 100 | 0.678746 |
eabfa17177e0c50fd32702cecb6186777266a9a5 | 1,019 | py | Python | sc2/controller.py | dbelliss/python-sc2 | 8ccc2c529550c44cc0c93b9a4f6702339fc2831d | [
"MIT"
] | 1 | 2018-05-16T06:14:12.000Z | 2018-05-16T06:14:12.000Z | sc2/controller.py | dbelliss/python-sc2 | 8ccc2c529550c44cc0c93b9a4f6702339fc2831d | [
"MIT"
] | null | null | null | sc2/controller.py | dbelliss/python-sc2 | 8ccc2c529550c44cc0c93b9a4f6702339fc2831d | [
"MIT"
] | null | null | null | from s2clientprotocol import sc2api_pb2 as sc_pb
from .protocol import Protocol
from .player import Computer
import logging
logger = logging.getLogger(__name__)
class Controller(Protocol):
def __init__(self, ws):
super().__init__(ws)
async def create_game(self, game_map, players, realtime):
assert isinstance(realtime, bool)
req = sc_pb.RequestCreateGame(
local_map=sc_pb.LocalMap(
map_path=str(game_map.path)
),
realtime=realtime
)
for player in players:
p = req.player_setup.add()
p.type = player.type.value
if isinstance(player, Computer):
p.race = player.race.value
p.difficulty = player.difficulty.value
logger.info("Creating new game")
logger.info(f"Map: {game_map.name}")
logger.info(f"Players: {', '.join(str(p) for p in players)}")
result = await self._execute(create_game=req)
return result
| 30.878788 | 69 | 0.613346 |
e8474ee4171b46a0e1537b469c5c84d93f9697f2 | 67 | py | Python | PacManTaro/create_app.py | SomHackathon2020/somhackathon2020-pacmantaro | 9fb3fb83ada72fa043e61d2c44b8b02f14cbe60a | [
"MIT"
] | 3 | 2020-02-09T10:56:49.000Z | 2020-02-09T12:14:26.000Z | PacManTaro/create_app.py | SomHackathon2020/somhackathon2020-pacmantaro | 9fb3fb83ada72fa043e61d2c44b8b02f14cbe60a | [
"MIT"
] | null | null | null | PacManTaro/create_app.py | SomHackathon2020/somhackathon2020-pacmantaro | 9fb3fb83ada72fa043e61d2c44b8b02f14cbe60a | [
"MIT"
] | null | null | null | from __init__ import db, create_app
db.create_all(app=create_app()) | 33.5 | 35 | 0.820896 |
61d784a94017623cb9d16e8b7f532318cfa8d284 | 422 | py | Python | tests/mock_requests_responses.py | varunvarma/panoptes | 733e1b17e01d47fe0a399e2fe635f614cc5a0b88 | [
"Apache-2.0"
] | 1 | 2020-07-14T20:43:32.000Z | 2020-07-14T20:43:32.000Z | tests/mock_requests_responses.py | nghia-tran/panoptes | f91d6cd5e2afa3e1ecd9976bdfd13be3a10f0c00 | [
"Apache-2.0"
] | null | null | null | tests/mock_requests_responses.py | nghia-tran/panoptes | f91d6cd5e2afa3e1ecd9976bdfd13be3a10f0c00 | [
"Apache-2.0"
] | null | null | null | """
Copyright 2018, Oath Inc.
Licensed under the terms of the Apache 2.0 license. See LICENSE file in project root for terms.
"""
class MockRequestsResponseBadRequest(object):
status_code = 400
content = 'Bad Request'
class MockRequestsResponseServerFailure(object):
status_code = 500
content = 'Internal Server Failure'
class MockRequestsResponseOK(object):
status_code = 200
content = 'OK'
| 21.1 | 95 | 0.734597 |
b243675c5c002c0ba739115212b854d57d033ad5 | 15,809 | py | Python | test/api_tests.py | echoprotocol/echopy-lib | 7fe11eeeba727023ea61e19b2c1b0ff5c097a2f3 | [
"MIT"
] | 7 | 2019-02-26T18:28:24.000Z | 2019-12-18T14:47:04.000Z | test/api_tests.py | echoprotocol/echopy-lib | 7fe11eeeba727023ea61e19b2c1b0ff5c097a2f3 | [
"MIT"
] | null | null | null | test/api_tests.py | echoprotocol/echopy-lib | 7fe11eeeba727023ea61e19b2c1b0ff5c097a2f3 | [
"MIT"
] | 2 | 2019-09-20T10:20:54.000Z | 2020-03-20T10:06:55.000Z | import unittest
from .fixtures import connect_echo, disconnect_echo, get_keys
from echopy.echoapi.ws.exceptions import RPCError
class ApiTest(unittest.TestCase):
def setUp(self):
self.echo = connect_echo()
def tearDown(self):
disconnect_echo(self.echo)
# ASSET API TESTS
def test_get_asset_holders(self):
api = self.echo.api.asset
get_asset_holders_result = api.get_asset_holders('1.3.0', 1, 1)
self.assertIsInstance(get_asset_holders_result, list)
self.assertTrue(len(get_asset_holders_result))
self.assertIsInstance(get_asset_holders_result[0], dict)
self.assertTrue(len(get_asset_holders_result[0].keys()))
self.assertIsInstance(get_asset_holders_result[0]['name'], str)
self.assertIsInstance(get_asset_holders_result[0]['account_id'], str)
self.assertIsInstance(get_asset_holders_result[0]['amount'], int)
def test_get_asset_holders_count(self):
api = self.echo.api.asset
get_asset_holders_count_result = api.get_asset_holders_count('1.3.0')
self.assertIsInstance(get_asset_holders_count_result, int)
def test_get_all_asset_holders(self):
api = self.echo.api.asset
get_all_asset_holders_result = api.get_all_asset_holders()
self.assertIsInstance(get_all_asset_holders_result, list)
self.assertTrue(len(get_all_asset_holders_result))
self.assertIsInstance(get_all_asset_holders_result[0], dict)
self.assertTrue(len(get_all_asset_holders_result[0].keys()))
self.assertIsInstance(get_all_asset_holders_result[0]['asset_id'], str)
self.assertIsInstance(get_all_asset_holders_result[0]['count'], int)
# DATABASE API TESTS
def test_get_chain_properties(self):
api = self.echo.api.database
get_chain_properties_result = api.get_chain_properties()
self.assertIsInstance(get_chain_properties_result, dict)
self.assertIsInstance(get_chain_properties_result['chain_id'], str)
self.assertIsInstance(get_chain_properties_result['id'], str)
self.assertIsInstance(get_chain_properties_result['immutable_parameters'], dict)
self.assertTrue(len(get_chain_properties_result['immutable_parameters'].keys()))
def test_get_global_properties(self):
api = self.echo.api.database
get_global_properties_result = api.get_global_properties()
self.assertIsInstance(get_global_properties_result, dict)
self.assertIsInstance(get_global_properties_result['active_committee_members'], list)
self.assertIsInstance(get_global_properties_result['id'], str)
self.assertIsInstance(get_global_properties_result['next_available_vote_id'], int)
self.assertIsInstance(get_global_properties_result['parameters'], dict)
self.assertTrue(len(get_global_properties_result['parameters'].keys()))
def test_get_config(self):
api = self.echo.api.database
get_config_result = api.get_config()
self.assertIsInstance(get_config_result, dict)
self.assertTrue(len(get_config_result.keys()))
def test_get_chain_id(self):
api = self.echo.api.database
get_chain_id_result = api.get_chain_id()
self.assertIsInstance(get_chain_id_result, str)
self.assertTrue(len(get_chain_id_result))
def test_get_dynamic_global_properties(self):
api = self.echo.api.database
get_dynamic_global_properties_result = api.get_dynamic_global_properties()
self.assertIsInstance(get_dynamic_global_properties_result, dict)
self.assertTrue(len(get_dynamic_global_properties_result.keys()))
def test_get_block(self):
api = self.echo.api.database
block_number = 20
get_block_result = api.get_block(block_number)
self.assertIsInstance(get_block_result, dict)
self.assertTrue(len(get_block_result.keys()))
def test_get_transaction(self):
api = self.echo.api.database
block_number = 3028
transaction_index = 0
get_transaction_result = api.get_transaction(block_number, transaction_index)
self.assertIsInstance(get_transaction_result, dict)
self.assertTrue(len(get_transaction_result.keys()))
def test_get_accounts(self):
api = self.echo.api.database
account_id1 = '1.2.5'
account_id2 = '1.2.6'
accounts = [account_id1, account_id2]
get_accounts_result = api.get_accounts(accounts)
self.assertIsInstance(get_accounts_result, list)
self.assertEqual(len(get_accounts_result), len(accounts))
for i in range(len(accounts)):
self.assertTrue(len(get_accounts_result[i]))
def test_get_full_accounts_result(self):
api = self.echo.api.database
account_id1 = '1.2.5'
account_id2 = '1.2.6'
accounts = [account_id1, account_id2]
get_full_accounts_result = api.get_full_accounts(accounts, False)
self.assertIsInstance(get_full_accounts_result, list)
self.assertEqual(len(get_full_accounts_result), len(accounts))
for i in range(len(accounts)):
self.assertIsInstance(get_full_accounts_result[i], list)
self.assertEqual(len(get_full_accounts_result[i]), 2)
self.assertIsInstance(get_full_accounts_result[i][0], str)
self.assertTrue(len(get_full_accounts_result[i][0]))
self.assertIsInstance(get_full_accounts_result[i][1], dict)
self.assertTrue(len(get_full_accounts_result[i][1].keys()))
def test_get_account_count(self):
api = self.echo.api.database
get_account_count_result = api.get_account_count()
self.assertIsInstance(get_account_count_result, int)
def test_lookup_asset_symbols(self):
api = self.echo.api.database
asset_key = 'ECHO'
assets = [asset_key]
lookup_asset_symbols_result = api.lookup_asset_symbols(assets)
self.assertIsInstance(lookup_asset_symbols_result, list)
self.assertEqual(len(lookup_asset_symbols_result), len(assets))
for i in range(len(assets)):
self.assertIsInstance(lookup_asset_symbols_result[i], dict)
self.assertTrue(len(lookup_asset_symbols_result[i].keys()))
def test_get_assets(self):
api = self.echo.api.database
asset_id = '1.3.0'
assets = [asset_id]
get_assets_result = api.get_assets(assets)
self.assertIsInstance(get_assets_result, list)
self.assertEqual(len(get_assets_result), len(assets))
for i in range(len(assets)):
self.assertIsInstance(get_assets_result[i], dict)
self.assertTrue(len(get_assets_result[i].keys()))
def test_get_objects(self):
api = self.echo.api.database
account_id = '1.2.5'
asset_id = '1.3.0'
objects = [account_id, asset_id]
get_objects_result = api.get_objects(objects)
self.assertIsInstance(get_objects_result, list)
self.assertEqual(len(get_objects_result), len(objects))
for i in range(len(objects)):
self.assertIsInstance(get_objects_result[i], dict)
self.assertTrue(len(get_objects_result[i].keys()))
def test_get_committee_members(self):
api = self.echo.api.database
committee_member = '1.5.1'
committee_members = [committee_member]
get_committee_members_result = api.get_committee_members(committee_members)
self.assertIsInstance(get_committee_members_result, list)
self.assertEqual(len(get_committee_members_result), len(committee_members))
for i in range(len(committee_members)):
self.assertIsInstance(get_committee_members_result[i], dict)
self.assertTrue(len(get_committee_members_result[i].keys()))
self.assertEqual(get_committee_members_result[i]['id'], committee_members[i])
def test_get_account_by_name(self):
api = self.echo.api.database
account_name = 'nathan'
get_account_by_name_result = api.get_account_by_name(account_name)
self.assertIsInstance(get_account_by_name_result, dict)
self.assertTrue(len(get_account_by_name_result.keys()))
def test_lookup_accounts(self):
api = self.echo.api.database
lower_bound_name = 't'
count = 2
lookup_accounts_result = api.lookup_accounts(lower_bound_name, count)
self.assertIsInstance(lookup_accounts_result, list)
self.assertEqual(len(lookup_accounts_result), count)
for i in range(count):
self.assertIsInstance(lookup_accounts_result[i], list)
self.assertIsInstance(lookup_accounts_result[i][0], str)
self.assertIsInstance(lookup_accounts_result[i][1], str)
self.assertEqual(lookup_accounts_result[i][1][:3], '1.2')
def test_list_assets(self):
api = self.echo.api.database
lower_bound_symbol = 'E'
count = 2
list_assets_result = api.list_assets(lower_bound_symbol, count)
self.assertIsInstance(list_assets_result, list)
self.assertEqual(len(list_assets_result), count)
for i in range(count):
self.assertIsInstance(list_assets_result[i], dict)
self.assertTrue(len(list_assets_result[i].keys()))
def test_get_block_header(self):
api = self.echo.api.database
block_number = 20
get_block_header_result = api.get_block_header(block_number)
self.assertIsInstance(get_block_header_result, dict)
self.assertTrue(len(get_block_header_result.keys()))
def test_get_contract(self):
api = self.echo.api.database
contract_id = '1.14.0'
get_contract_result = api.get_contract(contract_id)
self.assertIsInstance(get_contract_result, list)
self.assertIsInstance(get_contract_result[0], int)
self.assertIsInstance(get_contract_result[1], dict)
self.assertIn('code', get_contract_result[1])
self.assertIsInstance(get_contract_result[1]['code'], str)
if 'storage' in get_contract_result[1]:
self.assertIsInstance(get_contract_result[1]['storage'], list)
for elem in get_contract_result[1]['storage']:
self.assertIsInstance(elem, list)
self.assertEqual(len(elem), 2)
self.assertIsInstance(elem[0], str)
self.assertIsInstance(elem[1], list)
self.assertEqual(len(elem[1]), 2)
for part in elem[1]:
self.assertIsInstance(part, str)
def test_get_contracts(self):
api = self.echo.api.database
contract_id = '1.14.0'
contracts = [contract_id]
get_contracts_result = api.get_contracts(contracts)
self.assertIsInstance(get_contracts_result, list)
self.assertEqual(len(get_contracts_result), len(contracts))
self.assertIsInstance(get_contracts_result[0], dict)
self.assertTrue(len(get_contracts_result[0].keys()))
def test_lookup_vote_ids(self):
api = self.echo.api.database
committee_vote_id = '0:1'
vote_ids = [committee_vote_id]
lookup_vote_ids_result = api.lookup_vote_ids(vote_ids)
self.assertIsInstance(lookup_vote_ids_result, list)
self.assertEqual(len(lookup_vote_ids_result), len(vote_ids))
for i in range(len(vote_ids)):
self.assertIsInstance(lookup_vote_ids_result[i], dict)
self.assertTrue(len(lookup_vote_ids_result[i]))
self.assertEqual(lookup_vote_ids_result[i]['vote_id'], vote_ids[i])
def test_get_committee_member_by_account(self):
api = self.echo.api.database
account_id = '1.2.6'
get_committee_member_by_account_result = api.get_committee_member_by_account(account_id)
self.assertIsInstance(get_committee_member_by_account_result, dict)
self.assertTrue(len(get_committee_member_by_account_result.keys()))
self.assertEqual(get_committee_member_by_account_result['committee_member_account'], account_id)
def test_get_sidechain_transfers(self):
api = self.echo.api.database
ethereum_address = '17A686Cc581e0582e0213Ec49153Af6c1941CAc7'
get_sidechain_transfers_result = api.get_sidechain_transfers(ethereum_address)
self.assertIsInstance(get_sidechain_transfers_result, list)
# REGISTRATION API TESTS
def test_register_account(self):
api = self.echo.api.registration
private_base58, public_base58, private_hex, public_hex = get_keys()
account_name = 'testversion1'
memo = 'ECHO59St8wBpta2ZREBnA3dQQTVFBrEcx5UK12Tm5geG7kv7Hwyzyc'
with self.assertRaises(RPCError) as cm:
api.register_account('1', account_name, public_base58, public_base58, memo, public_base58)
self.assertIn('Assert Exception', str(cm.exception))
self.assertIn('Account with this name already exists', str(cm.exception))
# HISTORY API TESTS
def test_get_account_history(self):
api = self.echo.api.history
account_id = '1.2.2'
limit = 3
get_account_history_result = api.get_account_history(account_id, limit=limit)
self.assertIsInstance(get_account_history_result, list)
if len(get_account_history_result):
self.assertLessEqual(len(get_account_history_result), limit)
for history_point in get_account_history_result:
self.assertIsInstance(history_point, dict)
self.assertTrue(len(history_point.keys()))
def test_get_relative_account_history(self):
api = self.echo.api.history
account_id = '1.2.22'
start = stop = 0
limit = 3
get_relative_account_history_result = api.get_relative_account_history(account_id, stop, limit, start)
self.assertIsInstance(get_relative_account_history_result, list)
if len(get_relative_account_history_result):
self.assertLessEqual(len(get_relative_account_history_result), limit)
for history_point in get_relative_account_history_result:
self.assertIsInstance(history_point, dict)
self.assertTrue(len(history_point.keys()))
def test_get_account_history_operations(self):
api = self.echo.api.history
account_id = '1.2.22'
operation_id = 0
limit = 3
get_account_history_operations_result = api.get_account_history_operations(account_id,
operation_id,
limit=limit)
self.assertIsInstance(get_account_history_operations_result, list)
if len(get_account_history_operations_result):
self.assertLessEqual(len(get_account_history_operations_result), limit)
for history_point in get_account_history_operations_result:
self.assertIsInstance(history_point, dict)
self.assertTrue(len(history_point.keys()))
def test_get_contract_history(self):
api = self.echo.api.history
contract_id = '1.14.7'
limit = 3
get_contract_history_result = api.get_contract_history(contract_id, limit=limit)
self.assertIsInstance(get_contract_history_result, list)
if len(get_contract_history_result):
self.assertLessEqual(len(get_contract_history_result), limit)
for i in range(min(limit, len(get_contract_history_result))):
self.assertIsInstance(get_contract_history_result[i], dict)
self.assertTrue(len(get_contract_history_result[i].keys()))
| 39.228288 | 110 | 0.690746 |
b91b1c026ed9f424f4171b2cca83e9bdcfa91607 | 362 | py | Python | venv/lib/python3.9/site-packages/django_postgres_extensions/signals.py | boshanxiang/bernoulli_backend | d446a33014d7bd9b5232a921edfd377d05fe0121 | [
"MIT"
] | null | null | null | venv/lib/python3.9/site-packages/django_postgres_extensions/signals.py | boshanxiang/bernoulli_backend | d446a33014d7bd9b5232a921edfd377d05fe0121 | [
"MIT"
] | null | null | null | venv/lib/python3.9/site-packages/django_postgres_extensions/signals.py | boshanxiang/bernoulli_backend | d446a33014d7bd9b5232a921edfd377d05fe0121 | [
"MIT"
] | null | null | null | def delete_reverse_related(sender, signal, instance, using, **kwargs):
for related in instance._meta.related_objects:
field = related.field
if getattr(field, 'many_to_many_array', False):
accessor_name = field.get_reverse_accessor_name()
accessor = getattr(instance, accessor_name)
accessor.clear()
| 45.25 | 71 | 0.668508 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.