hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c327c3b65f8a3b5bc4ad8215b779b5c5535b24b | 19,759 | py | Python | flask_appbuilder/tests/test_mongoengine.py | paulvic/Flask-AppBuilder | 44638156b7ec65e9a77ae474aed75f0de949b505 | [
"BSD-3-Clause"
] | null | null | null | flask_appbuilder/tests/test_mongoengine.py | paulvic/Flask-AppBuilder | 44638156b7ec65e9a77ae474aed75f0de949b505 | [
"BSD-3-Clause"
] | null | null | null | flask_appbuilder/tests/test_mongoengine.py | paulvic/Flask-AppBuilder | 44638156b7ec65e9a77ae474aed75f0de949b505 | [
"BSD-3-Clause"
] | 1 | 2019-09-24T16:23:50.000Z | 2019-09-24T16:23:50.000Z | import datetime
import logging
import os
import random
import string
from flask_appbuilder.charts.views import (
DirectByChartView,
DirectChartView,
GroupByChartView
)
from flask_appbuilder.models.group import aggregate_avg, aggregate_count, aggregate_sum
from flask_appbuilder.models.mongoengine.filters import FilterEqual, FilterStartsWith
from flask_appbuilder.views import CompactCRUDMixin, MasterDetailView
from flask_mongoengine import MongoEngine
import jinja2
from nose.tools import eq_, ok_
from .base import FABTestCase
from .mongoengine.models import Model1, Model2
logging.basicConfig(format="%(asctime)s:%(levelname)s:%(name)s:%(message)s")
logging.getLogger().setLevel(logging.DEBUG)
"""
Constant english display string from framework
"""
DEFAULT_INDEX_STRING = "Welcome"
INVALID_LOGIN_STRING = "Invalid login"
ACCESS_IS_DENIED = "Access is Denied"
UNIQUE_VALIDATION_STRING = "Already exists"
NOTNULL_VALIDATION_STRING = "This field is required"
DEFAULT_ADMIN_USER = "admin"
DEFAULT_ADMIN_PASSWORD = "general"
log = logging.getLogger(__name__)
class FlaskTestCase(FABTestCase):
def setUp(self):
from flask import Flask
from flask_appbuilder import AppBuilder
from flask_appbuilder.models.mongoengine.interface import MongoEngineInterface
from flask_appbuilder import ModelView
from flask_appbuilder.security.mongoengine.manager import SecurityManager
self.app = Flask(__name__)
self.app.jinja_env.undefined = jinja2.StrictUndefined
self.basedir = os.path.abspath(os.path.dirname(__file__))
self.app.config["MONGODB_SETTINGS"] = {"DB": "test"}
self.app.config["CSRF_ENABLED"] = False
self.app.config["SECRET_KEY"] = "thisismyscretkey"
self.app.config["WTF_CSRF_ENABLED"] = False
self.db = MongoEngine(self.app)
self.appbuilder = AppBuilder(self.app, security_manager_class=SecurityManager)
class Model2View(ModelView):
datamodel = MongoEngineInterface(Model2)
list_columns = [
"field_integer",
"field_float",
"field_string",
"field_method",
"group.field_string",
]
edit_form_query_rel_fields = {
"group": [["field_string", FilterEqual, "G2"]]
}
add_form_query_rel_fields = {"group": [["field_string", FilterEqual, "G1"]]}
add_exclude_columns = ["excluded_string"]
class Model22View(ModelView):
datamodel = MongoEngineInterface(Model2)
list_columns = [
"field_integer",
"field_float",
"field_string",
"field_method",
"group.field_string",
]
add_exclude_columns = ["excluded_string"]
edit_exclude_columns = ["excluded_string"]
show_exclude_columns = ["excluded_string"]
class Model1View(ModelView):
datamodel = MongoEngineInterface(Model1)
related_views = [Model2View]
list_columns = ["field_string", "field_file"]
class Model1CompactView(CompactCRUDMixin, ModelView):
datamodel = MongoEngineInterface(Model1)
class Model1Filtered1View(ModelView):
datamodel = MongoEngineInterface(Model1)
base_filters = [["field_string", FilterStartsWith, "a"]]
class Model1MasterView(MasterDetailView):
datamodel = MongoEngineInterface(Model1)
related_views = [Model2View]
class Model1Filtered2View(ModelView):
datamodel = MongoEngineInterface(Model1)
base_filters = [["field_integer", FilterEqual, 0]]
class Model2GroupByChartView(GroupByChartView):
datamodel = MongoEngineInterface(Model2)
chart_title = "Test Model1 Chart"
definitions = [
{
"group": "field_string",
"series": [
(
aggregate_sum,
"field_integer",
aggregate_avg,
"field_integer",
aggregate_count,
"field_integer",
)
],
}
]
class Model2DirectByChartView(DirectByChartView):
datamodel = MongoEngineInterface(Model2)
chart_title = "Test Model1 Chart"
definitions = [
{"group": "field_string", "series": ["field_integer", "field_float"]}
]
class Model2DirectChartView(DirectChartView):
datamodel = MongoEngineInterface(Model2)
chart_title = "Test Model1 Chart"
direct_columns = {"stat1": ("group", "field_integer")}
class Model1MasterChartView(MasterDetailView):
datamodel = MongoEngineInterface(Model1)
related_views = [Model2DirectByChartView]
self.appbuilder.add_view(Model1View, "Model1", category="Model1")
self.appbuilder.add_view(Model1CompactView, "Model1Compact", category="Model1")
self.appbuilder.add_view(Model1MasterView, "Model1Master", category="Model1")
self.appbuilder.add_view(
Model1MasterChartView, "Model1MasterChart", category="Model1"
)
self.appbuilder.add_view(
Model1Filtered1View, "Model1Filtered1", category="Model1"
)
self.appbuilder.add_view(
Model1Filtered2View, "Model1Filtered2", category="Model1"
)
self.appbuilder.add_view(Model2View, "Model2")
self.appbuilder.add_view(Model22View, "Model22")
self.appbuilder.add_view(Model2View, "Model2 Add", href="/model2view/add")
self.appbuilder.add_view(Model2GroupByChartView, "Model2 Group By Chart")
self.appbuilder.add_view(Model2DirectByChartView, "Model2 Direct By Chart")
self.appbuilder.add_view(Model2DirectChartView, "Model2 Direct Chart")
role_admin = self.appbuilder.sm.find_role("Admin")
try:
self.appbuilder.sm.add_user(
"admin", "admin", "user", "admin@fab.org", role_admin, "general"
)
except Exception:
pass
def tearDown(self):
self.appbuilder = None
self.app = None
self.db = None
log.debug("TEAR DOWN")
""" ---------------------------------
TEST HELPER FUNCTIONS
---------------------------------
"""
def insert_data(self):
for x, i in zip(string.ascii_letters[:23], range(23)):
model = Model1(field_string="%stest" % (x), field_integer=i)
model.save()
def insert_data2(self):
models1 = [
Model1(field_string="G1"),
Model1(field_string="G2"),
Model1(field_string="G3"),
]
for model1 in models1:
try:
model1.save()
for x, i in zip(string.ascii_letters[:10], range(10)):
model = Model2(
field_string="%stest" % (x),
field_integer=random.randint(1, 10),
field_float=random.uniform(0.0, 1.0),
group=model1,
)
year = random.choice(range(1900, 2012))
month = random.choice(range(1, 12))
day = random.choice(range(1, 28))
model.field_date = datetime.datetime(year, month, day)
model.save()
except Exception as e:
print("ERROR {0}".format(str(e)))
def clean_data(self):
Model1.drop_collection()
Model2.drop_collection()
def test_fab_views(self):
"""
Test views creation and registration
"""
eq_(len(self.appbuilder.baseviews), 26) # current minimal views are 26
def test_index(self):
"""
Test initial access and index message
"""
client = self.app.test_client()
# Check for Welcome Message
rv = client.get("/")
data = rv.data.decode("utf-8")
ok_(DEFAULT_INDEX_STRING in data)
def test_sec_login(self):
"""
Test Security Login, Logout, invalid login, invalid access
"""
client = self.app.test_client()
# Try to List and Redirect to Login
rv = client.get("/model1view/list/")
eq_(rv.status_code, 302)
rv = client.get("/model2view/list/")
eq_(rv.status_code, 302)
# Login and list with admin
self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
rv = client.get("/model1view/list/")
eq_(rv.status_code, 200)
rv = client.get("/model2view/list/")
eq_(rv.status_code, 200)
# Logout and and try to list
self.browser_logout(client)
rv = client.get("/model1view/list/")
eq_(rv.status_code, 302)
rv = client.get("/model2view/list/")
eq_(rv.status_code, 302)
# Invalid Login
rv = self.browser_login(client, DEFAULT_ADMIN_USER, "password")
data = rv.data.decode("utf-8")
ok_(INVALID_LOGIN_STRING in data)
def test_sec_reset_password(self):
"""
Test Security reset password
"""
from flask_appbuilder.security.mongoengine.models import User
client = self.app.test_client()
# Try Reset My password
user = User.objects.filter(**{"username": "admin"})[0]
rv = client.get(
"/users/action/resetmypassword/{0}".format(user.id), follow_redirects=True
)
data = rv.data.decode("utf-8")
ok_(ACCESS_IS_DENIED in data)
# Reset My password
rv = self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
rv = client.get(
"/users/action/resetmypassword/{0}".format(user.id), follow_redirects=True
)
data = rv.data.decode("utf-8")
ok_("Reset Password Form" in data)
rv = client.post(
"/resetmypassword/form",
data=dict(password="password", conf_password="password"),
follow_redirects=True,
)
eq_(rv.status_code, 200)
self.browser_logout(client)
self.browser_login(client, DEFAULT_ADMIN_USER, "password")
rv = client.post(
"/resetmypassword/form",
data=dict(
password=DEFAULT_ADMIN_PASSWORD, conf_password=DEFAULT_ADMIN_PASSWORD
),
follow_redirects=True,
)
eq_(rv.status_code, 200)
# Reset Password Admin
rv = client.get(
"/users/action/resetpasswords/{0}".format(user.id), follow_redirects=True
)
data = rv.data.decode("utf-8")
ok_("Reset Password Form" in data)
rv = client.post(
"/resetmypassword/form",
data=dict(
password=DEFAULT_ADMIN_PASSWORD, conf_password=DEFAULT_ADMIN_PASSWORD
),
follow_redirects=True,
)
eq_(rv.status_code, 200)
def test_generic_interface(self):
"""
Test Generic Interface for generic-alter datasource
"""
client = self.app.test_client()
self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
rv = client.get("/psview/list")
rv.data.decode("utf-8")
def test_model_crud(self):
"""
Test Model add, delete, edit
"""
client = self.app.test_client()
rv = self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
rv = client.post(
"/model1view/add",
data=dict(
field_string="test1",
field_integer="1",
field_float="0.12",
field_date="2014-01-01 23:10:07",
),
follow_redirects=True,
)
eq_(rv.status_code, 200)
model = Model1.objects[0]
eq_(model.field_string, u"test1")
eq_(model.field_integer, 1)
model1 = Model1.objects(field_string="test1")[0]
rv = client.post(
"/model1view/edit/{0}".format(model1.id),
data=dict(field_string="test2", field_integer="2"),
follow_redirects=True,
)
eq_(rv.status_code, 200)
model = Model1.objects[0]
eq_(model.field_string, u"test2")
eq_(model.field_integer, 2)
rv = client.get(
"/model1view/delete/{0}".format(model.id), follow_redirects=True
)
eq_(rv.status_code, 200)
model = Model1.objects
eq_(len(model), 0)
self.clean_data()
def test_excluded_cols(self):
"""
Test add_exclude_columns, edit_exclude_columns, show_exclude_columns
"""
client = self.app.test_client()
rv = self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
rv = client.get("/model22view/add")
eq_(rv.status_code, 200)
data = rv.data.decode("utf-8")
ok_("field_string" in data)
ok_("field_integer" in data)
ok_("field_float" in data)
ok_("field_date" in data)
ok_("excluded_string" not in data)
self.insert_data2()
model2 = Model2.objects[0]
rv = client.get("/model22view/edit/{0}".format(model2.id))
eq_(rv.status_code, 200)
data = rv.data.decode("utf-8")
ok_("field_string" in data)
ok_("field_integer" in data)
ok_("field_float" in data)
ok_("field_date" in data)
ok_("excluded_string" not in data)
rv = client.get("/model22view/show/{0}".format(model2.id))
eq_(rv.status_code, 200)
data = rv.data.decode("utf-8")
ok_("Field String" in data)
ok_("Field Integer" in data)
ok_("Field Float" in data)
ok_("Field Date" in data)
ok_("Excluded String" not in data)
self.clean_data()
def test_query_rel_fields(self):
"""
Test add and edit form related fields filter
"""
client = self.app.test_client()
rv = self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
self.insert_data2()
# Base filter string starts with
rv = client.get("/model2view/add")
data = rv.data.decode("utf-8")
ok_("G1" in data)
ok_("G2" not in data)
model2 = Model2.objects[0]
# Base filter string starts with
rv = client.get("/model2view/edit/{0}".format(model2.id))
data = rv.data.decode("utf-8")
ok_("G2" in data)
ok_("G1" not in data)
self.clean_data()
def test_model_list_order(self):
"""
Test Model order on lists
"""
self.insert_data()
client = self.app.test_client()
self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
rv = client.post(
"/model1view/list?_oc_Model1View=field_string&_od_Model1View=asc",
follow_redirects=True,
)
# TODO: fix this 405 Method not allowed error
# eq_(rv.status_code, 200)
rv.data.decode("utf-8")
# TODO
# VALIDATE LIST IS ORDERED
rv = client.post(
"/model1view/list?_oc_Model1View=field_string&_od_Model1View=desc",
follow_redirects=True,
)
# TODO: fix this 405 Method not allowed error
# eq_(rv.status_code, 200)
rv.data.decode("utf-8")
# TODO
# VALIDATE LIST IS ORDERED
self.clean_data()
def test_model_add_validation(self):
"""
Test Model add validations
"""
client = self.app.test_client()
self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
rv = client.post(
"/model1view/add",
data=dict(field_string="test1", field_integer="1"),
follow_redirects=True,
)
eq_(rv.status_code, 200)
rv = client.post(
"/model1view/add",
data=dict(field_string="test1", field_integer="2"),
follow_redirects=True,
)
eq_(rv.status_code, 200)
data = rv.data.decode("utf-8")
ok_(UNIQUE_VALIDATION_STRING in data)
model = Model1.objects()
eq_(len(model), 1)
rv = client.post(
"/model1view/add",
data=dict(field_string="", field_integer="1"),
follow_redirects=True,
)
eq_(rv.status_code, 200)
data = rv.data.decode("utf-8")
ok_(NOTNULL_VALIDATION_STRING in data)
model = Model1.objects()
eq_(len(model), 1)
self.clean_data()
def test_model_edit_validation(self):
"""
Test Model edit validations
"""
client = self.app.test_client()
self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
client.post(
"/model1view/add",
data=dict(field_string="test1", field_integer="1"),
follow_redirects=True,
)
model1 = Model1.objects(field_string="test1")[0]
client.post(
"/model1view/add",
data=dict(field_string="test2", field_integer="1"),
follow_redirects=True,
)
rv = client.post(
"/model1view/edit/{0}".format(model1.id),
data=dict(field_string="test2", field_integer="2"),
follow_redirects=True,
)
eq_(rv.status_code, 200)
data = rv.data.decode("utf-8")
ok_(UNIQUE_VALIDATION_STRING in data)
rv = client.post(
"/model1view/edit/{0}".format(model1.id),
data=dict(field_string="", field_integer="2"),
follow_redirects=True,
)
eq_(rv.status_code, 200)
data = rv.data.decode("utf-8")
ok_(NOTNULL_VALIDATION_STRING in data)
self.clean_data()
def test_model_base_filter(self):
"""
Test Model base filtered views
"""
client = self.app.test_client()
self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
self.insert_data()
models = Model1.objects()
eq_(len(models), 23)
# Base filter string starts with
rv = client.get("/model1filtered1view/list/")
data = rv.data.decode("utf-8")
ok_("atest" in data)
ok_("btest" not in data)
# Base filter integer equals
rv = client.get("/model1filtered2view/list/")
data = rv.data.decode("utf-8")
ok_("atest" in data)
ok_("btest" not in data)
self.clean_data()
def test_model_list_method_field(self):
"""
Tests a model's field has a method
"""
client = self.app.test_client()
self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
self.insert_data2()
rv = client.get("/model2view/list/")
eq_(rv.status_code, 200)
data = rv.data.decode("utf-8")
ok_("field_method_value" in data)
self.clean_data()
def test_compactCRUDMixin(self):
"""
Test CompactCRUD Mixin view
"""
client = self.app.test_client()
self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
self.insert_data2()
rv = client.get("/model1compactview/list/")
eq_(rv.status_code, 200)
self.clean_data()
| 34.185121 | 88 | 0.582722 | import datetime
import logging
import os
import random
import string
from flask_appbuilder.charts.views import (
DirectByChartView,
DirectChartView,
GroupByChartView
)
from flask_appbuilder.models.group import aggregate_avg, aggregate_count, aggregate_sum
from flask_appbuilder.models.mongoengine.filters import FilterEqual, FilterStartsWith
from flask_appbuilder.views import CompactCRUDMixin, MasterDetailView
from flask_mongoengine import MongoEngine
import jinja2
from nose.tools import eq_, ok_
from .base import FABTestCase
from .mongoengine.models import Model1, Model2
logging.basicConfig(format="%(asctime)s:%(levelname)s:%(name)s:%(message)s")
logging.getLogger().setLevel(logging.DEBUG)
DEFAULT_INDEX_STRING = "Welcome"
INVALID_LOGIN_STRING = "Invalid login"
ACCESS_IS_DENIED = "Access is Denied"
UNIQUE_VALIDATION_STRING = "Already exists"
NOTNULL_VALIDATION_STRING = "This field is required"
DEFAULT_ADMIN_USER = "admin"
DEFAULT_ADMIN_PASSWORD = "general"
log = logging.getLogger(__name__)
class FlaskTestCase(FABTestCase):
def setUp(self):
from flask import Flask
from flask_appbuilder import AppBuilder
from flask_appbuilder.models.mongoengine.interface import MongoEngineInterface
from flask_appbuilder import ModelView
from flask_appbuilder.security.mongoengine.manager import SecurityManager
self.app = Flask(__name__)
self.app.jinja_env.undefined = jinja2.StrictUndefined
self.basedir = os.path.abspath(os.path.dirname(__file__))
self.app.config["MONGODB_SETTINGS"] = {"DB": "test"}
self.app.config["CSRF_ENABLED"] = False
self.app.config["SECRET_KEY"] = "thisismyscretkey"
self.app.config["WTF_CSRF_ENABLED"] = False
self.db = MongoEngine(self.app)
self.appbuilder = AppBuilder(self.app, security_manager_class=SecurityManager)
class Model2View(ModelView):
datamodel = MongoEngineInterface(Model2)
list_columns = [
"field_integer",
"field_float",
"field_string",
"field_method",
"group.field_string",
]
edit_form_query_rel_fields = {
"group": [["field_string", FilterEqual, "G2"]]
}
add_form_query_rel_fields = {"group": [["field_string", FilterEqual, "G1"]]}
add_exclude_columns = ["excluded_string"]
class Model22View(ModelView):
datamodel = MongoEngineInterface(Model2)
list_columns = [
"field_integer",
"field_float",
"field_string",
"field_method",
"group.field_string",
]
add_exclude_columns = ["excluded_string"]
edit_exclude_columns = ["excluded_string"]
show_exclude_columns = ["excluded_string"]
class Model1View(ModelView):
datamodel = MongoEngineInterface(Model1)
related_views = [Model2View]
list_columns = ["field_string", "field_file"]
class Model1CompactView(CompactCRUDMixin, ModelView):
datamodel = MongoEngineInterface(Model1)
class Model1Filtered1View(ModelView):
datamodel = MongoEngineInterface(Model1)
base_filters = [["field_string", FilterStartsWith, "a"]]
class Model1MasterView(MasterDetailView):
datamodel = MongoEngineInterface(Model1)
related_views = [Model2View]
class Model1Filtered2View(ModelView):
datamodel = MongoEngineInterface(Model1)
base_filters = [["field_integer", FilterEqual, 0]]
class Model2GroupByChartView(GroupByChartView):
datamodel = MongoEngineInterface(Model2)
chart_title = "Test Model1 Chart"
definitions = [
{
"group": "field_string",
"series": [
(
aggregate_sum,
"field_integer",
aggregate_avg,
"field_integer",
aggregate_count,
"field_integer",
)
],
}
]
class Model2DirectByChartView(DirectByChartView):
datamodel = MongoEngineInterface(Model2)
chart_title = "Test Model1 Chart"
definitions = [
{"group": "field_string", "series": ["field_integer", "field_float"]}
]
class Model2DirectChartView(DirectChartView):
datamodel = MongoEngineInterface(Model2)
chart_title = "Test Model1 Chart"
direct_columns = {"stat1": ("group", "field_integer")}
class Model1MasterChartView(MasterDetailView):
datamodel = MongoEngineInterface(Model1)
related_views = [Model2DirectByChartView]
self.appbuilder.add_view(Model1View, "Model1", category="Model1")
self.appbuilder.add_view(Model1CompactView, "Model1Compact", category="Model1")
self.appbuilder.add_view(Model1MasterView, "Model1Master", category="Model1")
self.appbuilder.add_view(
Model1MasterChartView, "Model1MasterChart", category="Model1"
)
self.appbuilder.add_view(
Model1Filtered1View, "Model1Filtered1", category="Model1"
)
self.appbuilder.add_view(
Model1Filtered2View, "Model1Filtered2", category="Model1"
)
self.appbuilder.add_view(Model2View, "Model2")
self.appbuilder.add_view(Model22View, "Model22")
self.appbuilder.add_view(Model2View, "Model2 Add", href="/model2view/add")
self.appbuilder.add_view(Model2GroupByChartView, "Model2 Group By Chart")
self.appbuilder.add_view(Model2DirectByChartView, "Model2 Direct By Chart")
self.appbuilder.add_view(Model2DirectChartView, "Model2 Direct Chart")
role_admin = self.appbuilder.sm.find_role("Admin")
try:
self.appbuilder.sm.add_user(
"admin", "admin", "user", "admin@fab.org", role_admin, "general"
)
except Exception:
pass
def tearDown(self):
self.appbuilder = None
self.app = None
self.db = None
log.debug("TEAR DOWN")
def insert_data(self):
for x, i in zip(string.ascii_letters[:23], range(23)):
model = Model1(field_string="%stest" % (x), field_integer=i)
model.save()
def insert_data2(self):
models1 = [
Model1(field_string="G1"),
Model1(field_string="G2"),
Model1(field_string="G3"),
]
for model1 in models1:
try:
model1.save()
for x, i in zip(string.ascii_letters[:10], range(10)):
model = Model2(
field_string="%stest" % (x),
field_integer=random.randint(1, 10),
field_float=random.uniform(0.0, 1.0),
group=model1,
)
year = random.choice(range(1900, 2012))
month = random.choice(range(1, 12))
day = random.choice(range(1, 28))
model.field_date = datetime.datetime(year, month, day)
model.save()
except Exception as e:
print("ERROR {0}".format(str(e)))
def clean_data(self):
Model1.drop_collection()
Model2.drop_collection()
def test_fab_views(self):
eq_(len(self.appbuilder.baseviews), 26)
def test_index(self):
client = self.app.test_client()
rv = client.get("/")
data = rv.data.decode("utf-8")
ok_(DEFAULT_INDEX_STRING in data)
def test_sec_login(self):
client = self.app.test_client()
rv = client.get("/model1view/list/")
eq_(rv.status_code, 302)
rv = client.get("/model2view/list/")
eq_(rv.status_code, 302)
self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
rv = client.get("/model1view/list/")
eq_(rv.status_code, 200)
rv = client.get("/model2view/list/")
eq_(rv.status_code, 200)
self.browser_logout(client)
rv = client.get("/model1view/list/")
eq_(rv.status_code, 302)
rv = client.get("/model2view/list/")
eq_(rv.status_code, 302)
rv = self.browser_login(client, DEFAULT_ADMIN_USER, "password")
data = rv.data.decode("utf-8")
ok_(INVALID_LOGIN_STRING in data)
def test_sec_reset_password(self):
from flask_appbuilder.security.mongoengine.models import User
client = self.app.test_client()
user = User.objects.filter(**{"username": "admin"})[0]
rv = client.get(
"/users/action/resetmypassword/{0}".format(user.id), follow_redirects=True
)
data = rv.data.decode("utf-8")
ok_(ACCESS_IS_DENIED in data)
rv = self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
rv = client.get(
"/users/action/resetmypassword/{0}".format(user.id), follow_redirects=True
)
data = rv.data.decode("utf-8")
ok_("Reset Password Form" in data)
rv = client.post(
"/resetmypassword/form",
data=dict(password="password", conf_password="password"),
follow_redirects=True,
)
eq_(rv.status_code, 200)
self.browser_logout(client)
self.browser_login(client, DEFAULT_ADMIN_USER, "password")
rv = client.post(
"/resetmypassword/form",
data=dict(
password=DEFAULT_ADMIN_PASSWORD, conf_password=DEFAULT_ADMIN_PASSWORD
),
follow_redirects=True,
)
eq_(rv.status_code, 200)
rv = client.get(
"/users/action/resetpasswords/{0}".format(user.id), follow_redirects=True
)
data = rv.data.decode("utf-8")
ok_("Reset Password Form" in data)
rv = client.post(
"/resetmypassword/form",
data=dict(
password=DEFAULT_ADMIN_PASSWORD, conf_password=DEFAULT_ADMIN_PASSWORD
),
follow_redirects=True,
)
eq_(rv.status_code, 200)
def test_generic_interface(self):
client = self.app.test_client()
self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
rv = client.get("/psview/list")
rv.data.decode("utf-8")
def test_model_crud(self):
client = self.app.test_client()
rv = self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
rv = client.post(
"/model1view/add",
data=dict(
field_string="test1",
field_integer="1",
field_float="0.12",
field_date="2014-01-01 23:10:07",
),
follow_redirects=True,
)
eq_(rv.status_code, 200)
model = Model1.objects[0]
eq_(model.field_string, u"test1")
eq_(model.field_integer, 1)
model1 = Model1.objects(field_string="test1")[0]
rv = client.post(
"/model1view/edit/{0}".format(model1.id),
data=dict(field_string="test2", field_integer="2"),
follow_redirects=True,
)
eq_(rv.status_code, 200)
model = Model1.objects[0]
eq_(model.field_string, u"test2")
eq_(model.field_integer, 2)
rv = client.get(
"/model1view/delete/{0}".format(model.id), follow_redirects=True
)
eq_(rv.status_code, 200)
model = Model1.objects
eq_(len(model), 0)
self.clean_data()
def test_excluded_cols(self):
client = self.app.test_client()
rv = self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
rv = client.get("/model22view/add")
eq_(rv.status_code, 200)
data = rv.data.decode("utf-8")
ok_("field_string" in data)
ok_("field_integer" in data)
ok_("field_float" in data)
ok_("field_date" in data)
ok_("excluded_string" not in data)
self.insert_data2()
model2 = Model2.objects[0]
rv = client.get("/model22view/edit/{0}".format(model2.id))
eq_(rv.status_code, 200)
data = rv.data.decode("utf-8")
ok_("field_string" in data)
ok_("field_integer" in data)
ok_("field_float" in data)
ok_("field_date" in data)
ok_("excluded_string" not in data)
rv = client.get("/model22view/show/{0}".format(model2.id))
eq_(rv.status_code, 200)
data = rv.data.decode("utf-8")
ok_("Field String" in data)
ok_("Field Integer" in data)
ok_("Field Float" in data)
ok_("Field Date" in data)
ok_("Excluded String" not in data)
self.clean_data()
def test_query_rel_fields(self):
client = self.app.test_client()
rv = self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
self.insert_data2()
rv = client.get("/model2view/add")
data = rv.data.decode("utf-8")
ok_("G1" in data)
ok_("G2" not in data)
model2 = Model2.objects[0]
rv = client.get("/model2view/edit/{0}".format(model2.id))
data = rv.data.decode("utf-8")
ok_("G2" in data)
ok_("G1" not in data)
self.clean_data()
def test_model_list_order(self):
self.insert_data()
client = self.app.test_client()
self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
rv = client.post(
"/model1view/list?_oc_Model1View=field_string&_od_Model1View=asc",
follow_redirects=True,
)
rv.data.decode("utf-8")
rv = client.post(
"/model1view/list?_oc_Model1View=field_string&_od_Model1View=desc",
follow_redirects=True,
)
rv.data.decode("utf-8")
self.clean_data()
def test_model_add_validation(self):
client = self.app.test_client()
self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
rv = client.post(
"/model1view/add",
data=dict(field_string="test1", field_integer="1"),
follow_redirects=True,
)
eq_(rv.status_code, 200)
rv = client.post(
"/model1view/add",
data=dict(field_string="test1", field_integer="2"),
follow_redirects=True,
)
eq_(rv.status_code, 200)
data = rv.data.decode("utf-8")
ok_(UNIQUE_VALIDATION_STRING in data)
model = Model1.objects()
eq_(len(model), 1)
rv = client.post(
"/model1view/add",
data=dict(field_string="", field_integer="1"),
follow_redirects=True,
)
eq_(rv.status_code, 200)
data = rv.data.decode("utf-8")
ok_(NOTNULL_VALIDATION_STRING in data)
model = Model1.objects()
eq_(len(model), 1)
self.clean_data()
def test_model_edit_validation(self):
client = self.app.test_client()
self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
client.post(
"/model1view/add",
data=dict(field_string="test1", field_integer="1"),
follow_redirects=True,
)
model1 = Model1.objects(field_string="test1")[0]
client.post(
"/model1view/add",
data=dict(field_string="test2", field_integer="1"),
follow_redirects=True,
)
rv = client.post(
"/model1view/edit/{0}".format(model1.id),
data=dict(field_string="test2", field_integer="2"),
follow_redirects=True,
)
eq_(rv.status_code, 200)
data = rv.data.decode("utf-8")
ok_(UNIQUE_VALIDATION_STRING in data)
rv = client.post(
"/model1view/edit/{0}".format(model1.id),
data=dict(field_string="", field_integer="2"),
follow_redirects=True,
)
eq_(rv.status_code, 200)
data = rv.data.decode("utf-8")
ok_(NOTNULL_VALIDATION_STRING in data)
self.clean_data()
def test_model_base_filter(self):
client = self.app.test_client()
self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
self.insert_data()
models = Model1.objects()
eq_(len(models), 23)
rv = client.get("/model1filtered1view/list/")
data = rv.data.decode("utf-8")
ok_("atest" in data)
ok_("btest" not in data)
rv = client.get("/model1filtered2view/list/")
data = rv.data.decode("utf-8")
ok_("atest" in data)
ok_("btest" not in data)
self.clean_data()
def test_model_list_method_field(self):
client = self.app.test_client()
self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
self.insert_data2()
rv = client.get("/model2view/list/")
eq_(rv.status_code, 200)
data = rv.data.decode("utf-8")
ok_("field_method_value" in data)
self.clean_data()
def test_compactCRUDMixin(self):
client = self.app.test_client()
self.browser_login(client, DEFAULT_ADMIN_USER, DEFAULT_ADMIN_PASSWORD)
self.insert_data2()
rv = client.get("/model1compactview/list/")
eq_(rv.status_code, 200)
self.clean_data()
| true | true |
1c327d42b132530b5c5576706689cbfda1e2995d | 23 | py | Python | irc_bot/__version__.py | gazpachoking/irc_bot | 8dc69386e5e75a0259a32ce4de538c8ff5abea6d | [
"MIT"
] | null | null | null | irc_bot/__version__.py | gazpachoking/irc_bot | 8dc69386e5e75a0259a32ce4de538c8ff5abea6d | [
"MIT"
] | null | null | null | irc_bot/__version__.py | gazpachoking/irc_bot | 8dc69386e5e75a0259a32ce4de538c8ff5abea6d | [
"MIT"
] | null | null | null | __version__ = '1.0.34'
| 11.5 | 22 | 0.652174 | __version__ = '1.0.34'
| true | true |
1c327d96fca9133a54293dca75bf94b325167f62 | 22,647 | py | Python | tests/unit/states/postgres_test.py | preoctopus/salt | aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d | [
"Apache-2.0"
] | 3 | 2015-04-16T18:42:35.000Z | 2017-10-30T16:57:49.000Z | tests/unit/states/postgres_test.py | preoctopus/salt | aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d | [
"Apache-2.0"
] | 16 | 2015-11-18T00:44:03.000Z | 2018-10-29T20:48:27.000Z | tests/unit/states/postgres_test.py | preoctopus/salt | aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d | [
"Apache-2.0"
] | 1 | 2018-04-19T16:57:27.000Z | 2018-04-19T16:57:27.000Z | # -*- coding: utf-8 -*-
# Import python libs
from __future__ import absolute_import
# Import Salt Testing libs
from salttesting import skipIf, TestCase
from salttesting.helpers import ensure_in_syspath
from salttesting.mock import NO_MOCK, NO_MOCK_REASON, Mock, MagicMock, patch
ensure_in_syspath('../../')
# Import salt libs
from salt.modules import postgres as postgresmod
from salt.states import (
postgres_database,
postgres_user,
postgres_group,
postgres_extension,
postgres_schema,
)
# State modules under test; each one gets its salt dunder dicts stubbed
# out below so the tests can patch them.
MODS = (
    postgres_database,
    postgres_user,
    postgres_group,
    postgres_extension,
    postgres_schema,
)

OPTS = {'test': False}

for state_mod in MODS:
    # Stub the dunders so they exist and can be patched with patch.dict/
    # patch.multiple in the test cases below.
    state_mod.__grains__ = {}
    state_mod.__salt__ = {}
    state_mod.__opts__ = {}
# Shared __salt__ stub; only built when mocking is available, otherwise the
# tests are skipped anyway by @skipIf(NO_MOCK, ...).
SALT_STUB = {}
if not NO_MOCK:
    SALT_STUB.update({
        'config.option': Mock(),
        'cmd.run_all': Mock(),
        'file.chown': Mock(),
        'file.remove': Mock(),
    })
@skipIf(NO_MOCK, NO_MOCK_REASON)
@patch.multiple(postgres_user,
                __grains__={'os_family': 'Linux'},
                __salt__=SALT_STUB,
                __opts__={'test': False})
@patch('salt.utils.which', Mock(return_value='/usr/bin/pgsql'))
class PostgresUserTestCase(TestCase):
    '''
    Tests for ``postgres_user.present``.

    ``postgres_user.__opts__`` is patched to ``{'test': False}`` for the
    whole class; tests that exercise dry-run behaviour re-patch that same
    dict to ``{'test': True}``.
    '''

    @patch.dict(SALT_STUB, {
        'postgres.role_get': Mock(return_value=None),
        'postgres.user_create': MagicMock(),
    })
    def test_present__creation(self):
        '''A missing user is created; dry-run only reports the creation.'''
        # test=True: nothing is created, result is None ("would change")
        with patch.dict(postgres_user.__opts__, {'test': True}):
            ret = postgres_user.present('foo')
            self.assertEqual(
                ret,
                {'comment': 'User foo is set to be created',
                 'changes': {}, 'name': 'foo', 'result': None}
            )
            self.assertEqual(SALT_STUB['postgres.user_create'].call_count, 0)

        # test=False: the user is created with all-default attributes
        ret = postgres_user.present('foo')
        self.assertEqual(
            ret,
            {'comment': 'The user foo has been created',
             'changes': {'foo': 'Present'}, 'name': 'foo', 'result': True}
        )
        SALT_STUB['postgres.user_create'].assert_called_once_with(
            username='foo', superuser=None, encrypted=True, runas=None,
            inherit=None, rolepassword=None, port=None, replication=None,
            host=None, createroles=None, user=None, groups=None,
            maintenance_db=None, login=None, password=None, createdb=None)

    @patch.dict(SALT_STUB, {
        'postgres.role_get': Mock(return_value={
            'can create databases': False,
            'can create roles': False,
            'can login': False,
            'can update system catalogs': False,
            'connections': None,
            'defaults variables': {},
            'expiry time': None,
            'inherits privileges': True,
            'replication': False,
            'superuser': False,
        }),
        'postgres.user_update': MagicMock(),
    })
    def test_present__update(self):
        '''An existing user is updated when a requested attribute differs.'''
        # test=True: the pending change is reported but not applied
        with patch.dict(postgres_user.__opts__, {'test': True}):
            ret = postgres_user.present('foo', login=True, replication=False)
            self.assertEqual(
                ret,
                {'comment': 'User foo is set to be updated',
                 'changes': {'foo': {'login': True}}, 'name': 'foo',
                 'result': None}
            )
            self.assertEqual(SALT_STUB['postgres.user_update'].call_count, 0)

        # test=False: the update is applied
        ret = postgres_user.present('foo', login=True, replication=False)
        self.assertEqual(
            ret,
            {'comment': 'The user foo has been updated',
             'changes': {'foo': {'login': True}}, 'name': 'foo',
             'result': True}
        )
        SALT_STUB['postgres.user_update'].assert_called_once_with(
            username='foo', superuser=None, encrypted=True, runas=None,
            inherit=None, rolepassword=None, port=None, replication=False,
            host=None, createroles=None, user=None, groups=None,
            maintenance_db=None, login=True, password=None, createdb=None)

    @patch.dict(SALT_STUB, {
        'postgres.role_get': Mock(return_value={
            'can create databases': False,
            'can create roles': False,
            'can login': False,
            'can update system catalogs': False,
            'connections': None,
            'defaults variables': {},
            'expiry time': None,
            'inherits privileges': True,
            'replication': False,
            'superuser': False,
        }),
        'postgres.user_update': MagicMock(),
    })
    def test_present__no_update(self):
        '''No update call is made when the user already matches the request.'''
        # BUG FIX: the original patched the module-level OPTS dict, which
        # postgres_user never reads, so the dry-run branch was not actually
        # exercised.  Patch postgres_user.__opts__ like the sibling tests.
        with patch.dict(postgres_user.__opts__, {'test': True}):
            ret = postgres_user.present('foo', login=False, replication=False)
            self.assertEqual(
                ret,
                {'comment': 'User foo is already present',
                 'changes': {}, 'name': 'foo', 'result': True}
            )
            self.assertEqual(SALT_STUB['postgres.user_update'].call_count, 0)

        ret = postgres_user.present('foo', login=False, replication=False)
        self.assertEqual(
            ret,
            {'comment': 'User foo is already present',
             'changes': {}, 'name': 'foo', 'result': True}
        )
        self.assertEqual(SALT_STUB['postgres.user_update'].call_count, 0)
@skipIf(NO_MOCK, NO_MOCK_REASON)
@patch.multiple(postgres_group,
                __grains__={'os_family': 'Linux'},
                __salt__=SALT_STUB,
                __opts__={'test': False})
@patch('salt.utils.which', Mock(return_value='/usr/bin/pgsql'))
class PostgresGroupTestCase(TestCase):
    '''
    Tests for ``postgres_group.present``.

    ``postgres_group.__opts__`` is patched to ``{'test': False}`` for the
    whole class; tests that exercise dry-run behaviour re-patch that same
    dict to ``{'test': True}``.
    '''

    @patch.dict(SALT_STUB, {
        'postgres.role_get': Mock(return_value=None),
        'postgres.group_create': MagicMock(),
    })
    def test_present__creation(self):
        '''A missing group is created; dry-run only reports the creation.'''
        # test=True: nothing is created, result is None ("would change")
        with patch.dict(postgres_group.__opts__, {'test': True}):
            ret = postgres_group.present('foo')
            self.assertEqual(
                ret,
                {'comment': 'Group foo is set to be created',
                 'changes': {}, 'name': 'foo', 'result': None}
            )
            self.assertEqual(SALT_STUB['postgres.group_create'].call_count, 0)

        # test=False: the group is created with all-default attributes
        ret = postgres_group.present('foo')
        self.assertEqual(
            ret,
            {'comment': 'The group foo has been created',
             'changes': {}, 'name': 'foo', 'result': True}
        )
        SALT_STUB['postgres.group_create'].assert_called_once_with(
            superuser=None, replication=None, encrypted=True, runas=None,
            inherit=None, rolepassword=None, port=None, groupname='foo',
            host=None, createroles=None, user=None, groups=None,
            maintenance_db=None, login=None, password=None, createdb=None)

    @patch.dict(SALT_STUB, {
        'postgres.role_get': Mock(return_value={
            'can create databases': False,
            'can create roles': False,
            'can login': False,
            'can update system catalogs': False,
            'connections': None,
            'defaults variables': {},
            'expiry time': None,
            'inherits privileges': True,
            'replication': False,
            'superuser': False,
        }),
        'postgres.group_update': MagicMock(),
    })
    def test_present__update(self):
        '''An existing group is updated when a requested attribute differs.'''
        # test=True: the pending change is reported but not applied
        with patch.dict(postgres_group.__opts__, {'test': True}):
            ret = postgres_group.present('foo', login=True, replication=False)
            self.assertEqual(
                ret,
                {'comment': 'Group foo is set to be updated',
                 'changes': {'foo': {'login': True}}, 'name': 'foo',
                 'result': None}
            )
            self.assertEqual(SALT_STUB['postgres.group_update'].call_count, 0)

        # test=False: the update is applied
        ret = postgres_group.present('foo', login=True, replication=False)
        self.assertEqual(
            ret,
            {'comment': 'The group foo has been updated',
             'changes': {'foo': {'login': True}}, 'name': 'foo',
             'result': True}
        )
        SALT_STUB['postgres.group_update'].assert_called_once_with(
            superuser=None, replication=False, encrypted=True, runas=None,
            inherit=None, rolepassword=None, port=None, groupname='foo',
            host=None, createroles=None, user=None, groups=None,
            maintenance_db=None, login=True, password=None, createdb=None)

    @patch.dict(SALT_STUB, {
        'postgres.role_get': Mock(return_value={
            'can create databases': False,
            'can create roles': False,
            'can login': False,
            'can update system catalogs': False,
            'connections': None,
            'defaults variables': {},
            'expiry time': None,
            'inherits privileges': True,
            'replication': False,
            'superuser': False,
        }),
        'postgres.group_update': MagicMock(),
    })
    def test_present__no_update(self):
        '''No update call is made when the group already matches the request.'''
        # BUG FIX: the original patched the module-level OPTS dict, which
        # postgres_group never reads, so the dry-run branch was not actually
        # exercised.  Patch postgres_group.__opts__ like the sibling tests.
        with patch.dict(postgres_group.__opts__, {'test': True}):
            ret = postgres_group.present('foo', login=False, replication=False)
            self.assertEqual(
                ret,
                {'comment': 'Group foo is already present',
                 'changes': {}, 'name': 'foo', 'result': True}
            )
            self.assertEqual(SALT_STUB['postgres.group_update'].call_count, 0)

        ret = postgres_group.present('foo', login=False, replication=False)
        self.assertEqual(
            ret,
            {'comment': 'Group foo is already present',
             'changes': {}, 'name': 'foo', 'result': True}
        )
        self.assertEqual(SALT_STUB['postgres.group_update'].call_count, 0)
@skipIf(NO_MOCK, NO_MOCK_REASON)
@patch.multiple(postgres_extension,
                __grains__={'os_family': 'Linux'},
                __salt__=SALT_STUB,
                __opts__={'test': False})
@patch('salt.utils.which', Mock(return_value='/usr/bin/pgsql'))
class PostgresExtensionTestCase(TestCase):
    '''
    Tests for ``postgres_extension.present``/``absent``.

    The mocked ``side_effect`` lists are consumed one entry per call, so
    each call to the state inside a test advances the scripted scenario.

    The original code decorated several tests with
    ``@patch.dict(OPTS, ...)``; ``OPTS`` is a module-level dict in this
    test file that the state never reads, so those decorators were no-ops
    and have been removed.  Dry-run mode is driven by patching
    ``postgres_extension.__opts__`` directly.
    '''

    @patch.dict(SALT_STUB, {
        'postgres.create_metadata': Mock(side_effect=[
            [postgresmod._EXTENSION_NOT_INSTALLED],
            [postgresmod._EXTENSION_TO_MOVE, postgresmod._EXTENSION_INSTALLED],
        ]),
        'postgres.create_extension': Mock(side_effect=[
            False, False,
        ]),
    })
    def test_present_failed(self):
        '''
        present() returns result=False when the execution module fails to
        install (first call) or upgrade (second call) the extension.
        '''
        ret = postgres_extension.present('foo')
        self.assertEqual(
            ret,
            {'comment': 'Failed to install extension foo',
             'changes': {}, 'name': 'foo', 'result': False},
        )
        ret = postgres_extension.present('foo')
        self.assertEqual(
            ret,
            {'comment': 'Failed to upgrade extension foo',
             'changes': {}, 'name': 'foo', 'result': False}
        )

    @patch.dict(SALT_STUB, {
        'postgres.create_metadata': Mock(side_effect=[
            [postgresmod._EXTENSION_NOT_INSTALLED],
            [postgresmod._EXTENSION_INSTALLED],
            [postgresmod._EXTENSION_TO_MOVE, postgresmod._EXTENSION_INSTALLED],
        ]),
        'postgres.create_extension': Mock(side_effect=[
            True, True, True,
        ]),
    })
    def test_present(self):
        '''
        present() installs, then reports already-present, then upgrades,
        following the three scripted metadata states.
        '''
        ret = postgres_extension.present('foo')
        self.assertEqual(
            ret,
            {'comment': 'The extension foo has been installed',
             'changes': {}, 'name': 'foo', 'result': True}
        )
        ret = postgres_extension.present('foo')
        self.assertEqual(
            ret,
            {'comment': 'Extension foo is already present',
             'changes': {}, 'name': 'foo', 'result': True}
        )
        ret = postgres_extension.present('foo')
        self.assertEqual(
            ret,
            {'comment': 'The extension foo has been upgraded',
             'changes': {}, 'name': 'foo', 'result': True}
        )

    @patch.dict(SALT_STUB, {
        'postgres.create_metadata': Mock(side_effect=[
            [postgresmod._EXTENSION_NOT_INSTALLED],
            [postgresmod._EXTENSION_INSTALLED],
            [postgresmod._EXTENSION_TO_MOVE, postgresmod._EXTENSION_INSTALLED],
        ]),
        'postgres.create_extension': Mock(side_effect=[
            True, True, True,
        ]),
    })
    def test_presenttest(self):
        '''present() in dry-run mode only reports the pending action.'''
        with patch.dict(postgres_extension.__opts__, {'test': True}):
            ret = postgres_extension.present('foo')
            self.assertEqual(
                ret,
                {'comment': 'Extension foo is set to be installed',
                 'changes': {}, 'name': 'foo', 'result': None}
            )
            ret = postgres_extension.present('foo')
            self.assertEqual(
                ret,
                {'comment': "Extension foo is set to be created",
                 'changes': {}, 'name': 'foo', 'result': None}
            )
            ret = postgres_extension.present('foo')
            self.assertEqual(
                ret,
                {'comment': "Extension foo is set to be upgraded",
                 'changes': {}, 'name': 'foo', 'result': None}
            )

    @patch.dict(SALT_STUB, {
        'postgres.is_installed_extension': Mock(side_effect=[
            True, False,
        ]),
        'postgres.drop_extension': Mock(side_effect=[
            True, True,
        ]),
    })
    def test_absent(self):
        '''absent() drops an installed extension; no-ops on a missing one.'''
        ret = postgres_extension.absent('foo')
        self.assertEqual(
            ret,
            {'comment': 'Extension foo has been removed',
             'changes': {'foo': 'Absent'}, 'name': 'foo', 'result': True}
        )
        ret = postgres_extension.absent('foo')
        self.assertEqual(
            ret,
            {'comment': (
                'Extension foo is not present, '
                'so it cannot be removed'),
             'changes': {}, 'name': 'foo', 'result': True}
        )

    @patch.dict(SALT_STUB, {
        'postgres.is_installed_extension': Mock(side_effect=[
            True, True,
        ]),
        'postgres.drop_extension': Mock(side_effect=[
            False, False,
        ]),
    })
    def test_absent_failed(self):
        '''absent() returns result=False when the drop fails.'''
        # The class-level patch already sets __opts__ to {'test': False}.
        ret = postgres_extension.absent('foo')
        self.assertEqual(
            ret,
            {'comment': 'Extension foo failed to be removed',
             'changes': {}, 'name': 'foo', 'result': False}
        )

    @patch.dict(SALT_STUB, {
        'postgres.is_installed_extension': Mock(side_effect=[
            True, True,
        ]),
        'postgres.drop_extension': Mock(side_effect=[
            False, False,
        ]),
    })
    def test_absent_failedtest(self):
        '''absent() in dry-run mode only reports the pending removal.'''
        with patch.dict(postgres_extension.__opts__, {'test': True}):
            ret = postgres_extension.absent('foo')
        self.assertEqual(
            ret,
            {'comment': 'Extension foo is set to be removed',
             'changes': {}, 'name': 'foo', 'result': None}
        )
@skipIf(NO_MOCK, NO_MOCK_REASON)
@patch.multiple(postgres_schema,
                __grains__={'os_family': 'Linux'},
                __salt__=SALT_STUB,
                __opts__={'test': False})
@patch('salt.utils.which', Mock(return_value='/usr/bin/pgsql'))
class PostgresSchemaTestCase(TestCase):
    '''Tests for ``postgres_schema.present``/``absent``.'''

    @patch.dict(SALT_STUB, {
        'postgres.schema_get': Mock(return_value=None),
        'postgres.schema_create': MagicMock(),
    })
    def test_present_creation(self):
        '''A schema missing from the database is created.'''
        result = postgres_schema.present('dbname', 'foo')
        expected = {
            'comment': 'Schema foo has been created in database dbname',
            'changes': {'foo': 'Present'},
            'dbname': 'dbname',
            'name': 'foo',
            'result': True,
        }
        self.assertEqual(result, expected)
        self.assertEqual(SALT_STUB['postgres.schema_create'].call_count, 1)

    @patch.dict(SALT_STUB, {
        'postgres.schema_get': Mock(return_value={'foo':
                                                  {'acl': '',
                                                   'owner': 'postgres'}
                                                  }),
        'postgres.schema_create': MagicMock(),
    })
    def test_present_nocreation(self):
        '''An already-existing schema is left untouched.'''
        result = postgres_schema.present('dbname', 'foo')
        expected = {
            'comment': 'Schema foo already exists in database dbname',
            'changes': {},
            'dbname': 'dbname',
            'name': 'foo',
            'result': True,
        }
        self.assertEqual(result, expected)
        self.assertEqual(SALT_STUB['postgres.schema_create'].call_count, 0)

    @patch.dict(SALT_STUB, {
        'postgres.schema_exists': Mock(return_value=True),
        'postgres.schema_remove': MagicMock(),
    })
    def test_absent_remove(self):
        '''An existing schema is removed.'''
        result = postgres_schema.absent('dbname', 'foo')
        expected = {
            'comment': 'Schema foo has been removed from database dbname',
            'changes': {'foo': 'Absent'},
            'dbname': 'dbname',
            'name': 'foo',
            'result': True,
        }
        self.assertEqual(result, expected)
        self.assertEqual(SALT_STUB['postgres.schema_remove'].call_count, 1)

    @patch.dict(SALT_STUB, {
        'postgres.schema_exists': Mock(return_value=False),
        'postgres.schema_remove': MagicMock(),
    })
    def test_absent_noremove(self):
        '''absent() is a no-op when the schema does not exist.'''
        result = postgres_schema.absent('dbname', 'foo')
        expected = {
            'comment': 'Schema foo is not present in database dbname,'
                       ' so it cannot be removed',
            'changes': {},
            'dbname': 'dbname',
            'name': 'foo',
            'result': True,
        }
        self.assertEqual(result, expected)
        self.assertEqual(SALT_STUB['postgres.schema_remove'].call_count, 0)
if __name__ == '__main__':
    from integration import run_tests

    # NOTE(review): only the extension test case is passed here even though
    # four TestCases are defined above -- confirm whether the others should
    # be included as well.
    run_tests(PostgresExtensionTestCase, needs_daemon=False)
| 39.317708 | 87 | 0.461915 |
from __future__ import absolute_import
from salttesting import skipIf, TestCase
from salttesting.helpers import ensure_in_syspath
from salttesting.mock import NO_MOCK, NO_MOCK_REASON, Mock, MagicMock, patch
ensure_in_syspath('../../')
from salt.modules import postgres as postgresmod
from salt.states import (
postgres_database,
postgres_user,
postgres_group,
postgres_extension,
postgres_schema,
)
MODS = (
postgres_database,
postgres_user,
postgres_group,
postgres_extension,
postgres_schema,
)
OPTS = {'test': False}
for postgres in MODS:
postgres.__grains__ = {}
postgres.__salt__ = {}
postgres.__opts__ = {}
if NO_MOCK is False:
SALT_STUB = {
'config.option': Mock(),
'cmd.run_all': Mock(),
'file.chown': Mock(),
'file.remove': Mock(),
}
else:
SALT_STUB = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
@patch.multiple(postgres_user,
__grains__={'os_family': 'Linux'},
__salt__=SALT_STUB,
__opts__={'test': False})
@patch('salt.utils.which', Mock(return_value='/usr/bin/pgsql'))
class PostgresUserTestCase(TestCase):
@patch.dict(SALT_STUB, {
'postgres.role_get': Mock(return_value=None),
'postgres.user_create': MagicMock(),
})
def test_present__creation(self):
with patch.dict(postgres_user.__opts__, {'test': True}):
ret = postgres_user.present('foo')
self.assertEqual(
ret,
{'comment': 'User foo is set to be created',
'changes': {}, 'name': 'foo', 'result': None}
)
self.assertEqual(SALT_STUB['postgres.user_create'].call_count, 0)
ret = postgres_user.present('foo')
self.assertEqual(
ret,
{'comment': 'The user foo has been created',
'changes': {'foo': 'Present'}, 'name': 'foo', 'result': True}
)
SALT_STUB['postgres.user_create'].assert_called_once_with(username='foo',
superuser=None,
encrypted=True,
runas=None,
inherit=None,
rolepassword=None,
port=None,
replication=None,
host=None,
createroles=None,
user=None,
groups=None,
maintenance_db=None,
login=None,
password=None,
createdb=None)
@patch.dict(SALT_STUB, {
'postgres.role_get': Mock(return_value={
'can create databases': False,
'can create roles': False,
'can login': False,
'can update system catalogs': False,
'connections': None,
'defaults variables': {},
'expiry time': None,
'inherits privileges': True,
'replication': False,
'superuser': False,
}),
'postgres.user_update': MagicMock(),
})
def test_present__update(self):
with patch.dict(postgres_user.__opts__, {'test': True}):
ret = postgres_user.present('foo', login=True, replication=False)
self.assertEqual(
ret,
{'comment': 'User foo is set to be updated',
'changes': {'foo': {'login': True}}, 'name': 'foo', 'result': None}
)
self.assertEqual(SALT_STUB['postgres.user_update'].call_count, 0)
ret = postgres_user.present('foo', login=True, replication=False)
self.assertEqual(
ret,
{'comment': 'The user foo has been updated',
'changes': {'foo': {'login': True}}, 'name': 'foo', 'result': True}
)
SALT_STUB['postgres.user_update'].assert_called_once_with(username='foo',
superuser=None,
encrypted=True,
runas=None,
inherit=None,
rolepassword=None,
port=None,
replication=False,
host=None,
createroles=None,
user=None,
groups=None,
maintenance_db=None,
login=True,
password=None,
createdb=None)
@patch.dict(SALT_STUB, {
'postgres.role_get': Mock(return_value={
'can create databases': False,
'can create roles': False,
'can login': False,
'can update system catalogs': False,
'connections': None,
'defaults variables': {},
'expiry time': None,
'inherits privileges': True,
'replication': False,
'superuser': False,
}),
'postgres.user_update': MagicMock(),
})
def test_present__no_update(self):
with patch.dict(OPTS, {'test': True}):
ret = postgres_user.present('foo', login=False, replication=False)
self.assertEqual(
ret,
{'comment': 'User foo is already present',
'changes': {}, 'name': 'foo', 'result': True}
)
self.assertEqual(SALT_STUB['postgres.user_update'].call_count, 0)
ret = postgres_user.present('foo', login=False, replication=False)
self.assertEqual(
ret,
{'comment': 'User foo is already present',
'changes': {}, 'name': 'foo', 'result': True}
)
self.assertEqual(SALT_STUB['postgres.user_update'].call_count, 0)
@skipIf(NO_MOCK, NO_MOCK_REASON)
@patch.multiple(postgres_group,
__grains__={'os_family': 'Linux'},
__salt__=SALT_STUB,
__opts__={'test': False})
@patch('salt.utils.which', Mock(return_value='/usr/bin/pgsql'))
class PostgresGroupTestCase(TestCase):
@patch.dict(SALT_STUB, {
'postgres.role_get': Mock(return_value=None),
'postgres.group_create': MagicMock(),
})
def test_present__creation(self):
with patch.dict(postgres_group.__opts__, {'test': True}):
ret = postgres_group.present('foo')
self.assertEqual(
ret,
{'comment': 'Group foo is set to be created',
'changes': {}, 'name': 'foo', 'result': None}
)
self.assertEqual(SALT_STUB['postgres.group_create'].call_count, 0)
ret = postgres_group.present('foo')
self.assertEqual(
ret,
{'comment': 'The group foo has been created',
'changes': {}, 'name': 'foo', 'result': True}
)
SALT_STUB['postgres.group_create'].assert_called_once_with(superuser=None,
replication=None,
encrypted=True,
runas=None,
inherit=None,
rolepassword=None,
port=None,
groupname='foo',
host=None,
createroles=None,
user=None,
groups=None,
maintenance_db=None,
login=None,
password=None,
createdb=None)
@patch.dict(SALT_STUB, {
'postgres.role_get': Mock(return_value={
'can create databases': False,
'can create roles': False,
'can login': False,
'can update system catalogs': False,
'connections': None,
'defaults variables': {},
'expiry time': None,
'inherits privileges': True,
'replication': False,
'superuser': False,
}),
'postgres.group_update': MagicMock(),
})
def test_present__update(self):
with patch.dict(postgres_group.__opts__, {'test': True}):
ret = postgres_group.present('foo', login=True, replication=False)
self.assertEqual(
ret,
{'comment': 'Group foo is set to be updated',
'changes': {'foo': {'login': True}}, 'name': 'foo', 'result': None}
)
self.assertEqual(SALT_STUB['postgres.group_update'].call_count, 0)
ret = postgres_group.present('foo', login=True, replication=False)
self.assertEqual(
ret,
{'comment': 'The group foo has been updated',
'changes': {'foo': {'login': True}}, 'name': 'foo', 'result': True}
)
SALT_STUB['postgres.group_update'].assert_called_once_with(superuser=None,
replication=False,
encrypted=True,
runas=None,
inherit=None,
rolepassword=None,
port=None,
groupname='foo',
host=None,
createroles=None,
user=None,
groups=None,
maintenance_db=None,
login=True,
password=None,
createdb=None)
@patch.dict(SALT_STUB, {
'postgres.role_get': Mock(return_value={
'can create databases': False,
'can create roles': False,
'can login': False,
'can update system catalogs': False,
'connections': None,
'defaults variables': {},
'expiry time': None,
'inherits privileges': True,
'replication': False,
'superuser': False,
}),
'postgres.group_update': MagicMock(),
})
def test_present__no_update(self):
with patch.dict(OPTS, {'test': True}):
ret = postgres_group.present('foo', login=False, replication=False)
self.assertEqual(
ret,
{'comment': 'Group foo is already present',
'changes': {}, 'name': 'foo', 'result': True}
)
self.assertEqual(SALT_STUB['postgres.group_update'].call_count, 0)
ret = postgres_group.present('foo', login=False, replication=False)
self.assertEqual(
ret,
{'comment': 'Group foo is already present',
'changes': {}, 'name': 'foo', 'result': True}
)
self.assertEqual(SALT_STUB['postgres.group_update'].call_count, 0)
@skipIf(NO_MOCK, NO_MOCK_REASON)
@patch.multiple(postgres_extension,
__grains__={'os_family': 'Linux'},
__salt__=SALT_STUB,
__opts__={'test': False})
@patch('salt.utils.which', Mock(return_value='/usr/bin/pgsql'))
class PostgresExtensionTestCase(TestCase):
@patch.dict(SALT_STUB, {
'postgres.create_metadata': Mock(side_effect=[
[postgresmod._EXTENSION_NOT_INSTALLED],
[postgresmod._EXTENSION_TO_MOVE, postgresmod._EXTENSION_INSTALLED],
]),
'postgres.create_extension': Mock(side_effect=[
False, False,
]),
})
def test_present_failed(self):
ret = postgres_extension.present('foo')
self.assertEqual(
ret,
{'comment': 'Failed to install extension foo',
'changes': {}, 'name': 'foo', 'result': False},
)
ret = postgres_extension.present('foo')
self.assertEqual(
ret,
{'comment': 'Failed to upgrade extension foo',
'changes': {}, 'name': 'foo', 'result': False}
)
@patch.dict(SALT_STUB, {
'postgres.create_metadata': Mock(side_effect=[
[postgresmod._EXTENSION_NOT_INSTALLED],
[postgresmod._EXTENSION_INSTALLED],
[postgresmod._EXTENSION_TO_MOVE, postgresmod._EXTENSION_INSTALLED],
]),
'postgres.create_extension': Mock(side_effect=[
True, True, True,
]),
})
def test_present(self):
ret = postgres_extension.present('foo')
self.assertEqual(
ret,
{'comment': 'The extension foo has been installed',
'changes': {}, 'name': 'foo', 'result': True}
)
ret = postgres_extension.present('foo')
self.assertEqual(
ret,
{'comment': 'Extension foo is already present',
'changes': {}, 'name': 'foo', 'result': True}
)
ret = postgres_extension.present('foo')
self.assertEqual(
ret,
{'comment': 'The extension foo has been upgraded',
'changes': {}, 'name': 'foo', 'result': True}
)
@patch.dict(OPTS, {'test': True})
@patch.dict(SALT_STUB, {
'postgres.create_metadata': Mock(side_effect=[
[postgresmod._EXTENSION_NOT_INSTALLED],
[postgresmod._EXTENSION_INSTALLED],
[postgresmod._EXTENSION_TO_MOVE, postgresmod._EXTENSION_INSTALLED],
]),
'postgres.create_extension': Mock(side_effect=[
True, True, True,
]),
})
def test_presenttest(self):
with patch.dict(postgres_extension.__opts__, {'test': True}):
ret = postgres_extension.present('foo')
self.assertEqual(
ret,
{'comment': 'Extension foo is set to be installed',
'changes': {}, 'name': 'foo', 'result': None}
)
ret = postgres_extension.present('foo')
self.assertEqual(
ret,
{'comment': "Extension foo is set to be created",
'changes': {}, 'name': 'foo', 'result': None}
)
ret = postgres_extension.present('foo')
self.assertEqual(
ret,
{'comment': "Extension foo is set to be upgraded",
'changes': {}, 'name': 'foo', 'result': None}
)
@patch.dict(SALT_STUB, {
'postgres.is_installed_extension': Mock(side_effect=[
True, False,
]),
'postgres.drop_extension': Mock(side_effect=[
True, True,
]),
})
def test_absent(self):
ret = postgres_extension.absent('foo')
self.assertEqual(
ret,
{'comment': 'Extension foo has been removed',
'changes': {'foo': 'Absent'}, 'name': 'foo', 'result': True}
)
ret = postgres_extension.absent('foo')
self.assertEqual(
ret,
{'comment': (
'Extension foo is not present, '
'so it cannot be removed'),
'changes': {}, 'name': 'foo', 'result': True}
)
@patch.dict(OPTS, {'test': False})
@patch.dict(SALT_STUB, {
'postgres.is_installed_extension': Mock(side_effect=[
True, True,
]),
'postgres.drop_extension': Mock(side_effect=[
False, False,
]),
})
def test_absent_failed(self):
ret = postgres_extension.absent('foo')
self.assertEqual(
ret,
{'comment': 'Extension foo failed to be removed',
'changes': {}, 'name': 'foo', 'result': False}
)
@patch.dict(OPTS, {'test': True})
@patch.dict(SALT_STUB, {
'postgres.is_installed_extension': Mock(side_effect=[
True, True,
]),
'postgres.drop_extension': Mock(side_effect=[
False, False,
]),
})
def test_absent_failedtest(self):
with patch.dict(postgres_extension.__opts__, {'test': True}):
ret = postgres_extension.absent('foo')
self.assertEqual(
ret,
{'comment': 'Extension foo is set to be removed',
'changes': {}, 'name': 'foo', 'result': None}
)
@skipIf(NO_MOCK, NO_MOCK_REASON)
@patch.multiple(postgres_schema,
__grains__={'os_family': 'Linux'},
__salt__=SALT_STUB,
__opts__={'test': False})
@patch('salt.utils.which', Mock(return_value='/usr/bin/pgsql'))
class PostgresSchemaTestCase(TestCase):
@patch.dict(SALT_STUB, {
'postgres.schema_get': Mock(return_value=None),
'postgres.schema_create': MagicMock(),
})
def test_present_creation(self):
ret = postgres_schema.present('dbname', 'foo')
self.assertEqual(
ret,
{'comment': 'Schema foo has been created in database dbname',
'changes': {'foo': 'Present'},
'dbname': 'dbname',
'name': 'foo',
'result': True}
)
self.assertEqual(SALT_STUB['postgres.schema_create'].call_count, 1)
@patch.dict(SALT_STUB, {
'postgres.schema_get': Mock(return_value={'foo':
{'acl': '',
'owner': 'postgres'}
}),
'postgres.schema_create': MagicMock(),
})
def test_present_nocreation(self):
ret = postgres_schema.present('dbname', 'foo')
self.assertEqual(
ret,
{'comment': 'Schema foo already exists in database dbname',
'changes': {},
'dbname': 'dbname',
'name': 'foo',
'result': True}
)
self.assertEqual(SALT_STUB['postgres.schema_create'].call_count, 0)
@patch.dict(SALT_STUB, {
'postgres.schema_exists': Mock(return_value=True),
'postgres.schema_remove': MagicMock(),
})
def test_absent_remove(self):
ret = postgres_schema.absent('dbname', 'foo')
self.assertEqual(
ret,
{'comment': 'Schema foo has been removed from database dbname',
'changes': {'foo': 'Absent'},
'dbname': 'dbname',
'name': 'foo',
'result': True}
)
self.assertEqual(SALT_STUB['postgres.schema_remove'].call_count, 1)
@patch.dict(SALT_STUB, {
'postgres.schema_exists': Mock(return_value=False),
'postgres.schema_remove': MagicMock(),
})
def test_absent_noremove(self):
ret = postgres_schema.absent('dbname', 'foo')
self.assertEqual(
ret,
{'comment': 'Schema foo is not present in database dbname,'
' so it cannot be removed',
'changes': {},
'dbname': 'dbname',
'name': 'foo',
'result': True}
)
self.assertEqual(SALT_STUB['postgres.schema_remove'].call_count, 0)
if __name__ == '__main__':
from integration import run_tests
run_tests(PostgresExtensionTestCase, needs_daemon=False)
| true | true |
1c327e79fdd831f32292c1e0db57bf924a41ea04 | 2,937 | py | Python | colored-led-counter/app.py | ceausuveronica/OpenCV-projects | fe9a95eead97212de5afeb59b31a8996ce7fb116 | [
"MIT"
] | null | null | null | colored-led-counter/app.py | ceausuveronica/OpenCV-projects | fe9a95eead97212de5afeb59b31a8996ce7fb116 | [
"MIT"
] | 1 | 2018-06-02T17:58:12.000Z | 2018-06-02T17:58:12.000Z | colored-led-counter/app.py | ceausuveronica/OpenCV-projects | fe9a95eead97212de5afeb59b31a8996ce7fb116 | [
"MIT"
] | null | null | null | # import the necessary packages
# Count how many LEDs of a given colour appear in an image.
#
# Pipeline: brighten -> blur -> HSV threshold on the requested colour ->
# binarise -> find contours -> keep contours in a plausible LED size range.
import argparse
import numpy as np
from pprint import pprint

try:
    from cv2 import cv2

    ap = argparse.ArgumentParser()
    ap.add_argument("-i", "--image", required=True,
                    help="path to the input image")
    ap.add_argument("-c", "--color", default="red",
                    # BUG FIX: an unsupported colour used to leave `mask`
                    # undefined and crash with a NameError; argparse now
                    # rejects it up front with a clear error.
                    choices=("red", "green", "blue", "yellow"),
                    help="red, green, blue, yellow")
    args = vars(ap.parse_args())

    img_path = 'leds/' + args['image']
    img = cv2.imread(img_path)  # BGR ndarray (None if the path is wrong)

    # Scale so the longest side of the image becomes 816 px.
    max_dimension = max(img.shape)
    scale = 816 / max_dimension
    img = cv2.resize(img, None, fx=scale, fy=scale)

    color = args['color']

    # Brightness/contrast boost.  The original colour-dependent if/else set
    # identical values in both branches, so it was dead code.
    alpha = 2   # contrast gain
    beta = 30   # brightness offset
    img = cv2.addWeighted(img, alpha, np.zeros(img.shape, img.dtype), 0, beta)
    img = cv2.GaussianBlur(img, (7, 7), 0)

    hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)

    # HSV thresholds per colour.  Red needs two ranges because its hue
    # wraps around the ends of OpenCV's 0-179 hue scale.
    lower_green = np.array([55, 100, 50])
    upper_green = np.array([85, 255, 255])
    lower_yellow = np.array([30, 20, 50])
    upper_yellow = np.array([60, 255, 255])
    lower_blue = np.array([90, 100, 50])
    upper_blue = np.array([150, 255, 255])
    lower_red = np.array([0, 50, 50])
    upper_red = np.array([20, 255, 255])
    lower_cold_red = np.array([160, 50, 50])
    upper_cold_red = np.array([255, 255, 255])

    if color == 'red':
        mask = cv2.bitwise_or(
            cv2.inRange(hsv, lower_red, upper_red),
            cv2.inRange(hsv, lower_cold_red, upper_cold_red))
    elif color == 'green':
        mask = cv2.inRange(hsv, lower_green, upper_green)
    elif color == 'blue':
        mask = cv2.inRange(hsv, lower_blue, upper_blue)
    else:  # 'yellow' -- argparse guarantees one of the four choices
        mask = cv2.inRange(hsv, lower_yellow, upper_yellow)

    res = cv2.bitwise_and(img, img, mask=mask)
    cv2.imshow('img', img)
    cv2.imshow('res', res)

    # Grayscale + hard threshold to get clean blobs for contour detection.
    new_res = cv2.cvtColor(img, cv2.COLOR_HSV2RGB)
    new_res = cv2.bitwise_and(new_res, new_res, mask=mask)
    new_res = cv2.cvtColor(new_res, cv2.COLOR_BGR2GRAY)
    retval, new_res = cv2.threshold(new_res, 10, 220, cv2.THRESH_BINARY)
    cv2.imshow('new_res', new_res)

    # (The original also computed a morphological open/close of `mask` here,
    # but never used the result; that dead code has been removed.)

    # BUG FIX: findContours returns (image, contours, hierarchy) on
    # OpenCV 3 but (contours, hierarchy) on OpenCV 4; [-2] is correct on
    # both, so the script no longer breaks on a 2-tuple unpack.
    contours = cv2.findContours(new_res.copy(), cv2.RETR_TREE,
                                cv2.CHAIN_APPROX_SIMPLE)[-2]
    contour_sizes = [(cv2.contourArea(contour), contour)
                     for contour in contours]
    # Keep only blobs in a plausible per-LED area range (pixels^2).
    led_contours = [c for c in contour_sizes if 20 < c[0] < 300]
    nr_leds = len(led_contours)

    print(str(nr_leds) + ' LEDs')
    print('Press escape')
    cv2.waitKey(0)
    cv2.destroyAllWindows()
except ImportError:
    # cv2 is the only dependency that may be absent; fail with an
    # actionable message instead of the original vague "Ooops..".
    print("OpenCV (cv2) is required: pip install opencv-python")
import argparse
import numpy as np
from pprint import pprint
try:
from cv2 import cv2
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--image", required=True,
help="path to the input image")
ap.add_argument("-c", "--color",
default="red",
help="red, green, blue, yellow")
args = vars(ap.parse_args())
img_path = 'leds/' + args['image']
img = cv2.imread(img_path)
max_dimension = max(img.shape)
scale = 816/max_dimension
img = cv2.resize(img, None, fx=scale, fy=scale)
color = args['color']
if color == 'yellow':
alpha = 2
beta = 30
else:
alpha = 2
beta = 30
img =cv2.addWeighted(img, alpha, np.zeros(img.shape, img.dtype), 0, beta)
img = cv2.GaussianBlur(img, (7,7), 0)
hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
lower_green = np.array([55, 100, 50])
upper_green = np.array([85, 255, 255])
lower_yellow = np.array([30, 20, 50])
upper_yellow = np.array([60, 255, 255])
lower_blue = np.array([90, 100, 50])
upper_blue = np.array([150, 255, 255])
lower_red = np.array([0, 50, 50])
upper_red = np.array([20, 255, 255])
lower_cold_red = np.array([160, 50, 50])
upper_cold_red = np.array([255, 255, 255])
if color == 'red':
mask = cv2.inRange(hsv, lower_red, upper_red)
mask2 = cv2.inRange(hsv, lower_cold_red, upper_cold_red)
mask = cv2.bitwise_or(mask, mask2)
if color == 'green':
mask = cv2.inRange(hsv, lower_green, upper_green)
if color == 'blue':
mask = cv2.inRange(hsv, lower_blue, upper_blue)
if color == 'yellow':
mask = cv2.inRange(hsv, lower_yellow, upper_yellow)
res = cv2.bitwise_and(img, img, mask=mask)
cv2.imshow('img', img)
cv2.imshow('res', res)
new_res = cv2.cvtColor(img, cv2.COLOR_HSV2RGB)
new_res = cv2.bitwise_and(new_res, new_res, mask=mask)
new_res = cv2.cvtColor(new_res, cv2.COLOR_BGR2GRAY)
reval, new_res = cv2.threshold(new_res, 10, 220, cv2.THRESH_BINARY)
cv2.imshow('new_res', new_res)
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (15,15))
mask_closed = cv2.morphologyEx(mask, cv2.MORPH_CLOSE, kernel)
mask_clean = cv2.morphologyEx(mask_closed, cv2.MORPH_OPEN, kernel)
image, contours, hierarchy = cv2.findContours(new_res.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
contour_sizes = [(cv2.contourArea(contour), contour) for contour in contours]
led_contours = list(filter(lambda x: x[0] > 20 and x[0] < 300 , contour_sizes))
nr_leds = len(led_contours)
print(str(nr_leds) +' LEDs' )
print('Press escape')
cv2.waitKey(0)
cv2.destroyAllWindows()
except ImportError:
print("Ooops..") | true | true |
1c327f0296a46c906815f7847743c4fe97379c29 | 4,433 | py | Python | face_detection_mtcnn_img.py | WhiTExB3AR/PreProduceCode-FMPN-FER | 0273fc08bc86a522525d35fe0826e2f106d16e3e | [
"MIT"
] | null | null | null | face_detection_mtcnn_img.py | WhiTExB3AR/PreProduceCode-FMPN-FER | 0273fc08bc86a522525d35fe0826e2f106d16e3e | [
"MIT"
] | null | null | null | face_detection_mtcnn_img.py | WhiTExB3AR/PreProduceCode-FMPN-FER | 0273fc08bc86a522525d35fe0826e2f106d16e3e | [
"MIT"
] | null | null | null | # https://github.com/ipazc/mtcnn
# USAGE
# python face_detection_mtcnn_img.py --image images\face_detection\team_hlv_tram_anhnguyen_hop.jpg
# Face detection on a single image using MTCNN.
#
# USAGE:
#   python face_detection_mtcnn_img.py --image images/face_detection/photo.jpg
#
# For each detected face this script:
#   * draws a bounding box on an RGB copy of the image and saves the annotated
#     image under images/results/detected_rectangle/
#   * crops the face from a grayscale copy, saves it under
#     images/results/face_focus/mtcnn/, and shows it in a window
#     (press any key to advance to the next face).
import argparse
import imutils  # NOTE(review): unused in the live code path; kept to preserve the module's import surface
import cv2
import os
from PIL import Image  # NOTE(review): unused (PIL-based cropping was removed); kept for compatibility
from mtcnn import MTCNN
from pathlib import Path

detector = MTCNN()

# Parse the command line: the only (required) argument is the input image path.
parser = argparse.ArgumentParser()
parser.add_argument("-img", "--image",
                    type=str,
                    required=True,
                    help="path to input image")
args = vars(parser.parse_args())

# MTCNN expects RGB input, but OpenCV loads images as BGR.
# Keep a grayscale copy for saving the cropped faces.
image_org = cv2.imread(args["image"])
if image_org is None:
    # cv2.imread returns None (rather than raising) on a bad path.
    raise SystemExit("Could not read image: " + args["image"])
gray = cv2.cvtColor(image_org, cv2.COLOR_BGR2GRAY)
rgb = cv2.cvtColor(image_org, cv2.COLOR_BGR2RGB)
faces_result = detector.detect_faces(rgb)

# Draw a rectangle around every detected face.
# Each detection is a dict whose 'box' entry is [x, y, width, height].
for face in faces_result:
    x, y, w, h = face['box']
    cv2.rectangle(rgb, (x, y), (x + w, y + h), (255, 155, 0), 2)

print("[INFO] {} faces detected...".format(len(faces_result)))
for face in faces_result:
    print(face['box'], face['confidence'])

# Input file name without extension, used to build all output file names.
file_name_pathlib = Path(args["image"]).stem
print("Got the file name original: ", file_name_pathlib)

# Save the annotated image (convert back to BGR for cv2.imwrite).
cv2.imwrite('images/results/detected_rectangle/' +
            "Face Detected MTCNN_" +
            str(file_name_pathlib) +
            ".png",
            cv2.cvtColor(rgb, cv2.COLOR_RGB2BGR))
print('=> Successfully saved face detection rectangle and show each face now')

# Crop every detected face from the grayscale image and save it.
# Bug fix: the original wrote every crop to the same file name, so for images
# with more than one face each crop overwrote the previous one. The face index
# is now part of the file name (matching the suffix the original had sketched
# in commented-out code).
for i, face in enumerate(faces_result):
    x, y, w, h = face['box']
    crop_face = gray[y:y + h, x:x + w]  # numpy indexing is [rows, cols] == [y, x]
    cv2.imwrite('images/results/face_focus/mtcnn/' +
                str(file_name_pathlib) +
                '_MTCNN_face_' + str(i) +
                '.png',
                crop_face)
    cv2.imshow("Cropped face.png", crop_face)
    cv2.waitKey(0)  # wait for a key press before showing the next face
# ------- End: Crop face ------- | 33.839695 | 114 | 0.660501 |
import argparse
import imutils
import cv2
import os
from PIL import Image
from mtcnn import MTCNN
from pathlib import Path
detector = MTCNN()
parser = argparse.ArgumentParser()
parser.add_argument("-img", "--image",
type=str,
required=True,
help="path to input image")
args = vars(parser.parse_args())
image_org = cv2.imread(args["image"])
gray = cv2.cvtColor(image_org, cv2.COLOR_BGR2GRAY)
rgb = cv2.cvtColor(image_org, cv2.COLOR_BGR2RGB)
faces_result = detector.detect_faces(rgb)
for i in range(len(faces_result)):
bounding_box = faces_result[i]['box']
keypoints = faces_result[i]['keypoints']
cv2.rectangle(
rgb,
(bounding_box[0], bounding_box[1]),
(bounding_box[0]+bounding_box[2], bounding_box[1] + bounding_box[3]),
(255,155,0),
2
)
cted...".format(len(faces_result)))
(faces_result[i]['box'], faces_result[i]['confidence'])
file_name_pathlib = Path(args["image"]).stem
print("Got the file name original: ", file_name_pathlib)
TCNN_" +
str(file_name_pathlib) +
".png",
cv2.cvtColor(rgb, cv2.COLOR_RGB2BGR))
print('=> Successfully saved face detection rectangle and show each face now')
for i in range(len(faces_result)):
bounding_box = faces_result[i]['box']
y = bounding_box[0]
x = bounding_box[1]
w = bounding_box[1] + bounding_box[3]
h = bounding_box[0] + bounding_box[2]
crop_face = gray[x:w, y:h]
cv2.imwrite('images/results/face_focus/mtcnn/' +
str(file_name_pathlib) +
'.png',
crop_face)
cv2.imshow("Cropped face.png", crop_face)
cv2.waitKey(0)
| true | true |
1c327f792bcbb80f4d9b267bc06b4d4888738fc7 | 25,834 | py | Python | saw-remote-api/python/saw_client/crucible.py | GaloisInc/saw-script | d012ffe6bf538456ad19f15974c8367734e45413 | [
"BSD-3-Clause"
] | 411 | 2015-06-09T22:00:47.000Z | 2022-03-30T11:41:23.000Z | saw-remote-api/python/saw_client/crucible.py | GaloisInc/saw-script | d012ffe6bf538456ad19f15974c8367734e45413 | [
"BSD-3-Clause"
] | 1,151 | 2015-06-12T20:46:31.000Z | 2022-03-23T02:56:32.000Z | saw-remote-api/python/saw_client/crucible.py | GaloisInc/saw-script | d012ffe6bf538456ad19f15974c8367734e45413 | [
"BSD-3-Clause"
] | 65 | 2015-06-10T17:52:26.000Z | 2022-02-10T18:17:06.000Z | from abc import ABCMeta, abstractmethod
from cryptol import cryptoltypes
from .utils import deprecated
from dataclasses import dataclass
import dataclasses
import re
from typing import Any, Dict, List, Optional, Set, Union, overload
from typing_extensions import Literal
import inspect
import uuid
from .llvm_type import *
from .jvm_type import *
class SetupVal(metaclass=ABCMeta):
    """Represent a ``SetupValue`` in SawScript: values that can occur during
    symbolic execution, including ``Term`` values, pointers, and composites
    of these (structures and arrays).
    """
    @abstractmethod
    def to_json(self) -> Any:
        """JSON representation of this ``SetupVal`` as used in expressions.

        Must be a JSON object whose ``'setup value'`` field carries a unique
        tag the server dispatches on to interpret the rest of the object.
        """
        ...

    @overload
    def __getitem__(self, key : int) -> 'ElemVal':
        ...
    @overload
    def __getitem__(self, key : str) -> 'FieldVal':
        ...
    def __getitem__(self, key : Union[int,str]) -> 'SetupVal':
        """Element indexing (``int`` key) and field access (``str`` key).

        :param key: an element index (``int``) or a field name (``str``).
        """
        if isinstance(key, str):
            return field(self, key)
        if isinstance(key, int):
            return elem(self, key)
        raise ValueError(f'{key!r} is not a valid element index or field name.')
class NamedSetupVal(SetupVal):
    """Represents those ``SetupVal``s which are a named reference to some value, e.g., a variable
    or reference to allocated memory."""
    @abstractmethod
    def to_init_json(self) -> Any:
        """JSON representation with the information for those ``SetupVal``s which require additional
        information to initialize/allocate them vs that which is required later to reference them.
        I.e., ``.to_json()`` will be used to refer to such ``SetupVal``s in expressions, and
        ``.to_init_json()`` is used to initialize/allocate them.
        """
        pass
class CryptolTerm(SetupVal):
    """A Cryptol expression usable as a ``SetupVal``.

    Wraps either a string of Cryptol syntax or an existing Cryptol JSON
    expression, and supports application via ``__call__``.
    """
    expression : cryptoltypes.CryptolJSON

    def __init__(self, code : Union[str, cryptoltypes.CryptolJSON]):
        # A raw string is wrapped as a Cryptol literal; anything else is
        # assumed to already be a Cryptol JSON expression.
        self.expression = cryptoltypes.CryptolLiteral(code) if isinstance(code, str) else code

    def __call__(self, *args : cryptoltypes.CryptolJSON) -> 'CryptolTerm':
        """Apply this term to ``args``, yielding a new ``CryptolTerm``."""
        applied = self.expression
        for arg in args:
            applied = cryptoltypes.CryptolApplication(applied, arg)
        return CryptolTerm(applied)

    def __repr__(self) -> str:
        return f"CryptolTerm({self.expression!r})"

    def to_json(self) -> Any:
        return {'setup value': 'Cryptol', 'expression': cryptoltypes.to_cryptol(self.expression)}

    def __to_cryptol__(self, ty : Any) -> Any:
        return self.expression.__to_cryptol__(ty)
class FreshVar(NamedSetupVal):
    """A fresh symbolic variable of a given LLVM/JVM type, created through
    ``Contract.fresh_var``."""
    __name : Optional[str]

    def __init__(self, spec : 'Contract', type : Union['LLVMType', 'JVMType'], suggested_name : Optional[str] = None) -> None:
        self.__name = suggested_name
        self.spec = spec
        self.type = type

    def name(self) -> str:
        """Return this variable's name, choosing a fresh one on first use."""
        if self.__name is None:
            self.__name = self.spec.get_fresh_name()
        return self.__name

    def __to_cryptol__(self, ty : Any) -> Any:
        return cryptoltypes.CryptolLiteral(self.name()).__to_cryptol__(ty)

    def to_init_json(self) -> Any:
        # NOTE(review): the same name is sent under both "server name" and
        # "name"; it is unclear whether the protocol actually needs both.
        chosen = self.name()
        return {"server name": chosen,
                "name": chosen,
                "type": self.type.to_json()}

    def to_json(self) -> Any:
        return {'setup value': 'named', 'name': self.name()}

    def __gt__(self, other : cryptoltypes.CryptolJSON) -> CryptolTerm:
        return CryptolTerm("(>)")(self, other)

    def __lt__(self, other : cryptoltypes.CryptolJSON) -> CryptolTerm:
        return CryptolTerm("(<)")(self, other)
class Allocated(NamedSetupVal):
    """A pointer to freshly allocated memory of a given type, created through
    ``Contract.alloc``."""
    name : Optional[str]

    def __init__(self, spec : 'Contract', type : Union['LLVMType','JVMType'], *,
                 mutable : bool = True, alignment : Optional[int] = None) -> None:
        self.name = None
        self.spec = spec
        self.type = type
        self.mutable = mutable
        self.alignment = alignment

    def __server_name(self) -> str:
        # The server name is assigned lazily, the first time it is needed.
        if self.name is None:
            self.name = self.spec.get_fresh_name()
        return self.name

    def to_init_json(self) -> Any:
        return {"server name": self.__server_name(),
                "type": self.type.to_json(),
                "mutable": self.mutable,
                "alignment": self.alignment}

    def to_json(self) -> Any:
        return {'setup value': 'named', 'name': self.__server_name()}
class StructVal(SetupVal):
    """A structure value built from a list of field values."""
    fields : List[SetupVal]

    def __init__(self, fields : List[SetupVal]) -> None:
        self.fields = fields

    def to_json(self) -> Any:
        # Structs are encoded on the wire as tuples of their field values.
        field_json = [fld.to_json() for fld in self.fields]
        return {'setup value': 'tuple', 'elements': field_json}
class ElemVal(SetupVal):
    """An array-element lvalue: ``base[index]``."""
    base : SetupVal
    index : int

    def __init__(self, base : SetupVal, index : int) -> None:
        self.base = base
        self.index = index

    def to_json(self) -> Any:
        return {'setup value': 'element lvalue',
                'base': self.base.to_json(),
                'index': self.index}
class FieldVal(SetupVal):
    """A struct-field lvalue: ``base[field_name]``."""
    base : SetupVal
    field_name : str

    def __init__(self, base : SetupVal, field_name : str) -> None:
        self.base = base
        self.field_name = field_name

    def to_json(self) -> Any:
        return {'setup value': 'field',
                'base': self.base.to_json(),
                'field': self.field_name}
class GlobalInitializerVal(SetupVal):
    """The initializer value of a named global."""
    name : str

    def __init__(self, name : str) -> None:
        self.name = name

    def to_json(self) -> Any:
        return {'setup value': 'global initializer', 'name': self.name}
class GlobalVarVal(SetupVal):
    """A pointer to a named global variable."""
    name : str

    def __init__(self, name : str) -> None:
        self.name = name

    def to_json(self) -> Any:
        return {'setup value': 'global lvalue', 'name': self.name}
class NullVal(SetupVal):
    """The null pointer value."""
    def to_json(self) -> Any:
        return {'setup value': 'null'}
class ArrayVal(SetupVal):
    """An array value built from a list of element values."""
    elements : List[SetupVal]

    def __init__(self, elements : List[SetupVal]) -> None:
        self.elements = elements

    def to_json(self) -> Any:
        element_json = [e.to_json() for e in self.elements]
        return {'setup value': 'array', 'elements': element_json}
# Splits a name into an optional non-digit-terminated prefix and an optional
# trailing run of digits.
name_regexp = re.compile('^(?P<prefix>.*[^0-9])?(?P<number>[0-9]+)?$')

def next_name(x : str) -> str:
    """Return the successor of name ``x`` by bumping its numeric suffix.

    A name without a numeric suffix gets ``0`` appended; an all-digit or
    empty name uses the default prefix ``'x'``.
    """
    m = name_regexp.match(x)
    if m is None:
        return 'x'
    prefix = m.group('prefix') or 'x'
    digits = m.group('number')
    successor = 0 if digits is None else int(digits) + 1
    return f'{prefix}{successor}'

def uniquify(x : str, used : Set[str]) -> str:
    """Return ``x``, bumped via ``next_name`` until it is not in ``used``."""
    while x in used:
        x = next_name(x)
    return x
class PointerType:
    """A trivial marker class indicating that PointsTo should check ``target``'s type
    against the type that ``pointer``'s type points to.
    """
    pass
class Condition:
    """A Cryptol proposition used as a contract pre- or post-condition."""
    def __init__(self, condition : CryptolTerm) -> None:
        self.cryptol_term = condition

    def to_json(self) -> Any:
        # Serialize the underlying Cryptol term directly.
        return cryptoltypes.to_cryptol(self.cryptol_term)
class PointsTo:
    """A single points-to assertion (the workhorse for ``points_to``).

    Records that ``pointer`` points at ``target``, optionally guarded by a
    ``condition`` and optionally with a type check on ``target``.
    """
    def __init__(self, pointer : SetupVal, target : SetupVal, *,
                 check_target_type : Union[PointerType, 'LLVMType', 'JVMType', None] = PointerType(),
                 condition : Optional[Condition] = None) -> None:
        self.pointer = pointer
        self.target = target
        self.check_target_type = check_target_type
        self.condition = condition

    def to_json(self) -> Any:
        """Serialize this assertion for the SAW remote API.

        :raises ValueError: if ``check_target_type`` is not ``None``, a
            ``PointerType``, or an ``LLVMType``.
        """
        check_target_type_json: Optional[Dict[str, Any]]
        if self.check_target_type is None:
            check_target_type_json = None
        elif isinstance(self.check_target_type, PointerType):
            check_target_type_json = { "check against": "pointer type" }
        elif isinstance(self.check_target_type, LLVMType):
            check_target_type_json = { "check against": "casted type"
                                     , "type": self.check_target_type.to_json() }
        else:
            # Bug fix: previously any other value (e.g. a JVMType, which the
            # annotation permits) fell through every branch and surfaced as an
            # opaque UnboundLocalError below. Fail with a clear message
            # instead. (Whether JVM casted-type checks should be supported
            # here is a separate question -- TODO confirm against the server API.)
            raise ValueError(f"Unsupported check_target_type: {self.check_target_type!r}")
        return {"pointer": self.pointer.to_json(),
                "points to": self.target.to_json(),
                "check points to type": check_target_type_json,
                "condition": self.condition.to_json() if self.condition is not None else self.condition}
class PointsToBitfield:
    """A points-to assertion for a single named field of a bitfield (the
    workhorse for ``points_to_bitfield``)."""
    def __init__(self, pointer : SetupVal, field_name : str,
                 target : SetupVal) -> None:
        self.pointer = pointer
        self.field_name = field_name
        self.target = target

    def to_json(self) -> Any:
        payload = {"pointer": self.pointer.to_json(),
                   "field name": self.field_name,
                   "points to": self.target.to_json()}
        return payload
@dataclass
class GhostVariable:
    """A handle to a declared ghost (specification-only) variable.

    ``name`` is the user-facing name; ``server_name`` is the identifier the
    SAW server uses to refer to the variable.
    """
    name: str
    server_name: str
class GhostValue:
    """States that the named ghost variable holds the value of a given
    Cryptol expression."""
    def __init__(self, name: str, value: CryptolTerm) -> None:
        self.name = name
        self.value = value

    def to_json(self) -> Any:
        value_json = cryptoltypes.to_cryptol(self.value)
        return {"server name": self.name,
                "value": value_json}
@dataclass
class State:
    """One half (pre or post) of a contract specification under construction.

    Accumulates the fresh variables, conditions, allocations, points-to
    facts, and ghost-variable values declared while building a ``Contract``.
    """
    contract : 'Contract'
    # default_factory ensures each State instance gets its own empty lists.
    fresh : List[FreshVar] = dataclasses.field(default_factory=list)
    conditions : List[Condition] = dataclasses.field(default_factory=list)
    allocated : List[Allocated] = dataclasses.field(default_factory=list)
    points_to : List[PointsTo] = dataclasses.field(default_factory=list)
    points_to_bitfield : List[PointsToBitfield] = dataclasses.field(default_factory=list)
    ghost_values : List[GhostValue] = dataclasses.field(default_factory=list)

    def to_json(self) -> Any:
        """Serialize this state for the SAW remote API."""
        return {'variables': [v.to_init_json() for v in self.fresh],
                'conditions': [c.to_json() for c in self.conditions],
                'allocated': [a.to_init_json() for a in self.allocated],
                'points to': [p.to_json() for p in self.points_to],
                'points to bitfield': [p.to_json() for p in self.points_to_bitfield],
                'ghost values': [g.to_json() for g in self.ghost_values]
                }
ContractState = \
Union[Literal['pre'],
Literal['post'],
Literal['done']]
@dataclass
class Void:
    """The unit return value: serializes to JSON ``null``."""
    def to_json(self) -> Any:
        return None

# Shared singleton instance for specifying a void return.
void = Void()
@dataclass
class VerifyResult:
    """Result of a verification: the contract proved and the lemma it produced."""
    contract : 'Contract'
    lemma_name : str

# Lemma names are generated deterministically with respect to a
# particular Python execution trace. This means that re-running the
# same script will be fast when using caching, but REPL-style usage
# will be slow, invalidating the cache at each step. We should be
# smarter about this.
used_lemma_names = set([]) # type: Set[str]
class Contract:
    """Base class for SAW method specifications (contracts).

    Subclasses override ``specification`` and use the helper methods
    (``fresh_var``, ``alloc``, ``points_to``, ...) together with
    ``execute_func`` and ``returns`` to describe a function's behavior.
    The contract moves through states 'pre' -> 'post' -> 'done'.
    """
    __used_names : Set[str]

    __state : ContractState = 'pre'

    __pre_state : State
    __post_state : State

    __returns : Optional[Union[SetupVal, Void]]

    __arguments : Optional[List[SetupVal]]

    __definition_lineno : Optional[int]
    __definition_filename : Optional[str]
    __unique_id : uuid.UUID
    __cached_json : Optional[Any]

    def __init__(self) -> None:
        self.__pre_state = State(self)
        self.__post_state = State(self)
        self.__used_names = set()
        self.__arguments = None
        self.__returns = None
        self.__unique_id = uuid.uuid4()
        self.__cached_json = None
        # Record where the contract subclass was instantiated, for reporting.
        frame = inspect.currentframe()
        if frame is not None and frame.f_back is not None:
            self.__definition_lineno = frame.f_back.f_lineno
            self.__definition_filename = frame.f_back.f_code.co_filename
        else:
            self.__definition_lineno = None
            self.__definition_filename = None

    # To be overridden by users
    def specification(self) -> None:
        pass

    def __active_state(self) -> State:
        """Return the state (pre or post) currently being built, or raise."""
        if self.__state == 'pre':
            return self.__pre_state
        elif self.__state == 'post':
            return self.__post_state
        else:
            raise Exception("wrong state")

    @staticmethod
    def __as_condition(proposition : Union[str, CryptolTerm, cryptoltypes.CryptolJSON]) -> Condition:
        """Wrap ``proposition`` in a ``Condition``, coercing non-``CryptolTerm`` input."""
        if isinstance(proposition, CryptolTerm):
            return Condition(proposition)
        return Condition(CryptolTerm(proposition))

    def execute_func(self, *args : SetupVal) -> None:
        """Denotes the end of the precondition specification portion of this ``Contract``, records that
        the function is executed with arguments ``args``, and denotes the beginning of the postcondition
        portion of this ``Contract``."""
        if self.__arguments is not None:
            raise ValueError("The function has already been called once during the specification.")
        elif self.__state != 'pre':
            # Bug fix: the f-prefix was missing here, so the actual state
            # never appeared in the error message.
            raise ValueError(f"Contract state expected to be 'pre', but found {self.__state!r} (has `execute_func` already been called for this contract?).")
        else:
            self.__arguments = [arg for arg in args]
            self.__state = 'post'

    def get_fresh_name(self, hint : str = 'x') -> str:
        """Return a name based on ``hint`` that is unique within this contract."""
        new_name = uniquify(hint, self.__used_names)
        self.__used_names.add(new_name)
        return new_name

    def fresh_var(self, type : Union['LLVMType','JVMType'], suggested_name : Optional[str] = None) -> FreshVar:
        """Declares a fresh variable of type ``type`` (with name ``suggested_name`` if provided and available)."""
        # Bug fix: the old code called get_fresh_name twice when a name was
        # suggested, which registered the suggestion and then uniquified it
        # again (e.g. a free 'foo' became 'foo0').
        fresh_name = self.get_fresh_name(suggested_name if suggested_name is not None else 'x')
        v = FreshVar(self, type, fresh_name)
        self.__active_state().fresh.append(v)
        return v

    def alloc(self, type : Union['LLVMType', 'JVMType'], *, read_only : bool = False,
              alignment : Optional[int] = None,
              points_to : Optional[SetupVal] = None) -> SetupVal:
        """Allocates a pointer of type ``type``.

        If ``read_only == True`` then the allocated memory is immutable.

        If ``alignment != None``, then the start of the allocated region of
        memory will be aligned to a multiple of the specified number of bytes
        (which must be a power of 2).

        If ``points_to != None``, it will also be asserted that the allocated memory contains the
        value specified by ``points_to``.

        :returns A pointer of the proper type to the allocated region."""
        a = Allocated(self, type, mutable = not read_only, alignment = alignment)
        self.__active_state().allocated.append(a)

        if points_to is not None:
            self.points_to(a, points_to)

        return a

    def points_to(self, pointer : SetupVal, target : SetupVal, *,
                  check_target_type : Union[PointerType, 'LLVMType', 'JVMType', None] = PointerType(),
                  condition : Optional[Condition] = None) -> None:
        """Declare that the memory location indicated by the ``pointer``
        contains the ``target``.

        If ``check_target_type == PointerType()``, then this will check that
        ``target``'s type matches the type that ``pointer``'s type points to.
        If ``check_target_type`` is an ``LLVMType``, then this will check that
        ``target``'s type matches that type.
        If ``check_target_type == None``, then this will not check ``target``'s
        type at all.

        If ``condition != None``, then this will only declare that ``pointer``
        points to ``target`` if the ``condition`` holds.
        """
        pt = PointsTo(pointer, target, check_target_type = check_target_type, condition = condition)
        self.__active_state().points_to.append(pt)

    def points_to_bitfield(self, pointer : SetupVal, field_name : str,
                           target : SetupVal) -> None:
        """Declare that the memory location indicated by the ``pointer``
        is a bitfield whose field, indicated by the ``field_name``,
        contains the ``target``.

        Currently, this function only supports LLVM verification. Attempting to
        use this function for JVM verification will result in an error.
        """
        pt = PointsToBitfield(pointer, field_name, target)
        self.__active_state().points_to_bitfield.append(pt)

    def ghost_value(self, var: GhostVariable, value: CryptolTerm) -> None:
        """Declare that the given ghost variable should have a value specified by the given Cryptol expression.
        Usable either before or after `execute_func`.
        """
        gv = GhostValue(var.name, value)
        self.__active_state().ghost_values.append(gv)

    @deprecated
    def proclaim(self, proposition : Union[str, CryptolTerm, cryptoltypes.CryptolJSON]) -> None:
        """DEPRECATED: Use ``precondition`` or ``postcondition`` instead. This method will
        eventually be removed."""
        self.__active_state().conditions.append(self.__as_condition(proposition))

    def precondition(self, proposition : Union[str, CryptolTerm, cryptoltypes.CryptolJSON]) -> None:
        """Establishes ``proposition`` as a precondition for the function ```Contract```
        being specified.

        Preconditions must be specified before ``execute_func`` is called in the contract specification."""
        if self.__state == 'pre':
            self.__pre_state.conditions.append(self.__as_condition(proposition))
        else:
            raise Exception("preconditions must be specified before execute_func is called in the contract")

    def postcondition(self, proposition : Union[str, CryptolTerm, cryptoltypes.CryptolJSON]) -> None:
        """Establishes ``proposition`` as a postcondition for the function ```Contract```
        being specified.

        Postconditions must be specified after ``execute_func`` is called in the contract specification."""
        if self.__state == 'post':
            self.__post_state.conditions.append(self.__as_condition(proposition))
        else:
            raise Exception("postconditions must be specified after execute_func is called in the contract")

    def returns(self, val : Union[Void,SetupVal]) -> None:
        """Declare the return value of the function (use ``void`` for none)."""
        if self.__state == 'post':
            if self.__returns is None:
                self.__returns = val
            else:
                raise ValueError("Return value already specified")
        else:
            raise ValueError("Not in postcondition")

    def lemma_name(self, hint  : Optional[str] = None) -> str:
        """Return a globally unique lemma name, derived from ``hint`` or the class name."""
        if hint is None:
            hint = self.__class__.__name__

        name = uniquify('lemma_' + hint, used_lemma_names)

        used_lemma_names.add(name)

        return name

    def definition_lineno(self) -> Optional[int]:
        """Line number where this contract was instantiated, if known."""
        return self.__definition_lineno

    def definition_filename(self) -> Optional[str]:
        """File in which this contract was instantiated, if known."""
        return self.__definition_filename

    def to_json(self) -> Any:
        """Run ``specification`` (once) and serialize the contract for the server.

        The result is cached; subsequent calls return the same JSON object.
        """
        if self.__cached_json is not None:
            return self.__cached_json
        else:
            if self.__state != 'pre':
                raise Exception(f'Internal error: wrong contract state -- expected \'pre\', but got: {self.__state!r}')

            self.specification()

            if self.__state != 'post':
                raise Exception(f'Internal error: wrong contract state -- expected \'post\', but got: {self.__state!r}')

            self.__state = 'done'

            if self.__returns is None:
                raise Exception("forgot return")

            self.__cached_json = \
                {'pre vars': [v.to_init_json() for v in self.__pre_state.fresh],
                 'pre conds': [c.to_json() for c in self.__pre_state.conditions],
                 'pre allocated': [a.to_init_json() for a in self.__pre_state.allocated],
                 'pre ghost values': [g.to_json() for g in self.__pre_state.ghost_values],
                 'pre points tos': [pt.to_json() for pt in self.__pre_state.points_to],
                 'pre points to bitfields': [pt.to_json() for pt in self.__pre_state.points_to_bitfield],
                 'argument vals': [a.to_json() for a in self.__arguments] if self.__arguments is not None else [],
                 'post vars': [v.to_init_json() for v in self.__post_state.fresh],
                 'post conds': [c.to_json() for c in self.__post_state.conditions],
                 'post allocated': [a.to_init_json() for a in self.__post_state.allocated],
                 'post ghost values': [g.to_json() for g in self.__post_state.ghost_values],
                 'post points tos': [pt.to_json() for pt in self.__post_state.points_to],
                 'post points to bitfields': [pt.to_json() for pt in self.__post_state.points_to_bitfield],
                 'return val': self.__returns.to_json()}
            return self.__cached_json
##################################################
# Helpers for value construction
##################################################
# It's tempting to name this `global` to mirror SAWScript's `llvm_global`,
# but that would clash with the Python keyword `global`.
def global_var(name: str) -> SetupVal:
    """Return a pointer to the named global ``name`` (i.e., a ``GlobalVarVal``)."""
    return GlobalVarVal(name)
def cryptol(data : Any) -> 'CryptolTerm':
    """Construct a Cryptol value from ``data`` (a ``CryptolTerm``, which is also a ``SetupVal``).

    ``data`` should be a string literal of Cryptol syntax or the result of a
    Cryptol-related server computation.

    NOTE(review): ``Any`` may be too permissive here; ``Union[str,
    cryptoltypes.CryptolJSON]`` would match ``CryptolTerm.__init__`` -- kept as
    ``Any`` to avoid over-constraining callers without confirmation.
    """
    return CryptolTerm(data)
def array(*elements: SetupVal) -> SetupVal:
    """Returns an array with the provided ``elements`` (i.e., an ``ArrayVal``).

    :raises ValueError: if no elements are given or any element is not a ``SetupVal``.

    N.B., one or more ``elements`` must be provided."""
    if len(elements) == 0:
        raise ValueError('An array must be constructed with one or more elements')
    for e in elements:
        if not isinstance(e, SetupVal):
            # Bug fix: the f-prefix was missing, so the offending value never
            # appeared in the error message.
            raise ValueError(f'array expected a SetupVal, but got {e!r}')
    return ArrayVal(list(elements))
def elem(base: SetupVal, index: int) -> SetupVal:
    """Returns the value of the array element at position ``index`` in ``base`` (i.e., an ``ElemVal``).

    Can also be created by using an ``int`` indexing key on a ``SetupVal``: ``base[index]``.

    :raises ValueError: if ``base`` is not a ``SetupVal`` or ``index`` is not an ``int``."""
    if not isinstance(base, SetupVal):
        # Bug fix: these messages lacked the f-prefix, so the offending value
        # never appeared in the error text.
        raise ValueError(f'elem expected a SetupVal, but got {base!r}')
    if not isinstance(index, int):
        raise ValueError(f'elem expected an int, but got {index!r}')
    return ElemVal(base, index)
def field(base : SetupVal, field_name : str) -> SetupVal:
    """Returns the value of struct ``base``'s field ``field_name`` (i.e., a ``FieldVal``).

    Can also be created by using a ``str`` indexing key on a ``SetupVal``: ``base[field_name]``.

    :raises ValueError: if ``base`` is not a ``SetupVal`` or ``field_name`` is not a ``str``."""
    if not isinstance(base, SetupVal):
        # Bug fix: these messages lacked the f-prefix, so the offending value
        # never appeared in the error text.
        raise ValueError(f'field expected a SetupVal, but got {base!r}')
    if not isinstance(field_name, str):
        raise ValueError(f'field expected a str, but got {field_name!r}')
    return FieldVal(base, field_name)
def global_initializer(name: str) -> SetupVal:
    """Returns the initializer value of a named global ``name`` (i.e., a ``GlobalInitializerVal``).

    :raises ValueError: if ``name`` is not a ``str``."""
    if not isinstance(name, str):
        # Bug fix: the f-prefix was missing, so the offending value never
        # appeared in the error message.
        raise ValueError(f'global_initializer expected a str naming a global value, but got {name!r}')
    return GlobalInitializerVal(name)
def null() -> SetupVal:
    """Return a null pointer value (i.e., a ``NullVal``)."""
    return NullVal()
def struct(*fields : SetupVal) -> SetupVal:
    """Returns an LLVM structure value with the given ``fields`` (i.e., a ``StructVal``).

    :raises ValueError: if any field is not a ``SetupVal``."""
    # Loop variable renamed from `field`, which shadowed the module-level
    # `field` helper function.
    for fld in fields:
        if not isinstance(fld, SetupVal):
            # Bug fix: the f-prefix was missing, so the offending value never
            # appeared in the error message.
            raise ValueError(f'struct expected a SetupVal, but got {fld!r}')
    return StructVal(list(fields))
| 38.84812 | 156 | 0.626577 | from abc import ABCMeta, abstractmethod
from cryptol import cryptoltypes
from .utils import deprecated
from dataclasses import dataclass
import dataclasses
import re
from typing import Any, Dict, List, Optional, Set, Union, overload
from typing_extensions import Literal
import inspect
import uuid
from .llvm_type import *
from .jvm_type import *
class SetupVal(metaclass=ABCMeta):
@abstractmethod
def to_json(self) -> Any:
pass
@overload
def __getitem__(self, key : int) -> 'ElemVal':
pass
@overload
def __getitem__(self, key : str) -> 'FieldVal':
pass
def __getitem__(self, key : Union[int,str]) -> 'SetupVal':
if isinstance(key, int):
return elem(self, key)
elif isinstance(key, str):
return field(self, key)
else:
raise ValueError(f'{key!r} is not a valid element index or field name.')
class NamedSetupVal(SetupVal):
@abstractmethod
def to_init_json(self) -> Any:
pass
class CryptolTerm(SetupVal):
expression : cryptoltypes.CryptolJSON
def __init__(self, code : Union[str, cryptoltypes.CryptolJSON]):
if isinstance(code, str):
self.expression = cryptoltypes.CryptolLiteral(code)
else:
self.expression = code
def __call__(self, *args : cryptoltypes.CryptolJSON) -> 'CryptolTerm':
out_term = self.expression
for a in args:
out_term = cryptoltypes.CryptolApplication(out_term, a)
return CryptolTerm(out_term)
def __repr__(self) -> str:
return f"CryptolTerm({self.expression!r})"
def to_json(self) -> Any:
return {'setup value': 'Cryptol', 'expression': cryptoltypes.to_cryptol(self.expression)}
def __to_cryptol__(self, ty : Any) -> Any:
return self.expression.__to_cryptol__(ty)
class FreshVar(NamedSetupVal):
__name : Optional[str]
def __init__(self, spec : 'Contract', type : Union['LLVMType', 'JVMType'], suggested_name : Optional[str] = None) -> None:
self.__name = suggested_name
self.spec = spec
self.type = type
def __to_cryptol__(self, ty : Any) -> Any:
return cryptoltypes.CryptolLiteral(self.name()).__to_cryptol__(ty)
def to_init_json(self) -> Any:
name = self.name()
return {"server name": name,
"name": name,
"type": self.type.to_json()}
def name(self) -> str:
if self.__name is None:
self.__name = self.spec.get_fresh_name()
return self.__name
def to_json(self) -> Any:
return {'setup value': 'named', 'name': self.name()}
def __gt__(self, other : cryptoltypes.CryptolJSON) -> CryptolTerm:
gt = CryptolTerm("(>)")
return gt(self, other)
def __lt__(self, other : cryptoltypes.CryptolJSON) -> CryptolTerm:
lt = CryptolTerm("(<)")
return lt(self, other)
class Allocated(NamedSetupVal):
name : Optional[str]
def __init__(self, spec : 'Contract', type : Union['LLVMType','JVMType'], *,
mutable : bool = True, alignment : Optional[int] = None) -> None:
self.name = None
self.spec = spec
self.type = type
self.mutable = mutable
self.alignment = alignment
def to_init_json(self) -> Any:
if self.name is None:
self.name = self.spec.get_fresh_name()
return {"server name": self.name,
"type": self.type.to_json(),
"mutable": self.mutable,
"alignment": self.alignment}
def to_json(self) -> Any:
if self.name is None:
self.name = self.spec.get_fresh_name()
return {'setup value': 'named', 'name': self.name}
class StructVal(SetupVal):
fields : List[SetupVal]
def __init__(self, fields : List[SetupVal]) -> None:
self.fields = fields
def to_json(self) -> Any:
return {'setup value': 'tuple', 'elements': [fld.to_json() for fld in self.fields]}
class ElemVal(SetupVal):
base : SetupVal
index : int
def __init__(self, base : SetupVal, index : int) -> None:
self.base = base
self.index = index
def to_json(self) -> Any:
return {'setup value': 'element lvalue',
'base': self.base.to_json(), 'index': self.index}
class FieldVal(SetupVal):
base : SetupVal
field_name : str
def __init__(self, base : SetupVal, field_name : str) -> None:
self.base = base
self.field_name = field_name
def to_json(self) -> Any:
return {'setup value': 'field',
'base': self.base.to_json(), 'field': self.field_name}
class GlobalInitializerVal(SetupVal):
name : str
def __init__(self, name : str) -> None:
self.name = name
def to_json(self) -> Any:
return {'setup value': 'global initializer', 'name': self.name}
class GlobalVarVal(SetupVal):
    """An lvalue referring to the named global variable."""
    name : str
    def __init__(self, name : str) -> None:
        self.name = name
    def to_json(self) -> Any:
        """Serialize as a 'global lvalue' reference for the server."""
        return {'setup value': 'global lvalue', 'name': self.name}
class NullVal(SetupVal):
    """The null pointer setup value."""
    def to_json(self) -> Any:
        return {'setup value': 'null'}
class ArrayVal(SetupVal):
    """An array setup value built from its element setup values."""
    elements : List[SetupVal]

    def __init__(self, elements : List[SetupVal]) -> None:
        self.elements = elements

    def to_json(self) -> Any:
        """Serialize as an 'array' value for the server."""
        serialized = [element.to_json() for element in self.elements]
        return {'setup value': 'array', 'elements': serialized}
# Splits a name into an optional non-digit-terminated prefix and an
# optional trailing run of digits, e.g. 'abc12' -> ('abc', '12').
name_regexp = re.compile('^(?P<prefix>.*[^0-9])?(?P<number>[0-9]+)?$')

def next_name(x : str) -> str:
    """Return the successor of ``x`` in the fresh-name sequence.

    A trailing number is incremented ('x3' -> 'x4'); a name with no
    trailing number gains a '0' ('x' -> 'x0'); a purely numeric or empty
    name falls back to the 'x' prefix.
    """
    m = name_regexp.match(x)
    if m is None:
        return 'x'
    prefix = m.group('prefix')
    number = m.group('number')
    if prefix is None:
        prefix = 'x'
    successor = 0 if number is None else int(number) + 1
    return f'{prefix}{successor}'

def uniquify(x : str, used : Set[str]) -> str:
    """Return the first name in ``x``'s successor chain not present in ``used``."""
    candidate = x
    while candidate in used:
        candidate = next_name(candidate)
    return candidate
class PointerType:
    """Marker: check a points-to target against the pointer's pointee type."""
    pass
class Condition:
    """A logical condition (pre- or postcondition) wrapped for serialization."""
    def __init__(self, condition : CryptolTerm) -> None:
        self.cryptol_term = condition
    def to_json(self) -> Any:
        """Serialize the underlying Cryptol term."""
        return cryptoltypes.to_cryptol(self.cryptol_term)
class PointsTo:
    """A points-to relationship between a pointer and its pointee.

    ``check_target_type`` controls how the server type-checks the target:
    a ``PointerType`` marker (default) checks against the pointer's pointee
    type, an LLVM type checks against that casted type, and ``None`` skips
    the check.  ``condition`` optionally guards the relationship.
    """
    def __init__(self, pointer : SetupVal, target : SetupVal, *,
                 check_target_type : Union[PointerType, 'LLVMType', 'JVMType', None] = PointerType(),
                 condition : Optional[Condition] = None) -> None:
        self.pointer = pointer
        self.target = target
        self.check_target_type = check_target_type
        self.condition = condition
    def to_json(self) -> Any:
        """Serialize for the server; raises ValueError on unsupported check types."""
        check_target_type_json: Optional[Dict[str, Any]]
        if self.check_target_type is None:
            check_target_type_json = None
        elif isinstance(self.check_target_type, PointerType):
            check_target_type_json = { "check against": "pointer type" }
        elif isinstance(self.check_target_type, LLVMType):
            check_target_type_json = { "check against": "casted type"
                                     , "type": self.check_target_type.to_json() }
        else:
            # Bug fix: previously any other value (e.g. a JVM type) fell
            # through with check_target_type_json unbound, producing an
            # opaque UnboundLocalError at the return below.
            raise ValueError(
                f"Unsupported check_target_type: {self.check_target_type!r}")
        return {"pointer": self.pointer.to_json(),
                "points to": self.target.to_json(),
                "check points to type": check_target_type_json,
                "condition": self.condition.to_json() if self.condition is not None else self.condition}
class PointsToBitfield:
    """A points-to relationship for a single field of a bitfield struct."""
    def __init__(self, pointer : SetupVal, field_name : str,
                 target : SetupVal) -> None:
        self.pointer = pointer
        self.field_name = field_name
        self.target = target
    def to_json(self) -> Any:
        """Serialize for the server."""
        return {"pointer": self.pointer.to_json(),
                "field name": self.field_name,
                "points to": self.target.to_json()}
@dataclass
class GhostVariable:
    """A declared ghost state variable together with its server-side handle."""
    # User-facing variable name.
    name: str
    # Name the server knows this variable by.
    server_name: str
class GhostValue:
    """An assignment of a Cryptol value to a ghost variable."""
    def __init__(self, name: str, value: CryptolTerm) -> None:
        self.name = name
        self.value = value
    def to_json(self) -> Any:
        """Serialize for the server."""
        return {"server name": self.name,
                "value": cryptoltypes.to_cryptol(self.value)}
@dataclass
class State:
    """Accumulated facts for one phase (pre or post) of a contract."""
    contract : 'Contract'
    fresh : List[FreshVar] = dataclasses.field(default_factory=list)
    conditions : List[Condition] = dataclasses.field(default_factory=list)
    allocated : List[Allocated] = dataclasses.field(default_factory=list)
    points_to : List[PointsTo] = dataclasses.field(default_factory=list)
    points_to_bitfield : List[PointsToBitfield] = dataclasses.field(default_factory=list)
    ghost_values : List[GhostValue] = dataclasses.field(default_factory=list)

    def to_json(self) -> Any:
        """Serialize every component of this phase for the server."""
        return {'variables': [v.to_init_json() for v in self.fresh],
                'conditions': [c.to_json() for c in self.conditions],
                'allocated': [a.to_init_json() for a in self.allocated],
                'points to': [p.to_json() for p in self.points_to],
                'points to bitfield': [p.to_json() for p in self.points_to_bitfield],
                'ghost values': [g.to_json() for g in self.ghost_values]
                }
# The three phases a Contract moves through while its `specification` runs:
# 'pre' (before execute_func), 'post' (after it), 'done' (serialized).
# Per PEP 586, Literal with several values is equivalent to the Union of
# single-value Literals this previously spelled out.
ContractState = Literal['pre', 'post', 'done']
@dataclass
class Void:
    """Return 'value' of a function that returns nothing; serializes to null."""
    def to_json(self) -> Any:
        return None

# Singleton used by specifications of void functions.
void = Void()
@dataclass
class VerifyResult:
    """Outcome of verifying a contract: the contract and its lemma name."""
    contract : 'Contract'
    lemma_name : str

# Lemma names are generated deterministically with respect to a
# particular Python execution trace. This means that re-running the
# same script will be fast when using caching, but REPL-style usage
# will be slow, invalidating the cache at each step. We should be
# smarter about this.
used_lemma_names = set([]) # type: Set[str]
class Contract:
    """Base class for the specification of a single function.

    Subclasses override `specification`, which drives the contract through
    its states: fresh variables, allocations and preconditions are declared
    while the state is 'pre'; calling `execute_func` moves the state to
    'post'; postconditions and the `returns` value are declared afterwards.
    `to_json` runs the specification once and caches the serialized result.
    """
    __used_names : Set[str]          # every name handed out by get_fresh_name

    __state : ContractState = 'pre'

    __pre_state : State              # facts established before the call
    __post_state : State             # facts established after the call

    __returns : Optional[Union[SetupVal, Void]]

    __arguments : Optional[List[SetupVal]]

    __definition_lineno : Optional[int]
    __definition_filename : Optional[str]
    __unique_id : uuid.UUID
    __cached_json : Optional[Any]

    def __init__(self) -> None:
        self.__pre_state = State(self)
        self.__post_state = State(self)
        self.__used_names = set()
        self.__arguments = None
        self.__returns = None
        self.__unique_id = uuid.uuid4()
        self.__cached_json = None
        # Record where the contract subclass was instantiated, for error
        # reporting; unavailable on interpreters without frame inspection.
        frame = inspect.currentframe()
        if frame is not None and frame.f_back is not None:
            self.__definition_lineno = frame.f_back.f_lineno
            self.__definition_filename = frame.f_back.f_code.co_filename
        else:
            self.__definition_lineno = None
            self.__definition_filename = None

    # To be overridden by users
    def specification(self) -> None:
        """Define the contract; override in subclasses."""
        pass

    def execute_func(self, *args : SetupVal) -> None:
        """Mark the call to the function under verification with ``args``."""
        if self.__arguments is not None:
            raise ValueError("The function has already been called once during the specification.")
        elif self.__state != 'pre':
            # Bug fix: the f-prefix was missing, so the message printed the
            # literal '{self.__state!r}' instead of the actual state.
            raise ValueError(f"Contract state expected to be 'pre', but found {self.__state!r} (has `execute_func` already been called for this contract?).")
        else:
            self.__arguments = [arg for arg in args]
            self.__state = 'post'

    def get_fresh_name(self, hint : str = 'x') -> str:
        """Return a name not yet used in this contract, derived from ``hint``."""
        new_name = uniquify(hint, self.__used_names)
        self.__used_names.add(new_name)
        return new_name

    def fresh_var(self, type : Union['LLVMType','JVMType'], suggested_name : Optional[str] = None) -> FreshVar:
        """Declare a fresh symbolic variable of ``type`` in the current phase.

        Bug fix: the suggested name was previously passed through
        ``get_fresh_name`` twice, consuming two names and mangling the
        suggestion (e.g. 'foo' became 'foo0' even when 'foo' was unused).
        """
        fresh_name = self.get_fresh_name('x' if suggested_name is None else suggested_name)
        v = FreshVar(self, type, fresh_name)
        if self.__state == 'pre':
            self.__pre_state.fresh.append(v)
        elif self.__state == 'post':
            self.__post_state.fresh.append(v)
        else:
            raise Exception("wrong state")
        return v

    def alloc(self, type : Union['LLVMType', 'JVMType'], *, read_only : bool = False,
              alignment : Optional[int] = None,
              points_to : Optional[SetupVal] = None) -> SetupVal:
        """Declare an allocation in the current phase; optionally set its pointee."""
        a = Allocated(self, type, mutable = not read_only, alignment = alignment)
        if self.__state == 'pre':
            self.__pre_state.allocated.append(a)
        elif self.__state == 'post':
            self.__post_state.allocated.append(a)
        else:
            raise Exception("wrong state")
        if points_to is not None:
            self.points_to(a, points_to)
        return a

    def points_to(self, pointer : SetupVal, target : SetupVal, *,
                  check_target_type : Union[PointerType, 'LLVMType', 'JVMType', None] = PointerType(),
                  condition : Optional[Condition] = None) -> None:
        """Declare that ``pointer`` points to ``target`` in the current phase."""
        pt = PointsTo(pointer, target, check_target_type = check_target_type, condition = condition)
        if self.__state == 'pre':
            self.__pre_state.points_to.append(pt)
        elif self.__state == 'post':
            self.__post_state.points_to.append(pt)
        else:
            raise Exception("wrong state")

    def points_to_bitfield(self, pointer : SetupVal, field_name : str,
                           target : SetupVal) -> None:
        """Declare that bitfield ``field_name`` of ``pointer`` holds ``target``."""
        pt = PointsToBitfield(pointer, field_name, target)
        if self.__state == 'pre':
            self.__pre_state.points_to_bitfield.append(pt)
        elif self.__state == 'post':
            self.__post_state.points_to_bitfield.append(pt)
        else:
            raise Exception("wrong state")

    def ghost_value(self, var: GhostVariable, value: CryptolTerm) -> None:
        """Bind a ghost variable to a Cryptol value in the current phase."""
        gv = GhostValue(var.name, value)
        if self.__state == 'pre':
            self.__pre_state.ghost_values.append(gv)
        elif self.__state == 'post':
            self.__post_state.ghost_values.append(gv)
        else:
            raise Exception("wrong state")

    @deprecated
    def proclaim(self, proposition : Union[str, CryptolTerm, cryptoltypes.CryptolJSON]) -> None:
        """Deprecated: add a condition to whichever phase is current.

        Prefer the explicit `precondition` / `postcondition` methods.
        """
        if not isinstance(proposition, CryptolTerm):
            condition = Condition(CryptolTerm(proposition))
        else:
            condition = Condition(proposition)
        if self.__state == 'pre':
            self.__pre_state.conditions.append(condition)
        elif self.__state == 'post':
            self.__post_state.conditions.append(condition)
        else:
            raise Exception("wrong state")

    def precondition(self, proposition : Union[str, CryptolTerm, cryptoltypes.CryptolJSON]) -> None:
        """Add a precondition; only valid before `execute_func`."""
        if not isinstance(proposition, CryptolTerm):
            condition = Condition(CryptolTerm(proposition))
        else:
            condition = Condition(proposition)
        if self.__state == 'pre':
            self.__pre_state.conditions.append(condition)
        else:
            raise Exception("preconditions must be specified before execute_func is called in the contract")

    def postcondition(self, proposition : Union[str, CryptolTerm, cryptoltypes.CryptolJSON]) -> None:
        """Add a postcondition; only valid after `execute_func`."""
        if not isinstance(proposition, CryptolTerm):
            condition = Condition(CryptolTerm(proposition))
        else:
            condition = Condition(proposition)
        if self.__state == 'post':
            self.__post_state.conditions.append(condition)
        else:
            raise Exception("postconditions must be specified after execute_func is called in the contract")

    def returns(self, val : Union[Void,SetupVal]) -> None:
        """Record the function's return value; only valid after `execute_func`."""
        if self.__state == 'post':
            if self.__returns is None:
                self.__returns = val
            else:
                raise ValueError("Return value already specified")
        else:
            raise ValueError("Not in postcondition")

    def lemma_name(self, hint : Optional[str] = None) -> str:
        """Return a globally-unique lemma name, defaulting to the class name."""
        if hint is None:
            hint = self.__class__.__name__
        name = uniquify('lemma_' + hint, used_lemma_names)
        used_lemma_names.add(name)
        return name

    def definition_lineno(self) -> Optional[int]:
        """Line number where this contract instance was created, if known."""
        return self.__definition_lineno

    def definition_filename(self) -> Optional[str]:
        """Filename where this contract instance was created, if known."""
        return self.__definition_filename

    def to_json(self) -> Any:
        """Run `specification` once and return (and cache) the serialized contract."""
        if self.__cached_json is not None:
            return self.__cached_json
        else:
            if self.__state != 'pre':
                raise Exception(f'Internal error: wrong contract state -- expected \'pre\', but got: {self.__state!r}')
            self.specification()
            if self.__state != 'post':
                raise Exception(f'Internal error: wrong contract state -- expected \'post\', but got: {self.__state!r}')
            self.__state = 'done'
            if self.__returns is None:
                raise Exception("forgot return")
            self.__cached_json = \
                {'pre vars': [v.to_init_json() for v in self.__pre_state.fresh],
                 'pre conds': [c.to_json() for c in self.__pre_state.conditions],
                 'pre allocated': [a.to_init_json() for a in self.__pre_state.allocated],
                 'pre ghost values': [g.to_json() for g in self.__pre_state.ghost_values],
                 'pre points tos': [pt.to_json() for pt in self.__pre_state.points_to],
                 'pre points to bitfields': [pt.to_json() for pt in self.__pre_state.points_to_bitfield],
                 'argument vals': [a.to_json() for a in self.__arguments] if self.__arguments is not None else [],
                 'post vars': [v.to_init_json() for v in self.__post_state.fresh],
                 'post conds': [c.to_json() for c in self.__post_state.conditions],
                 'post allocated': [a.to_init_json() for a in self.__post_state.allocated],
                 'post ghost values': [g.to_json() for g in self.__post_state.ghost_values],
                 'post points tos': [pt.to_json() for pt in self.__post_state.points_to],
                 'post points to bitfields': [pt.to_json() for pt in self.__post_state.points_to_bitfield],
                 'return val': self.__returns.to_json()}
            return self.__cached_json
##################################################
# Helpers for value construction
##################################################

# It's tempting to name this `global` to mirror SAWScript's `llvm_global`,
# but that would clash with the Python keyword `global`.
def global_var(name: str) -> SetupVal:
    """Return a setup value referring to the named global variable."""
    return GlobalVarVal(name)
# FIXME Is `Any` too permissive here -- can we be a little more precise?
def cryptol(data : Any) -> 'CryptolTerm':
    """Wrap ``data`` as a Cryptol term usable in setup values and conditions."""
    return CryptolTerm(data)
def array(*elements: SetupVal) -> SetupVal:
    """Construct an array setup value from one or more element values.

    Raises ValueError when called with no elements or a non-SetupVal.
    """
    if len(elements) == 0:
        raise ValueError('An array must be constructed with one or more elements')
    for e in elements:
        if not isinstance(e, SetupVal):
            # Bug fix: the f-prefix was missing, so the message showed the
            # literal '{e!r}' instead of the offending value.
            raise ValueError(f'array expected a SetupVal, but got {e!r}')
    return ArrayVal(list(elements))
def elem(base: SetupVal, index: int) -> SetupVal:
    """Return an lvalue for element ``index`` of aggregate ``base``.

    Raises ValueError on arguments of the wrong type.
    """
    if not isinstance(base, SetupVal):
        # Bug fix: f-prefixes were missing below, so the messages showed
        # the literal placeholder text instead of the offending values.
        raise ValueError(f'elem expected a SetupVal, but got {base!r}')
    if not isinstance(index, int):
        raise ValueError(f'elem expected an int, but got {index!r}')
    return ElemVal(base, index)
def field(base : SetupVal, field_name : str) -> SetupVal:
    """Return an lvalue for field ``field_name`` of struct ``base``.

    Raises ValueError on arguments of the wrong type.
    """
    if not isinstance(base, SetupVal):
        # Bug fix: f-prefixes were missing below, so the messages showed
        # the literal placeholder text instead of the offending values.
        raise ValueError(f'field expected a SetupVal, but got {base!r}')
    if not isinstance(field_name, str):
        raise ValueError(f'field expected a str, but got {field_name!r}')
    return FieldVal(base, field_name)
def global_initializer(name: str) -> SetupVal:
    """Return the initial value of the named global variable.

    Raises ValueError when ``name`` is not a string.
    """
    if not isinstance(name, str):
        # Bug fix: the f-prefix was missing, so the message showed the
        # literal '{name!r}' instead of the offending value.
        raise ValueError(f'global_initializer expected a str naming a global value, but got {name!r}')
    return GlobalInitializerVal(name)
def null() -> SetupVal:
    """Return the null pointer setup value."""
    return NullVal()
def struct(*fields : SetupVal) -> SetupVal:
    """Construct a struct setup value from the given field values.

    Raises ValueError when any argument is not a SetupVal.
    """
    # Loop variable renamed from `field`, which shadowed the `field`
    # helper function defined above.
    for fld in fields:
        if not isinstance(fld, SetupVal):
            # Bug fix: the f-prefix was missing, so the message showed the
            # literal placeholder instead of the offending value.
            raise ValueError(f'struct expected a SetupVal, but got {fld!r}')
    return StructVal(list(fields))
| true | true |
1c327fe199f4fd4dc7cff997cd775b5afdd96687 | 13,546 | py | Python | tests/integration_tests/performance/test_vsock_throughput.py | psalaberria002/firecracker | 86340cb109d7eb1174bb080ef0bcb0aadc80b0f9 | [
"Apache-2.0"
] | 1 | 2020-03-29T00:58:58.000Z | 2020-03-29T00:58:58.000Z | tests/integration_tests/performance/test_vsock_throughput.py | psalaberria002/firecracker | 86340cb109d7eb1174bb080ef0bcb0aadc80b0f9 | [
"Apache-2.0"
] | null | null | null | tests/integration_tests/performance/test_vsock_throughput.py | psalaberria002/firecracker | 86340cb109d7eb1174bb080ef0bcb0aadc80b0f9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""Tests the VSOCK throughput of Firecracker uVMs."""
import os
import json
import logging
import time
import concurrent.futures
import pytest
from conftest import _test_images_s3_bucket
from framework.artifacts import ArtifactCollection, ArtifactSet
from framework.matrix import TestMatrix, TestContext
from framework.builder import MicrovmBuilder
from framework.stats import core, consumer, producer
from framework.stats.baseline import Provider as BaselineProvider
from framework.stats.metadata import DictProvider as DictMetadataProvider
from framework.utils import CpuMap, CmdBuilder, run_cmd, get_cpu_percent, \
DictQuery
from framework.utils_cpuid import get_cpu_model_name
import host_tools.network as net_tools
from integration_tests.performance.configs import defs
from integration_tests.performance.utils import handle_failure, \
dump_test_result
# Load the benchmark configuration.  Bug fix: `json.load(open(...))` leaked
# the file handle until garbage collection (ResourceWarning under -W error);
# a context manager closes it deterministically.
with open(defs.CFG_LOCATION / "vsock_throughput_test_config.json",
          encoding="utf-8") as _config_file:
    CONFIG = json.load(_config_file)

# Seconds to wait for the iperf3 servers to come up before starting clients.
SERVER_STARTUP_TIME = CONFIG["server_startup_time"]

VSOCK_UDS_PATH = "v.sock"    # host-side Unix socket backing the vsock device
IPERF3 = "iperf3-vsock"      # iperf3 binary built with vsock support
THROUGHPUT = "throughput"
DURATION = "duration"
BASE_PORT = 5201             # first iperf3 port; one port per server/client pair

# Keys used when reporting CPU utilization measurements.
CPU_UTILIZATION_VMM = "cpu_utilization_vmm"
CPU_UTILIZATION_VCPUS_TOTAL = "cpu_utilization_vcpus_total"
IPERF3_CPU_UTILIZATION_PERCENT_OUT_TAG = "cpu_utilization_percent"
IPERF3_END_RESULTS_TAG = "end"
TARGET_TAG = "target"
DELTA_PERCENTAGE_TAG = "delta_percentage"
THROUGHPUT_UNIT = "Mbps"
DURATION_UNIT = "seconds"
CPU_UTILIZATION_UNIT = "percentage"
# pylint: disable=R0903
class VsockThroughputBaselineProvider(BaselineProvider):
    """Baseline provider for the vsock throughput performance test."""

    def __init__(self, env_id, iperf_id):
        """Select the baseline set matching the host CPU model, if any."""
        cpu_model_name = get_cpu_model_name()
        candidates = [cpu_baseline
                      for cpu_baseline in
                      CONFIG["hosts"]["instances"]["m5d.metal"]["cpus"]
                      if cpu_baseline["model"] == cpu_model_name]
        # Start from an empty baseline set, then narrow to the CPU match.
        super().__init__(DictQuery(dict()))
        if candidates:
            super().__init__(DictQuery(candidates[0]))
        self._tag = "baselines/{}/" + env_id + "/{}/" + iperf_id

    def get(self, ms_name: str, st_name: str) -> dict:
        """Return the baseline corresponding to the key."""
        baseline = self._baselines.get(self._tag.format(ms_name, st_name))
        if not baseline:
            return None
        target = baseline.get("target")
        delta_percentage = baseline.get("delta_percentage")
        return {
            "target": target,
            "delta": delta_percentage * target / 100,
        }
def produce_iperf_output(basevm,
                         guest_cmd_builder,
                         current_avail_cpu,
                         runtime,
                         omit,
                         load_factor,
                         modes):
    """Produce iperf raw output from server-client connection.

    Starts `load_factor * vcpus` pinned iperf3 servers on the host, then
    the matching clients in the guest, and yields one parsed iperf3 JSON
    result per client.  The last yielded result additionally carries the
    measured VMM/vCPU CPU utilization.
    """
    # Check if we have enough CPUs to pin the servers on the host.
    # The available CPUs are the total minus vcpus, vmm and API threads.
    assert load_factor * basevm.vcpus_count < CpuMap.len() - \
        basevm.vcpus_count - 2

    host_uds_path = os.path.join(
        basevm.path,
        VSOCK_UDS_PATH
    )

    # Start the servers, one per port, each pinned to its own host CPU.
    for server_idx in range(load_factor*basevm.vcpus_count):
        assigned_cpu = CpuMap(current_avail_cpu)
        iperf_server = \
            CmdBuilder(f"taskset --cpu-list {assigned_cpu}") \
            .with_arg(IPERF3) \
            .with_arg("-sD") \
            .with_arg("--vsock") \
            .with_arg("-B", host_uds_path) \
            .with_arg("-p", f"{BASE_PORT + server_idx}") \
            .with_arg("-1") \
            .build()
        run_cmd(iperf_server)
        current_avail_cpu += 1

    # Wait for iperf3 servers to start.
    time.sleep(SERVER_STARTUP_TIME)

    # Start `vcpus` iperf3 clients. We can not use iperf3 parallel streams
    # due to non deterministic results and lack of scaling.
    def spawn_iperf_client(conn, client_idx, mode):
        # Add the port where the iperf3 client is going to send/receive.
        cmd = guest_cmd_builder.with_arg(
            "-p", BASE_PORT + client_idx).with_arg(mode).build()
        # Bind the UDS in the jailer's root.
        basevm.create_jailed_resource(os.path.join(
            basevm.path,
            _make_host_port_path(VSOCK_UDS_PATH, BASE_PORT + client_idx)))
        # Pin the client to one of the guest vCPUs, round-robin.
        pinned_cmd = f"taskset --cpu-list {client_idx % basevm.vcpus_count}" \
                     f" {cmd}"
        rc, stdout, _ = conn.execute_command(pinned_cmd)
        assert rc == 0
        return stdout.read()

    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = list()
        # Sample VMM/vCPU CPU load in parallel with the transfer.
        cpu_load_future = executor.submit(get_cpu_percent,
                                          basevm.jailer_clone_pid,
                                          runtime - SERVER_STARTUP_TIME,
                                          omit)

        modes_len = len(modes)
        ssh_connection = net_tools.SSHConnection(basevm.ssh_config)
        for client_idx in range(load_factor*basevm.vcpus_count):
            futures.append(executor.submit(spawn_iperf_client,
                                           ssh_connection,
                                           client_idx,
                                           # Distribute the modes evenly.
                                           modes[client_idx % modes_len]))

        cpu_load = cpu_load_future.result()
        for future in futures[:-1]:
            res = json.loads(future.result())
            res[IPERF3_END_RESULTS_TAG][
                IPERF3_CPU_UTILIZATION_PERCENT_OUT_TAG] = None
            yield res

        # Attach the real CPU utilization vmm/vcpus to
        # the last iperf3 server-client pair measurements.
        res = json.loads(futures[-1].result())

        # We expect a single emulation thread tagged with `firecracker` name.
        tag = "firecracker"
        assert tag in cpu_load and len(cpu_load[tag]) == 1

        thread_id = list(cpu_load[tag])[0]
        data = cpu_load[tag][thread_id]
        vmm_util = sum(data)/len(data)
        cpu_util_perc = res[IPERF3_END_RESULTS_TAG][
            IPERF3_CPU_UTILIZATION_PERCENT_OUT_TAG] = dict()
        cpu_util_perc[CPU_UTILIZATION_VMM] = vmm_util

        vcpus_util = 0
        for vcpu in range(basevm.vcpus_count):
            # We expect a single fc_vcpu thread tagged with
            # f`fc_vcpu {vcpu}`.
            tag = f"fc_vcpu {vcpu}"
            assert tag in cpu_load and len(cpu_load[tag]) == 1
            thread_id = list(cpu_load[tag])[0]
            data = cpu_load[tag][thread_id]
            vcpus_util += (sum(data)/len(data))

        cpu_util_perc[CPU_UTILIZATION_VCPUS_TOTAL] = vcpus_util

        yield res
def consume_iperf_output(cons, result):
    """Consume iperf3 output result for TCP workload."""
    end_stats = result[IPERF3_END_RESULTS_TAG]
    total_received = end_stats['sum_received']

    duration = float(total_received['seconds'])
    cons.consume_data(DURATION, duration)

    # Throughput is computed at the receiving end, in Mbps.
    recv_bits = int(total_received['bytes']) * 8
    cons.consume_data(THROUGHPUT,
                      round(recv_bits / (1024 * 1024 * duration), 2))

    # Only the last result of a run carries real CPU utilization data.
    cpu_util = end_stats[IPERF3_CPU_UTILIZATION_PERCENT_OUT_TAG]
    if cpu_util:
        cons.consume_stat("Avg", CPU_UTILIZATION_VMM,
                          cpu_util[CPU_UTILIZATION_VMM])
        cons.consume_stat("Avg", CPU_UTILIZATION_VCPUS_TOTAL,
                          cpu_util[CPU_UTILIZATION_VCPUS_TOTAL])
def pipes(basevm, current_avail_cpu, env_id):
    """Producer/Consumer pipes generator.

    Yields one (consumer, producer, tag) triple per combination of mode,
    protocol and payload length configured in CONFIG.
    """
    for mode in CONFIG["modes"]:
        # We run bi-directional tests only on uVM with more than 2 vCPus
        # because we need to pin one iperf3/direction per vCPU, and since we
        # have two directions, we need at least two vCPUs.
        if mode == "bd" and basevm.vcpus_count < 2:
            continue

        for protocol in CONFIG["protocols"]:
            for payload_length in protocol["payload_length"]:
                # Guest-side iperf3 client command; the port and direction
                # flags are appended later by the producer.
                iperf_guest_cmd_builder = CmdBuilder(IPERF3) \
                    .with_arg("--vsock") \
                    .with_arg("-c", 2) \
                    .with_arg("--json") \
                    .with_arg("--omit", protocol["omit"]) \
                    .with_arg("--time", CONFIG["time"])

                if payload_length != "DEFAULT":
                    iperf_guest_cmd_builder = iperf_guest_cmd_builder \
                        .with_arg("--len", f"{payload_length}")

                iperf3_id = f"vsock-p{payload_length}-{mode}"

                cons = consumer.LambdaConsumer(
                    metadata_provider=DictMetadataProvider(
                        CONFIG["measurements"],
                        VsockThroughputBaselineProvider(env_id, iperf3_id)),
                    func=consume_iperf_output
                )

                prod_kwargs = {
                    "guest_cmd_builder": iperf_guest_cmd_builder,
                    "basevm": basevm,
                    "current_avail_cpu": current_avail_cpu,
                    "runtime": CONFIG["time"],
                    "omit": protocol["omit"],
                    "load_factor": CONFIG["load_factor"],
                    "modes": CONFIG["modes"][mode],
                }
                prod = producer.LambdaProducer(produce_iperf_output,
                                               prod_kwargs)
                yield cons, prod, f"{env_id}/{iperf3_id}"
@pytest.mark.nonci
@pytest.mark.timeout(600)
def test_vsock_throughput(bin_cloner_path, results_file_dumper):
    """Test vsock throughput driver for multiple artifacts."""
    logger = logging.getLogger("vsock_throughput")
    artifacts = ArtifactCollection(_test_images_s3_bucket())
    # Exercise both 1-vCPU and 2-vCPU guest configurations.
    microvm_artifacts = ArtifactSet(artifacts.microvms(keyword="1vcpu_1024mb"))
    microvm_artifacts.insert(artifacts.microvms(keyword="2vcpu_1024mb"))
    kernel_artifacts = ArtifactSet(
        artifacts.kernels(keyword="vmlinux-4.14.bin"))
    disk_artifacts = ArtifactSet(artifacts.disks(keyword="ubuntu"))

    # Create a test context and add builder, logger, network.
    test_context = TestContext()
    test_context.custom = {
        'builder': MicrovmBuilder(bin_cloner_path),
        'logger': logger,
        'name': 'vsock_throughput',
        'results_file_dumper': results_file_dumper
    }

    # Run `iperf_workload` once per (microvm, kernel, disk) combination.
    test_matrix = TestMatrix(context=test_context,
                             artifact_sets=[
                                 microvm_artifacts,
                                 kernel_artifacts,
                                 disk_artifacts
                             ])
    test_matrix.run_test(iperf_workload)
def iperf_workload(context):
    """Run a statistic exercise.

    Builds a microVM from the context's artifacts, attaches a vsock device,
    pins all threads, then runs every producer/consumer pipe and dumps the
    results.
    """
    vm_builder = context.custom['builder']
    logger = context.custom["logger"]
    file_dumper = context.custom['results_file_dumper']

    # Create a rw copy artifact.
    rw_disk = context.disk.copy()
    # Get ssh key from read-only artifact.
    ssh_key = context.disk.ssh_key()
    # Create a fresh microvm from artifacts.
    vm_instance = vm_builder.build(kernel=context.kernel,
                                   disks=[rw_disk],
                                   ssh_key=ssh_key,
                                   config=context.microvm)
    basevm = vm_instance.vm
    # Create a vsock device
    basevm.vsock.put(
        vsock_id="vsock0",
        guest_cid=3,
        uds_path="/" + VSOCK_UDS_PATH
    )
    basevm.start()

    st_core = core.Core(name="vsock_throughput",
                        iterations=1,
                        custom={'cpu_model_name': get_cpu_model_name()})

    # Check if the needed CPU cores are available. We have the API thread, VMM
    # thread and then one thread for each configured vCPU.
    assert CpuMap.len() >= 2 + basevm.vcpus_count

    # Pin uVM threads to physical cores.
    current_avail_cpu = 0
    assert basevm.pin_vmm(current_avail_cpu), \
        "Failed to pin firecracker thread."
    current_avail_cpu += 1
    assert basevm.pin_api(current_avail_cpu), \
        "Failed to pin fc_api thread."
    for i in range(basevm.vcpus_count):
        current_avail_cpu += 1
        assert basevm.pin_vcpu(i, current_avail_cpu), \
            f"Failed to pin fc_vcpu {i} thread."

    logger.info("Testing with microvm: \"{}\", kernel {}, disk {}"
                .format(context.microvm.name(),
                        context.kernel.name(),
                        context.disk.name()))

    for cons, prod, tag in \
            pipes(basevm,
                  current_avail_cpu + 1,
                  f"{context.kernel.name()}/{context.disk.name()}/"
                  f"{context.microvm.name()}"):
        st_core.add_pipe(prod, cons, tag)

    # Start running the commands on guest, gather results and verify pass
    # criteria.
    try:
        result = st_core.run_exercise()
    except core.CoreException as err:
        handle_failure(file_dumper, err)
        # Bug fix: without this return, execution fell through to
        # `dump_test_result` with `result` unbound (NameError) whenever
        # `handle_failure` did not raise.
        return

    dump_test_result(file_dumper, result)
def _make_host_port_path(uds_path, port):
"""Build the path for a Unix socket, mapped to host vsock port `port`."""
return "{}_{}".format(uds_path, port)
| 38.157746 | 79 | 0.614573 |
import os
import json
import logging
import time
import concurrent.futures
import pytest
from conftest import _test_images_s3_bucket
from framework.artifacts import ArtifactCollection, ArtifactSet
from framework.matrix import TestMatrix, TestContext
from framework.builder import MicrovmBuilder
from framework.stats import core, consumer, producer
from framework.stats.baseline import Provider as BaselineProvider
from framework.stats.metadata import DictProvider as DictMetadataProvider
from framework.utils import CpuMap, CmdBuilder, run_cmd, get_cpu_percent, \
DictQuery
from framework.utils_cpuid import get_cpu_model_name
import host_tools.network as net_tools
from integration_tests.performance.configs import defs
from integration_tests.performance.utils import handle_failure, \
dump_test_result
# Load the benchmark configuration.  Bug fix: `json.load(open(...))` leaked
# the file handle until garbage collection (ResourceWarning under -W error);
# a context manager closes it deterministically.
with open(defs.CFG_LOCATION / "vsock_throughput_test_config.json",
          encoding="utf-8") as _config_file:
    CONFIG = json.load(_config_file)

# Seconds to wait for the iperf3 servers to come up before starting clients.
SERVER_STARTUP_TIME = CONFIG["server_startup_time"]

VSOCK_UDS_PATH = "v.sock"    # host-side Unix socket backing the vsock device
IPERF3 = "iperf3-vsock"      # iperf3 binary built with vsock support
THROUGHPUT = "throughput"
DURATION = "duration"
BASE_PORT = 5201             # first iperf3 port; one port per server/client pair

# Keys used when reporting CPU utilization measurements.
CPU_UTILIZATION_VMM = "cpu_utilization_vmm"
CPU_UTILIZATION_VCPUS_TOTAL = "cpu_utilization_vcpus_total"
IPERF3_CPU_UTILIZATION_PERCENT_OUT_TAG = "cpu_utilization_percent"
IPERF3_END_RESULTS_TAG = "end"
TARGET_TAG = "target"
DELTA_PERCENTAGE_TAG = "delta_percentage"
THROUGHPUT_UNIT = "Mbps"
DURATION_UNIT = "seconds"
CPU_UTILIZATION_UNIT = "percentage"
class VsockThroughputBaselineProvider(BaselineProvider):
    """Baseline provider for the vsock throughput performance test."""
    def __init__(self, env_id, iperf_id):
        """Select the baseline set matching the host CPU model, if any."""
        cpu_model_name = get_cpu_model_name()
        baselines = list(filter(
            lambda cpu_baseline: cpu_baseline["model"] == cpu_model_name,
            CONFIG["hosts"]["instances"]["m5d.metal"]["cpus"]))
        # Default to an empty baseline set, then narrow to the CPU match.
        super().__init__(DictQuery(dict()))
        if len(baselines) > 0:
            super().__init__(DictQuery(baselines[0]))
        self._tag = "baselines/{}/" + env_id + "/{}/" + iperf_id
    def get(self, ms_name: str, st_name: str) -> dict:
        """Return the baseline corresponding to the key, or None if absent."""
        key = self._tag.format(ms_name, st_name)
        baseline = self._baselines.get(key)
        if baseline:
            target = baseline.get("target")
            delta_percentage = baseline.get("delta_percentage")
            return {
                "target": target,
                "delta": delta_percentage * target / 100,
            }
        return None
def produce_iperf_output(basevm,
                         guest_cmd_builder,
                         current_avail_cpu,
                         runtime,
                         omit,
                         load_factor,
                         modes):
    """Produce iperf raw output from server-client connection.

    Starts `load_factor * vcpus` pinned iperf3 servers on the host, then
    the matching clients in the guest, and yields one parsed iperf3 JSON
    result per client.  The last yielded result additionally carries the
    measured VMM/vCPU CPU utilization.
    """
    # Require enough host CPUs to pin the servers: the total minus the
    # vCPU, VMM and API threads must cover all servers.
    assert load_factor * basevm.vcpus_count < CpuMap.len() - \
        basevm.vcpus_count - 2
    host_uds_path = os.path.join(
        basevm.path,
        VSOCK_UDS_PATH
    )
    # Start the servers, one per port, each pinned to its own host CPU.
    for server_idx in range(load_factor*basevm.vcpus_count):
        assigned_cpu = CpuMap(current_avail_cpu)
        iperf_server = \
            CmdBuilder(f"taskset --cpu-list {assigned_cpu}") \
            .with_arg(IPERF3) \
            .with_arg("-sD") \
            .with_arg("--vsock") \
            .with_arg("-B", host_uds_path) \
            .with_arg("-p", f"{BASE_PORT + server_idx}") \
            .with_arg("-1") \
            .build()
        run_cmd(iperf_server)
        current_avail_cpu += 1
    # Wait for the iperf3 servers to come up before launching clients.
    time.sleep(SERVER_STARTUP_TIME)
    def spawn_iperf_client(conn, client_idx, mode):
        # Run one guest-side iperf3 client on its port, pinned round-robin
        # to a guest vCPU, and return its raw JSON output.
        cmd = guest_cmd_builder.with_arg(
            "-p", BASE_PORT + client_idx).with_arg(mode).build()
        # Bind the UDS in the jailer's root.
        basevm.create_jailed_resource(os.path.join(
            basevm.path,
            _make_host_port_path(VSOCK_UDS_PATH, BASE_PORT + client_idx)))
        pinned_cmd = f"taskset --cpu-list {client_idx % basevm.vcpus_count}" \
                     f" {cmd}"
        rc, stdout, _ = conn.execute_command(pinned_cmd)
        assert rc == 0
        return stdout.read()
    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = list()
        # Sample VMM/vCPU CPU load in parallel with the transfer.
        cpu_load_future = executor.submit(get_cpu_percent,
                                          basevm.jailer_clone_pid,
                                          runtime - SERVER_STARTUP_TIME,
                                          omit)
        modes_len = len(modes)
        ssh_connection = net_tools.SSHConnection(basevm.ssh_config)
        for client_idx in range(load_factor*basevm.vcpus_count):
            futures.append(executor.submit(spawn_iperf_client,
                                           ssh_connection,
                                           client_idx,
                                           # Distribute the modes evenly.
                                           modes[client_idx % modes_len]))
        cpu_load = cpu_load_future.result()
        for future in futures[:-1]:
            res = json.loads(future.result())
            res[IPERF3_END_RESULTS_TAG][
                IPERF3_CPU_UTILIZATION_PERCENT_OUT_TAG] = None
            yield res
        # Attach the real CPU utilization vmm/vcpus to
        # the last iperf3 server-client pair measurements.
        res = json.loads(futures[-1].result())
        # We expect a single emulation thread tagged with `firecracker` name.
        tag = "firecracker"
        assert tag in cpu_load and len(cpu_load[tag]) == 1
        thread_id = list(cpu_load[tag])[0]
        data = cpu_load[tag][thread_id]
        vmm_util = sum(data)/len(data)
        cpu_util_perc = res[IPERF3_END_RESULTS_TAG][
            IPERF3_CPU_UTILIZATION_PERCENT_OUT_TAG] = dict()
        cpu_util_perc[CPU_UTILIZATION_VMM] = vmm_util
        vcpus_util = 0
        for vcpu in range(basevm.vcpus_count):
            # We expect a single fc_vcpu thread tagged with
            # f`fc_vcpu {vcpu}`.
            tag = f"fc_vcpu {vcpu}"
            assert tag in cpu_load and len(cpu_load[tag]) == 1
            thread_id = list(cpu_load[tag])[0]
            data = cpu_load[tag][thread_id]
            vcpus_util += (sum(data)/len(data))
        cpu_util_perc[CPU_UTILIZATION_VCPUS_TOTAL] = vcpus_util
        yield res
def consume_iperf_output(cons, result):
    """Consume one iperf3 JSON result: duration, throughput, CPU utilization."""
    total_received = result[IPERF3_END_RESULTS_TAG]['sum_received']
    duration = float(total_received['seconds'])
    cons.consume_data(DURATION, duration)
    # Throughput is computed at the receiving end, in Mbps.
    total_recv_bytes = int(total_received['bytes'])
    tput = round((total_recv_bytes*8) / (1024*1024*duration), 2)
    cons.consume_data(THROUGHPUT, tput)
    # Only the last result of a run carries real CPU utilization data.
    cpu_util = result[IPERF3_END_RESULTS_TAG][
        IPERF3_CPU_UTILIZATION_PERCENT_OUT_TAG]
    if cpu_util:
        cpu_util_host = cpu_util[CPU_UTILIZATION_VMM]
        cpu_util_guest = cpu_util[CPU_UTILIZATION_VCPUS_TOTAL]
        cons.consume_stat("Avg", CPU_UTILIZATION_VMM, cpu_util_host)
        cons.consume_stat("Avg", CPU_UTILIZATION_VCPUS_TOTAL, cpu_util_guest)
def pipes(basevm, current_avail_cpu, env_id):
    """Yield one (consumer, producer, tag) triple per configured test case.

    Cases are the cross product of CONFIG's modes, protocols and payload
    lengths.
    """
    for mode in CONFIG["modes"]:
        # We run bi-directional tests only on uVM with more than 2 vCPus
        # because we need to pin one iperf3/direction per vCPU, and since we
        # have two directions, we need at least two vCPUs.
        if mode == "bd" and basevm.vcpus_count < 2:
            continue
        for protocol in CONFIG["protocols"]:
            for payload_length in protocol["payload_length"]:
                # Guest-side iperf3 client command; the port and direction
                # flags are appended later by the producer.
                iperf_guest_cmd_builder = CmdBuilder(IPERF3) \
                    .with_arg("--vsock") \
                    .with_arg("-c", 2) \
                    .with_arg("--json") \
                    .with_arg("--omit", protocol["omit"]) \
                    .with_arg("--time", CONFIG["time"])
                if payload_length != "DEFAULT":
                    iperf_guest_cmd_builder = iperf_guest_cmd_builder \
                        .with_arg("--len", f"{payload_length}")
                iperf3_id = f"vsock-p{payload_length}-{mode}"
                cons = consumer.LambdaConsumer(
                    metadata_provider=DictMetadataProvider(
                        CONFIG["measurements"],
                        VsockThroughputBaselineProvider(env_id, iperf3_id)),
                    func=consume_iperf_output
                )
                prod_kwargs = {
                    "guest_cmd_builder": iperf_guest_cmd_builder,
                    "basevm": basevm,
                    "current_avail_cpu": current_avail_cpu,
                    "runtime": CONFIG["time"],
                    "omit": protocol["omit"],
                    "load_factor": CONFIG["load_factor"],
                    "modes": CONFIG["modes"][mode],
                }
                prod = producer.LambdaProducer(produce_iperf_output,
                                               prod_kwargs)
                yield cons, prod, f"{env_id}/{iperf3_id}"
@pytest.mark.nonci
@pytest.mark.timeout(600)
def test_vsock_throughput(bin_cloner_path, results_file_dumper):
    """Run the vsock throughput exercise over multiple artifact combinations."""
    logger = logging.getLogger("vsock_throughput")
    artifacts = ArtifactCollection(_test_images_s3_bucket())
    # Exercise both 1-vCPU and 2-vCPU guest configurations.
    microvm_artifacts = ArtifactSet(artifacts.microvms(keyword="1vcpu_1024mb"))
    microvm_artifacts.insert(artifacts.microvms(keyword="2vcpu_1024mb"))
    kernel_artifacts = ArtifactSet(
        artifacts.kernels(keyword="vmlinux-4.14.bin"))
    disk_artifacts = ArtifactSet(artifacts.disks(keyword="ubuntu"))
    # Create a test context and add builder, logger, network.
    test_context = TestContext()
    test_context.custom = {
        'builder': MicrovmBuilder(bin_cloner_path),
        'logger': logger,
        'name': 'vsock_throughput',
        'results_file_dumper': results_file_dumper
    }
    # Run `iperf_workload` once per (microvm, kernel, disk) combination.
    test_matrix = TestMatrix(context=test_context,
                             artifact_sets=[
                                 microvm_artifacts,
                                 kernel_artifacts,
                                 disk_artifacts
                             ])
    test_matrix.run_test(iperf_workload)
def iperf_workload(context):
    """Run the vsock iperf3 workload on one microvm built from `context`.

    Boots a microvm with a vsock device attached, pins the firecracker, API
    and vCPU threads to dedicated physical CPUs, wires up the iperf3
    producer/consumer pipes and runs the exercise, dumping results (or the
    failure) through the context's results file dumper.
    """
    vm_builder = context.custom['builder']
    logger = context.custom["logger"]
    file_dumper = context.custom['results_file_dumper']
    # Create a rw copy artifact.
    rw_disk = context.disk.copy()
    # Get ssh key from read-only artifact.
    ssh_key = context.disk.ssh_key()
    # Create a fresh microvm from artifacts.
    vm_instance = vm_builder.build(kernel=context.kernel,
                                   disks=[rw_disk],
                                   ssh_key=ssh_key,
                                   config=context.microvm)
    basevm = vm_instance.vm
    # Create a vsock device
    basevm.vsock.put(
        vsock_id="vsock0",
        guest_cid=3,
        uds_path="/" + VSOCK_UDS_PATH
    )
    basevm.start()
    st_core = core.Core(name="vsock_throughput",
                        iterations=1,
                        custom={'cpu_model_name': get_cpu_model_name()})
    # Check if the needed CPU cores are available. We have the API thread, VMM
    # thread and then one thread for each configured vCPU.
    assert CpuMap.len() >= 2 + basevm.vcpus_count
    # Pin uVM threads to physical cores.
    current_avail_cpu = 0
    assert basevm.pin_vmm(current_avail_cpu), \
        "Failed to pin firecracker thread."
    current_avail_cpu += 1
    assert basevm.pin_api(current_avail_cpu), \
        "Failed to pin fc_api thread."
    for i in range(basevm.vcpus_count):
        current_avail_cpu += 1
        assert basevm.pin_vcpu(i, current_avail_cpu), \
            f"Failed to pin fc_vcpu {i} thread."
    logger.info("Testing with microvm: \"{}\", kernel {}, disk {}"
                .format(context.microvm.name(),
                        context.kernel.name(),
                        context.disk.name()))
    for cons, prod, tag in \
            pipes(basevm,
                  current_avail_cpu + 1,
                  f"{context.kernel.name()}/{context.disk.name()}/"
                  f"{context.microvm.name()}"):
        st_core.add_pipe(prod, cons, tag)
    # Start running the commands on guest, gather results and verify pass
    # criteria.
    try:
        result = st_core.run_exercise()
    except core.CoreException as err:
        handle_failure(file_dumper, err)
    else:
        # Bug fix: this used to run unconditionally, so if handle_failure()
        # ever returned instead of raising, `result` would be unbound here
        # and raise a confusing NameError. Dump only on success.
        dump_test_result(file_dumper, result)
def _make_host_port_path(uds_path, port):
return "{}_{}".format(uds_path, port)
| true | true |
1c328000667faa803cdf321cb49c6f78d6edb454 | 2,646 | py | Python | letsencrypt/constants.py | diafygi/letsencrypt | abe1aa999af7b45e28958a7dcdd8fcc0daa310dd | [
"Apache-2.0"
] | 3 | 2019-03-20T08:32:11.000Z | 2021-02-04T13:03:38.000Z | letsencrypt/constants.py | diafygi/letsencrypt | abe1aa999af7b45e28958a7dcdd8fcc0daa310dd | [
"Apache-2.0"
] | null | null | null | letsencrypt/constants.py | diafygi/letsencrypt | abe1aa999af7b45e28958a7dcdd8fcc0daa310dd | [
"Apache-2.0"
] | null | null | null | """Let's Encrypt constants."""
import logging
from acme import challenges
SETUPTOOLS_PLUGINS_ENTRY_POINT = "letsencrypt.plugins"
"""Setuptools entry point group name for plugins."""
CLI_DEFAULTS = dict(
    config_files=["/etc/letsencrypt/cli.ini"],
    # Floor division keeps this an int (-3) on every Python version; with a
    # plain "/" the value silently becomes the float -3.0 under Python 3,
    # while a verbosity *count* is expected to be an integer.
    verbose_count=-(logging.WARNING // 10),
    server="https://www.letsencrypt-demo.org/acme/new-reg",
    rsa_key_size=2048,
    rollback_checkpoints=0,
    config_dir="/etc/letsencrypt",
    work_dir="/var/lib/letsencrypt",
    logs_dir="/var/log/letsencrypt",
    no_verify_ssl=False,
    dvsni_port=challenges.DVSNI.PORT,
    auth_cert_path="./cert.pem",
    auth_chain_path="./chain.pem",
)
"""Defaults for CLI flags and `.IConfig` attributes."""
RENEWER_DEFAULTS = dict(
    renewer_enabled="yes",
    renew_before_expiry="30 days",
    deploy_before_expiry="20 days",
)
"""Defaults for renewer script."""
EXCLUSIVE_CHALLENGES = frozenset([frozenset([
    challenges.DVSNI, challenges.SimpleHTTP])])
"""Mutually exclusive challenges."""
ENHANCEMENTS = ["redirect", "http-header", "ocsp-stapling", "spdy"]
"""List of possible :class:`letsencrypt.interfaces.IInstaller`
enhancements.
List of expected options parameters:
- redirect: None
- http-header: TODO
- ocsp-stapling: TODO
- spdy: TODO
"""
ARCHIVE_DIR = "archive"
"""Archive directory, relative to `IConfig.config_dir`."""
CONFIG_DIRS_MODE = 0o755
"""Directory mode for ``.IConfig.config_dir`` et al."""
ACCOUNTS_DIR = "accounts"
"""Directory where all accounts are saved."""
ACCOUNT_KEYS_DIR = "keys"
"""Directory where account keys are saved. Relative to `ACCOUNTS_DIR`."""
BACKUP_DIR = "backups"
"""Directory (relative to `IConfig.work_dir`) where backups are kept."""
CERT_DIR = "certs"
"""See `.IConfig.cert_dir`."""
CERT_KEY_BACKUP_DIR = "keys-certs"
"""Directory where all certificates and keys are stored (relative to
`IConfig.work_dir`). Used for easy revocation."""
IN_PROGRESS_DIR = "IN_PROGRESS"
"""Directory used before a permanent checkpoint is finalized (relative to
`IConfig.work_dir`)."""
KEY_DIR = "keys"
"""Directory (relative to `IConfig.config_dir`) where keys are saved."""
LIVE_DIR = "live"
"""Live directory, relative to `IConfig.config_dir`."""
TEMP_CHECKPOINT_DIR = "temp_checkpoint"
"""Temporary checkpoint directory (relative to `IConfig.work_dir`)."""
REC_TOKEN_DIR = "recovery_tokens"
"""Directory where all recovery tokens are saved (relative to
`IConfig.work_dir`)."""
RENEWAL_CONFIGS_DIR = "configs"
"""Renewal configs directory, relative to `IConfig.config_dir`."""
RENEWER_CONFIG_FILENAME = "renewer.conf"
"""Renewer config file name (relative to `IConfig.config_dir`)."""
| 27.278351 | 73 | 0.728269 | import logging
from acme import challenges
SETUPTOOLS_PLUGINS_ENTRY_POINT = "letsencrypt.plugins"
CLI_DEFAULTS = dict(
config_files=["/etc/letsencrypt/cli.ini"],
verbose_count=-(logging.WARNING / 10),
server="https://www.letsencrypt-demo.org/acme/new-reg",
rsa_key_size=2048,
rollback_checkpoints=0,
config_dir="/etc/letsencrypt",
work_dir="/var/lib/letsencrypt",
logs_dir="/var/log/letsencrypt",
no_verify_ssl=False,
dvsni_port=challenges.DVSNI.PORT,
auth_cert_path="./cert.pem",
auth_chain_path="./chain.pem",
)
RENEWER_DEFAULTS = dict(
renewer_enabled="yes",
renew_before_expiry="30 days",
deploy_before_expiry="20 days",
)
EXCLUSIVE_CHALLENGES = frozenset([frozenset([
challenges.DVSNI, challenges.SimpleHTTP])])
ENHANCEMENTS = ["redirect", "http-header", "ocsp-stapling", "spdy"]
ARCHIVE_DIR = "archive"
CONFIG_DIRS_MODE = 0o755
ACCOUNTS_DIR = "accounts"
ACCOUNT_KEYS_DIR = "keys"
BACKUP_DIR = "backups"
CERT_DIR = "certs"
CERT_KEY_BACKUP_DIR = "keys-certs"
IN_PROGRESS_DIR = "IN_PROGRESS"
KEY_DIR = "keys"
LIVE_DIR = "live"
TEMP_CHECKPOINT_DIR = "temp_checkpoint"
REC_TOKEN_DIR = "recovery_tokens"
RENEWAL_CONFIGS_DIR = "configs"
RENEWER_CONFIG_FILENAME = "renewer.conf"
| true | true |
1c32804e4b4371c9580b161cc812f77631eeb2ff | 105 | py | Python | gunicorn.conf.py | Moustikitos/ark-zen | f02ec0c5565634aaf154c46173572402bf414817 | [
"MIT"
] | 3 | 2018-12-24T09:45:05.000Z | 2020-04-23T09:00:00.000Z | gunicorn.conf.py | Moustikitos/zen | bd11216cd2c891804d6ec4444f9321137b0f54e4 | [
"MIT"
] | 6 | 2018-05-13T13:12:17.000Z | 2018-05-28T19:36:16.000Z | gunicorn.conf.py | Moustikitos/zen | bd11216cd2c891804d6ec4444f9321137b0f54e4 | [
"MIT"
] | 1 | 2018-04-07T12:03:43.000Z | 2018-04-07T12:03:43.000Z | # -*- coding:utf-8 -*-
def post_worker_init(worker):
    """Gunicorn server hook: called in each worker process just after it has
    initialized. Intentionally a no-op placeholder."""
    pass
def on_exit(server):
    """Gunicorn server hook: called in the master process just before it
    exits. Intentionally a no-op placeholder."""
    pass
| 10.5 | 30 | 0.561905 |
def post_worker_init(worker):
pass
def on_exit(server):
pass
| true | true |
1c328154f063336c316c04e0f4be1661f6341130 | 895 | py | Python | part3/webapp/starter/pypi_web_mongodb_s/pypi_web_mongodb/data/mongo_setup.py | israelrico007/build-pypi-mongodb-webcast-series | b5960e2619ba4527e27cdb8e6efd7de2bb6c7448 | [
"MIT"
] | 25 | 2018-05-30T18:03:35.000Z | 2021-11-27T19:03:16.000Z | part3/webapp/starter/pypi_web_mongodb_s/pypi_web_mongodb/data/mongo_setup.py | israelrico007/build-pypi-mongodb-webcast-series | b5960e2619ba4527e27cdb8e6efd7de2bb6c7448 | [
"MIT"
] | 3 | 2018-10-18T11:08:32.000Z | 2019-12-26T16:42:26.000Z | part3/webapp/starter/pypi_web_mongodb_s/pypi_web_mongodb/data/mongo_setup.py | israelrico007/build-pypi-mongodb-webcast-series | b5960e2619ba4527e27cdb8e6efd7de2bb6c7448 | [
"MIT"
] | 10 | 2018-05-30T18:03:49.000Z | 2021-11-27T19:03:19.000Z | import ssl
import mongoengine
def global_init(user=None, password=None, port=27017, server='localhost', use_ssl=True, db_name='pypi_demo'):
    """Register the 'core' mongoengine connection.

    Without credentials, registers an unauthenticated connection to the
    default local server. With credentials, builds an authenticated
    (optionally SSL) connection, normalizing a remote host to a
    'mongodb+srv://' URL, and logs the settings with the password masked.
    """
    if not (user or password):
        # Local development: no auth, default host/port.
        print(" --> Registering dev connection")
        mongoengine.register_connection(alias='core', name=db_name)
        return

    if server != 'localhost' and not server.startswith('mongodb+srv://'):
        server = 'mongodb+srv://' + server
    conn_settings = {
        'username': user,
        'password': password,
        'host': server,
        'port': port,
        'authentication_source': 'admin',
        'authentication_mechanism': 'SCRAM-SHA-1',
        'ssl': use_ssl,
        # NOTE(review): certificate verification is disabled here; fine for a
        # demo, but a production deployment should verify server certs.
        'ssl_cert_reqs': ssl.CERT_NONE,
    }
    mongoengine.register_connection(alias='core', name=db_name, **conn_settings)
    conn_settings['password'] = '*************'  # mask before logging
    print(" --> Registering prod connection: {}".format(conn_settings))
| 34.423077 | 109 | 0.597765 | import ssl
import mongoengine
def global_init(user=None, password=None, port=27017, server='localhost', use_ssl=True, db_name='pypi_demo'):
if user or password:
if server != 'localhost' and not server.startswith('mongodb+srv://'):
server = 'mongodb+srv://' + server
data = dict(
username=user,
password=password,
host=server,
port=port,
authentication_source='admin',
authentication_mechanism='SCRAM-SHA-1',
ssl=use_ssl,
ssl_cert_reqs=ssl.CERT_NONE)
mongoengine.register_connection(alias='core', name=db_name, **data)
data['password'] = '*************'
print(" --> Registering prod connection: {}".format(data))
else:
print(" --> Registering dev connection")
mongoengine.register_connection(alias='core', name=db_name)
| true | true |
1c328257d3f7cf8f22ac061f8036f5d6e4c832fc | 7,694 | py | Python | is452/routes/insurance_offchain.py | is452-griffininsurance/backend-service | f6d97bad153e7ba705f727e00f18cb027a4c3d3b | [
"MIT"
] | null | null | null | is452/routes/insurance_offchain.py | is452-griffininsurance/backend-service | f6d97bad153e7ba705f727e00f18cb027a4c3d3b | [
"MIT"
] | null | null | null | is452/routes/insurance_offchain.py | is452-griffininsurance/backend-service | f6d97bad153e7ba705f727e00f18cb027a4c3d3b | [
"MIT"
] | null | null | null | from flask import request, jsonify
from is452 import app
from bson.objectid import ObjectId
from pymongo import MongoClient
from datetime import datetime
import json
# Module-wide MongoDB client.
# SECURITY: the connection string embeds admin credentials directly in source
# control; they should be moved to an environment variable / secret store and
# rotated.
client = MongoClient("mongodb+srv://is452_admin:mianbaochaoren@is452-project.n0htb.mongodb.net/insurance?retryWrites=true&w=majority")
# Primary collection: one document per insurance contract.
collection = client.insurance.insurances
# Maps an insurance type to the name of its date field in stored documents.
date_elements = {
    "flight_delay": "flight_date",
    "car": "expiry_date"
}
@app.route("/create_insurance", methods=['POST'])
def create_insurance():
    """
    Structure of incoming json data
    {
    'contract_address': 'xxx', (type: str)
    'flight_no': 'xxx' (type: str),
    'flight_date': 'YYYY-MM-DD' (type: str),
    'coverage_amount': 1234.56 (type: float),
    'premium_amount': 1234.56 (type: float),
    'insured_wallet_addr': 'xxxx' (type: str)
    }
    """
    # Insurance type ("flight_delay" or "car") is taken from the query string,
    # not the JSON body; None if the caller omitted it.
    insurance_type = request.args.get("insurance_type", None)
    insurance_data = request.get_json()
    """
    Structure of document to be stored
    {
    'contract_address': 'xxx', (type: str)
    'flight_no': 'xxx' (type: str),
    'flight_date': 'YYYY-MM-DD' (type: datetime),
    'coverage_amount': 1234.56 (type: float),
    'premium_amount': 1234.56 (type: float),
    'insured_wallet_addr': 'xxxx' (type: str),
    'insurers': [] (type: str),
    'status': 'open' (type:str)
    }
    """
    if insurance_type == "flight_delay":
        insurance_data['flight_date'] = datetime.strptime(insurance_data['flight_date'], "%Y-%m-%d")
    else:
        # NOTE(review): for non-flight types the expiry date is parsed from the
        # incoming 'flight_date' field (the frontend appears to reuse that
        # field name), and the raw 'flight_date' string stays in the stored
        # document as well — confirm this is intentional.
        insurance_data['expiry_date'] = datetime.strptime(insurance_data['flight_date'], "%Y-%m-%d")
    # Server-controlled bookkeeping fields, never trusted from the client.
    insurance_data['insurers'] = []
    insurance_data['status'] = 'open'
    insurance_data['insurance_type'] = insurance_type
    response = collection.insert_one(insurance_data)
    return {
        "status": "Insurance record created",
        "insurance_id": str(response.inserted_id)
    }
@app.route("/get_all_insurances", methods=['GET'])
def get_all_insurances():
    """List insurances filtered by ?insurance_type=...&status=....

    Each returned document gets its ObjectId stringified, its type-specific
    date field formatted as YYYY-MM-DD, and a derived 'percent_insured'
    ratio (sum of insurer stakes / max_insured_amount, rounded to 2 places).
    """
    insurance_type = request.args.get("insurance_type", None)
    status = request.args.get("status", None)
    insurances = collection.find({"$and": [{"insurance_type": insurance_type}, {"status": status}]})
    # Raises KeyError (-> HTTP 500) for an unknown or missing insurance_type.
    date_element = date_elements[insurance_type]
    transformed_insurances = []
    for i in insurances:
        i['_id'] = str(i['_id'])
        i[date_element] = i[date_element].strftime("%Y-%m-%d")
        cum_insured_amt = 0
        for insurer in i["insurers"]:
            cum_insured_amt += float(insurer['insuring_amount'])
        if cum_insured_amt > 0:
            i['percent_insured'] = round(cum_insured_amt / float(i['max_insured_amount']), 2)
        else:
            i['percent_insured'] = round(cum_insured_amt, 2)
        transformed_insurances.append(i)
    # Bug fix: the old code tested the pymongo Cursor itself, which is always
    # truthy, so the "no insurances" branch below was unreachable.
    if transformed_insurances:
        return {
            "status": "All insurances has been retrieved",
            "insurances": transformed_insurances
        }
    return {
        "status": "No insurances in the system at the moment"
    }
@app.route("/get_insurance_by_id", methods=['GET'])
def get_insurance_by_id():
    """Fetch a single insurance document by ?insurance_id=<ObjectId hex>.

    Returns the formatted document, or a not-found status payload.
    Note: a malformed/missing id still raises inside ObjectId() (-> HTTP 500).
    """
    insurance_id = request.args.get("insurance_id", None)
    insurance = collection.find_one({"_id": ObjectId(insurance_id)})
    if insurance:
        # Bug fix: the date-field lookup used to happen *before* the None
        # check, so an unknown id crashed with a TypeError instead of
        # returning the "does not exist" payload below.
        date_element = date_elements[insurance['insurance_type']]
        insurance["_id"] = str(insurance["_id"])
        insurance[date_element] = insurance[date_element].strftime("%Y-%m-%d")
        cum_insured_amt = 0
        for insurer in insurance["insurers"]:
            cum_insured_amt += float(insurer['insuring_amount'])
        if cum_insured_amt > 0:
            insurance['percent_insured'] = round(cum_insured_amt / float(insurance['max_insured_amount']), 2)
        else:
            insurance['percent_insured'] = round(cum_insured_amt, 2)
        return {
            "status": "Found request",
            "insurance": insurance
        }
    return {
        "status": "Insurance ID does not exist in database"
    }
# filter requests by user
def _format_insurance_for_user(doc):
    """Format one insurance document for the by-user listing (mutates doc).

    Stringifies the ObjectId, formats the type-specific date field as
    YYYY-MM-DD and adds the derived 'percent_insured' ratio, mirroring the
    behavior of get_all_insurances / get_insurance_by_id.
    """
    doc['_id'] = str(doc['_id'])
    # Bug fix / consistency: the old code formatted 'flight_date'
    # unconditionally, which crashed for "car" documents (their datetime
    # lives in 'expiry_date'; 'flight_date' is a raw string there). Use the
    # per-type field like the other endpoints do.
    date_element = date_elements[doc['insurance_type']]
    doc[date_element] = doc[date_element].strftime("%Y-%m-%d")
    cum_insured_amt = 0
    for insurer in doc["insurers"]:
        cum_insured_amt += float(insurer['insuring_amount'])
    if cum_insured_amt > 0:
        doc['percent_insured'] = round(cum_insured_amt / float(doc['max_insured_amount']), 2)
    else:
        doc['percent_insured'] = round(cum_insured_amt, 2)
    return doc
@app.route("/get_insurance_by_user", methods=['GET'])
def get_insurance_by_user():
    """Return the insurances tied to ?user_wallet_addr=....

    'insured_insurances' are contracts where the wallet is the insured
    party; 'insuring_insurances' are contracts where the wallet appears in
    the insurers list. Both lists use the same formatting as the other
    listing endpoints.
    """
    user_wallet_addr = request.args.get("user_wallet_addr", None)
    insured_insurances = [
        _format_insurance_for_user(doc)
        for doc in collection.find({"insured_wallet_addr": user_wallet_addr})
    ]
    insuring_insurances = [
        _format_insurance_for_user(doc)
        for doc in collection.find({"insurers.wallet_addr": user_wallet_addr})
    ]
    return {
        "insured_insurances": insured_insurances,
        "insuring_insurances": insuring_insurances
    }
@app.route("/add_insurer", methods=["POST"])
def add_insurer():
    """Append an insurer to a contract and record the premium transfer.

    The contract is selected with ?contract_address=...; the request body is
    the insurer payload:
    {
    "wallet_addr": "xxx", (type: str)
    "insuring_amount": 123.56 (type: float)
    }
    """
    contract_address = request.args.get("contract_address", None)
    insurer_payload = request.get_json()
    match_filter = {"contract_address": contract_address}
    push_insurer = {"$addToSet": {"insurers": insurer_payload}}
    collection.find_one_and_update(match_filter, push_insurer)
    # Record the transfer as a transaction document alongside the contract.
    client.insurance.transactions.insert_one({
        "sending_wallet_addr": insurer_payload['wallet_addr'],
        "receiving_wallet_addr": contract_address,
        "transfer_amount": insurer_payload['insuring_amount'],
    })
    return {
        "status": f"New insurer ({insurer_payload['wallet_addr']}) has been added to insurance ({contract_address})"
    }
| 34.044248 | 134 | 0.609566 | from flask import request, jsonify
from is452 import app
from bson.objectid import ObjectId
from pymongo import MongoClient
from datetime import datetime
import json
client = MongoClient("mongodb+srv://is452_admin:mianbaochaoren@is452-project.n0htb.mongodb.net/insurance?retryWrites=true&w=majority")
collection = client.insurance.insurances
date_elements = {
"flight_delay": "flight_date",
"car": "expiry_date"
}
@app.route("/create_insurance", methods=['POST'])
def create_insurance():
insurance_type = request.args.get("insurance_type", None)
insurance_data = request.get_json()
if insurance_type == "flight_delay":
insurance_data['flight_date'] = datetime.strptime(insurance_data['flight_date'], "%Y-%m-%d")
else:
insurance_data['expiry_date'] = datetime.strptime(insurance_data['flight_date'], "%Y-%m-%d")
insurance_data['insurers'] = []
insurance_data['status'] = 'open'
insurance_data['insurance_type'] = insurance_type
response = collection.insert_one(insurance_data)
return {
"status": "Insurance record created",
"insurance_id": str(response.inserted_id)
}
@app.route("/get_all_insurances", methods=['GET'])
def get_all_insurances():
insurance_type = request.args.get("insurance_type", None)
status = request.args.get("status", None)
insurances = collection.find({"$and": [{"insurance_type": insurance_type}, {"status": status}]})
date_element = date_elements[insurance_type]
transformed_insurances = []
for i in insurances:
i['_id'] = str(i['_id'])
i[date_element] = i[date_element].strftime("%Y-%m-%d")
cum_insured_amt = 0
if i["insurers"]:
for insurer in i["insurers"]:
cum_insured_amt += float(insurer['insuring_amount'])
if cum_insured_amt > 0:
i['percent_insured'] = round(cum_insured_amt/float(i['max_insured_amount']), 2)
else:
i['percent_insured'] = round(cum_insured_amt, 2)
transformed_insurances.append(i)
if insurances:
return {
"status": "All insurances has been retrieved",
"insurances": transformed_insurances
}
return {
"status": "No insurances in the system at the moment"
}
@app.route("/get_insurance_by_id", methods=['GET'])
def get_insurance_by_id():
insurance_id = request.args.get("insurance_id", None)
insurance = collection.find_one({"_id": ObjectId(insurance_id)})
date_element = date_elements[insurance['insurance_type']]
if insurance:
insurance["_id"] = str(insurance["_id"])
insurance[date_element] = insurance[date_element].strftime("%Y-%m-%d")
cum_insured_amt = 0
if insurance["insurers"]:
for insurer in insurance["insurers"]:
cum_insured_amt += float(insurer['insuring_amount'])
if cum_insured_amt > 0:
insurance['percent_insured'] = round(cum_insured_amt/float(insurance['max_insured_amount']), 2)
else:
insurance['percent_insured'] = round(cum_insured_amt, 2)
return {
"status": "Found request",
"insurance": insurance
}
return {
"status": "Insurance ID does not exist in database"
}
@app.route("/get_insurance_by_user", methods=['GET'])
def get_insurance_by_user():
user_wallet_addr = request.args.get("user_wallet_addr", None)
raw_insured_insurances = collection.find({"insured_wallet_addr": user_wallet_addr})
insured_insurances = []
if raw_insured_insurances:
for i in raw_insured_insurances:
i['_id'] = str(i["_id"])
i['flight_date'] = i['flight_date'].strftime("%Y-%m-%d")
cum_insured_amt = 0
if i["insurers"]:
for insurer in i["insurers"]:
cum_insured_amt += float(insurer['insuring_amount'])
if cum_insured_amt > 0:
i['percent_insured'] = round(cum_insured_amt/float(i['max_insured_amount']), 2)
else:
i['percent_insured'] = round(cum_insured_amt, 2)
insured_insurances.append(i)
raw_insuring_insurances = collection.find({"insurers.wallet_addr" : user_wallet_addr})
insuring_insurances = []
if raw_insuring_insurances:
for i in raw_insuring_insurances:
i["_id"] = str(i["_id"])
i['flight_date'] = i['flight_date'].strftime("%Y-%m-%d")
cum_insured_amt = 0
if i["insurers"]:
for insurer in i["insurers"]:
cum_insured_amt += float(insurer['insuring_amount'])
if cum_insured_amt > 0:
i['percent_insured'] = round(cum_insured_amt/float(i['max_insured_amount']), 2)
else:
i['percent_insured'] = round(cum_insured_amt, 2)
insuring_insurances.append(i)
return {
"insured_insurances": insured_insurances,
"insuring_insurances": insuring_insurances
}
@app.route("/add_insurer", methods=["POST"])
def add_insurer():
contract_address = request.args.get("contract_address", None)
new_insurer_data = request.get_json()
collection.find_one_and_update(
{
"contract_address": contract_address
},
{
"$addToSet": {"insurers": new_insurer_data}
}
)
transaction_data = {
"sending_wallet_addr": new_insurer_data['wallet_addr'],
"receiving_wallet_addr": contract_address,
"transfer_amount": new_insurer_data['insuring_amount']
}
transaction_collection = client.insurance.transactions
response = transaction_collection.insert_one(transaction_data)
return {
"status": f"New insurer ({new_insurer_data['wallet_addr']}) has been added to insurance ({contract_address})"
}
| true | true |
1c32835832835b2d2505fe59b4541b16a736ed38 | 6,571 | py | Python | scripts/position_data.py | jtmccr1/variant_pipeline | 7376550d8bb90673914845b53b354f87bfd28d5c | [
"Apache-2.0"
] | 11 | 2018-03-29T06:17:50.000Z | 2021-12-16T07:04:57.000Z | scripts/position_data.py | jtmccr1/variant_pipeline | 7376550d8bb90673914845b53b354f87bfd28d5c | [
"Apache-2.0"
] | 7 | 2018-01-05T17:53:14.000Z | 2019-05-09T17:46:18.000Z | scripts/position_data.py | lauringlab/variant_pipeline | b4a26398a5707814884cbf80a2e22032476e6479 | [
"Apache-2.0"
] | 15 | 2017-02-07T21:41:33.000Z | 2021-03-19T07:59:31.000Z | from __future__ import division
import pysam
import numpy as np
import yaml
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.Alphabet import generic_dna
#from scripts.seq_classes import locus, segment, tally, allele # %%
from seq_classes import locus, segment, tally, allele
#from scripts.trim_to_regions import ReadFASTA #%%
from trim_to_regions import ReadFASTA
import argparse
import os
import json
def main():
    """Tally per-position base counts from a BAM file and classify variants.

    Reads the coding regions from a JSON "bed" file, piles up allele counts
    for every segment with pysam, classifies each allele as synonymous /
    nonsynonymous against the sample consensus FASTA, assigns every locus a
    concatenated-genome position, and dumps the genome summary as JSON.
    """
    parser = argparse.ArgumentParser(description='This scipts takes a bam file \
    and identifies variants and according to a consensus file.',
        usage ="python position_data.py sample.bed reference.fa sample.bam sample.json -maxDepth 1000 ")
    parser.add_argument('bed_json', metavar='bed_json', nargs='+',
                        help='a json bed like file with regions to compare')
    parser.add_argument('reference_fa', metavar='ref', nargs='+',
                        help='The sample consensus file which will be used to call nonsynonymous and synonymous mutations')
    parser.add_argument('bam', metavar='bam', nargs='+',
                        help='The bam file of the sample. For naming purposes we expect it to be sample_name.removed.bam')
    parser.add_argument('output', metavar='output', nargs='+',
                        help='The json file to hold the output')
    parser.add_argument('--maxDepth', metavar='maxDepth', type=int,
                        help='the max depth to use for pileup default is 1000')
    #parser.add_argument('-mqc','--quality_metrics',action= 'store_true',dest = 'mqc',default = False)
    args = parser.parse_args()
    # Sample name is everything before ".removed.bam" in the file name.
    sample_name = args.bam[0].split(".removed.bam")[0].split("/")[-1]
    maxDepth = 1000 if args.maxDepth is None else args.maxDepth
    # get bam file
    bam = pysam.AlignmentFile(args.bam[0], "rb")
    # For each segment, record [min ORF start, max ORF stop] (the coding span)
    # plus the segment order and sizes needed for concatenated positions later.
    ref_genome_main = {}
    chr_order = []
    chr_length = []
    with open(args.bed_json[0], "r") as f:
        regions = json.load(f)
    for segment in regions["genome"]:
        start = []
        stop = []
        chr_order.append(segment["seg"])
        chr_length.append(segment["size"])
        for orf in segment["ORF"]:
            for reading in orf["regions"]:
                start.append(reading["start"])
                stop.append(reading["stop"])
        ref_genome_main[segment["seg"]] = [min(start), max(stop)]
    chr_cumsum = [0] + list(np.cumsum(chr_length))
    # Tally up base counts for each segment.
    sample_genome = {}
    for seg in ref_genome_main:
        sample_genome[seg] = tally(bamfile=bam, chr=seg,
                                   start=ref_genome_main[seg][0],
                                   stop=ref_genome_main[seg][1],
                                   maxDepth=maxDepth)
    # Make sure the consensus calls / frequencies are up to date.
    for seg in sample_genome:
        sample_genome[seg].consensus()
    # Classify every observed allele against the sample consensus sequence.
    ref_file = ReadFASTA(args.reference_fa[0])
    for seg in sample_genome:
        for ref_seg in regions["genome"]:
            if seg == ref_seg["seg"]:
                # The bed regions are relative to this sequence.
                consensus_sequence = [s.seq for s in ref_file if s.id == seg]
                if len(consensus_sequence) == 0:
                    raise ValueError("Segment %s not found in the reference fasta file" % seg)
                elif len(consensus_sequence) > 1:
                    # Bug fix: this condition was "< 1" (unreachable after the
                    # == 0 check above), and the message was formatted with a
                    # bare "% seg" instead of a tuple, which itself raised a
                    # TypeError instead of the intended error.
                    raise ValueError("Segment %s found in the reference fasta file %d times "
                                     % (seg, len(consensus_sequence)))
                else:
                    consensus_sequence = consensus_sequence[0]
                for orf in ref_seg["ORF"]:
                    for l in sample_genome[seg].seq:
                        for nucleotide in l.alleles:
                            l.alleles[nucleotide].classifyVar(consensus_sequence, orf, l.pos)
    # Assign each locus its position in the concatenated genome.
    for seg in sample_genome:
        for pos in sample_genome[seg].seq:
            pos.concat_pos = pos.pos + chr_cumsum[chr_order.index(seg)]
    with open(args.output[0], 'w') as f:
        out_data = {"Sample": sample_name, "genome": []}
        for seg in sample_genome:
            out_data["genome"].append(sample_genome[seg].reprJSON())
        json.dump(out_data, f, sort_keys=True, indent=4)
    # A large disabled "mqc" reporting block that lived here as a dead string
    # literal was removed; see the commented-out --quality_metrics argument
    # above if that feature is ever revived.
if __name__ == '__main__':
    main()
| 38.881657 | 129 | 0.569015 | from __future__ import division
import pysam
import numpy as np
import yaml
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.Alphabet import generic_dna
m seq_classes import locus, segment, tally, allele
om trim_to_regions import ReadFASTA
import argparse
import os
import json
def main():
parser = argparse.ArgumentParser(description='This scipts takes a bam file \
and identifies variants and according to a consensus file.',
usage ="python position_data.py sample.bed reference.fa sample.bam sample.json -maxDepth 1000 ")
parser.add_argument('bed_json', metavar='bed_json', nargs='+',
help='a json bed like file with regions to compare')
parser.add_argument('reference_fa', metavar='ref',nargs='+',
help = 'The sample consensus file which will be used to call nonsynonymous and synonymous mutations')
parser.add_argument('bam', metavar='bam', nargs='+',
help='The bam file of the sample. For naming purposes we expect it to be sample_name.removed.bam')
parser.add_argument('output', metavar='output', nargs='+',
help='The json file to hold the output')
parser.add_argument('--maxDepth', metavar='maxDepth', type=int,
help='the max depth to use for pileup default is 1000')
args = parser.parse_args()
sample_name = args.bam[0].split(".removed.bam")[0].split("/")[-1]
if args.maxDepth==None:
maxDepth = 1000
else:
maxDepth=args.maxDepth
bam = pysam.AlignmentFile(args.bam[0],"rb")
ref_genome_main={}
chr_order = []
chr_length = []
with open(args.bed_json[0],"r") as f:
regions=json.load(f)
for segment in regions["genome"]:
start = []
stop = []
chr_order.append(segment["seg"])
chr_length.append(segment["size"])
for orf in segment["ORF"]:
for reading in orf["regions"]:
start.append(reading["start"])
stop.append(reading["stop"])
ref_genome_main.update({segment["seg"]: [min(start),max(stop)]})
chr_cumsum = [0] + list(np.cumsum(chr_length))
sample_genome={}
for seg in ref_genome_main:
sample_genome.update({seg: tally(bamfile=bam,chr=seg,\
start = ref_genome_main[seg][0],stop = ref_genome_main[seg][1],maxDepth=maxDepth)})
for seg in sample_genome:
sample_genome[seg].consensus()
# Here we will classify the variants
ref_file = ReadFASTA(args.reference_fa[0])
for seg in sample_genome:
for ref_seg in regions["genome"]:
if seg == ref_seg["seg"]:
consensus_sequence = [s.seq for s in ref_file if s.id==seg]# the regions are relative to this sequence
if len(consensus_sequence)==0:
raise ValueError("Segment %s not found in the reference fasta file" % seg)
elif len(consensus_sequence)<1:
raise ValueError("Segment %s found in the reference fasta file %d times " % seg,len(consensus_sequence))
else:
consensus_sequence = consensus_sequence[0]
for orf in ref_seg["ORF"]:
for l in sample_genome[seg].seq:
for nucleotide in l.alleles:
l.alleles[nucleotide].classifyVar(consensus_sequence,orf,l.pos)
# set concatpos
for seg in sample_genome:
for pos in sample_genome[seg].seq:
# set concatpos
pos.concat_pos = pos.pos + chr_cumsum[chr_order.index(seg)]
with open(args.output[0],'w') as f:
out_data={"Sample": sample_name,"genome" :[]}
for seg in sample_genome:
out_data["genome"].append(sample_genome[seg].reprJSON())
json.dump(out_data,f,sort_keys=True,indent=4)
if __name__ == '__main__':
main()
| true | true |
1c3283d088da48484e2b2afc072a46a997b2b035 | 327 | py | Python | braponto-back/core/migrations/0002_auto_20210223_2124.py | TheRodrigoBraga/Braponto | 8367e5e8fdb1dde192c8068b93c4014d77165785 | [
"MIT"
] | null | null | null | braponto-back/core/migrations/0002_auto_20210223_2124.py | TheRodrigoBraga/Braponto | 8367e5e8fdb1dde192c8068b93c4014d77165785 | [
"MIT"
] | 2 | 2021-02-21T13:39:59.000Z | 2021-02-23T01:52:00.000Z | braponto-back/core/migrations/0002_auto_20210223_2124.py | TheRodrigoBraga/braponto | 8367e5e8fdb1dde192c8068b93c4014d77165785 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.7 on 2021-02-24 00:24
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated Django migration: renames the model 'Funcionarios' to the
    # singular 'Funcionario' (a table rename; existing rows are preserved).
    # Must be applied after the initial schema of the 'core' app.
    dependencies = [
        ('core', '0001_initial'),
    ]
    operations = [
        migrations.RenameModel(
            old_name='Funcionarios',
            new_name='Funcionario',
        ),
    ]
| 18.166667 | 47 | 0.590214 |
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.RenameModel(
old_name='Funcionarios',
new_name='Funcionario',
),
]
| true | true |
1c3284f669f39734ab38e65c80681e81e655f75f | 32,613 | py | Python | discovery-infra/tests/base_test.py | rollandf/assisted-test-infra | f2d3411ceb0838f3045e4ad88f2686bed516cf8f | [
"Apache-2.0"
] | null | null | null | discovery-infra/tests/base_test.py | rollandf/assisted-test-infra | f2d3411ceb0838f3045e4ad88f2686bed516cf8f | [
"Apache-2.0"
] | 164 | 2020-11-02T07:02:58.000Z | 2022-03-28T16:03:34.000Z | discovery-infra/tests/base_test.py | rollandf/assisted-test-infra | f2d3411ceb0838f3045e4ad88f2686bed516cf8f | [
"Apache-2.0"
] | null | null | null | import json
import logging
import os
import shutil
from contextlib import suppress
from pathlib import Path
from typing import Callable, List, Optional, Tuple
import libvirt
from kubernetes.client.exceptions import ApiException as K8sApiException
import pytest
import test_infra.utils as infra_utils
import waiting
from _pytest.fixtures import FixtureRequest
from assisted_service_client.rest import ApiException
from download_logs import download_logs
from junit_report import JunitFixtureTestCase, JunitTestCase
from kubernetes.client import CoreV1Api
from kubernetes.client.exceptions import ApiException as K8sApiException
from netaddr import IPNetwork
from paramiko import SSHException
from test_infra import consts
from test_infra.assisted_service_api import InventoryClient
from test_infra.consts import OperatorResource
from test_infra.controllers.iptables import IptableRule
from test_infra.controllers.nat_controller import NatController
from test_infra.controllers.node_controllers import (Node, NodeController,
TerraformController,
VSphereController)
from test_infra.controllers.proxy_controller.proxy_controller import \
ProxyController
from test_infra.helper_classes.cluster import Cluster
from test_infra.helper_classes.config import BaseTerraformConfig
from test_infra.helper_classes.config.controller_config import BaseNodeConfig
from test_infra.helper_classes.config.vsphere_config import \
VSphereControllerConfig
from test_infra.helper_classes.infra_env import InfraEnv
from test_infra.helper_classes.kube_helpers import (KubeAPIContext,
create_kube_api_client)
from test_infra.helper_classes.nodes import Nodes
from test_infra.tools.assets import LibvirtNetworkAssets
from test_infra.utils import utils
from test_infra.utils.operators_utils import (parse_olm_operators_from_env,
resource_param)
from tests.config import (ClusterConfig, InfraEnvConfig, TerraformConfig,
global_variables)
class BaseTest:
@pytest.fixture
def new_controller_configuration(self) -> BaseNodeConfig:
"""
Creates the controller configuration object according to the platform.
Override this fixture in your test class to provide a custom configuration object
:rtype: new node controller configuration
"""
if global_variables.platform == consts.Platforms.VSPHERE:
return VSphereControllerConfig()
return TerraformConfig()
@pytest.fixture
def prepared_controller_configuration(self, new_controller_configuration: BaseNodeConfig) -> BaseNodeConfig:
if not isinstance(new_controller_configuration, TerraformConfig):
yield new_controller_configuration
return
# Configuring net asset which currently supported by libvirt terraform only
net_asset = LibvirtNetworkAssets()
new_controller_configuration.net_asset = net_asset.get()
yield new_controller_configuration
net_asset.release_all()
    @pytest.fixture
    def controller_configuration(self, request: pytest.FixtureRequest,
                                 prepared_controller_configuration: BaseNodeConfig) -> BaseNodeConfig:
        """Yield the controller configuration, optionally modified by a test.

        Mark a test with
        ``@pytest.mark.override_controller_configuration(<fixture name>)``
        and the named fixture will receive the prepared configuration and may
        modify it before it is used, e.g.::

            @pytest.fixture
            def my_fixture(self, prepared_controller_configuration):
                yield prepared_controller_configuration

            @pytest.mark.override_controller_configuration(my_fixture.__name__)
            def test_something(cluster):
                pass
        """
        yield utils.run_marked_fixture(prepared_controller_configuration, "override_controller_configuration", request)
    @pytest.fixture
    def new_cluster_configuration(self) -> ClusterConfig:
        """Return a fresh default cluster configuration.

        Override this fixture in a test class to provide a custom cluster
        configuration (see TestInstall).
        """
        return ClusterConfig()
    @pytest.fixture
    def new_infra_env_configuration(self) -> InfraEnvConfig:
        """Return a fresh default infra-env configuration.

        Override this fixture in a test class to provide a custom infra-env
        configuration.
        """
        return InfraEnvConfig()
    @pytest.fixture
    def cluster_configuration(self, request: pytest.FixtureRequest,
                              new_cluster_configuration: ClusterConfig) -> ClusterConfig:
        """Yield the cluster configuration, optionally modified by a test.

        Mark a test with
        ``@pytest.mark.override_cluster_configuration(<fixture name>)``
        and the named fixture will receive the new configuration and may
        modify it before it is used, e.g.::

            @pytest.fixture
            def my_fixture(self, new_cluster_configuration):
                yield new_cluster_configuration

            @pytest.mark.override_cluster_configuration(my_fixture.__name__)
            def test_something(cluster):
                pass
        """
        yield utils.run_marked_fixture(new_cluster_configuration, "override_cluster_configuration", request)
    @pytest.fixture
    def infra_env_configuration(self, request: pytest.FixtureRequest,
                                new_infra_env_configuration: InfraEnvConfig) -> InfraEnvConfig:
        """Yield the infra-env configuration, optionally modified by a test.

        Mark a test with
        ``@pytest.mark.override_infra_env_configuration(<fixture name>)``
        (note: the marker actually consumed below, not the
        "override_cluster_configuration" one the original docstring claimed)
        and the named fixture will receive the new configuration and may
        modify it before it is used.
        """
        yield utils.run_marked_fixture(new_infra_env_configuration, "override_infra_env_configuration", request)
@pytest.fixture
def controller(self, cluster_configuration: ClusterConfig,
controller_configuration: BaseNodeConfig) -> NodeController:
if cluster_configuration.platform == consts.Platforms.VSPHERE:
return VSphereController(controller_configuration, cluster_configuration)
return TerraformController(controller_configuration, entity_config=cluster_configuration)
    @pytest.fixture
    def infraenv_controller(self, infra_env_configuration: InfraEnvConfig,
                            controller_configuration: BaseNodeConfig) -> NodeController:
        """Instantiate the node controller backing a standalone infra-env."""
        if infra_env_configuration.platform == consts.Platforms.VSPHERE:
            # TODO implement for Vsphere
            raise NotImplementedError
        return TerraformController(controller_configuration, entity_config=infra_env_configuration)
    @pytest.fixture
    def nodes(self, controller: NodeController) -> Nodes:
        """Wrap the platform controller in a ``Nodes`` collection."""
        return Nodes(controller)
    @pytest.fixture
    def infraenv_nodes(self, infraenv_controller: NodeController) -> Nodes:
        """Wrap the infra-env controller in a ``Nodes`` collection."""
        return Nodes(infraenv_controller)
    @pytest.fixture
    def prepare_nodes(self, nodes: Nodes, cluster_configuration: ClusterConfig) -> Nodes:
        """Yield prepared cluster nodes.

        On teardown (when ``test_teardown`` is enabled) destroys all nodes
        and removes the downloaded discovery ISO.
        """
        try:
            nodes.prepare_nodes()
            yield nodes
        finally:
            if global_variables.test_teardown:
                logging.info('--- TEARDOWN --- node controller\n')
                nodes.destroy_all_nodes()
                logging.info(
                    f'--- TEARDOWN --- deleting iso file from: {cluster_configuration.iso_download_path}\n')
                infra_utils.run_command(f"rm -f {cluster_configuration.iso_download_path}", shell=True)
    @pytest.fixture
    def prepare_infraenv_nodes(self, infraenv_nodes: Nodes, infra_env_configuration: InfraEnvConfig) -> Nodes:
        """Yield prepared infra-env nodes.

        On teardown (when ``test_teardown`` is enabled) destroys all nodes
        and removes the downloaded discovery ISO.
        """
        try:
            infraenv_nodes.prepare_nodes()
            yield infraenv_nodes
        finally:
            if global_variables.test_teardown:
                logging.info('--- TEARDOWN --- node controller\n')
                infraenv_nodes.destroy_all_nodes()
                logging.info(
                    f'--- TEARDOWN --- deleting iso file from: {infra_env_configuration.iso_download_path}\n')
                infra_utils.run_command(f"rm -f {infra_env_configuration.iso_download_path}", shell=True)
    @classmethod
    def _prepare_nodes_network(cls, prepared_nodes: Nodes, controller_configuration: BaseNodeConfig) -> Nodes:
        """Generator: wrap the nodes' lifetime with NAT rules.

        NAT is only set up on the bare-metal / "none" platforms; everything
        else is yielded untouched.
        """
        if global_variables.platform not in (consts.Platforms.BARE_METAL, consts.Platforms.NONE):
            yield prepared_nodes
            return
        interfaces = cls.nat_interfaces(controller_configuration)  # todo need to fix mismatch config types
        nat = NatController(interfaces, NatController.get_namespace_index(interfaces[0]))
        nat.add_nat_rules()
        yield prepared_nodes
        cls.teardown_nat(nat)
    @pytest.fixture
    def prepare_nodes_network(self, prepare_nodes: Nodes, controller_configuration: BaseNodeConfig) -> Nodes:
        """Yield prepared cluster nodes with NAT applied (see _prepare_nodes_network)."""
        yield from self._prepare_nodes_network(prepare_nodes, controller_configuration)
    @pytest.fixture
    def prepare_infraenv_nodes_network(self, prepare_infraenv_nodes: Nodes,
                                       controller_configuration: BaseNodeConfig) -> Nodes:
        """Yield prepared infra-env nodes with NAT applied (see _prepare_nodes_network)."""
        yield from self._prepare_nodes_network(prepare_infraenv_nodes, controller_configuration)
@staticmethod
def teardown_nat(nat: NatController) -> None:
if global_variables.test_teardown and nat:
nat.remove_nat_rules()
    @pytest.fixture
    @JunitFixtureTestCase()
    def cluster(self, api_client: InventoryClient, request: FixtureRequest,
                proxy_server, prepare_nodes_network: Nodes, cluster_configuration: ClusterConfig):
        """Yield a Cluster bound to the prepared nodes.

        IPv6-only deployments get a squid proxy set up before the test runs.
        On teardown, logs are collected when the test failed, and (when
        ``test_teardown`` is enabled) an in-progress install is cancelled and
        the cluster is deleted from the service.
        """
        logging.debug(f'--- SETUP --- Creating cluster for test: {request.node.name}\n')
        cluster = Cluster(api_client=api_client, config=cluster_configuration, nodes=prepare_nodes_network)
        if self._does_need_proxy_server(prepare_nodes_network):
            self._set_up_proxy_server(cluster, cluster_configuration, proxy_server)
        yield cluster
        if BaseTest._is_test_failed(request):
            logging.info(f'--- TEARDOWN --- Collecting Logs for test: {request.node.name}\n')
            self.collect_test_logs(cluster, api_client, request, cluster.nodes)
        if global_variables.test_teardown:
            if cluster.is_installing() or cluster.is_finalizing():
                cluster.cancel_install()
            # Best-effort deletion: tolerate an API error if the cluster is gone.
            with suppress(ApiException):
                logging.info(f'--- TEARDOWN --- deleting created cluster {cluster.id}\n')
                cluster.delete()
    @pytest.fixture
    @JunitFixtureTestCase()
    def infra_env(self, api_client: InventoryClient, request: FixtureRequest, proxy_server,
                  prepare_infraenv_nodes_network: Nodes, infra_env_configuration: InfraEnvConfig):
        """Yield an InfraEnv bound to the prepared nodes.

        No explicit service-side deletion happens on teardown; only a log
        line is emitted.
        """
        logging.debug(f'--- SETUP --- Creating InfraEnv for test: {request.node.name}\n')
        infra_env = InfraEnv(api_client=api_client, config=infra_env_configuration,
                             nodes=prepare_infraenv_nodes_network)
        yield infra_env
        logging.info('--- TEARDOWN --- Infra env\n')
    @pytest.fixture
    def prepared_cluster(self, cluster):
        """Yield the cluster fixture after running its pre-install preparation."""
        cluster.prepare_for_installation()
        yield cluster
@pytest.fixture(scope="function")
def get_nodes(self) -> Callable[[BaseTerraformConfig, ClusterConfig], Nodes]:
""" Currently support only single instance of nodes """
nodes_data = dict()
@JunitTestCase()
def get_nodes_func(tf_config: BaseTerraformConfig, cluster_config: ClusterConfig):
if "nodes" in nodes_data:
return nodes_data["nodes"]
nodes_data["configs"] = cluster_config, tf_config
net_asset = LibvirtNetworkAssets()
tf_config.net_asset = net_asset.get()
nodes_data["net_asset"] = net_asset
controller = TerraformController(tf_config, entity_config=cluster_config)
nodes = Nodes(controller)
nodes_data["nodes"] = nodes
nodes.prepare_nodes()
interfaces = BaseTest.nat_interfaces(tf_config)
nat = NatController(interfaces, NatController.get_namespace_index(interfaces[0]))
nat.add_nat_rules()
nodes_data["nat"] = nat
return nodes
yield get_nodes_func
_nodes: Nodes = nodes_data.get("nodes")
_cluster_config, _tf_config = nodes_data.get("configs")
_nat: NatController = nodes_data.get("nat")
_net_asset: LibvirtNetworkAssets = nodes_data.get("net_asset")
try:
if _nodes and global_variables.test_teardown:
logging.info('--- TEARDOWN --- node controller\n')
_nodes.destroy_all_nodes()
logging.info(f'--- TEARDOWN --- deleting iso file from: {_cluster_config.iso_download_path}\n')
infra_utils.run_command(f"rm -f {_cluster_config.iso_download_path}", shell=True)
self.teardown_nat(_nat)
finally:
if _net_asset:
_net_asset.release_all()
@pytest.fixture(scope="function")
def get_nodes_infraenv(self) -> Callable[[BaseTerraformConfig, InfraEnvConfig], Nodes]:
""" Currently support only single instance of nodes """
nodes_data = dict()
@JunitTestCase()
def get_nodes_func(tf_config: BaseTerraformConfig, infraenv_config: InfraEnvConfig):
if "nodes" in nodes_data:
return nodes_data["nodes"]
nodes_data["configs"] = infraenv_config, tf_config
net_asset = LibvirtNetworkAssets()
tf_config.net_asset = net_asset.get()
nodes_data["net_asset"] = net_asset
controller = TerraformController(tf_config, entity_config=infraenv_config)
nodes = Nodes(controller)
nodes_data["nodes"] = nodes
nodes.prepare_nodes()
interfaces = BaseTest.nat_interfaces(tf_config)
nat = NatController(interfaces, NatController.get_namespace_index(interfaces[0]))
nat.add_nat_rules()
nodes_data["nat"] = nat
return nodes
yield get_nodes_func
_nodes: Nodes = nodes_data.get("nodes")
_infraenv_config, _tf_config = nodes_data.get("configs")
_nat: NatController = nodes_data.get("nat")
_net_asset: LibvirtNetworkAssets = nodes_data.get("net_asset")
try:
if _nodes and global_variables.test_teardown:
logging.info('--- TEARDOWN --- node controller\n')
_nodes.destroy_all_nodes()
logging.info(f'--- TEARDOWN --- deleting iso file from: {_infraenv_config.iso_download_path}\n')
infra_utils.run_command(f"rm -f {_infraenv_config.iso_download_path}", shell=True)
self.teardown_nat(_nat)
finally:
if _net_asset:
_net_asset.release_all()
@classmethod
def nat_interfaces(cls, config: TerraformConfig) -> Tuple[str, str]:
return config.net_asset.libvirt_network_if, config.net_asset.libvirt_secondary_network_if
    @pytest.fixture()
    @JunitFixtureTestCase()
    def get_cluster(self, api_client, request, proxy_server, get_nodes) -> Callable[[Nodes, ClusterConfig], Cluster]:
        """Yield a factory that creates clusters and tears them all down afterwards.

        Do not use the ``get_nodes`` fixture inside this fixture. It is
        requested here only to force pytest to tear down the nodes *after*
        the clusters.
        """
        clusters = list()

        @JunitTestCase()
        def get_cluster_func(nodes: Nodes, cluster_config: ClusterConfig) -> Cluster:
            logging.debug(f'--- SETUP --- Creating cluster for test: {request.node.name}\n')
            _cluster = Cluster(api_client=api_client, config=cluster_config, nodes=nodes)
            if self._does_need_proxy_server(nodes):
                self._set_up_proxy_server(_cluster, cluster_config, proxy_server)
            clusters.append(_cluster)
            return _cluster
        yield get_cluster_func
        for cluster in clusters:
            if BaseTest._is_test_failed(request):
                logging.info(f'--- TEARDOWN --- Collecting Logs for test: {request.node.name}\n')
                self.collect_test_logs(cluster, api_client, request, cluster.nodes)
            if global_variables.test_teardown:
                if cluster.is_installing() or cluster.is_finalizing():
                    cluster.cancel_install()
                # Best-effort deletion: tolerate an API error if the cluster is gone.
                with suppress(ApiException):
                    logging.info(f'--- TEARDOWN --- deleting created cluster {cluster.id}\n')
                    cluster.delete()
    @pytest.fixture
    def infraenv_config(self) -> InfraEnvConfig:
        """Yield a fresh default infra-env configuration."""
        yield InfraEnvConfig()
    @pytest.fixture
    def cluster_config(self) -> ClusterConfig:
        """Yield a fresh default cluster configuration."""
        yield ClusterConfig()
    @pytest.fixture
    def terraform_config(self) -> TerraformConfig:
        """Yield a fresh default Terraform controller configuration."""
        yield TerraformConfig()
    @pytest.fixture
    def configs(self, cluster_config, terraform_config) -> Tuple[ClusterConfig, TerraformConfig]:
        """Yield the (cluster, terraform) configuration pair from the sibling fixtures.

        While using this fixture, cluster and tf configs are the same shared
        instances. For an independent Config object construct one explicitly,
        e.g. ``ClusterConfig(masters_count=1)``.
        """
        yield cluster_config, terraform_config
@staticmethod
def _does_need_proxy_server(nodes: Nodes):
return nodes and nodes.is_ipv6 and not nodes.is_ipv4
    @staticmethod
    def _set_up_proxy_server(cluster: Cluster, cluster_config: ClusterConfig, proxy_server):
        """Start a squid proxy for the cluster and register it in the cluster config.

        The proxy listens on the first address of the primary machine CIDR;
        cluster/service/machine networks and the cluster domain are excluded
        via ``no_proxy``. Finally asserts the service applied the proxy.
        """
        proxy_name = "squid-" + cluster_config.cluster_name.suffix
        port = infra_utils.scan_for_free_port(consts.DEFAULT_PROXY_SERVER_PORT)
        machine_cidr = cluster.get_primary_machine_cidr()
        # Host-side address inside the machine network (first usable IP).
        host_ip = str(IPNetwork(machine_cidr).ip + 1)
        no_proxy = []
        no_proxy += [str(cluster_network.cidr) for cluster_network in cluster_config.cluster_networks]
        no_proxy += [str(service_network.cidr) for service_network in cluster_config.service_networks]
        no_proxy += [machine_cidr]
        no_proxy += [f".{str(cluster_config.cluster_name)}.redhat.com"]
        no_proxy = ",".join(no_proxy)
        proxy = proxy_server(name=proxy_name, port=port, dir=proxy_name, host_ip=host_ip,
                             is_ipv6=cluster.nodes.is_ipv6)
        cluster.set_proxy_values(http_proxy=proxy.address, https_proxy=proxy.address, no_proxy=no_proxy)
        # Verify the proxy actually landed in the generated install-config.
        install_config = cluster.get_install_config()
        proxy_details = install_config.get("proxy") or install_config.get("Proxy")
        assert proxy_details, str(install_config)
        assert proxy_details.get("httpsProxy") == proxy.address, f"{proxy_details.get('httpsProxy')} should equal {proxy.address}"
    @pytest.fixture()
    def iptables(self) -> Callable[[Cluster, List[IptableRule], Optional[List[Node]]], None]:
        """Yield a helper that installs iptables rules scoped to given nodes' IPs.

        The helper boots the given nodes just long enough to discover their
        IPs, adds those IPs as sources on each rule, and inserts the rules.
        All inserted rules are deleted on teardown.
        """
        rules = []

        def set_iptables_rules_for_nodes(
                cluster: Cluster,
                iptables_rules: List[IptableRule],
                given_nodes=None,
        ):
            given_node_ips = []
            given_nodes = given_nodes or cluster.nodes.nodes
            cluster_config = cluster.config
            if cluster_config.download_image:
                cluster.generate_and_download_infra_env(
                    iso_download_path=cluster_config.iso_download_path,
                )
            # Start nodes only to read their first IP, then shut them down again.
            cluster.nodes.start_given(given_nodes)
            for node in given_nodes:
                given_node_ips.append(node.ips[0])
            cluster.nodes.shutdown_given(given_nodes)
            logging.info(f'Given node ips: {given_node_ips}')
            for _rule in iptables_rules:
                _rule.add_sources(given_node_ips)
                rules.append(_rule)
                _rule.insert()
        yield set_iptables_rules_for_nodes
        logging.info('---TEARDOWN iptables ---')
        for rule in rules:
            rule.delete()
@staticmethod
def attach_disk_flags(persistent):
modified_nodes = set()
def attach(node, disk_size, bootable=False, with_wwn=False):
nonlocal modified_nodes
node.attach_test_disk(disk_size, bootable=bootable, persistent=persistent, with_wwn=with_wwn)
modified_nodes.add(node)
yield attach
if global_variables.test_teardown:
for modified_node in modified_nodes:
try:
modified_node.detach_all_test_disks()
logging.info(f'Successfully detach test disks from node {modified_node.name}')
except (libvirt.libvirtError, FileNotFoundError):
logging.warning(f'Failed to detach test disks from node {modified_node.name}')
    @pytest.fixture(scope="function")
    def attach_disk(self):
        """Yield an attach-disk helper for non-persistent test disks."""
        yield from self.attach_disk_flags(persistent=False)
    @pytest.fixture(scope="function")
    def attach_disk_persistent(self):
        """Yield an attach-disk helper for persistent test disks."""
        yield from self.attach_disk_flags(persistent=True)
    @pytest.fixture()
    def attach_interface(self):
        """Yield a helper that attaches an extra network interface to a node.

        Accepts either a raw libvirt network XML or an existing network name.
        All added interfaces and networks are removed on teardown
        (best-effort: any cleanup exception is suppressed).
        """
        added_networks = []

        def add(node, network_name=None, network_xml=None):
            interface_mac = ""
            network = ""
            if network_xml:
                network, interface_mac = node.attach_interface(network_xml)
            elif network_name:
                interface_mac = node.add_interface(network_name)
                network = node.get_network_by_name(network_name)
            added_networks.append({"node": node, "network": network, "mac": interface_mac})
        yield add
        for added_network in added_networks:
            logging.info(f'Deleting custom networks:{added_networks}')
            with suppress(Exception):
                node_obj = added_network.get("node")
                node_obj.undefine_interface(added_network.get("mac"))
                node_obj.destroy_network(added_network.get("network"))
    @pytest.fixture()
    def proxy_server(self):
        """Yield a factory that starts ProxyController instances.

        Every started proxy is removed on teardown when ``test_teardown``
        is enabled.
        """
        logging.info('--- SETUP --- proxy controller')
        proxy_servers = []

        def start_proxy_server(**kwargs):
            proxy_server = ProxyController(**kwargs)
            proxy_servers.append(proxy_server)
            return proxy_server
        yield start_proxy_server
        if global_variables.test_teardown:
            logging.info('--- TEARDOWN --- proxy controller')
            for server in proxy_servers:
                server.remove()
@staticmethod
def get_cluster_by_name(api_client, cluster_name):
clusters = api_client.clusters_list()
for cluster in clusters:
if cluster['name'] == cluster_name:
return cluster
return None
    @staticmethod
    def assert_http_error_code(api_call, status, reason, **kwargs):
        """Invoke ``api_call(**kwargs)`` and assert it raises ApiException
        with the given HTTP status and reason."""
        with pytest.raises(ApiException) as response:
            api_call(**kwargs)
        assert response.value.status == status
        assert response.value.reason == reason
@staticmethod
def assert_cluster_validation(cluster_info, validation_section, validation_id, expected_status):
found_status = infra_utils.get_cluster_validation_value(cluster_info, validation_section, validation_id)
assert found_status == expected_status, "Found validation status " + found_status + " rather than " + \
expected_status + " for validation " + validation_id
@staticmethod
def assert_string_length(string, expected_len):
assert len(string) == expected_len, "Expected len string of: " + str(expected_len) + \
" rather than: " + str(len(string)) + " String value: " + string
    def collect_test_logs(self, cluster, api_client, request, nodes: Nodes):
        """Collect service, virsh and journalctl logs for a finished test.

        Service-side log download is best-effort (ApiException suppressed);
        host-side virsh and journalctl collection always runs.
        """
        log_dir_name = f"{global_variables.log_folder}/{request.node.name}"
        with suppress(ApiException):
            # Round-trip through JSON to normalize non-serializable values (default=str).
            cluster_details = json.loads(json.dumps(cluster.get_details().to_dict(), sort_keys=True, default=str))
            download_logs(api_client, cluster_details, log_dir_name,
                          BaseTest._is_test_failed(request),
                          pull_secret=global_variables.pull_secret)
        self._collect_virsh_logs(nodes, log_dir_name)
        self._collect_journalctl(nodes, log_dir_name)
@classmethod
def _is_test_failed(cls, test):
# When cancelling a test the test.result_call isn't available, mark it as failed
return not hasattr(test.node, "result_call") or test.node.result_call.failed
    @classmethod
    def _collect_virsh_logs(cls, nodes: Nodes, log_dir_name):
        """Dump libvirt state (domains, networks, leases, qemu/console logs,
        libvirtd journal) under ``<log_dir_name>/libvirt_logs``.

        NOTE(review): the inner makedirs calls use ``exist_ok=False`` — a
        second collection into the same directory would raise; confirm this
        is only ever invoked once per test.
        """
        logging.info('Collecting virsh logs\n')
        os.makedirs(log_dir_name, exist_ok=True)
        virsh_log_path = os.path.join(log_dir_name, "libvirt_logs")
        os.makedirs(virsh_log_path, exist_ok=False)
        libvirt_list_path = os.path.join(virsh_log_path, "virsh_list")
        infra_utils.run_command(f"virsh list --all >> {libvirt_list_path}", shell=True)
        libvirt_net_list_path = os.path.join(virsh_log_path, "virsh_net_list")
        infra_utils.run_command(f"virsh net-list --all >> {libvirt_net_list_path}", shell=True)
        network_name = nodes.get_cluster_network()
        virsh_leases_path = os.path.join(virsh_log_path, "net_dhcp_leases")
        infra_utils.run_command(f"virsh net-dhcp-leases {network_name} >> {virsh_leases_path}", shell=True)
        messages_log_path = os.path.join(virsh_log_path, "messages.log")
        try:
            shutil.copy('/var/log/messages', messages_log_path)
        except FileNotFoundError:
            logging.warning('Failed to copy /var/log/messages, file does not exist')
        # Per-domain qemu logs.
        qemu_libvirt_path = os.path.join(virsh_log_path, "qemu_libvirt_logs")
        os.makedirs(qemu_libvirt_path, exist_ok=False)
        for node in nodes:
            try:
                shutil.copy(f'/var/log/libvirt/qemu/{node.name}.log', f'{qemu_libvirt_path}/{node.name}-qemu.log')
            except FileNotFoundError:
                logging.warning(f"Failed to copy {node.name} qemu log, file does not exist")
        # Per-domain serial console logs.
        console_log_path = os.path.join(virsh_log_path, "console_logs")
        os.makedirs(console_log_path, exist_ok=False)
        for node in nodes:
            try:
                shutil.copy(f'/var/log/libvirt/qemu/{node.name}-console.log',
                            f'{console_log_path}/{node.name}-console.log')
            except FileNotFoundError:
                logging.warning(f"Failed to copy {node.name} console log, file does not exist")
        libvird_log_path = os.path.join(virsh_log_path, "libvirtd_journal")
        infra_utils.run_command(f"journalctl --since \"{nodes.setup_time}\" "
                                f"-u libvirtd -D /run/log/journal >> {libvird_log_path}", shell=True)
    @staticmethod
    def _collect_journalctl(nodes: Nodes, log_dir_name):
        """Download each node's full journalctl into ``<log_dir_name>/nodes_journalctl``.

        Per-node failures (SSH/timeouts) are logged and skipped.
        """
        logging.info('Collecting journalctl\n')
        infra_utils.recreate_folder(log_dir_name, with_chmod=False, force_recreate=False)
        journal_ctl_path = Path(log_dir_name) / 'nodes_journalctl'
        infra_utils.recreate_folder(journal_ctl_path, with_chmod=False)
        for node in nodes:
            try:
                node.run_command(f'sudo journalctl >> /tmp/{node.name}-journalctl')
                journal_path = journal_ctl_path / node.name
                node.download_file(f'/tmp/{node.name}-journalctl', str(journal_path))
            except (RuntimeError, TimeoutError, SSHException):
                logging.info(f'Could not collect journalctl for {node.name}')
    @staticmethod
    def verify_no_logs_uploaded(cluster, cluster_tar_path):
        """Assert that downloading installation logs fails with "No log files"."""
        with pytest.raises(ApiException) as ex:
            cluster.download_installation_logs(cluster_tar_path)
        assert "No log files" in str(ex.value)
    @staticmethod
    def update_oc_config(nodes, cluster):
        """Point oc/kubectl at the cluster: export KUBECONFIG and map the API
        VIP (or the single node's IP for SNO) in /etc/hosts."""
        os.environ["KUBECONFIG"] = cluster.config.kubeconfig_path
        if nodes.masters_count == 1:
            # Single-node cluster: the node's own address serves as the API endpoint.
            main_cidr = cluster.get_primary_machine_cidr()
            api_vip = cluster.get_ip_for_single_node(cluster.api_client, cluster.id, main_cidr)
        else:
            vips = nodes.controller.get_ingress_and_api_vips()
            api_vip = vips['api_vip']
        infra_utils.config_etc_hosts(cluster_name=cluster.name,
                                     base_dns_domain=global_variables.base_dns_domain,
                                     api_vip=api_vip)
def wait_for_controller(self, cluster, nodes):
cluster.download_kubeconfig_no_ingress()
self.update_oc_config(nodes, cluster)
def check_status():
res = infra_utils.get_assisted_controller_status(cluster.config.kubeconfig_path)
return "Running" in str(res, 'utf-8')
waiting.wait(
lambda: check_status(),
timeout_seconds=900,
sleep_seconds=30,
waiting_for="controller to be running",
)
    @pytest.fixture(scope='session')
    def kube_api_client(self):
        """Session-scoped Kubernetes API client for kube-api test flows."""
        yield create_kube_api_client()
    @pytest.fixture()
    def kube_api_context(self, kube_api_client):
        """Yield a KubeAPIContext with the spoke namespace ensured to exist.

        An HTTP 409 (Conflict) on namespace creation means it already exists
        and is tolerated; the namespace is deleted on teardown when
        ``test_teardown`` is enabled.
        """
        kube_api_context = KubeAPIContext(kube_api_client, clean_on_exit=global_variables.test_teardown)
        with kube_api_context:
            v1 = CoreV1Api(kube_api_client)
            try:
                v1.create_namespace(body={
                    "apiVersion": "v1",
                    "kind": "Namespace",
                    "metadata": {
                        "name": global_variables.spoke_namespace,
                        "labels": {
                            "name": global_variables.spoke_namespace,
                        },
                    },
                })
            except K8sApiException as e:
                # 409 Conflict: namespace already exists — fine.
                if e.status != 409:
                    raise
            yield kube_api_context
            if global_variables.test_teardown:
                v1.delete_namespace(global_variables.spoke_namespace)
@pytest.fixture(scope="function")
def update_olm_config(self) -> Callable:
def update_config(tf_config: TerraformConfig = TerraformConfig(),
cluster_config: ClusterConfig = ClusterConfig(), operators=None):
if operators is None:
operators = parse_olm_operators_from_env()
tf_config.worker_memory = resource_param(tf_config.worker_memory,
OperatorResource.WORKER_MEMORY_KEY, operators)
tf_config.master_memory = resource_param(tf_config.master_memory,
OperatorResource.MASTER_MEMORY_KEY, operators)
tf_config.worker_vcpu = resource_param(tf_config.worker_vcpu,
OperatorResource.WORKER_VCPU_KEY, operators)
tf_config.master_vcpu = resource_param(tf_config.master_vcpu,
OperatorResource.MASTER_VCPU_KEY, operators)
tf_config.workers_count = resource_param(tf_config.workers_count,
OperatorResource.WORKER_COUNT_KEY, operators)
tf_config.worker_disk = resource_param(tf_config.worker_disk,
OperatorResource.WORKER_DISK_KEY, operators)
tf_config.master_disk = resource_param(tf_config.master_disk,
OperatorResource.MASTER_DISK_KEY, operators)
tf_config.master_disk_count = resource_param(tf_config.master_disk_count,
OperatorResource.MASTER_DISK_COUNT_KEY, operators)
tf_config.worker_disk_count = resource_param(tf_config.worker_disk_count,
OperatorResource.WORKER_DISK_COUNT_KEY, operators)
tf_config.nodes_count = tf_config.masters_count + tf_config.workers_count
cluster_config.olm_operators = [operators]
yield update_config
| 44.071622 | 130 | 0.658296 | import json
import logging
import os
import shutil
from contextlib import suppress
from pathlib import Path
from typing import Callable, List, Optional, Tuple
import libvirt
from kubernetes.client.exceptions import ApiException as K8sApiException
import pytest
import test_infra.utils as infra_utils
import waiting
from _pytest.fixtures import FixtureRequest
from assisted_service_client.rest import ApiException
from download_logs import download_logs
from junit_report import JunitFixtureTestCase, JunitTestCase
from kubernetes.client import CoreV1Api
from kubernetes.client.exceptions import ApiException as K8sApiException
from netaddr import IPNetwork
from paramiko import SSHException
from test_infra import consts
from test_infra.assisted_service_api import InventoryClient
from test_infra.consts import OperatorResource
from test_infra.controllers.iptables import IptableRule
from test_infra.controllers.nat_controller import NatController
from test_infra.controllers.node_controllers import (Node, NodeController,
TerraformController,
VSphereController)
from test_infra.controllers.proxy_controller.proxy_controller import \
ProxyController
from test_infra.helper_classes.cluster import Cluster
from test_infra.helper_classes.config import BaseTerraformConfig
from test_infra.helper_classes.config.controller_config import BaseNodeConfig
from test_infra.helper_classes.config.vsphere_config import \
VSphereControllerConfig
from test_infra.helper_classes.infra_env import InfraEnv
from test_infra.helper_classes.kube_helpers import (KubeAPIContext,
create_kube_api_client)
from test_infra.helper_classes.nodes import Nodes
from test_infra.tools.assets import LibvirtNetworkAssets
from test_infra.utils import utils
from test_infra.utils.operators_utils import (parse_olm_operators_from_env,
resource_param)
from tests.config import (ClusterConfig, InfraEnvConfig, TerraformConfig,
global_variables)
class BaseTest:
@pytest.fixture
def new_controller_configuration(self) -> BaseNodeConfig:
if global_variables.platform == consts.Platforms.VSPHERE:
return VSphereControllerConfig()
return TerraformConfig()
@pytest.fixture
def prepared_controller_configuration(self, new_controller_configuration: BaseNodeConfig) -> BaseNodeConfig:
if not isinstance(new_controller_configuration, TerraformConfig):
yield new_controller_configuration
return
net_asset = LibvirtNetworkAssets()
new_controller_configuration.net_asset = net_asset.get()
yield new_controller_configuration
net_asset.release_all()
@pytest.fixture
def controller_configuration(self, request: pytest.FixtureRequest,
prepared_controller_configuration: BaseNodeConfig) -> BaseNodeConfig:
yield utils.run_marked_fixture(prepared_controller_configuration, "override_controller_configuration", request)
@pytest.fixture
def new_cluster_configuration(self) -> ClusterConfig:
return ClusterConfig()
@pytest.fixture
def new_infra_env_configuration(self) -> InfraEnvConfig:
return InfraEnvConfig()
@pytest.fixture
def cluster_configuration(self, request: pytest.FixtureRequest,
new_cluster_configuration: ClusterConfig) -> ClusterConfig:
yield utils.run_marked_fixture(new_cluster_configuration, "override_cluster_configuration", request)
@pytest.fixture
def infra_env_configuration(self, request: pytest.FixtureRequest,
new_infra_env_configuration: InfraEnvConfig) -> InfraEnvConfig:
yield utils.run_marked_fixture(new_infra_env_configuration, "override_infra_env_configuration", request)
@pytest.fixture
def controller(self, cluster_configuration: ClusterConfig,
controller_configuration: BaseNodeConfig) -> NodeController:
if cluster_configuration.platform == consts.Platforms.VSPHERE:
return VSphereController(controller_configuration, cluster_configuration)
return TerraformController(controller_configuration, entity_config=cluster_configuration)
@pytest.fixture
def infraenv_controller(self, infra_env_configuration: InfraEnvConfig,
controller_configuration: BaseNodeConfig) -> NodeController:
if infra_env_configuration.platform == consts.Platforms.VSPHERE:
raise NotImplementedError
return TerraformController(controller_configuration, entity_config=infra_env_configuration)
@pytest.fixture
def nodes(self, controller: NodeController) -> Nodes:
return Nodes(controller)
@pytest.fixture
def infraenv_nodes(self, infraenv_controller: NodeController) -> Nodes:
return Nodes(infraenv_controller)
@pytest.fixture
def prepare_nodes(self, nodes: Nodes, cluster_configuration: ClusterConfig) -> Nodes:
try:
nodes.prepare_nodes()
yield nodes
finally:
if global_variables.test_teardown:
logging.info('--- TEARDOWN --- node controller\n')
nodes.destroy_all_nodes()
logging.info(
f'--- TEARDOWN --- deleting iso file from: {cluster_configuration.iso_download_path}\n')
infra_utils.run_command(f"rm -f {cluster_configuration.iso_download_path}", shell=True)
@pytest.fixture
def prepare_infraenv_nodes(self, infraenv_nodes: Nodes, infra_env_configuration: InfraEnvConfig) -> Nodes:
try:
infraenv_nodes.prepare_nodes()
yield infraenv_nodes
finally:
if global_variables.test_teardown:
logging.info('--- TEARDOWN --- node controller\n')
infraenv_nodes.destroy_all_nodes()
logging.info(
f'--- TEARDOWN --- deleting iso file from: {infra_env_configuration.iso_download_path}\n')
infra_utils.run_command(f"rm -f {infra_env_configuration.iso_download_path}", shell=True)
@classmethod
def _prepare_nodes_network(cls, prepared_nodes: Nodes, controller_configuration: BaseNodeConfig) -> Nodes:
if global_variables.platform not in (consts.Platforms.BARE_METAL, consts.Platforms.NONE):
yield prepared_nodes
return
interfaces = cls.nat_interfaces(controller_configuration)
nat = NatController(interfaces, NatController.get_namespace_index(interfaces[0]))
nat.add_nat_rules()
yield prepared_nodes
cls.teardown_nat(nat)
@pytest.fixture
def prepare_nodes_network(self, prepare_nodes: Nodes, controller_configuration: BaseNodeConfig) -> Nodes:
yield from self._prepare_nodes_network(prepare_nodes, controller_configuration)
@pytest.fixture
def prepare_infraenv_nodes_network(self, prepare_infraenv_nodes: Nodes,
controller_configuration: BaseNodeConfig) -> Nodes:
yield from self._prepare_nodes_network(prepare_infraenv_nodes, controller_configuration)
@staticmethod
def teardown_nat(nat: NatController) -> None:
if global_variables.test_teardown and nat:
nat.remove_nat_rules()
@pytest.fixture
@JunitFixtureTestCase()
def cluster(self, api_client: InventoryClient, request: FixtureRequest,
            proxy_server, prepare_nodes_network: Nodes, cluster_configuration: ClusterConfig):
    """Create a Cluster for the test; collect logs on failure and delete the cluster on teardown."""
    logging.debug(f'--- SETUP --- Creating cluster for test: {request.node.name}\n')
    cluster = Cluster(api_client=api_client, config=cluster_configuration, nodes=prepare_nodes_network)
    if self._does_need_proxy_server(prepare_nodes_network):
        # IPv6-only deployments need an HTTP(S) proxy to reach external content.
        self._set_up_proxy_server(cluster, cluster_configuration, proxy_server)

    yield cluster

    if BaseTest._is_test_failed(request):
        logging.info(f'--- TEARDOWN --- Collecting Logs for test: {request.node.name}\n')
        self.collect_test_logs(cluster, api_client, request, cluster.nodes)
    if global_variables.test_teardown:
        if cluster.is_installing() or cluster.is_finalizing():
            cluster.cancel_install()
        # The cluster may already be gone server-side; deletion is best-effort.
        with suppress(ApiException):
            logging.info(f'--- TEARDOWN --- deleting created cluster {cluster.id}\n')
            cluster.delete()

@pytest.fixture
@JunitFixtureTestCase()
def infra_env(self, api_client: InventoryClient, request: FixtureRequest, proxy_server,
              prepare_infraenv_nodes_network: Nodes, infra_env_configuration: InfraEnvConfig):
    """Create an InfraEnv bound to the prepared infra-env nodes for the test's duration."""
    logging.debug(f'--- SETUP --- Creating InfraEnv for test: {request.node.name}\n')
    infra_env = InfraEnv(api_client=api_client, config=infra_env_configuration,
                         nodes=prepare_infraenv_nodes_network)

    yield infra_env
    logging.info('--- TEARDOWN --- Infra env\n')

@pytest.fixture
def prepared_cluster(self, cluster):
    """A cluster that has already completed prepare_for_installation()."""
    cluster.prepare_for_installation()
    yield cluster
@pytest.fixture(scope="function")
def get_nodes(self) -> Callable[[BaseTerraformConfig, ClusterConfig], Nodes]:
    """Yield a memoized factory that provisions Terraform-backed nodes for a cluster.

    Repeated factory calls within one test return the same Nodes object.
    Teardown destroys the VMs, removes the downloaded ISO and NAT rules, and
    always releases the leased libvirt network assets.
    """
    nodes_data = dict()

    @JunitTestCase()
    def get_nodes_func(tf_config: BaseTerraformConfig, cluster_config: ClusterConfig):
        if "nodes" in nodes_data:
            # Already provisioned for this test; reuse.
            return nodes_data["nodes"]

        nodes_data["configs"] = cluster_config, tf_config

        net_asset = LibvirtNetworkAssets()
        tf_config.net_asset = net_asset.get()
        nodes_data["net_asset"] = net_asset

        controller = TerraformController(tf_config, entity_config=cluster_config)
        nodes = Nodes(controller)
        nodes_data["nodes"] = nodes

        nodes.prepare_nodes()

        interfaces = BaseTest.nat_interfaces(tf_config)
        nat = NatController(interfaces, NatController.get_namespace_index(interfaces[0]))
        nat.add_nat_rules()
        nodes_data["nat"] = nat

        return nodes

    yield get_nodes_func

    _nodes: Optional[Nodes] = nodes_data.get("nodes")
    # BUGFIX: when the fixture is requested but the factory never called,
    # "configs" is absent and unpacking .get("configs") (i.e. None) raised
    # TypeError during teardown. Default to (None, None) instead.
    _cluster_config, _tf_config = nodes_data.get("configs", (None, None))
    _nat: Optional[NatController] = nodes_data.get("nat")
    _net_asset: Optional[LibvirtNetworkAssets] = nodes_data.get("net_asset")

    try:
        if _nodes and global_variables.test_teardown:
            logging.info('--- TEARDOWN --- node controller\n')
            _nodes.destroy_all_nodes()
            logging.info(f'--- TEARDOWN --- deleting iso file from: {_cluster_config.iso_download_path}\n')
            infra_utils.run_command(f"rm -f {_cluster_config.iso_download_path}", shell=True)
        # teardown_nat is itself a no-op when teardown is disabled or _nat is None.
        self.teardown_nat(_nat)
    finally:
        if _net_asset:
            _net_asset.release_all()
@pytest.fixture(scope="function")
def get_nodes_infraenv(self) -> Callable[[BaseTerraformConfig, InfraEnvConfig], Nodes]:
    """Infra-env twin of get_nodes: yield a memoized Terraform node factory.

    Teardown mirrors get_nodes: destroy VMs, delete the ISO, drop NAT rules,
    and always release the leased network assets.
    """
    nodes_data = dict()

    @JunitTestCase()
    def get_nodes_func(tf_config: BaseTerraformConfig, infraenv_config: InfraEnvConfig):
        if "nodes" in nodes_data:
            # Already provisioned for this test; reuse.
            return nodes_data["nodes"]

        nodes_data["configs"] = infraenv_config, tf_config

        net_asset = LibvirtNetworkAssets()
        tf_config.net_asset = net_asset.get()
        nodes_data["net_asset"] = net_asset

        controller = TerraformController(tf_config, entity_config=infraenv_config)
        nodes = Nodes(controller)
        nodes_data["nodes"] = nodes

        nodes.prepare_nodes()

        interfaces = BaseTest.nat_interfaces(tf_config)
        nat = NatController(interfaces, NatController.get_namespace_index(interfaces[0]))
        nat.add_nat_rules()
        nodes_data["nat"] = nat

        return nodes

    yield get_nodes_func

    _nodes: Optional[Nodes] = nodes_data.get("nodes")
    # BUGFIX (kept consistent with get_nodes): default to (None, None) so the
    # unpack does not raise TypeError when the factory was never invoked.
    _infraenv_config, _tf_config = nodes_data.get("configs", (None, None))
    _nat: Optional[NatController] = nodes_data.get("nat")
    _net_asset: Optional[LibvirtNetworkAssets] = nodes_data.get("net_asset")

    try:
        if _nodes and global_variables.test_teardown:
            logging.info('--- TEARDOWN --- node controller\n')
            _nodes.destroy_all_nodes()
            logging.info(f'--- TEARDOWN --- deleting iso file from: {_infraenv_config.iso_download_path}\n')
            infra_utils.run_command(f"rm -f {_infraenv_config.iso_download_path}", shell=True)
        self.teardown_nat(_nat)
    finally:
        if _net_asset:
            _net_asset.release_all()

@classmethod
def nat_interfaces(cls, config: TerraformConfig) -> Tuple[str, str]:
    """Return the (primary, secondary) libvirt bridge names used for NAT rules."""
    return config.net_asset.libvirt_network_if, config.net_asset.libvirt_secondary_network_if
@pytest.fixture()
@JunitFixtureTestCase()
def get_cluster(self, api_client, request, proxy_server, get_nodes) -> Callable[[Nodes, ClusterConfig], Cluster]:
    """Yield a Cluster factory; every cluster it creates is cleaned up on teardown."""
    clusters = list()

    @JunitTestCase()
    def get_cluster_func(nodes: Nodes, cluster_config: ClusterConfig) -> Cluster:
        logging.debug(f'--- SETUP --- Creating cluster for test: {request.node.name}\n')
        _cluster = Cluster(api_client=api_client, config=cluster_config, nodes=nodes)
        if self._does_need_proxy_server(nodes):
            self._set_up_proxy_server(_cluster, cluster_config, proxy_server)
        clusters.append(_cluster)
        return _cluster

    yield get_cluster_func

    for cluster in clusters:
        if BaseTest._is_test_failed(request):
            logging.info(f'--- TEARDOWN --- Collecting Logs for test: {request.node.name}\n')
            self.collect_test_logs(cluster, api_client, request, cluster.nodes)
        if global_variables.test_teardown:
            if cluster.is_installing() or cluster.is_finalizing():
                cluster.cancel_install()
            # Deletion is best-effort; the cluster may already be gone.
            with suppress(ApiException):
                logging.info(f'--- TEARDOWN --- deleting created cluster {cluster.id}\n')
                cluster.delete()

@pytest.fixture
def infraenv_config(self) -> InfraEnvConfig:
    yield InfraEnvConfig()

@pytest.fixture
def cluster_config(self) -> ClusterConfig:
    yield ClusterConfig()

@pytest.fixture
def terraform_config(self) -> TerraformConfig:
    yield TerraformConfig()

@pytest.fixture
def configs(self, cluster_config, terraform_config) -> Tuple[ClusterConfig, TerraformConfig]:
    """Convenience bundle of the two default configuration fixtures."""
    yield cluster_config, terraform_config

@staticmethod
def _does_need_proxy_server(nodes: Nodes):
    # A proxy is required only for IPv6-only topologies.
    return nodes and nodes.is_ipv6 and not nodes.is_ipv4
@staticmethod
def _set_up_proxy_server(cluster: Cluster, cluster_config: ClusterConfig, proxy_server):
    """Start a squid proxy on the primary machine network and point the cluster at it."""
    proxy_name = "squid-" + cluster_config.cluster_name.suffix
    port = infra_utils.scan_for_free_port(consts.DEFAULT_PROXY_SERVER_PORT)
    machine_cidr = cluster.get_primary_machine_cidr()
    # Host side of the machine network: first usable address in the CIDR.
    host_ip = str(IPNetwork(machine_cidr).ip + 1)

    # Everything cluster-internal must bypass the proxy.
    no_proxy = []
    no_proxy += [str(cluster_network.cidr) for cluster_network in cluster_config.cluster_networks]
    no_proxy += [str(service_network.cidr) for service_network in cluster_config.service_networks]
    no_proxy += [machine_cidr]
    no_proxy += [f".{str(cluster_config.cluster_name)}.redhat.com"]
    no_proxy = ",".join(no_proxy)

    proxy = proxy_server(name=proxy_name, port=port, dir=proxy_name, host_ip=host_ip,
                         is_ipv6=cluster.nodes.is_ipv6)
    cluster.set_proxy_values(http_proxy=proxy.address, https_proxy=proxy.address, no_proxy=no_proxy)

    # Sanity-check that the service recorded the proxy in the install config.
    install_config = cluster.get_install_config()
    proxy_details = install_config.get("proxy") or install_config.get("Proxy")
    assert proxy_details, str(install_config)
    assert proxy_details.get("httpsProxy") == proxy.address, f"{proxy_details.get('httpsProxy')} should equal {proxy.address}"
@pytest.fixture()
def iptables(self) -> Callable[[Cluster, List[IptableRule], Optional[List[Node]]], None]:
    """Yield a helper installing iptables rules scoped to given nodes' IPs; rules are removed on teardown."""
    rules = []

    def set_iptables_rules_for_nodes(
            cluster: Cluster,
            iptables_rules: List[IptableRule],
            given_nodes=None,
    ):
        given_node_ips = []
        given_nodes = given_nodes or cluster.nodes.nodes
        cluster_config = cluster.config
        if cluster_config.download_image:
            cluster.generate_and_download_infra_env(
                iso_download_path=cluster_config.iso_download_path,
            )
        # Boot the nodes briefly just to learn their IP addresses.
        cluster.nodes.start_given(given_nodes)
        for node in given_nodes:
            given_node_ips.append(node.ips[0])
        cluster.nodes.shutdown_given(given_nodes)
        logging.info(f'Given node ips: {given_node_ips}')

        for _rule in iptables_rules:
            _rule.add_sources(given_node_ips)
            rules.append(_rule)
            _rule.insert()

    yield set_iptables_rules_for_nodes

    logging.info('---TEARDOWN iptables ---')
    for rule in rules:
        rule.delete()

@staticmethod
def attach_disk_flags(persistent):
    """Generator backing the attach_disk fixtures; detaches all test disks on exit."""
    modified_nodes = set()

    def attach(node, disk_size, bootable=False, with_wwn=False):
        nonlocal modified_nodes
        node.attach_test_disk(disk_size, bootable=bootable, persistent=persistent, with_wwn=with_wwn)
        modified_nodes.add(node)

    yield attach

    if global_variables.test_teardown:
        for modified_node in modified_nodes:
            try:
                modified_node.detach_all_test_disks()
                logging.info(f'Successfully detach test disks from node {modified_node.name}')
            except (libvirt.libvirtError, FileNotFoundError):
                # The VM may already be gone; disk cleanup is best-effort.
                logging.warning(f'Failed to detach test disks from node {modified_node.name}')

@pytest.fixture(scope="function")
def attach_disk(self):
    yield from self.attach_disk_flags(persistent=False)

@pytest.fixture(scope="function")
def attach_disk_persistent(self):
    yield from self.attach_disk_flags(persistent=True)
@pytest.fixture()
def attach_interface(self):
    """Yield a helper that attaches an extra libvirt network/interface to a node; undone on teardown."""
    added_networks = []

    def add(node, network_name=None, network_xml=None):
        interface_mac = ""
        network = ""
        if network_xml:
            # Full network definition supplied: create the network and attach to it.
            network, interface_mac = node.attach_interface(network_xml)
        elif network_name:
            # Attach to an already-existing named network.
            interface_mac = node.add_interface(network_name)
            network = node.get_network_by_name(network_name)
        added_networks.append({"node": node, "network": network, "mac": interface_mac})

    yield add

    for added_network in added_networks:
        logging.info(f'Deleting custom networks:{added_networks}')
        # Cleanup is best-effort; ignore any libvirt errors.
        with suppress(Exception):
            node_obj = added_network.get("node")
            node_obj.undefine_interface(added_network.get("mac"))
            node_obj.destroy_network(added_network.get("network"))

@pytest.fixture()
def proxy_server(self):
    """Yield a ProxyController factory; every started proxy is removed on teardown."""
    logging.info('--- SETUP --- proxy controller')
    proxy_servers = []

    def start_proxy_server(**kwargs):
        proxy_server = ProxyController(**kwargs)
        proxy_servers.append(proxy_server)
        return proxy_server

    yield start_proxy_server

    if global_variables.test_teardown:
        logging.info('--- TEARDOWN --- proxy controller')
        for server in proxy_servers:
            server.remove()
@staticmethod
def get_cluster_by_name(api_client, cluster_name):
    """Return the first cluster dict whose 'name' matches |cluster_name|, or None."""
    return next(
        (cluster for cluster in api_client.clusters_list() if cluster['name'] == cluster_name),
        None,
    )

@staticmethod
def assert_http_error_code(api_call, status, reason, **kwargs):
    """Assert that |api_call|(**kwargs) raises ApiException with the given status and reason."""
    with pytest.raises(ApiException) as response:
        api_call(**kwargs)
    assert response.value.status == status
    assert response.value.reason == reason

@staticmethod
def assert_cluster_validation(cluster_info, validation_section, validation_id, expected_status):
    """Assert one cluster validation reports the expected status."""
    found_status = infra_utils.get_cluster_validation_value(cluster_info, validation_section, validation_id)
    # f-string instead of '+' concatenation: same message text, but no TypeError
    # if a status value is ever not a plain str.
    assert found_status == expected_status, (
        f"Found validation status {found_status} rather than {expected_status} "
        f"for validation {validation_id}"
    )

@staticmethod
def assert_string_length(string, expected_len):
    """Assert |string| has exactly |expected_len| characters."""
    assert len(string) == expected_len, (
        f"Expected len string of: {expected_len} rather than: {len(string)} "
        f"String value: {string}"
    )
def collect_test_logs(self, cluster, api_client, request, nodes: Nodes):
    """Collect service, virsh, and journalctl logs for a (typically failed) test."""
    log_dir_name = f"{global_variables.log_folder}/{request.node.name}"
    with suppress(ApiException):
        # Round-trip through JSON to force everything serializable (dates -> str).
        cluster_details = json.loads(json.dumps(cluster.get_details().to_dict(), sort_keys=True, default=str))
        download_logs(api_client, cluster_details, log_dir_name,
                      BaseTest._is_test_failed(request),
                      pull_secret=global_variables.pull_secret)
    self._collect_virsh_logs(nodes, log_dir_name)
    self._collect_journalctl(nodes, log_dir_name)

@classmethod
def _is_test_failed(cls, test):
    # Treated as failed when pytest's call-phase result is missing or failed.
    return not hasattr(test.node, "result_call") or test.node.result_call.failed

@classmethod
def _collect_virsh_logs(cls, nodes: Nodes, log_dir_name):
    """Dump libvirt state: domain/net lists, DHCP leases, qemu/console logs, libvirtd journal."""
    logging.info('Collecting virsh logs\n')
    os.makedirs(log_dir_name, exist_ok=True)
    virsh_log_path = os.path.join(log_dir_name, "libvirt_logs")
    os.makedirs(virsh_log_path, exist_ok=False)

    libvirt_list_path = os.path.join(virsh_log_path, "virsh_list")
    infra_utils.run_command(f"virsh list --all >> {libvirt_list_path}", shell=True)

    libvirt_net_list_path = os.path.join(virsh_log_path, "virsh_net_list")
    infra_utils.run_command(f"virsh net-list --all >> {libvirt_net_list_path}", shell=True)

    network_name = nodes.get_cluster_network()
    virsh_leases_path = os.path.join(virsh_log_path, "net_dhcp_leases")
    infra_utils.run_command(f"virsh net-dhcp-leases {network_name} >> {virsh_leases_path}", shell=True)

    messages_log_path = os.path.join(virsh_log_path, "messages.log")
    try:
        shutil.copy('/var/log/messages', messages_log_path)
    except FileNotFoundError:
        logging.warning('Failed to copy /var/log/messages, file does not exist')

    qemu_libvirt_path = os.path.join(virsh_log_path, "qemu_libvirt_logs")
    os.makedirs(qemu_libvirt_path, exist_ok=False)
    for node in nodes:
        try:
            shutil.copy(f'/var/log/libvirt/qemu/{node.name}.log', f'{qemu_libvirt_path}/{node.name}-qemu.log')
        except FileNotFoundError:
            logging.warning(f"Failed to copy {node.name} qemu log, file does not exist")

    console_log_path = os.path.join(virsh_log_path, "console_logs")
    os.makedirs(console_log_path, exist_ok=False)
    for node in nodes:
        try:
            shutil.copy(f'/var/log/libvirt/qemu/{node.name}-console.log',
                        f'{console_log_path}/{node.name}-console.log')
        except FileNotFoundError:
            logging.warning(f"Failed to copy {node.name} console log, file does not exist")

    libvird_log_path = os.path.join(virsh_log_path, "libvirtd_journal")
    infra_utils.run_command(f"journalctl --since \"{nodes.setup_time}\" "
                            f"-u libvirtd -D /run/log/journal >> {libvird_log_path}", shell=True)

@staticmethod
def _collect_journalctl(nodes: Nodes, log_dir_name):
    """Fetch each node's full journalctl output into <log_dir>/nodes_journalctl/<node>."""
    logging.info('Collecting journalctl\n')
    infra_utils.recreate_folder(log_dir_name, with_chmod=False, force_recreate=False)
    journal_ctl_path = Path(log_dir_name) / 'nodes_journalctl'
    infra_utils.recreate_folder(journal_ctl_path, with_chmod=False)
    for node in nodes:
        try:
            node.run_command(f'sudo journalctl >> /tmp/{node.name}-journalctl')
            journal_path = journal_ctl_path / node.name
            node.download_file(f'/tmp/{node.name}-journalctl', str(journal_path))
        except (RuntimeError, TimeoutError, SSHException):
            logging.info(f'Could not collect journalctl for {node.name}')

@staticmethod
def verify_no_logs_uploaded(cluster, cluster_tar_path):
    """Assert that downloading installation logs fails with a 'No log files' error."""
    with pytest.raises(ApiException) as ex:
        cluster.download_installation_logs(cluster_tar_path)
    assert "No log files" in str(ex.value)
@staticmethod
def update_oc_config(nodes, cluster):
    """Point the local oc/kubectl environment (KUBECONFIG, /etc/hosts) at the installed cluster."""
    os.environ["KUBECONFIG"] = cluster.config.kubeconfig_path
    if nodes.masters_count == 1:
        # Single-node clusters have no VIP; resolve the node's own address instead.
        main_cidr = cluster.get_primary_machine_cidr()
        api_vip = cluster.get_ip_for_single_node(cluster.api_client, cluster.id, main_cidr)
    else:
        vips = nodes.controller.get_ingress_and_api_vips()
        api_vip = vips['api_vip']
    infra_utils.config_etc_hosts(cluster_name=cluster.name,
                                 base_dns_domain=global_variables.base_dns_domain,
                                 api_vip=api_vip)

def wait_for_controller(self, cluster, nodes):
    """Block until the assisted-installer controller pod reports Running (15-minute timeout)."""
    cluster.download_kubeconfig_no_ingress()
    self.update_oc_config(nodes, cluster)

    def check_status():
        res = infra_utils.get_assisted_controller_status(cluster.config.kubeconfig_path)
        return "Running" in str(res, 'utf-8')

    waiting.wait(
        lambda: check_status(),
        timeout_seconds=900,
        sleep_seconds=30,
        waiting_for="controller to be running",
    )
@pytest.fixture(scope='session')
def kube_api_client(self):
    yield create_kube_api_client()

@pytest.fixture()
def kube_api_context(self, kube_api_client):
    """Kube-API context with the spoke namespace created (idempotently) for the test."""
    kube_api_context = KubeAPIContext(kube_api_client, clean_on_exit=global_variables.test_teardown)
    with kube_api_context:
        v1 = CoreV1Api(kube_api_client)
        try:
            v1.create_namespace(body={
                "apiVersion": "v1",
                "kind": "Namespace",
                "metadata": {
                    "name": global_variables.spoke_namespace,
                    "labels": {
                        "name": global_variables.spoke_namespace,
                    },
                },
            })
        except K8sApiException as e:
            # 409 Conflict just means the namespace already exists, which is fine.
            if e.status != 409:
                raise
        yield kube_api_context
        if global_variables.test_teardown:
            v1.delete_namespace(global_variables.spoke_namespace)
@pytest.fixture(scope="function")
def update_olm_config(self) -> Callable:
    """Yield a helper that grows Terraform resource requests to fit the requested OLM operators."""
    def update_config(tf_config: Optional[TerraformConfig] = None,
                      cluster_config: Optional[ClusterConfig] = None, operators=None):
        # BUGFIX: the defaults used to be `TerraformConfig()` / `ClusterConfig()`,
        # evaluated ONCE at definition time, so every defaulted call mutated the
        # same shared instances (classic mutable-default pitfall). Fresh
        # instances are now created per call.
        if tf_config is None:
            tf_config = TerraformConfig()
        if cluster_config is None:
            cluster_config = ClusterConfig()
        if operators is None:
            operators = parse_olm_operators_from_env()

        # resource_param presumably merges the current value with the operators'
        # requirements (TODO confirm against its definition).
        tf_config.worker_memory = resource_param(tf_config.worker_memory,
                                                 OperatorResource.WORKER_MEMORY_KEY, operators)
        tf_config.master_memory = resource_param(tf_config.master_memory,
                                                 OperatorResource.MASTER_MEMORY_KEY, operators)
        tf_config.worker_vcpu = resource_param(tf_config.worker_vcpu,
                                               OperatorResource.WORKER_VCPU_KEY, operators)
        tf_config.master_vcpu = resource_param(tf_config.master_vcpu,
                                               OperatorResource.MASTER_VCPU_KEY, operators)
        tf_config.workers_count = resource_param(tf_config.workers_count,
                                                 OperatorResource.WORKER_COUNT_KEY, operators)
        tf_config.worker_disk = resource_param(tf_config.worker_disk,
                                               OperatorResource.WORKER_DISK_KEY, operators)
        tf_config.master_disk = resource_param(tf_config.master_disk,
                                               OperatorResource.MASTER_DISK_KEY, operators)
        tf_config.master_disk_count = resource_param(tf_config.master_disk_count,
                                                     OperatorResource.MASTER_DISK_COUNT_KEY, operators)
        tf_config.worker_disk_count = resource_param(tf_config.worker_disk_count,
                                                     OperatorResource.WORKER_DISK_COUNT_KEY, operators)
        tf_config.nodes_count = tf_config.masters_count + tf_config.workers_count

        cluster_config.olm_operators = [operators]

    yield update_config
| true | true |
1c32855ce040c09944098b703eb82adf950f3828 | 43 | py | Python | config.py | cbroms/project-archive | 8e7b59b73de2cc3d59f2e302968f6706d2b25e72 | [
"Apache-2.0"
] | null | null | null | config.py | cbroms/project-archive | 8e7b59b73de2cc3d59f2e302968f6706d2b25e72 | [
"Apache-2.0"
] | 1 | 2021-06-28T20:34:30.000Z | 2021-06-28T20:34:30.000Z | config.py | cbroms/project-archive | 8e7b59b73de2cc3d59f2e302968f6706d2b25e72 | [
"Apache-2.0"
] | null | null | null |
# Filesystem prefix (relative to the app root) where bundled static assets live.
PATH_TO_STATIC_FILES = "static/static/"
| 8.6 | 39 | 0.744186 |
# Filesystem prefix (relative to the app root) where bundled static assets live.
PATH_TO_STATIC_FILES = "static/static/"
| true | true |
1c328629c4074fde72880bb57b9d5bba66ca7c6a | 3,437 | py | Python | pytorch/diora/net/outside_index.py | willzhang100/diora | d7d80389eeb3294398d125e451e4158ea0b40eb5 | [
"Apache-2.0"
] | 81 | 2019-03-31T20:16:01.000Z | 2022-02-25T15:39:35.000Z | pytorch/diora/net/outside_index.py | ldzhangyx/diora-for-music-structure | 40b3533e0a181c23cb3d17a65fca528c72813cfb | [
"Apache-2.0"
] | 7 | 2019-04-24T02:33:00.000Z | 2022-01-08T20:49:34.000Z | pytorch/diora/net/outside_index.py | ldzhangyx/diora-for-music-structure | 40b3533e0a181c23cb3d17a65fca528c72813cfb | [
"Apache-2.0"
] | 21 | 2019-04-07T12:37:38.000Z | 2022-03-01T09:18:24.000Z | import torch
from diora.net.offset_cache import get_offset_cache
class OutsideIndex(object):
    """Enumerates (parent, sibling) chart coordinates for DIORA's outside pass."""

    def get_pairs(self, level, i, n):
        """
        Returns all (parent, sibling) coordinate pairs that
        are used to construct a node at coordinates
        (level, i) where there n leaf nodes.
        """
        # Parents directly above the target span, with siblings to its left.
        from_above = [
            [(lvl, i), (lvl - level - 1, i - level - 1)]
            for lvl in range(level + 1, i + 1)
        ]
        # Parents extending to the right of the target span.
        from_right = [
            [(level + pos - i, pos), (pos - i - 1, pos)]
            for pos in range(i + 1, n)
        ]
        return from_above + from_right

    def xget_all_pairs(self, level, n):
        # Reference implementation built directly from get_pairs.
        return [
            pair
            for start in range(level, n)
            for pair in self.get_pairs(level, start, n)
        ]

    def get_all_pairs(self, level, n):
        # NOTE(review): elements here are emitted as (position, level) tuples,
        # the reverse of get_pairs' ordering — consumers index them accordingly.
        span = n - level
        last = span - 1
        pairs = []
        for row in range(last):
            emitted_left = 0
            for col in range(span):
                if col < last - row:
                    sis_level = n - row - 1
                    sis_pos = last - row - col - 1
                    par_level = sis_level
                    par_pos = sis_level - col
                else:
                    sis_level = col - 1
                    sis_pos = emitted_left
                    par_level = n - (last - sis_level)
                    par_pos = n - (last - sis_pos)
                    emitted_left += 1
                pairs.append([(par_pos, par_level), (sis_pos, sis_level)])
        return pairs
class OutsideIndexCheck(object):
    """Validity oracle for (parent, sibling) outside coordinates derived from gold span pairs."""

    def __init__(self, length, spans, siblings):
        # NOTE(review): `length` and `spans` are unused here; presumably kept
        # for interface parity — confirm before removing.
        partner_of = {}
        for left, right, name in siblings:
            partner_of[left] = (right, name)
            partner_of[right] = (left, name)

        valid = {}
        for sib_span, (tgt_span, _name) in partner_of.items():
            # Convert (start, end) spans to (level, position) chart coordinates:
            # level = span length - 1, position = span start.
            tgt = (tgt_span[1] - tgt_span[0] - 1, tgt_span[0])
            sis = (sib_span[1] - sib_span[0] - 1, sib_span[0])
            # Parent covers sibling + target: levels add (+1), position is leftmost.
            par = (sis[0] + tgt[0] + 1, min(sis[1], tgt[1]))
            valid[(par, sis)] = True
        self.check = valid

    def is_valid(self, par, sis):
        return (par, sis) in self.check
def get_outside_index(length, level, offset_cache=None, cuda=False):
    """Build (parent, sibling) gather indices into the flattened chart.

    :param length: number of leaf nodes in the chart.
    :param level: level whose outside representations are being built.
    :param offset_cache: maps level -> offset of that level's first cell in the
        flattened chart; computed from `length` when omitted.
    :param cuda: place the index tensors on the current CUDA device.
    :return: (par_index, sis_index) — equal-length int64 tensors.
    """
    if offset_cache is None:
        offset_cache = get_offset_cache(length)
    pairs = OutsideIndex().get_all_pairs(level, length)

    device = torch.cuda.current_device() if cuda else None

    def flatten(components):
        # Each component (a, b) maps to flat chart index
        # offset_cache[a] + (b - a), exactly as the two original loops did.
        return torch.tensor(
            [offset_cache[a] + (b - a) for a, b in components],
            dtype=torch.int64,
            device=device,
        )

    # The two index tensors were previously built by duplicated loops;
    # one helper now handles both.
    par_index = flatten(par for par, _ in pairs)
    sis_index = flatten(sis for _, sis in pairs)
    return par_index, sis_index
| 26.643411 | 69 | 0.495781 | import torch
from diora.net.offset_cache import get_offset_cache
class OutsideIndex(object):
    """Enumerates (parent, sibling) chart coordinates for DIORA's outside pass."""

    def get_pairs(self, level, i, n):
        """
        Returns all (parent, sibling) coordinate pairs that
        are used to construct a node at coordinates
        (level, i) where there n leaf nodes.
        """
        pairs = []
        # Parents directly above the target span, siblings to its left.
        for level_ in range(level + 1, i + 1):
            p_level = level_
            p_i = i
            s_level = level_ - level - 1
            s_i = i - level - 1
            pairs.append([(p_level, p_i), (s_level, s_i)])
        # Parents extending to the right of the target span.
        for i_ in range(i + 1, n):
            p_level = level + i_ - i
            p_i = i_
            s_level = i_ - i - 1
            s_i = i_
            pairs.append([(p_level, p_i), (s_level, s_i)])
        return pairs

    def xget_all_pairs(self, level, n):
        # Reference implementation built directly from get_pairs.
        pairs = []
        for i in range(level, n):
            pairs += self.get_pairs(level, i, n)
        return pairs

    def get_all_pairs(self, level, n):
        # NOTE(review): elements here are emitted as (position, level) tuples,
        # the reverse of get_pairs' ordering — consumers index them accordingly.
        L = n - level
        N = L - 1
        pairs = []
        for i in range(N):
            jseen = 0
            for j in range(L):
                if j < N - i:
                    s_level = n - i - 1
                    s_i = N - i - j - 1
                    p_level = s_level
                    p_i = s_level - j
                else:
                    s_level = j - 1
                    s_i = jseen
                    p_level = n - (N - s_level)
                    p_i = n - (N - s_i)
                    jseen += 1
                pair = [(p_i, p_level), (s_i, s_level)]
                pairs.append(pair)
        return pairs
class OutsideIndexCheck(object):
    """Validity oracle for (parent, sibling) outside coordinates derived from gold span pairs."""

    def __init__(self, length, spans, siblings):
        # NOTE(review): `length` and `spans` are unused here; presumably kept
        # for interface parity — confirm before removing.
        sib_map = {}
        for x, y, n in siblings:
            sib_map[x] = (y, n)
            sib_map[y] = (x, n)
        check = {}
        for sibling, (target, name) in sib_map.items():
            # Convert (start, end) spans to (level, position) chart coordinates:
            # level = span length - 1, position = span start.
            xlength = target[1] - target[0]
            xlevel = xlength - 1
            xpos = target[0]
            tgt = (xlevel, xpos)
            slength = sibling[1] - sibling[0]
            slevel = slength - 1
            spos = sibling[0]
            sis = (slevel, spos)
            # Parent covers sibling + target: levels add (+1), position is leftmost.
            par = (sis[0] + tgt[0] + 1, min(sis[1], tgt[1]))
            check[(par, sis)] = True
        self.check = check

    def is_valid(self, par, sis):
        return (par, sis) in self.check
def get_outside_index(length, level, offset_cache=None, cuda=False):
    """Build (parent, sibling) gather indices into the flattened chart.

    offset_cache maps level -> offset of that level's first cell in the
    flattened chart; computed from `length` when omitted.
    Returns (par_index, sis_index) int64 tensors of equal length.
    """
    if offset_cache is None:
        offset_cache = get_offset_cache(length)
    index = OutsideIndex()
    pairs = index.get_all_pairs(level, length)
    par_lvl, par_pos = [], []
    sis_lvl, sis_pos = [], []
    for pair in pairs:
        par, sis = pair
        par_lvl.append(par[0])
        par_pos.append(par[1] - par[0])
        sis_lvl.append(sis[0])
        sis_pos.append(sis[1] - sis[0])
    device = torch.cuda.current_device() if cuda else None
    # Parent: flat index is the level's offset plus the position within it.
    index = []
    for lvl, pos in zip(par_lvl, par_pos):
        offset = offset_cache[lvl]
        idx = offset + pos
        index.append(idx)
    par_index = torch.tensor(index, dtype=torch.int64, device=device)
    # Sibling: same flattening for the sibling coordinates.
    index = []
    for lvl, pos in zip(sis_lvl, sis_pos):
        offset = offset_cache[lvl]
        idx = offset + pos
        index.append(idx)
    sis_index = torch.tensor(index, dtype=torch.int64, device=device)
    return par_index, sis_index
| true | true |
1c3286a6e08dc9dab868e091995ae78493c7e703 | 11,873 | py | Python | src/backend/api/handlers/team.py | bovlb/the-blue-alliance | 29389649d96fe060688f218d463e642dcebfd6cc | [
"MIT"
] | null | null | null | src/backend/api/handlers/team.py | bovlb/the-blue-alliance | 29389649d96fe060688f218d463e642dcebfd6cc | [
"MIT"
] | null | null | null | src/backend/api/handlers/team.py | bovlb/the-blue-alliance | 29389649d96fe060688f218d463e642dcebfd6cc | [
"MIT"
] | null | null | null | from typing import Optional
from flask import Response
from backend.api.handlers.decorators import (
api_authenticated,
validate_keys,
)
from backend.api.handlers.helpers.model_properties import (
filter_event_properties,
filter_match_properties,
filter_team_properties,
ModelType,
)
from backend.api.handlers.helpers.profiled_jsonify import profiled_jsonify
from backend.api.handlers.helpers.track_call import track_call_after_response
from backend.common.consts.api_version import ApiMajorVersion
from backend.common.consts.media_tag import get_enum_from_url
from backend.common.decorators import cached_public
from backend.common.models.event_team import EventTeam
from backend.common.models.keys import EventKey, TeamKey
from backend.common.models.team import Team
from backend.common.queries.award_query import (
TeamAwardsQuery,
TeamEventAwardsQuery,
TeamYearAwardsQuery,
)
from backend.common.queries.district_query import TeamDistrictsQuery
from backend.common.queries.event_query import (
TeamEventsQuery,
TeamYearEventsQuery,
TeamYearEventTeamsQuery,
)
from backend.common.queries.match_query import (
TeamEventMatchesQuery,
TeamYearMatchesQuery,
)
from backend.common.queries.media_query import (
TeamSocialMediaQuery,
TeamTagMediasQuery,
TeamYearMediaQuery,
TeamYearTagMediasQuery,
)
from backend.common.queries.robot_query import TeamRobotsQuery
from backend.common.queries.team_query import (
TeamListQuery,
TeamListYearQuery,
TeamParticipationQuery,
TeamQuery,
)
@api_authenticated
@validate_keys
@cached_public
def team(team_key: TeamKey, model_type: Optional[ModelType] = None) -> Response:
    """
    Returns details about one team, specified by |team_key|.
    """
    track_call_after_response("team", team_key, model_type)

    team_dict = TeamQuery(team_key=team_key).fetch_dict(ApiMajorVersion.API_V3)
    if model_type is not None:
        (team_dict,) = filter_team_properties([team_dict], model_type)
    return profiled_jsonify(team_dict)


@api_authenticated
@validate_keys
@cached_public
def team_years_participated(team_key: TeamKey) -> Response:
    """
    Returns a sorted list of years the given Team participated in an event.
    """
    track_call_after_response("team/years_participated", team_key)

    return profiled_jsonify(sorted(TeamParticipationQuery(team_key=team_key).fetch()))


@api_authenticated
@validate_keys
@cached_public
def team_history_districts(team_key: TeamKey) -> Response:
    """
    Returns a list of all DistrictTeam models associated with the given Team.
    """
    track_call_after_response("team/history/districts", team_key)

    districts = TeamDistrictsQuery(team_key=team_key).fetch_dict(ApiMajorVersion.API_V3)
    return profiled_jsonify(districts)


@api_authenticated
@validate_keys
@cached_public
def team_history_robots(team_key: TeamKey) -> Response:
    """
    Returns a list of all Robot models associated with the given Team.
    """
    track_call_after_response("team/history/robots", team_key)

    robots = TeamRobotsQuery(team_key=team_key).fetch_dict(ApiMajorVersion.API_V3)
    return profiled_jsonify(robots)


@api_authenticated
@validate_keys
@cached_public
def team_social_media(team_key: TeamKey) -> Response:
    """
    Returns a list of all social media models associated with the given Team.
    """
    track_call_after_response("team/social_media", team_key)

    socials = TeamSocialMediaQuery(team_key=team_key).fetch_dict(ApiMajorVersion.API_V3)
    return profiled_jsonify(socials)
@api_authenticated
@validate_keys
@cached_public
def team_events(
    team_key: TeamKey,
    year: Optional[int] = None,
    model_type: Optional[ModelType] = None,
) -> Response:
    """
    Returns a list of all event models associated with the given Team,
    optionally restricted to a single year.
    """
    api_action = "team/events" if year is None else f"team/events/{year}"
    track_call_after_response(api_action, team_key, model_type)

    if year is None:
        events = TeamEventsQuery(team_key=team_key).fetch_dict(ApiMajorVersion.API_V3)
    else:
        events = TeamYearEventsQuery(team_key=team_key, year=year).fetch_dict(
            ApiMajorVersion.API_V3
        )
    if model_type is not None:
        events = filter_event_properties(events, model_type)
    return profiled_jsonify(events)


@api_authenticated
@validate_keys
@cached_public
def team_events_statuses_year(team_key: TeamKey, year: int) -> Response:
    """
    Returns a dict of { event_key: status_dict } for all of the team's events
    in the given year.
    """
    track_call_after_response("team/events/statuses", f"{team_key}/{year}")

    statuses = {}
    for event_team in TeamYearEventTeamsQuery(team_key=team_key, year=year).fetch():
        status = event_team.status
        if status is not None:
            strings = event_team.status_strings
            status.update(
                {
                    "alliance_status_str": strings["alliance"],
                    "playoff_status_str": strings["playoff"],
                    "overall_status_str": strings["overall"],
                }
            )
            # Events whose status has been computed are keyed by event id.
            statuses[event_team.event.id()] = status
    return profiled_jsonify(statuses)
@api_authenticated
@validate_keys
@cached_public
def team_event_matches(
    team_key: TeamKey, event_key: EventKey, model_type: Optional[ModelType] = None
) -> Response:
    """
    Returns a list of matches for a team at an event.
    """
    track_call_after_response(
        "team/event/matches", f"{team_key}/{event_key}", model_type
    )

    matches = TeamEventMatchesQuery(team_key=team_key, event_key=event_key).fetch_dict(
        ApiMajorVersion.API_V3
    )
    return profiled_jsonify(
        matches if model_type is None else filter_match_properties(matches, model_type)
    )


@api_authenticated
@validate_keys
@cached_public
def team_event_awards(team_key: TeamKey, event_key: EventKey) -> Response:
    """
    Returns a list of awards for a team at an event.
    """
    track_call_after_response("team/event/awards", f"{team_key}/{event_key}")

    awards = TeamEventAwardsQuery(team_key=team_key, event_key=event_key).fetch_dict(
        ApiMajorVersion.API_V3
    )
    return profiled_jsonify(awards)


@api_authenticated
@validate_keys
@cached_public
def team_event_status(team_key: TeamKey, event_key: EventKey) -> Response:
    """
    Return the status for a team at an event (null when no EventTeam exists).
    """
    track_call_after_response("team/event/status", f"{team_key}/{event_key}")

    status = None
    event_team = EventTeam.get_by_id("{}_{}".format(event_key, team_key))
    if event_team is not None:
        status = event_team.status
        if status is not None:
            strings = event_team.status_strings
            status.update(
                {  # pyre-ignore[55]
                    "alliance_status_str": strings["alliance"],
                    "playoff_status_str": strings["playoff"],
                    "overall_status_str": strings["overall"],
                }
            )
    return profiled_jsonify(status)
@api_authenticated
@validate_keys
@cached_public
def team_awards(
    team_key: TeamKey,
    year: Optional[int] = None,
) -> Response:
    """
    Returns the team's awards, across all history or for a single year.
    """
    if year is None:
        track_call_after_response("team/history/awards", team_key)
        awards = TeamAwardsQuery(team_key=team_key).fetch_dict(ApiMajorVersion.API_V3)
    else:
        track_call_after_response("team/year/awards", f"{team_key}/{year}")
        awards = TeamYearAwardsQuery(team_key=team_key, year=year).fetch_dict(
            ApiMajorVersion.API_V3
        )
    return profiled_jsonify(awards)


@api_authenticated
@validate_keys
@cached_public
def team_matches(
    team_key: TeamKey,
    year: int,
    model_type: Optional[ModelType] = None,
) -> Response:
    """
    Returns a list of matches associated with the given Team in a given year.
    """
    track_call_after_response("team/year/matches", f"{team_key}/{year}", model_type)

    matches = TeamYearMatchesQuery(team_key=team_key, year=year).fetch_dict(
        ApiMajorVersion.API_V3
    )
    return profiled_jsonify(
        matches if model_type is None else filter_match_properties(matches, model_type)
    )


@api_authenticated
@validate_keys
@cached_public
def team_media_year(team_key: TeamKey, year: int) -> Response:
    """
    Returns a list of media associated with the given Team in a given year.
    """
    track_call_after_response("team/media", f"{team_key}/{year}")

    media_list = TeamYearMediaQuery(team_key=team_key, year=year).fetch_dict(
        ApiMajorVersion.API_V3
    )
    return profiled_jsonify(media_list)
@api_authenticated
@validate_keys
@cached_public
def team_media_tag(
    team_key: TeamKey, media_tag: str, year: Optional[int] = None
) -> Response:
    """
    Returns the team's media carrying the given tag, optionally limited to one year.
    """
    api_label = (
        f"{team_key}/{media_tag}" if year is None else f"{team_key}/{media_tag}/{year}"
    )
    track_call_after_response("team/media/tag", api_label)

    tag_enum = get_enum_from_url(media_tag)
    if tag_enum is None:
        # Unknown tags produce an empty list rather than an error.
        return profiled_jsonify([])

    if year is None:
        media_list = TeamTagMediasQuery(team_key=team_key, media_tag=tag_enum).fetch_dict(
            ApiMajorVersion.API_V3
        )
    else:
        media_list = TeamYearTagMediasQuery(
            team_key=team_key, media_tag=tag_enum, year=year
        ).fetch_dict(ApiMajorVersion.API_V3)
    return profiled_jsonify(media_list)
@api_authenticated
@cached_public
def team_list_all(model_type: Optional[ModelType] = None) -> Response:
    """
    Returns a list of all teams.

    Pages of 500 teams each are fetched concurrently, up to the page that
    contains the highest known team number.
    """
    track_call_after_response("team/list", "all", model_type)
    max_team_keys = Team.query().order(-Team.team_number).fetch(1, keys_only=True)
    if not max_team_keys:
        # No Team entities exist at all; previously this indexed [0] and
        # raised IndexError. Return an empty list instead.
        return profiled_jsonify([])
    # Strip the 3-character program prefix (presumably "frc") from the key to
    # get the numeric team number — TODO confirm against key format.
    max_team_num = int(max_team_keys[0].id()[3:])
    # Floor division: page N holds team numbers [N*500, N*500+499].
    max_team_page = max_team_num // 500
    futures = [
        TeamListQuery(page=page_num).fetch_dict_async(ApiMajorVersion.API_V3)
        for page_num in range(max_team_page + 1)
    ]
    team_list = []
    for future in futures:
        team_list.extend(future.get_result())
    if model_type is not None:
        team_list = filter_team_properties(team_list, model_type)
    return profiled_jsonify(team_list)
@api_authenticated
@cached_public
def team_list(
    page_num: int, year: Optional[int] = None, model_type: Optional[ModelType] = None
) -> Response:
    """
    Returns one page of teams, 500 teams per page, ordered by team number.

    Page 0 covers team numbers 0-499, page 1 covers 500-999, and so on.
    When a year is given, only teams that participated that year are
    included.
    """
    api_action = "team/list" if year is None else f"team/list/{year}"
    track_call_after_response(api_action, str(page_num), model_type)

    if year is not None:
        teams = TeamListYearQuery(year=year, page=page_num).fetch_dict(
            ApiMajorVersion.API_V3
        )
    else:
        teams = TeamListQuery(page=page_num).fetch_dict(ApiMajorVersion.API_V3)

    if model_type is not None:
        teams = filter_team_properties(teams, model_type)
    return profiled_jsonify(teams)
| 30.44359 | 106 | 0.712541 | from typing import Optional
from flask import Response
from backend.api.handlers.decorators import (
api_authenticated,
validate_keys,
)
from backend.api.handlers.helpers.model_properties import (
filter_event_properties,
filter_match_properties,
filter_team_properties,
ModelType,
)
from backend.api.handlers.helpers.profiled_jsonify import profiled_jsonify
from backend.api.handlers.helpers.track_call import track_call_after_response
from backend.common.consts.api_version import ApiMajorVersion
from backend.common.consts.media_tag import get_enum_from_url
from backend.common.decorators import cached_public
from backend.common.models.event_team import EventTeam
from backend.common.models.keys import EventKey, TeamKey
from backend.common.models.team import Team
from backend.common.queries.award_query import (
TeamAwardsQuery,
TeamEventAwardsQuery,
TeamYearAwardsQuery,
)
from backend.common.queries.district_query import TeamDistrictsQuery
from backend.common.queries.event_query import (
TeamEventsQuery,
TeamYearEventsQuery,
TeamYearEventTeamsQuery,
)
from backend.common.queries.match_query import (
TeamEventMatchesQuery,
TeamYearMatchesQuery,
)
from backend.common.queries.media_query import (
TeamSocialMediaQuery,
TeamTagMediasQuery,
TeamYearMediaQuery,
TeamYearTagMediasQuery,
)
from backend.common.queries.robot_query import TeamRobotsQuery
from backend.common.queries.team_query import (
TeamListQuery,
TeamListYearQuery,
TeamParticipationQuery,
TeamQuery,
)
@api_authenticated
@validate_keys
@cached_public
def team(team_key: TeamKey, model_type: Optional[ModelType] = None) -> Response:
    """
    Returns details for a single Team, optionally reduced to the
    properties of the requested model type.
    """
    track_call_after_response("team", team_key, model_type)
    team = TeamQuery(team_key=team_key).fetch_dict(ApiMajorVersion.API_V3)
    if model_type is not None:
        team = filter_team_properties([team], model_type)[0]
    return profiled_jsonify(team)
@api_authenticated
@validate_keys
@cached_public
def team_years_participated(team_key: TeamKey) -> Response:
    """
    Returns a sorted list of years in which the given Team participated.
    """
    track_call_after_response("team/years_participated", team_key)
    years_participated = TeamParticipationQuery(team_key=team_key).fetch()
    years_participated = sorted(years_participated)
    return profiled_jsonify(years_participated)
@api_authenticated
@validate_keys
@cached_public
def team_history_districts(team_key: TeamKey) -> Response:
    """
    Returns a list of districts associated with the given Team.
    """
    track_call_after_response("team/history/districts", team_key)
    team_districts = TeamDistrictsQuery(team_key=team_key).fetch_dict(
        ApiMajorVersion.API_V3
    )
    return profiled_jsonify(team_districts)
@api_authenticated
@validate_keys
@cached_public
def team_history_robots(team_key: TeamKey) -> Response:
    """
    Returns the robot records associated with the given Team.
    """
    track_call_after_response("team/history/robots", team_key)
    team_robots = TeamRobotsQuery(team_key=team_key).fetch_dict(ApiMajorVersion.API_V3)
    return profiled_jsonify(team_robots)
@api_authenticated
@validate_keys
@cached_public
def team_social_media(team_key: TeamKey) -> Response:
    """
    Returns the social media entries associated with the given Team.
    """
    track_call_after_response("team/social_media", team_key)
    team_social_media = TeamSocialMediaQuery(team_key=team_key).fetch_dict(
        ApiMajorVersion.API_V3
    )
    return profiled_jsonify(team_social_media)
@api_authenticated
@validate_keys
@cached_public
def team_events(
    team_key: TeamKey,
    year: Optional[int] = None,
    model_type: Optional[ModelType] = None,
) -> Response:
    """
    Returns a list of events associated with the given Team.
    Optionally only returns events from the specified year, and optionally
    filters the event properties down to the requested model type.
    """
    api_action = "team/events"
    if year is not None:
        api_action += f"/{year}"
    track_call_after_response(api_action, team_key, model_type)
    if year is None:
        team_events = TeamEventsQuery(team_key=team_key).fetch_dict(
            ApiMajorVersion.API_V3
        )
    else:
        team_events = TeamYearEventsQuery(team_key=team_key, year=year).fetch_dict(
            ApiMajorVersion.API_V3
        )
    if model_type is not None:
        team_events = filter_event_properties(team_events, model_type)
    return profiled_jsonify(team_events)
@api_authenticated
@validate_keys
@cached_public
def team_events_statuses_year(team_key: TeamKey, year: int) -> Response:
    """
    Returns a dict, keyed by event key, of the given Team's status at each
    event it attended in the given year. Entries may be null when no status
    has been computed.
    """
    track_call_after_response("team/events/statuses", f"{team_key}/{year}")
    event_teams = TeamYearEventTeamsQuery(team_key=team_key, year=year).fetch()
    statuses = {}
    for event_team in event_teams:
        status = event_team.status
        if status is not None:
            # Augment the raw status dict with human-readable summaries.
            status_strings = event_team.status_strings
            status.update(
                {
                    "alliance_status_str": status_strings["alliance"],
                    "playoff_status_str": status_strings["playoff"],
                    "overall_status_str": status_strings["overall"],
                }
            )
        statuses[event_team.event.id()] = status
    return profiled_jsonify(statuses)
@api_authenticated
@validate_keys
@cached_public
def team_event_matches(
    team_key: TeamKey, event_key: EventKey, model_type: Optional[ModelType] = None
) -> Response:
    """
    Returns the matches the given Team played at the given Event,
    optionally filtered down to the properties of the requested model type.
    """
    track_call_after_response(
        "team/event/matches", f"{team_key}/{event_key}", model_type
    )
    matches = TeamEventMatchesQuery(team_key=team_key, event_key=event_key).fetch_dict(
        ApiMajorVersion.API_V3
    )
    if model_type is not None:
        matches = filter_match_properties(matches, model_type)
    return profiled_jsonify(matches)
@api_authenticated
@validate_keys
@cached_public
def team_event_awards(team_key: TeamKey, event_key: EventKey) -> Response:
    """
    Returns the awards the given Team won at the given Event.
    """
    track_call_after_response("team/event/awards", f"{team_key}/{event_key}")
    awards = TeamEventAwardsQuery(team_key=team_key, event_key=event_key).fetch_dict(
        ApiMajorVersion.API_V3
    )
    return profiled_jsonify(awards)
@api_authenticated
@validate_keys
@cached_public
def team_event_status(team_key: TeamKey, event_key: EventKey) -> Response:
    """
    Returns the competition status of the given Team at the given Event.
    The response is null when no EventTeam record or status exists.
    """
    track_call_after_response("team/event/status", f"{team_key}/{event_key}")
    event_team = EventTeam.get_by_id("{}_{}".format(event_key, team_key))
    status = None
    if event_team is not None:
        status = event_team.status
    if status is not None:
        # Augment the raw status dict with human-readable summaries.
        status_strings = event_team.status_strings
        status.update(
            {
                "alliance_status_str": status_strings["alliance"],
                "playoff_status_str": status_strings["playoff"],
                "overall_status_str": status_strings["overall"],
            }
        )
    return profiled_jsonify(status)
@api_authenticated
@validate_keys
@cached_public
def team_awards(
    team_key: TeamKey,
    year: Optional[int] = None,
) -> Response:
    """
    Returns a list of awards associated with the given Team.
    Optionally only returns awards from the specified year.
    """
    if year is None:
        track_call_after_response("team/history/awards", team_key)
        awards = TeamAwardsQuery(team_key=team_key).fetch_dict(ApiMajorVersion.API_V3)
    else:
        track_call_after_response("team/year/awards", f"{team_key}/{year}")
        awards = TeamYearAwardsQuery(team_key=team_key, year=year).fetch_dict(
            ApiMajorVersion.API_V3
        )
    return profiled_jsonify(awards)
@api_authenticated
@validate_keys
@cached_public
def team_matches(
    team_key: TeamKey,
    year: int,
    model_type: Optional[ModelType] = None,
) -> Response:
    """
    Returns a list of matches associated with the given Team in a given
    year, optionally filtered to the properties of the model type.
    """
    track_call_after_response("team/year/matches", f"{team_key}/{year}", model_type)
    matches = TeamYearMatchesQuery(team_key=team_key, year=year).fetch_dict(
        ApiMajorVersion.API_V3
    )
    if model_type is not None:
        matches = filter_match_properties(matches, model_type)
    return profiled_jsonify(matches)
@api_authenticated
@validate_keys
@cached_public
def team_media_year(team_key: TeamKey, year: int) -> Response:
    """
    Returns a list of media associated with the given Team in a given year.
    """
    track_call_after_response("team/media", f"{team_key}/{year}")
    media = TeamYearMediaQuery(team_key=team_key, year=year).fetch_dict(
        ApiMajorVersion.API_V3
    )
    return profiled_jsonify(media)
@api_authenticated
@validate_keys
@cached_public
def team_media_tag(
    team_key: TeamKey, media_tag: str, year: Optional[int] = None
) -> Response:
    """
    Returns a list of media associated with the given Team with a given
    tag. Optionally filters by year. An unrecognized tag yields an empty
    list.
    """
    api_label = f"{team_key}/{media_tag}"
    if year is not None:
        api_label += f"/{year}"
    track_call_after_response("team/media/tag", api_label)
    tag_enum = get_enum_from_url(media_tag)
    if tag_enum is None:
        # Unknown tag: nothing can possibly match it.
        return profiled_jsonify([])
    if year is None:
        media = TeamTagMediasQuery(team_key=team_key, media_tag=tag_enum).fetch_dict(
            ApiMajorVersion.API_V3
        )
    else:
        media = TeamYearTagMediasQuery(
            team_key=team_key, media_tag=tag_enum, year=year
        ).fetch_dict(ApiMajorVersion.API_V3)
    return profiled_jsonify(media)
@api_authenticated
@cached_public
def team_list_all(model_type: Optional[ModelType] = None) -> Response:
    """
    Returns a list of all teams, fetched 500 per page up to the page that
    contains the highest known team number.
    """
    track_call_after_response("team/list", "all", model_type)
    max_team_key = Team.query().order(-Team.team_number).fetch(1, keys_only=True)[0]
    # Strips the 3-character program prefix (presumably "frc") from the key to
    # get the numeric team number — TODO confirm against key format.
    max_team_num = int(max_team_key.id()[3:])
    max_team_page = int(max_team_num / 500)
    # Kick off all page fetches concurrently before gathering results.
    futures = []
    for page_num in range(max_team_page + 1):
        futures.append(
            TeamListQuery(page=page_num).fetch_dict_async(ApiMajorVersion.API_V3)
        )
    team_list = []
    for future in futures:
        partial_team_list = future.get_result()
        team_list += partial_team_list
    if model_type is not None:
        team_list = filter_team_properties(team_list, model_type)
    return profiled_jsonify(team_list)
@api_authenticated
@cached_public
def team_list(
    page_num: int, year: Optional[int] = None, model_type: Optional[ModelType] = None
) -> Response:
    """
    Returns a list of teams, paginated by team number in sets of 500.
    Optionally only returns teams that participated in the specified year.

    page_num = 0 returns teams from 0-499,
    page_num = 1 returns teams from 500-999, etc.
    """
    api_action = "team/list"
    if year is not None:
        api_action += f"/{year}"
    track_call_after_response(api_action, str(page_num), model_type)
    if year is None:
        team_list = TeamListQuery(page=page_num).fetch_dict(ApiMajorVersion.API_V3)
    else:
        team_list = TeamListYearQuery(year=year, page=page_num).fetch_dict(
            ApiMajorVersion.API_V3
        )
    if model_type is not None:
        team_list = filter_team_properties(team_list, model_type)
    return profiled_jsonify(team_list)
| true | true |
1c3287c229049cb1b42b466f18808e837cb06b36 | 41,354 | py | Python | tensorflow/python/distribute/collective_all_reduce_strategy.py | Geolem/tensorflow | cfc9d3e1ba3b50dc66f1b81eaea537772ab16024 | [
"Apache-2.0"
] | null | null | null | tensorflow/python/distribute/collective_all_reduce_strategy.py | Geolem/tensorflow | cfc9d3e1ba3b50dc66f1b81eaea537772ab16024 | [
"Apache-2.0"
] | null | null | null | tensorflow/python/distribute/collective_all_reduce_strategy.py | Geolem/tensorflow | cfc9d3e1ba3b50dc66f1b81eaea537772ab16024 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Class CollectiveAllReduceStrategy implementing DistributionStrategy."""
import copy
import threading
import time
import weakref
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.core.protobuf import tensorflow_server_pb2
from tensorflow.python.distribute import collective_util
from tensorflow.python.distribute import cross_device_ops as cross_device_ops_lib
from tensorflow.python.distribute import cross_device_utils
from tensorflow.python.distribute import device_util
from tensorflow.python.distribute import distribute_lib
from tensorflow.python.distribute import distribute_utils
from tensorflow.python.distribute import distribution_strategy_context as ds_context
from tensorflow.python.distribute import input_lib
from tensorflow.python.distribute import mirrored_strategy
from tensorflow.python.distribute import multi_worker_util
from tensorflow.python.distribute import numpy_dataset
from tensorflow.python.distribute import reduce_util
from tensorflow.python.distribute import values
from tensorflow.python.distribute.cluster_resolver import ClusterResolver
from tensorflow.python.distribute.cluster_resolver import SimpleClusterResolver
from tensorflow.python.distribute.cluster_resolver import TFConfigClusterResolver
from tensorflow.python.eager import context
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import collective_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.tpu import tpu_strategy_util
from tensorflow.python.training.tracking import base
from tensorflow.python.util import deprecation
from tensorflow.python.util.tf_export import tf_export
# pylint: disable=line-too-long
@tf_export("distribute.MultiWorkerMirroredStrategy", v1=[])
class CollectiveAllReduceStrategy(distribute_lib.Strategy):
  """A distribution strategy for synchronous training on multiple workers.

  This strategy implements synchronous distributed training across multiple
  workers, each with potentially multiple GPUs. Similar to
  `tf.distribute.MirroredStrategy`, it replicates all variables and computations
  to each local device. The difference is that it uses a distributed collective
  implementation (e.g. all-reduce), so that multiple workers can work together.

  You need to launch your program on each worker and configure
  `cluster_resolver` correctly. For example, if you are using
  `tf.distribute.cluster_resolver.TFConfigClusterResolver`, each worker needs to
  have its corresponding `task_type` and `task_id` set in the `TF_CONFIG`
  environment variable. An example TF_CONFIG on worker-0 of a two worker cluster
  is:

  ```
  TF_CONFIG = '{"cluster": {"worker": ["localhost:12345", "localhost:23456"]}, "task": {"type": "worker", "index": 0} }'
  ```

  Your program runs on each worker as-is. Note that collectives require each
  worker to participate. All `tf.distribute` and non `tf.distribute` API may use
  collectives internally, e.g. checkpointing and saving since reading a
  `tf.Variable` with `tf.VariableSynchronization.ON_READ` all-reduces the value.
  Therefore it's recommended to run exactly the same program on each worker.
  Dispatching based on `task_type` or `task_id` of the worker is error-prone.

  `cluster_resolver.num_accelerators()` determines the number of GPUs the
  strategy uses. If it's zero, the strategy uses the CPU. All workers need to
  use the same number of devices, otherwise the behavior is undefined.

  This strategy is not intended for TPU. Use `tf.distribute.TPUStrategy`
  instead.

  After setting up TF_CONFIG, using this strategy is similar to using
  `tf.distribute.MirroredStrategy` and `tf.distribute.TPUStrategy`.

  ```
  strategy = tf.distribute.MultiWorkerMirroredStrategy()

  with strategy.scope():
    model = tf.keras.Sequential([
      tf.keras.layers.Dense(2, input_shape=(5,)),
    ])
    optimizer = tf.keras.optimizers.SGD(learning_rate=0.1)

  def dataset_fn(ctx):
    x = np.random.random((2, 5)).astype(np.float32)
    y = np.random.randint(2, size=(2, 1))
    dataset = tf.data.Dataset.from_tensor_slices((x, y))
    return dataset.repeat().batch(1, drop_remainder=True)
  dist_dataset = strategy.distribute_datasets_from_function(dataset_fn)

  model.compile()
  model.fit(dist_dataset)
  ```

  You can also write your own training loop:

  ```
  @tf.function
  def train_step(iterator):

    def step_fn(inputs):
      features, labels = inputs
      with tf.GradientTape() as tape:
        logits = model(features, training=True)
        loss = tf.keras.losses.sparse_categorical_crossentropy(
            labels, logits)
      grads = tape.gradient(loss, model.trainable_variables)
      optimizer.apply_gradients(zip(grads, model.trainable_variables))

    strategy.run(step_fn, args=(next(iterator),))

  for _ in range(NUM_STEP):
    train_step(iterator)
  ```

  See
  [Multi-worker training with Keras](https://www.tensorflow.org/tutorials/distribute/multi_worker_with_keras)
  for a detailed tutorial.

  __Saving__

  You need to save and checkpoint on all workers instead of just one. This is
  because variables whose synchronization=ON_READ triggers aggregation during
  saving. It's recommended to save to a different path on each worker to avoid
  race conditions. Each worker saves the same thing. See
  [Multi-worker training with Keras](https://www.tensorflow.org/tutorials/distribute/multi_worker_with_keras#model_saving_and_loading)
  tutorial for examples.

  __Known Issues__

  * `tf.distribute.cluster_resolver.TFConfigClusterResolver` does not return the
  correct number of accelerators. The strategy uses all available GPUs if
  `cluster_resolver` is `tf.distribute.cluster_resolver.TFConfigClusterResolver`
  or `None`.
  * In eager mode, the strategy needs to be created before calling any other
  Tensorflow API.

  """
  # pylint: enable=line-too-long

  # TODO(anjalisridhar): Update our guides with examples showing how we can use
  # the cluster_resolver argument.

  # The starting number for collective keys. This should only be set in tests.
  _collective_key_base = 0

  def __init__(self,
               cluster_resolver=None,
               communication_options=None):
    """Creates the strategy.

    Args:
      cluster_resolver: optional
        `tf.distribute.cluster_resolver.ClusterResolver`. If `None`,
        `tf.distribute.cluster_resolver.TFConfigClusterResolver` is used.
      communication_options: optional
        `tf.distribute.experimental.CommunicationOptions`. This configures the
        default options for cross device communications. It can be overridden by
        options provided to the communication APIs like
        `tf.distribute.ReplicaContext.all_reduce`. See
        `tf.distribute.experimental.CommunicationOptions` for details.
    """
    if communication_options is None:
      communication_options = collective_util.Options()
    super(CollectiveAllReduceStrategy, self).__init__(
        CollectiveAllReduceExtended(
            self,
            cluster_resolver=cluster_resolver,
            communication_options=communication_options))

    # Record the strategy choice and cluster shape in usage-metric gauges.
    distribute_lib.distribution_strategy_gauge.get_cell("V2").set(
        "MultiWorkerMirroredStrategy")
    # pylint: disable=protected-access
    distribute_lib.distribution_strategy_replica_gauge.get_cell(
        "num_workers").set(self.extended._num_workers)
    distribute_lib.distribution_strategy_replica_gauge.get_cell(
        "num_replicas_per_worker").set(self.extended._num_devices_per_worker)

  @classmethod
  def _from_local_devices(cls, devices, communication_options=None):
    """A convenience method to create an object with a list of devices."""
    obj = cls(communication_options=communication_options)
    obj.extended._initialize_local(TFConfigClusterResolver(), devices=devices)  # pylint: disable=protected-access
    return obj

  @property
  def cluster_resolver(self):
    """Returns the cluster resolver associated with this strategy.

    As a multi-worker strategy, `tf.distribute.MultiWorkerMirroredStrategy`
    provides the associated `tf.distribute.cluster_resolver.ClusterResolver`. If
    the user provides one in `__init__`, that instance is returned; if the user
    does not, a default `TFConfigClusterResolver` is provided.
    """
    return self.extended._cluster_resolver  # pylint: disable=protected-access
class _CollectiveAllReduceStrategyExperimentalMeta(type):
  """Metaclass that makes the experimental alias pass `isinstance` checks."""

  @classmethod
  def __instancecheck__(cls, instance):
    # This is to make
    # isinstance(tf.distribute.MultiWorkerMirroredStrategy(),
    #            tf.distribute.experimental.MultiWorkerMirroredStrategy)
    # return True, since some libraries perform such a check.
    return isinstance(instance, CollectiveAllReduceStrategy)
@tf_export("distribute.experimental.MultiWorkerMirroredStrategy", v1=[])
class _CollectiveAllReduceStrategyExperimental(
    CollectiveAllReduceStrategy,
    metaclass=_CollectiveAllReduceStrategyExperimentalMeta):

  # Deprecated alias of `tf.distribute.MultiWorkerMirroredStrategy`; it keeps
  # the older `communication` constructor argument for backward compatibility.
  __doc__ = CollectiveAllReduceStrategy.__doc__

  @deprecation.deprecated(
      None, "use distribute.MultiWorkerMirroredStrategy instead")
  def __init__(self,
               communication=collective_util.CommunicationImplementation.AUTO,
               cluster_resolver=None):
    """Creates the strategy.

    Args:
      communication: optional
        `tf.distribute.experimental.CommunicationImplementation`. This is a hint
        on the preferred collective communication implementation. Possible
        values include `AUTO`, `RING`, and `NCCL`.
      cluster_resolver: optional
        `tf.distribute.cluster_resolver.ClusterResolver`. If `None`,
        `tf.distribute.cluster_resolver.TFConfigClusterResolver` is used.
    """
    communication_options = collective_util.Options(
        implementation=communication)
    super(_CollectiveAllReduceStrategyExperimental,
          self).__init__(cluster_resolver, communication_options)

  @classmethod
  def _from_local_devices(
      cls,
      devices,
      communication=collective_util.CommunicationImplementation.AUTO):
    """A convenience method to create an object with a list of devices."""
    obj = cls(communication)
    obj.extended._initialize_local(TFConfigClusterResolver(), devices=devices)  # pylint: disable=protected-access
    return obj
# Make the deprecated experimental alias report the same class name as the
# non-experimental public symbol.
_CollectiveAllReduceStrategyExperimental.__name__ = CollectiveAllReduceStrategy.__name__
@tf_export(v1=["distribute.experimental.MultiWorkerMirroredStrategy"])  # pylint: disable=missing-docstring
class CollectiveAllReduceStrategyV1(distribute_lib.StrategyV1):

  __doc__ = CollectiveAllReduceStrategy.__doc__

  # The starting number for collective keys. This should only be set in tests.
  _collective_key_base = 0

  def __init__(self,
               communication=collective_util.CommunicationImplementation.AUTO,
               cluster_resolver=None):
    """Initializes the object."""
    communication_options = collective_util.Options(
        implementation=communication)
    super(CollectiveAllReduceStrategyV1, self).__init__(
        CollectiveAllReduceExtended(
            self,
            cluster_resolver=cluster_resolver,
            communication_options=communication_options))
    # Record the strategy choice and cluster shape in usage-metric gauges.
    distribute_lib.distribution_strategy_gauge.get_cell("V1").set(
        "MultiWorkerMirroredStrategy")
    # pylint: disable=protected-access
    distribute_lib.distribution_strategy_replica_gauge.get_cell(
        "num_workers").set(self.extended._num_workers)
    # The gauge reports GPU counts only; CPU/TPU configurations report 0.
    distribute_lib.distribution_strategy_replica_gauge.get_cell(
        "num_gpu_per_worker").set(
            self.extended._num_devices_per_worker
            if self.extended._local_device_type == "GPU"
            else 0)
class CollectiveAllReduceExtended(mirrored_strategy.MirroredExtended):
  """Implementation of CollectiveAllReduceStrategy."""

  # Whether to periodically check the health of the cluster. If any worker is
  # not reachable, collectives are aborted and the user program should get a
  # tf.errors.UnavailableError. It's required to restart in order to recover.
  _enable_check_health = True
  # Health check interval in seconds.
  _check_health_interval = 30
  # Timeout in seconds for the first health check. The first check needs to
  # wait for the cluster to come up, which may take longer.
  _check_health_initial_timeout = 0
  # Times to retry before considering the peer is down.
  _check_health_retry_limit = 3
  # Timeout in seconds for each health check RPC.
  _check_health_timeout = 10
  def __init__(self, container_strategy, cluster_resolver,
               communication_options):
    """Initializes the extended object.

    Args:
      container_strategy: the `tf.distribute.Strategy` that owns this object.
      cluster_resolver: a `ClusterResolver`, or `None` to fall back to
        `TFConfigClusterResolver`.
      communication_options: a
        `tf.distribute.experimental.CommunicationOptions` with defaults for
        cross-device communication.

    Raises:
      ValueError: if either argument is not of the expected type.
    """
    if not isinstance(communication_options, collective_util.Options):
      raise ValueError("communication_options must be an instance of "
                       "tf.distribute.experimental.CommunicationOptions")
    self._cluster_resolver = cluster_resolver or TFConfigClusterResolver()
    if not isinstance(self._cluster_resolver, ClusterResolver):
      raise ValueError("cluster_resolver must be an instance of "
                       "tf.distribute.cluster_resolver.ClusterResolver")
    distribute_lib.StrategyExtendedV1.__init__(self, container_strategy)
    self._communication_options = communication_options
    self._collective_key_base = container_strategy._collective_key_base  # pylint: disable=protected-access
    # Sets up devices, collective keys and cross-device ops (local or
    # multi-worker depending on the resolved cluster spec).
    self._initialize_strategy(self._cluster_resolver)
    self._cfer_fn_cache = weakref.WeakKeyDictionary()
    self.experimental_enable_get_next_as_optional = True
    assert isinstance(self._cross_device_ops,
                      cross_device_ops_lib.CollectiveAllReduce)
  def _use_merge_call(self):
    """Always uses merge_call; XLA is not supported for multi-worker strategy."""
    return True
def _initialize_strategy(self, cluster_resolver):
if cluster_resolver.cluster_spec().as_dict():
self._initialize_multi_worker(cluster_resolver)
else:
self._initialize_local(cluster_resolver)
def _initialize_local_devices(self, cluster_resolver, worker_device):
# TODO(b/126786766): TFConfigClusterResolver returns wrong number of GPUs in
# some cases.
if isinstance(cluster_resolver, TFConfigClusterResolver):
num_gpus = context.num_gpus()
num_tpus = 0
else:
num_gpus = cluster_resolver.num_accelerators().get("GPU", 0)
num_tpus = cluster_resolver.num_accelerators().get("TPU", 0)
if num_gpus:
local_device_type = "GPU"
num_local_devices = num_gpus
elif num_tpus:
local_device_type = "TPU"
num_local_devices = num_tpus
else:
local_device_type = "CPU"
num_local_devices = 1
local_devices = tuple(
f"{worker_device}/device:{local_device_type}:{i}"
for i in range(num_local_devices))
return local_devices, local_device_type
  def _initialize_local(self, cluster_resolver, devices=None):
    """Initializes the object for local training.

    Args:
      cluster_resolver: a `ClusterResolver`, used for device discovery and the
        `rpc_layer` attribute.
      devices: optional explicit list of device strings; when omitted, devices
        are discovered via `_initialize_local_devices`.
    """
    self._is_chief = True
    self._num_workers = 1

    if ops.executing_eagerly_outside_functions():
      try:
        context.context().configure_collective_ops(
            scoped_allocator_enabled_ops=("CollectiveReduce",))
      except RuntimeError:
        # Configuration only works before ops run; degrade gracefully.
        logging.warning("Collective ops is not configured at program startup. "
                        "Some performance features may not be enabled.")
      self._collective_ops_configured = True

    if devices:
      local_devices = devices
      # Infer the device type from the first device string.
      if "GPU" in devices[0]:
        local_device_type = "GPU"
      elif "TPU" in devices[0]:
        local_device_type = "TPU"
      else:
        local_device_type = "CPU"
    else:
      local_devices, local_device_type = self._initialize_local_devices(
          cluster_resolver, worker_device="")

    self._worker_device = device_util.canonicalize("/device:CPU:0")
    self._host_input_device = numpy_dataset.SingleDevice(self._worker_device)

    self._collective_keys = cross_device_utils.CollectiveKeys(
        group_key_start=1 + self._collective_key_base)
    self._cross_device_ops = cross_device_ops_lib.CollectiveAllReduce(
        devices=local_devices,
        group_size=len(local_devices),
        collective_keys=self._collective_keys)
    # CrossDeviceOps for per host tensors.
    self._host_cross_device_ops = cross_device_ops_lib.CollectiveAllReduce(
        devices=[self._worker_device],
        group_size=self._num_workers,
        collective_keys=self._collective_keys)
    super(CollectiveAllReduceExtended, self)._initialize_single_worker(
        local_devices)

    self._cluster_spec = None
    self._task_type = None
    self._task_id = None
    self._id_in_cluster = 0

    # This is a mark to tell whether we are running with standalone client or
    # independent worker. Right now with standalone client, strategy object is
    # created as local strategy and then turned into a multi-worker strategy
    # via a configure call.
    self._local_or_standalone_client_mode = True

    # Save the num_devices_per_worker and rpc_layer for configure method.
    self._num_devices_per_worker = len(local_devices)
    self._local_device_type = local_device_type
    self._rpc_layer = cluster_resolver.rpc_layer
    self._warn_nccl_no_gpu()

    logging.info(
        "Single-worker MultiWorkerMirroredStrategy with local_devices "
        "= %r, communication = %s", local_devices,
        self._communication_options.implementation)
  def _initialize_multi_worker(self, cluster_resolver):
    """Initializes the object for multi-worker training.

    Args:
      cluster_resolver: a `ClusterResolver` whose cluster spec, task type and
        task id describe this worker's place in the cluster.

    Raises:
      ValueError: if task type/id are missing, or no worker/chief/evaluator
        task exists in the cluster spec.
    """
    cluster_spec = multi_worker_util.normalize_cluster_spec(
        cluster_resolver.cluster_spec())
    task_type = cluster_resolver.task_type
    task_id = cluster_resolver.task_id
    if task_type is None or task_id is None:
      raise ValueError("When `cluster_spec` is given, you must also specify "
                       "`task_type` and `task_id`.")
    self._cluster_spec = cluster_spec
    self._task_type = task_type
    self._task_id = task_id
    self._id_in_cluster = multi_worker_util.id_in_cluster(
        self._cluster_spec, self._task_type, self._task_id)

    self._num_workers = multi_worker_util.worker_count(cluster_spec, task_type)
    if not self._num_workers:
      raise ValueError("No `worker`, `chief` or `evaluator` tasks can be found "
                       "in `cluster_spec`.")

    self._is_chief = multi_worker_util.is_chief(cluster_spec, task_type,
                                                task_id)

    self._worker_device = "/job:%s/task:%d" % (task_type, task_id)
    self._host_input_device = numpy_dataset.SingleDevice(self._worker_device)

    if (ops.executing_eagerly_outside_functions() and
        not getattr(self, "_local_or_standalone_client_mode", False)):
      context.context().configure_collective_ops(
          collective_leader=multi_worker_util.collective_leader(
              cluster_spec, task_type, task_id),
          scoped_allocator_enabled_ops=("CollectiveReduce",),
          device_filters=("/job:%s/task:%d" % (task_type, task_id),))
      self._collective_ops_configured = True

    # Starting a std server in eager mode and in independent worker mode.
    if (context.executing_eagerly() and
        not getattr(self, "_std_server_started", False) and
        not getattr(self, "_local_or_standalone_client_mode", False)):
      # Checking _local_or_standalone_client_mode as well because we should not
      # create the std server in standalone client mode.
      config_proto = copy.deepcopy(context.context().config)
      config_proto = self._update_config_proto(config_proto)

      # If coordination service is enabled, use its internal heartbeat to detect
      # peer failures instead of the Python-level health check.
      if config_proto.experimental.coordination_config.service_type:
        self._enable_check_health = False

      if hasattr(cluster_resolver, "port"):
        port = cluster_resolver.port
      else:
        port = 0
      server_def = tensorflow_server_pb2.ServerDef(
          cluster=cluster_spec.as_cluster_def(),
          default_session_config=config_proto,
          job_name=task_type,
          task_index=task_id,
          protocol=cluster_resolver.rpc_layer or "grpc",
          port=port)
      context.context().enable_collective_ops(server_def)
      self._std_server_started = True
      # The `ensure_initialized` is needed before calling
      # `context.context().devices()`.
      context.context().ensure_initialized()
      logging.info(
          "Enabled multi-worker collective ops with available devices: %r",
          context.context().devices())

    # TODO(yuefengz): The `num_gpus` is only for this particular task. It
    # assumes all workers have the same number of GPUs. We should remove this
    # assumption by querying all tasks for their numbers of GPUs.
    # TODO(b/126786766): TFConfigClusterResolver returns wrong number of GPUs in
    # some cases.
    local_devices, local_device_type = self._initialize_local_devices(
        cluster_resolver, self._worker_device)
    if local_device_type == "TPU":
      tpu_strategy_util.initialize_tpu_system()

    self._collective_keys = cross_device_utils.CollectiveKeys(
        group_key_start=1 + self._collective_key_base)
    # The all-reduce group spans every device on every worker.
    self._cross_device_ops = cross_device_ops_lib.CollectiveAllReduce(
        devices=local_devices,
        group_size=len(local_devices) * self._num_workers,
        collective_keys=self._collective_keys)
    # CrossDeviceOps for per host tensors.
    self._host_cross_device_ops = cross_device_ops_lib.CollectiveAllReduce(
        devices=[self._worker_device],
        group_size=self._num_workers,
        collective_keys=self._collective_keys)
    super(CollectiveAllReduceExtended, self)._initialize_single_worker(
        local_devices)

    # Add a default device so that ops without specified devices will not end up
    # on other workers.
    self._default_device = "/job:%s/task:%d" % (task_type, task_id)

    # Save the num_devices_per_worker and rpc_layer for configure method.
    self._num_devices_per_worker = len(local_devices)
    self._local_device_type = local_device_type
    self._rpc_layer = cluster_resolver.rpc_layer

    self._warn_nccl_no_gpu()

    if self._enable_check_health and context.executing_eagerly():
      self._start_check_health_thread()
    else:
      logging.info("Check health not enabled.")

    logging.info(
        "MultiWorkerMirroredStrategy with cluster_spec = %r, task_type = %r, "
        "task_id = %r, num_workers = %r, local_devices = %r, "
        "communication = %s", cluster_spec.as_dict(), task_type, task_id,
        self._num_workers, local_devices,
        self._communication_options.implementation)
  def __del__(self):
    # Best-effort cleanup on garbage collection: stop the background
    # health-check thread if one was started.
    self._stop_check_health_thread()
def _input_workers_with_options(self, options=None):
host_device = device_util.get_host_for_device(self._worker_device)
if not options or options.experimental_fetch_to_device:
return input_lib.InputWorkers([(host_device, self.worker_devices)])
else:
return input_lib.InputWorkers([(
host_device,
[device_util.get_host_for_device(worker) for worker in
self.worker_devices])])
  @property
  def _input_workers(self):
    # Default input workers: options=None means fetch-to-device placement.
    return self._input_workers_with_options()
  def _get_variable_creator_initial_value(self,
                                          replica_id,
                                          device,
                                          primary_var,
                                          **kwargs):
    """Returns the initial value (or a fn producing it) for a new variable.

    For the first replica on each worker this returns a function that
    broadcasts the chief's initial value to all workers over collective ops,
    so every worker starts from identical weights. Other replicas fall back
    to the parent implementation (which copies from the primary variable).
    """
    if replica_id == 0:  # First replica on each worker.
      assert device is not None
      assert primary_var is None

      def initial_value_fn():  # pylint: disable=g-missing-docstring
        # Only the first device participates in the broadcast of initial values.
        group_key = self._collective_keys.get_group_key([device])
        group_size = self._num_workers
        collective_instance_key = (
            self._collective_keys.get_instance_key(group_key, device))

        with ops.device(device):
          initial_value = kwargs["initial_value"]
          if callable(initial_value):
            initial_value = initial_value()
          if isinstance(initial_value, base.CheckpointInitialValue):
            initial_value = initial_value.wrapped_value
          assert not callable(initial_value)
          initial_value = ops.convert_to_tensor(
              initial_value, dtype=kwargs.get("dtype", None))

          if self._num_workers > 1:
            if self._is_chief:
              # Chief sends its value; non-chief workers receive it below.
              bcast_send = collective_ops.broadcast_send(
                  initial_value, initial_value.shape, initial_value.dtype,
                  group_size, group_key, collective_instance_key)
              with ops.control_dependencies([bcast_send]):
                return array_ops.identity(initial_value)
            else:
              return collective_ops.broadcast_recv(initial_value.shape,
                                                   initial_value.dtype,
                                                   group_size, group_key,
                                                   collective_instance_key)
          return initial_value

      return initial_value_fn
    else:
      return super(CollectiveAllReduceExtended,
                   self)._get_variable_creator_initial_value(
                       replica_id=replica_id,
                       device=device,
                       primary_var=primary_var,
                       **kwargs)
def _make_input_context(self):
input_context = distribute_lib.InputContext(
num_input_pipelines=self._num_workers,
input_pipeline_id=self._id_in_cluster,
num_replicas_in_sync=self._num_replicas_in_sync)
return input_context
def _experimental_distribute_dataset(self, dataset, options):
if (options and options.experimental_replication_mode ==
distribute_lib.InputReplicationMode.PER_REPLICA):
raise NotImplementedError(
"InputReplicationMode.PER_REPLICA "
"is only supported in "
"`distribute_datasets_from_function` "
"of tf.distribute.MirroredStrategy"
)
input_context = self._make_input_context()
return input_lib.get_distributed_dataset(
dataset,
self._input_workers_with_options(options),
self._container_strategy(),
num_replicas_in_sync=self._num_replicas_in_sync,
input_context=input_context,
options=options)
def _distribute_datasets_from_function(self, dataset_fn, options):
if (options and options.experimental_replication_mode ==
distribute_lib.InputReplicationMode.PER_REPLICA):
raise NotImplementedError(
"InputReplicationMode.PER_REPLICA "
"is only supported in "
"`distribute_datasets_from_function` "
"of tf.distribute.MirroredStrategy")
input_context = self._make_input_context()
return input_lib.get_distributed_datasets_from_function(
dataset_fn=dataset_fn,
input_workers=self._input_workers_with_options(options),
input_contexts=[input_context],
strategy=self._container_strategy(),
options=options)
def _experimental_distribute_values_from_function(self, value_fn):
per_replica_values = []
num_local_replicas = len(self.worker_devices)
for local_replica_id in range(num_local_replicas):
replica_id = (self._id_in_cluster * num_local_replicas +
local_replica_id)
value_context = distribute_lib.ValueContext(
replica_id, self._num_replicas_in_sync)
per_replica_values.append(value_fn(value_context))
return distribute_utils.regroup(per_replica_values, always_wrap=True)
def _make_dataset_iterator(self, dataset):
"""Distributes the dataset to each local GPU."""
input_context = self._make_input_context()
return input_lib.DatasetIterator(
dataset,
self._input_workers,
self._container_strategy(),
num_replicas_in_sync=self._num_replicas_in_sync,
input_context=input_context)
def _make_input_fn_iterator(
self,
input_fn,
replication_mode=distribute_lib.InputReplicationMode.PER_WORKER):
"""Distributes the input function to each local GPU."""
input_context = self._make_input_context()
return input_lib.InputFunctionIterator(input_fn, self._input_workers,
[input_context],
self._container_strategy())
  def _configure(self,
                 session_config=None,
                 cluster_spec=None,
                 task_type=None,
                 task_id=None):
    """Configures the object.

    Args:
      session_config: a `tf.compat.v1.ConfigProto`
      cluster_spec: a dict, ClusterDef or ClusterSpec object specifying the
        cluster configurations.
      task_type: the current task type, such as "worker".
      task_id: the current task id.

    Raises:
      ValueError: if `task_type` is not in the `cluster_spec`.
    """
    if cluster_spec:
      # Re-initialize in multi-worker mode, carrying over the current device
      # counts and RPC layer in an explicitly-built resolver.
      cluster_resolver = SimpleClusterResolver(
          cluster_spec=multi_worker_util.normalize_cluster_spec(cluster_spec),
          task_type=task_type,
          task_id=task_id,
          num_accelerators={
              self._local_device_type: self._num_devices_per_worker},
          rpc_layer=self._rpc_layer)
      self._initialize_multi_worker(cluster_resolver)
      assert isinstance(self._cross_device_ops,
                        cross_device_ops_lib.CollectiveAllReduce)

    if session_config:
      # Mutate the caller's proto in place with collective-ops settings.
      session_config.CopyFrom(self._update_config_proto(session_config))
  def _update_config_proto(self, config_proto):
    """Returns a copy of `config_proto` updated for collective ops.

    Enables the scoped allocator optimization, optionally enables NCCL for
    graph mode, and (in multi-worker mode) sets the collective group leader
    and device filters for this task.
    """
    updated_config = copy.deepcopy(config_proto)
    # Enable the scoped allocator optimization for CollectiveOps.  This
    # optimization converts many small all-reduces into fewer larger
    # all-reduces.
    rewrite_options = updated_config.graph_options.rewrite_options
    rewrite_options.scoped_allocator_optimization = (
        rewriter_config_pb2.RewriterConfig.ON)
    # We turn on ScopedAllocator only for CollectiveReduce op, i.e. enable_op =
    # ["CollectiveReduce"].  Since we can't assign to a repeated proto field, we
    # clear and then append.
    del rewrite_options.scoped_allocator_opts.enable_op[:]
    rewrite_options.scoped_allocator_opts.enable_op.append("CollectiveReduce")

    if (not ops.executing_eagerly_outside_functions() and
        self._communication_options.implementation ==
        collective_util.CommunicationImplementation.NCCL):
      updated_config.experimental.collective_nccl = True

    if not self._cluster_spec:
      return updated_config

    assert self._task_type
    assert self._task_id is not None

    # Collective group leader is needed for collective ops to coordinate
    # workers.
    updated_config.experimental.collective_group_leader = (
        multi_worker_util.collective_leader(self._cluster_spec, self._task_type,
                                            self._task_id))

    # The device filters prevent communication between workers.
    del updated_config.device_filters[:]
    updated_config.device_filters.append(
        "/job:%s/task:%d" % (self._task_type, self._task_id))

    return updated_config
def _get_cross_device_ops(self, value):
# CollectiveAllReduce works on a predefined set of devices. In most cases
# they should be the compute devices, but certain use cases may reduce host
# tensors as well (e.g. early stopping). We infer the cross_device_ops to
# use based on the number of devices, since inputs don't always have device
# annotations. The compute devices one is preferred since we can potentially
# leverage NCCL.
if isinstance(value, values.DistributedValues):
num_devices = len(value._values) # pylint: disable=protected-access
else:
num_devices = 1
if num_devices == len(self.worker_devices):
return self._cross_device_ops
else:
return self._host_cross_device_ops
def _gather_to_implementation(self, value, destinations, axis, options):
return self._get_cross_device_ops(value)._gather( # pylint: disable=protected-access
value,
destinations=destinations,
axis=axis,
options=options)
  def _reduce_to(self, reduce_op, value, destinations, options):
    """Reduces `value` across replicas/workers to `destinations`.

    Mirrored values reduced with MEAN are already identical on every replica
    and are returned as-is; everything else goes through collective ops or,
    for plain single-worker values, a non-distributed reduction.
    """
    if (isinstance(value, values.Mirrored) and
        reduce_op == reduce_util.ReduceOp.MEAN):
      return value
    assert not isinstance(value, values.Mirrored)

    if (isinstance(value, values.DistributedValues) and
        len(self.worker_devices) == 1):
      # Unwrap a one-element DistributedValues to its single component.
      value = value.values[0]

    # When there are multiple workers, we need to reduce across workers using
    # collective ops.
    if (not isinstance(value, values.DistributedValues) and
        self._num_workers == 1):
      # This function handles reducing values that are not PerReplica or
      # Mirrored values. For example, the same value could be present on all
      # replicas in which case `value` would be a single value or value could
      # be 0.
      return cross_device_ops_lib.reduce_non_distributed_value(
          reduce_op, value, destinations, len(self.worker_devices))
    return self._get_cross_device_ops(value).reduce(
        reduce_op,
        value,
        destinations=destinations,
        options=self._communication_options.merge(options))
  def _replica_ctx_all_reduce(self, reduce_op, value, options=None):
    """Implements `StrategyExtendedV2._replica_ctx_all_reduce`."""
    # This implementation avoids using `merge_call` and just launches collective
    # ops in one replica.
    if options is None:
      options = collective_util.Options()

    if context.executing_eagerly():
      # In eager mode, falls back to the default implemenation that uses
      # `merge_call`. Replica functions are running sequentially in eager mode,
      # and due to the blocking nature of collective ops, execution will hang if
      # collective ops are to be launched sequentially.
      return super()._replica_ctx_all_reduce(reduce_op, value, options)

    replica_context = ds_context.get_replica_context()
    assert replica_context, (
        "`StrategyExtended._replica_ctx_all_reduce` must be called in a "
        "replica context")
    # Launch the all-reduce directly from this replica's context.
    return self._cross_device_ops._all_reduce(  # pylint: disable=protected-access
        reduce_op,
        value,
        replica_context._replica_id,  # pylint: disable=protected-access
        options)
def _check_health(self):
while True:
if self._check_health_thread_should_stop.is_set():
return
for job in self._cluster_spec.jobs:
for task_id in range(self._cluster_spec.num_tasks(job)):
peer = "/job:{}/replica:0/task:{}".format(job, task_id)
attempts = 0
while True:
attempts += 1
try:
context.context().check_collective_ops_peer_health(
peer, timeout_in_ms=self._check_health_timeout * 1000)
# If check_collective_ops_peer_health doesn't raise an Exception,
# the peer is healthy.
break
except (errors.UnavailableError, errors.FailedPreconditionError,
errors.DeadlineExceededError) as e:
# TODO(b/151232436): Always raise UnavailableError when a peer
# fails. Now there could be many kinds of errors:
# - Unavailable: when the peer is not reachable, e.g. it's down.
# - FailedPrecondition: when the peer has restarted.
if attempts < self._check_health_retry_limit:
logging.warning("%s seems down, retrying %d/%d", peer, attempts,
self._check_health_retry_limit)
continue
logging.error(
"Cluster check alive failed, %s is down, "
"aborting collectives: %s", peer, e)
context.context().abort_collective_ops(
errors.UNAVAILABLE,
"cluster check alive failed, {} is down".format(peer))
return
except Exception as e: # pylint: disable=broad-except
logging.error("Unexpected exception in check alive: %s", e)
context.context().abort_collective_ops(
errors.INTERNAL,
"unexecpted exception in check alive: %s" % e)
return
time.sleep(self._check_health_interval)
  def _start_check_health_thread(self):
    """Blocks until the cluster is up, then starts the health-check thread.

    Raises:
      RuntimeError: if the initial barrier all-reduce times out.
    """
    # Use a dummy all-reduce as a barrier to wait for all workers to be up,
    # otherwise the check health may fail immediately.

    # Use array_ops.identity to create the dummy tensor so that we have a new
    # Tensor. If we use constant it may be a cached from on a /job:localhost
    # device, which will cause some code that relies on tensor.device to error.
    #
    # TODO(b/151232436): change to an explicit barrier if we have it.
    dummy_value = array_ops.identity([])
    logging.info("Waiting for the cluster, timeout = %s",
                 self._check_health_initial_timeout or "inf")
    try:
      self._host_cross_device_ops.reduce(
          reduce_util.ReduceOp.SUM,
          dummy_value,
          dummy_value,
          options=collective_util.Options(
              timeout_seconds=self._check_health_initial_timeout,
              implementation=collective_util.CommunicationImplementation.RING))
      if context.is_async():
        context.async_wait()
    except errors.DeadlineExceededError:
      raise RuntimeError(
          "Timeout waiting for the cluster, timeout is %d seconds" %
          self._check_health_initial_timeout)
    logging.info("Cluster is ready.")
    self._check_health_thread_should_stop = threading.Event()
    # Start the thread as daemon to avoid it blocking the program from exiting.
    # We try best to shutdown the thread but __del__ is not guaranteed to be
    # called when program exits.
    self._check_health_thread = threading.Thread(
        target=self._check_health,
        daemon=True)
    self._check_health_thread.start()
def _stop_check_health_thread(self):
if getattr(self, "_check_health_thread", None):
logging.info("stopping check health thread")
self._check_health_thread_should_stop.set()
self._check_health_thread.join()
self._check_health_thread = None
logging.info("check health thread stopped")
def _warn_nccl_no_gpu(self):
if ((self._communication_options.implementation ==
collective_util.CommunicationImplementation.NCCL) and
self._local_device_type != "GPU"):
logging.warning("Enabled NCCL communication but no GPUs detected/"
"specified.")
  def _in_multi_worker_mode(self):
    """Whether this strategy indicates working in multi-worker settings."""
    # A single-worker setup (local init) has _num_workers == 1.
    return self._num_workers > 1
  @property
  def experimental_between_graph(self):
    # Each worker builds its own graph (between-graph replication).
    return True
  @property
  def experimental_should_init(self):
    # Every worker runs variable initialization.
    return True
  @property
  def should_checkpoint(self):
    # Only the chief worker writes checkpoints.
    return self._is_chief
  @property
  def should_save_summary(self):
    # Only the chief worker writes summaries.
    return self._is_chief
  @property
  def _num_replicas_in_sync(self):
    # Total replicas across the cluster: local devices times workers.
    return len(self.worker_devices) * self._num_workers
  # TODO(priyag): Delete this once all strategies use global batch size.
  @property
  def _global_batch_size(self):
    """`make_dataset_iterator` and `make_numpy_iterator` use global batch size.

    `make_input_fn_iterator` assumes per-replica batching.

    Returns:
      Boolean.
    """
    return True
  def _get_replica_id_in_sync_group(self, replica_id):
    # Map a worker-local replica id to its global id across the cluster.
    return self._id_in_cluster * len(self.worker_devices) + replica_id
  def _get_local_replica_id(self, replica_id_in_sync_group):
    # Inverse of _get_replica_id_in_sync_group: global id -> worker-local id.
    return (replica_id_in_sync_group -
            self._id_in_cluster * len(self.worker_devices))
def __deepcopy__(self, memo):
# We check the check health thread instead of whether we are in eager mode
# to limit the backward incompatibility.
if hasattr(self, "_check_health_thread"):
raise ValueError(
"MultiWorkerMirroredStrategy cannot be deep copied in eager mode. "
"If you're using Estimator and see this error message, call "
"tf.compat.v1.disable_eager_execution() at the beginning of your "
"program")
# Otherwise, do a regular deepcopy.
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
for k, v in self.__dict__.items():
setattr(result, k, copy.deepcopy(v, memo))
return result
| 42.241062 | 134 | 0.714683 |
import copy
import threading
import time
import weakref
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.core.protobuf import tensorflow_server_pb2
from tensorflow.python.distribute import collective_util
from tensorflow.python.distribute import cross_device_ops as cross_device_ops_lib
from tensorflow.python.distribute import cross_device_utils
from tensorflow.python.distribute import device_util
from tensorflow.python.distribute import distribute_lib
from tensorflow.python.distribute import distribute_utils
from tensorflow.python.distribute import distribution_strategy_context as ds_context
from tensorflow.python.distribute import input_lib
from tensorflow.python.distribute import mirrored_strategy
from tensorflow.python.distribute import multi_worker_util
from tensorflow.python.distribute import numpy_dataset
from tensorflow.python.distribute import reduce_util
from tensorflow.python.distribute import values
from tensorflow.python.distribute.cluster_resolver import ClusterResolver
from tensorflow.python.distribute.cluster_resolver import SimpleClusterResolver
from tensorflow.python.distribute.cluster_resolver import TFConfigClusterResolver
from tensorflow.python.eager import context
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import collective_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.tpu import tpu_strategy_util
from tensorflow.python.training.tracking import base
from tensorflow.python.util import deprecation
from tensorflow.python.util.tf_export import tf_export
@tf_export("distribute.MultiWorkerMirroredStrategy", v1=[])
class CollectiveAllReduceStrategy(distribute_lib.Strategy):
  """Synchronous multi-worker training strategy backed by collective ops."""

  # Offset for collective op keys; presumably shifted by tests/subclasses to
  # avoid key collisions between strategy instances — TODO confirm.
  _collective_key_base = 0

  def __init__(self,
               cluster_resolver=None,
               communication_options=None):
    """Creates the strategy.

    Args:
      cluster_resolver: optional `ClusterResolver`; the extended
        implementation defaults it to `TFConfigClusterResolver`.
      communication_options: optional
        `tf.distribute.experimental.CommunicationOptions`; defaults to
        `collective_util.Options()`.
    """
    if communication_options is None:
      communication_options = collective_util.Options()
    super(CollectiveAllReduceStrategy, self).__init__(
        CollectiveAllReduceExtended(
            self,
            cluster_resolver=cluster_resolver,
            communication_options=communication_options))

    # Record usage metrics for this strategy.
    distribute_lib.distribution_strategy_gauge.get_cell("V2").set(
        "MultiWorkerMirroredStrategy")
    distribute_lib.distribution_strategy_replica_gauge.get_cell(
        "num_workers").set(self.extended._num_workers)
    distribute_lib.distribution_strategy_replica_gauge.get_cell(
        "num_replicas_per_worker").set(self.extended._num_devices_per_worker)

  @classmethod
  def _from_local_devices(cls, devices, communication_options=None):
    """Returns a strategy restricted to the given local `devices`."""
    obj = cls(communication_options=communication_options)
    obj.extended._initialize_local(TFConfigClusterResolver(), devices=devices)
    return obj

  @property
  def cluster_resolver(self):
    """The cluster resolver used by this strategy."""
    return self.extended._cluster_resolver
class _CollectiveAllReduceStrategyExperimentalMeta(type):
  """Metaclass that aliases isinstance checks to the canonical strategy."""

  @classmethod
  def __instancecheck__(cls, instance):
    # Any CollectiveAllReduceStrategy counts as an instance of the deprecated
    # experimental alias class.
    return isinstance(instance, CollectiveAllReduceStrategy)
@tf_export("distribute.experimental.MultiWorkerMirroredStrategy", v1=[])
class _CollectiveAllReduceStrategyExperimental(
    CollectiveAllReduceStrategy,
    metaclass=_CollectiveAllReduceStrategyExperimentalMeta):
  # Deprecated alias exported under the old `experimental` path; shares the
  # canonical class's docstring.
  __doc__ = CollectiveAllReduceStrategy.__doc__

  @deprecation.deprecated(
      None, "use distribute.MultiWorkerMirroredStrategy instead")
  def __init__(self,
               communication=collective_util.CommunicationImplementation.AUTO,
               cluster_resolver=None):
    # Translate the legacy `communication` enum into CommunicationOptions.
    communication_options = collective_util.Options(
        implementation=communication)
    super(_CollectiveAllReduceStrategyExperimental,
          self).__init__(cluster_resolver, communication_options)

  @classmethod
  def _from_local_devices(
      cls,
      devices,
      communication=collective_util.CommunicationImplementation.AUTO):
    # Builds a strategy restricted to the given local devices.
    obj = cls(communication)
    obj.extended._initialize_local(TFConfigClusterResolver(), devices=devices)
    return obj
# Present the deprecated alias under the canonical class name so reprs and
# error messages refer to `CollectiveAllReduceStrategy`.
_CollectiveAllReduceStrategyExperimental.__name__ = CollectiveAllReduceStrategy.__name__
@tf_export(v1=["distribute.experimental.MultiWorkerMirroredStrategy"])
class CollectiveAllReduceStrategyV1(distribute_lib.StrategyV1):
  # V1 (graph-mode) variant; shares the canonical class's docstring.
  __doc__ = CollectiveAllReduceStrategy.__doc__

  # Offset for collective op keys (mirrors CollectiveAllReduceStrategy).
  _collective_key_base = 0

  def __init__(self,
               communication=collective_util.CommunicationImplementation.AUTO,
               cluster_resolver=None):
    """Creates the V1 strategy from a legacy `communication` enum."""
    communication_options = collective_util.Options(
        implementation=communication)
    super(CollectiveAllReduceStrategyV1, self).__init__(
        CollectiveAllReduceExtended(
            self,
            cluster_resolver=cluster_resolver,
            communication_options=communication_options))
    # Record usage metrics; GPU count is reported only when local devices are
    # GPUs, otherwise 0.
    distribute_lib.distribution_strategy_gauge.get_cell("V1").set(
        "MultiWorkerMirroredStrategy")
    distribute_lib.distribution_strategy_replica_gauge.get_cell(
        "num_workers").set(self.extended._num_workers)
    distribute_lib.distribution_strategy_replica_gauge.get_cell(
        "num_gpu_per_worker").set(
            self.extended._num_devices_per_worker
            if self.extended._local_device_type == "GPU"
            else 0)
class CollectiveAllReduceExtended(mirrored_strategy.MirroredExtended):
_enable_check_health = True
# Check health interval in seconds.
_check_health_interval = 30
# Timeout in seconds for the first check health. The first check health needs
# to wait for cluster, which may make a longer time.
_check_health_initial_timeout = 0
# Times to retry before considering the peer is down.
_check_health_retry_limit = 3
# Timeout in seconds the each check health.
_check_health_timeout = 10
  def __init__(self, container_strategy, cluster_resolver,
               communication_options):
    """Initializes the extended strategy.

    Args:
      container_strategy: the owning `Strategy` object.
      cluster_resolver: a `ClusterResolver` or None (defaults to
        `TFConfigClusterResolver`).
      communication_options: a `collective_util.Options` instance.

    Raises:
      ValueError: if either argument has the wrong type.
    """
    if not isinstance(communication_options, collective_util.Options):
      raise ValueError("communication_options must be an instance of "
                       "tf.distribute.experimental.CommunicationOptions")
    self._cluster_resolver = cluster_resolver or TFConfigClusterResolver()
    if not isinstance(self._cluster_resolver, ClusterResolver):
      raise ValueError("cluster_resolver must be an instance of "
                       "tf.distribute.cluster_resolver.ClusterResolver")
    distribute_lib.StrategyExtendedV1.__init__(self, container_strategy)
    self._communication_options = communication_options
    self._collective_key_base = container_strategy._collective_key_base  # pylint: disable=protected-access
    # Chooses local vs multi-worker setup based on the resolver's cluster spec.
    self._initialize_strategy(self._cluster_resolver)
    self._cfer_fn_cache = weakref.WeakKeyDictionary()
    self.experimental_enable_get_next_as_optional = True
    assert isinstance(self._cross_device_ops,
                      cross_device_ops_lib.CollectiveAllReduce)
  def _use_merge_call(self):
    # Always opt into the default `merge_call`-based reduction path.
    return True
def _initialize_strategy(self, cluster_resolver):
if cluster_resolver.cluster_spec().as_dict():
self._initialize_multi_worker(cluster_resolver)
else:
self._initialize_local(cluster_resolver)
def _initialize_local_devices(self, cluster_resolver, worker_device):
# TODO(b/126786766): TFConfigClusterResolver returns wrong number of GPUs in
# some cases.
if isinstance(cluster_resolver, TFConfigClusterResolver):
num_gpus = context.num_gpus()
num_tpus = 0
else:
num_gpus = cluster_resolver.num_accelerators().get("GPU", 0)
num_tpus = cluster_resolver.num_accelerators().get("TPU", 0)
if num_gpus:
local_device_type = "GPU"
num_local_devices = num_gpus
elif num_tpus:
local_device_type = "TPU"
num_local_devices = num_tpus
else:
local_device_type = "CPU"
num_local_devices = 1
local_devices = tuple(
f"{worker_device}/device:{local_device_type}:{i}"
for i in range(num_local_devices))
return local_devices, local_device_type
  def _initialize_local(self, cluster_resolver, devices=None):
    """Initializes the object for single-worker (local) training.

    Args:
      cluster_resolver: the `ClusterResolver` used for device discovery.
      devices: optional explicit list of local device strings; when given,
        device discovery is skipped and the type is inferred from the first
        entry.
    """
    self._is_chief = True
    self._num_workers = 1

    if ops.executing_eagerly_outside_functions():
      try:
        context.context().configure_collective_ops(
            scoped_allocator_enabled_ops=("CollectiveReduce",))
      except RuntimeError:
        logging.warning("Collective ops is not configured at program startup. "
                        "Some performance features may not be enabled.")
      self._collective_ops_configured = True

    if devices:
      local_devices = devices
      if "GPU" in devices[0]:
        local_device_type = "GPU"
      elif "TPU" in devices[0]:
        local_device_type = "TPU"
      else:
        local_device_type = "CPU"
    else:
      local_devices, local_device_type = self._initialize_local_devices(
          cluster_resolver, worker_device="")

    self._worker_device = device_util.canonicalize("/device:CPU:0")
    self._host_input_device = numpy_dataset.SingleDevice(self._worker_device)

    self._collective_keys = cross_device_utils.CollectiveKeys(
        group_key_start=1 + self._collective_key_base)
    self._cross_device_ops = cross_device_ops_lib.CollectiveAllReduce(
        devices=local_devices,
        group_size=len(local_devices),
        collective_keys=self._collective_keys)
    # CrossDeviceOps for per host tensors.
    self._host_cross_device_ops = cross_device_ops_lib.CollectiveAllReduce(
        devices=[self._worker_device],
        group_size=self._num_workers,
        collective_keys=self._collective_keys)
    super(CollectiveAllReduceExtended, self)._initialize_single_worker(
        local_devices)

    self._cluster_spec = None
    self._task_type = None
    self._task_id = None
    self._id_in_cluster = 0

    # This is a mark to tell whether we are running with standalone client or
    # independent worker. Right now with standalone client, strategy object is
    # created as local strategy and then turn into multi-worker strategy via
    # configure call.
    self._local_or_standalone_client_mode = True

    # Save the num_devices_per_worker and rpc_layer for configure method.
    self._num_devices_per_worker = len(local_devices)
    self._local_device_type = local_device_type
    self._rpc_layer = cluster_resolver.rpc_layer
    self._warn_nccl_no_gpu()

    logging.info(
        "Single-worker MultiWorkerMirroredStrategy with local_devices "
        "= %r, communication = %s", local_devices,
        self._communication_options.implementation)
  def _initialize_multi_worker(self, cluster_resolver):
    """Initializes the object for multi-worker training.

    Configures collective ops, optionally starts a std server (eager,
    independent-worker mode), discovers local devices, builds cross-device
    ops, and starts the health-check thread when enabled.

    Args:
      cluster_resolver: a `ClusterResolver` with a non-empty cluster spec and
        `task_type`/`task_id` set.

    Raises:
      ValueError: if `task_type`/`task_id` are missing or no worker-like
        tasks exist in the cluster spec.
    """
    cluster_spec = multi_worker_util.normalize_cluster_spec(
        cluster_resolver.cluster_spec())
    task_type = cluster_resolver.task_type
    task_id = cluster_resolver.task_id
    if task_type is None or task_id is None:
      raise ValueError("When `cluster_spec` is given, you must also specify "
                       "`task_type` and `task_id`.")
    self._cluster_spec = cluster_spec
    self._task_type = task_type
    self._task_id = task_id
    self._id_in_cluster = multi_worker_util.id_in_cluster(
        self._cluster_spec, self._task_type, self._task_id)

    self._num_workers = multi_worker_util.worker_count(cluster_spec, task_type)
    if not self._num_workers:
      raise ValueError("No `worker`, `chief` or `evaluator` tasks can be found "
                       "in `cluster_spec`.")

    self._is_chief = multi_worker_util.is_chief(cluster_spec, task_type,
                                                task_id)

    self._worker_device = "/job:%s/task:%d" % (task_type, task_id)
    self._host_input_device = numpy_dataset.SingleDevice(self._worker_device)

    if (ops.executing_eagerly_outside_functions() and
        not getattr(self, "_local_or_standalone_client_mode", False)):
      context.context().configure_collective_ops(
          collective_leader=multi_worker_util.collective_leader(
              cluster_spec, task_type, task_id),
          scoped_allocator_enabled_ops=("CollectiveReduce",),
          device_filters=("/job:%s/task:%d" % (task_type, task_id),))
      self._collective_ops_configured = True

    # Starting a std server in eager mode and in independent worker mode.
    if (context.executing_eagerly() and
        not getattr(self, "_std_server_started", False) and
        not getattr(self, "_local_or_standalone_client_mode", False)):
      # Checking _local_or_standalone_client_mode as well because we should not
      # create the std server in standalone client mode.
      config_proto = copy.deepcopy(context.context().config)
      config_proto = self._update_config_proto(config_proto)

      # If coordination service is enabled, use its internal heartbeat to detect
      # peer failures instead of the Python-level health check.
      if config_proto.experimental.coordination_config.service_type:
        self._enable_check_health = False

      if hasattr(cluster_resolver, "port"):
        port = cluster_resolver.port
      else:
        port = 0
      server_def = tensorflow_server_pb2.ServerDef(
          cluster=cluster_spec.as_cluster_def(),
          default_session_config=config_proto,
          job_name=task_type,
          task_index=task_id,
          protocol=cluster_resolver.rpc_layer or "grpc",
          port=port)
      context.context().enable_collective_ops(server_def)
      self._std_server_started = True
      # The `ensure_initialized` is needed before calling
      # `context.context().devices()`.
      context.context().ensure_initialized()
      logging.info(
          "Enabled multi-worker collective ops with available devices: %r",
          context.context().devices())

    # TODO(yuefengz): The `num_gpus` is only for this particular task. It
    # assumes all workers have the same number of GPUs. We should remove this
    # assumption by querying all tasks for their numbers of GPUs.
    # TODO(b/126786766): TFConfigClusterResolver returns wrong number of GPUs in
    # some cases.
    local_devices, local_device_type = self._initialize_local_devices(
        cluster_resolver, self._worker_device)
    if local_device_type == "TPU":
      tpu_strategy_util.initialize_tpu_system()

    self._collective_keys = cross_device_utils.CollectiveKeys(
        group_key_start=1 + self._collective_key_base)
    self._cross_device_ops = cross_device_ops_lib.CollectiveAllReduce(
        devices=local_devices,
        group_size=len(local_devices) * self._num_workers,
        collective_keys=self._collective_keys)
    # CrossDeviceOps for per host tensors.
    self._host_cross_device_ops = cross_device_ops_lib.CollectiveAllReduce(
        devices=[self._worker_device],
        group_size=self._num_workers,
        collective_keys=self._collective_keys)
    super(CollectiveAllReduceExtended, self)._initialize_single_worker(
        local_devices)

    # Add a default device so that ops without specified devices will not end up
    # on other workers.
    self._default_device = "/job:%s/task:%d" % (task_type, task_id)

    # Save the num_devices_per_worker and rpc_layer for configure method.
    self._num_devices_per_worker = len(local_devices)
    self._local_device_type = local_device_type
    self._rpc_layer = cluster_resolver.rpc_layer
    self._warn_nccl_no_gpu()

    if self._enable_check_health and context.executing_eagerly():
      self._start_check_health_thread()
    else:
      logging.info("Check health not enabled.")

    logging.info(
        "MultiWorkerMirroredStrategy with cluster_spec = %r, task_type = %r, "
        "task_id = %r, num_workers = %r, local_devices = %r, "
        "communication = %s", cluster_spec.as_dict(), task_type, task_id,
        self._num_workers, local_devices,
        self._communication_options.implementation)
  def __del__(self):
    # Best-effort: stop the health-check thread on garbage collection.
    self._stop_check_health_thread()
  def _input_workers_with_options(self, options=None):
    """Returns `InputWorkers`; input stays on hosts unless fetch-to-device."""
    host_device = device_util.get_host_for_device(self._worker_device)
    if not options or options.experimental_fetch_to_device:
      return input_lib.InputWorkers([(host_device, self.worker_devices)])
    else:
      return input_lib.InputWorkers([(
          host_device,
          [device_util.get_host_for_device(worker) for worker in
           self.worker_devices])])
  @property
  def _input_workers(self):
    # Default input workers: options=None means fetch-to-device placement.
    return self._input_workers_with_options()
  def _get_variable_creator_initial_value(self,
                                          replica_id,
                                          device,
                                          primary_var,
                                          **kwargs):
    """Returns the initial value (or fn) for a new variable; broadcasts from
    the chief for the first replica on each worker."""
    if replica_id == 0:  # First replica on each worker.
      assert device is not None
      assert primary_var is None

      def initial_value_fn():  # pylint: disable=g-missing-docstring
        # Only the first device participates in the broadcast of initial values.
        group_key = self._collective_keys.get_group_key([device])
        group_size = self._num_workers
        collective_instance_key = (
            self._collective_keys.get_instance_key(group_key, device))

        with ops.device(device):
          initial_value = kwargs["initial_value"]
          if callable(initial_value):
            initial_value = initial_value()
          if isinstance(initial_value, base.CheckpointInitialValue):
            initial_value = initial_value.wrapped_value
          assert not callable(initial_value)
          initial_value = ops.convert_to_tensor(
              initial_value, dtype=kwargs.get("dtype", None))

          if self._num_workers > 1:
            if self._is_chief:
              bcast_send = collective_ops.broadcast_send(
                  initial_value, initial_value.shape, initial_value.dtype,
                  group_size, group_key, collective_instance_key)
              with ops.control_dependencies([bcast_send]):
                return array_ops.identity(initial_value)
            else:
              return collective_ops.broadcast_recv(initial_value.shape,
                                                   initial_value.dtype,
                                                   group_size, group_key,
                                                   collective_instance_key)
          return initial_value

      return initial_value_fn
    else:
      return super(CollectiveAllReduceExtended,
                   self)._get_variable_creator_initial_value(
                       replica_id=replica_id,
                       device=device,
                       primary_var=primary_var,
                       **kwargs)
  def _make_input_context(self):
    """Builds the `InputContext` describing this worker's input pipeline."""
    input_context = distribute_lib.InputContext(
        num_input_pipelines=self._num_workers,
        input_pipeline_id=self._id_in_cluster,
        num_replicas_in_sync=self._num_replicas_in_sync)
    return input_context
  def _experimental_distribute_dataset(self, dataset, options):
    """Shards `dataset` across workers; PER_REPLICA replication unsupported."""
    if (options and options.experimental_replication_mode ==
        distribute_lib.InputReplicationMode.PER_REPLICA):
      raise NotImplementedError(
          "InputReplicationMode.PER_REPLICA "
          "is only supported in "
          "`distribute_datasets_from_function` "
          "of tf.distribute.MirroredStrategy"
      )
    input_context = self._make_input_context()
    return input_lib.get_distributed_dataset(
        dataset,
        self._input_workers_with_options(options),
        self._container_strategy(),
        num_replicas_in_sync=self._num_replicas_in_sync,
        input_context=input_context,
        options=options)
  def _distribute_datasets_from_function(self, dataset_fn, options):
    """Distributes datasets built by `dataset_fn`, one context per worker."""
    if (options and options.experimental_replication_mode ==
        distribute_lib.InputReplicationMode.PER_REPLICA):
      raise NotImplementedError(
          "InputReplicationMode.PER_REPLICA "
          "is only supported in "
          "`distribute_datasets_from_function` "
          "of tf.distribute.MirroredStrategy")
    input_context = self._make_input_context()
    return input_lib.get_distributed_datasets_from_function(
        dataset_fn=dataset_fn,
        input_workers=self._input_workers_with_options(options),
        input_contexts=[input_context],
        strategy=self._container_strategy(),
        options=options)
def _experimental_distribute_values_from_function(self, value_fn):
per_replica_values = []
num_local_replicas = len(self.worker_devices)
for local_replica_id in range(num_local_replicas):
replica_id = (self._id_in_cluster * num_local_replicas +
local_replica_id)
value_context = distribute_lib.ValueContext(
replica_id, self._num_replicas_in_sync)
per_replica_values.append(value_fn(value_context))
return distribute_utils.regroup(per_replica_values, always_wrap=True)
  def _make_dataset_iterator(self, dataset):
    """Returns a sharded `DatasetIterator` over `dataset` (legacy TF1 path)."""
    input_context = self._make_input_context()
    return input_lib.DatasetIterator(
        dataset,
        self._input_workers,
        self._container_strategy(),
        num_replicas_in_sync=self._num_replicas_in_sync,
        input_context=input_context)
  def _make_input_fn_iterator(
      self,
      input_fn,
      replication_mode=distribute_lib.InputReplicationMode.PER_WORKER):
    """Returns an iterator over datasets produced by `input_fn` (legacy TF1 path).

    `input_fn` is called with this worker's `InputContext`; note
    `replication_mode` is accepted for interface compatibility but not
    consulted here.
    """
    input_context = self._make_input_context()
    return input_lib.InputFunctionIterator(input_fn, self._input_workers,
                                           [input_context],
                                           self._container_strategy())
  def _configure(self,
                 session_config=None,
                 cluster_spec=None,
                 task_type=None,
                 task_id=None):
    """Re-configures the strategy for a (possibly new) cluster.

    Args:
      session_config: a `tf.compat.v1.ConfigProto` updated in place with
        collective-ops settings, or None.
      cluster_spec: cluster layout; when given, multi-worker state is
        re-initialized from a resolver built around it.
      task_type: this task's type in `cluster_spec` (e.g. "worker").
      task_id: this task's index within its type.
    """
    if cluster_spec:
      cluster_resolver = SimpleClusterResolver(
          cluster_spec=multi_worker_util.normalize_cluster_spec(cluster_spec),
          task_type=task_type,
          task_id=task_id,
          num_accelerators={
              self._local_device_type: self._num_devices_per_worker},
          rpc_layer=self._rpc_layer)
      self._initialize_multi_worker(cluster_resolver)
      assert isinstance(self._cross_device_ops,
                        cross_device_ops_lib.CollectiveAllReduce)
    if session_config:
      session_config.CopyFrom(self._update_config_proto(session_config))
  def _update_config_proto(self, config_proto):
    """Returns a copy of `config_proto` amended with collective-ops settings."""
    updated_config = copy.deepcopy(config_proto)
    # Enable the scoped allocator optimization for CollectiveOps. This
    # optimization converts many small all-reduces into fewer larger
    # all-reduces.
    rewrite_options = updated_config.graph_options.rewrite_options
    rewrite_options.scoped_allocator_optimization = (
        rewriter_config_pb2.RewriterConfig.ON)
    # We turn on ScopedAllocator only for CollectiveReduce op, i.e. enable_op =
    # ["CollectiveReduce"]. Since we can't assign to a repeated proto field, we
    # clear it and then append.
    del rewrite_options.scoped_allocator_opts.enable_op[:]
    rewrite_options.scoped_allocator_opts.enable_op.append("CollectiveReduce")
    if (not ops.executing_eagerly_outside_functions() and
        self._communication_options.implementation ==
        collective_util.CommunicationImplementation.NCCL):
      updated_config.experimental.collective_nccl = True
    # Single-worker setups need no group leader or device filters.
    if not self._cluster_spec:
      return updated_config
    assert self._task_type
    assert self._task_id is not None
    # The group leader coordinates collective op instances across workers.
    updated_config.experimental.collective_group_leader = (
        multi_worker_util.collective_leader(self._cluster_spec, self._task_type,
                                            self._task_id))
    # Restrict visible devices to this task only.
    del updated_config.device_filters[:]
    updated_config.device_filters.append(
        "/job:%s/task:%d" % (self._task_type, self._task_id))
    return updated_config
  def _get_cross_device_ops(self, value):
    """Picks the cross-device ops instance to use for `value`.

    Values spanning all compute devices use the main cross-device ops, which
    can potentially leverage NCCL; anything else (e.g. host tensors) falls
    back to the host cross-device ops.
    """
    if isinstance(value, values.DistributedValues):
      num_devices = len(value._values)  # pylint: disable=protected-access
    else:
      num_devices = 1
    if num_devices == len(self.worker_devices):
      return self._cross_device_ops
    else:
      return self._host_cross_device_ops
  def _gather_to_implementation(self, value, destinations, axis, options):
    """Gathers `value` across replicas along `axis` onto `destinations`."""
    return self._get_cross_device_ops(value)._gather(  # pylint: disable=protected-access
        value,
        destinations=destinations,
        axis=axis,
        options=options)
  def _reduce_to(self, reduce_op, value, destinations, options):
    """Reduces `value` to `destinations`, using collectives across workers.

    Args:
      reduce_op: a `tf.distribute.ReduceOp`.
      value: a per-replica value (or plain value) to reduce.
      destinations: devices to reduce to.
      options: `tf.distribute.experimental.CommunicationOptions` merged with
        the strategy's configured options.
    """
    # A MEAN reduction of an already-mirrored value is a no-op.
    if (isinstance(value, values.Mirrored) and
        reduce_op == reduce_util.ReduceOp.MEAN):
      return value
    assert not isinstance(value, values.Mirrored)
    # With a single local device, unwrap to the lone component value.
    if (isinstance(value, values.DistributedValues) and
        len(self.worker_devices) == 1):
      value = value.values[0]
    # When there are multiple workers, we need to reduce across workers using
    # collective ops.
    if (not isinstance(value, values.DistributedValues) and
        self._num_workers == 1):
      # This function handles reducing values that are not PerReplica or
      # Mirrored values. For example, the same value could be present on all
      # replicas in which case `value` would be a single value or value could
      # be 0.
      return cross_device_ops_lib.reduce_non_distributed_value(
          reduce_op, value, destinations, len(self.worker_devices))
    return self._get_cross_device_ops(value).reduce(
        reduce_op,
        value,
        destinations=destinations,
        options=self._communication_options.merge(options))
  def _replica_ctx_all_reduce(self, reduce_op, value, options=None):
    """All-reduces `value` from within a replica context.

    This implementation avoids using `merge_call` and just launches collective
    ops in one replica.
    """
    if options is None:
      options = collective_util.Options()
    if context.executing_eagerly():
      # In eager mode, falls back to the default implementation that uses
      # `merge_call`. Replica functions are running sequentially in eager mode,
      # and due to the blocking nature of collective ops, execution will hang if
      # collective ops are to be launched sequentially.
      return super()._replica_ctx_all_reduce(reduce_op, value, options)
    replica_context = ds_context.get_replica_context()
    assert replica_context, (
        "`StrategyExtended._replica_ctx_all_reduce` must be called in a "
        "replica context")
    return self._cross_device_ops._all_reduce(  # pylint: disable=protected-access
        reduce_op,
        value,
        replica_context._replica_id,  # pylint: disable=protected-access
        options)
def _check_health(self):
while True:
if self._check_health_thread_should_stop.is_set():
return
for job in self._cluster_spec.jobs:
for task_id in range(self._cluster_spec.num_tasks(job)):
peer = "/job:{}/replica:0/task:{}".format(job, task_id)
attempts = 0
while True:
attempts += 1
try:
context.context().check_collective_ops_peer_health(
peer, timeout_in_ms=self._check_health_timeout * 1000)
# If check_collective_ops_peer_health doesn't raise an Exception,
break
except (errors.UnavailableError, errors.FailedPreconditionError,
errors.DeadlineExceededError) as e:
# - FailedPrecondition: when the peer has restarted.
if attempts < self._check_health_retry_limit:
logging.warning("%s seems down, retrying %d/%d", peer, attempts,
self._check_health_retry_limit)
continue
logging.error(
"Cluster check alive failed, %s is down, "
"aborting collectives: %s", peer, e)
context.context().abort_collective_ops(
errors.UNAVAILABLE,
"cluster check alive failed, {} is down".format(peer))
return
except Exception as e: # pylint: disable=broad-except
logging.error("Unexpected exception in check alive: %s", e)
context.context().abort_collective_ops(
errors.INTERNAL,
"unexecpted exception in check alive: %s" % e)
return
time.sleep(self._check_health_interval)
  def _start_check_health_thread(self):
    """Waits for the whole cluster, then starts the health-check daemon thread."""
    # Use a dummy all-reduce as a barrier to wait for all workers to be up,
    # otherwise the check health may fail immediately.
    # Use array_ops.identity to create the dummy tensor so that we have a new
    # Tensor. If we use constant it may be a cached from on a /job:localhost
    # device, which will cause some code that relies on tensor.device to error.
    #
    # TODO(b/151232436): change to an explicit barrier if we have it.
    dummy_value = array_ops.identity([])
    logging.info("Waiting for the cluster, timeout = %s",
                 self._check_health_initial_timeout or "inf")
    try:
      self._host_cross_device_ops.reduce(
          reduce_util.ReduceOp.SUM,
          dummy_value,
          dummy_value,
          options=collective_util.Options(
              timeout_seconds=self._check_health_initial_timeout,
              implementation=collective_util.CommunicationImplementation.RING))
      if context.is_async():
        context.async_wait()
    except errors.DeadlineExceededError:
      raise RuntimeError(
          "Timeout waiting for the cluster, timeout is %d seconds" %
          self._check_health_initial_timeout)
    logging.info("Cluster is ready.")
    self._check_health_thread_should_stop = threading.Event()
    # Start the thread as daemon to avoid it blocking the program from exiting.
    # We try best to shutdown the thread but __del__ is not guaranteed to be
    # called when the program exits.
    self._check_health_thread = threading.Thread(
        target=self._check_health,
        daemon=True)
    self._check_health_thread.start()
def _stop_check_health_thread(self):
if getattr(self, "_check_health_thread", None):
logging.info("stopping check health thread")
self._check_health_thread_should_stop.set()
self._check_health_thread.join()
self._check_health_thread = None
logging.info("check health thread stopped")
  def _warn_nccl_no_gpu(self):
    """Logs a warning when NCCL is requested but local devices are not GPUs."""
    if ((self._communication_options.implementation ==
         collective_util.CommunicationImplementation.NCCL) and
        self._local_device_type != "GPU"):
      logging.warning("Enabled NCCL communication but no GPUs detected/"
                      "specified.")
  def _in_multi_worker_mode(self):
    """Returns True when the cluster has more than one worker."""
    return self._num_workers > 1
  @property
  def experimental_between_graph(self):
    # Each worker builds its own graph (between-graph replication).
    return True
  @property
  def experimental_should_init(self):
    return True
  @property
  def should_checkpoint(self):
    # Only the chief worker writes checkpoints.
    return self._is_chief
  @property
  def should_save_summary(self):
    # Only the chief worker writes summaries.
    return self._is_chief
  @property
  def _num_replicas_in_sync(self):
    # Total replicas = local replicas times number of workers.
    return len(self.worker_devices) * self._num_workers
  # TODO(priyag): Delete this once all strategies use global batch size.
  @property
  def _global_batch_size(self):
    # `make_input_fn_iterator` assumes a per-cluster (global) batch size.
    return True
  def _get_replica_id_in_sync_group(self, replica_id):
    """Maps a local replica id to its global id across the cluster."""
    return self._id_in_cluster * len(self.worker_devices) + replica_id
  def _get_local_replica_id(self, replica_id_in_sync_group):
    """Maps a global replica id back to this worker's local replica id."""
    return (replica_id_in_sync_group -
            self._id_in_cluster * len(self.worker_devices))
  def __deepcopy__(self, memo):
    """Deep-copies the extended strategy; disallowed once eager threads exist.

    Raises:
      ValueError: if the health-check thread has been started (a proxy for
        running in eager mode), since thread state cannot be copied.
    """
    # We check the check health thread instead of whether we are in eager mode
    # to limit the backward incompatibility.
    if hasattr(self, "_check_health_thread"):
      raise ValueError(
          "MultiWorkerMirroredStrategy cannot be deep copied in eager mode. "
          "If you're using Estimator and see this error message, call "
          "tf.compat.v1.disable_eager_execution() at the beginning of your "
          "program")
    # Manually deep-copy every attribute into a fresh instance.
    cls = self.__class__
    result = cls.__new__(cls)
    memo[id(self)] = result
    for k, v in self.__dict__.items():
      setattr(result, k, copy.deepcopy(v, memo))
    return result
| true | true |
1c32884b97603e9760e7db39e37fe69b40866493 | 5,702 | py | Python | alipay/aop/api/domain/InterTradeCompanyConsultOpenApiResult.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/domain/InterTradeCompanyConsultOpenApiResult.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/domain/InterTradeCompanyConsultOpenApiResult.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class InterTradeCompanyConsultOpenApiResult(object):
    """Result model for the inter-trade company consult OpenAPI.

    Holds a company's identity fields (`belong_group`, `name`, `ou`,
    `related_flag`) and several id lists (`cid_list`, `mid_list`,
    `pid_list`, `uid_list`), and converts to/from the plain-dict wire
    format used by the Alipay SDK.

    Improvements over the generated original: the per-field serialization
    boilerplate is table-driven, and `to_alipay_dict` no longer mutates
    the stored list attributes while serializing.
    """

    # All serializable fields, in wire order.
    _FIELDS = ('belong_group', 'cid_list', 'mid_list', 'name', 'ou',
               'pid_list', 'related_flag', 'uid_list')

    def __init__(self):
        self._belong_group = None
        self._cid_list = None
        self._mid_list = None
        self._name = None
        self._ou = None
        self._pid_list = None
        self._related_flag = None
        self._uid_list = None

    @property
    def belong_group(self):
        return self._belong_group

    @belong_group.setter
    def belong_group(self, value):
        self._belong_group = value

    @property
    def cid_list(self):
        return self._cid_list

    @cid_list.setter
    def cid_list(self, value):
        # Only list values are accepted; a shallow copy is stored.
        if isinstance(value, list):
            self._cid_list = list(value)

    @property
    def mid_list(self):
        return self._mid_list

    @mid_list.setter
    def mid_list(self, value):
        if isinstance(value, list):
            self._mid_list = list(value)

    @property
    def name(self):
        return self._name

    @name.setter
    def name(self, value):
        self._name = value

    @property
    def ou(self):
        return self._ou

    @ou.setter
    def ou(self, value):
        self._ou = value

    @property
    def pid_list(self):
        return self._pid_list

    @pid_list.setter
    def pid_list(self, value):
        if isinstance(value, list):
            self._pid_list = list(value)

    @property
    def related_flag(self):
        return self._related_flag

    @related_flag.setter
    def related_flag(self, value):
        self._related_flag = value

    @property
    def uid_list(self):
        return self._uid_list

    @uid_list.setter
    def uid_list(self, value):
        if isinstance(value, list):
            self._uid_list = list(value)

    @staticmethod
    def _to_dict_value(value):
        """Converts a field value to plain dict/list form for the wire."""
        if isinstance(value, list):
            return [v.to_alipay_dict() if hasattr(v, 'to_alipay_dict') else v
                    for v in value]
        if hasattr(value, 'to_alipay_dict'):
            return value.to_alipay_dict()
        return value

    def to_alipay_dict(self):
        """Returns the dict representation; falsy fields are omitted."""
        params = dict()
        for field in self._FIELDS:
            value = getattr(self, field)
            if value:
                params[field] = self._to_dict_value(value)
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Builds an instance from a wire dict; returns None for empty input."""
        if not d:
            return None
        o = InterTradeCompanyConsultOpenApiResult()
        for field in InterTradeCompanyConsultOpenApiResult._FIELDS:
            if field in d:
                setattr(o, field, d[field])
        return o
| 32.033708 | 75 | 0.542967 |
import json
from alipay.aop.api.constant.ParamConstants import *
class InterTradeCompanyConsultOpenApiResult(object):
def __init__(self):
self._belong_group = None
self._cid_list = None
self._mid_list = None
self._name = None
self._ou = None
self._pid_list = None
self._related_flag = None
self._uid_list = None
@property
def belong_group(self):
return self._belong_group
@belong_group.setter
def belong_group(self, value):
self._belong_group = value
@property
def cid_list(self):
return self._cid_list
@cid_list.setter
def cid_list(self, value):
if isinstance(value, list):
self._cid_list = list()
for i in value:
self._cid_list.append(i)
@property
def mid_list(self):
return self._mid_list
@mid_list.setter
def mid_list(self, value):
if isinstance(value, list):
self._mid_list = list()
for i in value:
self._mid_list.append(i)
@property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def ou(self):
return self._ou
@ou.setter
def ou(self, value):
self._ou = value
@property
def pid_list(self):
return self._pid_list
@pid_list.setter
def pid_list(self, value):
if isinstance(value, list):
self._pid_list = list()
for i in value:
self._pid_list.append(i)
@property
def related_flag(self):
return self._related_flag
@related_flag.setter
def related_flag(self, value):
self._related_flag = value
@property
def uid_list(self):
return self._uid_list
@uid_list.setter
def uid_list(self, value):
if isinstance(value, list):
self._uid_list = list()
for i in value:
self._uid_list.append(i)
def to_alipay_dict(self):
params = dict()
if self.belong_group:
if hasattr(self.belong_group, 'to_alipay_dict'):
params['belong_group'] = self.belong_group.to_alipay_dict()
else:
params['belong_group'] = self.belong_group
if self.cid_list:
if isinstance(self.cid_list, list):
for i in range(0, len(self.cid_list)):
element = self.cid_list[i]
if hasattr(element, 'to_alipay_dict'):
self.cid_list[i] = element.to_alipay_dict()
if hasattr(self.cid_list, 'to_alipay_dict'):
params['cid_list'] = self.cid_list.to_alipay_dict()
else:
params['cid_list'] = self.cid_list
if self.mid_list:
if isinstance(self.mid_list, list):
for i in range(0, len(self.mid_list)):
element = self.mid_list[i]
if hasattr(element, 'to_alipay_dict'):
self.mid_list[i] = element.to_alipay_dict()
if hasattr(self.mid_list, 'to_alipay_dict'):
params['mid_list'] = self.mid_list.to_alipay_dict()
else:
params['mid_list'] = self.mid_list
if self.name:
if hasattr(self.name, 'to_alipay_dict'):
params['name'] = self.name.to_alipay_dict()
else:
params['name'] = self.name
if self.ou:
if hasattr(self.ou, 'to_alipay_dict'):
params['ou'] = self.ou.to_alipay_dict()
else:
params['ou'] = self.ou
if self.pid_list:
if isinstance(self.pid_list, list):
for i in range(0, len(self.pid_list)):
element = self.pid_list[i]
if hasattr(element, 'to_alipay_dict'):
self.pid_list[i] = element.to_alipay_dict()
if hasattr(self.pid_list, 'to_alipay_dict'):
params['pid_list'] = self.pid_list.to_alipay_dict()
else:
params['pid_list'] = self.pid_list
if self.related_flag:
if hasattr(self.related_flag, 'to_alipay_dict'):
params['related_flag'] = self.related_flag.to_alipay_dict()
else:
params['related_flag'] = self.related_flag
if self.uid_list:
if isinstance(self.uid_list, list):
for i in range(0, len(self.uid_list)):
element = self.uid_list[i]
if hasattr(element, 'to_alipay_dict'):
self.uid_list[i] = element.to_alipay_dict()
if hasattr(self.uid_list, 'to_alipay_dict'):
params['uid_list'] = self.uid_list.to_alipay_dict()
else:
params['uid_list'] = self.uid_list
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = InterTradeCompanyConsultOpenApiResult()
if 'belong_group' in d:
o.belong_group = d['belong_group']
if 'cid_list' in d:
o.cid_list = d['cid_list']
if 'mid_list' in d:
o.mid_list = d['mid_list']
if 'name' in d:
o.name = d['name']
if 'ou' in d:
o.ou = d['ou']
if 'pid_list' in d:
o.pid_list = d['pid_list']
if 'related_flag' in d:
o.related_flag = d['related_flag']
if 'uid_list' in d:
o.uid_list = d['uid_list']
return o
| true | true |
1c3289843eccdfc2f96541b9080888251b654cde | 500 | py | Python | arithmetic_op04.py | Mr-Umidjon/Arithmetic_operators | 572027880d929134f1aa58477f4bde00a910454c | [
"MIT"
] | null | null | null | arithmetic_op04.py | Mr-Umidjon/Arithmetic_operators | 572027880d929134f1aa58477f4bde00a910454c | [
"MIT"
] | null | null | null | arithmetic_op04.py | Mr-Umidjon/Arithmetic_operators | 572027880d929134f1aa58477f4bde00a910454c | [
"MIT"
] | null | null | null | # number nomli uch honali butun sonli o’zgaruvchi yarating.
# number o’zgaruvchining birinchi raqamini toping va x1 ga taminlang.
# number o’zgaruvchining ikkinchi raqamini toping va x2 ga taminlang.
# number o’zgaruvchining uchunchi raqamini toping va x3 ga taminlang.
# Ularning yig’indisini answer nomli o’zgaruchiga taminlang va natijani
# chop eting.
number = 184
x1 = 184 % 10
number //= 10
x2 = number % 10
number //= 10
x3 = number % 10
number //= 10
answer = x1 + x2 + x3
print(answer)
| 23.809524 | 71 | 0.742 |
number = 184
x1 = 184 % 10
number //= 10
x2 = number % 10
number //= 10
x3 = number % 10
number //= 10
answer = x1 + x2 + x3
print(answer)
| true | true |
1c328a667887edc617702bd49f36f391c052d1d2 | 14,439 | py | Python | examples/maze_runner/maze_runner.py | noahhefner/nhefner_pygame_menus | f03d0d0b78d4513d81e80aec08bd7479ff6c62bf | [
"MIT"
] | null | null | null | examples/maze_runner/maze_runner.py | noahhefner/nhefner_pygame_menus | f03d0d0b78d4513d81e80aec08bd7479ff6c62bf | [
"MIT"
] | null | null | null | examples/maze_runner/maze_runner.py | noahhefner/nhefner_pygame_menus | f03d0d0b78d4513d81e80aec08bd7479ff6c62bf | [
"MIT"
] | null | null | null | """
This script shows how to implement a menu system with nhefner_pygame_menus. The
general structure should be laid out as follows:
imports
classes
def main:
game setup stuff here (create players, setup world, etc.)
menu manager stuff here (create manager, buttons, pages, etc.)
while true:
manager.run()
game_in_progress = True
while game_in_progress:
game logic here
game drawing here
game reset code here
if __name__ == "__main__":
main()
pygame.quit()
---------------------------- CREDITS FOR THIS GAME -----------------------------
Sample Python/Pygame Programs
Simpson College Computer Science
http://programarcadegames.com/
http://simpson.edu/computer-science/
From:
http://programarcadegames.com/python_examples/f.php?file=maze_runner.py
Explanation video: http://youtu.be/5-SbFanyUkQ
Part of a series:
http://programarcadegames.com/python_examples/f.php?file=move_with_walls_example.py
http://programarcadegames.com/python_examples/f.php?file=maze_runner.py
http://programarcadegames.com/python_examples/f.php?file=platform_jumper.py
http://programarcadegames.com/python_examples/f.php?file=platform_scroller.py
http://programarcadegames.com/python_examples/f.php?file=platform_moving.py
http://programarcadegames.com/python_examples/sprite_sheets/
--------------------------------------------------------------------------------
"""
import pygame
# Imports for the menu system
import os
from nhefner_pygame_menus import *
# Color constants as (R, G, B) tuples, used for walls and the player.
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
BLUE = (0, 0, 255)
GREEN = (0, 255, 0)
RED = (255, 0, 0)
PURPLE = (255, 0, 255)
class Wall(pygame.sprite.Sprite):
    """A static rectangular wall sprite that blocks the player."""

    def __init__(self, x, y, width, height, color):
        """Create a wall at (x, y) with the given size and fill color."""

        # Call the parent's constructor
        super().__init__()

        # Make a wall surface of the size specified in the parameters
        self.image = pygame.Surface([width, height])
        self.image.fill(color)

        # Make our top-left corner the passed-in location.
        self.rect = self.image.get_rect()
        self.rect.y = y
        self.rect.x = x
class Player(pygame.sprite.Sprite):
    """The player-controlled block sprite."""

    # Set speed vector (pixels per frame)
    change_x = 0
    change_y = 0

    def __init__(self, x, y):
        """Create the 15x15 player block at (x, y)."""

        # Call the parent's constructor
        super().__init__()

        # Set height, width
        self.image = pygame.Surface([15, 15])
        self.image.fill(BLUE)

        # Make our top-left corner the passed-in location.
        self.rect = self.image.get_rect()
        self.rect.y = y
        self.rect.x = x

    def set_color(self, new_color):
        """ Change color of player block. """
        self.image.fill(new_color)

    def changespeed(self, x, y):
        """ Change the speed of the player. Called with a keypress. """
        self.change_x += x
        self.change_y += y

    def move(self, walls):
        """ Find a new position for the player """

        # Move left/right first, then resolve horizontal collisions.
        self.rect.x += self.change_x

        # Did this update cause us to hit a wall?
        block_hit_list = pygame.sprite.spritecollide(self, walls, False)
        for block in block_hit_list:
            # If we are moving right, set our right side to the left side of
            # the item we hit
            if self.change_x > 0:
                self.rect.right = block.rect.left
            else:
                # Otherwise if we are moving left, do the opposite.
                self.rect.left = block.rect.right

        # Move up/down, then resolve vertical collisions.
        self.rect.y += self.change_y

        # Check and see if we hit anything
        block_hit_list = pygame.sprite.spritecollide(self, walls, False)
        for block in block_hit_list:
            # Reset our position based on the top/bottom of the object.
            if self.change_y > 0:
                self.rect.bottom = block.rect.top
            else:
                self.rect.top = block.rect.bottom
class Room(object):
    """ Base class for all rooms. """

    # Each room has a list of walls, and of enemy sprites.
    wall_list = None
    enemy_sprites = None

    def __init__(self):
        """ Constructor, create our sprite groups. """
        self.wall_list = pygame.sprite.Group()
        self.enemy_sprites = pygame.sprite.Group()
class Room1(Room):
    """First room: white outer border with one blue wall down the middle."""

    def __init__(self):
        super().__init__()

        # Each spec is [x, y, width, height, color].
        wall_specs = [[0, 0, 20, 250, WHITE],
                      [0, 350, 20, 250, WHITE],
                      [780, 0, 20, 250, WHITE],
                      [780, 350, 20, 250, WHITE],
                      [20, 0, 760, 20, WHITE],
                      [20, 580, 760, 20, WHITE],
                      [390, 50, 20, 500, BLUE]]

        # Build each wall sprite and register it with the room.
        for spec in wall_specs:
            self.wall_list.add(Wall(*spec))
class Room2(Room):
    """Second room: red outer border with two green vertical walls."""

    def __init__(self):
        super().__init__()

        # Each spec is [x, y, width, height, color].
        wall_specs = [[0, 0, 20, 250, RED],
                      [0, 350, 20, 250, RED],
                      [780, 0, 20, 250, RED],
                      [780, 350, 20, 250, RED],
                      [20, 0, 760, 20, RED],
                      [20, 580, 760, 20, RED],
                      [190, 50, 20, 500, GREEN],
                      [590, 50, 20, 500, GREEN]]

        for spec in wall_specs:
            self.wall_list.add(Wall(*spec))
class Room3(Room):
    """Third room: purple border plus staggered columns forming a maze."""

    def __init__(self):
        super().__init__()

        # Purple border around the room; each spec is [x, y, w, h, color].
        border_specs = [[0, 0, 20, 250, PURPLE],
                        [0, 350, 20, 250, PURPLE],
                        [780, 0, 20, 250, PURPLE],
                        [780, 350, 20, 250, PURPLE],
                        [20, 0, 760, 20, PURPLE],
                        [20, 580, 760, 20, PURPLE]]
        for spec in border_specs:
            self.wall_list.add(Wall(*spec))

        # Red columns along the top and bottom halves of the room.
        for x in range(100, 800, 100):
            for y in range(50, 451, 300):
                self.wall_list.add(Wall(x, y, 20, 200, RED))

        # White columns across the middle, offset from the red ones.
        for x in range(150, 700, 100):
            self.wall_list.add(Wall(x, 200, 20, 200, WHITE))
def main():
    """Set up the game and menus, then run menu/game cycles forever."""

    # Call this function so the Pygame library can initialize itself
    pygame.init()

    # Create an 800x600 sized screen
    screen = pygame.display.set_mode([800, 600])
    clock = pygame.time.Clock()

    # Set the title of the window
    pygame.display.set_caption('Maze Runner')

    # Create the player paddle object
    player = Player(50, 50)
    movingsprites = pygame.sprite.Group()
    movingsprites.add(player)

    # Build the three rooms; the player moves between them left/right.
    rooms = []

    room = Room1()
    rooms.append(room)

    room = Room2()
    rooms.append(room)

    room = Room3()
    rooms.append(room)

    current_room_no = 0
    current_room = rooms[current_room_no]

    """ Menu Manager Additions --------------------------------------------- """

    # Step One: Create menu manager --------------------------------------------
    man = MenuManager(screen, clock)

    # Step Two: Create pages ---------------------------------------------------
    home = Page("home")
    options = Page("options")
    confirm_exit = Page("confirm_exit")

    # Step Three: Create elements for the pages --------------------------------
    font = pygame.font.Font("arcade_font.ttf", 40) # Font for text buttons

    button_play = ButtonText("PLAY", font, pos = [20, 375], background_color = [255, 0, 0])
    button_options = ButtonText("OPTIONS", font, pos = [20, 400], background_color = [255, 0, 0])
    button_quit = ButtonText("QUIT", font, pos = [20, 425], background_color = [255, 0, 0])

    # When making buttons, we need to use the add_action method to define their
    # functionality. If the function requires arguments, we simply pass them to
    # the add_action method after the function name.
    button_play.add_action(man.exit_menu)
    # Function-with-arguments example: navigate takes the id of the page we
    # want to navigate to, so we pass the id of our options page.
    button_options.add_action(man.navigate, "options")
    button_quit.add_action(man.navigate, "confirm_exit")

    button_back_op = ButtonText("BACK", font, pos = [10, 10], background_color = [255, 0, 0])
    # NOTE(review): "CHOSSE" looks like a typo for "CHOOSE"; it is a runtime
    # string, so it is left unchanged here.
    text_player_color = Text("CHOSSE YOUR PLAYER COLOR", font, pos = [10, 100])
    button_blue_player = ButtonText("BLUE", font, pos = [10, 150])
    button_red_player = ButtonText("RED", font, pos = [10, 200])
    picture_arrow = Picture("arrow.png", pos = [120, 120])

    button_back_op.add_action(man.navigate, "home")
    button_blue_player.add_action(player.set_color, BLUE)
    button_blue_player.add_action(picture_arrow.set_pos, [120, 120])
    button_red_player.add_action(player.set_color, RED)
    button_red_player.add_action(picture_arrow.set_pos, [120, 170])
    # Notice we can use the set_color method on the Player class to change the
    # color of the player when the user presses this button.

    text_confirmation = Text("ARE YOU SURE ABOUT THAT", font, pos = [20, 20], background_color = [255, 0, 0])
    button_yes = ButtonText("YES", font, pos = [20, 70], background_color = [255, 0, 0])
    button_no = ButtonText("NO", font, pos = [20, 95], background_color = [255, 0, 0])

    button_yes.add_action(man.kill_program)
    button_no.add_action(man.navigate, "home")

    # Step Four: Add elements to their pages -----------------------------------
    home.add_element(button_play)
    home.add_element(button_options)
    home.add_element(button_quit)

    options.add_element(button_back_op)
    options.add_element(text_player_color)
    options.add_element(button_blue_player)
    options.add_element(button_red_player)
    options.add_element(picture_arrow)

    confirm_exit.add_element(text_confirmation)
    confirm_exit.add_element(button_yes)
    confirm_exit.add_element(button_no)

    # Step Five: Add pages to menu manager -------------------------------------
    man.add_page(home)
    man.add_page(options)
    man.add_page(confirm_exit)

    # Step Six: Set a start page -----------------------------------------------
    man.set_start_page("home")

    """
    NOTICE: Put everything in an infinite loop. What will happen is that after
    we exit the menu, we will enter the game.
    """
    while True:

        # Call this function to run the menu manager
        man.run()

        # Game code goes here, use while loop as normal
        in_game = True
        while in_game:

            # --- Event Processing ---

            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    exit() # Kill the program when we hit the X

                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_LEFT:
                        player.changespeed(-5, 0)
                    if event.key == pygame.K_RIGHT:
                        player.changespeed(5, 0)
                    if event.key == pygame.K_UP:
                        player.changespeed(0, -5)
                    if event.key == pygame.K_DOWN:
                        player.changespeed(0, 5)
                    if event.key == pygame.K_q:
                        in_game = False

                if event.type == pygame.KEYUP:
                    if event.key == pygame.K_LEFT:
                        player.changespeed(5, 0)
                    if event.key == pygame.K_RIGHT:
                        player.changespeed(-5, 0)
                    if event.key == pygame.K_UP:
                        player.changespeed(0, 5)
                    if event.key == pygame.K_DOWN:
                        player.changespeed(0, -5)

            # --- Game Logic ---

            player.move(current_room.wall_list)

            # Walking off the left edge moves to the previous room (if any).
            if player.rect.x < 0:
                if current_room_no == 0:
                    player.rect.x = 0
                elif current_room_no == 1:
                    current_room_no = 0
                    current_room = rooms[current_room_no]
                    player.rect.x = 790
                elif current_room_no == 2:
                    current_room_no = 1
                    current_room = rooms[current_room_no]
                    player.rect.x = 790

            # Walking off the right edge moves to the next room; leaving the
            # last room ends the game.
            if player.rect.x > 801:
                if current_room_no == 0:
                    current_room_no = 1
                    current_room = rooms[current_room_no]
                    player.rect.x = 0
                elif current_room_no == 1:
                    current_room_no = 2
                    current_room = rooms[current_room_no]
                    player.rect.x = 0
                else:
                    in_game = False

            # --- Drawing ---
            screen.fill(BLACK)

            movingsprites.draw(screen)
            current_room.wall_list.draw(screen)

            pygame.display.flip()

            clock.tick(60)

        """
        This is important! When we are done playing the game, we need to reset
        everything to how it was before the game started. This allows us to play
        the game multiple times without killing the program. In this case, we
        need to reset the current room, current room number, player position,
        and the change_x and change_y attributes of the player.
        """
        current_room_no = 0
        current_room = rooms[current_room_no]
        player.rect.x = 50
        player.rect.y = 50
        player.change_x = 0
        player.change_y = 0
if __name__ == "__main__":
    # Run the game, then shut Pygame down cleanly on exit.
    main()
    pygame.quit()
| 33.193103 | 109 | 0.557172 | import pygame
import os
from nhefner_pygame_menus import *
# Color constants as (R, G, B) tuples, used for walls and the player.
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
BLUE = (0, 0, 255)
GREEN = (0, 255, 0)
RED = (255, 0, 0)
PURPLE = (255, 0, 255)
class Wall(pygame.sprite.Sprite):
    """A static rectangular wall sprite that blocks the player."""

    def __init__(self, x, y, width, height, color):
        """Create a wall at (x, y) with the given size and fill color."""
        super().__init__()
        # Make a wall surface of the size specified in the parameters
        self.image = pygame.Surface([width, height])
        self.image.fill(color)
        # Make our top-left corner the passed-in location.
        self.rect = self.image.get_rect()
        self.rect.y = y
        self.rect.x = x
class Player(pygame.sprite.Sprite):
# Set speed vector
change_x = 0
change_y = 0
def __init__(self, x, y):
# Call the parent's constructor
super().__init__()
self.image = pygame.Surface([15, 15])
self.image.fill(BLUE)
self.rect = self.image.get_rect()
self.rect.y = y
self.rect.x = x
def set_color(self, new_color):
self.image.fill(new_color)
def changespeed(self, x, y):
self.change_x += x
self.change_y += y
def move(self, walls):
self.rect.x += self.change_x
block_hit_list = pygame.sprite.spritecollide(self, walls, False)
for block in block_hit_list:
if self.change_x > 0:
self.rect.right = block.rect.left
else:
self.rect.left = block.rect.right
self.rect.y += self.change_y
block_hit_list = pygame.sprite.spritecollide(self, walls, False)
for block in block_hit_list:
if self.change_y > 0:
self.rect.bottom = block.rect.top
else:
self.rect.top = block.rect.bottom
class Room(object):
wall_list = None
enemy_sprites = None
def __init__(self):
self.wall_list = pygame.sprite.Group()
self.enemy_sprites = pygame.sprite.Group()
class Room1(Room):
def __init__(self):
super().__init__()
walls = [[0, 0, 20, 250, WHITE],
[0, 350, 20, 250, WHITE],
[780, 0, 20, 250, WHITE],
[780, 350, 20, 250, WHITE],
[20, 0, 760, 20, WHITE],
[20, 580, 760, 20, WHITE],
[390, 50, 20, 500, BLUE]
]
for item in walls:
wall = Wall(item[0], item[1], item[2], item[3], item[4])
self.wall_list.add(wall)
class Room2(Room):
def __init__(self):
super().__init__()
walls = [[0, 0, 20, 250, RED],
[0, 350, 20, 250, RED],
[780, 0, 20, 250, RED],
[780, 350, 20, 250, RED],
[20, 0, 760, 20, RED],
[20, 580, 760, 20, RED],
[190, 50, 20, 500, GREEN],
[590, 50, 20, 500, GREEN]
]
for item in walls:
wall = Wall(item[0], item[1], item[2], item[3], item[4])
self.wall_list.add(wall)
class Room3(Room):
def __init__(self):
super().__init__()
walls = [[0, 0, 20, 250, PURPLE],
[0, 350, 20, 250, PURPLE],
[780, 0, 20, 250, PURPLE],
[780, 350, 20, 250, PURPLE],
[20, 0, 760, 20, PURPLE],
[20, 580, 760, 20, PURPLE]
]
for item in walls:
wall = Wall(item[0], item[1], item[2], item[3], item[4])
self.wall_list.add(wall)
for x in range(100, 800, 100):
for y in range(50, 451, 300):
wall = Wall(x, y, 20, 200, RED)
self.wall_list.add(wall)
for x in range(150, 700, 100):
wall = Wall(x, 200, 20, 200, WHITE)
self.wall_list.add(wall)
def main():
pygame.init()
screen = pygame.display.set_mode([800, 600])
clock = pygame.time.Clock()
pygame.display.set_caption('Maze Runner')
player = Player(50, 50)
movingsprites = pygame.sprite.Group()
movingsprites.add(player)
rooms = []
room = Room1()
rooms.append(room)
room = Room2()
rooms.append(room)
room = Room3()
rooms.append(room)
current_room_no = 0
current_room = rooms[current_room_no]
man = MenuManager(screen, clock)
home = Page("home")
options = Page("options")
confirm_exit = Page("confirm_exit")
font = pygame.font.Font("arcade_font.ttf", 40)
button_play = ButtonText("PLAY", font, pos = [20, 375], background_color = [255, 0, 0])
button_options = ButtonText("OPTIONS", font, pos = [20, 400], background_color = [255, 0, 0])
button_quit = ButtonText("QUIT", font, pos = [20, 425], background_color = [255, 0, 0])
button_play.add_action(man.exit_menu)
button_options.add_action(man.navigate, "options")
button_quit.add_action(man.navigate, "confirm_exit")
button_back_op = ButtonText("BACK", font, pos = [10, 10], background_color = [255, 0, 0])
text_player_color = Text("CHOSSE YOUR PLAYER COLOR", font, pos = [10, 100])
button_blue_player = ButtonText("BLUE", font, pos = [10, 150])
button_red_player = ButtonText("RED", font, pos = [10, 200])
picture_arrow = Picture("arrow.png", pos = [120, 120])
button_back_op.add_action(man.navigate, "home")
button_blue_player.add_action(player.set_color, BLUE)
button_blue_player.add_action(picture_arrow.set_pos, [120, 120])
button_red_player.add_action(player.set_color, RED)
button_red_player.add_action(picture_arrow.set_pos, [120, 170])
text_confirmation = Text("ARE YOU SURE ABOUT THAT", font, pos = [20, 20], background_color = [255, 0, 0])
button_yes = ButtonText("YES", font, pos = [20, 70], background_color = [255, 0, 0])
button_no = ButtonText("NO", font, pos = [20, 95], background_color = [255, 0, 0])
button_yes.add_action(man.kill_program)
button_no.add_action(man.navigate, "home")
home.add_element(button_play)
home.add_element(button_options)
home.add_element(button_quit)
options.add_element(button_back_op)
options.add_element(text_player_color)
options.add_element(button_blue_player)
options.add_element(button_red_player)
options.add_element(picture_arrow)
confirm_exit.add_element(text_confirmation)
confirm_exit.add_element(button_yes)
confirm_exit.add_element(button_no)
man.add_page(home)
man.add_page(options)
man.add_page(confirm_exit)
man.set_start_page("home")
while True:
man.run()
in_game = True
while in_game:
for event in pygame.event.get():
if event.type == pygame.QUIT:
exit()
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_LEFT:
player.changespeed(-5, 0)
if event.key == pygame.K_RIGHT:
player.changespeed(5, 0)
if event.key == pygame.K_UP:
player.changespeed(0, -5)
if event.key == pygame.K_DOWN:
player.changespeed(0, 5)
if event.key == pygame.K_q:
in_game = False
if event.type == pygame.KEYUP:
if event.key == pygame.K_LEFT:
player.changespeed(5, 0)
if event.key == pygame.K_RIGHT:
player.changespeed(-5, 0)
if event.key == pygame.K_UP:
player.changespeed(0, 5)
if event.key == pygame.K_DOWN:
player.changespeed(0, -5)
player.move(current_room.wall_list)
if player.rect.x < 0:
if current_room_no == 0:
player.rect.x = 0
elif current_room_no == 1:
current_room_no = 0
current_room = rooms[current_room_no]
player.rect.x = 790
elif current_room_no == 2:
current_room_no = 1
current_room = rooms[current_room_no]
player.rect.x = 790
if player.rect.x > 801:
if current_room_no == 0:
current_room_no = 1
current_room = rooms[current_room_no]
player.rect.x = 0
elif current_room_no == 1:
current_room_no = 2
current_room = rooms[current_room_no]
player.rect.x = 0
else:
in_game = False
screen.fill(BLACK)
movingsprites.draw(screen)
current_room.wall_list.draw(screen)
pygame.display.flip()
clock.tick(60)
current_room_no = 0
current_room = rooms[current_room_no]
player.rect.x = 50
player.rect.y = 50
player.change_x = 0
player.change_y = 0
if __name__ == "__main__":
main()
pygame.quit()
| true | true |
1c328ae56d56346203a6d125a5f42ea0b98fd624 | 2,135 | py | Python | geoviews/__init__.py | jonmmease/geoviews | ef1424426b82d1674110f2e559ec1a6bcc129326 | [
"BSD-3-Clause"
] | null | null | null | geoviews/__init__.py | jonmmease/geoviews | ef1424426b82d1674110f2e559ec1a6bcc129326 | [
"BSD-3-Clause"
] | null | null | null | geoviews/__init__.py | jonmmease/geoviews | ef1424426b82d1674110f2e559ec1a6bcc129326 | [
"BSD-3-Clause"
] | null | null | null | import param
from holoviews import (extension, help, opts, output, renderer, Store, # noqa (API import)
Cycle, Palette, Overlay, Layout, NdOverlay, NdLayout,
HoloMap, DynamicMap, GridSpace, Dimension, dim)
try:
# Only available in HoloViews >=1.11
from holoviews import render, save # noqa (API import)
except:
pass
from .element import (_Element, Feature, Tiles, # noqa (API import)
WMTS, LineContours, FilledContours, Text, Image,
Points, Path, Polygons, Shape, Dataset, RGB,
Contours, Graph, TriMesh, Nodes, EdgePaths,
QuadMesh, VectorField, HexTiles, Labels)
from .util import load_tiff, from_xarray # noqa (API import)
from .operation import project # noqa (API import)
from . import data # noqa (API import)
from . import operation # noqa (API import)
from . import plotting # noqa (API import)
from . import feature # noqa (API import)
from . import tile_sources # noqa (API import)
__version__ = str(param.version.Version(fpath=__file__, archive_commit="$Format:%h$",
reponame="geoviews"))
# Ensure opts utility is initialized with GeoViews elements
if Store._options:
Store.set_current_backend(Store.current_backend)
# make pyct's example/data commands available if possible
from functools import partial
try:
from pyct.cmd import copy_examples as _copy, fetch_data as _fetch, examples as _examples
copy_examples = partial(_copy, 'geoviews')
fetch_data = partial(_fetch, 'geoviews')
examples = partial(_examples, 'geoviews')
except ImportError:
def _missing_cmd(*args,**kw): return("install pyct to enable this command (e.g. `conda install -c pyviz pyct`)")
_copy = _fetch = _examples = _missing_cmd
def _err(): raise ValueError(_missing_cmd())
fetch_data = copy_examples = examples = _err
del partial, _examples, _copy, _fetch
| 46.413043 | 116 | 0.626698 | import param
from holoviews import (extension, help, opts, output, renderer, Store,
Cycle, Palette, Overlay, Layout, NdOverlay, NdLayout,
HoloMap, DynamicMap, GridSpace, Dimension, dim)
try:
from holoviews import render, save
except:
pass
from .element import (_Element, Feature, Tiles,
WMTS, LineContours, FilledContours, Text, Image,
Points, Path, Polygons, Shape, Dataset, RGB,
Contours, Graph, TriMesh, Nodes, EdgePaths,
QuadMesh, VectorField, HexTiles, Labels)
from .util import load_tiff, from_xarray
from .operation import project
from . import data
from . import operation
from . import plotting
from . import feature
from . import tile_sources
__version__ = str(param.version.Version(fpath=__file__, archive_commit="$Format:%h$",
reponame="geoviews"))
if Store._options:
Store.set_current_backend(Store.current_backend)
from functools import partial
try:
from pyct.cmd import copy_examples as _copy, fetch_data as _fetch, examples as _examples
copy_examples = partial(_copy, 'geoviews')
fetch_data = partial(_fetch, 'geoviews')
examples = partial(_examples, 'geoviews')
except ImportError:
def _missing_cmd(*args,**kw): return("install pyct to enable this command (e.g. `conda install -c pyviz pyct`)")
_copy = _fetch = _examples = _missing_cmd
def _err(): raise ValueError(_missing_cmd())
fetch_data = copy_examples = examples = _err
del partial, _examples, _copy, _fetch
| true | true |
1c328b2e4ae065c1082e8fbbd863543fca6786a7 | 524 | py | Python | phrydy/__init__.py | Josef-Friedrich/phrydy | d89acb06564a387b6e4286b65c00aae61a6316ed | [
"MIT"
] | 6 | 2018-10-05T18:20:18.000Z | 2021-12-14T06:38:46.000Z | phrydy/__init__.py | Josef-Friedrich/phrydy | d89acb06564a387b6e4286b65c00aae61a6316ed | [
"MIT"
] | null | null | null | phrydy/__init__.py | Josef-Friedrich/phrydy | d89acb06564a387b6e4286b65c00aae61a6316ed | [
"MIT"
] | null | null | null | from . import field_docs # noqa: F401
from . import doc_generator # noqa: F401
from . import mediafile_extended # noqa: F401
from ._version import get_versions
from .mediafile_extended import \
MediaFile, \
MediaFileExtended # noqa: F401
from .field_docs import \
fields, \
FieldDocCollection # noqa: F401
from .doc_generator import \
get_max_field_length, \
format_fields_as_txt, \
merge_fields, \
print_debug # noqa: F401
__version__ = get_versions()['version']
del get_versions
| 22.782609 | 46 | 0.721374 | from . import field_docs
from . import doc_generator
from . import mediafile_extended
from ._version import get_versions
from .mediafile_extended import \
MediaFile, \
MediaFileExtended
from .field_docs import \
fields, \
FieldDocCollection
from .doc_generator import \
get_max_field_length, \
format_fields_as_txt, \
merge_fields, \
print_debug
__version__ = get_versions()['version']
del get_versions
| true | true |
1c328ba4a136a085fa44642c7e34ce0bed1de999 | 53,828 | py | Python | data/transcoder_evaluation_gfg/python/MOBILE_NUMERIC_KEYPAD_PROBLEM.py | mxl1n/CodeGen | e5101dd5c5e9c3720c70c80f78b18f13e118335a | [
"MIT"
] | 241 | 2021-07-20T08:35:20.000Z | 2022-03-31T02:39:08.000Z | data/transcoder_evaluation_gfg/python/MOBILE_NUMERIC_KEYPAD_PROBLEM.py | mxl1n/CodeGen | e5101dd5c5e9c3720c70c80f78b18f13e118335a | [
"MIT"
] | 49 | 2021-07-22T23:18:42.000Z | 2022-03-24T09:15:26.000Z | data/transcoder_evaluation_gfg/python/MOBILE_NUMERIC_KEYPAD_PROBLEM.py | mxl1n/CodeGen | e5101dd5c5e9c3720c70c80f78b18f13e118335a | [
"MIT"
] | 71 | 2021-07-21T05:17:52.000Z | 2022-03-29T23:49:28.000Z | # Copyright (c) 2019-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
def f_gold ( keypad , n ) :
if ( not keypad or n <= 0 ) :
return 0
if ( n == 1 ) :
return 10
odd = [ 0 ] * 10
even = [ 0 ] * 10
i = 0
j = 0
useOdd = 0
totalCount = 0
for i in range ( 10 ) :
odd [ i ] = 1
for j in range ( 2 , n + 1 ) :
useOdd = 1 - useOdd
if ( useOdd == 1 ) :
even [ 0 ] = odd [ 0 ] + odd [ 8 ]
even [ 1 ] = odd [ 1 ] + odd [ 2 ] + odd [ 4 ]
even [ 2 ] = odd [ 2 ] + odd [ 1 ] + odd [ 3 ] + odd [ 5 ]
even [ 3 ] = odd [ 3 ] + odd [ 2 ] + odd [ 6 ]
even [ 4 ] = odd [ 4 ] + odd [ 1 ] + odd [ 5 ] + odd [ 7 ]
even [ 5 ] = odd [ 5 ] + odd [ 2 ] + odd [ 4 ] + odd [ 8 ] + odd [ 6 ]
even [ 6 ] = odd [ 6 ] + odd [ 3 ] + odd [ 5 ] + odd [ 9 ]
even [ 7 ] = odd [ 7 ] + odd [ 4 ] + odd [ 8 ]
even [ 8 ] = odd [ 8 ] + odd [ 0 ] + odd [ 5 ] + odd [ 7 ] + odd [ 9 ]
even [ 9 ] = odd [ 9 ] + odd [ 6 ] + odd [ 8 ]
else :
odd [ 0 ] = even [ 0 ] + even [ 8 ]
odd [ 1 ] = even [ 1 ] + even [ 2 ] + even [ 4 ]
odd [ 2 ] = even [ 2 ] + even [ 1 ] + even [ 3 ] + even [ 5 ]
odd [ 3 ] = even [ 3 ] + even [ 2 ] + even [ 6 ]
odd [ 4 ] = even [ 4 ] + even [ 1 ] + even [ 5 ] + even [ 7 ]
odd [ 5 ] = even [ 5 ] + even [ 2 ] + even [ 4 ] + even [ 8 ] + even [ 6 ]
odd [ 6 ] = even [ 6 ] + even [ 3 ] + even [ 5 ] + even [ 9 ]
odd [ 7 ] = even [ 7 ] + even [ 4 ] + even [ 8 ]
odd [ 8 ] = even [ 8 ] + even [ 0 ] + even [ 5 ] + even [ 7 ] + even [ 9 ]
odd [ 9 ] = even [ 9 ] + even [ 6 ] + even [ 8 ]
totalCount = 0
if ( useOdd == 1 ) :
for i in range ( 10 ) :
totalCount += even [ i ]
else :
for i in range ( 10 ) :
totalCount += odd [ i ]
return totalCount
#TOFILL
if __name__ == '__main__':
param = [
([[' ', 'A', 'C', 'K', 'R', 'R', 'V', 'c', 'd', 'i', 'i', 'j', 'm', 'o', 'q', 'q', 'r', 'r', 'v', 'v', 'x', 'z'], ['B', 'D', 'I', 'M', 'N', 'Q', 'R', 'Z', 'c', 'f', 'i', 'j', 'j', 'l', 'l', 'n', 'p', 'q', 's', 't', 't', 'w'], ['A', 'F', 'F', 'G', 'H', 'J', 'K', 'K', 'N', 'V', 'V', 'b', 'c', 'c', 'g', 'i', 'j', 'l', 'l', 's', 't', 'y'], [' ', 'A', 'B', 'B', 'E', 'H', 'I', 'J', 'J', 'P', 'Q', 'T', 'U', 'V', 'Z', 'c', 'c', 'j', 'p', 'w', 'y', 'z'], [' ', ' ', 'A', 'C', 'F', 'G', 'H', 'M', 'N', 'R', 'R', 'V', 'c', 'i', 'j', 'o', 'p', 'p', 'q', 'r', 'w', 'y'], [' ', ' ', 'C', 'C', 'D', 'H', 'I', 'J', 'K', 'O', 'S', 'X', 'Y', 'f', 'h', 'h', 'o', 'p', 'p', 'u', 'u', 'w'], ['B', 'C', 'D', 'H', 'M', 'M', 'Q', 'Q', 'R', 'S', 'X', 'Z', 'e', 'e', 'e', 'j', 'k', 'l', 'm', 'o', 'v', 'w'], ['A', 'C', 'C', 'D', 'H', 'H', 'I', 'J', 'L', 'L', 'L', 'M', 'N', 'S', 'U', 'c', 'd', 'f', 'f', 's', 'u', 'y'], ['A', 'B', 'D', 'D', 'I', 'J', 'K', 'L', 'L', 'M', 'P', 'S', 'S', 'Y', 'b', 'e', 'h', 'j', 'm', 'o', 'q', 's'], [' ', 'B', 'E', 'H', 'H', 'J', 'M', 'P', 'S', 'T', 'U', 'V', 'Z', 'd', 'j', 'm', 'm', 'p', 'q', 'v', 'w', 'w'], ['B', 'E', 'F', 'G', 'H', 'M', 'M', 'M', 'N', 'O', 'Q', 'R', 'T', 'V', 'a', 'c', 'g', 'g', 'i', 's', 'x', 'y'], ['A', 'E', 'G', 'J', 'O', 'R', 'R', 'S', 'T', 'W', 'a', 'b', 'f', 'h', 'h', 'i', 'm', 'n', 's', 'u', 'v', 'y'], ['B', 'D', 'E', 'H', 'I', 'I', 'K', 'M', 'N', 'P', 'Q', 'S', 'a', 'e', 'i', 'j', 'm', 'o', 'p', 'r', 'x', 'z'], ['A', 'G', 'I', 'K', 'K', 'L', 'O', 'P', 'U', 'U', 'X', 'X', 'Z', 'a', 'c', 'f', 'g', 'i', 'l', 'o', 'o', 'v'], [' ', ' ', 'E', 'H', 'J', 'J', 'L', 'M', 'N', 'O', 'P', 'S', 'S', 'X', 'c', 'f', 'g', 'r', 'u', 'v', 'x', 'z'], ['C', 'E', 'F', 'F', 'H', 'H', 'I', 'K', 'M', 'M', 'U', 'Z', 'e', 'e', 'h', 'h', 'h', 'j', 'k', 'k', 'p', 'r'], [' ', ' ', ' ', 'C', 'G', 'I', 'J', 'O', 'O', 'P', 'T', 'V', 'Y', 'b', 'j', 'n', 'o', 'o', 's', 'u', 'w', 'x'], ['A', 'D', 'F', 'F', 'H', 'H', 'N', 'R', 'S', 'W', 'W', 'Y', 'Y', 'b', 'f', 'i', 'k', 'o', 
'u', 'y', 'y', 'z'], [' ', 'C', 'G', 'I', 'I', 'L', 'P', 'S', 'X', 'Y', 'd', 'd', 'f', 'g', 'g', 'k', 'm', 'o', 'r', 'r', 'r', 'x'], ['F', 'I', 'J', 'N', 'P', 'P', 'Q', 'Q', 'R', 'X', 'Y', 'a', 'b', 'h', 'h', 'j', 'l', 'm', 'n', 'p', 'r', 'y'], [' ', 'C', 'D', 'E', 'F', 'L', 'Q', 'Q', 'V', 'c', 'g', 'h', 'k', 'k', 'l', 'l', 'n', 'o', 'p', 'r', 'u', 'x'], [' ', 'A', 'G', 'K', 'L', 'M', 'T', 'U', 'U', 'W', 'Z', 'a', 'f', 'i', 'k', 'k', 'n', 'n', 'p', 'q', 'v', 'z']],13,),
([['3', '5', '1', '5', '6', '7', '7', '3', '0', '4', '7', '6', '1', '4', '0', '6', '3', '4', '1', '3', '1', '2', '9', '8', '7', '8', '0', '2', '7', '6', '1', '0', '3', '8', '0', '5', '9', '3', '9', '9', '8', '6'], ['0', '3', '8', '5', '0', '2', '0', '6', '1', '8', '7', '2', '8', '6', '0', '3', '9', '4', '9', '5', '7', '4', '3', '7', '4', '3', '8', '6', '1', '5', '4', '8', '0', '8', '3', '2', '7', '7', '6', '9', '7', '9'], ['6', '7', '1', '1', '7', '2', '5', '3', '2', '8', '4', '7', '8', '6', '1', '5', '2', '1', '6', '5', '7', '6', '8', '6', '8', '8', '1', '6', '3', '1', '1', '7', '1', '6', '4', '9', '2', '8', '2', '6', '3', '4'], ['8', '7', '9', '2', '0', '6', '6', '6', '2', '3', '1', '4', '8', '2', '3', '5', '5', '9', '2', '8', '0', '3', '2', '7', '2', '0', '2', '7', '0', '6', '5', '8', '2', '9', '3', '9', '8', '1', '9', '7', '9', '7'], ['9', '8', '1', '5', '0', '9', '9', '7', '7', '8', '4', '1', '8', '0', '4', '6', '7', '0', '5', '8', '6', '5', '6', '5', '1', '4', '0', '4', '3', '4', '6', '7', '6', '7', '3', '5', '4', '5', '6', '7', '1', '1'], ['4', '4', '4', '9', '8', '8', '7', '5', '3', '1', '8', '4', '8', '1', '0', '4', '9', '8', '9', '5', '2', '7', '5', '3', '4', '8', '2', '4', '7', '5', '0', '3', '6', '2', '5', '6', '3', '1', '9', '4', '8', '9'], ['7', '2', '7', '6', '2', '8', '8', '8', '1', '1', '5', '4', '6', '5', '3', '0', '3', '7', '4', '0', '0', '2', '4', '1', '8', '0', '0', '7', '6', '4', '7', '1', '8', '8', '1', '8', '8', '2', '3', '1', '7', '2'], ['2', '7', '5', '8', '7', '6', '2', '9', '9', '0', '6', '1', '7', '8', '1', '3', '3', '1', '5', '7', '9', '8', '2', '0', '7', '6', '0', '0', '1', '1', '5', '8', '6', '7', '7', '9', '9', '0', '4', '4', '3', '4'], ['0', '9', '9', '0', '5', '4', '9', '9', '3', '0', '3', '1', '5', '9', '9', '5', '3', '0', '2', '3', '9', '9', '7', '8', '5', '4', '6', '4', '2', '8', '7', '0', '2', '3', '6', '5', '2', '6', '0', '6', '5', '7'], ['1', '1', '4', '1', '4', '2', '7', '1', '9', '7', '9', '9', '4', '4', '2', '7', '6', 
'8', '2', '6', '7', '3', '1', '8', '0', '5', '3', '0', '3', '9', '0', '4', '7', '9', '6', '8', '1', '7', '0', '3', '2', '4'], ['6', '3', '1', '3', '2', '9', '5', '5', '4', '7', '2', '4', '7', '6', '9', '2', '0', '1', '2', '1', '4', '3', '8', '4', '9', '8', '9', '7', '7', '6', '8', '2', '4', '5', '3', '0', '1', '3', '0', '1', '0', '9'], ['5', '9', '4', '2', '1', '5', '0', '2', '6', '6', '0', '8', '3', '0', '3', '3', '3', '0', '7', '8', '0', '7', '7', '4', '3', '0', '6', '9', '6', '2', '2', '2', '8', '3', '7', '2', '4', '0', '0', '4', '5', '2'], ['3', '1', '1', '6', '2', '9', '7', '0', '3', '2', '8', '0', '5', '2', '2', '9', '9', '2', '8', '3', '5', '7', '4', '2', '8', '7', '8', '0', '4', '9', '7', '8', '0', '3', '2', '2', '1', '5', '1', '4', '9', '1'], ['6', '4', '8', '2', '4', '2', '5', '4', '0', '1', '0', '9', '0', '3', '0', '6', '4', '8', '6', '7', '9', '3', '0', '1', '6', '9', '5', '7', '5', '2', '9', '4', '7', '0', '6', '4', '1', '4', '4', '1', '3', '5'], ['6', '7', '8', '2', '9', '5', '0', '2', '6', '5', '4', '9', '4', '7', '8', '4', '6', '7', '6', '5', '1', '3', '8', '1', '7', '5', '9', '3', '9', '4', '0', '6', '5', '6', '9', '8', '4', '6', '9', '9', '0', '2'], ['6', '9', '2', '4', '3', '7', '2', '5', '8', '6', '3', '6', '3', '6', '7', '2', '6', '8', '6', '4', '3', '9', '6', '2', '1', '3', '1', '8', '8', '9', '6', '2', '0', '2', '2', '9', '3', '6', '4', '4', '8', '7'], ['1', '4', '5', '5', '7', '2', '3', '8', '3', '6', '9', '3', '3', '4', '4', '2', '3', '7', '5', '5', '2', '8', '7', '2', '7', '6', '0', '5', '1', '4', '1', '5', '5', '0', '4', '8', '7', '8', '1', '4', '2', '6'], ['5', '6', '8', '0', '0', '6', '3', '8', '3', '8', '2', '0', '8', '5', '4', '4', '0', '0', '8', '5', '8', '9', '1', '3', '3', '1', '1', '2', '9', '9', '1', '2', '1', '3', '5', '8', '7', '9', '3', '1', '3', '5'], ['9', '6', '7', '4', '9', '0', '2', '8', '9', '4', '3', '6', '4', '1', '8', '3', '1', '8', '0', '4', '4', '2', '1', '2', '9', '8', '3', '6', '7', '3', '9', '5', '7', '9', '1', 
'4', '6', '1', '4', '5', '4', '0'], ['5', '7', '4', '0', '6', '7', '8', '3', '6', '5', '8', '1', '4', '9', '9', '2', '7', '7', '4', '2', '8', '0', '8', '3', '2', '7', '3', '5', '7', '4', '4', '1', '3', '5', '1', '9', '6', '1', '0', '9', '5', '4'], ['3', '4', '0', '0', '3', '2', '2', '2', '9', '7', '5', '5', '1', '8', '4', '7', '9', '0', '7', '4', '1', '9', '3', '7', '3', '9', '5', '0', '3', '6', '6', '8', '8', '4', '1', '8', '2', '3', '9', '5', '3', '3'], ['7', '0', '6', '2', '5', '2', '1', '8', '1', '4', '4', '8', '9', '0', '3', '0', '3', '1', '9', '0', '8', '0', '1', '0', '3', '7', '6', '6', '3', '9', '4', '3', '4', '4', '1', '4', '7', '2', '9', '5', '8', '3'], ['7', '5', '7', '9', '8', '8', '3', '4', '3', '2', '5', '2', '4', '6', '5', '6', '1', '6', '0', '4', '9', '6', '8', '0', '3', '3', '2', '1', '1', '8', '9', '5', '3', '8', '3', '0', '4', '7', '7', '9', '2', '6'], ['6', '3', '9', '7', '5', '8', '5', '1', '1', '6', '6', '0', '8', '3', '2', '7', '3', '0', '4', '5', '1', '2', '3', '0', '4', '2', '8', '4', '1', '1', '0', '2', '3', '2', '5', '6', '3', '0', '1', '2', '2', '5'], ['8', '7', '2', '1', '4', '9', '6', '5', '2', '0', '9', '1', '0', '8', '6', '9', '7', '3', '4', '5', '6', '7', '2', '8', '3', '0', '1', '9', '5', '4', '4', '1', '6', '4', '0', '5', '1', '5', '7', '8', '2', '4'], ['4', '8', '1', '1', '7', '0', '8', '0', '2', '1', '8', '2', '2', '7', '6', '2', '3', '5', '2', '5', '5', '5', '9', '3', '4', '9', '4', '9', '8', '8', '0', '1', '6', '7', '7', '5', '7', '5', '9', '3', '6', '1'], ['5', '8', '6', '8', '0', '7', '3', '1', '9', '2', '3', '5', '5', '5', '0', '9', '2', '2', '2', '8', '7', '7', '6', '7', '6', '7', '4', '3', '9', '8', '3', '9', '3', '5', '7', '1', '3', '1', '4', '0', '7', '1'], ['9', '2', '6', '8', '8', '6', '8', '4', '8', '6', '7', '7', '7', '0', '2', '6', '5', '1', '5', '3', '8', '0', '5', '6', '5', '4', '9', '4', '6', '0', '0', '7', '2', '2', '1', '1', '0', '5', '1', '2', '5', '1'], ['1', '8', '4', '3', '2', '6', '1', '8', '3', '6', '5', 
'5', '1', '5', '9', '8', '0', '2', '8', '9', '4', '2', '1', '9', '6', '5', '1', '2', '5', '4', '6', '7', '3', '8', '7', '3', '2', '4', '7', '6', '6', '0'], ['9', '2', '9', '7', '5', '6', '4', '9', '5', '4', '8', '5', '2', '4', '0', '5', '5', '1', '0', '9', '3', '6', '4', '0', '9', '4', '2', '7', '5', '1', '3', '4', '8', '3', '7', '4', '2', '8', '3', '0', '2', '8'], ['8', '4', '4', '7', '5', '7', '3', '2', '8', '9', '5', '5', '2', '3', '8', '3', '3', '8', '0', '4', '9', '5', '9', '8', '5', '9', '1', '9', '4', '3', '9', '7', '4', '3', '0', '9', '3', '1', '3', '1', '3', '9'], ['9', '3', '7', '7', '4', '9', '1', '1', '8', '9', '2', '1', '2', '4', '1', '0', '9', '2', '8', '8', '9', '7', '2', '6', '0', '4', '3', '6', '2', '1', '4', '7', '6', '2', '4', '0', '8', '5', '1', '6', '2', '1'], ['6', '8', '7', '3', '6', '4', '3', '9', '3', '7', '1', '5', '0', '5', '5', '1', '7', '9', '3', '9', '8', '9', '9', '6', '6', '3', '1', '2', '2', '2', '0', '7', '8', '4', '7', '3', '6', '2', '2', '1', '9', '6'], ['1', '3', '1', '5', '7', '5', '2', '5', '3', '4', '0', '7', '6', '8', '5', '9', '7', '1', '0', '3', '3', '8', '2', '9', '7', '2', '4', '8', '6', '3', '1', '3', '3', '0', '7', '1', '5', '9', '0', '9', '8', '1'], ['4', '1', '6', '2', '2', '3', '9', '7', '6', '5', '6', '5', '3', '0', '8', '4', '3', '0', '6', '8', '7', '4', '1', '4', '2', '3', '2', '2', '1', '0', '0', '5', '3', '4', '0', '8', '4', '8', '4', '9', '0', '0'], ['2', '1', '1', '4', '8', '0', '6', '9', '7', '0', '9', '4', '7', '6', '1', '1', '5', '2', '0', '6', '9', '2', '0', '2', '7', '3', '3', '0', '5', '2', '6', '3', '0', '1', '8', '3', '5', '5', '3', '9', '8', '5'], ['1', '3', '2', '8', '8', '7', '7', '2', '6', '3', '8', '8', '5', '6', '7', '0', '1', '7', '7', '8', '5', '1', '9', '5', '2', '5', '7', '2', '2', '5', '9', '6', '0', '3', '1', '2', '2', '2', '3', '0', '1', '9'], ['2', '5', '0', '6', '4', '0', '1', '6', '9', '7', '0', '6', '7', '4', '9', '1', '0', '2', '5', '5', '7', '0', '2', '0', '8', '0', '6', '2', '6', 
'8', '1', '1', '0', '6', '4', '4', '0', '6', '5', '8', '7', '3'], ['9', '7', '8', '6', '0', '3', '7', '5', '7', '5', '6', '0', '5', '6', '3', '9', '6', '3', '2', '6', '0', '0', '6', '5', '8', '3', '7', '3', '7', '3', '5', '2', '4', '9', '4', '1', '0', '7', '9', '7', '6', '2'], ['3', '0', '7', '5', '1', '4', '8', '7', '9', '9', '0', '7', '6', '8', '6', '0', '5', '8', '0', '8', '9', '4', '8', '1', '3', '1', '8', '6', '0', '5', '1', '7', '3', '4', '7', '6', '4', '2', '8', '6', '1', '7'], ['4', '2', '8', '1', '1', '3', '2', '6', '5', '1', '9', '1', '2', '8', '8', '8', '2', '6', '2', '5', '6', '0', '7', '5', '2', '0', '9', '3', '0', '1', '4', '1', '1', '0', '0', '3', '9', '3', '4', '8', '8', '3'], ['9', '1', '9', '0', '9', '4', '0', '8', '4', '9', '7', '6', '7', '6', '0', '7', '1', '1', '7', '4', '9', '0', '0', '7', '3', '2', '8', '1', '6', '9', '7', '2', '0', '1', '6', '1', '9', '8', '9', '7', '5', '3']],39,),
([['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', 
'1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1']],15,),
([['b', 'q', 'b', 'D', 't', 'y', 'Z', 'G', 'd', 'r', 'R', 'R', 'z', 'A', 'Y', 'H', 'D', 'Q', 'X', 'U', 'o', 'a', 'S', 'P', 'S', 'c', 'W', 'r', 'I', 'y', 'E', 'x', 'E', 'k', 'l', 'F', 'M', 'G', 'z', 'T', 'I', 'E', 'D', 'K', 'M', 'l'], ['V', 'm', 'W', 'M', 'l', 'H', 'l', 'j', 'f', 'S', 'k', 'g', 'O', 'W', 'S', 'R', 'I', 'L', 'J', 'Z', 'V', 'X', 'w', 'l', 'K', 's', 'F', 'o', 'X', 'k', 'a', 'L', 'K', 'H', ' ', 'E', 'x', 'b', 'Z', 'w', 'Z', 'Y', 'U', 'y', 'I', 'Q'], ['I', 'o', 's', 'A', 'f', 'Z', 'C', 'o', 'X', 'b', 'd', 's', ' ', 'Y', 'Q', 'U', 'C', 'T', 'K', 'r', 'Q', 'U', 'P', 'C', 'w', 'R', 'e', 's', 'L', 'A', 'j', 'g', 'p', 'B', 'I', 'W', 'L', 'e', 'w', 'b', 'R', 'z', 'Y', 'M', 'M', 'E'], ['k', 'Y', 'v', 'L', 'f', 'x', 'v', 'l', 'C', 'g', 'J', 'V', 'l', 'q', 'p', 'x', 'z', 'A', 'J', 'h', 'V', 'i', 'h', 'r', 'Z', 'i', ' ', 'y', 'M', 'k', 'p', 'q', 'X', 'M', 'U', 'W', 'v', 'v', 'P', 'L', 'n', 'j', 'r', 'O', 'k', ' '], ['K', 'k', 'K', 'Z', 'X', 'W', 'e', ' ', 'x', 'u', 'r', 'l', 'l', 'z', 'V', 'e', 'K', 'z', 'y', 'x', 'f', 'v', 'n', 'f', 'K', 'p', 'b', 'I', 'C', 'p', 'b', 'V', 'R', 't', 'n', 't', 'm', 'A', 'F', 'J', 'U', 'M', 'n', 'g', 'M', 'W'], ['a', 'e', 'x', 'A', 'U', 'V', 'P', 'W', 'W', 'l', 'p', ' ', 'o', 'L', 'X', 'E', 'g', 'k', 'Y', 'W', 'P', 'Y', 'B', 't', 'Z', 'm', 'V', 'Z', 'O', 'z', 'o', 'O', 'm', 's', 'x', 'O', 'L', 'q', 'Z', 'E', 'y', 'B', 'l', 'h', 'h', 'T'], ['c', 'x', 'R', 'R', 'x', 'S', 'R', 'y', 'J', 'Y', 'e', 'F', 'X', 'x', 'h', 'L', 'N', 'Q', 'j', 'X', 's', 'H', 'Z', 'M', 'Q', 'b', 'Q', 'h', 'x', 'R', 'Y', 'C', 'r', 'D', 'b', 'O', 'l', 'W', 'J', 'I', 'A', 'P', 'x', 'D', 'T', 'c'], ['Y', 's', 'B', 'N', 'B', 'g', 'e', 'h', 'l', 'y', 'N', 's', 'a', 'f', 'k', 'p', 'C', 'Q', 'c', 'U', 'A', 'N', 'w', 'V', 'z', 'F', 'j', 'M', 'F', 'g', 'q', 'x', 'r', 'l', 'e', 'Y', 'T', 'z', ' ', 'a', 'n', 'n', 'x', 'p', 'm', 'J'], ['v', 'O', 'a', 'A', 'E', 'q', 'L', 'P', ' ', 'w', 'l', 'G', 'k', 'f', 'M', 'A', 'k', 'i', 'f', 'D', 'z', 'A', 'J', 'Y', 'b', 'g', 'a', 'h', 
'e', 'S', 'Q', 'H', 'c', 'f', 'I', 'S', 'X', 'Y', 'J', 'g', 'f', 'n', 'G', 'J', 'r', 'S'], [' ', 'S', 'w', 'G', 'b', 'v', 'z', 'U', 'l', 'k', 'a', 'w', 'y', 'D', 'Q', 'v', 'c', 'T', 'S', 'S', 'n', 'M', 'm', 'j', 'U', 'X', 'a', 'k', 'O', 'A', 'T', 'a', 'U', 'u', 'y', 's', 'W', 'j', 'k', 'n', 'a', 'V', 'X', 'N', 'D', 'C'], ['Z', 'o', 'O', 'a', 'z', 'M', 'X', 'k', 'm', 'X', 'J', 'w', 'y', 'd', 'j', 'c', 'Q', 'E', 'E', 'i', 'g', 'q', 'U', 'v', 'C', 'k', 'y', 't', 'T', 'A', 'o', 'u', 'o', 'e', 'J', 'c', 'c', 'd', 'i', 'o', 'b', 'A', 'h', 'g', 'y', 'Y'], ['O', 'j', 'F', 'A', 'f', 't', 'J', 'u', 'V', 'J', 'P', 'Z', 'C', 'c', 'c', 'y', 'G', 's', 'W', 'X', 'O', 'g', 'q', 'l', 'z', 'L', 'p', 'U', 'o', 'A', 'k', 'v', 'q', 'v', 'I', 'W', 'k', 'r', 'm', 'Y', 'i', 'V', 'Y', 'c', 'P', 'S'], ['N', ' ', 'W', 'k', 'z', 'o', 'V', 'w', 'M', 'a', 'q', 'c', 'P', 'D', 'x', 'O', 'M', 'y', ' ', 'B', 'y', 'L', 'V', 'E', 'j', 'i', 'C', 'k', ' ', ' ', 'c', 'K', 'c', 'h', 'y', 'K', 'c', 'G', 'Q', 'h', 'B', 'i', 'L', 'Q', 'P', 's'], ['X', 'p', 'y', 'I', 'W', 'F', 'F', 'o', 'W', 'g', 'A', 'H', 'a', 'H', 'X', 'F', 'd', 'Y', 'I', 'x', 'n', 'r', 's', 'c', 'B', 'L', 'o', 'B', 'C', 'o', 'G', 'v', 'T', 'q', 'A', 'Z', 'a', 'Z', 'd', 'S', 'B', 'S', 'F', 'I', 'm', 'C'], ['F', 't', 'c', 'w', 'E', 'X', 's', 'F', 'e', 'J', 'h', 'Y', 'f', 'g', 'd', 'f', 'N', 'X', 'G', 'l', 'n', 'M', 'L', 'k', 'P', 'Y', 'M', ' ', 'U', 'X', 'n', 's', 'o', 'F', 'R', 'g', 'E', 'I', 'G', 'P', 'x', 'f', 'h', 'K', 'b', 'k'], ['a', 'p', 'j', 'Q', 'X', 'p', 'h', 'R', 'g', 'U', 'O', 'x', 'X', 'k', 'v', 'm', 'o', 'E', 'Z', 'Z', 'W', 'v', 'k', 'l', 'o', 'O', 'N', 'P', 'Q', 'k', 'A', 'K', 'c', 'l', 'w', 'a', 'k', 'Z', 'd', 'T', 'S', 't', 'K', 'L', 'x', 'k'], ['t', 'f', 'V', 'Q', 'X', 'e', 's', 'f', 'o', 'N', 'U', 'z', 'y', 'K', 'F', ' ', 'A', 'V', 'W', 'A', 'j', 'C', 'T', 'G', 'z', 'K', 'j', ' ', 'I', 'w', 'h', 'Q', 't', 'I', 'm', 'V', 'h', 'M', 'L', 'Q', 'J', 'g', 'p', 'x', 'P', 'i'], ['X', 'Q', 'b', 'i', 'T', 'A', 'R', 'f', 'c', 'r', 
'K', 't', 'J', 'E', 'Z', 'd', 'W', 'O', 'G', 'X', 'u', 'I', 'z', ' ', 'm', 'H', 's', 'P', 'd', 's', 'k', 'm', 'E', 'K', 'Y', 'H', 'L', 'b', 'Z', 'y', 'I', 'c', 'p', 'y', 'Y', 'T'], ['P', 'g', 'C', 'T', 'i', 'Z', 's', 's', 'r', 'E', 'L', 'P', 'T', 'o', 'r', 'g', 'x', 'c', 'U', 'b', 'o', 'l', 'H', 'H', 'k', 'b', 'N', 'e', 'S', 'E', 'U', 'c', 'g', 'V', 'E', 'V', 'l', 'L', ' ', 'I', 'h', 'M', 'L', 'z', 'P', 'e'], ['l', 'i', 'O', 'F', 'S', 'e', 'Z', 'j', 'y', 'J', 'p', 'c', 'q', 'j', 'Q', 'E', 'j', 'd', 'u', 'S', 'N', 'Y', 'R', ' ', 'F', 'I', 'f', 'u', 'd', 't', 'u', 'Q', 'J', 'v', 'i', 'x', 'A', 'd', 'k', 'v', 'H', 'Z', 'B', 'u', 'o', 'k'], ['V', 'p', 'B', 'h', 'M', 'a', 'p', 'n', 'z', 'L', 's', 'g', 'c', 'G', 'T', 'X', 'a', 'X', 's', 'h', 'O', 'x', 'h', 's', 'x', 'N', ' ', 'O', 'w', 'F', 'v', 'M', 'W', 'u', 'c', 'Y', 'x', 'x', 'H', 'P', 'T', 'h', 's', 'W', 'w', 'l'], ['B', 'f', 'k', 'U', 'j', 'b', 'X', 'J', 'z', 'y', 'w', 'B', 'n', 'f', 'x', 'N', 'Y', 'l', 'Q', 'h', 't', 'v', 'U', 'y', 'I', 'G', 'q', 'T', 'a', 'i', 'N', 'p', 'e', 'Z', 'Y', 'Q', 'B', 'G', 'e', 'N', 'V', 's', 'E', 'U', 'B', 'h'], ['q', 'Y', 'r', 'w', 't', 'G', 'G', 'M', 'F', ' ', 'e', 'u', 'E', 'g', 's', 'D', 'c', 'h', 'L', 'G', 'x', 'u', 'V', 'j', 'u', 'U', 'i', 'm', 'Y', 'J', 'L', 'P', 'h', 'X', 'p', 'P', 'F', 'f', 'O', 'u', 'U', 'H', 'Y', 'I', 'A', 'X'], ['v', ' ', 'W', 'A', 'e', 't', 'Y', 't', 'I', 's', 'w', 'M', ' ', 'E', 'R', 'K', 'x', 'i', 'O', 'w', 'h', 'e', 'f', 'N', 'i', 'N', 'v', 'q', 'F', 'u', 'A', 'c', 'e', 's', 'p', 'N', 'j', 'G', 'q', 'W', 'q', 'U', 'J', 'b', 'V', 'i'], ['p', 'Y', 'p', 'f', 'I', 'N', 'S', 'C', 'J', 'p', 'O', 'O', 's', 'V', 's', 'Z', 'y', 's', 'l', 'o', 'b', 'e', 'L', 'J', 'm', 'W', 'g', 'P', 'x', 'l', 'W', 'N', 'a', 'T', 'm', 'D', 'p', 'p', 'l', 'P', 'E', 'V', 'c', 'O', 'T', 'Z'], ['x', ' ', 'v', 'X', 'T', 's', 'i', 'A', 'J', 'q', 'H', 'P', 'x', 'q', 'Y', 'n', 's', 'i', 'W', 'z', 'Y', 'q', 'a', 'Z', 't', 'M', 's', 'A', 'q', 'e', 'W', 'V', 'g', 'y', 'x', 'n', 'E', 'p', 
'x', 't', 'q', 'R', 'T', 'm', 'h', 'm'], ['M', 'u', 'D', 'R', 'R', 'h', 'B', 'f', ' ', 'H', 'b', 'l', 'q', 'X', 'f', 'b', 'r', 'e', 'v', 'D', 'm', 'T', 'v', 'l', 'g', 'l', 'z', 'y', 'A', 'O', 'i', 'G', 'Q', 'l', 'K', 'G', 'H', 'G', 'S', 'b', 'a', 'b', 'k', 'p', 'g', 'R'], ['G', 'Q', 'P', 'e', 'P', 'r', 'K', 'U', 'l', 'g', 'X', 'q', 'I', 'O', 'U', 'y', 'V', 'Z', 't', 'Q', 'g', 'd', 'T', 'J', 'q', 'w', 'z', 'L', 'V', 'p', 'e', 'X', 'x', 'D', 'k', 'R', 'P', 'U', 'L', 'z', 'a', 'L', 'L', 'Q', 'z', 'D'], ['j', 'W', 'Q', 'E', 'P', 'V', 'f', 'p', 'w', 'n', 'U', 'j', 'Z', 'P', 'f', 'v', 'R', 'r', 'h', 'z', 'r', 'l', 'T', 'P', 'U', 'f', 'v', 'A', 'B', 'k', 'b', 'n', 'o', 'h', 'j', 'K', 'h', 'r', 'f', 'q', 'x', 'E', 'U', 'g', 'd', 'C'], ['C', 'v', 'D', ' ', 'K', 'd', 'd', 'D', 'd', 'f', 'U', 'F', 'l', 'x', 'E', 'D', 'Q', 'L', 'W', 'X', 'E', 'E', 'X', 'T', 'M', 'C', 'e', 'B', 'x', 'o', 'C', 'f', 'd', 'o', 'F', 'T', 'J', 'F', 'G', 'l', 'k', 'x', 'u', 'q', 'N', 't'], ['l', 'd', 'P', 'k', 'N', 'w', 't', 'C', 'u', 'n', 'f', 'Z', 'B', 'A', 'Z', 'z', 'z', 'v', 'Z', 's', 'n', 'f', 'Y', 'c', 's', 'j', 'e', 'M', 'E', 'i', 'N', 'Y', 'D', 'k', 'k', 'n', 'j', 'X', 'q', 'T', 'T', 'G', 'S', 'd', 't', 'd'], ['c', 'c', 'G', 'd', 'y', 'T', 'E', 'w', 'k', 'R', 'd', 'N', 'S', 'M', 'L', 'p', 'H', 'F', 'C', 'L', 'n', 'l', 'C', 'M', 'L', 'u', 'k', ' ', 'X', 'E', 'L', 'J', 'L', 'G', 'l', 'H', 'l', 'r', 'p', 'v', 'D', 'T', 'r', 'L', 'v', 'e'], ['t', 'A', 's', 'J', 'M', 'b', 'P', 'a', 'p', 'G', 'q', 'p', 'i', 'd', 'b', 'C', 'S', 'w', 'c', 'N', 'm', 'A', 'C', 'm', 'f', 'T', 'P', 'z', 'U', 'L', 'o', 'V', 'N', 'M', 'G', 'h', 'V', 'h', 'U', 'S', 'I', 'N', 'f', 'H', 'L', 'f'], ['q', 'V', 'Z', 'j', 's', 'c', 'T', 'n', 'U', 'l', 'E', 'V', 'c', 's', 'J', 'n', 'q', 'b', 'c', 'h', 'e', 'x', 'H', 'G', 'k', 'U', 'P', 'U', 'T', 'W', 'n', 't', 'p', 'i', 'b', 'u', 'b', 'H', 's', 'D', 'L', 'Y', 'Z', 'u', 'P', 'w'], ['s', 'F', 'O', 't', 'J', 'e', 'f', 'P', 'l', 'l', 'v', 'G', 'B', 'J', 'i', 'b', 'i', 'r', 'P', 'x', 
'a', 'i', 'X', 'T', 'G', 'G', 'a', 'k', 'd', 'Z', 'L', 'Y', 'U', 'r', 'b', 'p', 't', 'k', 'L', 't', 'x', 'T', 'k', 'v', 'a', 'k'], ['c', 's', 'B', 'Z', 'd', 'h', 'd', 'P', 'w', 'D', 'a', 'c', 'G', 'M', 'T', 'u', 'U', 'O', 'T', 'w', 'a', 'o', 'x', 'V', 'J', 'g', 'N', 'w', 'w', 'f', 'g', 'u', 'j', 'p', 'G', 'T', 'w', 'X', 'J', 'p', 'M', 'y', 'o', 'G', 'm', 'w'], ['w', 'j', 'K', 'u', 'K', 'd', 'N', 'I', 'w', 'E', ' ', 'K', 'K', 'c', 'x', 'U', 'A', 'A', 'v', 'F', 'z', 'a', 'z', 'C', 'V', 'W', 'A', 'o', 'm', 'Z', 'i', 'U', 'F', 'e', 'p', 'w', 'O', 'A', 'T', 'u', 'a', 'P', 'l', 'y', 'w', 'J'], ['b', 'M', 'e', 'h', 'S', 'Q', 'c', 'G', 'D', 'A', 'I', 'H', 'g', 'f', 'E', 'j', 'x', 'u', 'P', 'p', 'p', 'd', 'V', 'F', 'D', 'L', 'L', 'g', 'H', 'h', 'n', 'Q', 'K', 'L', 'g', 'K', 'y', 'Y', 'u', 'A', 'g', 'W', 't', 'J', 'X', 'F'], ['k', 'J', 'l', 'X', 'J', 'm', 'e', 'Y', 'd', 'Z', 'L', 'W', 'r', 'W', 'T', 'J', 'G', 'f', ' ', 's', 'j', 'j', 'P', 'h', 'k', 'x', 'k', 'k', 'B', 'N', 'j', 'h', 's', 'o', 'b', 'm', 'u', 'O', 'i', 'D', 'c', 'B', 'a', 'h', 'B', 'Y'], ['L', 'l', 'R', 'Z', 'f', 'j', 'G', 'E', 'j', 'g', 'X', 'S', 'P', 'H', 'T', 'a', 'c', 'Y', 'b', 'r', 'N', 'N', 'R', 'n', 'd', 'j', 'H', 'M', 'X', 'A', 'V', 'G', 'c', 'r', 'l', 'v', 'F', 'e', 'z', 'k', 'z', 'Q', 'r', 'F', 'L', 'H'], ['U', 'o', 'Y', 'O', 'n', 'J', 'c', 'i', 'j', 'a', 'j', 'H', 'O', 'u', 'S', 'm', 'K', 'y', 'i', 'T', 'v', 'j', ' ', 'v', 'H', 'f', 'r', 'q', 'F', 'a', 'l', 'u', 'F', 'E', 'p', 'b', 'V', ' ', 'm', 'O', 'M', 'E', 'f', 'Q', 't', 'T'], [' ', 'B', 'H', 'i', 'H', 'c', 'T', ' ', 'K', 'u', 'd', 'C', 'F', 'F', 'S', 'v', 'Z', 'A', 'b', 't', 'r', 'G', 'I', 'F', 'p', 'L', 'G', 'N', 'h', 'y', 'm', 'b', 'z', 'V', 'G', 'D', 'p', 'K', 'p', 'C', 'X', 'y', 'w', 'c', 'z', 'K'], ['P', 'q', 'o', 'M', 'T', 'U', 'o', 'r', 'A', 'h', 'S', 'q', 'T', 's', 'V', 'u', 'c', 'N', 'v', 'E', 'r', 'X', 'k', 'v', 'M', 'p', 'Q', 'd', 'Y', 'Q', 'J', 'c', 'L', 'M', 'r', 'Z', 'D', 'k', 'V', 'u', 'G', ' ', 'Y', 'O', 'i', 'x'], ['V', 'x', 
'o', 'G', 'T', 'g', 'G', 'N', 'A', 'q', 'p', 'l', 'K', 't', 'j', 'n', 'C', 'U', 'c', 'b', 'q', 'q', 'c', 'C', 'w', 'x', 'B', 'C', 't', 'V', 'z', 'y', 'y', 'o', 'U', 'E', 'O', 'X', 'j', 'V', 'r', 'y', 't', 'n', 'R', 'H'], ['Z', 'O', 'w', 'z', 'v', 'K', 'U', 'c', 'N', 'M', 'h', 'W', 'Y', 'Z', 'g', 'k', 'h', 'o', 'K', 'B', 'K', 'q', 'u', 'P', 'z', 'v', 'j', 'u', 'z', 'P', 'B', 'y', 'p', 'Y', 'U', 'W', 'Z', 'I', 'c', 'm', 'W', 'J', 'c', 'l', ' ', 'O'], ['Q', 'A', 'B', 'Z', 'C', 'D', 'N', 'i', 'W', 'E', 'W', 'V', 'Z', 'k', 'A', 'D', 'z', 'Z', 'I', 't', 'Y', 'K', 'u', 'T', 'u', 'q', 'p', 'V', 'P', 'y', 'o', 'e', 'Y', 'x', 'd', 'L', 'P', 'L', 'p', 'Z', 'E', 'N', 'r', 'c', 'K', 'Z']],31,),
([['1', '1', '1', '1', '2', '2', '3', '3', '3', '4', '4', '5', '5', '6', '7', '7', '7', '8', '8', '9', '9'], ['0', '0', '1', '1', '1', '2', '3', '4', '5', '6', '6', '6', '6', '6', '6', '6', '7', '7', '8', '8', '9'], ['0', '0', '0', '0', '0', '1', '1', '2', '2', '2', '3', '3', '4', '5', '5', '5', '5', '6', '7', '7', '8'], ['0', '1', '1', '2', '2', '2', '2', '2', '3', '3', '4', '4', '5', '5', '6', '6', '7', '7', '7', '9', '9'], ['0', '0', '1', '1', '2', '2', '2', '3', '3', '3', '4', '4', '4', '4', '4', '6', '7', '7', '8', '8', '9'], ['0', '0', '0', '0', '1', '1', '2', '3', '3', '3', '3', '4', '4', '4', '5', '7', '8', '8', '8', '9', '9'], ['0', '0', '0', '0', '0', '0', '1', '1', '2', '3', '4', '5', '5', '6', '6', '7', '7', '8', '8', '9', '9'], ['0', '2', '2', '2', '4', '4', '4', '4', '4', '5', '5', '5', '6', '6', '7', '7', '7', '8', '8', '9', '9'], ['0', '0', '1', '2', '3', '3', '3', '4', '4', '5', '5', '5', '7', '7', '7', '8', '8', '8', '9', '9', '9'], ['0', '0', '1', '2', '2', '3', '4', '4', '4', '4', '4', '5', '6', '6', '6', '7', '8', '8', '9', '9', '9'], ['0', '0', '1', '1', '1', '1', '1', '2', '2', '2', '2', '3', '4', '4', '5', '5', '6', '6', '8', '8', '9'], ['0', '0', '1', '2', '2', '2', '3', '3', '5', '5', '5', '6', '7', '7', '7', '7', '7', '8', '8', '9', '9'], ['0', '0', '1', '1', '1', '3', '5', '5', '5', '5', '6', '6', '6', '6', '6', '7', '7', '8', '8', '9', '9'], ['0', '0', '1', '2', '2', '2', '2', '2', '2', '3', '3', '5', '5', '5', '6', '7', '8', '8', '9', '9', '9'], ['0', '0', '0', '0', '2', '3', '5', '5', '5', '5', '5', '6', '6', '6', '7', '7', '7', '7', '7', '8', '9'], ['0', '0', '1', '2', '2', '3', '3', '3', '4', '4', '4', '5', '5', '5', '6', '6', '6', '7', '7', '8', '9'], ['0', '0', '0', '0', '1', '1', '3', '3', '3', '4', '4', '5', '5', '6', '7', '8', '8', '8', '9', '9', '9'], ['0', '0', '1', '1', '1', '1', '1', '2', '2', '3', '5', '5', '6', '6', '6', '7', '7', '7', '7', '8', '8'], ['0', '1', '1', '1', '1', '2', '2', '4', '4', '4', '4', '4', '5', '5', 
'6', '7', '7', '8', '8', '9', '9'], ['1', '1', '2', '2', '3', '3', '4', '5', '5', '5', '5', '6', '6', '7', '7', '7', '8', '8', '8', '9', '9'], ['0', '0', '0', '0', '2', '2', '2', '3', '3', '4', '5', '5', '5', '5', '5', '5', '6', '7', '7', '7', '9']],11,),
([['0', '1', '0', '1', '1', '1', '0', '1', '1', '0', '1', '0', '0', '0', '1', '1', '1', '1', '0', '0', '0', '1', '1', '1', '0', '1', '1', '1', '1', '1', '0', '0', '0', '1', '1', '1', '1', '0', '1'], ['1', '0', '0', '0', '1', '0', '1', '1', '0', '0', '0', '0', '1', '0', '0', '0', '1', '1', '0', '0', '0', '1', '0', '0', '1', '0', '1', '1', '1', '1', '0', '0', '0', '0', '0', '0', '0', '1', '0'], ['0', '1', '1', '0', '1', '0', '1', '1', '0', '0', '0', '1', '0', '1', '1', '0', '1', '0', '0', '1', '0', '1', '0', '1', '1', '1', '0', '1', '0', '0', '0', '1', '0', '0', '1', '1', '1', '0', '0'], ['0', '1', '1', '0', '0', '1', '1', '1', '0', '0', '0', '1', '1', '1', '1', '1', '0', '1', '0', '1', '1', '0', '0', '0', '1', '1', '0', '0', '1', '1', '1', '1', '0', '0', '0', '0', '1', '1', '0'], ['1', '1', '1', '1', '1', '0', '0', '0', '1', '0', '1', '1', '0', '1', '1', '0', '0', '1', '1', '1', '1', '0', '1', '0', '0', '0', '0', '0', '1', '0', '0', '1', '0', '0', '1', '0', '0', '1', '1'], ['1', '0', '1', '0', '0', '1', '1', '1', '1', '0', '1', '1', '0', '0', '0', '0', '1', '0', '0', '1', '0', '1', '0', '1', '1', '1', '1', '0', '0', '1', '0', '0', '1', '1', '0', '1', '0', '1', '0'], ['0', '0', '0', '0', '1', '1', '0', '1', '0', '1', '0', '1', '1', '1', '1', '1', '0', '1', '1', '0', '1', '0', '0', '1', '0', '1', '0', '0', '0', '0', '1', '1', '1', '1', '0', '0', '0', '1', '1'], ['1', '0', '0', '1', '1', '1', '1', '0', '0', '0', '1', '0', '0', '1', '0', '0', '0', '0', '1', '0', '1', '0', '0', '0', '0', '1', '1', '0', '1', '1', '0', '1', '0', '0', '0', '0', '1', '0', '0'], ['0', '0', '1', '1', '1', '1', '0', '1', '0', '1', '1', '1', '1', '0', '1', '1', '0', '0', '0', '0', '0', '0', '0', '1', '0', '1', '1', '0', '1', '0', '0', '0', '1', '1', '0', '1', '1', '1', '1'], ['1', '0', '0', '1', '1', '0', '1', '1', '0', '0', '0', '1', '1', '0', '1', '0', '1', '0', '0', '0', '0', '1', '1', '1', '0', '1', '1', '0', '0', '1', '0', '0', '0', '1', '1', '0', '1', '0', '0'], ['0', '0', '0', '1', '0', 
'0', '1', '1', '0', '0', '1', '0', '0', '1', '0', '0', '0', '0', '1', '1', '0', '1', '0', '0', '1', '0', '1', '0', '1', '0', '1', '1', '0', '0', '0', '1', '0', '0', '1'], ['1', '0', '0', '1', '0', '0', '0', '0', '1', '1', '1', '0', '1', '1', '1', '0', '0', '0', '0', '0', '0', '1', '0', '1', '0', '1', '0', '1', '1', '1', '1', '0', '1', '0', '1', '1', '0', '1', '0'], ['0', '0', '1', '0', '0', '0', '1', '1', '1', '1', '1', '0', '1', '1', '1', '1', '0', '0', '1', '0', '0', '0', '0', '1', '0', '1', '1', '0', '0', '1', '1', '1', '0', '0', '0', '1', '1', '0', '0'], ['0', '1', '0', '1', '0', '0', '0', '0', '1', '1', '1', '1', '0', '0', '0', '0', '1', '1', '1', '0', '0', '0', '0', '0', '0', '1', '0', '1', '1', '1', '1', '0', '0', '1', '0', '0', '0', '0', '0'], ['0', '0', '0', '0', '1', '1', '0', '0', '1', '1', '0', '0', '0', '1', '1', '0', '1', '0', '0', '0', '0', '1', '0', '0', '1', '1', '1', '0', '0', '1', '1', '1', '1', '0', '1', '0', '1', '1', '1'], ['1', '1', '0', '1', '1', '0', '0', '0', '0', '0', '0', '1', '0', '1', '0', '0', '0', '1', '1', '0', '1', '1', '0', '0', '1', '0', '0', '1', '0', '0', '0', '0', '1', '0', '1', '0', '1', '0', '1'], ['0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '0', '1', '0', '0', '1', '1', '1', '1', '1', '0'], ['1', '1', '0', '1', '1', '1', '0', '0', '1', '1', '0', '0', '1', '0', '1', '1', '0', '1', '1', '0', '0', '1', '1', '0', '0', '1', '0', '0', '0', '0', '0', '1', '0', '0', '0', '1', '0', '1', '1'], ['0', '0', '1', '0', '1', '0', '0', '0', '0', '0', '1', '0', '0', '1', '1', '1', '0', '1', '0', '0', '0', '0', '0', '1', '0', '0', '0', '0', '1', '0', '1', '1', '1', '0', '1', '1', '1', '0', '0'], ['1', '1', '0', '1', '0', '0', '1', '1', '1', '1', '0', '0', '1', '0', '0', '0', '1', '1', '1', '0', '1', '0', '1', '0', '1', '1', '1', '1', '1', '0', '0', '0', '1', '0', '0', '0', '1', '1', '1'], ['1', '0', '0', '1', '1', '1', '0', '0', '1', '1', '1', 
'1', '1', '0', '0', '0', '0', '0', '1', '0', '1', '0', '0', '0', '0', '1', '0', '0', '1', '1', '0', '1', '1', '0', '1', '0', '0', '0', '0'], ['0', '0', '1', '1', '0', '1', '1', '1', '0', '0', '0', '1', '1', '1', '1', '1', '0', '1', '1', '1', '1', '0', '1', '0', '1', '0', '1', '1', '1', '0', '0', '1', '0', '1', '1', '1', '1', '0', '0'], ['0', '1', '0', '1', '1', '1', '1', '1', '0', '0', '1', '1', '0', '1', '0', '1', '1', '0', '0', '0', '1', '0', '0', '1', '0', '1', '0', '0', '1', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1'], ['0', '1', '1', '1', '1', '0', '1', '1', '0', '0', '1', '0', '0', '1', '1', '0', '1', '0', '0', '0', '1', '1', '0', '0', '0', '0', '0', '1', '1', '0', '1', '1', '0', '1', '1', '1', '0', '0', '1'], ['0', '0', '0', '1', '0', '0', '1', '0', '1', '0', '0', '1', '0', '1', '1', '0', '1', '0', '1', '1', '0', '0', '0', '0', '1', '0', '1', '0', '0', '1', '0', '1', '1', '1', '1', '0', '0', '0', '1'], ['1', '0', '0', '1', '0', '1', '0', '1', '0', '0', '1', '1', '1', '0', '0', '0', '1', '0', '1', '1', '0', '1', '1', '1', '0', '0', '1', '1', '0', '1', '1', '0', '1', '1', '0', '0', '1', '1', '0'], ['0', '0', '1', '0', '1', '1', '0', '0', '1', '1', '1', '0', '0', '1', '1', '1', '0', '1', '0', '0', '0', '0', '1', '1', '1', '1', '1', '0', '0', '1', '0', '1', '0', '0', '1', '0', '1', '0', '0'], ['1', '1', '0', '0', '1', '1', '1', '0', '0', '1', '0', '1', '1', '1', '0', '0', '0', '0', '0', '1', '0', '1', '0', '1', '1', '0', '1', '1', '1', '0', '0', '1', '0', '0', '1', '0', '1', '1', '1'], ['0', '1', '0', '0', '1', '1', '0', '1', '1', '0', '1', '0', '1', '1', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '0', '1', '0', '0', '0', '0', '0', '1', '1', '0', '1', '0', '1'], ['1', '0', '1', '0', '1', '1', '0', '0', '0', '1', '1', '0', '0', '0', '0', '1', '1', '0', '0', '0', '0', '0', '0', '1', '1', '0', '1', '0', '1', '1', '1', '0', '0', '0', '0', '1', '1', '1', '0'], ['1', '0', '1', '0', '1', '0', '1', '0', '0', '1', '1', '1', '0', '1', '1', '1', '1', 
'0', '0', '1', '0', '1', '0', '0', '0', '1', '1', '0', '1', '1', '1', '0', '1', '0', '0', '0', '0', '0', '1'], ['1', '1', '0', '0', '1', '0', '0', '1', '1', '1', '1', '0', '0', '0', '0', '0', '0', '1', '1', '0', '0', '1', '0', '0', '1', '1', '1', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '0', '0'], ['1', '0', '0', '1', '1', '0', '1', '1', '0', '0', '0', '0', '0', '1', '0', '0', '1', '1', '1', '1', '1', '1', '0', '0', '0', '1', '1', '1', '1', '0', '0', '1', '1', '0', '1', '1', '1', '0', '1'], ['0', '1', '0', '0', '0', '1', '0', '1', '0', '0', '1', '0', '1', '0', '1', '1', '0', '1', '0', '1', '1', '0', '0', '0', '0', '0', '1', '1', '0', '1', '1', '0', '1', '1', '0', '0', '1', '1', '1'], ['1', '0', '1', '1', '1', '1', '1', '1', '0', '0', '1', '0', '1', '0', '1', '0', '0', '1', '0', '0', '0', '0', '1', '1', '0', '1', '0', '1', '0', '1', '1', '1', '1', '1', '1', '0', '0', '1', '0'], ['0', '1', '1', '1', '0', '1', '0', '1', '1', '0', '0', '0', '1', '0', '0', '0', '1', '0', '0', '0', '0', '1', '0', '0', '0', '0', '0', '1', '1', '1', '0', '1', '1', '0', '1', '1', '1', '1', '1'], ['1', '1', '1', '0', '1', '1', '0', '0', '0', '0', '1', '1', '0', '1', '1', '0', '1', '0', '0', '1', '0', '0', '1', '1', '1', '0', '1', '1', '0', '1', '1', '1', '0', '1', '1', '0', '0', '0', '1'], ['0', '1', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '0', '0', '1', '1', '0', '1', '0', '0', '1', '1', '1', '0', '0', '1', '0', '0', '0', '0', '1', '0', '1', '0', '1', '0', '1', '1', '0'], ['1', '1', '0', '1', '1', '0', '0', '1', '1', '1', '0', '1', '1', '0', '1', '1', '0', '0', '1', '1', '1', '1', '0', '0', '0', '0', '0', '0', '0', '1', '0', '0', '0', '1', '0', '0', '1', '1', '1']],20,),
([['B', 'D', 'D', 'E', 'H', 'H', 'J', 'M', 'M', 'M', 'M', 'N', 'O', 'O', 'P', 'R', 'S', 'T', 'U', 'U', 'W', 'W', 'Z', 'Z', 'b', 'c', 'c', 'e', 'f', 'g', 'j', 'k', 'k', 'n', 'o', 'r', 't', 'u', 'v'], [' ', 'A', 'A', 'A', 'C', 'C', 'D', 'D', 'E', 'F', 'H', 'J', 'J', 'K', 'L', 'L', 'N', 'T', 'T', 'U', 'W', 'Y', 'Z', 'c', 'f', 'g', 'i', 'i', 'k', 'k', 'm', 'n', 'o', 'p', 'r', 'r', 'u', 'v', 'x'], [' ', 'A', 'A', 'C', 'D', 'E', 'G', 'H', 'K', 'K', 'L', 'Q', 'S', 'U', 'V', 'Z', 'a', 'd', 'e', 'g', 'i', 'i', 'j', 'n', 'o', 'o', 'p', 'p', 'q', 's', 's', 't', 't', 'w', 'x', 'x', 'x', 'y', 'z'], [' ', 'B', 'D', 'E', 'G', 'H', 'H', 'H', 'H', 'K', 'M', 'O', 'O', 'R', 'R', 'S', 'S', 'U', 'V', 'X', 'a', 'a', 'd', 'e', 'e', 'f', 'h', 'i', 'j', 'p', 'p', 'q', 'q', 'q', 's', 'w', 'w', 'y', 'z'], [' ', 'A', 'A', 'C', 'E', 'F', 'G', 'H', 'J', 'J', 'K', 'M', 'O', 'S', 'S', 'U', 'X', 'Y', 'Z', 'b', 'd', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'n', 'q', 'q', 's', 's', 't', 'u', 'u', 'v', 'y', 'z'], ['H', 'H', 'H', 'H', 'J', 'J', 'K', 'M', 'N', 'S', 'U', 'U', 'V', 'V', 'V', 'W', 'Y', 'a', 'b', 'c', 'c', 'e', 'f', 'f', 'f', 'h', 'k', 'l', 'm', 'q', 'q', 's', 't', 'v', 'v', 'w', 'w', 'y', 'z'], ['A', 'B', 'D', 'G', 'H', 'I', 'J', 'J', 'L', 'M', 'N', 'P', 'Q', 'S', 'T', 'T', 'X', 'X', 'X', 'Y', 'Z', 'a', 'c', 'd', 'd', 'd', 'i', 'k', 'l', 'm', 'n', 'p', 'q', 'q', 't', 'w', 'x', 'y', 'y'], [' ', 'B', 'B', 'C', 'E', 'F', 'G', 'H', 'I', 'I', 'I', 'J', 'J', 'K', 'M', 'N', 'O', 'O', 'P', 'Q', 'S', 'T', 'W', 'Y', 'Y', 'a', 'c', 'd', 'h', 'h', 'i', 'j', 'k', 'o', 'o', 's', 'z', 'z', 'z'], [' ', 'A', 'C', 'C', 'D', 'E', 'E', 'E', 'F', 'H', 'H', 'M', 'M', 'N', 'N', 'R', 'T', 'W', 'Z', 'Z', 'd', 'e', 'h', 'h', 'j', 'j', 'k', 'm', 'n', 'o', 'p', 'r', 's', 's', 't', 'w', 'x', 'x', 'x'], ['A', 'D', 'I', 'M', 'P', 'Q', 'U', 'U', 'Y', 'Y', 'Z', 'Z', 'Z', 'a', 'b', 'c', 'e', 'f', 'f', 'f', 'g', 'g', 'h', 'h', 'i', 'i', 'j', 'm', 'n', 'o', 'p', 'q', 'r', 'u', 'u', 'u', 'w', 'x', 'z'], [' ', 'A', 'A', 'A', 'B', 
'C', 'E', 'F', 'G', 'H', 'J', 'Q', 'R', 'S', 'U', 'U', 'V', 'W', 'Y', 'Z', 'a', 'b', 'b', 'd', 'g', 'j', 'k', 'l', 'l', 'm', 'n', 'n', 'o', 's', 's', 'u', 'w', 'w', 'w'], [' ', 'A', 'B', 'C', 'E', 'E', 'E', 'H', 'J', 'J', 'K', 'M', 'N', 'P', 'R', 'U', 'U', 'V', 'W', 'a', 'e', 'f', 'k', 'k', 'k', 'l', 'l', 'm', 'n', 'n', 'o', 'o', 'o', 'q', 'r', 'r', 't', 'u', 'x'], [' ', 'B', 'B', 'E', 'F', 'F', 'H', 'O', 'O', 'P', 'P', 'Q', 'R', 'S', 'T', 'X', 'a', 'a', 'a', 'b', 'e', 'f', 'g', 'i', 'j', 'm', 'n', 'p', 'r', 't', 't', 't', 'u', 'v', 'v', 'w', 'x', 'x', 'z'], [' ', 'A', 'B', 'C', 'D', 'E', 'E', 'G', 'H', 'J', 'J', 'J', 'K', 'K', 'M', 'P', 'Q', 'R', 'R', 'W', 'X', 'X', 'Z', 'a', 'a', 'e', 'h', 'i', 'j', 'k', 'q', 'q', 'r', 'r', 's', 'u', 'x', 'x', 'y'], [' ', 'B', 'I', 'I', 'J', 'J', 'K', 'N', 'O', 'P', 'P', 'R', 'U', 'X', 'Z', 'Z', 'Z', 'b', 'd', 'f', 'f', 'h', 'h', 'h', 'j', 'k', 'k', 'n', 'n', 'o', 'o', 'p', 'q', 's', 't', 'v', 'w', 'x', 'z'], [' ', ' ', 'B', 'E', 'K', 'L', 'M', 'N', 'Q', 'Q', 'R', 'S', 'T', 'U', 'V', 'V', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'e', 'e', 'g', 'i', 'i', 'm', 'n', 'o', 'p', 's', 'u', 'u', 'v', 'w', 'x', 'z'], ['E', 'E', 'E', 'E', 'J', 'K', 'K', 'M', 'N', 'P', 'Q', 'S', 'S', 'V', 'W', 'W', 'W', 'X', 'Y', 'c', 'c', 'd', 'e', 'f', 'h', 'n', 'n', 'p', 'q', 'r', 's', 't', 'u', 'v', 'x', 'x', 'y', 'z', 'z'], [' ', ' ', ' ', 'E', 'E', 'F', 'F', 'G', 'G', 'H', 'J', 'L', 'O', 'Q', 'R', 'R', 'T', 'V', 'W', 'Y', 'Y', 'Z', 'Z', 'c', 'f', 'g', 'h', 'h', 'j', 'l', 'q', 'q', 'q', 't', 'v', 'x', 'x', 'y', 'y'], ['B', 'D', 'G', 'G', 'H', 'J', 'J', 'K', 'M', 'Q', 'S', 'S', 'T', 'T', 'T', 'U', 'V', 'Z', 'Z', 'a', 'b', 'd', 'e', 'g', 'g', 'h', 'h', 'l', 'l', 'n', 'o', 's', 'u', 'u', 'v', 'v', 'w', 'x', 'y'], [' ', ' ', 'B', 'B', 'B', 'C', 'D', 'D', 'E', 'I', 'L', 'M', 'O', 'O', 'P', 'P', 'Q', 'R', 'R', 'R', 'R', 'R', 'U', 'a', 'b', 'c', 'd', 'e', 'g', 'k', 'l', 'l', 'n', 'n', 'n', 'p', 'p', 'r', 'r'], [' ', ' ', 'B', 'E', 'E', 'F', 'G', 'L', 'M', 'N', 'N', 
'O', 'P', 'R', 'R', 'S', 'S', 'S', 'T', 'T', 'Y', 'Y', 'Z', 'a', 'a', 'b', 'd', 'e', 'f', 'j', 'j', 'k', 'l', 'l', 'm', 'o', 'o', 'p', 'y'], ['A', 'B', 'E', 'E', 'H', 'H', 'I', 'J', 'J', 'N', 'O', 'P', 'Q', 'R', 'V', 'V', 'W', 'W', 'X', 'X', 'Y', 'Z', 'Z', 'g', 'i', 'j', 'j', 'm', 'n', 'o', 'q', 'r', 'r', 's', 's', 's', 's', 't', 'x'], [' ', 'G', 'J', 'L', 'M', 'M', 'Q', 'Q', 'Q', 'S', 'U', 'W', 'W', 'Y', 'Z', 'Z', 'a', 'b', 'f', 'h', 'i', 'i', 'l', 'l', 'm', 'n', 'o', 'p', 'p', 'p', 'q', 'q', 'q', 's', 's', 't', 'u', 'v', 'w'], ['B', 'B', 'D', 'E', 'E', 'H', 'I', 'J', 'K', 'K', 'L', 'S', 'T', 'V', 'X', 'b', 'b', 'b', 'd', 'd', 'g', 'h', 'h', 'h', 'i', 'i', 'k', 'l', 'm', 'm', 'n', 'o', 'v', 'w', 'x', 'x', 'x', 'z', 'z'], ['B', 'C', 'C', 'C', 'D', 'D', 'E', 'F', 'J', 'K', 'M', 'N', 'O', 'O', 'Q', 'Q', 'R', 'R', 'R', 'S', 'T', 'U', 'V', 'W', 'W', 'a', 'b', 'f', 'g', 'i', 'm', 'n', 'n', 'n', 'p', 'p', 'p', 'u', 'v'], [' ', 'B', 'D', 'F', 'F', 'H', 'J', 'J', 'M', 'M', 'N', 'T', 'U', 'c', 'd', 'e', 'e', 'j', 'j', 'j', 'l', 'l', 'm', 'm', 'n', 'n', 'o', 'p', 'p', 'p', 's', 't', 't', 'v', 'v', 'w', 'y', 'y', 'y'], [' ', 'A', 'A', 'B', 'D', 'G', 'H', 'H', 'H', 'I', 'K', 'N', 'O', 'P', 'R', 'S', 'T', 'Y', 'Y', 'a', 'b', 'c', 'e', 'f', 'g', 'h', 'j', 'j', 'j', 'm', 'n', 'o', 's', 's', 'u', 'u', 'x', 'x', 'z'], [' ', ' ', 'F', 'G', 'G', 'J', 'N', 'N', 'P', 'S', 'S', 'S', 'T', 'T', 'X', 'Z', 'a', 'd', 'e', 'f', 'f', 'h', 'i', 'j', 'k', 'm', 'm', 'n', 'r', 's', 's', 't', 'v', 'w', 'x', 'x', 'x', 'z', 'z'], ['B', 'B', 'D', 'I', 'J', 'L', 'M', 'M', 'N', 'P', 'P', 'Q', 'S', 'U', 'X', 'X', 'X', 'Y', 'Z', 'a', 'b', 'e', 'e', 'f', 'g', 'i', 'j', 'l', 'm', 'o', 'q', 'r', 'r', 't', 'v', 'w', 'w', 'w', 'w'], [' ', 'A', 'B', 'C', 'D', 'D', 'E', 'F', 'F', 'H', 'I', 'J', 'J', 'M', 'N', 'N', 'O', 'S', 'U', 'V', 'W', 'W', 'e', 'g', 'h', 'h', 'i', 'j', 'j', 'o', 'p', 'q', 'q', 'r', 't', 'v', 'v', 'x', 'y'], [' ', 'A', 'A', 'C', 'C', 'D', 'D', 'D', 'E', 'G', 'I', 'J', 'O', 'Q', 'S', 'S', 'S', 
'T', 'T', 'V', 'X', 'Y', 'Y', 'b', 'i', 'k', 'l', 'l', 'm', 'n', 'p', 't', 'v', 'w', 'w', 'x', 'x', 'y', 'z'], ['A', 'A', 'D', 'F', 'G', 'H', 'I', 'L', 'N', 'P', 'Q', 'S', 'T', 'U', 'V', 'W', 'W', 'X', 'Y', 'Z', 'b', 'c', 'f', 'g', 'g', 'g', 'j', 'j', 'j', 'l', 'q', 's', 's', 'v', 'v', 'w', 'x', 'y', 'z'], ['B', 'H', 'I', 'J', 'K', 'K', 'L', 'L', 'M', 'N', 'N', 'N', 'P', 'P', 'S', 'T', 'U', 'V', 'W', 'W', 'a', 'a', 'a', 'a', 'b', 'j', 'j', 'k', 'm', 'n', 'p', 'u', 'u', 'u', 'v', 'w', 'x', 'y', 'z'], ['B', 'B', 'D', 'D', 'D', 'E', 'G', 'H', 'I', 'I', 'I', 'L', 'N', 'N', 'O', 'P', 'R', 'R', 'R', 'S', 'V', 'V', 'Y', 'Z', 'a', 'b', 'h', 'k', 'l', 'm', 'n', 'o', 'p', 'p', 'q', 'r', 's', 'x', 'z'], ['A', 'B', 'B', 'G', 'G', 'H', 'J', 'J', 'L', 'M', 'M', 'N', 'N', 'P', 'P', 'P', 'R', 'S', 'T', 'X', 'Z', 'd', 'd', 'f', 'f', 'j', 'j', 'j', 'l', 'l', 'l', 'm', 'r', 'r', 'u', 'v', 'v', 'x', 'x'], [' ', 'B', 'B', 'C', 'E', 'G', 'J', 'J', 'K', 'L', 'N', 'O', 'Q', 'R', 'T', 'T', 'V', 'V', 'X', 'X', 'b', 'e', 'f', 'i', 'i', 'k', 'm', 'n', 'o', 'o', 'p', 's', 's', 'u', 'u', 'w', 'x', 'x', 'x'], ['A', 'A', 'A', 'B', 'B', 'E', 'H', 'H', 'H', 'I', 'J', 'J', 'N', 'Q', 'Q', 'R', 'R', 'U', 'V', 'X', 'a', 'b', 'd', 'd', 'e', 'e', 'g', 'g', 'k', 'k', 'l', 'n', 'n', 'p', 'q', 'q', 'v', 'w', 'x'], ['B', 'B', 'B', 'C', 'C', 'D', 'E', 'F', 'H', 'I', 'I', 'K', 'N', 'N', 'P', 'P', 'P', 'U', 'W', 'X', 'Z', 'c', 'e', 'h', 'h', 'i', 'j', 'l', 'p', 'p', 'r', 'r', 'r', 'r', 'v', 'w', 'x', 'x', 'y'], [' ', ' ', 'B', 'C', 'C', 'D', 'E', 'E', 'H', 'L', 'O', 'P', 'P', 'S', 'T', 'V', 'Y', 'Y', 'Y', 'c', 'd', 'e', 'e', 'f', 'h', 'h', 'h', 'j', 'k', 'l', 'm', 'n', 'r', 's', 's', 'u', 'x', 'y', 'y']],38,),
([['8', '0', '3', '3', '7', '7', '3', '5', '4', '9', '6', '9', '4', '6', '9'], ['8', '7', '2', '2', '6', '9', '6', '0', '0', '6', '8', '1', '6', '1', '5'], ['2', '0', '5', '1', '8', '0', '0', '2', '9', '4', '1', '4', '8', '0', '2'], ['9', '9', '9', '5', '1', '8', '9', '5', '8', '7', '2', '9', '4', '0', '4'], ['1', '6', '7', '1', '7', '4', '7', '4', '6', '4', '3', '8', '0', '4', '9'], ['2', '7', '9', '6', '1', '2', '2', '9', '0', '7', '2', '3', '2', '0', '9'], ['9', '5', '3', '3', '6', '1', '3', '1', '3', '4', '3', '4', '1', '5', '9'], ['1', '6', '5', '2', '6', '7', '1', '8', '6', '6', '2', '2', '6', '7', '6'], ['5', '3', '8', '0', '3', '6', '3', '2', '1', '2', '3', '8', '1', '0', '2'], ['2', '2', '6', '8', '0', '6', '5', '9', '9', '3', '9', '5', '8', '6', '4'], ['4', '1', '0', '3', '9', '1', '0', '8', '3', '4', '0', '9', '0', '6', '8'], ['1', '7', '9', '6', '6', '1', '7', '2', '5', '9', '5', '2', '1', '1', '8'], ['7', '7', '4', '5', '2', '6', '4', '3', '4', '9', '1', '4', '3', '7', '2'], ['1', '3', '0', '5', '9', '2', '2', '6', '2', '4', '0', '7', '2', '6', '1'], ['0', '4', '4', '2', '6', '9', '5', '4', '3', '2', '6', '5', '6', '4', '0']],8,),
([['0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1']],6,),
([['u', 'V', 'l', 'L', 'o', 'i', 'o', 'L', 'S', 'D', 'S', 'u', 'Z', 'E', 's', 'q', 'P', 'X', 'd', 'v', 'W', 'J', 'p', 'r', 'e', 'j', 'F', 'l', 'Z', 'U', 'R', 'Y', 'M', 'C', 'S', 'C', 'Q', 'A'], ['w', 'p', 'O', 'x', 'a', 'v', 'Q', 'Z', 'n', 'Q', 'j', 't', 'N', ' ', 'n', 'u', 'y', 'x', 'E', 'r', 'd', 'e', 'g', 'e', 'H', 'Z', 'b', 's', 'A', 'R', 'x', 'h', 'v', 'X', 'x', 'K', 'P', 'M'], ['y', 'D', 'z', 't', 'g', 'L', 'B', 'N', 'i', 'g', 'E', 'l', 'P', 'q', 'j', 'm', 'c', 'X', 'b', 'X', 'Z', 'w', 's', 'Z', 'F', 'p', 'r', 'P', 'o', 'p', 'Y', 'R', 'w', 'n', 'y', 'n', 't', 'C'], ['b', 'v', 'G', 'K', 'J', 'u', 'w', 'q', 'x', 'b', 'O', 'Z', 'b', 'v', 'E', 'O', 'o', 'j', 'W', 'd', 'r', 'z', 'X', 'K', 'r', 'O', 'm', 'S', 'V', 'D', 'm', 'O', 'j', 'O', 'J', 'L', 'z', 'S'], ['Z', 'O', 'X', 'A', 'd', 'N', 'V', 't', 'f', 'z', 'q', 'H', 'O', 'Z', 'b', 'T', 'W', 'B', 'u', 'K', 'P', 'y', 'w', 'z', 'p', 'M', 'Z', 'P', 'l', 'y', 'J', 'G', 'i', 'C', 'r', 'y', 's', 'v'], ['k', 'R', 'i', 'z', 'A', 'l', 'J', 'X', 'C', 'i', 'P', 'A', 'y', 'y', 'a', 'E', 'V', 's', 'a', 'P', 'r', 'Y', 'D', 'n', 'o', 'w', 'M', ' ', 'W', 'm', 'W', 'H', 'a', 'v', 'j', 'g', 'Y', 'm'], ['M', 'y', 'N', 'A', 'R', 'u', 'e', 'N', 'H', 'a', 's', 'E', 'Q', 'b', 'd', 'E', 's', 'X', 'f', 'G', 'N', 'x', 'h', 'i', 'u', 'U', 'M', 'U', 's', 'u', 'N', 'f', 'u', 'o', 'C', 's', 'S', 'P'], ['h', 'C', 'v', 'L', 'H', 'h', 'Y', 'Y', 'F', 'S', 'd', 'Q', 'h', 'V', 'V', 'U', 'g', 'C', 's', 'X', 'E', 't', 'e', 'M', 'F', 'w', 'U', 'e', 'C', 'J', 'Y', 'R', 'o', 'a', 'W', 'L', 'k', 'K'], ['k', 'H', 'J', 'T', 's', 'F', 'y', 'C', 'O', 'J', 'O', 'B', 'm', 'B', 'e', 'G', 'l', 'g', 'y', 'J', 'y', 'u', 'F', 'E', 'B', ' ', 'B', 'Z', 'a', 'e', 'v', 'u', 'U', 'J', 'l', 'C', 'k', 'v'], ['d', 'y', 'V', 'Z', 't', 'X', 'n', 'v', 'O', 's', 'E', 'L', 'Z', 'x', 'x', 'p', 'w', 'W', 'S', 'n', 'G', 'y', 'q', 'o', 'B', 'X', 'f', 'r', 'n', 'T', 'y', 'p', 'J', 'j', 'I', 'w', 'r', 's'], ['h', 'y', 'p', 'j', 'r', 'D', 'j', 'H', 't', 'X', 'q', 'K', 'N', 'j', 'h', 
'v', 'K', 'r', 'j', 'J', 'A', 'u', 'D', 'f', 'J', 'n', 'q', 'w', 'P', 'w', 'i', 's', 'G', 's', 't', 'D', 'r', 'A'], ['f', 'I', 'v', 'M', 'x', 'K', 'O', 'i', 'p', 'y', 'o', 'Z', 'Y', 's', 'V', 'f', 'i', 'V', 'x', 'K', 'p', 'a', 'L', 'V', 'r', 'B', 'v', 'd', 'M', 'e', 'X', 'h', 'F', 'S', 'p', 'Z', 'J', 'I'], ['H', 'V', 'a', 'a', 'i', 'k', 'D', 'e', 'Z', 'i', 'h', 'v', 'A', 'G', 'N', 'Q', 'r', 'e', 'A', 'q', 'n', 'a', 'z', 'N', 'b', 'y', 'R', 'z', 'c', 'I', 'A', 'h', 'z', 'o', 'F', 'w', 'p', 'h'], ['X', 'z', 'K', 'b', 'z', 'E', 'u', 'E', 'h', 'L', 'X', 'K', 'Q', 'r', 'f', 'Z', 'k', 'p', 'S', 'b', 'l', 'N', 'M', 'u', 'f', 'z', 'p', 'f', 'Q', 'U', 'q', 'g', 'F', 'K', 'D', 'Q', 'H', 'K'], ['S', 'U', 'o', 'u', 'z', 'G', 'q', 'w', 'N', 'B', 'c', 'u', 'k', 'n', 'v', 'S', 'O', 'Z', 'I', 'F', 'T', 'Z', 'D', 'g', 'w', 'K', 'G', 'C', 'B', 'M', 'e', 'W', 'r', 'v', 'l', 't', 't', 'u'], ['P', 'e', 'm', 'H', 'W', 'b', 's', 'C', 'j', 'U', 'E', 'a', 'J', 'o', 'G', ' ', 'H', 'T', 'f', 'j', 'N', 'N', 'E', 'u', 'W', 'O', 'X', 'e', 'm', 'w', ' ', 'f', 'U', 'Y', 'N', 'X', 'I', 'j'], [' ', 'v', 'q', 'O', 'd', 'p', 'd', 'Q', 'N', 'A', 'v', 'u', 'o', 'q', ' ', 'S', 'H', 'b', 'M', 'J', 'b', 'G', 'L', 'N', 'w', 'r', 'G', 'Q', 'E', 'R', 'y', 'a', 'k', 'S', 'W', 'I', 'P', 'd'], ['N', 'z', 'F', 'X', 'x', 'J', 'q', 'G', 'Z', 'Z', 'E', ' ', 'q', 'M', 'L', 'B', 'y', 'k', 'h', 'R', 'e', 'R', 'N', 'p', 'D', 'K', 'n', 'g', 'E', 'w', 'P', 'v', 'J', 'P', ' ', 'q', 'N', 's'], ['u', 'Q', 'F', 'j', 'r', 'I', 'X', 'C', 'E', 'R', 'R', 'E', 'D', 'p', 'n', 'a', 'X', 'Q', 'J', 'F', 'F', 'x', 's', 'P', 'o', 'a', 't', 'f', 'S', 'n', 'P', 'S', 'k', 's', 'j', 'M', 'L', 'l'], ['F', ' ', 'n', 'P', 'P', 'N', 'D', ' ', 'N', 'W', 'G', 'm', 'p', 'P', 'R', 'L', 'b', 'c', 'q', 'O', 'k', 'Y', 'p', 'I', 'b', 'P', 'Y', 'Y', 'F', 'c', 'p', 'W', 'e', 'R', 'k', 'j', 'V', 'h'], ['Q', 'J', 'g', 'D', 'S', 'U', 'm', 'z', 'M', 'n', 'a', 'V', 'q', 'P', 'X', 'w', 's', 'v', 'J', 'J', 'h', 'n', 'J', 'd', 'Z', 'M', 'v', 'M', 'h', 'Q', ' ', 
'W', 'V', 's', 'O', 'A', 'x', 'j'], ['N', 'i', 'm', 'F', 'H', 'C', ' ', 'x', ' ', 't', 'g', 'q', 'j', 'd', 'n', 'g', 'l', 'U', 'k', 'U', 'q', 'h', 'A', 'c', 'u', 'o', 'U', 'z', 'D', 'N', 'p', 'R', 'K', 'k', 'T', 'i', 'D', 'i'], ['P', 'r', 'W', 'S', 's', 'U', 'k', 'l', 'e', 's', 'W', 'd', 'Y', 'q', 'p', 'Q', 'z', 'F', 'Z', 's', 'x', 'h', 'J', 'q', 'B', 'F', 'R', 'm', 'l', 'f', 'H', 'U', 'd', 'V', 'o', 'b', 't', 'B'], ['R', 'q', 'm', 'q', 'h', 'q', 'i', 'P', 'N', 'O', 'q', 'i', 'V', 'O', 'n', 'K', 'J', 'd', 'E', 'b', 'V', 'O', 'u', 'S', 'l', 'u', 'A', 'k', 'd', 'r', 'x', 'g', 'y', 'U', 'A', 'q', 'p', 'd'], ['r', 'h', 'h', 'L', 'j', 'd', 'b', 'o', 'v', 'D', 'd', 'M', 'f', 'y', 'Q', 'V', ' ', 'j', 'a', 'T', 'X', 'a', 't', 'I', 'Z', 'A', 'P', 'l', 'Y', 'j', 'c', 'A', 'A', 'e', 'r', 'H', 'u', 'f'], ['a', 'Y', 'J', 'J', 'k', 'L', 'x', 'l', 'O', 'n', 'J', 'I', 'l', 'x', 'V', 'S', 'S', 'l', 'D', 'E', 'm', 'd', ' ', 'j', 'Q', 'L', 't', 'c', 'o', 'D', 'z', 'A', 'x', 'u', 'F', 'E', 'v', 'a'], ['o', 'K', 'F', 'V', 'L', 'G', 't', 'A', 'd', 'b', 'P', 'F', 'K', 'N', 'J', 'e', 'B', 'T', 'H', 'n', 'D', 'b', 'm', 'T', 'L', 'S', 'n', 'D', 'b', 's', 'I', 't', 'O', 'a', 'm', 'a', 'A', 'n'], ['L', 'o', 'z', 'L', 'a', 'd', 'T', 'D', 'd', 'S', 'D', 'a', 'm', 'z', 'y', 'y', 'A', 'j', 'v', 'H', 'F', 't', 'A', 'f', 'G', 'E', ' ', 'x', ' ', 'm', 'L', 'I', 'O', 'Z', 'C', 'y', 'X', 'x'], [' ', 'I', 'i', 's', 'E', 'N', 'm', 'k', 'l', 'n', 's', 's', 'P', 'M', 'x', 'i', 'I', 'K', 'k', 'm', 'k', 'X', 'n', 'W', 'k', 'F', 'D', 'c', 'l', 'd', 'n', 'o', 'H', 'T', 'B', 'g', 'S', 'v'], ['g', 'p', 'd', 'A', 'Y', 'b', 'L', 'P', 'v', 'j', 'O', 'C', 's', 'g', 'J', 'm', 'P', 'd', 'H', 'c', 'h', 'U', 'P', 'J', 'h', 'c', 'f', 'W', 'l', 'K', 'F', 'T', 's', 'Z', 'n', 'v', ' ', 'p'], ['O', 'H', 'J', 'y', 'B', 'c', 'M', 'Q', 'F', 'k', 'S', 'o', 'b', 'M', 'c', 'i', 'K', 'l', 'a', 'Y', 'v', 'O', 'U', 'R', 'B', 'o', 'H', 'g', 'o', ' ', 'H', 'l', 'g', 'e', 'L', 'x', 'M', 'z'], ['q', 'u', 'A', 'O', 'u', 'f', 'r', 'U', 
'F', 'g', 'f', 'g', 'R', 'E', 'W', 'H', 'n', 'e', 'N', 'Z', 'y', 'M', 'j', 'L', 'T', 'b', 'v', 'N', 'u', 'X', 'E', 'y', 'g', 'Y', ' ', 'n', 'T', 'r'], ['k', 'n', 'F', 'B', 'X', 't', 'j', 'a', 'b', 'I', 'C', 'O', 'R', 'h', 'c', 'C', 'F', 'E', 'l', 'Y', 's', 'D', 'p', 'j', 'J', ' ', 'y', 'u', 'x', 'q', ' ', 'P', 'J', 'P', 't', 'g', 'X', 'j'], ['M', 'u', 'Q', 'x', 'r', 'n', 'U', 'w', 'w', ' ', 'H', 'P', ' ', 'V', 'X', 'Y', 't', 'Z', 'F', 'H', 'X', 'N', 'y', 'E', 'j', 'I', 'Q', 'P', ' ', 'y', 'e', 'I', 'o', 'b', 'j', 'E', 'p', 'G'], ['n', 'd', 'T', 'f', 'a', 'D', 's', 'i', 'b', 'm', 'K', 'h', 'c', 'G', 'I', 'p', 'd', 'x', 'I', 'G', 'B', 'q', 'k', 'A', 'B', 'M', 'g', 'S', 't', 'K', 'b', 'm', 'm', 'u', 'k', ' ', 'U', 'Z'], ['C', 'v', 'L', 'k', 'x', 'L', ' ', 'm', 'x', 'P', 'C', 'X', 'n', 'w', 'd', 'E', 'O', 'D', 'Q', 'i', 'A', 'p', 'K', 'r', 'n', 'Y', 'T', 'v', 'K', 'O', 'M', 'w', 'p', 'P', 'R', 'X', 'I', 'g'], ['l', 'M', 'd', 'j', 'M', 'd', 'y', 'x', ' ', 'o', 'E', 't', 'X', 'w', 'c', 'H', 'r', 'q', 'd', 'Q', 'I', 'g', 'T', 'F', 't', 'q', 'A', 'e', 'm', 'y', 'G', 't', 'v', 'G', 'r', 'x', 'g', 'H'], ['T', 'f', 'N', 'W', 'K', 'T', 'b', 'O', 'J', 'B', 'a', 'd', 'l', 'y', 's', 's', 'W', 'D', 't', 'z', 'D', 'c', 'k', 'l', 'e', 'Q', 'A', 'J', 'J', 'k', 'M', 'G', 'F', 'S', 'C', 'N', 'x', 'X']],32,)
]
# Benchmark harness: count the parameter sets on which the candidate
# implementation (f_filled) agrees with the reference (f_gold).
n_success = sum(
    1
    for parameters_set in param
    if f_filled(*parameters_set) == f_gold(*parameters_set)
)
print("#Results: %i, %i" % (n_success, len(param)))
def f_gold(keypad, n):
    """Count distinct numbers of length ``n`` dialable on a mobile keypad.

    From any digit you may press the same key again or move to a key that
    is directly above, below, left of, or right of it on the standard
    4x3 phone layout (``*`` and ``#`` are not usable).  The adjacency is
    hard-coded to that layout; ``keypad`` is only checked for emptiness,
    matching the original benchmark contract.

    Parameters
    ----------
    keypad : sequence
        The keypad grid; any falsy value (None, empty list) yields 0.
    n : int
        Desired number length; ``n <= 0`` yields 0.

    Returns
    -------
    int
        The number of dialable sequences of length ``n``.
    """
    if not keypad or n <= 0:
        return 0
    if n == 1:
        return 10
    # For each digit: itself (repeat press) plus its up/down/left/right
    # neighbours on the standard keypad.  This single table replaces the
    # two duplicated odd/even assignment ladders of the original.
    moves = (
        (0, 8),              # 0 sits below 8
        (1, 2, 4),
        (2, 1, 3, 5),
        (3, 2, 6),
        (4, 1, 5, 7),
        (5, 2, 4, 6, 8),
        (6, 3, 5, 9),
        (7, 4, 8),
        (8, 0, 5, 7, 9),
        (9, 6, 8),
    )
    # counts[d] = number of length-k sequences ending on digit d; a
    # rolling list replaces the original's explicit odd/even ping-pong.
    counts = [1] * 10
    for _ in range(2, n + 1):
        counts = [sum(counts[v] for v in moves[d]) for d in range(10)]
    return sum(counts)
if __name__ == '__main__':
param = [
([[' ', 'A', 'C', 'K', 'R', 'R', 'V', 'c', 'd', 'i', 'i', 'j', 'm', 'o', 'q', 'q', 'r', 'r', 'v', 'v', 'x', 'z'], ['B', 'D', 'I', 'M', 'N', 'Q', 'R', 'Z', 'c', 'f', 'i', 'j', 'j', 'l', 'l', 'n', 'p', 'q', 's', 't', 't', 'w'], ['A', 'F', 'F', 'G', 'H', 'J', 'K', 'K', 'N', 'V', 'V', 'b', 'c', 'c', 'g', 'i', 'j', 'l', 'l', 's', 't', 'y'], [' ', 'A', 'B', 'B', 'E', 'H', 'I', 'J', 'J', 'P', 'Q', 'T', 'U', 'V', 'Z', 'c', 'c', 'j', 'p', 'w', 'y', 'z'], [' ', ' ', 'A', 'C', 'F', 'G', 'H', 'M', 'N', 'R', 'R', 'V', 'c', 'i', 'j', 'o', 'p', 'p', 'q', 'r', 'w', 'y'], [' ', ' ', 'C', 'C', 'D', 'H', 'I', 'J', 'K', 'O', 'S', 'X', 'Y', 'f', 'h', 'h', 'o', 'p', 'p', 'u', 'u', 'w'], ['B', 'C', 'D', 'H', 'M', 'M', 'Q', 'Q', 'R', 'S', 'X', 'Z', 'e', 'e', 'e', 'j', 'k', 'l', 'm', 'o', 'v', 'w'], ['A', 'C', 'C', 'D', 'H', 'H', 'I', 'J', 'L', 'L', 'L', 'M', 'N', 'S', 'U', 'c', 'd', 'f', 'f', 's', 'u', 'y'], ['A', 'B', 'D', 'D', 'I', 'J', 'K', 'L', 'L', 'M', 'P', 'S', 'S', 'Y', 'b', 'e', 'h', 'j', 'm', 'o', 'q', 's'], [' ', 'B', 'E', 'H', 'H', 'J', 'M', 'P', 'S', 'T', 'U', 'V', 'Z', 'd', 'j', 'm', 'm', 'p', 'q', 'v', 'w', 'w'], ['B', 'E', 'F', 'G', 'H', 'M', 'M', 'M', 'N', 'O', 'Q', 'R', 'T', 'V', 'a', 'c', 'g', 'g', 'i', 's', 'x', 'y'], ['A', 'E', 'G', 'J', 'O', 'R', 'R', 'S', 'T', 'W', 'a', 'b', 'f', 'h', 'h', 'i', 'm', 'n', 's', 'u', 'v', 'y'], ['B', 'D', 'E', 'H', 'I', 'I', 'K', 'M', 'N', 'P', 'Q', 'S', 'a', 'e', 'i', 'j', 'm', 'o', 'p', 'r', 'x', 'z'], ['A', 'G', 'I', 'K', 'K', 'L', 'O', 'P', 'U', 'U', 'X', 'X', 'Z', 'a', 'c', 'f', 'g', 'i', 'l', 'o', 'o', 'v'], [' ', ' ', 'E', 'H', 'J', 'J', 'L', 'M', 'N', 'O', 'P', 'S', 'S', 'X', 'c', 'f', 'g', 'r', 'u', 'v', 'x', 'z'], ['C', 'E', 'F', 'F', 'H', 'H', 'I', 'K', 'M', 'M', 'U', 'Z', 'e', 'e', 'h', 'h', 'h', 'j', 'k', 'k', 'p', 'r'], [' ', ' ', ' ', 'C', 'G', 'I', 'J', 'O', 'O', 'P', 'T', 'V', 'Y', 'b', 'j', 'n', 'o', 'o', 's', 'u', 'w', 'x'], ['A', 'D', 'F', 'F', 'H', 'H', 'N', 'R', 'S', 'W', 'W', 'Y', 'Y', 'b', 'f', 'i', 'k', 'o', 
'u', 'y', 'y', 'z'], [' ', 'C', 'G', 'I', 'I', 'L', 'P', 'S', 'X', 'Y', 'd', 'd', 'f', 'g', 'g', 'k', 'm', 'o', 'r', 'r', 'r', 'x'], ['F', 'I', 'J', 'N', 'P', 'P', 'Q', 'Q', 'R', 'X', 'Y', 'a', 'b', 'h', 'h', 'j', 'l', 'm', 'n', 'p', 'r', 'y'], [' ', 'C', 'D', 'E', 'F', 'L', 'Q', 'Q', 'V', 'c', 'g', 'h', 'k', 'k', 'l', 'l', 'n', 'o', 'p', 'r', 'u', 'x'], [' ', 'A', 'G', 'K', 'L', 'M', 'T', 'U', 'U', 'W', 'Z', 'a', 'f', 'i', 'k', 'k', 'n', 'n', 'p', 'q', 'v', 'z']],13,),
([['3', '5', '1', '5', '6', '7', '7', '3', '0', '4', '7', '6', '1', '4', '0', '6', '3', '4', '1', '3', '1', '2', '9', '8', '7', '8', '0', '2', '7', '6', '1', '0', '3', '8', '0', '5', '9', '3', '9', '9', '8', '6'], ['0', '3', '8', '5', '0', '2', '0', '6', '1', '8', '7', '2', '8', '6', '0', '3', '9', '4', '9', '5', '7', '4', '3', '7', '4', '3', '8', '6', '1', '5', '4', '8', '0', '8', '3', '2', '7', '7', '6', '9', '7', '9'], ['6', '7', '1', '1', '7', '2', '5', '3', '2', '8', '4', '7', '8', '6', '1', '5', '2', '1', '6', '5', '7', '6', '8', '6', '8', '8', '1', '6', '3', '1', '1', '7', '1', '6', '4', '9', '2', '8', '2', '6', '3', '4'], ['8', '7', '9', '2', '0', '6', '6', '6', '2', '3', '1', '4', '8', '2', '3', '5', '5', '9', '2', '8', '0', '3', '2', '7', '2', '0', '2', '7', '0', '6', '5', '8', '2', '9', '3', '9', '8', '1', '9', '7', '9', '7'], ['9', '8', '1', '5', '0', '9', '9', '7', '7', '8', '4', '1', '8', '0', '4', '6', '7', '0', '5', '8', '6', '5', '6', '5', '1', '4', '0', '4', '3', '4', '6', '7', '6', '7', '3', '5', '4', '5', '6', '7', '1', '1'], ['4', '4', '4', '9', '8', '8', '7', '5', '3', '1', '8', '4', '8', '1', '0', '4', '9', '8', '9', '5', '2', '7', '5', '3', '4', '8', '2', '4', '7', '5', '0', '3', '6', '2', '5', '6', '3', '1', '9', '4', '8', '9'], ['7', '2', '7', '6', '2', '8', '8', '8', '1', '1', '5', '4', '6', '5', '3', '0', '3', '7', '4', '0', '0', '2', '4', '1', '8', '0', '0', '7', '6', '4', '7', '1', '8', '8', '1', '8', '8', '2', '3', '1', '7', '2'], ['2', '7', '5', '8', '7', '6', '2', '9', '9', '0', '6', '1', '7', '8', '1', '3', '3', '1', '5', '7', '9', '8', '2', '0', '7', '6', '0', '0', '1', '1', '5', '8', '6', '7', '7', '9', '9', '0', '4', '4', '3', '4'], ['0', '9', '9', '0', '5', '4', '9', '9', '3', '0', '3', '1', '5', '9', '9', '5', '3', '0', '2', '3', '9', '9', '7', '8', '5', '4', '6', '4', '2', '8', '7', '0', '2', '3', '6', '5', '2', '6', '0', '6', '5', '7'], ['1', '1', '4', '1', '4', '2', '7', '1', '9', '7', '9', '9', '4', '4', '2', '7', '6', 
'8', '2', '6', '7', '3', '1', '8', '0', '5', '3', '0', '3', '9', '0', '4', '7', '9', '6', '8', '1', '7', '0', '3', '2', '4'], ['6', '3', '1', '3', '2', '9', '5', '5', '4', '7', '2', '4', '7', '6', '9', '2', '0', '1', '2', '1', '4', '3', '8', '4', '9', '8', '9', '7', '7', '6', '8', '2', '4', '5', '3', '0', '1', '3', '0', '1', '0', '9'], ['5', '9', '4', '2', '1', '5', '0', '2', '6', '6', '0', '8', '3', '0', '3', '3', '3', '0', '7', '8', '0', '7', '7', '4', '3', '0', '6', '9', '6', '2', '2', '2', '8', '3', '7', '2', '4', '0', '0', '4', '5', '2'], ['3', '1', '1', '6', '2', '9', '7', '0', '3', '2', '8', '0', '5', '2', '2', '9', '9', '2', '8', '3', '5', '7', '4', '2', '8', '7', '8', '0', '4', '9', '7', '8', '0', '3', '2', '2', '1', '5', '1', '4', '9', '1'], ['6', '4', '8', '2', '4', '2', '5', '4', '0', '1', '0', '9', '0', '3', '0', '6', '4', '8', '6', '7', '9', '3', '0', '1', '6', '9', '5', '7', '5', '2', '9', '4', '7', '0', '6', '4', '1', '4', '4', '1', '3', '5'], ['6', '7', '8', '2', '9', '5', '0', '2', '6', '5', '4', '9', '4', '7', '8', '4', '6', '7', '6', '5', '1', '3', '8', '1', '7', '5', '9', '3', '9', '4', '0', '6', '5', '6', '9', '8', '4', '6', '9', '9', '0', '2'], ['6', '9', '2', '4', '3', '7', '2', '5', '8', '6', '3', '6', '3', '6', '7', '2', '6', '8', '6', '4', '3', '9', '6', '2', '1', '3', '1', '8', '8', '9', '6', '2', '0', '2', '2', '9', '3', '6', '4', '4', '8', '7'], ['1', '4', '5', '5', '7', '2', '3', '8', '3', '6', '9', '3', '3', '4', '4', '2', '3', '7', '5', '5', '2', '8', '7', '2', '7', '6', '0', '5', '1', '4', '1', '5', '5', '0', '4', '8', '7', '8', '1', '4', '2', '6'], ['5', '6', '8', '0', '0', '6', '3', '8', '3', '8', '2', '0', '8', '5', '4', '4', '0', '0', '8', '5', '8', '9', '1', '3', '3', '1', '1', '2', '9', '9', '1', '2', '1', '3', '5', '8', '7', '9', '3', '1', '3', '5'], ['9', '6', '7', '4', '9', '0', '2', '8', '9', '4', '3', '6', '4', '1', '8', '3', '1', '8', '0', '4', '4', '2', '1', '2', '9', '8', '3', '6', '7', '3', '9', '5', '7', '9', '1', 
'4', '6', '1', '4', '5', '4', '0'], ['5', '7', '4', '0', '6', '7', '8', '3', '6', '5', '8', '1', '4', '9', '9', '2', '7', '7', '4', '2', '8', '0', '8', '3', '2', '7', '3', '5', '7', '4', '4', '1', '3', '5', '1', '9', '6', '1', '0', '9', '5', '4'], ['3', '4', '0', '0', '3', '2', '2', '2', '9', '7', '5', '5', '1', '8', '4', '7', '9', '0', '7', '4', '1', '9', '3', '7', '3', '9', '5', '0', '3', '6', '6', '8', '8', '4', '1', '8', '2', '3', '9', '5', '3', '3'], ['7', '0', '6', '2', '5', '2', '1', '8', '1', '4', '4', '8', '9', '0', '3', '0', '3', '1', '9', '0', '8', '0', '1', '0', '3', '7', '6', '6', '3', '9', '4', '3', '4', '4', '1', '4', '7', '2', '9', '5', '8', '3'], ['7', '5', '7', '9', '8', '8', '3', '4', '3', '2', '5', '2', '4', '6', '5', '6', '1', '6', '0', '4', '9', '6', '8', '0', '3', '3', '2', '1', '1', '8', '9', '5', '3', '8', '3', '0', '4', '7', '7', '9', '2', '6'], ['6', '3', '9', '7', '5', '8', '5', '1', '1', '6', '6', '0', '8', '3', '2', '7', '3', '0', '4', '5', '1', '2', '3', '0', '4', '2', '8', '4', '1', '1', '0', '2', '3', '2', '5', '6', '3', '0', '1', '2', '2', '5'], ['8', '7', '2', '1', '4', '9', '6', '5', '2', '0', '9', '1', '0', '8', '6', '9', '7', '3', '4', '5', '6', '7', '2', '8', '3', '0', '1', '9', '5', '4', '4', '1', '6', '4', '0', '5', '1', '5', '7', '8', '2', '4'], ['4', '8', '1', '1', '7', '0', '8', '0', '2', '1', '8', '2', '2', '7', '6', '2', '3', '5', '2', '5', '5', '5', '9', '3', '4', '9', '4', '9', '8', '8', '0', '1', '6', '7', '7', '5', '7', '5', '9', '3', '6', '1'], ['5', '8', '6', '8', '0', '7', '3', '1', '9', '2', '3', '5', '5', '5', '0', '9', '2', '2', '2', '8', '7', '7', '6', '7', '6', '7', '4', '3', '9', '8', '3', '9', '3', '5', '7', '1', '3', '1', '4', '0', '7', '1'], ['9', '2', '6', '8', '8', '6', '8', '4', '8', '6', '7', '7', '7', '0', '2', '6', '5', '1', '5', '3', '8', '0', '5', '6', '5', '4', '9', '4', '6', '0', '0', '7', '2', '2', '1', '1', '0', '5', '1', '2', '5', '1'], ['1', '8', '4', '3', '2', '6', '1', '8', '3', '6', '5', 
'5', '1', '5', '9', '8', '0', '2', '8', '9', '4', '2', '1', '9', '6', '5', '1', '2', '5', '4', '6', '7', '3', '8', '7', '3', '2', '4', '7', '6', '6', '0'], ['9', '2', '9', '7', '5', '6', '4', '9', '5', '4', '8', '5', '2', '4', '0', '5', '5', '1', '0', '9', '3', '6', '4', '0', '9', '4', '2', '7', '5', '1', '3', '4', '8', '3', '7', '4', '2', '8', '3', '0', '2', '8'], ['8', '4', '4', '7', '5', '7', '3', '2', '8', '9', '5', '5', '2', '3', '8', '3', '3', '8', '0', '4', '9', '5', '9', '8', '5', '9', '1', '9', '4', '3', '9', '7', '4', '3', '0', '9', '3', '1', '3', '1', '3', '9'], ['9', '3', '7', '7', '4', '9', '1', '1', '8', '9', '2', '1', '2', '4', '1', '0', '9', '2', '8', '8', '9', '7', '2', '6', '0', '4', '3', '6', '2', '1', '4', '7', '6', '2', '4', '0', '8', '5', '1', '6', '2', '1'], ['6', '8', '7', '3', '6', '4', '3', '9', '3', '7', '1', '5', '0', '5', '5', '1', '7', '9', '3', '9', '8', '9', '9', '6', '6', '3', '1', '2', '2', '2', '0', '7', '8', '4', '7', '3', '6', '2', '2', '1', '9', '6'], ['1', '3', '1', '5', '7', '5', '2', '5', '3', '4', '0', '7', '6', '8', '5', '9', '7', '1', '0', '3', '3', '8', '2', '9', '7', '2', '4', '8', '6', '3', '1', '3', '3', '0', '7', '1', '5', '9', '0', '9', '8', '1'], ['4', '1', '6', '2', '2', '3', '9', '7', '6', '5', '6', '5', '3', '0', '8', '4', '3', '0', '6', '8', '7', '4', '1', '4', '2', '3', '2', '2', '1', '0', '0', '5', '3', '4', '0', '8', '4', '8', '4', '9', '0', '0'], ['2', '1', '1', '4', '8', '0', '6', '9', '7', '0', '9', '4', '7', '6', '1', '1', '5', '2', '0', '6', '9', '2', '0', '2', '7', '3', '3', '0', '5', '2', '6', '3', '0', '1', '8', '3', '5', '5', '3', '9', '8', '5'], ['1', '3', '2', '8', '8', '7', '7', '2', '6', '3', '8', '8', '5', '6', '7', '0', '1', '7', '7', '8', '5', '1', '9', '5', '2', '5', '7', '2', '2', '5', '9', '6', '0', '3', '1', '2', '2', '2', '3', '0', '1', '9'], ['2', '5', '0', '6', '4', '0', '1', '6', '9', '7', '0', '6', '7', '4', '9', '1', '0', '2', '5', '5', '7', '0', '2', '0', '8', '0', '6', '2', '6', 
'8', '1', '1', '0', '6', '4', '4', '0', '6', '5', '8', '7', '3'], ['9', '7', '8', '6', '0', '3', '7', '5', '7', '5', '6', '0', '5', '6', '3', '9', '6', '3', '2', '6', '0', '0', '6', '5', '8', '3', '7', '3', '7', '3', '5', '2', '4', '9', '4', '1', '0', '7', '9', '7', '6', '2'], ['3', '0', '7', '5', '1', '4', '8', '7', '9', '9', '0', '7', '6', '8', '6', '0', '5', '8', '0', '8', '9', '4', '8', '1', '3', '1', '8', '6', '0', '5', '1', '7', '3', '4', '7', '6', '4', '2', '8', '6', '1', '7'], ['4', '2', '8', '1', '1', '3', '2', '6', '5', '1', '9', '1', '2', '8', '8', '8', '2', '6', '2', '5', '6', '0', '7', '5', '2', '0', '9', '3', '0', '1', '4', '1', '1', '0', '0', '3', '9', '3', '4', '8', '8', '3'], ['9', '1', '9', '0', '9', '4', '0', '8', '4', '9', '7', '6', '7', '6', '0', '7', '1', '1', '7', '4', '9', '0', '0', '7', '3', '2', '8', '1', '6', '9', '7', '2', '0', '1', '6', '1', '9', '8', '9', '7', '5', '3']],39,),
([['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', 
'1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1']],15,),
([['b', 'q', 'b', 'D', 't', 'y', 'Z', 'G', 'd', 'r', 'R', 'R', 'z', 'A', 'Y', 'H', 'D', 'Q', 'X', 'U', 'o', 'a', 'S', 'P', 'S', 'c', 'W', 'r', 'I', 'y', 'E', 'x', 'E', 'k', 'l', 'F', 'M', 'G', 'z', 'T', 'I', 'E', 'D', 'K', 'M', 'l'], ['V', 'm', 'W', 'M', 'l', 'H', 'l', 'j', 'f', 'S', 'k', 'g', 'O', 'W', 'S', 'R', 'I', 'L', 'J', 'Z', 'V', 'X', 'w', 'l', 'K', 's', 'F', 'o', 'X', 'k', 'a', 'L', 'K', 'H', ' ', 'E', 'x', 'b', 'Z', 'w', 'Z', 'Y', 'U', 'y', 'I', 'Q'], ['I', 'o', 's', 'A', 'f', 'Z', 'C', 'o', 'X', 'b', 'd', 's', ' ', 'Y', 'Q', 'U', 'C', 'T', 'K', 'r', 'Q', 'U', 'P', 'C', 'w', 'R', 'e', 's', 'L', 'A', 'j', 'g', 'p', 'B', 'I', 'W', 'L', 'e', 'w', 'b', 'R', 'z', 'Y', 'M', 'M', 'E'], ['k', 'Y', 'v', 'L', 'f', 'x', 'v', 'l', 'C', 'g', 'J', 'V', 'l', 'q', 'p', 'x', 'z', 'A', 'J', 'h', 'V', 'i', 'h', 'r', 'Z', 'i', ' ', 'y', 'M', 'k', 'p', 'q', 'X', 'M', 'U', 'W', 'v', 'v', 'P', 'L', 'n', 'j', 'r', 'O', 'k', ' '], ['K', 'k', 'K', 'Z', 'X', 'W', 'e', ' ', 'x', 'u', 'r', 'l', 'l', 'z', 'V', 'e', 'K', 'z', 'y', 'x', 'f', 'v', 'n', 'f', 'K', 'p', 'b', 'I', 'C', 'p', 'b', 'V', 'R', 't', 'n', 't', 'm', 'A', 'F', 'J', 'U', 'M', 'n', 'g', 'M', 'W'], ['a', 'e', 'x', 'A', 'U', 'V', 'P', 'W', 'W', 'l', 'p', ' ', 'o', 'L', 'X', 'E', 'g', 'k', 'Y', 'W', 'P', 'Y', 'B', 't', 'Z', 'm', 'V', 'Z', 'O', 'z', 'o', 'O', 'm', 's', 'x', 'O', 'L', 'q', 'Z', 'E', 'y', 'B', 'l', 'h', 'h', 'T'], ['c', 'x', 'R', 'R', 'x', 'S', 'R', 'y', 'J', 'Y', 'e', 'F', 'X', 'x', 'h', 'L', 'N', 'Q', 'j', 'X', 's', 'H', 'Z', 'M', 'Q', 'b', 'Q', 'h', 'x', 'R', 'Y', 'C', 'r', 'D', 'b', 'O', 'l', 'W', 'J', 'I', 'A', 'P', 'x', 'D', 'T', 'c'], ['Y', 's', 'B', 'N', 'B', 'g', 'e', 'h', 'l', 'y', 'N', 's', 'a', 'f', 'k', 'p', 'C', 'Q', 'c', 'U', 'A', 'N', 'w', 'V', 'z', 'F', 'j', 'M', 'F', 'g', 'q', 'x', 'r', 'l', 'e', 'Y', 'T', 'z', ' ', 'a', 'n', 'n', 'x', 'p', 'm', 'J'], ['v', 'O', 'a', 'A', 'E', 'q', 'L', 'P', ' ', 'w', 'l', 'G', 'k', 'f', 'M', 'A', 'k', 'i', 'f', 'D', 'z', 'A', 'J', 'Y', 'b', 'g', 'a', 'h', 
'e', 'S', 'Q', 'H', 'c', 'f', 'I', 'S', 'X', 'Y', 'J', 'g', 'f', 'n', 'G', 'J', 'r', 'S'], [' ', 'S', 'w', 'G', 'b', 'v', 'z', 'U', 'l', 'k', 'a', 'w', 'y', 'D', 'Q', 'v', 'c', 'T', 'S', 'S', 'n', 'M', 'm', 'j', 'U', 'X', 'a', 'k', 'O', 'A', 'T', 'a', 'U', 'u', 'y', 's', 'W', 'j', 'k', 'n', 'a', 'V', 'X', 'N', 'D', 'C'], ['Z', 'o', 'O', 'a', 'z', 'M', 'X', 'k', 'm', 'X', 'J', 'w', 'y', 'd', 'j', 'c', 'Q', 'E', 'E', 'i', 'g', 'q', 'U', 'v', 'C', 'k', 'y', 't', 'T', 'A', 'o', 'u', 'o', 'e', 'J', 'c', 'c', 'd', 'i', 'o', 'b', 'A', 'h', 'g', 'y', 'Y'], ['O', 'j', 'F', 'A', 'f', 't', 'J', 'u', 'V', 'J', 'P', 'Z', 'C', 'c', 'c', 'y', 'G', 's', 'W', 'X', 'O', 'g', 'q', 'l', 'z', 'L', 'p', 'U', 'o', 'A', 'k', 'v', 'q', 'v', 'I', 'W', 'k', 'r', 'm', 'Y', 'i', 'V', 'Y', 'c', 'P', 'S'], ['N', ' ', 'W', 'k', 'z', 'o', 'V', 'w', 'M', 'a', 'q', 'c', 'P', 'D', 'x', 'O', 'M', 'y', ' ', 'B', 'y', 'L', 'V', 'E', 'j', 'i', 'C', 'k', ' ', ' ', 'c', 'K', 'c', 'h', 'y', 'K', 'c', 'G', 'Q', 'h', 'B', 'i', 'L', 'Q', 'P', 's'], ['X', 'p', 'y', 'I', 'W', 'F', 'F', 'o', 'W', 'g', 'A', 'H', 'a', 'H', 'X', 'F', 'd', 'Y', 'I', 'x', 'n', 'r', 's', 'c', 'B', 'L', 'o', 'B', 'C', 'o', 'G', 'v', 'T', 'q', 'A', 'Z', 'a', 'Z', 'd', 'S', 'B', 'S', 'F', 'I', 'm', 'C'], ['F', 't', 'c', 'w', 'E', 'X', 's', 'F', 'e', 'J', 'h', 'Y', 'f', 'g', 'd', 'f', 'N', 'X', 'G', 'l', 'n', 'M', 'L', 'k', 'P', 'Y', 'M', ' ', 'U', 'X', 'n', 's', 'o', 'F', 'R', 'g', 'E', 'I', 'G', 'P', 'x', 'f', 'h', 'K', 'b', 'k'], ['a', 'p', 'j', 'Q', 'X', 'p', 'h', 'R', 'g', 'U', 'O', 'x', 'X', 'k', 'v', 'm', 'o', 'E', 'Z', 'Z', 'W', 'v', 'k', 'l', 'o', 'O', 'N', 'P', 'Q', 'k', 'A', 'K', 'c', 'l', 'w', 'a', 'k', 'Z', 'd', 'T', 'S', 't', 'K', 'L', 'x', 'k'], ['t', 'f', 'V', 'Q', 'X', 'e', 's', 'f', 'o', 'N', 'U', 'z', 'y', 'K', 'F', ' ', 'A', 'V', 'W', 'A', 'j', 'C', 'T', 'G', 'z', 'K', 'j', ' ', 'I', 'w', 'h', 'Q', 't', 'I', 'm', 'V', 'h', 'M', 'L', 'Q', 'J', 'g', 'p', 'x', 'P', 'i'], ['X', 'Q', 'b', 'i', 'T', 'A', 'R', 'f', 'c', 'r', 
'K', 't', 'J', 'E', 'Z', 'd', 'W', 'O', 'G', 'X', 'u', 'I', 'z', ' ', 'm', 'H', 's', 'P', 'd', 's', 'k', 'm', 'E', 'K', 'Y', 'H', 'L', 'b', 'Z', 'y', 'I', 'c', 'p', 'y', 'Y', 'T'], ['P', 'g', 'C', 'T', 'i', 'Z', 's', 's', 'r', 'E', 'L', 'P', 'T', 'o', 'r', 'g', 'x', 'c', 'U', 'b', 'o', 'l', 'H', 'H', 'k', 'b', 'N', 'e', 'S', 'E', 'U', 'c', 'g', 'V', 'E', 'V', 'l', 'L', ' ', 'I', 'h', 'M', 'L', 'z', 'P', 'e'], ['l', 'i', 'O', 'F', 'S', 'e', 'Z', 'j', 'y', 'J', 'p', 'c', 'q', 'j', 'Q', 'E', 'j', 'd', 'u', 'S', 'N', 'Y', 'R', ' ', 'F', 'I', 'f', 'u', 'd', 't', 'u', 'Q', 'J', 'v', 'i', 'x', 'A', 'd', 'k', 'v', 'H', 'Z', 'B', 'u', 'o', 'k'], ['V', 'p', 'B', 'h', 'M', 'a', 'p', 'n', 'z', 'L', 's', 'g', 'c', 'G', 'T', 'X', 'a', 'X', 's', 'h', 'O', 'x', 'h', 's', 'x', 'N', ' ', 'O', 'w', 'F', 'v', 'M', 'W', 'u', 'c', 'Y', 'x', 'x', 'H', 'P', 'T', 'h', 's', 'W', 'w', 'l'], ['B', 'f', 'k', 'U', 'j', 'b', 'X', 'J', 'z', 'y', 'w', 'B', 'n', 'f', 'x', 'N', 'Y', 'l', 'Q', 'h', 't', 'v', 'U', 'y', 'I', 'G', 'q', 'T', 'a', 'i', 'N', 'p', 'e', 'Z', 'Y', 'Q', 'B', 'G', 'e', 'N', 'V', 's', 'E', 'U', 'B', 'h'], ['q', 'Y', 'r', 'w', 't', 'G', 'G', 'M', 'F', ' ', 'e', 'u', 'E', 'g', 's', 'D', 'c', 'h', 'L', 'G', 'x', 'u', 'V', 'j', 'u', 'U', 'i', 'm', 'Y', 'J', 'L', 'P', 'h', 'X', 'p', 'P', 'F', 'f', 'O', 'u', 'U', 'H', 'Y', 'I', 'A', 'X'], ['v', ' ', 'W', 'A', 'e', 't', 'Y', 't', 'I', 's', 'w', 'M', ' ', 'E', 'R', 'K', 'x', 'i', 'O', 'w', 'h', 'e', 'f', 'N', 'i', 'N', 'v', 'q', 'F', 'u', 'A', 'c', 'e', 's', 'p', 'N', 'j', 'G', 'q', 'W', 'q', 'U', 'J', 'b', 'V', 'i'], ['p', 'Y', 'p', 'f', 'I', 'N', 'S', 'C', 'J', 'p', 'O', 'O', 's', 'V', 's', 'Z', 'y', 's', 'l', 'o', 'b', 'e', 'L', 'J', 'm', 'W', 'g', 'P', 'x', 'l', 'W', 'N', 'a', 'T', 'm', 'D', 'p', 'p', 'l', 'P', 'E', 'V', 'c', 'O', 'T', 'Z'], ['x', ' ', 'v', 'X', 'T', 's', 'i', 'A', 'J', 'q', 'H', 'P', 'x', 'q', 'Y', 'n', 's', 'i', 'W', 'z', 'Y', 'q', 'a', 'Z', 't', 'M', 's', 'A', 'q', 'e', 'W', 'V', 'g', 'y', 'x', 'n', 'E', 'p', 
'x', 't', 'q', 'R', 'T', 'm', 'h', 'm'], ['M', 'u', 'D', 'R', 'R', 'h', 'B', 'f', ' ', 'H', 'b', 'l', 'q', 'X', 'f', 'b', 'r', 'e', 'v', 'D', 'm', 'T', 'v', 'l', 'g', 'l', 'z', 'y', 'A', 'O', 'i', 'G', 'Q', 'l', 'K', 'G', 'H', 'G', 'S', 'b', 'a', 'b', 'k', 'p', 'g', 'R'], ['G', 'Q', 'P', 'e', 'P', 'r', 'K', 'U', 'l', 'g', 'X', 'q', 'I', 'O', 'U', 'y', 'V', 'Z', 't', 'Q', 'g', 'd', 'T', 'J', 'q', 'w', 'z', 'L', 'V', 'p', 'e', 'X', 'x', 'D', 'k', 'R', 'P', 'U', 'L', 'z', 'a', 'L', 'L', 'Q', 'z', 'D'], ['j', 'W', 'Q', 'E', 'P', 'V', 'f', 'p', 'w', 'n', 'U', 'j', 'Z', 'P', 'f', 'v', 'R', 'r', 'h', 'z', 'r', 'l', 'T', 'P', 'U', 'f', 'v', 'A', 'B', 'k', 'b', 'n', 'o', 'h', 'j', 'K', 'h', 'r', 'f', 'q', 'x', 'E', 'U', 'g', 'd', 'C'], ['C', 'v', 'D', ' ', 'K', 'd', 'd', 'D', 'd', 'f', 'U', 'F', 'l', 'x', 'E', 'D', 'Q', 'L', 'W', 'X', 'E', 'E', 'X', 'T', 'M', 'C', 'e', 'B', 'x', 'o', 'C', 'f', 'd', 'o', 'F', 'T', 'J', 'F', 'G', 'l', 'k', 'x', 'u', 'q', 'N', 't'], ['l', 'd', 'P', 'k', 'N', 'w', 't', 'C', 'u', 'n', 'f', 'Z', 'B', 'A', 'Z', 'z', 'z', 'v', 'Z', 's', 'n', 'f', 'Y', 'c', 's', 'j', 'e', 'M', 'E', 'i', 'N', 'Y', 'D', 'k', 'k', 'n', 'j', 'X', 'q', 'T', 'T', 'G', 'S', 'd', 't', 'd'], ['c', 'c', 'G', 'd', 'y', 'T', 'E', 'w', 'k', 'R', 'd', 'N', 'S', 'M', 'L', 'p', 'H', 'F', 'C', 'L', 'n', 'l', 'C', 'M', 'L', 'u', 'k', ' ', 'X', 'E', 'L', 'J', 'L', 'G', 'l', 'H', 'l', 'r', 'p', 'v', 'D', 'T', 'r', 'L', 'v', 'e'], ['t', 'A', 's', 'J', 'M', 'b', 'P', 'a', 'p', 'G', 'q', 'p', 'i', 'd', 'b', 'C', 'S', 'w', 'c', 'N', 'm', 'A', 'C', 'm', 'f', 'T', 'P', 'z', 'U', 'L', 'o', 'V', 'N', 'M', 'G', 'h', 'V', 'h', 'U', 'S', 'I', 'N', 'f', 'H', 'L', 'f'], ['q', 'V', 'Z', 'j', 's', 'c', 'T', 'n', 'U', 'l', 'E', 'V', 'c', 's', 'J', 'n', 'q', 'b', 'c', 'h', 'e', 'x', 'H', 'G', 'k', 'U', 'P', 'U', 'T', 'W', 'n', 't', 'p', 'i', 'b', 'u', 'b', 'H', 's', 'D', 'L', 'Y', 'Z', 'u', 'P', 'w'], ['s', 'F', 'O', 't', 'J', 'e', 'f', 'P', 'l', 'l', 'v', 'G', 'B', 'J', 'i', 'b', 'i', 'r', 'P', 'x', 
'a', 'i', 'X', 'T', 'G', 'G', 'a', 'k', 'd', 'Z', 'L', 'Y', 'U', 'r', 'b', 'p', 't', 'k', 'L', 't', 'x', 'T', 'k', 'v', 'a', 'k'], ['c', 's', 'B', 'Z', 'd', 'h', 'd', 'P', 'w', 'D', 'a', 'c', 'G', 'M', 'T', 'u', 'U', 'O', 'T', 'w', 'a', 'o', 'x', 'V', 'J', 'g', 'N', 'w', 'w', 'f', 'g', 'u', 'j', 'p', 'G', 'T', 'w', 'X', 'J', 'p', 'M', 'y', 'o', 'G', 'm', 'w'], ['w', 'j', 'K', 'u', 'K', 'd', 'N', 'I', 'w', 'E', ' ', 'K', 'K', 'c', 'x', 'U', 'A', 'A', 'v', 'F', 'z', 'a', 'z', 'C', 'V', 'W', 'A', 'o', 'm', 'Z', 'i', 'U', 'F', 'e', 'p', 'w', 'O', 'A', 'T', 'u', 'a', 'P', 'l', 'y', 'w', 'J'], ['b', 'M', 'e', 'h', 'S', 'Q', 'c', 'G', 'D', 'A', 'I', 'H', 'g', 'f', 'E', 'j', 'x', 'u', 'P', 'p', 'p', 'd', 'V', 'F', 'D', 'L', 'L', 'g', 'H', 'h', 'n', 'Q', 'K', 'L', 'g', 'K', 'y', 'Y', 'u', 'A', 'g', 'W', 't', 'J', 'X', 'F'], ['k', 'J', 'l', 'X', 'J', 'm', 'e', 'Y', 'd', 'Z', 'L', 'W', 'r', 'W', 'T', 'J', 'G', 'f', ' ', 's', 'j', 'j', 'P', 'h', 'k', 'x', 'k', 'k', 'B', 'N', 'j', 'h', 's', 'o', 'b', 'm', 'u', 'O', 'i', 'D', 'c', 'B', 'a', 'h', 'B', 'Y'], ['L', 'l', 'R', 'Z', 'f', 'j', 'G', 'E', 'j', 'g', 'X', 'S', 'P', 'H', 'T', 'a', 'c', 'Y', 'b', 'r', 'N', 'N', 'R', 'n', 'd', 'j', 'H', 'M', 'X', 'A', 'V', 'G', 'c', 'r', 'l', 'v', 'F', 'e', 'z', 'k', 'z', 'Q', 'r', 'F', 'L', 'H'], ['U', 'o', 'Y', 'O', 'n', 'J', 'c', 'i', 'j', 'a', 'j', 'H', 'O', 'u', 'S', 'm', 'K', 'y', 'i', 'T', 'v', 'j', ' ', 'v', 'H', 'f', 'r', 'q', 'F', 'a', 'l', 'u', 'F', 'E', 'p', 'b', 'V', ' ', 'm', 'O', 'M', 'E', 'f', 'Q', 't', 'T'], [' ', 'B', 'H', 'i', 'H', 'c', 'T', ' ', 'K', 'u', 'd', 'C', 'F', 'F', 'S', 'v', 'Z', 'A', 'b', 't', 'r', 'G', 'I', 'F', 'p', 'L', 'G', 'N', 'h', 'y', 'm', 'b', 'z', 'V', 'G', 'D', 'p', 'K', 'p', 'C', 'X', 'y', 'w', 'c', 'z', 'K'], ['P', 'q', 'o', 'M', 'T', 'U', 'o', 'r', 'A', 'h', 'S', 'q', 'T', 's', 'V', 'u', 'c', 'N', 'v', 'E', 'r', 'X', 'k', 'v', 'M', 'p', 'Q', 'd', 'Y', 'Q', 'J', 'c', 'L', 'M', 'r', 'Z', 'D', 'k', 'V', 'u', 'G', ' ', 'Y', 'O', 'i', 'x'], ['V', 'x', 
'o', 'G', 'T', 'g', 'G', 'N', 'A', 'q', 'p', 'l', 'K', 't', 'j', 'n', 'C', 'U', 'c', 'b', 'q', 'q', 'c', 'C', 'w', 'x', 'B', 'C', 't', 'V', 'z', 'y', 'y', 'o', 'U', 'E', 'O', 'X', 'j', 'V', 'r', 'y', 't', 'n', 'R', 'H'], ['Z', 'O', 'w', 'z', 'v', 'K', 'U', 'c', 'N', 'M', 'h', 'W', 'Y', 'Z', 'g', 'k', 'h', 'o', 'K', 'B', 'K', 'q', 'u', 'P', 'z', 'v', 'j', 'u', 'z', 'P', 'B', 'y', 'p', 'Y', 'U', 'W', 'Z', 'I', 'c', 'm', 'W', 'J', 'c', 'l', ' ', 'O'], ['Q', 'A', 'B', 'Z', 'C', 'D', 'N', 'i', 'W', 'E', 'W', 'V', 'Z', 'k', 'A', 'D', 'z', 'Z', 'I', 't', 'Y', 'K', 'u', 'T', 'u', 'q', 'p', 'V', 'P', 'y', 'o', 'e', 'Y', 'x', 'd', 'L', 'P', 'L', 'p', 'Z', 'E', 'N', 'r', 'c', 'K', 'Z']],31,),
([['1', '1', '1', '1', '2', '2', '3', '3', '3', '4', '4', '5', '5', '6', '7', '7', '7', '8', '8', '9', '9'], ['0', '0', '1', '1', '1', '2', '3', '4', '5', '6', '6', '6', '6', '6', '6', '6', '7', '7', '8', '8', '9'], ['0', '0', '0', '0', '0', '1', '1', '2', '2', '2', '3', '3', '4', '5', '5', '5', '5', '6', '7', '7', '8'], ['0', '1', '1', '2', '2', '2', '2', '2', '3', '3', '4', '4', '5', '5', '6', '6', '7', '7', '7', '9', '9'], ['0', '0', '1', '1', '2', '2', '2', '3', '3', '3', '4', '4', '4', '4', '4', '6', '7', '7', '8', '8', '9'], ['0', '0', '0', '0', '1', '1', '2', '3', '3', '3', '3', '4', '4', '4', '5', '7', '8', '8', '8', '9', '9'], ['0', '0', '0', '0', '0', '0', '1', '1', '2', '3', '4', '5', '5', '6', '6', '7', '7', '8', '8', '9', '9'], ['0', '2', '2', '2', '4', '4', '4', '4', '4', '5', '5', '5', '6', '6', '7', '7', '7', '8', '8', '9', '9'], ['0', '0', '1', '2', '3', '3', '3', '4', '4', '5', '5', '5', '7', '7', '7', '8', '8', '8', '9', '9', '9'], ['0', '0', '1', '2', '2', '3', '4', '4', '4', '4', '4', '5', '6', '6', '6', '7', '8', '8', '9', '9', '9'], ['0', '0', '1', '1', '1', '1', '1', '2', '2', '2', '2', '3', '4', '4', '5', '5', '6', '6', '8', '8', '9'], ['0', '0', '1', '2', '2', '2', '3', '3', '5', '5', '5', '6', '7', '7', '7', '7', '7', '8', '8', '9', '9'], ['0', '0', '1', '1', '1', '3', '5', '5', '5', '5', '6', '6', '6', '6', '6', '7', '7', '8', '8', '9', '9'], ['0', '0', '1', '2', '2', '2', '2', '2', '2', '3', '3', '5', '5', '5', '6', '7', '8', '8', '9', '9', '9'], ['0', '0', '0', '0', '2', '3', '5', '5', '5', '5', '5', '6', '6', '6', '7', '7', '7', '7', '7', '8', '9'], ['0', '0', '1', '2', '2', '3', '3', '3', '4', '4', '4', '5', '5', '5', '6', '6', '6', '7', '7', '8', '9'], ['0', '0', '0', '0', '1', '1', '3', '3', '3', '4', '4', '5', '5', '6', '7', '8', '8', '8', '9', '9', '9'], ['0', '0', '1', '1', '1', '1', '1', '2', '2', '3', '5', '5', '6', '6', '6', '7', '7', '7', '7', '8', '8'], ['0', '1', '1', '1', '1', '2', '2', '4', '4', '4', '4', '4', '5', '5', 
'6', '7', '7', '8', '8', '9', '9'], ['1', '1', '2', '2', '3', '3', '4', '5', '5', '5', '5', '6', '6', '7', '7', '7', '8', '8', '8', '9', '9'], ['0', '0', '0', '0', '2', '2', '2', '3', '3', '4', '5', '5', '5', '5', '5', '5', '6', '7', '7', '7', '9']],11,),
([['0', '1', '0', '1', '1', '1', '0', '1', '1', '0', '1', '0', '0', '0', '1', '1', '1', '1', '0', '0', '0', '1', '1', '1', '0', '1', '1', '1', '1', '1', '0', '0', '0', '1', '1', '1', '1', '0', '1'], ['1', '0', '0', '0', '1', '0', '1', '1', '0', '0', '0', '0', '1', '0', '0', '0', '1', '1', '0', '0', '0', '1', '0', '0', '1', '0', '1', '1', '1', '1', '0', '0', '0', '0', '0', '0', '0', '1', '0'], ['0', '1', '1', '0', '1', '0', '1', '1', '0', '0', '0', '1', '0', '1', '1', '0', '1', '0', '0', '1', '0', '1', '0', '1', '1', '1', '0', '1', '0', '0', '0', '1', '0', '0', '1', '1', '1', '0', '0'], ['0', '1', '1', '0', '0', '1', '1', '1', '0', '0', '0', '1', '1', '1', '1', '1', '0', '1', '0', '1', '1', '0', '0', '0', '1', '1', '0', '0', '1', '1', '1', '1', '0', '0', '0', '0', '1', '1', '0'], ['1', '1', '1', '1', '1', '0', '0', '0', '1', '0', '1', '1', '0', '1', '1', '0', '0', '1', '1', '1', '1', '0', '1', '0', '0', '0', '0', '0', '1', '0', '0', '1', '0', '0', '1', '0', '0', '1', '1'], ['1', '0', '1', '0', '0', '1', '1', '1', '1', '0', '1', '1', '0', '0', '0', '0', '1', '0', '0', '1', '0', '1', '0', '1', '1', '1', '1', '0', '0', '1', '0', '0', '1', '1', '0', '1', '0', '1', '0'], ['0', '0', '0', '0', '1', '1', '0', '1', '0', '1', '0', '1', '1', '1', '1', '1', '0', '1', '1', '0', '1', '0', '0', '1', '0', '1', '0', '0', '0', '0', '1', '1', '1', '1', '0', '0', '0', '1', '1'], ['1', '0', '0', '1', '1', '1', '1', '0', '0', '0', '1', '0', '0', '1', '0', '0', '0', '0', '1', '0', '1', '0', '0', '0', '0', '1', '1', '0', '1', '1', '0', '1', '0', '0', '0', '0', '1', '0', '0'], ['0', '0', '1', '1', '1', '1', '0', '1', '0', '1', '1', '1', '1', '0', '1', '1', '0', '0', '0', '0', '0', '0', '0', '1', '0', '1', '1', '0', '1', '0', '0', '0', '1', '1', '0', '1', '1', '1', '1'], ['1', '0', '0', '1', '1', '0', '1', '1', '0', '0', '0', '1', '1', '0', '1', '0', '1', '0', '0', '0', '0', '1', '1', '1', '0', '1', '1', '0', '0', '1', '0', '0', '0', '1', '1', '0', '1', '0', '0'], ['0', '0', '0', '1', '0', 
'0', '1', '1', '0', '0', '1', '0', '0', '1', '0', '0', '0', '0', '1', '1', '0', '1', '0', '0', '1', '0', '1', '0', '1', '0', '1', '1', '0', '0', '0', '1', '0', '0', '1'], ['1', '0', '0', '1', '0', '0', '0', '0', '1', '1', '1', '0', '1', '1', '1', '0', '0', '0', '0', '0', '0', '1', '0', '1', '0', '1', '0', '1', '1', '1', '1', '0', '1', '0', '1', '1', '0', '1', '0'], ['0', '0', '1', '0', '0', '0', '1', '1', '1', '1', '1', '0', '1', '1', '1', '1', '0', '0', '1', '0', '0', '0', '0', '1', '0', '1', '1', '0', '0', '1', '1', '1', '0', '0', '0', '1', '1', '0', '0'], ['0', '1', '0', '1', '0', '0', '0', '0', '1', '1', '1', '1', '0', '0', '0', '0', '1', '1', '1', '0', '0', '0', '0', '0', '0', '1', '0', '1', '1', '1', '1', '0', '0', '1', '0', '0', '0', '0', '0'], ['0', '0', '0', '0', '1', '1', '0', '0', '1', '1', '0', '0', '0', '1', '1', '0', '1', '0', '0', '0', '0', '1', '0', '0', '1', '1', '1', '0', '0', '1', '1', '1', '1', '0', '1', '0', '1', '1', '1'], ['1', '1', '0', '1', '1', '0', '0', '0', '0', '0', '0', '1', '0', '1', '0', '0', '0', '1', '1', '0', '1', '1', '0', '0', '1', '0', '0', '1', '0', '0', '0', '0', '1', '0', '1', '0', '1', '0', '1'], ['0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '0', '1', '0', '0', '1', '1', '1', '1', '1', '0'], ['1', '1', '0', '1', '1', '1', '0', '0', '1', '1', '0', '0', '1', '0', '1', '1', '0', '1', '1', '0', '0', '1', '1', '0', '0', '1', '0', '0', '0', '0', '0', '1', '0', '0', '0', '1', '0', '1', '1'], ['0', '0', '1', '0', '1', '0', '0', '0', '0', '0', '1', '0', '0', '1', '1', '1', '0', '1', '0', '0', '0', '0', '0', '1', '0', '0', '0', '0', '1', '0', '1', '1', '1', '0', '1', '1', '1', '0', '0'], ['1', '1', '0', '1', '0', '0', '1', '1', '1', '1', '0', '0', '1', '0', '0', '0', '1', '1', '1', '0', '1', '0', '1', '0', '1', '1', '1', '1', '1', '0', '0', '0', '1', '0', '0', '0', '1', '1', '1'], ['1', '0', '0', '1', '1', '1', '0', '0', '1', '1', '1', 
'1', '1', '0', '0', '0', '0', '0', '1', '0', '1', '0', '0', '0', '0', '1', '0', '0', '1', '1', '0', '1', '1', '0', '1', '0', '0', '0', '0'], ['0', '0', '1', '1', '0', '1', '1', '1', '0', '0', '0', '1', '1', '1', '1', '1', '0', '1', '1', '1', '1', '0', '1', '0', '1', '0', '1', '1', '1', '0', '0', '1', '0', '1', '1', '1', '1', '0', '0'], ['0', '1', '0', '1', '1', '1', '1', '1', '0', '0', '1', '1', '0', '1', '0', '1', '1', '0', '0', '0', '1', '0', '0', '1', '0', '1', '0', '0', '1', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1'], ['0', '1', '1', '1', '1', '0', '1', '1', '0', '0', '1', '0', '0', '1', '1', '0', '1', '0', '0', '0', '1', '1', '0', '0', '0', '0', '0', '1', '1', '0', '1', '1', '0', '1', '1', '1', '0', '0', '1'], ['0', '0', '0', '1', '0', '0', '1', '0', '1', '0', '0', '1', '0', '1', '1', '0', '1', '0', '1', '1', '0', '0', '0', '0', '1', '0', '1', '0', '0', '1', '0', '1', '1', '1', '1', '0', '0', '0', '1'], ['1', '0', '0', '1', '0', '1', '0', '1', '0', '0', '1', '1', '1', '0', '0', '0', '1', '0', '1', '1', '0', '1', '1', '1', '0', '0', '1', '1', '0', '1', '1', '0', '1', '1', '0', '0', '1', '1', '0'], ['0', '0', '1', '0', '1', '1', '0', '0', '1', '1', '1', '0', '0', '1', '1', '1', '0', '1', '0', '0', '0', '0', '1', '1', '1', '1', '1', '0', '0', '1', '0', '1', '0', '0', '1', '0', '1', '0', '0'], ['1', '1', '0', '0', '1', '1', '1', '0', '0', '1', '0', '1', '1', '1', '0', '0', '0', '0', '0', '1', '0', '1', '0', '1', '1', '0', '1', '1', '1', '0', '0', '1', '0', '0', '1', '0', '1', '1', '1'], ['0', '1', '0', '0', '1', '1', '0', '1', '1', '0', '1', '0', '1', '1', '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '0', '1', '0', '0', '0', '0', '0', '1', '1', '0', '1', '0', '1'], ['1', '0', '1', '0', '1', '1', '0', '0', '0', '1', '1', '0', '0', '0', '0', '1', '1', '0', '0', '0', '0', '0', '0', '1', '1', '0', '1', '0', '1', '1', '1', '0', '0', '0', '0', '1', '1', '1', '0'], ['1', '0', '1', '0', '1', '0', '1', '0', '0', '1', '1', '1', '0', '1', '1', '1', '1', 
'0', '0', '1', '0', '1', '0', '0', '0', '1', '1', '0', '1', '1', '1', '0', '1', '0', '0', '0', '0', '0', '1'], ['1', '1', '0', '0', '1', '0', '0', '1', '1', '1', '1', '0', '0', '0', '0', '0', '0', '1', '1', '0', '0', '1', '0', '0', '1', '1', '1', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '0', '0'], ['1', '0', '0', '1', '1', '0', '1', '1', '0', '0', '0', '0', '0', '1', '0', '0', '1', '1', '1', '1', '1', '1', '0', '0', '0', '1', '1', '1', '1', '0', '0', '1', '1', '0', '1', '1', '1', '0', '1'], ['0', '1', '0', '0', '0', '1', '0', '1', '0', '0', '1', '0', '1', '0', '1', '1', '0', '1', '0', '1', '1', '0', '0', '0', '0', '0', '1', '1', '0', '1', '1', '0', '1', '1', '0', '0', '1', '1', '1'], ['1', '0', '1', '1', '1', '1', '1', '1', '0', '0', '1', '0', '1', '0', '1', '0', '0', '1', '0', '0', '0', '0', '1', '1', '0', '1', '0', '1', '0', '1', '1', '1', '1', '1', '1', '0', '0', '1', '0'], ['0', '1', '1', '1', '0', '1', '0', '1', '1', '0', '0', '0', '1', '0', '0', '0', '1', '0', '0', '0', '0', '1', '0', '0', '0', '0', '0', '1', '1', '1', '0', '1', '1', '0', '1', '1', '1', '1', '1'], ['1', '1', '1', '0', '1', '1', '0', '0', '0', '0', '1', '1', '0', '1', '1', '0', '1', '0', '0', '1', '0', '0', '1', '1', '1', '0', '1', '1', '0', '1', '1', '1', '0', '1', '1', '0', '0', '0', '1'], ['0', '1', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1', '0', '0', '1', '1', '0', '1', '0', '0', '1', '1', '1', '0', '0', '1', '0', '0', '0', '0', '1', '0', '1', '0', '1', '0', '1', '1', '0'], ['1', '1', '0', '1', '1', '0', '0', '1', '1', '1', '0', '1', '1', '0', '1', '1', '0', '0', '1', '1', '1', '1', '0', '0', '0', '0', '0', '0', '0', '1', '0', '0', '0', '1', '0', '0', '1', '1', '1']],20,),
([['B', 'D', 'D', 'E', 'H', 'H', 'J', 'M', 'M', 'M', 'M', 'N', 'O', 'O', 'P', 'R', 'S', 'T', 'U', 'U', 'W', 'W', 'Z', 'Z', 'b', 'c', 'c', 'e', 'f', 'g', 'j', 'k', 'k', 'n', 'o', 'r', 't', 'u', 'v'], [' ', 'A', 'A', 'A', 'C', 'C', 'D', 'D', 'E', 'F', 'H', 'J', 'J', 'K', 'L', 'L', 'N', 'T', 'T', 'U', 'W', 'Y', 'Z', 'c', 'f', 'g', 'i', 'i', 'k', 'k', 'm', 'n', 'o', 'p', 'r', 'r', 'u', 'v', 'x'], [' ', 'A', 'A', 'C', 'D', 'E', 'G', 'H', 'K', 'K', 'L', 'Q', 'S', 'U', 'V', 'Z', 'a', 'd', 'e', 'g', 'i', 'i', 'j', 'n', 'o', 'o', 'p', 'p', 'q', 's', 's', 't', 't', 'w', 'x', 'x', 'x', 'y', 'z'], [' ', 'B', 'D', 'E', 'G', 'H', 'H', 'H', 'H', 'K', 'M', 'O', 'O', 'R', 'R', 'S', 'S', 'U', 'V', 'X', 'a', 'a', 'd', 'e', 'e', 'f', 'h', 'i', 'j', 'p', 'p', 'q', 'q', 'q', 's', 'w', 'w', 'y', 'z'], [' ', 'A', 'A', 'C', 'E', 'F', 'G', 'H', 'J', 'J', 'K', 'M', 'O', 'S', 'S', 'U', 'X', 'Y', 'Z', 'b', 'd', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'n', 'q', 'q', 's', 's', 't', 'u', 'u', 'v', 'y', 'z'], ['H', 'H', 'H', 'H', 'J', 'J', 'K', 'M', 'N', 'S', 'U', 'U', 'V', 'V', 'V', 'W', 'Y', 'a', 'b', 'c', 'c', 'e', 'f', 'f', 'f', 'h', 'k', 'l', 'm', 'q', 'q', 's', 't', 'v', 'v', 'w', 'w', 'y', 'z'], ['A', 'B', 'D', 'G', 'H', 'I', 'J', 'J', 'L', 'M', 'N', 'P', 'Q', 'S', 'T', 'T', 'X', 'X', 'X', 'Y', 'Z', 'a', 'c', 'd', 'd', 'd', 'i', 'k', 'l', 'm', 'n', 'p', 'q', 'q', 't', 'w', 'x', 'y', 'y'], [' ', 'B', 'B', 'C', 'E', 'F', 'G', 'H', 'I', 'I', 'I', 'J', 'J', 'K', 'M', 'N', 'O', 'O', 'P', 'Q', 'S', 'T', 'W', 'Y', 'Y', 'a', 'c', 'd', 'h', 'h', 'i', 'j', 'k', 'o', 'o', 's', 'z', 'z', 'z'], [' ', 'A', 'C', 'C', 'D', 'E', 'E', 'E', 'F', 'H', 'H', 'M', 'M', 'N', 'N', 'R', 'T', 'W', 'Z', 'Z', 'd', 'e', 'h', 'h', 'j', 'j', 'k', 'm', 'n', 'o', 'p', 'r', 's', 's', 't', 'w', 'x', 'x', 'x'], ['A', 'D', 'I', 'M', 'P', 'Q', 'U', 'U', 'Y', 'Y', 'Z', 'Z', 'Z', 'a', 'b', 'c', 'e', 'f', 'f', 'f', 'g', 'g', 'h', 'h', 'i', 'i', 'j', 'm', 'n', 'o', 'p', 'q', 'r', 'u', 'u', 'u', 'w', 'x', 'z'], [' ', 'A', 'A', 'A', 'B', 
'C', 'E', 'F', 'G', 'H', 'J', 'Q', 'R', 'S', 'U', 'U', 'V', 'W', 'Y', 'Z', 'a', 'b', 'b', 'd', 'g', 'j', 'k', 'l', 'l', 'm', 'n', 'n', 'o', 's', 's', 'u', 'w', 'w', 'w'], [' ', 'A', 'B', 'C', 'E', 'E', 'E', 'H', 'J', 'J', 'K', 'M', 'N', 'P', 'R', 'U', 'U', 'V', 'W', 'a', 'e', 'f', 'k', 'k', 'k', 'l', 'l', 'm', 'n', 'n', 'o', 'o', 'o', 'q', 'r', 'r', 't', 'u', 'x'], [' ', 'B', 'B', 'E', 'F', 'F', 'H', 'O', 'O', 'P', 'P', 'Q', 'R', 'S', 'T', 'X', 'a', 'a', 'a', 'b', 'e', 'f', 'g', 'i', 'j', 'm', 'n', 'p', 'r', 't', 't', 't', 'u', 'v', 'v', 'w', 'x', 'x', 'z'], [' ', 'A', 'B', 'C', 'D', 'E', 'E', 'G', 'H', 'J', 'J', 'J', 'K', 'K', 'M', 'P', 'Q', 'R', 'R', 'W', 'X', 'X', 'Z', 'a', 'a', 'e', 'h', 'i', 'j', 'k', 'q', 'q', 'r', 'r', 's', 'u', 'x', 'x', 'y'], [' ', 'B', 'I', 'I', 'J', 'J', 'K', 'N', 'O', 'P', 'P', 'R', 'U', 'X', 'Z', 'Z', 'Z', 'b', 'd', 'f', 'f', 'h', 'h', 'h', 'j', 'k', 'k', 'n', 'n', 'o', 'o', 'p', 'q', 's', 't', 'v', 'w', 'x', 'z'], [' ', ' ', 'B', 'E', 'K', 'L', 'M', 'N', 'Q', 'Q', 'R', 'S', 'T', 'U', 'V', 'V', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'e', 'e', 'g', 'i', 'i', 'm', 'n', 'o', 'p', 's', 'u', 'u', 'v', 'w', 'x', 'z'], ['E', 'E', 'E', 'E', 'J', 'K', 'K', 'M', 'N', 'P', 'Q', 'S', 'S', 'V', 'W', 'W', 'W', 'X', 'Y', 'c', 'c', 'd', 'e', 'f', 'h', 'n', 'n', 'p', 'q', 'r', 's', 't', 'u', 'v', 'x', 'x', 'y', 'z', 'z'], [' ', ' ', ' ', 'E', 'E', 'F', 'F', 'G', 'G', 'H', 'J', 'L', 'O', 'Q', 'R', 'R', 'T', 'V', 'W', 'Y', 'Y', 'Z', 'Z', 'c', 'f', 'g', 'h', 'h', 'j', 'l', 'q', 'q', 'q', 't', 'v', 'x', 'x', 'y', 'y'], ['B', 'D', 'G', 'G', 'H', 'J', 'J', 'K', 'M', 'Q', 'S', 'S', 'T', 'T', 'T', 'U', 'V', 'Z', 'Z', 'a', 'b', 'd', 'e', 'g', 'g', 'h', 'h', 'l', 'l', 'n', 'o', 's', 'u', 'u', 'v', 'v', 'w', 'x', 'y'], [' ', ' ', 'B', 'B', 'B', 'C', 'D', 'D', 'E', 'I', 'L', 'M', 'O', 'O', 'P', 'P', 'Q', 'R', 'R', 'R', 'R', 'R', 'U', 'a', 'b', 'c', 'd', 'e', 'g', 'k', 'l', 'l', 'n', 'n', 'n', 'p', 'p', 'r', 'r'], [' ', ' ', 'B', 'E', 'E', 'F', 'G', 'L', 'M', 'N', 'N', 
'O', 'P', 'R', 'R', 'S', 'S', 'S', 'T', 'T', 'Y', 'Y', 'Z', 'a', 'a', 'b', 'd', 'e', 'f', 'j', 'j', 'k', 'l', 'l', 'm', 'o', 'o', 'p', 'y'], ['A', 'B', 'E', 'E', 'H', 'H', 'I', 'J', 'J', 'N', 'O', 'P', 'Q', 'R', 'V', 'V', 'W', 'W', 'X', 'X', 'Y', 'Z', 'Z', 'g', 'i', 'j', 'j', 'm', 'n', 'o', 'q', 'r', 'r', 's', 's', 's', 's', 't', 'x'], [' ', 'G', 'J', 'L', 'M', 'M', 'Q', 'Q', 'Q', 'S', 'U', 'W', 'W', 'Y', 'Z', 'Z', 'a', 'b', 'f', 'h', 'i', 'i', 'l', 'l', 'm', 'n', 'o', 'p', 'p', 'p', 'q', 'q', 'q', 's', 's', 't', 'u', 'v', 'w'], ['B', 'B', 'D', 'E', 'E', 'H', 'I', 'J', 'K', 'K', 'L', 'S', 'T', 'V', 'X', 'b', 'b', 'b', 'd', 'd', 'g', 'h', 'h', 'h', 'i', 'i', 'k', 'l', 'm', 'm', 'n', 'o', 'v', 'w', 'x', 'x', 'x', 'z', 'z'], ['B', 'C', 'C', 'C', 'D', 'D', 'E', 'F', 'J', 'K', 'M', 'N', 'O', 'O', 'Q', 'Q', 'R', 'R', 'R', 'S', 'T', 'U', 'V', 'W', 'W', 'a', 'b', 'f', 'g', 'i', 'm', 'n', 'n', 'n', 'p', 'p', 'p', 'u', 'v'], [' ', 'B', 'D', 'F', 'F', 'H', 'J', 'J', 'M', 'M', 'N', 'T', 'U', 'c', 'd', 'e', 'e', 'j', 'j', 'j', 'l', 'l', 'm', 'm', 'n', 'n', 'o', 'p', 'p', 'p', 's', 't', 't', 'v', 'v', 'w', 'y', 'y', 'y'], [' ', 'A', 'A', 'B', 'D', 'G', 'H', 'H', 'H', 'I', 'K', 'N', 'O', 'P', 'R', 'S', 'T', 'Y', 'Y', 'a', 'b', 'c', 'e', 'f', 'g', 'h', 'j', 'j', 'j', 'm', 'n', 'o', 's', 's', 'u', 'u', 'x', 'x', 'z'], [' ', ' ', 'F', 'G', 'G', 'J', 'N', 'N', 'P', 'S', 'S', 'S', 'T', 'T', 'X', 'Z', 'a', 'd', 'e', 'f', 'f', 'h', 'i', 'j', 'k', 'm', 'm', 'n', 'r', 's', 's', 't', 'v', 'w', 'x', 'x', 'x', 'z', 'z'], ['B', 'B', 'D', 'I', 'J', 'L', 'M', 'M', 'N', 'P', 'P', 'Q', 'S', 'U', 'X', 'X', 'X', 'Y', 'Z', 'a', 'b', 'e', 'e', 'f', 'g', 'i', 'j', 'l', 'm', 'o', 'q', 'r', 'r', 't', 'v', 'w', 'w', 'w', 'w'], [' ', 'A', 'B', 'C', 'D', 'D', 'E', 'F', 'F', 'H', 'I', 'J', 'J', 'M', 'N', 'N', 'O', 'S', 'U', 'V', 'W', 'W', 'e', 'g', 'h', 'h', 'i', 'j', 'j', 'o', 'p', 'q', 'q', 'r', 't', 'v', 'v', 'x', 'y'], [' ', 'A', 'A', 'C', 'C', 'D', 'D', 'D', 'E', 'G', 'I', 'J', 'O', 'Q', 'S', 'S', 'S', 
'T', 'T', 'V', 'X', 'Y', 'Y', 'b', 'i', 'k', 'l', 'l', 'm', 'n', 'p', 't', 'v', 'w', 'w', 'x', 'x', 'y', 'z'], ['A', 'A', 'D', 'F', 'G', 'H', 'I', 'L', 'N', 'P', 'Q', 'S', 'T', 'U', 'V', 'W', 'W', 'X', 'Y', 'Z', 'b', 'c', 'f', 'g', 'g', 'g', 'j', 'j', 'j', 'l', 'q', 's', 's', 'v', 'v', 'w', 'x', 'y', 'z'], ['B', 'H', 'I', 'J', 'K', 'K', 'L', 'L', 'M', 'N', 'N', 'N', 'P', 'P', 'S', 'T', 'U', 'V', 'W', 'W', 'a', 'a', 'a', 'a', 'b', 'j', 'j', 'k', 'm', 'n', 'p', 'u', 'u', 'u', 'v', 'w', 'x', 'y', 'z'], ['B', 'B', 'D', 'D', 'D', 'E', 'G', 'H', 'I', 'I', 'I', 'L', 'N', 'N', 'O', 'P', 'R', 'R', 'R', 'S', 'V', 'V', 'Y', 'Z', 'a', 'b', 'h', 'k', 'l', 'm', 'n', 'o', 'p', 'p', 'q', 'r', 's', 'x', 'z'], ['A', 'B', 'B', 'G', 'G', 'H', 'J', 'J', 'L', 'M', 'M', 'N', 'N', 'P', 'P', 'P', 'R', 'S', 'T', 'X', 'Z', 'd', 'd', 'f', 'f', 'j', 'j', 'j', 'l', 'l', 'l', 'm', 'r', 'r', 'u', 'v', 'v', 'x', 'x'], [' ', 'B', 'B', 'C', 'E', 'G', 'J', 'J', 'K', 'L', 'N', 'O', 'Q', 'R', 'T', 'T', 'V', 'V', 'X', 'X', 'b', 'e', 'f', 'i', 'i', 'k', 'm', 'n', 'o', 'o', 'p', 's', 's', 'u', 'u', 'w', 'x', 'x', 'x'], ['A', 'A', 'A', 'B', 'B', 'E', 'H', 'H', 'H', 'I', 'J', 'J', 'N', 'Q', 'Q', 'R', 'R', 'U', 'V', 'X', 'a', 'b', 'd', 'd', 'e', 'e', 'g', 'g', 'k', 'k', 'l', 'n', 'n', 'p', 'q', 'q', 'v', 'w', 'x'], ['B', 'B', 'B', 'C', 'C', 'D', 'E', 'F', 'H', 'I', 'I', 'K', 'N', 'N', 'P', 'P', 'P', 'U', 'W', 'X', 'Z', 'c', 'e', 'h', 'h', 'i', 'j', 'l', 'p', 'p', 'r', 'r', 'r', 'r', 'v', 'w', 'x', 'x', 'y'], [' ', ' ', 'B', 'C', 'C', 'D', 'E', 'E', 'H', 'L', 'O', 'P', 'P', 'S', 'T', 'V', 'Y', 'Y', 'Y', 'c', 'd', 'e', 'e', 'f', 'h', 'h', 'h', 'j', 'k', 'l', 'm', 'n', 'r', 's', 's', 'u', 'x', 'y', 'y']],38,),
([['8', '0', '3', '3', '7', '7', '3', '5', '4', '9', '6', '9', '4', '6', '9'], ['8', '7', '2', '2', '6', '9', '6', '0', '0', '6', '8', '1', '6', '1', '5'], ['2', '0', '5', '1', '8', '0', '0', '2', '9', '4', '1', '4', '8', '0', '2'], ['9', '9', '9', '5', '1', '8', '9', '5', '8', '7', '2', '9', '4', '0', '4'], ['1', '6', '7', '1', '7', '4', '7', '4', '6', '4', '3', '8', '0', '4', '9'], ['2', '7', '9', '6', '1', '2', '2', '9', '0', '7', '2', '3', '2', '0', '9'], ['9', '5', '3', '3', '6', '1', '3', '1', '3', '4', '3', '4', '1', '5', '9'], ['1', '6', '5', '2', '6', '7', '1', '8', '6', '6', '2', '2', '6', '7', '6'], ['5', '3', '8', '0', '3', '6', '3', '2', '1', '2', '3', '8', '1', '0', '2'], ['2', '2', '6', '8', '0', '6', '5', '9', '9', '3', '9', '5', '8', '6', '4'], ['4', '1', '0', '3', '9', '1', '0', '8', '3', '4', '0', '9', '0', '6', '8'], ['1', '7', '9', '6', '6', '1', '7', '2', '5', '9', '5', '2', '1', '1', '8'], ['7', '7', '4', '5', '2', '6', '4', '3', '4', '9', '1', '4', '3', '7', '2'], ['1', '3', '0', '5', '9', '2', '2', '6', '2', '4', '0', '7', '2', '6', '1'], ['0', '4', '4', '2', '6', '9', '5', '4', '3', '2', '6', '5', '6', '4', '0']],8,),
([['0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1'], ['0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1', '1']],6,),
([['u', 'V', 'l', 'L', 'o', 'i', 'o', 'L', 'S', 'D', 'S', 'u', 'Z', 'E', 's', 'q', 'P', 'X', 'd', 'v', 'W', 'J', 'p', 'r', 'e', 'j', 'F', 'l', 'Z', 'U', 'R', 'Y', 'M', 'C', 'S', 'C', 'Q', 'A'], ['w', 'p', 'O', 'x', 'a', 'v', 'Q', 'Z', 'n', 'Q', 'j', 't', 'N', ' ', 'n', 'u', 'y', 'x', 'E', 'r', 'd', 'e', 'g', 'e', 'H', 'Z', 'b', 's', 'A', 'R', 'x', 'h', 'v', 'X', 'x', 'K', 'P', 'M'], ['y', 'D', 'z', 't', 'g', 'L', 'B', 'N', 'i', 'g', 'E', 'l', 'P', 'q', 'j', 'm', 'c', 'X', 'b', 'X', 'Z', 'w', 's', 'Z', 'F', 'p', 'r', 'P', 'o', 'p', 'Y', 'R', 'w', 'n', 'y', 'n', 't', 'C'], ['b', 'v', 'G', 'K', 'J', 'u', 'w', 'q', 'x', 'b', 'O', 'Z', 'b', 'v', 'E', 'O', 'o', 'j', 'W', 'd', 'r', 'z', 'X', 'K', 'r', 'O', 'm', 'S', 'V', 'D', 'm', 'O', 'j', 'O', 'J', 'L', 'z', 'S'], ['Z', 'O', 'X', 'A', 'd', 'N', 'V', 't', 'f', 'z', 'q', 'H', 'O', 'Z', 'b', 'T', 'W', 'B', 'u', 'K', 'P', 'y', 'w', 'z', 'p', 'M', 'Z', 'P', 'l', 'y', 'J', 'G', 'i', 'C', 'r', 'y', 's', 'v'], ['k', 'R', 'i', 'z', 'A', 'l', 'J', 'X', 'C', 'i', 'P', 'A', 'y', 'y', 'a', 'E', 'V', 's', 'a', 'P', 'r', 'Y', 'D', 'n', 'o', 'w', 'M', ' ', 'W', 'm', 'W', 'H', 'a', 'v', 'j', 'g', 'Y', 'm'], ['M', 'y', 'N', 'A', 'R', 'u', 'e', 'N', 'H', 'a', 's', 'E', 'Q', 'b', 'd', 'E', 's', 'X', 'f', 'G', 'N', 'x', 'h', 'i', 'u', 'U', 'M', 'U', 's', 'u', 'N', 'f', 'u', 'o', 'C', 's', 'S', 'P'], ['h', 'C', 'v', 'L', 'H', 'h', 'Y', 'Y', 'F', 'S', 'd', 'Q', 'h', 'V', 'V', 'U', 'g', 'C', 's', 'X', 'E', 't', 'e', 'M', 'F', 'w', 'U', 'e', 'C', 'J', 'Y', 'R', 'o', 'a', 'W', 'L', 'k', 'K'], ['k', 'H', 'J', 'T', 's', 'F', 'y', 'C', 'O', 'J', 'O', 'B', 'm', 'B', 'e', 'G', 'l', 'g', 'y', 'J', 'y', 'u', 'F', 'E', 'B', ' ', 'B', 'Z', 'a', 'e', 'v', 'u', 'U', 'J', 'l', 'C', 'k', 'v'], ['d', 'y', 'V', 'Z', 't', 'X', 'n', 'v', 'O', 's', 'E', 'L', 'Z', 'x', 'x', 'p', 'w', 'W', 'S', 'n', 'G', 'y', 'q', 'o', 'B', 'X', 'f', 'r', 'n', 'T', 'y', 'p', 'J', 'j', 'I', 'w', 'r', 's'], ['h', 'y', 'p', 'j', 'r', 'D', 'j', 'H', 't', 'X', 'q', 'K', 'N', 'j', 'h', 
'v', 'K', 'r', 'j', 'J', 'A', 'u', 'D', 'f', 'J', 'n', 'q', 'w', 'P', 'w', 'i', 's', 'G', 's', 't', 'D', 'r', 'A'], ['f', 'I', 'v', 'M', 'x', 'K', 'O', 'i', 'p', 'y', 'o', 'Z', 'Y', 's', 'V', 'f', 'i', 'V', 'x', 'K', 'p', 'a', 'L', 'V', 'r', 'B', 'v', 'd', 'M', 'e', 'X', 'h', 'F', 'S', 'p', 'Z', 'J', 'I'], ['H', 'V', 'a', 'a', 'i', 'k', 'D', 'e', 'Z', 'i', 'h', 'v', 'A', 'G', 'N', 'Q', 'r', 'e', 'A', 'q', 'n', 'a', 'z', 'N', 'b', 'y', 'R', 'z', 'c', 'I', 'A', 'h', 'z', 'o', 'F', 'w', 'p', 'h'], ['X', 'z', 'K', 'b', 'z', 'E', 'u', 'E', 'h', 'L', 'X', 'K', 'Q', 'r', 'f', 'Z', 'k', 'p', 'S', 'b', 'l', 'N', 'M', 'u', 'f', 'z', 'p', 'f', 'Q', 'U', 'q', 'g', 'F', 'K', 'D', 'Q', 'H', 'K'], ['S', 'U', 'o', 'u', 'z', 'G', 'q', 'w', 'N', 'B', 'c', 'u', 'k', 'n', 'v', 'S', 'O', 'Z', 'I', 'F', 'T', 'Z', 'D', 'g', 'w', 'K', 'G', 'C', 'B', 'M', 'e', 'W', 'r', 'v', 'l', 't', 't', 'u'], ['P', 'e', 'm', 'H', 'W', 'b', 's', 'C', 'j', 'U', 'E', 'a', 'J', 'o', 'G', ' ', 'H', 'T', 'f', 'j', 'N', 'N', 'E', 'u', 'W', 'O', 'X', 'e', 'm', 'w', ' ', 'f', 'U', 'Y', 'N', 'X', 'I', 'j'], [' ', 'v', 'q', 'O', 'd', 'p', 'd', 'Q', 'N', 'A', 'v', 'u', 'o', 'q', ' ', 'S', 'H', 'b', 'M', 'J', 'b', 'G', 'L', 'N', 'w', 'r', 'G', 'Q', 'E', 'R', 'y', 'a', 'k', 'S', 'W', 'I', 'P', 'd'], ['N', 'z', 'F', 'X', 'x', 'J', 'q', 'G', 'Z', 'Z', 'E', ' ', 'q', 'M', 'L', 'B', 'y', 'k', 'h', 'R', 'e', 'R', 'N', 'p', 'D', 'K', 'n', 'g', 'E', 'w', 'P', 'v', 'J', 'P', ' ', 'q', 'N', 's'], ['u', 'Q', 'F', 'j', 'r', 'I', 'X', 'C', 'E', 'R', 'R', 'E', 'D', 'p', 'n', 'a', 'X', 'Q', 'J', 'F', 'F', 'x', 's', 'P', 'o', 'a', 't', 'f', 'S', 'n', 'P', 'S', 'k', 's', 'j', 'M', 'L', 'l'], ['F', ' ', 'n', 'P', 'P', 'N', 'D', ' ', 'N', 'W', 'G', 'm', 'p', 'P', 'R', 'L', 'b', 'c', 'q', 'O', 'k', 'Y', 'p', 'I', 'b', 'P', 'Y', 'Y', 'F', 'c', 'p', 'W', 'e', 'R', 'k', 'j', 'V', 'h'], ['Q', 'J', 'g', 'D', 'S', 'U', 'm', 'z', 'M', 'n', 'a', 'V', 'q', 'P', 'X', 'w', 's', 'v', 'J', 'J', 'h', 'n', 'J', 'd', 'Z', 'M', 'v', 'M', 'h', 'Q', ' ', 
'W', 'V', 's', 'O', 'A', 'x', 'j'], ['N', 'i', 'm', 'F', 'H', 'C', ' ', 'x', ' ', 't', 'g', 'q', 'j', 'd', 'n', 'g', 'l', 'U', 'k', 'U', 'q', 'h', 'A', 'c', 'u', 'o', 'U', 'z', 'D', 'N', 'p', 'R', 'K', 'k', 'T', 'i', 'D', 'i'], ['P', 'r', 'W', 'S', 's', 'U', 'k', 'l', 'e', 's', 'W', 'd', 'Y', 'q', 'p', 'Q', 'z', 'F', 'Z', 's', 'x', 'h', 'J', 'q', 'B', 'F', 'R', 'm', 'l', 'f', 'H', 'U', 'd', 'V', 'o', 'b', 't', 'B'], ['R', 'q', 'm', 'q', 'h', 'q', 'i', 'P', 'N', 'O', 'q', 'i', 'V', 'O', 'n', 'K', 'J', 'd', 'E', 'b', 'V', 'O', 'u', 'S', 'l', 'u', 'A', 'k', 'd', 'r', 'x', 'g', 'y', 'U', 'A', 'q', 'p', 'd'], ['r', 'h', 'h', 'L', 'j', 'd', 'b', 'o', 'v', 'D', 'd', 'M', 'f', 'y', 'Q', 'V', ' ', 'j', 'a', 'T', 'X', 'a', 't', 'I', 'Z', 'A', 'P', 'l', 'Y', 'j', 'c', 'A', 'A', 'e', 'r', 'H', 'u', 'f'], ['a', 'Y', 'J', 'J', 'k', 'L', 'x', 'l', 'O', 'n', 'J', 'I', 'l', 'x', 'V', 'S', 'S', 'l', 'D', 'E', 'm', 'd', ' ', 'j', 'Q', 'L', 't', 'c', 'o', 'D', 'z', 'A', 'x', 'u', 'F', 'E', 'v', 'a'], ['o', 'K', 'F', 'V', 'L', 'G', 't', 'A', 'd', 'b', 'P', 'F', 'K', 'N', 'J', 'e', 'B', 'T', 'H', 'n', 'D', 'b', 'm', 'T', 'L', 'S', 'n', 'D', 'b', 's', 'I', 't', 'O', 'a', 'm', 'a', 'A', 'n'], ['L', 'o', 'z', 'L', 'a', 'd', 'T', 'D', 'd', 'S', 'D', 'a', 'm', 'z', 'y', 'y', 'A', 'j', 'v', 'H', 'F', 't', 'A', 'f', 'G', 'E', ' ', 'x', ' ', 'm', 'L', 'I', 'O', 'Z', 'C', 'y', 'X', 'x'], [' ', 'I', 'i', 's', 'E', 'N', 'm', 'k', 'l', 'n', 's', 's', 'P', 'M', 'x', 'i', 'I', 'K', 'k', 'm', 'k', 'X', 'n', 'W', 'k', 'F', 'D', 'c', 'l', 'd', 'n', 'o', 'H', 'T', 'B', 'g', 'S', 'v'], ['g', 'p', 'd', 'A', 'Y', 'b', 'L', 'P', 'v', 'j', 'O', 'C', 's', 'g', 'J', 'm', 'P', 'd', 'H', 'c', 'h', 'U', 'P', 'J', 'h', 'c', 'f', 'W', 'l', 'K', 'F', 'T', 's', 'Z', 'n', 'v', ' ', 'p'], ['O', 'H', 'J', 'y', 'B', 'c', 'M', 'Q', 'F', 'k', 'S', 'o', 'b', 'M', 'c', 'i', 'K', 'l', 'a', 'Y', 'v', 'O', 'U', 'R', 'B', 'o', 'H', 'g', 'o', ' ', 'H', 'l', 'g', 'e', 'L', 'x', 'M', 'z'], ['q', 'u', 'A', 'O', 'u', 'f', 'r', 'U', 
'F', 'g', 'f', 'g', 'R', 'E', 'W', 'H', 'n', 'e', 'N', 'Z', 'y', 'M', 'j', 'L', 'T', 'b', 'v', 'N', 'u', 'X', 'E', 'y', 'g', 'Y', ' ', 'n', 'T', 'r'], ['k', 'n', 'F', 'B', 'X', 't', 'j', 'a', 'b', 'I', 'C', 'O', 'R', 'h', 'c', 'C', 'F', 'E', 'l', 'Y', 's', 'D', 'p', 'j', 'J', ' ', 'y', 'u', 'x', 'q', ' ', 'P', 'J', 'P', 't', 'g', 'X', 'j'], ['M', 'u', 'Q', 'x', 'r', 'n', 'U', 'w', 'w', ' ', 'H', 'P', ' ', 'V', 'X', 'Y', 't', 'Z', 'F', 'H', 'X', 'N', 'y', 'E', 'j', 'I', 'Q', 'P', ' ', 'y', 'e', 'I', 'o', 'b', 'j', 'E', 'p', 'G'], ['n', 'd', 'T', 'f', 'a', 'D', 's', 'i', 'b', 'm', 'K', 'h', 'c', 'G', 'I', 'p', 'd', 'x', 'I', 'G', 'B', 'q', 'k', 'A', 'B', 'M', 'g', 'S', 't', 'K', 'b', 'm', 'm', 'u', 'k', ' ', 'U', 'Z'], ['C', 'v', 'L', 'k', 'x', 'L', ' ', 'm', 'x', 'P', 'C', 'X', 'n', 'w', 'd', 'E', 'O', 'D', 'Q', 'i', 'A', 'p', 'K', 'r', 'n', 'Y', 'T', 'v', 'K', 'O', 'M', 'w', 'p', 'P', 'R', 'X', 'I', 'g'], ['l', 'M', 'd', 'j', 'M', 'd', 'y', 'x', ' ', 'o', 'E', 't', 'X', 'w', 'c', 'H', 'r', 'q', 'd', 'Q', 'I', 'g', 'T', 'F', 't', 'q', 'A', 'e', 'm', 'y', 'G', 't', 'v', 'G', 'r', 'x', 'g', 'H'], ['T', 'f', 'N', 'W', 'K', 'T', 'b', 'O', 'J', 'B', 'a', 'd', 'l', 'y', 's', 's', 'W', 'D', 't', 'z', 'D', 'c', 'k', 'l', 'e', 'Q', 'A', 'J', 'J', 'k', 'M', 'G', 'F', 'S', 'C', 'N', 'x', 'X']],32,)
]
# Compare the candidate implementation (f_filled) against the reference
# (f_gold) on every recorded parameter set and report the agreement count.
# Fixes: stripped the dataset columns ("| true | true |") that were fused onto
# the print line, and replaced the manual counter loop (with its unused index)
# by an idiomatic generator-sum.
n_success = sum(1 for parameters_set in param
                if f_filled(*parameters_set) == f_gold(*parameters_set))
print("#Results: %i, %i" % (n_success, len(param)))
1c328c47f90c2c8cccd9d610252edcd92f1f5310 | 837 | py | Python | exercise/class_game1.py | LeeBeral/python | 9f0d360d69ee5245e3ef13a9dc9fc666374587a4 | [
"MIT"
] | null | null | null | exercise/class_game1.py | LeeBeral/python | 9f0d360d69ee5245e3ef13a9dc9fc666374587a4 | [
"MIT"
] | null | null | null | exercise/class_game1.py | LeeBeral/python | 9f0d360d69ee5245e3ef13a9dc9fc666374587a4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import random
def enemy(ancestry, gear):
    """Create a foe record ``[name, weapon, hp, ac]`` with random hp/ac in [0, 20)."""
    hit_points = random.randrange(0, 20)
    armor_class = random.randrange(0, 20)
    return [ancestry, gear, hit_points, armor_class]
def fight(tgt):
    """Roll one attack against ``tgt`` (``[name, weapon, hp, ac]``).

    A roll strictly greater than the foe's armor class hits and subtracts the
    roll from ``tgt[2]`` (hp) in place; otherwise the attack misses.
    """
    print("You take a swing at the " + tgt[0] + ".")
    roll = random.randrange(0, 20)
    if roll <= tgt[3]:
        print("You missed.")
    else:
        print("You hit the " + tgt[0] + " for " + str(roll) + " damage!")
        tgt[2] -= roll
# Interactive combat loop: spawn a foe, attack on "a" + RETURN.
# Fixes: stripped the dataset columns fused onto the last line, and added the
# missing `break` -- previously the loop kept running (and kept printing
# negative HP) after the foe was already dead.
foe = enemy("troll", "great axe")
print("You meet a " + foe[0] + " wielding a " + foe[1])
print("Type the a key and then RETURN to attack.")
while True:
    action = input()
    if action.lower() == "a":
        fight(foe)
        if foe[2] < 1:
            print("You killed your foe!")
            break  # end the game once the foe is dead
        else:
            print("The " + foe[0] + " has " + str(foe[2]) + " HP remaining")
import random
def enemy(ancestry,gear):
    """Create a foe record ``[name, weapon, hp, ac]`` with random hp/ac in [0, 20)."""
    enemy=ancestry
    weapon=gear
    hp=random.randrange(0,20)  # hit points
    ac=random.randrange(0,20)  # armor class an attack roll must beat
    return [enemy,weapon,hp,ac]
def fight(tgt):
    """Roll one attack against ``tgt`` (``[name, weapon, hp, ac]``).

    A roll strictly greater than the armor class hits and subtracts the roll
    from ``tgt[2]`` (hp) in place; otherwise the attack misses.
    """
    print("You take a swing at the " + tgt[0] + ".")
    hit=random.randrange(0,20)
    if hit > tgt[3]:
        print("You hit the " + tgt[0] + " for " + str(hit) + " damage!")
        tgt[2] = tgt[2] - hit
    else:
        print("You missed.")
# Interactive combat loop: spawn a foe, attack on "a" + RETURN.
# Fixes: stripped the dataset columns fused onto the last line, and added the
# missing `break` -- previously the loop kept running after the foe died.
foe = enemy("troll", "great axe")
print("You meet a " + foe[0] + " wielding a " + foe[1])
print("Type the a key and then RETURN to attack.")
while True:
    action = input()
    if action.lower() == "a":
        fight(foe)
        if foe[2] < 1:
            print("You killed your foe!")
            break  # end the game once the foe is dead
        else:
            print("The " + foe[0] + " has " + str(foe[2]) + " HP remaining")
1c328ca537430d8d2940418bb3fd2f878a054306 | 239 | py | Python | Exercicios/007-media-aritmetica.py | fernandaveiga/Curso-em-Video | ea8956fce6e16cbaa8a83106ffa45071cbdc617e | [
"MIT"
] | null | null | null | Exercicios/007-media-aritmetica.py | fernandaveiga/Curso-em-Video | ea8956fce6e16cbaa8a83106ffa45071cbdc617e | [
"MIT"
] | null | null | null | Exercicios/007-media-aritmetica.py | fernandaveiga/Curso-em-Video | ea8956fce6e16cbaa8a83106ffa45071cbdc617e | [
"MIT"
] | null | null | null | # 007 - Desenvolva um programa que leia as duas notas de um aluno e calcule e mostre a sua média:
# Read two grades from the user and print their arithmetic mean.
# Fix: stripped the dataset columns that were fused onto the print line.
# (User-facing Portuguese strings left exactly as authored.)
n1 = float(input('Digite a primeira nota: '))  # first grade
n2 = float(input('Digite a segunda nota: '))  # second grade
print(f'A médias entre as notas é: {(n1+n2)/2}')
# Read two grades from the user and print their arithmetic mean.
# Fix: stripped the dataset columns ("| true | true |") fused onto the print line.
n1 = float(input('Digite a primeira nota: '))  # first grade
n2 = float(input('Digite a segunda nota: '))  # second grade
print(f'A médias entre as notas é: {(n1+n2)/2}')
1c328d7d9526bcf4dbafb0a6f872081e9e52f7ef | 1,296 | py | Python | Labs/1/NamingShape.py | ArvinZJC/UofG_PGT_PSD_Python | d90e9bb0b53b14c6b1d7e657c3c61e2792e0d9c4 | [
"MIT"
] | null | null | null | Labs/1/NamingShape.py | ArvinZJC/UofG_PGT_PSD_Python | d90e9bb0b53b14c6b1d7e657c3c61e2792e0d9c4 | [
"MIT"
] | null | null | null | Labs/1/NamingShape.py | ArvinZJC/UofG_PGT_PSD_Python | d90e9bb0b53b14c6b1d7e657c3c61e2792e0d9c4 | [
"MIT"
] | null | null | null | '''
Description: exercise: naming the shape
Version: 1.0.0.20210113
Author: Arvin Zhao
Date: 2021-01-13 11:04:45
Last Editors: Arvin Zhao
LastEditTime: 2021-01-13 11:26:54
'''
def name_shape(slide_num: str, out_of_range_error_msg: str) -> str:
    '''
    Name a shape according to the number of slides.

    Parameters
    ----------
    slide_num : the number of slides, as a string ('3' through '10')
    out_of_range_error_msg : error message returned for any other input

    Returns
    -------
    shape : the name of a shape, or the error message
    '''
    polygon_names = ('Triangle', 'Square', 'Pentagon', 'Hexagon',
                     'Heptagon', 'Octagon', 'Enneagon', 'Decagon')
    # Build the '3'..'10' lookup from the ordered name tuple.
    shape_by_count = {str(count): name
                      for count, name in enumerate(polygon_names, start=3)}
    # Any other input (including non-numeric text) falls back to the error message.
    return shape_by_count.get(slide_num, out_of_range_error_msg)
if __name__ == '__main__':
    # Prompt repeatedly until the user supplies a valid slide count (3-10).
    # Fix: stripped the dataset columns that were fused onto the `break` line.
    out_of_range_error_msg = 'Error! Please enter an integer no less than 3 and no more than 10.'
    while True:
        shape = name_shape(input('Enter the number of slides to name a shape: ').strip(), out_of_range_error_msg)
        print(shape)
        if shape != out_of_range_error_msg:
            break
def name_shape(slide_num: str, out_of_range_error_msg: str) -> str:
    """Return the polygon name for a slide count of '3' through '10'; any
    other input (including non-numeric text) yields ``out_of_range_error_msg``."""
    valid_counts = ('3', '4', '5', '6', '7', '8', '9', '10')
    if slide_num not in valid_counts:
        return out_of_range_error_msg
    names = ('Triangle', 'Square', 'Pentagon', 'Hexagon',
             'Heptagon', 'Octagon', 'Enneagon', 'Decagon')
    return names[valid_counts.index(slide_num)]
if __name__ == '__main__':
    # Prompt repeatedly until the user supplies a valid slide count (3-10).
    # Fix: stripped the dataset columns ("| true | true |") fused onto `break`.
    out_of_range_error_msg = 'Error! Please enter an integer no less than 3 and no more than 10.'
    while True:
        shape = name_shape(input('Enter the number of slides to name a shape: ').strip(), out_of_range_error_msg)
        print(shape)
        if shape != out_of_range_error_msg:
            break
1c328d929a7b5fe0490e67103b0ee806499ee37d | 8,945 | py | Python | module/dun_datasets/additional_gap_loader.py | a07458666/UncertaintyFlow | cef2512901d4e27bb22fc3997522cd47c03b569c | [
"MIT"
] | null | null | null | module/dun_datasets/additional_gap_loader.py | a07458666/UncertaintyFlow | cef2512901d4e27bb22fc3997522cd47c03b569c | [
"MIT"
] | null | null | null | module/dun_datasets/additional_gap_loader.py | a07458666/UncertaintyFlow | cef2512901d4e27bb22fc3997522cd47c03b569c | [
"MIT"
] | null | null | null | import zipfile
import pickle
try:
import urllib
from urllib import urlretrieve
except Exception:
import urllib.request as urllib
from os import path
import numpy as np
from numpy.random import uniform, randn
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from .utils import mkdir
def load_axis(base_dir='./dun_datasets/data/'):
    """Load the 'axis' gap-classification split as z-scored float32 (X, Y).

    Downloads and unpacks the gap_classification archive on first use
    (``urllib`` is ``urllib.request`` on Python 3 via the import shim at the
    top of the module).  Targets are expanded to a column vector and both
    arrays are standardised with their own mean/std.
    """
    if not path.exists(base_dir + '/gap_classification'):
        urllib.urlretrieve('https://javierantoran.github.io/assets/datasets/gap_classification.zip',
                           filename=base_dir + '/gap_classification.zip')
        with zipfile.ZipFile(base_dir + '/gap_classification.zip', 'r') as zip_ref:
            zip_ref.extractall(base_dir)
    file1 = base_dir + '/gap_classification/axis.pkl'
    with open(file1, 'rb') as f:
        axis_tupple = pickle.load(f)  # assumes a (features, labels) tuple -- TODO confirm
    axis_x = axis_tupple[0].astype(np.float32)
    axis_y = axis_tupple[1].astype(np.float32)[:, np.newaxis]  # labels -> (N, 1)
    x_means, x_stds = axis_x.mean(axis=0), axis_x.std(axis=0)
    y_means, y_stds = axis_y.mean(axis=0), axis_y.std(axis=0)
    X = ((axis_x - x_means) / x_stds).astype(np.float32)
    Y = ((axis_y - y_means) / y_stds).astype(np.float32)
    return X, Y
def load_origin(base_dir='./dun_datasets/data/'):
    """Load the 'origin' gap-classification split as z-scored float32 (X, Y).

    Same download/unpack/standardise pipeline as :func:`load_axis`, reading
    ``origin.pkl`` instead of ``axis.pkl``.
    """
    if not path.exists(base_dir + '/gap_classification'):
        urllib.urlretrieve('https://javierantoran.github.io/assets/datasets/gap_classification.zip',
                           filename=base_dir + '/gap_classification.zip')
        with zipfile.ZipFile(base_dir + '/gap_classification.zip', 'r') as zip_ref:
            zip_ref.extractall(base_dir)
    file2 = base_dir + '/gap_classification/origin.pkl'
    with open(file2, 'rb') as f:
        origin_tupple = pickle.load(f)  # assumes a (features, labels) tuple -- TODO confirm
    origin_x = origin_tupple[0].astype(np.float32)
    origin_y = origin_tupple[1].astype(np.float32)[:, np.newaxis]  # labels -> (N, 1)
    x_means, x_stds = origin_x.mean(axis=0), origin_x.std(axis=0)
    y_means, y_stds = origin_y.mean(axis=0), origin_y.std(axis=0)
    X = ((origin_x - x_means) / x_stds).astype(np.float32)
    Y = ((origin_y - y_means) / y_stds).astype(np.float32)
    return X, Y
def load_agw_1d(base_dir='./dun_datasets/data/', get_feats=False):
    """Load the AGW (drbayes) synthetic 1-D regression data, z-scored float32.

    Downloads ``data.npy`` on first use.  With ``get_feats=True`` returns the
    standardised quadratic feature expansion ``[x/2, (x/2)^2]`` instead of the
    raw inputs; either way targets come back as a (N, 1) column.
    """
    if not path.exists(base_dir + '/agw_data'):
        mkdir(base_dir + '/agw_data')
        urllib.urlretrieve('https://raw.githubusercontent.com/wjmaddox/drbayes/master/experiments/synthetic_regression/ckpts/data.npy',
                           filename=base_dir + '/agw_data/data.npy')
    def features(x):
        # Quadratic basis expansion of the (scaled) inputs.
        return np.hstack([x[:, None] / 2.0, (x[:, None] / 2.0) ** 2])
    data = np.load(base_dir + '/agw_data/data.npy')  # columns: x, y
    x, y = data[:, 0], data[:, 1]
    y = y[:, None]
    f = features(x)
    x_means, x_stds = x.mean(axis=0), x.std(axis=0)
    y_means, y_stds = y.mean(axis=0), y.std(axis=0)
    f_means, f_stds = f.mean(axis=0), f.std(axis=0)
    X = ((x - x_means) / x_stds).astype(np.float32)
    Y = ((y - y_means) / y_stds).astype(np.float32)
    F = ((f - f_means) / f_stds).astype(np.float32)
    if get_feats:
        return F, Y
    return X[:, None], Y
def load_andrew_1d(base_dir='./dun_datasets/data/'):
    """Load the '1d_cosine_separated' pickle as z-scored float32 (X, Y) columns.

    NOTE(review): when the directory is missing this only prints a warning and
    still attempts the ``open`` below, which will raise -- confirm intended.
    """
    if not path.exists(base_dir + '/andrew_1d'):
        print('base_dir does not point to data directory')
    with open(base_dir + '/andrew_1d/1d_cosine_separated.pkl', 'rb') as f:
        data = pickle.load(f)  # assumes an (N, 2) array of (x, y) pairs -- TODO confirm
    x = data[:, 0]
    x = x[:, None]
    y = data[:, 1]
    y = y[:, None]
    x_means, x_stds = x.mean(axis=0), x.std(axis=0)
    y_means, y_stds = y.mean(axis=0), y.std(axis=0)
    X = ((x - x_means) / x_stds).astype(np.float32)
    Y = ((y - y_means) / y_stds).astype(np.float32)
    return X, Y
def load_matern_1d(base_dir='./dun_datasets/data/'):
    """Load (or generate and cache) a 1-D Matern-3/2 GP sample with a gap.

    On first call, draws 400 inputs in two disjoint ranges, samples targets
    from a Matern32 GP (requires GPy, imported lazily), standardises, saves to
    ``matern_1d.npy`` and returns the freshly generated arrays; later calls
    read the cached file.
    """
    if not path.exists(base_dir + '/matern_data/'):
        mkdir(base_dir + '/matern_data/')
        def gen_1d_matern_data():
            from GPy.kern.src.sde_matern import Matern32  # lazy: GPy only needed on cache miss
            np.random.seed(4)  # fixed seed so the cached dataset is reproducible
            lengthscale = 0.5
            variance = 1.0
            sig_noise = 0.15
            n1_points = 200
            x1 = np.random.uniform(-2, -1, n1_points)[:, None]
            n2_points = 200
            x2 = np.random.uniform(0.5, 2.5, n2_points)[:, None]
            no_points = n1_points + n2_points
            x = np.concatenate([x1, x2], axis=0)
            x.sort(axis=0)
            k = Matern32(input_dim=1, variance=variance, lengthscale=lengthscale)
            # GP covariance plus homoscedastic observation noise on the diagonal.
            C = k.K(x, x) + np.eye(no_points) * sig_noise ** 2
            y = np.random.multivariate_normal(np.zeros((no_points)), C)[:, None]
            x_means, x_stds = x.mean(axis=0), x.std(axis=0)
            y_means, y_stds = y.mean(axis=0), y.std(axis=0)
            X = ((x - x_means) / x_stds).astype(np.float32)
            Y = ((y - y_means) / y_stds).astype(np.float32)
            return X, Y
        x, y = gen_1d_matern_data()
        xy = np.concatenate([x, y], axis=1)
        np.save(base_dir + '/matern_data/matern_1d.npy', xy)
        return x, y
    else:
        xy = np.load(base_dir + '/matern_data/matern_1d.npy')
        x = xy[:, 0]
        x = x[:, None]
        y = xy[:, 1]
        y = y[:, None]
        return x, y
def load_my_1d(base_dir='./dun_datasets/data/'):
    """Load (or generate and cache) a synthetic 1-D regression set with gaps.

    Returns ``(X_train, y_homo_train, X_test, y_homo_test)`` -- the
    homoscedastic-noise variant, standardised, as float32 columns.  On first
    call the data is generated (seed 0), saved to ``my_1d_data.npy`` and
    returned; later calls read the cached file.
    """
    if not path.exists(base_dir + '/my_1d_data/'):
        mkdir(base_dir + '/my_1d_data/')
        def gen_my_1d(hetero=False):
            np.random.seed(0)  # fixed seed for a reproducible cached dataset
            Npoints = 1002
            # Inputs drawn from three disjoint intervals to create gaps.
            x0 = uniform(-1, 0, size=int(Npoints / 3))
            x1 = uniform(1.7, 2.5, size=int(Npoints / 3))
            x2 = uniform(4, 5, size=int(Npoints / 3))
            x = np.concatenate([x0, x1, x2])
            def function(x):
                return x - 0.1 * x ** 2 + np.cos(np.pi * x / 2)
            y = function(x)
            homo_noise_std = 0.25
            homo_noise = randn(*x.shape) * homo_noise_std
            y_homo = y + homo_noise
            # Heteroscedastic variant: noise grows with |x|.
            hetero_noise_std = np.abs(0.1 * np.abs(x) ** 1.5)
            hetero_noise = randn(*x.shape) * hetero_noise_std
            y_hetero = y + hetero_noise
            X = x[:, np.newaxis]
            y_joint = np.stack([y_homo, y_hetero], axis=1)
            X_train, X_test, y_joint_train, y_joint_test = train_test_split(X, y_joint, test_size=0.5, random_state=42)
            y_hetero_train, y_hetero_test = y_joint_train[:, 1, np.newaxis], y_joint_test[:, 1, np.newaxis]
            y_homo_train, y_homo_test = y_joint_train[:, 0, np.newaxis], y_joint_test[:, 0, np.newaxis]
            x_means, x_stds = X_train.mean(axis=0), X_train.std(axis=0)
            y_hetero_means, y_hetero_stds = y_hetero_train.mean(axis=0), y_hetero_train.std(axis=0)
            # NOTE(review): homo stats are taken from the *test* split while the
            # hetero stats use the train split -- looks inconsistent; confirm upstream.
            y_homo_means, y_homo_stds = y_homo_test.mean(axis=0), y_homo_test.std(axis=0)
            X_train = ((X_train - x_means) / x_stds).astype(np.float32)
            X_test = ((X_test - x_means) / x_stds).astype(np.float32)
            y_hetero_train = ((y_hetero_train - y_hetero_means) / y_hetero_stds).astype(np.float32)
            y_hetero_test = ((y_hetero_test - y_hetero_means) / y_hetero_stds).astype(np.float32)
            y_homo_train = ((y_homo_train - y_homo_means) / y_homo_stds).astype(np.float32)
            y_homo_test = ((y_homo_test - y_homo_means) / y_homo_stds).astype(np.float32)
            if hetero:
                return X_train, y_hetero_train, X_test, y_hetero_test
            else:
                return X_train, y_homo_train, X_test, y_homo_test
        X_train, y_homo_train, X_test, y_homo_test = gen_my_1d()
        xy = np.concatenate([X_train, y_homo_train, X_test, y_homo_test], axis=1)
        np.save(base_dir + '/my_1d_data/my_1d_data.npy', xy)
        return X_train, y_homo_train, X_test, y_homo_test
    xy = np.load(base_dir + '/my_1d_data/my_1d_data.npy')
    X_train = xy[:, 0, None].astype(np.float32)
    y_homo_train = xy[:, 1, None].astype(np.float32)
    X_test = xy[:, 2, None].astype(np.float32)
    y_homo_test = xy[:, 3, None].astype(np.float32)
    return X_train, y_homo_train, X_test, y_homo_test
def load_wiggle_1d():
    """Generate the deterministic 1-D 'wiggle' regression set (seed 0).

    Draws 300 inputs ~ N(5, 2.5^2), evaluates a wiggly target function, adds
    homoscedastic Gaussian noise (std 0.25), and returns z-scored (X, Y) as
    float32 columns of shape (300, 1).
    """
    np.random.seed(0)
    n_points = 300
    xs = randn(n_points) * 2.5 + 5
    ys = np.sin(np.pi * xs) + 0.2 * np.cos(np.pi * xs * 4) - 0.3 * xs
    ys = ys + randn(*xs.shape) * 0.25  # homoscedastic observation noise
    xs, ys = xs[:, None], ys[:, None]
    X = ((xs - xs.mean(axis=0)) / xs.std(axis=0)).astype(np.float32)
    Y = ((ys - ys.mean(axis=0)) / ys.std(axis=0)).astype(np.float32)
    return X, Y
| 35.496032 | 136 | 0.583007 | import zipfile
import pickle
try:
import urllib
from urllib import urlretrieve
except Exception:
import urllib.request as urllib
from os import path
import numpy as np
from numpy.random import uniform, randn
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from .utils import mkdir
def load_axis(base_dir='./dun_datasets/data/'):
    """Load the 'axis' gap-classification split as z-scored float32 (X, Y)."""
    if not path.exists(base_dir + '/gap_classification'):
        # Fetch and unpack the archive on first use.
        urllib.urlretrieve('https://javierantoran.github.io/assets/datasets/gap_classification.zip',
                           filename=base_dir + '/gap_classification.zip')
        with zipfile.ZipFile(base_dir + '/gap_classification.zip', 'r') as zip_ref:
            zip_ref.extractall(base_dir)
    file1 = base_dir + '/gap_classification/axis.pkl'
    with open(file1, 'rb') as f:
        axis_tupple = pickle.load(f)
    axis_x = axis_tupple[0].astype(np.float32)
    axis_y = axis_tupple[1].astype(np.float32)[:, np.newaxis]  # labels -> (N, 1)
    x_means, x_stds = axis_x.mean(axis=0), axis_x.std(axis=0)
    y_means, y_stds = axis_y.mean(axis=0), axis_y.std(axis=0)
    X = ((axis_x - x_means) / x_stds).astype(np.float32)
    Y = ((axis_y - y_means) / y_stds).astype(np.float32)
    return X, Y
def load_origin(base_dir='./dun_datasets/data/'):
    """Load the 'origin' gap-classification split as z-scored float32 (X, Y)."""
    if not path.exists(base_dir + '/gap_classification'):
        # Fetch and unpack the archive on first use.
        urllib.urlretrieve('https://javierantoran.github.io/assets/datasets/gap_classification.zip',
                           filename=base_dir + '/gap_classification.zip')
        with zipfile.ZipFile(base_dir + '/gap_classification.zip', 'r') as zip_ref:
            zip_ref.extractall(base_dir)
    file2 = base_dir + '/gap_classification/origin.pkl'
    with open(file2, 'rb') as f:
        origin_tupple = pickle.load(f)
    origin_x = origin_tupple[0].astype(np.float32)
    origin_y = origin_tupple[1].astype(np.float32)[:, np.newaxis]  # labels -> (N, 1)
    x_means, x_stds = origin_x.mean(axis=0), origin_x.std(axis=0)
    y_means, y_stds = origin_y.mean(axis=0), origin_y.std(axis=0)
    X = ((origin_x - x_means) / x_stds).astype(np.float32)
    Y = ((origin_y - y_means) / y_stds).astype(np.float32)
    return X, Y
def load_agw_1d(base_dir='./dun_datasets/data/', get_feats=False):
    """Load the AGW synthetic 1-D regression data, z-scored float32; with
    ``get_feats=True`` return the quadratic feature expansion instead of x."""
    if not path.exists(base_dir + '/agw_data'):
        mkdir(base_dir + '/agw_data')
        urllib.urlretrieve('https://raw.githubusercontent.com/wjmaddox/drbayes/master/experiments/synthetic_regression/ckpts/data.npy',
                           filename=base_dir + '/agw_data/data.npy')
    def features(x):
        # Quadratic basis expansion of the (scaled) inputs.
        return np.hstack([x[:, None] / 2.0, (x[:, None] / 2.0) ** 2])
    data = np.load(base_dir + '/agw_data/data.npy')  # columns: x, y
    x, y = data[:, 0], data[:, 1]
    y = y[:, None]
    f = features(x)
    x_means, x_stds = x.mean(axis=0), x.std(axis=0)
    y_means, y_stds = y.mean(axis=0), y.std(axis=0)
    f_means, f_stds = f.mean(axis=0), f.std(axis=0)
    X = ((x - x_means) / x_stds).astype(np.float32)
    Y = ((y - y_means) / y_stds).astype(np.float32)
    F = ((f - f_means) / f_stds).astype(np.float32)
    if get_feats:
        return F, Y
    return X[:, None], Y
def load_andrew_1d(base_dir='./dun_datasets/data/'):
    """Load the '1d_cosine_separated' pickle as z-scored float32 (X, Y) columns.

    NOTE(review): a missing directory only prints a warning; the ``open``
    below will still fail in that case.
    """
    if not path.exists(base_dir + '/andrew_1d'):
        print('base_dir does not point to data directory')
    with open(base_dir + '/andrew_1d/1d_cosine_separated.pkl', 'rb') as f:
        data = pickle.load(f)
    x = data[:, 0]
    x = x[:, None]
    y = data[:, 1]
    y = y[:, None]
    x_means, x_stds = x.mean(axis=0), x.std(axis=0)
    y_means, y_stds = y.mean(axis=0), y.std(axis=0)
    X = ((x - x_means) / x_stds).astype(np.float32)
    Y = ((y - y_means) / y_stds).astype(np.float32)
    return X, Y
def load_matern_1d(base_dir='./dun_datasets/data/'):
    """Load (or generate with GPy and cache) a 1-D Matern-3/2 GP sample."""
    if not path.exists(base_dir + '/matern_data/'):
        mkdir(base_dir + '/matern_data/')
        def gen_1d_matern_data():
            from GPy.kern.src.sde_matern import Matern32  # lazy: GPy only needed on cache miss
            np.random.seed(4)  # reproducible cached dataset
            lengthscale = 0.5
            variance = 1.0
            sig_noise = 0.15
            n1_points = 200
            x1 = np.random.uniform(-2, -1, n1_points)[:, None]
            n2_points = 200
            x2 = np.random.uniform(0.5, 2.5, n2_points)[:, None]
            no_points = n1_points + n2_points
            x = np.concatenate([x1, x2], axis=0)
            x.sort(axis=0)
            k = Matern32(input_dim=1, variance=variance, lengthscale=lengthscale)
            # GP covariance plus observation noise on the diagonal.
            C = k.K(x, x) + np.eye(no_points) * sig_noise ** 2
            y = np.random.multivariate_normal(np.zeros((no_points)), C)[:, None]
            x_means, x_stds = x.mean(axis=0), x.std(axis=0)
            y_means, y_stds = y.mean(axis=0), y.std(axis=0)
            X = ((x - x_means) / x_stds).astype(np.float32)
            Y = ((y - y_means) / y_stds).astype(np.float32)
            return X, Y
        x, y = gen_1d_matern_data()
        xy = np.concatenate([x, y], axis=1)
        np.save(base_dir + '/matern_data/matern_1d.npy', xy)
        return x, y
    else:
        xy = np.load(base_dir + '/matern_data/matern_1d.npy')
        x = xy[:, 0]
        x = x[:, None]
        y = xy[:, 1]
        y = y[:, None]
        return x, y
def load_my_1d(base_dir='./dun_datasets/data/'):
    """Load (or generate and cache) the synthetic gapped 1-D regression set;
    returns ``(X_train, y_homo_train, X_test, y_homo_test)`` as float32."""
    if not path.exists(base_dir + '/my_1d_data/'):
        mkdir(base_dir + '/my_1d_data/')
        def gen_my_1d(hetero=False):
            np.random.seed(0)  # reproducible cached dataset
            Npoints = 1002
            # Inputs drawn from three disjoint intervals to create gaps.
            x0 = uniform(-1, 0, size=int(Npoints / 3))
            x1 = uniform(1.7, 2.5, size=int(Npoints / 3))
            x2 = uniform(4, 5, size=int(Npoints / 3))
            x = np.concatenate([x0, x1, x2])
            def function(x):
                return x - 0.1 * x ** 2 + np.cos(np.pi * x / 2)
            y = function(x)
            homo_noise_std = 0.25
            homo_noise = randn(*x.shape) * homo_noise_std
            y_homo = y + homo_noise
            # Heteroscedastic variant: noise grows with |x|.
            hetero_noise_std = np.abs(0.1 * np.abs(x) ** 1.5)
            hetero_noise = randn(*x.shape) * hetero_noise_std
            y_hetero = y + hetero_noise
            X = x[:, np.newaxis]
            y_joint = np.stack([y_homo, y_hetero], axis=1)
            X_train, X_test, y_joint_train, y_joint_test = train_test_split(X, y_joint, test_size=0.5, random_state=42)
            y_hetero_train, y_hetero_test = y_joint_train[:, 1, np.newaxis], y_joint_test[:, 1, np.newaxis]
            y_homo_train, y_homo_test = y_joint_train[:, 0, np.newaxis], y_joint_test[:, 0, np.newaxis]
            x_means, x_stds = X_train.mean(axis=0), X_train.std(axis=0)
            y_hetero_means, y_hetero_stds = y_hetero_train.mean(axis=0), y_hetero_train.std(axis=0)
            # NOTE(review): homo stats use the *test* split, hetero stats the
            # train split -- looks inconsistent; confirm upstream.
            y_homo_means, y_homo_stds = y_homo_test.mean(axis=0), y_homo_test.std(axis=0)
            X_train = ((X_train - x_means) / x_stds).astype(np.float32)
            X_test = ((X_test - x_means) / x_stds).astype(np.float32)
            y_hetero_train = ((y_hetero_train - y_hetero_means) / y_hetero_stds).astype(np.float32)
            y_hetero_test = ((y_hetero_test - y_hetero_means) / y_hetero_stds).astype(np.float32)
            y_homo_train = ((y_homo_train - y_homo_means) / y_homo_stds).astype(np.float32)
            y_homo_test = ((y_homo_test - y_homo_means) / y_homo_stds).astype(np.float32)
            if hetero:
                return X_train, y_hetero_train, X_test, y_hetero_test
            else:
                return X_train, y_homo_train, X_test, y_homo_test
        X_train, y_homo_train, X_test, y_homo_test = gen_my_1d()
        xy = np.concatenate([X_train, y_homo_train, X_test, y_homo_test], axis=1)
        np.save(base_dir + '/my_1d_data/my_1d_data.npy', xy)
        return X_train, y_homo_train, X_test, y_homo_test
    xy = np.load(base_dir + '/my_1d_data/my_1d_data.npy')
    X_train = xy[:, 0, None].astype(np.float32)
    y_homo_train = xy[:, 1, None].astype(np.float32)
    X_test = xy[:, 2, None].astype(np.float32)
    y_homo_test = xy[:, 3, None].astype(np.float32)
    return X_train, y_homo_train, X_test, y_homo_test
def load_wiggle_1d():
    """Deterministic 1-D 'wiggle' regression set: 300 noisy samples, z-scored (seed 0)."""
    np.random.seed(0)
    sample_count = 300
    inputs = randn(sample_count) * 2.5 + 5
    clean = np.sin(np.pi * inputs) + 0.2 * np.cos(np.pi * inputs * 4) - 0.3 * inputs
    noisy = clean + 0.25 * randn(*inputs.shape)  # homoscedastic noise, std 0.25
    col_x = inputs.reshape(-1, 1)
    col_y = noisy.reshape(-1, 1)
    X = ((col_x - col_x.mean(axis=0)) / col_x.std(axis=0)).astype(np.float32)
    Y = ((col_y - col_y.mean(axis=0)) / col_y.std(axis=0)).astype(np.float32)
    return X, Y
| true | true |
1c328e4b13a641103c83fd1310687f1abef1d462 | 1,355 | py | Python | image/scripts/blank_image.py | Magnitus-/hypriot-kit | 07941b56411b1f9e7d45a457a3026456ecfc2652 | [
"MIT"
] | 1 | 2018-04-01T21:37:58.000Z | 2018-04-01T21:37:58.000Z | image/scripts/blank_image.py | Magnitus-/hypriot-kit | 07941b56411b1f9e7d45a457a3026456ecfc2652 | [
"MIT"
] | null | null | null | image/scripts/blank_image.py | Magnitus-/hypriot-kit | 07941b56411b1f9e7d45a457a3026456ecfc2652 | [
"MIT"
] | null | null | null | import glob, os
import container
from builder import BuilderBase
class BlankImage(BuilderBase):
    """Builder that produces a blank SD-card image artifact for a device."""

    repo = 'blank_image'
    artifact_pattern = "{device}-raw.img.zip"
    artifact_checksum_pattern = "{device}-raw.img.zip.sha256"

    def get_description(self):
        """Human-readable name of this builder."""
        return "Blank SD Image"

    def __init__(self, configs):
        blank_cfg = configs['blank_image']
        self.image = blank_cfg['image']
        self.repo = blank_cfg['repo']  # instance value shadows the class-level default
        self.branch = blank_cfg.get('branch')
        self.device = blank_cfg['device']

    def get_artifacts_names(self):
        """Return the image archive name and its checksum file name for the device."""
        substitutions = {"device": self.device}
        return [
            BlankImage.artifact_pattern.format(**substitutions),
            BlankImage.artifact_checksum_pattern.format(**substitutions),
        ]

    def build_artifacts(self):
        """Run the builder container (privileged, artifacts volume mounted)."""
        container_args = {
            "remove": True,
            "privileged": True,
            "volumes": {
                os.environ.get('HYPRIOT_ARTIFACTS_VOLUME'): {
                    "bind": "/workspace"
                }
            },
            "image": self.image
        }
        # Known devices get a device-specific build script; anything else runs
        # the container with no explicit command (same as the original logic).
        device_scripts = {
            "rpi": "/builder/rpi/build.sh",
            "odroid": "/builder/odroid/build.sh",
        }
        if self.device in device_scripts:
            container_args['command'] = device_scripts[self.device]
        container.run(container_args)
| 27.1 | 61 | 0.542435 | import glob, os
import container
from builder import BuilderBase
class BlankImage(BuilderBase):
    """Builder that produces a blank SD-card image artifact for a device."""
    repo = 'blank_image'
    artifact_pattern = "{device}-raw.img.zip"
    artifact_checksum_pattern = "{device}-raw.img.zip.sha256"
    def get_description(self):
        """Human-readable name of this builder."""
        return "Blank SD Image"
    def __init__(self, configs):
        self.image = configs['blank_image']['image']
        self.repo = configs['blank_image']['repo']  # shadows the class-level default
        self.branch = configs['blank_image'].get('branch')
        self.device = configs['blank_image']['device']
    def get_artifacts_names(self):
        """Return the image archive name and its checksum file name."""
        return [
            BlankImage.artifact_pattern.format(**{
                "device": self.device
            }),
            BlankImage.artifact_checksum_pattern.format(**{
                "device": self.device
            }),
        ]
    def build_artifacts(self):
        """Run the builder container; unknown devices run with no command set."""
        args = {
            "remove": True,
            "privileged": True,
            "volumes": {
                os.environ.get('HYPRIOT_ARTIFACTS_VOLUME'): {
                    "bind": "/workspace"
                }
            },
            "image": self.image
        }
        if self.device == "rpi":
            args['command'] = "/builder/rpi/build.sh"
        elif self.device == "odroid":
            args['command'] = "/builder/odroid/build.sh"
        container.run(args)
| true | true |
1c3290b6604a75c73e6c6d0c7728010b0721e2f7 | 13,121 | py | Python | src/azure-cli/azure/cli/command_modules/storage/tests/hybrid_2019_03_01/test_storage_validators.py | digimaun/azure-cli | 298994660f0fde6863cb45a7c3142141ed10f923 | [
"MIT"
] | 2 | 2020-08-08T11:00:25.000Z | 2020-08-08T11:00:30.000Z | src/azure-cli/azure/cli/command_modules/storage/tests/hybrid_2019_03_01/test_storage_validators.py | cindywu/azure-cli | bd011cb91ac6e0ac89f53e1105d76ea30b6609a0 | [
"MIT"
] | 1 | 2021-06-02T02:49:48.000Z | 2021-06-02T02:49:48.000Z | src/azure-cli/azure/cli/command_modules/storage/tests/hybrid_2019_03_01/test_storage_validators.py | cindywu/azure-cli | bd011cb91ac6e0ac89f53e1105d76ea30b6609a0 | [
"MIT"
] | 1 | 2020-09-07T18:44:14.000Z | 2020-09-07T18:44:14.000Z | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import unittest
import mock
from argparse import Namespace
from six import StringIO
from knack import CLI
from azure.cli.core._config import GLOBAL_CONFIG_DIR, ENV_VAR_PREFIX
from azure.cli.core.cloud import get_active_cloud
from azure.cli.core.profiles import get_sdk, ResourceType, supported_api_version
from azure.cli.command_modules.storage._validators import (get_permission_validator, get_datetime_type,
ipv4_range_type, resource_type_type, services_type,
process_blob_source_uri, get_char_options_validator,
get_source_file_or_blob_service_client,
validate_encryption_source, validate_source_uri,
validate_encryption_services)
from azure.cli.testsdk import api_version_constraint
class MockCLI(CLI):
    """Minimal knack CLI whose commands loader is :class:`MockLoader`; used to
    give the validators a realistic ``cli_ctx`` in the tests below."""
    def __init__(self):
        super(MockCLI, self).__init__(cli_name='mock_cli', config_dir=GLOBAL_CONFIG_DIR,
                                      config_env_var_prefix=ENV_VAR_PREFIX, commands_loader_cls=MockLoader)
        self.cloud = get_active_cloud(self)  # validators consult the active cloud via the ctx
class MockLoader(object):
    """Stand-in commands loader exposing ``get_models`` for data-storage SDK models."""
    def __init__(self, ctx):
        self.ctx = ctx
    def get_models(self, *attr_args, **_):
        # Local import mirrors how the real loader resolves profile-specific models.
        from azure.cli.core.profiles import get_sdk
        return get_sdk(self.ctx, ResourceType.DATA_STORAGE, *attr_args, mod='models')
class MockCmd(object):
    """Stand-in for an AzCliCommand: carries ``cli_ctx``/``loader`` and
    resolves data-storage SDK types via ``get_models``."""
    def __init__(self, ctx):
        self.cli_ctx = ctx
        self.loader = MockLoader(self.cli_ctx)
    def get_models(self, *attr_args, **kwargs):
        return get_sdk(self.cli_ctx, ResourceType.DATA_STORAGE, *attr_args, **kwargs)
class TestStorageValidators(unittest.TestCase):
    """Unit tests for the storage argument validators in ``_validators``.

    Each test builds argparse ``Namespace`` objects and checks that the
    validator either normalises the attribute in place or raises ValueError.
    Several tests shadow the ``input`` builtin with a local -- left as-is.
    """
    def setUp(self):
        self.io = StringIO()
        self.cli = MockCLI()
        self.loader = MockLoader(self.cli)
    def tearDown(self):
        self.io.close()
    def test_permission_validator(self):
        # Valid permission string is converted to a ContainerPermissions instance;
        # an invalid one raises.
        t_container_permissions = get_sdk(self.cli, ResourceType.DATA_STORAGE, 'blob.models#ContainerPermissions')
        ns1 = Namespace(permission='rwdl')
        ns2 = Namespace(permission='abc')
        get_permission_validator(t_container_permissions)(ns1)
        self.assertTrue(isinstance(ns1.permission, t_container_permissions))
        with self.assertRaises(ValueError):
            get_permission_validator(t_container_permissions)(ns2)
    def test_datetime_string_type(self):
        # as_string=True: a valid ISO-8601 value passes through unchanged.
        input = "2017-01-01T12:30Z"
        actual = get_datetime_type(True)(input)
        expected = "2017-01-01T12:30Z"
        self.assertEqual(actual, expected)
        input = "2017-01-01 12:30"
        with self.assertRaises(ValueError):
            get_datetime_type(True)(input)
    def test_datetime_type(self):
        # as_string=False: the value is parsed into a datetime object.
        import datetime
        input = "2017-01-01T12:30Z"
        actual = get_datetime_type(False)(input)
        expected = datetime.datetime(2017, 1, 1, 12, 30, 0)
        self.assertEqual(actual, expected)
        input = "2017-01-01 12:30"
        with self.assertRaises(ValueError):
            actual = get_datetime_type(False)(input)
    def test_ipv4_range_type(self):
        # Single addresses and complete ranges are accepted verbatim;
        # truncated inputs raise.
        input = "111.22.3.111"
        actual = ipv4_range_type(input)
        expected = input
        self.assertEqual(actual, expected)
        input = "111.22.3.111-222.11.44.111"
        actual = ipv4_range_type(input)
        expected = input
        self.assertEqual(actual, expected)
        input = "111.22"
        with self.assertRaises(ValueError):
            actual = ipv4_range_type(input)
        input = "111.22.33.44-"
        with self.assertRaises(ValueError):
            actual = ipv4_range_type(input)
    def test_resource_types_type(self):
        # Duplicate letters collapse ('sso' -> 'so'); non-letter words raise.
        input = "sso"
        actual = str(resource_type_type(self.loader)(input))
        expected = "so"
        self.assertEqual(actual, expected)
        input = "blob"
        with self.assertRaises(ValueError):
            actual = resource_type_type(self.loader)(input)
    def test_services_type(self):
        input = "ttfqbqtf"
        actual = str(services_type(self.loader)(input))
        # The table ('t') service is only part of the string representation for
        # some storage API profiles, so the expected value is version-dependent.
        if supported_api_version(self.cli, ResourceType.DATA_STORAGE, max_api='2016-05-31') or \
                supported_api_version(self.cli, ResourceType.DATA_STORAGE, min_api='2017-07-29'):
            expected = "bqtf"
        else:
            expected = "bqf"
        self.assertEqual(actual, expected)
        input = "everything"
        with self.assertRaises(ValueError):
            services_type(self.loader)(input)
    def test_storage_process_blob_source_uri_redundent_parameter(self):
        # Supplying a full copy-source URI together with an explicit SAS or
        # account name is ambiguous and must raise.
        with self.assertRaises(ValueError):
            process_blob_source_uri(MockCmd(self.cli),
                                    Namespace(copy_source='https://example.com', source_sas='some_sas'))
        with self.assertRaises(ValueError):
            process_blob_source_uri(MockCmd(self.cli),
                                    Namespace(copy_source='https://example.com', source_account_name='account_name'))
    def test_storage_get_char_options_validator(self):
        # Missing attribute and out-of-alphabet characters raise with specific
        # messages; a valid value is normalised to a set of characters.
        with self.assertRaises(ValueError) as cm:
            get_char_options_validator('abc', 'no_such_property')(object())
        self.assertEqual('Missing options --no-such-property.', str(cm.exception))
        ns = Namespace(services='bcd')
        with self.assertRaises(ValueError) as cm:
            get_char_options_validator('abc', 'services')(ns)
        self.assertEqual('--services: only valid values are: a, b, c.', str(cm.exception))
        ns = Namespace(services='ab')
        get_char_options_validator('abc', 'services')(ns)
        result = getattr(ns, 'services')
        self.assertIs(type(result), set)
        self.assertEqual(result, set('ab'))
    def test_validate_source_uri(self):
        # The SAS token is appended to the copy-source URI with a '?' separator.
        ns = Namespace(copy_source='https://other_name.file.core.windows.net/share2',
                       source_sas='some_sas_token')
        validate_source_uri(MockCmd(self.cli), ns)
        self.assertEqual(ns.copy_source, 'https://other_name.file.core.windows.net/share2?some_sas_token')
@api_version_constraint(resource_type=ResourceType.MGMT_STORAGE, min_api='2016-12-01')
class TestEncryptionValidators(unittest.TestCase):
    """Tests for the storage-account encryption validators (API >= 2016-12-01)."""
    def setUp(self):
        self.cli = MockCLI()
    def test_validate_encryption_services(self):
        # 'blob' alone: blob service enabled, file service left unset
        ns = Namespace(encryption_services=['blob'], _cmd=MockCmd(self.cli))
        validate_encryption_services(MockCmd(self.cli), ns)
        self.assertIsNotNone(ns.encryption_services.blob)
        self.assertTrue(ns.encryption_services.blob.enabled)
        self.assertIsNone(ns.encryption_services.file)
        # 'file' alone: file service enabled, blob service left unset
        ns = Namespace(encryption_services=['file'], _cmd=MockCmd(self.cli))
        validate_encryption_services(MockCmd(self.cli), ns)
        self.assertIsNotNone(ns.encryption_services.file)
        self.assertTrue(ns.encryption_services.file.enabled)
        self.assertIsNone(ns.encryption_services.blob)
        # both services may be enabled together
        ns = Namespace(encryption_services=['blob', 'file'], _cmd=MockCmd(self.cli))
        validate_encryption_services(MockCmd(self.cli), ns)
        self.assertIsNotNone(ns.encryption_services.blob)
        self.assertTrue(ns.encryption_services.blob.enabled)
        self.assertIsNotNone(ns.encryption_services.file)
        self.assertTrue(ns.encryption_services.file.enabled)
    def test_validate_encryption_source(self):
        # Keyvault source requires key name/version/vault to be supplied
        with self.assertRaises(ValueError):
            validate_encryption_source(
                Namespace(encryption_key_source='Microsoft.Keyvault', encryption_key_name=None,
                          encryption_key_version=None, encryption_key_vault=None, _cmd=MockCmd(self.cli)))
        # Storage-managed source must not be combined with key settings
        with self.assertRaises(ValueError):
            validate_encryption_source(
                Namespace(encryption_key_source='Microsoft.Storage', encryption_key_name='key_name',
                          encryption_key_version='key_version', encryption_key_vault='https://example.com/key_uri'))
class TestGetSourceClientValidator(unittest.TestCase):
    """Tests for deriving the source blob/file service client from CLI arguments."""
    def setUp(self):
        self.cli = MockCLI()
    def test_validate_with_container_name_blob(self):
        # source container name given, validator does not change namespace aside from ensuring source-client none
        ns = self._create_namespace(source_container='container2', destination_container='container1')
        get_source_file_or_blob_service_client(MockCmd(self.cli), ns)
        self.assertIsNone(ns.source_client)
    def test_validate_with_source_uri_blob(self):
        # source given in form of uri, source_container parsed from uri, source and dest account are the same
        ns = self._create_namespace(source_uri='https://storage_name.blob.core.windows.net/container2',
                                    destination_container='container1')
        with mock.patch('azure.cli.command_modules.storage._validators._query_account_key', return_value="fake_key"):
            get_source_file_or_blob_service_client(MockCmd(self.cli), ns)
        self.assertEqual(ns.source_container, 'container2')
        self.assertIsNone(ns.source_client)
    def test_validate_with_different_source_uri_sas_blob(self):
        # source given in form of uri, source_container parsed from uri, source and dest account are different
        ns = self._create_namespace(source_uri='https://other_name.blob.core.windows.net/container2?some_sas_token',
                                    destination_container='container1')
        get_source_file_or_blob_service_client(MockCmd(self.cli), ns)
        self.assertEqual(ns.source_container, 'container2')
        self.assertIsNotNone(ns.source_client)
        self.assertEqual(ns.source_client.account_name, 'other_name')
    def test_validate_with_share_name_file(self):
        # source share name given, validator does not change namespace aside from ensuring source-client none
        ns = self._create_namespace(source_share='share2', destination_share='share1')
        get_source_file_or_blob_service_client(MockCmd(self.cli), ns)
        self.assertIsNone(ns.source_client)
    def test_validate_with_source_uri_file(self):
        # source given in form of uri, source_share parsed from uri, source and dest account are the same
        ns = self._create_namespace(source_uri='https://storage_name.file.core.windows.net/share2',
                                    destination_share='share1')
        with mock.patch('azure.cli.command_modules.storage._validators._query_account_key', return_value="fake_key"):
            get_source_file_or_blob_service_client(MockCmd(self.cli), ns)
        self.assertEqual(ns.source_share, 'share2')
        self.assertIsNone(ns.source_client)
    def test_validate_with_different_source_uri_sas_file(self):
        # source given in form of uri, source_share parsed from uri, source and dest account are different
        ns = self._create_namespace(source_uri='https://other_name.file.core.windows.net/share2?some_sas_token',
                                    destination_share='share1')
        get_source_file_or_blob_service_client(MockCmd(self.cli), ns)
        self.assertEqual(ns.source_share, 'share2')
        self.assertIsNotNone(ns.source_client)
        self.assertEqual(ns.source_client.account_name, 'other_name')
    def test_validate_negatives(self):
        # bad argument combinations
        with self.assertRaises(ValueError):
            get_source_file_or_blob_service_client(
                MockCmd(self.cli),
                self._create_namespace(source_uri='https://storage_name.file.core.windows.net/share2',
                                       source_account_name='some_name'))
        with self.assertRaises(ValueError):
            get_source_file_or_blob_service_client(MockCmd(self.cli), self._create_namespace(source_uri='faulty_uri'))
        with self.assertRaises(ValueError):
            get_source_file_or_blob_service_client(
                MockCmd(self.cli),
                self._create_namespace(source_container='container_name', source_share='share_name'))
    def _create_namespace(self, **kwargs):
        # Baseline namespace mirroring the CLI argument surface; tests override fields via kwargs.
        ns = Namespace(account_key='my_storage_key', account_name='storage_name', connection_string=None, dryrun=False,
                       pattern='*', sas_token=None, source_account_key=None, source_account_name=None,
                       source_client=None, source_container=None, source_sas=None, source_share=None, source_uri=None,
                       destination_container=None, destination_share=None)
        for key in kwargs:
            setattr(ns, key, kwargs[key])
        return ns
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
| 46.200704 | 119 | 0.667099 |
import unittest
import mock
from argparse import Namespace
from six import StringIO
from knack import CLI
from azure.cli.core._config import GLOBAL_CONFIG_DIR, ENV_VAR_PREFIX
from azure.cli.core.cloud import get_active_cloud
from azure.cli.core.profiles import get_sdk, ResourceType, supported_api_version
from azure.cli.command_modules.storage._validators import (get_permission_validator, get_datetime_type,
ipv4_range_type, resource_type_type, services_type,
process_blob_source_uri, get_char_options_validator,
get_source_file_or_blob_service_client,
validate_encryption_source, validate_source_uri,
validate_encryption_services)
from azure.cli.testsdk import api_version_constraint
class MockCLI(CLI):
    """knack CLI double wired to the mock commands loader and the active cloud."""

    def __init__(self):
        cli_kwargs = dict(cli_name='mock_cli',
                          config_dir=GLOBAL_CONFIG_DIR,
                          config_env_var_prefix=ENV_VAR_PREFIX,
                          commands_loader_cls=MockLoader)
        super(MockCLI, self).__init__(**cli_kwargs)
        self.cloud = get_active_cloud(self)
class MockLoader(object):
    """Command-loader double that resolves storage data-plane model classes."""

    def __init__(self, ctx):
        self.ctx = ctx

    def get_models(self, *attr_args, **_):
        """Look up model classes for the active DATA_STORAGE profile."""
        from azure.cli.core.profiles import get_sdk as _resolve_sdk
        return _resolve_sdk(self.ctx, ResourceType.DATA_STORAGE, *attr_args, mod='models')
class MockCmd(object):
    """AzCliCommand double: exposes cli_ctx, a loader, and profile-aware get_models."""

    def __init__(self, ctx):
        self.cli_ctx = ctx
        self.loader = MockLoader(ctx)

    def get_models(self, *attr_args, **kwargs):
        """Forward to get_sdk under the DATA_STORAGE resource type."""
        return get_sdk(self.cli_ctx, ResourceType.DATA_STORAGE, *attr_args, **kwargs)
class TestStorageValidators(unittest.TestCase):
    """Unit tests for the storage command-argument validators.

    Fix: local variables were named ``input``, shadowing the builtin; renamed
    to ``value``. Pointless ``actual = ...`` bindings inside ``assertRaises``
    blocks (the value is never reachable) were dropped.
    """

    def setUp(self):
        self.io = StringIO()
        self.cli = MockCLI()
        self.loader = MockLoader(self.cli)

    def tearDown(self):
        self.io.close()

    def test_permission_validator(self):
        t_container_permissions = get_sdk(self.cli, ResourceType.DATA_STORAGE, 'blob.models#ContainerPermissions')

        ns1 = Namespace(permission='rwdl')
        ns2 = Namespace(permission='abc')

        # valid permission string is converted to a ContainerPermissions object
        get_permission_validator(t_container_permissions)(ns1)
        self.assertTrue(isinstance(ns1.permission, t_container_permissions))
        # characters outside the permission alphabet are rejected
        with self.assertRaises(ValueError):
            get_permission_validator(t_container_permissions)(ns2)

    def test_datetime_string_type(self):
        # ISO-8601 UTC strings pass through unchanged ...
        value = "2017-01-01T12:30Z"
        self.assertEqual(get_datetime_type(True)(value), "2017-01-01T12:30Z")
        # ... anything else is rejected
        with self.assertRaises(ValueError):
            get_datetime_type(True)("2017-01-01 12:30")

    def test_datetime_type(self):
        import datetime
        # valid strings parse into naive datetime objects
        value = "2017-01-01T12:30Z"
        self.assertEqual(get_datetime_type(False)(value),
                         datetime.datetime(2017, 1, 1, 12, 30, 0))
        with self.assertRaises(ValueError):
            get_datetime_type(False)("2017-01-01 12:30")

    def test_ipv4_range_type(self):
        # single addresses and closed ranges are returned verbatim
        for value in ("111.22.3.111", "111.22.3.111-222.11.44.111"):
            self.assertEqual(ipv4_range_type(value), value)
        # incomplete addresses / open-ended ranges are invalid
        for value in ("111.22", "111.22.33.44-"):
            with self.assertRaises(ValueError):
                ipv4_range_type(value)

    def test_resource_types_type(self):
        # duplicate letters collapse; order is normalized
        self.assertEqual(str(resource_type_type(self.loader)("sso")), "so")
        # letters outside the resource-type alphabet are rejected
        with self.assertRaises(ValueError):
            resource_type_type(self.loader)("blob")

    def test_services_type(self):
        actual = str(services_type(self.loader)("ttfqbqtf"))
        # the table service ('t') is only present in some API profiles
        if supported_api_version(self.cli, ResourceType.DATA_STORAGE, max_api='2016-05-31') or \
                supported_api_version(self.cli, ResourceType.DATA_STORAGE, min_api='2017-07-29'):
            expected = "bqtf"
        else:
            expected = "bqf"
        self.assertEqual(actual, expected)

        with self.assertRaises(ValueError):
            services_type(self.loader)("everything")

    def test_storage_process_blob_source_uri_redundent_parameter(self):
        # a full source URI may not be combined with other source identifiers
        with self.assertRaises(ValueError):
            process_blob_source_uri(MockCmd(self.cli),
                                    Namespace(copy_source='https://example.com', source_sas='some_sas'))
        with self.assertRaises(ValueError):
            process_blob_source_uri(MockCmd(self.cli),
                                    Namespace(copy_source='https://example.com', source_account_name='account_name'))

    def test_storage_get_char_options_validator(self):
        # a missing destination attribute produces a descriptive error
        with self.assertRaises(ValueError) as cm:
            get_char_options_validator('abc', 'no_such_property')(object())
        self.assertEqual('Missing options --no-such-property.', str(cm.exception))

        # characters outside the allowed alphabet are reported
        ns = Namespace(services='bcd')
        with self.assertRaises(ValueError) as cm:
            get_char_options_validator('abc', 'services')(ns)
        self.assertEqual('--services: only valid values are: a, b, c.', str(cm.exception))

        # a valid string is converted into a set of characters
        ns = Namespace(services='ab')
        get_char_options_validator('abc', 'services')(ns)
        result = getattr(ns, 'services')
        self.assertIs(type(result), set)
        self.assertEqual(result, set('ab'))

    def test_validate_source_uri(self):
        ns = Namespace(copy_source='https://other_name.file.core.windows.net/share2',
                       source_sas='some_sas_token')
        validate_source_uri(MockCmd(self.cli), ns)
        # the SAS token must be appended to the copy source as a query string
        self.assertEqual(ns.copy_source, 'https://other_name.file.core.windows.net/share2?some_sas_token')
@api_version_constraint(resource_type=ResourceType.MGMT_STORAGE, min_api='2016-12-01')
class TestEncryptionValidators(unittest.TestCase):
    """Tests for the storage-account encryption validators (API >= 2016-12-01)."""
    def setUp(self):
        self.cli = MockCLI()
    def test_validate_encryption_services(self):
        # 'blob' alone: blob service enabled, file service left unset
        ns = Namespace(encryption_services=['blob'], _cmd=MockCmd(self.cli))
        validate_encryption_services(MockCmd(self.cli), ns)
        self.assertIsNotNone(ns.encryption_services.blob)
        self.assertTrue(ns.encryption_services.blob.enabled)
        self.assertIsNone(ns.encryption_services.file)
        # 'file' alone: file service enabled, blob service left unset
        ns = Namespace(encryption_services=['file'], _cmd=MockCmd(self.cli))
        validate_encryption_services(MockCmd(self.cli), ns)
        self.assertIsNotNone(ns.encryption_services.file)
        self.assertTrue(ns.encryption_services.file.enabled)
        self.assertIsNone(ns.encryption_services.blob)
        # both services may be enabled together
        ns = Namespace(encryption_services=['blob', 'file'], _cmd=MockCmd(self.cli))
        validate_encryption_services(MockCmd(self.cli), ns)
        self.assertIsNotNone(ns.encryption_services.blob)
        self.assertTrue(ns.encryption_services.blob.enabled)
        self.assertIsNotNone(ns.encryption_services.file)
        self.assertTrue(ns.encryption_services.file.enabled)
    def test_validate_encryption_source(self):
        # Keyvault source requires key name/version/vault to be supplied
        with self.assertRaises(ValueError):
            validate_encryption_source(
                Namespace(encryption_key_source='Microsoft.Keyvault', encryption_key_name=None,
                          encryption_key_version=None, encryption_key_vault=None, _cmd=MockCmd(self.cli)))
        # Storage-managed source must not be combined with key settings
        with self.assertRaises(ValueError):
            validate_encryption_source(
                Namespace(encryption_key_source='Microsoft.Storage', encryption_key_name='key_name',
                          encryption_key_version='key_version', encryption_key_vault='https://example.com/key_uri'))
class TestGetSourceClientValidator(unittest.TestCase):
    """Tests for deriving the source blob/file service client from CLI arguments."""
    def setUp(self):
        self.cli = MockCLI()
    def test_validate_with_container_name_blob(self):
        # source container name given, validator does not change namespace aside from ensuring source-client none
        ns = self._create_namespace(source_container='container2', destination_container='container1')
        get_source_file_or_blob_service_client(MockCmd(self.cli), ns)
        self.assertIsNone(ns.source_client)
    def test_validate_with_source_uri_blob(self):
        # source given in form of uri, source_container parsed from uri, source and dest account are the same
        ns = self._create_namespace(source_uri='https://storage_name.blob.core.windows.net/container2',
                                    destination_container='container1')
        with mock.patch('azure.cli.command_modules.storage._validators._query_account_key', return_value="fake_key"):
            get_source_file_or_blob_service_client(MockCmd(self.cli), ns)
        self.assertEqual(ns.source_container, 'container2')
        self.assertIsNone(ns.source_client)
    def test_validate_with_different_source_uri_sas_blob(self):
        # source given in form of uri, source_container parsed from uri, source and dest account are different
        ns = self._create_namespace(source_uri='https://other_name.blob.core.windows.net/container2?some_sas_token',
                                    destination_container='container1')
        get_source_file_or_blob_service_client(MockCmd(self.cli), ns)
        self.assertEqual(ns.source_container, 'container2')
        self.assertIsNotNone(ns.source_client)
        self.assertEqual(ns.source_client.account_name, 'other_name')
    def test_validate_with_share_name_file(self):
        # source share name given, validator does not change namespace aside from ensuring source-client none
        ns = self._create_namespace(source_share='share2', destination_share='share1')
        get_source_file_or_blob_service_client(MockCmd(self.cli), ns)
        self.assertIsNone(ns.source_client)
    def test_validate_with_source_uri_file(self):
        # source given in form of uri, source_share parsed from uri, source and dest account are the same
        ns = self._create_namespace(source_uri='https://storage_name.file.core.windows.net/share2',
                                    destination_share='share1')
        with mock.patch('azure.cli.command_modules.storage._validators._query_account_key', return_value="fake_key"):
            get_source_file_or_blob_service_client(MockCmd(self.cli), ns)
        self.assertEqual(ns.source_share, 'share2')
        self.assertIsNone(ns.source_client)
    def test_validate_with_different_source_uri_sas_file(self):
        # source given in form of uri, source_share parsed from uri, source and dest account are different
        ns = self._create_namespace(source_uri='https://other_name.file.core.windows.net/share2?some_sas_token',
                                    destination_share='share1')
        get_source_file_or_blob_service_client(MockCmd(self.cli), ns)
        self.assertEqual(ns.source_share, 'share2')
        self.assertIsNotNone(ns.source_client)
        self.assertEqual(ns.source_client.account_name, 'other_name')
    def test_validate_negatives(self):
        # bad argument combinations
        with self.assertRaises(ValueError):
            get_source_file_or_blob_service_client(
                MockCmd(self.cli),
                self._create_namespace(source_uri='https://storage_name.file.core.windows.net/share2',
                                       source_account_name='some_name'))
        with self.assertRaises(ValueError):
            get_source_file_or_blob_service_client(MockCmd(self.cli), self._create_namespace(source_uri='faulty_uri'))
        with self.assertRaises(ValueError):
            get_source_file_or_blob_service_client(
                MockCmd(self.cli),
                self._create_namespace(source_container='container_name', source_share='share_name'))
    def _create_namespace(self, **kwargs):
        # Baseline namespace mirroring the CLI argument surface; tests override fields via kwargs.
        ns = Namespace(account_key='my_storage_key', account_name='storage_name', connection_string=None, dryrun=False,
                       pattern='*', sas_token=None, source_account_key=None, source_account_name=None,
                       source_client=None, source_container=None, source_sas=None, source_share=None, source_uri=None,
                       destination_container=None, destination_share=None)
        for key in kwargs:
            setattr(ns, key, kwargs[key])
        return ns
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
| true | true |
1c3290d3998f3b680588dcc2763f11f9ba21bd86 | 208 | py | Python | bitmovin/resources/enums/encoder_version.py | camberbridge/bitmovin-python | 3af4c6e79b0291fda05fd1ceeb5bed1bba9f3c95 | [
"Unlicense"
] | 44 | 2016-12-12T17:37:23.000Z | 2021-03-03T09:48:48.000Z | bitmovin/resources/enums/encoder_version.py | camberbridge/bitmovin-python | 3af4c6e79b0291fda05fd1ceeb5bed1bba9f3c95 | [
"Unlicense"
] | 38 | 2017-01-09T14:45:45.000Z | 2022-02-27T18:04:33.000Z | bitmovin/resources/enums/encoder_version.py | camberbridge/bitmovin-python | 3af4c6e79b0291fda05fd1ceeb5bed1bba9f3c95 | [
"Unlicense"
] | 27 | 2017-02-02T22:49:31.000Z | 2019-11-21T07:04:57.000Z | import enum
class EncoderVersion(enum.Enum):
    """Selectable Bitmovin encoder versions."""

    STABLE = 'STABLE'
    BETA = 'BETA'
    V0_16_0 = '0.16.0'
    V0_17_0 = '0.17.0'

    @staticmethod
    def default():
        """Return the version used when none is specified explicitly."""
        # name-based lookup: identical to referencing EncoderVersion.STABLE
        return EncoderVersion['STABLE']
| 16 | 36 | 0.610577 | import enum
class EncoderVersion(enum.Enum):
    """Enumeration of the encoder versions accepted by the Bitmovin API."""

    STABLE = 'STABLE'
    BETA = 'BETA'
    V0_16_0 = '0.16.0'
    V0_17_0 = '0.17.0'

    @staticmethod
    def default():
        """STABLE is the fallback when the caller does not pick a version."""
        # value-based lookup: yields the same member object as .STABLE
        return EncoderVersion('STABLE')
| true | true |
1c3290d9b25c6a6853fbf30b5d5f09b7eec27ff6 | 34,256 | py | Python | test/test_utils.py | zoogaezee/youtubeDL | 01de1a9d506ff51bff4100e11275557226fa8b9a | [
"Unlicense"
] | null | null | null | test/test_utils.py | zoogaezee/youtubeDL | 01de1a9d506ff51bff4100e11275557226fa8b9a | [
"Unlicense"
] | null | null | null | test/test_utils.py | zoogaezee/youtubeDL | 01de1a9d506ff51bff4100e11275557226fa8b9a | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Various small unit tests
import io
import json
import xml.etree.ElementTree
from youtube_dl.utils import (
age_restricted,
args_to_str,
clean_html,
DateRange,
detect_exe_version,
determine_ext,
encode_compat_str,
encodeFilename,
escape_rfc3986,
escape_url,
ExtractorError,
find_xpath_attr,
fix_xml_ampersands,
InAdvancePagedList,
intlist_to_bytes,
is_html,
js_to_json,
limit_length,
OnDemandPagedList,
orderedSet,
parse_duration,
parse_filesize,
parse_iso8601,
read_batch_urls,
sanitize_filename,
sanitize_path,
prepend_extension,
replace_extension,
remove_quotes,
shell_quote,
smuggle_url,
str_to_int,
strip_jsonp,
struct_unpack,
timeconvert,
unescapeHTML,
unified_strdate,
unsmuggle_url,
uppercase_escape,
lowercase_escape,
url_basename,
urlencode_postdata,
version_tuple,
xpath_with_ns,
xpath_element,
xpath_text,
xpath_attr,
render_table,
match_str,
parse_dfxp_time_expr,
dfxp2srt,
cli_option,
cli_valueless_option,
cli_bool_option,
)
from youtube_dl.compat import (
compat_etree_fromstring,
)
class TestUtil(unittest.TestCase):
def test_timeconvert(self):
self.assertTrue(timeconvert('') is None)
self.assertTrue(timeconvert('bougrg') is None)
def test_sanitize_filename(self):
self.assertEqual(sanitize_filename('abc'), 'abc')
self.assertEqual(sanitize_filename('abc_d-e'), 'abc_d-e')
self.assertEqual(sanitize_filename('123'), '123')
self.assertEqual('abc_de', sanitize_filename('abc/de'))
self.assertFalse('/' in sanitize_filename('abc/de///'))
self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de'))
self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|'))
self.assertEqual('yes no', sanitize_filename('yes? no'))
self.assertEqual('this - that', sanitize_filename('this: that'))
self.assertEqual(sanitize_filename('AT&T'), 'AT&T')
aumlaut = 'ä'
self.assertEqual(sanitize_filename(aumlaut), aumlaut)
tests = '\u043a\u0438\u0440\u0438\u043b\u043b\u0438\u0446\u0430'
self.assertEqual(sanitize_filename(tests), tests)
self.assertEqual(
sanitize_filename('New World record at 0:12:34'),
'New World record at 0_12_34')
self.assertEqual(sanitize_filename('--gasdgf'), '_-gasdgf')
self.assertEqual(sanitize_filename('--gasdgf', is_id=True), '--gasdgf')
self.assertEqual(sanitize_filename('.gasdgf'), 'gasdgf')
self.assertEqual(sanitize_filename('.gasdgf', is_id=True), '.gasdgf')
forbidden = '"\0\\/'
for fc in forbidden:
for fbc in forbidden:
self.assertTrue(fbc not in sanitize_filename(fc))
def test_sanitize_filename_restricted(self):
self.assertEqual(sanitize_filename('abc', restricted=True), 'abc')
self.assertEqual(sanitize_filename('abc_d-e', restricted=True), 'abc_d-e')
self.assertEqual(sanitize_filename('123', restricted=True), '123')
self.assertEqual('abc_de', sanitize_filename('abc/de', restricted=True))
self.assertFalse('/' in sanitize_filename('abc/de///', restricted=True))
self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de', restricted=True))
self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|', restricted=True))
self.assertEqual('yes_no', sanitize_filename('yes? no', restricted=True))
self.assertEqual('this_-_that', sanitize_filename('this: that', restricted=True))
tests = 'a\xe4b\u4e2d\u56fd\u7684c'
self.assertEqual(sanitize_filename(tests, restricted=True), 'a_b_c')
self.assertTrue(sanitize_filename('\xf6', restricted=True) != '') # No empty filename
forbidden = '"\0\\/&!: \'\t\n()[]{}$;`^,#'
for fc in forbidden:
for fbc in forbidden:
self.assertTrue(fbc not in sanitize_filename(fc, restricted=True))
# Handle a common case more neatly
self.assertEqual(sanitize_filename('\u5927\u58f0\u5e26 - Song', restricted=True), 'Song')
self.assertEqual(sanitize_filename('\u603b\u7edf: Speech', restricted=True), 'Speech')
# .. but make sure the file name is never empty
self.assertTrue(sanitize_filename('-', restricted=True) != '')
self.assertTrue(sanitize_filename(':', restricted=True) != '')
def test_sanitize_ids(self):
self.assertEqual(sanitize_filename('_n_cd26wFpw', is_id=True), '_n_cd26wFpw')
self.assertEqual(sanitize_filename('_BD_eEpuzXw', is_id=True), '_BD_eEpuzXw')
self.assertEqual(sanitize_filename('N0Y__7-UOdI', is_id=True), 'N0Y__7-UOdI')
def test_sanitize_path(self):
if sys.platform != 'win32':
return
self.assertEqual(sanitize_path('abc'), 'abc')
self.assertEqual(sanitize_path('abc/def'), 'abc\\def')
self.assertEqual(sanitize_path('abc\\def'), 'abc\\def')
self.assertEqual(sanitize_path('abc|def'), 'abc#def')
self.assertEqual(sanitize_path('<>:"|?*'), '#######')
self.assertEqual(sanitize_path('C:/abc/def'), 'C:\\abc\\def')
self.assertEqual(sanitize_path('C?:/abc/def'), 'C##\\abc\\def')
self.assertEqual(sanitize_path('\\\\?\\UNC\\ComputerName\\abc'), '\\\\?\\UNC\\ComputerName\\abc')
self.assertEqual(sanitize_path('\\\\?\\UNC/ComputerName/abc'), '\\\\?\\UNC\\ComputerName\\abc')
self.assertEqual(sanitize_path('\\\\?\\C:\\abc'), '\\\\?\\C:\\abc')
self.assertEqual(sanitize_path('\\\\?\\C:/abc'), '\\\\?\\C:\\abc')
self.assertEqual(sanitize_path('\\\\?\\C:\\ab?c\\de:f'), '\\\\?\\C:\\ab#c\\de#f')
self.assertEqual(sanitize_path('\\\\?\\C:\\abc'), '\\\\?\\C:\\abc')
self.assertEqual(
sanitize_path('youtube/%(uploader)s/%(autonumber)s-%(title)s-%(upload_date)s.%(ext)s'),
'youtube\\%(uploader)s\\%(autonumber)s-%(title)s-%(upload_date)s.%(ext)s')
self.assertEqual(
sanitize_path('youtube/TheWreckingYard ./00001-Not bad, Especially for Free! (1987 Yamaha 700)-20141116.mp4.part'),
'youtube\\TheWreckingYard #\\00001-Not bad, Especially for Free! (1987 Yamaha 700)-20141116.mp4.part')
self.assertEqual(sanitize_path('abc/def...'), 'abc\\def..#')
self.assertEqual(sanitize_path('abc.../def'), 'abc..#\\def')
self.assertEqual(sanitize_path('abc.../def...'), 'abc..#\\def..#')
self.assertEqual(sanitize_path('../abc'), '..\\abc')
self.assertEqual(sanitize_path('../../abc'), '..\\..\\abc')
self.assertEqual(sanitize_path('./abc'), 'abc')
self.assertEqual(sanitize_path('./../abc'), '..\\abc')
def test_prepend_extension(self):
self.assertEqual(prepend_extension('abc.ext', 'temp'), 'abc.temp.ext')
self.assertEqual(prepend_extension('abc.ext', 'temp', 'ext'), 'abc.temp.ext')
self.assertEqual(prepend_extension('abc.unexpected_ext', 'temp', 'ext'), 'abc.unexpected_ext.temp')
self.assertEqual(prepend_extension('abc', 'temp'), 'abc.temp')
self.assertEqual(prepend_extension('.abc', 'temp'), '.abc.temp')
self.assertEqual(prepend_extension('.abc.ext', 'temp'), '.abc.temp.ext')
def test_replace_extension(self):
self.assertEqual(replace_extension('abc.ext', 'temp'), 'abc.temp')
self.assertEqual(replace_extension('abc.ext', 'temp', 'ext'), 'abc.temp')
self.assertEqual(replace_extension('abc.unexpected_ext', 'temp', 'ext'), 'abc.unexpected_ext.temp')
self.assertEqual(replace_extension('abc', 'temp'), 'abc.temp')
self.assertEqual(replace_extension('.abc', 'temp'), '.abc.temp')
self.assertEqual(replace_extension('.abc.ext', 'temp'), '.abc.temp')
def test_remove_quotes(self):
self.assertEqual(remove_quotes(None), None)
self.assertEqual(remove_quotes('"'), '"')
self.assertEqual(remove_quotes("'"), "'")
self.assertEqual(remove_quotes(';'), ';')
self.assertEqual(remove_quotes('";'), '";')
self.assertEqual(remove_quotes('""'), '')
self.assertEqual(remove_quotes('";"'), ';')
def test_ordered_set(self):
self.assertEqual(orderedSet([1, 1, 2, 3, 4, 4, 5, 6, 7, 3, 5]), [1, 2, 3, 4, 5, 6, 7])
self.assertEqual(orderedSet([]), [])
self.assertEqual(orderedSet([1]), [1])
# keep the list ordered
self.assertEqual(orderedSet([135, 1, 1, 1]), [135, 1])
def test_unescape_html(self):
self.assertEqual(unescapeHTML('%20;'), '%20;')
self.assertEqual(unescapeHTML('/'), '/')
self.assertEqual(unescapeHTML('/'), '/')
self.assertEqual(unescapeHTML('é'), 'é')
self.assertEqual(unescapeHTML('�'), '�')
def test_daterange(self):
_20century = DateRange("19000101", "20000101")
self.assertFalse("17890714" in _20century)
_ac = DateRange("00010101")
self.assertTrue("19690721" in _ac)
_firstmilenium = DateRange(end="10000101")
self.assertTrue("07110427" in _firstmilenium)
def test_unified_dates(self):
self.assertEqual(unified_strdate('December 21, 2010'), '20101221')
self.assertEqual(unified_strdate('8/7/2009'), '20090708')
self.assertEqual(unified_strdate('Dec 14, 2012'), '20121214')
self.assertEqual(unified_strdate('2012/10/11 01:56:38 +0000'), '20121011')
self.assertEqual(unified_strdate('1968 12 10'), '19681210')
self.assertEqual(unified_strdate('1968-12-10'), '19681210')
self.assertEqual(unified_strdate('28/01/2014 21:00:00 +0100'), '20140128')
self.assertEqual(
unified_strdate('11/26/2014 11:30:00 AM PST', day_first=False),
'20141126')
self.assertEqual(
unified_strdate('2/2/2015 6:47:40 PM', day_first=False),
'20150202')
self.assertEqual(unified_strdate('25-09-2014'), '20140925')
self.assertEqual(unified_strdate('UNKNOWN DATE FORMAT'), None)
def test_determine_ext(self):
self.assertEqual(determine_ext('http://example.com/foo/bar.mp4/?download'), 'mp4')
self.assertEqual(determine_ext('http://example.com/foo/bar/?download', None), None)
self.assertEqual(determine_ext('http://example.com/foo/bar.nonext/?download', None), None)
self.assertEqual(determine_ext('http://example.com/foo/bar/mp4?download', None), None)
self.assertEqual(determine_ext('http://example.com/foo/bar.m3u8//?download'), 'm3u8')
    def test_find_xpath_attr(self):
        """find_xpath_attr: first node matching an attribute (and optional value)."""
        testxml = '''<root>
            <node/>
            <node x="a"/>
            <node x="a" y="c" />
            <node x="b" y="d" />
            <node x="" />
        </root>'''
        doc = compat_etree_fromstring(testxml)
        # no match at all -> None, with or without an expected value
        self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n'), None)
        self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n', 'v'), None)
        self.assertEqual(find_xpath_attr(doc, './/node', 'n'), None)
        self.assertEqual(find_xpath_attr(doc, './/node', 'n', 'v'), None)
        # doc[0] is the bare <node/>, so doc[1] is the first carrying x=
        self.assertEqual(find_xpath_attr(doc, './/node', 'x'), doc[1])
        self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'a'), doc[1])
        self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'b'), doc[3])
        self.assertEqual(find_xpath_attr(doc, './/node', 'y'), doc[2])
        self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'c'), doc[2])
        self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'd'), doc[3])
        # an empty attribute value still counts as present
        self.assertEqual(find_xpath_attr(doc, './/node', 'x', ''), doc[4])
    def test_xpath_with_ns(self):
        """xpath_with_ns expands 'prefix:tag' paths using the given ns map."""
        testxml = '''<root xmlns:media="http://example.com/">
            <media:song>
                <media:author>The Author</media:author>
                <url>http://server.com/download.mp3</url>
            </media:song>
        </root>'''
        doc = compat_etree_fromstring(testxml)
        find = lambda p: doc.find(xpath_with_ns(p, {'media': 'http://example.com/'}))
        self.assertTrue(find('media:song') is not None)
        self.assertEqual(find('media:song/media:author').text, 'The Author')
        # non-prefixed path components are left untouched
        self.assertEqual(find('media:song/url').text, 'http://server.com/download.mp3')
def test_xpath_element(self):
doc = xml.etree.ElementTree.Element('root')
div = xml.etree.ElementTree.SubElement(doc, 'div')
p = xml.etree.ElementTree.SubElement(div, 'p')
p.text = 'Foo'
self.assertEqual(xpath_element(doc, 'div/p'), p)
self.assertEqual(xpath_element(doc, ['div/p']), p)
self.assertEqual(xpath_element(doc, ['div/bar', 'div/p']), p)
self.assertEqual(xpath_element(doc, 'div/bar', default='default'), 'default')
self.assertEqual(xpath_element(doc, ['div/bar'], default='default'), 'default')
self.assertTrue(xpath_element(doc, 'div/bar') is None)
self.assertTrue(xpath_element(doc, ['div/bar']) is None)
self.assertTrue(xpath_element(doc, ['div/bar'], 'div/baz') is None)
self.assertRaises(ExtractorError, xpath_element, doc, 'div/bar', fatal=True)
self.assertRaises(ExtractorError, xpath_element, doc, ['div/bar'], fatal=True)
self.assertRaises(ExtractorError, xpath_element, doc, ['div/bar', 'div/baz'], fatal=True)
    def test_xpath_text(self):
        """xpath_text: element text, default fallback, fatal error on miss."""
        testxml = '''<root>
            <div>
                <p>Foo</p>
            </div>
        </root>'''
        doc = compat_etree_fromstring(testxml)
        self.assertEqual(xpath_text(doc, 'div/p'), 'Foo')
        self.assertEqual(xpath_text(doc, 'div/bar', default='default'), 'default')
        # miss without default -> None; with fatal=True -> ExtractorError
        self.assertTrue(xpath_text(doc, 'div/bar') is None)
        self.assertRaises(ExtractorError, xpath_text, doc, 'div/bar', fatal=True)
    def test_xpath_attr(self):
        """xpath_attr: attribute lookup with default fallback and fatal mode."""
        testxml = '''<root>
            <div>
                <p x="a">Foo</p>
            </div>
        </root>'''
        doc = compat_etree_fromstring(testxml)
        self.assertEqual(xpath_attr(doc, 'div/p', 'x'), 'a')
        # missing element or missing attribute -> None
        self.assertEqual(xpath_attr(doc, 'div/bar', 'x'), None)
        self.assertEqual(xpath_attr(doc, 'div/p', 'y'), None)
        self.assertEqual(xpath_attr(doc, 'div/bar', 'x', default='default'), 'default')
        self.assertEqual(xpath_attr(doc, 'div/p', 'y', default='default'), 'default')
        # fatal=True escalates a miss into an ExtractorError
        self.assertRaises(ExtractorError, xpath_attr, doc, 'div/bar', 'x', fatal=True)
        self.assertRaises(ExtractorError, xpath_attr, doc, 'div/p', 'y', fatal=True)
def test_smuggle_url(self):
data = {"ö": "ö", "abc": [3]}
url = 'https://foo.bar/baz?x=y#a'
smug_url = smuggle_url(url, data)
unsmug_url, unsmug_data = unsmuggle_url(smug_url)
self.assertEqual(url, unsmug_url)
self.assertEqual(data, unsmug_data)
res_url, res_data = unsmuggle_url(url)
self.assertEqual(res_url, url)
self.assertEqual(res_data, None)
def test_shell_quote(self):
args = ['ffmpeg', '-i', encodeFilename('ñ€ß\'.mp4')]
self.assertEqual(shell_quote(args), """ffmpeg -i 'ñ€ß'"'"'.mp4'""")
def test_str_to_int(self):
self.assertEqual(str_to_int('123,456'), 123456)
self.assertEqual(str_to_int('123.456'), 123456)
def test_url_basename(self):
self.assertEqual(url_basename('http://foo.de/'), '')
self.assertEqual(url_basename('http://foo.de/bar/baz'), 'baz')
self.assertEqual(url_basename('http://foo.de/bar/baz?x=y'), 'baz')
self.assertEqual(url_basename('http://foo.de/bar/baz#x=y'), 'baz')
self.assertEqual(url_basename('http://foo.de/bar/baz/'), 'baz')
self.assertEqual(
url_basename('http://media.w3.org/2010/05/sintel/trailer.mp4'),
'trailer.mp4')
def test_parse_duration(self):
self.assertEqual(parse_duration(None), None)
self.assertEqual(parse_duration(False), None)
self.assertEqual(parse_duration('invalid'), None)
self.assertEqual(parse_duration('1'), 1)
self.assertEqual(parse_duration('1337:12'), 80232)
self.assertEqual(parse_duration('9:12:43'), 33163)
self.assertEqual(parse_duration('12:00'), 720)
self.assertEqual(parse_duration('00:01:01'), 61)
self.assertEqual(parse_duration('x:y'), None)
self.assertEqual(parse_duration('3h11m53s'), 11513)
self.assertEqual(parse_duration('3h 11m 53s'), 11513)
self.assertEqual(parse_duration('3 hours 11 minutes 53 seconds'), 11513)
self.assertEqual(parse_duration('3 hours 11 mins 53 secs'), 11513)
self.assertEqual(parse_duration('62m45s'), 3765)
self.assertEqual(parse_duration('6m59s'), 419)
self.assertEqual(parse_duration('49s'), 49)
self.assertEqual(parse_duration('0h0m0s'), 0)
self.assertEqual(parse_duration('0m0s'), 0)
self.assertEqual(parse_duration('0s'), 0)
self.assertEqual(parse_duration('01:02:03.05'), 3723.05)
self.assertEqual(parse_duration('T30M38S'), 1838)
self.assertEqual(parse_duration('5 s'), 5)
self.assertEqual(parse_duration('3 min'), 180)
self.assertEqual(parse_duration('2.5 hours'), 9000)
self.assertEqual(parse_duration('02:03:04'), 7384)
self.assertEqual(parse_duration('01:02:03:04'), 93784)
self.assertEqual(parse_duration('1 hour 3 minutes'), 3780)
self.assertEqual(parse_duration('87 Min.'), 5220)
def test_fix_xml_ampersands(self):
self.assertEqual(
fix_xml_ampersands('"&x=y&z=a'), '"&x=y&z=a')
self.assertEqual(
fix_xml_ampersands('"&x=y&wrong;&z=a'),
'"&x=y&wrong;&z=a')
self.assertEqual(
fix_xml_ampersands('&'><"'),
'&'><"')
self.assertEqual(
fix_xml_ampersands('Ӓ᪼'), 'Ӓ᪼')
self.assertEqual(fix_xml_ampersands('&#&#'), '&#&#')
def test_paged_list(self):
def testPL(size, pagesize, sliceargs, expected):
def get_page(pagenum):
firstid = pagenum * pagesize
upto = min(size, pagenum * pagesize + pagesize)
for i in range(firstid, upto):
yield i
pl = OnDemandPagedList(get_page, pagesize)
got = pl.getslice(*sliceargs)
self.assertEqual(got, expected)
iapl = InAdvancePagedList(get_page, size // pagesize + 1, pagesize)
got = iapl.getslice(*sliceargs)
self.assertEqual(got, expected)
testPL(5, 2, (), [0, 1, 2, 3, 4])
testPL(5, 2, (1,), [1, 2, 3, 4])
testPL(5, 2, (2,), [2, 3, 4])
testPL(5, 2, (4,), [4])
testPL(5, 2, (0, 3), [0, 1, 2])
testPL(5, 2, (1, 4), [1, 2, 3])
testPL(5, 2, (2, 99), [2, 3, 4])
testPL(5, 2, (20, 99), [])
def test_struct_unpack(self):
self.assertEqual(struct_unpack('!B', b'\x00'), (0,))
def test_read_batch_urls(self):
f = io.StringIO('''\xef\xbb\xbf foo
bar\r
baz
# More after this line\r
; or after this
bam''')
self.assertEqual(read_batch_urls(f), ['foo', 'bar', 'baz', 'bam'])
def test_urlencode_postdata(self):
data = urlencode_postdata({'username': 'foo@bar.com', 'password': '1234'})
self.assertTrue(isinstance(data, bytes))
def test_encode_compat_str(self):
self.assertEqual(encode_compat_str(b'\xd1\x82\xd0\xb5\xd1\x81\xd1\x82', 'utf-8'), 'тест')
self.assertEqual(encode_compat_str('тест', 'utf-8'), 'тест')
def test_parse_iso8601(self):
self.assertEqual(parse_iso8601('2014-03-23T23:04:26+0100'), 1395612266)
self.assertEqual(parse_iso8601('2014-03-23T22:04:26+0000'), 1395612266)
self.assertEqual(parse_iso8601('2014-03-23T22:04:26Z'), 1395612266)
self.assertEqual(parse_iso8601('2014-03-23T22:04:26.1234Z'), 1395612266)
self.assertEqual(parse_iso8601('2015-09-29T08:27:31.727'), 1443515251)
self.assertEqual(parse_iso8601('2015-09-29T08-27-31.727'), None)
def test_strip_jsonp(self):
stripped = strip_jsonp('cb ([ {"id":"532cb",\n\n\n"x":\n3}\n]\n);')
d = json.loads(stripped)
self.assertEqual(d, [{"id": "532cb", "x": 3}])
stripped = strip_jsonp('parseMetadata({"STATUS":"OK"})\n\n\n//epc')
d = json.loads(stripped)
self.assertEqual(d, {'STATUS': 'OK'})
def test_uppercase_escape(self):
self.assertEqual(uppercase_escape('aä'), 'aä')
self.assertEqual(uppercase_escape('\\U0001d550'), '𝕐')
def test_lowercase_escape(self):
self.assertEqual(lowercase_escape('aä'), 'aä')
self.assertEqual(lowercase_escape('\\u0026'), '&')
def test_limit_length(self):
self.assertEqual(limit_length(None, 12), None)
self.assertEqual(limit_length('foo', 12), 'foo')
self.assertTrue(
limit_length('foo bar baz asd', 12).startswith('foo bar'))
self.assertTrue('...' in limit_length('foo bar baz asd', 12))
def test_escape_rfc3986(self):
reserved = "!*'();:@&=+$,/?#[]"
unreserved = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~'
self.assertEqual(escape_rfc3986(reserved), reserved)
self.assertEqual(escape_rfc3986(unreserved), unreserved)
self.assertEqual(escape_rfc3986('тест'), '%D1%82%D0%B5%D1%81%D1%82')
self.assertEqual(escape_rfc3986('%D1%82%D0%B5%D1%81%D1%82'), '%D1%82%D0%B5%D1%81%D1%82')
self.assertEqual(escape_rfc3986('foo bar'), 'foo%20bar')
self.assertEqual(escape_rfc3986('foo%20bar'), 'foo%20bar')
def test_escape_url(self):
self.assertEqual(
escape_url('http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavré_FD.mp4'),
'http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavre%CC%81_FD.mp4'
)
self.assertEqual(
escape_url('http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erklärt/Das-Erste/Video?documentId=22673108&bcastId=5290'),
'http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erkl%C3%A4rt/Das-Erste/Video?documentId=22673108&bcastId=5290'
)
self.assertEqual(
escape_url('http://тест.рф/фрагмент'),
'http://тест.рф/%D1%84%D1%80%D0%B0%D0%B3%D0%BC%D0%B5%D0%BD%D1%82'
)
self.assertEqual(
escape_url('http://тест.рф/абв?абв=абв#абв'),
'http://тест.рф/%D0%B0%D0%B1%D0%B2?%D0%B0%D0%B1%D0%B2=%D0%B0%D0%B1%D0%B2#%D0%B0%D0%B1%D0%B2'
)
self.assertEqual(escape_url('http://vimeo.com/56015672#at=0'), 'http://vimeo.com/56015672#at=0')
def test_js_to_json_realworld(self):
inp = '''{
'clip':{'provider':'pseudo'}
}'''
self.assertEqual(js_to_json(inp), '''{
"clip":{"provider":"pseudo"}
}''')
json.loads(js_to_json(inp))
inp = '''{
'playlist':[{'controls':{'all':null}}]
}'''
self.assertEqual(js_to_json(inp), '''{
"playlist":[{"controls":{"all":null}}]
}''')
inp = '''"The CW\\'s \\'Crazy Ex-Girlfriend\\'"'''
self.assertEqual(js_to_json(inp), '''"The CW's 'Crazy Ex-Girlfriend'"''')
inp = '"SAND Number: SAND 2013-7800P\\nPresenter: Tom Russo\\nHabanero Software Training - Xyce Software\\nXyce, Sandia\\u0027s"'
json_code = js_to_json(inp)
self.assertEqual(json.loads(json_code), json.loads(inp))
def test_js_to_json_edgecases(self):
on = js_to_json("{abc_def:'1\\'\\\\2\\\\\\'3\"4'}")
self.assertEqual(json.loads(on), {"abc_def": "1'\\2\\'3\"4"})
on = js_to_json('{"abc": true}')
self.assertEqual(json.loads(on), {'abc': True})
# Ignore JavaScript code as well
on = js_to_json('''{
"x": 1,
y: "a",
z: some.code
}''')
d = json.loads(on)
self.assertEqual(d['x'], 1)
self.assertEqual(d['y'], 'a')
on = js_to_json('["abc", "def",]')
self.assertEqual(json.loads(on), ['abc', 'def'])
on = js_to_json('{"abc": "def",}')
self.assertEqual(json.loads(on), {'abc': 'def'})
def test_clean_html(self):
self.assertEqual(clean_html('a:\nb'), 'a: b')
self.assertEqual(clean_html('a:\n "b"'), 'a: "b"')
def test_intlist_to_bytes(self):
self.assertEqual(
intlist_to_bytes([0, 1, 127, 128, 255]),
b'\x00\x01\x7f\x80\xff')
def test_args_to_str(self):
self.assertEqual(
args_to_str(['foo', 'ba/r', '-baz', '2 be', '']),
'foo ba/r -baz \'2 be\' \'\''
)
def test_parse_filesize(self):
self.assertEqual(parse_filesize(None), None)
self.assertEqual(parse_filesize(''), None)
self.assertEqual(parse_filesize('91 B'), 91)
self.assertEqual(parse_filesize('foobar'), None)
self.assertEqual(parse_filesize('2 MiB'), 2097152)
self.assertEqual(parse_filesize('5 GB'), 5000000000)
self.assertEqual(parse_filesize('1.2Tb'), 1200000000000)
self.assertEqual(parse_filesize('1,24 KB'), 1240)
def test_version_tuple(self):
self.assertEqual(version_tuple('1'), (1,))
self.assertEqual(version_tuple('10.23.344'), (10, 23, 344))
self.assertEqual(version_tuple('10.1-6'), (10, 1, 6)) # avconv style
def test_detect_exe_version(self):
self.assertEqual(detect_exe_version('''ffmpeg version 1.2.1
built on May 27 2013 08:37:26 with gcc 4.7 (Debian 4.7.3-4)
configuration: --prefix=/usr --extra-'''), '1.2.1')
self.assertEqual(detect_exe_version('''ffmpeg version N-63176-g1fb4685
built on May 15 2014 22:09:06 with gcc 4.8.2 (GCC)'''), 'N-63176-g1fb4685')
self.assertEqual(detect_exe_version('''X server found. dri2 connection failed!
Trying to open render node...
Success at /dev/dri/renderD128.
ffmpeg version 2.4.4 Copyright (c) 2000-2014 the FFmpeg ...'''), '2.4.4')
def test_age_restricted(self):
self.assertFalse(age_restricted(None, 10)) # unrestricted content
self.assertFalse(age_restricted(1, None)) # unrestricted policy
self.assertFalse(age_restricted(8, 10))
self.assertTrue(age_restricted(18, 14))
self.assertFalse(age_restricted(18, 18))
def test_is_html(self):
self.assertFalse(is_html(b'\x49\x44\x43<html'))
self.assertTrue(is_html(b'<!DOCTYPE foo>\xaaa'))
self.assertTrue(is_html( # UTF-8 with BOM
b'\xef\xbb\xbf<!DOCTYPE foo>\xaaa'))
self.assertTrue(is_html( # UTF-16-LE
b'\xff\xfe<\x00h\x00t\x00m\x00l\x00>\x00\xe4\x00'
))
self.assertTrue(is_html( # UTF-16-BE
b'\xfe\xff\x00<\x00h\x00t\x00m\x00l\x00>\x00\xe4'
))
self.assertTrue(is_html( # UTF-32-BE
b'\x00\x00\xFE\xFF\x00\x00\x00<\x00\x00\x00h\x00\x00\x00t\x00\x00\x00m\x00\x00\x00l\x00\x00\x00>\x00\x00\x00\xe4'))
self.assertTrue(is_html( # UTF-32-LE
b'\xFF\xFE\x00\x00<\x00\x00\x00h\x00\x00\x00t\x00\x00\x00m\x00\x00\x00l\x00\x00\x00>\x00\x00\x00\xe4\x00\x00\x00'))
def test_render_table(self):
self.assertEqual(
render_table(
['a', 'bcd'],
[[123, 4], [9999, 51]]),
'a bcd\n'
'123 4\n'
'9999 51')
def test_match_str(self):
self.assertRaises(ValueError, match_str, 'xy>foobar', {})
self.assertFalse(match_str('xy', {'x': 1200}))
self.assertTrue(match_str('!xy', {'x': 1200}))
self.assertTrue(match_str('x', {'x': 1200}))
self.assertFalse(match_str('!x', {'x': 1200}))
self.assertTrue(match_str('x', {'x': 0}))
self.assertFalse(match_str('x>0', {'x': 0}))
self.assertFalse(match_str('x>0', {}))
self.assertTrue(match_str('x>?0', {}))
self.assertTrue(match_str('x>1K', {'x': 1200}))
self.assertFalse(match_str('x>2K', {'x': 1200}))
self.assertTrue(match_str('x>=1200 & x < 1300', {'x': 1200}))
self.assertFalse(match_str('x>=1100 & x < 1200', {'x': 1200}))
self.assertFalse(match_str('y=a212', {'y': 'foobar42'}))
self.assertTrue(match_str('y=foobar42', {'y': 'foobar42'}))
self.assertFalse(match_str('y!=foobar42', {'y': 'foobar42'}))
self.assertTrue(match_str('y!=foobar2', {'y': 'foobar42'}))
self.assertFalse(match_str(
'like_count > 100 & dislike_count <? 50 & description',
{'like_count': 90, 'description': 'foo'}))
self.assertTrue(match_str(
'like_count > 100 & dislike_count <? 50 & description',
{'like_count': 190, 'description': 'foo'}))
self.assertFalse(match_str(
'like_count > 100 & dislike_count <? 50 & description',
{'like_count': 190, 'dislike_count': 60, 'description': 'foo'}))
self.assertFalse(match_str(
'like_count > 100 & dislike_count <? 50 & description',
{'like_count': 190, 'dislike_count': 10}))
def test_parse_dfxp_time_expr(self):
self.assertEqual(parse_dfxp_time_expr(None), None)
self.assertEqual(parse_dfxp_time_expr(''), None)
self.assertEqual(parse_dfxp_time_expr('0.1'), 0.1)
self.assertEqual(parse_dfxp_time_expr('0.1s'), 0.1)
self.assertEqual(parse_dfxp_time_expr('00:00:01'), 1.0)
self.assertEqual(parse_dfxp_time_expr('00:00:01.100'), 1.1)
self.assertEqual(parse_dfxp_time_expr('00:00:01:100'), 1.1)
def test_dfxp2srt(self):
dfxp_data = '''<?xml version="1.0" encoding="UTF-8"?>
<tt xmlns="http://www.w3.org/ns/ttml" xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
<body>
<div xml:lang="en">
<p begin="0" end="1">The following line contains Chinese characters and special symbols</p>
<p begin="1" end="2">第二行<br/>♪♪</p>
<p begin="2" dur="1"><span>Third<br/>Line</span></p>
<p begin="3" end="-1">Lines with invalid timestamps are ignored</p>
<p begin="-1" end="-1">Ignore, two</p>
<p begin="3" dur="-1">Ignored, three</p>
</div>
</body>
</tt>'''
srt_data = '''1
00:00:00,000 --> 00:00:01,000
The following line contains Chinese characters and special symbols
2
00:00:01,000 --> 00:00:02,000
第二行
♪♪
3
00:00:02,000 --> 00:00:03,000
Third
Line
'''
self.assertEqual(dfxp2srt(dfxp_data), srt_data)
dfxp_data_no_default_namespace = '''<?xml version="1.0" encoding="UTF-8"?>
<tt xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
<body>
<div xml:lang="en">
<p begin="0" end="1">The first line</p>
</div>
</body>
</tt>'''
srt_data = '''1
00:00:00,000 --> 00:00:01,000
The first line
'''
self.assertEqual(dfxp2srt(dfxp_data_no_default_namespace), srt_data)
def test_cli_option(self):
self.assertEqual(cli_option({'proxy': '127.0.0.1:3128'}, '--proxy', 'proxy'), ['--proxy', '127.0.0.1:3128'])
self.assertEqual(cli_option({'proxy': None}, '--proxy', 'proxy'), [])
self.assertEqual(cli_option({}, '--proxy', 'proxy'), [])
def test_cli_valueless_option(self):
self.assertEqual(cli_valueless_option(
{'downloader': 'external'}, '--external-downloader', 'downloader', 'external'), ['--external-downloader'])
self.assertEqual(cli_valueless_option(
{'downloader': 'internal'}, '--external-downloader', 'downloader', 'external'), [])
self.assertEqual(cli_valueless_option(
{'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate'), ['--no-check-certificate'])
self.assertEqual(cli_valueless_option(
{'nocheckcertificate': False}, '--no-check-certificate', 'nocheckcertificate'), [])
self.assertEqual(cli_valueless_option(
{'checkcertificate': True}, '--no-check-certificate', 'checkcertificate', False), [])
self.assertEqual(cli_valueless_option(
{'checkcertificate': False}, '--no-check-certificate', 'checkcertificate', False), ['--no-check-certificate'])
def test_cli_bool_option(self):
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate'),
['--no-check-certificate', 'true'])
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate', separator='='),
['--no-check-certificate=true'])
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': True}, '--check-certificate', 'nocheckcertificate', 'false', 'true'),
['--check-certificate', 'false'])
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': True}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='),
['--check-certificate=false'])
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': False}, '--check-certificate', 'nocheckcertificate', 'false', 'true'),
['--check-certificate', 'true'])
self.assertEqual(
cli_bool_option(
{'nocheckcertificate': False}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='),
['--check-certificate=true'])
# Run the whole suite when this file is executed directly (python test_utils.py).
if __name__ == '__main__':
    unittest.main()
| 44.43061 | 157 | 0.607368 |
from __future__ import unicode_literals
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import io
import json
import xml.etree.ElementTree
from youtube_dl.utils import (
age_restricted,
args_to_str,
clean_html,
DateRange,
detect_exe_version,
determine_ext,
encode_compat_str,
encodeFilename,
escape_rfc3986,
escape_url,
ExtractorError,
find_xpath_attr,
fix_xml_ampersands,
InAdvancePagedList,
intlist_to_bytes,
is_html,
js_to_json,
limit_length,
OnDemandPagedList,
orderedSet,
parse_duration,
parse_filesize,
parse_iso8601,
read_batch_urls,
sanitize_filename,
sanitize_path,
prepend_extension,
replace_extension,
remove_quotes,
shell_quote,
smuggle_url,
str_to_int,
strip_jsonp,
struct_unpack,
timeconvert,
unescapeHTML,
unified_strdate,
unsmuggle_url,
uppercase_escape,
lowercase_escape,
url_basename,
urlencode_postdata,
version_tuple,
xpath_with_ns,
xpath_element,
xpath_text,
xpath_attr,
render_table,
match_str,
parse_dfxp_time_expr,
dfxp2srt,
cli_option,
cli_valueless_option,
cli_bool_option,
)
from youtube_dl.compat import (
compat_etree_fromstring,
)
class TestUtil(unittest.TestCase):
def test_timeconvert(self):
self.assertTrue(timeconvert('') is None)
self.assertTrue(timeconvert('bougrg') is None)
def test_sanitize_filename(self):
self.assertEqual(sanitize_filename('abc'), 'abc')
self.assertEqual(sanitize_filename('abc_d-e'), 'abc_d-e')
self.assertEqual(sanitize_filename('123'), '123')
self.assertEqual('abc_de', sanitize_filename('abc/de'))
self.assertFalse('/' in sanitize_filename('abc/de///'))
self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de'))
self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|'))
self.assertEqual('yes no', sanitize_filename('yes? no'))
self.assertEqual('this - that', sanitize_filename('this: that'))
self.assertEqual(sanitize_filename('AT&T'), 'AT&T')
aumlaut = 'ä'
self.assertEqual(sanitize_filename(aumlaut), aumlaut)
tests = '\u043a\u0438\u0440\u0438\u043b\u043b\u0438\u0446\u0430'
self.assertEqual(sanitize_filename(tests), tests)
self.assertEqual(
sanitize_filename('New World record at 0:12:34'),
'New World record at 0_12_34')
self.assertEqual(sanitize_filename('--gasdgf'), '_-gasdgf')
self.assertEqual(sanitize_filename('--gasdgf', is_id=True), '--gasdgf')
self.assertEqual(sanitize_filename('.gasdgf'), 'gasdgf')
self.assertEqual(sanitize_filename('.gasdgf', is_id=True), '.gasdgf')
forbidden = '"\0\\/'
for fc in forbidden:
for fbc in forbidden:
self.assertTrue(fbc not in sanitize_filename(fc))
def test_sanitize_filename_restricted(self):
self.assertEqual(sanitize_filename('abc', restricted=True), 'abc')
self.assertEqual(sanitize_filename('abc_d-e', restricted=True), 'abc_d-e')
self.assertEqual(sanitize_filename('123', restricted=True), '123')
self.assertEqual('abc_de', sanitize_filename('abc/de', restricted=True))
self.assertFalse('/' in sanitize_filename('abc/de///', restricted=True))
self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de', restricted=True))
self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|', restricted=True))
self.assertEqual('yes_no', sanitize_filename('yes? no', restricted=True))
self.assertEqual('this_-_that', sanitize_filename('this: that', restricted=True))
tests = 'a\xe4b\u4e2d\u56fd\u7684c'
self.assertEqual(sanitize_filename(tests, restricted=True), 'a_b_c')
self.assertTrue(sanitize_filename('\xf6', restricted=True) != '') # No empty filename
forbidden = '"\0\\/&!: \'\t\n()[]{}$;`^,
for fc in forbidden:
for fbc in forbidden:
self.assertTrue(fbc not in sanitize_filename(fc, restricted=True))
# Handle a common case more neatly
self.assertEqual(sanitize_filename('\u5927\u58f0\u5e26 - Song', restricted=True), 'Song')
self.assertEqual(sanitize_filename('\u603b\u7edf: Speech', restricted=True), 'Speech')
# .. but make sure the file name is never empty
self.assertTrue(sanitize_filename('-', restricted=True) != '')
self.assertTrue(sanitize_filename(':', restricted=True) != '')
def test_sanitize_ids(self):
self.assertEqual(sanitize_filename('_n_cd26wFpw', is_id=True), '_n_cd26wFpw')
self.assertEqual(sanitize_filename('_BD_eEpuzXw', is_id=True), '_BD_eEpuzXw')
self.assertEqual(sanitize_filename('N0Y__7-UOdI', is_id=True), 'N0Y__7-UOdI')
def test_sanitize_path(self):
if sys.platform != 'win32':
return
self.assertEqual(sanitize_path('abc'), 'abc')
self.assertEqual(sanitize_path('abc/def'), 'abc\\def')
self.assertEqual(sanitize_path('abc\\def'), 'abc\\def')
self.assertEqual(sanitize_path('abc|def'), 'abc
self.assertEqual(sanitize_path('<>:"|?*'), '#######')
self.assertEqual(sanitize_path('C:/abc/def'), 'C:\\abc\\def')
self.assertEqual(sanitize_path('C?:/abc/def'), 'C##\\abc\\def')
self.assertEqual(sanitize_path('\\\\?\\UNC\\ComputerName\\abc'), '\\\\?\\UNC\\ComputerName\\abc')
self.assertEqual(sanitize_path('\\\\?\\UNC/ComputerName/abc'), '\\\\?\\UNC\\ComputerName\\abc')
self.assertEqual(sanitize_path('\\\\?\\C:\\abc'), '\\\\?\\C:\\abc')
self.assertEqual(sanitize_path('\\\\?\\C:/abc'), '\\\\?\\C:\\abc')
self.assertEqual(sanitize_path('\\\\?\\C:\\ab?c\\de:f'), '\\\\?\\C:\\ab#c\\de#f')
self.assertEqual(sanitize_path('\\\\?\\C:\\abc'), '\\\\?\\C:\\abc')
self.assertEqual(
sanitize_path('youtube/%(uploader)s/%(autonumber)s-%(title)s-%(upload_date)s.%(ext)s'),
'youtube\\%(uploader)s\\%(autonumber)s-%(title)s-%(upload_date)s.%(ext)s')
self.assertEqual(
sanitize_path('youtube/TheWreckingYard ./00001-Not bad, Especially for Free! (1987 Yamaha 700)-20141116.mp4.part'),
'youtube\\TheWreckingYard #\\00001-Not bad, Especially for Free! (1987 Yamaha 700)-20141116.mp4.part')
self.assertEqual(sanitize_path('abc/def...'), 'abc\\def..#')
self.assertEqual(sanitize_path('abc.../def'), 'abc..#\\def')
self.assertEqual(sanitize_path('abc.../def...'), 'abc..#\\def..#')
self.assertEqual(sanitize_path('../abc'), '..\\abc')
self.assertEqual(sanitize_path('../../abc'), '..\\..\\abc')
self.assertEqual(sanitize_path('./abc'), 'abc')
self.assertEqual(sanitize_path('./../abc'), '..\\abc')
def test_prepend_extension(self):
self.assertEqual(prepend_extension('abc.ext', 'temp'), 'abc.temp.ext')
self.assertEqual(prepend_extension('abc.ext', 'temp', 'ext'), 'abc.temp.ext')
self.assertEqual(prepend_extension('abc.unexpected_ext', 'temp', 'ext'), 'abc.unexpected_ext.temp')
self.assertEqual(prepend_extension('abc', 'temp'), 'abc.temp')
self.assertEqual(prepend_extension('.abc', 'temp'), '.abc.temp')
self.assertEqual(prepend_extension('.abc.ext', 'temp'), '.abc.temp.ext')
def test_replace_extension(self):
self.assertEqual(replace_extension('abc.ext', 'temp'), 'abc.temp')
self.assertEqual(replace_extension('abc.ext', 'temp', 'ext'), 'abc.temp')
self.assertEqual(replace_extension('abc.unexpected_ext', 'temp', 'ext'), 'abc.unexpected_ext.temp')
self.assertEqual(replace_extension('abc', 'temp'), 'abc.temp')
self.assertEqual(replace_extension('.abc', 'temp'), '.abc.temp')
self.assertEqual(replace_extension('.abc.ext', 'temp'), '.abc.temp')
def test_remove_quotes(self):
self.assertEqual(remove_quotes(None), None)
self.assertEqual(remove_quotes('"'), '"')
self.assertEqual(remove_quotes("'"), "'")
self.assertEqual(remove_quotes(';'), ';')
self.assertEqual(remove_quotes('";'), '";')
self.assertEqual(remove_quotes('""'), '')
self.assertEqual(remove_quotes('";"'), ';')
def test_ordered_set(self):
self.assertEqual(orderedSet([1, 1, 2, 3, 4, 4, 5, 6, 7, 3, 5]), [1, 2, 3, 4, 5, 6, 7])
self.assertEqual(orderedSet([]), [])
self.assertEqual(orderedSet([1]), [1])
# keep the list ordered
self.assertEqual(orderedSet([135, 1, 1, 1]), [135, 1])
def test_unescape_html(self):
self.assertEqual(unescapeHTML('%20;'), '%20;')
self.assertEqual(unescapeHTML('/'), '/')
self.assertEqual(unescapeHTML('/'), '/')
self.assertEqual(unescapeHTML('é'), 'é')
self.assertEqual(unescapeHTML('�'), '�')
def test_daterange(self):
_20century = DateRange("19000101", "20000101")
self.assertFalse("17890714" in _20century)
_ac = DateRange("00010101")
self.assertTrue("19690721" in _ac)
_firstmilenium = DateRange(end="10000101")
self.assertTrue("07110427" in _firstmilenium)
def test_unified_dates(self):
self.assertEqual(unified_strdate('December 21, 2010'), '20101221')
self.assertEqual(unified_strdate('8/7/2009'), '20090708')
self.assertEqual(unified_strdate('Dec 14, 2012'), '20121214')
self.assertEqual(unified_strdate('2012/10/11 01:56:38 +0000'), '20121011')
self.assertEqual(unified_strdate('1968 12 10'), '19681210')
self.assertEqual(unified_strdate('1968-12-10'), '19681210')
self.assertEqual(unified_strdate('28/01/2014 21:00:00 +0100'), '20140128')
self.assertEqual(
unified_strdate('11/26/2014 11:30:00 AM PST', day_first=False),
'20141126')
self.assertEqual(
unified_strdate('2/2/2015 6:47:40 PM', day_first=False),
'20150202')
self.assertEqual(unified_strdate('25-09-2014'), '20140925')
self.assertEqual(unified_strdate('UNKNOWN DATE FORMAT'), None)
def test_determine_ext(self):
self.assertEqual(determine_ext('http://example.com/foo/bar.mp4/?download'), 'mp4')
self.assertEqual(determine_ext('http://example.com/foo/bar/?download', None), None)
self.assertEqual(determine_ext('http://example.com/foo/bar.nonext/?download', None), None)
self.assertEqual(determine_ext('http://example.com/foo/bar/mp4?download', None), None)
self.assertEqual(determine_ext('http://example.com/foo/bar.m3u8//?download'), 'm3u8')
def test_find_xpath_attr(self):
testxml = '''<root>
<node/>
<node x="a"/>
<node x="a" y="c" />
<node x="b" y="d" />
<node x="" />
</root>'''
doc = compat_etree_fromstring(testxml)
self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n'), None)
self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n', 'v'), None)
self.assertEqual(find_xpath_attr(doc, './/node', 'n'), None)
self.assertEqual(find_xpath_attr(doc, './/node', 'n', 'v'), None)
self.assertEqual(find_xpath_attr(doc, './/node', 'x'), doc[1])
self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'a'), doc[1])
self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'b'), doc[3])
self.assertEqual(find_xpath_attr(doc, './/node', 'y'), doc[2])
self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'c'), doc[2])
self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'd'), doc[3])
self.assertEqual(find_xpath_attr(doc, './/node', 'x', ''), doc[4])
def test_xpath_with_ns(self):
testxml = '''<root xmlns:media="http://example.com/">
<media:song>
<media:author>The Author</media:author>
<url>http://server.com/download.mp3</url>
</media:song>
</root>'''
doc = compat_etree_fromstring(testxml)
find = lambda p: doc.find(xpath_with_ns(p, {'media': 'http://example.com/'}))
self.assertTrue(find('media:song') is not None)
self.assertEqual(find('media:song/media:author').text, 'The Author')
self.assertEqual(find('media:song/url').text, 'http://server.com/download.mp3')
def test_xpath_element(self):
doc = xml.etree.ElementTree.Element('root')
div = xml.etree.ElementTree.SubElement(doc, 'div')
p = xml.etree.ElementTree.SubElement(div, 'p')
p.text = 'Foo'
self.assertEqual(xpath_element(doc, 'div/p'), p)
self.assertEqual(xpath_element(doc, ['div/p']), p)
self.assertEqual(xpath_element(doc, ['div/bar', 'div/p']), p)
self.assertEqual(xpath_element(doc, 'div/bar', default='default'), 'default')
self.assertEqual(xpath_element(doc, ['div/bar'], default='default'), 'default')
self.assertTrue(xpath_element(doc, 'div/bar') is None)
self.assertTrue(xpath_element(doc, ['div/bar']) is None)
self.assertTrue(xpath_element(doc, ['div/bar'], 'div/baz') is None)
self.assertRaises(ExtractorError, xpath_element, doc, 'div/bar', fatal=True)
self.assertRaises(ExtractorError, xpath_element, doc, ['div/bar'], fatal=True)
self.assertRaises(ExtractorError, xpath_element, doc, ['div/bar', 'div/baz'], fatal=True)
def test_xpath_text(self):
testxml = '''<root>
<div>
<p>Foo</p>
</div>
</root>'''
doc = compat_etree_fromstring(testxml)
self.assertEqual(xpath_text(doc, 'div/p'), 'Foo')
self.assertEqual(xpath_text(doc, 'div/bar', default='default'), 'default')
self.assertTrue(xpath_text(doc, 'div/bar') is None)
self.assertRaises(ExtractorError, xpath_text, doc, 'div/bar', fatal=True)
def test_xpath_attr(self):
testxml = '''<root>
<div>
<p x="a">Foo</p>
</div>
</root>'''
doc = compat_etree_fromstring(testxml)
self.assertEqual(xpath_attr(doc, 'div/p', 'x'), 'a')
self.assertEqual(xpath_attr(doc, 'div/bar', 'x'), None)
self.assertEqual(xpath_attr(doc, 'div/p', 'y'), None)
self.assertEqual(xpath_attr(doc, 'div/bar', 'x', default='default'), 'default')
self.assertEqual(xpath_attr(doc, 'div/p', 'y', default='default'), 'default')
self.assertRaises(ExtractorError, xpath_attr, doc, 'div/bar', 'x', fatal=True)
self.assertRaises(ExtractorError, xpath_attr, doc, 'div/p', 'y', fatal=True)
def test_smuggle_url(self):
data = {"ö": "ö", "abc": [3]}
url = 'https://foo.bar/baz?x=y#a'
smug_url = smuggle_url(url, data)
unsmug_url, unsmug_data = unsmuggle_url(smug_url)
self.assertEqual(url, unsmug_url)
self.assertEqual(data, unsmug_data)
res_url, res_data = unsmuggle_url(url)
self.assertEqual(res_url, url)
self.assertEqual(res_data, None)
def test_shell_quote(self):
args = ['ffmpeg', '-i', encodeFilename('ñ€ß\'.mp4')]
self.assertEqual(shell_quote(args), """ffmpeg -i 'ñ€ß'"'"'.mp4'""")
def test_str_to_int(self):
self.assertEqual(str_to_int('123,456'), 123456)
self.assertEqual(str_to_int('123.456'), 123456)
def test_url_basename(self):
self.assertEqual(url_basename('http://foo.de/'), '')
self.assertEqual(url_basename('http://foo.de/bar/baz'), 'baz')
self.assertEqual(url_basename('http://foo.de/bar/baz?x=y'), 'baz')
self.assertEqual(url_basename('http://foo.de/bar/baz#x=y'), 'baz')
self.assertEqual(url_basename('http://foo.de/bar/baz/'), 'baz')
self.assertEqual(
url_basename('http://media.w3.org/2010/05/sintel/trailer.mp4'),
'trailer.mp4')
def test_parse_duration(self):
self.assertEqual(parse_duration(None), None)
self.assertEqual(parse_duration(False), None)
self.assertEqual(parse_duration('invalid'), None)
self.assertEqual(parse_duration('1'), 1)
self.assertEqual(parse_duration('1337:12'), 80232)
self.assertEqual(parse_duration('9:12:43'), 33163)
self.assertEqual(parse_duration('12:00'), 720)
self.assertEqual(parse_duration('00:01:01'), 61)
self.assertEqual(parse_duration('x:y'), None)
self.assertEqual(parse_duration('3h11m53s'), 11513)
self.assertEqual(parse_duration('3h 11m 53s'), 11513)
self.assertEqual(parse_duration('3 hours 11 minutes 53 seconds'), 11513)
self.assertEqual(parse_duration('3 hours 11 mins 53 secs'), 11513)
self.assertEqual(parse_duration('62m45s'), 3765)
self.assertEqual(parse_duration('6m59s'), 419)
self.assertEqual(parse_duration('49s'), 49)
self.assertEqual(parse_duration('0h0m0s'), 0)
self.assertEqual(parse_duration('0m0s'), 0)
self.assertEqual(parse_duration('0s'), 0)
self.assertEqual(parse_duration('01:02:03.05'), 3723.05)
self.assertEqual(parse_duration('T30M38S'), 1838)
self.assertEqual(parse_duration('5 s'), 5)
self.assertEqual(parse_duration('3 min'), 180)
self.assertEqual(parse_duration('2.5 hours'), 9000)
self.assertEqual(parse_duration('02:03:04'), 7384)
self.assertEqual(parse_duration('01:02:03:04'), 93784)
self.assertEqual(parse_duration('1 hour 3 minutes'), 3780)
self.assertEqual(parse_duration('87 Min.'), 5220)
def test_fix_xml_ampersands(self):
self.assertEqual(
fix_xml_ampersands('"&x=y&z=a'), '"&x=y&z=a')
self.assertEqual(
fix_xml_ampersands('"&x=y&wrong;&z=a'),
'"&x=y&wrong;&z=a')
self.assertEqual(
fix_xml_ampersands('&'><"'),
'&'><"')
self.assertEqual(
fix_xml_ampersands('Ӓ᪼'), 'Ӓ᪼')
self.assertEqual(fix_xml_ampersands('&#&#'), '&#&#')
def test_paged_list(self):
def testPL(size, pagesize, sliceargs, expected):
def get_page(pagenum):
firstid = pagenum * pagesize
upto = min(size, pagenum * pagesize + pagesize)
for i in range(firstid, upto):
yield i
pl = OnDemandPagedList(get_page, pagesize)
got = pl.getslice(*sliceargs)
self.assertEqual(got, expected)
iapl = InAdvancePagedList(get_page, size // pagesize + 1, pagesize)
got = iapl.getslice(*sliceargs)
self.assertEqual(got, expected)
testPL(5, 2, (), [0, 1, 2, 3, 4])
testPL(5, 2, (1,), [1, 2, 3, 4])
testPL(5, 2, (2,), [2, 3, 4])
testPL(5, 2, (4,), [4])
testPL(5, 2, (0, 3), [0, 1, 2])
testPL(5, 2, (1, 4), [1, 2, 3])
testPL(5, 2, (2, 99), [2, 3, 4])
testPL(5, 2, (20, 99), [])
def test_struct_unpack(self):
self.assertEqual(struct_unpack('!B', b'\x00'), (0,))
def test_read_batch_urls(self):
f = io.StringIO('''\xef\xbb\xbf foo
bar\r
baz
# More after this line\r
; or after this
bam''')
self.assertEqual(read_batch_urls(f), ['foo', 'bar', 'baz', 'bam'])
def test_urlencode_postdata(self):
data = urlencode_postdata({'username': 'foo@bar.com', 'password': '1234'})
self.assertTrue(isinstance(data, bytes))
def test_encode_compat_str(self):
    """encode_compat_str decodes bytes with the given encoding; str passes through."""
    self.assertEqual(encode_compat_str(b'\xd1\x82\xd0\xb5\xd1\x81\xd1\x82', 'utf-8'), 'тест')
    self.assertEqual(encode_compat_str('тест', 'utf-8'), 'тест')
def test_parse_iso8601(self):
    """parse_iso8601 returns a Unix timestamp; malformed separators give None."""
    self.assertEqual(parse_iso8601('2014-03-23T23:04:26+0100'), 1395612266)
    self.assertEqual(parse_iso8601('2014-03-23T22:04:26+0000'), 1395612266)
    self.assertEqual(parse_iso8601('2014-03-23T22:04:26Z'), 1395612266)
    self.assertEqual(parse_iso8601('2014-03-23T22:04:26.1234Z'), 1395612266)
    self.assertEqual(parse_iso8601('2015-09-29T08:27:31.727'), 1443515251)
    # Dashes are not a valid time separator.
    self.assertEqual(parse_iso8601('2015-09-29T08-27-31.727'), None)
def test_strip_jsonp(self):
    """strip_jsonp must peel the callback wrapper and leave valid JSON."""
    payload = strip_jsonp('cb ([ {"id":"532cb",\n\n\n"x":\n3}\n]\n);')
    self.assertEqual(json.loads(payload), [{"id": "532cb", "x": 3}])
    payload = strip_jsonp('parseMetadata({"STATUS":"OK"})\n\n\n//epc')
    self.assertEqual(json.loads(payload), {'STATUS': 'OK'})
def test_uppercase_escape(self):
    """uppercase_escape expands literal \\U00xxxxxx escapes, leaving other text alone."""
    self.assertEqual(uppercase_escape('aä'), 'aä')
    self.assertEqual(uppercase_escape('\\U0001d550'), '𝕐')
def test_lowercase_escape(self):
    """lowercase_escape expands literal \\uxxxx escapes, leaving other text alone."""
    self.assertEqual(lowercase_escape('aä'), 'aä')
    self.assertEqual(lowercase_escape('\\u0026'), '&')
def test_limit_length(self):
    """limit_length truncates long strings with an ellipsis and passes short ones through."""
    self.assertEqual(limit_length(None, 12), None)
    self.assertEqual(limit_length('foo', 12), 'foo')
    truncated = limit_length('foo bar baz asd', 12)
    self.assertTrue(truncated.startswith('foo bar'))
    self.assertTrue('...' in truncated)
def test_escape_rfc3986(self):
    """escape_rfc3986 must leave RFC 3986 reserved/unreserved characters alone
    and percent-encode everything else, without double-encoding existing %XX."""
    # FIX: the reserved-set literal was truncated (unterminated string) in
    # this copy; restored to the gen-delims + sub-delims of RFC 3986 sec. 2.2.
    reserved = "!*'();:@&=+$,/?#[]"
    unreserved = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~'
    self.assertEqual(escape_rfc3986(reserved), reserved)
    self.assertEqual(escape_rfc3986(unreserved), unreserved)
    self.assertEqual(escape_rfc3986('тест'), '%D1%82%D0%B5%D1%81%D1%82')
    self.assertEqual(escape_rfc3986('%D1%82%D0%B5%D1%81%D1%82'), '%D1%82%D0%B5%D1%81%D1%82')
    self.assertEqual(escape_rfc3986('foo bar'), 'foo%20bar')
    self.assertEqual(escape_rfc3986('foo%20bar'), 'foo%20bar')
def test_escape_url(self):
    """escape_url percent-encodes non-ASCII path/query/fragment characters only."""
    self.assertEqual(
        escape_url('http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavré_FD.mp4'),
        'http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavre%CC%81_FD.mp4'
    )
    self.assertEqual(
        escape_url('http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erklärt/Das-Erste/Video?documentId=22673108&bcastId=5290'),
        'http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erkl%C3%A4rt/Das-Erste/Video?documentId=22673108&bcastId=5290'
    )
    self.assertEqual(
        escape_url('http://тест.рф/фрагмент'),
        'http://тест.рф/%D1%84%D1%80%D0%B0%D0%B3%D0%BC%D0%B5%D0%BD%D1%82'
    )
    self.assertEqual(
        escape_url('http://тест.рф/абв?абв=абв#абв'),
        'http://тест.рф/%D0%B0%D0%B1%D0%B2?%D0%B0%D0%B1%D0%B2=%D0%B0%D0%B1%D0%B2#%D0%B0%D0%B1%D0%B2'
    )
    # Already-ASCII URLs must come through unchanged.
    self.assertEqual(escape_url('http://vimeo.com/56015672#at=0'), 'http://vimeo.com/56015672#at=0')
def test_js_to_json_realworld(self):
    """js_to_json converts real-world JS object literals into parseable JSON."""
    inp = '''{
'clip':{'provider':'pseudo'}
}'''
    self.assertEqual(js_to_json(inp), '''{
"clip":{"provider":"pseudo"}
}''')
    json.loads(js_to_json(inp))
    inp = '''{
'playlist':[{'controls':{'all':null}}]
}'''
    self.assertEqual(js_to_json(inp), '''{
"playlist":[{"controls":{"all":null}}]
}''')
    # Escaped single quotes inside single-quoted strings.
    inp = '''"The CW\\'s \\'Crazy Ex-Girlfriend\\'"'''
    self.assertEqual(js_to_json(inp), '''"The CW's 'Crazy Ex-Girlfriend'"''')
    inp = '"SAND Number: SAND 2013-7800P\\nPresenter: Tom Russo\\nHabanero Software Training - Xyce Software\\nXyce, Sandia\\u0027s"'
    json_code = js_to_json(inp)
    self.assertEqual(json.loads(json_code), json.loads(inp))
def test_js_to_json_edgecases(self):
    """Edge cases: escaped quotes, bare keys, trailing commas, embedded JS code."""
    on = js_to_json("{abc_def:'1\\'\\\\2\\\\\\'3\"4'}")
    self.assertEqual(json.loads(on), {"abc_def": "1'\\2\\'3\"4"})
    on = js_to_json('{"abc": true}')
    self.assertEqual(json.loads(on), {'abc': True})
    # Ignore JavaScript code as well
    on = js_to_json('''{
"x": 1,
y: "a",
z: some.code
}''')
    d = json.loads(on)
    self.assertEqual(d['x'], 1)
    self.assertEqual(d['y'], 'a')
    # Trailing commas must be dropped so json.loads accepts the output.
    on = js_to_json('["abc", "def",]')
    self.assertEqual(json.loads(on), ['abc', 'def'])
    on = js_to_json('{"abc": "def",}')
    self.assertEqual(json.loads(on), {'abc': 'def'})
def test_clean_html(self):
    """clean_html collapses newline(+indent) sequences into single spaces."""
    for markup, expected in (('a:\nb', 'a: b'),
                             ('a:\n "b"', 'a: "b"')):
        self.assertEqual(clean_html(markup), expected)
def test_intlist_to_bytes(self):
    """intlist_to_bytes packs a list of 0-255 integers into a bytes object."""
    packed = intlist_to_bytes([0, 1, 127, 128, 255])
    self.assertEqual(packed, b'\x00\x01\x7f\x80\xff')
def test_args_to_str(self):
    """args_to_str shell-quotes arguments containing spaces or empty strings."""
    self.assertEqual(
        args_to_str(['foo', 'ba/r', '-baz', '2 be', '']),
        'foo ba/r -baz \'2 be\' \'\''
    )
def test_parse_filesize(self):
    """parse_filesize understands binary/SI suffixes and rejects garbage."""
    cases = [
        (None, None),
        ('', None),
        ('91 B', 91),
        ('foobar', None),
        ('2 MiB', 2097152),
        ('5 GB', 5000000000),
        ('1.2Tb', 1200000000000),
        ('1,24 KB', 1240),  # comma as decimal separator
    ]
    for text, expected in cases:
        self.assertEqual(parse_filesize(text), expected)
def test_version_tuple(self):
    """version_tuple splits dotted (and avconv-style dashed) versions into int tuples."""
    for version, expected in (('1', (1,)),
                              ('10.23.344', (10, 23, 344)),
                              ('10.1-6', (10, 1, 6))):  # avconv style
        self.assertEqual(version_tuple(version), expected)
def test_detect_exe_version(self):
    """detect_exe_version extracts the version token from tool banner output."""
    self.assertEqual(detect_exe_version('''ffmpeg version 1.2.1
built on May 27 2013 08:37:26 with gcc 4.7 (Debian 4.7.3-4)
configuration: --prefix=/usr --extra-'''), '1.2.1')
    self.assertEqual(detect_exe_version('''ffmpeg version N-63176-g1fb4685
built on May 15 2014 22:09:06 with gcc 4.8.2 (GCC)'''), 'N-63176-g1fb4685')
    # Noise lines before the banner must be skipped.
    self.assertEqual(detect_exe_version('''X server found. dri2 connection failed!
Trying to open render node...
Success at /dev/dri/renderD128.
ffmpeg version 2.4.4 Copyright (c) 2000-2014 the FFmpeg ...'''), '2.4.4')
def test_age_restricted(self):
    """age_restricted(content_limit, age_limit): None on either side means unrestricted."""
    for content_limit, age in ((None, 10),   # unrestricted content
                               (1, None),    # unrestricted policy
                               (8, 10),
                               (18, 18)):
        self.assertFalse(age_restricted(content_limit, age))
    self.assertTrue(age_restricted(18, 14))
def test_is_html(self):
    """is_html sniffs HTML content across common byte-order-mark encodings."""
    self.assertFalse(is_html(b'\x49\x44\x43<html'))
    self.assertTrue(is_html(b'<!DOCTYPE foo>\xaaa'))
    self.assertTrue(is_html(  # UTF-8 with BOM
        b'\xef\xbb\xbf<!DOCTYPE foo>\xaaa'))
    self.assertTrue(is_html(  # UTF-16-LE
        b'\xff\xfe<\x00h\x00t\x00m\x00l\x00>\x00\xe4\x00'
    ))
    self.assertTrue(is_html(  # UTF-16-BE
        b'\xfe\xff\x00<\x00h\x00t\x00m\x00l\x00>\x00\xe4'
    ))
    self.assertTrue(is_html(  # UTF-32-BE
        b'\x00\x00\xFE\xFF\x00\x00\x00<\x00\x00\x00h\x00\x00\x00t\x00\x00\x00m\x00\x00\x00l\x00\x00\x00>\x00\x00\x00\xe4'))
    self.assertTrue(is_html(  # UTF-32-LE
        b'\xFF\xFE\x00\x00<\x00\x00\x00h\x00\x00\x00t\x00\x00\x00m\x00\x00\x00l\x00\x00\x00>\x00\x00\x00\xe4\x00\x00\x00'))
def test_render_table(self):
    """render_table pads columns to a fixed width."""
    # NOTE(review): the expected string below appears to have had its column
    # padding collapsed by whitespace normalization in this copy; preserved
    # byte-for-byte -- restore the alignment from upstream before trusting it.
    self.assertEqual(
        render_table(
            ['a', 'bcd'],
            [[123, 4], [9999, 51]]),
        'a bcd\n'
        '123 4\n'
        '9999 51')
def test_match_str(self):
    """match_str evaluates filter expressions (presence, comparisons, '?' soft ops)
    against a dict of fields."""
    self.assertRaises(ValueError, match_str, 'xy>foobar', {})
    self.assertFalse(match_str('xy', {'x': 1200}))
    self.assertTrue(match_str('!xy', {'x': 1200}))
    self.assertTrue(match_str('x', {'x': 1200}))
    self.assertFalse(match_str('!x', {'x': 1200}))
    # Presence checks the key, not the truthiness of the value.
    self.assertTrue(match_str('x', {'x': 0}))
    self.assertFalse(match_str('x>0', {'x': 0}))
    self.assertFalse(match_str('x>0', {}))
    # The '?' variant treats a missing field as a pass.
    self.assertTrue(match_str('x>?0', {}))
    self.assertTrue(match_str('x>1K', {'x': 1200}))
    self.assertFalse(match_str('x>2K', {'x': 1200}))
    self.assertTrue(match_str('x>=1200 & x < 1300', {'x': 1200}))
    self.assertFalse(match_str('x>=1100 & x < 1200', {'x': 1200}))
    self.assertFalse(match_str('y=a212', {'y': 'foobar42'}))
    self.assertTrue(match_str('y=foobar42', {'y': 'foobar42'}))
    self.assertFalse(match_str('y!=foobar42', {'y': 'foobar42'}))
    self.assertTrue(match_str('y!=foobar2', {'y': 'foobar42'}))
    self.assertFalse(match_str(
        'like_count > 100 & dislike_count <? 50 & description',
        {'like_count': 90, 'description': 'foo'}))
    self.assertTrue(match_str(
        'like_count > 100 & dislike_count <? 50 & description',
        {'like_count': 190, 'description': 'foo'}))
    self.assertFalse(match_str(
        'like_count > 100 & dislike_count <? 50 & description',
        {'like_count': 190, 'dislike_count': 60, 'description': 'foo'}))
    self.assertFalse(match_str(
        'like_count > 100 & dislike_count <? 50 & description',
        {'like_count': 190, 'dislike_count': 10}))
def test_parse_dfxp_time_expr(self):
    """parse_dfxp_time_expr handles bare seconds, 's' suffixes and clock times."""
    for expr, expected in ((None, None),
                           ('', None),
                           ('0.1', 0.1),
                           ('0.1s', 0.1),
                           ('00:00:01', 1.0),
                           ('00:00:01.100', 1.1),
                           ('00:00:01:100', 1.1)):
        self.assertEqual(parse_dfxp_time_expr(expr), expected)
def test_dfxp2srt(self):
    """dfxp2srt converts TTML/DFXP captions to SRT, skipping invalid timestamps."""
    # NOTE(review): SRT entries are normally separated by blank lines; this
    # copy appears to have had blank lines stripped by the data dump. The
    # literals are preserved byte-for-byte.
    dfxp_data = '''<?xml version="1.0" encoding="UTF-8"?>
<tt xmlns="http://www.w3.org/ns/ttml" xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
<body>
<div xml:lang="en">
<p begin="0" end="1">The following line contains Chinese characters and special symbols</p>
<p begin="1" end="2">第二行<br/>♪♪</p>
<p begin="2" dur="1"><span>Third<br/>Line</span></p>
<p begin="3" end="-1">Lines with invalid timestamps are ignored</p>
<p begin="-1" end="-1">Ignore, two</p>
<p begin="3" dur="-1">Ignored, three</p>
</div>
</body>
</tt>'''
    srt_data = '''1
00:00:00,000 --> 00:00:01,000
The following line contains Chinese characters and special symbols
2
00:00:01,000 --> 00:00:02,000
第二行
♪♪
3
00:00:02,000 --> 00:00:03,000
Third
Line
'''
    self.assertEqual(dfxp2srt(dfxp_data), srt_data)
    # Documents without the default TTML namespace must still convert.
    dfxp_data_no_default_namespace = '''<?xml version="1.0" encoding="UTF-8"?>
<tt xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
<body>
<div xml:lang="en">
<p begin="0" end="1">The first line</p>
</div>
</body>
</tt>'''
    srt_data = '''1
00:00:00,000 --> 00:00:01,000
The first line
'''
    self.assertEqual(dfxp2srt(dfxp_data_no_default_namespace), srt_data)
def test_cli_option(self):
    """cli_option emits [flag, value] when the param is set, nothing otherwise."""
    rendered = cli_option({'proxy': '127.0.0.1:3128'}, '--proxy', 'proxy')
    self.assertEqual(rendered, ['--proxy', '127.0.0.1:3128'])
    self.assertEqual(cli_option({'proxy': None}, '--proxy', 'proxy'), [])
    self.assertEqual(cli_option({}, '--proxy', 'proxy'), [])
def test_cli_valueless_option(self):
    """cli_valueless_option emits the bare flag only when the param equals the expected value."""
    self.assertEqual(cli_valueless_option(
        {'downloader': 'external'}, '--external-downloader', 'downloader', 'external'), ['--external-downloader'])
    self.assertEqual(cli_valueless_option(
        {'downloader': 'internal'}, '--external-downloader', 'downloader', 'external'), [])
    # Default expected value is True.
    self.assertEqual(cli_valueless_option(
        {'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate'), ['--no-check-certificate'])
    self.assertEqual(cli_valueless_option(
        {'nocheckcertificate': False}, '--no-check-certificate', 'nocheckcertificate'), [])
    self.assertEqual(cli_valueless_option(
        {'checkcertificate': True}, '--no-check-certificate', 'checkcertificate', False), [])
    self.assertEqual(cli_valueless_option(
        {'checkcertificate': False}, '--no-check-certificate', 'checkcertificate', False), ['--no-check-certificate'])
def test_cli_bool_option(self):
    """cli_bool_option renders a boolean param as flag + true/false value,
    optionally joined with a separator instead of emitted as two items."""
    self.assertEqual(
        cli_bool_option(
            {'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate'),
        ['--no-check-certificate', 'true'])
    self.assertEqual(
        cli_bool_option(
            {'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate', separator='='),
        ['--no-check-certificate=true'])
    # Inverted mapping: param True renders the 'false' token and vice versa.
    self.assertEqual(
        cli_bool_option(
            {'nocheckcertificate': True}, '--check-certificate', 'nocheckcertificate', 'false', 'true'),
        ['--check-certificate', 'false'])
    self.assertEqual(
        cli_bool_option(
            {'nocheckcertificate': True}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='),
        ['--check-certificate=false'])
    self.assertEqual(
        cli_bool_option(
            {'nocheckcertificate': False}, '--check-certificate', 'nocheckcertificate', 'false', 'true'),
        ['--check-certificate', 'true'])
    self.assertEqual(
        cli_bool_option(
            {'nocheckcertificate': False}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='),
        ['--check-certificate=true'])
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| true | true |
1c32910732157fc016e141a31d5d08ae39abdfc7 | 3,797 | py | Python | test/unit/test_callback.py | talkhouli/sockeye | ef5fed26cda3151883efe446878f4d7f65b5852d | [
"Apache-2.0"
] | 1 | 2019-11-14T14:31:07.000Z | 2019-11-14T14:31:07.000Z | test/unit/test_callback.py | talkhouli/sockeye | ef5fed26cda3151883efe446878f4d7f65b5852d | [
"Apache-2.0"
] | null | null | null | test/unit/test_callback.py | talkhouli/sockeye | ef5fed26cda3151883efe446878f4d7f65b5852d | [
"Apache-2.0"
] | 1 | 2019-11-05T14:42:05.000Z | 2019-11-05T14:42:05.000Z | # Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not
# use this file except in compliance with the License. A copy of the License
# is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""
Tests sockeye.callback.TrainingMonitor optimization logic
"""
import os
import tempfile
import numpy as np
import pytest
from sockeye import callback
from sockeye import constants as C
from sockeye import utils
# Each scenario tuple: (metric name to optimize, its worst-possible starting
# value, per-checkpoint training metric dicts, per-checkpoint validation
# metric dicts, and whether each validation step should count as an
# improvement). Perplexity is minimized; accuracy is maximized.
test_constants = [('perplexity', np.inf,
                   [{'perplexity': 100.0, '_': 42}, {'perplexity': 50.0}, {'perplexity': 60.0}, {'perplexity': 80.0}],
                   [{'perplexity': 200.0}, {'perplexity': 100.0}, {'perplexity': 100.001}, {'perplexity': 99.99}],
                   [True, True, False, True]),
                  ('accuracy', 0.0,
                   [{'accuracy': 100.0}, {'accuracy': 50.0}, {'accuracy': 60.0}, {'accuracy': 80.0}],
                   [{'accuracy': 200.0}, {'accuracy': 100.0}, {'accuracy': 100.001}, {'accuracy': 99.99}],
                   [True, False, False, False])]
class DummyMetric:
    """Minimal stand-in for an EvalMetric: wraps a dict and yields its items."""

    def __init__(self, metric_dict):
        # Mapping of metric name -> value, reported verbatim.
        self.metric_dict = metric_dict

    def get_name_value(self):
        """Return an iterator of (metric_name, value) pairs."""
        return iter(self.metric_dict.items())
@pytest.mark.parametrize("optimized_metric, initial_best, train_metrics, eval_metrics, improved_seq",
                         test_constants)
def test_callback(optimized_metric, initial_best, train_metrics, eval_metrics, improved_seq):
    """Drive TrainingMonitor through checkpoints and verify its metric bookkeeping
    and the metrics file it writes."""
    with tempfile.TemporaryDirectory() as out_dir:
        mon = callback.TrainingMonitor(batch_size=32,
                                       output_folder=out_dir,
                                       optimized_metric=optimized_metric)
        assert mon.optimized_metric == optimized_metric
        assert mon.get_best_validation_score() == initial_best
        metrics_path = os.path.join(out_dir, C.METRICS_NAME)
        step = 0
        for train_metric, eval_metric, expected_improved in zip(train_metrics,
                                                                eval_metrics,
                                                                improved_seq):
            step += 1  # checkpoints are 1-based
            mon.checkpoint_callback(step, train_metric)
            assert len(mon.metrics) == step
            assert mon.metrics[-1] == {k + "-train": v for k, v in train_metric.items()}
            improved, _best_checkpoint = mon.eval_end_callback(step, DummyMetric(eval_metric))
            # Validation metrics must have been merged into the latest record.
            assert {k + "-val" for k in eval_metric.keys()} <= mon.metrics[-1].keys()
            assert improved == expected_improved
        assert os.path.exists(metrics_path)
        _compare_metrics(utils.read_metrics_file(metrics_path), mon.metrics)
def _compare_metrics(a, b):
    """Assert two lists of metric dicts are pairwise equal.

    :param a: List of {metric_name: value} dicts, one per checkpoint.
    :param b: List of the same shape; values compared approximately.
    """
    assert len(a) == len(b)
    for x, y in zip(a, b):
        assert len(x.items()) == len(y.items())
        for (xk, xv), (yk, yv) in zip(sorted(x.items()), sorted(y.items())):
            assert xk == yk
            # BUG FIX: the original `assert pytest.approx(xv, yv)` only checked
            # the truthiness of the approx object, so it could never fail.
            # Compare the value against the approx wrapper explicitly.
            assert xv == pytest.approx(yv)
def test_bleu_requires_checkpoint_decoder():
    """Optimizing for BLEU without a CheckpointDecoder must raise SockeyeError."""
    with pytest.raises(utils.SockeyeError) as excinfo, tempfile.TemporaryDirectory() as out_dir:
        callback.TrainingMonitor(batch_size=1,
                                 output_folder=out_dir,
                                 optimized_metric='bleu',
                                 cp_decoder=None)
    assert "bleu requires CheckpointDecoder" == str(excinfo.value)
| 44.151163 | 118 | 0.631551 |
import os
import tempfile
import numpy as np
import pytest
from sockeye import callback
from sockeye import constants as C
from sockeye import utils
test_constants = [('perplexity', np.inf,
[{'perplexity': 100.0, '_': 42}, {'perplexity': 50.0}, {'perplexity': 60.0}, {'perplexity': 80.0}],
[{'perplexity': 200.0}, {'perplexity': 100.0}, {'perplexity': 100.001}, {'perplexity': 99.99}],
[True, True, False, True]),
('accuracy', 0.0,
[{'accuracy': 100.0}, {'accuracy': 50.0}, {'accuracy': 60.0}, {'accuracy': 80.0}],
[{'accuracy': 200.0}, {'accuracy': 100.0}, {'accuracy': 100.001}, {'accuracy': 99.99}],
[True, False, False, False])]
class DummyMetric:
def __init__(self, metric_dict):
self.metric_dict = metric_dict
def get_name_value(self):
for metric_name, value in self.metric_dict.items():
yield metric_name, value
@pytest.mark.parametrize("optimized_metric, initial_best, train_metrics, eval_metrics, improved_seq",
test_constants)
def test_callback(optimized_metric, initial_best, train_metrics, eval_metrics, improved_seq):
with tempfile.TemporaryDirectory() as tmpdir:
batch_size = 32
monitor = callback.TrainingMonitor(batch_size=batch_size,
output_folder=tmpdir,
optimized_metric=optimized_metric)
assert monitor.optimized_metric == optimized_metric
assert monitor.get_best_validation_score() == initial_best
metrics_fname = os.path.join(tmpdir, C.METRICS_NAME)
for checkpoint, (train_metric, eval_metric, expected_improved) in enumerate(
zip(train_metrics, eval_metrics, improved_seq), 1):
monitor.checkpoint_callback(checkpoint, train_metric)
assert len(monitor.metrics) == checkpoint
assert monitor.metrics[-1] == {k + "-train": v for k, v in train_metric.items()}
improved, best_checkpoint = monitor.eval_end_callback(checkpoint, DummyMetric(eval_metric))
assert {k + "-val" for k in eval_metric.keys()} <= monitor.metrics[-1].keys()
assert improved == expected_improved
assert os.path.exists(metrics_fname)
metrics = utils.read_metrics_file(metrics_fname)
_compare_metrics(metrics, monitor.metrics)
def _compare_metrics(a, b):
assert len(a) == len(b)
for x, y in zip(a, b):
assert len(x.items()) == len(y.items())
for (xk, xv), (yk, yv) in zip(sorted(x.items()), sorted(y.items())):
assert xk == yk
assert pytest.approx(xv, yv)
def test_bleu_requires_checkpoint_decoder():
with pytest.raises(utils.SockeyeError) as e, tempfile.TemporaryDirectory() as tmpdir:
callback.TrainingMonitor(batch_size=1,
output_folder=tmpdir,
optimized_metric='bleu',
cp_decoder=None)
assert "bleu requires CheckpointDecoder" == str(e.value)
| true | true |
1c32915066bc8754e129df0f97a3b2fc00012555 | 6,574 | py | Python | _setuputils.py | hackaugusto/gevent | ab5272c5b6fade84ea001f579748898a6d56d5ba | [
"MIT"
] | null | null | null | _setuputils.py | hackaugusto/gevent | ab5272c5b6fade84ea001f579748898a6d56d5ba | [
"MIT"
] | null | null | null | _setuputils.py | hackaugusto/gevent | ab5272c5b6fade84ea001f579748898a6d56d5ba | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
gevent build utilities.
"""
from __future__ import print_function, absolute_import, division
import re
import os
import os.path
import sys
from subprocess import check_call
from glob import glob
from setuptools import Extension as _Extension
from setuptools.command.build_ext import build_ext
## Exported configurations

# True when running under PyPy (has its own version info attribute).
PYPY = hasattr(sys, 'pypy_version_info')
WIN = sys.platform.startswith('win')

# CI detection: these env vars are set by the respective CI services.
RUNNING_ON_TRAVIS = os.environ.get('TRAVIS')
RUNNING_ON_APPVEYOR = os.environ.get('APPVEYOR')
RUNNING_ON_CI = RUNNING_ON_TRAVIS or RUNNING_ON_APPVEYOR

# Extra link libraries / preprocessor macros shared by extensions.
LIBRARIES = []
DEFINE_MACROS = []

if WIN:
    # Winsock, plus a larger-than-default select() FD set on Windows.
    LIBRARIES += ['ws2_32']
    DEFINE_MACROS += [('FD_SETSIZE', '1024'), ('_WIN32', '1')]

### File handling

# Directory containing this file; used for repo-relative paths.
THIS_DIR = os.path.dirname(__file__)
def quoted_abspath(*segments):
    """Join *segments*, absolutize the result, and wrap it in double quotes."""
    joined = os.path.join(*segments)
    return '"{}"'.format(os.path.abspath(joined))
def read(name, *args):
    """Read a file path relative to this file; *args* is forwarded to file.read()."""
    target = os.path.join(THIS_DIR, name)
    with open(target) as f:
        return f.read(*args)
def read_version(name="src/gevent/__init__.py"):
    """Extract and return the __version__ string from *name* (repo-relative)."""
    match = re.search(r"__version__\s*=\s*'(.*)'", read(name), re.M)
    version = match.group(1)
    assert version, "could not read version"
    return version
def dep_abspath(depname, *extra):
    """Absolute path to the bundled dependency *depname* under deps/, plus *extra* segments."""
    parts = ('deps', depname) + extra
    return os.path.abspath(os.path.join(*parts))
def quoted_dep_abspath(depname):
    """Like dep_abspath, but double-quoted for use on a shell command line."""
    dep_path = dep_abspath(depname)
    return quoted_abspath(dep_path)
def glob_many(*globs):
    """
    Return a sorted list of every path matching any of the glob patterns.
    """
    return sorted(match
                  for pattern in globs
                  for match in glob(pattern))
## Configuration
def _parse_environ(key):
value = os.environ.get(key)
if not value:
return
value = value.lower().strip()
if value in ('1', 'true', 'on', 'yes'):
return True
elif value in ('0', 'false', 'off', 'no'):
return False
raise ValueError('Environment variable %r has invalid value %r. '
'Please set it to 1, 0 or an empty string' % (key, value))
# Build-time opt-outs, settable from the environment (1/0/empty tri-state).
IGNORE_CFFI = _parse_environ("GEVENT_NO_CFFI_BUILD")
SKIP_LIBUV = _parse_environ('GEVENT_NO_LIBUV_BUILD')
def _get_config_value(key, defkey, path=None):
    """
    Find a boolean value, configured in the environment at *key* or
    *defkey* (typically, *defkey* is shared by several calls). If neither
    is set, fall back to whether *path* exists (False when no path given).
    """
    for env_key in (key, defkey):
        value = _parse_environ(env_key)
        if value is not None:
            return value
    if path is None:
        return False
    return os.path.exists(path)
def should_embed(dep_name):
    """
    Whether the dependency *dep_name* should be built into gevent.

    Environment keys are derived from the dep name: libev becomes
    LIBEV_EMBED, c-ares becomes CARES_EMBED; EMBED is the shared default key.
    """
    env_key = '%s_EMBED' % dep_name.replace('-', '').upper()
    return _get_config_value(env_key, 'EMBED', dep_abspath(dep_name))
## Headers
def make_universal_header(filename, *defines):
    """
    Rewrite each top-level `#define <name> ...` in *filename* (for names in
    *defines*) into an __LP64__-conditional define (8 on 64-bit, 4 on
    32-bit). Lines inside #ifdef blocks are left untouched.
    """
    prefixes = [('#define %s ' % name, name) for name in defines]
    with open(filename, 'r') as f:
        lines = f.read().split('\n')
    depth = 0  # current #ifdef nesting depth
    with open(filename, 'w') as f:
        for line in lines:
            if line.startswith('#ifdef'):
                depth += 1
            elif line.startswith('#endif'):
                depth -= 1
            elif not depth:
                for prefix, name in prefixes:
                    if line.startswith(prefix):
                        line = ('#ifdef __LP64__\n#define %s 8\n'
                                '#else\n#define %s 4\n#endif' % (name, name))
                        break
            print(line, file=f)
# Processes
def _system(cmd, cwd=None, env=None, **kwargs):
sys.stdout.write('Running %r in %s\n' % (cmd, cwd or os.getcwd()))
sys.stdout.flush()
if 'shell' not in kwargs:
kwargs['shell'] = True
env = env or os.environ.copy()
if env.get('CC', '').startswith('ccache '):
# Running configure scripts under ccache just adds overhead.
env['CC'] = env['CC'][7:]
return check_call(cmd, cwd=cwd, env=env, **kwargs)
def system(cmd, cwd=None, env=None, **kwargs):
    """Run *cmd* via _system and abort the build process if it reports failure."""
    rc = _system(cmd, cwd=cwd, env=env, **kwargs)
    if rc:
        sys.exit(1)
# Cython
try:
    from Cython.Build import cythonize
except ImportError:
    # Cython is not installed: the pregenerated .c files had better already
    # exist. Based on code from
    # http://cython.readthedocs.io/en/latest/src/reference/compilation.html#distributing-cython-modules
    def cythonize(extensions, **_kwargs):
        # Fallback: map each .pyx/.py source to its pregenerated .c file,
        # mutating each extension's source list in place.
        for extension in extensions:
            sources = []
            for sfile in extension.sources:
                path, ext = os.path.splitext(sfile)
                if ext in ('.pyx', '.py'):
                    ext = '.c'
                    sfile = path + ext
                sources.append(sfile)
            extension.sources[:] = sources
        return extensions
def cythonize1(ext):
    """Cythonize a single Extension, carrying over gevent's custom attributes
    ('configure' and 'optional') that cythonize does not preserve."""
    cythonized = cythonize([ext],
                           include_path=['src/gevent', 'src/gevent/libev', 'src/gevent/resolver'])[0]
    for attr in ('configure', 'optional'):
        if hasattr(ext, attr):
            setattr(cythonized, attr, getattr(ext, attr))
    return cythonized
## Distutils extensions
class BuildFailed(Exception):
    """Raised when compiling an *optional* extension fails, so the build can continue."""
    pass
from distutils.errors import CCompilerError, DistutilsExecError, DistutilsPlatformError # pylint:disable=no-name-in-module,import-error
# Compiler/toolchain failures that mark a single extension build as failed.
ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError, IOError)
class ConfiguringBuildExt(build_ext):
    """build_ext that runs a per-extension 'configure' hook before compiling
    and converts compile failures of 'optional' extensions into BuildFailed."""

    def gevent_prepare(self, ext):
        """Invoke ext.configure(self, ext) if the extension defines one."""
        prepare = getattr(ext, 'configure', None)
        if prepare:
            prepare(self, ext)

    def build_extension(self, ext):
        self.gevent_prepare(ext)
        try:
            return build_ext.build_extension(self, ext)
        except ext_errors:
            if getattr(ext, 'optional', False):
                raise BuildFailed()
            raise
class Extension(_Extension):
    """setuptools Extension subclass that predeclares the attributes gevent's
    build code touches, mostly to keep pylint happy."""

    def __init__(self, *args, **kwargs):
        self.libraries = []
        self.define_macros = []
        # setuptools' Extension is an old-style class on Python 2, so
        # super() cannot be used here.
        _Extension.__init__(self, *args, **kwargs)  # pylint:disable=no-member,non-parent-init-called
| 29.479821 | 135 | 0.634773 |
from __future__ import print_function, absolute_import, division
import re
import os
import os.path
import sys
from subprocess import check_call
from glob import glob
from setuptools import Extension as _Extension
from setuptools.command.build_ext import build_ext
py_version_info')
WIN = sys.platform.startswith('win')
RUNNING_ON_TRAVIS = os.environ.get('TRAVIS')
RUNNING_ON_APPVEYOR = os.environ.get('APPVEYOR')
RUNNING_ON_CI = RUNNING_ON_TRAVIS or RUNNING_ON_APPVEYOR
LIBRARIES = []
DEFINE_MACROS = []
if WIN:
LIBRARIES += ['ws2_32']
DEFINE_MACROS += [('FD_SETSIZE', '1024'), ('_WIN32', '1')]
file__)
def quoted_abspath(*segments):
return '"' + os.path.abspath(os.path.join(*segments)) + '"'
def read(name, *args):
with open(os.path.join(THIS_DIR, name)) as f:
return f.read(*args)
def read_version(name="src/gevent/__init__.py"):
contents = read(name)
version = re.search(r"__version__\s*=\s*'(.*)'", contents, re.M).group(1)
assert version, "could not read version"
return version
def dep_abspath(depname, *extra):
return os.path.abspath(os.path.join('deps', depname, *extra))
def quoted_dep_abspath(depname):
return quoted_abspath(dep_abspath(depname))
def glob_many(*globs):
result = []
for pattern in globs:
result.extend(glob(pattern))
return sorted(result)
viron(key):
value = os.environ.get(key)
if not value:
return
value = value.lower().strip()
if value in ('1', 'true', 'on', 'yes'):
return True
elif value in ('0', 'false', 'off', 'no'):
return False
raise ValueError('Environment variable %r has invalid value %r. '
'Please set it to 1, 0 or an empty string' % (key, value))
IGNORE_CFFI = _parse_environ("GEVENT_NO_CFFI_BUILD")
SKIP_LIBUV = _parse_environ('GEVENT_NO_LIBUV_BUILD')
def _get_config_value(key, defkey, path=None):
value = _parse_environ(key)
if value is None:
value = _parse_environ(defkey)
if value is not None:
return value
return os.path.exists(path) if path is not None else False
def should_embed(dep_name):
path = dep_abspath(dep_name)
defkey = 'EMBED'
key = dep_name.replace('-', '').upper() + '_' + defkey
return _get_config_value(key, defkey, path)
e_universal_header(filename, *defines):
defines = [('#define %s ' % define, define) for define in defines]
with open(filename, 'r') as f:
lines = f.read().split('\n')
ifdef = 0
with open(filename, 'w') as f:
for line in lines:
if line.startswith('#ifdef'):
ifdef += 1
elif line.startswith('#endif'):
ifdef -= 1
elif not ifdef:
for prefix, define in defines:
if line.startswith(prefix):
line = '#ifdef __LP64__\n#define %s 8\n#else\n#define %s 4\n#endif' % (define, define)
break
print(line, file=f)
def _system(cmd, cwd=None, env=None, **kwargs):
sys.stdout.write('Running %r in %s\n' % (cmd, cwd or os.getcwd()))
sys.stdout.flush()
if 'shell' not in kwargs:
kwargs['shell'] = True
env = env or os.environ.copy()
if env.get('CC', '').startswith('ccache '):
env['CC'] = env['CC'][7:]
return check_call(cmd, cwd=cwd, env=env, **kwargs)
def system(cmd, cwd=None, env=None, **kwargs):
if _system(cmd, cwd=cwd, env=env, **kwargs):
sys.exit(1)
try:
from Cython.Build import cythonize
except ImportError:
s, **_kwargs):
for extension in extensions:
sources = []
for sfile in extension.sources:
path, ext = os.path.splitext(sfile)
if ext in ('.pyx', '.py'):
ext = '.c'
sfile = path + ext
sources.append(sfile)
extension.sources[:] = sources
return extensions
def cythonize1(ext):
new_ext = cythonize([ext], include_path=['src/gevent', 'src/gevent/libev', 'src/gevent/resolver'])[0]
for optional_attr in ('configure', 'optional'):
if hasattr(ext, optional_attr):
setattr(new_ext, optional_attr,
getattr(ext, optional_attr))
return new_ext
eption):
pass
from distutils.errors import CCompilerError, DistutilsExecError, DistutilsPlatformError
ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError, IOError)
class ConfiguringBuildExt(build_ext):
def gevent_prepare(self, ext):
configure = getattr(ext, 'configure', None)
if configure:
configure(self, ext)
def build_extension(self, ext):
self.gevent_prepare(ext)
try:
result = build_ext.build_extension(self, ext)
except ext_errors:
if getattr(ext, 'optional', False):
raise BuildFailed()
else:
raise
return result
class Extension(_Extension):
def __init__(self, *args, **kwargs):
self.libraries = []
self.define_macros = []
_Extension.__init__(self, *args, **kwargs) # pylint:disable=no-member,non-parent-init-called
| true | true |
1c32921f448fde06c33182baa152d1c193fd83fc | 819 | py | Python | Problems/seven_sided_die.py | rohanaurora/daily-coding-challenges | a40340e5a4f15a5f7b66a64f98dc149a66ebd945 | [
"MIT"
] | null | null | null | Problems/seven_sided_die.py | rohanaurora/daily-coding-challenges | a40340e5a4f15a5f7b66a64f98dc149a66ebd945 | [
"MIT"
] | null | null | null | Problems/seven_sided_die.py | rohanaurora/daily-coding-challenges | a40340e5a4f15a5f7b66a64f98dc149a66ebd945 | [
"MIT"
] | null | null | null | # Seven-sided dice from five-sided dice
#
# Given an equal-probability generator of one of the integers 1 to 5 as dice5, create dice7 that generates a
# pseudo-random integer from 1 to 7 in equal probability using only dice5 as a source of random numbers,
# and check the distribution for at least one million calls using the function created in
# Simple Random Distribution Checker.
#
#
# Implementation suggestion: dice7 might call dice5 twice, re-call if four of the 25 combinations are given,
# otherwise split the other 21 combinations into 7 groups of three, and return the group index from the rolls.
from random import randint
def dice5():
    """Fair five-sided die: uniform random integer in [1, 5]."""
    roll = randint(1, 5)
    return roll
def dice7():
    """Fair seven-sided die built from two dice5 rolls via rejection sampling.

    Two dice5 rolls give a uniform value in 0..24; values 21..24 (4 of the
    25 combinations) are rejected and rerolled, and the remaining 21 split
    evenly into 7 groups of three.
    """
    while True:
        combined = dice5() + dice5() * 5 - 6  # uniform on 0..24
        if combined < 21:
            return combined % 7 + 1
# Demo: roll the synthesized seven-sided die once and show the result.
s = dice7()
print(s)
from random import randint
def dice5():
return randint(1, 5)
def dice7():
r = dice5() + dice5() * 5 - 6
if r < 21:
return (r % 7) + 1
else:
return dice7()
s = dice7()
print(s) | true | true |
1c329252ff1f3f27a711b8961b57e678169c29da | 2,720 | py | Python | grism_analysis_main.py | WWGolay/grism-gui | 1c689b622724c1bd6afc73d40d197366e86d7bb2 | [
"MIT"
] | null | null | null | grism_analysis_main.py | WWGolay/grism-gui | 1c689b622724c1bd6afc73d40d197366e86d7bb2 | [
"MIT"
] | null | null | null | grism_analysis_main.py | WWGolay/grism-gui | 1c689b622724c1bd6afc73d40d197366e86d7bb2 | [
"MIT"
] | null | null | null | '''
grism_analysis_main
Last Updated 5/2/22
University Of Iowa Astronomy Department
AJ Serck
Will Golay
'''
# Python imports
from datetime import date, timedelta, datetime
import astropy.io.fits as pyfits
import os
# Custom libraries
from bin.grism_analysis_web import grism_web
from bin.grism_tools_lib import grism_tools
from config import setup_config
def main():
    """Locate a FITS image and calibration CSV, then launch the grism GUI.

    Reads paths and flags from grism-gui.cfg, resolves the newest
    calibration file (optionally searching backwards from the image's
    observation date), and hands both to grism_tools / grism_web.
    """
    cfg = setup_config.read('grism-gui.cfg')
    default_temp_dir = cfg.get('default', 'default_temp_dir')
    defaultDir = cfg.get('default', 'default_calibration_dir')
    day_iter = int(cfg.get('default', 'find_calib_by_date'))
    path_to_fits = default_temp_dir
    # Interactive upload is currently disabled: the original code computed
    # bool(cfg.get('default', 'take_input') == 'True') and then immediately
    # overrode it with False, so only the override is kept here.
    take_input = False
    web_analyzer = grism_web()
    if take_input:
        fits_image, calibration, path = web_analyzer.get_fits()  # Get initial fits image
        print("PATH:" + path)
        if path != "":
            path_to_fits = path
        else:
            # Persist the uploaded image so it can be analyzed from disk.
            with open(default_temp_dir + '/im.fts', 'wb') as binary_file:
                binary_file.write(fits_image['content'])
            path_to_fits += '/im.fts'
        if calibration is None:  # TODO: Add advanced option on first page for entry of custom calibration file, otherwise search for one
            # Read the observation date from the FITS header and walk
            # backwards day by day to find the most recent calibration file.
            hdulist = pyfits.open(path_to_fits)
            fitsDate = hdulist[0].header['DATE-OBS']
            startDate = date(int(fitsDate[0:4]), int(fitsDate[5:7]), int(fitsDate[8:10]))
            # Default first: previously cal_file was left unbound (NameError)
            # when the date search found nothing within day_iter days.
            cal_file = default_temp_dir + '/cal.csv'
            if day_iter > 0:
                for testDate in (startDate - timedelta(n) for n in range(day_iter)):
                    candidate = defaultDir + 'grism_cal_6_' + testDate.strftime('%Y_%m_%d') + '.csv'
                    if os.path.isfile(candidate):
                        cal_file = candidate
                        break
        else:
            cal_file = default_temp_dir + '/cal.csv'
    else:
        path_to_fits += '/im.fts'
        cal_file = default_temp_dir + '/cal.csv'
    # os.path.exists() returns False rather than raising, so the original
    # `try: os.path.exists(cal_file) / except:` could never trigger the
    # error path; check the boolean result explicitly instead.
    if not os.path.exists(cal_file):
        web_analyzer.raise_calibration_error()
    grism_analyzer = grism_tools(path_to_fits, cal_file)  # instantiate analyzer with fits image and calibration file
    web_analyzer.run_analysis(grism_analyzer)
if __name__ == '__main__':
main() | 39.42029 | 136 | 0.640441 |
from datetime import date, timedelta, datetime
import astropy.io.fits as pyfits
import os
from bin.grism_analysis_web import grism_web
from bin.grism_tools_lib import grism_tools
from config import setup_config
def main():
cfg = setup_config.read('grism-gui.cfg')
default_temp_dir = cfg.get('default', 'default_temp_dir')
defaultDir = cfg.get('default', 'default_calibration_dir')
day_iter = int(cfg.get('default', 'find_calib_by_date'))
take_input = bool(cfg.get('default', 'take_input')=='True')
default_temp_dir = cfg.get('default', 'default_temp_dir')
path_to_fits = default_temp_dir
take_input = False
web_analyzer = grism_web()
if take_input:
fits_image, calibration,path = web_analyzer.get_fits()
print("PATH:" + path)
if path != "":
path_to_fits = path
else:
with open(default_temp_dir+'/im.fts', 'wb') as binary_file:
binary_file.write(fits_image['content'])
path_to_fits += '/im.fts'
if calibration == None:
hdulist = pyfits.open(path_to_fits)
fitsDate = hdulist[0].header['DATE-OBS']
startDate = date(int(fitsDate[0:4]), int(fitsDate[5:7]), int(fitsDate[8:10]))
if day_iter > 0:
for testDate in (startDate - timedelta(n) for n in range(day_iter)):
if os.path.isfile(defaultDir+'grism_cal_6_'+testDate.strftime('%Y_%m_%d')+'.csv'):
cal_file = defaultDir+'grism_cal_6_'+testDate.strftime('%Y_%m_%d')+'.csv'
break
else: continue
else:
cal_file = default_temp_dir+'/cal.csv'
else:
cal_file = default_temp_dir+'/cal.csv'
else:
path_to_fits += '/im.fts'
cal_file = default_temp_dir+'/cal.csv'
try: os.path.exists(cal_file)
except: web_analyzer.raise_calibration_error()
grism_analyzer = grism_tools(path_to_fits, cal_file)
web_analyzer.run_analysis(grism_analyzer)
if __name__ == '__main__':
main() | true | true |
1c3292b7fa0d90db80e2778bb80e56558ce08540 | 2,035 | py | Python | frappe/commands/redis_utils.py | oryxsolutions/frappe | d193ea22d17ca40d57432040a8afad72287d9e23 | [
"MIT"
] | null | null | null | frappe/commands/redis_utils.py | oryxsolutions/frappe | d193ea22d17ca40d57432040a8afad72287d9e23 | [
"MIT"
] | null | null | null | frappe/commands/redis_utils.py | oryxsolutions/frappe | d193ea22d17ca40d57432040a8afad72287d9e23 | [
"MIT"
] | null | null | null | import os
import click
import frappe
from frappe.installer import update_site_config
from frappe.utils.redis_queue import RedisQueue
@click.command("create-rq-users")
@click.option(
    "--set-admin-password",
    is_flag=True,
    default=False,
    help="Set new Redis admin(default user) password",
)
@click.option(
    "--use-rq-auth", is_flag=True, default=False, help="Enable Redis authentication for sites"
)
def create_rq_users(set_admin_password=False, use_rq_auth=False):
    """Create Redis Queue users and add to acl and app configs.
    acl config file will be used by redis server while starting the server
    and app config is used by app while connecting to redis server.
    """
    # The ACL file lives one level above the sites directory; redis-server
    # loads it at startup.
    acl_file_path = os.path.abspath("../config/redis_queue.acl")
    with frappe.init_site():
        acl_list, user_credentials = RedisQueue.gen_acl_list(set_admin_password=set_admin_password)
    with open(acl_file_path, "w") as f:
        f.writelines([acl + "\n" for acl in acl_list])
    # Persist the bench user's credentials in common_site_config.json so app
    # processes can authenticate when connecting to Redis.
    sites_path = os.getcwd()
    common_site_config_path = os.path.join(sites_path, "common_site_config.json")
    update_site_config(
        "rq_username",
        user_credentials["bench"][0],
        validate=False,
        site_config_path=common_site_config_path,
    )
    update_site_config(
        "rq_password",
        user_credentials["bench"][1],
        validate=False,
        site_config_path=common_site_config_path,
    )
    update_site_config(
        "use_rq_auth", use_rq_auth, validate=False, site_config_path=common_site_config_path
    )
    click.secho(
        "* ACL and site configs are updated with new user credentials. "
        "Please restart Redis Queue server to enable namespaces.",
        fg="green",
    )
    if set_admin_password:
        # NOTE(review): "PASWORD" looks misspelled, but this key is what gets
        # printed for the user's .bashrc -- presumably the rest of the system
        # reads the same name; confirm before renaming.
        env_key = "RQ_ADMIN_PASWORD"
        click.secho(
            "* Redis admin password is successfully set up. "
            "Include below line in .bashrc file for system to use",
            fg="green",
        )
        click.secho(f"`export {env_key}={user_credentials['default'][1]}`")
        click.secho(
            "NOTE: Please save the admin password as you "
            "can not access redis server without the password",
            fg="yellow",
        )
# Click commands exported to the bench CLI.
commands = [create_rq_users]
| 27.5 | 93 | 0.747912 | import os
import click
import frappe
from frappe.installer import update_site_config
from frappe.utils.redis_queue import RedisQueue
@click.command("create-rq-users")
@click.option(
"--set-admin-password",
is_flag=True,
default=False,
help="Set new Redis admin(default user) password",
)
@click.option(
"--use-rq-auth", is_flag=True, default=False, help="Enable Redis authentication for sites"
)
def create_rq_users(set_admin_password=False, use_rq_auth=False):
acl_file_path = os.path.abspath("../config/redis_queue.acl")
with frappe.init_site():
acl_list, user_credentials = RedisQueue.gen_acl_list(set_admin_password=set_admin_password)
with open(acl_file_path, "w") as f:
f.writelines([acl + "\n" for acl in acl_list])
sites_path = os.getcwd()
common_site_config_path = os.path.join(sites_path, "common_site_config.json")
update_site_config(
"rq_username",
user_credentials["bench"][0],
validate=False,
site_config_path=common_site_config_path,
)
update_site_config(
"rq_password",
user_credentials["bench"][1],
validate=False,
site_config_path=common_site_config_path,
)
update_site_config(
"use_rq_auth", use_rq_auth, validate=False, site_config_path=common_site_config_path
)
click.secho(
"* ACL and site configs are updated with new user credentials. "
"Please restart Redis Queue server to enable namespaces.",
fg="green",
)
if set_admin_password:
env_key = "RQ_ADMIN_PASWORD"
click.secho(
"* Redis admin password is successfully set up. "
"Include below line in .bashrc file for system to use",
fg="green",
)
click.secho(f"`export {env_key}={user_credentials['default'][1]}`")
click.secho(
"NOTE: Please save the admin password as you "
"can not access redis server without the password",
fg="yellow",
)
commands = [create_rq_users]
| true | true |
1c3294341dafd442f43d58b91e40f15dec483b03 | 167 | py | Python | neuro_helper/abstract/__init__.py | mehrshadg/neuro-data-helper | 7a17ae7e4cca1a5652c9c0182c209b084073cf1f | [
"MIT"
] | null | null | null | neuro_helper/abstract/__init__.py | mehrshadg/neuro-data-helper | 7a17ae7e4cca1a5652c9c0182c209b084073cf1f | [
"MIT"
] | null | null | null | neuro_helper/abstract/__init__.py | mehrshadg/neuro-data-helper | 7a17ae7e4cca1a5652c9c0182c209b084073cf1f | [
"MIT"
] | null | null | null | from .map import *
# Explicit public API: the names re-exported from the star-import above.
__all__ = ["Space", "TopoName", "TemplateName", "HierarchyName", "TemplateData",
           "AbstractMap", "TemplateMap", "TopoMap", "file_names"]
| 33.4 | 80 | 0.652695 | from .map import *
__all__ = ["Space", "TopoName", "TemplateName", "HierarchyName", "TemplateData",
"AbstractMap", "TemplateMap", "TopoMap", "file_names"]
| true | true |
1c3294680cf5595fc4436eeac807d9840ea677b4 | 3,448 | py | Python | python/smqtk/bin/minibatch_kmeans_clusters.py | jbeezley/SMQTK | e6b00f94be95f39bbca52a7983ac3d6d1f86f847 | [
"BSD-3-Clause"
] | 1 | 2021-04-10T10:51:26.000Z | 2021-04-10T10:51:26.000Z | python/smqtk/bin/minibatch_kmeans_clusters.py | jbeezley/SMQTK | e6b00f94be95f39bbca52a7983ac3d6d1f86f847 | [
"BSD-3-Clause"
] | 3 | 2021-06-08T22:19:14.000Z | 2022-03-12T00:46:44.000Z | python/smqtk/bin/minibatch_kmeans_clusters.py | DigitalCompanion/SMQTK | fc9404b69150ef44f24423844bc80735c0c2b669 | [
"BSD-3-Clause"
] | null | null | null | """
Script for generating clusters from descriptors in a given index using the
mini-batch KMeans implementation from Scikit-learn
(http://scikit-learn.org/stable/modules/generated/sklearn.cluster.MiniBatchKMeans.html).
By the nature of Scikit-learn's MiniBatchKMeans implementation, euclidean
distance is used to measure distance between descriptors.
"""
import logging
import os
import numpy
from six.moves import cPickle
from sklearn.cluster import MiniBatchKMeans
from smqtk.compute_functions import mb_kmeans_build_apply
from smqtk.representation.descriptor_index import get_descriptor_index_impls
from smqtk.utils import Configurable
from smqtk.utils.bin_utils import utility_main_helper, basic_cli_parser
from smqtk.utils.file_utils import safe_create_dir
from smqtk.utils.plugin import make_config, from_plugin_config
def default_config():
    """Build the default configuration dict for this utility.

    Returns a dict with the introspected MiniBatchKMeans constructor
    parameters, a descriptor-index plugin configuration, an optional
    initial-fit sample size and the output path for saved centroids.
    """
    # Trick for mixing in our Configurable class API on top of scikit-learn's
    # MiniBatchKMeans class in order to introspect construction parameters.
    # We never construct this class so we do not need to implement "pure
    # virtual" instance methods.
    # noinspection PyAbstractClass
    class MBKTemp (MiniBatchKMeans, Configurable):
        pass
    c = {
        "minibatch_kmeans_params": MBKTemp.get_default_config(),
        "descriptor_index": make_config(get_descriptor_index_impls()),
        # Number of descriptors to run an initial fit with. This brings the
        # advantage of choosing a best initialization point from multiple.
        "initial_fit_size": 0,
        # Path to save generated KMeans centroids
        "centroids_output_filepath_npy": "centroids.npy"
    }
    # Change/Remove some KMeans params for more appropriate defaults
    del c['minibatch_kmeans_params']['compute_labels']
    del c['minibatch_kmeans_params']['verbose']
    c['minibatch_kmeans_params']['random_state'] = 0
    return c
def cli_parser():
    """Build the argument parser: shared basic CLI options plus the path the
    clustering class map is written to."""
    p = basic_cli_parser(__doc__)
    g_output = p.add_argument_group("output")
    g_output.add_argument('-o', '--output-map',
                          metavar="PATH",
                          help="Path to output the clustering class mapping "
                               "to. Saved as a pickle file with -1 format.")
    return p
def main():
    """Run mini-batch KMeans over a descriptor index and save the results.

    Saves the learned cluster centroids as a numpy file and the
    descriptor-to-cluster mapping as a pickle file.

    Raises:
        ValueError: if no output map path was given on the command line.
    """
    args = cli_parser().parse_args()
    config = utility_main_helper(default_config, args)
    log = logging.getLogger(__name__)
    output_filepath = args.output_map
    if not output_filepath:
        raise ValueError("No path given for output map file (pickle).")
    #: :type: smqtk.representation.DescriptorIndex
    index = from_plugin_config(config['descriptor_index'],
                               get_descriptor_index_impls())
    mbkm = MiniBatchKMeans(verbose=args.verbose,
                           compute_labels=False,
                           **config['minibatch_kmeans_params'])
    initial_fit_size = int(config['initial_fit_size'])
    d_classes = mb_kmeans_build_apply(index, mbkm, initial_fit_size)
    log.info("Saving KMeans centroids to: %s",
             config['centroids_output_filepath_npy'])
    numpy.save(config['centroids_output_filepath_npy'], mbkm.cluster_centers_)
    log.info("Saving result classification map to: %s", output_filepath)
    safe_create_dir(os.path.dirname(output_filepath))
    # Pickle needs a binary-mode handle: text mode ('w') raises TypeError on
    # Python 3 and corrupts output on Windows under Python 2.
    with open(output_filepath, 'wb') as f:
        cPickle.dump(d_classes, f, -1)
    log.info("Done")
if __name__ == '__main__':
main()
| 35.546392 | 88 | 0.710847 | import logging
import os
import numpy
from six.moves import cPickle
from sklearn.cluster import MiniBatchKMeans
from smqtk.compute_functions import mb_kmeans_build_apply
from smqtk.representation.descriptor_index import get_descriptor_index_impls
from smqtk.utils import Configurable
from smqtk.utils.bin_utils import utility_main_helper, basic_cli_parser
from smqtk.utils.file_utils import safe_create_dir
from smqtk.utils.plugin import make_config, from_plugin_config
def default_config():
# MiniBatchKMeans class in order to introspect construction parameters.
# We never construct this class so we do not need to implement "pure
# virtual" instance methods.
# noinspection PyAbstractClass
class MBKTemp (MiniBatchKMeans, Configurable):
pass
c = {
"minibatch_kmeans_params": MBKTemp.get_default_config(),
"descriptor_index": make_config(get_descriptor_index_impls()),
# Number of descriptors to run an initial fit with. This brings the
# advantage of choosing a best initialization point from multiple.
"initial_fit_size": 0,
# Path to save generated KMeans centroids
"centroids_output_filepath_npy": "centroids.npy"
}
# Change/Remove some KMeans params for more appropriate defaults
del c['minibatch_kmeans_params']['compute_labels']
del c['minibatch_kmeans_params']['verbose']
c['minibatch_kmeans_params']['random_state'] = 0
return c
def cli_parser():
p = basic_cli_parser(__doc__)
g_output = p.add_argument_group("output")
g_output.add_argument('-o', '--output-map',
metavar="PATH",
help="Path to output the clustering class mapping "
"to. Saved as a pickle file with -1 format.")
return p
def main():
args = cli_parser().parse_args()
config = utility_main_helper(default_config, args)
log = logging.getLogger(__name__)
output_filepath = args.output_map
if not output_filepath:
raise ValueError("No path given for output map file (pickle).")
#: :type: smqtk.representation.DescriptorIndex
index = from_plugin_config(config['descriptor_index'],
get_descriptor_index_impls())
mbkm = MiniBatchKMeans(verbose=args.verbose,
compute_labels=False,
**config['minibatch_kmeans_params'])
initial_fit_size = int(config['initial_fit_size'])
d_classes = mb_kmeans_build_apply(index, mbkm, initial_fit_size)
log.info("Saving KMeans centroids to: %s",
config['centroids_output_filepath_npy'])
numpy.save(config['centroids_output_filepath_npy'], mbkm.cluster_centers_)
log.info("Saving result classification map to: %s", output_filepath)
safe_create_dir(os.path.dirname(output_filepath))
with open(output_filepath, 'w') as f:
cPickle.dump(d_classes, f, -1)
log.info("Done")
if __name__ == '__main__':
main()
| true | true |
1c3294df2c05ca1b567ad8b831cd7b9923bea59b | 5,032 | py | Python | venv/lib/python3.7/site-packages/rinoh/font/opentype/gsub.py | rodrez/jobpy | 02da7ed62a5f057ce66f7b3c0a5f13223363af9a | [
"MIT"
] | 3 | 2020-05-16T01:54:47.000Z | 2021-04-27T01:37:08.000Z | venv/lib/python3.7/site-packages/rinoh/font/opentype/gsub.py | rodrez/jobpy | 02da7ed62a5f057ce66f7b3c0a5f13223363af9a | [
"MIT"
] | 7 | 2020-05-11T01:45:45.000Z | 2022-03-02T14:58:30.000Z | venv/lib/python3.7/site-packages/rinoh/font/opentype/gsub.py | rodrez/jobpy | 02da7ed62a5f057ce66f7b3c0a5f13223363af9a | [
"MIT"
] | 1 | 2020-07-19T04:31:01.000Z | 2020-07-19T04:31:01.000Z | # This file is part of rinohtype, the Python document preparation system.
#
# Copyright (c) Brecht Machiels.
#
# Use of this source code is subject to the terms of the GNU Affero General
# Public License v3. See the LICENSE file or http://www.gnu.org/licenses/.
from .parse import OpenTypeTable, MultiFormatTable
from .parse import uint16, ushort, ulong, glyph_id, array, indirect
from .parse import context_array, indirect_array
from .layout import LayoutTable
from .layout import Coverage
# Single substitution (subtable format 1)
class SingleSubTable(MultiFormatTable):
    """GSUB LookupType 1: single glyph substitution (formats 1 and 2)."""
    entries = [('SubstFormat', uint16),
               ('Coverage', indirect(Coverage))]
    formats = {1: [('DeltaGlyphID', glyph_id)],
               2: [('GlyphCount', uint16),
                   ('Substitute', context_array(glyph_id, 'GlyphCount'))]}
    def lookup(self, glyph_id):
        """Return the substitute glyph ID for `glyph_id`.

        Raises KeyError when the glyph is not in this subtable's coverage.
        """
        try:
            index = self['Coverage'].index(glyph_id)
        except ValueError:
            raise KeyError
        if self['SubstFormat'] == 1:
            # Format 1: per the OpenType spec, deltaGlyphID is added to the
            # input glyph ID itself (modulo 65536), not to the coverage
            # index as the previous code did.
            return (glyph_id + self['DeltaGlyphID']) % 65536
        else:
            return self['Substitute'][index]
# Multiple subtitution (subtable format 2)
class Sequence(OpenTypeTable):
    """Output glyph sequence replacing one covered glyph (multiple subst.)."""
    entries = [('GlyphCount', uint16),
               ('Substitute', context_array(glyph_id, 'GlyphCount'))]
class MultipleSubTable(OpenTypeTable):
    """GSUB LookupType 2: one-to-many (multiple) substitution (format 1)."""
    entries = [('SubstFormat', uint16),
               ('Coverage', indirect(Coverage)),
               ('SequenceCount', uint16),
               ('Sequence', context_array(Sequence, 'SequenceCount'))]
    def lookup(self, glyph_id):
        """Not implemented; still raises KeyError for uncovered glyphs."""
        try:
            # Coverage check only; `index` stays unused until the sequence
            # lookup is actually implemented.
            index = self['Coverage'].index(glyph_id)
        except ValueError:
            raise KeyError
        raise NotImplementedError
# Alternate subtitution (subtable format 3)
class AlternateSubTable(OpenTypeTable):
    """GSUB LookupType 3: alternate substitution (parsing not implemented)."""
    pass
# Ligature substitution (subtable format 4)
class Ligature(OpenTypeTable):
    """One ligature record: the result glyph plus its trailing components."""
    entries = [('LigGlyph', glyph_id),
               ('CompCount', uint16)]
    def __init__(self, file, file_offset):
        super().__init__(file, file_offset)
        # CompCount includes the first glyph (implied by the enclosing
        # LigatureSet's coverage), so only CompCount - 1 IDs follow on disk.
        self['Component'] = array(glyph_id, self['CompCount'] - 1)(file)
class LigatureSet(OpenTypeTable):
    """All ligatures that begin with the same (covered) first glyph."""
    entries = [('LigatureCount', uint16),
               ('Ligature', indirect_array(Ligature, 'LigatureCount'))]
class LigatureSubTable(OpenTypeTable):
    """GSUB LookupType 4: ligature substitution (format 1).

    `lookup` currently supports two-glyph ligatures only: it matches the
    ligature whose single stored component equals the second glyph.
    """
    entries = [('SubstFormat', uint16),
               ('Coverage', indirect(Coverage)),
               ('LigSetCount', uint16),
               ('LigatureSet', indirect_array(LigatureSet, 'LigSetCount'))]
    def lookup(self, a_id, b_id):
        """Return the ligature glyph for the pair (a_id, b_id); KeyError if none."""
        try:
            index = self['Coverage'].index(a_id)
        except ValueError:
            raise KeyError
        ligature_set = self['LigatureSet'][index]
        for ligature in ligature_set['Ligature']:
            # Exact single-component match, i.e. only 2-glyph ligatures.
            if ligature['Component'] == [b_id]:
                return ligature['LigGlyph']
        raise KeyError
# Chaining contextual substitution (subtable format 6)
class ChainSubRule(OpenTypeTable):
    """Chaining context rule (GSUB LookupType 6); parsing not implemented.

    The commented-out `entries` below sketch the on-disk layout from the
    OpenType spec for when this gets implemented.
    """
    pass
##    entries = [('BacktrackGlyphCount', uint16),
##               ('Backtrack', context_array(glyph_id, 'BacktrackGlyphCount')),
##               ('InputGlyphCount', uint16),
##               ('Input', context_array(glyph_id, 'InputGlyphCount',
##                                       lambda count: count - 1)),
##               ('LookaheadGlyphCount', uint16),
##               ('LookAhead', context_array(glyph_id, 'LookaheadGlyphCount')),
##               ('SubstCount', uint16),
##               ('SubstLookupRecord', context_array(glyph_id, 'SubstCount'))]
class ChainSubRuleSet(OpenTypeTable):
    """Set of chaining rules starting with the same covered glyph.

    NOTE(review): the spec describes an *array* of ChainSubRule offsets
    here; a single indirect() looks incomplete -- confirm when LookupType 6
    support is finished.
    """
    entries = [('ChainSubRuleCount', uint16),
               ('ChainSubRule', indirect(ChainSubRule))]
class ChainingContextSubtable(MultiFormatTable):
    """GSUB LookupType 6: chaining contextual substitution (format 1 only)."""
    entries = [('SubstFormat', uint16)]
    formats = {1: [('Coverage', indirect(Coverage)),
                   ('ChainSubRuleSetCount', uint16),
                   ('ChainSubRuleSet', indirect_array(ChainSubRuleSet,
                                                      'ChainSubRuleSetCount'))]}
# Extension substitution (subtable format 7)
class ExtensionSubstitution(OpenTypeTable):
    """GSUB LookupType 7: extension wrapper delegating to the real subtable
    addressed by a 32-bit offset."""
    entries = [('SubstFormat', ushort),
               ('ExtensionLookupType', ushort),
               ('ExtensionOffset', ulong)]
    def __init__(self, file, file_offset=None):
        super().__init__(file, file_offset=file_offset)
        # Instantiate the wrapped subtable of the declared lookup type; the
        # extension offset is relative to the start of this table.
        subtable_class = GsubTable.lookup_types[self['ExtensionLookupType']]
        table_offset = file_offset + self['ExtensionOffset']
        self.subtable = subtable_class(file, table_offset)
    def lookup(self, *args, **kwargs):
        """Forward lookups unchanged to the wrapped subtable."""
        return self.subtable.lookup(*args, **kwargs)
class GsubTable(LayoutTable):
    """Glyph substitution table"""
    tag = 'GSUB'
    # Maps GSUB LookupType numbers to their subtable parser classes;
    # type 6 (chaining context) is not supported yet.
    lookup_types = {1: SingleSubTable,
                    2: MultipleSubTable,
                    3: AlternateSubTable,
                    4: LigatureSubTable,
                    #6: ChainingContextSubtable}
                    7: ExtensionSubstitution}
| 34.703448 | 80 | 0.61566 |
from .parse import OpenTypeTable, MultiFormatTable
from .parse import uint16, ushort, ulong, glyph_id, array, indirect
from .parse import context_array, indirect_array
from .layout import LayoutTable
from .layout import Coverage
class SingleSubTable(MultiFormatTable):
entries = [('SubstFormat', uint16),
('Coverage', indirect(Coverage))]
formats = {1: [('DeltaGlyphID', glyph_id)],
2: [('GlyphCount', uint16),
('Substitute', context_array(glyph_id, 'GlyphCount'))]}
def lookup(self, glyph_id):
try:
index = self['Coverage'].index(glyph_id)
except ValueError:
raise KeyError
if self['SubstFormat'] == 1:
return index + self['DeltaGlyphID']
else:
return self['Substitute'][index]
class Sequence(OpenTypeTable):
entries = [('GlyphCount', uint16),
('Substitute', context_array(glyph_id, 'GlyphCount'))]
class MultipleSubTable(OpenTypeTable):
entries = [('SubstFormat', uint16),
('Coverage', indirect(Coverage)),
('SequenceCount', uint16),
('Sequence', context_array(Sequence, 'SequenceCount'))]
def lookup(self, glyph_id):
try:
index = self['Coverage'].index(glyph_id)
except ValueError:
raise KeyError
raise NotImplementedError
class AlternateSubTable(OpenTypeTable):
pass
class Ligature(OpenTypeTable):
entries = [('LigGlyph', glyph_id),
('CompCount', uint16)]
def __init__(self, file, file_offset):
super().__init__(file, file_offset)
self['Component'] = array(glyph_id, self['CompCount'] - 1)(file)
class LigatureSet(OpenTypeTable):
entries = [('LigatureCount', uint16),
('Ligature', indirect_array(Ligature, 'LigatureCount'))]
class LigatureSubTable(OpenTypeTable):
entries = [('SubstFormat', uint16),
('Coverage', indirect(Coverage)),
('LigSetCount', uint16),
('LigatureSet', indirect_array(LigatureSet, 'LigSetCount'))]
def lookup(self, a_id, b_id):
try:
index = self['Coverage'].index(a_id)
except ValueError:
raise KeyError
ligature_set = self['LigatureSet'][index]
for ligature in ligature_set['Ligature']:
if ligature['Component'] == [b_id]:
return ligature['LigGlyph']
raise KeyError
class ChainSubRule(OpenTypeTable):
pass
:
entries = [('SubstFormat', ushort),
('ExtensionLookupType', ushort),
('ExtensionOffset', ulong)]
def __init__(self, file, file_offset=None):
super().__init__(file, file_offset=file_offset)
subtable_class = GsubTable.lookup_types[self['ExtensionLookupType']]
table_offset = file_offset + self['ExtensionOffset']
self.subtable = subtable_class(file, table_offset)
def lookup(self, *args, **kwargs):
return self.subtable.lookup(*args, **kwargs)
class GsubTable(LayoutTable):
tag = 'GSUB'
lookup_types = {1: SingleSubTable,
2: MultipleSubTable,
3: AlternateSubTable,
4: LigatureSubTable,
7: ExtensionSubstitution}
| true | true |
1c32955b469aa7291af353410301fd056959674e | 37,823 | py | Python | tests/commands/train_test.py | OhadRubin/allennlp | 7e61241d2720050d0f39376eebba713acd0b287b | [
"Apache-2.0"
] | null | null | null | tests/commands/train_test.py | OhadRubin/allennlp | 7e61241d2720050d0f39376eebba713acd0b287b | [
"Apache-2.0"
] | 35 | 2021-08-11T13:20:30.000Z | 2022-03-29T13:17:39.000Z | tests/commands/train_test.py | OhadRubin/allennlp | 7e61241d2720050d0f39376eebba713acd0b287b | [
"Apache-2.0"
] | null | null | null | import argparse
import copy
import json
import logging
import math
import os
import re
import shutil
from collections import OrderedDict, Counter
from typing import Optional, List, Dict, Any
import pytest
import torch
from allennlp.commands.train import Train, train_model, train_model_from_args, TrainModel
from allennlp.common import Params
from allennlp.common.checks import ConfigurationError
from allennlp.common.testing import AllenNlpTestCase, cpu_or_gpu
from allennlp.data import Vocabulary
from allennlp.data.data_loaders import TensorDict
from allennlp.models import load_archive, Model
from allennlp.models.archival import CONFIG_NAME
from allennlp.training import TrainerCallback, GradientDescentTrainer
from allennlp.training.learning_rate_schedulers import (
ExponentialLearningRateScheduler,
LearningRateScheduler,
)
# Paths to the tiny sequence-tagging fixtures used by the test configs below.
SEQUENCE_TAGGING_DATA_PATH = str(AllenNlpTestCase.FIXTURES_ROOT / "data" / "sequence_tagging.tsv")
SEQUENCE_TAGGING_SHARDS_PATH = str(AllenNlpTestCase.FIXTURES_ROOT / "data" / "shards" / "*")
@TrainerCallback.register("training_data_logger")
class TrainingDataLoggerOnBatchCallback(TrainerCallback):
    """Callback that logs the first token of every training instance seen."""
    def on_batch(  # type: ignore
        self,
        trainer: "GradientDescentTrainer",
        batch_inputs: List[TensorDict],
        batch_outputs: List[Dict[str, Any]],
        batch_metrics: Dict[str, Any],
        epoch: int,
        batch_number: int,
        is_training: bool,
        is_primary: bool = True,
        **kwargs,
    ) -> None:
        # Only training batches matter here; skip validation entirely.
        if not is_training:
            return
        log = logging.getLogger(__name__)
        for single_batch in batch_inputs:
            for instance_meta in single_batch["metadata"]:
                first_word = instance_meta["words"][0]  # type: ignore
                log.info(f"First word from training data: '{first_word}'")
# Module-level scratch set: TrainingDeviceLoggerOnBatchCallback records every
# device the model's parameters are seen on; tests clear it before training
# and inspect it afterwards.
_seen_training_devices = set()
@TrainerCallback.register("training_device_logger")
class TrainingDeviceLoggerOnBatchCallback(TrainerCallback):
    """Callback recording every device the model's parameters live on."""
    def on_batch(  # type: ignore
        self,
        trainer: "GradientDescentTrainer",
        batch_inputs: List[TensorDict],
        batch_outputs: List[Dict[str, Any]],
        batch_metrics: Dict[str, Any],
        epoch: int,
        batch_number: int,
        is_training: bool,
        is_primary: bool = True,
        **kwargs,
    ) -> None:
        global _seen_training_devices
        # Accumulate into the module-level set so tests can inspect it later.
        _seen_training_devices.update(
            parameter.device for parameter in trainer.model.parameters()
        )
@TrainerCallback.register("training_primary_check")
class TrainingPrimaryCheckCallback(TrainerCallback):
    """
    Makes sure there is only one primary worker.
    """
    def on_start(
        self, trainer: "GradientDescentTrainer", is_primary: bool = True, **kwargs
    ) -> None:
        super().on_start(trainer, is_primary=is_primary, **kwargs)
        if not is_primary:
            return
        # Whichever worker believes it is primary must be distributed rank 0.
        assert torch.distributed.get_rank() == 0
class TestTrain(AllenNlpTestCase):
DEFAULT_PARAMS = Params(
{
"model": {
"type": "simple_tagger",
"text_field_embedder": {
"token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
},
"encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
},
"dataset_reader": {"type": "sequence_tagging"},
"train_data_path": SEQUENCE_TAGGING_DATA_PATH,
"validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
"data_loader": {"batch_size": 2},
"trainer": {"num_epochs": 2, "optimizer": "adam"},
}
)
def test_train_model(self):
params = lambda: copy.deepcopy(self.DEFAULT_PARAMS)
train_model(params(), serialization_dir=os.path.join(self.TEST_DIR, "test_train_model"))
# It's OK if serialization dir exists but is empty:
serialization_dir2 = os.path.join(self.TEST_DIR, "empty_directory")
assert not os.path.exists(serialization_dir2)
os.makedirs(serialization_dir2)
train_model(params(), serialization_dir=serialization_dir2)
# It's not OK if serialization dir exists and has junk in it non-empty:
serialization_dir3 = os.path.join(self.TEST_DIR, "non_empty_directory")
assert not os.path.exists(serialization_dir3)
os.makedirs(serialization_dir3)
with open(os.path.join(serialization_dir3, "README.md"), "w") as f:
f.write("TEST")
with pytest.raises(ConfigurationError):
train_model(params(), serialization_dir=serialization_dir3)
# It's also not OK if serialization dir is a real serialization dir:
with pytest.raises(ConfigurationError):
train_model(params(), serialization_dir=os.path.join(self.TEST_DIR, "test_train_model"))
# But it's OK if serialization dir exists and --recover is specified:
train_model(
params(),
serialization_dir=os.path.join(self.TEST_DIR, "test_train_model"),
recover=True,
)
# It's ok serialization dir exists and --force is specified (it will be deleted):
train_model(
params(), serialization_dir=os.path.join(self.TEST_DIR, "test_train_model"), force=True
)
# But --force and --recover cannot both be specified
with pytest.raises(ConfigurationError):
train_model(
params(),
serialization_dir=os.path.join(self.TEST_DIR, "test_train_model"),
force=True,
recover=True,
)
@cpu_or_gpu
def test_detect_gpu(self):
import copy
params = copy.deepcopy(self.DEFAULT_PARAMS)
params["trainer"]["callbacks"] = ["training_device_logger"]
global _seen_training_devices
_seen_training_devices.clear()
train_model(params, serialization_dir=os.path.join(self.TEST_DIR, "test_detect_gpu"))
assert len(_seen_training_devices) == 1
seen_training_device = next(iter(_seen_training_devices))
if torch.cuda.device_count() == 0:
assert seen_training_device.type == "cpu"
else:
assert seen_training_device.type == "cuda"
@cpu_or_gpu
def test_force_gpu(self):
import copy
params = copy.deepcopy(self.DEFAULT_PARAMS)
params["trainer"]["callbacks"] = ["training_device_logger"]
params["trainer"]["cuda_device"] = 0
global _seen_training_devices
_seen_training_devices.clear()
if torch.cuda.device_count() == 0:
with pytest.raises(ConfigurationError):
train_model(params, serialization_dir=os.path.join(self.TEST_DIR, "test_force_gpu"))
else:
train_model(params, serialization_dir=os.path.join(self.TEST_DIR, "test_force_gpu"))
assert len(_seen_training_devices) == 1
seen_training_device = next(iter(_seen_training_devices))
assert seen_training_device.type == "cuda"
@cpu_or_gpu
def test_force_cpu(self):
import copy
params = copy.deepcopy(self.DEFAULT_PARAMS)
params["trainer"]["callbacks"] = ["training_device_logger"]
params["trainer"]["cuda_device"] = -1
global _seen_training_devices
_seen_training_devices.clear()
train_model(params, serialization_dir=os.path.join(self.TEST_DIR, "test_force_cpu"))
assert len(_seen_training_devices) == 1
seen_training_device = next(iter(_seen_training_devices))
assert seen_training_device.type == "cpu"
@cpu_or_gpu
def test_train_model_distributed(self):
if torch.cuda.device_count() >= 2:
devices = [0, 1]
else:
devices = [-1, -1]
params = lambda: Params(
{
"model": {
"type": "simple_tagger",
"text_field_embedder": {
"token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
},
"encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
},
"dataset_reader": {"type": "sequence_tagging"},
"train_data_path": SEQUENCE_TAGGING_DATA_PATH,
"validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
"data_loader": {"batch_size": 2},
"trainer": {
"num_epochs": 2,
"optimizer": "adam",
# Need to use the fully qualified name here so the distributed workers
# can import it.
"callbacks": ["tests.commands.train_test.TrainingPrimaryCheckCallback"],
},
"distributed": {"cuda_devices": devices},
}
)
out_dir = os.path.join(self.TEST_DIR, "test_distributed_train")
train_model(params(), serialization_dir=out_dir)
# Check that some logs specific to distributed
# training are where we expect.
serialized_files = os.listdir(out_dir)
assert "out_worker0.log" in serialized_files
assert "out_worker1.log" in serialized_files
assert "model.tar.gz" in serialized_files
assert "metrics.json" in serialized_files
# Make sure the metrics look right.
with open(os.path.join(out_dir, "metrics.json")) as f:
metrics = json.load(f)
assert metrics["peak_worker_0_memory_MB"] > 0
assert metrics["peak_worker_1_memory_MB"] > 0
if torch.cuda.device_count() >= 2:
assert metrics["peak_gpu_0_memory_MB"] > 0
assert metrics["peak_gpu_1_memory_MB"] > 0
# Check we can load the serialized model
assert load_archive(out_dir).model
@pytest.mark.parametrize("max_instances", [1, 2, 3, 4, None])
@pytest.mark.parametrize("grad_acc", [None, 2])
@pytest.mark.parametrize("batch_size", [1, 2, 3])
def test_train_model_distributed_with_gradient_accumulation(
self, max_instances, grad_acc, batch_size
):
if torch.cuda.device_count() >= 2:
devices = [0, 1]
else:
devices = [-1, -1]
params = lambda: Params(
{
"model": {
"type": "simple_tagger",
"text_field_embedder": {
"token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
},
"encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
},
"dataset_reader": {"type": "sequence_tagging", "max_instances": max_instances},
"train_data_path": SEQUENCE_TAGGING_DATA_PATH,
"validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
"data_loader": {"batch_size": batch_size},
"trainer": {
"num_epochs": 2,
"optimizer": "adam",
"num_gradient_accumulation_steps": grad_acc,
},
"distributed": {"cuda_devices": devices},
}
)
out_dir = os.path.join(self.TEST_DIR, "test_distributed_train_with_grad_acc")
train_model(params(), serialization_dir=out_dir)
# Check that some logs specific to distributed
# training are where we expect.
serialized_files = os.listdir(out_dir)
assert "out_worker0.log" in serialized_files
assert "out_worker1.log" in serialized_files
assert "model.tar.gz" in serialized_files
assert "metrics.json" in serialized_files
# Make sure the metrics look right.
with open(os.path.join(out_dir, "metrics.json")) as f:
metrics = json.load(f)
assert metrics["peak_worker_0_memory_MB"] > 0
assert metrics["peak_worker_1_memory_MB"] > 0
if torch.cuda.device_count() >= 2:
assert metrics["peak_gpu_0_memory_MB"] > 0
assert metrics["peak_gpu_1_memory_MB"] > 0
# Check we can load the serialized model
assert load_archive(out_dir).model
    @cpu_or_gpu
    @pytest.mark.parametrize("max_instances_in_memory", [None, 10])
    def test_train_model_distributed_with_sharded_reader(self, max_instances_in_memory):
        """With a sharded reader, each distributed worker reads whole shards;
        the vocab must still cover all shards and the workers' per-epoch log
        messages must reflect who had to stop early."""
        if torch.cuda.device_count() >= 2:
            devices = [0, 1]
        else:
            devices = [-1, -1]

        # Fresh Params per call: train_model consumes (pops) what it is given.
        params = lambda: Params(
            {
                "model": {
                    "type": "simple_tagger",
                    "text_field_embedder": {
                        "token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
                    },
                    "encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
                },
                "dataset_reader": {"type": "sharded", "base_reader": {"type": "sequence_tagging"}},
                "train_data_path": SEQUENCE_TAGGING_SHARDS_PATH,
                "validation_data_path": SEQUENCE_TAGGING_SHARDS_PATH,
                "data_loader": {
                    "batch_size": 1,
                    "max_instances_in_memory": max_instances_in_memory,
                },
                "trainer": {"num_epochs": 2, "optimizer": "adam"},
                "distributed": {"cuda_devices": devices},
            }
        )

        out_dir = os.path.join(self.TEST_DIR, "test_distributed_train")
        train_model(params(), serialization_dir=out_dir)

        # Check that some logs specific to distributed
        # training are where we expect.
        serialized_files = os.listdir(out_dir)
        assert "out_worker0.log" in serialized_files
        assert "out_worker1.log" in serialized_files
        assert "model.tar.gz" in serialized_files

        # Check we can load the serialized model
        archive = load_archive(out_dir)
        assert archive.model

        # Check that we created a vocab from all the shards.
        tokens = archive.model.vocab._token_to_index["tokens"].keys()
        assert tokens == {
            "@@PADDING@@",
            "@@UNKNOWN@@",
            "are",
            ".",
            "animals",
            "plants",
            "vehicles",
            "cats",
            "dogs",
            "snakes",
            "birds",
            "ferns",
            "trees",
            "flowers",
            "vegetables",
            "cars",
            "buses",
            "planes",
            "rockets",
        }

        # TODO: This is somewhat brittle. Make these constants in trainer.py.
        train_early = "finishing training early!"
        validation_early = "finishing validation early!"
        train_complete = "completed its entire epoch (training)."
        validation_complete = "completed its entire epoch (validation)."

        # There are three shards, but only two workers, so the first worker will have to discard some data.
        with open(os.path.join(out_dir, "out_worker0.log")) as f:
            worker0_log = f.read()
        assert train_early in worker0_log
        assert validation_early in worker0_log
        assert train_complete not in worker0_log
        assert validation_complete not in worker0_log

        with open(os.path.join(out_dir, "out_worker1.log")) as f:
            worker1_log = f.read()
        assert train_early not in worker1_log
        assert validation_early not in worker1_log
        assert train_complete in worker1_log
        assert validation_complete in worker1_log
@cpu_or_gpu
@pytest.mark.parametrize("max_instances_in_memory", [None, 10])
def test_train_model_distributed_without_sharded_reader(self, max_instances_in_memory):
if torch.cuda.device_count() >= 2:
devices = [0, 1]
else:
devices = [-1, -1]
num_epochs = 2
params = lambda: Params(
{
"model": {
"type": "simple_tagger",
"text_field_embedder": {
"token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
},
"encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
},
"dataset_reader": {"type": "sequence_tagging", "max_instances": 4},
"train_data_path": SEQUENCE_TAGGING_DATA_PATH,
"validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
"data_loader": {
"batch_size": 1,
"max_instances_in_memory": max_instances_in_memory,
},
"trainer": {
"num_epochs": num_epochs,
"optimizer": "adam",
"callbacks": ["tests.commands.train_test.TrainingDataLoggerOnBatchCallback"],
},
"distributed": {"cuda_devices": devices},
}
)
out_dir = os.path.join(self.TEST_DIR, "test_distributed_train")
train_model(params(), serialization_dir=out_dir)
# Check that some logs specific to distributed
# training are where we expect.
serialized_files = os.listdir(out_dir)
assert "out_worker0.log" in serialized_files
assert "out_worker1.log" in serialized_files
assert "model.tar.gz" in serialized_files
# Check we can load the serialized model
archive = load_archive(out_dir)
assert archive.model
# Check that we created a vocab from all the shards.
tokens = set(archive.model.vocab._token_to_index["tokens"].keys())
assert tokens == {
"@@PADDING@@",
"@@UNKNOWN@@",
"are",
".",
"animals",
"cats",
"dogs",
"snakes",
"birds",
}
train_complete = "completed its entire epoch (training)."
validation_complete = "completed its entire epoch (validation)."
import re
pattern = re.compile(r"First word from training data: '([^']*)'")
first_word_counts = Counter() # type: ignore
with open(os.path.join(out_dir, "out_worker0.log")) as f:
worker0_log = f.read()
assert train_complete in worker0_log
assert validation_complete in worker0_log
for first_word in pattern.findall(worker0_log):
first_word_counts[first_word] += 1
with open(os.path.join(out_dir, "out_worker1.log")) as f:
worker1_log = f.read()
assert train_complete in worker1_log
assert validation_complete in worker1_log
for first_word in pattern.findall(worker1_log):
first_word_counts[first_word] += 1
assert first_word_counts == {
"cats": num_epochs,
"dogs": num_epochs,
"snakes": num_epochs,
"birds": num_epochs,
}
def test_distributed_raises_error_with_no_gpus(self):
params = Params(
{
"model": {
"type": "simple_tagger",
"text_field_embedder": {
"token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
},
"encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
},
"dataset_reader": {"type": "sequence_tagging"},
"train_data_path": SEQUENCE_TAGGING_DATA_PATH,
"validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
"data_loader": {"batch_size": 2},
"trainer": {"num_epochs": 2, "optimizer": "adam"},
"distributed": {},
}
)
with pytest.raises(ConfigurationError):
train_model(params, serialization_dir=os.path.join(self.TEST_DIR, "test_train_model"))
def test_train_saves_all_keys_in_config(self):
params = Params(
{
"model": {
"type": "simple_tagger",
"text_field_embedder": {
"token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
},
"encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
},
"pytorch_seed": 42,
"numpy_seed": 42,
"random_seed": 42,
"dataset_reader": {"type": "sequence_tagging"},
"train_data_path": SEQUENCE_TAGGING_DATA_PATH,
"validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
"data_loader": {"batch_size": 2},
"trainer": {"num_epochs": 2, "optimizer": "adam"},
}
)
serialization_dir = os.path.join(self.TEST_DIR, "test_train_model")
params_as_dict = (
params.as_ordered_dict()
) # Do it here as train_model will pop all the values.
train_model(params, serialization_dir=serialization_dir)
config_path = os.path.join(serialization_dir, CONFIG_NAME)
with open(config_path) as config:
saved_config_as_dict = OrderedDict(json.load(config))
assert params_as_dict == saved_config_as_dict
def test_error_is_throw_when_cuda_device_is_not_available(self):
params = Params(
{
"model": {
"type": "simple_tagger",
"text_field_embedder": {
"token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
},
"encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
},
"dataset_reader": {"type": "sequence_tagging"},
"train_data_path": "test_fixtures/data/sequence_tagging.tsv",
"validation_data_path": "test_fixtures/data/sequence_tagging.tsv",
"data_loader": {"batch_size": 2},
"trainer": {
"num_epochs": 2,
"cuda_device": torch.cuda.device_count(),
"optimizer": "adam",
},
}
)
with pytest.raises(ConfigurationError, match="Experiment specified"):
train_model(params, serialization_dir=os.path.join(self.TEST_DIR, "test_train_model"))
def test_train_with_test_set(self):
params = Params(
{
"model": {
"type": "simple_tagger",
"text_field_embedder": {
"token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
},
"encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
},
"dataset_reader": {"type": "sequence_tagging"},
"train_data_path": SEQUENCE_TAGGING_DATA_PATH,
"test_data_path": SEQUENCE_TAGGING_DATA_PATH,
"validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
"evaluate_on_test": True,
"data_loader": {"batch_size": 2},
"trainer": {"num_epochs": 2, "optimizer": "adam"},
}
)
train_model(params, serialization_dir=os.path.join(self.TEST_DIR, "train_with_test_set"))
    def test_train_number_of_steps(self):
        """The num_steps_per_epoch handed to the LR scheduler must match the
        actual number of optimizer steps per epoch, for plain training, a
        smaller batch size, and gradient accumulation."""
        number_of_epochs = 2

        # Captured by the mock scheduler below so we can inspect what the
        # trainer claimed the per-epoch step count would be.
        last_num_steps_per_epoch: Optional[int] = None

        @LearningRateScheduler.register("mock")
        class MockLRScheduler(ExponentialLearningRateScheduler):
            def __init__(self, optimizer: torch.optim.Optimizer, num_steps_per_epoch: int):
                super().__init__(optimizer)
                nonlocal last_num_steps_per_epoch
                last_num_steps_per_epoch = num_steps_per_epoch

        # Counts actual training batches seen by the trainer callback hook.
        batch_callback_counter = 0

        @TrainerCallback.register("counter")
        class CounterOnBatchCallback(TrainerCallback):
            def on_batch(
                self,
                trainer: GradientDescentTrainer,
                batch_inputs: List[TensorDict],
                batch_outputs: List[Dict[str, Any]],
                batch_metrics: Dict[str, Any],
                epoch: int,
                batch_number: int,
                is_training: bool,
                is_primary: bool = True,
                batch_grad_norm: Optional[float] = None,
                **kwargs,
            ) -> None:
                nonlocal batch_callback_counter
                if is_training:
                    batch_callback_counter += 1

        params = Params(
            {
                "model": {
                    "type": "simple_tagger",
                    "text_field_embedder": {
                        "token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
                    },
                    "encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
                },
                "dataset_reader": {"type": "sequence_tagging"},
                "train_data_path": SEQUENCE_TAGGING_DATA_PATH,
                "test_data_path": SEQUENCE_TAGGING_DATA_PATH,
                "validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
                "evaluate_on_test": True,
                "data_loader": {"batch_size": 2},
                "trainer": {
                    "num_epochs": number_of_epochs,
                    "optimizer": "adam",
                    "learning_rate_scheduler": {"type": "mock"},
                    "callbacks": ["counter"],
                },
            }
        )

        # Baseline run: scheduler's step count must match observed batches.
        train_model(
            params.duplicate(), serialization_dir=os.path.join(self.TEST_DIR, "train_normal")
        )
        assert batch_callback_counter == last_num_steps_per_epoch * number_of_epochs
        batch_callback_counter = 0
        normal_steps_per_epoch = last_num_steps_per_epoch

        # Batch size 1: step count scales up by the original batch size.
        original_batch_size = params["data_loader"]["batch_size"]
        params["data_loader"]["batch_size"] = 1
        train_model(
            params.duplicate(), serialization_dir=os.path.join(self.TEST_DIR, "train_with_bs1")
        )
        assert batch_callback_counter == last_num_steps_per_epoch * number_of_epochs
        batch_callback_counter = 0
        assert normal_steps_per_epoch == math.ceil(last_num_steps_per_epoch / original_batch_size)

        # Gradient accumulation of 3: reported steps shrink by a factor of 3.
        params["data_loader"]["batch_size"] = original_batch_size
        params["trainer"]["num_gradient_accumulation_steps"] = 3
        train_model(params, serialization_dir=os.path.join(self.TEST_DIR, "train_with_ga"))
        assert batch_callback_counter == last_num_steps_per_epoch * number_of_epochs
        batch_callback_counter = 0
        assert math.ceil(normal_steps_per_epoch / 3) == last_num_steps_per_epoch
def test_train_args(self):
parser = argparse.ArgumentParser(description="Testing")
subparsers = parser.add_subparsers(title="Commands", metavar="")
Train().add_subparser(subparsers)
for serialization_arg in ["-s", "--serialization-dir"]:
raw_args = ["train", "path/to/params", serialization_arg, "serialization_dir"]
args = parser.parse_args(raw_args)
assert args.func == train_model_from_args
assert args.param_path == "path/to/params"
assert args.serialization_dir == "serialization_dir"
# config is required
with pytest.raises(SystemExit) as cm:
args = parser.parse_args(["train", "-s", "serialization_dir"])
assert cm.exception.code == 2 # argparse code for incorrect usage
# serialization dir is required
with pytest.raises(SystemExit) as cm:
args = parser.parse_args(["train", "path/to/params"])
assert cm.exception.code == 2 # argparse code for incorrect usage
def test_train_model_can_instantiate_from_params(self):
params = Params.from_file(self.FIXTURES_ROOT / "simple_tagger" / "experiment.json")
# Can instantiate from base class params
TrainModel.from_params(
params=params, serialization_dir=self.TEST_DIR, local_rank=0, batch_weight_key=""
)
def test_train_can_fine_tune_model_from_archive(self):
params = Params.from_file(
self.FIXTURES_ROOT / "basic_classifier" / "experiment_from_archive.jsonnet"
)
train_loop = TrainModel.from_params(
params=params, serialization_dir=self.TEST_DIR, local_rank=0, batch_weight_key=""
)
train_loop.run()
model = Model.from_archive(
self.FIXTURES_ROOT / "basic_classifier" / "serialization" / "model.tar.gz"
)
# This is checking that the vocabulary actually got extended. The data that we're using for
# training is different from the data we used to produce the model archive, and we set
# parameters such that the vocab should have been extended.
assert train_loop.model.vocab.get_vocab_size() > model.vocab.get_vocab_size()
def test_train_nograd_regex(self):
params_get = lambda: Params(
{
"model": {
"type": "simple_tagger",
"text_field_embedder": {
"token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
},
"encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
},
"dataset_reader": {"type": "sequence_tagging"},
"train_data_path": SEQUENCE_TAGGING_DATA_PATH,
"validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
"data_loader": {"batch_size": 2},
"trainer": {"num_epochs": 2, "optimizer": "adam"},
}
)
serialization_dir = os.path.join(self.TEST_DIR, "test_train_nograd")
regex_lists = [[], [".*text_field_embedder.*"], [".*text_field_embedder.*", ".*encoder.*"]]
for regex_list in regex_lists:
params = params_get()
params["trainer"]["no_grad"] = regex_list
shutil.rmtree(serialization_dir, ignore_errors=True)
model = train_model(params, serialization_dir=serialization_dir)
# If regex is matched, parameter name should have requires_grad False
# Or else True
for name, parameter in model.named_parameters():
if any(re.search(regex, name) for regex in regex_list):
assert not parameter.requires_grad
else:
assert parameter.requires_grad
# If all parameters have requires_grad=False, then error.
params = params_get()
params["trainer"]["no_grad"] = ["*"]
shutil.rmtree(serialization_dir, ignore_errors=True)
with pytest.raises(Exception):
train_model(params, serialization_dir=serialization_dir)
class TestDryRun(AllenNlpTestCase):
    """Tests for `train_model(..., dry_run=True)`: vocabulary creation and
    argument handling without actually training a model."""

    def setup_method(self):
        super().setup_method()

        # Minimal tagger config over the sequence-tagging fixture; individual
        # tests tweak the "vocabulary" / "data_loader" sections as needed.
        self.params = Params(
            {
                "model": {
                    "type": "simple_tagger",
                    "text_field_embedder": {
                        "token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
                    },
                    "encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
                },
                "dataset_reader": {"type": "sequence_tagging"},
                "train_data_path": str(self.FIXTURES_ROOT / "data" / "sequence_tagging.tsv"),
                "validation_data_path": str(self.FIXTURES_ROOT / "data" / "sequence_tagging.tsv"),
                "data_loader": {"batch_size": 2},
                "trainer": {"num_epochs": 2, "optimizer": "adam"},
            }
        )

    def test_dry_run_doesnt_overwrite_vocab(self):
        """A pre-existing, non-empty vocabulary directory must not be clobbered."""
        vocab_path = self.TEST_DIR / "vocabulary"
        os.mkdir(vocab_path)
        # Put something in the vocab directory
        with open(vocab_path / "test.txt", "a+") as open_file:
            open_file.write("test")

        # It should raise error if vocab dir is non-empty
        with pytest.raises(ConfigurationError):
            train_model(self.params, self.TEST_DIR, dry_run=True)

    def test_dry_run_makes_vocab(self):
        """A dry run writes the full vocabulary files with the expected contents."""
        vocab_path = self.TEST_DIR / "vocabulary"

        train_model(self.params, self.TEST_DIR, dry_run=True)

        vocab_files = os.listdir(vocab_path)
        assert set(vocab_files) == {
            ".lock",
            "labels.txt",
            "non_padded_namespaces.txt",
            "tokens.txt",
        }

        with open(vocab_path / "tokens.txt") as f:
            tokens = [line.strip() for line in f]

        tokens.sort()
        assert tokens == [
            ".",
            "@@UNKNOWN@@",
            "animals",
            "are",
            "birds",
            "cats",
            "dogs",
            "horses",
            "snakes",
        ]

        with open(vocab_path / "labels.txt") as f:
            labels = [line.strip() for line in f]

        labels.sort()
        assert labels == ["N", "V"]

    def test_dry_run_with_extension(self):
        """vocabulary type "extend" keeps the saved entries and appends new
        tokens that pass the min_count threshold."""
        existing_serialization_dir = self.TEST_DIR / "existing"
        extended_serialization_dir = self.TEST_DIR / "extended"
        existing_vocab_path = existing_serialization_dir / "vocabulary"
        extended_vocab_path = extended_serialization_dir / "vocabulary"

        vocab = Vocabulary()
        vocab.add_token_to_namespace("some_weird_token_1", namespace="tokens")
        vocab.add_token_to_namespace("some_weird_token_2", namespace="tokens")
        os.makedirs(existing_serialization_dir, exist_ok=True)
        vocab.save_to_files(existing_vocab_path)

        self.params["vocabulary"] = {}
        self.params["vocabulary"]["type"] = "extend"
        self.params["vocabulary"]["directory"] = str(existing_vocab_path)
        self.params["vocabulary"]["min_count"] = {"tokens": 3}
        train_model(self.params, extended_serialization_dir, dry_run=True)

        vocab_files = os.listdir(extended_vocab_path)
        assert set(vocab_files) == {
            ".lock",
            "labels.txt",
            "non_padded_namespaces.txt",
            "tokens.txt",
        }

        with open(extended_vocab_path / "tokens.txt") as f:
            tokens = [line.strip() for line in f]

        # The pre-existing entries keep their original indices...
        assert tokens[0] == "@@UNKNOWN@@"
        assert tokens[1] == "some_weird_token_1"
        assert tokens[2] == "some_weird_token_2"

        # ...and only tokens with count >= 3 were appended from the data.
        tokens.sort()
        assert tokens == [
            ".",
            "@@UNKNOWN@@",
            "animals",
            "are",
            "some_weird_token_1",
            "some_weird_token_2",
        ]

        with open(extended_vocab_path / "labels.txt") as f:
            labels = [line.strip() for line in f]

        labels.sort()
        assert labels == ["N", "V"]

    def test_dry_run_without_extension(self):
        """vocabulary type "from_files" loads the saved vocab verbatim — no
        new entries are added from the training data."""
        existing_serialization_dir = self.TEST_DIR / "existing"
        extended_serialization_dir = self.TEST_DIR / "extended"
        existing_vocab_path = existing_serialization_dir / "vocabulary"
        extended_vocab_path = extended_serialization_dir / "vocabulary"

        vocab = Vocabulary()
        # if extend is False, its users responsibility to make sure that dataset instances
        # will be indexible by provided vocabulary. At least @@UNKNOWN@@ should be present in
        # namespace for which there could be OOV entries seen in dataset during indexing.
        # For `tokens` ns, new words will be seen but `tokens` has @@UNKNOWN@@ token.
        # but for 'labels' ns, there is no @@UNKNOWN@@ so required to add 'N', 'V' upfront.
        vocab.add_token_to_namespace("some_weird_token_1", namespace="tokens")
        vocab.add_token_to_namespace("some_weird_token_2", namespace="tokens")
        vocab.add_token_to_namespace("N", namespace="labels")
        vocab.add_token_to_namespace("V", namespace="labels")
        os.makedirs(existing_serialization_dir, exist_ok=True)
        vocab.save_to_files(existing_vocab_path)

        self.params["vocabulary"] = {}
        self.params["vocabulary"]["type"] = "from_files"
        self.params["vocabulary"]["directory"] = str(existing_vocab_path)
        train_model(self.params, extended_serialization_dir, dry_run=True)

        with open(extended_vocab_path / "tokens.txt") as f:
            tokens = [line.strip() for line in f]

        assert tokens[0] == "@@UNKNOWN@@"
        assert tokens[1] == "some_weird_token_1"
        assert tokens[2] == "some_weird_token_2"
        assert len(tokens) == 3

    def test_make_vocab_args(self):
        """The `train` subcommand accepts --dry-run alongside its usual args."""
        parser = argparse.ArgumentParser(description="Testing")
        subparsers = parser.add_subparsers(title="Commands", metavar="")
        Train().add_subparser(subparsers)
        for serialization_arg in ["-s", "--serialization-dir"]:
            raw_args = [
                "train",
                "path/to/params",
                serialization_arg,
                "serialization_dir",
                "--dry-run",
            ]
            args = parser.parse_args(raw_args)
            assert args.func == train_model_from_args
            assert args.param_path == "path/to/params"
            assert args.serialization_dir == "serialization_dir"
            assert args.dry_run

    def test_warn_validation_loader_batches_per_epoch(self):
        """Setting batches_per_epoch on the loader used for validation warns."""
        self.params["data_loader"]["batches_per_epoch"] = 3
        with pytest.warns(UserWarning, match="batches_per_epoch"):
            train_model(self.params, self.TEST_DIR, dry_run=True)
| 40.626208 | 107 | 0.584829 | import argparse
import copy
import json
import logging
import math
import os
import re
import shutil
from collections import OrderedDict, Counter
from typing import Optional, List, Dict, Any
import pytest
import torch
from allennlp.commands.train import Train, train_model, train_model_from_args, TrainModel
from allennlp.common import Params
from allennlp.common.checks import ConfigurationError
from allennlp.common.testing import AllenNlpTestCase, cpu_or_gpu
from allennlp.data import Vocabulary
from allennlp.data.data_loaders import TensorDict
from allennlp.models import load_archive, Model
from allennlp.models.archival import CONFIG_NAME
from allennlp.training import TrainerCallback, GradientDescentTrainer
from allennlp.training.learning_rate_schedulers import (
ExponentialLearningRateScheduler,
LearningRateScheduler,
)
# Path to the sequence-tagging fixture used by most tests in this module.
SEQUENCE_TAGGING_DATA_PATH = str(AllenNlpTestCase.FIXTURES_ROOT / "data" / "sequence_tagging.tsv")
# Glob over the sharded copies of the same data, for sharded-reader tests.
SEQUENCE_TAGGING_SHARDS_PATH = str(AllenNlpTestCase.FIXTURES_ROOT / "data" / "shards" / "*")
@TrainerCallback.register("training_data_logger")
class TrainingDataLoggerOnBatchCallback(TrainerCallback):
    """Logs the first word of every training instance, so tests can check
    which worker processed which data."""

    def on_batch(
        self,
        trainer: "GradientDescentTrainer",
        batch_inputs: List[TensorDict],
        batch_outputs: List[Dict[str, Any]],
        batch_metrics: Dict[str, Any],
        epoch: int,
        batch_number: int,
        is_training: bool,
        is_primary: bool = True,
        **kwargs,
    ) -> None:
        # Only training batches are of interest here.
        if not is_training:
            return
        log = logging.getLogger(__name__)
        for batch in batch_inputs:
            for metadata in batch["metadata"]:
                log.info(f"First word from training data: '{metadata['words'][0]}'")
# Devices observed by TrainingDeviceLoggerOnBatchCallback during training;
# module-level so tests can clear and inspect it around a train_model call.
_seen_training_devices = set()
@TrainerCallback.register("training_device_logger")
class TrainingDeviceLoggerOnBatchCallback(TrainerCallback):
    """Records the device of every model parameter into the module-level
    ``_seen_training_devices`` set on each batch."""

    def on_batch(
        self,
        trainer: "GradientDescentTrainer",
        batch_inputs: List[TensorDict],
        batch_outputs: List[Dict[str, Any]],
        batch_metrics: Dict[str, Any],
        epoch: int,
        batch_number: int,
        is_training: bool,
        is_primary: bool = True,
        **kwargs,
    ) -> None:
        global _seen_training_devices
        _seen_training_devices.update(
            parameter.device for parameter in trainer.model.parameters()
        )
@TrainerCallback.register("training_primary_check")
class TrainingPrimaryCheckCallback(TrainerCallback):
    """Sanity-checks that the worker flagged as primary is distributed rank 0."""

    def on_start(
        self, trainer: "GradientDescentTrainer", is_primary: bool = True, **kwargs
    ) -> None:
        super().on_start(trainer, is_primary=is_primary, **kwargs)
        if not is_primary:
            return
        assert torch.distributed.get_rank() == 0
class TestTrain(AllenNlpTestCase):
DEFAULT_PARAMS = Params(
{
"model": {
"type": "simple_tagger",
"text_field_embedder": {
"token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
},
"encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
},
"dataset_reader": {"type": "sequence_tagging"},
"train_data_path": SEQUENCE_TAGGING_DATA_PATH,
"validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
"data_loader": {"batch_size": 2},
"trainer": {"num_epochs": 2, "optimizer": "adam"},
}
)
    def test_train_model(self):
        """train_model accepts fresh or empty serialization dirs, refuses
        non-empty ones, and honors --recover / --force (but not both at once)."""
        params = lambda: copy.deepcopy(self.DEFAULT_PARAMS)
        train_model(params(), serialization_dir=os.path.join(self.TEST_DIR, "test_train_model"))

        # An existing-but-empty serialization dir is fine.
        serialization_dir2 = os.path.join(self.TEST_DIR, "empty_directory")
        assert not os.path.exists(serialization_dir2)
        os.makedirs(serialization_dir2)
        train_model(params(), serialization_dir=serialization_dir2)

        # It's not OK if serialization dir exists and has junk in it non-empty:
        serialization_dir3 = os.path.join(self.TEST_DIR, "non_empty_directory")
        assert not os.path.exists(serialization_dir3)
        os.makedirs(serialization_dir3)
        with open(os.path.join(serialization_dir3, "README.md"), "w") as f:
            f.write("TEST")
        with pytest.raises(ConfigurationError):
            train_model(params(), serialization_dir=serialization_dir3)

        # Re-using the dir trained into at the top also fails without --recover.
        with pytest.raises(ConfigurationError):
            train_model(params(), serialization_dir=os.path.join(self.TEST_DIR, "test_train_model"))

        # But it's OK if serialization dir exists and --recover is specified:
        train_model(
            params(),
            serialization_dir=os.path.join(self.TEST_DIR, "test_train_model"),
            recover=True,
        )
        train_model(
            params(), serialization_dir=os.path.join(self.TEST_DIR, "test_train_model"), force=True
        )

        # But --force and --recover cannot both be specified
        with pytest.raises(ConfigurationError):
            train_model(
                params(),
                serialization_dir=os.path.join(self.TEST_DIR, "test_train_model"),
                force=True,
                recover=True,
            )
@cpu_or_gpu
def test_detect_gpu(self):
import copy
params = copy.deepcopy(self.DEFAULT_PARAMS)
params["trainer"]["callbacks"] = ["training_device_logger"]
global _seen_training_devices
_seen_training_devices.clear()
train_model(params, serialization_dir=os.path.join(self.TEST_DIR, "test_detect_gpu"))
assert len(_seen_training_devices) == 1
seen_training_device = next(iter(_seen_training_devices))
if torch.cuda.device_count() == 0:
assert seen_training_device.type == "cpu"
else:
assert seen_training_device.type == "cuda"
@cpu_or_gpu
def test_force_gpu(self):
import copy
params = copy.deepcopy(self.DEFAULT_PARAMS)
params["trainer"]["callbacks"] = ["training_device_logger"]
params["trainer"]["cuda_device"] = 0
global _seen_training_devices
_seen_training_devices.clear()
if torch.cuda.device_count() == 0:
with pytest.raises(ConfigurationError):
train_model(params, serialization_dir=os.path.join(self.TEST_DIR, "test_force_gpu"))
else:
train_model(params, serialization_dir=os.path.join(self.TEST_DIR, "test_force_gpu"))
assert len(_seen_training_devices) == 1
seen_training_device = next(iter(_seen_training_devices))
assert seen_training_device.type == "cuda"
@cpu_or_gpu
def test_force_cpu(self):
import copy
params = copy.deepcopy(self.DEFAULT_PARAMS)
params["trainer"]["callbacks"] = ["training_device_logger"]
params["trainer"]["cuda_device"] = -1
global _seen_training_devices
_seen_training_devices.clear()
train_model(params, serialization_dir=os.path.join(self.TEST_DIR, "test_force_cpu"))
assert len(_seen_training_devices) == 1
seen_training_device = next(iter(_seen_training_devices))
assert seen_training_device.type == "cpu"
    @cpu_or_gpu
    def test_train_model_distributed(self):
        """Two-worker distributed training completes, writes per-worker logs
        and metrics, and produces a loadable archive."""
        # Use two real GPUs when available; otherwise two CPU workers (-1).
        if torch.cuda.device_count() >= 2:
            devices = [0, 1]
        else:
            devices = [-1, -1]

        # Fresh Params per call: train_model consumes (pops) what it is given.
        params = lambda: Params(
            {
                "model": {
                    "type": "simple_tagger",
                    "text_field_embedder": {
                        "token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
                    },
                    "encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
                },
                "dataset_reader": {"type": "sequence_tagging"},
                "train_data_path": SEQUENCE_TAGGING_DATA_PATH,
                "validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
                "data_loader": {"batch_size": 2},
                "trainer": {
                    "num_epochs": 2,
                    "optimizer": "adam",
                    # Need to use the fully qualified name here so the distributed workers
                    # can import it.
                    "callbacks": ["tests.commands.train_test.TrainingPrimaryCheckCallback"],
                },
                "distributed": {"cuda_devices": devices},
            }
        )

        out_dir = os.path.join(self.TEST_DIR, "test_distributed_train")
        train_model(params(), serialization_dir=out_dir)

        # Check that some logs specific to distributed
        # training are where we expect.
        serialized_files = os.listdir(out_dir)
        assert "out_worker0.log" in serialized_files
        assert "out_worker1.log" in serialized_files
        assert "model.tar.gz" in serialized_files
        assert "metrics.json" in serialized_files

        # Make sure the metrics look right.
        with open(os.path.join(out_dir, "metrics.json")) as f:
            metrics = json.load(f)
            assert metrics["peak_worker_0_memory_MB"] > 0
            assert metrics["peak_worker_1_memory_MB"] > 0
            if torch.cuda.device_count() >= 2:
                assert metrics["peak_gpu_0_memory_MB"] > 0
                assert metrics["peak_gpu_1_memory_MB"] > 0

        # Check we can load the serialized model
        assert load_archive(out_dir).model
    @pytest.mark.parametrize("max_instances", [1, 2, 3, 4, None])
    @pytest.mark.parametrize("grad_acc", [None, 2])
    @pytest.mark.parametrize("batch_size", [1, 2, 3])
    def test_train_model_distributed_with_gradient_accumulation(
        self, max_instances, grad_acc, batch_size
    ):
        """Distributed training completes — and the resulting archive loads —
        across combinations of dataset size, gradient-accumulation steps, and
        batch size."""
        # Use two real GPUs when available; otherwise two CPU workers (-1).
        if torch.cuda.device_count() >= 2:
            devices = [0, 1]
        else:
            devices = [-1, -1]

        # Fresh Params per call: train_model consumes (pops) what it is given.
        params = lambda: Params(
            {
                "model": {
                    "type": "simple_tagger",
                    "text_field_embedder": {
                        "token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
                    },
                    "encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
                },
                "dataset_reader": {"type": "sequence_tagging", "max_instances": max_instances},
                "train_data_path": SEQUENCE_TAGGING_DATA_PATH,
                "validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
                "data_loader": {"batch_size": batch_size},
                "trainer": {
                    "num_epochs": 2,
                    "optimizer": "adam",
                    "num_gradient_accumulation_steps": grad_acc,
                },
                "distributed": {"cuda_devices": devices},
            }
        )

        out_dir = os.path.join(self.TEST_DIR, "test_distributed_train_with_grad_acc")
        train_model(params(), serialization_dir=out_dir)

        # Check that some logs specific to distributed
        # training are where we expect.
        serialized_files = os.listdir(out_dir)
        assert "out_worker0.log" in serialized_files
        assert "out_worker1.log" in serialized_files
        assert "model.tar.gz" in serialized_files
        assert "metrics.json" in serialized_files

        # Make sure the metrics look right.
        with open(os.path.join(out_dir, "metrics.json")) as f:
            metrics = json.load(f)
            assert metrics["peak_worker_0_memory_MB"] > 0
            assert metrics["peak_worker_1_memory_MB"] > 0
            if torch.cuda.device_count() >= 2:
                assert metrics["peak_gpu_0_memory_MB"] > 0
                assert metrics["peak_gpu_1_memory_MB"] > 0

        # Check we can load the serialized model
        assert load_archive(out_dir).model
    @cpu_or_gpu
    @pytest.mark.parametrize("max_instances_in_memory", [None, 10])
    def test_train_model_distributed_with_sharded_reader(self, max_instances_in_memory):
        """With a sharded reader, each distributed worker reads whole shards;
        the vocab must still cover all shards and the workers' per-epoch log
        messages must reflect who had to stop early."""
        if torch.cuda.device_count() >= 2:
            devices = [0, 1]
        else:
            devices = [-1, -1]

        # Fresh Params per call: train_model consumes (pops) what it is given.
        params = lambda: Params(
            {
                "model": {
                    "type": "simple_tagger",
                    "text_field_embedder": {
                        "token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
                    },
                    "encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
                },
                "dataset_reader": {"type": "sharded", "base_reader": {"type": "sequence_tagging"}},
                "train_data_path": SEQUENCE_TAGGING_SHARDS_PATH,
                "validation_data_path": SEQUENCE_TAGGING_SHARDS_PATH,
                "data_loader": {
                    "batch_size": 1,
                    "max_instances_in_memory": max_instances_in_memory,
                },
                "trainer": {"num_epochs": 2, "optimizer": "adam"},
                "distributed": {"cuda_devices": devices},
            }
        )

        out_dir = os.path.join(self.TEST_DIR, "test_distributed_train")
        train_model(params(), serialization_dir=out_dir)

        # Check that some logs specific to distributed
        # training are where we expect.
        serialized_files = os.listdir(out_dir)
        assert "out_worker0.log" in serialized_files
        assert "out_worker1.log" in serialized_files
        assert "model.tar.gz" in serialized_files

        # Check we can load the serialized model
        archive = load_archive(out_dir)
        assert archive.model

        # Check that we created a vocab from all the shards.
        tokens = archive.model.vocab._token_to_index["tokens"].keys()
        assert tokens == {
            "@@PADDING@@",
            "@@UNKNOWN@@",
            "are",
            ".",
            "animals",
            "plants",
            "vehicles",
            "cats",
            "dogs",
            "snakes",
            "birds",
            "ferns",
            "trees",
            "flowers",
            "vegetables",
            "cars",
            "buses",
            "planes",
            "rockets",
        }

        # TODO: This is somewhat brittle. Make these constants in trainer.py.
        train_early = "finishing training early!"
        validation_early = "finishing validation early!"
        train_complete = "completed its entire epoch (training)."
        validation_complete = "completed its entire epoch (validation)."

        # There are three shards, but only two workers, so the first worker will have to discard some data.
        with open(os.path.join(out_dir, "out_worker0.log")) as f:
            worker0_log = f.read()
        assert train_early in worker0_log
        assert validation_early in worker0_log
        assert train_complete not in worker0_log
        assert validation_complete not in worker0_log

        with open(os.path.join(out_dir, "out_worker1.log")) as f:
            worker1_log = f.read()
        assert train_early not in worker1_log
        assert validation_early not in worker1_log
        assert train_complete in worker1_log
        assert validation_complete in worker1_log
@cpu_or_gpu
@pytest.mark.parametrize("max_instances_in_memory", [None, 10])
def test_train_model_distributed_without_sharded_reader(self, max_instances_in_memory):
    """Distributed training with a plain (non-sharded) reader: each worker
    should receive a distinct, non-overlapping slice of the data and run
    complete epochs (no "finishing early" truncation)."""
    # Use two GPUs when available, otherwise two CPU worker processes (-1).
    if torch.cuda.device_count() >= 2:
        devices = [0, 1]
    else:
        devices = [-1, -1]
    num_epochs = 2
    # Params objects are consumed/mutated by train_model, so build them lazily.
    params = lambda: Params(
        {
            "model": {
                "type": "simple_tagger",
                "text_field_embedder": {
                    "token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
                },
                "encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
            },
            "dataset_reader": {"type": "sequence_tagging", "max_instances": 4},
            "train_data_path": SEQUENCE_TAGGING_DATA_PATH,
            "validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
            "data_loader": {
                "batch_size": 1,
                "max_instances_in_memory": max_instances_in_memory,
            },
            "trainer": {
                "num_epochs": num_epochs,
                "optimizer": "adam",
                # Callback that logs the first word of each training batch,
                # letting us verify below which instances each worker saw.
                "callbacks": ["tests.commands.train_test.TrainingDataLoggerOnBatchCallback"],
            },
            "distributed": {"cuda_devices": devices},
        }
    )
    out_dir = os.path.join(self.TEST_DIR, "test_distributed_train")
    train_model(params(), serialization_dir=out_dir)
    # Check that some logs specific to distributed
    # training are where we expect.
    serialized_files = os.listdir(out_dir)
    assert "out_worker0.log" in serialized_files
    assert "out_worker1.log" in serialized_files
    assert "model.tar.gz" in serialized_files
    # Check we can load the serialized model
    archive = load_archive(out_dir)
    assert archive.model
    # The vocab must have been built from the full dataset, not one worker's
    # slice ("max_instances": 4 limits the data to these tokens).
    tokens = set(archive.model.vocab._token_to_index["tokens"].keys())
    assert tokens == {
        "@@PADDING@@",
        "@@UNKNOWN@@",
        "are",
        ".",
        "animals",
        "cats",
        "dogs",
        "snakes",
        "birds",
    }
    # With an evenly divisible split, neither worker should finish early.
    train_complete = "completed its entire epoch (training)."
    validation_complete = "completed its entire epoch (validation)."
    import re

    # The TrainingDataLoggerOnBatchCallback emits this line per batch.
    pattern = re.compile(r"First word from training data: '([^']*)'")
    first_word_counts = Counter()
    with open(os.path.join(out_dir, "out_worker0.log")) as f:
        worker0_log = f.read()
        assert train_complete in worker0_log
        assert validation_complete in worker0_log
        for first_word in pattern.findall(worker0_log):
            first_word_counts[first_word] += 1
    with open(os.path.join(out_dir, "out_worker1.log")) as f:
        worker1_log = f.read()
        assert train_complete in worker1_log
        assert validation_complete in worker1_log
        for first_word in pattern.findall(worker1_log):
            first_word_counts[first_word] += 1
    # Across both workers, every instance is seen exactly once per epoch.
    assert first_word_counts == {
        "cats": num_epochs,
        "dogs": num_epochs,
        "snakes": num_epochs,
        "birds": num_epochs,
    }
def test_distributed_raises_error_with_no_gpus(self):
    """A "distributed" config section without "cuda_devices" is invalid and
    must raise ConfigurationError."""
    params = Params(
        {
            "model": {
                "type": "simple_tagger",
                "text_field_embedder": {
                    "token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
                },
                "encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
            },
            "dataset_reader": {"type": "sequence_tagging"},
            "train_data_path": SEQUENCE_TAGGING_DATA_PATH,
            "validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
            "data_loader": {"batch_size": 2},
            "trainer": {"num_epochs": 2, "optimizer": "adam"},
            # Deliberately empty: no "cuda_devices" key.
            "distributed": {},
        }
    )
    with pytest.raises(ConfigurationError):
        train_model(params, serialization_dir=os.path.join(self.TEST_DIR, "test_train_model"))
def test_train_saves_all_keys_in_config(self):
    """The config serialized to CONFIG_NAME must round-trip every key of the
    original Params (including seeds that train_model pops and consumes)."""
    params = Params(
        {
            "model": {
                "type": "simple_tagger",
                "text_field_embedder": {
                    "token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
                },
                "encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
            },
            # Seeds are consumed during training; the saved config must
            # still contain them.
            "pytorch_seed": 42,
            "numpy_seed": 42,
            "random_seed": 42,
            "dataset_reader": {"type": "sequence_tagging"},
            "train_data_path": SEQUENCE_TAGGING_DATA_PATH,
            "validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
            "data_loader": {"batch_size": 2},
            "trainer": {"num_epochs": 2, "optimizer": "adam"},
        }
    )
    serialization_dir = os.path.join(self.TEST_DIR, "test_train_model")
    # Snapshot the params BEFORE training, since train_model mutates them.
    params_as_dict = (
        params.as_ordered_dict()
    )
    train_model(params, serialization_dir=serialization_dir)
    config_path = os.path.join(serialization_dir, CONFIG_NAME)
    with open(config_path) as config:
        saved_config_as_dict = OrderedDict(json.load(config))
    assert params_as_dict == saved_config_as_dict
def test_error_is_throw_when_cuda_device_is_not_available(self):
    """Requesting a CUDA device index that does not exist on this machine
    must fail fast with a ConfigurationError."""
    params = Params(
        {
            "model": {
                "type": "simple_tagger",
                "text_field_embedder": {
                    "token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
                },
                "encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
            },
            "dataset_reader": {"type": "sequence_tagging"},
            "train_data_path": "test_fixtures/data/sequence_tagging.tsv",
            "validation_data_path": "test_fixtures/data/sequence_tagging.tsv",
            "data_loader": {"batch_size": 2},
            "trainer": {
                "num_epochs": 2,
                # device_count() is one past the highest valid index, so this
                # is always an unavailable device (even on GPU machines).
                "cuda_device": torch.cuda.device_count(),
                "optimizer": "adam",
            },
        }
    )
    with pytest.raises(ConfigurationError, match="Experiment specified"):
        train_model(params, serialization_dir=os.path.join(self.TEST_DIR, "test_train_model"))
def test_train_with_test_set(self):
    """Smoke test: training with a test set and evaluate_on_test=True runs
    end-to-end without error."""
    params = Params(
        {
            "model": {
                "type": "simple_tagger",
                "text_field_embedder": {
                    "token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
                },
                "encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
            },
            "dataset_reader": {"type": "sequence_tagging"},
            "train_data_path": SEQUENCE_TAGGING_DATA_PATH,
            "test_data_path": SEQUENCE_TAGGING_DATA_PATH,
            "validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
            "evaluate_on_test": True,
            "data_loader": {"batch_size": 2},
            "trainer": {"num_epochs": 2, "optimizer": "adam"},
        }
    )
    train_model(params, serialization_dir=os.path.join(self.TEST_DIR, "train_with_test_set"))
def test_train_number_of_steps(self):
    """Verify the `num_steps_per_epoch` passed to LR schedulers matches the
    number of actual optimizer steps, across batch sizes and gradient
    accumulation."""
    number_of_epochs = 2

    # Captured (via nonlocal) from inside MockLRScheduler.__init__ below.
    last_num_steps_per_epoch: Optional[int] = None

    @LearningRateScheduler.register("mock")
    class MockLRScheduler(ExponentialLearningRateScheduler):
        """Scheduler that only records the num_steps_per_epoch it is given."""

        def __init__(self, optimizer: torch.optim.Optimizer, num_steps_per_epoch: int):
            super().__init__(optimizer)
            nonlocal last_num_steps_per_epoch
            last_num_steps_per_epoch = num_steps_per_epoch

    # Counts training batches actually executed (via nonlocal below).
    batch_callback_counter = 0

    @TrainerCallback.register("counter")
    class CounterOnBatchCallback(TrainerCallback):
        """Callback that counts on_batch invocations during training."""

        def on_batch(
            self,
            trainer: GradientDescentTrainer,
            batch_inputs: List[TensorDict],
            batch_outputs: List[Dict[str, Any]],
            batch_metrics: Dict[str, Any],
            epoch: int,
            batch_number: int,
            is_training: bool,
            is_primary: bool = True,
            batch_grad_norm: Optional[float] = None,
            **kwargs,
        ) -> None:
            nonlocal batch_callback_counter
            if is_training:
                batch_callback_counter += 1

    params = Params(
        {
            "model": {
                "type": "simple_tagger",
                "text_field_embedder": {
                    "token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
                },
                "encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
            },
            "dataset_reader": {"type": "sequence_tagging"},
            "train_data_path": SEQUENCE_TAGGING_DATA_PATH,
            "test_data_path": SEQUENCE_TAGGING_DATA_PATH,
            "validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
            "evaluate_on_test": True,
            "data_loader": {"batch_size": 2},
            "trainer": {
                "num_epochs": number_of_epochs,
                "optimizer": "adam",
                "learning_rate_scheduler": {"type": "mock"},
                "callbacks": ["counter"],
            },
        }
    )
    # Baseline run: reported steps-per-epoch == observed batches per epoch.
    train_model(
        params.duplicate(), serialization_dir=os.path.join(self.TEST_DIR, "train_normal")
    )
    assert batch_callback_counter == last_num_steps_per_epoch * number_of_epochs
    batch_callback_counter = 0
    normal_steps_per_epoch = last_num_steps_per_epoch
    original_batch_size = params["data_loader"]["batch_size"]
    # batch_size=1: step count scales accordingly (still matches the batches).
    params["data_loader"]["batch_size"] = 1
    train_model(
        params.duplicate(), serialization_dir=os.path.join(self.TEST_DIR, "train_with_bs1")
    )
    assert batch_callback_counter == last_num_steps_per_epoch * number_of_epochs
    batch_callback_counter = 0
    assert normal_steps_per_epoch == math.ceil(last_num_steps_per_epoch / original_batch_size)
    # Gradient accumulation: optimizer steps shrink by the accumulation
    # factor (rounded up), even though the same batches are processed.
    params["data_loader"]["batch_size"] = original_batch_size
    params["trainer"]["num_gradient_accumulation_steps"] = 3
    train_model(params, serialization_dir=os.path.join(self.TEST_DIR, "train_with_ga"))
    assert batch_callback_counter == last_num_steps_per_epoch * number_of_epochs
    batch_callback_counter = 0
    assert math.ceil(normal_steps_per_epoch / 3) == last_num_steps_per_epoch
def test_train_args(self):
    """The ``train`` subcommand parses its CLI arguments correctly.

    Also checks that omitting either required argument makes argparse exit
    with status 2 (its standard usage-error code).
    """
    parser = argparse.ArgumentParser(description="Testing")
    subparsers = parser.add_subparsers(title="Commands", metavar="")
    Train().add_subparser(subparsers)

    # Short and long forms of the serialization-dir flag behave identically.
    for serialization_arg in ["-s", "--serialization-dir"]:
        raw_args = ["train", "path/to/params", serialization_arg, "serialization_dir"]
        args = parser.parse_args(raw_args)
        assert args.func == train_model_from_args
        assert args.param_path == "path/to/params"
        assert args.serialization_dir == "serialization_dir"

    # BUGFIX: the exit-code asserts used to live *inside* the `with` block
    # after the raising call (dead code) and referenced the nonexistent
    # `ExceptionInfo.exception` attribute; the correct accessor is `.value`.
    with pytest.raises(SystemExit) as cm:
        parser.parse_args(["train", "-s", "serialization_dir"])
    assert cm.value.code == 2  # no param_path

    with pytest.raises(SystemExit) as cm:
        parser.parse_args(["train", "path/to/params"])
    assert cm.value.code == 2  # no serialization-dir
def test_train_model_can_instantiate_from_params(self):
    """TrainModel.from_params builds successfully from a fixture config."""
    params = Params.from_file(self.FIXTURES_ROOT / "simple_tagger" / "experiment.json")
    # Just make sure it doesn't crash.
    TrainModel.from_params(
        params=params, serialization_dir=self.TEST_DIR, local_rank=0, batch_weight_key=""
    )
def test_train_can_fine_tune_model_from_archive(self):
    """Fine-tuning from an archived model (Model.from_archive in the config)
    runs, and the vocabulary gets extended with the new data's tokens."""
    params = Params.from_file(
        self.FIXTURES_ROOT / "basic_classifier" / "experiment_from_archive.jsonnet"
    )
    train_loop = TrainModel.from_params(
        params=params, serialization_dir=self.TEST_DIR, local_rank=0, batch_weight_key=""
    )
    train_loop.run()
    # Reload the original archived model to compare vocab sizes against.
    model = Model.from_archive(
        self.FIXTURES_ROOT / "basic_classifier" / "serialization" / "model.tar.gz"
    )
    # The fine-tuning data differs from the data used to produce the model
    # archive, and the config requests vocab extension, so the fine-tuned
    # vocab must be strictly larger.
    assert train_loop.model.vocab.get_vocab_size() > model.vocab.get_vocab_size()
def test_train_nograd_regex(self):
    """The trainer's "no_grad" regex list freezes exactly the matching
    parameters; freezing everything is an error."""
    # Params are consumed by train_model, so build a fresh copy per run.
    params_get = lambda: Params(
        {
            "model": {
                "type": "simple_tagger",
                "text_field_embedder": {
                    "token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
                },
                "encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
            },
            "dataset_reader": {"type": "sequence_tagging"},
            "train_data_path": SEQUENCE_TAGGING_DATA_PATH,
            "validation_data_path": SEQUENCE_TAGGING_DATA_PATH,
            "data_loader": {"batch_size": 2},
            "trainer": {"num_epochs": 2, "optimizer": "adam"},
        }
    )
    serialization_dir = os.path.join(self.TEST_DIR, "test_train_nograd")
    # None frozen / embedder frozen / embedder + encoder frozen.
    regex_lists = [[], [".*text_field_embedder.*"], [".*text_field_embedder.*", ".*encoder.*"]]
    for regex_list in regex_lists:
        params = params_get()
        params["trainer"]["no_grad"] = regex_list
        shutil.rmtree(serialization_dir, ignore_errors=True)
        model = train_model(params, serialization_dir=serialization_dir)
        # If regex is matched, parameter name should have requires_grad False
        # Or else True
        for name, parameter in model.named_parameters():
            if any(re.search(regex, name) for regex in regex_list):
                assert not parameter.requires_grad
            else:
                assert parameter.requires_grad
    # If all parameters have requires_grad=False, then error.
    params = params_get()
    params["trainer"]["no_grad"] = ["*"]
    shutil.rmtree(serialization_dir, ignore_errors=True)
    with pytest.raises(Exception):
        train_model(params, serialization_dir=serialization_dir)
class TestDryRun(AllenNlpTestCase):
    """Tests for `train_model(..., dry_run=True)`: it should build (or
    extend) the vocabulary and stop before any actual training."""

    def setup_method(self):
        """Create a minimal simple_tagger config shared by all tests."""
        super().setup_method()

        self.params = Params(
            {
                "model": {
                    "type": "simple_tagger",
                    "text_field_embedder": {
                        "token_embedders": {"tokens": {"type": "embedding", "embedding_dim": 5}}
                    },
                    "encoder": {"type": "lstm", "input_size": 5, "hidden_size": 7, "num_layers": 2},
                },
                "dataset_reader": {"type": "sequence_tagging"},
                "train_data_path": str(self.FIXTURES_ROOT / "data" / "sequence_tagging.tsv"),
                "validation_data_path": str(self.FIXTURES_ROOT / "data" / "sequence_tagging.tsv"),
                "data_loader": {"batch_size": 2},
                "trainer": {"num_epochs": 2, "optimizer": "adam"},
            }
        )

    def test_dry_run_doesnt_overwrite_vocab(self):
        """A dry run must refuse to clobber a non-empty vocabulary dir."""
        vocab_path = self.TEST_DIR / "vocabulary"
        os.mkdir(vocab_path)
        # Put something in the vocab directory
        with open(vocab_path / "test.txt", "a+") as open_file:
            open_file.write("test")

        # It should raise error if vocab dir is non-empty
        with pytest.raises(ConfigurationError):
            train_model(self.params, self.TEST_DIR, dry_run=True)

    def test_dry_run_makes_vocab(self):
        """A dry run writes the full vocabulary files with the expected
        tokens and labels from the fixture data."""
        vocab_path = self.TEST_DIR / "vocabulary"

        train_model(self.params, self.TEST_DIR, dry_run=True)

        vocab_files = os.listdir(vocab_path)
        assert set(vocab_files) == {
            ".lock",
            "labels.txt",
            "non_padded_namespaces.txt",
            "tokens.txt",
        }

        with open(vocab_path / "tokens.txt") as f:
            tokens = [line.strip() for line in f]

        tokens.sort()
        assert tokens == [
            ".",
            "@@UNKNOWN@@",
            "animals",
            "are",
            "birds",
            "cats",
            "dogs",
            "horses",
            "snakes",
        ]

        with open(vocab_path / "labels.txt") as f:
            labels = [line.strip() for line in f]

        labels.sort()
        assert labels == ["N", "V"]

    def test_dry_run_with_extension(self):
        """With vocabulary type "extend", a dry run starts from an existing
        vocab and adds tokens meeting min_count from the new data."""
        existing_serialization_dir = self.TEST_DIR / "existing"
        extended_serialization_dir = self.TEST_DIR / "extended"
        existing_vocab_path = existing_serialization_dir / "vocabulary"
        extended_vocab_path = extended_serialization_dir / "vocabulary"

        # Seed an existing vocab with two sentinel tokens.
        vocab = Vocabulary()
        vocab.add_token_to_namespace("some_weird_token_1", namespace="tokens")
        vocab.add_token_to_namespace("some_weird_token_2", namespace="tokens")
        os.makedirs(existing_serialization_dir, exist_ok=True)
        vocab.save_to_files(existing_vocab_path)

        self.params["vocabulary"] = {}
        self.params["vocabulary"]["type"] = "extend"
        self.params["vocabulary"]["directory"] = str(existing_vocab_path)
        # Only tokens occurring >= 3 times in the new data get added.
        self.params["vocabulary"]["min_count"] = {"tokens": 3}
        train_model(self.params, extended_serialization_dir, dry_run=True)

        vocab_files = os.listdir(extended_vocab_path)
        assert set(vocab_files) == {
            ".lock",
            "labels.txt",
            "non_padded_namespaces.txt",
            "tokens.txt",
        }

        with open(extended_vocab_path / "tokens.txt") as f:
            tokens = [line.strip() for line in f]

        # Pre-existing tokens keep their original indices (after @@UNKNOWN@@).
        assert tokens[0] == "@@UNKNOWN@@"
        assert tokens[1] == "some_weird_token_1"
        assert tokens[2] == "some_weird_token_2"

        tokens.sort()
        assert tokens == [
            ".",
            "@@UNKNOWN@@",
            "animals",
            "are",
            "some_weird_token_1",
            "some_weird_token_2",
        ]

        with open(extended_vocab_path / "labels.txt") as f:
            labels = [line.strip() for line in f]

        labels.sort()
        assert labels == ["N", "V"]

    def test_dry_run_without_extension(self):
        """With vocabulary type "from_files", the vocab is loaded as-is and
        NOT extended with tokens from the data."""
        existing_serialization_dir = self.TEST_DIR / "existing"
        extended_serialization_dir = self.TEST_DIR / "extended"
        existing_vocab_path = existing_serialization_dir / "vocabulary"
        extended_vocab_path = extended_serialization_dir / "vocabulary"

        vocab = Vocabulary()
        # if extend is False, its users responsibility to make sure that dataset instances
        # will be indexible by provided vocabulary. At least @@UNKNOWN@@ should be present in
        # namespace for which there could be OOV entries seen in dataset during indexing.
        # For `tokens` ns, new words will be seen but `tokens` has @@UNKNOWN@@ token.
        # but for 'labels' ns, there is no @@UNKNOWN@@ so required to add 'N', 'V' upfront.
        vocab.add_token_to_namespace("some_weird_token_1", namespace="tokens")
        vocab.add_token_to_namespace("some_weird_token_2", namespace="tokens")
        vocab.add_token_to_namespace("N", namespace="labels")
        vocab.add_token_to_namespace("V", namespace="labels")
        os.makedirs(existing_serialization_dir, exist_ok=True)
        vocab.save_to_files(existing_vocab_path)

        self.params["vocabulary"] = {}
        self.params["vocabulary"]["type"] = "from_files"
        self.params["vocabulary"]["directory"] = str(existing_vocab_path)
        train_model(self.params, extended_serialization_dir, dry_run=True)

        with open(extended_vocab_path / "tokens.txt") as f:
            tokens = [line.strip() for line in f]

        # Exactly the loaded vocab; nothing from the dataset was added.
        assert tokens[0] == "@@UNKNOWN@@"
        assert tokens[1] == "some_weird_token_1"
        assert tokens[2] == "some_weird_token_2"
        assert len(tokens) == 3

    def test_make_vocab_args(self):
        """The --dry-run CLI flag is parsed through to the args namespace."""
        parser = argparse.ArgumentParser(description="Testing")
        subparsers = parser.add_subparsers(title="Commands", metavar="")
        Train().add_subparser(subparsers)
        for serialization_arg in ["-s", "--serialization-dir"]:
            raw_args = [
                "train",
                "path/to/params",
                serialization_arg,
                "serialization_dir",
                "--dry-run",
            ]
            args = parser.parse_args(raw_args)
            assert args.func == train_model_from_args
            assert args.param_path == "path/to/params"
            assert args.serialization_dir == "serialization_dir"
            assert args.dry_run

    def test_warn_validation_loader_batches_per_epoch(self):
        """Setting batches_per_epoch on the validation loader should warn
        (validation normally iterates the full dataset)."""
        self.params["data_loader"]["batches_per_epoch"] = 3
        with pytest.warns(UserWarning, match="batches_per_epoch"):
            train_model(self.params, self.TEST_DIR, dry_run=True)
| true | true |
1c32983c9fd1009fd780c6074a40829ed2e5320b | 10,682 | py | Python | SpiderKeeper/app/spider/model.py | huichen90/SpiderKeeper | 9b9fdb56c87da7734b9dce2e63adc0e97c201c57 | [
"MIT"
] | null | null | null | SpiderKeeper/app/spider/model.py | huichen90/SpiderKeeper | 9b9fdb56c87da7734b9dce2e63adc0e97c201c57 | [
"MIT"
] | null | null | null | SpiderKeeper/app/spider/model.py | huichen90/SpiderKeeper | 9b9fdb56c87da7734b9dce2e63adc0e97c201c57 | [
"MIT"
] | null | null | null | import datetime
from sqlalchemy import desc
from SpiderKeeper.app import db, Base
class Project(Base):
    """A deployed scrapy project registered with SpiderKeeper.

    (Original Chinese note: "created project table".)
    """

    __tablename__ = 'sk_project'

    # Human-readable project name; used as the lookup key in load_project.
    project_name = db.Column(db.String(50))

    @classmethod
    def load_project(cls, project_list):
        """Persist every project in *project_list* not already stored.

        Existing rows (matched by ``project_name``) are left untouched.
        """
        for project in project_list:
            existed_project = cls.query.filter_by(project_name=project.project_name).first()
            if not existed_project:
                db.session.add(project)
                db.session.commit()

    @classmethod
    def find_project_by_id(cls, project_id):
        """Return the project with primary key *project_id*, or None.

        Uses ``cls.query`` (was hard-coded ``Project.query``) for
        consistency with the other classmethods in this module.
        """
        return cls.query.filter_by(id=project_id).first()

    def to_dict(self):
        """Serialize to the dict shape expected by the JSON API."""
        return {
            "project_id": self.id,
            "project_name": self.project_name
        }
class SpiderInstance(Base):
    __tablename__ = 'sk_spider'
    # (Original Chinese note: "spider table".)
    '''爬虫表'''
    spider_name = db.Column(db.String(100))
    project_id = db.Column(db.INTEGER, nullable=False, index=True)

    @classmethod
    def update_spider_instances(cls, project_id, spider_instance_list):
        """Synchronize the stored spiders of *project_id* with
        *spider_instance_list*: insert new spiders, delete removed ones."""
        # Add any spider not yet stored for this project.
        for spider_instance in spider_instance_list:
            existed_spider_instance = cls.query.filter_by(project_id=project_id,
                                                          spider_name=spider_instance.spider_name).first()
            if not existed_spider_instance:
                db.session.add(spider_instance)
                db.session.commit()

        # Delete stored spiders that are no longer in the incoming list.
        for spider in cls.query.filter_by(project_id=project_id).all():
            existed_spider = any(
                spider.spider_name == s.spider_name
                for s in spider_instance_list
            )
            if not existed_spider:
                db.session.delete(spider)
                db.session.commit()

    @classmethod
    def list_spider_by_project_id(cls, project_id):
        """Return all spiders belonging to *project_id*."""
        return cls.query.filter_by(project_id=project_id).all()

    def to_dict(self):
        """Serialize to the dict shape expected by the JSON API."""
        return dict(spider_instance_id=self.id,
                    spider_name=self.spider_name,
                    project_id=self.project_id)

    @classmethod
    def list_spiders(cls, project_id):
        """Return the project's spiders enriched with last/average runtime.

        Runtimes are aggregated over sk_job_execution via raw SQL (one pass
        per metric), then joined onto each spider's dict by spider_name.
        """
        # Latest job_execution.date_created per spider.
        # NOTE(review): relies on MySQL-style GROUP BY over a pre-ordered
        # subquery to pick the newest row — not portable to strict SQL modes.
        sql_last_runtime = '''
            select * from (select a.spider_name,b.date_created from sk_job_instance as a
                left join sk_job_execution as b
                on a.id = b.job_instance_id
                order by b.date_created desc) as c
            group by c.spider_name
            '''
        # Average wall-clock duration of completed executions per spider.
        sql_avg_runtime = '''
            select a.spider_name,avg(end_time-start_time) from sk_job_instance as a
                left join sk_job_execution as b
                on a.id = b.job_instance_id
                where b.end_time is not null
                group by a.spider_name
            '''
        last_runtime_list = dict(
            (spider_name, last_run_time) for spider_name, last_run_time in db.engine.execute(sql_last_runtime))
        avg_runtime_list = dict(
            (spider_name, avg_run_time) for spider_name, avg_run_time in db.engine.execute(sql_avg_runtime))
        res = []
        for spider in cls.query.filter_by(project_id=project_id).all():
            last_runtime = last_runtime_list.get(spider.spider_name)
            res.append(dict(spider.to_dict(),
                            **{'spider_last_runtime': last_runtime if last_runtime else '-',
                               'spider_avg_runtime': avg_runtime_list.get(spider.spider_name)
                               }))
        return res
class JobPriority():
    """Priority levels a job can be scheduled with."""

    LOW = -1
    NORMAL = 0
    HIGH = 1
    HIGHEST = 2
class JobRunType():
    # How a job is scheduled: run exactly once, or on a cron-style schedule.
    ONETIME = 'onetime'
    PERIODIC = 'periodic'
class JobInstance(Base):
    __tablename__ = 'sk_job_instance'
    # (Original Chinese note: "spider job table".)
    '''爬虫任务表'''
    job_name = db.Column(db.String(50))  # job name (was: 任务名称)
    spider_type = db.Column(db.String(50))  # collection type (was: 采集形式)
    spider_name = db.Column(db.String(100), nullable=False, index=True)
    project_id = db.Column(db.INTEGER, nullable=False, index=True)
    tags = db.Column(db.Text)  # job tag(split by , )
    spider_arguments = db.Column(db.Text)  # job execute arguments(split by , ex.: arg1=foo,arg2=bar)
    priority = db.Column(db.INTEGER)  # see JobPriority
    desc = db.Column(db.Text)
    # Cron-style schedule fields (used when run_type is periodic).
    cron_minutes = db.Column(db.String(20), default="0")
    cron_hour = db.Column(db.String(20), default="*")
    cron_day_of_month = db.Column(db.String(20), default="*")
    cron_day_of_week = db.Column(db.String(20), default="*")
    cron_month = db.Column(db.String(20), default="*")
    # 0 means enabled, -1 disabled (note the inverted encoding; see to_dict).
    enabled = db.Column(db.INTEGER, default=0)  # 0/-1
    run_type = db.Column(db.String(20))  # periodic/onetime

    def to_dict(self):
        """Serialize to the dict shape expected by the JSON API."""
        return dict(
            job_instance_id=self.id,
            job_name = self.job_name,
            spider_type = self.spider_type,
            spider_name=self.spider_name,
            # Stored as a comma-separated string; exposed as a list (or None).
            tags=self.tags.split(',') if self.tags else None,
            spider_arguments=self.spider_arguments,
            priority=self.priority,
            desc=self.desc,
            cron_minutes=self.cron_minutes,
            cron_hour=self.cron_hour,
            cron_day_of_month=self.cron_day_of_month,
            cron_day_of_week=self.cron_day_of_week,
            cron_month=self.cron_month,
            # Translate the 0/-1 column into a boolean.
            enabled=self.enabled == 0,
            run_type=self.run_type
        )

    @classmethod
    def list_job_instance_by_project_id(cls, project_id):
        """Return all job instances belonging to *project_id*."""
        return cls.query.filter_by(project_id=project_id).all()

    @classmethod
    def find_job_instance_by_id(cls, job_instance_id):
        """Return the job instance with primary key *job_instance_id*, or None."""
        return cls.query.filter_by(id=job_instance_id).first()
class SpiderStatus():
    """Lifecycle states of a job execution."""

    PENDING = 0
    RUNNING = 1
    FINISHED = 2
    CANCELED = 3
class JobExecution(Base):
    """One execution of a JobInstance, tracking scheduling and run state.

    (Original Chinese note: "records the execution status of spiders".)
    """

    __tablename__ = 'sk_job_execution'

    project_id = db.Column(db.INTEGER, nullable=False, index=True)
    # Execution id assigned by the remote scrapy service (was: 服务器作业执行ID).
    service_job_execution_id = db.Column(db.String(50), nullable=False, index=True)
    job_instance_id = db.Column(db.INTEGER, nullable=False, index=True)  # (was: 作业实例ID)
    create_time = db.Column(db.DATETIME)
    start_time = db.Column(db.DATETIME)
    end_time = db.Column(db.DATETIME)
    running_status = db.Column(db.INTEGER, default=SpiderStatus.PENDING)
    running_on = db.Column(db.Text)  # which service host the job runs on

    def to_dict(self):
        """Serialize the execution (with its nested job instance) for the API."""
        job_instance = JobInstance.query.filter_by(id=self.job_instance_id).first()
        return {
            'project_id': self.project_id,
            'job_execution_id': self.id,
            'job_instance_id': self.job_instance_id,
            'service_job_execution_id': self.service_job_execution_id,
            'create_time': self.create_time.strftime('%Y-%m-%d %H:%M:%S') if self.create_time else None,
            'start_time': self.start_time.strftime('%Y-%m-%d %H:%M:%S') if self.start_time else None,
            'end_time': self.end_time.strftime('%Y-%m-%d %H:%M:%S') if self.end_time else None,
            'running_status': self.running_status,
            'running_on': self.running_on,
            'job_instance': job_instance.to_dict() if job_instance else {}
        }

    @classmethod
    def find_job_by_service_id(cls, service_job_execution_id):
        """Return the execution with the given service id, or None."""
        return cls.query.filter_by(service_job_execution_id=service_job_execution_id).first()

    @classmethod
    def list_job_by_service_ids(cls, service_job_execution_ids):
        """Return all executions whose service id is in the given iterable."""
        return cls.query.filter(cls.service_job_execution_id.in_(service_job_execution_ids)).all()

    @classmethod
    def list_uncomplete_job(cls):
        """Return executions that are neither FINISHED nor CANCELED."""
        return cls.query.filter(cls.running_status != SpiderStatus.FINISHED,
                                cls.running_status != SpiderStatus.CANCELED).all()

    @classmethod
    def list_jobs(cls, project_id, each_status_limit=100):
        """Group the newest executions of *project_id* by status.

        Returns a dict with 'PENDING', 'RUNNING' and 'COMPLETED' lists of
        serialized executions, each capped at *each_status_limit* and
        ordered newest-first.
        """
        result = {}
        result['PENDING'] = [job_execution.to_dict() for job_execution in
                             JobExecution.query.filter_by(project_id=project_id,
                                                          running_status=SpiderStatus.PENDING).order_by(
                                 desc(JobExecution.date_modified)).limit(each_status_limit)]
        result['RUNNING'] = [job_execution.to_dict() for job_execution in
                             JobExecution.query.filter_by(project_id=project_id,
                                                          running_status=SpiderStatus.RUNNING).order_by(
                                 desc(JobExecution.date_modified)).limit(each_status_limit)]
        result['COMPLETED'] = [job_execution.to_dict() for job_execution in
                               JobExecution.query.filter(JobExecution.project_id == project_id).filter(
                                   (JobExecution.running_status == SpiderStatus.FINISHED) | (
                                       JobExecution.running_status == SpiderStatus.CANCELED)).order_by(
                                   desc(JobExecution.date_modified)).limit(each_status_limit)]
        return result

    @classmethod
    def list_run_stats_by_hours(cls, project_id):
        """Count executions created in each of the last 24 hours.

        Returns ``[{'key': 'YYYY-mm-dd HH:00:00', 'value': count}, ...]``,
        oldest hour first, with empty hours reported as 0.
        """
        result = {}
        hour_keys = []
        last_time = datetime.datetime.now() - datetime.timedelta(hours=23)
        # Truncate to the start of that hour so the DB filter lines up with
        # the oldest hour bucket.
        last_time = datetime.datetime(last_time.year, last_time.month, last_time.day, last_time.hour)
        for hour in range(23, -1, -1):
            time_tmp = datetime.datetime.now() - datetime.timedelta(hours=hour)
            hour_key = time_tmp.strftime('%Y-%m-%d %H:00:00')
            hour_keys.append(hour_key)
            result[hour_key] = 0  # init every bucket so missing hours show 0
        for job_execution in JobExecution.query.filter(JobExecution.project_id == project_id,
                                                       JobExecution.date_created >= last_time).all():
            # BUGFIX: the query filters on date_created but buckets by
            # create_time, which may be NULL or fall outside the 24
            # pre-built buckets; previously that raised AttributeError /
            # KeyError and broke the stats endpoint. Skip such rows.
            if job_execution.create_time is None:
                continue
            hour_key = job_execution.create_time.strftime('%Y-%m-%d %H:00:00')
            if hour_key in result:
                result[hour_key] += 1
        return [dict(key=hour_key, value=result[hour_key]) for hour_key in hour_keys]
class Videoitems(db.Model):
    # Crawled video metadata collected by the spiders.
    __tablename__ = 'videoitems'
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(500),nullable=False)
    url = db.Column(db.String(100), nullable=False, index=True)  # source video URL
    keywords = db.Column(db.String(100), nullable=False)
    # NOTE(review): ``default=[]`` is a Python list used as the default of a
    # String column — presumably this should be a string (e.g. "[]") or be
    # dropped; confirm how tags are serialized on insert.
    tags = db.Column(db.String(1000),default=[])
    video_category = db.Column(db.String(50),default="其它")  # default means "other"
    upload_time = db.Column(db.String(50))
    spider_time = db.Column(db.String(50))  # when the item was scraped
    info = db.Column(db.Text)
    site_name = db.Column(db.String(20), default="")
    video_time = db.Column(db.Integer, default=0)  # duration; units not shown here — TODO confirm
    isdownload = db.Column(db.Integer, default=0)  # presumably 0/1 download flag — TODO confirm
    play_count = db.Column(db.String(20), default="0")
    task_id = db.Column(db.String(20))
class RunningJob(Base):
__tablename__ = 'running_job'
spider_random_id = db.Column(db.String(50),nullable=False,index=True) | 42.899598 | 111 | 0.627691 | import datetime
from sqlalchemy import desc
from SpiderKeeper.app import db, Base
class Project(Base):
__tablename__ = 'sk_project'
project_name = db.Column(db.String(50))
@classmethod
def load_project(cls, project_list):
for project in project_list:
existed_project = cls.query.filter_by(project_name=project.project_name).first()
if not existed_project:
db.session.add(project)
db.session.commit()
@classmethod
def find_project_by_id(cls, project_id):
return Project.query.filter_by(id=project_id).first()
def to_dict(self):
return {
"project_id": self.id,
"project_name": self.project_name
}
class SpiderInstance(Base):
__tablename__ = 'sk_spider'
spider_name = db.Column(db.String(100))
project_id = db.Column(db.INTEGER, nullable=False, index=True)
@classmethod
def update_spider_instances(cls, project_id, spider_instance_list):
for spider_instance in spider_instance_list:
existed_spider_instance = cls.query.filter_by(project_id=project_id,
spider_name=spider_instance.spider_name).first()
if not existed_spider_instance:
db.session.add(spider_instance)
db.session.commit()
for spider in cls.query.filter_by(project_id=project_id).all():
existed_spider = any(
spider.spider_name == s.spider_name
for s in spider_instance_list
)
if not existed_spider:
db.session.delete(spider)
db.session.commit()
@classmethod
def list_spider_by_project_id(cls, project_id):
return cls.query.filter_by(project_id=project_id).all()
def to_dict(self):
return dict(spider_instance_id=self.id,
spider_name=self.spider_name,
project_id=self.project_id)
@classmethod
def list_spiders(cls, project_id):
sql_last_runtime = '''
select * from (select a.spider_name,b.date_created from sk_job_instance as a
left join sk_job_execution as b
on a.id = b.job_instance_id
order by b.date_created desc) as c
group by c.spider_name
'''
sql_avg_runtime = '''
select a.spider_name,avg(end_time-start_time) from sk_job_instance as a
left join sk_job_execution as b
on a.id = b.job_instance_id
where b.end_time is not null
group by a.spider_name
'''
last_runtime_list = dict(
(spider_name, last_run_time) for spider_name, last_run_time in db.engine.execute(sql_last_runtime))
avg_runtime_list = dict(
(spider_name, avg_run_time) for spider_name, avg_run_time in db.engine.execute(sql_avg_runtime))
res = []
for spider in cls.query.filter_by(project_id=project_id).all():
last_runtime = last_runtime_list.get(spider.spider_name)
res.append(dict(spider.to_dict(),
**{'spider_last_runtime': last_runtime if last_runtime else '-',
'spider_avg_runtime': avg_runtime_list.get(spider.spider_name)
}))
return res
class JobPriority():
LOW, NORMAL, HIGH, HIGHEST = range(-1, 3)
class JobRunType():
ONETIME = 'onetime'
PERIODIC = 'periodic'
class JobInstance(Base):
__tablename__ = 'sk_job_instance'
job_name = db.Column(db.String(50))
spider_type = db.Column(db.String(50))
spider_name = db.Column(db.String(100), nullable=False, index=True)
project_id = db.Column(db.INTEGER, nullable=False, index=True)
tags = db.Column(db.Text)
spider_arguments = db.Column(db.Text)
priority = db.Column(db.INTEGER)
desc = db.Column(db.Text)
cron_minutes = db.Column(db.String(20), default="0")
cron_hour = db.Column(db.String(20), default="*")
cron_day_of_month = db.Column(db.String(20), default="*")
cron_day_of_week = db.Column(db.String(20), default="*")
cron_month = db.Column(db.String(20), default="*")
enabled = db.Column(db.INTEGER, default=0)
run_type = db.Column(db.String(20))
def to_dict(self):
return dict(
job_instance_id=self.id,
job_name = self.job_name,
spider_type = self.spider_type,
spider_name=self.spider_name,
tags=self.tags.split(',') if self.tags else None,
spider_arguments=self.spider_arguments,
priority=self.priority,
desc=self.desc,
cron_minutes=self.cron_minutes,
cron_hour=self.cron_hour,
cron_day_of_month=self.cron_day_of_month,
cron_day_of_week=self.cron_day_of_week,
cron_month=self.cron_month,
enabled=self.enabled == 0,
run_type=self.run_type
)
@classmethod
def list_job_instance_by_project_id(cls, project_id):
return cls.query.filter_by(project_id=project_id).all()
@classmethod
def find_job_instance_by_id(cls, job_instance_id):
return cls.query.filter_by(id=job_instance_id).first()
class SpiderStatus():
PENDING, RUNNING, FINISHED, CANCELED = range(4)
class JobExecution(Base):
__tablename__ = 'sk_job_execution'
project_id = db.Column(db.INTEGER, nullable=False, index=True)
service_job_execution_id = db.Column(db.String(50), nullable=False, index=True)
job_instance_id = db.Column(db.INTEGER, nullable=False, index=True)
create_time = db.Column(db.DATETIME)
start_time = db.Column(db.DATETIME)
end_time = db.Column(db.DATETIME)
running_status = db.Column(db.INTEGER, default=SpiderStatus.PENDING)
running_on = db.Column(db.Text)
def to_dict(self):
job_instance = JobInstance.query.filter_by(id=self.job_instance_id).first()
return {
'project_id': self.project_id,
'job_execution_id': self.id,
'job_instance_id': self.job_instance_id,
'service_job_execution_id': self.service_job_execution_id,
'create_time': self.create_time.strftime('%Y-%m-%d %H:%M:%S') if self.create_time else None,
'start_time': self.start_time.strftime('%Y-%m-%d %H:%M:%S') if self.start_time else None,
'end_time': self.end_time.strftime('%Y-%m-%d %H:%M:%S') if self.end_time else None,
'running_status': self.running_status,
'running_on': self.running_on,
'job_instance': job_instance.to_dict() if job_instance else {}
}
@classmethod
def find_job_by_service_id(cls, service_job_execution_id):
return cls.query.filter_by(service_job_execution_id=service_job_execution_id).first()
@classmethod
def list_job_by_service_ids(cls, service_job_execution_ids):
return cls.query.filter(cls.service_job_execution_id.in_(service_job_execution_ids)).all()
@classmethod
def list_uncomplete_job(cls):
return cls.query.filter(cls.running_status != SpiderStatus.FINISHED,
cls.running_status != SpiderStatus.CANCELED).all()
@classmethod
def list_jobs(cls, project_id, each_status_limit=100):
result = {}
result['PENDING'] = [job_execution.to_dict() for job_execution in
JobExecution.query.filter_by(project_id=project_id,
running_status=SpiderStatus.PENDING).order_by(
desc(JobExecution.date_modified)).limit(each_status_limit)]
result['RUNNING'] = [job_execution.to_dict() for job_execution in
JobExecution.query.filter_by(project_id=project_id,
running_status=SpiderStatus.RUNNING).order_by(
desc(JobExecution.date_modified)).limit(each_status_limit)]
result['COMPLETED'] = [job_execution.to_dict() for job_execution in
JobExecution.query.filter(JobExecution.project_id == project_id).filter(
(JobExecution.running_status == SpiderStatus.FINISHED) | (
JobExecution.running_status == SpiderStatus.CANCELED)).order_by(
desc(JobExecution.date_modified)).limit(each_status_limit)]
return result
@classmethod
def list_run_stats_by_hours(cls, project_id):
result = {}
hour_keys = []
last_time = datetime.datetime.now() - datetime.timedelta(hours=23)
last_time = datetime.datetime(last_time.year, last_time.month, last_time.day, last_time.hour)
for hour in range(23, -1, -1):
time_tmp = datetime.datetime.now() - datetime.timedelta(hours=hour)
hour_key = time_tmp.strftime('%Y-%m-%d %H:00:00')
hour_keys.append(hour_key)
result[hour_key] = 0
for job_execution in JobExecution.query.filter(JobExecution.project_id == project_id,
JobExecution.date_created >= last_time).all():
hour_key = job_execution.create_time.strftime('%Y-%m-%d %H:00:00')
result[hour_key] += 1
return [dict(key=hour_key, value=result[hour_key]) for hour_key in hour_keys]
class Videoitems(db.Model):
    """One scraped video item, persisted per crawl result."""
    __tablename__ = 'videoitems'
    id = db.Column(db.Integer, primary_key=True)
    # Title as scraped from the source page.
    title = db.Column(db.String(500),nullable=False)
    # Canonical video URL; indexed for de-duplication lookups.
    url = db.Column(db.String(100), nullable=False, index=True)
    keywords = db.Column(db.String(100), nullable=False)
    # NOTE(review): default=[] is a mutable list default on a String column —
    # presumably tags are serialized before storage; confirm against writers.
    tags = db.Column(db.String(1000),default=[])
    # Default '其它' means 'Other' (uncategorized).
    video_category = db.Column(db.String(50),default="其它")
    # Timestamps stored as plain strings, not datetimes.
    upload_time = db.Column(db.String(50))
    spider_time = db.Column(db.String(50))
    # Free-form description / extra metadata.
    info = db.Column(db.Text)
    site_name = db.Column(db.String(20), default="")
    # Duration in seconds — TODO confirm unit against the scraper.
    video_time = db.Column(db.Integer, default=0)
    # Download flag stored as 0/1 integer.
    isdownload = db.Column(db.Integer, default=0)
    # Play count kept as a string (sites report values like '1.2万').
    play_count = db.Column(db.String(20), default="0")
    task_id = db.Column(db.String(20))
class RunningJob(Base):
__tablename__ = 'running_job'
spider_random_id = db.Column(db.String(50),nullable=False,index=True) | true | true |
1c3298ce7a8f61c528dabe303770da0032e2cd4a | 196 | py | Python | bot/hooks/__init__.py | lungdart/discord_rpg_bot | dc574026965b972c6935cca0db2679e7be757939 | [
"MIT"
] | 1 | 2021-03-01T22:20:33.000Z | 2021-03-01T22:20:33.000Z | bot/hooks/__init__.py | lungdart/discord_rpg_bot | dc574026965b972c6935cca0db2679e7be757939 | [
"MIT"
] | 17 | 2021-03-05T16:50:48.000Z | 2021-03-18T17:49:47.000Z | bot/hooks/__init__.py | lungdart/discord_rpg_bot | dc574026965b972c6935cca0db2679e7be757939 | [
"MIT"
] | null | null | null | from bot.hooks.admin import Admin
from bot.hooks.battle import Battle
from bot.hooks.manage_char import ManageCharacter
from bot.hooks.activities import Activities
from bot.hooks.shop import Shop
| 32.666667 | 49 | 0.846939 | from bot.hooks.admin import Admin
from bot.hooks.battle import Battle
from bot.hooks.manage_char import ManageCharacter
from bot.hooks.activities import Activities
from bot.hooks.shop import Shop
| true | true |
1c329a5bd7c432f41175cac631c67f134572022d | 938 | py | Python | swiftwind/core/templatetags/swiftwind_utilities.py | m-den-i/swiftwind | 3af9a1ec3327a992f1d3f2c11fefbb3c06cadbce | [
"MIT"
] | 11 | 2016-12-13T00:46:48.000Z | 2020-07-28T13:44:12.000Z | swiftwind/core/templatetags/swiftwind_utilities.py | m-den-i/swiftwind | 3af9a1ec3327a992f1d3f2c11fefbb3c06cadbce | [
"MIT"
] | 15 | 2017-11-29T19:38:32.000Z | 2018-11-02T21:08:04.000Z | swiftwind/core/templatetags/swiftwind_utilities.py | m-den-i/swiftwind | 3af9a1ec3327a992f1d3f2c11fefbb3c06cadbce | [
"MIT"
] | 4 | 2018-10-23T12:39:04.000Z | 2019-12-30T11:06:23.000Z | import six
from django import template
register = template.Library()
@register.filter
def partition(list_, columns=2):
    """
    Break ``list_`` into rows of at most ``columns`` items each.

    Accepts any iterable; the final row holds the remainder when the
    length is not an exact multiple.  A non-positive ``columns`` yields
    an empty list (matching the historical behaviour).
    """
    columns = int(columns)
    if columns <= 0:
        # The old iterator-driven loop produced [] here; keep that contract
        # (and avoid range() with a zero step).
        return []
    items = list(list_)
    # Slice the materialised sequence instead of hand-driving the iterator
    # with six.next()/StopIteration bookkeeping.
    return [items[i:i + columns] for i in range(0, len(items), columns)]
@register.filter
def short_name(name):
    """
    Shorten "First [Middle] Last" to "First L" (first name plus the
    last-name initial).  Single-word or empty names are returned
    unchanged; ``None`` becomes ''.
    """
    bits = (name or '').split(' ')
    first = bits[0]
    # Only treat the trailing token as a surname when there is more than one
    # token; the old code reused bits[-1] unconditionally, so a single-word
    # input like "John" came back as "John J".
    last = bits[-1] if len(bits) > 1 else ''
    if last:
        # First + Initial
        return ' '.join([first, last[0]])
    # No last name, just give the first name
    return first
| 20.844444 | 52 | 0.50533 | import six
from django import template
register = template.Library()
@register.filter
def partition(list_, columns=2):
    """
    Break ``list_`` into rows of at most ``columns`` items each.

    Accepts any iterable; the final row holds the remainder when the
    length is not an exact multiple.  A non-positive ``columns`` yields
    an empty list (matching the historical behaviour).
    """
    columns = int(columns)
    if columns <= 0:
        # The old iterator-driven loop produced [] here; keep that contract
        # (and avoid range() with a zero step).
        return []
    items = list(list_)
    # Slice the materialised sequence instead of hand-driving the iterator
    # with six.next()/StopIteration bookkeeping.
    return [items[i:i + columns] for i in range(0, len(items), columns)]
@register.filter
def short_name(name):
    """
    Shorten "First [Middle] Last" to "First L" (first name plus the
    last-name initial).  Single-word or empty names are returned
    unchanged; ``None`` becomes ''.
    """
    bits = (name or '').split(' ')
    first = bits[0]
    # Only treat the trailing token as a surname when there is more than one
    # token; the old code reused bits[-1] unconditionally, so a single-word
    # input like "John" came back as "John J".
    last = bits[-1] if len(bits) > 1 else ''
    if last:
        return ' '.join([first, last[0]])
    return first
| true | true |
1c329a8a741053888ca17662e28135f888beecd4 | 648 | py | Python | backend/app/alembic/versions/a9c131191819_add_notes_file.py | dmitritruf/distiller-django-react | ec4c3837194306e725d8a2437ace10a899a254d2 | [
"BSD-3-Clause"
] | 2 | 2021-11-04T16:27:33.000Z | 2021-11-04T20:09:19.000Z | backend/app/alembic/versions/a9c131191819_add_notes_file.py | OpenChemistry/distiller | 67da87672654555f9821590f42b108d70a55c1a6 | [
"BSD-3-Clause"
] | 70 | 2021-11-04T16:34:52.000Z | 2022-03-30T02:52:17.000Z | backend/app/alembic/versions/a9c131191819_add_notes_file.py | dmitritruf/distiller-django-react | ec4c3837194306e725d8a2437ace10a899a254d2 | [
"BSD-3-Clause"
] | null | null | null | """Add notes file
Revision ID: a9c131191819
Revises: 6c7e28cf38e9
Create Date: 2021-08-13 10:38:45.529535
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a9c131191819'
down_revision = '6c7e28cf38e9'
branch_labels = None
depends_on = None
def upgrade():
    """Apply the migration: add the nullable free-text ``notes`` column to ``scans``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('scans', sa.Column('notes', sa.String(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop the ``notes`` column from ``scans`` (data is lost)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('scans', 'notes')
    # ### end Alembic commands ###
| 22.344828 | 74 | 0.686728 | from alembic import op
import sqlalchemy as sa
revision = 'a9c131191819'
down_revision = '6c7e28cf38e9'
branch_labels = None
depends_on = None
def upgrade():
| true | true |
1c329b19455a566b39ba5de47c0f2d1dcab07ece | 28,779 | py | Python | tests/wallet/simple_sync/test_simple_sync_protocol.py | zcomputerwiz/hddcoin-light-wallet | cf059817fff9753ce4b499edefc4deb39703248c | [
"Apache-2.0"
] | null | null | null | tests/wallet/simple_sync/test_simple_sync_protocol.py | zcomputerwiz/hddcoin-light-wallet | cf059817fff9753ce4b499edefc4deb39703248c | [
"Apache-2.0"
] | null | null | null | tests/wallet/simple_sync/test_simple_sync_protocol.py | zcomputerwiz/hddcoin-light-wallet | cf059817fff9753ce4b499edefc4deb39703248c | [
"Apache-2.0"
] | null | null | null | # flake8: noqa: F811, F401
import asyncio
from typing import List, Optional
import pytest
from clvm.casts import int_to_bytes
from colorlog import logging
from hddcoin.consensus.block_rewards import calculate_pool_reward, calculate_base_farmer_reward
from hddcoin.protocols import wallet_protocol, full_node_protocol
from hddcoin.protocols.full_node_protocol import RespondTransaction
from hddcoin.protocols.protocol_message_types import ProtocolMessageTypes
from hddcoin.protocols.wallet_protocol import RespondToCoinUpdates, CoinStateUpdate, RespondToPhUpdates, CoinState
from hddcoin.server.outbound_message import NodeType
from hddcoin.simulator.simulator_protocol import FarmNewBlockProtocol, ReorgProtocol
from hddcoin.types.blockchain_format.coin import Coin
from hddcoin.types.coin_record import CoinRecord
from hddcoin.types.condition_opcodes import ConditionOpcode
from hddcoin.types.condition_with_args import ConditionWithArgs
from hddcoin.types.peer_info import PeerInfo
from hddcoin.types.spend_bundle import SpendBundle
from hddcoin.util.ints import uint16, uint32, uint64
from hddcoin.wallet.wallet import Wallet
from hddcoin.wallet.wallet_state_manager import WalletStateManager
from tests.connection_utils import add_dummy_connection
from tests.setup_nodes import self_hostname, setup_simulators_and_wallets, bt
from tests.time_out_assert import time_out_assert
from tests.wallet.cc_wallet.test_cc_wallet import tx_in_pool
from tests.wallet_tools import WalletTool
def wallet_height_at_least(wallet_node, h):
    """Return True when the wallet's cached blockchain peak height equals ``h``.

    NOTE(review): despite the "at_least" name this is an exact-equality check;
    callers poll it via ``time_out_assert`` with a specific target height.
    """
    # Comparison already yields the bool; no need for the if/return dance.
    return wallet_node.wallet_state_manager.blockchain._peak_height == h
log = logging.getLogger(__name__)
@pytest.fixture(scope="session")
def event_loop():
    """Session-scoped pytest fixture body: hand out the current asyncio loop."""
    yield asyncio.get_event_loop()
class TestSimpleSyncProtocol:
    @pytest.fixture(scope="function")
    async def wallet_node_simulator(self):
        """Per-test fixture: one simulator full node plus one wallet."""
        async for _ in setup_simulators_and_wallets(1, 1, {}):
            yield _
    @pytest.fixture(scope="function")
    async def wallet_two_node_simulator(self):
        """Per-test fixture: two simulator full nodes plus one wallet."""
        async for _ in setup_simulators_and_wallets(2, 1, {}):
            yield _
    async def get_all_messages_in_queue(self, queue):
        """Drain every queued (message, peer) pair and return just the messages.

        Sleeps briefly first so in-flight coin-state updates from the node have
        a chance to arrive before the queue is read.
        """
        all_messages = []
        await asyncio.sleep(2)
        while not queue.empty():
            message, peer = await queue.get()
            all_messages.append(message)
        return all_messages
    @pytest.mark.asyncio
    async def test_subscribe_for_ph(self, wallet_node_simulator):
        """Subscribing to puzzle hashes yields both the stored coin states and
        live CoinStateUpdate pushes for new rewards and spends."""
        num_blocks = 4
        full_nodes, wallets = wallet_node_simulator
        full_node_api = full_nodes[0]
        wallet_node, server_2 = wallets[0]
        fn_server = full_node_api.full_node.server
        wsm: WalletStateManager = wallet_node.wallet_state_manager
        await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
        # Fake wallet peer whose incoming messages we can inspect directly.
        incoming_queue, peer_id = await add_dummy_connection(fn_server, 12312, NodeType.WALLET)
        zero_ph = 32 * b"\0"
        junk_ph = 32 * b"\a"
        fake_wallet_peer = fn_server.all_connections[peer_id]
        # Subscribe before any coins exist: response must be empty.
        msg = wallet_protocol.RegisterForPhUpdates([zero_ph], 0)
        msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
        assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
        # NOTE(review): parsed via RespondToCoinUpdates even though the
        # annotation says RespondToPhUpdates — presumably wire-compatible.
        data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
        assert data_response.coin_states == []
        # Farm few more with reward
        for i in range(0, num_blocks):
            if i == num_blocks - 1:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
            else:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        # Re-subscribing returns the already-stored reward coins.
        msg = wallet_protocol.RegisterForPhUpdates([zero_ph], 0)
        msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
        assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
        data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
        assert len(data_response.coin_states) == 2 * num_blocks  # 2 per height farmer / pool reward
        # Farm more rewards to check the incoming queue for the updates
        for i in range(0, num_blocks):
            if i == num_blocks - 1:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
            else:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        all_messages = await self.get_all_messages_in_queue(incoming_queue)
        zero_coin = await full_node_api.full_node.coin_store.get_coin_states_by_puzzle_hashes(True, [zero_ph])
        all_zero_coin = set(zero_coin)
        notified_zero_coins = set()
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                for coin_state in data_response.items:
                    notified_zero_coins.add(coin_state)
                assert len(data_response.items) == 2  # 2 per height farmer / pool reward
        assert all_zero_coin == notified_zero_coins
        # Test subscribing to more coins
        one_ph = 32 * b"\1"
        msg = wallet_protocol.RegisterForPhUpdates([one_ph], 0)
        msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
        peak = full_node_api.full_node.blockchain.get_peak()
        for i in range(0, num_blocks):
            if i == num_blocks - 1:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
            else:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        for i in range(0, num_blocks):
            if i == num_blocks - 1:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(one_ph))
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
            else:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(one_ph))
        # Only zero_ph coins created after the recorded peak are expected here.
        zero_coins = await full_node_api.full_node.coin_store.get_coin_states_by_puzzle_hashes(
            True, [zero_ph], peak.height + 1
        )
        one_coins = await full_node_api.full_node.coin_store.get_coin_states_by_puzzle_hashes(True, [one_ph])
        all_coins = set(zero_coins)
        all_coins.update(one_coins)
        all_messages = await self.get_all_messages_in_queue(incoming_queue)
        notified_all_coins = set()
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                for coin_state in data_response.items:
                    notified_all_coins.add(coin_state)
                assert len(data_response.items) == 2  # 2 per height farmer / pool reward
        assert all_coins == notified_all_coins
        # Now drive the same flow through a real wallet puzzle hash.
        wsm: WalletStateManager = wallet_node.wallet_state_manager
        wallet: Wallet = wsm.wallets[1]
        puzzle_hash = await wallet.get_new_puzzlehash()
        for i in range(0, num_blocks):
            if i == num_blocks - 1:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
            else:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
        funds = sum(
            [
                calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
                for i in range(1, num_blocks + 1)
            ]
        )
        fn_amount = sum(
            cr.coin.amount
            for cr in await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(False, puzzle_hash)
        )
        await time_out_assert(15, wallet.get_confirmed_balance, funds)
        assert funds == fn_amount
        msg_1 = wallet_protocol.RegisterForPhUpdates([puzzle_hash], 0)
        msg_response_1 = await full_node_api.register_interest_in_puzzle_hash(msg_1, fake_wallet_peer)
        assert msg_response_1.type == ProtocolMessageTypes.respond_to_ph_update.value
        data_response_1: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response_1.data)
        assert len(data_response_1.coin_states) == 2 * num_blocks  # 2 per height farmer / pool reward
        # Spend one coin of the subscribed puzzle hash and expect a spent-state push.
        tx_record = await wallet.generate_signed_transaction(uint64(10), puzzle_hash, uint64(0))
        assert len(tx_record.spend_bundle.removals()) == 1
        spent_coin = tx_record.spend_bundle.removals()[0]
        assert spent_coin.puzzle_hash == puzzle_hash
        await wallet.push_transaction(tx_record)
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
        all_messages = await self.get_all_messages_in_queue(incoming_queue)
        notified_state = None
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                for coin_state in data_response.items:
                    if coin_state.coin.name() == spent_coin.name():
                        notified_state = coin_state
        assert notified_state is not None
        assert notified_state.coin == spent_coin
        assert notified_state.spent_height is not None
    @pytest.mark.asyncio
    async def test_subscribe_for_coin_id(self, wallet_node_simulator):
        """Subscribing to individual coin ids reports spends, and a
        subscription to a not-yet-created coin fires once it is confirmed."""
        num_blocks = 4
        full_nodes, wallets = wallet_node_simulator
        full_node_api = full_nodes[0]
        wallet_node, server_2 = wallets[0]
        fn_server = full_node_api.full_node.server
        wsm: WalletStateManager = wallet_node.wallet_state_manager
        standard_wallet: Wallet = wsm.wallets[1]
        puzzle_hash = await standard_wallet.get_new_puzzlehash()
        await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
        incoming_queue, peer_id = await add_dummy_connection(fn_server, 12312, NodeType.WALLET)
        fake_wallet_peer = fn_server.all_connections[peer_id]
        # Farm to create a coin that we'll track
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
        funds = sum(
            [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
        )
        await time_out_assert(15, standard_wallet.get_confirmed_balance, funds)
        my_coins: List[CoinRecord] = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(
            True, puzzle_hash
        )
        coin_to_spend = my_coins[0].coin
        # Register for an existing coin: the stored state comes straight back.
        msg = wallet_protocol.RegisterForCoinUpdates([coin_to_spend.name()], 0)
        msg_response = await full_node_api.register_interest_in_coin(msg, fake_wallet_peer)
        assert msg_response is not None
        assert msg_response.type == ProtocolMessageTypes.respond_to_coin_update.value
        data_response: RespondToCoinUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
        assert data_response.coin_states[0].coin == coin_to_spend
        coins = set()
        coins.add(coin_to_spend)
        tx_record = await standard_wallet.generate_signed_transaction(uint64(10), puzzle_hash, uint64(0), coins=coins)
        await standard_wallet.push_transaction(tx_record)
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        # Farm transaction
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
        all_messages = await self.get_all_messages_in_queue(incoming_queue)
        notified_coins = set()
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                for coin_state in data_response.items:
                    notified_coins.add(coin_state.coin)
                    assert coin_state.spent_height is not None
        assert notified_coins == coins
        # Test getting notification for coin that is about to be created
        tx_record = await standard_wallet.generate_signed_transaction(uint64(10), puzzle_hash, uint64(0))
        tx_record.spend_bundle.additions()
        added_target: Optional[Coin] = None
        for coin in tx_record.spend_bundle.additions():
            if coin.puzzle_hash == puzzle_hash:
                added_target = coin
        assert added_target is not None
        msg = wallet_protocol.RegisterForCoinUpdates([added_target.name()], 0)
        msg_response = await full_node_api.register_interest_in_coin(msg, fake_wallet_peer)
        assert msg_response is not None
        assert msg_response.type == ProtocolMessageTypes.respond_to_coin_update.value
        data_response: RespondToCoinUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
        # Not created yet, so the immediate response carries no state.
        assert len(data_response.coin_states) == 0
        await standard_wallet.push_transaction(tx_record)
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
        all_messages = await self.get_all_messages_in_queue(incoming_queue)
        notified_state = None
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                for coin_state in data_response.items:
                    if coin_state.coin.name() == added_target.name():
                        notified_state = coin_state
        assert notified_state is not None
        assert notified_state.coin == added_target
        assert notified_state.spent_height is None
    @pytest.mark.asyncio
    async def test_subscribe_for_ph_reorg(self, wallet_node_simulator):
        """A reorg past a subscribed puzzle hash's coins produces a second
        CoinStateUpdate whose items carry no created/spent heights."""
        num_blocks = 4
        long_blocks = 20
        full_nodes, wallets = wallet_node_simulator
        full_node_api = full_nodes[0]
        wallet_node, server_2 = wallets[0]
        fn_server = full_node_api.full_node.server
        wsm: WalletStateManager = wallet_node.wallet_state_manager
        standard_wallet: Wallet = wsm.wallets[1]
        puzzle_hash = await standard_wallet.get_new_puzzlehash()
        await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
        incoming_queue, peer_id = await add_dummy_connection(fn_server, 12312, NodeType.WALLET)
        fake_wallet_peer = fn_server.all_connections[peer_id]
        zero_ph = 32 * b"\0"
        # Farm to create a coin that we'll track
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        for i in range(0, long_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        msg = wallet_protocol.RegisterForPhUpdates([puzzle_hash], 0)
        msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
        assert msg_response is not None
        # The only block paying the watched puzzle hash, buried under num_blocks more.
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        expected_height = uint32(long_blocks + 2 * num_blocks + 1)
        await time_out_assert(15, full_node_api.full_node.blockchain.get_peak_height, expected_height)
        coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
        assert len(coin_records) > 0
        # Fork below the block that created the watched coins, so they vanish.
        fork_height = expected_height - num_blocks - 5
        req = ReorgProtocol(fork_height, expected_height + 5, zero_ph)
        await full_node_api.reorg_from_index_to_new_index(req)
        coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
        assert coin_records == []
        all_messages = await self.get_all_messages_in_queue(incoming_queue)
        coin_update_messages = []
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                coin_update_messages.append(data_response)
        # First state is creation, second one is a reorg
        assert len(coin_update_messages) == 2
        first = coin_update_messages[0]
        assert len(first.items) == 2
        first_state_coin_1 = first.items[0]
        assert first_state_coin_1.spent_height is None
        assert first_state_coin_1.created_height is not None
        first_state_coin_2 = first.items[1]
        assert first_state_coin_2.spent_height is None
        assert first_state_coin_2.created_height is not None
        second = coin_update_messages[1]
        assert second.fork_height == fork_height
        assert len(second.items) == 2
        second_state_coin_1 = second.items[0]
        assert second_state_coin_1.spent_height is None
        assert second_state_coin_1.created_height is None
        second_state_coin_2 = second.items[1]
        assert second_state_coin_2.spent_height is None
        assert second_state_coin_2.created_height is None
    @pytest.mark.asyncio
    async def test_subscribe_for_coin_id_reorg(self, wallet_node_simulator):
        """Coin-id subscriptions made after creation still get a rollback
        notification (heights cleared) when a reorg removes the coins."""
        num_blocks = 4
        long_blocks = 20
        full_nodes, wallets = wallet_node_simulator
        full_node_api = full_nodes[0]
        wallet_node, server_2 = wallets[0]
        fn_server = full_node_api.full_node.server
        wsm: WalletStateManager = wallet_node.wallet_state_manager
        standard_wallet: Wallet = wsm.wallets[1]
        puzzle_hash = await standard_wallet.get_new_puzzlehash()
        await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
        incoming_queue, peer_id = await add_dummy_connection(fn_server, 12312, NodeType.WALLET)
        fake_wallet_peer = fn_server.all_connections[peer_id]
        zero_ph = 32 * b"\0"
        # Farm to create a coin that we'll track
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        for i in range(0, long_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        expected_height = uint32(long_blocks + 2 * num_blocks + 1)
        await time_out_assert(15, full_node_api.full_node.blockchain.get_peak_height, expected_height)
        coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
        assert len(coin_records) > 0
        # Subscribe to each reward coin by its id after it already exists.
        for coin_rec in coin_records:
            msg = wallet_protocol.RegisterForCoinUpdates([coin_rec.name], 0)
            msg_response = await full_node_api.register_interest_in_coin(msg, fake_wallet_peer)
            assert msg_response is not None
        fork_height = expected_height - num_blocks - 5
        req = ReorgProtocol(fork_height, expected_height + 5, zero_ph)
        await full_node_api.reorg_from_index_to_new_index(req)
        coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
        assert coin_records == []
        all_messages = await self.get_all_messages_in_queue(incoming_queue)
        coin_update_messages = []
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                coin_update_messages.append(data_response)
        # Only the rollback update is expected (subscription happened post-creation).
        assert len(coin_update_messages) == 1
        update = coin_update_messages[0]
        coin_states = update.items
        assert len(coin_states) == 2
        first_coin = coin_states[0]
        assert first_coin.spent_height is None
        assert first_coin.created_height is None
        second_coin = coin_states[1]
        assert second_coin.spent_height is None
        assert second_coin.created_height is None
    @pytest.mark.asyncio
    async def test_subscribe_for_hint(self, wallet_node_simulator):
        """A puzzle-hash subscription also matches coins created with that
        value as a CREATE_COIN hint, both via push and via re-subscription."""
        num_blocks = 4
        full_nodes, wallets = wallet_node_simulator
        full_node_api = full_nodes[0]
        wallet_node, server_2 = wallets[0]
        fn_server = full_node_api.full_node.server
        wsm: WalletStateManager = wallet_node.wallet_state_manager
        await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
        incoming_queue, peer_id = await add_dummy_connection(fn_server, 12312, NodeType.WALLET)
        wt: WalletTool = bt.get_pool_wallet_tool()
        ph = wt.get_new_puzzlehash()
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
        await asyncio.sleep(6)
        coins = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hashes(False, [ph])
        coin_spent = coins[0].coin
        hint_puzzle_hash = 32 * b"\2"
        amount = 1
        amount_bin = int_to_bytes(1)
        hint = 32 * b"\5"
        fake_wallet_peer = fn_server.all_connections[peer_id]
        # Subscribe to the hint value before any coin references it.
        msg = wallet_protocol.RegisterForPhUpdates([hint], 0)
        msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
        assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
        data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
        assert len(data_response.coin_states) == 0
        # CREATE_COIN with a third argument: the hint the node should index.
        condition_dict = {
            ConditionOpcode.CREATE_COIN: [
                ConditionWithArgs(ConditionOpcode.CREATE_COIN, [hint_puzzle_hash, amount_bin, hint])
            ]
        }
        tx: SpendBundle = wt.generate_signed_transaction(
            10,
            wt.get_new_puzzlehash(),
            coin_spent,
            condition_dic=condition_dict,
        )
        await full_node_api.respond_transaction(RespondTransaction(tx), fake_wallet_peer)
        await time_out_assert(15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx.name())
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
        all_messages = await self.get_all_messages_in_queue(incoming_queue)
        notified_state = None
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                notified_state = data_response
                break
        assert notified_state is not None
        assert notified_state.items[0].coin == Coin(coin_spent.name(), hint_puzzle_hash, amount)
        # Re-subscribing to the hint now returns the stored hinted coin state.
        msg = wallet_protocol.RegisterForPhUpdates([hint], 0)
        msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
        assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
        data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
        assert len(data_response.coin_states) == 1
        coin_records: List[CoinRecord] = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(
            True, hint_puzzle_hash
        )
        assert len(coin_records) == 1
        assert data_response.coin_states[0] == coin_records[0].coin_state
    @pytest.mark.asyncio
    async def test_subscribe_for_hint_long_sync(self, wallet_two_node_simulator):
        """Hint subscriptions fire on both a node that saw the spend live and
        a second node that catches up through long (batch) sync."""
        num_blocks = 4
        full_nodes, wallets = wallet_two_node_simulator
        full_node_api = full_nodes[0]
        full_node_api_1 = full_nodes[1]
        wallet_node, server_2 = wallets[0]
        fn_server = full_node_api.full_node.server
        fn_server_1 = full_node_api_1.full_node.server
        wsm: WalletStateManager = wallet_node.wallet_state_manager
        await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
        # One fake wallet peer per full node so both queues can be inspected.
        incoming_queue, peer_id = await add_dummy_connection(fn_server, 12312, NodeType.WALLET)
        incoming_queue_1, peer_id_1 = await add_dummy_connection(fn_server_1, 12313, NodeType.WALLET)
        wt: WalletTool = bt.get_pool_wallet_tool()
        ph = wt.get_new_puzzlehash()
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
        await asyncio.sleep(6)
        coins = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hashes(False, [ph])
        coin_spent = coins[0].coin
        hint_puzzle_hash = 32 * b"\2"
        amount = 1
        amount_bin = int_to_bytes(1)
        hint = 32 * b"\5"
        fake_wallet_peer = fn_server.all_connections[peer_id]
        fake_wallet_peer_1 = fn_server_1.all_connections[peer_id_1]
        # Subscribe to the hint on both nodes before the hinted coin exists.
        msg = wallet_protocol.RegisterForPhUpdates([hint], 0)
        msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
        msg_response_1 = await full_node_api_1.register_interest_in_puzzle_hash(msg, fake_wallet_peer_1)
        assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
        data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
        assert len(data_response.coin_states) == 0
        condition_dict = {
            ConditionOpcode.CREATE_COIN: [
                ConditionWithArgs(ConditionOpcode.CREATE_COIN, [hint_puzzle_hash, amount_bin, hint])
            ]
        }
        tx: SpendBundle = wt.generate_signed_transaction(
            10,
            wt.get_new_puzzlehash(),
            coin_spent,
            condition_dic=condition_dict,
        )
        await full_node_api.respond_transaction(RespondTransaction(tx), fake_wallet_peer)
        await time_out_assert(15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx.name())
        # Create more blocks than recent "short_sync_blocks_behind_threshold" so that node enters batch
        for i in range(0, 100):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
        node1_height = full_node_api_1.full_node.blockchain.get_peak_height()
        assert node1_height is None
        # Connect node 1 to node 0 so it performs the long sync now.
        await fn_server_1.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
        node0_height = full_node_api.full_node.blockchain.get_peak_height()
        await time_out_assert(15, full_node_api_1.full_node.blockchain.get_peak_height, node0_height)
        all_messages = await self.get_all_messages_in_queue(incoming_queue)
        all_messages_1 = await self.get_all_messages_in_queue(incoming_queue_1)
        def check_messages_for_hint(messages):
            # Both nodes must push the hinted coin's creation update.
            notified_state = None
            for message in messages:
                if message.type == ProtocolMessageTypes.coin_state_update.value:
                    data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                    notified_state = data_response
                    break
            assert notified_state is not None
            assert notified_state.items[0].coin == Coin(coin_spent.name(), hint_puzzle_hash, amount)
        check_messages_for_hint(all_messages)
        check_messages_for_hint(all_messages_1)
| 45.536392 | 118 | 0.710136 |
import asyncio
from typing import List, Optional
import pytest
from clvm.casts import int_to_bytes
from colorlog import logging
from hddcoin.consensus.block_rewards import calculate_pool_reward, calculate_base_farmer_reward
from hddcoin.protocols import wallet_protocol, full_node_protocol
from hddcoin.protocols.full_node_protocol import RespondTransaction
from hddcoin.protocols.protocol_message_types import ProtocolMessageTypes
from hddcoin.protocols.wallet_protocol import RespondToCoinUpdates, CoinStateUpdate, RespondToPhUpdates, CoinState
from hddcoin.server.outbound_message import NodeType
from hddcoin.simulator.simulator_protocol import FarmNewBlockProtocol, ReorgProtocol
from hddcoin.types.blockchain_format.coin import Coin
from hddcoin.types.coin_record import CoinRecord
from hddcoin.types.condition_opcodes import ConditionOpcode
from hddcoin.types.condition_with_args import ConditionWithArgs
from hddcoin.types.peer_info import PeerInfo
from hddcoin.types.spend_bundle import SpendBundle
from hddcoin.util.ints import uint16, uint32, uint64
from hddcoin.wallet.wallet import Wallet
from hddcoin.wallet.wallet_state_manager import WalletStateManager
from tests.connection_utils import add_dummy_connection
from tests.setup_nodes import self_hostname, setup_simulators_and_wallets, bt
from tests.time_out_assert import time_out_assert
from tests.wallet.cc_wallet.test_cc_wallet import tx_in_pool
from tests.wallet_tools import WalletTool
def wallet_height_at_least(wallet_node, h):
height = wallet_node.wallet_state_manager.blockchain._peak_height
if height == h:
return True
return False
log = logging.getLogger(__name__)
@pytest.fixture(scope="session")
def event_loop():
loop = asyncio.get_event_loop()
yield loop
class TestSimpleSyncProtocol:
@pytest.fixture(scope="function")
async def wallet_node_simulator(self):
async for _ in setup_simulators_and_wallets(1, 1, {}):
yield _
@pytest.fixture(scope="function")
async def wallet_two_node_simulator(self):
async for _ in setup_simulators_and_wallets(2, 1, {}):
yield _
async def get_all_messages_in_queue(self, queue):
all_messages = []
await asyncio.sleep(2)
while not queue.empty():
message, peer = await queue.get()
all_messages.append(message)
return all_messages
@pytest.mark.asyncio
async def test_subscribe_for_ph(self, wallet_node_simulator):
num_blocks = 4
full_nodes, wallets = wallet_node_simulator
full_node_api = full_nodes[0]
wallet_node, server_2 = wallets[0]
fn_server = full_node_api.full_node.server
wsm: WalletStateManager = wallet_node.wallet_state_manager
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
incoming_queue, peer_id = await add_dummy_connection(fn_server, 12312, NodeType.WALLET)
zero_ph = 32 * b"\0"
junk_ph = 32 * b"\a"
fake_wallet_peer = fn_server.all_connections[peer_id]
msg = wallet_protocol.RegisterForPhUpdates([zero_ph], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert data_response.coin_states == []
for i in range(0, num_blocks):
if i == num_blocks - 1:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
else:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
msg = wallet_protocol.RegisterForPhUpdates([zero_ph], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert len(data_response.coin_states) == 2 * num_blocks
for i in range(0, num_blocks):
if i == num_blocks - 1:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
else:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
all_messages = await self.get_all_messages_in_queue(incoming_queue)
zero_coin = await full_node_api.full_node.coin_store.get_coin_states_by_puzzle_hashes(True, [zero_ph])
all_zero_coin = set(zero_coin)
notified_zero_coins = set()
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
for coin_state in data_response.items:
notified_zero_coins.add(coin_state)
assert len(data_response.items) == 2
assert all_zero_coin == notified_zero_coins
one_ph = 32 * b"\1"
msg = wallet_protocol.RegisterForPhUpdates([one_ph], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
peak = full_node_api.full_node.blockchain.get_peak()
for i in range(0, num_blocks):
if i == num_blocks - 1:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
else:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
for i in range(0, num_blocks):
if i == num_blocks - 1:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(one_ph))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
else:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(one_ph))
zero_coins = await full_node_api.full_node.coin_store.get_coin_states_by_puzzle_hashes(
True, [zero_ph], peak.height + 1
)
one_coins = await full_node_api.full_node.coin_store.get_coin_states_by_puzzle_hashes(True, [one_ph])
all_coins = set(zero_coins)
all_coins.update(one_coins)
all_messages = await self.get_all_messages_in_queue(incoming_queue)
notified_all_coins = set()
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
for coin_state in data_response.items:
notified_all_coins.add(coin_state)
assert len(data_response.items) == 2
assert all_coins == notified_all_coins
wsm: WalletStateManager = wallet_node.wallet_state_manager
wallet: Wallet = wsm.wallets[1]
puzzle_hash = await wallet.get_new_puzzlehash()
for i in range(0, num_blocks):
if i == num_blocks - 1:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
else:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
funds = sum(
[
calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
for i in range(1, num_blocks + 1)
]
)
fn_amount = sum(
cr.coin.amount
for cr in await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(False, puzzle_hash)
)
await time_out_assert(15, wallet.get_confirmed_balance, funds)
assert funds == fn_amount
msg_1 = wallet_protocol.RegisterForPhUpdates([puzzle_hash], 0)
msg_response_1 = await full_node_api.register_interest_in_puzzle_hash(msg_1, fake_wallet_peer)
assert msg_response_1.type == ProtocolMessageTypes.respond_to_ph_update.value
data_response_1: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response_1.data)
assert len(data_response_1.coin_states) == 2 * num_blocks
tx_record = await wallet.generate_signed_transaction(uint64(10), puzzle_hash, uint64(0))
assert len(tx_record.spend_bundle.removals()) == 1
spent_coin = tx_record.spend_bundle.removals()[0]
assert spent_coin.puzzle_hash == puzzle_hash
await wallet.push_transaction(tx_record)
await time_out_assert(
15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
)
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
all_messages = await self.get_all_messages_in_queue(incoming_queue)
notified_state = None
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
for coin_state in data_response.items:
if coin_state.coin.name() == spent_coin.name():
notified_state = coin_state
assert notified_state is not None
assert notified_state.coin == spent_coin
assert notified_state.spent_height is not None
@pytest.mark.asyncio
async def test_subscribe_for_coin_id(self, wallet_node_simulator):
num_blocks = 4
full_nodes, wallets = wallet_node_simulator
full_node_api = full_nodes[0]
wallet_node, server_2 = wallets[0]
fn_server = full_node_api.full_node.server
wsm: WalletStateManager = wallet_node.wallet_state_manager
standard_wallet: Wallet = wsm.wallets[1]
puzzle_hash = await standard_wallet.get_new_puzzlehash()
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
incoming_queue, peer_id = await add_dummy_connection(fn_server, 12312, NodeType.WALLET)
fake_wallet_peer = fn_server.all_connections[peer_id]
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
funds = sum(
[calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
)
await time_out_assert(15, standard_wallet.get_confirmed_balance, funds)
my_coins: List[CoinRecord] = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(
True, puzzle_hash
)
coin_to_spend = my_coins[0].coin
msg = wallet_protocol.RegisterForCoinUpdates([coin_to_spend.name()], 0)
msg_response = await full_node_api.register_interest_in_coin(msg, fake_wallet_peer)
assert msg_response is not None
assert msg_response.type == ProtocolMessageTypes.respond_to_coin_update.value
data_response: RespondToCoinUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert data_response.coin_states[0].coin == coin_to_spend
coins = set()
coins.add(coin_to_spend)
tx_record = await standard_wallet.generate_signed_transaction(uint64(10), puzzle_hash, uint64(0), coins=coins)
await standard_wallet.push_transaction(tx_record)
await time_out_assert(
15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
)
# Farm transaction
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
all_messages = await self.get_all_messages_in_queue(incoming_queue)
notified_coins = set()
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
for coin_state in data_response.items:
notified_coins.add(coin_state.coin)
assert coin_state.spent_height is not None
assert notified_coins == coins
# Test getting notification for coin that is about to be created
tx_record = await standard_wallet.generate_signed_transaction(uint64(10), puzzle_hash, uint64(0))
tx_record.spend_bundle.additions()
added_target: Optional[Coin] = None
for coin in tx_record.spend_bundle.additions():
if coin.puzzle_hash == puzzle_hash:
added_target = coin
assert added_target is not None
msg = wallet_protocol.RegisterForCoinUpdates([added_target.name()], 0)
msg_response = await full_node_api.register_interest_in_coin(msg, fake_wallet_peer)
assert msg_response is not None
assert msg_response.type == ProtocolMessageTypes.respond_to_coin_update.value
data_response: RespondToCoinUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert len(data_response.coin_states) == 0
await standard_wallet.push_transaction(tx_record)
await time_out_assert(
15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
)
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
all_messages = await self.get_all_messages_in_queue(incoming_queue)
notified_state = None
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
for coin_state in data_response.items:
if coin_state.coin.name() == added_target.name():
notified_state = coin_state
assert notified_state is not None
assert notified_state.coin == added_target
assert notified_state.spent_height is None
@pytest.mark.asyncio
async def test_subscribe_for_ph_reorg(self, wallet_node_simulator):
num_blocks = 4
long_blocks = 20
full_nodes, wallets = wallet_node_simulator
full_node_api = full_nodes[0]
wallet_node, server_2 = wallets[0]
fn_server = full_node_api.full_node.server
wsm: WalletStateManager = wallet_node.wallet_state_manager
standard_wallet: Wallet = wsm.wallets[1]
puzzle_hash = await standard_wallet.get_new_puzzlehash()
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
incoming_queue, peer_id = await add_dummy_connection(fn_server, 12312, NodeType.WALLET)
fake_wallet_peer = fn_server.all_connections[peer_id]
zero_ph = 32 * b"\0"
# Farm to create a coin that we'll track
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
for i in range(0, long_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
msg = wallet_protocol.RegisterForPhUpdates([puzzle_hash], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
assert msg_response is not None
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
expected_height = uint32(long_blocks + 2 * num_blocks + 1)
await time_out_assert(15, full_node_api.full_node.blockchain.get_peak_height, expected_height)
coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
assert len(coin_records) > 0
fork_height = expected_height - num_blocks - 5
req = ReorgProtocol(fork_height, expected_height + 5, zero_ph)
await full_node_api.reorg_from_index_to_new_index(req)
coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
assert coin_records == []
all_messages = await self.get_all_messages_in_queue(incoming_queue)
coin_update_messages = []
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
coin_update_messages.append(data_response)
assert len(coin_update_messages) == 2
first = coin_update_messages[0]
assert len(first.items) == 2
first_state_coin_1 = first.items[0]
assert first_state_coin_1.spent_height is None
assert first_state_coin_1.created_height is not None
first_state_coin_2 = first.items[1]
assert first_state_coin_2.spent_height is None
assert first_state_coin_2.created_height is not None
second = coin_update_messages[1]
assert second.fork_height == fork_height
assert len(second.items) == 2
second_state_coin_1 = second.items[0]
assert second_state_coin_1.spent_height is None
assert second_state_coin_1.created_height is None
second_state_coin_2 = second.items[1]
assert second_state_coin_2.spent_height is None
assert second_state_coin_2.created_height is None
@pytest.mark.asyncio
async def test_subscribe_for_coin_id_reorg(self, wallet_node_simulator):
num_blocks = 4
long_blocks = 20
full_nodes, wallets = wallet_node_simulator
full_node_api = full_nodes[0]
wallet_node, server_2 = wallets[0]
fn_server = full_node_api.full_node.server
wsm: WalletStateManager = wallet_node.wallet_state_manager
standard_wallet: Wallet = wsm.wallets[1]
puzzle_hash = await standard_wallet.get_new_puzzlehash()
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
incoming_queue, peer_id = await add_dummy_connection(fn_server, 12312, NodeType.WALLET)
fake_wallet_peer = fn_server.all_connections[peer_id]
zero_ph = 32 * b"\0"
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
for i in range(0, long_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
expected_height = uint32(long_blocks + 2 * num_blocks + 1)
await time_out_assert(15, full_node_api.full_node.blockchain.get_peak_height, expected_height)
coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
assert len(coin_records) > 0
for coin_rec in coin_records:
msg = wallet_protocol.RegisterForCoinUpdates([coin_rec.name], 0)
msg_response = await full_node_api.register_interest_in_coin(msg, fake_wallet_peer)
assert msg_response is not None
fork_height = expected_height - num_blocks - 5
req = ReorgProtocol(fork_height, expected_height + 5, zero_ph)
await full_node_api.reorg_from_index_to_new_index(req)
coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
assert coin_records == []
all_messages = await self.get_all_messages_in_queue(incoming_queue)
coin_update_messages = []
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
coin_update_messages.append(data_response)
assert len(coin_update_messages) == 1
update = coin_update_messages[0]
coin_states = update.items
assert len(coin_states) == 2
first_coin = coin_states[0]
assert first_coin.spent_height is None
assert first_coin.created_height is None
second_coin = coin_states[1]
assert second_coin.spent_height is None
assert second_coin.created_height is None
@pytest.mark.asyncio
async def test_subscribe_for_hint(self, wallet_node_simulator):
num_blocks = 4
full_nodes, wallets = wallet_node_simulator
full_node_api = full_nodes[0]
wallet_node, server_2 = wallets[0]
fn_server = full_node_api.full_node.server
wsm: WalletStateManager = wallet_node.wallet_state_manager
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
incoming_queue, peer_id = await add_dummy_connection(fn_server, 12312, NodeType.WALLET)
wt: WalletTool = bt.get_pool_wallet_tool()
ph = wt.get_new_puzzlehash()
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
await asyncio.sleep(6)
coins = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hashes(False, [ph])
coin_spent = coins[0].coin
hint_puzzle_hash = 32 * b"\2"
amount = 1
amount_bin = int_to_bytes(1)
hint = 32 * b"\5"
fake_wallet_peer = fn_server.all_connections[peer_id]
msg = wallet_protocol.RegisterForPhUpdates([hint], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert len(data_response.coin_states) == 0
condition_dict = {
ConditionOpcode.CREATE_COIN: [
ConditionWithArgs(ConditionOpcode.CREATE_COIN, [hint_puzzle_hash, amount_bin, hint])
]
}
tx: SpendBundle = wt.generate_signed_transaction(
10,
wt.get_new_puzzlehash(),
coin_spent,
condition_dic=condition_dict,
)
await full_node_api.respond_transaction(RespondTransaction(tx), fake_wallet_peer)
await time_out_assert(15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx.name())
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
all_messages = await self.get_all_messages_in_queue(incoming_queue)
notified_state = None
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
notified_state = data_response
break
assert notified_state is not None
assert notified_state.items[0].coin == Coin(coin_spent.name(), hint_puzzle_hash, amount)
msg = wallet_protocol.RegisterForPhUpdates([hint], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert len(data_response.coin_states) == 1
coin_records: List[CoinRecord] = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(
True, hint_puzzle_hash
)
assert len(coin_records) == 1
assert data_response.coin_states[0] == coin_records[0].coin_state
@pytest.mark.asyncio
async def test_subscribe_for_hint_long_sync(self, wallet_two_node_simulator):
num_blocks = 4
full_nodes, wallets = wallet_two_node_simulator
full_node_api = full_nodes[0]
full_node_api_1 = full_nodes[1]
wallet_node, server_2 = wallets[0]
fn_server = full_node_api.full_node.server
fn_server_1 = full_node_api_1.full_node.server
wsm: WalletStateManager = wallet_node.wallet_state_manager
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
incoming_queue, peer_id = await add_dummy_connection(fn_server, 12312, NodeType.WALLET)
incoming_queue_1, peer_id_1 = await add_dummy_connection(fn_server_1, 12313, NodeType.WALLET)
wt: WalletTool = bt.get_pool_wallet_tool()
ph = wt.get_new_puzzlehash()
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
await asyncio.sleep(6)
coins = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hashes(False, [ph])
coin_spent = coins[0].coin
hint_puzzle_hash = 32 * b"\2"
amount = 1
amount_bin = int_to_bytes(1)
hint = 32 * b"\5"
fake_wallet_peer = fn_server.all_connections[peer_id]
fake_wallet_peer_1 = fn_server_1.all_connections[peer_id_1]
msg = wallet_protocol.RegisterForPhUpdates([hint], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
msg_response_1 = await full_node_api_1.register_interest_in_puzzle_hash(msg, fake_wallet_peer_1)
assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert len(data_response.coin_states) == 0
condition_dict = {
ConditionOpcode.CREATE_COIN: [
ConditionWithArgs(ConditionOpcode.CREATE_COIN, [hint_puzzle_hash, amount_bin, hint])
]
}
tx: SpendBundle = wt.generate_signed_transaction(
10,
wt.get_new_puzzlehash(),
coin_spent,
condition_dic=condition_dict,
)
await full_node_api.respond_transaction(RespondTransaction(tx), fake_wallet_peer)
await time_out_assert(15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx.name())
# Create more blocks than recent "short_sync_blocks_behind_threshold" so that node enters batch
for i in range(0, 100):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
node1_height = full_node_api_1.full_node.blockchain.get_peak_height()
assert node1_height is None
await fn_server_1.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
node0_height = full_node_api.full_node.blockchain.get_peak_height()
await time_out_assert(15, full_node_api_1.full_node.blockchain.get_peak_height, node0_height)
all_messages = await self.get_all_messages_in_queue(incoming_queue)
all_messages_1 = await self.get_all_messages_in_queue(incoming_queue_1)
def check_messages_for_hint(messages):
notified_state = None
for message in messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
notified_state = data_response
break
assert notified_state is not None
assert notified_state.items[0].coin == Coin(coin_spent.name(), hint_puzzle_hash, amount)
check_messages_for_hint(all_messages)
check_messages_for_hint(all_messages_1)
| true | true |
1c329c6a37fddc5fbee797a6ee3d3ba3ea359df3 | 2,768 | py | Python | models/encoder.py | zsc/End-to-end-ASR-Transformer | 3e02ff6210badb588134a81eb17f8c9ab59e735f | [
"Apache-2.0"
] | 7 | 2021-12-08T04:07:48.000Z | 2022-01-10T07:27:29.000Z | models/encoder.py | zsc/End-to-end-ASR-Transformer | 3e02ff6210badb588134a81eb17f8c9ab59e735f | [
"Apache-2.0"
] | 1 | 2021-12-08T05:14:47.000Z | 2021-12-08T05:14:47.000Z | models/encoder.py | zsc/End-to-end-ASR-Transformer | 3e02ff6210badb588134a81eb17f8c9ab59e735f | [
"Apache-2.0"
] | 1 | 2021-12-08T05:13:44.000Z | 2021-12-08T05:13:44.000Z | import megengine.module as M
import megengine.functional as F
import megengine as mge
from .attention import MultiHeadedAttention
from .embedding import PositionalEncoding
from .encoder_layer import EncoderLayer
from .layer_norm import LayerNorm
from .positionwise_feed_forward import PositionwiseFeedForward
from .repeat import repeat
from .subsampling import Conv2dSubsampling
class Encoder(M.Module):
"""Transformer encoder module."""
def __init__(
self,
idim,
attention_dim=256,
attention_heads=4,
linear_units=2048,
num_blocks=6,
dropout_rate=0.1,
positional_dropout_rate=0.1,
attention_dropout_rate=0.0,
pos_enc_class=PositionalEncoding,
normalize_before=True,
concat_after=False,
positionwise_layer_type="linear",
positionwise_conv_kernel_size=1,
padding_idx=-1,
):
"""Construct an Encoder object."""
super(Encoder, self).__init__()
self.embed = Conv2dSubsampling(idim, attention_dim, dropout_rate)
self.normalize_before = normalize_before
if positionwise_layer_type == "linear":
positionwise_layer = PositionwiseFeedForward
positionwise_layer_args = (attention_dim, linear_units, dropout_rate)
self.encoders = repeat(
num_blocks,
lambda: EncoderLayer(
attention_dim,
MultiHeadedAttention(
attention_heads, attention_dim, attention_dropout_rate
),
positionwise_layer(*positionwise_layer_args),
dropout_rate,
normalize_before,
concat_after,
),
)
if self.normalize_before:
self.after_norm = LayerNorm(attention_dim)
def forward(self, xs, masks):
"""Encode input sequence."""
if isinstance(self.embed, (Conv2dSubsampling)):
xs, masks = self.embed(xs, masks)
else:
xs = self.embed(xs)
xs, masks = self.encoders(xs, masks)
if self.normalize_before:
xs = self.after_norm(xs)
return xs, masks
def forward_one_step(self, xs, masks, cache=None):
"""Encode input frame."""
if isinstance(self.embed, Conv2dSubsampling):
xs, masks = self.embed(xs, masks)
else:
xs = self.embed(xs)
if cache is None:
cache = [None for _ in range(len(self.encoders))]
new_cache = []
for c, e in zip(cache, self.encoders):
xs, masks = e(xs, masks, cache=c)
new_cache.append(xs)
if self.normalize_before:
xs = self.after_norm(xs)
return xs, masks, new_cache
| 32.564706 | 81 | 0.616329 | import megengine.module as M
import megengine.functional as F
import megengine as mge
from .attention import MultiHeadedAttention
from .embedding import PositionalEncoding
from .encoder_layer import EncoderLayer
from .layer_norm import LayerNorm
from .positionwise_feed_forward import PositionwiseFeedForward
from .repeat import repeat
from .subsampling import Conv2dSubsampling
class Encoder(M.Module):
def __init__(
self,
idim,
attention_dim=256,
attention_heads=4,
linear_units=2048,
num_blocks=6,
dropout_rate=0.1,
positional_dropout_rate=0.1,
attention_dropout_rate=0.0,
pos_enc_class=PositionalEncoding,
normalize_before=True,
concat_after=False,
positionwise_layer_type="linear",
positionwise_conv_kernel_size=1,
padding_idx=-1,
):
super(Encoder, self).__init__()
self.embed = Conv2dSubsampling(idim, attention_dim, dropout_rate)
self.normalize_before = normalize_before
if positionwise_layer_type == "linear":
positionwise_layer = PositionwiseFeedForward
positionwise_layer_args = (attention_dim, linear_units, dropout_rate)
self.encoders = repeat(
num_blocks,
lambda: EncoderLayer(
attention_dim,
MultiHeadedAttention(
attention_heads, attention_dim, attention_dropout_rate
),
positionwise_layer(*positionwise_layer_args),
dropout_rate,
normalize_before,
concat_after,
),
)
if self.normalize_before:
self.after_norm = LayerNorm(attention_dim)
def forward(self, xs, masks):
if isinstance(self.embed, (Conv2dSubsampling)):
xs, masks = self.embed(xs, masks)
else:
xs = self.embed(xs)
xs, masks = self.encoders(xs, masks)
if self.normalize_before:
xs = self.after_norm(xs)
return xs, masks
def forward_one_step(self, xs, masks, cache=None):
if isinstance(self.embed, Conv2dSubsampling):
xs, masks = self.embed(xs, masks)
else:
xs = self.embed(xs)
if cache is None:
cache = [None for _ in range(len(self.encoders))]
new_cache = []
for c, e in zip(cache, self.encoders):
xs, masks = e(xs, masks, cache=c)
new_cache.append(xs)
if self.normalize_before:
xs = self.after_norm(xs)
return xs, masks, new_cache
| true | true |
1c329ca24197b85d788b41f05cb3ae0f8b488d05 | 13,350 | py | Python | code/train_seq2seq.py | sumanbanerjee1/Code-Mixed-Dialog | 9df1d4dc800548a883f8bc1a9ce4116c77aebc02 | [
"Apache-2.0"
] | 35 | 2018-06-27T11:07:58.000Z | 2022-01-06T07:52:35.000Z | code/train_seq2seq.py | sumanbanerjee1/Code-Mixed-Dialog | 9df1d4dc800548a883f8bc1a9ce4116c77aebc02 | [
"Apache-2.0"
] | null | null | null | code/train_seq2seq.py | sumanbanerjee1/Code-Mixed-Dialog | 9df1d4dc800548a883f8bc1a9ce4116c77aebc02 | [
"Apache-2.0"
] | 6 | 2019-06-30T07:13:59.000Z | 2021-02-17T13:14:52.000Z | import tensorflow as tf
import numpy as np
import json
import pickle
import os
import subprocess
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
from tqdm import tqdm
from seq2seq.model import Seq2seqModel
from seq2seq.data_utils import pad, replace_token_no, get_len
flags = tf.app.flags
flags.DEFINE_string("config_id",'135',"Hyperparam config id")
flags.DEFINE_string("data_dir", "../data/tamil", "Data directory ")
flags.DEFINE_string("infer_data", "test", "[train, dev or test] Dataset on which you want to perform inference")
flags.DEFINE_string("logs_dir", "logs", "Logging directory ")
flags.DEFINE_string("checkpoint_dir", 'checkpoints', "Checkpoint directory")
flags.DEFINE_string("rnn_unit", 'gru', "GRU or LSTM")
flags.DEFINE_float("learning_rate", 0.0004, "Learning rate for Adam Optimizer")
flags.DEFINE_integer("batch_size",32, "Batch size for training")
flags.DEFINE_integer("epochs",50, "Number of epochs to train for")
flags.DEFINE_integer("max_gradient_norm",5, "Max grad norm. 0 for no clipping")
flags.DEFINE_float("dropout", 0.75, "keep probability for keeping unit")
flags.DEFINE_integer("num_layers", 1, "No of layers of stacking in RNN")
flags.DEFINE_integer("word_emb_dim",300, "hidden dimensions of the word embeddings.")
flags.DEFINE_integer("hidden_units",350, "hidden dimensions of the encoder-decoder units.")
flags.DEFINE_integer("eval_interval", 1, "After how many epochs do you want to eval")
flags.DEFINE_integer("patience",5, "Patience parameter")
flags.DEFINE_boolean("train",True,"Train or Infer")
flags.DEFINE_boolean("debug",True,"Debug mode with small dataset")
FLAGS = flags.FLAGS
def arrayfy(data, stats, vocab):
"""
Create data-arrays from the nested-list form of data
data: The data in nested list form
stats: The stats file dumped from preprocessing
vocab: The vocab file dumped from preprocessing
"""
context_len,dec_ip_len,dec_op_len=get_len(data)
pad(data,stats)
replace_token_no(data,vocab)
context=np.asarray(data[0])
dec_ip_arr=np.asarray(data[1])
dec_op_arr=np.asarray(data[2])
context_len_arr=np.asarray(context_len)
dec_ip_len_arr=np.asarray(dec_ip_len)
target_l_arr=[]
for i in range(len(data[2])):
fill=list(np.zeros(stats[2],dtype=int))
for j in range(dec_op_len[i]):
fill[j]=1
target_l_arr.append(fill)
target_len_arr=np.asarray(target_l_arr)
return [context,dec_ip_arr,dec_op_arr,context_len_arr,dec_ip_len_arr,target_len_arr]
def read_data(directory):
"""
Read the data and associated files from the
data directory and return it in the form of arrays
args:
directory: The data directory specified by FLAGS.data_dir
"""
if not os.path.exists(FLAGS.logs_dir+FLAGS.config_id+'/'):
os.mkdir(FLAGS.logs_dir+FLAGS.config_id+'/')
with open(directory+'/p-dialog-dstc2-train.json','r') as fp:
train_data=json.load(fp)
with open(directory+'/p-dialog-dstc2-test.json','r') as fp:
test_data=json.load(fp)
with open(directory+'/p-dialog-dstc2-dev.json','r') as fp:
dev_data=json.load(fp)
with open(directory+'/p-dialog-dstc2-stats.json','r') as fp:
stats=json.load(fp)
with open(directory+'/p-dialog-dstc2-vocab.json','r') as fp:
vocab=json.load(fp)
params_dict=FLAGS.__flags
params_dict['max_enc_size']=stats[0]
params_dict['max_sent_size']=stats[1]
params_dict['vocab_size']=len(vocab)
train=arrayfy(train_data,stats,vocab)
test=arrayfy(test_data,stats,vocab)
dev=arrayfy(dev_data,stats,vocab)
return train,test,dev
def create_model(sess,FLAGS):
"""
Create a new model if there are no checkpoints
otherwise restore the model from the existing
checkpoint
args:
sess: The active Session
FLAGS: The configuration FLAGS
"""
print("Creating/Restoring Seq2seq Model.....")
model = Seq2seqModel(sess,FLAGS)
ckpt = tf.train.get_checkpoint_state(FLAGS.checkpoint_dir+FLAGS.config_id)
if ckpt:
print("Restoring model parameters from %s" %
ckpt.model_checkpoint_path)
model.saver.restore(sess, ckpt.model_checkpoint_path)
else:
print("Created new model!")
sess.run(tf.global_variables_initializer())
return model
def save_model(sess,model):
    """
    Save the model in the per-config checkpoint directory, creating the
    directory on first use.
    args:
        sess: The active Session
        model: The model object which is created or restored
    """
    if not os.path.exists(FLAGS.checkpoint_dir+FLAGS.config_id):
        os.makedirs(FLAGS.checkpoint_dir+FLAGS.config_id)
    # Overwrites the single "model.ckpt" each time; only the newest state is kept.
    save_path = model.saver.save(sess, os.path.join(FLAGS.checkpoint_dir+FLAGS.config_id, "model.ckpt"))
    print("Model saved in file: %s" % save_path)
def get_words_from_ids(ids):
    """
    Convert a 2-D array of token ids into sentences by inverse vocabulary
    lookup, truncating each sentence at (and including) the first '<EOS>'.
    arg:
        ids: array of predicted ids (rows are sentences)
    returns:
        list of space-joined sentence strings.
    """
    with open(FLAGS.data_dir + '/p-dialog-dstc2-vocab.json', 'r') as fp:
        vocab = json.load(fp)
    id_to_word = {token_id: word for word, token_id in vocab.items()}
    sentences = []
    for row in ids.tolist():
        words = []
        for token_id in row:
            word = id_to_word[token_id]
            words.append(word)
            if word == '<EOS>':
                break
        sentences.append(' '.join(words))
    return sentences
def eval_(data, sess, model, FLAGS, epoch):
    """
    Run one inference-mode pass over a dataset and return the average
    per-batch loss.
    args:
        data: The whole dataset as a list of aligned numpy arrays
        sess: The active Session
        model: The model object which has been created or restored
        FLAGS: The configuration FLAGS
        epoch: The current epoch number (kept for interface compatibility)
    """
    batch_size = FLAGS.batch_size
    num_ex = data[0].shape[0]
    # Half-open index pairs; numpy slicing clips the final partial batch.
    batches = [(start, start + batch_size) for start in range(0, num_ex, batch_size)]
    losses = []
    for i, j in batches:
        batch_data = [data[k][i:j] for k in range(len(data))]
        # forward_only=True, keep-probability 1.0 (no dropout) during eval.
        # Fix: the predictions used to be row_stacked into an unused array,
        # wasting O(num_ex * max_sent_size) memory on every eval call.
        _, loss, _ = model.step(sess, FLAGS, batch_data, True, 1.0)
        losses.append(loss)
    # NOTE(review): a plain mean weights a partial final batch the same as a
    # full one — preserved, since the original metric did the same.
    return np.mean(losses)
def train():
    """
    Set up batches of the data and run training on them.
    Also collects the validation losses after every
    FLAGS.eval_interval epochs, logs them under FLAGS.logs_dir, and
    checkpoints the model after each evaluation.
    """
    print("Reading Dataset....")
    dir_=FLAGS.data_dir
    train_examples, test_examples, dev_examples = read_data(dir_)
    # If in debugging mode then run the training of 2 epochs with a smaller data of 67 examples only
    if FLAGS.debug==True:
        train_examples=[train_examples[k][0:67] for k in range(len(train_examples))]
        dev_examples=[dev_examples[k][0:67] for k in range(len(dev_examples))]
        FLAGS.epochs=2
    print("Finished Reading Dataset!")
    # Set up half-open (start, end) batch index pairs; the second range is
    # shifted by batch_size so zip pairs each start with start+batch_size.
    batch_size = FLAGS.batch_size
    num_train = train_examples[0].shape[0]
    batches = zip(range(0, num_train, batch_size), range(batch_size, num_train+batch_size, batch_size))
    batches = [(start, end) for start, end in batches]
    fp=open(FLAGS.logs_dir+FLAGS.config_id+'/logs.log','w+')
    with tf.Session() as sess:
        #Create or Restore Model
        model=create_model(sess,FLAGS)
        try:
            #Run Training
            for epoch in range(1,FLAGS.epochs+1):
                train_loss=[]
                for i,j in tqdm(batches):
                    batch_train =[train_examples[k][i:j] for k in range(len(train_examples))]
                    # forward_only=False: this step also applies gradients.
                    ypred, loss,_ =model.step(sess,FLAGS,batch_train,False,FLAGS.dropout)
                    train_loss.append(loss)
                    fp.write("Epoch:"+str(epoch)+" batch train loss: "+str(loss)+'\n')
                print("Epoch: ",epoch," Train loss: ",np.mean(train_loss))
                # Periodically evaluate on dev and checkpoint the model.
                if epoch>0 and epoch % FLAGS.eval_interval==0:
                    val_loss=eval_(dev_examples,sess,model,FLAGS,epoch)
                    print("Val Loss: "+str(val_loss)+" Train loss: "+str(np.mean(train_loss)))
                    fp.write("Val Loss: "+str(val_loss)+"Train loss: "+str(np.mean(train_loss))+'\n\n\n\n')
                    print('Saving Model...')
                    fp.write("Saving Model\n")
                    save_model(sess,model)
        except KeyboardInterrupt:
            # Allow Ctrl-C to stop training cleanly; log file is closed below.
            print("Keyboard Interrupt")
        finally:
            fp.close()
def infer(data_infer):
    """
    Run inference on the dataset split specified and dump the generated
    sentences and the ground-truth sentences under Results/.
    args:
        data_infer: 'train', 'dev' or 'test' — the split to decode.
    """
    dir_ = FLAGS.data_dir
    train_examples, test_examples, dev_examples = read_data(dir_)
    splits = {'train': train_examples, 'dev': dev_examples, 'test': test_examples}
    if data_infer not in splits:
        print("Invalid Choice!!")
        return
    data = splits[data_infer]
    # If debugging mode is on then run inference only on a smaller dataset of 67 examples
    if FLAGS.debug:
        data = [data[k][0:67] for k in range(len(data))]
    # Half-open batch index pairs; numpy slicing clips the final partial batch.
    batch_size = FLAGS.batch_size
    num_ex = data[0].shape[0]
    batches = [(start, start + batch_size) for start in range(0, num_ex, batch_size)]
    with tf.Session(graph=tf.Graph()) as sess:
        model = create_model(sess, FLAGS)
        # Seed row of zeros so row_stack has a base; it is dropped below.
        preds_all = np.zeros(FLAGS.max_sent_size)
        for i, j in tqdm(batches):
            batch_data = [data[k][i:j] for k in range(len(data))]
            pred, loss, wts = model.step(sess, FLAGS, batch_data, True, 1.0)
            preds_all = np.row_stack((preds_all, pred))
        preds_ids = np.delete(preds_all, 0, 0)  # drop the zero seed row
        preds_test = get_words_from_ids(preds_ids)
        labels_test = get_words_from_ids(data[2])
        # Bug fix: os.makedirs("Results") used to raise OSError/FileExistsError
        # on every run after the first; create the directory only if missing.
        if not os.path.exists("Results"):
            os.makedirs("Results")
        # with-blocks guarantee the output files are closed even on error.
        with open('Results/predictions' + str(FLAGS.config_id) + '.txt', 'w+') as fp1:
            for item in preds_test:
                fp1.write("%s\n" % item)
        with open('Results/labels' + str(FLAGS.config_id) + '.txt', 'w+') as fp2:
            for item in labels_test:
                fp2.write("%s\n" % item)
def get_words_from_ids_context(ids):
    """
    Decode every id row into a space-joined word string via inverse
    vocabulary lookup (no '<EOS>' truncation, unlike get_words_from_ids).
    """
    with open(FLAGS.data_dir + '/p-dialog-dstc2-vocab.json', 'r') as fp:
        vocab = json.load(fp)
    id_to_word = {token_id: word for word, token_id in vocab.items()}
    return [' '.join(id_to_word[token_id] for token_id in row)
            for row in ids.tolist()]
def get_words_from_ids_context_kb(ids):
    """
    Decode the first example's KB triples into word strings, stopping at the
    first all-zero [0,0,0] padding triple (raises ValueError if absent,
    matching the original list.index behavior).
    """
    with open(FLAGS.data_dir + '/p-dialog-dstc2-vocab.json', 'r') as fp:
        vocab = json.load(fp)
    id_to_word = {token_id: word for word, token_id in vocab.items()}
    triples = ids.tolist()[0]
    triples = triples[:triples.index([0, 0, 0])]
    return [' '.join(id_to_word[token_id] for token_id in triple)
            for triple in triples]
def showAttention(input_sentence, output_words, attentions, c):
    """
    Render an attention heat map (input words on x, output words on y) and
    save it as 'attnplots<c>.pdf' before showing it.
    assumes attentions is a 2-D array of shape [len(output_words), len(input_sentence)+1] — TODO confirm
    """
    fig = plt.figure(figsize=(30, 10))
    axes = fig.add_subplot(1, 1, 1)
    heat = axes.matshow(attentions, cmap='Blues')
    fig.colorbar(heat)
    # Leading '' entry offsets the labels: matshow places tick 0 before the data.
    axes.set_xticklabels([''] + input_sentence + ['<EOS>'], rotation=90, fontsize=5)
    axes.set_yticklabels([''] + output_words, fontsize=6.5)
    axes.xaxis.set_major_locator(ticker.MultipleLocator(1))
    axes.yaxis.set_major_locator(ticker.MultipleLocator(1))
    plt.savefig('attnplots' + c + '.pdf')
    plt.show()
def give_wts(w, op_words, c_words, index):
    """
    Extract, for one example in a batch, the attention weights over the
    context words at each decoder step.
    args:
        w: sequence of per-step weight matrices, each at least [index+1, len(c_words)]
        op_words: decoded output words (rows kept)
        c_words: context words (columns kept)
        index: example index inside the batch
    returns:
        array of shape [len(op_words), len(c_words)].
    """
    per_step = [step[index:index + 1, 0:len(c_words)] for step in w]
    stacked = np.squeeze(np.asarray(per_step), 1)
    return stacked[0:len(op_words)]
def give_wts_theta(w, op_words, index):
    """
    Extract the first (scalar, column-0) weight per decoder step for one
    example in a batch.
    returns:
        array of shape [len(op_words), 1].
    """
    per_step = [step[index:index + 1, 0:1] for step in w]
    stacked = np.squeeze(np.asarray(per_step), 1)
    return stacked[0:len(op_words)]
def attention_wts(preds_test, data, all_wts, index):
    """
    Plot the context-attention heat map for one decoded example.
    args:
        preds_test: decoded sentences (strings) for the whole dataset
        data: dataset arrays; data[0] holds the encoder context ids
        all_wts: per-batch attention weight lists collected during inference
        index: global example index across the dataset
    """
    # Generalized: derive the batch slot from FLAGS.batch_size instead of the
    # previously hard-coded 32 (identical behavior at the default batch size).
    batch_idx, ind = divmod(index, FLAGS.batch_size)
    all_wts1 = all_wts[batch_idx]
    pre = data[0][index:index + 1]
    op_words = preds_test[index].split(' ')
    pre_words = get_words_from_ids_context(pre)[0].split(' ')
    ww_pre = give_wts(all_wts1, op_words, pre_words, ind)
    showAttention(pre_words, op_words, ww_pre, 'pre')
def main():
    """Optionally train (per FLAGS.train), then always run inference on FLAGS.infer_data."""
    if FLAGS.train:
        train()
        FLAGS.train = False
    # Both branches of the original ended in the same call; run it once here.
    infer(FLAGS.infer_data)
# Script entry point: train and/or run inference per the configured FLAGS.
if __name__=='__main__':
    main()
| 32.402913 | 112 | 0.630412 | import tensorflow as tf
import numpy as np
import json
import pickle
import os
import subprocess
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
from tqdm import tqdm
from seq2seq.model import Seq2seqModel
from seq2seq.data_utils import pad, replace_token_no, get_len
flags = tf.app.flags
flags.DEFINE_string("config_id",'135',"Hyperparam config id")
flags.DEFINE_string("data_dir", "../data/tamil", "Data directory ")
flags.DEFINE_string("infer_data", "test", "[train, dev or test] Dataset on which you want to perform inference")
flags.DEFINE_string("logs_dir", "logs", "Logging directory ")
flags.DEFINE_string("checkpoint_dir", 'checkpoints', "Checkpoint directory")
flags.DEFINE_string("rnn_unit", 'gru', "GRU or LSTM")
flags.DEFINE_float("learning_rate", 0.0004, "Learning rate for Adam Optimizer")
flags.DEFINE_integer("batch_size",32, "Batch size for training")
flags.DEFINE_integer("epochs",50, "Number of epochs to train for")
flags.DEFINE_integer("max_gradient_norm",5, "Max grad norm. 0 for no clipping")
flags.DEFINE_float("dropout", 0.75, "keep probability for keeping unit")
flags.DEFINE_integer("num_layers", 1, "No of layers of stacking in RNN")
flags.DEFINE_integer("word_emb_dim",300, "hidden dimensions of the word embeddings.")
flags.DEFINE_integer("hidden_units",350, "hidden dimensions of the encoder-decoder units.")
flags.DEFINE_integer("eval_interval", 1, "After how many epochs do you want to eval")
flags.DEFINE_integer("patience",5, "Patience parameter")
flags.DEFINE_boolean("train",True,"Train or Infer")
flags.DEFINE_boolean("debug",True,"Debug mode with small dataset")
FLAGS = flags.FLAGS
def arrayfy(data, stats, vocab):
    """
    Pad a raw data split, replace tokens with vocabulary ids, and convert
    everything to numpy arrays.
    args:
        data: [context, decoder_input, decoder_output] token lists (mutated in place)
        stats: dataset size stats; stats[2] is the padded target length
        vocab: token -> id mapping
    returns:
        [context, dec_input, dec_output, context_lens, dec_input_lens, target_mask]
    """
    # Capture true (pre-padding) lengths before pad() fills everything out.
    context_len,dec_ip_len,dec_op_len=get_len(data)
    pad(data,stats)
    replace_token_no(data,vocab)
    context=np.asarray(data[0])
    dec_ip_arr=np.asarray(data[1])
    dec_op_arr=np.asarray(data[2])
    context_len_arr=np.asarray(context_len)
    dec_ip_len_arr=np.asarray(dec_ip_len)
    target_l_arr=[]
    # Build a 0/1 mask per example: 1 for real target positions, 0 for padding.
    for i in range(len(data[2])):
        fill=list(np.zeros(stats[2],dtype=int))
        for j in range(dec_op_len[i]):
            fill[j]=1
        target_l_arr.append(fill)
    target_len_arr=np.asarray(target_l_arr)
    return [context,dec_ip_arr,dec_op_arr,context_len_arr,dec_ip_len_arr,target_len_arr]
def read_data(directory):
if not os.path.exists(FLAGS.logs_dir+FLAGS.config_id+'/'):
os.mkdir(FLAGS.logs_dir+FLAGS.config_id+'/')
with open(directory+'/p-dialog-dstc2-train.json','r') as fp:
train_data=json.load(fp)
with open(directory+'/p-dialog-dstc2-test.json','r') as fp:
test_data=json.load(fp)
with open(directory+'/p-dialog-dstc2-dev.json','r') as fp:
dev_data=json.load(fp)
with open(directory+'/p-dialog-dstc2-stats.json','r') as fp:
stats=json.load(fp)
with open(directory+'/p-dialog-dstc2-vocab.json','r') as fp:
vocab=json.load(fp)
params_dict=FLAGS.__flags
params_dict['max_enc_size']=stats[0]
params_dict['max_sent_size']=stats[1]
params_dict['vocab_size']=len(vocab)
train=arrayfy(train_data,stats,vocab)
test=arrayfy(test_data,stats,vocab)
dev=arrayfy(dev_data,stats,vocab)
return train,test,dev
def create_model(sess,FLAGS):
print("Creating/Restoring Seq2seq Model.....")
model = Seq2seqModel(sess,FLAGS)
ckpt = tf.train.get_checkpoint_state(FLAGS.checkpoint_dir+FLAGS.config_id)
if ckpt:
print("Restoring model parameters from %s" %
ckpt.model_checkpoint_path)
model.saver.restore(sess, ckpt.model_checkpoint_path)
else:
print("Created new model!")
sess.run(tf.global_variables_initializer())
return model
def save_model(sess,model):
if not os.path.exists(FLAGS.checkpoint_dir+FLAGS.config_id):
os.makedirs(FLAGS.checkpoint_dir+FLAGS.config_id)
save_path = model.saver.save(sess, os.path.join(FLAGS.checkpoint_dir+FLAGS.config_id, "model.ckpt"))
print("Model saved in file: %s" % save_path)
def get_words_from_ids(ids):
ids_list=ids.tolist()
with open(FLAGS.data_dir+'/p-dialog-dstc2-vocab.json','r') as fp:
vocab=json.load(fp)
invert_vocab= dict([v,k] for k,v in vocab.items())
r=[]
for i in ids_list:
c=''
for j in i:
c=c+' '+invert_vocab[j]
if invert_vocab[j]=='<EOS>':
break
r.append(c.strip())
return r
def eval_(data,sess,model,FLAGS,epoch):
batch_size = FLAGS.batch_size
num_ex = data[0].shape[0]
batches = zip(range(0, num_ex, batch_size), range(batch_size, num_ex+batch_size, batch_size))
batches = [(start, end) for start, end in batches]
losses=[]
preds_all=np.zeros(FLAGS.max_sent_size)
for i,j in batches:
batch_data=[data[k][i:j] for k in range(len(data))]
pred,loss,_ =model.step(sess,FLAGS,batch_data,True,1.0)
preds_all = np.row_stack((preds_all,pred))
losses.append(loss)
avg_loss=np.mean(losses)
return avg_loss
def train():
print("Reading Dataset....")
dir_=FLAGS.data_dir
train_examples, test_examples, dev_examples = read_data(dir_)
if FLAGS.debug==True:
train_examples=[train_examples[k][0:67] for k in range(len(train_examples))]
dev_examples=[dev_examples[k][0:67] for k in range(len(dev_examples))]
FLAGS.epochs=2
print("Finished Reading Dataset!")
batch_size = FLAGS.batch_size
num_train = train_examples[0].shape[0]
batches = zip(range(0, num_train, batch_size), range(batch_size, num_train+batch_size, batch_size))
batches = [(start, end) for start, end in batches]
fp=open(FLAGS.logs_dir+FLAGS.config_id+'/logs.log','w+')
with tf.Session() as sess:
model=create_model(sess,FLAGS)
try:
for epoch in range(1,FLAGS.epochs+1):
train_loss=[]
for i,j in tqdm(batches):
batch_train =[train_examples[k][i:j] for k in range(len(train_examples))]
ypred, loss,_ =model.step(sess,FLAGS,batch_train,False,FLAGS.dropout)
train_loss.append(loss)
fp.write("Epoch:"+str(epoch)+" batch train loss: "+str(loss)+'\n')
print("Epoch: ",epoch," Train loss: ",np.mean(train_loss))
if epoch>0 and epoch % FLAGS.eval_interval==0:
val_loss=eval_(dev_examples,sess,model,FLAGS,epoch)
print("Val Loss: "+str(val_loss)+" Train loss: "+str(np.mean(train_loss)))
fp.write("Val Loss: "+str(val_loss)+"Train loss: "+str(np.mean(train_loss))+'\n\n\n\n')
print('Saving Model...')
fp.write("Saving Model\n")
save_model(sess,model)
except KeyboardInterrupt:
print("Keyboard Interrupt")
finally:
fp.close()
def infer(data_infer):
dir_=FLAGS.data_dir
train_examples, test_examples, dev_examples = read_data(dir_)
if data_infer=='test':
data=test_examples
elif data_infer=='dev':
data=dev_examples
elif data_infer=='train':
data=train_examples
else:
print("Invalid Choice!!")
return
if FLAGS.debug:
data = [data[k][0:67] for k in range(len(data))]
batch_size = FLAGS.batch_size
num_ex = data[0].shape[0]
batches = zip(range(0, num_ex, batch_size), range(batch_size, num_ex+batch_size, batch_size))
batches = [(start, end) for start, end in batches]
with tf.Session(graph=tf.Graph()) as sess:
model=create_model(sess,FLAGS)
all_wts=[]
preds_all=np.zeros(FLAGS.max_sent_size)
for i,j in tqdm(batches):
batch_data=[data[k][i:j] for k in range(len(data))]
pred,loss,wts =model.step(sess,FLAGS,batch_data,True,1.0)
all_wts.append(wts)
preds_all = np.row_stack((preds_all,pred))
preds_ids=np.delete(preds_all,0,0)
preds_test=get_words_from_ids(preds_ids)
labels_test=get_words_from_ids(data[2])
os.makedirs("Results")
fp1 =open('Results/predictions'+str(FLAGS.config_id)+'.txt','w+')
for item in preds_test:
fp1.write("%s\n"%item)
fp1.close()
fp2 =open('Results/labels'+str(FLAGS.config_id)+'.txt','w+')
for item in labels_test:
fp2.write("%s\n"%item)
fp2.close()
def get_words_from_ids_context(ids):
ids_list=ids.tolist()
with open(FLAGS.data_dir+'/p-dialog-dstc2-vocab.json','r') as fp:
vocab=json.load(fp)
invert_vocab= dict([v,k] for k,v in vocab.items())
r=[]
for i in ids_list:
c=''
for j in i:
c=c+' '+invert_vocab[j]
r.append(c.strip())
return r
def get_words_from_ids_context_kb(ids):
ids_list=ids.tolist()
with open(FLAGS.data_dir+'/p-dialog-dstc2-vocab.json','r') as fp:
vocab=json.load(fp)
invert_vocab= dict([v,k] for k,v in vocab.items())
ind=ids_list[0].index([0,0,0])
ids_list[0]=ids_list[0][0:ind]
r=[]
for i in ids_list[0]:
c=''
for j in i:
c=c+' '+invert_vocab[j]
r.append(c.strip())
return r
def showAttention(input_sentence, output_words, attentions,c):
fig = plt.figure(figsize=(30,10))
ax = fig.add_subplot(1,1,1)
cax = ax.matshow(attentions, cmap='Blues')
fig.colorbar(cax)
ax.set_xticklabels([''] + input_sentence +
['<EOS>'], rotation=90,fontsize=5)
ax.set_yticklabels([''] + output_words,fontsize=6.5)
ax.xaxis.set_major_locator(ticker.MultipleLocator(1))
ax.yaxis.set_major_locator(ticker.MultipleLocator(1))
plt.savefig('attnplots'+c+'.pdf')
plt.show()
def give_wts(w,op_words,c_words,index):
ww=[]
for i in w:
ww.append(i[index:index+1,0:len(c_words)])
ww=np.asarray(ww)
ww=np.squeeze(ww,1)
ww=ww[0:len(op_words)]
return ww
def give_wts_theta(w,op_words,index):
ww1=[]
for i in w:
ww1.append(i[index:index+1,0:1])
ww1=np.asarray(ww1)
ww1=np.squeeze(ww1,1)
ww1=ww1[0:len(op_words)]
return ww1
def attention_wts(preds_test,data,all_wts,index):
ind=index%32
all_wts1=all_wts[int(index/32)]
pre=data[0][index:index+1]
op_words=preds_test[index].split(' ')
pre_words=get_words_from_ids_context(pre)[0].split(' ')
ww_pre=give_wts(all_wts1,op_words,pre_words,ind)
showAttention(pre_words,op_words,ww_pre,'pre')
def main():
if FLAGS.train:
train()
FLAGS.train=False
infer(FLAGS.infer_data)
else:
infer(FLAGS.infer_data)
if __name__=='__main__':
main()
| true | true |
1c329d8afed0fdcc1edf83c27cb2d8f18395ae5d | 5,367 | py | Python | violas_client/canoser/int_type.py | violas-core/violas-client | e8798f7d081ac218b78b81fd7eb2f8da92631a16 | [
"MIT"
] | null | null | null | violas_client/canoser/int_type.py | violas-core/violas-client | e8798f7d081ac218b78b81fd7eb2f8da92631a16 | [
"MIT"
] | null | null | null | violas_client/canoser/int_type.py | violas-core/violas-client | e8798f7d081ac218b78b81fd7eb2f8da92631a16 | [
"MIT"
] | 1 | 2022-01-05T06:49:42.000Z | 2022-01-05T06:49:42.000Z | import struct
from random import randint
from violas_client.canoser.base import Base
from struct import pack, unpack
from_bytes = int.from_bytes
class IntType(Base):
    """
    Base class for fixed-width integer codecs (little-endian serialization).
    Subclasses define pack_str, byte_lens, min_value, max_value and signed.
    """
    @classmethod
    def to_json_serializable(cls, value):
        """Integers are natively JSON-serializable; return unchanged."""
        return value
    @classmethod
    def encode(cls, value):
        """Serialize value to cls.byte_lens little-endian bytes via struct."""
        return pack(cls.pack_str, value)
    @classmethod
    def encode_slow(cls, value):
        """int.to_bytes equivalent of encode() (kept for reference)."""
        return value.to_bytes(cls.byte_lens, byteorder="little", signed=cls.signed)
    @classmethod
    def decode_bytes_slow(cls, bytes):
        """struct-based equivalent of decode_bytes() (kept for reference)."""
        return unpack(cls.pack_str, bytes)[0]
    @classmethod
    def decode_bytes(cls, bytes):
        """Deserialize a little-endian byte string into an int."""
        return from_bytes(bytes, byteorder='little', signed=cls.signed)
    @classmethod
    def decode(cls, cursor):
        """Read cls.byte_lens bytes from the cursor and decode them."""
        bytes = cursor.read_bytes(cls.byte_lens)
        return cls.decode_bytes(bytes)
    @classmethod
    def from_value(cls, value):
        return value
    @classmethod
    def from_proto(cls, proto):
        return proto
    @classmethod
    def int_unsafe(cls, s):
        """Coerce s via int() without the extra str guards, then range-check."""
        ret = int(s)
        cls.check_value(ret)
        return ret
    @classmethod
    def int_safe(cls, s):
        """
        Only allow safe str and valid int to be coerced to destination IntType
        """
        if isinstance(s, bool):
            raise TypeError(f"{s} is not a integer")
        if isinstance(s, int):
            cls.check_value(s)
            return s
        if not isinstance(s, str):
            raise TypeError(f"{s} is not instance of <str>.")
        if len(s) < 1:
            raise TypeError(f"'{s}' is empty.")
        len_min = len(str(cls.min_value))
        len_max = len(str(cls.max_value))
        # Reject absurdly long strings before paying for int() conversion.
        if len(s) > max(len_min, len_max):
            raise TypeError(f"Length of {s} is larger than max:{max(len_min, len_max)}.")
        ret = int(s)
        cls.check_value(ret)
        return ret
    @classmethod
    def check_value(cls, value):
        """Raise TypeError unless value is None or an int within range (bool rejected)."""
        if value is None:
            return
        if isinstance(value, bool):
            raise TypeError(f"{value} is not a integer")
        if not isinstance(value, int):
            raise TypeError(f"{value} is not instance of <int>.")
        min, max = cls.min_value, cls.max_value
        if value < min or value > max:
            raise TypeError('value {} not in range {}-{}'.format(value, min, max))
    @classmethod
    def checked_add(cls, v1, v2):
        # Rust-style checked_add: return the sum, or None on overflow/underflow.
        cls.check_value(v1)
        cls.check_value(v2)
        try:
            ret = v1+v2
            cls.check_value(ret)
            return ret
        except TypeError:
            return None
    @classmethod
    def random(cls):
        """Return a uniformly random value within this type's range."""
        return randint(cls.min_value, cls.max_value)
class Int8(IntType):
    """Signed 8-bit integer (1 byte, little-endian)."""
    pack_str = "<b"
    byte_lens = 1
    max_value = 127
    min_value = -128
    signed = True
class Int16(IntType):
    """Signed 16-bit integer (2 bytes, little-endian)."""
    pack_str = "<h"
    byte_lens = 2
    max_value = 32767
    min_value = -32768
    signed = True
class Int32(IntType):
    """Signed 32-bit integer (4 bytes, little-endian)."""
    pack_str = "<l"
    byte_lens = 4
    max_value = 2147483647
    min_value = -2147483648
    signed = True
class Int64(IntType):
    """Signed 64-bit integer (8 bytes, little-endian)."""
    pack_str = "<q"
    byte_lens = 8
    max_value = 9223372036854775807
    min_value = -9223372036854775808
    signed = True
class Uint8(IntType):
    """Unsigned 8-bit integer (1 byte)."""
    pack_str = "<B"
    byte_lens = 1
    max_value = 255
    min_value = 0
    signed = False
class Uint16(IntType):
    """Unsigned 16-bit integer (2 bytes, little-endian)."""
    pack_str = "<H"
    byte_lens = 2
    max_value = 65535
    min_value = 0
    signed = False
class Uint32(IntType):
    """Unsigned 32-bit integer, plus ULEB128 variable-length helpers."""
    pack_str = "<L"
    byte_lens = 4
    max_value = 4294967295
    min_value = 0
    signed = False
    @classmethod
    def serialize_uint32_as_uleb128(cls, value) -> bytes:
        """Encode value as ULEB128: 7 data bits per byte, high bit = continuation."""
        ret = bytearray()
        while value >= 0x80:
            # Write 7 (lowest) bits of data and set the 8th bit to 1.
            byte = (value & 0x7f)
            ret.append(byte | 0x80)
            value >>= 7
        # Write the remaining bits of data and set the highest bit to 0.
        ret.append(value)
        return bytes(ret)
    @classmethod
    def parse_uint32_from_uleb128(cls, cursor):
        """Decode a ULEB128-encoded Uint32 from the cursor (at most 5 bytes)."""
        max_shift = 28
        value = 0
        shift = 0
        while not cursor.is_finished():
            byte = cursor.read_u8()
            val = byte & 0x7f
            value |= (val << shift)
            # Continuation bit clear (val == byte): this was the final byte.
            if val == byte:
                return value
            shift += 7
            # More than 5 bytes cannot encode a 32-bit value.
            if shift > max_shift:
                break
        raise ValueError(f"invalid ULEB128 representation for Uint32")
class Uint64(IntType):
    """Unsigned 64-bit integer (8 bytes, little-endian)."""
    pack_str = "<Q"
    byte_lens = 8
    max_value = 18446744073709551615
    min_value = 0
    signed = False
class Int128(IntType):
    """Signed 128-bit integer (16 bytes, little-endian)."""
    byte_lens = 16
    max_value = 170141183460469231731687303715884105727
    min_value = -170141183460469231731687303715884105728
    signed = True
    @classmethod
    def encode(cls, value):
        # struct has no 128-bit format code, so use int.to_bytes directly.
        return value.to_bytes(16, byteorder="little", signed=True)
class Uint128(IntType):
    """Unsigned 128-bit integer (16 bytes, little-endian)."""
    byte_lens = 16
    max_value = 340282366920938463463374607431768211455
    min_value = 0
    signed = False
    @classmethod
    def encode(cls, value):
        # struct has no 128-bit format code, so use int.to_bytes directly.
        return value.to_bytes(16, byteorder="little", signed=False)
| 25.679426 | 90 | 0.575182 | import struct
from random import randint
from violas_client.canoser.base import Base
from struct import pack, unpack
from_bytes = int.from_bytes
class IntType(Base):
@classmethod
def to_json_serializable(cls, value):
return value
@classmethod
def encode(cls, value):
return pack(cls.pack_str, value)
@classmethod
def encode_slow(cls, value):
return value.to_bytes(cls.byte_lens, byteorder="little", signed=cls.signed)
@classmethod
def decode_bytes_slow(cls, bytes):
return unpack(cls.pack_str, bytes)[0]
@classmethod
def decode_bytes(cls, bytes):
return from_bytes(bytes, byteorder='little', signed=cls.signed)
@classmethod
def decode(cls, cursor):
bytes = cursor.read_bytes(cls.byte_lens)
return cls.decode_bytes(bytes)
@classmethod
def from_value(cls, value):
return value
@classmethod
def from_proto(cls, proto):
return proto
@classmethod
def int_unsafe(cls, s):
ret = int(s)
cls.check_value(ret)
return ret
@classmethod
def int_safe(cls, s):
if isinstance(s, bool):
raise TypeError(f"{s} is not a integer")
if isinstance(s, int):
cls.check_value(s)
return s
if not isinstance(s, str):
raise TypeError(f"{s} is not instance of <str>.")
if len(s) < 1:
raise TypeError(f"'{s}' is empty.")
len_min = len(str(cls.min_value))
len_max = len(str(cls.max_value))
if len(s) > max(len_min, len_max):
raise TypeError(f"Length of {s} is larger than max:{max(len_min, len_max)}.")
ret = int(s)
cls.check_value(ret)
return ret
@classmethod
def check_value(cls, value):
if value is None:
return
if isinstance(value, bool):
raise TypeError(f"{value} is not a integer")
if not isinstance(value, int):
raise TypeError(f"{value} is not instance of <int>.")
min, max = cls.min_value, cls.max_value
if value < min or value > max:
raise TypeError('value {} not in range {}-{}'.format(value, min, max))
@classmethod
def checked_add(cls, v1, v2):
cls.check_value(v1)
cls.check_value(v2)
try:
ret = v1+v2
cls.check_value(ret)
return ret
except TypeError:
return None
@classmethod
def random(cls):
return randint(cls.min_value, cls.max_value)
class Int8(IntType):
pack_str = "<b"
byte_lens = 1
max_value = 127
min_value = -128
signed = True
class Int16(IntType):
pack_str = "<h"
byte_lens = 2
max_value = 32767
min_value = -32768
signed = True
class Int32(IntType):
pack_str = "<l"
byte_lens = 4
max_value = 2147483647
min_value = -2147483648
signed = True
class Int64(IntType):
pack_str = "<q"
byte_lens = 8
max_value = 9223372036854775807
min_value = -9223372036854775808
signed = True
class Uint8(IntType):
pack_str = "<B"
byte_lens = 1
max_value = 255
min_value = 0
signed = False
class Uint16(IntType):
pack_str = "<H"
byte_lens = 2
max_value = 65535
min_value = 0
signed = False
class Uint32(IntType):
pack_str = "<L"
byte_lens = 4
max_value = 4294967295
min_value = 0
signed = False
@classmethod
def serialize_uint32_as_uleb128(cls, value) -> bytes:
ret = bytearray()
while value >= 0x80:
byte = (value & 0x7f)
ret.append(byte | 0x80)
value >>= 7
ret.append(value)
return bytes(ret)
@classmethod
def parse_uint32_from_uleb128(cls, cursor):
max_shift = 28
value = 0
shift = 0
while not cursor.is_finished():
byte = cursor.read_u8()
val = byte & 0x7f
value |= (val << shift)
if val == byte:
return value
shift += 7
if shift > max_shift:
break
raise ValueError(f"invalid ULEB128 representation for Uint32")
class Uint64(IntType):
pack_str = "<Q"
byte_lens = 8
max_value = 18446744073709551615
min_value = 0
signed = False
class Int128(IntType):
byte_lens = 16
max_value = 170141183460469231731687303715884105727
min_value = -170141183460469231731687303715884105728
signed = True
@classmethod
def encode(cls, value):
return value.to_bytes(16, byteorder="little", signed=True)
class Uint128(IntType):
byte_lens = 16
max_value = 340282366920938463463374607431768211455
min_value = 0
signed = False
@classmethod
def encode(cls, value):
return value.to_bytes(16, byteorder="little", signed=False)
| true | true |
1c329db3107c5e67b900cf9359ac76186cdfbe1f | 3,680 | py | Python | awacs/glue.py | chizou/awacs | 335c545d13ea22488b318245af891eb427c139db | [
"BSD-2-Clause"
] | null | null | null | awacs/glue.py | chizou/awacs | 335c545d13ea22488b318245af891eb427c139db | [
"BSD-2-Clause"
] | null | null | null | awacs/glue.py | chizou/awacs | 335c545d13ea22488b318245af891eb427c139db | [
"BSD-2-Clause"
] | null | null | null | # Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from aws import Action as BaseAction
from aws import BaseARN
service_name = 'AWS Glue'
prefix = 'glue'
class Action(BaseAction):
    """An IAM policy action in the AWS Glue ('glue') service namespace."""
    def __init__(self, action=None):
        sup = super(Action, self)
        sup.__init__(prefix, action)
class ARN(BaseARN):
    """An ARN for an AWS Glue resource (service fixed to 'glue')."""
    def __init__(self, resource='', region='', account=''):
        sup = super(ARN, self)
        sup.__init__(service=prefix, resource=resource, region=region,
                     account=account)
# One module-level constant per AWS Glue IAM action, named after the action.
BatchCreatePartition = Action('BatchCreatePartition')
BatchDeleteConnection = Action('BatchDeleteConnection')
BatchDeletePartition = Action('BatchDeletePartition')
BatchDeleteTable = Action('BatchDeleteTable')
BatchGetPartition = Action('BatchGetPartition')
CreateClassifier = Action('CreateClassifier')
CreateConnection = Action('CreateConnection')
CreateCrawler = Action('CreateCrawler')
CreateDatabase = Action('CreateDatabase')
CreateDevEndpoint = Action('CreateDevEndpoint')
CreateJob = Action('CreateJob')
CreatePartition = Action('CreatePartition')
CreateScript = Action('CreateScript')
CreateTable = Action('CreateTable')
CreateTrigger = Action('CreateTrigger')
CreateUserDefinedFunction = Action('CreateUserDefinedFunction')
DeleteClassifier = Action('DeleteClassifier')
DeleteConnection = Action('DeleteConnection')
DeleteCrawler = Action('DeleteCrawler')
DeleteDatabase = Action('DeleteDatabase')
DeleteDevEndpoint = Action('DeleteDevEndpoint')
DeleteJob = Action('DeleteJob')
DeletePartition = Action('DeletePartition')
DeleteTable = Action('DeleteTable')
DeleteTrigger = Action('DeleteTrigger')
DeleteUserDefinedFunction = Action('DeleteUserDefinedFunction')
GetCatalogImportStatus = Action('GetCatalogImportStatus')
GetClassifier = Action('GetClassifier')
GetClassifiers = Action('GetClassifiers')
GetConnection = Action('GetConnection')
GetConnections = Action('GetConnections')
GetCrawler = Action('GetCrawler')
GetCrawlerMetrics = Action('GetCrawlerMetrics')
GetCrawlers = Action('GetCrawlers')
GetDatabase = Action('GetDatabase')
GetDatabases = Action('GetDatabases')
GetDataflowGraph = Action('GetDataflowGraph')
GetDevEndpoint = Action('GetDevEndpoint')
GetDevEndpoints = Action('GetDevEndpoints')
GetJob = Action('GetJob')
GetJobRun = Action('GetJobRun')
GetJobRuns = Action('GetJobRuns')
GetJobs = Action('GetJobs')
GetMapping = Action('GetMapping')
GetPartition = Action('GetPartition')
GetPartitions = Action('GetPartitions')
GetPlan = Action('GetPlan')
GetTable = Action('GetTable')
GetTableVersions = Action('GetTableVersions')
GetTables = Action('GetTables')
GetTrigger = Action('GetTrigger')
GetTriggers = Action('GetTriggers')
GetUserDefinedFunction = Action('GetUserDefinedFunction')
GetUserDefinedFunctions = Action('GetUserDefinedFunctions')
ImportCatalogToGlue = Action('ImportCatalogToGlue')
ResetJobBookmark = Action('ResetJobBookmark')
StartCrawler = Action('StartCrawler')
StartCrawlerSchedule = Action('StartCrawlerSchedule')
StartJobRun = Action('StartJobRun')
StartTrigger = Action('StartTrigger')
StopCrawler = Action('StopCrawler')
StopCrawlerSchedule = Action('StopCrawlerSchedule')
StopTrigger = Action('StopTrigger')
UpdateClassifier = Action('UpdateClassifier')
UpdateConnection = Action('UpdateConnection')
UpdateCrawler = Action('UpdateCrawler')
UpdateDatabase = Action('UpdateDatabase')
UpdateDevEndpoint = Action('UpdateDevEndpoint')
UpdateJob = Action('UpdateJob')
UpdatePartition = Action('UpdatePartition')
UpdateTable = Action('UpdateTable')
UpdateTrigger = Action('UpdateTrigger')
UpdateUserDefinedFunction = Action('UpdateUserDefinedFunction')
| 37.171717 | 70 | 0.787228 |
from aws import Action as BaseAction
from aws import BaseARN
service_name = 'AWS Glue'
prefix = 'glue'
class Action(BaseAction):
def __init__(self, action=None):
sup = super(Action, self)
sup.__init__(prefix, action)
class ARN(BaseARN):
def __init__(self, resource='', region='', account=''):
sup = super(ARN, self)
sup.__init__(service=prefix, resource=resource, region=region,
account=account)
BatchCreatePartition = Action('BatchCreatePartition')
BatchDeleteConnection = Action('BatchDeleteConnection')
BatchDeletePartition = Action('BatchDeletePartition')
BatchDeleteTable = Action('BatchDeleteTable')
BatchGetPartition = Action('BatchGetPartition')
CreateClassifier = Action('CreateClassifier')
CreateConnection = Action('CreateConnection')
CreateCrawler = Action('CreateCrawler')
CreateDatabase = Action('CreateDatabase')
CreateDevEndpoint = Action('CreateDevEndpoint')
CreateJob = Action('CreateJob')
CreatePartition = Action('CreatePartition')
CreateScript = Action('CreateScript')
CreateTable = Action('CreateTable')
CreateTrigger = Action('CreateTrigger')
CreateUserDefinedFunction = Action('CreateUserDefinedFunction')
DeleteClassifier = Action('DeleteClassifier')
DeleteConnection = Action('DeleteConnection')
DeleteCrawler = Action('DeleteCrawler')
DeleteDatabase = Action('DeleteDatabase')
DeleteDevEndpoint = Action('DeleteDevEndpoint')
DeleteJob = Action('DeleteJob')
DeletePartition = Action('DeletePartition')
DeleteTable = Action('DeleteTable')
DeleteTrigger = Action('DeleteTrigger')
DeleteUserDefinedFunction = Action('DeleteUserDefinedFunction')
GetCatalogImportStatus = Action('GetCatalogImportStatus')
GetClassifier = Action('GetClassifier')
GetClassifiers = Action('GetClassifiers')
GetConnection = Action('GetConnection')
GetConnections = Action('GetConnections')
GetCrawler = Action('GetCrawler')
GetCrawlerMetrics = Action('GetCrawlerMetrics')
GetCrawlers = Action('GetCrawlers')
GetDatabase = Action('GetDatabase')
GetDatabases = Action('GetDatabases')
GetDataflowGraph = Action('GetDataflowGraph')
GetDevEndpoint = Action('GetDevEndpoint')
GetDevEndpoints = Action('GetDevEndpoints')
GetJob = Action('GetJob')
GetJobRun = Action('GetJobRun')
GetJobRuns = Action('GetJobRuns')
GetJobs = Action('GetJobs')
GetMapping = Action('GetMapping')
GetPartition = Action('GetPartition')
GetPartitions = Action('GetPartitions')
GetPlan = Action('GetPlan')
GetTable = Action('GetTable')
GetTableVersions = Action('GetTableVersions')
GetTables = Action('GetTables')
GetTrigger = Action('GetTrigger')
GetTriggers = Action('GetTriggers')
GetUserDefinedFunction = Action('GetUserDefinedFunction')
GetUserDefinedFunctions = Action('GetUserDefinedFunctions')
ImportCatalogToGlue = Action('ImportCatalogToGlue')
ResetJobBookmark = Action('ResetJobBookmark')
StartCrawler = Action('StartCrawler')
StartCrawlerSchedule = Action('StartCrawlerSchedule')
StartJobRun = Action('StartJobRun')
StartTrigger = Action('StartTrigger')
StopCrawler = Action('StopCrawler')
StopCrawlerSchedule = Action('StopCrawlerSchedule')
StopTrigger = Action('StopTrigger')
UpdateClassifier = Action('UpdateClassifier')
UpdateConnection = Action('UpdateConnection')
UpdateCrawler = Action('UpdateCrawler')
UpdateDatabase = Action('UpdateDatabase')
UpdateDevEndpoint = Action('UpdateDevEndpoint')
UpdateJob = Action('UpdateJob')
UpdatePartition = Action('UpdatePartition')
UpdateTable = Action('UpdateTable')
UpdateTrigger = Action('UpdateTrigger')
UpdateUserDefinedFunction = Action('UpdateUserDefinedFunction')
| true | true |
1c329e24df8c7d20f6476811b5648b3433fdb945 | 2,084 | py | Python | ietf/person/migrations/0012_auto_20160606_0823.py | ekr/ietfdb | 8d936836b0b9ff31cda415b0a423e3f5b33ab695 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2 | 2021-11-20T03:40:40.000Z | 2021-11-20T03:40:42.000Z | ietf/person/migrations/0012_auto_20160606_0823.py | ekr/ietfdb | 8d936836b0b9ff31cda415b0a423e3f5b33ab695 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | ietf/person/migrations/0012_auto_20160606_0823.py | ekr/ietfdb | 8d936836b0b9ff31cda415b0a423e3f5b33ab695 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import ietf.utils.storage
class Migration(migrations.Migration):
    """Auto-generated schema migration for the person app.

    Repoints Person/PersonHistory ``photo``/``photo_thumb`` fields at
    ``NoLocationMigrationFileSystemStorage`` and updates the ``biography``
    help text (a display-level option only).  Byte-string literals are a
    Python-2-era artifact of this codebase and are preserved verbatim.
    """
    dependencies = [
        ('person', '0011_populate_photos'),
    ]
    operations = [
        # Person.biography: help-text update only.
        migrations.AlterField(
            model_name='person',
            name='biography',
            field=models.TextField(help_text=b'Short biography for use on leadership pages. Use plain text or reStructuredText markup.', blank=True),
            preserve_default=True,
        ),
        # Person photo fields: switch storage backend.
        migrations.AlterField(
            model_name='person',
            name='photo',
            field=models.ImageField(default=None, storage=ietf.utils.storage.NoLocationMigrationFileSystemStorage(location=None), upload_to=b'photos', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='person',
            name='photo_thumb',
            field=models.ImageField(default=None, storage=ietf.utils.storage.NoLocationMigrationFileSystemStorage(location=None), upload_to=b'photos', blank=True),
            preserve_default=True,
        ),
        # Same three changes mirrored on the PersonHistory model.
        migrations.AlterField(
            model_name='personhistory',
            name='biography',
            field=models.TextField(help_text=b'Short biography for use on leadership pages. Use plain text or reStructuredText markup.', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='personhistory',
            name='photo',
            field=models.ImageField(default=None, storage=ietf.utils.storage.NoLocationMigrationFileSystemStorage(location=None), upload_to=b'photos', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='personhistory',
            name='photo_thumb',
            field=models.ImageField(default=None, storage=ietf.utils.storage.NoLocationMigrationFileSystemStorage(location=None), upload_to=b'photos', blank=True),
            preserve_default=True,
        ),
    ]
| 40.076923 | 163 | 0.647793 |
from __future__ import unicode_literals
from django.db import models, migrations
import ietf.utils.storage
class Migration(migrations.Migration):
    """Auto-generated schema migration for the person app.

    Repoints Person/PersonHistory ``photo``/``photo_thumb`` fields at
    ``NoLocationMigrationFileSystemStorage`` and updates the ``biography``
    help text (a display-level option only).
    """
    dependencies = [
        ('person', '0011_populate_photos'),
    ]
    operations = [
        # Person.biography: help-text update only.
        migrations.AlterField(
            model_name='person',
            name='biography',
            field=models.TextField(help_text=b'Short biography for use on leadership pages. Use plain text or reStructuredText markup.', blank=True),
            preserve_default=True,
        ),
        # Person photo fields: switch storage backend.
        migrations.AlterField(
            model_name='person',
            name='photo',
            field=models.ImageField(default=None, storage=ietf.utils.storage.NoLocationMigrationFileSystemStorage(location=None), upload_to=b'photos', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='person',
            name='photo_thumb',
            field=models.ImageField(default=None, storage=ietf.utils.storage.NoLocationMigrationFileSystemStorage(location=None), upload_to=b'photos', blank=True),
            preserve_default=True,
        ),
        # Same three changes mirrored on the PersonHistory model.
        migrations.AlterField(
            model_name='personhistory',
            name='biography',
            field=models.TextField(help_text=b'Short biography for use on leadership pages. Use plain text or reStructuredText markup.', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='personhistory',
            name='photo',
            field=models.ImageField(default=None, storage=ietf.utils.storage.NoLocationMigrationFileSystemStorage(location=None), upload_to=b'photos', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='personhistory',
            name='photo_thumb',
            field=models.ImageField(default=None, storage=ietf.utils.storage.NoLocationMigrationFileSystemStorage(location=None), upload_to=b'photos', blank=True),
            preserve_default=True,
        ),
    ]
| true | true |
1c329e4cf35061eb113f0ddacf52c789c39ee2c8 | 1,205 | py | Python | bigfastapi/schemas/customer_schemas.py | Smyja/bigfastapi | b05beede7bb8e853ce0494b495379037ccf27e8d | [
"MIT"
] | null | null | null | bigfastapi/schemas/customer_schemas.py | Smyja/bigfastapi | b05beede7bb8e853ce0494b495379037ccf27e8d | [
"MIT"
] | null | null | null | bigfastapi/schemas/customer_schemas.py | Smyja/bigfastapi | b05beede7bb8e853ce0494b495379037ccf27e8d | [
"MIT"
] | null | null | null | from datetime import datetime
from typing import Optional
from pydantic import BaseModel, EmailStr
class CustomerBase(BaseModel):
    """Shared customer fields used by the create/read schemas below."""
    first_name: str
    last_name: str
    email: EmailStr
    phone_number: str
    address: str
    gender: str
    age: int
    postal_code: str
    language: str
    country: str
    city: str
    region: str
    class Config:
        # enable construction from ORM objects via Model.from_orm()
        orm_mode = True
class CustomerCreate(BaseModel):
    """Creation payload: all base fields plus the owning organization id."""
    organization_id: str
class Customer(CustomerBase):
    """Read/response schema: base fields plus server-assigned identifiers
    and timestamps."""
    organization_id: str
    customer_id: str
    date_created: datetime
    last_updated: datetime
class CustomerUpdate(BaseModel):
    """Partial-update payload: every field optional so only the supplied
    fields are applied."""
    first_name: Optional[str] = None
    last_name: Optional[str] = None
    email: Optional[EmailStr] = None
    phone_number: Optional[str] = None
    organization_id: Optional[str] = None
    address: Optional[str] = None
    gender: Optional[str] = None
    age: Optional[int] = None
    postal_code: Optional[str] = None
    language: Optional[str] = None
    country: Optional[str] = None
    city: Optional[str] = None
    region: Optional[str] = None
class CustomerCreateResponse(BaseModel):
    """Response wrapper returned after a successful customer creation."""
    message: str
    customer: Customer
class ResponseModel(BaseModel):
    """Generic message-only response body."""
    message: str
| 23.173077 | 41 | 0.696266 | from datetime import datetime
from typing import Optional
from pydantic import BaseModel, EmailStr
class CustomerBase(BaseModel):
    """Shared customer fields used by the create/read schemas below."""
    first_name: str
    last_name: str
    email: EmailStr
    phone_number: str
    address: str
    gender: str
    age: int
    postal_code: str
    language: str
    country: str
    city: str
    region: str
    class Config:
        # enable construction from ORM objects via Model.from_orm()
        orm_mode = True
class CustomerCreate(CustomerBase):
    """Creation payload: all base fields plus the owning organization id."""
    organization_id: str
class Customer(CustomerBase):
    """Read/response schema: base fields plus server-assigned identifiers
    and timestamps."""
    organization_id: str
    customer_id: str
    date_created: datetime
    last_updated: datetime
class CustomerUpdate(BaseModel):
    """Partial-update payload: every field optional so only the supplied
    fields are applied."""
    first_name: Optional[str] = None
    last_name: Optional[str] = None
    email: Optional[EmailStr] = None
    phone_number: Optional[str] = None
    organization_id: Optional[str] = None
    address: Optional[str] = None
    gender: Optional[str] = None
    age: Optional[int] = None
    postal_code: Optional[str] = None
    language: Optional[str] = None
    country: Optional[str] = None
    city: Optional[str] = None
    region: Optional[str] = None
class CustomerCreateResponse(BaseModel):
    """Response wrapper returned after a successful customer creation."""
    message: str
    customer: Customer
class ResponseModel(BaseModel):
    """Generic message-only response body."""
    message: str
| true | true |
1c329f4762f4785e737cff46b09c8e9f80c61634 | 6,162 | py | Python | FeaturesSP500.py | LaplaceKorea/Data | 8c16fc8c4cc29c1dc42d340ba1452c7fbc222bd3 | [
"MIT"
] | null | null | null | FeaturesSP500.py | LaplaceKorea/Data | 8c16fc8c4cc29c1dc42d340ba1452c7fbc222bd3 | [
"MIT"
] | null | null | null | FeaturesSP500.py | LaplaceKorea/Data | 8c16fc8c4cc29c1dc42d340ba1452c7fbc222bd3 | [
"MIT"
] | null | null | null |
from typing import Any, Dict, List, Tuple
from Config import cfg
from Features import *
import numpy as np
# Numeric tick-type ids for delayed market data.  The dump paths below
# mention "tws", so these presumably match the Interactive Brokers TWS
# delayed tick-type codes -- TODO confirm against the data producer.
DELAYED_BID = 66
DELAYED_ASK = 67
DELAYED_LAST = 68
DELAYED_BID_SIZE = 69
DELAYED_ASK_SIZE = 70
DELAYED_LAST_SIZE = 71
DELAYED_HIGH = 72
DELAYED_LOW = 73
DELAYED_VOLUME = 74
DELAYED_CLOSE = 75
DELAYED_OPEN = 76
TIMESTAMP = 88
# human-readable name per tick-type id (used in featureDesc metadata below)
translate = {
    DELAYED_BID: "DELAYED_BID",
    DELAYED_ASK: "DELAYED_ASK",
    DELAYED_LAST: "DELAYED_LAST",
    DELAYED_BID_SIZE: "DELAYED_BID_SIZE",
    DELAYED_ASK_SIZE: "DELAYED_ASK_SIZE",
    DELAYED_LAST_SIZE: "DELAYED_LAST_SIZE",
    DELAYED_HIGH: "DELAYED_HIGH",
    DELAYED_LOW: "DELAYED_LOW",
    DELAYED_VOLUME: "DELAYED_VOLUME",
    DELAYED_CLOSE: "DELAYED_CLOSE",
    DELAYED_OPEN: "DELAYED_OPEN",
    TIMESTAMP: "TIMESTAMP"
}
# constituents.csv entries to skip: header row and placeholder values
taboo = {
    "Symbol": True,
    "American Airlines Group": True,
    "0": True
}
# ticker -> dense column index, filled from constituents.csv at import time
stocks: Dict[str, int] = {
    # ex: "AAPL": 0, "ADBE":1
}
# ticker -> company name
stockNames: Dict[str, str] = {}
import csv
ccount = 0
# NOTE(review): reads constituents.csv from the current working directory
# as a module-import side effect; importing this module fails if the file
# is absent.
with open("constituents.csv") as csvfile:
    reader = csv.reader(csvfile)
    for row in reader:
        if row[0] in taboo:
            pass
        else:
            print(row[0], " => ", row[1])
            stocks[row[0]] = ccount
            stockNames[row[0]] = row[1]
            ccount = ccount+1
# OHLC tick-type -> slot offset within each per-stock feature row
# (slot 0 of every row is reserved for the timestamp)
fields = {
    DELAYED_OPEN:0,
    DELAYED_HIGH:1,
    DELAYED_LOW:2,
    DELAYED_CLOSE:3
}
def loadSP500File(fn: str):
    """Parse one pipe-separated delayed-quote dump into a float32 matrix.

    Record formats observed in the dumps:
      ``Z|ticker|field|size``   -- size tick (ignored)
      ``P|ticker|field|price``  -- price tick; OHLC fields fill the newest row
      ``S|ticker|field|value``  -- misc; field TIMESTAMP (88) starts a new row

    Each tracked ticker accumulates rows of ``[timestamp, open, high, low,
    close]``.  The per-ticker rows are then merged in ascending timestamp
    order into one array of shape
    ``(n_timestamps, len(stocks) * (len(fields) + 1))``; tickers without data
    at a given timestamp contribute zeros.
    """
    row_len = len(fields) + 1  # [timestamp] + one slot per OHLC field
    raw_data: Dict[str, List[np.ndarray]] = {}
    with open(fn) as infile:
        for line in infile:
            if line[0] == 'P':
                elts = line.split("|")
                ticker = elts[1]
                if ticker in stocks:
                    field = int(elts[2])
                    price = float(elts[3])
                    if field in fields:
                        print(ticker, field, price)
                        rows = raw_data.setdefault(ticker, [])
                        if rows:  # fill the OHLC slot of the newest row
                            rows[-1][fields[field] + 1] = price
            elif line[0] == 'S':
                elts = line.split("|")
                # BUG FIX: the original wrote ``tickers = elts[1]`` (typo) and
                # then tested the stale ``ticker`` left over from the previous
                # 'P' record, so timestamp rows could be credited to the wrong
                # ticker or silently dropped.
                ticker = elts[1]
                if ticker in stocks:
                    field = int(elts[2])
                    if field == TIMESTAMP:
                        ts = int(elts[3])
                        print(ticker, "time=", ts)
                        row = np.zeros((row_len,), dtype=np.float32)
                        row[0] = ts
                        raw_data.setdefault(ticker, []).append(row)
    # Merge the per-ticker row lists into one time-ordered matrix.
    finallist: List[np.ndarray] = []
    indices: Dict[str, int] = {k: 0 for k in stocks}
    while True:
        # earliest pending timestamp across all tickers
        next_ts = 1e37
        selected = ""
        for k, i in indices.items():
            rows = raw_data.get(k)
            if rows is not None and i < len(rows) and rows[i][0] < next_ts:
                next_ts = rows[i][0]
                selected = k
        if selected == "":
            break  # every ticker's rows have been consumed
        merged = np.zeros((len(stocks) * row_len,), dtype=np.float32)
        for k, i in indices.items():
            rows = raw_data.get(k)
            if rows is not None and i < len(rows) and abs(rows[i][0] - next_ts) < 1e-12:
                idx = stocks[k]
                merged[idx * row_len:(idx + 1) * row_len] = rows[i][:]
                indices[k] = i + 1
        finallist.append(merged)
    if not finallist:
        # robustness: np.vstack([]) raises; return an empty, well-shaped array
        return np.zeros((0, len(stocks) * row_len), dtype=np.float32)
    f = np.vstack(finallist)
    print(f.shape)
    return f
# Build the per-column description table: for every tracked stock, a
# timestamp column followed by one column per OHLC field, each entry a
# (column-index-as-str, metadata-dict) pair.
# description
# List[Tuple[str,Dict[str,Any]]]
# feature size ~
featureSize = len(stocks) * (1 + len(fields))
print("featureSize=", featureSize)
featureDesc: List[Tuple[str,Dict[str,Any]]] = []
# inverse lookups: column index -> ticker, slot offset -> tick-type id
rstocks: Dict[int, str] = {}
rfields: Dict[int, int] = {}
for k in stocks:
    rstocks[stocks[k]] = k
for kk in fields:
    rfields[fields[kk]] = kk
ccount = 0
for i in range(len(stocks)):
    featureDesc.append((str(ccount), { "stock": rstocks[i], "field": "time"} ))
    ccount = ccount+1
    for j in range(len(fields)):
        featureDesc.append((str(ccount), { "stock": rstocks[i], "field": translate[rfields[j]] }))
        ccount = ccount+1
print(featureDesc[0:100])
# filename prefix of the dump files, taken from the application config
sp500Prefix = cfg["features"]["sp500Prefix"]
# "/crypto/esoteric/tws/delayed-1623246971"
#import glob
#lst = [f for f in glob.glob("/crypto/esoteric/tws/delayed-*")]
#lst.sort()
#print(lst)
#
#for f in lst:
#    try:
#        v = loadSP500File(f)
#    except:
#        pass
# loadSP500File("/crypto/esoteric/tws/delayed-1623246971")
class FeaturesSP500(Features):
    """Feature source built from the on-disk delayed S&P-500 quote dumps.

    Columns are described by the module-level ``featureDesc`` table: for each
    tracked stock, a timestamp column followed by one column per OHLC field.
    """

    def getDescription(self) -> List[Tuple[str, Dict[str, Any]]]:
        """Return the (column-name, metadata) pairs for every feature column."""
        return featureDesc

    def getFeatureSize(self) -> int:
        """Total number of feature columns per row."""
        return featureSize

    def getFeature(self, fromDT: datetime, toDT: datetime, timestep: timedelta) -> np.ndarray:
        """Load and vertically stack every dump matching the configured prefix.

        NOTE(review): ``fromDT``/``toDT``/``timestep`` are currently ignored;
        all matching files are loaded regardless of the requested range --
        confirm this satisfies the base-class contract.
        """
        import glob
        lst = sorted(glob.glob(sp500Prefix + "*"))
        print(lst)
        print(sp500Prefix)
        rv: List[np.ndarray] = []
        for f in lst:
            try:
                rv.append(loadSP500File(f))
            except Exception as exc:
                # best-effort load: skip unparsable files but report them.
                # (was a bare ``except: pass``, which also swallowed
                # KeyboardInterrupt/SystemExit and hid every error)
                print("skipping", f, ":", exc)
        return np.vstack(rv)
def FeaturesSP500Builder(confg: Dict[str,Any])->Features:
    # Factory registered under the "SP500" key; the config dict
    # ("confg", sic) is currently unused.
    return FeaturesSP500()
registerFeatureBuilder("SP500", FeaturesSP500Builder)
| 26.908297 | 104 | 0.499675 |
from typing import Any, Dict, List, Tuple
from Config import cfg
from Features import *
import numpy as np
# Numeric tick-type ids for delayed market data (presumably the Interactive
# Brokers TWS delayed tick-type codes -- TODO confirm).
DELAYED_BID = 66
DELAYED_ASK = 67
DELAYED_LAST = 68
DELAYED_BID_SIZE = 69
DELAYED_ASK_SIZE = 70
DELAYED_LAST_SIZE = 71
DELAYED_HIGH = 72
DELAYED_LOW = 73
DELAYED_VOLUME = 74
DELAYED_CLOSE = 75
DELAYED_OPEN = 76
TIMESTAMP = 88
# human-readable name per tick-type id (used in featureDesc metadata below)
translate = {
    DELAYED_BID: "DELAYED_BID",
    DELAYED_ASK: "DELAYED_ASK",
    DELAYED_LAST: "DELAYED_LAST",
    DELAYED_BID_SIZE: "DELAYED_BID_SIZE",
    DELAYED_ASK_SIZE: "DELAYED_ASK_SIZE",
    DELAYED_LAST_SIZE: "DELAYED_LAST_SIZE",
    DELAYED_HIGH: "DELAYED_HIGH",
    DELAYED_LOW: "DELAYED_LOW",
    DELAYED_VOLUME: "DELAYED_VOLUME",
    DELAYED_CLOSE: "DELAYED_CLOSE",
    DELAYED_OPEN: "DELAYED_OPEN",
    TIMESTAMP: "TIMESTAMP"
}
# constituents.csv entries to skip: header row and placeholder values
taboo = {
    "Symbol": True,
    "American Airlines Group": True,
    "0": True
}
# ticker -> dense column index, filled from constituents.csv at import time
stocks: Dict[str, int] = {
}
# ticker -> company name
stockNames: Dict[str, str] = {}
import csv
ccount = 0
# NOTE(review): reads constituents.csv from the current working directory
# as a module-import side effect.
with open("constituents.csv") as csvfile:
    reader = csv.reader(csvfile)
    for row in reader:
        if row[0] in taboo:
            pass
        else:
            print(row[0], " => ", row[1])
            stocks[row[0]] = ccount
            stockNames[row[0]] = row[1]
            ccount = ccount+1
# OHLC tick-type -> slot offset within each per-stock feature row
fields = {
    DELAYED_OPEN:0,
    DELAYED_HIGH:1,
    DELAYED_LOW:2,
    DELAYED_CLOSE:3
}
def loadSP500File(fn: str):
    """Parse one pipe-separated delayed-quote dump into a float32 matrix.

    (Reconstructed: the original text of this copy was corrupted -- the
    ``with open(fn) as infile:`` line was mangled into a bare ``ile:``.)

    Record formats observed in the dumps:
      ``Z|ticker|field|size``   -- size tick (ignored)
      ``P|ticker|field|price``  -- price tick; OHLC fields fill the newest row
      ``S|ticker|field|value``  -- misc; field TIMESTAMP (88) starts a new row

    Each tracked ticker accumulates rows of ``[timestamp, open, high, low,
    close]``, merged in ascending timestamp order into one array of shape
    ``(n_timestamps, len(stocks) * (len(fields) + 1))``.
    """
    row_len = len(fields) + 1  # [timestamp] + one slot per OHLC field
    raw_data: Dict[str, List[np.ndarray]] = {}
    with open(fn) as infile:
        for line in infile:
            if line[0] == 'P':
                elts = line.split("|")
                ticker = elts[1]
                if ticker in stocks:
                    field = int(elts[2])
                    price = float(elts[3])
                    if field in fields:
                        print(ticker, field, price)
                        rows = raw_data.setdefault(ticker, [])
                        if rows:  # fill the OHLC slot of the newest row
                            rows[-1][fields[field] + 1] = price
            elif line[0] == 'S':
                elts = line.split("|")
                # BUG FIX: original assigned ``tickers = elts[1]`` (typo) and
                # tested the stale ``ticker`` from the previous 'P' record.
                ticker = elts[1]
                if ticker in stocks:
                    field = int(elts[2])
                    if field == TIMESTAMP:
                        ts = int(elts[3])
                        print(ticker, "time=", ts)
                        row = np.zeros((row_len,), dtype=np.float32)
                        row[0] = ts
                        raw_data.setdefault(ticker, []).append(row)
    # Merge the per-ticker row lists into one time-ordered matrix.
    finallist: List[np.ndarray] = []
    indices: Dict[str, int] = {k: 0 for k in stocks}
    while True:
        # earliest pending timestamp across all tickers
        next_ts = 1e37
        selected = ""
        for k, i in indices.items():
            rows = raw_data.get(k)
            if rows is not None and i < len(rows) and rows[i][0] < next_ts:
                next_ts = rows[i][0]
                selected = k
        if selected == "":
            break  # every ticker's rows have been consumed
        merged = np.zeros((len(stocks) * row_len,), dtype=np.float32)
        for k, i in indices.items():
            rows = raw_data.get(k)
            if rows is not None and i < len(rows) and abs(rows[i][0] - next_ts) < 1e-12:
                idx = stocks[k]
                merged[idx * row_len:(idx + 1) * row_len] = rows[i][:]
                indices[k] = i + 1
        finallist.append(merged)
    if not finallist:
        # robustness: np.vstack([]) raises; return an empty, well-shaped array
        return np.zeros((0, len(stocks) * row_len), dtype=np.float32)
    f = np.vstack(finallist)
    print(f.shape)
    return f
# Per-column description table: for every tracked stock, a timestamp column
# followed by one column per OHLC field.
featureSize = len(stocks) * (1 + len(fields))
print("featureSize=", featureSize)
featureDesc: List[Tuple[str,Dict[str,Any]]] = []
# inverse lookups: column index -> ticker, slot offset -> tick-type id
rstocks: Dict[int, str] = {}
rfields: Dict[int, int] = {}
for k in stocks:
    rstocks[stocks[k]] = k
for kk in fields:
    rfields[fields[kk]] = kk
ccount = 0
for i in range(len(stocks)):
    featureDesc.append((str(ccount), { "stock": rstocks[i], "field": "time"} ))
    ccount = ccount+1
    for j in range(len(fields)):
        featureDesc.append((str(ccount), { "stock": rstocks[i], "field": translate[rfields[j]] }))
        ccount = ccount+1
print(featureDesc[0:100])
# filename prefix of the dump files, taken from the application config
sp500Prefix = cfg["features"]["sp500Prefix"]
class FeaturesSP500(Features):
    """Feature source that loads every delayed S&P-500 dump under sp500Prefix."""
    def getDescription(self)->List[Tuple[str,Dict[str,Any]]]:
        # column metadata built at module import time
        return featureDesc
    def getFeatureSize(self) -> int:
        # (1 timestamp + len(fields) OHLC slots) per tracked stock
        return featureSize
    def getFeature(self, fromDT: datetime, toDT: datetime, timestep: timedelta) -> np.ndarray:
        # NOTE(review): fromDT/toDT/timestep are ignored; every file matching
        # the prefix is loaded -- confirm this satisfies the base contract.
        import glob
        lst = [f for f in glob.glob(sp500Prefix+"*")]
        lst.sort()
        print(lst)
        print(sp500Prefix)
        rv: List[np.ndarray] = []
        for f in lst:
            try:
                v = loadSP500File(f)
                rv.append(v)
            except:
                # NOTE(review): bare except silently skips unreadable files
                # (and also catches KeyboardInterrupt) -- consider narrowing.
                pass
        return np.vstack(rv)
def FeaturesSP500Builder(confg: Dict[str,Any])->Features:
    # Factory registered under "SP500"; the config dict ("confg", sic) is unused.
    return FeaturesSP500()
registerFeatureBuilder("SP500", FeaturesSP500Builder)
| true | true |
1c32a1c327da3427def2508fea5d64478b4d53ed | 9,153 | py | Python | utils/data/config.py | Chen-Yifan/weaver | 431a1427a185fa6357e40b729b8adcf263c349d2 | [
"MIT"
] | null | null | null | utils/data/config.py | Chen-Yifan/weaver | 431a1427a185fa6357e40b729b8adcf263c349d2 | [
"MIT"
] | null | null | null | utils/data/config.py | Chen-Yifan/weaver | 431a1427a185fa6357e40b729b8adcf263c349d2 | [
"MIT"
] | 2 | 2021-03-06T00:57:04.000Z | 2021-05-15T05:16:03.000Z | import numpy as np
import yaml
import copy
from ..logger import _logger
from .tools import _get_variable_names
def _as_list(x):
    """Coerce *x* to sequence form.

    ``None`` passes through unchanged, an existing ``list``/``tuple`` is
    returned as-is, and any other value is wrapped in a one-element list.
    """
    if x is None or isinstance(x, (list, tuple)):
        return x
    return [x]
def _md5(fname):
    """Return the hexadecimal MD5 digest of the file at *fname*.

    The file is streamed in 4 KiB chunks so arbitrarily large files can be
    hashed with constant memory.
    """
    import hashlib
    digest = hashlib.md5()
    with open(fname, "rb") as f:
        while True:
            chunk = f.read(4096)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
class DataConfig(object):
    r"""Data loading configuration.

    Interprets a (typically YAML-sourced) options dict describing event
    selections, derived variables, input feature groups, labels, weights and
    observer branches, and precomputes the branch sets needed when reading
    files (``load_branches``, ``keep_branches``, ``drop_branches``).
    """

    def __init__(self, print_info=True, **kwargs):
        # defaults; kwargs override (dict-valued entries are merged
        # key-by-key rather than replaced wholesale)
        opts = {
            'treename': None,
            'selection': None,
            'test_time_selection': None,
            'preprocess': {'method': 'manual', 'data_fraction': 0.1, 'params': None},
            'new_variables': {},
            'inputs': {},
            'labels': {},
            'observers': [],
            'weights': None,
        }
        for k, v in kwargs.items():
            if v is not None:
                if isinstance(opts[k], dict):
                    opts[k].update(v)
                else:
                    opts[k] = v
        # only information in ``self.options'' will be persisted when exporting to YAML
        self.options = opts
        if print_info:
            _logger.debug(opts)

        self.selection = opts['selection']
        # test-time selection falls back to the training selection when unset
        self.test_time_selection = opts['test_time_selection'] if opts['test_time_selection'] else self.selection
        self.var_funcs = opts['new_variables']
        # preprocessing config
        self.preprocess = opts['preprocess']
        self._auto_standardization = opts['preprocess']['method'].lower().startswith('auto')
        self._missing_standardization_info = False
        self.preprocess_params = opts['preprocess']['params'] if opts['preprocess']['params'] is not None else {}
        # inputs: each group defines (vars, length); per-variable
        # standardization may be given inline as
        # [name, center, scale, min, max, pad_value]
        self.input_names = tuple(opts['inputs'].keys())
        self.input_dicts = {k: [] for k in self.input_names}
        self.input_shapes = {}
        for k, o in opts['inputs'].items():
            self.input_shapes[k] = (-1, len(o['vars']), o['length'])
            for v in o['vars']:
                v = _as_list(v)
                self.input_dicts[k].append(v[0])
                if opts['preprocess']['params'] is None:

                    def _get(idx, default):
                        # positional access with a fallback for short specs
                        try:
                            return v[idx]
                        except IndexError:
                            return default

                    params = {'length': o['length'], 'center': _get(1, 'auto' if self._auto_standardization else None),
                              'scale': _get(2, 1), 'min': _get(3, -5), 'max': _get(4, 5), 'pad_value': _get(5, 0)}
                    if v[0] in self.preprocess_params and params != self.preprocess_params[v[0]]:
                        # BUG FIX: original formatted the old params with
                        # self.preprocess_params[k] (the input-group name),
                        # raising KeyError instead of the intended error.
                        raise RuntimeError('Incompatible info for variable %s, had: \n  %s\nnow got:\n  %s' % (
                            v[0], str(self.preprocess_params[v[0]]), str(params)))
                    if params['center'] == 'auto':
                        self._missing_standardization_info = True
                    self.preprocess_params[v[0]] = params
        # labels
        self.label_type = opts['labels']['type']
        self.label_value = opts['labels']['value']
        if self.label_type == 'simple':
            # one-hot label branches -> single integer class id via argmax
            assert(isinstance(self.label_value, list))
            self.label_names = ('label',)
            self.var_funcs['label'] = 'np.stack([%s], axis=1).argmax(1)' % (','.join(self.label_value))
        else:
            self.label_names = tuple(self.label_value.keys())
            self.var_funcs.update(self.label_value)
        # weights: TODO
        self.weight_name = None
        if opts['weights'] is not None:
            self.weight_name = 'weight'
            self.use_precomputed_weights = opts['weights']['use_precomputed_weights']
            if self.use_precomputed_weights:
                # weight = product of the listed branches
                self.var_funcs[self.weight_name] = '*'.join(opts['weights']['weight_branches'])
            else:
                self.reweight_method = opts['weights']['reweight_method']
                self.reweight_branches = tuple(opts['weights']['reweight_vars'].keys())
                self.reweight_bins = tuple(opts['weights']['reweight_vars'].values())
                self.reweight_classes = tuple(opts['weights']['reweight_classes'])
                self.class_weights = opts['weights'].get('class_weights', None)
                if self.class_weights is None:
                    self.class_weights = np.ones(len(self.reweight_classes))
                self.reweight_threshold = opts['weights'].get('reweight_threshold', 10)
                self.reweight_hists = opts['weights'].get('reweight_hists', None)
                if self.reweight_hists is not None:
                    for k, v in self.reweight_hists.items():
                        self.reweight_hists[k] = np.array(v, dtype='float32')
        # observers
        self.observer_names = tuple(opts['observers'])
        # remove identity mappings (k -> k) from var_funcs.
        # BUG FIX: the original deleted entries while iterating the dict,
        # which raises ``RuntimeError: dictionary changed size during
        # iteration'' on Python 3 whenever such a mapping exists.
        for k in [k for k, v in self.var_funcs.items() if k == v]:
            del self.var_funcs[k]
        if print_info:
            _logger.info('preprocess config: %s', str(self.preprocess))
            _logger.info('selection: %s', str(self.selection))
            _logger.info('test_time_selection: %s', str(self.test_time_selection))
            _logger.info('var_funcs:\n - %s', '\n - '.join(str(it) for it in self.var_funcs.items()))
            _logger.info('input_names: %s', str(self.input_names))
            _logger.info('input_dicts:\n - %s', '\n - '.join(str(it) for it in self.input_dicts.items()))
            _logger.info('input_shapes:\n - %s', '\n - '.join(str(it) for it in self.input_shapes.items()))
            _logger.info('preprocess_params:\n - %s', '\n - '.join(str(it) for it in self.preprocess_params.items()))
            _logger.info('label_names: %s', str(self.label_names))
            _logger.info('observer_names: %s', str(self.observer_names))
        # parse config: work out which branches must be read from file
        self.keep_branches = set()
        aux_branches = set()
        # selection
        if self.selection:
            aux_branches.update(_get_variable_names(self.selection))
        # test time selection
        if self.test_time_selection:
            aux_branches.update(_get_variable_names(self.test_time_selection))
        # var_funcs
        self.keep_branches.update(self.var_funcs.keys())
        for expr in self.var_funcs.values():
            aux_branches.update(_get_variable_names(expr))
        # inputs
        for names in self.input_dicts.values():
            self.keep_branches.update(names)
        # labels
        self.keep_branches.update(self.label_names)
        # weight
        if self.weight_name:
            self.keep_branches.add(self.weight_name)
            if not self.use_precomputed_weights:
                aux_branches.update(self.reweight_branches)
                aux_branches.update(self.reweight_classes)
        # observers
        self.keep_branches.update(self.observer_names)
        # keep and drop
        self.drop_branches = (aux_branches - self.keep_branches)
        self.load_branches = (aux_branches | self.keep_branches) - set(self.var_funcs.keys()) - {self.weight_name, }
        if print_info:
            _logger.debug('drop_branches:\n  %s', ','.join(self.drop_branches))
            _logger.debug('load_branches:\n  %s', ','.join(self.load_branches))

    def __getattr__(self, name):
        # NOTE(review): raises KeyError (not AttributeError) for unknown
        # names, which breaks hasattr()/getattr(default); kept as-is since
        # existing callers may rely on KeyError.
        return self.options[name]

    def dump(self, fp):
        """Write the raw options dict to the file *fp* as YAML (order kept)."""
        with open(fp, 'w') as f:
            yaml.safe_dump(self.options, f, sort_keys=False)

    @classmethod
    def load(cls, fp, load_observers=True):
        """Construct a DataConfig from the YAML file at *fp*."""
        with open(fp) as f:
            options = yaml.safe_load(f)
        if not load_observers:
            options['observers'] = None
        return cls(**options)

    def copy(self):
        """Return an independent copy built from deep-copied options."""
        return self.__class__(print_info=False, **copy.deepcopy(self.options))

    def __copy__(self):
        return self.copy()

    def __deepcopy__(self, memo):
        return self.copy()

    def export_json(self, fp):
        """Export the input/standardization spec as JSON for inference tools."""
        import json
        j = {'output_names': self.label_value, 'input_names': self.input_names}
        for k, v in self.input_dicts.items():
            j[k] = {'var_names': v, 'var_infos': {}}
            for var_name in v:
                j[k]['var_length'] = self.preprocess_params[var_name]['length']
                info = self.preprocess_params[var_name]
                j[k]['var_infos'][var_name] = {
                    'median': 0 if info['center'] is None else info['center'],
                    'norm_factor': info['scale'],
                    'replace_inf_value': 0,
                    'lower_bound': -1e32 if info['center'] is None else info['min'],
                    'upper_bound': 1e32 if info['center'] is None else info['max'],
                    'pad': info['pad_value']
                }
        with open(fp, 'w') as f:
            json.dump(j, f, indent=2)
| 42.771028 | 162 | 0.567136 | import numpy as np
import yaml
import copy
from ..logger import _logger
from .tools import _get_variable_names
def _as_list(x):
    """Coerce *x* to sequence form.

    ``None`` passes through unchanged, an existing ``list``/``tuple`` is
    returned as-is, and any other value is wrapped in a one-element list.
    """
    if x is None or isinstance(x, (list, tuple)):
        return x
    return [x]
def _md5(fname):
    """Return the hexadecimal MD5 digest of the file at *fname*.

    The file is streamed in 4 KiB chunks so arbitrarily large files can be
    hashed with constant memory.
    """
    import hashlib
    digest = hashlib.md5()
    with open(fname, "rb") as f:
        while True:
            chunk = f.read(4096)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
class DataConfig(object):
    r"""Data loading configuration.

    Interprets a (typically YAML-sourced) options dict describing event
    selections, derived variables, input feature groups, labels, weights and
    observer branches, and precomputes the branch sets needed when reading
    files (``load_branches``, ``keep_branches``, ``drop_branches``).
    """

    def __init__(self, print_info=True, **kwargs):
        # defaults; kwargs override (dict-valued entries are merged
        # key-by-key rather than replaced wholesale)
        opts = {
            'treename': None,
            'selection': None,
            'test_time_selection': None,
            'preprocess': {'method': 'manual', 'data_fraction': 0.1, 'params': None},
            'new_variables': {},
            'inputs': {},
            'labels': {},
            'observers': [],
            'weights': None,
        }
        for k, v in kwargs.items():
            if v is not None:
                if isinstance(opts[k], dict):
                    opts[k].update(v)
                else:
                    opts[k] = v
        # only information in ``self.options'' will be persisted when exporting to YAML
        self.options = opts
        if print_info:
            _logger.debug(opts)

        self.selection = opts['selection']
        # test-time selection falls back to the training selection when unset
        self.test_time_selection = opts['test_time_selection'] if opts['test_time_selection'] else self.selection
        self.var_funcs = opts['new_variables']
        # preprocessing config
        self.preprocess = opts['preprocess']
        self._auto_standardization = opts['preprocess']['method'].lower().startswith('auto')
        self._missing_standardization_info = False
        self.preprocess_params = opts['preprocess']['params'] if opts['preprocess']['params'] is not None else {}
        # inputs: each group defines (vars, length); per-variable
        # standardization may be given inline as
        # [name, center, scale, min, max, pad_value]
        self.input_names = tuple(opts['inputs'].keys())
        self.input_dicts = {k: [] for k in self.input_names}
        self.input_shapes = {}
        for k, o in opts['inputs'].items():
            self.input_shapes[k] = (-1, len(o['vars']), o['length'])
            for v in o['vars']:
                v = _as_list(v)
                self.input_dicts[k].append(v[0])
                if opts['preprocess']['params'] is None:

                    def _get(idx, default):
                        # positional access with a fallback for short specs
                        try:
                            return v[idx]
                        except IndexError:
                            return default

                    params = {'length': o['length'], 'center': _get(1, 'auto' if self._auto_standardization else None),
                              'scale': _get(2, 1), 'min': _get(3, -5), 'max': _get(4, 5), 'pad_value': _get(5, 0)}
                    if v[0] in self.preprocess_params and params != self.preprocess_params[v[0]]:
                        # BUG FIX: original formatted the old params with
                        # self.preprocess_params[k] (the input-group name),
                        # raising KeyError instead of the intended error.
                        raise RuntimeError('Incompatible info for variable %s, had: \n  %s\nnow got:\n  %s' % (
                            v[0], str(self.preprocess_params[v[0]]), str(params)))
                    if params['center'] == 'auto':
                        self._missing_standardization_info = True
                    self.preprocess_params[v[0]] = params
        # labels
        self.label_type = opts['labels']['type']
        self.label_value = opts['labels']['value']
        if self.label_type == 'simple':
            # one-hot label branches -> single integer class id via argmax
            assert(isinstance(self.label_value, list))
            self.label_names = ('label',)
            self.var_funcs['label'] = 'np.stack([%s], axis=1).argmax(1)' % (','.join(self.label_value))
        else:
            self.label_names = tuple(self.label_value.keys())
            self.var_funcs.update(self.label_value)
        # weights
        self.weight_name = None
        if opts['weights'] is not None:
            self.weight_name = 'weight'
            self.use_precomputed_weights = opts['weights']['use_precomputed_weights']
            if self.use_precomputed_weights:
                # weight = product of the listed branches
                self.var_funcs[self.weight_name] = '*'.join(opts['weights']['weight_branches'])
            else:
                self.reweight_method = opts['weights']['reweight_method']
                self.reweight_branches = tuple(opts['weights']['reweight_vars'].keys())
                self.reweight_bins = tuple(opts['weights']['reweight_vars'].values())
                self.reweight_classes = tuple(opts['weights']['reweight_classes'])
                self.class_weights = opts['weights'].get('class_weights', None)
                if self.class_weights is None:
                    self.class_weights = np.ones(len(self.reweight_classes))
                self.reweight_threshold = opts['weights'].get('reweight_threshold', 10)
                self.reweight_hists = opts['weights'].get('reweight_hists', None)
                if self.reweight_hists is not None:
                    for k, v in self.reweight_hists.items():
                        self.reweight_hists[k] = np.array(v, dtype='float32')
        # observers
        self.observer_names = tuple(opts['observers'])
        # remove identity mappings (k -> k) from var_funcs.
        # BUG FIX: the original deleted entries while iterating the dict,
        # which raises ``RuntimeError: dictionary changed size during
        # iteration'' on Python 3 whenever such a mapping exists.
        for k in [k for k, v in self.var_funcs.items() if k == v]:
            del self.var_funcs[k]
        if print_info:
            _logger.info('preprocess config: %s', str(self.preprocess))
            _logger.info('selection: %s', str(self.selection))
            _logger.info('test_time_selection: %s', str(self.test_time_selection))
            _logger.info('var_funcs:\n - %s', '\n - '.join(str(it) for it in self.var_funcs.items()))
            _logger.info('input_names: %s', str(self.input_names))
            _logger.info('input_dicts:\n - %s', '\n - '.join(str(it) for it in self.input_dicts.items()))
            _logger.info('input_shapes:\n - %s', '\n - '.join(str(it) for it in self.input_shapes.items()))
            _logger.info('preprocess_params:\n - %s', '\n - '.join(str(it) for it in self.preprocess_params.items()))
            _logger.info('label_names: %s', str(self.label_names))
            _logger.info('observer_names: %s', str(self.observer_names))
        # parse config: work out which branches must be read from file
        self.keep_branches = set()
        aux_branches = set()
        # selection
        if self.selection:
            aux_branches.update(_get_variable_names(self.selection))
        # test time selection
        if self.test_time_selection:
            aux_branches.update(_get_variable_names(self.test_time_selection))
        # var_funcs
        self.keep_branches.update(self.var_funcs.keys())
        for expr in self.var_funcs.values():
            aux_branches.update(_get_variable_names(expr))
        # inputs
        for names in self.input_dicts.values():
            self.keep_branches.update(names)
        # labels
        self.keep_branches.update(self.label_names)
        # weight
        if self.weight_name:
            self.keep_branches.add(self.weight_name)
            if not self.use_precomputed_weights:
                aux_branches.update(self.reweight_branches)
                aux_branches.update(self.reweight_classes)
        # observers
        self.keep_branches.update(self.observer_names)
        # keep and drop
        self.drop_branches = (aux_branches - self.keep_branches)
        self.load_branches = (aux_branches | self.keep_branches) - set(self.var_funcs.keys()) - {self.weight_name, }
        if print_info:
            _logger.debug('drop_branches:\n  %s', ','.join(self.drop_branches))
            _logger.debug('load_branches:\n  %s', ','.join(self.load_branches))

    def __getattr__(self, name):
        # NOTE(review): raises KeyError (not AttributeError) for unknown
        # names, which breaks hasattr()/getattr(default); kept as-is since
        # existing callers may rely on KeyError.
        return self.options[name]

    def dump(self, fp):
        """Write the raw options dict to the file *fp* as YAML (order kept)."""
        with open(fp, 'w') as f:
            yaml.safe_dump(self.options, f, sort_keys=False)

    @classmethod
    def load(cls, fp, load_observers=True):
        """Construct a DataConfig from the YAML file at *fp*."""
        with open(fp) as f:
            options = yaml.safe_load(f)
        if not load_observers:
            options['observers'] = None
        return cls(**options)

    def copy(self):
        """Return an independent copy built from deep-copied options."""
        return self.__class__(print_info=False, **copy.deepcopy(self.options))

    def __copy__(self):
        return self.copy()

    def __deepcopy__(self, memo):
        return self.copy()

    def export_json(self, fp):
        """Export the input/standardization spec as JSON for inference tools."""
        import json
        j = {'output_names': self.label_value, 'input_names': self.input_names}
        for k, v in self.input_dicts.items():
            j[k] = {'var_names': v, 'var_infos': {}}
            for var_name in v:
                j[k]['var_length'] = self.preprocess_params[var_name]['length']
                info = self.preprocess_params[var_name]
                j[k]['var_infos'][var_name] = {
                    'median': 0 if info['center'] is None else info['center'],
                    'norm_factor': info['scale'],
                    'replace_inf_value': 0,
                    'lower_bound': -1e32 if info['center'] is None else info['min'],
                    'upper_bound': 1e32 if info['center'] is None else info['max'],
                    'pad': info['pad_value']
                }
        with open(fp, 'w') as f:
            json.dump(j, f, indent=2)
| true | true |
1c32a23cb2cdd2fe2ed6a06239d902692c93ac99 | 3,800 | py | Python | noxfile.py | oertr/hypermodern-python | 5c792d42577692d28df45a981948d774442f63d8 | [
"MIT"
] | null | null | null | noxfile.py | oertr/hypermodern-python | 5c792d42577692d28df45a981948d774442f63d8 | [
"MIT"
] | 1 | 2021-11-24T09:58:54.000Z | 2021-11-24T09:58:54.000Z | noxfile.py | oertr/hypermodern-python | 5c792d42577692d28df45a981948d774442f63d8 | [
"MIT"
] | null | null | null | """Nox sessions."""
import tempfile
from typing import Any
import nox
from nox.sessions import Session
nox.options.sessions = "lint", "mypy", "tests"
locations = "src", "tests", "noxfile.py", "docs/conf.py"
def install_with_constraints(session: Session, *args: str, **kwargs: Any) -> None:
    """Install *args* into the session, pinned to Poetry's lock file.

    Exports the locked dependency set to a temporary requirements file and
    hands it to pip via ``--constraint`` so nox installs exactly the locked
    versions.
    """
    with tempfile.NamedTemporaryFile() as requirements:
        export_cmd = (
            "poetry",
            "export",
            "--dev",
            "--format=requirements.txt",
            "--without-hashes",
            f"--output={requirements.name}",
        )
        session.run(*export_cmd, external=True)
        session.install(f"--constraint={requirements.name}", *args, **kwargs)
@nox.session(python="3.9")
def tests(session: Session) -> None:
    """Run the test suite (coverage on, e2e tests excluded by default)."""
    # CLI positional args to nox override the defaults below
    args = session.posargs or ["--cov", "-m", "not e2e"]
    session.run("poetry", "install", "--no-dev", external=True)
    install_with_constraints(
        session, "coverage[toml]", "pytest", "pytest-cov", "pytest-mock"
    )
    session.run("pytest", *args)
@nox.session(python=["3.8", "3.7"])
def lint(session: Session) -> None:
    """Lint using flake8 (plus its plugin suite)."""
    # lint either the CLI-selected paths or the whole project
    args = session.posargs or locations
    install_with_constraints(
        session,
        "flake8",
        "flake8-annotations",
        "flake8-bandit",
        "flake8-black",
        "flake8-bugbear",
        "flake8-docstrings",
        "flake8-import-order",
        "darglint",
    )
    session.run("flake8", *args)
@nox.session(python="3.9")
def black(session: Session) -> None:
    """Run black code formatter."""
    # format either the CLI-selected paths or the whole project
    args = session.posargs or locations
    install_with_constraints(session, "black")
    session.run("black", *args)
@nox.session(python="3.9")
def safety(session: Session) -> None:
    """Scan dependencies for insecure packages."""
    # export the locked dependency set so safety checks exactly what we pin
    with tempfile.NamedTemporaryFile() as requirements:
        session.run(
            "poetry",
            "export",
            "--dev",
            "--format=requirements.txt",
            "--without-hashes",
            f"--output={requirements.name}",
            external=True,
        )
        install_with_constraints(session, "safety")
        session.run("safety", "check", f"--file={requirements.name}", "--full-report")
@nox.session(python="3.9")
def mypy(session: Session) -> None:
    """Type-check using mypy."""
    # check either the CLI-selected paths or the whole project
    args = session.posargs or locations
    install_with_constraints(session, "mypy")
    session.run("mypy", *args)
# import name of the package under test, used by typeguard and xdoctest below
package = "hypermodern_python"
@nox.session(python="3.9")
def typeguard(session: Session) -> None:
    """Runtime type checking using Typeguard."""
    args = session.posargs or ["-m", "not e2e"]
    session.run("poetry", "install", "--no-dev", external=True)
    install_with_constraints(session, "pytest", "pytest-mock", "typeguard")
    session.run("pytest", f"--typeguard-packages={package}", *args)
@nox.session(python="3.9")
def xdoctest(session: Session) -> None:
    """Run examples with xdoctest."""
    args = session.posargs or ["all"]
    session.run("poetry", "install", "--no-dev", external=True)
    install_with_constraints(session, "xdoctest")
    session.run("python", "-m", "xdoctest", package, *args)
@nox.session(python="3.9")
def docs(session: Session) -> None:
    """Build the documentation."""
    # The package itself must be importable for sphinx-autodoc-typehints,
    # hence the (non-dev) install before building.
    session.run("poetry", "install", "--no-dev", external=True)
    install_with_constraints(session, "sphinx", "sphinx-autodoc-typehints")
    session.run("sphinx-build", "docs", "docs/_build")
@nox.session(python="3.9")
def coverage(session: Session) -> None:
    """Upload coverage data."""
    install_with_constraints(session, "coverage[toml]", "codecov")
    # --fail-under=0 disables the threshold here: this session only converts
    # the collected data to XML and uploads it; gating happens elsewhere.
    session.run("coverage", "xml", "--fail-under=0")
    session.run("codecov", *session.posargs)
| 30.645161 | 86 | 0.619474 | import tempfile
from typing import Any
import nox
from nox.sessions import Session
nox.options.sessions = "lint", "mypy", "tests"
locations = "src", "tests", "noxfile.py", "docs/conf.py"
def install_with_constraints(session: Session, *args: str, **kwargs: Any) -> None:
with tempfile.NamedTemporaryFile() as requirements:
session.run(
"poetry",
"export",
"--dev",
"--format=requirements.txt",
"--without-hashes",
f"--output={requirements.name}",
external=True,
)
session.install(f"--constraint={requirements.name}", *args, **kwargs)
@nox.session(python="3.9")
def tests(session: Session) -> None:
args = session.posargs or ["--cov", "-m", "not e2e"]
session.run("poetry", "install", "--no-dev", external=True)
install_with_constraints(
session, "coverage[toml]", "pytest", "pytest-cov", "pytest-mock"
)
session.run("pytest", *args)
@nox.session(python=["3.8", "3.7"])
def lint(session: Session) -> None:
args = session.posargs or locations
install_with_constraints(
session,
"flake8",
"flake8-annotations",
"flake8-bandit",
"flake8-black",
"flake8-bugbear",
"flake8-docstrings",
"flake8-import-order",
"darglint",
)
session.run("flake8", *args)
@nox.session(python="3.9")
def black(session: Session) -> None:
args = session.posargs or locations
install_with_constraints(session, "black")
session.run("black", *args)
@nox.session(python="3.9")
def safety(session: Session) -> None:
with tempfile.NamedTemporaryFile() as requirements:
session.run(
"poetry",
"export",
"--dev",
"--format=requirements.txt",
"--without-hashes",
f"--output={requirements.name}",
external=True,
)
install_with_constraints(session, "safety")
session.run("safety", "check", f"--file={requirements.name}", "--full-report")
@nox.session(python="3.9")
def mypy(session: Session) -> None:
args = session.posargs or locations
install_with_constraints(session, "mypy")
session.run("mypy", *args)
package = "hypermodern_python"
@nox.session(python="3.9")
def typeguard(session: Session) -> None:
args = session.posargs or ["-m", "not e2e"]
session.run("poetry", "install", "--no-dev", external=True)
install_with_constraints(session, "pytest", "pytest-mock", "typeguard")
session.run("pytest", f"--typeguard-packages={package}", *args)
@nox.session(python="3.9")
def xdoctest(session: Session) -> None:
args = session.posargs or ["all"]
session.run("poetry", "install", "--no-dev", external=True)
install_with_constraints(session, "xdoctest")
session.run("python", "-m", "xdoctest", package, *args)
@nox.session(python="3.9")
def docs(session: Session) -> None:
session.run("poetry", "install", "--no-dev", external=True)
install_with_constraints(session, "sphinx", "sphinx-autodoc-typehints")
session.run("sphinx-build", "docs", "docs/_build")
@nox.session(python="3.9")
def coverage(session: Session) -> None:
install_with_constraints(session, "coverage[toml]", "codecov")
session.run("coverage", "xml", "--fail-under=0")
session.run("codecov", *session.posargs)
| true | true |
1c32a283e9321b6fee30f4acfd6dc10a11a36b5c | 155 | py | Python | polls_cms_integration/forms.py | sandilukez/news-project | fcd91edd6b7d6941b77a6390aa86efe3e2b0c3a1 | [
"MIT"
] | null | null | null | polls_cms_integration/forms.py | sandilukez/news-project | fcd91edd6b7d6941b77a6390aa86efe3e2b0c3a1 | [
"MIT"
] | null | null | null | polls_cms_integration/forms.py | sandilukez/news-project | fcd91edd6b7d6941b77a6390aa86efe3e2b0c3a1 | [
"MIT"
] | null | null | null | from django import forms
from polls.models import Poll
class PollWizardForm(forms.ModelForm):
    """ModelForm exposing every editable field of :class:`polls.models.Poll`."""
    class Meta:
        model = Poll
        # Explicitly opt in to all model fields. Equivalent to the previous
        # ``exclude = []``, but doesn't rely on silencing Django's
        # fields/exclude requirement with an empty exclude list.
        fields = "__all__"
| 15.5 | 38 | 0.670968 | from django import forms
from polls.models import Poll
class PollWizardForm(forms.ModelForm):
class Meta:
model = Poll
exclude = []
| true | true |
1c32a3d1b74ff3ab9282c97dbdf86760f4c701ba | 2,993 | py | Python | setup.py | bchretien/bloom | 51b12507745521989944730f1c6b8539e26a7406 | [
"BSD-3-Clause"
] | 1 | 2016-03-09T00:27:38.000Z | 2016-03-09T00:27:38.000Z | setup.py | bchretien/bloom | 51b12507745521989944730f1c6b8539e26a7406 | [
"BSD-3-Clause"
] | null | null | null | setup.py | bchretien/bloom | 51b12507745521989944730f1c6b8539e26a7406 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
import sys
from setuptools import setup, find_packages
# Runtime dependencies; version floors track the oldest releases bloom supports.
install_requires = [
    'catkin-pkg >= 0.1.14',
    'distribute',
    'empy',
    'python-dateutil',
    'PyYAML',
    'rosdep >= 0.10.25',
    'rosdistro >= 0.3.0',
    'vcstools >= 0.1.22',
]
# argparse got moved into the stdlib in py2.7, so we only
# need to install the pypi version if we're on an older
# python.
if sys.version_info[0] == 2 and sys.version_info[1] <= 6:
    install_requires.append('argparse')
setup(
    name='bloom',
    version='0.4.9',
    packages=find_packages(exclude=['test']),
    # Ship the debian/source templates alongside the generator code.
    package_data={
        'bloom.generators.debian': [
            'bloom/generators/debian/templates/*',
            'bloom/generators/debian/templates/source/*'
        ]
    },
    include_package_data=True,
    install_requires=install_requires,
    author='Tully Foote, William Woodall',
    author_email='tfoote@willowgarage.com, william@osrfoundation.org',
    maintainer='William Woodall',
    maintainer_email='william@osrfoundation.org',
    url='http://www.ros.org/wiki/bloom',
    download_url='http://pr.willowgarage.com/downloads/bloom/',
    keywords=['ROS'],
    classifiers=['Programming Language :: Python',
                 'License :: OSI Approved :: BSD License'],
    description="Bloom is a release automation tool.",
    long_description="""\
Bloom provides tools for releasing software on top of a git repository \
and leverages tools and patterns from git-buildpackage. Additionally, \
bloom leverages meta and build information from catkin \
(https://github.com/ros/catkin) to automate release branching and the \
generation of platform specific source packages, like debian's src-debs.""",
    license='BSD',
    test_suite='test',
    # console_scripts: CLI entry points; bloom.generators / bloom.generate_cmds:
    # plugin registries discovered via pkg_resources entry points.
    entry_points={
        'console_scripts': [
            'git-bloom-config = bloom.commands.git.config:main',
            'git-bloom-import-upstream = bloom.commands.git.import_upstream:main',
            'git-bloom-branch = bloom.commands.git.branch:main',
            'git-bloom-patch = bloom.commands.git.patch.patch_main:main',
            'git-bloom-generate = bloom.commands.git.generate:main',
            'git-bloom-release = bloom.commands.git.release:main',
            'bloom-export-upstream = bloom.commands.export_upstream:main',
            'bloom-update = bloom.commands.update:main',
            'bloom-release = bloom.commands.release:main',
            'bloom-generate = bloom.commands.generate:main'
        ],
        'bloom.generators': [
            'release = bloom.generators.release:ReleaseGenerator',
            'rosrelease = bloom.generators.rosrelease:RosReleaseGenerator',
            'debian = bloom.generators.debian:DebianGenerator',
            'rosdebian = bloom.generators.rosdebian:RosDebianGenerator'
        ],
        'bloom.generate_cmds': [
            'debian = bloom.generators.debian.generate_cmd:description',
            'rosdebian = bloom.generators.rosdebian:description'
        ]
    }
)
| 38.371795 | 82 | 0.655864 |
import sys
from setuptools import setup, find_packages
install_requires = [
'catkin-pkg >= 0.1.14',
'distribute',
'empy',
'python-dateutil',
'PyYAML',
'rosdep >= 0.10.25',
'rosdistro >= 0.3.0',
'vcstools >= 0.1.22',
]
# python.
if sys.version_info[0] == 2 and sys.version_info[1] <= 6:
install_requires.append('argparse')
setup(
name='bloom',
version='0.4.9',
packages=find_packages(exclude=['test']),
package_data={
'bloom.generators.debian': [
'bloom/generators/debian/templates/*',
'bloom/generators/debian/templates/source/*'
]
},
include_package_data=True,
install_requires=install_requires,
author='Tully Foote, William Woodall',
author_email='tfoote@willowgarage.com, william@osrfoundation.org',
maintainer='William Woodall',
maintainer_email='william@osrfoundation.org',
url='http://www.ros.org/wiki/bloom',
download_url='http://pr.willowgarage.com/downloads/bloom/',
keywords=['ROS'],
classifiers=['Programming Language :: Python',
'License :: OSI Approved :: BSD License'],
description="Bloom is a release automation tool.",
long_description="""\
Bloom provides tools for releasing software on top of a git repository \
and leverages tools and patterns from git-buildpackage. Additionally, \
bloom leverages meta and build information from catkin \
(https://github.com/ros/catkin) to automate release branching and the \
generation of platform specific source packages, like debian's src-debs.""",
license='BSD',
test_suite='test',
entry_points={
'console_scripts': [
'git-bloom-config = bloom.commands.git.config:main',
'git-bloom-import-upstream = bloom.commands.git.import_upstream:main',
'git-bloom-branch = bloom.commands.git.branch:main',
'git-bloom-patch = bloom.commands.git.patch.patch_main:main',
'git-bloom-generate = bloom.commands.git.generate:main',
'git-bloom-release = bloom.commands.git.release:main',
'bloom-export-upstream = bloom.commands.export_upstream:main',
'bloom-update = bloom.commands.update:main',
'bloom-release = bloom.commands.release:main',
'bloom-generate = bloom.commands.generate:main'
],
'bloom.generators': [
'release = bloom.generators.release:ReleaseGenerator',
'rosrelease = bloom.generators.rosrelease:RosReleaseGenerator',
'debian = bloom.generators.debian:DebianGenerator',
'rosdebian = bloom.generators.rosdebian:RosDebianGenerator'
],
'bloom.generate_cmds': [
'debian = bloom.generators.debian.generate_cmd:description',
'rosdebian = bloom.generators.rosdebian:description'
]
}
)
| true | true |
1c32a4844f9e79a9d27d43aaaf31661fe5746e0a | 19,655 | py | Python | discord/scheduled_event.py | bokja/discord.py-master | 7b4bd2eafebeb1b3c186f95cd71b1baaad5b43dd | [
"MIT"
] | null | null | null | discord/scheduled_event.py | bokja/discord.py-master | 7b4bd2eafebeb1b3c186f95cd71b1baaad5b43dd | [
"MIT"
] | null | null | null | discord/scheduled_event.py | bokja/discord.py-master | 7b4bd2eafebeb1b3c186f95cd71b1baaad5b43dd | [
"MIT"
] | null | null | null | """
The MIT License (MIT)
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
from datetime import datetime
from typing import TYPE_CHECKING, AsyncIterator, Dict, Optional, Union
from .asset import Asset
from .enums import EventStatus, EntityType, PrivacyLevel, try_enum
from .mixins import Hashable
from .object import Object, OLDEST_OBJECT
from .utils import parse_time, _get_as_snowflake, _bytes_to_base64_data, MISSING
if TYPE_CHECKING:
from .types.scheduled_event import (
GuildScheduledEvent as GuildScheduledEventPayload,
GuildScheduledEventWithUserCount as GuildScheduledEventWithUserCountPayload,
EntityMetadata,
)
from .abc import Snowflake
from .guild import Guild
from .channel import VoiceChannel, StageChannel
from .state import ConnectionState
from .user import User
GuildScheduledEventPayload = Union[GuildScheduledEventPayload, GuildScheduledEventWithUserCountPayload]
# fmt: off
__all__ = (
"ScheduledEvent",
)
# fmt: on
class ScheduledEvent(Hashable):
"""Represents a scheduled event in a guild.
.. versionadded:: 2.0
.. container:: operations
.. describe:: x == y
Checks if two scheduled events are equal.
.. describe:: x != y
Checks if two scheduled events are not equal.
.. describe:: hash(x)
Returns the scheduled event's hash.
Attributes
----------
id: :class:`int`
The scheduled event's ID.
name: :class:`str`
The name of the scheduled event.
description: Optional[:class:`str`]
The description of the scheduled event.
entity_type: :class:`EntityType`
The type of entity this event is for.
entity_id: Optional[:class:`int`]
The ID of the entity this event is for if available.
start_time: :class:`datetime.datetime`
The time that the scheduled event will start in UTC.
end_time: :class:`datetime.datetime`
The time that the scheduled event will end in UTC.
privacy_level: :class:`PrivacyLevel`
The privacy level of the scheduled event.
status: :class:`EventStatus`
The status of the scheduled event.
user_count: :class:`int`
The number of users subscribed to the scheduled event.
creator: Optional[:class:`User`]
The user that created the scheduled event.
location: Optional[:class:`str`]
The location of the scheduled event.
"""
__slots__ = (
'_state',
'_users',
'id',
'guild_id',
'name',
'description',
'entity_type',
'entity_id',
'start_time',
'end_time',
'privacy_level',
'status',
'_cover_image',
'user_count',
'creator',
'channel_id',
'location',
)
    def __init__(self, *, state: ConnectionState, data: GuildScheduledEventPayload) -> None:
        self._state = state
        # Subscribed-user cache keyed by user ID (maintained via _add_user/_pop_user).
        self._users: Dict[int, User] = {}
        self._update(data)
def _update(self, data: GuildScheduledEventPayload) -> None:
self.id: int = int(data['id'])
self.guild_id: int = int(data['guild_id'])
self.name: str = data['name']
self.description: Optional[str] = data.get('description')
self.entity_type = try_enum(EntityType, data['entity_type'])
self.entity_id: Optional[int] = _get_as_snowflake(data, 'entity_id')
self.start_time: datetime = parse_time(data['scheduled_start_time'])
self.privacy_level: PrivacyLevel = try_enum(PrivacyLevel, data['status'])
self.status: EventStatus = try_enum(EventStatus, data['status'])
self._cover_image: Optional[str] = data.get('image', None)
self.user_count: int = data.get('user_count', 0)
creator = data.get('creator')
self.creator: Optional[User] = self._state.store_user(creator) if creator else None
self.end_time: Optional[datetime] = parse_time(data.get('scheduled_end_time'))
self.channel_id: Optional[int] = _get_as_snowflake(data, 'channel_id')
metadata = data.get('entity_metadata')
self._unroll_metadata(metadata)
def _unroll_metadata(self, data: Optional[EntityMetadata]):
self.location: Optional[str] = data.get('location') if data else None
@classmethod
def from_creation(cls, *, state: ConnectionState, data: GuildScheduledEventPayload):
creator_id = data.get('creator_id')
self = cls(state=state, data=data)
if creator_id:
self.creator = self._state.get_user(int(creator_id))
def __repr__(self) -> str:
return f'<GuildScheduledEvent id={self.id} name={self.name!r} guild_id={self.guild_id!r} creator={self.creator!r}>'
@property
def cover_image(self) -> Optional[Asset]:
"""Optional[:class:`Asset`]: The scheduled event's cover image."""
if self._cover_image is None:
return None
return Asset._from_scheduled_event_cover_image(self._state, self.id, self._cover_image)
    @property
    def guild(self) -> Optional[Guild]:
        """Optional[:class:`Guild`]: The guild this scheduled event is in."""
        # May be None when the guild is not in the client's cache.
        return self._state._get_guild(self.guild_id)
    @property
    def channel(self) -> Optional[Union[VoiceChannel, StageChannel]]:
        """Optional[Union[:class:`VoiceChannel`, :class:`StageChannel`]]: The channel this scheduled event is in."""
        # NOTE(review): assumes ``self.guild`` is cached — if the guild lookup
        # returns None this raises AttributeError rather than returning None;
        # confirm callers only use this with a cached guild.
        return self.guild.get_channel(self.channel_id)  # type: ignore
@property
def url(self):
""":class:`str`: The url for the scheduled event."""
return f'https://discord.com/events/{self.guild_id}/{self.id}'
async def start(self, *, reason: Optional[str] = None) -> ScheduledEvent:
"""|coro|
Starts the scheduled event.
Shorthand for:
.. code-block:: python3
await event.edit(status=EventStatus.active)
Parameters
-----------
reason: Optional[:class:`str`]
The reason for starting the scheduled event.
Raises
------
ValueError
The scheduled event has already started or has ended.
Forbidden
You do not have the proper permissions to start the scheduled event.
HTTPException
The scheduled event could not be started.
Returns
-------
:class:`ScheduledEvent`
The scheduled event that was started.
"""
if self.status is not EventStatus.scheduled:
raise ValueError('This scheduled event is already running.')
return await self.edit(status=EventStatus.active, reason=reason)
async def end(self, *, reason: Optional[str] = None) -> ScheduledEvent:
"""|coro|
Ends the scheduled event.
Shorthand for:
.. code-block:: python3
await event.edit(status=EventStatus.completed)
Parameters
-----------
reason: Optional[:class:`str`]
The reason for ending the scheduled event.
Raises
------
ValueError
The scheduled event is not active or has already ended.
Forbidden
You do not have the proper permissions to end the scheduled event.
HTTPException
The scheduled event could not be ended.
Returns
-------
:class:`ScheduledEvent`
The scheduled event that was ended.
"""
if self.status is not EventStatus.active:
raise ValueError('This scheduled event is not active.')
return await self.edit(status=EventStatus.ended, reason=reason)
async def cancel(self, *, reason: Optional[str] = None) -> ScheduledEvent:
"""|coro|
Cancels the scheduled event.
Shorthand for:
.. code-block:: python3
await event.edit(status=EventStatus.cancelled)
Parameters
-----------
reason: Optional[:class:`str`]
The reason for cancelling the scheduled event.
Raises
------
ValueError
The scheduled event is already running.
Forbidden
You do not have the proper permissions to cancel the scheduled event.
HTTPException
The scheduled event could not be cancelled.
Returns
-------
:class:`ScheduledEvent`
The scheduled event that was cancelled.
"""
if self.status is not EventStatus.scheduled:
raise ValueError('This scheduled event is already running.')
return await self.edit(status=EventStatus.cancelled, reason=reason)
    async def edit(
        self,
        *,
        name: str = MISSING,
        description: str = MISSING,
        channel: Optional[Snowflake] = MISSING,
        start_time: datetime = MISSING,
        end_time: Optional[datetime] = MISSING,
        privacy_level: PrivacyLevel = MISSING,
        entity_type: EntityType = MISSING,
        status: EventStatus = MISSING,
        image: bytes = MISSING,
        location: str = MISSING,
        reason: Optional[str] = None,
    ) -> ScheduledEvent:
        r"""|coro|
        Edits the scheduled event.
        Requires :attr:`~Permissions.manage_events` permissions. Every
        parameter defaults to ``MISSING`` ("leave unchanged"); only the
        fields you pass are sent to the API.
        Parameters
        -----------
        name: :class:`str`
            The new name of the scheduled event.
        description: :class:`str`
            The new description of the scheduled event.
        channel: Optional[:class:`~discord.abc.Snowflake`]
            The channel to put the scheduled event in. Required (and only
            allowed) when the entity type is :attr:`EntityType.voice` or
            :attr:`EntityType.stage_instance`.
        start_time: :class:`datetime.datetime`
            The new start time; must be timezone-aware. Consider
            :func:`utils.utcnow`.
        end_time: Optional[:class:`datetime.datetime`]
            The new end time; must be timezone-aware. Required when the
            entity type is :attr:`EntityType.external`; can be cleared with
            ``None`` for voice/stage events.
        privacy_level: :class:`PrivacyLevel`
            The new privacy level of the scheduled event.
        entity_type: :class:`EntityType`
            The new entity type.
        status: :class:`EventStatus`
            The new status of the scheduled event.
        image: Optional[:class:`bytes`]
            The new cover image, or ``None`` to remove it.
        location: :class:`str`
            The new location. Required (and only allowed) when the entity
            type is :attr:`EntityType.external`.
        reason: Optional[:class:`str`]
            The reason for editing the scheduled event. Shows up on the audit log.
        Raises
        -------
        TypeError
            An argument had the wrong type or was incompatible with the
            (effective) entity type.
        ValueError
            ``start_time`` or ``end_time`` was not a timezone-aware datetime.
        Forbidden
            You do not have permissions to edit the scheduled event.
        HTTPException
            Editing the scheduled event failed.
        Returns
        --------
        :class:`ScheduledEvent`
            The edited scheduled event.
        """
        # JSON body for the PATCH request; ``metadata`` collects the nested
        # ``entity_metadata`` object.
        payload = {}
        metadata = {}
        if name is not MISSING:
            payload['name'] = name
        if start_time is not MISSING:
            if start_time.tzinfo is None:
                raise ValueError(
                    'start_time must be an aware datetime. Consider using discord.utils.utcnow() or datetime.datetime.now().astimezone() for local time.'
                )
            payload['scheduled_start_time'] = start_time.isoformat()
        if description is not MISSING:
            payload['description'] = description
        if privacy_level is not MISSING:
            if not isinstance(privacy_level, PrivacyLevel):
                raise TypeError('privacy_level must be of type PrivacyLevel.')
            payload['privacy_level'] = privacy_level.value
        if status is not MISSING:
            if not isinstance(status, EventStatus):
                raise TypeError('status must be of type EventStatus')
            payload['status'] = status.value
        if image is not MISSING:
            # ``None`` passes through unchanged so the API clears the image.
            image_as_str: Optional[str] = _bytes_to_base64_data(image) if image is not None else image
            payload['image'] = image_as_str
        if entity_type is not MISSING:
            if not isinstance(entity_type, EntityType):
                raise TypeError('entity_type must be of type EntityType')
            payload['entity_type'] = entity_type.value
        # Falls back to the current entity type when none was passed
        # (presumably MISSING is falsy via utils.MISSING — confirm).
        _entity_type = entity_type or self.entity_type
        if _entity_type in (EntityType.stage_instance, EntityType.voice):
            # Voice/stage events live in a channel and must not have a location.
            if channel is MISSING or channel is None:
                raise TypeError('channel must be set when entity_type is voice or stage_instance')
            payload['channel_id'] = channel.id
            if location not in (MISSING, None):
                raise TypeError('location cannot be set when entity_type is voice or stage_instance')
            payload['entity_metadata'] = None
        else:
            # External events need a location and an end time, and no channel.
            # NOTE(review): this requires location/end_time on *every* edit of
            # an external event, even when only e.g. renaming — confirm intended.
            if channel not in (MISSING, None):
                raise TypeError('channel cannot be set when entity_type is external')
            payload['channel_id'] = None
            if location is MISSING or location is None:
                raise TypeError('location must be set when entity_type is external')
            metadata['location'] = location
            if end_time is MISSING or end_time is None:
                raise TypeError('end_time must be set when entity_type is external')
        if end_time is not MISSING:
            if end_time is not None:
                if end_time.tzinfo is None:
                    raise ValueError(
                        'end_time must be an aware datetime. Consider using discord.utils.utcnow() or datetime.datetime.now().astimezone() for local time.'
                    )
                payload['scheduled_end_time'] = end_time.isoformat()
            else:
                # Explicit None clears the end time (voice/stage only).
                payload['scheduled_end_time'] = end_time
        if metadata:
            payload['entity_metadata'] = metadata
        data = await self._state.http.edit_scheduled_event(self.guild_id, self.id, **payload, reason=reason)
        # Build a fresh event from the API response, carrying over the
        # locally cached subscriber list.
        s = ScheduledEvent(state=self._state, data=data)
        s._users = self._users
        return s
async def delete(self, *, reason: Optional[str] = None) -> None:
"""|coro|
Deletes the scheduled event.
Requires :attr:`~Permissions.manage_events` permissions.
Parameters
-----------
reason: Optional[:class:`str`]
The reason for deleting the scheduled event. Shows up on the audit log.
Raises
------
Forbidden
You do not have permissions to delete the scheduled event.
HTTPException
Deleting the scheduled event failed.
"""
await self._state.http.delete_scheduled_event(self.guild_id, self.id, reason=reason)
    async def users(
        self,
        *,
        limit: Optional[int] = None,
        before: Optional[Snowflake] = None,
        after: Optional[Snowflake] = None,
        oldest_first: bool = MISSING,
    ) -> AsyncIterator[User]:
        """Returns an :term:`asynchronous iterator` over the :class:`User`\\s
        subscribed to this event.
        This requires :attr:`Intents.members` to get information about members
        other than yourself.
        Parameters
        -----------
        limit: Optional[:class:`int`]
            The maximum number of users to retrieve. Defaults to the event's
            :attr:`user_count` (i.e. all subscribers).
        before: Optional[:class:`~discord.abc.Snowflake`]
            Retrieve users with an ID lower than this object's ID.
        after: Optional[:class:`~discord.abc.Snowflake`]
            Retrieve users with an ID higher than this object's ID.
        oldest_first: :class:`bool`
            Whether to yield users in ascending ID order; defaults to ``True``
            only when ``after`` is given.
        Raises
        -------
        HTTPException
            Retrieving the members failed.
        Yields
        --------
        :class:`User`
            The subscribed users of this event.
        """
        # Paginate backwards (descending IDs), using ``before`` as the cursor.
        async def _before_strategy(retrieve, before, limit):
            before_id = before.id if before else None
            users = await self._state.http.get_scheduled_event_users(
                self.guild_id, self.id, limit=retrieve, with_member=False, before=before_id
            )
            if users:
                if limit is not None:
                    limit -= len(users)
                before = Object(id=users[-1]['user']['id'])
            return users, before, limit
        # Paginate forwards (ascending IDs), using ``after`` as the cursor.
        async def _after_strategy(retrieve, after, limit):
            after_id = after.id if after else None
            users = await self._state.http.get_scheduled_event_users(
                self.guild_id, self.id, limit=retrieve, with_member=False, after=after_id
            )
            if users:
                if limit is not None:
                    limit -= len(users)
                after = Object(id=users[0]['user']['id'])
            return users, after, limit
        if limit is None:
            limit = self.user_count or None
        if oldest_first is MISSING:
            reverse = after is not None
        else:
            reverse = oldest_first
        # Client-side filter applied when the API-side cursor can only honour
        # one of before/after.
        # NOTE(review): payload IDs are strings per the API, while before.id /
        # after.id are ints — these comparisons look suspect; confirm.
        predicate = None
        if reverse:
            strategy, state = _after_strategy, after
            if before:
                predicate = lambda u: u['user']['id'] < before.id
        else:
            strategy, state = _before_strategy, before
            if after and after != OLDEST_OBJECT:
                predicate = lambda u: u['user']['id'] > after.id
        while True:
            # Fetch at most 100 per request (API page-size cap).
            retrieve = min(100 if limit is None else limit, 100)
            if retrieve < 1:
                return
            data, state, limit = await strategy(retrieve, state, limit)
            # A short page means we've reached the end.
            if len(data) < 100:
                limit = 0
            if reverse:
                data = reversed(data)
            if predicate:
                data = filter(predicate, data)
            users = (self._state.store_user(raw_user['user']) for raw_user in data)
            for user in users:
                yield user
    def _add_user(self, user: User) -> None:
        # Cache a subscriber keyed by user ID.
        self._users[user.id] = user
    def _pop_user(self, user_id: int) -> None:
        # Remove a subscriber; raises KeyError if the user was never cached.
        self._users.pop(user_id)
| 34.482456 | 155 | 0.614551 |
from __future__ import annotations
from datetime import datetime
from typing import TYPE_CHECKING, AsyncIterator, Dict, Optional, Union
from .asset import Asset
from .enums import EventStatus, EntityType, PrivacyLevel, try_enum
from .mixins import Hashable
from .object import Object, OLDEST_OBJECT
from .utils import parse_time, _get_as_snowflake, _bytes_to_base64_data, MISSING
if TYPE_CHECKING:
from .types.scheduled_event import (
GuildScheduledEvent as GuildScheduledEventPayload,
GuildScheduledEventWithUserCount as GuildScheduledEventWithUserCountPayload,
EntityMetadata,
)
from .abc import Snowflake
from .guild import Guild
from .channel import VoiceChannel, StageChannel
from .state import ConnectionState
from .user import User
GuildScheduledEventPayload = Union[GuildScheduledEventPayload, GuildScheduledEventWithUserCountPayload]
__all__ = (
"ScheduledEvent",
)
class ScheduledEvent(Hashable):
__slots__ = (
'_state',
'_users',
'id',
'guild_id',
'name',
'description',
'entity_type',
'entity_id',
'start_time',
'end_time',
'privacy_level',
'status',
'_cover_image',
'user_count',
'creator',
'channel_id',
'location',
)
def __init__(self, *, state: ConnectionState, data: GuildScheduledEventPayload) -> None:
self._state = state
self._users: Dict[int, User] = {}
self._update(data)
def _update(self, data: GuildScheduledEventPayload) -> None:
self.id: int = int(data['id'])
self.guild_id: int = int(data['guild_id'])
self.name: str = data['name']
self.description: Optional[str] = data.get('description')
self.entity_type = try_enum(EntityType, data['entity_type'])
self.entity_id: Optional[int] = _get_as_snowflake(data, 'entity_id')
self.start_time: datetime = parse_time(data['scheduled_start_time'])
self.privacy_level: PrivacyLevel = try_enum(PrivacyLevel, data['status'])
self.status: EventStatus = try_enum(EventStatus, data['status'])
self._cover_image: Optional[str] = data.get('image', None)
self.user_count: int = data.get('user_count', 0)
creator = data.get('creator')
self.creator: Optional[User] = self._state.store_user(creator) if creator else None
self.end_time: Optional[datetime] = parse_time(data.get('scheduled_end_time'))
self.channel_id: Optional[int] = _get_as_snowflake(data, 'channel_id')
metadata = data.get('entity_metadata')
self._unroll_metadata(metadata)
def _unroll_metadata(self, data: Optional[EntityMetadata]):
self.location: Optional[str] = data.get('location') if data else None
@classmethod
def from_creation(cls, *, state: ConnectionState, data: GuildScheduledEventPayload):
creator_id = data.get('creator_id')
self = cls(state=state, data=data)
if creator_id:
self.creator = self._state.get_user(int(creator_id))
def __repr__(self) -> str:
return f'<GuildScheduledEvent id={self.id} name={self.name!r} guild_id={self.guild_id!r} creator={self.creator!r}>'
@property
def cover_image(self) -> Optional[Asset]:
if self._cover_image is None:
return None
return Asset._from_scheduled_event_cover_image(self._state, self.id, self._cover_image)
@property
def guild(self) -> Optional[Guild]:
return self._state._get_guild(self.guild_id)
@property
def channel(self) -> Optional[Union[VoiceChannel, StageChannel]]:
return self.guild.get_channel(self.channel_id)
@property
def url(self):
return f'https://discord.com/events/{self.guild_id}/{self.id}'
async def start(self, *, reason: Optional[str] = None) -> ScheduledEvent:
if self.status is not EventStatus.scheduled:
raise ValueError('This scheduled event is already running.')
return await self.edit(status=EventStatus.active, reason=reason)
async def end(self, *, reason: Optional[str] = None) -> ScheduledEvent:
if self.status is not EventStatus.active:
raise ValueError('This scheduled event is not active.')
return await self.edit(status=EventStatus.ended, reason=reason)
async def cancel(self, *, reason: Optional[str] = None) -> ScheduledEvent:
if self.status is not EventStatus.scheduled:
raise ValueError('This scheduled event is already running.')
return await self.edit(status=EventStatus.cancelled, reason=reason)
async def edit(
self,
*,
name: str = MISSING,
description: str = MISSING,
channel: Optional[Snowflake] = MISSING,
start_time: datetime = MISSING,
end_time: Optional[datetime] = MISSING,
privacy_level: PrivacyLevel = MISSING,
entity_type: EntityType = MISSING,
status: EventStatus = MISSING,
image: bytes = MISSING,
location: str = MISSING,
reason: Optional[str] = None,
) -> ScheduledEvent:
payload = {}
metadata = {}
if name is not MISSING:
payload['name'] = name
if start_time is not MISSING:
if start_time.tzinfo is None:
raise ValueError(
'start_time must be an aware datetime. Consider using discord.utils.utcnow() or datetime.datetime.now().astimezone() for local time.'
)
payload['scheduled_start_time'] = start_time.isoformat()
if description is not MISSING:
payload['description'] = description
if privacy_level is not MISSING:
if not isinstance(privacy_level, PrivacyLevel):
raise TypeError('privacy_level must be of type PrivacyLevel.')
payload['privacy_level'] = privacy_level.value
if status is not MISSING:
if not isinstance(status, EventStatus):
raise TypeError('status must be of type EventStatus')
payload['status'] = status.value
if image is not MISSING:
image_as_str: Optional[str] = _bytes_to_base64_data(image) if image is not None else image
payload['image'] = image_as_str
if entity_type is not MISSING:
if not isinstance(entity_type, EntityType):
raise TypeError('entity_type must be of type EntityType')
payload['entity_type'] = entity_type.value
_entity_type = entity_type or self.entity_type
if _entity_type in (EntityType.stage_instance, EntityType.voice):
if channel is MISSING or channel is None:
raise TypeError('channel must be set when entity_type is voice or stage_instance')
payload['channel_id'] = channel.id
if location not in (MISSING, None):
raise TypeError('location cannot be set when entity_type is voice or stage_instance')
payload['entity_metadata'] = None
else:
if channel not in (MISSING, None):
raise TypeError('channel cannot be set when entity_type is external')
payload['channel_id'] = None
if location is MISSING or location is None:
raise TypeError('location must be set when entity_type is external')
metadata['location'] = location
if end_time is MISSING or end_time is None:
raise TypeError('end_time must be set when entity_type is external')
if end_time is not MISSING:
if end_time is not None:
if end_time.tzinfo is None:
raise ValueError(
'end_time must be an aware datetime. Consider using discord.utils.utcnow() or datetime.datetime.now().astimezone() for local time.'
)
payload['scheduled_end_time'] = end_time.isoformat()
else:
payload['scheduled_end_time'] = end_time
if metadata:
payload['entity_metadata'] = metadata
data = await self._state.http.edit_scheduled_event(self.guild_id, self.id, **payload, reason=reason)
s = ScheduledEvent(state=self._state, data=data)
s._users = self._users
return s
    async def delete(self, *, reason: Optional[str] = None) -> None:
        """Delete this scheduled event via the HTTP API.

        ``reason`` is forwarded to the HTTP call (presumably shown in the
        guild's audit log — TODO confirm against the API docs).
        """
        await self._state.http.delete_scheduled_event(self.guild_id, self.id, reason=reason)
    async def users(
        self,
        *,
        limit: Optional[int] = None,
        before: Optional[Snowflake] = None,
        after: Optional[Snowflake] = None,
        oldest_first: bool = MISSING,
    ) -> AsyncIterator[User]:
        """Asynchronously iterate the users subscribed to this scheduled event.

        Fetches pages of up to 100 users from the HTTP API until ``limit``
        users have been yielded, or a short page signals that no more exist.
        Direction defaults to newest-first unless ``after`` is given or
        ``oldest_first`` is set explicitly.
        """
        async def _before_strategy(retrieve, before, limit):
            # Walk backwards: fetch up to ``retrieve`` users before the cursor,
            # then move the cursor to the last (oldest) user of the page.
            before_id = before.id if before else None
            users = await self._state.http.get_scheduled_event_users(
                self.guild_id, self.id, limit=retrieve, with_member=False, before=before_id
            )
            if users:
                if limit is not None:
                    limit -= len(users)
                before = Object(id=users[-1]['user']['id'])
            return users, before, limit
        async def _after_strategy(retrieve, after, limit):
            # Walk forwards: fetch up to ``retrieve`` users after the cursor,
            # then move the cursor to the first (newest) user of the page.
            after_id = after.id if after else None
            users = await self._state.http.get_scheduled_event_users(
                self.guild_id, self.id, limit=retrieve, with_member=False, after=after_id
            )
            if users:
                if limit is not None:
                    limit -= len(users)
                after = Object(id=users[0]['user']['id'])
            return users, after, limit
        if limit is None:
            # Default to everything the event reports as subscribed, if known.
            limit = self.user_count or None
        if oldest_first is MISSING:
            reverse = after is not None
        else:
            reverse = oldest_first
        predicate = None
        if reverse:
            strategy, state = _after_strategy, after
            if before:
                # NOTE(review): compares the raw payload id with ``before.id``;
                # assumes both are comparable (ints) — confirm payload id type.
                predicate = lambda u: u['user']['id'] < before.id
        else:
            strategy, state = _before_strategy, before
            if after and after != OLDEST_OBJECT:
                predicate = lambda u: u['user']['id'] > after.id
        while True:
            # Never ask the API for more than 100 users at once.
            retrieve = min(100 if limit is None else limit, 100)
            if retrieve < 1:
                return
            data, state, limit = await strategy(retrieve, state, limit)
            # A short page means the API has run out of users: stop after it.
            if len(data) < 100:
                limit = 0
            if reverse:
                data = reversed(data)
            if predicate:
                data = filter(predicate, data)
            users = (self._state.store_user(raw_user['user']) for raw_user in data)
            for user in users:
                yield user
    def _add_user(self, user: User) -> None:
        # Cache a subscriber in the internal user map, keyed by user id.
        self._users[user.id] = user
    def _pop_user(self, user_id: int) -> None:
        # Drop a subscriber from the cache; raises KeyError if it is absent.
        self._users.pop(user_id)
| true | true |
1c32a485711f2f6dcfcfe2f285a60d4af9815af0 | 295 | py | Python | app/rss_feeder_api/tasks.py | RSaab/rss-scraper | 9bf608878e7d08fea6508ae90b27f1c226b313f1 | [
"MIT"
] | null | null | null | app/rss_feeder_api/tasks.py | RSaab/rss-scraper | 9bf608878e7d08fea6508ae90b27f1c226b313f1 | [
"MIT"
] | null | null | null | app/rss_feeder_api/tasks.py | RSaab/rss-scraper | 9bf608878e7d08fea6508ae90b27f1c226b313f1 | [
"MIT"
] | null | null | null | from rss_feeder_api.celery import app
from rss_feeder_api.models import Feed
@app.task
def my_scheduled_job():
    """Celery task entry point: refresh every unflagged feed on each run."""
    print("cron job")
    updateAllFeeds()
def updateAllFeeds():
    """Force-refresh every feed that has not been flagged."""
    for feed in Feed.objects.filter(flagged=False):
        feed.force_update()
    print("Done!")
from rss_feeder_api.models import Feed
@app.task
def my_scheduled_job():
print("cron job")
updateAllFeeds()
def updateAllFeeds():
feeds = Feed.objects.filter(flagged=False)
for feed in feeds:
feed.force_update()
print("Done!") | true | true |
1c32a4a16b01d9fe059907ca4bf7bb7373fd1a9c | 1,592 | py | Python | chevah/txftps/checkers.py | chevah/txftps | 2227c0017299c840e7e9b69022bac4bb2dd38ae4 | [
"BSD-3-Clause"
] | 4 | 2017-04-14T11:22:37.000Z | 2021-06-09T10:53:37.000Z | chevah/txftps/checkers.py | chevah/txftps | 2227c0017299c840e7e9b69022bac4bb2dd38ae4 | [
"BSD-3-Clause"
] | 1 | 2021-10-13T14:17:18.000Z | 2021-10-13T14:17:18.000Z | chevah/txftps/checkers.py | chevah/txftps | 2227c0017299c840e7e9b69022bac4bb2dd38ae4 | [
"BSD-3-Clause"
] | 1 | 2015-06-11T14:07:15.000Z | 2015-06-11T14:07:15.000Z | from zope.interface import implements
from twisted.cred.checkers import ICredentialsChecker
from twisted.internet import defer, error
from chevah.txftps.interfaces import (
IPasswordCredentials,
ISSLCertificateCredentials,
)
class InMemoryPassword(object):
    """Credentials checker backed by an in-memory (username, password) list."""

    implements(ICredentialsChecker)

    credentialInterfaces = IPasswordCredentials,

    def __init__(self, database):
        # ``database`` is an iterable of (username, password) pairs.
        self._database = database

    def requestAvatarId(self, credentials):
        """Return the avatar id (the username) on success.

        Empty passwords and missing (username, password) pairs fail with
        ``error.UnauthorizedLogin``.

        Bug fix: a password mismatch previously executed
        ``defer.fail(error.UnauthorizedLogin())`` without returning it,
        creating a discarded Deferred whose failure was never handled
        (unhandled-error log noise). The dangling call is removed; scanning
        continues and the final ``defer.fail`` covers all non-matches, which
        preserves the original observable control flow.
        """
        if not credentials.password:
            return defer.fail(error.UnauthorizedLogin())
        for username, password in self._database:
            if username == credentials.username and password == credentials.password:
                return username
        return defer.fail(error.UnauthorizedLogin())
class SSLCertificateChecker(object):
    """Authenticate a user against the common name of an SSL certificate."""

    implements(ICredentialsChecker)

    credentialInterfaces = ISSLCertificateCredentials,

    def requestAvatarId(self, credentials):
        """Return the username when it equals the certificate's common name.

        Fails with ``error.UnauthorizedLogin`` when the certificate, its
        subject, or the common name is missing, or when the name differs.
        """
        certificate = credentials.certificate
        if not certificate:
            return defer.fail(error.UnauthorizedLogin())
        subject = certificate.get_subject()
        if not subject:
            return defer.fail(error.UnauthorizedLogin())
        common_name = subject.commonName
        if common_name is None or credentials.username != common_name:
            return defer.fail(error.UnauthorizedLogin())
        return credentials.username
| 28.945455 | 57 | 0.67902 | from zope.interface import implements
from twisted.cred.checkers import ICredentialsChecker
from twisted.internet import defer, error
from chevah.txftps.interfaces import (
IPasswordCredentials,
ISSLCertificateCredentials,
)
class InMemoryPassword(object):
implements(ICredentialsChecker)
credentialInterfaces = IPasswordCredentials,
def __init__(self, database):
self._database = database
def requestAvatarId(self, credentials):
if not credentials.password:
return defer.fail(error.UnauthorizedLogin())
for username, password in self._database:
if username == credentials.username:
if password == credentials.password:
return username
else:
defer.fail(error.UnauthorizedLogin())
return defer.fail(error.UnauthorizedLogin())
class SSLCertificateChecker(object):
implements(ICredentialsChecker)
credentialInterfaces = ISSLCertificateCredentials,
def requestAvatarId(self, credentials):
if not credentials.certificate:
return defer.fail(error.UnauthorizedLogin())
subject = credentials.certificate.get_subject()
if not subject:
return defer.fail(error.UnauthorizedLogin())
common_name = subject.commonName
if common_name is None:
return defer.fail(error.UnauthorizedLogin())
if credentials.username == common_name:
return credentials.username
else:
return defer.fail(error.UnauthorizedLogin())
| true | true |
1c32a557a767c625830d86fa0aa28718e3690cba | 748 | py | Python | basiclearning/filter_map_reduce_zip.py | GSGSS/git_learning | ceeabfe88edf2ecbfaf58eb42ca878a7a0910c4c | [
"MIT"
] | null | null | null | basiclearning/filter_map_reduce_zip.py | GSGSS/git_learning | ceeabfe88edf2ecbfaf58eb42ca878a7a0910c4c | [
"MIT"
] | null | null | null | basiclearning/filter_map_reduce_zip.py | GSGSS/git_learning | ceeabfe88edf2ecbfaf58eb42ca878a7a0910c4c | [
"MIT"
] | null | null | null | from functools import reduce
# filter 满足条件返回
a = [1, 2, 3, 4, 5, 6, 7, 8, 9]
b = 5
slict = filter(lambda x: x < b, a)
print(type(slict))
print(list(slict))
# map 依次操作每一个参数
a = [1, 2, 3]
map1 = map(lambda x: x+1, a)
print(type(map1))
print(list(map1))
b = [4, 5, 6]
map2 = map(lambda x, y: x+y, a, b)
print(type(map2))
print(list(map2))
# reduce 用初始值跟列表中每一项循环运算得出一个结果
reduce1 = reduce(lambda x, y: x+y, [3, 4, 5], 1)
print(reduce1)
# zip 重新组合数组
zip1 = zip((1, 2, 3), (4, 5, 6))
print(type(zip1))
for i in zip1:
print(i)
# 用zip对调字典的key和value
# !!!注意,返回的zip对象是一个迭代器,只能迭代一次
dic1 = {'a': 'aa', 'b': 'bb', 'c': 'cc'}
zip2 = zip(dic1.values(), dic1.keys())
dic2 = dict(zip2)
print(dic2)
# 再次调用zip对象会发现迭代已经完成,无法继续返回结果
dic2 = dict(zip2)
print(dic2)
| 19.684211 | 48 | 0.627005 | from functools import reduce
a = [1, 2, 3, 4, 5, 6, 7, 8, 9]
b = 5
slict = filter(lambda x: x < b, a)
print(type(slict))
print(list(slict))
a = [1, 2, 3]
map1 = map(lambda x: x+1, a)
print(type(map1))
print(list(map1))
b = [4, 5, 6]
map2 = map(lambda x, y: x+y, a, b)
print(type(map2))
print(list(map2))
reduce1 = reduce(lambda x, y: x+y, [3, 4, 5], 1)
print(reduce1)
zip1 = zip((1, 2, 3), (4, 5, 6))
print(type(zip1))
for i in zip1:
print(i)
dic1 = {'a': 'aa', 'b': 'bb', 'c': 'cc'}
zip2 = zip(dic1.values(), dic1.keys())
dic2 = dict(zip2)
print(dic2)
dic2 = dict(zip2)
print(dic2)
| true | true |
1c32a6265cea90b2736089890ec05093424577c9 | 2,944 | py | Python | homework04/tests/test_tree.py | kristinazagaynova/programming-kristinazagaynova | d154a27f927d6da0135bffbd75330d4da8039f98 | [
"Unlicense"
] | null | null | null | homework04/tests/test_tree.py | kristinazagaynova/programming-kristinazagaynova | d154a27f927d6da0135bffbd75330d4da8039f98 | [
"Unlicense"
] | null | null | null | homework04/tests/test_tree.py | kristinazagaynova/programming-kristinazagaynova | d154a27f927d6da0135bffbd75330d4da8039f98 | [
"Unlicense"
] | 1 | 2021-02-20T09:36:40.000Z | 2021-02-20T09:36:40.000Z | import pathlib
import stat
import time
import unittest
from unittest.mock import patch
from pyfakefs.fake_filesystem_unittest import TestCase
import pyvcs
from pyvcs.index import read_index, update_index
from pyvcs.repo import repo_create
from pyvcs.tree import commit_tree, write_tree
@unittest.skipIf(pyvcs.__version_info__ < (0, 5, 0), "Нужна версия пакета 0.5.0 и выше")
class WriteTreeTestCase(TestCase):
    """Tests for pyvcs.tree.write_tree against a pyfakefs fake filesystem."""

    def setUp(self):
        # pyfakefs replaces the real filesystem for the duration of the test.
        self.setUpPyfakefs()

    def test_write_tree(self):
        """A single staged file produces the expected tree object sha."""
        gitdir = repo_create(".")
        animals = pathlib.Path("animals.txt")
        # Regular file with permission bits 0644.
        mode100644 = stat.S_IFREG | stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
        self.fs.create_file(
            animals,
            contents="Big blue basilisks bawl in the basement\n",
            st_mode=mode100644,
        )
        update_index(gitdir, [animals], write=True)
        entries = read_index(gitdir)
        sha = write_tree(gitdir, entries)
        self.assertEqual("dc6b8ea09fb7573a335c5fb953b49b85bb6ca985", sha)

    def test_write_tree_subdirs(self):
        """Staged files in subdirectories yield nested tree objects."""
        gitdir = repo_create(".")
        mode100644 = stat.S_IFREG | stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
        quote = pathlib.Path("quote.txt")
        self.fs.create_file(quote, contents="that's what she said", st_mode=mode100644)
        letters = pathlib.Path("alphabeta") / "letters.txt"
        self.fs.create_file(letters, contents="abcdefg", st_mode=mode100644)
        digits = pathlib.Path("numbers") / "digits.txt"
        self.fs.create_file(digits, contents="1234567890", st_mode=mode100644)
        update_index(gitdir, [quote, letters, digits], write=True)
        entries = read_index(gitdir)
        sha = write_tree(gitdir, entries)
        self.assertEqual("a9cde03408c68cbb205b038140b4c3a38aa1d01a", sha)
        # Each subdirectory must have been written out as its own tree object.
        alphabeta_tree_sha = "7926bf494dcdb82261e1ca113116610f8d05470b"
        alphabeta_tree_obj = gitdir / "objects" / alphabeta_tree_sha[:2] / alphabeta_tree_sha[2:]
        self.assertTrue(alphabeta_tree_obj.exists())
        numbers_tree_sha = "32ad3641a773ce34816dece1ce63cc24c8a514d0"
        numbers_tree_obj = gitdir / "objects" / numbers_tree_sha[:2] / numbers_tree_sha[2:]
        self.assertTrue(numbers_tree_obj.exists())
@unittest.skipIf(pyvcs.__version_info__ < (0, 6, 0), "Нужна версия пакета 0.6.0 и выше")
class CommitTreeTestCase(TestCase):
    """Tests for pyvcs.tree.commit_tree with time pinned for a stable sha."""

    def setUp(self):
        self.setUpPyfakefs()

    # Pin timezone and wall clock so the commit hash is deterministic.
    @patch("time.timezone", -10800)
    @patch("time.localtime", return_value=time.localtime(1593638381))
    def test_commit_tree(self, localtime):
        """Committing a known tree with fixed time/author gives a fixed sha."""
        gitdir = repo_create(".")
        tree_sha = "dc6b8ea09fb7573a335c5fb953b49b85bb6ca985"
        author = "Dmitriy Sorokin <Dementiy@yandex.ru>"
        message = "initial commit"
        commit_sha = commit_tree(gitdir, tree_sha, message, parent=None, author=author)
        self.assertEqual("f60952d53906d8b2e6f8567762f86fbe7ca4ac65", commit_sha)
| 41.464789 | 97 | 0.699728 | import pathlib
import stat
import time
import unittest
from unittest.mock import patch
from pyfakefs.fake_filesystem_unittest import TestCase
import pyvcs
from pyvcs.index import read_index, update_index
from pyvcs.repo import repo_create
from pyvcs.tree import commit_tree, write_tree
@unittest.skipIf(pyvcs.__version_info__ < (0, 5, 0), "Нужна версия пакета 0.5.0 и выше")
class WriteTreeTestCase(TestCase):
def setUp(self):
self.setUpPyfakefs()
def test_write_tree(self):
gitdir = repo_create(".")
animals = pathlib.Path("animals.txt")
mode100644 = stat.S_IFREG | stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
self.fs.create_file(
animals,
contents="Big blue basilisks bawl in the basement\n",
st_mode=mode100644,
)
update_index(gitdir, [animals], write=True)
entries = read_index(gitdir)
sha = write_tree(gitdir, entries)
self.assertEqual("dc6b8ea09fb7573a335c5fb953b49b85bb6ca985", sha)
def test_write_tree_subdirs(self):
gitdir = repo_create(".")
mode100644 = stat.S_IFREG | stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
quote = pathlib.Path("quote.txt")
self.fs.create_file(quote, contents="that's what she said", st_mode=mode100644)
letters = pathlib.Path("alphabeta") / "letters.txt"
self.fs.create_file(letters, contents="abcdefg", st_mode=mode100644)
digits = pathlib.Path("numbers") / "digits.txt"
self.fs.create_file(digits, contents="1234567890", st_mode=mode100644)
update_index(gitdir, [quote, letters, digits], write=True)
entries = read_index(gitdir)
sha = write_tree(gitdir, entries)
self.assertEqual("a9cde03408c68cbb205b038140b4c3a38aa1d01a", sha)
alphabeta_tree_sha = "7926bf494dcdb82261e1ca113116610f8d05470b"
alphabeta_tree_obj = gitdir / "objects" / alphabeta_tree_sha[:2] / alphabeta_tree_sha[2:]
self.assertTrue(alphabeta_tree_obj.exists())
numbers_tree_sha = "32ad3641a773ce34816dece1ce63cc24c8a514d0"
numbers_tree_obj = gitdir / "objects" / numbers_tree_sha[:2] / numbers_tree_sha[2:]
self.assertTrue(numbers_tree_obj.exists())
@unittest.skipIf(pyvcs.__version_info__ < (0, 6, 0), "Нужна версия пакета 0.6.0 и выше")
class CommitTreeTestCase(TestCase):
def setUp(self):
self.setUpPyfakefs()
@patch("time.timezone", -10800)
@patch("time.localtime", return_value=time.localtime(1593638381))
def test_commit_tree(self, localtime):
gitdir = repo_create(".")
tree_sha = "dc6b8ea09fb7573a335c5fb953b49b85bb6ca985"
author = "Dmitriy Sorokin <Dementiy@yandex.ru>"
message = "initial commit"
commit_sha = commit_tree(gitdir, tree_sha, message, parent=None, author=author)
self.assertEqual("f60952d53906d8b2e6f8567762f86fbe7ca4ac65", commit_sha)
| true | true |
1c32a6ea55192b9a0ff8c4aabefaed68e451b27c | 4,128 | py | Python | label_maker_dask/utils.py | developmentseed/label-maker-dask | 700d2fa224ac674171fd8c4f8709a67f862eac36 | [
"MIT"
] | 5 | 2021-10-09T03:25:46.000Z | 2021-10-20T07:17:15.000Z | label_maker_dask/utils.py | developmentseed/label-maker-dask | 700d2fa224ac674171fd8c4f8709a67f862eac36 | [
"MIT"
] | 1 | 2021-10-15T12:39:16.000Z | 2021-10-15T13:51:01.000Z | label_maker_dask/utils.py | developmentseed/label-maker-dask | 700d2fa224ac674171fd8c4f8709a67f862eac36 | [
"MIT"
] | null | null | null | # pylint: disable=unused-argument
"""Provide utility functions"""
import os
from io import BytesIO
from urllib.parse import parse_qs
import numpy as np
import rasterio
import requests # type: ignore
from mercantile import Tile, bounds
from PIL import Image, ImageColor
from rasterio.crs import CRS
from rasterio.warp import transform_bounds
from rio_tiler.io import COGReader
WGS84_CRS = CRS.from_epsg(4326)
class SafeDict(dict):
    """dict for str.format_map that re-emits missing keys as placeholders.

    Looking up an absent key returns the literal ``"{key}"`` text, so URL
    templates keep their unfilled fields instead of raising KeyError.
    """

    def __missing__(self, key):
        # Rebuild the placeholder text for any key we do not hold.
        return "".join(("{", key, "}"))
def url(tile: Tile, imagery):
    """Fill an imagery URL template's {x}/{y}/{z} fields from the tile."""
    return imagery.format(z=tile.z, x=tile.x, y=tile.y)
def class_match(ml_type, label, i):
    """Report whether (or how strongly) ``label`` contains class ``i``.

    classification: truthiness of the i-th label entry.
    object-detection: number of bounding boxes whose class index equals i.
    segmentation: number of mask pixels equal to i.
    Any other ml_type returns None.
    """
    if ml_type == "classification":
        return label[i] > 0
    if ml_type == "object-detection":
        # A bounding box is [xmin, ymin, xmax, ymax, class_index].
        return sum(1 for bb in label if bb[4] == i)
    if ml_type == "segmentation":
        return np.count_nonzero(label == i)
    return None
def download_tile_tms(tile: Tile, imagery):
    """Download a satellite image tile from a tms endpoint"""
    # Substitute an access token from the environment into the URL template
    # if one is configured; SafeDict leaves other placeholders intact.
    if os.environ.get("ACCESS_TOKEN"):
        token = os.environ.get("ACCESS_TOKEN")
        imagery = imagery.format_map(SafeDict(ACCESS_TOKEN=token))
    r = requests.get(url(tile, imagery))
    # Decode the response body as an image and return it as a numpy array.
    return np.array(Image.open(BytesIO(r.content)))
def get_tile_tif(tile, imagery):
    """
    Read a GeoTIFF with a window corresponding to a TMS tile
    """
    with COGReader(imagery) as image:
        img = image.tile(*tile)
    # rio-tiler returns (bands, rows, cols); reorder to (rows, cols, bands).
    return np.moveaxis(img.data, 0, 2)
def get_tile_wms(tile, imagery):
    """
    Read a WMS endpoint with query parameters corresponding to a TMS tile

    Converts the tile boundaries to the spatial/coordinate reference system
    (SRS or CRS) specified by the WMS query parameter.
    """
    # retrieve the necessary parameters from the query string
    # NOTE(review): raises TypeError if the URL lacks a version/crs/srs
    # parameter (``get(...)`` returns None) — assumed to always be present.
    query_dict = parse_qs(imagery.lower())
    wms_version = query_dict.get("version")[0]
    if wms_version == "1.3.0":
        wms_srs = query_dict.get("crs")[0]
    else:
        wms_srs = query_dict.get("srs")[0]
    # find our tile bounding box
    bound = bounds(*[int(t) for t in tile])
    xmin, ymin, xmax, ymax = transform_bounds(
        WGS84_CRS, CRS.from_string(wms_srs), *bound, densify_pts=21
    )
    # project the tile bounding box from lat/lng to WMS SRS
    # (WMS 1.3.0 flips the axis order, hence the swapped y/x pairs)
    bbox = (
        [ymin, xmin, ymax, xmax] if wms_version == "1.3.0" else [xmin, ymin, xmax, ymax]
    )
    # request the image with the transformed bounding box and save
    wms_url = imagery.replace("{bbox}", ",".join([str(b) for b in bbox]))
    r = requests.get(wms_url)
    return np.array(Image.open(BytesIO(r.content)))
def is_tif(imagery):
    """Determine if an imagery path leads to a valid tif"""
    valid_drivers = ["GTiff", "VRT"]
    try:
        with rasterio.open(imagery) as test_ds:
            if test_ds.meta["driver"] not in valid_drivers:
                # rasterio can open path, but it is not a tif
                valid_tif = False
            else:
                valid_tif = True
    except rasterio.errors.RasterioIOError:
        # rasterio cannot open the path. this is the case for a
        # tile service
        valid_tif = False
    return valid_tif
def is_wms(imagery):
    """Determine if an imagery path is a WMS endpoint (has a {bbox} slot)."""
    return imagery.find("{bbox}") >= 0
def get_image_function(imagery):
    """Return the correct image downloading function based on the imagery string"""
    # Order matters: a readable GeoTIFF wins, then a WMS template;
    # anything else is treated as a TMS tile endpoint.
    if is_tif(imagery):
        return get_tile_tif
    if is_wms(imagery):
        return get_tile_wms
    return download_tile_tms
# Taken from https://github.com/CartoDB/CartoColor/blob/master/cartocolor.js#L1633-L1733
colors = ["#DDCC77", "#CC6677", "#117733", "#332288", "#AA4499", "#88CCEE"]
def class_color(c):
    """Return 3-element tuple containing rgb values for a given class"""
    if c == 0:
        return (0, 0, 0)  # background class
    # Cycle through the palette so indices beyond its length reuse colors.
    return ImageColor.getrgb(colors[c % len(colors)])
| 29.697842 | 88 | 0.662306 |
import os
from io import BytesIO
from urllib.parse import parse_qs
import numpy as np
import rasterio
import requests
from mercantile import Tile, bounds
from PIL import Image, ImageColor
from rasterio.crs import CRS
from rasterio.warp import transform_bounds
from rio_tiler.io import COGReader
WGS84_CRS = CRS.from_epsg(4326)
class SafeDict(dict):
def __missing__(self, key):
return "{" + key + "}"
def url(tile: Tile, imagery):
return imagery.format(x=tile.x, y=tile.y, z=tile.z)
def class_match(ml_type, label, i):
if ml_type == "classification":
return label[i] > 0
elif ml_type == "object-detection":
return len(list(filter(lambda bb: bb[4] == i, label)))
elif ml_type == "segmentation":
return np.count_nonzero(label == i)
return None
def download_tile_tms(tile: Tile, imagery):
if os.environ.get("ACCESS_TOKEN"):
token = os.environ.get("ACCESS_TOKEN")
imagery = imagery.format_map(SafeDict(ACCESS_TOKEN=token))
r = requests.get(url(tile, imagery))
return np.array(Image.open(BytesIO(r.content)))
def get_tile_tif(tile, imagery):
with COGReader(imagery) as image:
img = image.tile(*tile)
return np.moveaxis(img.data, 0, 2)
def get_tile_wms(tile, imagery):
query_dict = parse_qs(imagery.lower())
wms_version = query_dict.get("version")[0]
if wms_version == "1.3.0":
wms_srs = query_dict.get("crs")[0]
else:
wms_srs = query_dict.get("srs")[0]
bound = bounds(*[int(t) for t in tile])
xmin, ymin, xmax, ymax = transform_bounds(
WGS84_CRS, CRS.from_string(wms_srs), *bound, densify_pts=21
)
bbox = (
[ymin, xmin, ymax, xmax] if wms_version == "1.3.0" else [xmin, ymin, xmax, ymax]
)
wms_url = imagery.replace("{bbox}", ",".join([str(b) for b in bbox]))
r = requests.get(wms_url)
return np.array(Image.open(BytesIO(r.content)))
def is_tif(imagery):
valid_drivers = ["GTiff", "VRT"]
try:
with rasterio.open(imagery) as test_ds:
if test_ds.meta["driver"] not in valid_drivers:
valid_tif = False
else:
valid_tif = True
except rasterio.errors.RasterioIOError:
valid_tif = False
return valid_tif
def is_wms(imagery):
return "{bbox}" in imagery
def get_image_function(imagery):
if is_tif(imagery):
return get_tile_tif
if is_wms(imagery):
return get_tile_wms
return download_tile_tms
#DDCC77", "#CC6677", "#117733", "#332288", "#AA4499", "#88CCEE"]
def class_color(c):
if c == 0:
return (0, 0, 0)
return ImageColor.getrgb(colors[c % len(colors)])
| true | true |
1c32a743ab53fee05dd8f26beea9f33a973416ba | 2,751 | py | Python | zimsoap/utils.py | emillion/zimsoap | d1ea2eb4d50f263c9a16e5549af03f1eff3e295e | [
"Apache-2.0"
] | 11 | 2015-02-15T23:52:56.000Z | 2021-05-06T18:05:24.000Z | zimsoap/utils.py | emillion/zimsoap | d1ea2eb4d50f263c9a16e5549af03f1eff3e295e | [
"Apache-2.0"
] | 25 | 2015-01-14T11:27:51.000Z | 2016-09-07T14:06:52.000Z | zimsoap/utils.py | emillion/zimsoap | d1ea2eb4d50f263c9a16e5549af03f1eff3e295e | [
"Apache-2.0"
] | 10 | 2015-08-12T14:45:17.000Z | 2021-12-08T23:40:35.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
""" Misc tool functions """
import pythonzimbra
import pythonzimbra.tools.xmlserializer
import re
import hmac
import hashlib
from xml.dom import minidom
# Anchored with \Z so the whole string must be a UUID; previously re.match
# only anchored at the start, so trailing garbage after a valid UUID passed.
re_zuuid = re.compile(r'[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}\Z')


def is_zuuid(s):
    """ Is it a zimbraUUID ?

    example zimbra UUID : d78fd9c9-f000-440b-bce6-ea938d40fa2d

    Returns a truthy match object when the whole string is a lowercase
    hex UUID, otherwise None.
    """
    return re_zuuid.match(s)
def build_preauth_str(preauth_key, account_name, timestamp, expires,
                      admin=False):
    """Build the Zimbra preauth string and return its hex HMAC-SHA1.

    The string is ``account|name|expires|timestamp`` (with an extra ``1``
    after the account for admin preauth), signed with the domain preauth
    key. Spec and examples: http://wiki.zimbra.com/wiki/Preauth
    """
    parts = [account_name]
    if admin:
        parts.append('1')
    parts.extend(['name', str(expires), str(timestamp)])
    s = '|'.join(parts)
    return hmac.new(preauth_key.encode('utf-8'), s.encode('utf-8'),
                    hashlib.sha1).hexdigest()
def wrap_in_cdata(s):
    """Wrap *s* in an XML CDATA section."""
    return '<![CDATA[' + s + ']]>'
def as_list(obj):
    """Return *obj* unchanged if it is a list/tuple, else wrap it in a list."""
    if not isinstance(obj, (list, tuple)):
        return [obj]
    return obj
def get_content(obj):
    """Normalize a pythonzimbra tag value.

    Tag content sometimes arrives wrapped as ``{'_content': value}`` and
    sometimes as the bare value; always return the bare value.
    """
    return obj['_content'] if isinstance(obj, dict) else obj
def auto_type(s):
    """Convert a raw XML response value to a Python base object.

    Booleans pass through, None becomes '', the literal strings
    'TRUE'/'FALSE' become booleans, numeric strings become int or float,
    and strings starting with '+' (telephone numbers) stay strings.
    Anything unparsable is returned unchanged.
    """
    if isinstance(s, bool):
        return s
    if s is None:
        return ''
    if s == 'TRUE':
        return True
    if s == 'FALSE':
        return False
    try:
        # telephone numbers may be wrongly interpretted as ints
        if s.startswith('+'):
            return s
        return int(s)
    except ValueError:
        try:
            return float(s)
        except ValueError:
            return s
def auto_untype(arg):
    """The opposite of auto_type: map Python booleans back to the XML
    strings 'TRUE'/'FALSE'; everything else passes through unchanged."""
    if isinstance(arg, bool):
        return 'TRUE' if arg else 'FALSE'
    return arg
def xml_str_to_dict(s):
    """ Transforms an XML string it to python-zimbra dict format

    For format, see:
    https://github.com/Zimbra-Community/python-zimbra/blob/master/README.md

    :param: a string, containing XML
    :returns: a dict, with python-zimbra format
    """
    # Parse with minidom, then hand the root element to python-zimbra's
    # serializer to get its nested-dict representation.
    xml = minidom.parseString(s)
    return pythonzimbra.tools.xmlserializer.dom_to_dict(xml.firstChild)
| 24.345133 | 77 | 0.605598 |
from __future__ import unicode_literals
import pythonzimbra
import pythonzimbra.tools.xmlserializer
import re
import hmac
import hashlib
from xml.dom import minidom
re_zuuid = re.compile(r'[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}')
def is_zuuid(s):
return re_zuuid.match(s)
def build_preauth_str(preauth_key, account_name, timestamp, expires,
admin=False):
if admin:
s = '{0}|1|name|{1}|{2}'.format(account_name, expires, timestamp)
else:
s = '{0}|name|{1}|{2}'.format(account_name, expires, timestamp)
return hmac.new(preauth_key.encode('utf-8'), s.encode('utf-8'),
hashlib.sha1).hexdigest()
def wrap_in_cdata(s):
return "<![CDATA[{0}]]>".format(s)
def as_list(obj):
if isinstance(obj, (list, tuple)):
return obj
else:
return [obj]
def get_content(obj):
if isinstance(obj, dict):
return obj['_content']
else:
return obj
def auto_type(s):
if isinstance(s, bool):
return s
elif s is None:
return ''
elif s == 'TRUE':
return True
elif s == 'FALSE':
return False
else:
try:
try:
if s.startswith('+'):
return s
else:
return int(s)
except ValueError:
return float(s)
except ValueError:
return s
def auto_untype(arg):
if arg is True:
return 'TRUE'
elif arg is False:
return 'FALSE'
else:
return arg
def xml_str_to_dict(s):
xml = minidom.parseString(s)
return pythonzimbra.tools.xmlserializer.dom_to_dict(xml.firstChild)
| true | true |
1c32a8a2a1ea62504b0b0a48f589f3eb0e8f8376 | 222 | py | Python | generalapiwrapper/test_SteamworksWrapper.py | ECorpron/APIWrapper | 8fddd5ba9256c56d529faca09fd7e09fcd6a844e | [
"CC0-1.0"
] | null | null | null | generalapiwrapper/test_SteamworksWrapper.py | ECorpron/APIWrapper | 8fddd5ba9256c56d529faca09fd7e09fcd6a844e | [
"CC0-1.0"
] | null | null | null | generalapiwrapper/test_SteamworksWrapper.py | ECorpron/APIWrapper | 8fddd5ba9256c56d529faca09fd7e09fcd6a844e | [
"CC0-1.0"
] | null | null | null | # As mentioned in TestAPIWrapper, I am not familiar with testing api calls
# so this is something that I would like to add in the future.
class TestSteamworksWrapper():
def testGetGameAchievements(self):
pass
| 31.714286 | 74 | 0.752252 |
class TestSteamworksWrapper():
def testGetGameAchievements(self):
pass
| true | true |
1c32aa6e6005e89cd1da77d33f4d806ef0817224 | 1,058 | py | Python | tests/integration/installation_and_upgrade/test_data_consistancy.py | AlexTalker/integrated-manager-for-lustre | 251099e5c776f3c1898af50bb7cc77924c7cf7c7 | [
"MIT"
] | 1 | 2021-02-08T16:59:14.000Z | 2021-02-08T16:59:14.000Z | tests/integration/installation_and_upgrade/test_data_consistancy.py | AlexTalker/integrated-manager-for-lustre | 251099e5c776f3c1898af50bb7cc77924c7cf7c7 | [
"MIT"
] | null | null | null | tests/integration/installation_and_upgrade/test_data_consistancy.py | AlexTalker/integrated-manager-for-lustre | 251099e5c776f3c1898af50bb7cc77924c7cf7c7 | [
"MIT"
] | null | null | null | from tests.integration.installation_and_upgrade.test_installation_and_upgrade import TestInstallationAndUpgrade
class TestAllEndPoints(TestInstallationAndUpgrade):
    """Smoke-test that every listed API end point can still be read."""

    def test_access_all_data_all_endpoints(self):
        """ Fetch all the data from all the end points """
        # Fix: ``import sys`` used to be re-executed inside the loop body on
        # every iteration; hoist it to the top of the method.
        import sys

        # Some end points just can't be fetched so we have to ignore them.
        end_point_exceptions = [
            "/api/help/",
            "/api/test_host/",
            "/api/system_status/",
            "/api/updates_available/",
            "/api/session/",
            "/api/action/",
        ]
        end_points = self.get_json_by_uri("/api/", args={"limit": 0})
        for end_point in end_points.values():
            if end_point["list_endpoint"] in end_point_exceptions:
                continue
            # Progress goes to stderr so it shows even with captured stdout.
            sys.stderr.write("\nReading endpoint %s\n" % end_point["list_endpoint"])
            self.get_json_by_uri(end_point["list_endpoint"], args={"limit": 0})
            sys.stderr.write("\nRead endpoint %s\n" % end_point["list_endpoint"])
| 39.185185 | 111 | 0.620983 | from tests.integration.installation_and_upgrade.test_installation_and_upgrade import TestInstallationAndUpgrade
class TestAllEndPoints(TestInstallationAndUpgrade):
def test_access_all_data_all_endpoints(self):
end_point_exceptions = [
"/api/help/",
"/api/test_host/",
"/api/system_status/",
"/api/updates_available/",
"/api/session/",
"/api/action/",
]
end_points = self.get_json_by_uri("/api/", args={"limit": 0})
for end_point in end_points.values():
if end_point["list_endpoint"] not in end_point_exceptions:
import sys
sys.stderr.write("\nReading endpoint %s\n" % end_point["list_endpoint"])
self.get_json_by_uri(end_point["list_endpoint"], args={"limit": 0})
sys.stderr.write("\nRead endpoint %s\n" % end_point["list_endpoint"])
| true | true |
1c32ab045b8962e787b6429790061fb8695b5e7f | 82,515 | py | Python | tests/expressions/tests.py | coppfimm/django | 915ac165763cad3fc6a4aff9bc42ac8607b46c4d | [
"CNRI-Python-GPL-Compatible",
"BSD-3-Clause"
] | null | null | null | tests/expressions/tests.py | coppfimm/django | 915ac165763cad3fc6a4aff9bc42ac8607b46c4d | [
"CNRI-Python-GPL-Compatible",
"BSD-3-Clause"
] | null | null | null | tests/expressions/tests.py | coppfimm/django | 915ac165763cad3fc6a4aff9bc42ac8607b46c4d | [
"CNRI-Python-GPL-Compatible",
"BSD-3-Clause"
] | null | null | null | import datetime
import pickle
import unittest
import uuid
from collections import namedtuple
from copy import deepcopy
from decimal import Decimal
from unittest import mock
from django.core.exceptions import FieldError
from django.db import DatabaseError, NotSupportedError, connection
from django.db.models import (
AutoField, Avg, BinaryField, BooleanField, Case, CharField, Count,
DateField, DateTimeField, DecimalField, DurationField, Exists, Expression,
ExpressionList, ExpressionWrapper, F, FloatField, Func, IntegerField, Max,
Min, Model, OrderBy, OuterRef, Q, StdDev, Subquery, Sum, TimeField,
UUIDField, Value, Variance, When,
)
from django.db.models.expressions import (
Col, Combinable, CombinedExpression, RawSQL, Ref,
)
from django.db.models.functions import (
Coalesce, Concat, Left, Length, Lower, Substr, Upper,
)
from django.db.models.sql import constants
from django.db.models.sql.datastructures import Join
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from django.test.utils import (
Approximate, CaptureQueriesContext, isolate_apps, register_lookup,
)
from django.utils.functional import SimpleLazyObject
from .models import (
UUID, UUIDPK, Company, Employee, Experiment, Manager, Number,
RemoteEmployee, Result, SimulationRun, Time,
)
class BasicExpressionsTests(TestCase):
    @classmethod
    def setUpTestData(cls):
        """Create three companies (each with a CEO) shared by every test."""
        cls.example_inc = Company.objects.create(
            name="Example Inc.", num_employees=2300, num_chairs=5,
            ceo=Employee.objects.create(firstname="Joe", lastname="Smith", salary=10)
        )
        cls.foobar_ltd = Company.objects.create(
            name="Foobar Ltd.", num_employees=3, num_chairs=4, based_in_eu=True,
            ceo=Employee.objects.create(firstname="Frank", lastname="Meyer", salary=20)
        )
        cls.max = Employee.objects.create(firstname='Max', lastname='Mustermann', salary=30)
        cls.gmbh = Company.objects.create(name='Test GmbH', num_employees=32, num_chairs=1, ceo=cls.max)
    def setUp(self):
        # Deterministically ordered values() queryset reused by several tests.
        self.company_query = Company.objects.values(
            "name", "num_employees", "num_chairs"
        ).order_by(
            "name", "num_employees", "num_chairs"
        )
    def test_annotate_values_aggregate(self):
        """An F() annotation can feed an aggregate over a values() queryset."""
        companies = Company.objects.annotate(
            salaries=F('ceo__salary'),
        ).values('num_employees', 'salaries').aggregate(
            result=Sum(
                F('salaries') + F('num_employees'),
                output_field=IntegerField()
            ),
        )
        self.assertEqual(companies['result'], 2395)
    def test_annotate_values_filter(self):
        """A RawSQL annotation can be used in filter()."""
        companies = Company.objects.annotate(
            foo=RawSQL('%s', ['value']),
        ).filter(foo='value').order_by('name')
        self.assertSequenceEqual(
            companies,
            [self.example_inc, self.foobar_ltd, self.gmbh],
        )
    def test_annotate_values_count(self):
        """count() works on a queryset carrying a RawSQL annotation."""
        companies = Company.objects.annotate(foo=RawSQL('%s', ['value']))
        self.assertEqual(companies.count(), 3)
    @skipUnlessDBFeature('supports_boolean_expr_in_select_clause')
    def test_filtering_on_annotate_that_uses_q(self):
        """A Q object wrapped as a BooleanField annotation is filterable."""
        self.assertEqual(
            Company.objects.annotate(
                num_employees_check=ExpressionWrapper(Q(num_employees__gt=3), output_field=BooleanField())
            ).filter(num_employees_check=True).count(),
            2,
        )
    def test_filtering_on_q_that_is_boolean(self):
        """A boolean-wrapped Q can be passed positionally to filter()."""
        self.assertEqual(
            Company.objects.filter(
                ExpressionWrapper(Q(num_employees__gt=3), output_field=BooleanField())
            ).count(),
            2,
        )
    def test_filtering_on_rawsql_that_is_boolean(self):
        """A boolean RawSQL expression can be passed positionally to filter()."""
        self.assertEqual(
            Company.objects.filter(
                RawSQL('num_employees > %s', (3,), output_field=BooleanField()),
            ).count(),
            2,
        )
    def test_filter_inter_attribute(self):
        """Two fields of the same row can be compared with F() in filter()."""
        # We can filter on attribute relationships on same model obj, e.g.
        # find companies where the number of employees is greater
        # than the number of chairs.
        self.assertSequenceEqual(
            self.company_query.filter(num_employees__gt=F("num_chairs")), [
                {
                    "num_chairs": 5,
                    "name": "Example Inc.",
                    "num_employees": 2300,
                },
                {
                    "num_chairs": 1,
                    "name": "Test GmbH",
                    "num_employees": 32
                },
            ],
        )
def test_update(self):
# We can set one field to have the value of another field
# Make sure we have enough chairs
self.company_query.update(num_chairs=F("num_employees"))
self.assertSequenceEqual(
self.company_query, [
{
"num_chairs": 2300,
"name": "Example Inc.",
"num_employees": 2300
},
{
"num_chairs": 3,
"name": "Foobar Ltd.",
"num_employees": 3
},
{
"num_chairs": 32,
"name": "Test GmbH",
"num_employees": 32
}
],
)
def test_arithmetic(self):
# We can perform arithmetic operations in expressions
# Make sure we have 2 spare chairs
self.company_query.update(num_chairs=F("num_employees") + 2)
self.assertSequenceEqual(
self.company_query, [
{
'num_chairs': 2302,
'name': 'Example Inc.',
'num_employees': 2300
},
{
'num_chairs': 5,
'name': 'Foobar Ltd.',
'num_employees': 3
},
{
'num_chairs': 34,
'name': 'Test GmbH',
'num_employees': 32
}
],
)
def test_order_of_operations(self):
# Law of order of operations is followed
self.company_query.update(num_chairs=F('num_employees') + 2 * F('num_employees'))
self.assertSequenceEqual(
self.company_query, [
{
'num_chairs': 6900,
'name': 'Example Inc.',
'num_employees': 2300
},
{
'num_chairs': 9,
'name': 'Foobar Ltd.',
'num_employees': 3
},
{
'num_chairs': 96,
'name': 'Test GmbH',
'num_employees': 32
}
],
)
def test_parenthesis_priority(self):
# Law of order of operations can be overridden by parentheses
self.company_query.update(num_chairs=(F('num_employees') + 2) * F('num_employees'))
self.assertSequenceEqual(
self.company_query, [
{
'num_chairs': 5294600,
'name': 'Example Inc.',
'num_employees': 2300
},
{
'num_chairs': 15,
'name': 'Foobar Ltd.',
'num_employees': 3
},
{
'num_chairs': 1088,
'name': 'Test GmbH',
'num_employees': 32
}
],
)
def test_update_with_fk(self):
    # ForeignKey can become updated with the value of another ForeignKey.
    self.assertEqual(Company.objects.update(point_of_contact=F('ceo')), 3)
    self.assertQuerysetEqual(
        Company.objects.all(),
        ['Joe Smith', 'Frank Meyer', 'Max Mustermann'],
        lambda c: str(c.point_of_contact),
        ordered=False
    )

def test_update_with_none(self):
    # Value(None) clears a nullable field via update().
    Number.objects.create(integer=1, float=1.0)
    Number.objects.create(integer=2)
    Number.objects.filter(float__isnull=False).update(float=Value(None))
    self.assertQuerysetEqual(
        Number.objects.all(),
        [None, None],
        lambda n: n.float,
        ordered=False
    )

def test_filter_with_join(self):
    # F Expressions can also span joins
    Company.objects.update(point_of_contact=F('ceo'))
    c = Company.objects.first()
    c.point_of_contact = Employee.objects.create(firstname="Guido", lastname="van Rossum")
    c.save()
    self.assertQuerysetEqual(
        Company.objects.filter(ceo__firstname=F('point_of_contact__firstname')),
        ['Foobar Ltd.', 'Test GmbH'],
        lambda c: c.name,
        ordered=False
    )
    Company.objects.exclude(
        ceo__firstname=F("point_of_contact__firstname")
    ).update(name="foo")
    self.assertEqual(
        Company.objects.exclude(
            ceo__firstname=F('point_of_contact__firstname')
        ).get().name,
        "foo",
    )
    # Joined field references are allowed in the WHERE clause but rejected
    # on the SET side of an UPDATE.
    msg = "Joined field references are not permitted in this query"
    with self.assertRaisesMessage(FieldError, msg):
        Company.objects.exclude(
            ceo__firstname=F('point_of_contact__firstname')
        ).update(name=F('point_of_contact__lastname'))
def test_object_update(self):
    # F expressions can be used to update attributes on single objects
    self.gmbh.num_employees = F('num_employees') + 4
    self.gmbh.save()
    self.gmbh.refresh_from_db()
    self.assertEqual(self.gmbh.num_employees, 36)

def test_new_object_save(self):
    # We should be able to use Funcs when inserting new data
    test_co = Company(name=Lower(Value('UPPER')), num_employees=32, num_chairs=1, ceo=self.max)
    test_co.save()
    test_co.refresh_from_db()
    self.assertEqual(test_co.name, "upper")

def test_new_object_create(self):
    # Same as above, via Manager.create().
    test_co = Company.objects.create(name=Lower(Value('UPPER')), num_employees=32, num_chairs=1, ceo=self.max)
    test_co.refresh_from_db()
    self.assertEqual(test_co.name, "upper")

def test_object_create_with_aggregate(self):
    # Aggregates are not allowed when inserting new data
    msg = 'Aggregate functions are not allowed in this query (num_employees=Max(Value(1))).'
    with self.assertRaisesMessage(FieldError, msg):
        Company.objects.create(
            name='Company', num_employees=Max(Value(1)), num_chairs=1,
            ceo=Employee.objects.create(firstname="Just", lastname="Doit", salary=30),
        )

def test_object_update_fk(self):
    # F expressions cannot be used to update attributes which are foreign
    # keys, or attributes which involve joins.
    test_gmbh = Company.objects.get(pk=self.gmbh.pk)
    msg = 'F(ceo)": "Company.point_of_contact" must be a "Employee" instance.'
    with self.assertRaisesMessage(ValueError, msg):
        test_gmbh.point_of_contact = F('ceo')
    test_gmbh.point_of_contact = self.gmbh.ceo
    test_gmbh.save()
    test_gmbh.name = F('ceo__lastname')
    msg = 'Joined field references are not permitted in this query'
    with self.assertRaisesMessage(FieldError, msg):
        test_gmbh.save()

def test_update_inherited_field_value(self):
    # Updating a parent-model field through a child proxy still rejects
    # joined references.
    msg = 'Joined field references are not permitted in this query'
    with self.assertRaisesMessage(FieldError, msg):
        RemoteEmployee.objects.update(adjusted_salary=F('salary') * 5)

def test_object_update_unsaved_objects(self):
    # F expressions cannot be used to update attributes on objects which do
    # not yet exist in the database
    acme = Company(name='The Acme Widget Co.', num_employees=12, num_chairs=5, ceo=self.max)
    acme.num_employees = F("num_employees") + 16
    msg = (
        'Failed to insert expression "Col(expressions_company, '
        'expressions.Company.num_employees) + Value(16)" on '
        'expressions.Company.num_employees. F() expressions can only be '
        'used to update, not to insert.'
    )
    with self.assertRaisesMessage(ValueError, msg):
        acme.save()
    acme.num_employees = 12
    acme.name = Lower(F('name'))
    msg = (
        'Failed to insert expression "Lower(Col(expressions_company, '
        'expressions.Company.name))" on expressions.Company.name. F() '
        'expressions can only be used to update, not to insert.'
    )
    with self.assertRaisesMessage(ValueError, msg):
        acme.save()
def test_ticket_11722_iexact_lookup(self):
    # Case-insensitive exact lookup against an F() reference (refs #11722).
    Employee.objects.create(firstname="John", lastname="Doe")
    test = Employee.objects.create(firstname="Test", lastname="test")
    queryset = Employee.objects.filter(firstname__iexact=F('lastname'))
    self.assertSequenceEqual(queryset, [test])

def test_ticket_16731_startswith_lookup(self):
    # Pattern lookups against F() references (refs #16731). Result depends
    # on whether the backend's LIKE is case-insensitive.
    Employee.objects.create(firstname="John", lastname="Doe")
    e2 = Employee.objects.create(firstname="Jack", lastname="Jackson")
    e3 = Employee.objects.create(firstname="Jack", lastname="jackson")
    self.assertSequenceEqual(
        Employee.objects.filter(lastname__startswith=F('firstname')),
        [e2, e3] if connection.features.has_case_insensitive_like else [e2]
    )
    qs = Employee.objects.filter(lastname__istartswith=F('firstname')).order_by('pk')
    self.assertSequenceEqual(qs, [e2, e3])

def test_ticket_18375_join_reuse(self):
    # Reverse multijoin F() references and the lookup target the same join.
    # Pre #18375 the F() join was generated first and the lookup couldn't
    # reuse that join.
    qs = Employee.objects.filter(company_ceo_set__num_chairs=F('company_ceo_set__num_employees'))
    self.assertEqual(str(qs.query).count('JOIN'), 1)

def test_ticket_18375_kwarg_ordering(self):
    # The next query was dict-randomization dependent - if the "gte=1"
    # was seen first, then the F() will reuse the join generated by the
    # gte lookup, if F() was seen first, then it generated a join the
    # other lookups could not reuse.
    qs = Employee.objects.filter(
        company_ceo_set__num_chairs=F('company_ceo_set__num_employees'),
        company_ceo_set__num_chairs__gte=1,
    )
    self.assertEqual(str(qs.query).count('JOIN'), 1)

def test_ticket_18375_kwarg_ordering_2(self):
    # Another similar case for F() than above. Now we have the same join
    # in two filter kwargs, one in the lhs lookup, one in F. Here pre
    # #18375 the amount of joins generated was random if dict
    # randomization was enabled, that is the generated query dependent
    # on which clause was seen first.
    qs = Employee.objects.filter(
        company_ceo_set__num_employees=F('pk'),
        pk=F('company_ceo_set__num_employees')
    )
    self.assertEqual(str(qs.query).count('JOIN'), 1)

def test_ticket_18375_chained_filters(self):
    # F() expressions do not reuse joins from previous filter.
    qs = Employee.objects.filter(
        company_ceo_set__num_employees=F('pk')
    ).filter(
        company_ceo_set__num_employees=F('company_ceo_set__num_employees')
    )
    self.assertEqual(str(qs.query).count('JOIN'), 2)
def test_order_by_exists(self):
    """An Exists() subquery can be used directly in order_by()."""
    mary = Employee.objects.create(firstname='Mary', lastname='Mustermann', salary=20)
    mustermanns_by_seniority = Employee.objects.filter(lastname='Mustermann').order_by(
        # Order by whether the employee is the CEO of a company
        Exists(Company.objects.filter(ceo=OuterRef('pk'))).desc()
    )
    self.assertSequenceEqual(mustermanns_by_seniority, [self.max, mary])

def test_order_by_multiline_sql(self):
    # Multiline RawSQL snippets may be used as (possibly DISTINCT)
    # order_by() clauses.
    raw_order_by = (
        RawSQL('''
            CASE WHEN num_employees > 1000
                 THEN num_chairs
                 ELSE 0 END
        ''', []).desc(),
        RawSQL('''
            CASE WHEN num_chairs > 1
                 THEN 1
                 ELSE 0 END
        ''', []).asc()
    )
    for qs in (
        Company.objects.all(),
        Company.objects.distinct(),
    ):
        with self.subTest(qs=qs):
            self.assertSequenceEqual(
                qs.order_by(*raw_order_by),
                [self.example_inc, self.gmbh, self.foobar_ltd],
            )

def test_outerref(self):
    # A queryset containing an OuterRef is only usable inside a subquery;
    # evaluating it on its own must raise.
    inner = Company.objects.filter(point_of_contact=OuterRef('pk'))
    msg = (
        'This queryset contains a reference to an outer query and may only '
        'be used in a subquery.'
    )
    with self.assertRaisesMessage(ValueError, msg):
        inner.exists()
    outer = Employee.objects.annotate(is_point_of_contact=Exists(inner))
    self.assertIs(outer.exists(), True)

def test_exist_single_field_output_field(self):
    # Exists() always resolves to a BooleanField.
    queryset = Company.objects.values('pk')
    self.assertIsInstance(Exists(queryset).output_field, BooleanField)
def test_subquery(self):
    """Exists()/Subquery() annotations (including negations) in values()."""
    Company.objects.filter(name='Example Inc.').update(
        point_of_contact=Employee.objects.get(firstname='Joe', lastname='Smith'),
        ceo=self.max,
    )
    Employee.objects.create(firstname='Bob', lastname='Brown', salary=40)
    qs = Employee.objects.annotate(
        is_point_of_contact=Exists(Company.objects.filter(point_of_contact=OuterRef('pk'))),
        is_not_point_of_contact=~Exists(Company.objects.filter(point_of_contact=OuterRef('pk'))),
        is_ceo_of_small_company=Exists(Company.objects.filter(num_employees__lt=200, ceo=OuterRef('pk'))),
        is_ceo_small_2=~~Exists(Company.objects.filter(num_employees__lt=200, ceo=OuterRef('pk'))),
        largest_company=Subquery(Company.objects.order_by('-num_employees').filter(
            Q(ceo=OuterRef('pk')) | Q(point_of_contact=OuterRef('pk'))
        ).values('name')[:1], output_field=CharField())
    ).values(
        'firstname',
        'is_point_of_contact',
        'is_not_point_of_contact',
        'is_ceo_of_small_company',
        'is_ceo_small_2',
        'largest_company',
    ).order_by('firstname')
    results = list(qs)
    # Could use Coalesce(subq, Value('')) instead except for the bug in
    # cx_Oracle mentioned in #23843.
    bob = results[0]
    if bob['largest_company'] == '' and connection.features.interprets_empty_strings_as_nulls:
        bob['largest_company'] = None
    self.assertEqual(results, [
        {
            'firstname': 'Bob',
            'is_point_of_contact': False,
            'is_not_point_of_contact': True,
            'is_ceo_of_small_company': False,
            'is_ceo_small_2': False,
            'largest_company': None,
        },
        {
            'firstname': 'Frank',
            'is_point_of_contact': False,
            'is_not_point_of_contact': True,
            'is_ceo_of_small_company': True,
            'is_ceo_small_2': True,
            'largest_company': 'Foobar Ltd.',
        },
        {
            'firstname': 'Joe',
            'is_point_of_contact': True,
            'is_not_point_of_contact': False,
            'is_ceo_of_small_company': False,
            'is_ceo_small_2': False,
            'largest_company': 'Example Inc.',
        },
        {
            'firstname': 'Max',
            'is_point_of_contact': False,
            'is_not_point_of_contact': True,
            'is_ceo_of_small_company': True,
            'is_ceo_small_2': True,
            'largest_company': 'Example Inc.'
        }
    ])
    # A less elegant way to write the same query: this uses a LEFT OUTER
    # JOIN and an IS NULL, inside a WHERE NOT IN which is probably less
    # efficient than EXISTS.
    self.assertCountEqual(
        qs.filter(is_point_of_contact=True).values('pk'),
        Employee.objects.exclude(company_point_of_contact_set=None).values('pk')
    )

def test_subquery_eq(self):
    # Distinct Exists() annotations on the same queryset must not compare
    # equal to each other.
    qs = Employee.objects.annotate(
        is_ceo=Exists(Company.objects.filter(ceo=OuterRef('pk'))),
        is_point_of_contact=Exists(
            Company.objects.filter(point_of_contact=OuterRef('pk')),
        ),
        small_company=Exists(
            queryset=Company.objects.filter(num_employees__lt=200),
        ),
    ).filter(is_ceo=True, is_point_of_contact=False, small_company=True)
    self.assertNotEqual(
        qs.query.annotations['is_ceo'],
        qs.query.annotations['is_point_of_contact'],
    )
    self.assertNotEqual(
        qs.query.annotations['is_ceo'],
        qs.query.annotations['small_company'],
    )

def test_in_subquery(self):
    # This is a contrived test (and you really wouldn't write this query),
    # but it is a succinct way to test the __in=Subquery() construct.
    small_companies = Company.objects.filter(num_employees__lt=200).values('pk')
    subquery_test = Company.objects.filter(pk__in=Subquery(small_companies))
    self.assertCountEqual(subquery_test, [self.foobar_ltd, self.gmbh])
    subquery_test2 = Company.objects.filter(pk=Subquery(small_companies.filter(num_employees=3)))
    self.assertCountEqual(subquery_test2, [self.foobar_ltd])

def test_uuid_pk_subquery(self):
    # Subquery works when the primary key is a UUIDField.
    u = UUIDPK.objects.create()
    UUID.objects.create(uuid_fk=u)
    qs = UUIDPK.objects.filter(id__in=Subquery(UUID.objects.values('uuid_fk__id')))
    self.assertCountEqual(qs, [u])
def test_nested_subquery(self):
    # A Subquery may wrap a queryset that itself contains an Exists()
    # annotation referring to yet another subquery.
    inner = Company.objects.filter(point_of_contact=OuterRef('pk'))
    outer = Employee.objects.annotate(is_point_of_contact=Exists(inner))
    contrived = Employee.objects.annotate(
        is_point_of_contact=Subquery(
            outer.filter(pk=OuterRef('pk')).values('is_point_of_contact'),
            output_field=BooleanField(),
        ),
    )
    self.assertCountEqual(contrived.values_list(), outer.values_list())

def test_nested_subquery_join_outer_ref(self):
    # OuterRef inside a nested __in subquery resolves against the correct
    # (outermost) query.
    inner = Employee.objects.filter(pk=OuterRef('ceo__pk')).values('pk')
    qs = Employee.objects.annotate(
        ceo_company=Subquery(
            Company.objects.filter(
                ceo__in=inner,
                ceo__pk=OuterRef('pk'),
            ).values('pk'),
        ),
    )
    self.assertSequenceEqual(
        qs.values_list('ceo_company', flat=True),
        [self.example_inc.pk, self.foobar_ltd.pk, self.gmbh.pk],
    )

def test_nested_subquery_outer_ref_2(self):
    first = Time.objects.create(time='09:00')
    second = Time.objects.create(time='17:00')
    third = Time.objects.create(time='21:00')
    SimulationRun.objects.bulk_create([
        SimulationRun(start=first, end=second, midpoint='12:00'),
        SimulationRun(start=first, end=third, midpoint='15:00'),
        SimulationRun(start=second, end=first, midpoint='00:00'),
    ])
    inner = Time.objects.filter(time=OuterRef(OuterRef('time')), pk=OuterRef('start')).values('time')
    middle = SimulationRun.objects.annotate(other=Subquery(inner)).values('other')[:1]
    outer = Time.objects.annotate(other=Subquery(middle, output_field=TimeField()))
    # This is a contrived example. It exercises the double OuterRef form.
    self.assertCountEqual(outer, [first, second, third])

def test_nested_subquery_outer_ref_with_autofield(self):
    first = Time.objects.create(time='09:00')
    second = Time.objects.create(time='17:00')
    SimulationRun.objects.create(start=first, end=second, midpoint='12:00')
    inner = SimulationRun.objects.filter(start=OuterRef(OuterRef('pk'))).values('start')
    middle = Time.objects.annotate(other=Subquery(inner)).values('other')[:1]
    outer = Time.objects.annotate(other=Subquery(middle, output_field=IntegerField()))
    # This exercises the double OuterRef form with AutoField as pk.
    self.assertCountEqual(outer, [first, second])

def test_annotations_within_subquery(self):
    # Aggregate annotations computed inside the subquery are visible to the
    # outer query through values().
    Company.objects.filter(num_employees__lt=50).update(ceo=Employee.objects.get(firstname='Frank'))
    inner = Company.objects.filter(
        ceo=OuterRef('pk')
    ).values('ceo').annotate(total_employees=Sum('num_employees')).values('total_employees')
    outer = Employee.objects.annotate(total_employees=Subquery(inner)).filter(salary__lte=Subquery(inner))
    self.assertSequenceEqual(
        outer.order_by('-total_employees').values('salary', 'total_employees'),
        [{'salary': 10, 'total_employees': 2300}, {'salary': 20, 'total_employees': 35}],
    )

def test_subquery_references_joined_table_twice(self):
    inner = Company.objects.filter(
        num_chairs__gte=OuterRef('ceo__salary'),
        num_employees__gte=OuterRef('point_of_contact__salary'),
    )
    # Another contrived example (there is no need to have a subquery here)
    outer = Company.objects.filter(pk__in=Subquery(inner.values('pk')))
    self.assertFalse(outer.exists())
def test_subquery_filter_by_aggregate(self):
    # A Subquery whose inner queryset filters on one of its own aggregate
    # annotations still resolves correctly.
    Number.objects.create(integer=1000, float=1.2)
    Employee.objects.create(salary=1000)
    qs = Number.objects.annotate(
        min_valuable_count=Subquery(
            Employee.objects.filter(
                salary=OuterRef('integer'),
            ).annotate(cnt=Count('salary')).filter(cnt__gt=0).values('cnt')[:1]
        ),
    )
    self.assertEqual(qs.get().float, 1.2)

def test_subquery_filter_by_lazy(self):
    # Filtering against a SimpleLazyObject forces its evaluation and
    # compares by pk as usual.
    self.max.manager = Manager.objects.create(name='Manager')
    self.max.save()
    max_manager = SimpleLazyObject(
        lambda: Manager.objects.get(pk=self.max.manager.pk)
    )
    qs = Company.objects.annotate(
        ceo_manager=Subquery(
            Employee.objects.filter(
                lastname=OuterRef('ceo__lastname'),
            ).values('manager'),
        ),
    ).filter(ceo_manager=max_manager)
    self.assertEqual(qs.get(), self.gmbh)

def test_aggregate_subquery_annotation(self):
    with self.assertNumQueries(1) as ctx:
        aggregate = Company.objects.annotate(
            ceo_salary=Subquery(
                Employee.objects.filter(
                    id=OuterRef('ceo_id'),
                ).values('salary')
            ),
        ).aggregate(
            ceo_salary_gt_20=Count('pk', filter=Q(ceo_salary__gt=20)),
        )
    self.assertEqual(aggregate, {'ceo_salary_gt_20': 1})
    # Aggregation over a subquery annotation doesn't annotate the subquery
    # twice in the inner query.
    sql = ctx.captured_queries[0]['sql']
    self.assertLessEqual(sql.count('SELECT'), 3)
    # GROUP BY isn't required to aggregate over a query that doesn't
    # contain nested aggregates.
    self.assertNotIn('GROUP BY', sql)

def test_explicit_output_field(self):
    # A Func without its own output_field inherits it from its source
    # expression.
    class FuncA(Func):
        output_field = CharField()

    class FuncB(Func):
        pass

    expr = FuncB(FuncA())
    self.assertEqual(expr.output_field, FuncA.output_field)

def test_outerref_mixed_case_table_name(self):
    # OuterRef resolution works with mixed-case db_table names.
    inner = Result.objects.filter(result_time__gte=OuterRef('experiment__assigned'))
    outer = Result.objects.filter(pk__in=Subquery(inner.values('pk')))
    self.assertFalse(outer.exists())

def test_outerref_with_operator(self):
    # OuterRef may participate in arithmetic expressions.
    inner = Company.objects.filter(num_employees=OuterRef('ceo__salary') + 2)
    outer = Company.objects.filter(pk__in=Subquery(inner.values('pk')))
    self.assertEqual(outer.get().name, 'Test GmbH')
def test_nested_outerref_with_function(self):
    # A double OuterRef may be wrapped in a database function (Left).
    self.gmbh.point_of_contact = Employee.objects.get(lastname='Meyer')
    self.gmbh.save()
    inner = Employee.objects.filter(
        lastname__startswith=Left(OuterRef(OuterRef('lastname')), 1),
    )
    qs = Employee.objects.annotate(
        ceo_company=Subquery(
            Company.objects.filter(
                point_of_contact__in=inner,
                ceo__pk=OuterRef('pk'),
            ).values('name'),
        ),
    ).filter(ceo_company__isnull=False)
    self.assertEqual(qs.get().ceo_company, 'Test GmbH')

def test_annotation_with_outerref(self):
    # OuterRef may be combined with F() inside a subquery annotation.
    gmbh_salary = Company.objects.annotate(
        max_ceo_salary_raise=Subquery(
            Company.objects.annotate(
                salary_raise=OuterRef('num_employees') + F('num_employees'),
            ).order_by('-salary_raise').values('salary_raise')[:1],
            output_field=IntegerField(),
        ),
    ).get(pk=self.gmbh.pk)
    self.assertEqual(gmbh_salary.max_ceo_salary_raise, 2332)

def test_annotation_with_nested_outerref(self):
    # A nested OuterRef may first be bound to an annotation name and then
    # referenced by a database function.
    self.gmbh.point_of_contact = Employee.objects.get(lastname='Meyer')
    self.gmbh.save()
    inner = Employee.objects.annotate(
        outer_lastname=OuterRef(OuterRef('lastname')),
    ).filter(lastname__startswith=Left('outer_lastname', 1))
    qs = Employee.objects.annotate(
        ceo_company=Subquery(
            Company.objects.filter(
                point_of_contact__in=inner,
                ceo__pk=OuterRef('pk'),
            ).values('name'),
        ),
    ).filter(ceo_company__isnull=False)
    self.assertEqual(qs.get().ceo_company, 'Test GmbH')

def test_pickle_expression(self):
    # Expressions survive a pickle round trip, including cached properties.
    expr = Value(1)
    expr.convert_value  # populate cached property
    self.assertEqual(pickle.loads(pickle.dumps(expr)), expr)

def test_incorrect_field_in_F_expression(self):
    with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."):
        list(Employee.objects.filter(firstname=F('nope')))

def test_incorrect_joined_field_in_F_expression(self):
    with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."):
        list(Company.objects.filter(ceo__pk=F('point_of_contact__nope')))

def test_exists_in_filter(self):
    # Exists() may be passed positionally to filter()/exclude(); both the
    # direct form and the annotate-then-filter form are equivalent.
    inner = Company.objects.filter(ceo=OuterRef('pk')).values('pk')
    qs1 = Employee.objects.filter(Exists(inner))
    qs2 = Employee.objects.annotate(found=Exists(inner)).filter(found=True)
    self.assertCountEqual(qs1, qs2)
    self.assertFalse(Employee.objects.exclude(Exists(inner)).exists())
    self.assertCountEqual(qs2, Employee.objects.exclude(~Exists(inner)))

def test_subquery_in_filter(self):
    # A boolean-valued Subquery may be used directly as a filter predicate.
    inner = Company.objects.filter(ceo=OuterRef('pk')).values('based_in_eu')
    self.assertSequenceEqual(
        Employee.objects.filter(Subquery(inner)),
        [self.foobar_ltd.ceo],
    )

def test_subquery_group_by_outerref_in_filter(self):
    # Grouping by an OuterRef annotation inside the subquery is allowed.
    inner = Company.objects.annotate(
        employee=OuterRef('pk'),
    ).values('employee').annotate(
        min_num_chairs=Min('num_chairs'),
    ).values('ceo')
    self.assertIs(Employee.objects.filter(pk__in=Subquery(inner)).exists(), True)
def test_case_in_filter_if_boolean_output_field(self):
    # A Case() expression with a boolean output_field can be used as a
    # filter predicate; When() conditions may themselves be Exists().
    is_ceo = Company.objects.filter(ceo=OuterRef('pk'))
    is_poc = Company.objects.filter(point_of_contact=OuterRef('pk'))
    qs = Employee.objects.filter(
        Case(
            When(Exists(is_ceo), then=True),
            When(Exists(is_poc), then=True),
            default=False,
            output_field=BooleanField(),
        ),
    )
    self.assertCountEqual(qs, [self.example_inc.ceo, self.foobar_ltd.ceo, self.max])

def test_boolean_expression_combined(self):
    # Exists() expressions combine with each other and with Q() objects
    # using & and |, in either operand order.
    is_ceo = Company.objects.filter(ceo=OuterRef('pk'))
    is_poc = Company.objects.filter(point_of_contact=OuterRef('pk'))
    self.gmbh.point_of_contact = self.max
    self.gmbh.save()
    self.assertCountEqual(
        Employee.objects.filter(Exists(is_ceo) | Exists(is_poc)),
        [self.example_inc.ceo, self.foobar_ltd.ceo, self.max],
    )
    self.assertCountEqual(
        Employee.objects.filter(Exists(is_ceo) & Exists(is_poc)),
        [self.max],
    )
    self.assertCountEqual(
        Employee.objects.filter(Exists(is_ceo) & Q(salary__gte=30)),
        [self.max],
    )
    self.assertCountEqual(
        Employee.objects.filter(Exists(is_poc) | Q(salary__lt=15)),
        [self.example_inc.ceo, self.max],
    )
    self.assertCountEqual(
        Employee.objects.filter(Q(salary__gte=30) & Exists(is_ceo)),
        [self.max],
    )
    self.assertCountEqual(
        Employee.objects.filter(Q(salary__lt=15) | Exists(is_poc)),
        [self.example_inc.ceo, self.max],
    )

def test_boolean_expression_combined_with_empty_Q(self):
    # Combining Exists() with an empty Q() is a no-op in all four
    # operand/operator arrangements.
    is_poc = Company.objects.filter(point_of_contact=OuterRef('pk'))
    self.gmbh.point_of_contact = self.max
    self.gmbh.save()
    tests = [
        Exists(is_poc) & Q(),
        Q() & Exists(is_poc),
        Exists(is_poc) | Q(),
        Q() | Exists(is_poc),
    ]
    for conditions in tests:
        with self.subTest(conditions):
            self.assertCountEqual(Employee.objects.filter(conditions), [self.max])
class IterableLookupInnerExpressionsTests(TestCase):
    """
    Tests for expressions (F(), combined expressions) used inside iterable
    lookup values such as __in and __range.
    """
    @classmethod
    def setUpTestData(cls):
        ceo = Employee.objects.create(firstname='Just', lastname='Doit', salary=30)
        # MySQL requires that the values calculated for expressions don't pass
        # outside of the field's range, so it's inconvenient to use the values
        # in the more general tests.
        cls.c5020 = Company.objects.create(name='5020 Ltd', num_employees=50, num_chairs=20, ceo=ceo)
        cls.c5040 = Company.objects.create(name='5040 Ltd', num_employees=50, num_chairs=40, ceo=ceo)
        cls.c5050 = Company.objects.create(name='5050 Ltd', num_employees=50, num_chairs=50, ceo=ceo)
        cls.c5060 = Company.objects.create(name='5060 Ltd', num_employees=50, num_chairs=60, ceo=ceo)
        cls.c99300 = Company.objects.create(name='99300 Ltd', num_employees=99, num_chairs=300, ceo=ceo)

    def test_in_lookup_allows_F_expressions_and_expressions_for_integers(self):
        # __in lookups can use F() expressions for integers.
        queryset = Company.objects.filter(num_employees__in=([F('num_chairs') - 10]))
        self.assertSequenceEqual(queryset, [self.c5060])
        self.assertCountEqual(
            Company.objects.filter(num_employees__in=([F('num_chairs') - 10, F('num_chairs') + 10])),
            [self.c5040, self.c5060],
        )
        self.assertCountEqual(
            Company.objects.filter(
                num_employees__in=([F('num_chairs') - 10, F('num_chairs'), F('num_chairs') + 10])
            ),
            [self.c5040, self.c5050, self.c5060],
        )

    def test_expressions_in_lookups_join_choice(self):
        # F() references inside a __range lookup produce INNER joins for
        # filter() and LEFT OUTER joins for exclude().
        midpoint = datetime.time(13, 0)
        t1 = Time.objects.create(time=datetime.time(12, 0))
        t2 = Time.objects.create(time=datetime.time(14, 0))
        s1 = SimulationRun.objects.create(start=t1, end=t2, midpoint=midpoint)
        SimulationRun.objects.create(start=t1, end=None, midpoint=midpoint)
        SimulationRun.objects.create(start=None, end=t2, midpoint=midpoint)
        SimulationRun.objects.create(start=None, end=None, midpoint=midpoint)

        queryset = SimulationRun.objects.filter(midpoint__range=[F('start__time'), F('end__time')])
        self.assertSequenceEqual(queryset, [s1])
        for alias in queryset.query.alias_map.values():
            if isinstance(alias, Join):
                self.assertEqual(alias.join_type, constants.INNER)

        queryset = SimulationRun.objects.exclude(midpoint__range=[F('start__time'), F('end__time')])
        self.assertQuerysetEqual(queryset, [], ordered=False)
        for alias in queryset.query.alias_map.values():
            if isinstance(alias, Join):
                self.assertEqual(alias.join_type, constants.LOUTER)

    def test_range_lookup_allows_F_expressions_and_expressions_for_integers(self):
        # Range lookups can use F() expressions for integers.
        Company.objects.filter(num_employees__exact=F("num_chairs"))
        self.assertCountEqual(
            Company.objects.filter(num_employees__range=(F('num_chairs'), 100)),
            [self.c5020, self.c5040, self.c5050],
        )
        self.assertCountEqual(
            Company.objects.filter(num_employees__range=(F('num_chairs') - 10, F('num_chairs') + 10)),
            [self.c5040, self.c5050, self.c5060],
        )
        self.assertCountEqual(
            Company.objects.filter(num_employees__range=(F('num_chairs') - 10, 100)),
            [self.c5020, self.c5040, self.c5050, self.c5060],
        )
        self.assertCountEqual(
            Company.objects.filter(num_employees__range=(1, 100)),
            [self.c5020, self.c5040, self.c5050, self.c5060, self.c99300],
        )

    def test_range_lookup_namedtuple(self):
        # A namedtuple is accepted wherever a (min, max) pair is expected.
        EmployeeRange = namedtuple('EmployeeRange', ['minimum', 'maximum'])
        qs = Company.objects.filter(
            num_employees__range=EmployeeRange(minimum=51, maximum=100),
        )
        self.assertSequenceEqual(qs, [self.c99300])

    @unittest.skipUnless(connection.vendor == 'sqlite',
                         "This defensive test only works on databases that don't validate parameter types")
    def test_complex_expressions_do_not_introduce_sql_injection_via_untrusted_string_inclusion(self):
        """
        This tests that SQL injection isn't possible using compilation of
        expressions in iterable filters, as their compilation happens before
        the main query compilation. It's limited to SQLite, as PostgreSQL,
        Oracle and other vendors have defense in depth against this by type
        checking. Testing against SQLite (the most permissive of the built-in
        databases) demonstrates that the problem doesn't exist while keeping
        the test simple.
        """
        queryset = Company.objects.filter(name__in=[F('num_chairs') + '1)) OR ((1==1'])
        self.assertQuerysetEqual(queryset, [], ordered=False)

    def test_in_lookup_allows_F_expressions_and_expressions_for_datetimes(self):
        start = datetime.datetime(2016, 2, 3, 15, 0, 0)
        end = datetime.datetime(2016, 2, 5, 15, 0, 0)
        experiment_1 = Experiment.objects.create(
            name='Integrity testing',
            assigned=start.date(),
            start=start,
            end=end,
            completed=end.date(),
            estimated_time=end - start,
        )
        experiment_2 = Experiment.objects.create(
            name='Taste testing',
            assigned=start.date(),
            start=start,
            end=end,
            completed=end.date(),
            estimated_time=end - start,
        )
        r1 = Result.objects.create(
            experiment=experiment_1,
            result_time=datetime.datetime(2016, 2, 4, 15, 0, 0),
        )
        Result.objects.create(
            experiment=experiment_1,
            result_time=datetime.datetime(2016, 3, 10, 2, 0, 0),
        )
        Result.objects.create(
            experiment=experiment_2,
            result_time=datetime.datetime(2016, 1, 8, 5, 0, 0),
        )
        # Only r1 falls inside its own experiment's [start, end] window.
        # (The original test repeated this exact query/assert pair twice;
        # the verbatim duplicate has been removed.)
        within_experiment_time = [F('experiment__start'), F('experiment__end')]
        queryset = Result.objects.filter(result_time__range=within_experiment_time)
        self.assertSequenceEqual(queryset, [r1])
class FTests(SimpleTestCase):
    """Unit tests for the F() expression object itself (no database)."""

    def test_deepcopy(self):
        original = F("foo")
        duplicate = deepcopy(original)
        # A deep copy carries the referenced field name over.
        self.assertEqual(original.name, duplicate.name)

    def test_deconstruct(self):
        field_ref = F('name')
        path, args, kwargs = field_ref.deconstruct()
        # deconstruct() must round-trip: dotted path plus positional name.
        self.assertEqual(path, 'django.db.models.expressions.F')
        self.assertEqual(args, (field_ref.name,))
        self.assertEqual(kwargs, {})

    def test_equal(self):
        field_ref = F('name')
        # Equality is by referenced field name, not identity.
        self.assertEqual(field_ref, F('name'))
        self.assertNotEqual(field_ref, F('username'))

    def test_hash(self):
        lookup = {F('name'): 'Bob'}
        # F() objects hash consistently with their equality semantics,
        # so equal instances interchange as dict keys.
        self.assertIn(F('name'), lookup)
        self.assertEqual(lookup[F('name')], 'Bob')

    def test_not_equal_Value(self):
        field_ref = F('name')
        literal = Value('name')
        # An F() reference never equals a Value() literal, in either order.
        self.assertNotEqual(field_ref, literal)
        self.assertNotEqual(literal, field_ref)
class ExpressionsTests(TestCase):
    """Miscellaneous F() behavior: reuse across querysets and LIKE escaping."""

    def test_F_reuse(self):
        # The same F() instance may be shared by querysets over different
        # models without the first use poisoning the second.
        f = F('id')
        n = Number.objects.create(integer=-1)
        c = Company.objects.create(
            name="Example Inc.", num_employees=2300, num_chairs=5,
            ceo=Employee.objects.create(firstname="Joe", lastname="Smith")
        )
        c_qs = Company.objects.filter(id=f)
        self.assertEqual(c_qs.get(), c)
        # Reuse the same F-object for another queryset
        n_qs = Number.objects.filter(id=f)
        self.assertEqual(n_qs.get(), n)
        # The original query still works correctly
        self.assertEqual(c_qs.get(), c)

    def test_patterns_escape(self):
        r"""
        Special characters (e.g. %, _ and \) stored in database are
        properly escaped when using a pattern lookup with an expression
        refs #16731
        """
        Employee.objects.bulk_create([
            Employee(firstname="Johnny", lastname="%John"),
            Employee(firstname="Jean-Claude", lastname="Claud_"),
            Employee(firstname="Jean-Claude", lastname="Claude%"),
            Employee(firstname="Johnny", lastname="Joh\\n"),
            Employee(firstname="Johnny", lastname="_ohn"),
        ])
        claude = Employee.objects.create(firstname='Jean-Claude', lastname='Claude')
        john = Employee.objects.create(firstname='Johnny', lastname='John')
        john_sign = Employee.objects.create(firstname='%Joh\\nny', lastname='%Joh\\n')
        self.assertCountEqual(
            Employee.objects.filter(firstname__contains=F('lastname')),
            [john_sign, john, claude],
        )
        self.assertCountEqual(
            Employee.objects.filter(firstname__startswith=F('lastname')),
            [john_sign, john],
        )
        self.assertSequenceEqual(
            Employee.objects.filter(firstname__endswith=F('lastname')),
            [claude],
        )

    def test_insensitive_patterns_escape(self):
        r"""
        Special characters (e.g. %, _ and \) stored in database are
        properly escaped when using a case insensitive pattern lookup with an
        expression -- refs #16731
        """
        Employee.objects.bulk_create([
            Employee(firstname="Johnny", lastname="%john"),
            Employee(firstname="Jean-Claude", lastname="claud_"),
            Employee(firstname="Jean-Claude", lastname="claude%"),
            Employee(firstname="Johnny", lastname="joh\\n"),
            Employee(firstname="Johnny", lastname="_ohn"),
        ])
        claude = Employee.objects.create(firstname='Jean-Claude', lastname='claude')
        john = Employee.objects.create(firstname='Johnny', lastname='john')
        john_sign = Employee.objects.create(firstname='%Joh\\nny', lastname='%joh\\n')
        self.assertCountEqual(
            Employee.objects.filter(firstname__icontains=F('lastname')),
            [john_sign, john, claude],
        )
        self.assertCountEqual(
            Employee.objects.filter(firstname__istartswith=F('lastname')),
            [john_sign, john],
        )
        self.assertSequenceEqual(
            Employee.objects.filter(firstname__iendswith=F('lastname')),
            [claude],
        )
@isolate_apps('expressions')
class SimpleExpressionTests(SimpleTestCase):
    """Equality and hashing semantics of the base Expression class."""

    def test_equal(self):
        # Expressions compare equal when their output_field types match,
        # whether that field was passed positionally or by keyword.
        self.assertEqual(Expression(), Expression())
        self.assertEqual(
            Expression(IntegerField()),
            Expression(output_field=IntegerField())
        )
        self.assertEqual(Expression(IntegerField()), mock.ANY)
        self.assertNotEqual(
            Expression(IntegerField()),
            Expression(CharField())
        )

        class TestModel(Model):
            field = IntegerField()
            other_field = IntegerField()

        # Fields of the same type but different names are not equal.
        self.assertNotEqual(
            Expression(TestModel._meta.get_field('field')),
            Expression(TestModel._meta.get_field('other_field')),
        )

    def test_hash(self):
        # Hashing is consistent with the equality rules above.
        self.assertEqual(hash(Expression()), hash(Expression()))
        self.assertEqual(
            hash(Expression(IntegerField())),
            hash(Expression(output_field=IntegerField()))
        )
        self.assertNotEqual(
            hash(Expression(IntegerField())),
            hash(Expression(CharField())),
        )

        class TestModel(Model):
            field = IntegerField()
            other_field = IntegerField()

        self.assertNotEqual(
            hash(Expression(TestModel._meta.get_field('field'))),
            hash(Expression(TestModel._meta.get_field('other_field'))),
        )
class ExpressionsNumericTests(TestCase):
    """F() expressions used in numeric updates, filters and arithmetic."""

    @classmethod
    def setUpTestData(cls):
        # Three rows whose float column is then made to mirror integer.
        Number(integer=-1).save()
        Number(integer=42).save()
        Number(integer=1337).save()
        Number.objects.update(float=F('integer'))

    def test_fill_with_value_from_same_object(self):
        """
        We can fill a value in all objects with another value of the
        same object.
        """
        self.assertQuerysetEqual(
            Number.objects.all(),
            [(-1, -1), (42, 42), (1337, 1337)],
            lambda n: (n.integer, round(n.float)),
            ordered=False
        )

    def test_increment_value(self):
        """
        We can increment a value of all objects in a query set.
        """
        # Only the two positive rows match, so update() reports 2 rows.
        self.assertEqual(Number.objects.filter(integer__gt=0).update(integer=F('integer') + 1), 2)
        self.assertQuerysetEqual(
            Number.objects.all(),
            [(-1, -1), (43, 42), (1338, 1337)],
            lambda n: (n.integer, round(n.float)),
            ordered=False
        )

    def test_filter_not_equals_other_field(self):
        """
        We can filter for objects where a value does not equal the value
        of another field.
        """
        self.assertEqual(Number.objects.filter(integer__gt=0).update(integer=F('integer') + 1), 2)
        # After the update, the incremented rows differ from their floats.
        self.assertQuerysetEqual(
            Number.objects.exclude(float=F('integer')),
            [(43, 42), (1338, 1337)],
            lambda n: (n.integer, round(n.float)),
            ordered=False
        )

    def test_complex_expressions(self):
        """
        Complex expressions of different connection types are possible.
        """
        n = Number.objects.create(integer=10, float=123.45)
        # 10 + 123.45 * 2 == 256.9 (multiplication binds tighter).
        self.assertEqual(Number.objects.filter(pk=n.pk).update(
            float=F('integer') + F('float') * 2), 1)

        self.assertEqual(Number.objects.get(pk=n.pk).integer, 10)
        self.assertEqual(Number.objects.get(pk=n.pk).float, Approximate(256.900, places=3))
class ExpressionOperatorTests(TestCase):
    """Arithmetic and bitwise operators applied to F() expressions, with
    both F() on the left-hand and the right-hand side of the operator."""

    @classmethod
    def setUpTestData(cls):
        # One positive and one negative row to exercise sign handling.
        cls.n = Number.objects.create(integer=42, float=15.5)
        cls.n1 = Number.objects.create(integer=-42, float=-15.5)

    def test_lefthand_addition(self):
        # LH Addition of floats and integers
        Number.objects.filter(pk=self.n.pk).update(
            integer=F('integer') + 15,
            float=F('float') + 42.7
        )

        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3))

    def test_lefthand_subtraction(self):
        # LH Subtraction of floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer') - 15, float=F('float') - 42.7)

        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(-27.200, places=3))

    def test_lefthand_multiplication(self):
        # Multiplication of floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer') * 15, float=F('float') * 42.7)

        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3))

    def test_lefthand_division(self):
        # LH Division of floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer') / 2, float=F('float') / 42.7)

        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 21)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(0.363, places=3))

    def test_lefthand_modulo(self):
        # LH Modulo arithmetic on integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer') % 20)
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 2)

    def test_lefthand_bitwise_and(self):
        # LH Bitwise ands on integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer').bitand(56))
        Number.objects.filter(pk=self.n1.pk).update(integer=F('integer').bitand(-56))

        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 40)
        self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -64)

    def test_lefthand_bitwise_left_shift_operator(self):
        Number.objects.update(integer=F('integer').bitleftshift(2))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 168)
        self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -168)

    def test_lefthand_bitwise_right_shift_operator(self):
        Number.objects.update(integer=F('integer').bitrightshift(2))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 10)
        self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -11)

    def test_lefthand_bitwise_or(self):
        # LH Bitwise or on integers
        Number.objects.update(integer=F('integer').bitor(48))

        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 58)
        self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -10)

    def test_lefthand_transformed_field_bitwise_or(self):
        # Bitwise operators work on a transformed field ('lastname__length').
        Employee.objects.create(firstname='Max', lastname='Mustermann')
        with register_lookup(CharField, Length):
            qs = Employee.objects.annotate(bitor=F('lastname__length').bitor(48))
            self.assertEqual(qs.get().bitor, 58)

    def test_lefthand_power(self):
        # LH Power arithmetic operation on floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer') ** 2, float=F('float') ** 1.5)
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 1764)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(61.02, places=2))

    def test_lefthand_bitwise_xor(self):
        Number.objects.update(integer=F('integer').bitxor(48))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 26)
        self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -26)

    def test_lefthand_bitwise_xor_null(self):
        # XOR with a NULL operand stays NULL rather than raising.
        employee = Employee.objects.create(firstname='John', lastname='Doe')
        Employee.objects.update(salary=F('salary').bitxor(48))
        employee.refresh_from_db()
        self.assertIsNone(employee.salary)

    @unittest.skipUnless(connection.vendor == 'oracle', "Oracle doesn't support bitwise XOR.")
    def test_lefthand_bitwise_xor_not_supported(self):
        msg = 'Bitwise XOR is not supported in Oracle.'
        with self.assertRaisesMessage(NotSupportedError, msg):
            Number.objects.update(integer=F('integer').bitxor(48))

    def test_right_hand_addition(self):
        # Right hand operators
        Number.objects.filter(pk=self.n.pk).update(integer=15 + F('integer'), float=42.7 + F('float'))

        # RH Addition of floats and integers
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3))

    def test_right_hand_subtraction(self):
        Number.objects.filter(pk=self.n.pk).update(integer=15 - F('integer'), float=42.7 - F('float'))

        # RH Subtraction of floats and integers
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, -27)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(27.200, places=3))

    def test_right_hand_multiplication(self):
        # RH Multiplication of floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=15 * F('integer'), float=42.7 * F('float'))

        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3))

    def test_right_hand_division(self):
        # RH Division of floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=640 / F('integer'), float=42.7 / F('float'))

        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 15)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(2.755, places=3))

    def test_right_hand_modulo(self):
        # RH Modulo arithmetic on integers
        Number.objects.filter(pk=self.n.pk).update(integer=69 % F('integer'))

        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27)

    def test_righthand_power(self):
        # RH Power arithmetic operation on floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=2 ** F('integer'), float=1.5 ** F('float'))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 4398046511104)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(536.308, places=3))
class FTimeDeltaTests(TestCase):
    """Date/datetime/duration arithmetic with F() expressions and
    timedelta constants, including temporal subtraction support."""

    @classmethod
    def setUpTestData(cls):
        # Reference assignment day and start time shared by all experiments.
        cls.sday = sday = datetime.date(2010, 6, 25)
        cls.stime = stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000)
        midnight = datetime.time(0)

        delta0 = datetime.timedelta(0)
        delta1 = datetime.timedelta(microseconds=253000)
        delta2 = datetime.timedelta(seconds=44)
        delta3 = datetime.timedelta(hours=21, minutes=8)
        delta4 = datetime.timedelta(days=10)
        delta5 = datetime.timedelta(days=90)

        # Test data is set so that deltas and delays will be
        # strictly increasing.
        cls.deltas = []
        cls.delays = []
        cls.days_long = []

        # e0: started same day as assigned, zero duration
        end = stime + delta0
        cls.e0 = Experiment.objects.create(
            name='e0', assigned=sday, start=stime, end=end,
            completed=end.date(), estimated_time=delta0,
        )
        cls.deltas.append(delta0)
        cls.delays.append(cls.e0.start - datetime.datetime.combine(cls.e0.assigned, midnight))
        cls.days_long.append(cls.e0.completed - cls.e0.assigned)

        # e1: started one day after assigned, tiny duration, data
        # set so that end time has no fractional seconds, which
        # tests an edge case on sqlite.
        delay = datetime.timedelta(1)
        end = stime + delay + delta1
        e1 = Experiment.objects.create(
            name='e1', assigned=sday, start=stime + delay, end=end,
            completed=end.date(), estimated_time=delta1,
        )
        cls.deltas.append(delta1)
        cls.delays.append(e1.start - datetime.datetime.combine(e1.assigned, midnight))
        cls.days_long.append(e1.completed - e1.assigned)

        # e2: started three days after assigned, small duration
        end = stime + delta2
        e2 = Experiment.objects.create(
            name='e2', assigned=sday - datetime.timedelta(3), start=stime,
            end=end, completed=end.date(), estimated_time=datetime.timedelta(hours=1),
        )
        cls.deltas.append(delta2)
        cls.delays.append(e2.start - datetime.datetime.combine(e2.assigned, midnight))
        cls.days_long.append(e2.completed - e2.assigned)

        # e3: started four days after assigned, medium duration
        delay = datetime.timedelta(4)
        end = stime + delay + delta3
        e3 = Experiment.objects.create(
            name='e3', assigned=sday, start=stime + delay, end=end,
            completed=end.date(), estimated_time=delta3,
        )
        cls.deltas.append(delta3)
        cls.delays.append(e3.start - datetime.datetime.combine(e3.assigned, midnight))
        cls.days_long.append(e3.completed - e3.assigned)

        # e4: started 10 days after assignment, long duration
        end = stime + delta4
        e4 = Experiment.objects.create(
            name='e4', assigned=sday - datetime.timedelta(10), start=stime,
            end=end, completed=end.date(), estimated_time=delta4 - datetime.timedelta(1),
        )
        cls.deltas.append(delta4)
        cls.delays.append(e4.start - datetime.datetime.combine(e4.assigned, midnight))
        cls.days_long.append(e4.completed - e4.assigned)

        # e5: started a month after assignment, very long duration
        delay = datetime.timedelta(30)
        end = stime + delay + delta5
        e5 = Experiment.objects.create(
            name='e5', assigned=sday, start=stime + delay, end=end,
            completed=end.date(), estimated_time=delta5,
        )
        cls.deltas.append(delta5)
        cls.delays.append(e5.start - datetime.datetime.combine(e5.assigned, midnight))
        cls.days_long.append(e5.completed - e5.assigned)

        # Default Experiment ordering is relied on: expnames[i] corresponds
        # to deltas[i]/delays[i]/days_long[i].
        cls.expnames = [e.name for e in Experiment.objects.all()]

    def test_multiple_query_compilation(self):
        # Ticket #21643
        queryset = Experiment.objects.filter(end__lt=F('start') + datetime.timedelta(hours=1))
        q1 = str(queryset.query)
        q2 = str(queryset.query)
        self.assertEqual(q1, q2)

    def test_query_clone(self):
        # Ticket #21643 - Crash when compiling query more than once
        qs = Experiment.objects.filter(end__lt=F('start') + datetime.timedelta(hours=1))
        qs2 = qs.all()
        list(qs)
        list(qs2)
        # Intentionally no assert

    def test_delta_add(self):
        # Deltas are strictly increasing, so each delta splits the
        # experiments at index i (lt) or i + 1 (lte).
        for i, delta in enumerate(self.deltas):
            test_set = [e.name for e in Experiment.objects.filter(end__lt=F('start') + delta)]
            self.assertEqual(test_set, self.expnames[:i])

            test_set = [e.name for e in Experiment.objects.filter(end__lt=delta + F('start'))]
            self.assertEqual(test_set, self.expnames[:i])

            test_set = [e.name for e in Experiment.objects.filter(end__lte=F('start') + delta)]
            self.assertEqual(test_set, self.expnames[:i + 1])

    def test_delta_subtract(self):
        for i, delta in enumerate(self.deltas):
            test_set = [e.name for e in Experiment.objects.filter(start__gt=F('end') - delta)]
            self.assertEqual(test_set, self.expnames[:i])

            test_set = [e.name for e in Experiment.objects.filter(start__gte=F('end') - delta)]
            self.assertEqual(test_set, self.expnames[:i + 1])

    def test_exclude(self):
        for i, delta in enumerate(self.deltas):
            test_set = [e.name for e in Experiment.objects.exclude(end__lt=F('start') + delta)]
            self.assertEqual(test_set, self.expnames[i:])

            test_set = [e.name for e in Experiment.objects.exclude(end__lte=F('start') + delta)]
            self.assertEqual(test_set, self.expnames[i + 1:])

    def test_date_comparison(self):
        for i, days in enumerate(self.days_long):
            test_set = [e.name for e in Experiment.objects.filter(completed__lt=F('assigned') + days)]
            self.assertEqual(test_set, self.expnames[:i])

            test_set = [e.name for e in Experiment.objects.filter(completed__lte=F('assigned') + days)]
            self.assertEqual(test_set, self.expnames[:i + 1])

    @skipUnlessDBFeature("supports_mixed_date_datetime_comparisons")
    def test_mixed_comparisons1(self):
        for i, delay in enumerate(self.delays):
            test_set = [e.name for e in Experiment.objects.filter(assigned__gt=F('start') - delay)]
            self.assertEqual(test_set, self.expnames[:i])

            test_set = [e.name for e in Experiment.objects.filter(assigned__gte=F('start') - delay)]
            self.assertEqual(test_set, self.expnames[:i + 1])

    def test_mixed_comparisons2(self):
        for i, delay in enumerate(self.delays):
            # Truncate to whole days so the comparison stays date-based.
            delay = datetime.timedelta(delay.days)
            test_set = [e.name for e in Experiment.objects.filter(start__lt=F('assigned') + delay)]
            self.assertEqual(test_set, self.expnames[:i])

            test_set = [
                e.name for e in Experiment.objects.filter(start__lte=F('assigned') + delay + datetime.timedelta(1))
            ]
            self.assertEqual(test_set, self.expnames[:i + 1])

    def test_delta_update(self):
        # Shifting start and end by the same delta preserves durations.
        for delta in self.deltas:
            exps = Experiment.objects.all()
            expected_durations = [e.duration() for e in exps]
            expected_starts = [e.start + delta for e in exps]
            expected_ends = [e.end + delta for e in exps]

            Experiment.objects.update(start=F('start') + delta, end=F('end') + delta)
            exps = Experiment.objects.all()
            new_starts = [e.start for e in exps]
            new_ends = [e.end for e in exps]
            new_durations = [e.duration() for e in exps]
            self.assertEqual(expected_starts, new_starts)
            self.assertEqual(expected_ends, new_ends)
            self.assertEqual(expected_durations, new_durations)

    def test_invalid_operator(self):
        # datetime * timedelta is meaningless and must fail at the DB layer.
        with self.assertRaises(DatabaseError):
            list(Experiment.objects.filter(start=F('start') * datetime.timedelta(0)))

    def test_durationfield_add(self):
        zeros = [e.name for e in Experiment.objects.filter(start=F('start') + F('estimated_time'))]
        self.assertEqual(zeros, ['e0'])

        end_less = [e.name for e in Experiment.objects.filter(end__lt=F('start') + F('estimated_time'))]
        self.assertEqual(end_less, ['e2'])

        delta_math = [
            e.name for e in
            Experiment.objects.filter(end__gte=F('start') + F('estimated_time') + datetime.timedelta(hours=1))
        ]
        self.assertEqual(delta_math, ['e4'])

        # Adding a NULL duration yields NULL, not an error.
        queryset = Experiment.objects.annotate(shifted=ExpressionWrapper(
            F('start') + Value(None, output_field=DurationField()),
            output_field=DateTimeField(),
        ))
        self.assertIsNone(queryset.first().shifted)

    def test_duration_expressions(self):
        for delta in self.deltas:
            qs = Experiment.objects.annotate(duration=F('estimated_time') + delta)
            for obj in qs:
                self.assertEqual(obj.duration, obj.estimated_time + delta)

    @skipUnlessDBFeature('supports_temporal_subtraction')
    def test_date_subtraction(self):
        queryset = Experiment.objects.annotate(
            completion_duration=F('completed') - F('assigned'),
        )

        at_least_5_days = {e.name for e in queryset.filter(completion_duration__gte=datetime.timedelta(days=5))}
        self.assertEqual(at_least_5_days, {'e3', 'e4', 'e5'})

        at_least_120_days = {e.name for e in queryset.filter(completion_duration__gte=datetime.timedelta(days=120))}
        self.assertEqual(at_least_120_days, {'e5'})

        less_than_5_days = {e.name for e in queryset.filter(completion_duration__lt=datetime.timedelta(days=5))}
        self.assertEqual(less_than_5_days, {'e0', 'e1', 'e2'})

        # Subtracting NULL yields NULL in both directions.
        queryset = Experiment.objects.annotate(
            difference=F('completed') - Value(None, output_field=DateField()),
        )
        self.assertIsNone(queryset.first().difference)

        queryset = Experiment.objects.annotate(shifted=ExpressionWrapper(
            F('completed') - Value(None, output_field=DurationField()),
            output_field=DateField(),
        ))
        self.assertIsNone(queryset.first().shifted)

    @skipUnlessDBFeature('supports_temporal_subtraction')
    def test_date_subquery_subtraction(self):
        subquery = Experiment.objects.filter(pk=OuterRef('pk')).values('completed')
        queryset = Experiment.objects.annotate(
            difference=subquery - F('completed'),
        ).filter(difference=datetime.timedelta())
        self.assertTrue(queryset.exists())

    @skipUnlessDBFeature('supports_temporal_subtraction')
    def test_date_case_subtraction(self):
        queryset = Experiment.objects.annotate(
            date_case=Case(
                When(Q(name='e0'), then=F('completed')),
                output_field=DateField(),
            ),
            completed_value=Value(
                self.e0.completed,
                output_field=DateField(),
            ),
            difference=F('date_case') - F('completed_value'),
        ).filter(difference=datetime.timedelta())
        self.assertEqual(queryset.get(), self.e0)

    @skipUnlessDBFeature('supports_temporal_subtraction')
    def test_time_subtraction(self):
        Time.objects.create(time=datetime.time(12, 30, 15, 2345))
        queryset = Time.objects.annotate(
            difference=F('time') - Value(datetime.time(11, 15, 0)),
        )
        self.assertEqual(
            queryset.get().difference,
            datetime.timedelta(hours=1, minutes=15, seconds=15, microseconds=2345)
        )

        queryset = Time.objects.annotate(
            difference=F('time') - Value(None, output_field=TimeField()),
        )
        self.assertIsNone(queryset.first().difference)

        queryset = Time.objects.annotate(shifted=ExpressionWrapper(
            F('time') - Value(None, output_field=DurationField()),
            output_field=TimeField(),
        ))
        self.assertIsNone(queryset.first().shifted)

    @skipUnlessDBFeature('supports_temporal_subtraction')
    def test_time_subquery_subtraction(self):
        Time.objects.create(time=datetime.time(12, 30, 15, 2345))
        subquery = Time.objects.filter(pk=OuterRef('pk')).values('time')
        queryset = Time.objects.annotate(
            difference=subquery - F('time'),
        ).filter(difference=datetime.timedelta())
        self.assertTrue(queryset.exists())

    @skipUnlessDBFeature('supports_temporal_subtraction')
    def test_datetime_subtraction(self):
        under_estimate = [
            e.name for e in Experiment.objects.filter(estimated_time__gt=F('end') - F('start'))
        ]
        self.assertEqual(under_estimate, ['e2'])

        over_estimate = [
            e.name for e in Experiment.objects.filter(estimated_time__lt=F('end') - F('start'))
        ]
        self.assertEqual(over_estimate, ['e4'])

        queryset = Experiment.objects.annotate(
            difference=F('start') - Value(None, output_field=DateTimeField()),
        )
        self.assertIsNone(queryset.first().difference)

        queryset = Experiment.objects.annotate(shifted=ExpressionWrapper(
            F('start') - Value(None, output_field=DurationField()),
            output_field=DateTimeField(),
        ))
        self.assertIsNone(queryset.first().shifted)

    @skipUnlessDBFeature('supports_temporal_subtraction')
    def test_datetime_subquery_subtraction(self):
        subquery = Experiment.objects.filter(pk=OuterRef('pk')).values('start')
        queryset = Experiment.objects.annotate(
            difference=subquery - F('start'),
        ).filter(difference=datetime.timedelta())
        self.assertTrue(queryset.exists())

    @skipUnlessDBFeature('supports_temporal_subtraction')
    def test_datetime_subtraction_microseconds(self):
        # Very large microsecond deltas must round-trip exactly.
        delta = datetime.timedelta(microseconds=8999999999999999)
        Experiment.objects.update(end=F('start') + delta)
        qs = Experiment.objects.annotate(delta=F('end') - F('start'))
        for e in qs:
            self.assertEqual(e.delta, delta)

    def test_duration_with_datetime(self):
        # Exclude e1 which has very high precision so we can test this on all
        # backends regardless of whether or not it supports
        # microsecond_precision.
        over_estimate = Experiment.objects.exclude(name='e1').filter(
            completed__gt=self.stime + F('estimated_time'),
        ).order_by('name')
        self.assertQuerysetEqual(over_estimate, ['e3', 'e4', 'e5'], lambda e: e.name)

    def test_duration_with_datetime_microseconds(self):
        delta = datetime.timedelta(microseconds=8999999999999999)
        qs = Experiment.objects.annotate(dt=ExpressionWrapper(
            F('start') + delta,
            output_field=DateTimeField(),
        ))
        for e in qs:
            self.assertEqual(e.dt, e.start + delta)

    def test_date_minus_duration(self):
        more_than_4_days = Experiment.objects.filter(
            assigned__lt=F('completed') - Value(datetime.timedelta(days=4))
        )
        self.assertQuerysetEqual(more_than_4_days, ['e3', 'e4', 'e5'], lambda e: e.name)

    def test_negative_timedelta_update(self):
        # subtract 30 seconds, 30 minutes, 2 hours and 2 days
        experiments = Experiment.objects.filter(name='e0').annotate(
            start_sub_seconds=F('start') + datetime.timedelta(seconds=-30),
        ).annotate(
            start_sub_minutes=F('start_sub_seconds') + datetime.timedelta(minutes=-30),
        ).annotate(
            start_sub_hours=F('start_sub_minutes') + datetime.timedelta(hours=-2),
        ).annotate(
            new_start=F('start_sub_hours') + datetime.timedelta(days=-2),
        )
        expected_start = datetime.datetime(2010, 6, 23, 9, 45, 0)
        # subtract 30 microseconds
        experiments = experiments.annotate(new_start=F('new_start') + datetime.timedelta(microseconds=-30))
        expected_start += datetime.timedelta(microseconds=+746970)
        experiments.update(start=F('new_start'))
        e0 = Experiment.objects.get(name='e0')
        self.assertEqual(e0.start, expected_start)
class ValueTests(TestCase):
    """Behavior of the Value() expression: usage in update(), deconstruction
    for migrations, equality/hashing, SQL compilation, and output_field
    inference from the wrapped Python value."""

    def test_update_TimeField_using_Value(self):
        Time.objects.create()
        Time.objects.update(time=Value(datetime.time(1), output_field=TimeField()))
        self.assertEqual(Time.objects.get().time, datetime.time(1))

    def test_update_UUIDField_using_Value(self):
        UUID.objects.create()
        UUID.objects.update(uuid=Value(uuid.UUID('12345678901234567890123456789012'), output_field=UUIDField()))
        self.assertEqual(UUID.objects.get().uuid, uuid.UUID('12345678901234567890123456789012'))

    def test_deconstruct(self):
        value = Value('name')
        path, args, kwargs = value.deconstruct()
        self.assertEqual(path, 'django.db.models.expressions.Value')
        self.assertEqual(args, (value.value,))
        self.assertEqual(kwargs, {})

    def test_deconstruct_output_field(self):
        # An explicit output_field must survive deconstruction.
        value = Value('name', output_field=CharField())
        path, args, kwargs = value.deconstruct()
        self.assertEqual(path, 'django.db.models.expressions.Value')
        self.assertEqual(args, (value.value,))
        self.assertEqual(len(kwargs), 1)
        self.assertEqual(kwargs['output_field'].deconstruct(), CharField().deconstruct())

    def test_equal(self):
        value = Value('name')
        self.assertEqual(value, Value('name'))
        self.assertNotEqual(value, Value('username'))

    def test_hash(self):
        d = {Value('name'): 'Bob'}
        self.assertIn(Value('name'), d)
        self.assertEqual(d[Value('name')], 'Bob')

    def test_equal_output_field(self):
        value = Value('name', output_field=CharField())
        same_value = Value('name', output_field=CharField())
        other_value = Value('name', output_field=TimeField())
        no_output_field = Value('name')
        self.assertEqual(value, same_value)
        self.assertNotEqual(value, other_value)
        self.assertNotEqual(value, no_output_field)

    def test_raise_empty_expressionlist(self):
        msg = 'ExpressionList requires at least one expression'
        with self.assertRaisesMessage(ValueError, msg):
            ExpressionList()

    def test_compile_unresolved(self):
        # This test might need to be revisited later on if #25425 is enforced.
        compiler = Time.objects.all().query.get_compiler(connection=connection)
        value = Value('foo')
        self.assertEqual(value.as_sql(compiler, connection), ('%s', ['foo']))
        value = Value('foo', output_field=CharField())
        self.assertEqual(value.as_sql(compiler, connection), ('%s', ['foo']))

    def test_resolve_output_field(self):
        # Each wrapped Python type must infer the matching field class.
        value_types = [
            ('str', CharField),
            (True, BooleanField),
            (42, IntegerField),
            (3.14, FloatField),
            (datetime.date(2019, 5, 15), DateField),
            (datetime.datetime(2019, 5, 15), DateTimeField),
            (datetime.time(3, 16), TimeField),
            (datetime.timedelta(1), DurationField),
            (Decimal('3.14'), DecimalField),
            (b'', BinaryField),
            (uuid.uuid4(), UUIDField),
        ]
        # Fixed typo: local variable was misspelled "ouput_field_type".
        for value, output_field_type in value_types:
            with self.subTest(type=type(value)):
                expr = Value(value)
                self.assertIsInstance(expr.output_field, output_field_type)

    def test_resolve_output_field_failure(self):
        msg = 'Cannot resolve expression type, unknown output_field'
        with self.assertRaisesMessage(FieldError, msg):
            Value(object()).output_field
class ExistsTests(TestCase):
    """SQL-level optimizations applied to Exists() subqueries."""

    def test_optimizations(self):
        inner = Experiment.objects.order_by('pk')
        with CaptureQueriesContext(connection) as ctx:
            list(Experiment.objects.values(exists=Exists(inner)).order_by())
        self.assertEqual(len(ctx.captured_queries), 1)
        sql = ctx.captured_queries[0]['sql']
        # The subquery's select list is collapsed, so the pk column name
        # must not appear in the generated SQL.
        pk_column = connection.ops.quote_name(Experiment._meta.pk.column)
        self.assertNotIn(pk_column, sql)
        # A LIMIT 1 is injected and the pointless ORDER BY is stripped.
        self.assertIn(connection.ops.limit_offset_sql(None, 1), sql)
        self.assertNotIn('ORDER BY', sql)
class FieldTransformTests(TestCase):
    """Field transforms (e.g. __month, __date) in aggregates and values()."""

    @classmethod
    def setUpTestData(cls):
        assigned_day = datetime.date(2010, 6, 25)
        started = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000)
        two_days = datetime.timedelta(2)
        cls.sday = assigned_day
        cls.stime = started
        cls.ex1 = Experiment.objects.create(
            name='Experiment 1',
            assigned=assigned_day,
            completed=assigned_day + two_days,
            estimated_time=two_days,
            start=started,
            end=started + two_days,
        )

    def test_month_aggregation(self):
        aggregated = Experiment.objects.aggregate(month_count=Count('assigned__month'))
        self.assertEqual(aggregated, {'month_count': 1})

    def test_transform_in_values(self):
        months = Experiment.objects.values('assigned__month')
        self.assertSequenceEqual(months, [{'assigned__month': 6}])

    def test_multiple_transforms_in_values(self):
        # Chained transforms: datetime -> date -> month.
        months = Experiment.objects.values('end__date__month')
        self.assertSequenceEqual(months, [{'end__date__month': 6}])
class ReprTests(SimpleTestCase):
    """repr() output of expressions, database functions and aggregates."""

    def test_expressions(self):
        self.assertEqual(
            repr(Case(When(a=1))),
            "<Case: CASE WHEN <Q: (AND: ('a', 1))> THEN Value(None), ELSE Value(None)>"
        )
        self.assertEqual(
            repr(When(Q(age__gte=18), then=Value('legal'))),
            "<When: WHEN <Q: (AND: ('age__gte', 18))> THEN Value(legal)>"
        )
        self.assertEqual(repr(Col('alias', 'field')), "Col(alias, field)")
        self.assertEqual(repr(F('published')), "F(published)")
        self.assertEqual(repr(F('cost') + F('tax')), "<CombinedExpression: F(cost) + F(tax)>")
        self.assertEqual(
            repr(ExpressionWrapper(F('cost') + F('tax'), IntegerField())),
            "ExpressionWrapper(F(cost) + F(tax))"
        )
        self.assertEqual(repr(Func('published', function='TO_CHAR')), "Func(F(published), function=TO_CHAR)")
        self.assertEqual(repr(OrderBy(Value(1))), 'OrderBy(Value(1), descending=False)')
        self.assertEqual(repr(RawSQL('table.col', [])), "RawSQL(table.col, [])")
        self.assertEqual(repr(Ref('sum_cost', Sum('cost'))), "Ref(sum_cost, Sum(F(cost)))")
        self.assertEqual(repr(Value(1)), "Value(1)")
        self.assertEqual(
            repr(ExpressionList(F('col'), F('anothercol'))),
            'ExpressionList(F(col), F(anothercol))'
        )
        self.assertEqual(
            repr(ExpressionList(OrderBy(F('col'), descending=False))),
            'ExpressionList(OrderBy(F(col), descending=False))'
        )

    def test_functions(self):
        self.assertEqual(repr(Coalesce('a', 'b')), "Coalesce(F(a), F(b))")
        self.assertEqual(repr(Concat('a', 'b')), "Concat(ConcatPair(F(a), F(b)))")
        self.assertEqual(repr(Length('a')), "Length(F(a))")
        self.assertEqual(repr(Lower('a')), "Lower(F(a))")
        self.assertEqual(repr(Substr('a', 1, 3)), "Substr(F(a), Value(1), Value(3))")
        self.assertEqual(repr(Upper('a')), "Upper(F(a))")

    def test_aggregates(self):
        self.assertEqual(repr(Avg('a')), "Avg(F(a))")
        self.assertEqual(repr(Count('a')), "Count(F(a))")
        # Count('*') keeps the literal star rather than wrapping it in F().
        self.assertEqual(repr(Count('*')), "Count('*')")
        self.assertEqual(repr(Max('a')), "Max(F(a))")
        self.assertEqual(repr(Min('a')), "Min(F(a))")
        self.assertEqual(repr(StdDev('a')), "StdDev(F(a), sample=False)")
        self.assertEqual(repr(Sum('a')), "Sum(F(a))")
        self.assertEqual(repr(Variance('a', sample=True)), "Variance(F(a), sample=True)")

    def test_distinct_aggregates(self):
        self.assertEqual(repr(Count('a', distinct=True)), "Count(F(a), distinct=True)")
        self.assertEqual(repr(Count('*', distinct=True)), "Count('*', distinct=True)")

    def test_filtered_aggregates(self):
        filter = Q(a=1)
        self.assertEqual(repr(Avg('a', filter=filter)), "Avg(F(a), filter=(AND: ('a', 1)))")
        self.assertEqual(repr(Count('a', filter=filter)), "Count(F(a), filter=(AND: ('a', 1)))")
        self.assertEqual(repr(Max('a', filter=filter)), "Max(F(a), filter=(AND: ('a', 1)))")
        self.assertEqual(repr(Min('a', filter=filter)), "Min(F(a), filter=(AND: ('a', 1)))")
        self.assertEqual(repr(StdDev('a', filter=filter)), "StdDev(F(a), filter=(AND: ('a', 1)), sample=False)")
        self.assertEqual(repr(Sum('a', filter=filter)), "Sum(F(a), filter=(AND: ('a', 1)))")
        self.assertEqual(
            repr(Variance('a', sample=True, filter=filter)),
            "Variance(F(a), filter=(AND: ('a', 1)), sample=True)"
        )
        self.assertEqual(
            repr(Count('a', filter=filter, distinct=True)), "Count(F(a), distinct=True, filter=(AND: ('a', 1)))"
        )
class CombinableTests(SimpleTestCase):
    """Combinable supports negation but rejects the & and | operators."""

    bitwise_msg = 'Use .bitand() and .bitor() for bitwise logical operations.'

    def test_negation(self):
        combinable = Combinable()
        negated = -combinable
        # Unary minus is implemented as multiplication by -1.
        self.assertEqual(negated, combinable * -1)

    def test_and(self):
        lhs, rhs = Combinable(), Combinable()
        with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg):
            lhs & rhs

    def test_or(self):
        lhs, rhs = Combinable(), Combinable()
        with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg):
            lhs | rhs

    def test_reversed_and(self):
        # Reflected operator (__rand__) must raise the same error.
        with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg):
            object() & Combinable()

    def test_reversed_or(self):
        # Reflected operator (__ror__) must raise the same error.
        with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg):
            object() | Combinable()
class CombinedExpressionTests(SimpleTestCase):
    """output_field resolution for arithmetic on mixed numeric field types."""

    def test_resolve_output_field(self):
        # (lhs, rhs, expected combined type); symmetric pairs are listed
        # explicitly so both operand orders are covered.
        cases = [
            (IntegerField, AutoField, IntegerField),
            (AutoField, IntegerField, IntegerField),
            (IntegerField, DecimalField, DecimalField),
            (DecimalField, IntegerField, DecimalField),
            (IntegerField, FloatField, FloatField),
            (FloatField, IntegerField, FloatField),
        ]
        for lhs_type, rhs_type, expected in cases:
            for connector in (Combinable.ADD, Combinable.SUB, Combinable.MUL, Combinable.DIV):
                with self.subTest(lhs=lhs_type, connector=connector, rhs=rhs_type, combined=expected):
                    combined_expr = CombinedExpression(
                        Expression(lhs_type()),
                        connector,
                        Expression(rhs_type()),
                    )
                    self.assertIsInstance(combined_expr.output_field, expected)
class ExpressionWrapperTests(SimpleTestCase):
    """GROUP BY column extraction for ExpressionWrapper."""

    def test_empty_group_by(self):
        wrapped = ExpressionWrapper(Value(3), output_field=IntegerField())
        cols = wrapped.get_group_by_cols(alias=None)
        self.assertEqual(cols, [])

    def test_non_empty_group_by(self):
        inner = Value('f')
        inner.output_field = None  # force resolution through the wrapper
        wrapped = ExpressionWrapper(Lower(inner), output_field=IntegerField())
        cols = wrapped.get_group_by_cols(alias=None)
        # The wrapper delegates grouping to its inner expression...
        self.assertEqual(cols, [wrapped.expression])
        # ...and pushes its own output_field down onto it.
        self.assertEqual(cols[0].output_field, wrapped.output_field)
| 42.402364 | 116 | 0.620408 | import datetime
import pickle
import unittest
import uuid
from collections import namedtuple
from copy import deepcopy
from decimal import Decimal
from unittest import mock
from django.core.exceptions import FieldError
from django.db import DatabaseError, NotSupportedError, connection
from django.db.models import (
AutoField, Avg, BinaryField, BooleanField, Case, CharField, Count,
DateField, DateTimeField, DecimalField, DurationField, Exists, Expression,
ExpressionList, ExpressionWrapper, F, FloatField, Func, IntegerField, Max,
Min, Model, OrderBy, OuterRef, Q, StdDev, Subquery, Sum, TimeField,
UUIDField, Value, Variance, When,
)
from django.db.models.expressions import (
Col, Combinable, CombinedExpression, RawSQL, Ref,
)
from django.db.models.functions import (
Coalesce, Concat, Left, Length, Lower, Substr, Upper,
)
from django.db.models.sql import constants
from django.db.models.sql.datastructures import Join
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from django.test.utils import (
Approximate, CaptureQueriesContext, isolate_apps, register_lookup,
)
from django.utils.functional import SimpleLazyObject
from .models import (
UUID, UUIDPK, Company, Employee, Experiment, Manager, Number,
RemoteEmployee, Result, SimulationRun, Time,
)
class BasicExpressionsTests(TestCase):
    """F(), OuterRef, Subquery and Exists behavior on basic querysets."""
    @classmethod
    def setUpTestData(cls):
        # Three companies whose CEOs earn 10, 20 and 30 respectively.
        cls.example_inc = Company.objects.create(
            name="Example Inc.", num_employees=2300, num_chairs=5,
            ceo=Employee.objects.create(firstname="Joe", lastname="Smith", salary=10)
        )
        cls.foobar_ltd = Company.objects.create(
            name="Foobar Ltd.", num_employees=3, num_chairs=4, based_in_eu=True,
            ceo=Employee.objects.create(firstname="Frank", lastname="Meyer", salary=20)
        )
        cls.max = Employee.objects.create(firstname='Max', lastname='Mustermann', salary=30)
        cls.gmbh = Company.objects.create(name='Test GmbH', num_employees=32, num_chairs=1, ceo=cls.max)
    def setUp(self):
        # Ordered values() queryset reused by the update/arithmetic tests below.
        self.company_query = Company.objects.values(
            "name", "num_employees", "num_chairs"
        ).order_by(
            "name", "num_employees", "num_chairs"
        )
    def test_annotate_values_aggregate(self):
        companies = Company.objects.annotate(
            salaries=F('ceo__salary'),
        ).values('num_employees', 'salaries').aggregate(
            result=Sum(
                F('salaries') + F('num_employees'),
                output_field=IntegerField()
            ),
        )
        self.assertEqual(companies['result'], 2395)
    def test_annotate_values_filter(self):
        companies = Company.objects.annotate(
            foo=RawSQL('%s', ['value']),
        ).filter(foo='value').order_by('name')
        self.assertSequenceEqual(
            companies,
            [self.example_inc, self.foobar_ltd, self.gmbh],
        )
    def test_annotate_values_count(self):
        companies = Company.objects.annotate(foo=RawSQL('%s', ['value']))
        self.assertEqual(companies.count(), 3)
    @skipUnlessDBFeature('supports_boolean_expr_in_select_clause')
    def test_filtering_on_annotate_that_uses_q(self):
        self.assertEqual(
            Company.objects.annotate(
                num_employees_check=ExpressionWrapper(Q(num_employees__gt=3), output_field=BooleanField())
            ).filter(num_employees_check=True).count(),
            2,
        )
    def test_filtering_on_q_that_is_boolean(self):
        self.assertEqual(
            Company.objects.filter(
                ExpressionWrapper(Q(num_employees__gt=3), output_field=BooleanField())
            ).count(),
            2,
        )
    def test_filtering_on_rawsql_that_is_boolean(self):
        self.assertEqual(
            Company.objects.filter(
                RawSQL('num_employees > %s', (3,), output_field=BooleanField()),
            ).count(),
            2,
        )
    def test_filter_inter_attribute(self):
        # F() lets one column be compared against another on the same row.
        self.assertSequenceEqual(
            self.company_query.filter(num_employees__gt=F("num_chairs")), [
                {
                    "num_chairs": 5,
                    "name": "Example Inc.",
                    "num_employees": 2300,
                },
                {
                    "num_chairs": 1,
                    "name": "Test GmbH",
                    "num_employees": 32
                },
            ],
        )
    def test_update(self):
        self.company_query.update(num_chairs=F("num_employees"))
        self.assertSequenceEqual(
            self.company_query, [
                {
                    "num_chairs": 2300,
                    "name": "Example Inc.",
                    "num_employees": 2300
                },
                {
                    "num_chairs": 3,
                    "name": "Foobar Ltd.",
                    "num_employees": 3
                },
                {
                    "num_chairs": 32,
                    "name": "Test GmbH",
                    "num_employees": 32
                }
            ],
        )
    def test_arithmetic(self):
        self.company_query.update(num_chairs=F("num_employees") + 2)
        self.assertSequenceEqual(
            self.company_query, [
                {
                    'num_chairs': 2302,
                    'name': 'Example Inc.',
                    'num_employees': 2300
                },
                {
                    'num_chairs': 5,
                    'name': 'Foobar Ltd.',
                    'num_employees': 3
                },
                {
                    'num_chairs': 34,
                    'name': 'Test GmbH',
                    'num_employees': 32
                }
            ],
        )
    def test_order_of_operations(self):
        # Multiplication binds tighter than addition inside F() arithmetic.
        self.company_query.update(num_chairs=F('num_employees') + 2 * F('num_employees'))
        self.assertSequenceEqual(
            self.company_query, [
                {
                    'num_chairs': 6900,
                    'name': 'Example Inc.',
                    'num_employees': 2300
                },
                {
                    'num_chairs': 9,
                    'name': 'Foobar Ltd.',
                    'num_employees': 3
                },
                {
                    'num_chairs': 96,
                    'name': 'Test GmbH',
                    'num_employees': 32
                }
            ],
        )
    def test_parenthesis_priority(self):
        # Explicit parentheses override the default operator precedence.
        self.company_query.update(num_chairs=(F('num_employees') + 2) * F('num_employees'))
        self.assertSequenceEqual(
            self.company_query, [
                {
                    'num_chairs': 5294600,
                    'name': 'Example Inc.',
                    'num_employees': 2300
                },
                {
                    'num_chairs': 15,
                    'name': 'Foobar Ltd.',
                    'num_employees': 3
                },
                {
                    'num_chairs': 1088,
                    'name': 'Test GmbH',
                    'num_employees': 32
                }
            ],
        )
    def test_update_with_fk(self):
        # F('ceo') resolves on the same model, so it can populate another FK.
        self.assertEqual(Company.objects.update(point_of_contact=F('ceo')), 3)
        self.assertQuerysetEqual(
            Company.objects.all(),
            ['Joe Smith', 'Frank Meyer', 'Max Mustermann'],
            lambda c: str(c.point_of_contact),
            ordered=False
        )
    def test_update_with_none(self):
        Number.objects.create(integer=1, float=1.0)
        Number.objects.create(integer=2)
        Number.objects.filter(float__isnull=False).update(float=Value(None))
        self.assertQuerysetEqual(
            Number.objects.all(),
            [None, None],
            lambda n: n.float,
            ordered=False
        )
    def test_filter_with_join(self):
        Company.objects.update(point_of_contact=F('ceo'))
        c = Company.objects.first()
        c.point_of_contact = Employee.objects.create(firstname="Guido", lastname="van Rossum")
        c.save()
        self.assertQuerysetEqual(
            Company.objects.filter(ceo__firstname=F('point_of_contact__firstname')),
            ['Foobar Ltd.', 'Test GmbH'],
            lambda c: c.name,
            ordered=False
        )
        Company.objects.exclude(
            ceo__firstname=F("point_of_contact__firstname")
        ).update(name="foo")
        self.assertEqual(
            Company.objects.exclude(
                ceo__firstname=F('point_of_contact__firstname')
            ).get().name,
            "foo",
        )
        # Joined references are allowed in filters but not in update values.
        msg = "Joined field references are not permitted in this query"
        with self.assertRaisesMessage(FieldError, msg):
            Company.objects.exclude(
                ceo__firstname=F('point_of_contact__firstname')
            ).update(name=F('point_of_contact__lastname'))
    def test_object_update(self):
        self.gmbh.num_employees = F('num_employees') + 4
        self.gmbh.save()
        self.gmbh.refresh_from_db()
        self.assertEqual(self.gmbh.num_employees, 36)
    def test_new_object_save(self):
        test_co = Company(name=Lower(Value('UPPER')), num_employees=32, num_chairs=1, ceo=self.max)
        test_co.save()
        test_co.refresh_from_db()
        self.assertEqual(test_co.name, "upper")
    def test_new_object_create(self):
        test_co = Company.objects.create(name=Lower(Value('UPPER')), num_employees=32, num_chairs=1, ceo=self.max)
        test_co.refresh_from_db()
        self.assertEqual(test_co.name, "upper")
    def test_object_create_with_aggregate(self):
        msg = 'Aggregate functions are not allowed in this query (num_employees=Max(Value(1))).'
        with self.assertRaisesMessage(FieldError, msg):
            Company.objects.create(
                name='Company', num_employees=Max(Value(1)), num_chairs=1,
                ceo=Employee.objects.create(firstname="Just", lastname="Doit", salary=30),
            )
    def test_object_update_fk(self):
        test_gmbh = Company.objects.get(pk=self.gmbh.pk)
        msg = 'F(ceo)": "Company.point_of_contact" must be a "Employee" instance.'
        with self.assertRaisesMessage(ValueError, msg):
            test_gmbh.point_of_contact = F('ceo')
        test_gmbh.point_of_contact = self.gmbh.ceo
        test_gmbh.save()
        test_gmbh.name = F('ceo__lastname')
        msg = 'Joined field references are not permitted in this query'
        with self.assertRaisesMessage(FieldError, msg):
            test_gmbh.save()
    def test_update_inherited_field_value(self):
        msg = 'Joined field references are not permitted in this query'
        with self.assertRaisesMessage(FieldError, msg):
            RemoteEmployee.objects.update(adjusted_salary=F('salary') * 5)
    def test_object_update_unsaved_objects(self):
        # F expressions cannot be used to update attributes on objects which do
        # not yet exist in the database
        acme = Company(name='The Acme Widget Co.', num_employees=12, num_chairs=5, ceo=self.max)
        acme.num_employees = F("num_employees") + 16
        msg = (
            'Failed to insert expression "Col(expressions_company, '
            'expressions.Company.num_employees) + Value(16)" on '
            'expressions.Company.num_employees. F() expressions can only be '
            'used to update, not to insert.'
        )
        with self.assertRaisesMessage(ValueError, msg):
            acme.save()
        acme.num_employees = 12
        acme.name = Lower(F('name'))
        msg = (
            'Failed to insert expression "Lower(Col(expressions_company, '
            'expressions.Company.name))" on expressions.Company.name. F() '
            'expressions can only be used to update, not to insert.'
        )
        with self.assertRaisesMessage(ValueError, msg):
            acme.save()
    def test_ticket_11722_iexact_lookup(self):
        Employee.objects.create(firstname="John", lastname="Doe")
        test = Employee.objects.create(firstname="Test", lastname="test")
        queryset = Employee.objects.filter(firstname__iexact=F('lastname'))
        self.assertSequenceEqual(queryset, [test])
    def test_ticket_16731_startswith_lookup(self):
        Employee.objects.create(firstname="John", lastname="Doe")
        e2 = Employee.objects.create(firstname="Jack", lastname="Jackson")
        e3 = Employee.objects.create(firstname="Jack", lastname="jackson")
        self.assertSequenceEqual(
            Employee.objects.filter(lastname__startswith=F('firstname')),
            [e2, e3] if connection.features.has_case_insensitive_like else [e2]
        )
        qs = Employee.objects.filter(lastname__istartswith=F('firstname')).order_by('pk')
        self.assertSequenceEqual(qs, [e2, e3])
    def test_ticket_18375_join_reuse(self):
        # Reverse multijoin F() references and the lookup target the same join.
        # Pre #18375 the F() join was generated first and the lookup couldn't
        # reuse that join.
        qs = Employee.objects.filter(company_ceo_set__num_chairs=F('company_ceo_set__num_employees'))
        self.assertEqual(str(qs.query).count('JOIN'), 1)
    def test_ticket_18375_kwarg_ordering(self):
        # The next query was dict-randomization dependent - if the "gte=1"
        # was seen first, then the F() will reuse the join generated by the
        # gte lookup, if F() was seen first, then it generated a join the
        # other lookups could not reuse.
        qs = Employee.objects.filter(
            company_ceo_set__num_chairs=F('company_ceo_set__num_employees'),
            company_ceo_set__num_chairs__gte=1,
        )
        self.assertEqual(str(qs.query).count('JOIN'), 1)
    def test_ticket_18375_kwarg_ordering_2(self):
        # Another similar case for F() than above. Now we have the same join
        # in two filter kwargs, one in the lhs lookup, one in F. Here pre
        # #18375 the amount of joins generated was random if dict
        # randomization was enabled, that is the generated query dependent
        # on which clause was seen first.
        qs = Employee.objects.filter(
            company_ceo_set__num_employees=F('pk'),
            pk=F('company_ceo_set__num_employees')
        )
        self.assertEqual(str(qs.query).count('JOIN'), 1)
    def test_ticket_18375_chained_filters(self):
        # F() expressions do not reuse joins from previous filter.
        qs = Employee.objects.filter(
            company_ceo_set__num_employees=F('pk')
        ).filter(
            company_ceo_set__num_employees=F('company_ceo_set__num_employees')
        )
        self.assertEqual(str(qs.query).count('JOIN'), 2)
    def test_order_by_exists(self):
        mary = Employee.objects.create(firstname='Mary', lastname='Mustermann', salary=20)
        mustermanns_by_seniority = Employee.objects.filter(lastname='Mustermann').order_by(
            # Order by whether the employee is the CEO of a company
            Exists(Company.objects.filter(ceo=OuterRef('pk'))).desc()
        )
        self.assertSequenceEqual(mustermanns_by_seniority, [self.max, mary])
    def test_order_by_multiline_sql(self):
        raw_order_by = (
            RawSQL('''
                CASE WHEN num_employees > 1000
                     THEN num_chairs
                     ELSE 0 END
            ''', []).desc(),
            RawSQL('''
                CASE WHEN num_chairs > 1
                     THEN 1
                     ELSE 0 END
            ''', []).asc()
        )
        for qs in (
            Company.objects.all(),
            Company.objects.distinct(),
        ):
            with self.subTest(qs=qs):
                self.assertSequenceEqual(
                    qs.order_by(*raw_order_by),
                    [self.example_inc, self.gmbh, self.foobar_ltd],
                )
    def test_outerref(self):
        # An OuterRef-bearing queryset is only usable inside a subquery.
        inner = Company.objects.filter(point_of_contact=OuterRef('pk'))
        msg = (
            'This queryset contains a reference to an outer query and may only '
            'be used in a subquery.'
        )
        with self.assertRaisesMessage(ValueError, msg):
            inner.exists()
        outer = Employee.objects.annotate(is_point_of_contact=Exists(inner))
        self.assertIs(outer.exists(), True)
    def test_exist_single_field_output_field(self):
        queryset = Company.objects.values('pk')
        self.assertIsInstance(Exists(queryset).output_field, BooleanField)
    def test_subquery(self):
        Company.objects.filter(name='Example Inc.').update(
            point_of_contact=Employee.objects.get(firstname='Joe', lastname='Smith'),
            ceo=self.max,
        )
        Employee.objects.create(firstname='Bob', lastname='Brown', salary=40)
        qs = Employee.objects.annotate(
            is_point_of_contact=Exists(Company.objects.filter(point_of_contact=OuterRef('pk'))),
            is_not_point_of_contact=~Exists(Company.objects.filter(point_of_contact=OuterRef('pk'))),
            is_ceo_of_small_company=Exists(Company.objects.filter(num_employees__lt=200, ceo=OuterRef('pk'))),
            is_ceo_small_2=~~Exists(Company.objects.filter(num_employees__lt=200, ceo=OuterRef('pk'))),
            largest_company=Subquery(Company.objects.order_by('-num_employees').filter(
                Q(ceo=OuterRef('pk')) | Q(point_of_contact=OuterRef('pk'))
            ).values('name')[:1], output_field=CharField())
        ).values(
            'firstname',
            'is_point_of_contact',
            'is_not_point_of_contact',
            'is_ceo_of_small_company',
            'is_ceo_small_2',
            'largest_company',
        ).order_by('firstname')
        results = list(qs)
        # Could use Coalesce(subq, Value('')) instead except for the bug in
        # cx_Oracle mentioned in #23843.
        bob = results[0]
        if bob['largest_company'] == '' and connection.features.interprets_empty_strings_as_nulls:
            bob['largest_company'] = None
        self.assertEqual(results, [
            {
                'firstname': 'Bob',
                'is_point_of_contact': False,
                'is_not_point_of_contact': True,
                'is_ceo_of_small_company': False,
                'is_ceo_small_2': False,
                'largest_company': None,
            },
            {
                'firstname': 'Frank',
                'is_point_of_contact': False,
                'is_not_point_of_contact': True,
                'is_ceo_of_small_company': True,
                'is_ceo_small_2': True,
                'largest_company': 'Foobar Ltd.',
            },
            {
                'firstname': 'Joe',
                'is_point_of_contact': True,
                'is_not_point_of_contact': False,
                'is_ceo_of_small_company': False,
                'is_ceo_small_2': False,
                'largest_company': 'Example Inc.',
            },
            {
                'firstname': 'Max',
                'is_point_of_contact': False,
                'is_not_point_of_contact': True,
                'is_ceo_of_small_company': True,
                'is_ceo_small_2': True,
                'largest_company': 'Example Inc.'
            }
        ])
        # A less elegant way to write the same query: this uses a LEFT OUTER
        # JOIN and an IS NULL, inside a WHERE NOT IN which is probably less
        # efficient than EXISTS.
        self.assertCountEqual(
            qs.filter(is_point_of_contact=True).values('pk'),
            Employee.objects.exclude(company_point_of_contact_set=None).values('pk')
        )
    def test_subquery_eq(self):
        qs = Employee.objects.annotate(
            is_ceo=Exists(Company.objects.filter(ceo=OuterRef('pk'))),
            is_point_of_contact=Exists(
                Company.objects.filter(point_of_contact=OuterRef('pk')),
            ),
            small_company=Exists(
                queryset=Company.objects.filter(num_employees__lt=200),
            ),
        ).filter(is_ceo=True, is_point_of_contact=False, small_company=True)
        self.assertNotEqual(
            qs.query.annotations['is_ceo'],
            qs.query.annotations['is_point_of_contact'],
        )
        self.assertNotEqual(
            qs.query.annotations['is_ceo'],
            qs.query.annotations['small_company'],
        )
    def test_in_subquery(self):
        # This is a contrived test (and you really wouldn't write this query),
        # but it is a succinct way to test the __in=Subquery() construct.
        small_companies = Company.objects.filter(num_employees__lt=200).values('pk')
        subquery_test = Company.objects.filter(pk__in=Subquery(small_companies))
        self.assertCountEqual(subquery_test, [self.foobar_ltd, self.gmbh])
        subquery_test2 = Company.objects.filter(pk=Subquery(small_companies.filter(num_employees=3)))
        self.assertCountEqual(subquery_test2, [self.foobar_ltd])
    def test_uuid_pk_subquery(self):
        u = UUIDPK.objects.create()
        UUID.objects.create(uuid_fk=u)
        qs = UUIDPK.objects.filter(id__in=Subquery(UUID.objects.values('uuid_fk__id')))
        self.assertCountEqual(qs, [u])
    def test_nested_subquery(self):
        inner = Company.objects.filter(point_of_contact=OuterRef('pk'))
        outer = Employee.objects.annotate(is_point_of_contact=Exists(inner))
        contrived = Employee.objects.annotate(
            is_point_of_contact=Subquery(
                outer.filter(pk=OuterRef('pk')).values('is_point_of_contact'),
                output_field=BooleanField(),
            ),
        )
        self.assertCountEqual(contrived.values_list(), outer.values_list())
    def test_nested_subquery_join_outer_ref(self):
        inner = Employee.objects.filter(pk=OuterRef('ceo__pk')).values('pk')
        qs = Employee.objects.annotate(
            ceo_company=Subquery(
                Company.objects.filter(
                    ceo__in=inner,
                    ceo__pk=OuterRef('pk'),
                ).values('pk'),
            ),
        )
        self.assertSequenceEqual(
            qs.values_list('ceo_company', flat=True),
            [self.example_inc.pk, self.foobar_ltd.pk, self.gmbh.pk],
        )
    def test_nested_subquery_outer_ref_2(self):
        first = Time.objects.create(time='09:00')
        second = Time.objects.create(time='17:00')
        third = Time.objects.create(time='21:00')
        SimulationRun.objects.bulk_create([
            SimulationRun(start=first, end=second, midpoint='12:00'),
            SimulationRun(start=first, end=third, midpoint='15:00'),
            SimulationRun(start=second, end=first, midpoint='00:00'),
        ])
        inner = Time.objects.filter(time=OuterRef(OuterRef('time')), pk=OuterRef('start')).values('time')
        middle = SimulationRun.objects.annotate(other=Subquery(inner)).values('other')[:1]
        outer = Time.objects.annotate(other=Subquery(middle, output_field=TimeField()))
        # This is a contrived example. It exercises the double OuterRef form.
        self.assertCountEqual(outer, [first, second, third])
    def test_nested_subquery_outer_ref_with_autofield(self):
        first = Time.objects.create(time='09:00')
        second = Time.objects.create(time='17:00')
        SimulationRun.objects.create(start=first, end=second, midpoint='12:00')
        inner = SimulationRun.objects.filter(start=OuterRef(OuterRef('pk'))).values('start')
        middle = Time.objects.annotate(other=Subquery(inner)).values('other')[:1]
        outer = Time.objects.annotate(other=Subquery(middle, output_field=IntegerField()))
        # This exercises the double OuterRef form with AutoField as pk.
        self.assertCountEqual(outer, [first, second])
    def test_annotations_within_subquery(self):
        Company.objects.filter(num_employees__lt=50).update(ceo=Employee.objects.get(firstname='Frank'))
        inner = Company.objects.filter(
            ceo=OuterRef('pk')
        ).values('ceo').annotate(total_employees=Sum('num_employees')).values('total_employees')
        outer = Employee.objects.annotate(total_employees=Subquery(inner)).filter(salary__lte=Subquery(inner))
        self.assertSequenceEqual(
            outer.order_by('-total_employees').values('salary', 'total_employees'),
            [{'salary': 10, 'total_employees': 2300}, {'salary': 20, 'total_employees': 35}],
        )
    def test_subquery_references_joined_table_twice(self):
        inner = Company.objects.filter(
            num_chairs__gte=OuterRef('ceo__salary'),
            num_employees__gte=OuterRef('point_of_contact__salary'),
        )
        # Another contrived example (there is no need to have a subquery here)
        outer = Company.objects.filter(pk__in=Subquery(inner.values('pk')))
        self.assertFalse(outer.exists())
    def test_subquery_filter_by_aggregate(self):
        Number.objects.create(integer=1000, float=1.2)
        Employee.objects.create(salary=1000)
        qs = Number.objects.annotate(
            min_valuable_count=Subquery(
                Employee.objects.filter(
                    salary=OuterRef('integer'),
                ).annotate(cnt=Count('salary')).filter(cnt__gt=0).values('cnt')[:1]
            ),
        )
        self.assertEqual(qs.get().float, 1.2)
    def test_subquery_filter_by_lazy(self):
        # The SimpleLazyObject argument is resolved when the query is built.
        self.max.manager = Manager.objects.create(name='Manager')
        self.max.save()
        max_manager = SimpleLazyObject(
            lambda: Manager.objects.get(pk=self.max.manager.pk)
        )
        qs = Company.objects.annotate(
            ceo_manager=Subquery(
                Employee.objects.filter(
                    lastname=OuterRef('ceo__lastname'),
                ).values('manager'),
            ),
        ).filter(ceo_manager=max_manager)
        self.assertEqual(qs.get(), self.gmbh)
    def test_aggregate_subquery_annotation(self):
        with self.assertNumQueries(1) as ctx:
            aggregate = Company.objects.annotate(
                ceo_salary=Subquery(
                    Employee.objects.filter(
                        id=OuterRef('ceo_id'),
                    ).values('salary')
                ),
            ).aggregate(
                ceo_salary_gt_20=Count('pk', filter=Q(ceo_salary__gt=20)),
            )
        self.assertEqual(aggregate, {'ceo_salary_gt_20': 1})
        # Aggregation over a subquery annotation doesn't annotate the subquery
        # twice in the inner query.
        sql = ctx.captured_queries[0]['sql']
        self.assertLessEqual(sql.count('SELECT'), 3)
        # GROUP BY isn't required to aggregate over a query that doesn't
        # contain nested aggregates.
        self.assertNotIn('GROUP BY', sql)
    def test_explicit_output_field(self):
        # A Func without its own output_field inherits from its source.
        class FuncA(Func):
            output_field = CharField()
        class FuncB(Func):
            pass
        expr = FuncB(FuncA())
        self.assertEqual(expr.output_field, FuncA.output_field)
    def test_outerref_mixed_case_table_name(self):
        inner = Result.objects.filter(result_time__gte=OuterRef('experiment__assigned'))
        outer = Result.objects.filter(pk__in=Subquery(inner.values('pk')))
        self.assertFalse(outer.exists())
    def test_outerref_with_operator(self):
        inner = Company.objects.filter(num_employees=OuterRef('ceo__salary') + 2)
        outer = Company.objects.filter(pk__in=Subquery(inner.values('pk')))
        self.assertEqual(outer.get().name, 'Test GmbH')
    def test_nested_outerref_with_function(self):
        self.gmbh.point_of_contact = Employee.objects.get(lastname='Meyer')
        self.gmbh.save()
        inner = Employee.objects.filter(
            lastname__startswith=Left(OuterRef(OuterRef('lastname')), 1),
        )
        qs = Employee.objects.annotate(
            ceo_company=Subquery(
                Company.objects.filter(
                    point_of_contact__in=inner,
                    ceo__pk=OuterRef('pk'),
                ).values('name'),
            ),
        ).filter(ceo_company__isnull=False)
        self.assertEqual(qs.get().ceo_company, 'Test GmbH')
    def test_annotation_with_outerref(self):
        gmbh_salary = Company.objects.annotate(
            max_ceo_salary_raise=Subquery(
                Company.objects.annotate(
                    salary_raise=OuterRef('num_employees') + F('num_employees'),
                ).order_by('-salary_raise').values('salary_raise')[:1],
                output_field=IntegerField(),
            ),
        ).get(pk=self.gmbh.pk)
        self.assertEqual(gmbh_salary.max_ceo_salary_raise, 2332)
    def test_annotation_with_nested_outerref(self):
        self.gmbh.point_of_contact = Employee.objects.get(lastname='Meyer')
        self.gmbh.save()
        inner = Employee.objects.annotate(
            outer_lastname=OuterRef(OuterRef('lastname')),
        ).filter(lastname__startswith=Left('outer_lastname', 1))
        qs = Employee.objects.annotate(
            ceo_company=Subquery(
                Company.objects.filter(
                    point_of_contact__in=inner,
                    ceo__pk=OuterRef('pk'),
                ).values('name'),
            ),
        ).filter(ceo_company__isnull=False)
        self.assertEqual(qs.get().ceo_company, 'Test GmbH')
    def test_pickle_expression(self):
        expr = Value(1)
        expr.convert_value  # populate cached property
        self.assertEqual(pickle.loads(pickle.dumps(expr)), expr)
    def test_incorrect_field_in_F_expression(self):
        with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."):
            list(Employee.objects.filter(firstname=F('nope')))
    def test_incorrect_joined_field_in_F_expression(self):
        with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."):
            list(Company.objects.filter(ceo__pk=F('point_of_contact__nope')))
    def test_exists_in_filter(self):
        # An Exists() clause may be passed positionally to filter()/exclude().
        inner = Company.objects.filter(ceo=OuterRef('pk')).values('pk')
        qs1 = Employee.objects.filter(Exists(inner))
        qs2 = Employee.objects.annotate(found=Exists(inner)).filter(found=True)
        self.assertCountEqual(qs1, qs2)
        self.assertFalse(Employee.objects.exclude(Exists(inner)).exists())
        self.assertCountEqual(qs2, Employee.objects.exclude(~Exists(inner)))
    def test_subquery_in_filter(self):
        inner = Company.objects.filter(ceo=OuterRef('pk')).values('based_in_eu')
        self.assertSequenceEqual(
            Employee.objects.filter(Subquery(inner)),
            [self.foobar_ltd.ceo],
        )
    def test_subquery_group_by_outerref_in_filter(self):
        inner = Company.objects.annotate(
            employee=OuterRef('pk'),
        ).values('employee').annotate(
            min_num_chairs=Min('num_chairs'),
        ).values('ceo')
        self.assertIs(Employee.objects.filter(pk__in=Subquery(inner)).exists(), True)
    def test_case_in_filter_if_boolean_output_field(self):
        is_ceo = Company.objects.filter(ceo=OuterRef('pk'))
        is_poc = Company.objects.filter(point_of_contact=OuterRef('pk'))
        qs = Employee.objects.filter(
            Case(
                When(Exists(is_ceo), then=True),
                When(Exists(is_poc), then=True),
                default=False,
                output_field=BooleanField(),
            ),
        )
        self.assertCountEqual(qs, [self.example_inc.ceo, self.foobar_ltd.ceo, self.max])
    def test_boolean_expression_combined(self):
        # Exists() combines with Q() and other Exists() clauses via & and |.
        is_ceo = Company.objects.filter(ceo=OuterRef('pk'))
        is_poc = Company.objects.filter(point_of_contact=OuterRef('pk'))
        self.gmbh.point_of_contact = self.max
        self.gmbh.save()
        self.assertCountEqual(
            Employee.objects.filter(Exists(is_ceo) | Exists(is_poc)),
            [self.example_inc.ceo, self.foobar_ltd.ceo, self.max],
        )
        self.assertCountEqual(
            Employee.objects.filter(Exists(is_ceo) & Exists(is_poc)),
            [self.max],
        )
        self.assertCountEqual(
            Employee.objects.filter(Exists(is_ceo) & Q(salary__gte=30)),
            [self.max],
        )
        self.assertCountEqual(
            Employee.objects.filter(Exists(is_poc) | Q(salary__lt=15)),
            [self.example_inc.ceo, self.max],
        )
        self.assertCountEqual(
            Employee.objects.filter(Q(salary__gte=30) & Exists(is_ceo)),
            [self.max],
        )
        self.assertCountEqual(
            Employee.objects.filter(Q(salary__lt=15) | Exists(is_poc)),
            [self.example_inc.ceo, self.max],
        )
    def test_boolean_expression_combined_with_empty_Q(self):
        # Combining with an empty Q() must be a no-op in either order.
        is_poc = Company.objects.filter(point_of_contact=OuterRef('pk'))
        self.gmbh.point_of_contact = self.max
        self.gmbh.save()
        tests = [
            Exists(is_poc) & Q(),
            Q() & Exists(is_poc),
            Exists(is_poc) | Q(),
            Q() | Exists(is_poc),
        ]
        for conditions in tests:
            with self.subTest(conditions):
                self.assertCountEqual(Employee.objects.filter(conditions), [self.max])
class IterableLookupInnerExpressionsTests(TestCase):
    """F()/expression support inside iterable lookup values (__in, __range)."""

    @classmethod
    def setUpTestData(cls):
        ceo = Employee.objects.create(firstname='Just', lastname='Doit', salary=30)
        # MySQL requires that the values calculated for expressions don't pass
        # outside of the field's range, so it's inconvenient to use the values
        # in the more general tests.
        cls.c5020 = Company.objects.create(name='5020 Ltd', num_employees=50, num_chairs=20, ceo=ceo)
        cls.c5040 = Company.objects.create(name='5040 Ltd', num_employees=50, num_chairs=40, ceo=ceo)
        cls.c5050 = Company.objects.create(name='5050 Ltd', num_employees=50, num_chairs=50, ceo=ceo)
        cls.c5060 = Company.objects.create(name='5060 Ltd', num_employees=50, num_chairs=60, ceo=ceo)
        cls.c99300 = Company.objects.create(name='99300 Ltd', num_employees=99, num_chairs=300, ceo=ceo)

    def test_in_lookup_allows_F_expressions_and_expressions_for_integers(self):
        # __in lookups can use F() expressions for integers.
        queryset = Company.objects.filter(num_employees__in=([F('num_chairs') - 10]))
        self.assertSequenceEqual(queryset, [self.c5060])
        self.assertCountEqual(
            Company.objects.filter(num_employees__in=([F('num_chairs') - 10, F('num_chairs') + 10])),
            [self.c5040, self.c5060],
        )
        self.assertCountEqual(
            Company.objects.filter(
                num_employees__in=([F('num_chairs') - 10, F('num_chairs'), F('num_chairs') + 10])
            ),
            [self.c5040, self.c5050, self.c5060],
        )

    def test_expressions_in_lookups_join_choice(self):
        midpoint = datetime.time(13, 0)
        t1 = Time.objects.create(time=datetime.time(12, 0))
        t2 = Time.objects.create(time=datetime.time(14, 0))
        s1 = SimulationRun.objects.create(start=t1, end=t2, midpoint=midpoint)
        SimulationRun.objects.create(start=t1, end=None, midpoint=midpoint)
        SimulationRun.objects.create(start=None, end=t2, midpoint=midpoint)
        SimulationRun.objects.create(start=None, end=None, midpoint=midpoint)
        # filter() on an expression spanning the relation uses an INNER join...
        queryset = SimulationRun.objects.filter(midpoint__range=[F('start__time'), F('end__time')])
        self.assertSequenceEqual(queryset, [s1])
        for alias in queryset.query.alias_map.values():
            if isinstance(alias, Join):
                self.assertEqual(alias.join_type, constants.INNER)
        # ...while exclude() must use a LEFT OUTER join so NULL rows survive.
        queryset = SimulationRun.objects.exclude(midpoint__range=[F('start__time'), F('end__time')])
        self.assertQuerysetEqual(queryset, [], ordered=False)
        for alias in queryset.query.alias_map.values():
            if isinstance(alias, Join):
                self.assertEqual(alias.join_type, constants.LOUTER)

    def test_range_lookup_allows_F_expressions_and_expressions_for_integers(self):
        # Range lookups can use F() expressions for integers.
        # NOTE(review): querysets are lazy, so this line only exercises query
        # construction — it never hits the database.
        Company.objects.filter(num_employees__exact=F("num_chairs"))
        self.assertCountEqual(
            Company.objects.filter(num_employees__range=(F('num_chairs'), 100)),
            [self.c5020, self.c5040, self.c5050],
        )
        self.assertCountEqual(
            Company.objects.filter(num_employees__range=(F('num_chairs') - 10, F('num_chairs') + 10)),
            [self.c5040, self.c5050, self.c5060],
        )
        self.assertCountEqual(
            Company.objects.filter(num_employees__range=(F('num_chairs') - 10, 100)),
            [self.c5020, self.c5040, self.c5050, self.c5060],
        )
        self.assertCountEqual(
            Company.objects.filter(num_employees__range=(1, 100)),
            [self.c5020, self.c5040, self.c5050, self.c5060, self.c99300],
        )

    def test_range_lookup_namedtuple(self):
        # A namedtuple is accepted anywhere a (min, max) pair is.
        EmployeeRange = namedtuple('EmployeeRange', ['minimum', 'maximum'])
        qs = Company.objects.filter(
            num_employees__range=EmployeeRange(minimum=51, maximum=100),
        )
        self.assertSequenceEqual(qs, [self.c99300])

    @unittest.skipUnless(connection.vendor == 'sqlite',
                         "This defensive test only works on databases that don't validate parameter types")
    def test_complex_expressions_do_not_introduce_sql_injection_via_untrusted_string_inclusion(self):
        queryset = Company.objects.filter(name__in=[F('num_chairs') + '1)) OR ((1==1'])
        self.assertQuerysetEqual(queryset, [], ordered=False)

    def test_in_lookup_allows_F_expressions_and_expressions_for_datetimes(self):
        start = datetime.datetime(2016, 2, 3, 15, 0, 0)
        end = datetime.datetime(2016, 2, 5, 15, 0, 0)
        experiment_1 = Experiment.objects.create(
            name='Integrity testing',
            assigned=start.date(),
            start=start,
            end=end,
            completed=end.date(),
            estimated_time=end - start,
        )
        experiment_2 = Experiment.objects.create(
            name='Taste testing',
            assigned=start.date(),
            start=start,
            end=end,
            completed=end.date(),
            estimated_time=end - start,
        )
        r1 = Result.objects.create(
            experiment=experiment_1,
            result_time=datetime.datetime(2016, 2, 4, 15, 0, 0),
        )
        Result.objects.create(
            experiment=experiment_1,
            result_time=datetime.datetime(2016, 3, 10, 2, 0, 0),
        )
        Result.objects.create(
            experiment=experiment_2,
            result_time=datetime.datetime(2016, 1, 8, 5, 0, 0),
        )
        # Only r1 falls between its own experiment's start and end.
        # (The original test repeated these three lines verbatim; the exact
        # duplicate added no coverage and has been removed.)
        within_experiment_time = [F('experiment__start'), F('experiment__end')]
        queryset = Result.objects.filter(result_time__range=within_experiment_time)
        self.assertSequenceEqual(queryset, [r1])
class FTests(SimpleTestCase):
    """Unit tests for the F() column-reference expression."""

    def test_deepcopy(self):
        original = F("foo")
        clone = deepcopy(original)
        self.assertEqual(original.name, clone.name)

    def test_deconstruct(self):
        # F() must deconstruct to a migration-serializable triple.
        expression = F('name')
        path, args, kwargs = expression.deconstruct()
        self.assertEqual(path, 'django.db.models.expressions.F')
        self.assertEqual(args, (expression.name,))
        self.assertEqual(kwargs, {})

    def test_equal(self):
        # Equality is keyed on the referenced field name.
        self.assertEqual(F('name'), F('name'))
        self.assertNotEqual(F('name'), F('username'))

    def test_hash(self):
        # Equal F() objects hash alike, so they can be used as dict keys.
        mapping = {F('name'): 'Bob'}
        self.assertIn(F('name'), mapping)
        self.assertEqual(mapping[F('name')], 'Bob')

    def test_not_equal_Value(self):
        # F('name') and Value('name') are different expression types and must
        # not compare equal in either direction.
        field_ref = F('name')
        literal = Value('name')
        self.assertNotEqual(field_ref, literal)
        self.assertNotEqual(literal, field_ref)
class ExpressionsTests(TestCase):
    """F() reuse across querysets and LIKE wildcard escaping for F() lookups."""
    def test_F_reuse(self):
        # A single F() instance is stateless and safe to reuse across models.
        f = F('id')
        n = Number.objects.create(integer=-1)
        c = Company.objects.create(
            name="Example Inc.", num_employees=2300, num_chairs=5,
            ceo=Employee.objects.create(firstname="Joe", lastname="Smith")
        )
        c_qs = Company.objects.filter(id=f)
        self.assertEqual(c_qs.get(), c)
        # Reuse the same F-object for another queryset
        n_qs = Number.objects.filter(id=f)
        self.assertEqual(n_qs.get(), n)
        # The original query still works correctly
        self.assertEqual(c_qs.get(), c)
    def test_patterns_escape(self):
        # Special pattern characters (%, _, \) coming from a column via F()
        # must be escaped in contains/startswith/endswith lookups.
        Employee.objects.bulk_create([
            Employee(firstname="Johnny", lastname="%John"),
            Employee(firstname="Jean-Claude", lastname="Claud_"),
            Employee(firstname="Jean-Claude", lastname="Claude%"),
            Employee(firstname="Johnny", lastname="Joh\\n"),
            Employee(firstname="Johnny", lastname="_ohn"),
        ])
        claude = Employee.objects.create(firstname='Jean-Claude', lastname='Claude')
        john = Employee.objects.create(firstname='Johnny', lastname='John')
        john_sign = Employee.objects.create(firstname='%Joh\\nny', lastname='%Joh\\n')
        self.assertCountEqual(
            Employee.objects.filter(firstname__contains=F('lastname')),
            [john_sign, john, claude],
        )
        self.assertCountEqual(
            Employee.objects.filter(firstname__startswith=F('lastname')),
            [john_sign, john],
        )
        self.assertSequenceEqual(
            Employee.objects.filter(firstname__endswith=F('lastname')),
            [claude],
        )
    def test_insensitive_patterns_escape(self):
        # Same as above for the case-insensitive lookup variants.
        Employee.objects.bulk_create([
            Employee(firstname="Johnny", lastname="%john"),
            Employee(firstname="Jean-Claude", lastname="claud_"),
            Employee(firstname="Jean-Claude", lastname="claude%"),
            Employee(firstname="Johnny", lastname="joh\\n"),
            Employee(firstname="Johnny", lastname="_ohn"),
        ])
        claude = Employee.objects.create(firstname='Jean-Claude', lastname='claude')
        john = Employee.objects.create(firstname='Johnny', lastname='john')
        john_sign = Employee.objects.create(firstname='%Joh\\nny', lastname='%joh\\n')
        self.assertCountEqual(
            Employee.objects.filter(firstname__icontains=F('lastname')),
            [john_sign, john, claude],
        )
        self.assertCountEqual(
            Employee.objects.filter(firstname__istartswith=F('lastname')),
            [john_sign, john],
        )
        self.assertSequenceEqual(
            Employee.objects.filter(firstname__iendswith=F('lastname')),
            [claude],
        )
@isolate_apps('expressions')
class SimpleExpressionTests(SimpleTestCase):
    """Equality and hashing semantics of the base Expression class."""
    def test_equal(self):
        # Two bare expressions, or two with equivalent output_fields,
        # compare equal; different output_fields make them unequal.
        self.assertEqual(Expression(), Expression())
        self.assertEqual(
            Expression(IntegerField()),
            Expression(output_field=IntegerField())
        )
        # mock.ANY compares equal to anything; Expression.__eq__ must not
        # defeat that by returning NotImplemented-unfriendly results.
        self.assertEqual(Expression(IntegerField()), mock.ANY)
        self.assertNotEqual(
            Expression(IntegerField()),
            Expression(CharField())
        )
        # Fields bound to a model (hence carrying distinct names/models)
        # must make the wrapping expressions unequal.
        class TestModel(Model):
            field = IntegerField()
            other_field = IntegerField()
        self.assertNotEqual(
            Expression(TestModel._meta.get_field('field')),
            Expression(TestModel._meta.get_field('other_field')),
        )
    def test_hash(self):
        # Hash must be consistent with __eq__: equal expressions hash
        # equal, unequal ones (ideally) hash differently.
        self.assertEqual(hash(Expression()), hash(Expression()))
        self.assertEqual(
            hash(Expression(IntegerField())),
            hash(Expression(output_field=IntegerField()))
        )
        self.assertNotEqual(
            hash(Expression(IntegerField())),
            hash(Expression(CharField())),
        )
        class TestModel(Model):
            field = IntegerField()
            other_field = IntegerField()
        self.assertNotEqual(
            hash(Expression(TestModel._meta.get_field('field'))),
            hash(Expression(TestModel._meta.get_field('other_field'))),
        )
class ExpressionsNumericTests(TestCase):
    """F() arithmetic in UPDATE statements and filters on numeric columns."""
    @classmethod
    def setUpTestData(cls):
        Number(integer=-1).save()
        Number(integer=42).save()
        Number(integer=1337).save()
        # Mirror each row's integer into its float column server-side.
        Number.objects.update(float=F('integer'))
    def test_fill_with_value_from_same_object(self):
        self.assertQuerysetEqual(
            Number.objects.all(),
            [(-1, -1), (42, 42), (1337, 1337)],
            lambda n: (n.integer, round(n.float)),
            ordered=False
        )
    def test_increment_value(self):
        # Only the two positive rows match, so update() reports 2.
        self.assertEqual(Number.objects.filter(integer__gt=0).update(integer=F('integer') + 1), 2)
        self.assertQuerysetEqual(
            Number.objects.all(),
            [(-1, -1), (43, 42), (1338, 1337)],
            lambda n: (n.integer, round(n.float)),
            ordered=False
        )
    def test_filter_not_equals_other_field(self):
        self.assertEqual(Number.objects.filter(integer__gt=0).update(integer=F('integer') + 1), 2)
        # After the increment, float != integer for the updated rows only.
        self.assertQuerysetEqual(
            Number.objects.exclude(float=F('integer')),
            [(43, 42), (1338, 1337)],
            lambda n: (n.integer, round(n.float)),
            ordered=False
        )
    def test_complex_expressions(self):
        # Nested expression evaluated in the database:
        # float = integer + float * 2  =>  10 + 123.45 * 2 = 256.9
        n = Number.objects.create(integer=10, float=123.45)
        self.assertEqual(Number.objects.filter(pk=n.pk).update(
            float=F('integer') + F('float') * 2), 1)
        self.assertEqual(Number.objects.get(pk=n.pk).integer, 10)
        self.assertEqual(Number.objects.get(pk=n.pk).float, Approximate(256.900, places=3))
class ExpressionOperatorTests(TestCase):
    """
    Left-hand and right-hand arithmetic and bitwise operators applied to
    F() expressions (F('x') + 1 vs. 1 + F('x'), bitand/bitor/bitxor/shifts).
    """
    @classmethod
    def setUpTestData(cls):
        # One positive and one negative row to exercise sign handling.
        cls.n = Number.objects.create(integer=42, float=15.5)
        cls.n1 = Number.objects.create(integer=-42, float=-15.5)
    def test_lefthand_addition(self):
        # LH Addition of floats and integers
        Number.objects.filter(pk=self.n.pk).update(
            integer=F('integer') + 15,
            float=F('float') + 42.7
        )
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3))
    def test_lefthand_subtraction(self):
        # LH Subtraction of floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer') - 15, float=F('float') - 42.7)
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(-27.200, places=3))
    def test_lefthand_multiplication(self):
        # Multiplication of floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer') * 15, float=F('float') * 42.7)
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3))
    def test_lefthand_division(self):
        # LH Division of floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer') / 2, float=F('float') / 42.7)
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 21)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(0.363, places=3))
    def test_lefthand_modulo(self):
        # LH Modulo arithmetic on integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer') % 20)
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 2)
    def test_lefthand_bitwise_and(self):
        # LH Bitwise ands on integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer').bitand(56))
        Number.objects.filter(pk=self.n1.pk).update(integer=F('integer').bitand(-56))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 40)
        self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -64)
    def test_lefthand_bitwise_left_shift_operator(self):
        Number.objects.update(integer=F('integer').bitleftshift(2))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 168)
        self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -168)
    def test_lefthand_bitwise_right_shift_operator(self):
        Number.objects.update(integer=F('integer').bitrightshift(2))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 10)
        self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -11)
    def test_lefthand_bitwise_or(self):
        # LH Bitwise or on integers
        Number.objects.update(integer=F('integer').bitor(48))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 58)
        self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -10)
    def test_lefthand_transformed_field_bitwise_or(self):
        # bitor() applied to a transformed field (lastname__length == 10).
        Employee.objects.create(firstname='Max', lastname='Mustermann')
        with register_lookup(CharField, Length):
            qs = Employee.objects.annotate(bitor=F('lastname__length').bitor(48))
            self.assertEqual(qs.get().bitor, 58)
    def test_lefthand_power(self):
        # LH Power arithmetic operation on floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer') ** 2, float=F('float') ** 1.5)
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 1764)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(61.02, places=2))
    def test_lefthand_bitwise_xor(self):
        Number.objects.update(integer=F('integer').bitxor(48))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 26)
        self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -26)
    def test_lefthand_bitwise_xor_null(self):
        # XOR against a NULL column must propagate NULL, not error.
        employee = Employee.objects.create(firstname='John', lastname='Doe')
        Employee.objects.update(salary=F('salary').bitxor(48))
        employee.refresh_from_db()
        self.assertIsNone(employee.salary)
    @unittest.skipUnless(connection.vendor == 'oracle', "Oracle doesn't support bitwise XOR.")
    def test_lefthand_bitwise_xor_not_supported(self):
        msg = 'Bitwise XOR is not supported in Oracle.'
        with self.assertRaisesMessage(NotSupportedError, msg):
            Number.objects.update(integer=F('integer').bitxor(48))
    def test_right_hand_addition(self):
        # Right hand operators
        Number.objects.filter(pk=self.n.pk).update(integer=15 + F('integer'), float=42.7 + F('float'))
        # RH Addition of floats and integers
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3))
    def test_right_hand_subtraction(self):
        Number.objects.filter(pk=self.n.pk).update(integer=15 - F('integer'), float=42.7 - F('float'))
        # RH Subtraction of floats and integers
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, -27)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(27.200, places=3))
    def test_right_hand_multiplication(self):
        # RH Multiplication of floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=15 * F('integer'), float=42.7 * F('float'))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3))
    def test_right_hand_division(self):
        # RH Division of floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=640 / F('integer'), float=42.7 / F('float'))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 15)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(2.755, places=3))
    def test_right_hand_modulo(self):
        # RH Modulo arithmetic on integers
        Number.objects.filter(pk=self.n.pk).update(integer=69 % F('integer'))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27)
    def test_righthand_power(self):
        # RH Power arithmetic operation on floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=2 ** F('integer'), float=1.5 ** F('float'))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 4398046511104)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(536.308, places=3))
class FTimeDeltaTests(TestCase):
    """
    Date/datetime/time arithmetic with F() expressions and timedeltas:
    filtering, updating, and annotating with temporal additions and
    subtractions across the Experiment fixtures e0..e5.
    """
    @classmethod
    def setUpTestData(cls):
        # NOTE: the fixture ordering is load-bearing — the per-experiment
        # deltas, delays and days_long lists are built strictly increasing
        # so the filter tests below can slice self.expnames by index.
        cls.sday = sday = datetime.date(2010, 6, 25)
        cls.stime = stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000)
        midnight = datetime.time(0)
        delta0 = datetime.timedelta(0)
        delta1 = datetime.timedelta(microseconds=253000)
        delta2 = datetime.timedelta(seconds=44)
        delta3 = datetime.timedelta(hours=21, minutes=8)
        delta4 = datetime.timedelta(days=10)
        delta5 = datetime.timedelta(days=90)
        # Test data is set so that deltas and delays will be
        # strictly increasing.
        cls.deltas = []
        cls.delays = []
        cls.days_long = []
        # e0: started same day as assigned, zero duration
        end = stime + delta0
        cls.e0 = Experiment.objects.create(
            name='e0', assigned=sday, start=stime, end=end,
            completed=end.date(), estimated_time=delta0,
        )
        cls.deltas.append(delta0)
        cls.delays.append(cls.e0.start - datetime.datetime.combine(cls.e0.assigned, midnight))
        cls.days_long.append(cls.e0.completed - cls.e0.assigned)
        # e1: started one day after assigned, tiny duration, data
        # set so that end time has no fractional seconds, which
        # tests an edge case on sqlite.
        delay = datetime.timedelta(1)
        end = stime + delay + delta1
        e1 = Experiment.objects.create(
            name='e1', assigned=sday, start=stime + delay, end=end,
            completed=end.date(), estimated_time=delta1,
        )
        cls.deltas.append(delta1)
        cls.delays.append(e1.start - datetime.datetime.combine(e1.assigned, midnight))
        cls.days_long.append(e1.completed - e1.assigned)
        # e2: started three days after assigned, small duration
        end = stime + delta2
        e2 = Experiment.objects.create(
            name='e2', assigned=sday - datetime.timedelta(3), start=stime,
            end=end, completed=end.date(), estimated_time=datetime.timedelta(hours=1),
        )
        cls.deltas.append(delta2)
        cls.delays.append(e2.start - datetime.datetime.combine(e2.assigned, midnight))
        cls.days_long.append(e2.completed - e2.assigned)
        # e3: started four days after assigned, medium duration
        delay = datetime.timedelta(4)
        end = stime + delay + delta3
        e3 = Experiment.objects.create(
            name='e3', assigned=sday, start=stime + delay, end=end,
            completed=end.date(), estimated_time=delta3,
        )
        cls.deltas.append(delta3)
        cls.delays.append(e3.start - datetime.datetime.combine(e3.assigned, midnight))
        cls.days_long.append(e3.completed - e3.assigned)
        # e4: started 10 days after assignment, long duration
        end = stime + delta4
        e4 = Experiment.objects.create(
            name='e4', assigned=sday - datetime.timedelta(10), start=stime,
            end=end, completed=end.date(), estimated_time=delta4 - datetime.timedelta(1),
        )
        cls.deltas.append(delta4)
        cls.delays.append(e4.start - datetime.datetime.combine(e4.assigned, midnight))
        cls.days_long.append(e4.completed - e4.assigned)
        # e5: started a month after assignment, very long duration
        delay = datetime.timedelta(30)
        end = stime + delay + delta5
        e5 = Experiment.objects.create(
            name='e5', assigned=sday, start=stime + delay, end=end,
            completed=end.date(), estimated_time=delta5,
        )
        cls.deltas.append(delta5)
        cls.delays.append(e5.start - datetime.datetime.combine(e5.assigned, midnight))
        cls.days_long.append(e5.completed - e5.assigned)
        cls.expnames = [e.name for e in Experiment.objects.all()]
    def test_multiple_query_compilation(self):
        # Ticket #21643
        queryset = Experiment.objects.filter(end__lt=F('start') + datetime.timedelta(hours=1))
        q1 = str(queryset.query)
        q2 = str(queryset.query)
        self.assertEqual(q1, q2)
    def test_query_clone(self):
        # Ticket #21643 - Crash when compiling query more than once
        qs = Experiment.objects.filter(end__lt=F('start') + datetime.timedelta(hours=1))
        qs2 = qs.all()
        list(qs)
        list(qs2)
        # Intentionally no assert
    def test_delta_add(self):
        for i, delta in enumerate(self.deltas):
            test_set = [e.name for e in Experiment.objects.filter(end__lt=F('start') + delta)]
            self.assertEqual(test_set, self.expnames[:i])
            test_set = [e.name for e in Experiment.objects.filter(end__lt=delta + F('start'))]
            self.assertEqual(test_set, self.expnames[:i])
            test_set = [e.name for e in Experiment.objects.filter(end__lte=F('start') + delta)]
            self.assertEqual(test_set, self.expnames[:i + 1])
    def test_delta_subtract(self):
        for i, delta in enumerate(self.deltas):
            test_set = [e.name for e in Experiment.objects.filter(start__gt=F('end') - delta)]
            self.assertEqual(test_set, self.expnames[:i])
            test_set = [e.name for e in Experiment.objects.filter(start__gte=F('end') - delta)]
            self.assertEqual(test_set, self.expnames[:i + 1])
    def test_exclude(self):
        for i, delta in enumerate(self.deltas):
            test_set = [e.name for e in Experiment.objects.exclude(end__lt=F('start') + delta)]
            self.assertEqual(test_set, self.expnames[i:])
            test_set = [e.name for e in Experiment.objects.exclude(end__lte=F('start') + delta)]
            self.assertEqual(test_set, self.expnames[i + 1:])
    def test_date_comparison(self):
        for i, days in enumerate(self.days_long):
            test_set = [e.name for e in Experiment.objects.filter(completed__lt=F('assigned') + days)]
            self.assertEqual(test_set, self.expnames[:i])
            test_set = [e.name for e in Experiment.objects.filter(completed__lte=F('assigned') + days)]
            self.assertEqual(test_set, self.expnames[:i + 1])
    @skipUnlessDBFeature("supports_mixed_date_datetime_comparisons")
    def test_mixed_comparisons1(self):
        for i, delay in enumerate(self.delays):
            test_set = [e.name for e in Experiment.objects.filter(assigned__gt=F('start') - delay)]
            self.assertEqual(test_set, self.expnames[:i])
            test_set = [e.name for e in Experiment.objects.filter(assigned__gte=F('start') - delay)]
            self.assertEqual(test_set, self.expnames[:i + 1])
    def test_mixed_comparisons2(self):
        for i, delay in enumerate(self.delays):
            # Truncate the delay to whole days so the comparison stays at
            # date granularity for backends without mixed comparisons.
            delay = datetime.timedelta(delay.days)
            test_set = [e.name for e in Experiment.objects.filter(start__lt=F('assigned') + delay)]
            self.assertEqual(test_set, self.expnames[:i])
            test_set = [
                e.name for e in Experiment.objects.filter(start__lte=F('assigned') + delay + datetime.timedelta(1))
            ]
            self.assertEqual(test_set, self.expnames[:i + 1])
    def test_delta_update(self):
        # Shifting start and end by the same delta preserves durations.
        for delta in self.deltas:
            exps = Experiment.objects.all()
            expected_durations = [e.duration() for e in exps]
            expected_starts = [e.start + delta for e in exps]
            expected_ends = [e.end + delta for e in exps]
            Experiment.objects.update(start=F('start') + delta, end=F('end') + delta)
            exps = Experiment.objects.all()
            new_starts = [e.start for e in exps]
            new_ends = [e.end for e in exps]
            new_durations = [e.duration() for e in exps]
            self.assertEqual(expected_starts, new_starts)
            self.assertEqual(expected_ends, new_ends)
            self.assertEqual(expected_durations, new_durations)
    def test_invalid_operator(self):
        # Multiplying a datetime by a timedelta is undefined SQL.
        with self.assertRaises(DatabaseError):
            list(Experiment.objects.filter(start=F('start') * datetime.timedelta(0)))
    def test_durationfield_add(self):
        zeros = [e.name for e in Experiment.objects.filter(start=F('start') + F('estimated_time'))]
        self.assertEqual(zeros, ['e0'])
        end_less = [e.name for e in Experiment.objects.filter(end__lt=F('start') + F('estimated_time'))]
        self.assertEqual(end_less, ['e2'])
        delta_math = [
            e.name for e in
            Experiment.objects.filter(end__gte=F('start') + F('estimated_time') + datetime.timedelta(hours=1))
        ]
        self.assertEqual(delta_math, ['e4'])
        # Adding a NULL duration yields NULL, not an error.
        queryset = Experiment.objects.annotate(shifted=ExpressionWrapper(
            F('start') + Value(None, output_field=DurationField()),
            output_field=DateTimeField(),
        ))
        self.assertIsNone(queryset.first().shifted)
    def test_duration_expressions(self):
        for delta in self.deltas:
            qs = Experiment.objects.annotate(duration=F('estimated_time') + delta)
            for obj in qs:
                self.assertEqual(obj.duration, obj.estimated_time + delta)
    @skipUnlessDBFeature('supports_temporal_subtraction')
    def test_date_subtraction(self):
        queryset = Experiment.objects.annotate(
            completion_duration=F('completed') - F('assigned'),
        )
        at_least_5_days = {e.name for e in queryset.filter(completion_duration__gte=datetime.timedelta(days=5))}
        self.assertEqual(at_least_5_days, {'e3', 'e4', 'e5'})
        at_least_120_days = {e.name for e in queryset.filter(completion_duration__gte=datetime.timedelta(days=120))}
        self.assertEqual(at_least_120_days, {'e5'})
        less_than_5_days = {e.name for e in queryset.filter(completion_duration__lt=datetime.timedelta(days=5))}
        self.assertEqual(less_than_5_days, {'e0', 'e1', 'e2'})
        queryset = Experiment.objects.annotate(
            difference=F('completed') - Value(None, output_field=DateField()),
        )
        self.assertIsNone(queryset.first().difference)
        queryset = Experiment.objects.annotate(shifted=ExpressionWrapper(
            F('completed') - Value(None, output_field=DurationField()),
            output_field=DateField(),
        ))
        self.assertIsNone(queryset.first().shifted)
    @skipUnlessDBFeature('supports_temporal_subtraction')
    def test_date_subquery_subtraction(self):
        subquery = Experiment.objects.filter(pk=OuterRef('pk')).values('completed')
        queryset = Experiment.objects.annotate(
            difference=subquery - F('completed'),
        ).filter(difference=datetime.timedelta())
        self.assertTrue(queryset.exists())
    @skipUnlessDBFeature('supports_temporal_subtraction')
    def test_date_case_subtraction(self):
        queryset = Experiment.objects.annotate(
            date_case=Case(
                When(Q(name='e0'), then=F('completed')),
                output_field=DateField(),
            ),
            completed_value=Value(
                self.e0.completed,
                output_field=DateField(),
            ),
            difference=F('date_case') - F('completed_value'),
        ).filter(difference=datetime.timedelta())
        self.assertEqual(queryset.get(), self.e0)
    @skipUnlessDBFeature('supports_temporal_subtraction')
    def test_time_subtraction(self):
        Time.objects.create(time=datetime.time(12, 30, 15, 2345))
        queryset = Time.objects.annotate(
            difference=F('time') - Value(datetime.time(11, 15, 0)),
        )
        self.assertEqual(
            queryset.get().difference,
            datetime.timedelta(hours=1, minutes=15, seconds=15, microseconds=2345)
        )
        queryset = Time.objects.annotate(
            difference=F('time') - Value(None, output_field=TimeField()),
        )
        self.assertIsNone(queryset.first().difference)
        queryset = Time.objects.annotate(shifted=ExpressionWrapper(
            F('time') - Value(None, output_field=DurationField()),
            output_field=TimeField(),
        ))
        self.assertIsNone(queryset.first().shifted)
    @skipUnlessDBFeature('supports_temporal_subtraction')
    def test_time_subquery_subtraction(self):
        Time.objects.create(time=datetime.time(12, 30, 15, 2345))
        subquery = Time.objects.filter(pk=OuterRef('pk')).values('time')
        queryset = Time.objects.annotate(
            difference=subquery - F('time'),
        ).filter(difference=datetime.timedelta())
        self.assertTrue(queryset.exists())
    @skipUnlessDBFeature('supports_temporal_subtraction')
    def test_datetime_subtraction(self):
        under_estimate = [
            e.name for e in Experiment.objects.filter(estimated_time__gt=F('end') - F('start'))
        ]
        self.assertEqual(under_estimate, ['e2'])
        over_estimate = [
            e.name for e in Experiment.objects.filter(estimated_time__lt=F('end') - F('start'))
        ]
        self.assertEqual(over_estimate, ['e4'])
        queryset = Experiment.objects.annotate(
            difference=F('start') - Value(None, output_field=DateTimeField()),
        )
        self.assertIsNone(queryset.first().difference)
        queryset = Experiment.objects.annotate(shifted=ExpressionWrapper(
            F('start') - Value(None, output_field=DurationField()),
            output_field=DateTimeField(),
        ))
        self.assertIsNone(queryset.first().shifted)
    @skipUnlessDBFeature('supports_temporal_subtraction')
    def test_datetime_subquery_subtraction(self):
        subquery = Experiment.objects.filter(pk=OuterRef('pk')).values('start')
        queryset = Experiment.objects.annotate(
            difference=subquery - F('start'),
        ).filter(difference=datetime.timedelta())
        self.assertTrue(queryset.exists())
    @skipUnlessDBFeature('supports_temporal_subtraction')
    def test_datetime_subtraction_microseconds(self):
        delta = datetime.timedelta(microseconds=8999999999999999)
        Experiment.objects.update(end=F('start') + delta)
        qs = Experiment.objects.annotate(delta=F('end') - F('start'))
        for e in qs:
            self.assertEqual(e.delta, delta)
    def test_duration_with_datetime(self):
        # Exclude e1 which has very high precision so we can test this on all
        # backends regardless of whether or not it supports
        # microsecond_precision.
        over_estimate = Experiment.objects.exclude(name='e1').filter(
            completed__gt=self.stime + F('estimated_time'),
        ).order_by('name')
        self.assertQuerysetEqual(over_estimate, ['e3', 'e4', 'e5'], lambda e: e.name)
    def test_duration_with_datetime_microseconds(self):
        delta = datetime.timedelta(microseconds=8999999999999999)
        qs = Experiment.objects.annotate(dt=ExpressionWrapper(
            F('start') + delta,
            output_field=DateTimeField(),
        ))
        for e in qs:
            self.assertEqual(e.dt, e.start + delta)
    def test_date_minus_duration(self):
        more_than_4_days = Experiment.objects.filter(
            assigned__lt=F('completed') - Value(datetime.timedelta(days=4))
        )
        self.assertQuerysetEqual(more_than_4_days, ['e3', 'e4', 'e5'], lambda e: e.name)
    def test_negative_timedelta_update(self):
        # subtract 30 seconds, 30 minutes, 2 hours and 2 days
        experiments = Experiment.objects.filter(name='e0').annotate(
            start_sub_seconds=F('start') + datetime.timedelta(seconds=-30),
        ).annotate(
            start_sub_minutes=F('start_sub_seconds') + datetime.timedelta(minutes=-30),
        ).annotate(
            start_sub_hours=F('start_sub_minutes') + datetime.timedelta(hours=-2),
        ).annotate(
            new_start=F('start_sub_hours') + datetime.timedelta(days=-2),
        )
        expected_start = datetime.datetime(2010, 6, 23, 9, 45, 0)
        # subtract 30 microseconds
        experiments = experiments.annotate(new_start=F('new_start') + datetime.timedelta(microseconds=-30))
        expected_start += datetime.timedelta(microseconds=+746970)
        experiments.update(start=F('new_start'))
        e0 = Experiment.objects.get(name='e0')
        self.assertEqual(e0.start, expected_start)
class ValueTests(TestCase):
    """
    Behavior of the Value() expression: use in UPDATEs, deconstruction for
    migrations, equality/hashing, SQL compilation, and automatic
    output_field resolution from the wrapped Python value.
    """
    def test_update_TimeField_using_Value(self):
        Time.objects.create()
        Time.objects.update(time=Value(datetime.time(1), output_field=TimeField()))
        self.assertEqual(Time.objects.get().time, datetime.time(1))
    def test_update_UUIDField_using_Value(self):
        UUID.objects.create()
        UUID.objects.update(uuid=Value(uuid.UUID('12345678901234567890123456789012'), output_field=UUIDField()))
        self.assertEqual(UUID.objects.get().uuid, uuid.UUID('12345678901234567890123456789012'))
    def test_deconstruct(self):
        value = Value('name')
        path, args, kwargs = value.deconstruct()
        self.assertEqual(path, 'django.db.models.expressions.Value')
        self.assertEqual(args, (value.value,))
        self.assertEqual(kwargs, {})
    def test_deconstruct_output_field(self):
        # An explicit output_field must round-trip through deconstruct().
        value = Value('name', output_field=CharField())
        path, args, kwargs = value.deconstruct()
        self.assertEqual(path, 'django.db.models.expressions.Value')
        self.assertEqual(args, (value.value,))
        self.assertEqual(len(kwargs), 1)
        self.assertEqual(kwargs['output_field'].deconstruct(), CharField().deconstruct())
    def test_equal(self):
        value = Value('name')
        self.assertEqual(value, Value('name'))
        self.assertNotEqual(value, Value('username'))
    def test_hash(self):
        d = {Value('name'): 'Bob'}
        self.assertIn(Value('name'), d)
        self.assertEqual(d[Value('name')], 'Bob')
    def test_equal_output_field(self):
        value = Value('name', output_field=CharField())
        same_value = Value('name', output_field=CharField())
        other_value = Value('name', output_field=TimeField())
        no_output_field = Value('name')
        self.assertEqual(value, same_value)
        self.assertNotEqual(value, other_value)
        self.assertNotEqual(value, no_output_field)
    def test_raise_empty_expressionlist(self):
        msg = 'ExpressionList requires at least one expression'
        with self.assertRaisesMessage(ValueError, msg):
            ExpressionList()
    def test_compile_unresolved(self):
        # This test might need to be revisited later on if #25425 is enforced.
        compiler = Time.objects.all().query.get_compiler(connection=connection)
        value = Value('foo')
        self.assertEqual(value.as_sql(compiler, connection), ('%s', ['foo']))
        value = Value('foo', output_field=CharField())
        self.assertEqual(value.as_sql(compiler, connection), ('%s', ['foo']))
    def test_resolve_output_field(self):
        # Each Python value type maps to a specific model field class.
        value_types = [
            ('str', CharField),
            (True, BooleanField),
            (42, IntegerField),
            (3.14, FloatField),
            (datetime.date(2019, 5, 15), DateField),
            (datetime.datetime(2019, 5, 15), DateTimeField),
            (datetime.time(3, 16), TimeField),
            (datetime.timedelta(1), DurationField),
            (Decimal('3.14'), DecimalField),
            (b'', BinaryField),
            (uuid.uuid4(), UUIDField),
        ]
        # Fixed misspelled loop variable ('ouput_field_type').
        for value, output_field_type in value_types:
            with self.subTest(type=type(value)):
                expr = Value(value)
                self.assertIsInstance(expr.output_field, output_field_type)
    def test_resolve_output_field_failure(self):
        msg = 'Cannot resolve expression type, unknown output_field'
        with self.assertRaisesMessage(FieldError, msg):
            Value(object()).output_field
class ExistsTests(TestCase):
    """SQL-level optimizations applied to Exists() subqueries."""
    def test_optimizations(self):
        with CaptureQueriesContext(connection) as context:
            list(Experiment.objects.values(exists=Exists(
                Experiment.objects.order_by('pk'),
            )).order_by())
        captured_queries = context.captured_queries
        self.assertEqual(len(captured_queries), 1)
        captured_sql = captured_queries[0]['sql']
        # The EXISTS subquery must not select real columns,
        self.assertNotIn(
            connection.ops.quote_name(Experiment._meta.pk.column),
            captured_sql,
        )
        # must be limited to a single row,
        self.assertIn(
            connection.ops.limit_offset_sql(None, 1),
            captured_sql,
        )
        # and must drop the pointless ORDER BY clause.
        self.assertNotIn('ORDER BY', captured_sql)
class FieldTransformTests(TestCase):
    """Field transforms (e.g. __month, __date__month) in aggregates and values()."""
    @classmethod
    def setUpTestData(cls):
        cls.sday = sday = datetime.date(2010, 6, 25)
        cls.stime = stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000)
        cls.ex1 = Experiment.objects.create(
            name='Experiment 1',
            assigned=sday,
            completed=sday + datetime.timedelta(2),
            estimated_time=datetime.timedelta(2),
            start=stime,
            end=stime + datetime.timedelta(2),
        )
    def test_month_aggregation(self):
        # Aggregating over a transform of a field is allowed.
        self.assertEqual(
            Experiment.objects.aggregate(month_count=Count('assigned__month')),
            {'month_count': 1}
        )
    def test_transform_in_values(self):
        self.assertSequenceEqual(
            Experiment.objects.values('assigned__month'),
            [{'assigned__month': 6}],
        )
    def test_multiple_transforms_in_values(self):
        # Chained transforms (date extraction, then month) also work.
        self.assertSequenceEqual(
            Experiment.objects.values('end__date__month'),
            [{'end__date__month': 6}],
        )
class ReprTests(SimpleTestCase):
    """Exact repr() output of expression, function, and aggregate nodes."""
    def test_expressions(self):
        self.assertEqual(
            repr(Case(When(a=1))),
            "<Case: CASE WHEN <Q: (AND: ('a', 1))> THEN Value(None), ELSE Value(None)>"
        )
        self.assertEqual(
            repr(When(Q(age__gte=18), then=Value('legal'))),
            "<When: WHEN <Q: (AND: ('age__gte', 18))> THEN Value(legal)>"
        )
        self.assertEqual(repr(Col('alias', 'field')), "Col(alias, field)")
        self.assertEqual(repr(F('published')), "F(published)")
        self.assertEqual(repr(F('cost') + F('tax')), "<CombinedExpression: F(cost) + F(tax)>")
        self.assertEqual(
            repr(ExpressionWrapper(F('cost') + F('tax'), IntegerField())),
            "ExpressionWrapper(F(cost) + F(tax))"
        )
        self.assertEqual(repr(Func('published', function='TO_CHAR')), "Func(F(published), function=TO_CHAR)")
        self.assertEqual(repr(OrderBy(Value(1))), 'OrderBy(Value(1), descending=False)')
        self.assertEqual(repr(RawSQL('table.col', [])), "RawSQL(table.col, [])")
        self.assertEqual(repr(Ref('sum_cost', Sum('cost'))), "Ref(sum_cost, Sum(F(cost)))")
        self.assertEqual(repr(Value(1)), "Value(1)")
        self.assertEqual(
            repr(ExpressionList(F('col'), F('anothercol'))),
            'ExpressionList(F(col), F(anothercol))'
        )
        self.assertEqual(
            repr(ExpressionList(OrderBy(F('col'), descending=False))),
            'ExpressionList(OrderBy(F(col), descending=False))'
        )
    def test_functions(self):
        self.assertEqual(repr(Coalesce('a', 'b')), "Coalesce(F(a), F(b))")
        self.assertEqual(repr(Concat('a', 'b')), "Concat(ConcatPair(F(a), F(b)))")
        self.assertEqual(repr(Length('a')), "Length(F(a))")
        self.assertEqual(repr(Lower('a')), "Lower(F(a))")
        self.assertEqual(repr(Substr('a', 1, 3)), "Substr(F(a), Value(1), Value(3))")
        self.assertEqual(repr(Upper('a')), "Upper(F(a))")
    def test_aggregates(self):
        self.assertEqual(repr(Avg('a')), "Avg(F(a))")
        self.assertEqual(repr(Count('a')), "Count(F(a))")
        # Count('*') keeps the literal star rather than wrapping it in F().
        self.assertEqual(repr(Count('*')), "Count('*')")
        self.assertEqual(repr(Max('a')), "Max(F(a))")
        self.assertEqual(repr(Min('a')), "Min(F(a))")
        self.assertEqual(repr(StdDev('a')), "StdDev(F(a), sample=False)")
        self.assertEqual(repr(Sum('a')), "Sum(F(a))")
        self.assertEqual(repr(Variance('a', sample=True)), "Variance(F(a), sample=True)")
    def test_distinct_aggregates(self):
        self.assertEqual(repr(Count('a', distinct=True)), "Count(F(a), distinct=True)")
        self.assertEqual(repr(Count('*', distinct=True)), "Count('*', distinct=True)")
    def test_filtered_aggregates(self):
        # filter=Q(...) must appear in the repr of every aggregate.
        filter = Q(a=1)
        self.assertEqual(repr(Avg('a', filter=filter)), "Avg(F(a), filter=(AND: ('a', 1)))")
        self.assertEqual(repr(Count('a', filter=filter)), "Count(F(a), filter=(AND: ('a', 1)))")
        self.assertEqual(repr(Max('a', filter=filter)), "Max(F(a), filter=(AND: ('a', 1)))")
        self.assertEqual(repr(Min('a', filter=filter)), "Min(F(a), filter=(AND: ('a', 1)))")
        self.assertEqual(repr(StdDev('a', filter=filter)), "StdDev(F(a), filter=(AND: ('a', 1)), sample=False)")
        self.assertEqual(repr(Sum('a', filter=filter)), "Sum(F(a), filter=(AND: ('a', 1)))")
        self.assertEqual(
            repr(Variance('a', sample=True, filter=filter)),
            "Variance(F(a), filter=(AND: ('a', 1)), sample=True)"
        )
        self.assertEqual(
            repr(Count('a', filter=filter, distinct=True)), "Count(F(a), distinct=True, filter=(AND: ('a', 1)))"
        )
class CombinableTests(SimpleTestCase):
    """
    Combinable rejects the raw & and | operators (callers must use
    .bitand()/.bitor()) while supporting unary negation.
    """
    bitwise_msg = 'Use .bitand() and .bitor() for bitwise logical operations.'
    def test_negation(self):
        # Unary minus is defined as multiplication by -1.
        combinable = Combinable()
        self.assertEqual(-combinable, combinable * -1)
    def test_and(self):
        with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg):
            Combinable() & Combinable()
    def test_or(self):
        with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg):
            Combinable() | Combinable()
    def test_reversed_and(self):
        # The reflected operand path (object.__and__ -> Combinable.__rand__)
        # must raise the same error.
        with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg):
            object() & Combinable()
    def test_reversed_or(self):
        with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg):
            object() | Combinable()
class CombinedExpressionTests(SimpleTestCase):
    """Numeric output_field coercion rules for CombinedExpression."""
    def test_resolve_output_field(self):
        # (lhs field, rhs field, expected combined field) for every
        # arithmetic connector: auto+int -> int, int+decimal -> decimal,
        # int+float -> float, symmetric in operand order.
        cases = [
            (IntegerField, AutoField, IntegerField),
            (AutoField, IntegerField, IntegerField),
            (IntegerField, DecimalField, DecimalField),
            (DecimalField, IntegerField, DecimalField),
            (IntegerField, FloatField, FloatField),
            (FloatField, IntegerField, FloatField),
        ]
        for lhs_field, rhs_field, expected in cases:
            for op in (Combinable.ADD, Combinable.SUB, Combinable.MUL, Combinable.DIV):
                with self.subTest(lhs=lhs_field, connector=op, rhs=rhs_field, combined=expected):
                    combined_expr = CombinedExpression(
                        Expression(lhs_field()),
                        op,
                        Expression(rhs_field()),
                    )
                    self.assertIsInstance(combined_expr.output_field, expected)
class ExpressionWrapperTests(SimpleTestCase):
    """GROUP BY column extraction for ExpressionWrapper."""
    def test_empty_group_by(self):
        # A constant-only wrapper contributes nothing to GROUP BY.
        wrapper = ExpressionWrapper(Value(3), output_field=IntegerField())
        self.assertEqual(wrapper.get_group_by_cols(alias=None), [])
    def test_non_empty_group_by(self):
        # A wrapped non-constant expression is grouped by the inner
        # expression, which inherits the wrapper's output_field.
        inner = Value('f')
        inner.output_field = None
        wrapper = ExpressionWrapper(Lower(inner), output_field=IntegerField())
        group_by_cols = wrapper.get_group_by_cols(alias=None)
        self.assertEqual(group_by_cols, [wrapper.expression])
        self.assertEqual(group_by_cols[0].output_field, wrapper.output_field)
| true | true |
1c32ab08df1cc376afa30d41b2722e1062722afb | 1,248 | py | Python | ampel/aux/SimpleTagFilter.py | mafn/Ampel-core | 744acbf36f0a2ceae7230ceab1350236c1501b57 | [
"BSD-3-Clause"
] | null | null | null | ampel/aux/SimpleTagFilter.py | mafn/Ampel-core | 744acbf36f0a2ceae7230ceab1350236c1501b57 | [
"BSD-3-Clause"
] | null | null | null | ampel/aux/SimpleTagFilter.py | mafn/Ampel-core | 744acbf36f0a2ceae7230ceab1350236c1501b57 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: Ampel-core/ampel/aux/SimpleTagFilter.py
# License: BSD-3-Clause
# Author: Jakob van Santen <jakob.van.santen@desy.de>
# Date: 06.12.2021
# Last Modified Date: 06.12.2021
# Last Modified By: Jakob van Santen <jakob.van.santen@desy.de>
from ampel.types import Tag
from ampel.abstract.AbsApplicable import AbsApplicable
from ampel.content.DataPoint import DataPoint
class SimpleTagFilter(AbsApplicable):
    """
    Keep only datapoints whose tags satisfy both conditions:
    at least one tag from `require` (if set) and no tag from `forbid`.
    """
    #: Accept DataPoints with any of these tags
    require: None | list[Tag] = None
    #: Reject Datapoints with any of these tags
    forbid: None | list[Tag] = None
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Pre-build sets once so each datapoint check is O(len(tags)).
        self._allow = set(self.require) if self.require is not None else None
        self._deny = set(self.forbid) if self.forbid is not None else None
    def _accept(self, dp: DataPoint):
        tags = set(dp.get("tag", []))
        if self._allow is not None and not (tags & self._allow):
            return False
        return self._deny is None or not (tags & self._deny)
    def apply(self, arg: list[DataPoint]) -> list[DataPoint]:
        return list(filter(self._accept, arg))
| 35.657143 | 75 | 0.647436 |
from ampel.types import Tag
from ampel.abstract.AbsApplicable import AbsApplicable
from ampel.content.DataPoint import DataPoint
class SimpleTagFilter(AbsApplicable):
    """Filter datapoints by their ``tag`` field: keep a datapoint when it
    carries at least one required tag (if any are configured) and none of
    the forbidden tags."""

    # Accept DataPoints with any of these tags (None -> no requirement)
    require: None | list[Tag] = None
    # Reject DataPoints with any of these tags (None -> nothing forbidden)
    forbid: None | list[Tag] = None

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Pre-build sets once so _accept() only does set intersections.
        self._allow = None if self.require is None else set(self.require)
        self._deny = None if self.forbid is None else set(self.forbid)

    def _accept(self, dp: DataPoint):
        # Truthy when dp matches the require/forbid rules; only its
        # truthiness is used (by apply()).
        tag = set(dp.get("tag", []))
        return (self._allow is None or tag.intersection(self._allow)) and (
            self._deny is None or not tag.intersection(self._deny)
        )

    def apply(self, arg: list[DataPoint]) -> list[DataPoint]:
        """Return only the datapoints that pass the tag filter."""
        return [el for el in arg if self._accept(el)]
| true | true |
1c32ab8dde7a37d392ee659a6ea689faa12727e2 | 39 | py | Python | twistedchecker/test/__init__.py | twisted/twistedchecker | 4a69f3f7d80f2f42d915bc128d4257a2e15804e4 | [
"MIT"
] | 10 | 2015-01-06T14:59:50.000Z | 2021-01-10T19:06:16.000Z | twistedchecker/test/__init__.py | twisted/twistedchecker | 4a69f3f7d80f2f42d915bc128d4257a2e15804e4 | [
"MIT"
] | 76 | 2015-02-07T13:00:22.000Z | 2020-04-14T02:10:37.000Z | twistedchecker/test/__init__.py | twisted/twistedchecker | 4a69f3f7d80f2f42d915bc128d4257a2e15804e4 | [
"MIT"
] | 11 | 2015-05-24T15:55:49.000Z | 2017-10-15T11:34:35.000Z | """
Unit Tests for TwistedChecker.
"""
| 9.75 | 30 | 0.666667 | true | true | |
1c32abaa628bec8a0c59f08ddbef8035e36be06a | 517 | py | Python | tests/test_ipfs_add.py | mathiasfrohlich/blockfrost-python | 4b391a711683732c47fa9183532f14c189b75124 | [
"Apache-2.0"
] | 1 | 2022-01-17T14:26:05.000Z | 2022-01-17T14:26:05.000Z | tests/test_ipfs_add.py | mathiasfrohlich/blockfrost-python | 4b391a711683732c47fa9183532f14c189b75124 | [
"Apache-2.0"
] | null | null | null | tests/test_ipfs_add.py | mathiasfrohlich/blockfrost-python | 4b391a711683732c47fa9183532f14c189b75124 | [
"Apache-2.0"
] | null | null | null | from blockfrost import BlockFrostIPFS, ApiError
from blockfrost.ipfs.add import IPFSObjectResponse
file_path = "README.md"
def test_add(requests_mock):
    """ipfs.add() should surface the ipfs_hash reported by the service."""
    client = BlockFrostIPFS()
    # Canned response the mocked POST /ipfs/add endpoint will return.
    payload = {
        "name": file_path,
        "ipfs_hash": "QmZbHqiCxKEVX7QfijzJTkZiSi3WEVTcvANgNAWzDYgZDr",
        "size": 125297
    }
    requests_mock.post(f"{client.url}/ipfs/add", json=payload)
    expected = IPFSObjectResponse(**payload)
    assert client.add(file_path=file_path).ipfs_hash == expected.ipfs_hash
| 30.411765 | 75 | 0.727273 | from blockfrost import BlockFrostIPFS, ApiError
from blockfrost.ipfs.add import IPFSObjectResponse
file_path = "README.md"
def test_add(requests_mock):
    """ipfs.add() should return the ipfs_hash echoed by the mocked endpoint."""
    ipfs = BlockFrostIPFS()
    # Canned service response for POST /ipfs/add.
    mock_data = {
        "name": file_path,
        "ipfs_hash": "QmZbHqiCxKEVX7QfijzJTkZiSi3WEVTcvANgNAWzDYgZDr",
        "size": 125297
    }
    requests_mock.post(f"{ipfs.url}/ipfs/add", json=mock_data)
    mock_object = IPFSObjectResponse(**mock_data)
    assert ipfs.add(file_path=file_path).ipfs_hash == mock_object.ipfs_hash
| true | true |
1c32acc88d168bc376ec29f2eae66ac341eab896 | 18,510 | py | Python | stacker/blueprints/base.py | scrthq/stacker | 3cdfbd543f82d6805435ac9fa2655bf31e436cc4 | [
"BSD-2-Clause"
] | 1 | 2018-07-17T11:23:47.000Z | 2018-07-17T11:23:47.000Z | stacker/blueprints/base.py | scrthq/stacker | 3cdfbd543f82d6805435ac9fa2655bf31e436cc4 | [
"BSD-2-Clause"
] | 2 | 2019-02-05T21:36:05.000Z | 2019-02-08T22:16:42.000Z | stacker/blueprints/base.py | scrthq/stacker | 3cdfbd543f82d6805435ac9fa2655bf31e436cc4 | [
"BSD-2-Clause"
] | 1 | 2020-02-29T04:49:11.000Z | 2020-02-29T04:49:11.000Z | from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import str
from past.builtins import basestring
from builtins import object
import copy
import hashlib
import logging
import string
from stacker.util import read_value_from_path
from stacker.variables import Variable
from troposphere import (
Output,
Parameter,
Ref,
Template,
)
from ..exceptions import (
MissingVariable,
UnresolvedVariable,
UnresolvedVariables,
ValidatorError,
VariableTypeRequired,
InvalidUserdataPlaceholder
)
from .variables.types import (
CFNType,
TroposphereType,
)
logger = logging.getLogger(__name__)

# Maps stacker variable-definition keys to the corresponding CloudFormation
# Parameter property names; consumed by build_parameter().
PARAMETER_PROPERTIES = {
    "default": "Default",
    "description": "Description",
    "no_echo": "NoEcho",
    "allowed_values": "AllowedValues",
    "allowed_pattern": "AllowedPattern",
    "max_length": "MaxLength",
    "min_length": "MinLength",
    "max_value": "MaxValue",
    "min_value": "MinValue",
    "constraint_description": "ConstraintDescription"
}
class CFNParameter(object):
    """Marks a value as destined for a CloudFormation Parameter.

    CloudFormation only accepts string (or list) parameter values, so
    booleans and integers are coerced to strings at construction time;
    strings and lists pass through untouched.
    """

    def __init__(self, name, value):
        """
        Args:
            name (str): the name of the CloudFormation Parameter
            value (str, list, int or bool): the value we're going to submit
                as a CloudFormation Parameter.
        """
        # bool is tested before int on purpose: isinstance(True, int) is
        # also True, and booleans must render as "true"/"false".
        if isinstance(value, bool):
            logger.debug("Converting parameter %s boolean '%s' "
                         "to string.", name, value)
            value = str(value).lower()
        elif isinstance(value, int):
            logger.debug("Converting parameter %s integer '%s' "
                         "to string.", name, value)
            value = str(value)
        elif not isinstance(value, (basestring, list)):
            raise ValueError(
                "CFNParameter (%s) value must be one of %s got: %s" % (
                    name, "str, int, bool, or list", value))
        self.name = name
        self.value = value

    def __repr__(self):
        return "CFNParameter({}: {})".format(self.name, self.value)

    def to_parameter_value(self):
        """Return the value to be submitted to CloudFormation"""
        return self.value

    @property
    def ref(self):
        """A troposphere ``Ref`` pointing at this parameter."""
        return Ref(self.name)
def build_parameter(name, properties):
    """Builds a troposphere Parameter with the given properties.

    Args:
        name (string): The name of the parameter.
        properties (dict): Contains the properties that will be applied to the
            parameter. See:
            http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/parameters-section-structure.html

    Returns:
        :class:`troposphere.Parameter`: The created parameter object.
    """
    p = Parameter(name, Type=properties.get("type"))
    # Distinct loop names: the previous version reused ``name`` here and
    # silently shadowed the function's ``name`` argument.
    for prop_name, cfn_attr in PARAMETER_PROPERTIES.items():
        if prop_name in properties:
            setattr(p, cfn_attr, properties[prop_name])
    return p
def validate_variable_type(var_name, var_type, value):
    """Coerce/validate ``value`` against the declared variable type.

    Args:
        var_name (str): The name of the defined variable on a blueprint.
        var_type (type): The type that the value should be.
        value (obj): The object representing the value provided for the
            variable

    Returns:
        object: the validated value; values declared with a CFNType come
            back wrapped in a CFNParameter.

    Raises:
        ValidatorError: if a TroposphereType fails to build from ``value``.
        ValueError: if ``value`` is not an instance of a plain ``var_type``.
    """
    if isinstance(var_type, CFNType):
        # CloudFormation parameters are validated by CloudFormation itself.
        return CFNParameter(name=var_name, value=value)

    if isinstance(var_type, TroposphereType):
        try:
            return var_type.create(value)
        except Exception as exc:
            validator = "{}.create".format(var_type.resource_name)
            raise ValidatorError(var_name, validator, value, exc)

    if not isinstance(value, var_type):
        raise ValueError(
            "Value for variable %s must be of type %s. Actual "
            "type: %s." % (var_name, var_type, type(value))
        )
    return value
def validate_allowed_values(allowed_values, value):
    """Check ``value`` against an optional whitelist from the definition.

    Args:
        allowed_values (Optional[list]): A list of allowed values from the
            variable definition
        value (obj): The object representing the value provided for the
            variable

    Returns:
        bool: Boolean for whether or not the value is valid.
    """
    # No whitelist configured -> everything is acceptable.
    if not allowed_values:
        return True
    # CFNParameter values are validated by CloudFormation, not here.
    if isinstance(value, CFNParameter):
        return True
    return value in allowed_values
def resolve_variable(var_name, var_def, provided_variable, blueprint_name):
    """Resolve a provided variable value against the variable definition.

    Args:
        var_name (str): The name of the defined variable on a blueprint.
        var_def (dict): A dictionary representing the defined variables
            attributes.
        provided_variable (:class:`stacker.variables.Variable`): The variable
            value provided to the blueprint.
        blueprint_name (str): The name of the blueprint that the variable is
            being applied to.

    Returns:
        object: The resolved variable value, could be any python object.

    Raises:
        MissingVariable: Raised when a variable with no default is not
            provided a value.
        UnresolvedVariable: Raised when the provided variable is not already
            resolved.
        ValueError: Raised when the value is not the right type and cannot be
            cast as the correct type. Raised by
            :func:`stacker.blueprints.base.validate_variable_type`
        ValidatorError: Raised when a validator raises an exception. Wraps the
            original exception.
    """
    # A variable definition without a "type" key is a blueprint bug.
    try:
        var_type = var_def["type"]
    except KeyError:
        raise VariableTypeRequired(blueprint_name, var_name)

    if provided_variable:
        if not provided_variable.resolved:
            raise UnresolvedVariable(blueprint_name, provided_variable)

        value = provided_variable.value
    else:
        # Variable value not provided, try using the default, if it exists
        # in the definition
        try:
            value = var_def["default"]
        except KeyError:
            raise MissingVariable(blueprint_name, var_name)

    # If no validator, return the value as is, otherwise apply validator
    validator = var_def.get("validator", lambda v: v)
    try:
        value = validator(value)
    except Exception as exc:
        raise ValidatorError(var_name, validator.__name__, value, exc)

    # Ensure that the resulting value is the correct type
    value = validate_variable_type(var_name, var_type, value)

    # Optional whitelist check (skipped for CFN parameters).
    allowed_values = var_def.get("allowed_values")
    if not validate_allowed_values(allowed_values, value):
        message = (
            "Invalid value passed to '%s' in blueprint: %s. Got: '%s', "
            "expected one of %s"
        ) % (var_name, blueprint_name, value, allowed_values)
        raise ValueError(message)

    return value
def parse_user_data(variables, raw_user_data, blueprint_name):
    """Render ``raw_user_data`` as a ``string.Template``.

    ``${name}`` placeholders in the user data are substituted with the
    resolved blueprint variable values, as commonly needed when building
    EC2 userdata files. CFNParameter values are unwrapped to their raw
    parameter value first.

    Args:
        variables (dict): variables available to the template
        raw_user_data (str): the user_data to be parsed
        blueprint_name (str): the name of the blueprint

    Returns:
        str: the parsed user data with all placeholders replaced.

    Raises:
        InvalidUserdataPlaceholder: when a placeholder name in
            raw_user_data is not a valid identifier (e.g. ``${100}``).
        MissingVariable: when raw_user_data references a variable that was
            not provided.
    """
    resolved = {}
    for key, val in variables.items():
        # Exact type check kept deliberately (not isinstance).
        if type(val) is CFNParameter:
            resolved[key] = val.to_parameter_value()
        else:
            resolved[key] = val

    tmpl = string.Template(raw_user_data)
    try:
        return tmpl.substitute(resolved)
    except ValueError as exp:
        raise InvalidUserdataPlaceholder(blueprint_name, exp.args[0])
    except KeyError as key:
        raise MissingVariable(blueprint_name, key)
class Blueprint(object):
    """Base implementation for rendering a troposphere template.

    Args:
        name (str): A name for the blueprint.
        context (:class:`stacker.context.Context`): the context the blueprint
            is being executed under.
        mappings (dict, optional): Cloudformation Mappings to be used in the
            template.
    """

    def __init__(self, name, context, mappings=None, description=None):
        self.name = name
        self.context = context
        self.mappings = mappings
        self.outputs = {}
        self.reset_template()
        # Populated by resolve_variables(); get_variables() raises until then.
        self.resolved_variables = None
        self.description = description
        if hasattr(self, "PARAMETERS") or hasattr(self, "LOCAL_PARAMETERS"):
            raise AttributeError("DEPRECATION WARNING: Blueprint %s uses "
                                 "deprecated PARAMETERS or "
                                 "LOCAL_PARAMETERS, rather than VARIABLES. "
                                 "Please update your blueprints. See https://"
                                 "stacker.readthedocs.io/en/latest/blueprints."
                                 "html#variables for aditional information."
                                 % name)

    def get_parameter_definitions(self):
        """Get the parameter definitions to submit to CloudFormation.

        Any variable definition whose `type` is an instance of `CFNType` will
        be returned as a CloudFormation Parameter.

        Returns:
            dict: parameter definitions. Keys are parameter names, the values
                are dicts containing key/values for various parameter
                properties.
        """
        output = {}
        for var_name, attrs in self.defined_variables().items():
            var_type = attrs.get("type")
            if isinstance(var_type, CFNType):
                cfn_attrs = copy.deepcopy(attrs)
                # CloudFormation expects the parameter-type string, not the
                # CFNType wrapper itself.
                cfn_attrs["type"] = var_type.parameter_type
                output[var_name] = cfn_attrs
        return output

    def get_required_parameter_definitions(self):
        """Returns all template parameters that do not have a default value.

        Returns:
            dict: dict of required CloudFormation Parameters for the blueprint.
                Will be a dictionary of <parameter name>: <parameter
                attributes>.
        """
        required = {}
        for name, attrs in self.get_parameter_definitions().items():
            if "Default" not in attrs:
                required[name] = attrs
        return required

    def get_parameter_values(self):
        """Return a dictionary of variables with `type` :class:`CFNType`.

        Returns:
            dict: variables that need to be submitted as CloudFormation
                Parameters. Will be a dictionary of <parameter name>:
                <parameter value>.
        """
        variables = self.get_variables()
        output = {}
        for key, value in variables.items():
            try:
                output[key] = value.to_parameter_value()
            except AttributeError:
                # Non-CFNParameter values have no to_parameter_value().
                continue
        return output

    def setup_parameters(self):
        """Add any CloudFormation parameters to the template"""
        t = self.template
        parameters = self.get_parameter_definitions()
        if not parameters:
            logger.debug("No parameters defined.")
            return
        for name, attrs in parameters.items():
            p = build_parameter(name, attrs)
            t.add_parameter(p)

    def defined_variables(self):
        """Return a dictionary of variables defined by the blueprint.

        By default, this will just return the values from `VARIABLES`, but this
        makes it easy for subclasses to add variables.

        Returns:
            dict: variables defined by the blueprint
        """
        # deepcopy so callers can mutate definitions without affecting the
        # class-level VARIABLES.
        return copy.deepcopy(getattr(self, "VARIABLES", {}))

    def get_variables(self):
        """Return a dictionary of variables available to the template.

        These variables will have been defined within `VARIABLES` or
        `self.defined_variables`. Any variable value that contains a lookup
        will have been resolved.

        Returns:
            dict: variables available to the template

        Raises:
            UnresolvedVariables: if resolve_variables() has not been called
                yet.
        """
        if self.resolved_variables is None:
            raise UnresolvedVariables(self.name)
        return self.resolved_variables

    def get_cfn_parameters(self):
        """Return a dictionary of variables with `type` :class:`CFNType`.

        Returns:
            dict: variables that need to be submitted as CloudFormation
                Parameters.
        """
        variables = self.get_variables()
        output = {}
        for key, value in variables.items():
            if hasattr(value, "to_parameter_value"):
                output[key] = value.to_parameter_value()
        return output

    def resolve_variables(self, provided_variables):
        """Resolve the values of the blueprint variables.

        This will resolve the values of the `VARIABLES` with values from the
        env file, the config, and any lookups resolved.

        Args:
            provided_variables (list of :class:`stacker.variables.Variable`):
                list of provided variables
        """
        self.resolved_variables = {}
        defined_variables = self.defined_variables()
        variable_dict = dict((var.name, var) for var in provided_variables)
        for var_name, var_def in defined_variables.items():
            value = resolve_variable(
                var_name,
                var_def,
                variable_dict.get(var_name),
                self.name
            )
            self.resolved_variables[var_name] = value

    def import_mappings(self):
        """Add the blueprint's CloudFormation Mappings to the template."""
        if not self.mappings:
            return

        for name, mapping in self.mappings.items():
            logger.debug("Adding mapping %s.", name)
            self.template.add_mapping(name, mapping)

    def reset_template(self):
        """Start over with a fresh, empty troposphere Template."""
        self.template = Template()
        self._rendered = None
        self._version = None

    def render_template(self):
        """Render the Blueprint to a CloudFormation template"""
        self.import_mappings()
        self.create_template()
        if self.description:
            self.set_template_description(self.description)
        self.setup_parameters()
        rendered = self.template.to_json(indent=self.context.template_indent)
        # Short content hash used as the template "version".
        version = hashlib.md5(rendered.encode()).hexdigest()[:8]
        return (version, rendered)

    def to_json(self, variables=None):
        """Render the blueprint and return the template in json form.

        Args:
            variables (dict):
                Optional dictionary providing/overriding variable values.

        Returns:
            str: the rendered CFN JSON template
        """
        variables_to_resolve = []
        if variables:
            for key, value in variables.items():
                variables_to_resolve.append(Variable(key, value))
        for k in self.get_parameter_definitions():
            if not variables or k not in variables:
                # The provided value for a CFN parameter has no effect in this
                # context (generating the CFN template), so any string can be
                # provided for its value - just needs to be something
                variables_to_resolve.append(Variable(k, 'unused_value'))
        self.resolve_variables(variables_to_resolve)
        return self.render_template()[1]

    def read_user_data(self, user_data_path):
        """Reads and parses a user_data file.

        Args:
            user_data_path (str):
                path to the userdata file

        Returns:
            str: the parsed user data file
        """
        raw_user_data = read_value_from_path(user_data_path)
        variables = self.get_variables()
        return parse_user_data(variables, raw_user_data, self.name)

    def set_template_description(self, description):
        """Adds a description to the Template

        Args:
            description (str): A description to be added to the resulting
                template.
        """
        self.template.add_description(description)

    def add_output(self, name, value):
        """Simple helper for adding outputs.

        Args:
            name (str): The name of the output to create.
            value (str): The value to put in the output.
        """
        self.template.add_output(Output(name, Value=value))

    @property
    def requires_change_set(self):
        """Returns true if the underlying template has transforms."""
        return self.template.transform is not None

    @property
    def rendered(self):
        """The rendered template body (rendered lazily, then cached)."""
        if not self._rendered:
            self._version, self._rendered = self.render_template()
        return self._rendered

    @property
    def version(self):
        """The short hash of the rendered template (rendered lazily)."""
        if not self._version:
            self._version, self._rendered = self.render_template()
        return self._version

    def create_template(self):
        # Subclasses must populate self.template here.
        raise NotImplementedError
| 33.053571 | 107 | 0.622474 | from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import str
from past.builtins import basestring
from builtins import object
import copy
import hashlib
import logging
import string
from stacker.util import read_value_from_path
from stacker.variables import Variable
from troposphere import (
Output,
Parameter,
Ref,
Template,
)
from ..exceptions import (
MissingVariable,
UnresolvedVariable,
UnresolvedVariables,
ValidatorError,
VariableTypeRequired,
InvalidUserdataPlaceholder
)
from .variables.types import (
CFNType,
TroposphereType,
)
logger = logging.getLogger(__name__)

# Maps stacker variable-definition keys to the corresponding CloudFormation
# Parameter property names; consumed by build_parameter().
PARAMETER_PROPERTIES = {
    "default": "Default",
    "description": "Description",
    "no_echo": "NoEcho",
    "allowed_values": "AllowedValues",
    "allowed_pattern": "AllowedPattern",
    "max_length": "MaxLength",
    "min_length": "MinLength",
    "max_value": "MaxValue",
    "min_value": "MinValue",
    "constraint_description": "ConstraintDescription"
}
class CFNParameter(object):
    """Marks a value as destined for a CloudFormation Parameter.

    Booleans and integers are coerced to strings at construction time;
    strings and lists pass through unchanged.
    """

    def __init__(self, name, value):
        """
        Args:
            name (str): name of the CloudFormation Parameter.
            value (str, list, int or bool): the value submitted as the
                Parameter's value; anything else raises ValueError.
        """
        acceptable_types = [basestring, bool, list, int]
        acceptable = False
        for acceptable_type in acceptable_types:
            if isinstance(value, acceptable_type):
                acceptable = True
                # bool appears before int in acceptable_types, so True/False
                # render as "true"/"false" rather than via the int branch.
                if acceptable_type == bool:
                    logger.debug("Converting parameter %s boolean '%s' "
                                 "to string.", name, value)
                    value = str(value).lower()
                    break

                if acceptable_type == int:
                    logger.debug("Converting parameter %s integer '%s' "
                                 "to string.", name, value)
                    value = str(value)
                    break

        if not acceptable:
            raise ValueError(
                "CFNParameter (%s) value must be one of %s got: %s" % (
                    name, "str, int, bool, or list", value))

        self.name = name
        self.value = value

    def __repr__(self):
        return "CFNParameter({}: {})".format(self.name, self.value)

    def to_parameter_value(self):
        """Return the value to be submitted to CloudFormation."""
        return self.value

    @property
    def ref(self):
        """A troposphere ``Ref`` pointing at this parameter."""
        return Ref(self.name)
def build_parameter(name, properties):
    """Builds a troposphere Parameter with the given properties.

    Args:
        name (string): The name of the parameter.
        properties (dict): properties to apply to the parameter, keyed by
            the names in PARAMETER_PROPERTIES.

    Returns:
        :class:`troposphere.Parameter`: The created parameter object.
    """
    p = Parameter(name, Type=properties.get("type"))
    # Distinct loop names: the previous version reused ``name`` here and
    # silently shadowed the function's ``name`` argument.
    for prop_name, cfn_attr in PARAMETER_PROPERTIES.items():
        if prop_name in properties:
            setattr(p, cfn_attr, properties[prop_name])
    return p
def validate_variable_type(var_name, var_type, value):
    """Coerce/validate ``value`` against the declared variable type.

    CFNType values are wrapped in a CFNParameter; TroposphereType values
    are built via the type's ``create``; anything else must simply be an
    instance of ``var_type``.
    """
    if isinstance(var_type, CFNType):
        value = CFNParameter(name=var_name, value=value)
    elif isinstance(var_type, TroposphereType):
        try:
            value = var_type.create(value)
        except Exception as exc:
            # Report the failure as coming from "<resource>.create".
            name = "{}.create".format(var_type.resource_name)
            raise ValidatorError(var_name, name, value, exc)
    else:
        if not isinstance(value, var_type):
            raise ValueError(
                "Value for variable %s must be of type %s. Actual "
                "type: %s." % (var_name, var_type, type(value))
            )
    return value
def validate_allowed_values(allowed_values, value):
    """Return True when ``value`` is permitted by the optional whitelist."""
    # An empty/missing whitelist accepts everything; CFNParameter values are
    # validated by CloudFormation itself, not here.
    if not allowed_values or isinstance(value, CFNParameter):
        return True
    return value in allowed_values
def resolve_variable(var_name, var_def, provided_variable, blueprint_name):
    """Resolve a provided variable value against its definition.

    Falls back to the definition's default when no value was provided, runs
    the optional validator, enforces the declared type and the optional
    allowed-values whitelist.

    Raises:
        VariableTypeRequired: if the definition has no "type" key.
        UnresolvedVariable: if the provided variable is not resolved yet.
        MissingVariable: if no value and no default exist.
        ValidatorError / ValueError: on validation failures.
    """
    try:
        var_type = var_def["type"]
    except KeyError:
        raise VariableTypeRequired(blueprint_name, var_name)

    if provided_variable:
        if not provided_variable.resolved:
            raise UnresolvedVariable(blueprint_name, provided_variable)

        value = provided_variable.value
    else:
        # No value provided: fall back to the definition's default.
        try:
            value = var_def["default"]
        except KeyError:
            raise MissingVariable(blueprint_name, var_name)

    # Identity validator when none is defined.
    validator = var_def.get("validator", lambda v: v)
    try:
        value = validator(value)
    except Exception as exc:
        raise ValidatorError(var_name, validator.__name__, value, exc)

    value = validate_variable_type(var_name, var_type, value)

    allowed_values = var_def.get("allowed_values")
    if not validate_allowed_values(allowed_values, value):
        message = (
            "Invalid value passed to '%s' in blueprint: %s. Got: '%s', "
            "expected one of %s"
        ) % (var_name, blueprint_name, value, allowed_values)
        raise ValueError(message)

    return value
def parse_user_data(variables, raw_user_data, blueprint_name):
    """Render ``raw_user_data`` as a ``string.Template``, substituting
    ``${name}`` placeholders with resolved blueprint variable values.

    Raises InvalidUserdataPlaceholder for malformed placeholders and
    MissingVariable for placeholders with no matching variable.
    """
    variable_values = {}
    for key, value in variables.items():
        # Exact type check (not isinstance): unwrap CFNParameter values.
        if type(value) is CFNParameter:
            variable_values[key] = value.to_parameter_value()
        else:
            variable_values[key] = value

    template = string.Template(raw_user_data)

    res = ""
    try:
        res = template.substitute(variable_values)
    except ValueError as exp:
        raise InvalidUserdataPlaceholder(blueprint_name, exp.args[0])
    except KeyError as key:
        raise MissingVariable(blueprint_name, key)

    return res
class Blueprint(object):
    """Base implementation for rendering a troposphere template.

    Args:
        name (str): A name for the blueprint.
        context (:class:`stacker.context.Context`): the context the blueprint
            is being executed under.
        mappings (dict, optional): CloudFormation Mappings to be used in the
            template.
    """

    def __init__(self, name, context, mappings=None, description=None):
        self.name = name
        self.context = context
        self.mappings = mappings
        self.outputs = {}
        self.reset_template()
        # Populated by resolve_variables(); get_variables() raises until then.
        self.resolved_variables = None
        self.description = description
        if hasattr(self, "PARAMETERS") or hasattr(self, "LOCAL_PARAMETERS"):
            raise AttributeError("DEPRECATION WARNING: Blueprint %s uses "
                                 "deprecated PARAMETERS or "
                                 "LOCAL_PARAMETERS, rather than VARIABLES. "
                                 "Please update your blueprints. See https://"
                                 "stacker.readthedocs.io/en/latest/blueprints."
                                 "html#variables for aditional information."
                                 % name)

    def get_parameter_definitions(self):
        """Return variable definitions whose type is a CFNType, converted to
        CloudFormation Parameter definitions."""
        output = {}
        for var_name, attrs in self.defined_variables().items():
            var_type = attrs.get("type")
            if isinstance(var_type, CFNType):
                cfn_attrs = copy.deepcopy(attrs)
                cfn_attrs["type"] = var_type.parameter_type
                output[var_name] = cfn_attrs
        return output

    def get_required_parameter_definitions(self):
        """Return the parameter definitions that have no default value."""
        required = {}
        for name, attrs in self.get_parameter_definitions().items():
            if "Default" not in attrs:
                required[name] = attrs
        return required

    def get_parameter_values(self):
        """Return {name: value} for variables submitted as CFN Parameters."""
        variables = self.get_variables()
        output = {}
        for key, value in variables.items():
            try:
                output[key] = value.to_parameter_value()
            except AttributeError:
                # Non-CFNParameter values have no to_parameter_value().
                continue
        return output

    def setup_parameters(self):
        """Add any CloudFormation parameters to the template."""
        t = self.template
        parameters = self.get_parameter_definitions()
        if not parameters:
            logger.debug("No parameters defined.")
            return
        for name, attrs in parameters.items():
            p = build_parameter(name, attrs)
            t.add_parameter(p)

    def defined_variables(self):
        """Return a deep copy of the blueprint's VARIABLES definitions."""
        return copy.deepcopy(getattr(self, "VARIABLES", {}))

    def get_variables(self):
        """Return resolved variables; raises UnresolvedVariables before
        resolve_variables() has been called."""
        if self.resolved_variables is None:
            raise UnresolvedVariables(self.name)
        return self.resolved_variables

    def get_cfn_parameters(self):
        """Return {name: value} for CFNParameter-typed variables."""
        variables = self.get_variables()
        output = {}
        for key, value in variables.items():
            if hasattr(value, "to_parameter_value"):
                output[key] = value.to_parameter_value()
        return output

    def resolve_variables(self, provided_variables):
        """Resolve defined variables against the provided Variable objects."""
        self.resolved_variables = {}
        defined_variables = self.defined_variables()
        variable_dict = dict((var.name, var) for var in provided_variables)
        for var_name, var_def in defined_variables.items():
            value = resolve_variable(
                var_name,
                var_def,
                variable_dict.get(var_name),
                self.name
            )
            self.resolved_variables[var_name] = value

    def import_mappings(self):
        """Add the blueprint's CloudFormation Mappings to the template."""
        if not self.mappings:
            return

        for name, mapping in self.mappings.items():
            logger.debug("Adding mapping %s.", name)
            self.template.add_mapping(name, mapping)

    def reset_template(self):
        """Start over with a fresh, empty troposphere Template."""
        self.template = Template()
        self._rendered = None
        self._version = None

    def render_template(self):
        """Render the blueprint; returns (short md5 version, JSON body)."""
        self.import_mappings()
        self.create_template()
        if self.description:
            self.set_template_description(self.description)
        self.setup_parameters()
        rendered = self.template.to_json(indent=self.context.template_indent)
        version = hashlib.md5(rendered.encode()).hexdigest()[:8]
        return (version, rendered)

    def to_json(self, variables=None):
        """Render and return the CFN JSON template, optionally overriding
        variable values."""
        variables_to_resolve = []
        if variables:
            for key, value in variables.items():
                variables_to_resolve.append(Variable(key, value))
        for k in self.get_parameter_definitions():
            if not variables or k not in variables:
                # CFN parameter values have no effect when only generating
                # the template, so any placeholder string will do.
                variables_to_resolve.append(Variable(k, 'unused_value'))
        self.resolve_variables(variables_to_resolve)
        return self.render_template()[1]

    def read_user_data(self, user_data_path):
        """Read and parse a user_data file through parse_user_data()."""
        raw_user_data = read_value_from_path(user_data_path)
        variables = self.get_variables()
        return parse_user_data(variables, raw_user_data, self.name)

    def set_template_description(self, description):
        """Add a description to the Template."""
        self.template.add_description(description)

    def add_output(self, name, value):
        """Simple helper for adding outputs to the template."""
        self.template.add_output(Output(name, Value=value))

    @property
    def requires_change_set(self):
        """True if the underlying template has transforms."""
        return self.template.transform is not None

    @property
    def rendered(self):
        """The rendered template body (rendered lazily, then cached)."""
        if not self._rendered:
            self._version, self._rendered = self.render_template()
        return self._rendered

    @property
    def version(self):
        """The short hash of the rendered template (rendered lazily)."""
        if not self._version:
            self._version, self._rendered = self.render_template()
        return self._version

    def create_template(self):
        # Subclasses must populate self.template here.
        raise NotImplementedError
| true | true |
1c32ad1b4981abe93c3f66a5517ee15f7662ec65 | 407 | py | Python | Part_3_advanced/m03_date_and_time/date_construct/homework_1_start/main.py | Mikma03/InfoShareacademy_Python_Courses | 3df1008c8c92831bebf1625f960f25b39d6987e6 | [
"MIT"
] | null | null | null | Part_3_advanced/m03_date_and_time/date_construct/homework_1_start/main.py | Mikma03/InfoShareacademy_Python_Courses | 3df1008c8c92831bebf1625f960f25b39d6987e6 | [
"MIT"
] | null | null | null | Part_3_advanced/m03_date_and_time/date_construct/homework_1_start/main.py | Mikma03/InfoShareacademy_Python_Courses | 3df1008c8c92831bebf1625f960f25b39d6987e6 | [
"MIT"
] | null | null | null | from new_movies import movies_ranking
from new_movies.random_data_utility import random_generator
def run_example():
    """Generate a random batch of 15 movies and print the top ranking."""
    sample = random_generator.generate_random_movies(movies_number=15)
    movies_ranking.print_top_movies(sample)
    # Optional: cap the ranking length, e.g.
    # movies_ranking.print_top_movies(sample, limit=12)


if __name__ == "__main__":
    run_example()
| 29.071429 | 75 | 0.805897 | from new_movies import movies_ranking
from new_movies.random_data_utility import random_generator
def run_example():
    """Generate a random batch of 15 movies and print the top ranking."""
    some_movies = random_generator.generate_random_movies(movies_number=15)
    movies_ranking.print_top_movies(some_movies)


if __name__ == "__main__":
    run_example()
| true | true |
1c32ad3d236fe6ddd50443cf1f6b99698ac7bdc6 | 2,958 | py | Python | test/loader/loader.py | ameserole/Naumachia | dc13c33c5fcf053c74dfce8351a696d28857fd9d | [
"MIT"
] | null | null | null | test/loader/loader.py | ameserole/Naumachia | dc13c33c5fcf053c74dfce8351a696d28857fd9d | [
"MIT"
] | null | null | null | test/loader/loader.py | ameserole/Naumachia | dc13c33c5fcf053c74dfce8351a696d28857fd9d | [
"MIT"
] | null | null | null | from urllib.parse import urljoin
from db import Db
import requests
import os
import re
import json
import logging
import random
import string
import redis
import yaml
# CNs generated for testing are "test" followed by 32 ASCII letters.
testcn_pattern = r'test[a-zA-Z]{32}'
script_dir = os.path.dirname(os.path.realpath(__file__))

# Service endpoints and settings, overridable via the environment.
REGISTRAR_URL = os.environ.get('REGISTRAR_URL', 'http://localhost:3960')
REDIS_ADDR = os.environ.get('REDIS_ADDR', 'localhost')
REDIS_PORT = int(os.environ.get('REDIS_PORT', 6379))
LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO')
LOADER_CONFIG = os.environ.get('LOADER_CONFIG', os.path.join(script_dir, 'config.yml'))

# Fail fast on an unknown LOG_LEVEL name.
_levelnum = getattr(logging, LOG_LEVEL.upper(), None)
if not isinstance(_levelnum, int):
    raise ValueError('Invalid log level: {}'.format(LOG_LEVEL))
logging.basicConfig(level=_levelnum, format="[%(levelname)s %(asctime)s] %(message)s", datefmt="%m-%d %H:%M:%S")
logger = logging.getLogger(__name__)
def gentestcn():
    """Generate a random test CN: 'test' followed by 32 ASCII letters."""
    suffix = "".join(random.choice(string.ascii_letters) for _ in range(32))
    cn = 'test' + suffix
    # Sanity check: the generated name must match the recognizer.
    assert istestcn(cn)
    return cn
def istestcn(cn):
    """True when ``cn`` looks like a CN produced by gentestcn().

    NOTE(review): the pattern is only anchored at the start, so names with
    extra trailing characters also match — presumably intentional; confirm
    before tightening to fullmatch.
    """
    return bool(re.match(testcn_pattern, cn))
def load_config():
    """Load and parse the loader's YAML configuration file.

    Returns:
        dict: parsed contents of LOADER_CONFIG.
    """
    with open(LOADER_CONFIG, 'r') as f:
        # safe_load avoids arbitrary object construction; plain yaml.load
        # without an explicit Loader is deprecated and unsafe.
        return yaml.safe_load(f)
if __name__ == "__main__":
# Open the connection to redis
Db.redis = redis.Redis(host=REDIS_ADDR, port=REDIS_PORT)
# Load the yaml config for this test
config = load_config()
logging.debug("Configuration to satisfy: %r", config)
for challenge, settings in config.items():
chaldb = Db.Challenge(challenge)
chaldb.ready = False
Db.challenges.add(chaldb)
logger.info('Loading certificates for %s', challenge)
ls = requests.get(urljoin(REGISTRAR_URL, challenge + '/list')).json()
cns = { entry['cn'] for entry in ls if istestcn(entry['cn']) }
logger.debug("Exisitng test certificates: %r", cns)
diff = settings['certificates'] - len(cns)
if diff > 0:
logger.info('Adding %d certificates', diff)
for _ in range(diff):
cn = gentestcn()
logger.debug('Adding %s', cn)
requests.get(urljoin(REGISTRAR_URL, challenge + '/add'), params={'cn': cn}).raise_for_status()
cns.add(cn)
if diff < 0:
logger.info('Removing %d overprovisioned certificates', diff)
for _ in range(abs(diff)):
cn = cns.pop()
logger.debug('Removing %s', cn)
requests.get(urljoin(REGISTRAR_URL, challenge + '/remove'), params={'cn': cn}).raise_for_status()
logging.info("Prepared %d certificates for testing", len(cns))
for cn in cns:
cert = requests.get(urljoin(REGISTRAR_URL, challenge + '/get'), params={'cn': cn}).json()
chaldb.certificates.add(Db.Certificate(cn, cert))
chaldb.ready = True
logging.info("Certificates loaded into redis at %s:%d", REDIS_ADDR, REDIS_PORT)
| 34.395349 | 113 | 0.644016 | from urllib.parse import urljoin
from db import Db
import requests
import os
import re
import json
import logging
import random
import string
import redis
import yaml
# CNs generated for testing are "test" followed by 32 ASCII letters.
testcn_pattern = r'test[a-zA-Z]{32}'
script_dir = os.path.dirname(os.path.realpath(__file__))

# Service endpoints and settings, overridable via the environment.
REGISTRAR_URL = os.environ.get('REGISTRAR_URL', 'http://localhost:3960')
REDIS_ADDR = os.environ.get('REDIS_ADDR', 'localhost')
REDIS_PORT = int(os.environ.get('REDIS_PORT', 6379))
LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO')
LOADER_CONFIG = os.environ.get('LOADER_CONFIG', os.path.join(script_dir, 'config.yml'))

# Fail fast on an unknown LOG_LEVEL name.
_levelnum = getattr(logging, LOG_LEVEL.upper(), None)
if not isinstance(_levelnum, int):
    raise ValueError('Invalid log level: {}'.format(LOG_LEVEL))
logging.basicConfig(level=_levelnum, format="[%(levelname)s %(asctime)s] %(message)s", datefmt="%m-%d %H:%M:%S")
logger = logging.getLogger(__name__)
def gentestcn():
    """Generate a CN of the form ``test`` + 32 random ASCII letters."""
    letters = [random.choice(string.ascii_letters) for _ in range(32)]
    cn = 'test' + "".join(letters)
    # Sanity check: a generated CN must always be recognised as a test CN.
    assert istestcn(cn)
    return cn
def istestcn(cn):
    """Return True if *cn* is exactly 'test' followed by 32 ASCII letters.

    Uses re.fullmatch: the original re.match only anchored at the start,
    so a CN like 'test<32 letters>-extra' was wrongly treated as a test
    certificate (and could be deleted by the reconciliation loop below).
    """
    return re.fullmatch(testcn_pattern, cn) is not None
def load_config():
    """Parse the loader YAML config file and return its contents.

    Uses yaml.safe_load: plain yaml.load without an explicit Loader is
    deprecated (PyYAML >= 5.1) and can instantiate arbitrary Python
    objects from the config file.
    """
    with open(LOADER_CONFIG, 'r') as f:
        return yaml.safe_load(f)
if __name__ == "__main__":
Db.redis = redis.Redis(host=REDIS_ADDR, port=REDIS_PORT)
config = load_config()
logging.debug("Configuration to satisfy: %r", config)
for challenge, settings in config.items():
chaldb = Db.Challenge(challenge)
chaldb.ready = False
Db.challenges.add(chaldb)
logger.info('Loading certificates for %s', challenge)
ls = requests.get(urljoin(REGISTRAR_URL, challenge + '/list')).json()
cns = { entry['cn'] for entry in ls if istestcn(entry['cn']) }
logger.debug("Exisitng test certificates: %r", cns)
diff = settings['certificates'] - len(cns)
if diff > 0:
logger.info('Adding %d certificates', diff)
for _ in range(diff):
cn = gentestcn()
logger.debug('Adding %s', cn)
requests.get(urljoin(REGISTRAR_URL, challenge + '/add'), params={'cn': cn}).raise_for_status()
cns.add(cn)
if diff < 0:
logger.info('Removing %d overprovisioned certificates', diff)
for _ in range(abs(diff)):
cn = cns.pop()
logger.debug('Removing %s', cn)
requests.get(urljoin(REGISTRAR_URL, challenge + '/remove'), params={'cn': cn}).raise_for_status()
logging.info("Prepared %d certificates for testing", len(cns))
for cn in cns:
cert = requests.get(urljoin(REGISTRAR_URL, challenge + '/get'), params={'cn': cn}).json()
chaldb.certificates.add(Db.Certificate(cn, cert))
chaldb.ready = True
logging.info("Certificates loaded into redis at %s:%d", REDIS_ADDR, REDIS_PORT)
| true | true |
1c32ae12ebcc78a1b009081906dc246c710134e2 | 5,914 | py | Python | tests/pytests/unit/modules/virt/conftest.py | dirkmueller/salt | f9558f82307a81d29b083c1ba3d713d0bb239354 | [
"Apache-2.0"
] | null | null | null | tests/pytests/unit/modules/virt/conftest.py | dirkmueller/salt | f9558f82307a81d29b083c1ba3d713d0bb239354 | [
"Apache-2.0"
] | null | null | null | tests/pytests/unit/modules/virt/conftest.py | dirkmueller/salt | f9558f82307a81d29b083c1ba3d713d0bb239354 | [
"Apache-2.0"
] | null | null | null | import pytest
import salt.modules.config as config
import salt.modules.virt as virt
from salt._compat import ElementTree as ET
from tests.support.mock import MagicMock
class LibvirtMock(MagicMock):  # pylint: disable=too-many-ancestors
    """
    Mock of the ``libvirt`` python binding module.

    Installed into salt.modules.virt by the setup_loader fixture so the
    tests never talk to a real hypervisor.
    """

    class virDomain(MagicMock):
        """
        Mock of a libvirt virDomain handle returned by lookups.
        """

    class libvirtError(Exception):
        """
        Mock of libvirt's libvirtError exception type.
        """

        def __init__(self, msg):
            super().__init__(msg)
            # Kept on the instance so get_error_message() can return it,
            # mirroring the real libvirtError API.
            self.msg = msg

        def get_error_message(self):
            """Return the message this error was raised with."""
            return self.msg
class MappedResultMock(MagicMock):
    """
    Mock consistently returning the same child mock per first argument.

    Calling the instance with an unregistered key raises a (mocked)
    libvirtError, mirroring libvirt's lookup*ByName behaviour.
    """

    def __init__(self):
        def mapped_results(*args, **kwargs):
            # Raise like libvirt does for unknown names.
            if args[0] not in self._instances:
                raise virt.libvirt.libvirtError("Not found: {}".format(args[0]))
            return self._instances[args[0]]

        super().__init__(side_effect=mapped_results)
        # Per-instance registry.  The original kept this in a *class*
        # attribute, so every MappedResultMock (domains, pools, volumes)
        # shared one mapping and state leaked between fixtures/tests.
        self._instances = {}

    def add(self, name):
        """Register *name* so subsequent calls return a stable mock."""
        self._instances[name] = MagicMock()
@pytest.fixture(autouse=True)
def setup_loader(request):
    """Patch salt.modules.virt with a mocked libvirt binding.

    autouse: applied to every test in this package.  Yields the loader
    mock so individual tests can tweak module globals if needed.
    """
    # Create libvirt mock and connection mock
    mock_libvirt = LibvirtMock()
    mock_conn = MagicMock()
    # Minimal XML so virt's capability parsing does not blow up.
    mock_conn.getStoragePoolCapabilities.return_value = "<storagepoolCapabilities/>"
    mock_libvirt.openAuth.return_value = mock_conn
    setup_loader_modules = {
        virt: {
            "libvirt": mock_libvirt,
            "__salt__": {"config.get": config.get, "config.option": config.option},
        },
        config: {},
    }
    with pytest.helpers.loader_mock(request, setup_loader_modules) as loader_mock:
        yield loader_mock
@pytest.fixture
def make_mock_vm():
    """Return a factory that registers a mocked libvirt domain from XML."""
    def _make_mock_vm(xml_def):
        """Create and register a mocked virDomain built from *xml_def*.

        The domain name and os type are read out of the XML; the mock
        reports a "shutdown" state and echoes *xml_def* from XMLDesc().
        """
        mocked_conn = virt.libvirt.openAuth.return_value
        doc = ET.fromstring(xml_def)
        name = doc.find("name").text
        os_type = "hvm"
        os_type_node = doc.find("os/type")
        if os_type_node is not None:
            os_type = os_type_node.text
        mocked_conn.listDefinedDomains.return_value = [name]
        # Configure the mocked domain.  lookupByName maps each name to a
        # stable mock; make sure it is a MappedResultMock before adding.
        # (The original also created a throwaway virt.libvirt.virDomain()
        # here whose result was immediately overwritten -- dead store
        # removed.)
        if not isinstance(mocked_conn.lookupByName, MappedResultMock):
            mocked_conn.lookupByName = MappedResultMock()
        mocked_conn.lookupByName.add(name)
        domain_mock = mocked_conn.lookupByName(name)
        domain_mock.XMLDesc.return_value = xml_def
        domain_mock.OSType.return_value = os_type
        # Return state as shutdown
        domain_mock.info.return_value = [
            4,
            2048 * 1024,
            1024 * 1024,
            2,
            1234,
        ]
        domain_mock.ID.return_value = 1
        domain_mock.name.return_value = name
        domain_mock.attachDevice.return_value = 0
        domain_mock.detachDevice.return_value = 0
        return domain_mock
    return _make_mock_vm
@pytest.fixture
def make_mock_storage_pool():
    """Return a factory that registers a mocked libvirt storage pool."""
    # NOTE(review): the inner function's `type` parameter shadows the
    # builtin; renaming would break keyword callers, so it is kept.
    def _make_mock_storage_pool(name, type, volumes):
        """Register a mocked pool *name* of *type* holding *volumes*."""
        mocked_conn = virt.libvirt.openAuth.return_value
        # Append the pool name to the list of known mocked pools
        all_pools = mocked_conn.listStoragePools.return_value
        if not isinstance(all_pools, list):
            all_pools = []
        all_pools.append(name)
        mocked_conn.listStoragePools.return_value = all_pools
        # Ensure we have mapped results for the pools
        if not isinstance(mocked_conn.storagePoolLookupByName, MappedResultMock):
            mocked_conn.storagePoolLookupByName = MappedResultMock()
        # Configure the pool
        mocked_conn.storagePoolLookupByName.add(name)
        mocked_pool = mocked_conn.storagePoolLookupByName(name)
        source = ""
        if type == "disk":
            # Disk pools expose their backing device in the source node.
            source = "<device path='/dev/{}'/>".format(name)
        pool_path = "/path/to/{}".format(name)
        mocked_pool.XMLDesc.return_value = """
        <pool type='{}'>
          <source>
            {}
          </source>
          <target>
            <path>{}</path>
          </target>
        </pool>
        """.format(
            type, source, pool_path
        )
        mocked_pool.name.return_value = name
        mocked_pool.info.return_value = [
            virt.libvirt.VIR_STORAGE_POOL_RUNNING,
        ]
        # Append the pool to the listAllStoragePools list
        all_pools_obj = mocked_conn.listAllStoragePools.return_value
        if not isinstance(all_pools_obj, list):
            all_pools_obj = []
        all_pools_obj.append(mocked_pool)
        mocked_conn.listAllStoragePools.return_value = all_pools_obj
        # Configure the volumes
        if not isinstance(mocked_pool.storageVolLookupByName, MappedResultMock):
            mocked_pool.storageVolLookupByName = MappedResultMock()
        mocked_pool.listVolumes.return_value = volumes
        all_volumes = []
        for volume in volumes:
            mocked_pool.storageVolLookupByName.add(volume)
            mocked_vol = mocked_pool.storageVolLookupByName(volume)
            vol_path = "{}/{}".format(pool_path, volume)
            mocked_vol.XMLDesc.return_value = """
            <volume>
              <target>
                <path>{}</path>
              </target>
            </volume>
            """.format(
                vol_path,
            )
            mocked_vol.path.return_value = vol_path
            mocked_vol.name.return_value = volume
            # [type, capacity, allocation] placeholder values.
            mocked_vol.info.return_value = [
                0,
                1234567,
                12345,
            ]
            all_volumes.append(mocked_vol)
        # Set the listAllVolumes return_value
        mocked_pool.listAllVolumes.return_value = all_volumes
        return mocked_pool
    return _make_mock_storage_pool
| 30.802083 | 84 | 0.611769 | import pytest
import salt.modules.config as config
import salt.modules.virt as virt
from salt._compat import ElementTree as ET
from tests.support.mock import MagicMock
class LibvirtMock(MagicMock):
class virDomain(MagicMock):
class libvirtError(Exception):
def __init__(self, msg):
super().__init__(msg)
self.msg = msg
def get_error_message(self):
return self.msg
class MappedResultMock(MagicMock):
_instances = {}
def __init__(self):
def mapped_results(*args, **kwargs):
if args[0] not in self._instances.keys():
raise virt.libvirt.libvirtError("Not found: {}".format(args[0]))
return self._instances[args[0]]
super().__init__(side_effect=mapped_results)
def add(self, name):
self._instances[name] = MagicMock()
@pytest.fixture(autouse=True)
def setup_loader(request):
mock_libvirt = LibvirtMock()
mock_conn = MagicMock()
mock_conn.getStoragePoolCapabilities.return_value = "<storagepoolCapabilities/>"
mock_libvirt.openAuth.return_value = mock_conn
setup_loader_modules = {
virt: {
"libvirt": mock_libvirt,
"__salt__": {"config.get": config.get, "config.option": config.option},
},
config: {},
}
with pytest.helpers.loader_mock(request, setup_loader_modules) as loader_mock:
yield loader_mock
@pytest.fixture
def make_mock_vm():
def _make_mock_vm(xml_def):
mocked_conn = virt.libvirt.openAuth.return_value
doc = ET.fromstring(xml_def)
name = doc.find("name").text
os_type = "hvm"
os_type_node = doc.find("os/type")
if os_type_node is not None:
os_type = os_type_node.text
mocked_conn.listDefinedDomains.return_value = [name]
domain_mock = virt.libvirt.virDomain()
if not isinstance(mocked_conn.lookupByName, MappedResultMock):
mocked_conn.lookupByName = MappedResultMock()
mocked_conn.lookupByName.add(name)
domain_mock = mocked_conn.lookupByName(name)
domain_mock.XMLDesc.return_value = xml_def
domain_mock.OSType.return_value = os_type
domain_mock.info.return_value = [
4,
2048 * 1024,
1024 * 1024,
2,
1234,
]
domain_mock.ID.return_value = 1
domain_mock.name.return_value = name
domain_mock.attachDevice.return_value = 0
domain_mock.detachDevice.return_value = 0
return domain_mock
return _make_mock_vm
@pytest.fixture
def make_mock_storage_pool():
def _make_mock_storage_pool(name, type, volumes):
mocked_conn = virt.libvirt.openAuth.return_value
all_pools = mocked_conn.listStoragePools.return_value
if not isinstance(all_pools, list):
all_pools = []
all_pools.append(name)
mocked_conn.listStoragePools.return_value = all_pools
if not isinstance(mocked_conn.storagePoolLookupByName, MappedResultMock):
mocked_conn.storagePoolLookupByName = MappedResultMock()
mocked_conn.storagePoolLookupByName.add(name)
mocked_pool = mocked_conn.storagePoolLookupByName(name)
source = ""
if type == "disk":
source = "<device path='/dev/{}'/>".format(name)
pool_path = "/path/to/{}".format(name)
mocked_pool.XMLDesc.return_value = """
<pool type='{}'>
<source>
{}
</source>
<target>
<path>{}</path>
</target>
</pool>
""".format(
type, source, pool_path
)
mocked_pool.name.return_value = name
mocked_pool.info.return_value = [
virt.libvirt.VIR_STORAGE_POOL_RUNNING,
]
all_pools_obj = mocked_conn.listAllStoragePools.return_value
if not isinstance(all_pools_obj, list):
all_pools_obj = []
all_pools_obj.append(mocked_pool)
mocked_conn.listAllStoragePools.return_value = all_pools_obj
if not isinstance(mocked_pool.storageVolLookupByName, MappedResultMock):
mocked_pool.storageVolLookupByName = MappedResultMock()
mocked_pool.listVolumes.return_value = volumes
all_volumes = []
for volume in volumes:
mocked_pool.storageVolLookupByName.add(volume)
mocked_vol = mocked_pool.storageVolLookupByName(volume)
vol_path = "{}/{}".format(pool_path, volume)
mocked_vol.XMLDesc.return_value = """
<volume>
<target>
<path>{}</path>
</target>
</volume>
""".format(
vol_path,
)
mocked_vol.path.return_value = vol_path
mocked_vol.name.return_value = volume
mocked_vol.info.return_value = [
0,
1234567,
12345,
]
all_volumes.append(mocked_vol)
mocked_pool.listAllVolumes.return_value = all_volumes
return mocked_pool
return _make_mock_storage_pool
| true | true |
1c32b056b0adb053ca4604c722481eb2fb2b6a1e | 2,233 | py | Python | tests/slice_test.py | yohplala/vaex | ca7927a19d259576ca0403ee207a597aaef6adc2 | [
"MIT"
] | 1 | 2020-11-27T08:49:18.000Z | 2020-11-27T08:49:18.000Z | tests/slice_test.py | yohplala/vaex | ca7927a19d259576ca0403ee207a597aaef6adc2 | [
"MIT"
] | null | null | null | tests/slice_test.py | yohplala/vaex | ca7927a19d259576ca0403ee207a597aaef6adc2 | [
"MIT"
] | null | null | null | import pytest
from common import *
def test_slice_expression(df):
assert df.x[:2].tolist() == df[:2].x.tolist()
assert df.x[2:6].tolist() == df[2:6].x.tolist()
assert df.x[-3:].tolist() == df[-3:].x.tolist()
# we don't support non 1 steps
# assert df.x[::-3].tolist() == df[::-3].x.tolist()
def test_slice_against_numpy(df):
assert df.x[:2].tolist() == df.x.to_numpy()[:2].tolist()
assert df.x[2:6].tolist() == df.x.to_numpy()[2:6].tolist()
assert df.x[-3:].tolist() == df.x.to_numpy()[-3:].tolist()
# we don't support non 1 steps
# assert df.x[::-3].tolist() == df.x.values[::-3].tolist()
@pytest.mark.xfail(reason="Not supported yet")
def test_slice_filtered_remte(ds_remote):
df = ds_remote
dff = df[df.x > 0]
dfs = dff[1:]
assert dfs.x.values[0] == 2
def test_slice(ds_local):
ds = ds_local
ds_sliced = ds[:]
assert ds_sliced.length_original() == ds_sliced.length_unfiltered() >= 10
assert ds_sliced.get_active_range() == (0, ds_sliced.length_original())# == (0, 10)
assert ds_sliced.x.tolist() == np.arange(10.).tolist()
# trimming with a non-zero start index
ds_sliced = ds[5:]
assert ds_sliced.length_original() == ds_sliced.length_unfiltered() == 5
assert ds_sliced.get_active_range() == (0, ds_sliced.length_original()) == (0, 5)
assert ds_sliced.x.tolist() == np.arange(5, 10.).tolist()
# slice on slice
ds_sliced = ds_sliced[1:4]
assert ds_sliced.length_original() == ds_sliced.length_unfiltered() == 3
assert ds_sliced.get_active_range() == (0, ds_sliced.length_original()) == (0, 3)
assert ds_sliced.x.tolist() == np.arange(6, 9.).tolist()
def test_head(ds_local):
ds = ds_local
df = ds.head(5)
assert len(df) == 5
def test_tail(ds_local):
ds = ds_local
df = ds.tail(5)
assert len(df) == 5
def test_head_with_selection():
df = vaex.example()
df.select(df.x > 0, name='test')
df.head()
def test_slice_beyond_end(df):
df2 = df[:100]
assert df2.x.tolist() == df.x.tolist()
assert len(df2) == len(df)
def test_slice_negative(df):
df2 = df[:-1]
assert df2.x.tolist() == df.x.to_numpy()[:-1].tolist()
assert len(df2) == len(df)-1
| 29 | 87 | 0.622929 | import pytest
from common import *
def test_slice_expression(df):
assert df.x[:2].tolist() == df[:2].x.tolist()
assert df.x[2:6].tolist() == df[2:6].x.tolist()
assert df.x[-3:].tolist() == df[-3:].x.tolist()
# assert df.x[::-3].tolist() == df[::-3].x.tolist()
def test_slice_against_numpy(df):
assert df.x[:2].tolist() == df.x.to_numpy()[:2].tolist()
assert df.x[2:6].tolist() == df.x.to_numpy()[2:6].tolist()
assert df.x[-3:].tolist() == df.x.to_numpy()[-3:].tolist()
# we don't support non 1 steps
@pytest.mark.xfail(reason="Not supported yet")
def test_slice_filtered_remte(ds_remote):
df = ds_remote
dff = df[df.x > 0]
dfs = dff[1:]
assert dfs.x.values[0] == 2
def test_slice(ds_local):
ds = ds_local
ds_sliced = ds[:]
assert ds_sliced.length_original() == ds_sliced.length_unfiltered() >= 10
assert ds_sliced.get_active_range() == (0, ds_sliced.length_original())
assert ds_sliced.x.tolist() == np.arange(10.).tolist()
ds_sliced = ds[5:]
assert ds_sliced.length_original() == ds_sliced.length_unfiltered() == 5
assert ds_sliced.get_active_range() == (0, ds_sliced.length_original()) == (0, 5)
assert ds_sliced.x.tolist() == np.arange(5, 10.).tolist()
ds_sliced = ds_sliced[1:4]
assert ds_sliced.length_original() == ds_sliced.length_unfiltered() == 3
assert ds_sliced.get_active_range() == (0, ds_sliced.length_original()) == (0, 3)
assert ds_sliced.x.tolist() == np.arange(6, 9.).tolist()
def test_head(ds_local):
ds = ds_local
df = ds.head(5)
assert len(df) == 5
def test_tail(ds_local):
ds = ds_local
df = ds.tail(5)
assert len(df) == 5
def test_head_with_selection():
df = vaex.example()
df.select(df.x > 0, name='test')
df.head()
def test_slice_beyond_end(df):
df2 = df[:100]
assert df2.x.tolist() == df.x.tolist()
assert len(df2) == len(df)
def test_slice_negative(df):
df2 = df[:-1]
assert df2.x.tolist() == df.x.to_numpy()[:-1].tolist()
assert len(df2) == len(df)-1
| true | true |
1c32b07e30bd3542e714624d35aa16df31270ecc | 1,790 | py | Python | Iris_Deploy.py | DanielCalimayor/DS-Unit-3-Sprint-3-Productization-and-Cloud | d1479de0badb674daf9cbb8b5f738d214de831ed | [
"MIT"
] | null | null | null | Iris_Deploy.py | DanielCalimayor/DS-Unit-3-Sprint-3-Productization-and-Cloud | d1479de0badb674daf9cbb8b5f738d214de831ed | [
"MIT"
] | null | null | null | Iris_Deploy.py | DanielCalimayor/DS-Unit-3-Sprint-3-Productization-and-Cloud | d1479de0badb674daf9cbb8b5f738d214de831ed | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
# In[ ]:
#pip install pickle-mixin
# In[1]:
#import the basics but important libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
#Sklearn Preprocessing
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score, classification_report
from sklearn.tree import DecisionTreeClassifier
#import pickle
import pickle
import requests
import json
# In[2]:
#import
iris = pd.read_csv("https://gist.githubusercontent.com/curran/a08a1080b88344b0c8a7/raw/d546eaee765268bf2f487608c537c05e22e4b221/iris.csv")
# In[3]:
iris.head()
# In[4]:
iris.dtypes
# In[5]:
#check dist of y
plt.hist(iris['species'])
plt.show()
# In[6]:
#encode
le = LabelEncoder()
le.fit(iris['species'])
# In[7]:
iris['species'] = le.transform(iris['species'])
# In[8]:
#Features
x = iris.iloc[: , 0:4 ]
x.head()
# In[9]:
y = iris.iloc[:,4]
y.head()
# In[10]:
#split
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size = .25, random_state = 123)
# In[11]:
#model
algo = DecisionTreeClassifier()
model = algo.fit(x_train, y_train)
# In[12]:
#Predict
y_pred = model.predict(x_test)
# In[13]:
print(accuracy_score(y_test, y_pred))
# In[14]:
print(classification_report(y_test, y_pred))
# In[19]:
#pickle
pickle.dump(model, open('iris_model.pkl', 'wb'))
# In[20]:
my_model = pickle.load(open('iris_model.pkl', 'rb'))
# In[21]:
url = "https://localhost:9000/api"
# In[22]:
data = json.dumps({'sepal_width': 2.8, 'sepal_legnth': 6.3,'petal_width': 1.8, 'petal_legnth' : 5.5})
# In[24]:
send = requests.post(url, data)
# In[ ]:
# In[ ]:
# In[ ]:
| 10.170455 | 138 | 0.672626 |
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score, classification_report
from sklearn.tree import DecisionTreeClassifier
import pickle
import requests
import json
iris = pd.read_csv("https://gist.githubusercontent.com/curran/a08a1080b88344b0c8a7/raw/d546eaee765268bf2f487608c537c05e22e4b221/iris.csv")
iris.head()
iris.dtypes
plt.hist(iris['species'])
plt.show()
le = LabelEncoder()
le.fit(iris['species'])
iris['species'] = le.transform(iris['species'])
x = iris.iloc[: , 0:4 ]
x.head()
y = iris.iloc[:,4]
y.head()
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size = .25, random_state = 123)
algo = DecisionTreeClassifier()
model = algo.fit(x_train, y_train)
y_pred = model.predict(x_test)
print(accuracy_score(y_test, y_pred))
print(classification_report(y_test, y_pred))
pickle.dump(model, open('iris_model.pkl', 'wb'))
my_model = pickle.load(open('iris_model.pkl', 'rb'))
url = "https://localhost:9000/api"
data = json.dumps({'sepal_width': 2.8, 'sepal_legnth': 6.3,'petal_width': 1.8, 'petal_legnth' : 5.5})
send = requests.post(url, data)
| true | true |
1c32b0c57831827048b8dbacc63ac057b4af9b30 | 17,326 | py | Python | homeassistant/components/mqtt/light/schema_json.py | boojew/home-assistant | 697c331903f8a440a4ce324a4fb0788351dc86c3 | [
"Apache-2.0"
] | 1 | 2019-05-19T08:05:02.000Z | 2019-05-19T08:05:02.000Z | homeassistant/components/mqtt/light/schema_json.py | boojew/home-assistant | 697c331903f8a440a4ce324a4fb0788351dc86c3 | [
"Apache-2.0"
] | 6 | 2021-02-08T21:02:40.000Z | 2022-03-12T00:52:16.000Z | homeassistant/components/mqtt/light/schema_json.py | boojew/home-assistant | 697c331903f8a440a4ce324a4fb0788351dc86c3 | [
"Apache-2.0"
] | null | null | null | """
Support for MQTT JSON lights.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/light.mqtt_json/
"""
import json
import logging
import voluptuous as vol
from homeassistant.components import mqtt
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR,
ATTR_TRANSITION, ATTR_WHITE_VALUE, FLASH_LONG, FLASH_SHORT,
SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT,
SUPPORT_FLASH, SUPPORT_TRANSITION, SUPPORT_WHITE_VALUE, Light)
from homeassistant.components.mqtt import (
CONF_COMMAND_TOPIC, CONF_QOS, CONF_RETAIN, CONF_STATE_TOPIC,
MqttAttributes, MqttAvailability, MqttDiscoveryUpdate,
MqttEntityDeviceInfo, subscription)
from homeassistant.const import (
CONF_BRIGHTNESS, CONF_COLOR_TEMP, CONF_DEVICE, CONF_EFFECT, CONF_NAME,
CONF_OPTIMISTIC, CONF_RGB, CONF_WHITE_VALUE, CONF_XY, STATE_ON)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
import homeassistant.util.color as color_util
from .schema_basic import CONF_BRIGHTNESS_SCALE
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'mqtt_json'
DEPENDENCIES = ['mqtt']
DEFAULT_BRIGHTNESS = False
DEFAULT_COLOR_TEMP = False
DEFAULT_EFFECT = False
DEFAULT_FLASH_TIME_LONG = 10
DEFAULT_FLASH_TIME_SHORT = 2
DEFAULT_NAME = 'MQTT JSON Light'
DEFAULT_OPTIMISTIC = False
DEFAULT_RGB = False
DEFAULT_WHITE_VALUE = False
DEFAULT_XY = False
DEFAULT_HS = False
DEFAULT_BRIGHTNESS_SCALE = 255
CONF_EFFECT_LIST = 'effect_list'
CONF_FLASH_TIME_LONG = 'flash_time_long'
CONF_FLASH_TIME_SHORT = 'flash_time_short'
CONF_HS = 'hs'
CONF_UNIQUE_ID = 'unique_id'
# Stealing some of these from the base MQTT configs.
# Voluptuous schema for the mqtt_json light platform: validates YAML and
# discovery payloads, filling in the DEFAULT_* values declared above.
PLATFORM_SCHEMA_JSON = mqtt.MQTT_RW_PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_BRIGHTNESS, default=DEFAULT_BRIGHTNESS): cv.boolean,
    vol.Optional(CONF_BRIGHTNESS_SCALE, default=DEFAULT_BRIGHTNESS_SCALE):
        vol.All(vol.Coerce(int), vol.Range(min=1)),
    vol.Optional(CONF_COLOR_TEMP, default=DEFAULT_COLOR_TEMP): cv.boolean,
    vol.Optional(CONF_EFFECT, default=DEFAULT_EFFECT): cv.boolean,
    vol.Optional(CONF_EFFECT_LIST): vol.All(cv.ensure_list, [cv.string]),
    vol.Optional(CONF_FLASH_TIME_SHORT, default=DEFAULT_FLASH_TIME_SHORT):
        cv.positive_int,
    vol.Optional(CONF_FLASH_TIME_LONG, default=DEFAULT_FLASH_TIME_LONG):
        cv.positive_int,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_UNIQUE_ID): cv.string,
    vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
    vol.Optional(CONF_QOS, default=mqtt.DEFAULT_QOS):
        vol.All(vol.Coerce(int), vol.In([0, 1, 2])),
    vol.Optional(CONF_RETAIN, default=mqtt.DEFAULT_RETAIN): cv.boolean,
    vol.Optional(CONF_RGB, default=DEFAULT_RGB): cv.boolean,
    vol.Optional(CONF_STATE_TOPIC): mqtt.valid_subscribe_topic,
    vol.Optional(CONF_WHITE_VALUE, default=DEFAULT_WHITE_VALUE): cv.boolean,
    vol.Optional(CONF_XY, default=DEFAULT_XY): cv.boolean,
    vol.Optional(CONF_HS, default=DEFAULT_HS): cv.boolean,
    vol.Required(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
    vol.Optional(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA,
}).extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema).extend(
    mqtt.MQTT_JSON_ATTRS_SCHEMA.schema)
async def async_setup_entity_json(hass: HomeAssistantType, config: ConfigType,
                                  async_add_entities, discovery_hash):
    """Set up a MQTT JSON Light from validated config."""
    light = MqttLightJson(config, discovery_hash)
    async_add_entities([light])
# pylint: disable=too-many-ancestors
class MqttLightJson(MqttAttributes, MqttAvailability, MqttDiscoveryUpdate,
                    MqttEntityDeviceInfo, Light, RestoreEntity):
    """Representation of a MQTT JSON light.

    State is exchanged as JSON payloads on the configured command/state
    topics; which JSON keys are used depends on the brightness/color/
    effect options enabled in the platform config.
    """

    def __init__(self, config, discovery_hash):
        """Initialize MQTT JSON light."""
        # Placeholders only; _setup_from_config() below fills in the
        # real values (and again on every discovery update).
        self._state = False
        self._sub_state = None
        self._supported_features = 0
        self._topic = None
        self._optimistic = False
        self._brightness = None
        self._color_temp = None
        self._effect = None
        self._hs = None
        self._white_value = None
        self._flash_times = None
        self._unique_id = config.get(CONF_UNIQUE_ID)
        # Load config
        self._setup_from_config(config)
        device_config = config.get(CONF_DEVICE)
        MqttAttributes.__init__(self, config)
        MqttAvailability.__init__(self, config)
        MqttDiscoveryUpdate.__init__(self, discovery_hash,
                                     self.discovery_update)
        MqttEntityDeviceInfo.__init__(self, device_config)
    async def async_added_to_hass(self):
        """Subscribe to MQTT events."""
        await super().async_added_to_hass()
        await self._subscribe_topics()
    async def discovery_update(self, discovery_payload):
        """Handle updated discovery message.

        Re-validates the payload against the platform schema and
        re-applies config and subscriptions.
        """
        config = PLATFORM_SCHEMA_JSON(discovery_payload)
        self._setup_from_config(config)
        await self.attributes_discovery_update(config)
        await self.availability_discovery_update(config)
        await self._subscribe_topics()
        self.async_schedule_update_ha_state()
    def _setup_from_config(self, config):
        """(Re)Setup the entity from a validated config dict."""
        self._config = config
        self._topic = {
            key: config.get(key) for key in (
                CONF_STATE_TOPIC,
                CONF_COMMAND_TOPIC
            )
        }
        # Without a state topic the light can never confirm commands, so
        # it is forced into optimistic mode.
        optimistic = config.get(CONF_OPTIMISTIC)
        self._optimistic = optimistic or self._topic[CONF_STATE_TOPIC] is None
        # For each capability, a non-None attribute doubles as the
        # "supported" flag; the values below are sentinel defaults
        # (full brightness/white, 150 mired, effect 'none').
        brightness = config.get(CONF_BRIGHTNESS)
        if brightness:
            self._brightness = 255
        else:
            self._brightness = None
        color_temp = config.get(CONF_COLOR_TEMP)
        if color_temp:
            self._color_temp = 150
        else:
            self._color_temp = None
        effect = config.get(CONF_EFFECT)
        if effect:
            self._effect = 'none'
        else:
            self._effect = None
        white_value = config.get(CONF_WHITE_VALUE)
        if white_value:
            self._white_value = 255
        else:
            self._white_value = None
        # Any of the three color representations enables hs tracking.
        if config.get(CONF_HS) or config.get(CONF_RGB) or config.get(CONF_XY):
            self._hs = [0, 0]
        else:
            self._hs = None
        self._flash_times = {
            key: config.get(key) for key in (
                CONF_FLASH_TIME_SHORT,
                CONF_FLASH_TIME_LONG
            )
        }
        # `x and FLAG` yields FLAG or False; `|= False` is a no-op since
        # False == 0 under bitwise OR.
        self._supported_features = (SUPPORT_TRANSITION | SUPPORT_FLASH)
        self._supported_features |= (config.get(CONF_RGB) and SUPPORT_COLOR)
        self._supported_features |= (brightness and SUPPORT_BRIGHTNESS)
        self._supported_features |= (color_temp and SUPPORT_COLOR_TEMP)
        self._supported_features |= (effect and SUPPORT_EFFECT)
        self._supported_features |= (white_value and SUPPORT_WHITE_VALUE)
        self._supported_features |= (config.get(CONF_XY) and SUPPORT_COLOR)
        self._supported_features |= (config.get(CONF_HS) and SUPPORT_COLOR)
    async def _subscribe_topics(self):
        """(Re)Subscribe to topics and restore optimistic state."""
        last_state = await self.async_get_last_state()
        @callback
        def state_received(topic, payload, qos):
            """Handle new MQTT state messages.

            All payload keys are optional (hence the KeyError passes);
            when several color encodings are present, the later branch
            wins: r/g/b, then x/y, then h/s.
            """
            values = json.loads(payload)
            if values['state'] == 'ON':
                self._state = True
            elif values['state'] == 'OFF':
                self._state = False
            if self._hs is not None:
                try:
                    red = int(values['color']['r'])
                    green = int(values['color']['g'])
                    blue = int(values['color']['b'])
                    self._hs = color_util.color_RGB_to_hs(red, green, blue)
                except KeyError:
                    pass
                except ValueError:
                    _LOGGER.warning("Invalid RGB color value received")
                try:
                    x_color = float(values['color']['x'])
                    y_color = float(values['color']['y'])
                    self._hs = color_util.color_xy_to_hs(x_color, y_color)
                except KeyError:
                    pass
                except ValueError:
                    _LOGGER.warning("Invalid XY color value received")
                try:
                    hue = float(values['color']['h'])
                    saturation = float(values['color']['s'])
                    self._hs = (hue, saturation)
                except KeyError:
                    pass
                except ValueError:
                    _LOGGER.warning("Invalid HS color value received")
            if self._brightness is not None:
                try:
                    # Scale from the device's configured range to HA's 0-255.
                    self._brightness = int(
                        values['brightness'] /
                        float(self._config.get(CONF_BRIGHTNESS_SCALE)) * 255)
                except KeyError:
                    pass
                except ValueError:
                    _LOGGER.warning("Invalid brightness value received")
            if self._color_temp is not None:
                try:
                    self._color_temp = int(values['color_temp'])
                except KeyError:
                    pass
                except ValueError:
                    _LOGGER.warning("Invalid color temp value received")
            if self._effect is not None:
                try:
                    self._effect = values['effect']
                except KeyError:
                    pass
                except ValueError:
                    _LOGGER.warning("Invalid effect value received")
            if self._white_value is not None:
                try:
                    self._white_value = int(values['white_value'])
                except KeyError:
                    pass
                except ValueError:
                    _LOGGER.warning("Invalid white value received")
            self.async_schedule_update_ha_state()
        if self._topic[CONF_STATE_TOPIC] is not None:
            self._sub_state = await subscription.async_subscribe_topics(
                self.hass, self._sub_state,
                {'state_topic': {'topic': self._topic[CONF_STATE_TOPIC],
                                 'msg_callback': state_received,
                                 'qos': self._config.get(CONF_QOS)}})
        # In optimistic mode, seed attributes from the last known HA
        # state since no device state will ever arrive over MQTT.
        if self._optimistic and last_state:
            self._state = last_state.state == STATE_ON
            if last_state.attributes.get(ATTR_BRIGHTNESS):
                self._brightness = last_state.attributes.get(ATTR_BRIGHTNESS)
            if last_state.attributes.get(ATTR_HS_COLOR):
                self._hs = last_state.attributes.get(ATTR_HS_COLOR)
            if last_state.attributes.get(ATTR_COLOR_TEMP):
                self._color_temp = last_state.attributes.get(ATTR_COLOR_TEMP)
            if last_state.attributes.get(ATTR_EFFECT):
                self._effect = last_state.attributes.get(ATTR_EFFECT)
            if last_state.attributes.get(ATTR_WHITE_VALUE):
                self._white_value = last_state.attributes.get(ATTR_WHITE_VALUE)
    async def async_will_remove_from_hass(self):
        """Unsubscribe when removed."""
        self._sub_state = await subscription.async_unsubscribe_topics(
            self.hass, self._sub_state)
        await MqttAttributes.async_will_remove_from_hass(self)
        await MqttAvailability.async_will_remove_from_hass(self)
    @property
    def brightness(self):
        """Return the brightness of this light between 0..255."""
        return self._brightness
    @property
    def color_temp(self):
        """Return the color temperature in mired."""
        return self._color_temp
    @property
    def effect(self):
        """Return the current effect."""
        return self._effect
    @property
    def effect_list(self):
        """Return the list of supported effects."""
        return self._config.get(CONF_EFFECT_LIST)
    @property
    def hs_color(self):
        """Return the hs color value."""
        return self._hs
    @property
    def white_value(self):
        """Return the white property."""
        return self._white_value
    @property
    def should_poll(self):
        """No polling needed for a MQTT light."""
        return False
    @property
    def name(self):
        """Return the name of the device if any."""
        return self._config.get(CONF_NAME)
    @property
    def unique_id(self):
        """Return a unique ID."""
        return self._unique_id
    @property
    def is_on(self):
        """Return true if device is on."""
        return self._state
    @property
    def assumed_state(self):
        """Return true if we do optimistic updates."""
        return self._optimistic
    @property
    def supported_features(self):
        """Flag supported features."""
        return self._supported_features
    async def async_turn_on(self, **kwargs):
        """Turn the device on.

        Builds one JSON command message from the service call kwargs and
        publishes it; in optimistic mode also mirrors the requested
        attributes into local state immediately.
        This method is a coroutine.
        """
        should_update = False
        message = {'state': 'ON'}
        if ATTR_HS_COLOR in kwargs and (
                self._config.get(CONF_HS) or self._config.get(CONF_RGB)
                or self._config.get(CONF_XY)):
            hs_color = kwargs[ATTR_HS_COLOR]
            message['color'] = {}
            if self._config.get(CONF_RGB):
                # If there's a brightness topic set, we don't want to scale the
                # RGB values given using the brightness.
                if self._brightness is not None:
                    brightness = 255
                else:
                    brightness = kwargs.get(
                        ATTR_BRIGHTNESS,
                        self._brightness if self._brightness else 255)
                rgb = color_util.color_hsv_to_RGB(
                    hs_color[0], hs_color[1], brightness / 255 * 100)
                message['color']['r'] = rgb[0]
                message['color']['g'] = rgb[1]
                message['color']['b'] = rgb[2]
            if self._config.get(CONF_XY):
                xy_color = color_util.color_hs_to_xy(*kwargs[ATTR_HS_COLOR])
                message['color']['x'] = xy_color[0]
                message['color']['y'] = xy_color[1]
            if self._config.get(CONF_HS):
                message['color']['h'] = hs_color[0]
                message['color']['s'] = hs_color[1]
            if self._optimistic:
                self._hs = kwargs[ATTR_HS_COLOR]
                should_update = True
        if ATTR_FLASH in kwargs:
            flash = kwargs.get(ATTR_FLASH)
            if flash == FLASH_LONG:
                message['flash'] = self._flash_times[CONF_FLASH_TIME_LONG]
            elif flash == FLASH_SHORT:
                message['flash'] = self._flash_times[CONF_FLASH_TIME_SHORT]
        if ATTR_TRANSITION in kwargs:
            message['transition'] = int(kwargs[ATTR_TRANSITION])
        if ATTR_BRIGHTNESS in kwargs:
            # Convert HA's 0-255 brightness to the device's scale.
            message['brightness'] = int(
                kwargs[ATTR_BRIGHTNESS] / float(DEFAULT_BRIGHTNESS_SCALE) *
                self._config.get(CONF_BRIGHTNESS_SCALE))
            if self._optimistic:
                self._brightness = kwargs[ATTR_BRIGHTNESS]
                should_update = True
        if ATTR_COLOR_TEMP in kwargs:
            message['color_temp'] = int(kwargs[ATTR_COLOR_TEMP])
            if self._optimistic:
                self._color_temp = kwargs[ATTR_COLOR_TEMP]
                should_update = True
        if ATTR_EFFECT in kwargs:
            message['effect'] = kwargs[ATTR_EFFECT]
            if self._optimistic:
                self._effect = kwargs[ATTR_EFFECT]
                should_update = True
        if ATTR_WHITE_VALUE in kwargs:
            message['white_value'] = int(kwargs[ATTR_WHITE_VALUE])
            if self._optimistic:
                self._white_value = kwargs[ATTR_WHITE_VALUE]
                should_update = True
        mqtt.async_publish(
            self.hass, self._topic[CONF_COMMAND_TOPIC], json.dumps(message),
            self._config.get(CONF_QOS), self._config.get(CONF_RETAIN))
        if self._optimistic:
            # Optimistically assume that the light has changed state.
            self._state = True
            should_update = True
        if should_update:
            self.async_schedule_update_ha_state()
    async def async_turn_off(self, **kwargs):
        """Turn the device off.

        This method is a coroutine.
        """
        message = {'state': 'OFF'}
        if ATTR_TRANSITION in kwargs:
            message['transition'] = int(kwargs[ATTR_TRANSITION])
        mqtt.async_publish(
            self.hass, self._topic[CONF_COMMAND_TOPIC], json.dumps(message),
            self._config.get(CONF_QOS), self._config.get(CONF_RETAIN))
        if self._optimistic:
            # Optimistically assume that the light has changed state.
            self._state = False
            self.async_schedule_update_ha_state()
| 36.246862 | 79 | 0.616588 | import json
import logging
import voluptuous as vol
from homeassistant.components import mqtt
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR,
ATTR_TRANSITION, ATTR_WHITE_VALUE, FLASH_LONG, FLASH_SHORT,
SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT,
SUPPORT_FLASH, SUPPORT_TRANSITION, SUPPORT_WHITE_VALUE, Light)
from homeassistant.components.mqtt import (
CONF_COMMAND_TOPIC, CONF_QOS, CONF_RETAIN, CONF_STATE_TOPIC,
MqttAttributes, MqttAvailability, MqttDiscoveryUpdate,
MqttEntityDeviceInfo, subscription)
from homeassistant.const import (
CONF_BRIGHTNESS, CONF_COLOR_TEMP, CONF_DEVICE, CONF_EFFECT, CONF_NAME,
CONF_OPTIMISTIC, CONF_RGB, CONF_WHITE_VALUE, CONF_XY, STATE_ON)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
import homeassistant.util.color as color_util
from .schema_basic import CONF_BRIGHTNESS_SCALE
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'mqtt_json'
DEPENDENCIES = ['mqtt']
DEFAULT_BRIGHTNESS = False
DEFAULT_COLOR_TEMP = False
DEFAULT_EFFECT = False
DEFAULT_FLASH_TIME_LONG = 10
DEFAULT_FLASH_TIME_SHORT = 2
DEFAULT_NAME = 'MQTT JSON Light'
DEFAULT_OPTIMISTIC = False
DEFAULT_RGB = False
DEFAULT_WHITE_VALUE = False
DEFAULT_XY = False
DEFAULT_HS = False
DEFAULT_BRIGHTNESS_SCALE = 255
CONF_EFFECT_LIST = 'effect_list'
CONF_FLASH_TIME_LONG = 'flash_time_long'
CONF_FLASH_TIME_SHORT = 'flash_time_short'
CONF_HS = 'hs'
CONF_UNIQUE_ID = 'unique_id'
PLATFORM_SCHEMA_JSON = mqtt.MQTT_RW_PLATFORM_SCHEMA.extend({
vol.Optional(CONF_BRIGHTNESS, default=DEFAULT_BRIGHTNESS): cv.boolean,
vol.Optional(CONF_BRIGHTNESS_SCALE, default=DEFAULT_BRIGHTNESS_SCALE):
vol.All(vol.Coerce(int), vol.Range(min=1)),
vol.Optional(CONF_COLOR_TEMP, default=DEFAULT_COLOR_TEMP): cv.boolean,
vol.Optional(CONF_EFFECT, default=DEFAULT_EFFECT): cv.boolean,
vol.Optional(CONF_EFFECT_LIST): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_FLASH_TIME_SHORT, default=DEFAULT_FLASH_TIME_SHORT):
cv.positive_int,
vol.Optional(CONF_FLASH_TIME_LONG, default=DEFAULT_FLASH_TIME_LONG):
cv.positive_int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_UNIQUE_ID): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_QOS, default=mqtt.DEFAULT_QOS):
vol.All(vol.Coerce(int), vol.In([0, 1, 2])),
vol.Optional(CONF_RETAIN, default=mqtt.DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_RGB, default=DEFAULT_RGB): cv.boolean,
vol.Optional(CONF_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_WHITE_VALUE, default=DEFAULT_WHITE_VALUE): cv.boolean,
vol.Optional(CONF_XY, default=DEFAULT_XY): cv.boolean,
vol.Optional(CONF_HS, default=DEFAULT_HS): cv.boolean,
vol.Required(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA,
}).extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema).extend(
mqtt.MQTT_JSON_ATTRS_SCHEMA.schema)
async def async_setup_entity_json(hass: HomeAssistantType, config: ConfigType,
                                  async_add_entities, discovery_hash):
    """Set up a MQTT JSON Light from a validated config or discovery hash."""
    async_add_entities([MqttLightJson(config, discovery_hash)])
class MqttLightJson(MqttAttributes, MqttAvailability, MqttDiscoveryUpdate,
MqttEntityDeviceInfo, Light, RestoreEntity):
    def __init__(self, config, discovery_hash):
        """Initialize MQTT JSON light."""
        # Runtime state; updated from MQTT state messages or optimistically.
        self._state = False
        self._sub_state = None
        self._supported_features = 0
        self._topic = None
        self._optimistic = False
        self._brightness = None
        self._color_temp = None
        self._effect = None
        self._hs = None
        self._white_value = None
        self._flash_times = None
        self._unique_id = config.get(CONF_UNIQUE_ID)

        # Load config-derived state (topics, defaults, supported features).
        self._setup_from_config(config)

        device_config = config.get(CONF_DEVICE)

        # Each MQTT mixin is initialized explicitly with its slice of config.
        MqttAttributes.__init__(self, config)
        MqttAvailability.__init__(self, config)
        MqttDiscoveryUpdate.__init__(self, discovery_hash,
                                     self.discovery_update)
        MqttEntityDeviceInfo.__init__(self, device_config)
    async def async_added_to_hass(self):
        """Subscribe to MQTT events when the entity is added."""
        await super().async_added_to_hass()
        await self._subscribe_topics()
    async def discovery_update(self, discovery_payload):
        """Handle updated discovery message."""
        # Re-validate the payload and rebuild all config-derived state.
        config = PLATFORM_SCHEMA_JSON(discovery_payload)
        self._setup_from_config(config)
        await self.attributes_discovery_update(config)
        await self.availability_discovery_update(config)
        await self._subscribe_topics()
        self.async_schedule_update_ha_state()
def _setup_from_config(self, config):
self._config = config
self._topic = {
key: config.get(key) for key in (
CONF_STATE_TOPIC,
CONF_COMMAND_TOPIC
)
}
optimistic = config.get(CONF_OPTIMISTIC)
self._optimistic = optimistic or self._topic[CONF_STATE_TOPIC] is None
brightness = config.get(CONF_BRIGHTNESS)
if brightness:
self._brightness = 255
else:
self._brightness = None
color_temp = config.get(CONF_COLOR_TEMP)
if color_temp:
self._color_temp = 150
else:
self._color_temp = None
effect = config.get(CONF_EFFECT)
if effect:
self._effect = 'none'
else:
self._effect = None
white_value = config.get(CONF_WHITE_VALUE)
if white_value:
self._white_value = 255
else:
self._white_value = None
if config.get(CONF_HS) or config.get(CONF_RGB) or config.get(CONF_XY):
self._hs = [0, 0]
else:
self._hs = None
self._flash_times = {
key: config.get(key) for key in (
CONF_FLASH_TIME_SHORT,
CONF_FLASH_TIME_LONG
)
}
self._supported_features = (SUPPORT_TRANSITION | SUPPORT_FLASH)
self._supported_features |= (config.get(CONF_RGB) and SUPPORT_COLOR)
self._supported_features |= (brightness and SUPPORT_BRIGHTNESS)
self._supported_features |= (color_temp and SUPPORT_COLOR_TEMP)
self._supported_features |= (effect and SUPPORT_EFFECT)
self._supported_features |= (white_value and SUPPORT_WHITE_VALUE)
self._supported_features |= (config.get(CONF_XY) and SUPPORT_COLOR)
self._supported_features |= (config.get(CONF_HS) and SUPPORT_COLOR)
    async def _subscribe_topics(self):
        """(Re)Subscribe to topics."""
        last_state = await self.async_get_last_state()

        @callback
        def state_received(topic, payload, qos):
            """Handle new MQTT messages."""
            values = json.loads(payload)

            if values['state'] == 'ON':
                self._state = True
            elif values['state'] == 'OFF':
                self._state = False

            if self._hs is not None:
                # Color may arrive as RGB, XY or hue/saturation; each form
                # is tried in turn and missing keys are simply skipped.
                try:
                    red = int(values['color']['r'])
                    green = int(values['color']['g'])
                    blue = int(values['color']['b'])
                    self._hs = color_util.color_RGB_to_hs(red, green, blue)
                except KeyError:
                    pass
                except ValueError:
                    _LOGGER.warning("Invalid RGB color value received")

                try:
                    x_color = float(values['color']['x'])
                    y_color = float(values['color']['y'])
                    self._hs = color_util.color_xy_to_hs(x_color, y_color)
                except KeyError:
                    pass
                except ValueError:
                    _LOGGER.warning("Invalid XY color value received")

                try:
                    hue = float(values['color']['h'])
                    saturation = float(values['color']['s'])
                    self._hs = (hue, saturation)
                except KeyError:
                    pass
                except ValueError:
                    _LOGGER.warning("Invalid HS color value received")

            if self._brightness is not None:
                try:
                    # Rescale the device's brightness range to HA's 0-255.
                    self._brightness = int(
                        values['brightness'] /
                        float(self._config.get(CONF_BRIGHTNESS_SCALE)) * 255)
                except KeyError:
                    pass
                except ValueError:
                    _LOGGER.warning("Invalid brightness value received")

            if self._color_temp is not None:
                try:
                    self._color_temp = int(values['color_temp'])
                except KeyError:
                    pass
                except ValueError:
                    _LOGGER.warning("Invalid color temp value received")

            if self._effect is not None:
                try:
                    self._effect = values['effect']
                except KeyError:
                    pass
                except ValueError:
                    _LOGGER.warning("Invalid effect value received")

            if self._white_value is not None:
                try:
                    self._white_value = int(values['white_value'])
                except KeyError:
                    pass
                except ValueError:
                    _LOGGER.warning("Invalid white value received")

            self.async_schedule_update_ha_state()

        if self._topic[CONF_STATE_TOPIC] is not None:
            self._sub_state = await subscription.async_subscribe_topics(
                self.hass, self._sub_state,
                {'state_topic': {'topic': self._topic[CONF_STATE_TOPIC],
                                 'msg_callback': state_received,
                                 'qos': self._config.get(CONF_QOS)}})

        if self._optimistic and last_state:
            # In optimistic mode restore the last state saved by
            # RestoreEntity, since no state topic will refresh it.
            self._state = last_state.state == STATE_ON
            if last_state.attributes.get(ATTR_BRIGHTNESS):
                self._brightness = last_state.attributes.get(ATTR_BRIGHTNESS)
            if last_state.attributes.get(ATTR_HS_COLOR):
                self._hs = last_state.attributes.get(ATTR_HS_COLOR)
            if last_state.attributes.get(ATTR_COLOR_TEMP):
                self._color_temp = last_state.attributes.get(ATTR_COLOR_TEMP)
            if last_state.attributes.get(ATTR_EFFECT):
                self._effect = last_state.attributes.get(ATTR_EFFECT)
            if last_state.attributes.get(ATTR_WHITE_VALUE):
                self._white_value = last_state.attributes.get(ATTR_WHITE_VALUE)
    async def async_will_remove_from_hass(self):
        """Unsubscribe from MQTT topics when entity is removed."""
        self._sub_state = await subscription.async_unsubscribe_topics(
            self.hass, self._sub_state)
        await MqttAttributes.async_will_remove_from_hass(self)
        await MqttAvailability.async_will_remove_from_hass(self)
    @property
    def brightness(self):
        """Return the brightness of this light between 0..255."""
        return self._brightness

    @property
    def color_temp(self):
        """Return the color temperature in mired."""
        return self._color_temp

    @property
    def effect(self):
        """Return the current effect."""
        return self._effect

    @property
    def effect_list(self):
        """Return the list of supported effects."""
        return self._config.get(CONF_EFFECT_LIST)

    @property
    def hs_color(self):
        """Return the hs color value."""
        return self._hs

    @property
    def white_value(self):
        """Return the white property."""
        return self._white_value

    @property
    def should_poll(self):
        """No polling needed for a MQTT light."""
        return False

    @property
    def name(self):
        """Return the name of the device if any."""
        return self._config.get(CONF_NAME)

    @property
    def unique_id(self):
        """Return a unique ID."""
        return self._unique_id

    @property
    def is_on(self):
        """Return true if device is on."""
        return self._state

    @property
    def assumed_state(self):
        """Return true if we do optimistic updates."""
        return self._optimistic

    @property
    def supported_features(self):
        """Flag supported features."""
        return self._supported_features
    async def async_turn_on(self, **kwargs):
        """Turn the device on.

        This method is a coroutine.
        """
        should_update = False

        message = {'state': 'ON'}

        if ATTR_HS_COLOR in kwargs and (
                self._config.get(CONF_HS) or self._config.get(CONF_RGB)
                or self._config.get(CONF_XY)):
            hs_color = kwargs[ATTR_HS_COLOR]
            message['color'] = {}

            if self._config.get(CONF_RGB):
                # When brightness is tracked separately it is sent in its own
                # field below, so full-brightness RGB values are sent here.
                if self._brightness is not None:
                    brightness = 255
                else:
                    brightness = kwargs.get(
                        ATTR_BRIGHTNESS,
                        self._brightness if self._brightness else 255)
                rgb = color_util.color_hsv_to_RGB(
                    hs_color[0], hs_color[1], brightness / 255 * 100)
                message['color']['r'] = rgb[0]
                message['color']['g'] = rgb[1]
                message['color']['b'] = rgb[2]

            if self._config.get(CONF_XY):
                xy_color = color_util.color_hs_to_xy(*kwargs[ATTR_HS_COLOR])
                message['color']['x'] = xy_color[0]
                message['color']['y'] = xy_color[1]

            if self._config.get(CONF_HS):
                message['color']['h'] = hs_color[0]
                message['color']['s'] = hs_color[1]

            if self._optimistic:
                self._hs = kwargs[ATTR_HS_COLOR]
                should_update = True

        if ATTR_FLASH in kwargs:
            flash = kwargs.get(ATTR_FLASH)

            if flash == FLASH_LONG:
                message['flash'] = self._flash_times[CONF_FLASH_TIME_LONG]
            elif flash == FLASH_SHORT:
                message['flash'] = self._flash_times[CONF_FLASH_TIME_SHORT]

        if ATTR_TRANSITION in kwargs:
            message['transition'] = int(kwargs[ATTR_TRANSITION])

        if ATTR_BRIGHTNESS in kwargs:
            # Rescale HA's 0-255 brightness to the configured device scale.
            message['brightness'] = int(
                kwargs[ATTR_BRIGHTNESS] / float(DEFAULT_BRIGHTNESS_SCALE) *
                self._config.get(CONF_BRIGHTNESS_SCALE))

            if self._optimistic:
                self._brightness = kwargs[ATTR_BRIGHTNESS]
                should_update = True

        if ATTR_COLOR_TEMP in kwargs:
            message['color_temp'] = int(kwargs[ATTR_COLOR_TEMP])

            if self._optimistic:
                self._color_temp = kwargs[ATTR_COLOR_TEMP]
                should_update = True

        if ATTR_EFFECT in kwargs:
            message['effect'] = kwargs[ATTR_EFFECT]

            if self._optimistic:
                self._effect = kwargs[ATTR_EFFECT]
                should_update = True

        if ATTR_WHITE_VALUE in kwargs:
            message['white_value'] = int(kwargs[ATTR_WHITE_VALUE])

            if self._optimistic:
                self._white_value = kwargs[ATTR_WHITE_VALUE]
                should_update = True

        mqtt.async_publish(
            self.hass, self._topic[CONF_COMMAND_TOPIC], json.dumps(message),
            self._config.get(CONF_QOS), self._config.get(CONF_RETAIN))

        if self._optimistic:
            # Optimistically assume that the light has changed state.
            self._state = True
            should_update = True

        if should_update:
            self.async_schedule_update_ha_state()
    async def async_turn_off(self, **kwargs):
        """Turn the device off.

        This method is a coroutine.
        """
        message = {'state': 'OFF'}

        if ATTR_TRANSITION in kwargs:
            message['transition'] = int(kwargs[ATTR_TRANSITION])

        mqtt.async_publish(
            self.hass, self._topic[CONF_COMMAND_TOPIC], json.dumps(message),
            self._config.get(CONF_QOS), self._config.get(CONF_RETAIN))

        if self._optimistic:
            # Optimistically assume that the light has changed state.
            self._state = False
            self.async_schedule_update_ha_state()
| true | true |
1c32b184f46700426a11d5fd1157683e7e2958c8 | 5,913 | py | Python | tfx/extensions/google_cloud_ai_platform/tuner/executor_test.py | avelez93/tfx | 75fbb6a7d50e99138609be3ca4c3a204a13a2195 | [
"Apache-2.0"
] | null | null | null | tfx/extensions/google_cloud_ai_platform/tuner/executor_test.py | avelez93/tfx | 75fbb6a7d50e99138609be3ca4c3a204a13a2195 | [
"Apache-2.0"
] | null | null | null | tfx/extensions/google_cloud_ai_platform/tuner/executor_test.py | avelez93/tfx | 75fbb6a7d50e99138609be3ca4c3a204a13a2195 | [
"Apache-2.0"
] | 1 | 2020-12-13T22:07:53.000Z | 2020-12-13T22:07:53.000Z | # Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Cloud AI Platform Tuner Executor."""
import copy
import os
from typing import Any, Dict
from unittest import mock
import tensorflow as tf
from tfx.extensions.google_cloud_ai_platform import constants
from tfx.extensions.google_cloud_ai_platform.trainer import executor as ai_platform_trainer_executor
from tfx.extensions.google_cloud_ai_platform.tuner import executor as ai_platform_tuner_executor
from tfx.proto import tuner_pb2
from tfx.types import standard_component_specs
from tfx.utils import json_utils
from tfx.utils import proto_utils
class ExecutorTest(tf.test.TestCase):
def setUp(self):
super().setUp()
self._output_data_dir = os.path.join(
os.environ.get('TEST_UNDECLARED_OUTPUTS_DIR', self.get_temp_dir()),
self._testMethodName)
self._job_dir = os.path.join(self._output_data_dir, 'jobDir')
self._project_id = '12345'
self._job_id = 'fake_job_id'
self._inputs = {}
self._outputs = {}
# Dict format of exec_properties. custom_config needs to be serialized
# before being passed into Do function.
self._exec_properties = {
'custom_config': {
ai_platform_trainer_executor.JOB_ID_KEY: self._job_id,
ai_platform_tuner_executor.TUNING_ARGS_KEY: {
'project': self._project_id,
'jobDir': self._job_dir,
},
},
}
self._executor_class_path = '%s.%s' % (
ai_platform_tuner_executor._WorkerExecutor.__module__,
ai_platform_tuner_executor._WorkerExecutor.__name__)
self.addCleanup(mock.patch.stopall)
self.mock_runner = mock.patch(
'tfx.extensions.google_cloud_ai_platform.tuner.executor.runner').start(
)
def _serialize_custom_config_under_test(self) -> Dict[str, Any]:
"""Converts self._exec_properties['custom_config'] to string."""
result = copy.deepcopy(self._exec_properties)
result['custom_config'] = json_utils.dumps(result['custom_config'])
return result
def testDo(self):
executor = ai_platform_tuner_executor.Executor()
executor.Do(self._inputs, self._outputs,
self._serialize_custom_config_under_test())
def testDoWithTuneArgs(self):
executor = ai_platform_tuner_executor.Executor()
self._exec_properties['tune_args'] = proto_utils.proto_to_json(
tuner_pb2.TuneArgs(num_parallel_trials=3))
executor.Do(self._inputs, self._outputs,
self._serialize_custom_config_under_test())
self.mock_runner.start_cloud_training.assert_called_with(
self._inputs, self._outputs, self._serialize_custom_config_under_test(),
self._executor_class_path, {
'project': self._project_id,
'jobDir': self._job_dir,
'scaleTier': 'CUSTOM',
'masterType': 'standard',
'workerType': 'standard',
'workerCount': 2,
}, self._job_id, False, None)
def testDoWithTuneArgsAndTrainingInputOverride(self):
executor = ai_platform_tuner_executor.Executor()
self._exec_properties['tune_args'] = proto_utils.proto_to_json(
tuner_pb2.TuneArgs(num_parallel_trials=6))
self._exec_properties['custom_config'][
ai_platform_tuner_executor.TUNING_ARGS_KEY].update({
'scaleTier': 'CUSTOM',
'masterType': 'n1-highmem-16',
'workerType': 'n1-highmem-16',
'workerCount': 2,
})
executor.Do(self._inputs, self._outputs,
self._serialize_custom_config_under_test())
self.mock_runner.start_cloud_training.assert_called_with(
self._inputs,
self._outputs,
self._serialize_custom_config_under_test(),
self._executor_class_path,
{
'project': self._project_id,
'jobDir': self._job_dir,
# Confirm scale tier and machine types are not overritten.
'scaleTier': 'CUSTOM',
'masterType': 'n1-highmem-16',
'workerType': 'n1-highmem-16',
# Confirm workerCount has been adjusted to num_parallel_trials.
'workerCount': 5,
},
self._job_id, False, None)
def testDoWithoutCustomCaipTuneArgs(self):
executor = ai_platform_tuner_executor.Executor()
self._exec_properties = {'custom_config': {}}
with self.assertRaises(ValueError):
executor.Do(self._inputs, self._outputs,
self._serialize_custom_config_under_test())
def testDoWithEnableVertexOverride(self):
executor = ai_platform_tuner_executor.Executor()
enable_vertex = True
vertex_region = 'us-central2'
self._exec_properties[standard_component_specs.CUSTOM_CONFIG_KEY][
constants.ENABLE_VERTEX_KEY] = enable_vertex
self._exec_properties[standard_component_specs.CUSTOM_CONFIG_KEY][
constants.VERTEX_REGION_KEY] = vertex_region
executor.Do(self._inputs, self._outputs,
self._serialize_custom_config_under_test())
self.mock_runner.start_cloud_training.assert_called_with(
self._inputs, self._outputs, self._serialize_custom_config_under_test(),
self._executor_class_path, {
'project': self._project_id,
'jobDir': self._job_dir,
}, self._job_id, enable_vertex, vertex_region)
if __name__ == '__main__':
tf.test.main()
| 38.647059 | 100 | 0.70066 |
import copy
import os
from typing import Any, Dict
from unittest import mock
import tensorflow as tf
from tfx.extensions.google_cloud_ai_platform import constants
from tfx.extensions.google_cloud_ai_platform.trainer import executor as ai_platform_trainer_executor
from tfx.extensions.google_cloud_ai_platform.tuner import executor as ai_platform_tuner_executor
from tfx.proto import tuner_pb2
from tfx.types import standard_component_specs
from tfx.utils import json_utils
from tfx.utils import proto_utils
class ExecutorTest(tf.test.TestCase):
  def setUp(self):
    super().setUp()
    self._output_data_dir = os.path.join(
        os.environ.get('TEST_UNDECLARED_OUTPUTS_DIR', self.get_temp_dir()),
        self._testMethodName)
    self._job_dir = os.path.join(self._output_data_dir, 'jobDir')
    self._project_id = '12345'
    self._job_id = 'fake_job_id'
    self._inputs = {}
    self._outputs = {}
    # Dict format of exec_properties. custom_config needs to be serialized
    # before being passed into Do function.
    self._exec_properties = {
        'custom_config': {
            ai_platform_trainer_executor.JOB_ID_KEY: self._job_id,
            ai_platform_tuner_executor.TUNING_ARGS_KEY: {
                'project': self._project_id,
                'jobDir': self._job_dir,
            },
        },
    }
    self._executor_class_path = '%s.%s' % (
        ai_platform_tuner_executor._WorkerExecutor.__module__,
        ai_platform_tuner_executor._WorkerExecutor.__name__)
    self.addCleanup(mock.patch.stopall)
    # Patch the runner module so no real Cloud AI Platform jobs are launched.
    self.mock_runner = mock.patch(
        'tfx.extensions.google_cloud_ai_platform.tuner.executor.runner').start(
        )
  def _serialize_custom_config_under_test(self) -> Dict[str, Any]:
    """Converts self._exec_properties['custom_config'] to string."""
    result = copy.deepcopy(self._exec_properties)
    result['custom_config'] = json_utils.dumps(result['custom_config'])
    return result
def testDo(self):
executor = ai_platform_tuner_executor.Executor()
executor.Do(self._inputs, self._outputs,
self._serialize_custom_config_under_test())
def testDoWithTuneArgs(self):
executor = ai_platform_tuner_executor.Executor()
self._exec_properties['tune_args'] = proto_utils.proto_to_json(
tuner_pb2.TuneArgs(num_parallel_trials=3))
executor.Do(self._inputs, self._outputs,
self._serialize_custom_config_under_test())
self.mock_runner.start_cloud_training.assert_called_with(
self._inputs, self._outputs, self._serialize_custom_config_under_test(),
self._executor_class_path, {
'project': self._project_id,
'jobDir': self._job_dir,
'scaleTier': 'CUSTOM',
'masterType': 'standard',
'workerType': 'standard',
'workerCount': 2,
}, self._job_id, False, None)
def testDoWithTuneArgsAndTrainingInputOverride(self):
executor = ai_platform_tuner_executor.Executor()
self._exec_properties['tune_args'] = proto_utils.proto_to_json(
tuner_pb2.TuneArgs(num_parallel_trials=6))
self._exec_properties['custom_config'][
ai_platform_tuner_executor.TUNING_ARGS_KEY].update({
'scaleTier': 'CUSTOM',
'masterType': 'n1-highmem-16',
'workerType': 'n1-highmem-16',
'workerCount': 2,
})
executor.Do(self._inputs, self._outputs,
self._serialize_custom_config_under_test())
self.mock_runner.start_cloud_training.assert_called_with(
self._inputs,
self._outputs,
self._serialize_custom_config_under_test(),
self._executor_class_path,
{
'project': self._project_id,
'jobDir': self._job_dir,
'scaleTier': 'CUSTOM',
'masterType': 'n1-highmem-16',
'workerType': 'n1-highmem-16',
'workerCount': 5,
},
self._job_id, False, None)
def testDoWithoutCustomCaipTuneArgs(self):
executor = ai_platform_tuner_executor.Executor()
self._exec_properties = {'custom_config': {}}
with self.assertRaises(ValueError):
executor.Do(self._inputs, self._outputs,
self._serialize_custom_config_under_test())
def testDoWithEnableVertexOverride(self):
executor = ai_platform_tuner_executor.Executor()
enable_vertex = True
vertex_region = 'us-central2'
self._exec_properties[standard_component_specs.CUSTOM_CONFIG_KEY][
constants.ENABLE_VERTEX_KEY] = enable_vertex
self._exec_properties[standard_component_specs.CUSTOM_CONFIG_KEY][
constants.VERTEX_REGION_KEY] = vertex_region
executor.Do(self._inputs, self._outputs,
self._serialize_custom_config_under_test())
self.mock_runner.start_cloud_training.assert_called_with(
self._inputs, self._outputs, self._serialize_custom_config_under_test(),
self._executor_class_path, {
'project': self._project_id,
'jobDir': self._job_dir,
}, self._job_id, enable_vertex, vertex_region)
if __name__ == '__main__':
tf.test.main()
| true | true |
1c32b1ab805272c3b7c8593a9b1c385e93812605 | 8,377 | py | Python | homeassistant/components/enigma2/media_player.py | learn-home-automation/core | c5d8792c3487e9b418b1e7d623bf59e7dbddd6b7 | [
"Apache-2.0"
] | 22,481 | 2020-03-02T13:09:59.000Z | 2022-03-31T23:34:28.000Z | homeassistant/components/enigma2/media_player.py | jagadeeshvenkatesh/core | 1bd982668449815fee2105478569f8e4b5670add | [
"Apache-2.0"
] | 31,101 | 2020-03-02T13:00:16.000Z | 2022-03-31T23:57:36.000Z | homeassistant/components/enigma2/media_player.py | jagadeeshvenkatesh/core | 1bd982668449815fee2105478569f8e4b5670add | [
"Apache-2.0"
] | 11,411 | 2020-03-02T14:19:20.000Z | 2022-03-31T22:46:07.000Z | """Support for Enigma2 media players."""
from openwebif.api import CreateDevice
import voluptuous as vol
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_TVSHOW,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
CONF_USERNAME,
STATE_OFF,
STATE_ON,
STATE_PLAYING,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA
ATTR_MEDIA_CURRENTLY_RECORDING = "media_currently_recording"
ATTR_MEDIA_DESCRIPTION = "media_description"
ATTR_MEDIA_END_TIME = "media_end_time"
ATTR_MEDIA_START_TIME = "media_start_time"
CONF_USE_CHANNEL_ICON = "use_channel_icon"
CONF_DEEP_STANDBY = "deep_standby"
CONF_MAC_ADDRESS = "mac_address"
CONF_SOURCE_BOUQUET = "source_bouquet"
DEFAULT_NAME = "Enigma2 Media Player"
DEFAULT_PORT = 80
DEFAULT_SSL = False
DEFAULT_USE_CHANNEL_ICON = False
DEFAULT_USERNAME = "root"
DEFAULT_PASSWORD = "dreambox"
DEFAULT_DEEP_STANDBY = False
DEFAULT_MAC_ADDRESS = ""
DEFAULT_SOURCE_BOUQUET = ""
SUPPORTED_ENIGMA2 = (
SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_MUTE
| SUPPORT_TURN_OFF
| SUPPORT_NEXT_TRACK
| SUPPORT_STOP
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_VOLUME_STEP
| SUPPORT_TURN_ON
| SUPPORT_PAUSE
| SUPPORT_SELECT_SOURCE
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
vol.Optional(
CONF_USE_CHANNEL_ICON, default=DEFAULT_USE_CHANNEL_ICON
): cv.boolean,
vol.Optional(CONF_DEEP_STANDBY, default=DEFAULT_DEEP_STANDBY): cv.boolean,
vol.Optional(CONF_MAC_ADDRESS, default=DEFAULT_MAC_ADDRESS): cv.string,
vol.Optional(CONF_SOURCE_BOUQUET, default=DEFAULT_SOURCE_BOUQUET): cv.string,
}
)
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up of an enigma2 media player."""
    if discovery_info:
        # Discovery gives us the streaming service port (8001)
        # which is not useful as OpenWebif never runs on that port.
        # So use the default port instead, and fill in every other
        # option with its default value.
        config.update({
            CONF_PORT: DEFAULT_PORT,
            CONF_NAME: discovery_info["hostname"],
            CONF_HOST: discovery_info["host"],
            CONF_USERNAME: DEFAULT_USERNAME,
            CONF_PASSWORD: DEFAULT_PASSWORD,
            CONF_SSL: DEFAULT_SSL,
            CONF_USE_CHANNEL_ICON: DEFAULT_USE_CHANNEL_ICON,
            CONF_MAC_ADDRESS: DEFAULT_MAC_ADDRESS,
            CONF_DEEP_STANDBY: DEFAULT_DEEP_STANDBY,
            CONF_SOURCE_BOUQUET: DEFAULT_SOURCE_BOUQUET,
        })

    device = CreateDevice(
        host=config[CONF_HOST],
        port=config.get(CONF_PORT),
        username=config.get(CONF_USERNAME),
        password=config.get(CONF_PASSWORD),
        is_https=config[CONF_SSL],
        prefer_picon=config.get(CONF_USE_CHANNEL_ICON),
        mac_address=config.get(CONF_MAC_ADDRESS),
        turn_off_to_deep=config.get(CONF_DEEP_STANDBY),
        source_bouquet=config.get(CONF_SOURCE_BOUQUET),
    )

    add_devices([Enigma2Device(config[CONF_NAME], device)], True)
class Enigma2Device(MediaPlayerEntity):
"""Representation of an Enigma2 box."""
def __init__(self, name, device):
"""Initialize the Enigma2 device."""
self._name = name
self.e2_box = device
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def unique_id(self):
"""Return the unique ID for this entity."""
return self.e2_box.mac_address
@property
def state(self):
"""Return the state of the device."""
if self.e2_box.is_recording_playback:
return STATE_PLAYING
return STATE_OFF if self.e2_box.in_standby else STATE_ON
@property
def available(self):
"""Return True if the device is available."""
return not self.e2_box.is_offline
@property
def supported_features(self):
"""Flag of media commands that are supported."""
return SUPPORTED_ENIGMA2
def turn_off(self):
"""Turn off media player."""
self.e2_box.turn_off()
def turn_on(self):
"""Turn the media player on."""
self.e2_box.turn_on()
@property
def media_title(self):
"""Title of current playing media."""
return self.e2_box.current_service_channel_name
@property
def media_series_title(self):
"""Return the title of current episode of TV show."""
return self.e2_box.current_programme_name
@property
def media_channel(self):
"""Channel of current playing media."""
return self.e2_box.current_service_channel_name
@property
def media_content_id(self):
"""Service Ref of current playing media."""
return self.e2_box.current_service_ref
@property
def media_content_type(self):
"""Type of video currently playing."""
return MEDIA_TYPE_TVSHOW
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self.e2_box.muted
@property
def media_image_url(self):
"""Picon url for the channel."""
return self.e2_box.picon_url
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self.e2_box.set_volume(int(volume * 100))
def volume_up(self):
"""Volume up the media player."""
self.e2_box.set_volume(int(self.e2_box.volume * 100) + 5)
def volume_down(self):
"""Volume down media player."""
self.e2_box.set_volume(int(self.e2_box.volume * 100) - 5)
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self.e2_box.volume
def media_stop(self):
"""Send stop command."""
self.e2_box.set_stop()
def media_play(self):
"""Play media."""
self.e2_box.toggle_play_pause()
def media_pause(self):
"""Pause the media player."""
self.e2_box.toggle_play_pause()
def media_next_track(self):
"""Send next track command."""
self.e2_box.set_channel_up()
    def media_previous_track(self):
        """Send previous track command."""
        self.e2_box.set_channel_down()
def mute_volume(self, mute):
"""Mute or unmute."""
self.e2_box.mute_volume()
@property
def source(self):
"""Return the current input source."""
return self.e2_box.current_service_channel_name
@property
def source_list(self):
"""List of available input sources."""
return self.e2_box.source_list
def select_source(self, source):
"""Select input source."""
self.e2_box.select_source(self.e2_box.sources[source])
def update(self):
"""Update state of the media_player."""
self.e2_box.update()
@property
def extra_state_attributes(self):
"""Return device specific state attributes.
isRecording: Is the box currently recording.
currservice_fulldescription: Full program description.
currservice_begin: is in the format '21:00'.
currservice_end: is in the format '21:00'.
"""
if self.e2_box.in_standby:
return {}
return {
ATTR_MEDIA_CURRENTLY_RECORDING: self.e2_box.status_info["isRecording"],
ATTR_MEDIA_DESCRIPTION: self.e2_box.status_info[
"currservice_fulldescription"
],
ATTR_MEDIA_START_TIME: self.e2_box.status_info["currservice_begin"],
ATTR_MEDIA_END_TIME: self.e2_box.status_info["currservice_end"],
}
| 30.797794 | 85 | 0.676256 | from openwebif.api import CreateDevice
import voluptuous as vol
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_TVSHOW,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
CONF_USERNAME,
STATE_OFF,
STATE_ON,
STATE_PLAYING,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA
ATTR_MEDIA_CURRENTLY_RECORDING = "media_currently_recording"
ATTR_MEDIA_DESCRIPTION = "media_description"
ATTR_MEDIA_END_TIME = "media_end_time"
ATTR_MEDIA_START_TIME = "media_start_time"
CONF_USE_CHANNEL_ICON = "use_channel_icon"
CONF_DEEP_STANDBY = "deep_standby"
CONF_MAC_ADDRESS = "mac_address"
CONF_SOURCE_BOUQUET = "source_bouquet"
DEFAULT_NAME = "Enigma2 Media Player"
DEFAULT_PORT = 80
DEFAULT_SSL = False
DEFAULT_USE_CHANNEL_ICON = False
DEFAULT_USERNAME = "root"
DEFAULT_PASSWORD = "dreambox"
DEFAULT_DEEP_STANDBY = False
DEFAULT_MAC_ADDRESS = ""
DEFAULT_SOURCE_BOUQUET = ""
SUPPORTED_ENIGMA2 = (
SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_MUTE
| SUPPORT_TURN_OFF
| SUPPORT_NEXT_TRACK
| SUPPORT_STOP
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_VOLUME_STEP
| SUPPORT_TURN_ON
| SUPPORT_PAUSE
| SUPPORT_SELECT_SOURCE
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
vol.Optional(
CONF_USE_CHANNEL_ICON, default=DEFAULT_USE_CHANNEL_ICON
): cv.boolean,
vol.Optional(CONF_DEEP_STANDBY, default=DEFAULT_DEEP_STANDBY): cv.boolean,
vol.Optional(CONF_MAC_ADDRESS, default=DEFAULT_MAC_ADDRESS): cv.string,
vol.Optional(CONF_SOURCE_BOUQUET, default=DEFAULT_SOURCE_BOUQUET): cv.string,
}
)
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Enigma2 media player platform."""
    if discovery_info:
        # A discovered receiver only provides host/hostname, so fill in
        # every remaining option with the platform defaults.
        config.update(
            {
                CONF_PORT: DEFAULT_PORT,
                CONF_NAME: discovery_info["hostname"],
                CONF_HOST: discovery_info["host"],
                CONF_USERNAME: DEFAULT_USERNAME,
                CONF_PASSWORD: DEFAULT_PASSWORD,
                CONF_SSL: DEFAULT_SSL,
                CONF_USE_CHANNEL_ICON: DEFAULT_USE_CHANNEL_ICON,
                CONF_MAC_ADDRESS: DEFAULT_MAC_ADDRESS,
                CONF_DEEP_STANDBY: DEFAULT_DEEP_STANDBY,
                CONF_SOURCE_BOUQUET: DEFAULT_SOURCE_BOUQUET,
            }
        )

    e2_box = CreateDevice(
        host=config[CONF_HOST],
        port=config.get(CONF_PORT),
        username=config.get(CONF_USERNAME),
        password=config.get(CONF_PASSWORD),
        is_https=config[CONF_SSL],
        prefer_picon=config.get(CONF_USE_CHANNEL_ICON),
        mac_address=config.get(CONF_MAC_ADDRESS),
        turn_off_to_deep=config.get(CONF_DEEP_STANDBY),
        source_bouquet=config.get(CONF_SOURCE_BOUQUET),
    )
    add_devices([Enigma2Device(config[CONF_NAME], e2_box)], True)
class Enigma2Device(MediaPlayerEntity):
    """Representation of an Enigma2 set-top box as a media player.

    All commands and state queries are delegated to the ``e2_box`` client
    object created by the platform setup.
    """

    def __init__(self, name, device):
        """Initialize the Enigma2 device."""
        self._name = name
        self.e2_box = device

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def unique_id(self):
        """Return a unique ID for this entity (the box MAC address)."""
        return self.e2_box.mac_address

    @property
    def state(self):
        """Return the state of the device."""
        # A recording playback counts as actively playing even though the
        # box may otherwise report a different state.
        if self.e2_box.is_recording_playback:
            return STATE_PLAYING

        return STATE_OFF if self.e2_box.in_standby else STATE_ON

    @property
    def available(self):
        """Return True if the box is reachable."""
        return not self.e2_box.is_offline

    @property
    def supported_features(self):
        """Flag media player features that are supported."""
        return SUPPORTED_ENIGMA2

    def turn_off(self):
        """Turn off the media player."""
        self.e2_box.turn_off()

    def turn_on(self):
        """Turn on the media player."""
        self.e2_box.turn_on()

    @property
    def media_title(self):
        """Title of current playing media."""
        return self.e2_box.current_service_channel_name

    @property
    def media_series_title(self):
        """Return the title of the current programme."""
        return self.e2_box.current_programme_name

    @property
    def media_channel(self):
        """Channel of current playing media."""
        return self.e2_box.current_service_channel_name

    @property
    def media_content_id(self):
        """Service reference of current playing media."""
        return self.e2_box.current_service_ref

    @property
    def media_content_type(self):
        """Type of media currently playing (always a TV show)."""
        return MEDIA_TYPE_TVSHOW

    @property
    def is_volume_muted(self):
        """Return True if volume is currently muted."""
        return self.e2_box.muted

    @property
    def media_image_url(self):
        """Picon URL for the current channel."""
        return self.e2_box.picon_url

    def set_volume_level(self, volume):
        """Set volume level; ``volume`` is in the range 0..1."""
        self.e2_box.set_volume(int(volume * 100))

    def volume_up(self):
        """Raise the volume by 5 percentage points."""
        self.e2_box.set_volume(int(self.e2_box.volume * 100) + 5)

    def volume_down(self):
        """Lower the volume by 5 percentage points."""
        self.e2_box.set_volume(int(self.e2_box.volume * 100) - 5)

    @property
    def volume_level(self):
        """Volume level of the media player (0..1)."""
        return self.e2_box.volume

    def media_stop(self):
        """Send stop command."""
        self.e2_box.set_stop()

    def media_play(self):
        """Play media (toggles play/pause on the box)."""
        self.e2_box.toggle_play_pause()

    def media_pause(self):
        """Pause media (toggles play/pause on the box)."""
        self.e2_box.toggle_play_pause()

    def media_next_track(self):
        """Send next-channel command."""
        self.e2_box.set_channel_up()

    def media_previous_track(self):
        """Send previous-channel command."""
        self.e2_box.set_channel_down()

    def mute_volume(self, mute):
        """Mute or unmute the media player.

        The box only supports toggling, so the ``mute`` argument is ignored.
        """
        self.e2_box.mute_volume()

    @property
    def source(self):
        """Return the current input source (current channel)."""
        return self.e2_box.current_service_channel_name

    @property
    def source_list(self):
        """List of available input sources."""
        return self.e2_box.source_list

    def select_source(self, source):
        """Select an input source by its display name."""
        self.e2_box.select_source(self.e2_box.sources[source])

    def update(self):
        """Fetch the latest state from the box."""
        self.e2_box.update()

    @property
    def extra_state_attributes(self):
        """Return device-specific state attributes.

        Exposes the recording flag, programme description, and programme
        start/end times taken from the box status. Empty while in standby,
        since the status info is not meaningful then.
        """
        if self.e2_box.in_standby:
            return {}
        return {
            ATTR_MEDIA_CURRENTLY_RECORDING: self.e2_box.status_info["isRecording"],
            ATTR_MEDIA_DESCRIPTION: self.e2_box.status_info[
                "currservice_fulldescription"
            ],
            ATTR_MEDIA_START_TIME: self.e2_box.status_info["currservice_begin"],
            ATTR_MEDIA_END_TIME: self.e2_box.status_info["currservice_end"],
        }
| true | true |
1c32b21103d9965df8d0584bef0eb83f78080ed7 | 2,581 | py | Python | cuda/onegram_rrnn.py | dodgejesse/rational-recurrences | 4d126903399cc4a86734733d037a9bb7c5dda93d | [
"MIT"
] | 7 | 2019-09-09T06:25:20.000Z | 2020-03-21T13:53:43.000Z | cuda/onegram_rrnn.py | dodgejesse/rational-recurrences | 4d126903399cc4a86734733d037a9bb7c5dda93d | [
"MIT"
] | 1 | 2020-12-13T14:26:03.000Z | 2020-12-13T14:26:03.000Z | cuda/onegram_rrnn.py | dodgejesse/rational-recurrences | 4d126903399cc4a86734733d037a9bb7c5dda93d | [
"MIT"
] | 1 | 2019-11-24T12:47:21.000Z | 2019-11-24T12:47:21.000Z | ONEGRAM_RRNN = """
extern "C" {
__global__ void rrnn_fwd(
const float * __restrict__ u,
const float * __restrict__ c1_init,
const int len,
const int batch,
const int dim,
const int k,
float * __restrict__ c1,
int semiring_type) {
int ncols = batch*dim;
int col = blockIdx.x * blockDim.x + threadIdx.x;
if (col >= ncols) return;
int ncols_u = ncols*k;
const float *up = u + (col*k);
float *c1p = c1 + col;
float cur_c1 = *(c1_init + col);
for (int row = 0; row < len; ++row) {
float u1 = *(up);
float forget1 = *(up+1);
cur_c1 = cur_c1 * forget1 + u1;
*c1p = cur_c1;
up += ncols_u;
c1p += ncols;
}
}
__global__ void rrnn_bwd(
const float * __restrict__ u,
const float * __restrict__ c1_init,
const float * __restrict__ c1,
const float * __restrict__ grad_c1,
const float * __restrict__ grad_last_c1,
const int len,
const int batch,
const int dim,
const int k,
float * __restrict__ grad_u,
float * __restrict__ grad_c1_init,
int semiring_type) {
int ncols = batch*dim;
int col = blockIdx.x * blockDim.x + threadIdx.x;
if (col >= ncols) return;
int ncols_u = ncols*k;
float cur_c1 = *(grad_last_c1 + col);
const float *up = u + (col*k) + (len-1)*ncols_u;
const float *c1p = c1 + col + (len-1)*ncols;
const float *gc1p = grad_c1 + col + (len-1)*ncols;
float *gup = grad_u + (col*k) + (len-1)*ncols_u;
for (int row = len-1; row >= 0; --row) {
float u1 = *(up);
float forget1 = *(up+1);
const float prev_c1_val = (row>0) ? (*(c1p-ncols)) : (*(c1_init+col));
const float gc1 = *(gc1p) + cur_c1;
float gu1 = gc1;
*(gup) = gu1;
float gforget1 = gc1*prev_c1_val;
*(gup+1) = gforget1;
cur_c1 = gc1 * forget1;
up -= ncols_u;
c1p -= ncols;
gup -= ncols_u;
gc1p -= ncols;
}
*(grad_c1_init + col) = cur_c1;
}
}
"""
| 29.329545 | 82 | 0.43859 | ONEGRAM_RRNN = """
extern "C" {
__global__ void rrnn_fwd(
const float * __restrict__ u,
const float * __restrict__ c1_init,
const int len,
const int batch,
const int dim,
const int k,
float * __restrict__ c1,
int semiring_type) {
int ncols = batch*dim;
int col = blockIdx.x * blockDim.x + threadIdx.x;
if (col >= ncols) return;
int ncols_u = ncols*k;
const float *up = u + (col*k);
float *c1p = c1 + col;
float cur_c1 = *(c1_init + col);
for (int row = 0; row < len; ++row) {
float u1 = *(up);
float forget1 = *(up+1);
cur_c1 = cur_c1 * forget1 + u1;
*c1p = cur_c1;
up += ncols_u;
c1p += ncols;
}
}
__global__ void rrnn_bwd(
const float * __restrict__ u,
const float * __restrict__ c1_init,
const float * __restrict__ c1,
const float * __restrict__ grad_c1,
const float * __restrict__ grad_last_c1,
const int len,
const int batch,
const int dim,
const int k,
float * __restrict__ grad_u,
float * __restrict__ grad_c1_init,
int semiring_type) {
int ncols = batch*dim;
int col = blockIdx.x * blockDim.x + threadIdx.x;
if (col >= ncols) return;
int ncols_u = ncols*k;
float cur_c1 = *(grad_last_c1 + col);
const float *up = u + (col*k) + (len-1)*ncols_u;
const float *c1p = c1 + col + (len-1)*ncols;
const float *gc1p = grad_c1 + col + (len-1)*ncols;
float *gup = grad_u + (col*k) + (len-1)*ncols_u;
for (int row = len-1; row >= 0; --row) {
float u1 = *(up);
float forget1 = *(up+1);
const float prev_c1_val = (row>0) ? (*(c1p-ncols)) : (*(c1_init+col));
const float gc1 = *(gc1p) + cur_c1;
float gu1 = gc1;
*(gup) = gu1;
float gforget1 = gc1*prev_c1_val;
*(gup+1) = gforget1;
cur_c1 = gc1 * forget1;
up -= ncols_u;
c1p -= ncols;
gup -= ncols_u;
gc1p -= ncols;
}
*(grad_c1_init + col) = cur_c1;
}
}
"""
| true | true |
1c32b2826520d61f621a1b187296b68c17e23093 | 659 | py | Python | src/brouwers/conf/travis.py | modelbrouwers/modelbrouwers | e0ba4819bf726d6144c0a648fdd4731cdc098a52 | [
"MIT"
] | 6 | 2015-03-03T13:23:07.000Z | 2021-12-19T18:12:41.000Z | src/brouwers/conf/travis.py | modelbrouwers/modelbrouwers | e0ba4819bf726d6144c0a648fdd4731cdc098a52 | [
"MIT"
] | 95 | 2015-02-07T00:55:39.000Z | 2022-02-08T20:22:05.000Z | src/brouwers/conf/travis.py | modelbrouwers/modelbrouwers | e0ba4819bf726d6144c0a648fdd4731cdc098a52 | [
"MIT"
] | 2 | 2016-03-22T16:53:26.000Z | 2019-02-09T22:46:04.000Z | import os
# Travis CI settings module: supply the environment variables the base
# settings require before importing it, then override a few values so the
# test suite can run without external services.
os.environ.setdefault("SECRET_KEY", "travis-key")
os.environ.setdefault("IS_HTTPS", "no")
os.environ.setdefault("DB_NAME", "brouwers")
os.environ.setdefault("DB_USER", "postgres")
os.environ.setdefault("DB_PASSWORD", "")

from .base import *  # noqa isort:skip

# Use an in-memory SQLite database for the secondary ("mysql") connection so
# CI needs no real MySQL server.
DATABASES["mysql"] = {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}

#
# PHPBB forum integration settings used by the tests.
#
PHPBB_TABLE_PREFIX = "phpbb3_"
PHPBB_URL = "/forum"
PHPBB_UID_COOKIE = "phpbb3_u"

# Register the test-only app providing custom model fields.
INSTALLED_APPS = INSTALLED_APPS + ["brouwers.forum_tools.tests.custom_fields"]

# Keep test uploads out of the real media directory.
MEDIA_ROOT = os.path.join(BASE_DIR, "test_media")
SENDFILE_BACKEND = "sendfile.backends.nginx"
SHOP_ENABLED = True
| 22.724138 | 81 | 0.734446 | import os
os.environ.setdefault("SECRET_KEY", "travis-key")
os.environ.setdefault("IS_HTTPS", "no")
os.environ.setdefault("DB_NAME", "brouwers")
os.environ.setdefault("DB_USER", "postgres")
os.environ.setdefault("DB_PASSWORD", "")
from .base import *
DATABASES["mysql"] = {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}
PHPBB_TABLE_PREFIX = "phpbb3_"
PHPBB_URL = "/forum"
PHPBB_UID_COOKIE = "phpbb3_u"
INSTALLED_APPS = INSTALLED_APPS + ["brouwers.forum_tools.tests.custom_fields"]
MEDIA_ROOT = os.path.join(BASE_DIR, "test_media")
SENDFILE_BACKEND = "sendfile.backends.nginx"
SHOP_ENABLED = True
| true | true |
1c32b43e4c148db4b606236f42a2f2577905a7e2 | 6,359 | py | Python | aioarangodb/client.py | ckdanny/aioarangodb | 432ac3701c1ca32bed2021ef510c724cd543f39f | [
"MIT"
] | 17 | 2020-06-11T20:26:41.000Z | 2021-04-03T19:07:57.000Z | aioarangodb/client.py | ckdanny/aioarangodb | 432ac3701c1ca32bed2021ef510c724cd543f39f | [
"MIT"
] | 5 | 2021-06-24T13:31:16.000Z | 2022-03-09T11:28:04.000Z | aioarangodb/client.py | ckdanny/aioarangodb | 432ac3701c1ca32bed2021ef510c724cd543f39f | [
"MIT"
] | 4 | 2020-09-12T11:52:41.000Z | 2021-04-21T19:30:00.000Z | from __future__ import absolute_import, unicode_literals
import json
from six import string_types
__all__ = ['ArangoClient']
from .connection import (
BasicConnection,
JWTConnection,
JWTSuperuserConnection
)
from .database import StandardDatabase
from .exceptions import ServerConnectionError
from .http import DefaultHTTPClient
from .resolver import (
SingleHostResolver,
RandomHostResolver,
RoundRobinHostResolver
)
from .version import __version__
class ArangoClient(object):
    """ArangoDB client.

    :param hosts: Host URL or list of URLs (coordinators in a cluster).
    :type hosts: [str | unicode]
    :param host_resolver: Host resolver. This parameter used for clusters (when
        multiple host URLs are provided). Accepted values are "roundrobin" and
        "random". Any other value defaults to round robin.
    :type host_resolver: str | unicode
    :param http_client: User-defined HTTP client.
    :type http_client: arango.http.HTTPClient
    :param serializer: User-defined JSON serializer. Must be a callable
        which takes a JSON data type object as its only argument and return
        the serialized string. If not given, ``json.dumps`` is used by default.
    :type serializer: callable
    :param deserializer: User-defined JSON de-serializer. Must be a callable
        which takes a JSON serialized string as its only argument and return
        the de-serialized object. If not given, ``json.loads`` is used by
        default.
    :type deserializer: callable
    """

    def __init__(self,
                 hosts='http://127.0.0.1:8529',
                 host_resolver='roundrobin',
                 http_client=None,
                 serializer=json.dumps,
                 deserializer=json.loads):
        # Hosts may be given either as a comma-separated string or a list.
        if isinstance(hosts, string_types):
            self._hosts = [host.strip('/') for host in hosts.split(',')]
        else:
            self._hosts = [host.strip('/') for host in hosts]

        host_count = len(self._hosts)
        if host_count == 1:
            self._host_resolver = SingleHostResolver()
        elif host_resolver == 'random':
            self._host_resolver = RandomHostResolver(host_count)
        else:
            self._host_resolver = RoundRobinHostResolver(host_count)

        self._http = http_client or DefaultHTTPClient()
        self._serializer = serializer
        self._deserializer = deserializer
        # One HTTP session per host, shared by all connections from this
        # client.
        self._sessions = [self._http.create_session(h) for h in self._hosts]

    def __repr__(self):
        return '<ArangoClient {}>'.format(','.join(self._hosts))

    @property
    def hosts(self):
        """Return the list of ArangoDB host URLs.

        :return: List of ArangoDB host URLs.
        :rtype: [str | unicode]
        """
        return self._hosts

    @property
    def version(self):
        """Return the client version.

        :return: Client version.
        :rtype: str | unicode
        """
        return __version__

    async def close(self):
        """Close all HTTP sessions held by this client."""
        for session in self._sessions:
            await session.close()

    async def db(
            self,
            name='_system',
            username='root',
            password='',
            verify=False,
            auth_method='basic',
            superuser_token=None):
        """Connect to an ArangoDB database and return the database API wrapper.

        :param name: Database name.
        :type name: str | unicode
        :param username: Username for basic authentication.
        :type username: str | unicode
        :param password: Password for basic authentication.
        :type password: str | unicode
        :param verify: Verify the connection by sending a test request.
        :type verify: bool
        :param auth_method: HTTP authentication method. Accepted values are
            "basic" (default) and "jwt". If set to "jwt", the token is
            refreshed automatically using ArangoDB username and password. This
            assumes that the clocks of the server and client are synchronized.
        :type auth_method: str | unicode
        :param superuser_token: User generated token for superuser access.
            If set, parameters **username**, **password** and **auth_method**
            are ignored. This token is not refreshed automatically.
        :type superuser_token: str | unicode
        :return: Standard database API wrapper.
        :rtype: arango.database.StandardDatabase
        :raise arango.exceptions.ServerConnectionError: If **verify** was set
            to True and the connection fails.
        """
        if superuser_token is not None:
            # Superuser token overrides every other authentication option.
            connection = JWTSuperuserConnection(
                hosts=self._hosts,
                host_resolver=self._host_resolver,
                sessions=self._sessions,
                db_name=name,
                http_client=self._http,
                serializer=self._serializer,
                deserializer=self._deserializer,
                superuser_token=superuser_token
            )
        elif auth_method == 'basic':
            connection = BasicConnection(
                hosts=self._hosts,
                host_resolver=self._host_resolver,
                sessions=self._sessions,
                db_name=name,
                username=username,
                password=password,
                http_client=self._http,
                serializer=self._serializer,
                deserializer=self._deserializer,
            )
        elif auth_method == 'jwt':
            connection = JWTConnection(
                hosts=self._hosts,
                host_resolver=self._host_resolver,
                sessions=self._sessions,
                db_name=name,
                username=username,
                password=password,
                http_client=self._http,
                serializer=self._serializer,
                deserializer=self._deserializer,
            )
            # JWT auth needs an initial token before any request can be made.
            await connection.refresh_token()
        else:
            raise ValueError('invalid auth_method: {}'.format(auth_method))

        if verify:
            try:
                await connection.ping()
            except ServerConnectionError:
                # Already the exception type callers expect; re-raise as-is
                # to preserve the original traceback.
                raise
            except Exception as err:
                # Wrap any other failure, chaining the cause so debugging
                # retains the underlying error.
                raise ServerConnectionError(
                    'bad connection: {}'.format(err)) from err

        return StandardDatabase(connection)
| 36.130682 | 79 | 0.611731 | from __future__ import absolute_import, unicode_literals
import json
from six import string_types
__all__ = ['ArangoClient']
from .connection import (
BasicConnection,
JWTConnection,
JWTSuperuserConnection
)
from .database import StandardDatabase
from .exceptions import ServerConnectionError
from .http import DefaultHTTPClient
from .resolver import (
SingleHostResolver,
RandomHostResolver,
RoundRobinHostResolver
)
from .version import __version__
class ArangoClient(object):
def __init__(self,
hosts='http://127.0.0.1:8529',
host_resolver='roundrobin',
http_client=None,
serializer=json.dumps,
deserializer=json.loads):
if isinstance(hosts, string_types):
self._hosts = [host.strip('/') for host in hosts.split(',')]
else:
self._hosts = [host.strip('/') for host in hosts]
host_count = len(self._hosts)
if host_count == 1:
self._host_resolver = SingleHostResolver()
elif host_resolver == 'random':
self._host_resolver = RandomHostResolver(host_count)
else:
self._host_resolver = RoundRobinHostResolver(host_count)
self._http = http_client or DefaultHTTPClient()
self._serializer = serializer
self._deserializer = deserializer
self._sessions = [self._http.create_session(h) for h in self._hosts]
def __repr__(self):
return '<ArangoClient {}>'.format(','.join(self._hosts))
@property
def hosts(self):
return self._hosts
@property
def version(self):
return __version__
async def close(self):
for session in self._sessions:
await session.close()
async def db(
self,
name='_system',
username='root',
password='',
verify=False,
auth_method='basic',
superuser_token=None):
if superuser_token is not None:
connection = JWTSuperuserConnection(
hosts=self._hosts,
host_resolver=self._host_resolver,
sessions=self._sessions,
db_name=name,
http_client=self._http,
serializer=self._serializer,
deserializer=self._deserializer,
superuser_token=superuser_token
)
elif auth_method == 'basic':
connection = BasicConnection(
hosts=self._hosts,
host_resolver=self._host_resolver,
sessions=self._sessions,
db_name=name,
username=username,
password=password,
http_client=self._http,
serializer=self._serializer,
deserializer=self._deserializer,
)
elif auth_method == 'jwt':
connection = JWTConnection(
hosts=self._hosts,
host_resolver=self._host_resolver,
sessions=self._sessions,
db_name=name,
username=username,
password=password,
http_client=self._http,
serializer=self._serializer,
deserializer=self._deserializer,
)
await connection.refresh_token()
else:
raise ValueError('invalid auth_method: {}'.format(auth_method))
if verify:
try:
await connection.ping()
except ServerConnectionError as err:
raise err
except Exception as err:
raise ServerConnectionError('bad connection: {}'.format(err))
return StandardDatabase(connection)
| true | true |
1c32b4965cd39a3830378f9064a6396f12e4d4ac | 2,114 | py | Python | saspy/__init__.py | metllord/saspy | 324dd76c5769355f4130186c99b5e907350ca205 | [
"Apache-2.0"
] | 1 | 2020-05-28T17:30:46.000Z | 2020-05-28T17:30:46.000Z | saspy/__init__.py | metllord/saspy | 324dd76c5769355f4130186c99b5e907350ca205 | [
"Apache-2.0"
] | null | null | null | saspy/__init__.py | metllord/saspy | 324dd76c5769355f4130186c99b5e907350ca205 | [
"Apache-2.0"
] | null | null | null | #
# Copyright SAS Institute
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from saspy.version import __version__
from saspy.sasbase import SASsession, SASconfig, list_configs
from saspy.sasdata import SASdata
from saspy.sasexceptions import SASIONotSupportedError, SASConfigNotFoundError, SASConfigNotValidError
from saspy.sasproccommons import SASProcCommons
from saspy.sastabulate import Tabulate
from saspy.sasresults import SASresults
import os, sys
def isnotebook():
try:
shell = get_ipython().__class__.__name__
if shell == 'ZMQInteractiveShell':
return True # Jupyter notebook or qtconsole
elif shell == 'TerminalInteractiveShell':
return False # Terminal running IPython
else:
return False # Other type (?)
except NameError:
return False # Probably standard Python interpreter
if isnotebook():
from saspy.sas_magic import SASMagic
get_ipython().register_magics(SASMagic)
def _find_cfg():
sp = []
sp[:] = sys.path
sp[0] = os.path.abspath(sp[0])
sp.insert(1, os.path.expanduser('~/.config/saspy'))
sp.insert(0, __file__.rsplit(os.sep+'__init__.py')[0])
cfg = 'Not found'
for dir in sp:
f1 = dir+os.sep+'sascfg_personal.py'
if os.path.isfile(f1):
cfg = f1
break
if cfg == 'Not found':
f1 =__file__.rsplit('__init__.py')[0]+'sascfg.py'
if os.path.isfile(f1):
cfg = f1
return cfg
SAScfg = _find_cfg()
| 30.2 | 102 | 0.703879 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from saspy.version import __version__
from saspy.sasbase import SASsession, SASconfig, list_configs
from saspy.sasdata import SASdata
from saspy.sasexceptions import SASIONotSupportedError, SASConfigNotFoundError, SASConfigNotValidError
from saspy.sasproccommons import SASProcCommons
from saspy.sastabulate import Tabulate
from saspy.sasresults import SASresults
import os, sys
def isnotebook():
try:
shell = get_ipython().__class__.__name__
if shell == 'ZMQInteractiveShell':
return True
elif shell == 'TerminalInteractiveShell':
return False
else:
return False
except NameError:
return False
if isnotebook():
from saspy.sas_magic import SASMagic
get_ipython().register_magics(SASMagic)
def _find_cfg():
sp = []
sp[:] = sys.path
sp[0] = os.path.abspath(sp[0])
sp.insert(1, os.path.expanduser('~/.config/saspy'))
sp.insert(0, __file__.rsplit(os.sep+'__init__.py')[0])
cfg = 'Not found'
for dir in sp:
f1 = dir+os.sep+'sascfg_personal.py'
if os.path.isfile(f1):
cfg = f1
break
if cfg == 'Not found':
f1 =__file__.rsplit('__init__.py')[0]+'sascfg.py'
if os.path.isfile(f1):
cfg = f1
return cfg
SAScfg = _find_cfg()
| true | true |
1c32b619b21812aea2744ec4477398c202b9057c | 3,920 | py | Python | gym_gvgai/envs/gvgai_env.py | joeljosephjin/gvgai-rl | 57281629c313abb43312950b22d043a3d67639cf | [
"Apache-2.0"
] | null | null | null | gym_gvgai/envs/gvgai_env.py | joeljosephjin/gvgai-rl | 57281629c313abb43312950b22d043a3d67639cf | [
"Apache-2.0"
] | null | null | null | gym_gvgai/envs/gvgai_env.py | joeljosephjin/gvgai-rl | 57281629c313abb43312950b22d043a3d67639cf | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Simulate VGDL Games
"""
import sys
from os import path
import numpy as np
# Directory of this module; used below to locate bundled games and levels.
# NOTE(review): the name ``dir`` shadows the builtin, but it is read by
# GVGAI_Env (ClientCommGYM and _setLevel), so it cannot be renamed here alone.
dir = path.dirname(__file__)
# Make the bundled GVGAI Python client importable.
gvgai_path = path.join(dir, "gvgai", "clients", "GVGAI-PythonClient", "src", "utils")
sys.path.append(gvgai_path)
import gym
from gym import error, spaces, utils
import ClientCommGYM as gvgai
class GVGAI_Env(gym.Env):
    """
    Define a VGDL environment.

    The environment defines which actions can be taken at which point and
    when the agent receives which reward.
    """

    # Must be a class attribute: gym reads ``metadata`` from the class.
    # Previously this was assigned to a throwaway local inside ``__init__``,
    # which had no effect.
    metadata = {'render.modes': ['human', 'rgb_array']}

    def __init__(self, game, level, version):
        """Start the GVGAI client for ``game``/``level`` and build the spaces."""
        self.__version__ = "0.0.2"

        # Send the level to play
        self.GVGAI = gvgai.ClientCommGYM(game, version, level, dir)
        self.game = game
        self.lvl = level
        self.version = version
        self.actions = self.GVGAI.actions()
        self.img = self.GVGAI.sso.image
        self.viewer = None

        # Only allow gridphysics games for now.
        # One discrete action per available move in the selected game.
        self.action_space = spaces.Discrete(len(self.actions))

        # Observations are a single colour channel of the game frame.
        self.img = self.img[:, :, 2]
        self.observation_space = spaces.Box(
            low=0, high=255, shape=self.img.shape, dtype=np.uint8)

    def step(self, action):
        """
        The agent takes a step in the environment.

        Parameters
        ----------
        action : int

        Returns
        -------
        state, reward, isOver, info : tuple
            state (ndarray) :
                Single-channel image of the current frame of the game.
            reward (float) :
                Reward returned by the game client for this step.
            isOver (bool) :
                Whether it's time to reset the environment again.
            info (dict) :
                Debugging info, e.g. info["winner"].
        """
        state, reward, isOver, info = self.GVGAI.step(action)
        # Keep only one colour channel, matching the observation space.
        state = state[:, :, 2]
        self.img = state
        return state, reward, isOver, info

    def reset(self):
        """
        Reset the state of the environment and return an initial observation.

        Returns
        -------
        observation (object): the initial observation of the space.
        """
        self.img = self.GVGAI.reset(self.lvl)
        return self.img

    def render(self, mode='human'):
        """Render the current frame on screen or return it as an array."""
        img = self.img[:, :]
        if mode == 'rgb_array':
            return img
        elif mode == 'human':
            from gym.envs.classic_control import rendering
            if self.viewer is None:
                self.viewer = rendering.SimpleImageViewer()
            self.viewer.imshow(img)
            return self.viewer.isopen

    def close(self):
        """Close the viewer window, if one was opened."""
        if self.viewer is not None:
            self.viewer.close()
            self.viewer = None

    def _setLevel(self, level):
        """Select the level to play.

        ``level`` may be an original level index (0-4) or a path to a level
        file. Unknown indices or paths fall back to level 0; a new custom
        level file is registered with the client and played as index 5.
        """
        if isinstance(level, int):
            if level < 5:
                self.lvl = level
            else:
                print("Level doesn't exist, playing level 0")
                self.lvl = 0
        else:
            newLvl = path.realpath(level)
            # Resolve the five bundled level files for this game/version.
            ogLvls = [
                path.realpath(path.join(
                    dir, 'games', '{}_v{}'.format(self.game, self.version),
                    '{}_lvl{}.txt'.format(self.game, i)))
                for i in range(5)
            ]
            if newLvl in ogLvls:
                self.lvl = ogLvls.index(newLvl)
            elif path.exists(newLvl):
                self.GVGAI.addLevel(newLvl)
                self.lvl = 5
            else:
                print("Level doesn't exist, playing level 0")
                self.lvl = 0

    def get_action_meanings(self):
        """Return the human-readable action names reported by the client."""
        return self.actions
| 30.866142 | 158 | 0.557143 |
import sys
from os import path
import numpy as np
dir = path.dirname(__file__)
gvgai_path = path.join(dir, "gvgai", "clients", "GVGAI-PythonClient", "src", "utils")
sys.path.append(gvgai_path)
import gym
from gym import error, spaces, utils
import ClientCommGYM as gvgai
class GVGAI_Env(gym.Env):
def __init__(self, game, level, version):
self.__version__ = "0.0.2"
metadata = {'render.modes': ['human', 'rgb_array']}
self.GVGAI = gvgai.ClientCommGYM(game, version, level, dir)
self.game = game
self.lvl = level
self.version = version
self.actions = self.GVGAI.actions()
self.img = self.GVGAI.sso.image
self.viewer = None
self.action_space = spaces.Discrete(len(self.actions))
self.img = self.img[:,:,2]
self.observation_space = spaces.Box(low=0, high=255, shape=self.img.shape, dtype=np.uint8)
def step(self, action):
state, reward, isOver, info = self.GVGAI.step(action)
state = state[:,:,2]
self.img = state
return state, reward, isOver, info
def reset(self):
self.img = self.GVGAI.reset(self.lvl)
return self.img
def render(self, mode='human'):
img = self.img[:,:]
if mode == 'rgb_array':
return img
elif mode == 'human':
from gym.envs.classic_control import rendering
if self.viewer is None:
self.viewer = rendering.SimpleImageViewer()
self.viewer.imshow(img)
return self.viewer.isopen
def close(self):
if self.viewer is not None:
self.viewer.close()
self.viewer = None
def _setLevel(self, level):
if(type(level) == int):
if(level < 5):
self.lvl = level
else:
print("Level doesn't exist, playing level 0")
self.lvl = 0
else:
newLvl = path.realpath(level)
ogLvls = [path.realpath(path.join(dir, 'games', '{}_v{}'.format(self.game, self.version), '{}_lvl{}.txt'.format(self.game, i))) for i in range(5)]
if(newLvl in ogLvls):
lvl = ogLvls.index(newLvl)
self.lvl = lvl
elif(path.exists(newLvl)):
self.GVGAI.addLevel(newLvl)
self.lvl = 5
else:
print("Level doesn't exist, playing level 0")
self.lvl = 0
def get_action_meanings(self):
return self.actions
| true | true |
1c32b63182e3843d4781fc4ffb7434839531e4cc | 1,011 | py | Python | openproblems/tasks/_batch_integration/batch_integration_graph/metrics/ari.py | scottgigante-immunai/openproblems | d093c1a2f21715d98e07ec760eff2c8f50c68373 | [
"MIT"
] | null | null | null | openproblems/tasks/_batch_integration/batch_integration_graph/metrics/ari.py | scottgigante-immunai/openproblems | d093c1a2f21715d98e07ec760eff2c8f50c68373 | [
"MIT"
] | null | null | null | openproblems/tasks/_batch_integration/batch_integration_graph/metrics/ari.py | scottgigante-immunai/openproblems | d093c1a2f21715d98e07ec760eff2c8f50c68373 | [
"MIT"
] | null | null | null | from .....tools.decorators import metric
"""
The Rand index compares the overlap of two clusterings;
it considers both correct clustering overlaps while also counting correct
disagreements between two clusterings.
Similar to NMI, we compared the cell-type labels with the NMI-optimized
Louvain clustering computed on the integrated dataset.
The adjustment of the Rand index corrects for randomly correct labels.
An ARI of 0 or 1 corresponds to random labeling or a perfect match, respectively.
We also used the scikit-learn (v.0.22.1) implementation of the ARI.
"""
@metric(
    metric_name="ARI",
    maximize=True,
    image="openproblems-python-batch-integration",  # only if required
)
def ari(adata):
    """Score batch integration with the Adjusted Rand Index.

    Runs an NMI-optimised Louvain clustering on the integrated data and
    compares the resulting cluster assignment against the ground-truth
    cell-type labels. Higher is better; 1 is a perfect match.
    """
    from scib.metrics import ari
    from scib.metrics.clustering import opt_louvain

    # Write the optimised clustering into adata.obs["cluster"] in place.
    opt_louvain(
        adata,
        label_key="labels",
        cluster_key="cluster",
        plot=False,
        inplace=True,
        force=True,
    )
    return ari(adata, group1="cluster", group2="labels")
| 30.636364 | 81 | 0.726014 | from .....tools.decorators import metric
@metric(
metric_name="ARI",
maximize=True,
image="openproblems-python-batch-integration",
)
def ari(adata):
from scib.metrics import ari
from scib.metrics.clustering import opt_louvain
opt_louvain(
adata,
label_key="labels",
cluster_key="cluster",
plot=False,
inplace=True,
force=True,
)
return ari(adata, group1="cluster", group2="labels")
| true | true |
1c32b6bda5245b70134b39bad44d9245efa72c04 | 5,608 | py | Python | examples/indoor.py | cnheider/MinkowskiEngine | ae6db31203ba012df2f695576e2d3819d49bf2d7 | [
"MIT"
] | null | null | null | examples/indoor.py | cnheider/MinkowskiEngine | ae6db31203ba012df2f695576e2d3819d49bf2d7 | [
"MIT"
] | null | null | null | examples/indoor.py | cnheider/MinkowskiEngine | ae6db31203ba012df2f695576e2d3819d49bf2d7 | [
"MIT"
] | 1 | 2022-02-27T10:44:31.000Z | 2022-02-27T10:44:31.000Z | # Copyright (c) Chris Choy (chrischoy@ai.stanford.edu).
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Please cite "4D Spatio-Temporal ConvNets: Minkowski Convolutional Neural
# Networks", CVPR'19 (https://arxiv.org/abs/1904.08755) if you use any part
# of the code.
import os
import argparse
import numpy as np
from urllib.request import urlretrieve
try:
import open3d as o3d
except ImportError:
raise ImportError('Please install open3d with `pip install open3d`.')
import torch
import MinkowskiEngine as ME
from examples.minkunet import MinkUNet34C
from examples.common import Timer
# Check if the weights and file exist and download
if not os.path.isfile('weights.pth'):
print('Downloading weights and a room ply file...')
urlretrieve("http://cvgl.stanford.edu/data2/minkowskiengine/weights.pth",
'weights.pth')
urlretrieve("http://cvgl.stanford.edu/data2/minkowskiengine/1.ply", '1.ply')
parser = argparse.ArgumentParser()
parser.add_argument('--file_name', type=str, default='1.ply')
parser.add_argument('--weights', type=str, default='weights.pth')
parser.add_argument('--use_cpu', action='store_true')
CLASS_LABELS = ('wall', 'floor', 'cabinet', 'bed', 'chair', 'sofa', 'table',
'door', 'window', 'bookshelf', 'picture', 'counter', 'desk',
'curtain', 'refrigerator', 'shower curtain', 'toilet', 'sink',
'bathtub', 'otherfurniture')
VALID_CLASS_IDS = [
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 14, 16, 24, 28, 33, 34, 36, 39
]
SCANNET_COLOR_MAP = {
0: (0., 0., 0.),
1: (174., 199., 232.),
2: (152., 223., 138.),
3: (31., 119., 180.),
4: (255., 187., 120.),
5: (188., 189., 34.),
6: (140., 86., 75.),
7: (255., 152., 150.),
8: (214., 39., 40.),
9: (197., 176., 213.),
10: (148., 103., 189.),
11: (196., 156., 148.),
12: (23., 190., 207.),
14: (247., 182., 210.),
15: (66., 188., 102.),
16: (219., 219., 141.),
17: (140., 57., 197.),
18: (202., 185., 52.),
19: (51., 176., 203.),
20: (200., 54., 131.),
21: (92., 193., 61.),
22: (78., 71., 183.),
23: (172., 114., 82.),
24: (255., 127., 14.),
25: (91., 163., 138.),
26: (153., 98., 156.),
27: (140., 153., 101.),
28: (158., 218., 229.),
29: (100., 125., 154.),
30: (178., 127., 135.),
32: (146., 111., 194.),
33: (44., 160., 44.),
34: (112., 128., 144.),
35: (96., 207., 209.),
36: (227., 119., 194.),
37: (213., 92., 176.),
38: (94., 106., 211.),
39: (82., 84., 163.),
40: (100., 85., 144.),
}
def load_file(file_name):
    """Read a .ply point cloud and return (coords, colors, open3d cloud)."""
    cloud = o3d.io.read_point_cloud(file_name)
    # Coordinates and per-point colors as plain numpy arrays; the open3d
    # object itself is also returned so the caller can re-display it.
    return np.array(cloud.points), np.array(cloud.colors), cloud
if __name__ == '__main__':
    config = parser.parse_args()
    # Prefer GPU unless unavailable or explicitly disabled via --use_cpu.
    device = torch.device('cuda' if (
        torch.cuda.is_available() and not config.use_cpu) else 'cpu')
    print(f"Using {device}")
    # Define a model and load the weights (3 input channels = RGB,
    # 20 output channels = ScanNet classes).
    model = MinkUNet34C(3, 20).to(device)
    model_dict = torch.load(config.weights)
    model.load_state_dict(model_dict)
    model.eval()
    coords, colors, pcd = load_file(config.file_name)
    # Measure time
    with torch.no_grad():
        # Quantization resolution in the scan's units — presumably meters,
        # i.e. 2 cm voxels; TODO confirm against the input data.
        voxel_size = 0.02
        # Feed-forward pass and get the prediction
        in_field = ME.TensorField(
            features=torch.from_numpy(colors).float(),
            coordinates=ME.utils.batched_coordinates([coords / voxel_size], dtype=torch.float32),
            quantization_mode=ME.SparseTensorQuantizationMode.UNWEIGHTED_AVERAGE,
            minkowski_algorithm=ME.MinkowskiAlgorithm.SPEED_OPTIMIZED,
            device=device,
        )
        # Convert to a sparse tensor
        sinput = in_field.sparse()
        # Output sparse tensor
        soutput = model(sinput)
        # get the prediction on the input tensor field
        out_field = soutput.slice(in_field)
        logits = out_field.F
        # Per-point class = argmax over the 20 output channels.
        _, pred = logits.max(1)
        pred = pred.cpu().numpy()
    # Create a point cloud file colored by the predicted class.
    pred_pcd = o3d.geometry.PointCloud()
    # Map color: channel index -> ScanNet id -> RGB (0-255).
    colors = np.array([SCANNET_COLOR_MAP[VALID_CLASS_IDS[l]] for l in pred])
    pred_pcd.points = o3d.utility.Vector3dVector(coords)
    pred_pcd.colors = o3d.utility.Vector3dVector(colors / 255)
    pred_pcd.estimate_normals()
    # Move the original point cloud (+5 along y) so the input and the
    # prediction sit side by side in the viewer.
    pcd.points = o3d.utility.Vector3dVector(
        np.array(pcd.points) + np.array([0, 5, 0]))
    # Visualize the input point cloud and the prediction
    o3d.visualization.draw_geometries([pcd, pred_pcd])
| 35.719745 | 97 | 0.637839 |
# Networks", CVPR'19 (https://arxiv.org/abs/1904.08755) if you use any part
# of the code.
import os
import argparse
import numpy as np
from urllib.request import urlretrieve
try:
import open3d as o3d
except ImportError:
raise ImportError('Please install open3d with `pip install open3d`.')
import torch
import MinkowskiEngine as ME
from examples.minkunet import MinkUNet34C
from examples.common import Timer
# Check if the weights and file exist and download
if not os.path.isfile('weights.pth'):
print('Downloading weights and a room ply file...')
urlretrieve("http://cvgl.stanford.edu/data2/minkowskiengine/weights.pth",
'weights.pth')
urlretrieve("http://cvgl.stanford.edu/data2/minkowskiengine/1.ply", '1.ply')
parser = argparse.ArgumentParser()
parser.add_argument('--file_name', type=str, default='1.ply')
parser.add_argument('--weights', type=str, default='weights.pth')
parser.add_argument('--use_cpu', action='store_true')
CLASS_LABELS = ('wall', 'floor', 'cabinet', 'bed', 'chair', 'sofa', 'table',
'door', 'window', 'bookshelf', 'picture', 'counter', 'desk',
'curtain', 'refrigerator', 'shower curtain', 'toilet', 'sink',
'bathtub', 'otherfurniture')
VALID_CLASS_IDS = [
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 14, 16, 24, 28, 33, 34, 36, 39
]
SCANNET_COLOR_MAP = {
0: (0., 0., 0.),
1: (174., 199., 232.),
2: (152., 223., 138.),
3: (31., 119., 180.),
4: (255., 187., 120.),
5: (188., 189., 34.),
6: (140., 86., 75.),
7: (255., 152., 150.),
8: (214., 39., 40.),
9: (197., 176., 213.),
10: (148., 103., 189.),
11: (196., 156., 148.),
12: (23., 190., 207.),
14: (247., 182., 210.),
15: (66., 188., 102.),
16: (219., 219., 141.),
17: (140., 57., 197.),
18: (202., 185., 52.),
19: (51., 176., 203.),
20: (200., 54., 131.),
21: (92., 193., 61.),
22: (78., 71., 183.),
23: (172., 114., 82.),
24: (255., 127., 14.),
25: (91., 163., 138.),
26: (153., 98., 156.),
27: (140., 153., 101.),
28: (158., 218., 229.),
29: (100., 125., 154.),
30: (178., 127., 135.),
32: (146., 111., 194.),
33: (44., 160., 44.),
34: (112., 128., 144.),
35: (96., 207., 209.),
36: (227., 119., 194.),
37: (213., 92., 176.),
38: (94., 106., 211.),
39: (82., 84., 163.),
40: (100., 85., 144.),
}
def load_file(file_name):
pcd = o3d.io.read_point_cloud(file_name)
coords = np.array(pcd.points)
colors = np.array(pcd.colors)
return coords, colors, pcd
if __name__ == '__main__':
config = parser.parse_args()
device = torch.device('cuda' if (
torch.cuda.is_available() and not config.use_cpu) else 'cpu')
print(f"Using {device}")
# Define a model and load the weights
model = MinkUNet34C(3, 20).to(device)
model_dict = torch.load(config.weights)
model.load_state_dict(model_dict)
model.eval()
coords, colors, pcd = load_file(config.file_name)
# Measure time
with torch.no_grad():
voxel_size = 0.02
# Feed-forward pass and get the prediction
in_field = ME.TensorField(
features=torch.from_numpy(colors).float(),
coordinates=ME.utils.batched_coordinates([coords / voxel_size], dtype=torch.float32),
quantization_mode=ME.SparseTensorQuantizationMode.UNWEIGHTED_AVERAGE,
minkowski_algorithm=ME.MinkowskiAlgorithm.SPEED_OPTIMIZED,
device=device,
)
# Convert to a sparse tensor
sinput = in_field.sparse()
# Output sparse tensor
soutput = model(sinput)
# get the prediction on the input tensor field
out_field = soutput.slice(in_field)
logits = out_field.F
_, pred = logits.max(1)
pred = pred.cpu().numpy()
# Create a point cloud file
pred_pcd = o3d.geometry.PointCloud()
# Map color
colors = np.array([SCANNET_COLOR_MAP[VALID_CLASS_IDS[l]] for l in pred])
pred_pcd.points = o3d.utility.Vector3dVector(coords)
pred_pcd.colors = o3d.utility.Vector3dVector(colors / 255)
pred_pcd.estimate_normals()
# Move the original point cloud
pcd.points = o3d.utility.Vector3dVector(
np.array(pcd.points) + np.array([0, 5, 0]))
# Visualize the input point cloud and the prediction
o3d.visualization.draw_geometries([pcd, pred_pcd])
| true | true |
1c32b6fc1860b7fe41b96112f2204db29b88735e | 3,434 | py | Python | dataset/transform.py | AcodeC/video | 5d4497621cba764dc51ae22b10e1196c6e9e28bc | [
"MIT"
] | 23 | 2018-12-14T12:45:19.000Z | 2022-03-13T07:31:46.000Z | dataset/transform.py | AcodeC/video | 5d4497621cba764dc51ae22b10e1196c6e9e28bc | [
"MIT"
] | 3 | 2018-12-18T19:07:11.000Z | 2019-01-16T16:41:55.000Z | dataset/transform.py | AcodeC/video | 5d4497621cba764dc51ae22b10e1196c6e9e28bc | [
"MIT"
] | 8 | 2018-12-20T12:41:59.000Z | 2020-11-09T11:12:06.000Z | import math
import re
import string
import numpy as np
import torch
class UniformSample:
    """Evenly sub-sample ``n_sample`` frames; clips shorter than that pass through."""
    def __init__(self, n_sample):
        self.n_sample = n_sample
    def __call__(self, frames):
        if len(frames) < self.n_sample:
            return frames
        # Anchor positions are evenly spaced over [0, len-1], truncated to ints.
        positions = np.linspace(0, len(frames) - 1, self.n_sample)
        return [frames[int(p)] for p in positions]
class RandomSample:
    """Pick ``n_sample`` distinct frames at random, keeping temporal order."""
    def __init__(self, n_sample):
        self.n_sample = n_sample
    def __call__(self, frames):
        if len(frames) < self.n_sample:
            return frames
        # Sample without replacement, then sort so frame order is preserved.
        chosen = np.random.choice(len(frames), self.n_sample, replace=False)
        return [frames[i] for i in sorted(chosen)]
class UniformJitterSample:
    """Evenly spaced sampling with Gaussian jitter around each anchor index."""
    def __init__(self, n_sample):
        self.n_sample = n_sample
    def __call__(self, frames):
        n_frames = len(frames)
        if n_frames < self.n_sample:
            return frames
        # Jitter std shrinks as the sampling gets denser (quarter of the
        # average spacing, square-rooted and truncated to an int).
        std = int(math.sqrt(n_frames / self.n_sample / 2 / 2))
        anchors = [int(a) for a in np.linspace(0, n_frames - 1, self.n_sample)]
        jittered = [int(a + np.random.normal(0, std)) for a in anchors]
        # Clamp into range and restore temporal order before gathering.
        bounded = sorted(min(max(0, j), n_frames - 1) for j in jittered)
        return [frames[i] for i in bounded]
class ZeroPadIfLessThan:
    """Pad ``frames`` *in place* with zero arrays until it holds at least ``n``."""
    def __init__(self, n):
        self.n = n
    def __call__(self, frames):
        # Mutates and returns the same list object (callers may rely on that).
        shortfall = self.n - len(frames)
        for _ in range(max(0, shortfall)):
            frames.append(np.zeros_like(frames[0]))
        return frames
class ToTensor:
    """Convert an array-like to a ``torch.Tensor``, optionally casting to ``dtype``."""
    def __init__(self, dtype=None):
        self.dtype = dtype
    def __call__(self, array):
        tensor = torch.from_numpy(np.asarray(array))
        if self.dtype is None:
            return tensor
        return tensor.type(self.dtype)
class TrimExceptAscii:
    # Drop every non-ASCII byte: decode with errors='ignore', then re-encode.
    # NOTE(review): Python-2-era API — on Python 3 this only works for
    # ``bytes`` input (``str`` has no ``.decode``); confirm what callers pass.
    def __call__(self, sentence):
        return sentence.decode('ascii', 'ignore').encode('ascii')
class RemovePunctuation:
    """Delete every ASCII punctuation character from a sentence."""
    def __init__(self):
        # Character class matching any of string.punctuation, escaped for re.
        pattern = '[' + re.escape(string.punctuation) + ']'
        self.regex = re.compile(pattern)
    def __call__(self, sentence):
        return self.regex.sub('', sentence)
class Lowercase:
    # Normalize case (vocabulary building / matching happens lower-cased).
    def __call__(self, sentence):
        return sentence.lower()
class SplitWithWhiteSpace:
    # Tokenize on runs of whitespace; plain ``str.split`` semantics, so
    # leading/trailing whitespace produces no empty tokens.
    def __call__(self, sentence):
        return sentence.split()
class Truncate:
    # Keep at most the first ``n_word`` tokens; shorter inputs pass through.
    def __init__(self, n_word):
        self.n_word = n_word
    def __call__(self, words):
        return words[:self.n_word]
class PadFirst:
    # Prepend a single token (e.g. a start-of-sentence marker).
    def __init__(self, token):
        self.token = token
    def __call__(self, words):
        # Returns a new list; ``words`` itself is not mutated.
        return [ self.token ] + words
class PadLast:
    # Append a single token (e.g. an end-of-sentence marker).
    def __init__(self, token):
        self.token = token
    def __call__(self, words):
        # Returns a new list; ``words`` itself is not mutated.
        return words + [ self.token ]
class PadToLength:
    """Right-pad a token list with ``token`` until it has ``length`` items.

    NOTE(review): inputs longer than ``length`` are returned unchanged
    (never truncated) — identical to the original behaviour.
    """
    def __init__(self, token, length):
        self.token = token
        self.length = length
    def __call__(self, words):
        # A negative pad count multiplies to an empty list, so overlong
        # inputs pass through untouched.
        padding = [self.token] * (self.length - len(words))
        return words + padding
class ToIndex:
    """Map tokens to vocabulary indices, silently skipping out-of-vocab tokens."""
    def __init__(self, word2idx):
        self.word2idx = word2idx
    def __call__(self, words):
        vocab = self.word2idx
        return [vocab[word] for word in words if word in vocab]
| 23.682759 | 93 | 0.62551 | import math
import re
import string
import numpy as np
import torch
class UniformSample:
def __init__(self, n_sample):
self.n_sample = n_sample
def __call__(self, frames):
n_frames = len(frames)
if n_frames < self.n_sample:
return frames
sample_indices = [ int(i) for i in np.linspace(0, n_frames-1, self.n_sample) ]
samples = [ frames[i] for i in sample_indices ]
return samples
class RandomSample:
def __init__(self, n_sample):
self.n_sample = n_sample
def __call__(self, frames):
n_frames = len(frames)
if n_frames < self.n_sample:
return frames
sample_indices = sorted(np.random.choice(n_frames, self.n_sample, replace=False))
samples = [ frames[i] for i in sample_indices ]
return samples
class UniformJitterSample:
def __init__(self, n_sample):
self.n_sample = n_sample
def __call__(self, frames):
n_frames = len(frames)
if n_frames < self.n_sample:
return frames
jitter_std = int(math.sqrt(n_frames / self.n_sample / 2 / 2))
sample_indices = [ int(i) for i in np.linspace(0, n_frames-1, self.n_sample) ]
sample_indices = [ int(i + np.random.normal(0, jitter_std)) for i in sample_indices ]
sample_indices = [ min(max(0, i), n_frames-1) for i in sample_indices ]
sample_indices = sorted(sample_indices)
samples = [ frames[i] for i in sample_indices ]
return samples
class ZeroPadIfLessThan:
def __init__(self, n):
self.n = n
def __call__(self, frames):
while len(frames) < self.n:
frames.append(np.zeros_like(frames[0]))
return frames
class ToTensor:
def __init__(self, dtype=None):
self.dtype = dtype
def __call__(self, array):
np_array = np.asarray(array)
t = torch.from_numpy(np_array)
if self.dtype:
t = t.type(self.dtype)
return t
class TrimExceptAscii:
def __call__(self, sentence):
return sentence.decode('ascii', 'ignore').encode('ascii')
class RemovePunctuation:
def __init__(self):
self.regex = re.compile('[%s]' % re.escape(string.punctuation))
def __call__(self, sentence):
return self.regex.sub('', sentence)
class Lowercase:
def __call__(self, sentence):
return sentence.lower()
class SplitWithWhiteSpace:
def __call__(self, sentence):
return sentence.split()
class Truncate:
def __init__(self, n_word):
self.n_word = n_word
def __call__(self, words):
return words[:self.n_word]
class PadFirst:
def __init__(self, token):
self.token = token
def __call__(self, words):
return [ self.token ] + words
class PadLast:
def __init__(self, token):
self.token = token
def __call__(self, words):
return words + [ self.token ]
class PadToLength:
def __init__(self, token, length):
self.token = token
self.length = length
def __call__(self, words):
n_pads = self.length - len(words)
return words + [ self.token ] * n_pads
class ToIndex:
def __init__(self, word2idx):
self.word2idx = word2idx
def __call__(self, words):
return [ self.word2idx[word] for word in words if word in self.word2idx ]
| true | true |
1c32b763016a1c8a4b6126d8abb45c71d372978f | 10,551 | py | Python | infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py | ofuks/DLab | 460804a2559843d099936fe40373093f9bf9edcb | [
"Apache-2.0"
] | null | null | null | infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py | ofuks/DLab | 460804a2559843d099936fe40373093f9bf9edcb | [
"Apache-2.0"
] | null | null | null | infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py | ofuks/DLab | 460804a2559843d099936fe40373093f9bf9edcb | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# *****************************************************************************
#
# Copyright (c) 2016, EPAM SYSTEMS INC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ******************************************************************************
import logging
import json
import sys
from dlab.fab import *
from dlab.meta_lib import *
from dlab.actions_lib import *
import os
if __name__ == "__main__":
    # Per-request log file, named after the resource/user/request that
    # triggered this provisioning run.
    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
                                               os.environ['request_id'])
    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
    logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
                        level=logging.DEBUG,
                        filename=local_log_filepath)
    # Collect the notebook parameters from environment variables set by the
    # orchestrator; names are lower-cased and '_' -> '-' so they are valid
    # GCP resource names.
    notebook_config = dict()
    # exploratory_name is optional — fall back to an empty string.
    # NOTE(review): the bare except also hides non-KeyError failures.
    try:
        notebook_config['exploratory_name'] = (os.environ['exploratory_name']).lower().replace('_', '-')
    except:
        notebook_config['exploratory_name'] = ''
    notebook_config['service_base_name'] = (os.environ['conf_service_base_name']).lower().replace('_', '-')
    notebook_config['instance_type'] = os.environ['gcp_notebook_instance_size']
    notebook_config['key_name'] = os.environ['conf_key_name']
    notebook_config['edge_user_name'] = (os.environ['edge_user_name']).lower().replace('_', '-')
    notebook_config['instance_name'] = '{0}-{1}-nb-{2}'.format(notebook_config['service_base_name'],
                                                               notebook_config['edge_user_name'],
                                                               notebook_config['exploratory_name'])
    # generating variables regarding EDGE proxy on Notebook instance
    instance_hostname = GCPMeta().get_private_ip_address(notebook_config['instance_name'])
    edge_instance_name = '{0}-{1}-edge'.format(notebook_config['service_base_name'], notebook_config['edge_user_name'])
    notebook_config['ssh_key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
    notebook_config['dlab_ssh_user'] = os.environ['conf_os_user']
    notebook_config['zone'] = os.environ['gcp_zone']
    # Create the service OS user ("dlab") on the new instance; the initial
    # login user and the sudo group depend on the base image family.
    try:
        if os.environ['conf_os_family'] == 'debian':
            initial_user = 'ubuntu'
            sudo_group = 'sudo'
        if os.environ['conf_os_family'] == 'redhat':
            initial_user = 'ec2-user'
            sudo_group = 'wheel'
        logging.info('[CREATING DLAB SSH USER]')
        print('[CREATING DLAB SSH USER]')
        params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
            (instance_hostname, notebook_config['ssh_key_path'], initial_user,
             notebook_config['dlab_ssh_user'], sudo_group)
        try:
            local("~/scripts/{}.py {}".format('create_ssh_user', params))
        except:
            traceback.print_exc()
            raise Exception
    except Exception as err:
        # On any failure, report and tear down the half-created instance.
        append_result("Failed creating ssh user 'dlab'.", str(err))
        GCPActions().remove_instance(notebook_config['instance_name'], notebook_config['zone'])
        sys.exit(1)
    # configuring proxy on Notebook instance
    try:
        logging.info('[CONFIGURE PROXY ON DEEPLEARNING INSTANCE]')
        print('[CONFIGURE PROXY ON DEEPLEARNING INSTANCE]')
        # Outbound traffic from the notebook goes through the Edge node's
        # proxy on port 3128.
        additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
        params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
            .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
                    json.dumps(additional_config), notebook_config['dlab_ssh_user'])
        try:
            local("~/scripts/{}.py {}".format('common_configure_proxy', params))
        except:
            traceback.print_exc()
            raise Exception
    except Exception as err:
        append_result("Failed to configure proxy.", str(err))
        GCPActions().remove_instance(notebook_config['instance_name'], notebook_config['zone'])
        sys.exit(1)
    # updating repositories & installing python packages
    try:
        logging.info('[INSTALLING PREREQUISITES TO DEEPLEARNING NOTEBOOK INSTANCE]')
        print('[INSTALLING PREREQUISITES TO DEEPLEARNING NOTEBOOK INSTANCE]')
        params = "--hostname {} --keyfile {} --user {} --region {}". \
            format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['dlab_ssh_user'], os.environ['gcp_region'])
        try:
            local("~/scripts/{}.py {}".format('install_prerequisites', params))
        except:
            traceback.print_exc()
            raise Exception
    except Exception as err:
        append_result("Failed installing apps: apt & pip.", str(err))
        GCPActions().remove_instance(notebook_config['instance_name'], notebook_config['zone'])
        sys.exit(1)
    # Install Jupyter/Spark/TensorFlow etc. on the node; the component
    # versions come from the orchestrator's environment.
    try:
        logging.info('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
        print('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
        params = "--hostname {} --keyfile {} " \
                 "--os_user {} --jupyter_version {} " \
                 "--scala_version {} --spark_version {} " \
                 "--hadoop_version {} --region {} " \
                 "--tensorflow_version {} --r_mirror {} " \
                 "--exploratory_name {}" \
            .format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['dlab_ssh_user'],
                    os.environ['notebook_jupyter_version'], os.environ['notebook_scala_version'],
                    os.environ['notebook_spark_version'], os.environ['notebook_hadoop_version'],
                    os.environ['gcp_region'], os.environ['notebook_tensorflow_version'],
                    os.environ['notebook_r_mirror'], notebook_config['exploratory_name'])
        try:
            local("~/scripts/{}.py {}".format('configure_deep_learning_node', params))
        except:
            traceback.print_exc()
            raise Exception
    except Exception as err:
        append_result("Failed to configure Deep Learning node.", str(err))
        GCPActions().remove_instance(notebook_config['instance_name'], notebook_config['zone'])
        sys.exit(1)
    # Install the user's public SSH key so the user can log in directly.
    try:
        print('[INSTALLING USERs KEY]')
        logging.info('[INSTALLING USERs KEY]')
        additional_config = {"user_keyname": os.environ['edge_user_name'],
                             "user_keydir": os.environ['conf_key_dir']}
        params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
            instance_hostname, notebook_config['ssh_key_path'], json.dumps(additional_config), notebook_config['dlab_ssh_user'])
        try:
            local("~/scripts/{}.py {}".format('install_user_key', params))
        except:
            append_result("Failed installing users key")
            raise Exception
    except Exception as err:
        append_result("Failed installing users key.", str(err))
        GCPActions().remove_instance(notebook_config['instance_name'], notebook_config['zone'])
        sys.exit(1)
    # Configure git credentials (and any corporate certificates) for the user.
    try:
        print('[SETUP USER GIT CREDENTIALS]')
        logging.info('[SETUP USER GIT CREDENTIALS]')
        params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
            .format(notebook_config['dlab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
        try:
            local("~/scripts/{}.py {}".format('common_download_git_certfile', params))
            local("~/scripts/{}.py {}".format('manage_git_creds', params))
        except:
            append_result("Failed setup git credentials")
            raise Exception
    except Exception as err:
        append_result("Failed to setup git credentials.", str(err))
        GCPActions().remove_instance(notebook_config['instance_name'], notebook_config['zone'])
        sys.exit(1)
# generating output information
ip_address = GCPMeta().get_private_ip_address(notebook_config['instance_name'])
tensorboard_url = "http://" + ip_address + ":6006/"
jupyter_url = 'http://' + ip_address + ':8888/{}/'.format(notebook_config['exploratory_name'])
ungit_ip_url = "http://" + ip_address + ":8085/{}-ungit/".format(
notebook_config['exploratory_name'])
print('[SUMMARY]')
logging.info('[SUMMARY]')
print("Instance name: {}".format(notebook_config['instance_name']))
print("Private IP: {}".format(ip_address))
print("Instance type: {}".format(notebook_config['instance_type']))
print("Key name: {}".format(notebook_config['key_name']))
print("User key name: {}".format(os.environ['edge_user_name']))
print("TensorBoard URL: {}".format(tensorboard_url))
print("TensorBoard log dir: /var/log/tensorboard")
print("Jupyter URL: {}".format(jupyter_ip_url))
print("Ungit URL: {}".format(ungit_ip_url))
print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(notebook_config['key_name'],
notebook_config['dlab_ssh_user'],
ip_address))
with open("/root/result.json", 'w') as result:
res = {"hostname": ip_address,
"ip": ip_address,
"instance_id": notebook_config['instance_name'],
"master_keyname": os.environ['conf_key_name'],
"tensorboard_log_dir": "/var/log/tensorboard",
"notebook_name": notebook_config['instance_name'],
"Action": "Create new notebook server",
"exploratory_url": [
{"description": "TensorBoard",
"url": tensorboard_url},
{"description": "Jupyter",
"url": jupyter_ip_url},
{"description": "Ungit",
"url": ungit_ip_url}]}
result.write(json.dumps(res)) | 50.725962 | 130 | 0.604019 |
import logging
import json
import sys
from dlab.fab import *
from dlab.meta_lib import *
from dlab.actions_lib import *
import os
if __name__ == "__main__":
local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
os.environ['request_id'])
local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
level=logging.DEBUG,
filename=local_log_filepath)
notebook_config = dict()
try:
notebook_config['exploratory_name'] = (os.environ['exploratory_name']).lower().replace('_', '-')
except:
notebook_config['exploratory_name'] = ''
notebook_config['service_base_name'] = (os.environ['conf_service_base_name']).lower().replace('_', '-')
notebook_config['instance_type'] = os.environ['gcp_notebook_instance_size']
notebook_config['key_name'] = os.environ['conf_key_name']
notebook_config['edge_user_name'] = (os.environ['edge_user_name']).lower().replace('_', '-')
notebook_config['instance_name'] = '{0}-{1}-nb-{2}'.format(notebook_config['service_base_name'],
notebook_config['edge_user_name'],
notebook_config['exploratory_name'])
instance_hostname = GCPMeta().get_private_ip_address(notebook_config['instance_name'])
edge_instance_name = '{0}-{1}-edge'.format(notebook_config['service_base_name'], notebook_config['edge_user_name'])
notebook_config['ssh_key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
notebook_config['dlab_ssh_user'] = os.environ['conf_os_user']
notebook_config['zone'] = os.environ['gcp_zone']
try:
if os.environ['conf_os_family'] == 'debian':
initial_user = 'ubuntu'
sudo_group = 'sudo'
if os.environ['conf_os_family'] == 'redhat':
initial_user = 'ec2-user'
sudo_group = 'wheel'
logging.info('[CREATING DLAB SSH USER]')
print('[CREATING DLAB SSH USER]')
params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
(instance_hostname, notebook_config['ssh_key_path'], initial_user,
notebook_config['dlab_ssh_user'], sudo_group)
try:
local("~/scripts/{}.py {}".format('create_ssh_user', params))
except:
traceback.print_exc()
raise Exception
except Exception as err:
append_result("Failed creating ssh user 'dlab'.", str(err))
GCPActions().remove_instance(notebook_config['instance_name'], notebook_config['zone'])
sys.exit(1)
try:
logging.info('[CONFIGURE PROXY ON DEEPLEARNING INSTANCE]')
print('[CONFIGURE PROXY ON DEEPLEARNING INSTANCE]')
additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
.format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
json.dumps(additional_config), notebook_config['dlab_ssh_user'])
try:
local("~/scripts/{}.py {}".format('common_configure_proxy', params))
except:
traceback.print_exc()
raise Exception
except Exception as err:
append_result("Failed to configure proxy.", str(err))
GCPActions().remove_instance(notebook_config['instance_name'], notebook_config['zone'])
sys.exit(1)
try:
logging.info('[INSTALLING PREREQUISITES TO DEEPLEARNING NOTEBOOK INSTANCE]')
print('[INSTALLING PREREQUISITES TO DEEPLEARNING NOTEBOOK INSTANCE]')
params = "--hostname {} --keyfile {} --user {} --region {}". \
format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['dlab_ssh_user'], os.environ['gcp_region'])
try:
local("~/scripts/{}.py {}".format('install_prerequisites', params))
except:
traceback.print_exc()
raise Exception
except Exception as err:
append_result("Failed installing apps: apt & pip.", str(err))
GCPActions().remove_instance(notebook_config['instance_name'], notebook_config['zone'])
sys.exit(1)
try:
logging.info('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
print('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
params = "--hostname {} --keyfile {} " \
"--os_user {} --jupyter_version {} " \
"--scala_version {} --spark_version {} " \
"--hadoop_version {} --region {} " \
"--tensorflow_version {} --r_mirror {} " \
"--exploratory_name {}" \
.format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['dlab_ssh_user'],
os.environ['notebook_jupyter_version'], os.environ['notebook_scala_version'],
os.environ['notebook_spark_version'], os.environ['notebook_hadoop_version'],
os.environ['gcp_region'], os.environ['notebook_tensorflow_version'],
os.environ['notebook_r_mirror'], notebook_config['exploratory_name'])
try:
local("~/scripts/{}.py {}".format('configure_deep_learning_node', params))
except:
traceback.print_exc()
raise Exception
except Exception as err:
append_result("Failed to configure Deep Learning node.", str(err))
GCPActions().remove_instance(notebook_config['instance_name'], notebook_config['zone'])
sys.exit(1)
try:
print('[INSTALLING USERs KEY]')
logging.info('[INSTALLING USERs KEY]')
additional_config = {"user_keyname": os.environ['edge_user_name'],
"user_keydir": os.environ['conf_key_dir']}
params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
instance_hostname, notebook_config['ssh_key_path'], json.dumps(additional_config), notebook_config['dlab_ssh_user'])
try:
local("~/scripts/{}.py {}".format('install_user_key', params))
except:
append_result("Failed installing users key")
raise Exception
except Exception as err:
append_result("Failed installing users key.", str(err))
GCPActions().remove_instance(notebook_config['instance_name'], notebook_config['zone'])
sys.exit(1)
try:
print('[SETUP USER GIT CREDENTIALS]')
logging.info('[SETUP USER GIT CREDENTIALS]')
params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
.format(notebook_config['dlab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
try:
local("~/scripts/{}.py {}".format('common_download_git_certfile', params))
local("~/scripts/{}.py {}".format('manage_git_creds', params))
except:
append_result("Failed setup git credentials")
raise Exception
except Exception as err:
append_result("Failed to setup git credentials.", str(err))
GCPActions().remove_instance(notebook_config['instance_name'], notebook_config['zone'])
sys.exit(1)
ip_address = GCPMeta().get_private_ip_address(notebook_config['instance_name'])
tensorboard_url = "http://" + ip_address + ":6006/"
jupyter_url = 'http://' + ip_address + ':8888/{}/'.format(notebook_config['exploratory_name'])
ungit_ip_url = "http://" + ip_address + ":8085/{}-ungit/".format(
notebook_config['exploratory_name'])
print('[SUMMARY]')
logging.info('[SUMMARY]')
print("Instance name: {}".format(notebook_config['instance_name']))
print("Private IP: {}".format(ip_address))
print("Instance type: {}".format(notebook_config['instance_type']))
print("Key name: {}".format(notebook_config['key_name']))
print("User key name: {}".format(os.environ['edge_user_name']))
print("TensorBoard URL: {}".format(tensorboard_url))
print("TensorBoard log dir: /var/log/tensorboard")
print("Jupyter URL: {}".format(jupyter_ip_url))
print("Ungit URL: {}".format(ungit_ip_url))
print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(notebook_config['key_name'],
notebook_config['dlab_ssh_user'],
ip_address))
with open("/root/result.json", 'w') as result:
res = {"hostname": ip_address,
"ip": ip_address,
"instance_id": notebook_config['instance_name'],
"master_keyname": os.environ['conf_key_name'],
"tensorboard_log_dir": "/var/log/tensorboard",
"notebook_name": notebook_config['instance_name'],
"Action": "Create new notebook server",
"exploratory_url": [
{"description": "TensorBoard",
"url": tensorboard_url},
{"description": "Jupyter",
"url": jupyter_ip_url},
{"description": "Ungit",
"url": ungit_ip_url}]}
result.write(json.dumps(res)) | true | true |
1c32b79b708f112bdff89c86039028c44360ec78 | 14,446 | py | Python | scripts/constants/constants.py | sriramkswamy/kobuki-multi-agent-network | 976157ca701be63018cd13538431c2adafbe7f9a | [
"MIT"
] | null | null | null | scripts/constants/constants.py | sriramkswamy/kobuki-multi-agent-network | 976157ca701be63018cd13538431c2adafbe7f9a | [
"MIT"
] | null | null | null | scripts/constants/constants.py | sriramkswamy/kobuki-multi-agent-network | 976157ca701be63018cd13538431c2adafbe7f9a | [
"MIT"
] | null | null | null | import os
import os
import sys
import inspect
from math import pi
# NOTE(review): `sys` and `pi` are unused in this module; kept because other
# modules may pick them up via `from constants import *` -- confirm before
# removing.

# Absolute path of the package root: one directory above the folder containing
# this module.  Computed once instead of repeating the same
# realpath/abspath/split chain for every base folder (the original duplicated
# the expression six times).
_PACKAGE_ROOT = os.path.realpath(
    os.path.abspath(
        os.path.split(
            os.path.dirname(inspect.getfile(inspect.currentframe())))[0]))

# Base folders for data files and plots, per run category.  Every path ends
# with a trailing separator (the final '' join component), matching the
# original constants so plain string concatenation by callers keeps working.
EXPTS_BASE_DATA_FOLDER = os.path.join(_PACKAGE_ROOT, 'data', 'expts', '')
EXPTS_BASE_PLOTS_FOLDER = os.path.join(_PACKAGE_ROOT, 'plots', 'expts', '')
TESTS_BASE_DATA_FOLDER = os.path.join(_PACKAGE_ROOT, 'data', 'tests', '')
TESTS_BASE_PLOTS_FOLDER = os.path.join(_PACKAGE_ROOT, 'plots', 'tests', '')
TRIALS_BASE_DATA_FOLDER = os.path.join(_PACKAGE_ROOT, 'data', 'trials', '')
TRIALS_BASE_PLOTS_FOLDER = os.path.join(_PACKAGE_ROOT, 'plots', 'trials', '')


def _run_folders(base, run_names):
    """Map each run name to ``<base>/<run_name>/`` (trailing separator kept)."""
    return {name: os.path.join(base, name, '') for name in run_names}


# Run names for the old experiments.
_OLD_EXPT_RUNS = (
    'vbgr_pattern',
    'vbgyr_pattern',
    'vbgyor_pattern',
    'vr_semi_circle',
    'vgr_circle_opp',
    'vbg_straight_offset',
    'vbg_straight_cross',
    'vbg_straight_anti_parallel',
    'vbg_straight_perpendicular',
)

# Run names for the current tests.
_TEST_RUNS = (
    'vb_polar_pose01',
    'vb_polar_pose02',
    'vb_polar_pose03',
    'vb_polar_pose04',
    'v_pole_zero_tile6',
    'v_pole_piby4_tile6',
    'v_pole_3piby8_tile6',
    'v_pole_5piby8_tile6',
    'v_pole_3piby4_tile6',
    'vr_pole_straight_fast_trial1',
    'vr_pole_straight_hold_fast_trial1',
    'vb_pole_straight_fast_trial1',
    'vb_pole_straight_hold_fast_trial1',
    'vb_pole_straight_slow_trial1',
    'vb_pole_straight_hold_slow_trial1',
    'vb_pole_straight_medium_trial1',
    'vb_pole_straight_hold_medium_trial1',
    'vb_pole_straight_fast_trial2',
    'vb_pole_straight_hold_fast_trial2',
    'vb_pole_straight_slow_trial2',
    'vb_pole_straight_hold_slow_trial2',
    'vb_pole_straight_medium_trial2',
    'vb_pole_straight_hold_medium_trial2',
    'vr_pole_arc_acw_medium_trial1',
    'vr_pole_arc_acw_medium_trial2',
)

# Run names for the trials.
_TRIAL_RUNS = ('RO_circleobserve',)

# Run names for the old tests (stored alongside the current tests, as in the
# original module).
_OLD_TEST_RUNS = (
    'vg_pose01', 'vg_pose02', 'vg_pose03', 'vg_pose04', 'vg_pose05',
    'vg_pose06', 'vg_pose07', 'vg_pose08', 'vg_pose09', 'vg_pose10',
    'vg_orient_north', 'vg_orient_south', 'vg_orient_west', 'vg_orient_east',
    'vg_straight_north', 'vg_straight_south',
    'vg_straight_west', 'vg_straight_east',
    'vg_full_rotate_acw', 'vg_full_rotate_cw',
    'vg_rotate_acw', 'vg_rotate_cw',
    'vbg_rotate_acw_same', 'vbg_rotate_cw_opp',
    'vbg_pose06_pose01_same', 'vbg_pose06_pose01_opp',
)

# Per-run data/plot folders, keyed by run name (same keys and values as the
# original hand-written dictionaries, built from the tuples above).
OLD_EXPTS_DATA_FOLDER = _run_folders(EXPTS_BASE_DATA_FOLDER, _OLD_EXPT_RUNS)
OLD_EXPTS_PLOTS_FOLDER = _run_folders(EXPTS_BASE_PLOTS_FOLDER, _OLD_EXPT_RUNS)
TESTS_DATA_FOLDER = _run_folders(TESTS_BASE_DATA_FOLDER, _TEST_RUNS)
TESTS_PLOTS_FOLDER = _run_folders(TESTS_BASE_PLOTS_FOLDER, _TEST_RUNS)
TRIALS_DATA_FOLDER = _run_folders(TRIALS_BASE_DATA_FOLDER, _TRIAL_RUNS)
TRIALS_PLOTS_FOLDER = _run_folders(TRIALS_BASE_PLOTS_FOLDER, _TRIAL_RUNS)
OLD_TESTS_DATA_FOLDER = _run_folders(TESTS_BASE_DATA_FOLDER, _OLD_TEST_RUNS)
OLD_TESTS_PLOTS_FOLDER = _run_folders(TESTS_BASE_PLOTS_FOLDER, _OLD_TEST_RUNS)

# File-name suffixes of the per-robot odometry and scan CSV exports.
ODOM_POSITIONS = '_odom_positions.csv'
ODOM_VELOCITIES = '_odom_velocities.csv'
ODOM_DCM = '_odom_dcm.csv'
ODOM_QUATERNIONS = '_odom_quaternions.csv'
ODOM_TIMESTAMPS = '_odom_timestamps.csv'
SCAN_TIMESTAMPS = '_scan_timestamps.csv'

# Robot colour names, keyed by the single-letter (or multi-letter group)
# abbreviations used in the run names above.
BOT_NAMES = {
    'v': 'violet',
    'b': 'blue',
    'g': 'green',
    'y': 'yellow',
    'o': 'orange',
    'r': 'red',
    'bg': ['blue', 'green'],
    'gr': ['green', 'red'],
    'bgr': ['blue', 'green', 'red'],
    'bgyr': ['blue', 'green', 'yellow', 'red'],
    'bgyor': ['blue', 'green', 'yellow', 'orange', 'red']
}

# Folder names holding each robot's raw laser-scan values.
SCAN_LOCATION = {
    'v': 'violet_scan_values',
    'b': 'blue_scan_values',
    'g': 'green_scan_values',
    'y': 'yellow_scan_values',
    'o': 'orange_scan_values',
    'r': 'red_scan_values'
}

# Physical arena dimensions: one floor tile is 12 in (0.3048 m) and a wall
# spans 12 tiles.
TILE_DIST_IN = 12
TILE_DIST_M = 0.3048
WALL_DIST_IN = 12*TILE_DIST_IN
WALL_DIST_M = 12*TILE_DIST_M
| 62.808696 | 124 | 0.74775 | import os
import sys
import inspect
from math import pi
EXPTS_BASE_DATA_FOLDER = (os.path.realpath(
os.path.abspath(
os.path.split(
os.path.dirname(inspect.getfile(inspect.currentframe())))[0])))
EXPTS_BASE_DATA_FOLDER = os.path.join(EXPTS_BASE_DATA_FOLDER, 'data', 'expts', '')
EXPTS_BASE_PLOTS_FOLDER = (os.path.realpath(
os.path.abspath(
os.path.split(
os.path.dirname(inspect.getfile(inspect.currentframe())))[0])))
EXPTS_BASE_PLOTS_FOLDER = os.path.join(EXPTS_BASE_PLOTS_FOLDER, 'plots', 'expts', '')
TESTS_BASE_DATA_FOLDER = (os.path.realpath(
os.path.abspath(
os.path.split(
os.path.dirname(inspect.getfile(inspect.currentframe())))[0])))
TESTS_BASE_DATA_FOLDER = os.path.join(TESTS_BASE_DATA_FOLDER, 'data', 'tests', '')
TESTS_BASE_PLOTS_FOLDER = (os.path.realpath(
os.path.abspath(
os.path.split(
os.path.dirname(inspect.getfile(inspect.currentframe())))[0])))
TESTS_BASE_PLOTS_FOLDER = os.path.join(TESTS_BASE_PLOTS_FOLDER, 'plots', 'tests', '')
TRIALS_BASE_DATA_FOLDER = (os.path.realpath(
os.path.abspath(
os.path.split(
os.path.dirname(inspect.getfile(inspect.currentframe())))[0])))
TRIALS_BASE_DATA_FOLDER = os.path.join(TRIALS_BASE_DATA_FOLDER, 'data', 'trials', '')
TRIALS_BASE_PLOTS_FOLDER = (os.path.realpath(
os.path.abspath(
os.path.split(
os.path.dirname(inspect.getfile(inspect.currentframe())))[0])))
TRIALS_BASE_PLOTS_FOLDER = os.path.join(TRIALS_BASE_PLOTS_FOLDER, 'plots', 'trials', '')
OLD_EXPTS_DATA_FOLDER = {
'vbgr_pattern': os.path.join(EXPTS_BASE_DATA_FOLDER, 'vbgr_pattern', ''),
'vbgyr_pattern': os.path.join(EXPTS_BASE_DATA_FOLDER, 'vbgyr_pattern', ''),
'vbgyor_pattern': os.path.join(EXPTS_BASE_DATA_FOLDER, 'vbgyor_pattern', ''),
'vr_semi_circle': os.path.join(EXPTS_BASE_DATA_FOLDER, 'vr_semi_circle', ''),
'vgr_circle_opp': os.path.join(EXPTS_BASE_DATA_FOLDER, 'vgr_circle_opp', ''),
'vbg_straight_offset': os.path.join(EXPTS_BASE_DATA_FOLDER, 'vbg_straight_offset', ''),
'vbg_straight_cross': os.path.join(EXPTS_BASE_DATA_FOLDER, 'vbg_straight_cross', ''),
'vbg_straight_anti_parallel': os.path.join(EXPTS_BASE_DATA_FOLDER, 'vbg_straight_anti_parallel', ''),
'vbg_straight_perpendicular': os.path.join(EXPTS_BASE_DATA_FOLDER, 'vbg_straight_perpendicular', '')
}
OLD_EXPTS_PLOTS_FOLDER = {
'vbgr_pattern': os.path.join(EXPTS_BASE_PLOTS_FOLDER, 'vbgr_pattern', ''),
'vbgyr_pattern': os.path.join(EXPTS_BASE_PLOTS_FOLDER, 'vbgyr_pattern', ''),
'vbgyor_pattern': os.path.join(EXPTS_BASE_PLOTS_FOLDER, 'vbgyor_pattern', ''),
'vr_semi_circle': os.path.join(EXPTS_BASE_PLOTS_FOLDER, 'vr_semi_circle', ''),
'vgr_circle_opp': os.path.join(EXPTS_BASE_PLOTS_FOLDER, 'vgr_circle_opp', ''),
'vbg_straight_offset': os.path.join(EXPTS_BASE_PLOTS_FOLDER, 'vbg_straight_offset', ''),
'vbg_straight_cross': os.path.join(EXPTS_BASE_PLOTS_FOLDER, 'vbg_straight_cross', ''),
'vbg_straight_anti_parallel': os.path.join(EXPTS_BASE_PLOTS_FOLDER, 'vbg_straight_anti_parallel', ''),
'vbg_straight_perpendicular': os.path.join(EXPTS_BASE_PLOTS_FOLDER, 'vbg_straight_perpendicular', '')
}
TESTS_DATA_FOLDER = {
'vb_polar_pose01': os.path.join(TESTS_BASE_DATA_FOLDER, 'vb_polar_pose01', ''),
'vb_polar_pose02': os.path.join(TESTS_BASE_DATA_FOLDER, 'vb_polar_pose02', ''),
'vb_polar_pose03': os.path.join(TESTS_BASE_DATA_FOLDER, 'vb_polar_pose03', ''),
'vb_polar_pose04': os.path.join(TESTS_BASE_DATA_FOLDER, 'vb_polar_pose04', ''),
'v_pole_zero_tile6': os.path.join(TESTS_BASE_DATA_FOLDER, 'v_pole_zero_tile6', ''),
'v_pole_piby4_tile6': os.path.join(TESTS_BASE_DATA_FOLDER, 'v_pole_piby4_tile6', ''),
'v_pole_3piby8_tile6': os.path.join(TESTS_BASE_DATA_FOLDER, 'v_pole_3piby8_tile6', ''),
'v_pole_5piby8_tile6': os.path.join(TESTS_BASE_DATA_FOLDER, 'v_pole_5piby8_tile6', ''),
'v_pole_3piby4_tile6': os.path.join(TESTS_BASE_DATA_FOLDER, 'v_pole_3piby4_tile6', ''),
'vr_pole_straight_fast_trial1': os.path.join(TESTS_BASE_DATA_FOLDER, 'vr_pole_straight_fast_trial1', ''),
'vr_pole_straight_hold_fast_trial1': os.path.join(TESTS_BASE_DATA_FOLDER, 'vr_pole_straight_hold_fast_trial1', ''),
'vb_pole_straight_fast_trial1': os.path.join(TESTS_BASE_DATA_FOLDER, 'vb_pole_straight_fast_trial1', ''),
'vb_pole_straight_hold_fast_trial1': os.path.join(TESTS_BASE_DATA_FOLDER, 'vb_pole_straight_hold_fast_trial1', ''),
'vb_pole_straight_slow_trial1': os.path.join(TESTS_BASE_DATA_FOLDER, 'vb_pole_straight_slow_trial1', ''),
'vb_pole_straight_hold_slow_trial1': os.path.join(TESTS_BASE_DATA_FOLDER, 'vb_pole_straight_hold_slow_trial1', ''),
'vb_pole_straight_medium_trial1': os.path.join(TESTS_BASE_DATA_FOLDER, 'vb_pole_straight_medium_trial1', ''),
'vb_pole_straight_hold_medium_trial1': os.path.join(TESTS_BASE_DATA_FOLDER, 'vb_pole_straight_hold_medium_trial1', ''),
'vb_pole_straight_fast_trial2': os.path.join(TESTS_BASE_DATA_FOLDER, 'vb_pole_straight_fast_trial2', ''),
'vb_pole_straight_hold_fast_trial2': os.path.join(TESTS_BASE_DATA_FOLDER, 'vb_pole_straight_hold_fast_trial2', ''),
'vb_pole_straight_slow_trial2': os.path.join(TESTS_BASE_DATA_FOLDER, 'vb_pole_straight_slow_trial2', ''),
'vb_pole_straight_hold_slow_trial2': os.path.join(TESTS_BASE_DATA_FOLDER, 'vb_pole_straight_hold_slow_trial2', ''),
'vb_pole_straight_medium_trial2': os.path.join(TESTS_BASE_DATA_FOLDER, 'vb_pole_straight_medium_trial2', ''),
'vb_pole_straight_hold_medium_trial2': os.path.join(TESTS_BASE_DATA_FOLDER, 'vb_pole_straight_hold_medium_trial2', ''),
'vr_pole_arc_acw_medium_trial1': os.path.join(TESTS_BASE_DATA_FOLDER, 'vr_pole_arc_acw_medium_trial1', ''),
'vr_pole_arc_acw_medium_trial2': os.path.join(TESTS_BASE_DATA_FOLDER, 'vr_pole_arc_acw_medium_trial2', '')
}
TESTS_PLOTS_FOLDER = {
'vb_polar_pose01': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vb_polar_pose01', ''),
'vb_polar_pose02': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vb_polar_pose02', ''),
'vb_polar_pose03': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vb_polar_pose03', ''),
'vb_polar_pose04': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vb_polar_pose04', ''),
'v_pole_zero_tile6': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'v_pole_zero_tile6', ''),
'v_pole_piby4_tile6': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'v_pole_piby4_tile6', ''),
'v_pole_3piby8_tile6': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'v_pole_3piby8_tile6', ''),
'v_pole_5piby8_tile6': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'v_pole_5piby8_tile6', ''),
'v_pole_3piby4_tile6': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'v_pole_3piby4_tile6', ''),
'vr_pole_straight_fast_trial1': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vr_pole_straight_fast_trial1', ''),
'vr_pole_straight_hold_fast_trial1': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vr_pole_straight_hold_fast_trial1', ''),
'vb_pole_straight_fast_trial1': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vb_pole_straight_fast_trial1', ''),
'vb_pole_straight_hold_fast_trial1': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vb_pole_straight_hold_fast_trial1', ''),
'vb_pole_straight_slow_trial1': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vb_pole_straight_slow_trial1', ''),
'vb_pole_straight_hold_slow_trial1': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vb_pole_straight_hold_slow_trial1', ''),
'vb_pole_straight_medium_trial1': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vb_pole_straight_medium_trial1', ''),
'vb_pole_straight_hold_medium_trial1': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vb_pole_straight_hold_medium_trial1', ''),
'vb_pole_straight_fast_trial2': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vb_pole_straight_fast_trial2', ''),
'vb_pole_straight_hold_fast_trial2': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vb_pole_straight_hold_fast_trial2', ''),
'vb_pole_straight_slow_trial2': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vb_pole_straight_slow_trial2', ''),
'vb_pole_straight_hold_slow_trial2': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vb_pole_straight_hold_slow_trial2', ''),
'vb_pole_straight_medium_trial2': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vb_pole_straight_medium_trial2', ''),
'vb_pole_straight_hold_medium_trial2': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vb_pole_straight_hold_medium_trial2', ''),
'vr_pole_arc_acw_medium_trial1': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vr_pole_arc_acw_medium_trial1', ''),
'vr_pole_arc_acw_medium_trial2': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vr_pole_arc_acw_medium_trial2', '')
}
TRIALS_DATA_FOLDER = {
'RO_circleobserve': os.path.join(TRIALS_BASE_DATA_FOLDER, 'RO_circleobserve', '')
}
TRIALS_PLOTS_FOLDER = {
'RO_circleobserve': os.path.join(TRIALS_BASE_PLOTS_FOLDER, 'RO_circleobserve', '')
}
OLD_TESTS_DATA_FOLDER = {
'vg_pose01': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_pose01', ''),
'vg_pose02': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_pose02', ''),
'vg_pose03': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_pose03', ''),
'vg_pose04': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_pose04', ''),
'vg_pose05': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_pose05', ''),
'vg_pose06': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_pose06', ''),
'vg_pose07': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_pose07', ''),
'vg_pose08': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_pose08', ''),
'vg_pose09': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_pose09', ''),
'vg_pose10': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_pose10', ''),
'vg_orient_north': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_orient_north', ''),
'vg_orient_south': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_orient_south', ''),
'vg_orient_west': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_orient_west', ''),
'vg_orient_east': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_orient_east', ''),
'vg_straight_north': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_straight_north', ''),
'vg_straight_south': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_straight_south', ''),
'vg_straight_west': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_straight_west', ''),
'vg_straight_east': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_straight_east', ''),
'vg_full_rotate_acw': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_full_rotate_acw', ''),
'vg_full_rotate_cw': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_full_rotate_cw', ''),
'vg_rotate_acw': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_rotate_acw', ''),
'vg_rotate_cw': os.path.join(TESTS_BASE_DATA_FOLDER, 'vg_rotate_cw', ''),
'vbg_rotate_acw_same': os.path.join(TESTS_BASE_DATA_FOLDER, 'vbg_rotate_acw_same', ''),
'vbg_rotate_cw_opp': os.path.join(TESTS_BASE_DATA_FOLDER, 'vbg_rotate_cw_opp', ''),
'vbg_pose06_pose01_same': os.path.join(TESTS_BASE_DATA_FOLDER, 'vbg_pose06_pose01_same', ''),
'vbg_pose06_pose01_opp': os.path.join(TESTS_BASE_DATA_FOLDER, 'vbg_pose06_pose01_opp', '')
}
OLD_TESTS_PLOTS_FOLDER = {
'vg_pose01': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_pose01', ''),
'vg_pose02': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_pose02', ''),
'vg_pose03': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_pose03', ''),
'vg_pose04': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_pose04', ''),
'vg_pose05': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_pose05', ''),
'vg_pose06': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_pose06', ''),
'vg_pose07': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_pose07', ''),
'vg_pose08': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_pose08', ''),
'vg_pose09': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_pose09', ''),
'vg_pose10': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_pose10', ''),
'vg_orient_north': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_orient_north', ''),
'vg_orient_south': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_orient_south', ''),
'vg_orient_west': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_orient_west', ''),
'vg_orient_east': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_orient_east', ''),
'vg_straight_north': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_straight_north', ''),
'vg_straight_south': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_straight_south', ''),
'vg_straight_west': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_straight_west', ''),
'vg_straight_east': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_straight_east', ''),
'vg_full_rotate_acw': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_full_rotate_acw', ''),
'vg_full_rotate_cw': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_full_rotate_cw', ''),
'vg_rotate_acw': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_rotate_acw', ''),
'vg_rotate_cw': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vg_rotate_cw', ''),
'vbg_rotate_acw_same': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vbg_rotate_acw_same', ''),
'vbg_rotate_cw_opp': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vbg_rotate_cw_opp', ''),
'vbg_pose06_pose01_same': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vbg_pose06_pose01_same', ''),
'vbg_pose06_pose01_opp': os.path.join(TESTS_BASE_PLOTS_FOLDER, 'vbg_pose06_pose01_opp', '')
}
ODOM_POSITIONS = '_odom_positions.csv'
ODOM_VELOCITIES = '_odom_velocities.csv'
ODOM_DCM = '_odom_dcm.csv'
ODOM_QUATERNIONS = '_odom_quaternions.csv'
ODOM_TIMESTAMPS = '_odom_timestamps.csv'
SCAN_TIMESTAMPS = '_scan_timestamps.csv'
BOT_NAMES = {
'v': 'violet',
'b': 'blue',
'g': 'green',
'y': 'yellow',
'o': 'orange',
'r': 'red',
'bg': ['blue', 'green'],
'gr': ['green', 'red'],
'bgr': ['blue', 'green', 'red'],
'bgyr': ['blue', 'green', 'yellow', 'red'],
'bgyor': ['blue', 'green', 'yellow', 'orange', 'red']
}
SCAN_LOCATION = {
'v': 'violet_scan_values',
'b': 'blue_scan_values',
'g': 'green_scan_values',
'y': 'yellow_scan_values',
'o': 'orange_scan_values',
'r': 'red_scan_values'
}
TILE_DIST_IN = 12
TILE_DIST_M = 0.3048
WALL_DIST_IN = 12*TILE_DIST_IN
WALL_DIST_M = 12*TILE_DIST_M
| true | true |
1c32b7b845744e864bc1c3f8c00f02adb11330da | 53 | py | Python | inac8hr/imports.py | th-bunratta/8hr.insomniac | 5173500a1ad7197096d513b38258aa65b035fcf3 | [
"BSD-3-Clause"
] | null | null | null | inac8hr/imports.py | th-bunratta/8hr.insomniac | 5173500a1ad7197096d513b38258aa65b035fcf3 | [
"BSD-3-Clause"
] | null | null | null | inac8hr/imports.py | th-bunratta/8hr.insomniac | 5173500a1ad7197096d513b38258aa65b035fcf3 | [
"BSD-3-Clause"
] | null | null | null | from inac8hr.wrappers.inac8hr_arcade.sprite import *
| 26.5 | 52 | 0.849057 | from inac8hr.wrappers.inac8hr_arcade.sprite import *
| true | true |
1c32b8911fad87127d03edfe8e86d02ba78a3fe0 | 9,218 | py | Python | aries_cloudagent/config/argparse.py | Patrik-Stas/aries-cloudagent-python | 28f4b59b11db72568084070e4e6c1576c298f03d | [
"Apache-2.0"
] | null | null | null | aries_cloudagent/config/argparse.py | Patrik-Stas/aries-cloudagent-python | 28f4b59b11db72568084070e4e6c1576c298f03d | [
"Apache-2.0"
] | null | null | null | aries_cloudagent/config/argparse.py | Patrik-Stas/aries-cloudagent-python | 28f4b59b11db72568084070e4e6c1576c298f03d | [
"Apache-2.0"
] | null | null | null | """Command line option parsing."""
import os
import argparse
from typing import Sequence
# Module-level parser describing every CLI option the agent accepts.
# `parse_args()` below runs this parser; argument order here determines the
# order options appear in `--help` output, so do not reorder casually.
PARSER = argparse.ArgumentParser(description="Runs an Aries Cloud Agent.")
# --- Transports ---------------------------------------------------------------
PARSER.add_argument(
    "-it",
    "--inbound-transport",
    dest="inbound_transports",
    type=str,
    action="append",
    nargs=3,
    required=True,
    metavar=("<module>", "<host>", "<port>"),
    help="Choose which interface(s) to listen on",
)
PARSER.add_argument(
    "-ot",
    "--outbound-transport",
    dest="outbound_transports",
    type=str,
    action="append",
    required=True,
    metavar="<module>",
    help="Choose which outbound transport handlers to register",
)
# --- Logging ------------------------------------------------------------------
PARSER.add_argument(
    "--log-config",
    dest="log_config",
    type=str,
    metavar="<path-to-config>",
    default=None,
    help="Specifies a custom logging configuration file",
)
PARSER.add_argument(
    "--log-level",
    dest="log_level",
    type=str,
    metavar="<log-level>",
    default=None,
    help="Specifies a custom logging level "
    + "(debug, info, warning, error, critical)",
)
# --- Agent identity and endpoint ----------------------------------------------
PARSER.add_argument(
    "-e",
    "--endpoint",
    type=str,
    metavar="<endpoint>",
    help="Specify the default endpoint to use when "
    + "creating connection invitations and requests",
)
PARSER.add_argument(
    "-l",
    "--label",
    type=str,
    metavar="<label>",
    help="Specify the default label to use when creating"
    + " connection invitations and requests",
)
PARSER.add_argument(
    "--seed",
    type=str,
    metavar="<wallet-seed>",
    help="Seed to use when creating the public DID",
)
# --- Wallet and storage -------------------------------------------------------
PARSER.add_argument(
    "--storage-type",
    type=str,
    metavar="<storage-type>",
    help="Specify the storage implementation to use",
)
PARSER.add_argument(
    "--wallet-key",
    type=str,
    metavar="<wallet-key>",
    help="Specify the master key value to use when opening the wallet",
)
PARSER.add_argument(
    "--wallet-name", type=str, metavar="<wallet-name>", help="Specify the wallet name"
)
PARSER.add_argument(
    "--wallet-type",
    type=str,
    metavar="<wallet-type>",
    help="Specify the wallet implementation to use",
)
PARSER.add_argument(
    "--wallet-storage-type",
    type=str,
    metavar="<storage-type>",
    help="Specify the wallet storage implementation to use",
)
# The storage config/creds values are raw JSON strings passed through to the
# wallet backend (postgres requires both).
PARSER.add_argument(
    "--wallet-storage-config",
    type=str,
    metavar="<storage-config>",
    help="Specify the storage configuration to use (required for postgres) "
    + 'e.g., \'{"url":"localhost:5432"}\'',
)
PARSER.add_argument(
    "--wallet-storage-creds",
    type=str,
    metavar="<storage-creds>",
    help="Specify the storage credentials to use (required for postgres) "
    + 'e.g., \'{"account":"postgres","password":"mysecretpassword",'
    + '"admin_account":"postgres","admin_password":"mysecretpassword"}\'',
)
# --- Ledger -------------------------------------------------------------------
PARSER.add_argument(
    "--pool-name", type=str, metavar="<pool-name>", help="Specify the pool name"
)
PARSER.add_argument(
    "--genesis-transactions",
    type=str,
    dest="genesis_transactions",
    metavar="<genesis-transactions>",
    help="Specify the genesis transactions as a string",
)
PARSER.add_argument(
    "--genesis-url",
    type=str,
    dest="genesis_url",
    metavar="<genesis-url>",
    help="Specify a url from which to fetch the genesis transactions",
)
# --- Admin API ----------------------------------------------------------------
PARSER.add_argument(
    "--admin",
    type=str,
    nargs=2,
    metavar=("<host>", "<port>"),
    help="Enable the administration API on a given host and port",
)
# --- Debug and auto-response behaviour ----------------------------------------
PARSER.add_argument("--debug", action="store_true", help="Enable debugging features")
PARSER.add_argument(
    "--debug-seed",
    dest="debug_seed",
    type=str,
    metavar="<debug-did-seed>",
    help="Specify the debug seed to use",
)
PARSER.add_argument(
    "--debug-connections",
    action="store_true",
    help="Enable additional logging around connections",
)
PARSER.add_argument(
    "--accept-invites", action="store_true", help="Auto-accept connection invitations"
)
PARSER.add_argument(
    "--accept-requests", action="store_true", help="Auto-accept connection requests"
)
PARSER.add_argument(
    "--auto-ping-connection",
    action="store_true",
    help="Automatically send a trust ping when a connection response is accepted",
)
PARSER.add_argument(
    "--auto-respond-messages",
    action="store_true",
    help="Auto-respond to basic messages",
)
PARSER.add_argument(
    "--auto-respond-credential-offer",
    action="store_true",
    help="Auto-respond to credential offers with credential request",
)
PARSER.add_argument(
    "--auto-respond-presentation-request",
    action="store_true",
    help="Auto-respond to presentation requests with a presentation "
    + "if exactly one credential exists to satisfy the request",
)
PARSER.add_argument(
    "--auto-verify-presentation",
    action="store_true",
    help="Automatically verify a presentation when it is received",
)
# --- Miscellaneous ------------------------------------------------------------
PARSER.add_argument(
    "--no-receive-invites",
    action="store_true",
    help="Disable the receive invitations administration function",
)
PARSER.add_argument(
    "--help-link",
    type=str,
    metavar="<help-url>",
    help="Define the help URL for the administration interface",
)
PARSER.add_argument(
    "--invite",
    action="store_true",
    help="Generate and print a new connection invitation URL",
)
PARSER.add_argument(
    "--send-invite",
    type=str,
    metavar="<agent-endpoint>",
    help="Specify an endpoint to send an invitation to",
)
PARSER.add_argument(
    "--timing",
    action="store_true",
    help="Including timing information in response messages",
)
PARSER.add_argument(
    "--protocol",
    dest="external_protocols",
    type=str,
    action="append",
    required=False,
    metavar="<module>",
    help="Provide external protocol modules",
)
# May be given multiple times; the WEBHOOK_URL environment variable is also
# merged in by get_settings() below.
PARSER.add_argument(
    "--webhook-url",
    action="append",
    metavar="<url>",
    help="Send webhooks to a given URL",
)
def parse_args(args: Sequence[str] = None):
"""Parse command line arguments and return the collection."""
return PARSER.parse_args(args)
def get_settings(args):
"""Convert command line arguments to a settings dictionary."""
settings = {}
if args.log_config:
settings["log.config"] = args.log_config
if args.log_level:
settings["log.level"] = args.log_level
settings["transport.inbound_configs"] = args.inbound_transports
settings["transport.outbound_configs"] = args.outbound_transports
if args.endpoint:
settings["default_endpoint"] = args.endpoint
if args.label:
settings["default_label"] = args.label
if args.genesis_url:
settings["ledger.genesis_url"] = args.genesis_url
elif args.genesis_transactions:
settings["ledger.genesis_transactions"] = args.genesis_transactions
if args.storage_type:
settings["storage.type"] = args.storage_type
if args.seed:
settings["wallet.seed"] = args.seed
if args.wallet_key:
settings["wallet.key"] = args.wallet_key
if args.wallet_name:
settings["wallet.name"] = args.wallet_name
if args.wallet_storage_type:
settings["wallet.storage_type"] = args.wallet_storage_type
if args.wallet_type:
settings["wallet.type"] = args.wallet_type
if args.wallet_storage_config:
settings["wallet.storage_config"] = args.wallet_storage_config
if args.wallet_storage_creds:
settings["wallet.storage_creds"] = args.wallet_storage_creds
if args.admin:
settings["admin.enabled"] = True
settings["admin.host"] = args.admin[0]
settings["admin.port"] = args.admin[1]
if args.help_link:
settings["admin.help_link"] = args.help_link
if args.no_receive_invites:
settings["admin.no_receive_invites"] = True
hook_urls = list(args.webhook_url) if args.webhook_url else []
hook_url = os.environ.get("WEBHOOK_URL")
if hook_url:
hook_urls.append(hook_url)
settings["admin.webhook_urls"] = hook_urls
if args.debug:
settings["debug.enabled"] = True
if args.debug_connections:
settings["debug.connections"] = True
if args.debug_seed:
settings["debug.seed"] = args.debug_seed
if args.invite:
settings["debug.print_invitation"] = True
if args.send_invite:
settings["debug.send_invitation_to"] = args.send_invite
if args.auto_respond_credential_offer:
settings["auto_respond_credential_offer"] = True
if args.auto_respond_presentation_request:
settings["auto_respond_presentation_request"] = True
if args.auto_verify_presentation:
settings["auto_verify_presentation"] = True
if args.accept_invites:
settings["accept_invites"] = True
if args.accept_requests:
settings["accept_requests"] = True
if args.auto_ping_connection:
settings["auto_ping_connection"] = True
if args.auto_respond_messages:
settings["debug.auto_respond_messages"] = True
if args.timing:
settings["timing.enabled"] = True
if args.external_protocols:
settings["external_protocols"] = args.external_protocols
return settings
| 26.039548 | 86 | 0.669993 |
import os
import argparse
from typing import Sequence
PARSER = argparse.ArgumentParser(description="Runs an Aries Cloud Agent.")
PARSER.add_argument(
"-it",
"--inbound-transport",
dest="inbound_transports",
type=str,
action="append",
nargs=3,
required=True,
metavar=("<module>", "<host>", "<port>"),
help="Choose which interface(s) to listen on",
)
PARSER.add_argument(
"-ot",
"--outbound-transport",
dest="outbound_transports",
type=str,
action="append",
required=True,
metavar="<module>",
help="Choose which outbound transport handlers to register",
)
PARSER.add_argument(
"--log-config",
dest="log_config",
type=str,
metavar="<path-to-config>",
default=None,
help="Specifies a custom logging configuration file",
)
PARSER.add_argument(
"--log-level",
dest="log_level",
type=str,
metavar="<log-level>",
default=None,
help="Specifies a custom logging level "
+ "(debug, info, warning, error, critical)",
)
PARSER.add_argument(
"-e",
"--endpoint",
type=str,
metavar="<endpoint>",
help="Specify the default endpoint to use when "
+ "creating connection invitations and requests",
)
PARSER.add_argument(
"-l",
"--label",
type=str,
metavar="<label>",
help="Specify the default label to use when creating"
+ " connection invitations and requests",
)
PARSER.add_argument(
"--seed",
type=str,
metavar="<wallet-seed>",
help="Seed to use when creating the public DID",
)
PARSER.add_argument(
"--storage-type",
type=str,
metavar="<storage-type>",
help="Specify the storage implementation to use",
)
PARSER.add_argument(
"--wallet-key",
type=str,
metavar="<wallet-key>",
help="Specify the master key value to use when opening the wallet",
)
PARSER.add_argument(
"--wallet-name", type=str, metavar="<wallet-name>", help="Specify the wallet name"
)
PARSER.add_argument(
"--wallet-type",
type=str,
metavar="<wallet-type>",
help="Specify the wallet implementation to use",
)
PARSER.add_argument(
"--wallet-storage-type",
type=str,
metavar="<storage-type>",
help="Specify the wallet storage implementation to use",
)
PARSER.add_argument(
"--wallet-storage-config",
type=str,
metavar="<storage-config>",
help="Specify the storage configuration to use (required for postgres) "
+ 'e.g., \'{"url":"localhost:5432"}\'',
)
PARSER.add_argument(
"--wallet-storage-creds",
type=str,
metavar="<storage-creds>",
help="Specify the storage credentials to use (required for postgres) "
+ 'e.g., \'{"account":"postgres","password":"mysecretpassword",'
+ '"admin_account":"postgres","admin_password":"mysecretpassword"}\'',
)
PARSER.add_argument(
"--pool-name", type=str, metavar="<pool-name>", help="Specify the pool name"
)
PARSER.add_argument(
"--genesis-transactions",
type=str,
dest="genesis_transactions",
metavar="<genesis-transactions>",
help="Specify the genesis transactions as a string",
)
PARSER.add_argument(
"--genesis-url",
type=str,
dest="genesis_url",
metavar="<genesis-url>",
help="Specify a url from which to fetch the genesis transactions",
)
PARSER.add_argument(
"--admin",
type=str,
nargs=2,
metavar=("<host>", "<port>"),
help="Enable the administration API on a given host and port",
)
PARSER.add_argument("--debug", action="store_true", help="Enable debugging features")
PARSER.add_argument(
"--debug-seed",
dest="debug_seed",
type=str,
metavar="<debug-did-seed>",
help="Specify the debug seed to use",
)
PARSER.add_argument(
"--debug-connections",
action="store_true",
help="Enable additional logging around connections",
)
PARSER.add_argument(
"--accept-invites", action="store_true", help="Auto-accept connection invitations"
)
PARSER.add_argument(
"--accept-requests", action="store_true", help="Auto-accept connection requests"
)
PARSER.add_argument(
"--auto-ping-connection",
action="store_true",
help="Automatically send a trust ping when a connection response is accepted",
)
PARSER.add_argument(
"--auto-respond-messages",
action="store_true",
help="Auto-respond to basic messages",
)
PARSER.add_argument(
"--auto-respond-credential-offer",
action="store_true",
help="Auto-respond to credential offers with credential request",
)
PARSER.add_argument(
"--auto-respond-presentation-request",
action="store_true",
help="Auto-respond to presentation requests with a presentation "
+ "if exactly one credential exists to satisfy the request",
)
PARSER.add_argument(
"--auto-verify-presentation",
action="store_true",
help="Automatically verify a presentation when it is received",
)
PARSER.add_argument(
"--no-receive-invites",
action="store_true",
help="Disable the receive invitations administration function",
)
PARSER.add_argument(
"--help-link",
type=str,
metavar="<help-url>",
help="Define the help URL for the administration interface",
)
PARSER.add_argument(
"--invite",
action="store_true",
help="Generate and print a new connection invitation URL",
)
PARSER.add_argument(
"--send-invite",
type=str,
metavar="<agent-endpoint>",
help="Specify an endpoint to send an invitation to",
)
PARSER.add_argument(
"--timing",
action="store_true",
help="Including timing information in response messages",
)
PARSER.add_argument(
"--protocol",
dest="external_protocols",
type=str,
action="append",
required=False,
metavar="<module>",
help="Provide external protocol modules",
)
PARSER.add_argument(
"--webhook-url",
action="append",
metavar="<url>",
help="Send webhooks to a given URL",
)
def parse_args(args: Sequence[str] = None):
return PARSER.parse_args(args)
def get_settings(args):
settings = {}
if args.log_config:
settings["log.config"] = args.log_config
if args.log_level:
settings["log.level"] = args.log_level
settings["transport.inbound_configs"] = args.inbound_transports
settings["transport.outbound_configs"] = args.outbound_transports
if args.endpoint:
settings["default_endpoint"] = args.endpoint
if args.label:
settings["default_label"] = args.label
if args.genesis_url:
settings["ledger.genesis_url"] = args.genesis_url
elif args.genesis_transactions:
settings["ledger.genesis_transactions"] = args.genesis_transactions
if args.storage_type:
settings["storage.type"] = args.storage_type
if args.seed:
settings["wallet.seed"] = args.seed
if args.wallet_key:
settings["wallet.key"] = args.wallet_key
if args.wallet_name:
settings["wallet.name"] = args.wallet_name
if args.wallet_storage_type:
settings["wallet.storage_type"] = args.wallet_storage_type
if args.wallet_type:
settings["wallet.type"] = args.wallet_type
if args.wallet_storage_config:
settings["wallet.storage_config"] = args.wallet_storage_config
if args.wallet_storage_creds:
settings["wallet.storage_creds"] = args.wallet_storage_creds
if args.admin:
settings["admin.enabled"] = True
settings["admin.host"] = args.admin[0]
settings["admin.port"] = args.admin[1]
if args.help_link:
settings["admin.help_link"] = args.help_link
if args.no_receive_invites:
settings["admin.no_receive_invites"] = True
hook_urls = list(args.webhook_url) if args.webhook_url else []
hook_url = os.environ.get("WEBHOOK_URL")
if hook_url:
hook_urls.append(hook_url)
settings["admin.webhook_urls"] = hook_urls
if args.debug:
settings["debug.enabled"] = True
if args.debug_connections:
settings["debug.connections"] = True
if args.debug_seed:
settings["debug.seed"] = args.debug_seed
if args.invite:
settings["debug.print_invitation"] = True
if args.send_invite:
settings["debug.send_invitation_to"] = args.send_invite
if args.auto_respond_credential_offer:
settings["auto_respond_credential_offer"] = True
if args.auto_respond_presentation_request:
settings["auto_respond_presentation_request"] = True
if args.auto_verify_presentation:
settings["auto_verify_presentation"] = True
if args.accept_invites:
settings["accept_invites"] = True
if args.accept_requests:
settings["accept_requests"] = True
if args.auto_ping_connection:
settings["auto_ping_connection"] = True
if args.auto_respond_messages:
settings["debug.auto_respond_messages"] = True
if args.timing:
settings["timing.enabled"] = True
if args.external_protocols:
settings["external_protocols"] = args.external_protocols
return settings
| true | true |
1c32b8a716fff04e983de872a823ae7a7301c97c | 198 | py | Python | molo/surveys/admin_urls.py | praekelt/molo.surveys | c86d231f7cee669eb1c91db49ec05cf711984e30 | [
"BSD-3-Clause"
] | null | null | null | molo/surveys/admin_urls.py | praekelt/molo.surveys | c86d231f7cee669eb1c91db49ec05cf711984e30 | [
"BSD-3-Clause"
] | 88 | 2016-06-14T18:36:18.000Z | 2018-09-21T07:33:58.000Z | molo/surveys/admin_urls.py | praekeltfoundation/molo.surveys | c86d231f7cee669eb1c91db49ec05cf711984e30 | [
"BSD-3-Clause"
] | 1 | 2017-10-02T09:27:45.000Z | 2017-10-02T09:27:45.000Z | from django.conf.urls import url
from molo.surveys.views import index
urlpatterns = [
# re-route to overwritten index view, originally in wagtailsurveys
url(r'^$', index, name='index'),
]
| 22 | 70 | 0.717172 | from django.conf.urls import url
from molo.surveys.views import index
urlpatterns = [
url(r'^$', index, name='index'),
]
| true | true |
1c32b909397498ff4375230c5fafdb28c659683a | 2,030 | py | Python | source/services/giphy.py | skonik/vk-cat-bot | 8002e6974da1f2dc2dcb81b51cfefb75879f2d24 | [
"MIT"
] | null | null | null | source/services/giphy.py | skonik/vk-cat-bot | 8002e6974da1f2dc2dcb81b51cfefb75879f2d24 | [
"MIT"
] | 1 | 2021-06-02T00:16:03.000Z | 2021-06-02T00:16:03.000Z | source/services/giphy.py | skonik/vk-cat-bot | 8002e6974da1f2dc2dcb81b51cfefb75879f2d24 | [
"MIT"
] | null | null | null | import io
import re
import json
import aiohttp
class GiphyMethodsMeta(type):
GIPHY_API_METHODS_URL = 'https://api.giphy.com/v1/'
allowed_methods = re.compile(r'gifs')
def __new__(cls, name, bases, dct):
for attr_name, attr_value in dct.items():
if cls.allowed_methods.match(attr_name):
dct[attr_name] = f'{cls.GIPHY_API_METHODS_URL}{attr_value}'
return super().__new__(cls, name, bases, dct)
class GiphyMethods(metaclass=GiphyMethodsMeta):
gifs_random = 'gifs/random'
gifs_translate = 'gifs/translate'
class Giphy:
def __init__(self, token):
self.__token = token
self.basic_params = {
'api_key': token
}
async def get_random_gif(self, tag=None):
query_params = self.basic_params.copy()
if tag:
query_params.update({'tag': tag.strip()})
async with aiohttp.ClientSession() as session:
async with session.get(GiphyMethods.gifs_random, params=query_params) as response:
body = await response.text()
response_json = json.loads(body)
gif_url = response_json['data']['images']['downsized_medium']['url']
async with session.get(gif_url) as resp:
gif_image = io.BytesIO(await resp.read())
return gif_image
async def get_translate(self, string):
query_params = self.basic_params.copy()
if not string:
string = 'empty'
query_params.update({'s': string.strip()})
async with aiohttp.ClientSession() as session:
async with session.get(GiphyMethods.gifs_translate, params=query_params) as response:
body = await response.text()
response_json = json.loads(body)
gif_url = response_json['data']['images']['downsized_medium']['url']
async with session.get(gif_url) as resp:
gif_image = io.BytesIO(await resp.read())
return gif_image
| 32.741935 | 97 | 0.614778 | import io
import re
import json
import aiohttp
class GiphyMethodsMeta(type):
GIPHY_API_METHODS_URL = 'https://api.giphy.com/v1/'
allowed_methods = re.compile(r'gifs')
def __new__(cls, name, bases, dct):
for attr_name, attr_value in dct.items():
if cls.allowed_methods.match(attr_name):
dct[attr_name] = f'{cls.GIPHY_API_METHODS_URL}{attr_value}'
return super().__new__(cls, name, bases, dct)
class GiphyMethods(metaclass=GiphyMethodsMeta):
gifs_random = 'gifs/random'
gifs_translate = 'gifs/translate'
class Giphy:
def __init__(self, token):
self.__token = token
self.basic_params = {
'api_key': token
}
async def get_random_gif(self, tag=None):
query_params = self.basic_params.copy()
if tag:
query_params.update({'tag': tag.strip()})
async with aiohttp.ClientSession() as session:
async with session.get(GiphyMethods.gifs_random, params=query_params) as response:
body = await response.text()
response_json = json.loads(body)
gif_url = response_json['data']['images']['downsized_medium']['url']
async with session.get(gif_url) as resp:
gif_image = io.BytesIO(await resp.read())
return gif_image
async def get_translate(self, string):
query_params = self.basic_params.copy()
if not string:
string = 'empty'
query_params.update({'s': string.strip()})
async with aiohttp.ClientSession() as session:
async with session.get(GiphyMethods.gifs_translate, params=query_params) as response:
body = await response.text()
response_json = json.loads(body)
gif_url = response_json['data']['images']['downsized_medium']['url']
async with session.get(gif_url) as resp:
gif_image = io.BytesIO(await resp.read())
return gif_image
| true | true |
1c32b9172303fb54d84a0f56fa397803ccc87d99 | 238 | py | Python | Mundo 1-Fundamentos/PythonExercicios/ex023.py | JotaPeGabriel/curso-em-video-python3 | 141a9bec8acd7513c10768494fee5c86282dfa75 | [
"MIT"
] | null | null | null | Mundo 1-Fundamentos/PythonExercicios/ex023.py | JotaPeGabriel/curso-em-video-python3 | 141a9bec8acd7513c10768494fee5c86282dfa75 | [
"MIT"
] | null | null | null | Mundo 1-Fundamentos/PythonExercicios/ex023.py | JotaPeGabriel/curso-em-video-python3 | 141a9bec8acd7513c10768494fee5c86282dfa75 | [
"MIT"
] | null | null | null | num = int(input("Digite um numero de 0 a 9999: "))
uni = num // 1 % 10
dez = num // 10 % 10
cen = num // 100 % 10
mil = num // 1000 % 10
print(f'''O numero digitado foi: {num}
Unidade: {uni}
dezena: {dez}
Centena: {cen}
Milhar: {mil}''')
| 21.636364 | 50 | 0.588235 | num = int(input("Digite um numero de 0 a 9999: "))
uni = num // 1 % 10
dez = num // 10 % 10
cen = num // 100 % 10
mil = num // 1000 % 10
print(f'''O numero digitado foi: {num}
Unidade: {uni}
dezena: {dez}
Centena: {cen}
Milhar: {mil}''')
| true | true |
1c32ba4303f378b2390b94e61729cf0bc77089a1 | 2,058 | py | Python | scripts/mask-alignment.py | albernsrya/spheres-augur-build | fdc7866aa73c07508e2e4811f5b74b385736ad79 | [
"Apache-2.0"
] | 1 | 2021-07-03T14:05:19.000Z | 2021-07-03T14:05:19.000Z | scripts/mask-alignment.py | albernsrya/spheres-augur-build | fdc7866aa73c07508e2e4811f5b74b385736ad79 | [
"Apache-2.0"
] | null | null | null | scripts/mask-alignment.py | albernsrya/spheres-augur-build | fdc7866aa73c07508e2e4811f5b74b385736ad79 | [
"Apache-2.0"
] | null | null | null | """
Mask initial bases from alignment FASTA
"""
import argparse
import Bio
import Bio.SeqIO
from Bio.Seq import Seq
def mask_terminal_gaps(seq):
L = len(seq)
seq_trimmed = seq.lstrip('-')
left_gaps = L - len(seq_trimmed)
seq_trimmed = seq_trimmed.rstrip('-')
right_gaps = L - len(seq_trimmed) - left_gaps
return "N"*left_gaps + seq_trimmed + "N"*right_gaps
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Mask initial bases from alignment FASTA",
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument("--alignment", required=True, help="FASTA file of alignment")
parser.add_argument("--mask-terminal-gaps", action='store_true', help="fill all terminal gaps with N as they likely represent missing data")
parser.add_argument("--mask-from-beginning", type = int, required=True, help="number of bases to mask from start")
parser.add_argument("--mask-from-end", type = int, help="number of bases to mask from end")
parser.add_argument("--mask-sites", nargs='+', type = int, help="list of sites to mask")
parser.add_argument("--output", required=True, help="FASTA file of output alignment")
args = parser.parse_args()
begin_length = 0
if args.mask_from_beginning:
begin_length = args.mask_from_beginning
end_length = 0
if args.mask_from_end:
end_length = args.mask_from_end
with open(args.output, 'w') as outfile:
for record in Bio.SeqIO.parse(args.alignment, 'fasta'):
seq = str(record.seq)
if args.mask_terminal_gaps:
seq = mask_terminal_gaps(seq)
start = "N" * begin_length
middle = seq[begin_length:-end_length]
end = "N" * end_length
seq_list = list(start + middle + end)
if args.mask_sites:
for site in args.mask_sites:
seq_list[site-1] = "N"
record.seq = Seq("".join(seq_list))
Bio.SeqIO.write(record, outfile, 'fasta')
| 38.830189 | 144 | 0.650632 | import argparse
import Bio
import Bio.SeqIO
from Bio.Seq import Seq
def mask_terminal_gaps(seq):
L = len(seq)
seq_trimmed = seq.lstrip('-')
left_gaps = L - len(seq_trimmed)
seq_trimmed = seq_trimmed.rstrip('-')
right_gaps = L - len(seq_trimmed) - left_gaps
return "N"*left_gaps + seq_trimmed + "N"*right_gaps
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Mask initial bases from alignment FASTA",
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument("--alignment", required=True, help="FASTA file of alignment")
parser.add_argument("--mask-terminal-gaps", action='store_true', help="fill all terminal gaps with N as they likely represent missing data")
parser.add_argument("--mask-from-beginning", type = int, required=True, help="number of bases to mask from start")
parser.add_argument("--mask-from-end", type = int, help="number of bases to mask from end")
parser.add_argument("--mask-sites", nargs='+', type = int, help="list of sites to mask")
parser.add_argument("--output", required=True, help="FASTA file of output alignment")
args = parser.parse_args()
begin_length = 0
if args.mask_from_beginning:
begin_length = args.mask_from_beginning
end_length = 0
if args.mask_from_end:
end_length = args.mask_from_end
with open(args.output, 'w') as outfile:
for record in Bio.SeqIO.parse(args.alignment, 'fasta'):
seq = str(record.seq)
if args.mask_terminal_gaps:
seq = mask_terminal_gaps(seq)
start = "N" * begin_length
middle = seq[begin_length:-end_length]
end = "N" * end_length
seq_list = list(start + middle + end)
if args.mask_sites:
for site in args.mask_sites:
seq_list[site-1] = "N"
record.seq = Seq("".join(seq_list))
Bio.SeqIO.write(record, outfile, 'fasta')
| true | true |
1c32ba78dc18ff3d51a570448d94b7e738fa37ea | 358 | py | Python | Rotating donut.py | Mario263/Hacktoberfest_2021 | 57965f48d3b19d25d2c0b75525eab4c4dce0157a | [
"MIT"
] | 16 | 2021-10-15T08:41:52.000Z | 2022-01-02T11:14:30.000Z | Rotating donut.py | Mario263/Hacktoberfest_2021 | 57965f48d3b19d25d2c0b75525eab4c4dce0157a | [
"MIT"
] | 5 | 2021-10-17T06:04:41.000Z | 2021-10-30T16:45:40.000Z | Rotating donut.py | Mario263/Hacktoberfest_2021 | 57965f48d3b19d25d2c0b75525eab4c4dce0157a | [
"MIT"
] | 43 | 2021-10-15T14:03:48.000Z | 2022-03-09T21:32:46.000Z | from vpython import *
canvas(background= color.purple)
donut = ring(radius=0.5, thickness=0.25, color=vector(400, 100, 1))
chocolate = ring(radius=0.55, thickness=0.25, color=vector(0.4, 0.2, 0))
rad = 0
while True:
rate(10)
donut.pos = vector(3*cos(rad), sin(rad), 0)
chocolate.pos = vector(3*cos(rad), sin(rad), 0)
rad = rad + 0.03 | 35.8 | 73 | 0.639665 | from vpython import *
canvas(background= color.purple)
donut = ring(radius=0.5, thickness=0.25, color=vector(400, 100, 1))
chocolate = ring(radius=0.55, thickness=0.25, color=vector(0.4, 0.2, 0))
rad = 0
while True:
rate(10)
donut.pos = vector(3*cos(rad), sin(rad), 0)
chocolate.pos = vector(3*cos(rad), sin(rad), 0)
rad = rad + 0.03 | true | true |
1c32bb2bde13ef8d4136b6393918978e0551a9c2 | 277 | py | Python | fdap/utils/loggeradapter.py | miniyus/AutomaticPosting-Python | 0627066e60d5cd474ed858e1567bcf6b5b1d9336 | [
"MIT"
] | null | null | null | fdap/utils/loggeradapter.py | miniyus/AutomaticPosting-Python | 0627066e60d5cd474ed858e1567bcf6b5b1d9336 | [
"MIT"
] | 1 | 2021-11-06T02:30:37.000Z | 2021-11-06T02:30:37.000Z | fdap/utils/loggeradapter.py | miniyus/AutomaticPosting-Python | 0627066e60d5cd474ed858e1567bcf6b5b1d9336 | [
"MIT"
] | null | null | null | import logging
class LoggerAdapter(logging.LoggerAdapter):
def __init__(self, prefix, logger):
super(LoggerAdapter, self).__init__(logger, {})
self.prefix = prefix
def process(self, msg, kwargs):
return '[%s] %s' % (self.prefix, msg), kwargs
| 25.181818 | 55 | 0.649819 | import logging
class LoggerAdapter(logging.LoggerAdapter):
def __init__(self, prefix, logger):
super(LoggerAdapter, self).__init__(logger, {})
self.prefix = prefix
def process(self, msg, kwargs):
return '[%s] %s' % (self.prefix, msg), kwargs
| true | true |
1c32bb5567a439d03bdb2f1993bc61fd4e88448f | 1,128 | py | Python | oneflow/python/framework/generator.py | wanghongsheng01/oneflow_cambricon | 187faaa2cb9ba995080ba22499b6219c2d36f0ac | [
"Apache-2.0"
] | null | null | null | oneflow/python/framework/generator.py | wanghongsheng01/oneflow_cambricon | 187faaa2cb9ba995080ba22499b6219c2d36f0ac | [
"Apache-2.0"
] | null | null | null | oneflow/python/framework/generator.py | wanghongsheng01/oneflow_cambricon | 187faaa2cb9ba995080ba22499b6219c2d36f0ac | [
"Apache-2.0"
] | null | null | null | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import oneflow
import oneflow._oneflow_internal
from oneflow.python.oneflow_export import oneflow_export
@oneflow_export("Generator")
def MakeGenerator(device=None, seed=None):
if device is None:
device = "auto"
if seed is None:
return oneflow._oneflow_internal.create_generator(device)
else:
return oneflow._oneflow_internal.create_generator(device, seed)
@oneflow_export("manual_seed")
def manual_seed(seed):
oneflow._oneflow_internal.manual_seed(seed)
| 32.228571 | 72 | 0.779255 | from __future__ import absolute_import
import oneflow
import oneflow._oneflow_internal
from oneflow.python.oneflow_export import oneflow_export
@oneflow_export("Generator")
def MakeGenerator(device=None, seed=None):
if device is None:
device = "auto"
if seed is None:
return oneflow._oneflow_internal.create_generator(device)
else:
return oneflow._oneflow_internal.create_generator(device, seed)
@oneflow_export("manual_seed")
def manual_seed(seed):
oneflow._oneflow_internal.manual_seed(seed)
| true | true |
1c32bc14a129f0ae18754f98f17e46ae45610067 | 12,868 | py | Python | models/lights.py | elena-chiculita/PieShine | 288f526902906e2dbf6463c214edaa1c608302dc | [
"MIT"
] | null | null | null | models/lights.py | elena-chiculita/PieShine | 288f526902906e2dbf6463c214edaa1c608302dc | [
"MIT"
] | 1 | 2016-12-10T04:06:17.000Z | 2016-12-10T22:46:04.000Z | models/lights.py | elena-chiculita/PieShine | 288f526902906e2dbf6463c214edaa1c608302dc | [
"MIT"
] | null | null | null | import time
from models.utils.comms import Comms
from models.utils.userobj import UserObj
from models.utils.callableobj import CallableObj
from models.utils.testobj import TestObj
from models.utils.color import Gamut
REFRESH_TIMEOUT = 5
# Associate a model id with a gamut.
ModelsGamut = {
'LCT001': 'B',
'LCT002': 'B',
'LCT003': 'B',
'LCT007': 'B',
'LCT010': 'C',
'LCT011': 'C',
'LCT014': 'C',
'LLC006': 'A',
'LLC007': 'A',
'LLC010': 'A',
'LLC011': 'A',
'LLC012': 'A',
'LLC013': 'A',
'LLC020': 'C',
'LMM001': 'B',
'LST001': 'A',
'LST002': 'C'
}
# coordinates for each gamut
gamut_A_coords = {
'red': {'x': 0.704, 'y': 0.296, 'hue': 0},
'green': {'x': 0.2151, 'y': 0.7106, 'hue': 100},
'blue': {'x': 0.138, 'y': 0.08, 'hue': 184},
'name': 'A'
}
gamut_B_coords = {
'red': {'x': 0.675, 'y': 0.322, 'hue': 0},
'green': {'x': 0.409, 'y': 0.518, 'hue': 100},
'blue': {'x': 0.167, 'y': 0.04, 'hue': 184},
'name': 'B'
}
gamut_C_coords = {
'red': {'x': 0.692, 'y': 0.308, 'hue': 0},
'green': {'x': 0.17, 'y': 0.7, 'hue': 100},
'blue': {'x': 0.153, 'y': 0.048, 'hue': 184},
'name': 'C'
}
# instantiate each gamut: A, B and C
gamutA = Gamut(gamut_A_coords['name'],
gamut_A_coords['red']['x'],
gamut_A_coords['red']['y'],
gamut_A_coords['green']['x'],
gamut_A_coords['green']['y'],
gamut_A_coords['blue']['x'],
gamut_A_coords['blue']['y'])
gamutB = Gamut(gamut_B_coords['name'],
gamut_B_coords['red']['x'],
gamut_B_coords['red']['y'],
gamut_B_coords['green']['x'],
gamut_B_coords['green']['y'],
gamut_B_coords['blue']['x'],
gamut_B_coords['blue']['y'])
gamutC = Gamut(gamut_C_coords['name'],
gamut_C_coords['red']['x'],
gamut_C_coords['red']['y'],
gamut_C_coords['green']['x'],
gamut_C_coords['green']['y'],
gamut_C_coords['blue']['x'],
gamut_C_coords['blue']['y'])
def get_gamut_by_name(name):
"""
Get the gamut object corresponding to gamut 'name'
:param name: The Gamut name.
:return: The gamut object (gamutA, gamutB, gamutC) corresponding to the given gamut name ('A', 'B' or 'C') or None.
"""
if name == 'A':
return gamutA
elif name == 'B':
return gamutB
elif name == 'C':
return gamutC
else:
return None
class Light(TestObj):
"""
Model of a Philips hue light.
Properties:
name - Unique name of the light.
on - True/False if the light is on or off.
bri - Brightness of the light (minimum = 1, maximum = 255)
alert - One of the following value:
'none' - Stop performing an alert effect.
'select' - Make the light blink once.
'lselect' - Make the light blink for 15 seconds or until an alert 'none' is received.
reachable - True/False if the light bulb is reachable or not
type - white ('Dimmable light') or colored ('Color light' or 'Extended color light')
model_id - Model id of the light (see ModelsGamut for model-gamut association for colored lights)
manufacturer_name - Manufacturer name.
unique_id - MAC address of the light.
sw_version - Software version running on the light.
"""
def __init__(self, comms, id):
assert (isinstance(comms, Comms))
self._comms = comms
self.id = id
self.__data = self._refresh()
self.refresh_time = time.time()
# for colored light bulbs identify the gamut according to its model id
if self.model_id in ModelsGamut:
self.gamut = get_gamut_by_name(ModelsGamut[self.model_id])
else:
self.gamut = None
def __repr__(self):
return '(' + self.id + ') * ' + self.name + ' * ' + ('On' if self.on else 'Off') + ' * bri = ' + str(self.bri)
@property
def _data(self, refresh=False):
# perform GET at minimum 5s
if refresh or (time.time() - self.refresh_time >= REFRESH_TIMEOUT):
self.__data = self._refresh()
self.refresh_time = time.time()
return self.__data
@property
def name(self):
return self._data['name']
@property
def on(self):
return self._data['state']['on']
@property
def bri(self):
return self._data['state']['bri']
@property
def alert(self):
return self._data['state']['alert']
@property
def reachable(self):
return self._data['state']['reachable']
@property
def type(self):
return self._data['type']
@property
def model_id(self):
return self._data['modelid']
@property
def manufacturer_name(self):
return self._data['manufacturername']
@property
def unique_id(self):
return self._data['uniqueid']
@property
def sw_version(self):
return self._data['swversion']
@classmethod
def _scan(cls, comms, color=None):
"""
Scan for a given light type.
:param comms: Comms instance to communicate with the bridge
:param color: Type of light bulb:
DimmableLight = white light
ColorLight = colored light
ExtendedColorLight = same as ColorLight, but color temperature can also be set)
:return: List of lights filtered after color parameter, or all lights if color is None.
"""
light_ids = []
d = comms.get('lights/')
[light_ids.append(key) for key, value in d.items() if (color is None) or (value['type'] == color)]
return [cls(comms, light_id) for light_id in light_ids]
def _adapt_name(self):
return self.name.replace(' ', '')
def _refresh(self):
return self._comms.get('lights/' + str(self.id))
def _force_refresh(self):
self.refresh_time = time.time() - REFRESH_TIMEOUT
def turn_on(self):
self._comms.put('lights/' + str(self.id) + '/state', '{"on":true}')
def turn_off(self):
self._comms.put('lights/' + str(self.id) + '/state', '{"on":false}')
def set_bri(self, bri):
self._comms.put('lights/' + str(self.id) + '/state', '{"bri":' + str(bri) + '}')
def set_alert(self, alert):
self._comms.put('lights/' + str(self.id) + '/state', '{"alert":"' + str(alert) + '"}')
class DimmableLight(Light):
"""
Model of a white Philips hue light (same properties as a Light instance).
"""
@classmethod
def _scan(cls, comms, color='Dimmable light'):
"""
Get a list of all white light objects from the setup.
"""
return super(DimmableLight, cls)._scan(comms, color)
class ColorLight(Light):
    """A color-capable Philips Hue light (no color-temperature support).

    Adds to Light:
      hue      - hue angle (0..65535; red = 0 or 65535, green = 25500,
                 blue = 46920)
      sat      - saturation (0 = white .. 254)
      effect   - 'colorloop' to cycle colors, 'none' to stop
      xy       - [x, y] CIE coordinates (floats in 0..1); values outside the
                 light's gamut are approximated by the bridge
      colormode - 'hs' when set from hue/sat, 'xy' when set from xy
    """

    def __repr__(self):
        base = super(ColorLight, self).__repr__()
        return '{} * Gamut {} [x,y] = {} * sat = {} * hue = {}'.format(
            base, self.gamut.name, self.xy, self.sat, self.hue)

    @property
    def hue(self):
        """Hue angle reported by the bridge."""
        return self._data['state']['hue']

    @property
    def sat(self):
        """Saturation reported by the bridge."""
        return self._data['state']['sat']

    @property
    def effect(self):
        """Current dynamic effect ('colorloop' or 'none')."""
        return self._data['state']['effect']

    @property
    def xy(self):
        """Current CIE [x, y] coordinates."""
        return self._data['state']['xy']

    @property
    def colormode(self):
        """How the current color was set ('hs' or 'xy')."""
        return self._data['state']['colormode']

    @classmethod
    def _scan(cls, comms, color='Color light'):
        """Return every plain 'Color light' device (excludes the ones that
        also support color temperature)."""
        return super(ColorLight, cls)._scan(comms, color)

    def set_hue(self, hue):
        """Set the hue angle on the bridge."""
        self._comms.put('lights/{}/state'.format(self.id), '{{"hue":{}}}'.format(hue))

    def set_sat(self, sat):
        """Set the saturation on the bridge."""
        self._comms.put('lights/{}/state'.format(self.id), '{{"sat":{}}}'.format(sat))

    def set_effect(self, effect):
        """Start or stop a dynamic effect on the bridge."""
        self._comms.put('lights/{}/state'.format(self.id), '{{"effect":"{}"}}'.format(effect))

    def set_xy(self, x, y):
        """Set the CIE x/y coordinates on the bridge."""
        self._comms.put('lights/{}/state'.format(self.id), '{{"xy":[{},{}]}}'.format(x, y))

    def set_color(self, red, green, blue):
        """Approximate an RGB color inside this light's gamut and apply it
        as an xy + brightness pair."""
        if self.gamut is None:
            print('Model id not found. Cannot set color !!!')
            return
        cx, cy, brightness = self.gamut.get_xy_and_bri_from_rgb(red, green, blue)
        self.set_xy(cx, cy)
        self.set_bri(brightness)
class ExtendedColorLight(ColorLight):
    """A color Hue light that additionally supports color temperature.

    Adds to ColorLight:
      ct       - Mired color temperature (153 = 6500K .. 500 = 2000K)
      colormode - may also be 'ct' when set from color temperature
    """

    def __repr__(self):
        return '{} * ct = {}'.format(
            super(ExtendedColorLight, self).__repr__(), self.ct)

    @property
    def ct(self):
        """Mired color temperature reported by the bridge."""
        return self._data['state']['ct']

    @classmethod
    def _scan(cls, comms, color='Extended color light'):
        """Return every 'Extended color light' device in the setup."""
        return super(ExtendedColorLight, cls)._scan(comms, color)

    def set_ct(self, ct):
        """Set the Mired color temperature on the bridge."""
        self._comms.put('lights/{}/state'.format(self.id), '{{"ct":{}}}'.format(ct))
class Lights(UserObj, TestObj):
    """Aggregate controller for every light in the setup.

    Acts as a mapping of light name -> light object (via UserObj) and also
    exposes each light as an attribute named after it (spaces removed).
    Every public light method (turn_on, set_bri, ...) is mirrored here as a
    broadcast callable that invokes it on each light supporting it, so
    ``bridge.lights.turn_on()`` turns on all lights at once.
    """

    def __init__(self, comms):
        # Discover every light, one scan per capability class.
        all_lights = (DimmableLight._scan(comms)
                      + ColorLight._scan(comms)
                      + ExtendedColorLight._scan(comms))
        # Mapping interface: index by the bridge-assigned light name.
        self.set_obj({light.name: light for light in all_lights})
        # Attribute interface: one member per light.  (A plain loop, not a
        # throwaway list comprehension used only for its side effects.)
        for light in all_lights:
            setattr(self, light._adapt_name(), light)
        # Collect the names of all public callables across all lights.
        method_names = {name
                        for light in all_lights
                        for name in dir(light)
                        if not name.startswith('_')
                        and callable(getattr(light, name))}
        # Install one broadcast callable per method name; lights that do not
        # support a method (e.g. set_color on white lights) are skipped.
        for method_name in method_names:
            bound = [getattr(light, method_name)
                     for light in all_lights
                     if method_name in dir(light)]
            setattr(self, method_name, CallableObj(bound))

    def __repr__(self):
        return ''.join(str(light) + '\n' for light in self.values())
| 33.336788 | 136 | 0.578178 | import time
from models.utils.comms import Comms
from models.utils.userobj import UserObj
from models.utils.callableobj import CallableObj
from models.utils.testobj import TestObj
from models.utils.color import Gamut
# Seconds a cached light-state dict stays valid before the _data property
# re-fetches it from the bridge.
REFRESH_TIMEOUT = 5
# Philips model id -> color-gamut letter ('A'/'B'/'C').  Used at Light
# construction time to pick the Gamut instance that maps RGB requests into
# the CIE xy range the hardware can actually display.
ModelsGamut = {
    'LCT001': 'B',
    'LCT002': 'B',
    'LCT003': 'B',
    'LCT007': 'B',
    'LCT010': 'C',
    'LCT011': 'C',
    'LCT014': 'C',
    'LLC006': 'A',
    'LLC007': 'A',
    'LLC010': 'A',
    'LLC011': 'A',
    'LLC012': 'A',
    'LLC013': 'A',
    'LLC020': 'C',
    'LMM001': 'B',
    'LST001': 'A',
    'LST002': 'C'
}
# CIE-xy corner points of the three Philips gamut triangles.  (The 'hue'
# entries are not used by the Gamut construction below.)
gamut_A_coords = {
    'red': {'x': 0.704, 'y': 0.296, 'hue': 0},
    'green': {'x': 0.2151, 'y': 0.7106, 'hue': 100},
    'blue': {'x': 0.138, 'y': 0.08, 'hue': 184},
    'name': 'A'
}
gamut_B_coords = {
    'red': {'x': 0.675, 'y': 0.322, 'hue': 0},
    'green': {'x': 0.409, 'y': 0.518, 'hue': 100},
    'blue': {'x': 0.167, 'y': 0.04, 'hue': 184},
    'name': 'B'
}
gamut_C_coords = {
    'red': {'x': 0.692, 'y': 0.308, 'hue': 0},
    'green': {'x': 0.17, 'y': 0.7, 'hue': 100},
    'blue': {'x': 0.153, 'y': 0.048, 'hue': 184},
    'name': 'C'
}


def _build_gamut(coords):
    """Construct a Gamut from one of the corner-point tables above."""
    return Gamut(coords['name'],
                 coords['red']['x'], coords['red']['y'],
                 coords['green']['x'], coords['green']['y'],
                 coords['blue']['x'], coords['blue']['y'])


gamutA = _build_gamut(gamut_A_coords)
gamutB = _build_gamut(gamut_B_coords)
gamutC = _build_gamut(gamut_C_coords)
def get_gamut_by_name(name):
    """Map a gamut letter ('A', 'B' or 'C') to its Gamut instance.

    Returns None for any other value.
    """
    return {'A': gamutA, 'B': gamutB, 'C': gamutC}.get(name)
class Light(TestObj):
    """Model of a single Philips Hue light addressed through a Comms bridge.

    State reads go through the ``_data`` property, which caches the bridge's
    JSON dict for REFRESH_TIMEOUT seconds; setters PUT small JSON bodies to
    the light's /state endpoint.
    """

    def __init__(self, comms, id):
        assert (isinstance(comms, Comms))
        self._comms = comms
        self.id = id
        # Prime the state cache and remember when it was fetched.
        self.__data = self._refresh()
        self.refresh_time = time.time()
        # Resolve the color gamut from the model id; None when unknown.
        if self.model_id in ModelsGamut:
            self.gamut = get_gamut_by_name(ModelsGamut[self.model_id])
        else:
            self.gamut = None

    def __repr__(self):
        # str(self.id) keeps this working even when id is not a string.
        return '(' + str(self.id) + ') * ' + self.name + ' * ' + \
            ('On' if self.on else 'Off') + ' * bri = ' + str(self.bri)

    @property
    def _data(self):
        # Re-fetch from the bridge when the cached copy is older than
        # REFRESH_TIMEOUT seconds.  (The former ``refresh=False`` parameter
        # was unreachable dead code: property getters cannot take arguments.)
        if time.time() - self.refresh_time >= REFRESH_TIMEOUT:
            self.__data = self._refresh()
            self.refresh_time = time.time()
        return self.__data

    @property
    def name(self):
        """User-visible light name as stored on the bridge."""
        return self._data['name']

    @property
    def on(self):
        """True when the light is switched on."""
        return self._data['state']['on']

    @property
    def bri(self):
        """Current brightness value."""
        return self._data['state']['bri']

    @property
    def alert(self):
        """Current alert mode."""
        return self._data['state']['alert']

    @property
    def reachable(self):
        """True when the bridge can reach the light."""
        return self._data['state']['reachable']

    @property
    def type(self):
        """Bridge device-type string (e.g. 'Color light')."""
        return self._data['type']

    @property
    def model_id(self):
        """Hardware model identifier (key into ModelsGamut)."""
        return self._data['modelid']

    @property
    def manufacturer_name(self):
        """Manufacturer string reported by the bridge."""
        return self._data['manufacturername']

    @property
    def unique_id(self):
        """Globally unique device identifier."""
        return self._data['uniqueid']

    @property
    def sw_version(self):
        """Firmware version reported by the bridge."""
        return self._data['swversion']

    @classmethod
    def _scan(cls, comms, color=None):
        """Build one instance of ``cls`` per matching light in the setup.

        :param color: bridge 'type' string to filter on (e.g. 'Color light');
            None matches every light.
        """
        # Proper list comprehension instead of the original comprehension
        # that was evaluated only for its append() side effects.
        lights = comms.get('lights/')
        return [cls(comms, light_id)
                for light_id, info in lights.items()
                if color is None or info['type'] == color]

    def _adapt_name(self):
        """Return the light's name with spaces removed (attribute-safe)."""
        return self.name.replace(' ', '')

    def _refresh(self):
        """Fetch this light's current state dict from the bridge."""
        return self._comms.get('lights/' + str(self.id))

    def _force_refresh(self):
        # Backdate the cache timestamp so the next _data access re-fetches.
        self.refresh_time = time.time() - REFRESH_TIMEOUT

    def turn_on(self):
        """Switch the light on."""
        self._comms.put('lights/' + str(self.id) + '/state', '{"on":true}')

    def turn_off(self):
        """Switch the light off."""
        self._comms.put('lights/' + str(self.id) + '/state', '{"on":false}')

    def set_bri(self, bri):
        """Set the brightness value on the bridge."""
        self._comms.put('lights/' + str(self.id) + '/state', '{"bri":' + str(bri) + '}')

    def set_alert(self, alert):
        """Set the alert mode on the bridge."""
        self._comms.put('lights/' + str(self.id) + '/state', '{"alert":"' + str(alert) + '"}')
class DimmableLight(Light):
    """A dimmable white Hue light; exposes exactly the Light interface."""

    @classmethod
    def _scan(cls, comms, color='Dimmable light'):
        """Return one instance per 'Dimmable light' device in the setup."""
        return super(DimmableLight, cls)._scan(comms, color)
class ColorLight(Light):
    """A color-capable Hue light (without color-temperature support).

    Adds hue/sat/effect/xy/colormode state on top of Light, plus setters
    that PUT the corresponding JSON fragments to the light's state endpoint.
    """

    def __repr__(self):
        base = super(ColorLight, self).__repr__()
        return '{} * Gamut {} [x,y] = {} * sat = {} * hue = {}'.format(
            base, self.gamut.name, self.xy, self.sat, self.hue)

    @property
    def hue(self):
        """Hue angle reported by the bridge."""
        return self._data['state']['hue']

    @property
    def sat(self):
        """Saturation reported by the bridge."""
        return self._data['state']['sat']

    @property
    def effect(self):
        """Current dynamic effect."""
        return self._data['state']['effect']

    @property
    def xy(self):
        """Current CIE [x, y] coordinates."""
        return self._data['state']['xy']

    @property
    def colormode(self):
        """How the current color was set ('hs' or 'xy')."""
        return self._data['state']['colormode']

    @classmethod
    def _scan(cls, comms, color='Color light'):
        """Return every plain 'Color light' device in the setup."""
        return super(ColorLight, cls)._scan(comms, color)

    def set_hue(self, hue):
        """Set the hue angle on the bridge."""
        self._comms.put('lights/{}/state'.format(self.id), '{{"hue":{}}}'.format(hue))

    def set_sat(self, sat):
        """Set the saturation on the bridge."""
        self._comms.put('lights/{}/state'.format(self.id), '{{"sat":{}}}'.format(sat))

    def set_effect(self, effect):
        """Start or stop a dynamic effect on the bridge."""
        self._comms.put('lights/{}/state'.format(self.id), '{{"effect":"{}"}}'.format(effect))

    def set_xy(self, x, y):
        """Set the CIE x/y coordinates on the bridge."""
        self._comms.put('lights/{}/state'.format(self.id), '{{"xy":[{},{}]}}'.format(x, y))

    def set_color(self, red, green, blue):
        """Approximate an RGB color inside this light's gamut and apply it."""
        if self.gamut is None:
            print('Model id not found. Cannot set color !!!')
            return
        cx, cy, brightness = self.gamut.get_xy_and_bri_from_rgb(red, green, blue)
        self.set_xy(cx, cy)
        self.set_bri(brightness)
class ExtendedColorLight(ColorLight):
    """A color Hue light that additionally supports color temperature."""

    def __repr__(self):
        return '{} * ct = {}'.format(
            super(ExtendedColorLight, self).__repr__(), self.ct)

    @property
    def ct(self):
        """Mired color temperature reported by the bridge."""
        return self._data['state']['ct']

    @classmethod
    def _scan(cls, comms, color='Extended color light'):
        """Return every 'Extended color light' device in the setup."""
        return super(ExtendedColorLight, cls)._scan(comms, color)

    def set_ct(self, ct):
        """Set the Mired color temperature on the bridge."""
        self._comms.put('lights/{}/state'.format(self.id), '{{"ct":{}}}'.format(ct))
class Lights(UserObj, TestObj):
    """Aggregate controller for every light in the setup.

    Acts as a mapping of light name -> light object (via UserObj) and also
    exposes each light as an attribute named after it (spaces removed).
    Every public light method (turn_on, set_bri, ...) is mirrored here as a
    broadcast callable that invokes it on each light supporting it, so
    ``bridge.lights.turn_on()`` turns on all lights at once.
    """

    def __init__(self, comms):
        # Discover every light, one scan per capability class.
        all_lights = (DimmableLight._scan(comms)
                      + ColorLight._scan(comms)
                      + ExtendedColorLight._scan(comms))
        # Mapping interface: index by the bridge-assigned light name.
        self.set_obj({light.name: light for light in all_lights})
        # Attribute interface: one member per light.  (A plain loop, not a
        # throwaway list comprehension used only for its side effects.)
        for light in all_lights:
            setattr(self, light._adapt_name(), light)
        # Collect the names of all public callables across all lights.
        method_names = {name
                        for light in all_lights
                        for name in dir(light)
                        if not name.startswith('_')
                        and callable(getattr(light, name))}
        # Install one broadcast callable per method name; lights that do not
        # support a method (e.g. set_color on white lights) are skipped.
        for method_name in method_names:
            bound = [getattr(light, method_name)
                     for light in all_lights
                     if method_name in dir(light)]
            setattr(self, method_name, CallableObj(bound))

    def __repr__(self):
        return ''.join(str(light) + '\n' for light in self.values())
| true | true |
1c32bc3560154a4414ff7fb012ff5c5d11236aac | 3,673 | py | Python | FluentPython/concurrency_with_asyncio/flag2_asyncio.py | xu6148152/Binea_Python_Project | d943eb5f4685d08f080b372dcf1a7cbd5d63efed | [
"MIT"
] | null | null | null | FluentPython/concurrency_with_asyncio/flag2_asyncio.py | xu6148152/Binea_Python_Project | d943eb5f4685d08f080b372dcf1a7cbd5d63efed | [
"MIT"
] | null | null | null | FluentPython/concurrency_with_asyncio/flag2_asyncio.py | xu6148152/Binea_Python_Project | d943eb5f4685d08f080b372dcf1a7cbd5d63efed | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- encoding: utf-8 -*-
import asyncio
import collections
import aiohttp
from aiohttp import web
import tqdm
from concurrency_futures.flags2_common import main, HTTPStatus, Result, save_flag
# default set low to avoid errors from remote site, such as
# 503 - Service Temporarily Unavailable
DEFAULT_CONCUR_REQ = 5
# absolute ceiling on concurrent requests, handed to flags2_common.main
MAX_CONCUR_REQ = 1000
class FetchError(Exception):
    """Wraps any download failure, remembering the country code involved.

    The original exception travels along as ``__cause__`` because callers
    raise this with ``raise FetchError(cc) from exc``.
    """

    def __init__(self, country_code):
        # Forward to Exception so args/str() carry the country code
        # (the original skipped this, leaving args empty).
        super().__init__(country_code)
        self.country_code = country_code
@asyncio.coroutine
def get_flag(base_url, cc):
    """Download the flag GIF for country code *cc*; return the image bytes.

    Status handling (404 -> HTTPNotFound, other errors) happens inside
    http_get.  The commented-out inline implementation it replaced has been
    removed as dead code.
    """
    url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
    return (yield from http_get(url))
@asyncio.coroutine
def download_one(cc, base_url, semaphore, verbose):
    """Download one flag image plus its country name and save the file.

    The shared semaphore bounds the number of in-flight HTTP requests; it is
    held only while a request is actually running.  A 404 yields a
    not_found Result; any other failure is re-raised as FetchError carrying
    the country code (original error chained as __cause__).
    """
    try:
        with (yield from semaphore):
            image = yield from get_flag(base_url, cc)
        with (yield from semaphore):
            country = yield from get_country(base_url, cc)
    except web.HTTPNotFound:
        status = HTTPStatus.not_found
        msg = 'not found'
    except Exception as exc:
        raise FetchError(cc) from exc
    else:
        country = country.replace(' ', '_')
        filename = '{}-{}.gif'.format(country, cc)
        loop = asyncio.get_event_loop()
        # run_in_executor keeps the blocking file write off the event loop;
        # yield from the returned future so the save completes (and any
        # error surfaces) before we report success.  The original fired and
        # forgot the future, silently swallowing write failures.
        yield from loop.run_in_executor(None, save_flag, image, filename)
        status = HTTPStatus.ok
        msg = 'OK'
    if verbose and msg:
        print(cc, msg)
    return Result(status, cc)
@asyncio.coroutine
def downloader_coro(cc_list, base_url, verbose, concur_req):
    """Download all flags; return a Counter of HTTPStatus results.

    The original awaited the coroutine list one element at a time, which
    serialized the downloads and made the semaphore pointless (and left the
    imported tqdm unused).  asyncio.as_completed schedules them all and
    yields futures in completion order, restoring real concurrency.
    """
    counter = collections.Counter()
    semaphore = asyncio.Semaphore(concur_req)
    to_do = [download_one(cc, base_url, semaphore, verbose)
             for cc in sorted(cc_list)]
    to_do_iter = asyncio.as_completed(to_do)
    if not verbose:
        # Show a progress bar while results trickle in.
        to_do_iter = tqdm.tqdm(to_do_iter, total=len(cc_list))
    for future in to_do_iter:
        try:
            res = yield from future
        except FetchError as exc:
            country_code = exc.country_code
            try:
                error_msg = exc.__cause__.args[0]
            except IndexError:
                # The chained exception carried no message; fall back to
                # its class name.
                error_msg = exc.__cause__.__class__.__name__
            if verbose and error_msg:
                msg = '*** Error for {}: {}'
                print(msg.format(country_code, error_msg))
            status = HTTPStatus.error
        else:
            status = res.status
        counter[status] += 1
    return counter
def download_many(cc_list, base_url, verbose, concur_req):
    """Blocking entry point: drive downloader_coro to completion on the
    event loop and return its Counter of results."""
    event_loop = asyncio.get_event_loop()
    result_counts = event_loop.run_until_complete(
        downloader_coro(cc_list, base_url, verbose, concur_req))
    event_loop.close()
    return result_counts
@asyncio.coroutine
def http_get(url):
    """GET *url*; return parsed JSON or raw bytes depending on the response.

    Raises web.HTTPNotFound on 404 and wraps any other non-200 status in an
    aiohttp.ServerConnectionError.
    """
    res = yield from aiohttp.request('GET', url)
    if res.status == 200:
        # Choose the JSON parser when the Content-type header says so,
        # falling back to the URL suffix when the header is absent.
        ctype = res.headers.get('Content-type', '').lower()
        if 'json' in ctype or url.endswith('json'):
            data = yield from res.json()
        else:
            data = yield from res.read()
        return data
    elif res.status == 404:
        raise web.HTTPNotFound()
    else:
        # NOTE(review): wrapping a bare Exception inside
        # ServerConnectionError looks like a stand-in for an aiohttp
        # HTTP-error type -- confirm against the aiohttp version in use.
        raise aiohttp.ServerConnectionError(Exception(code=res.status, message=res.reason, headers=res.headers))
@asyncio.coroutine
def get_country(base_url, cc):
    """Fetch the metadata JSON for country code *cc* and return its
    'country' field."""
    metadata = yield from http_get(
        '{}/{cc}/metadata.json'.format(base_url, cc=cc.lower()))
    return metadata['country']
if __name__ == '__main__':
    # Entry point: delegate to flags2_common.main with this module's
    # downloader and its concurrency bounds.
    main(download_many, DEFAULT_CONCUR_REQ, MAX_CONCUR_REQ)
| 29.620968 | 117 | 0.642254 |
import asyncio
import collections
import aiohttp
from aiohttp import web
import tqdm
from concurrency_futures.flags2_common import main, HTTPStatus, Result, save_flag
# default kept low to avoid errors from the remote site (e.g. 503)
DEFAULT_CONCUR_REQ = 5
# absolute ceiling on concurrent requests, handed to flags2_common.main
MAX_CONCUR_REQ = 1000
class FetchError(Exception):
    """Wraps any download failure, remembering the country code involved.

    The original exception travels along as ``__cause__`` because callers
    raise this with ``raise FetchError(cc) from exc``.
    """

    def __init__(self, country_code):
        # Forward to Exception so args/str() carry the country code
        # (the original skipped this, leaving args empty).
        super().__init__(country_code)
        self.country_code = country_code
@asyncio.coroutine
def get_flag(base_url, cc):
    """Download the flag GIF for country code *cc*; return the image bytes."""
    flag_url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
    return (yield from http_get(flag_url))
@asyncio.coroutine
def download_one(cc, base_url, semaphore, verbose):
    """Download one flag image plus its country name and save the file.

    The shared semaphore bounds the number of in-flight HTTP requests; it is
    held only while a request is actually running.  A 404 yields a
    not_found Result; any other failure is re-raised as FetchError carrying
    the country code (original error chained as __cause__).
    """
    try:
        with (yield from semaphore):
            image = yield from get_flag(base_url, cc)
        with (yield from semaphore):
            country = yield from get_country(base_url, cc)
    except web.HTTPNotFound:
        status = HTTPStatus.not_found
        msg = 'not found'
    except Exception as exc:
        raise FetchError(cc) from exc
    else:
        country = country.replace(' ', '_')
        filename = '{}-{}.gif'.format(country, cc)
        loop = asyncio.get_event_loop()
        # run_in_executor keeps the blocking file write off the event loop;
        # yield from the returned future so the save completes (and any
        # error surfaces) before we report success.  The original fired and
        # forgot the future, silently swallowing write failures.
        yield from loop.run_in_executor(None, save_flag, image, filename)
        status = HTTPStatus.ok
        msg = 'OK'
    if verbose and msg:
        print(cc, msg)
    return Result(status, cc)
@asyncio.coroutine
def downloader_coro(cc_list, base_url, verbose, concur_req):
    """Download all flags; return a Counter of HTTPStatus results.

    The original awaited the coroutine list one element at a time, which
    serialized the downloads and made the semaphore pointless (and left the
    imported tqdm unused).  asyncio.as_completed schedules them all and
    yields futures in completion order, restoring real concurrency.
    """
    counter = collections.Counter()
    semaphore = asyncio.Semaphore(concur_req)
    to_do = [download_one(cc, base_url, semaphore, verbose)
             for cc in sorted(cc_list)]
    to_do_iter = asyncio.as_completed(to_do)
    if not verbose:
        # Show a progress bar while results trickle in.
        to_do_iter = tqdm.tqdm(to_do_iter, total=len(cc_list))
    for future in to_do_iter:
        try:
            res = yield from future
        except FetchError as exc:
            country_code = exc.country_code
            try:
                error_msg = exc.__cause__.args[0]
            except IndexError:
                # The chained exception carried no message; fall back to
                # its class name.
                error_msg = exc.__cause__.__class__.__name__
            if verbose and error_msg:
                msg = '*** Error for {}: {}'
                print(msg.format(country_code, error_msg))
            status = HTTPStatus.error
        else:
            status = res.status
        counter[status] += 1
    return counter
def download_many(cc_list, base_url, verbose, concur_req):
    """Blocking entry point: drive downloader_coro to completion on the
    event loop and return its Counter of results."""
    event_loop = asyncio.get_event_loop()
    result_counts = event_loop.run_until_complete(
        downloader_coro(cc_list, base_url, verbose, concur_req))
    event_loop.close()
    return result_counts
@asyncio.coroutine
def http_get(url):
    """GET *url*; return parsed JSON or raw bytes depending on the response.

    Raises web.HTTPNotFound on 404 and wraps any other non-200 status in an
    aiohttp.ServerConnectionError.
    """
    res = yield from aiohttp.request('GET', url)
    if res.status == 200:
        # Choose the JSON parser when the Content-type header says so,
        # falling back to the URL suffix when the header is absent.
        ctype = res.headers.get('Content-type', '').lower()
        if 'json' in ctype or url.endswith('json'):
            data = yield from res.json()
        else:
            data = yield from res.read()
        return data
    elif res.status == 404:
        raise web.HTTPNotFound()
    else:
        # NOTE(review): wrapping a bare Exception inside
        # ServerConnectionError looks like a stand-in for an aiohttp
        # HTTP-error type -- confirm against the aiohttp version in use.
        raise aiohttp.ServerConnectionError(Exception(code=res.status, message=res.reason, headers=res.headers))
@asyncio.coroutine
def get_country(base_url, cc):
    """Fetch the metadata JSON for country code *cc* and return its
    'country' field."""
    metadata = yield from http_get(
        '{}/{cc}/metadata.json'.format(base_url, cc=cc.lower()))
    return metadata['country']
if __name__ == '__main__':
    # Entry point: delegate to flags2_common.main with this module's
    # downloader and its concurrency bounds.
    main(download_many, DEFAULT_CONCUR_REQ, MAX_CONCUR_REQ)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.