hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c2fee415938db09e076fa4c0900a5cb1d183bbb | 34 | py | Python | 5.py | eliascmaciiel/Exercicios_LP_1B | f56d49cde772bbc3fe9e205693fbe2c5612eae49 | [
"MIT"
] | null | null | null | 5.py | eliascmaciiel/Exercicios_LP_1B | f56d49cde772bbc3fe9e205693fbe2c5612eae49 | [
"MIT"
] | null | null | null | 5.py | eliascmaciiel/Exercicios_LP_1B | f56d49cde772bbc3fe9e205693fbe2c5612eae49 | [
"MIT"
] | null | null | null | digitado = input("Digite algo.")
| 11.333333 | 32 | 0.676471 | digitado = input("Digite algo.")
| true | true |
1c2feeb3159d965187d6337fb06614b6aae8cdef | 1,355 | py | Python | runtime/image_classification/models/vgg16/gpus=16_straight/stage8.py | NestLakerJasonLIN/pipedream | f50827f2e28cbdbd82a4ea686c0498272b1460d6 | [
"MIT"
] | 273 | 2019-08-31T14:12:11.000Z | 2022-03-05T13:34:25.000Z | runtime/image_classification/models/vgg16/gpus=16_straight/stage8.py | albertsh10/pipedream | cad624f79a71f44ba79099f0c38321347b13e5c2 | [
"MIT"
] | 67 | 2019-09-19T15:36:59.000Z | 2022-01-13T09:11:54.000Z | runtime/image_classification/models/vgg16/gpus=16_straight/stage8.py | albertsh10/pipedream | cad624f79a71f44ba79099f0c38321347b13e5c2 | [
"MIT"
] | 100 | 2019-09-16T20:59:14.000Z | 2022-03-23T12:56:56.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import torch
class Stage8(torch.nn.Module):
    """Pipeline stage 8 of a VGG16 model split across 16 GPUs.

    Applies ReLU -> 2x2 max-pool -> 3x3 conv (128 -> 256 channels) -> ReLU.
    """

    def __init__(self):
        super(Stage8, self).__init__()
        # Layers are kept as individually numbered attributes (layer1..layer4)
        # because the pipeline runtime addresses them by name.
        self.layer1 = torch.nn.ReLU(inplace=True)
        self.layer2 = torch.nn.MaxPool2d(kernel_size=2, stride=2, padding=0,
                                         dilation=1, ceil_mode=False)
        self.layer3 = torch.nn.Conv2d(128, 256, kernel_size=(3, 3),
                                      stride=(1, 1), padding=(1, 1))
        self.layer4 = torch.nn.ReLU(inplace=True)
        self._initialize_weights()

    def forward(self, input0):
        """Run the stage on *input0* and return the activated feature map."""
        # Clone the incoming tensor so the in-place ReLU cannot modify the
        # buffer handed over by the previous pipeline stage.
        x = input0.clone()
        for layer in (self.layer1, self.layer2, self.layer3, self.layer4):
            x = layer(x)
        return x

    def _initialize_weights(self):
        """Kaiming-init conv layers; unit/zero-init norm and linear layers."""
        for module in self.modules():
            if isinstance(module, torch.nn.Conv2d):
                torch.nn.init.kaiming_normal_(module.weight, mode='fan_out',
                                              nonlinearity='relu')
                if module.bias is not None:
                    torch.nn.init.constant_(module.bias, 0)
            elif isinstance(module, torch.nn.BatchNorm2d):
                torch.nn.init.constant_(module.weight, 1)
                torch.nn.init.constant_(module.bias, 0)
            elif isinstance(module, torch.nn.Linear):
                torch.nn.init.normal_(module.weight, 0, 0.01)
                torch.nn.init.constant_(module.bias, 0)
| 37.638889 | 105 | 0.597048 |
import torch
class Stage8(torch.nn.Module):
def __init__(self):
super(Stage8, self).__init__()
self.layer1 = torch.nn.ReLU(inplace=True)
self.layer2 = torch.nn.MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
self.layer3 = torch.nn.Conv2d(128, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
self.layer4 = torch.nn.ReLU(inplace=True)
self._initialize_weights()
def forward(self, input0):
out0 = input0.clone()
out1 = self.layer1(out0)
out2 = self.layer2(out1)
out3 = self.layer3(out2)
out4 = self.layer4(out3)
return out4
def _initialize_weights(self):
for m in self.modules():
if isinstance(m, torch.nn.Conv2d):
torch.nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
if m.bias is not None:
torch.nn.init.constant_(m.bias, 0)
elif isinstance(m, torch.nn.BatchNorm2d):
torch.nn.init.constant_(m.weight, 1)
torch.nn.init.constant_(m.bias, 0)
elif isinstance(m, torch.nn.Linear):
torch.nn.init.normal_(m.weight, 0, 0.01)
torch.nn.init.constant_(m.bias, 0)
| true | true |
1c2ff0b065e5812246df47b329a55dd1d2ed290d | 1,912 | py | Python | data_api.py | samarthbhargav/pytorch-vision-language | 625e35c2e1945afa89108e20596664d66198ce5b | [
"MIT"
] | 1 | 2020-11-19T08:32:06.000Z | 2020-11-19T08:32:06.000Z | data_api.py | samarthbhargav/pytorch-vision-language | 625e35c2e1945afa89108e20596664d66198ce5b | [
"MIT"
] | 1 | 2018-06-21T13:15:37.000Z | 2018-06-21T13:15:37.000Z | data_api.py | samarthbhargav/pytorch-vision-language | 625e35c2e1945afa89108e20596664d66198ce5b | [
"MIT"
] | null | null | null | import sys, os
import codecs
import json
import pickle as pkl
from collections import defaultdict
import random
class DataApi:
    """Read-only access to the CUB test split.

    Loads image records, captions and a per-class index from
    ``descriptions_bird.test.fg.json`` under *location*.
    """

    # TODO figure out how to do this for test images
    def __init__(self, location="data/cub/"):
        self.description_test = self.get_descriptions(
            os.path.join(location, "descriptions_bird.test.fg.json")
        )
        self.data = []                    # per-image record dicts
        self.classes = defaultdict(list)  # class label -> indices into data
        self.images = {}
        # Map image_id -> caption for O(1) lookup while building records.
        annotations = {
            annotation["image_id"]: annotation["caption"]
            for annotation in self.description_test["annotations"]
        }
        for index, image in enumerate(self.description_test["images"]):
            # ids look like '<num>.<ClassLabel>/<file>'; extract the label.
            class_label = image["id"].split(".")[1].split("/")[0]
            self.classes[class_label].append(index)
            self.data.append(
                {
                    "class_label": class_label,
                    "id": image["id"],
                    "path": os.path.join(location, "images", image["file_name"]),
                    "caption": annotations[image["id"]],
                }
            )
            assert os.path.exists(self.data[-1]["path"])

    def sample_images(self, n):
        """Return *n* records drawn uniformly at random (with replacement)."""
        return [random.choice(self.data).copy() for _ in range(n)]

    def get_image(self, image_id):
        """Return a copy of the record whose ``id`` equals *image_id*.

        Raises:
            KeyError: if no record matches *image_id*.  (Previously a missing
                id surfaced as an ``AttributeError`` on ``None.copy()``.)
        """
        img = next((img for img in self.data if img["id"] == image_id), None)
        if img is None:
            raise KeyError(image_id)
        # Return a copy so callers cannot mutate the cached record.
        return img.copy()

    def get_descriptions(self, path):
        """Load and return the JSON document at *path* (UTF-8)."""
        with codecs.open(path, "r", "utf-8") as reader:
            return json.load(reader)

    def get_classes(self):
        """Return the list of known class labels."""
        return [k for k in self.classes.keys()]

    def sample_class(self, klass):
        """Return a copy of a random record belonging to class *klass*."""
        idx = random.choice(self.classes[klass])
        return self.data[idx].copy()
| 30.349206 | 84 | 0.566946 | import sys, os
import codecs
import json
import pickle as pkl
from collections import defaultdict
import random
class DataApi:
def __init__(self, location="data/cub/"):
self.description_test = self.get_descriptions(
os.path.join(location, "descriptions_bird.test.fg.json")
)
self.data = []
self.classes = defaultdict(list)
self.images = {}
annotations = {}
for annotation in self.description_test["annotations"]:
annotations[annotation["image_id"]] = annotation["caption"]
for index, image in enumerate(self.description_test["images"]):
class_label = image["id"].split(".")[1].split("/")[0]
self.classes[class_label].append(index)
image_index = "_".join(image["file_name"].split("/")[1].split("_")[:-1])
self.data.append(
{
"class_label": class_label,
"id": image["id"],
"path": os.path.join(location, "images", image["file_name"]),
"caption": annotations[image["id"]],
}
)
assert os.path.exists(self.data[-1]["path"])
def sample_images(self, n):
images = []
for _ in range(n):
images.append(random.choice(self.data).copy())
return images
def get_image(self, image_id):
img = next((img for img in self.data if img["id"] == image_id), None)
return img.copy()
def get_descriptions(self, path):
with codecs.open(path, "r", "utf-8") as reader:
return json.load(reader)
def get_classes(self):
return [k for k in self.classes.keys()]
def sample_class(self, klass):
idx = random.choice(self.classes[klass])
return self.data[idx].copy()
| true | true |
1c2ff0d6e6322701a08acaff4002e86c28914868 | 186 | py | Python | frappe/patches/v5_0/modify_session.py | Nxweb-in/frappe | 56b3eb52bf56dd71bee29fde3ed28ed9c6d15947 | [
"MIT"
] | 1 | 2021-06-03T07:04:48.000Z | 2021-06-03T07:04:48.000Z | frappe/patches/v5_0/modify_session.py | Nxweb-in/frappe | 56b3eb52bf56dd71bee29fde3ed28ed9c6d15947 | [
"MIT"
] | null | null | null | frappe/patches/v5_0/modify_session.py | Nxweb-in/frappe | 56b3eb52bf56dd71bee29fde3ed28ed9c6d15947 | [
"MIT"
] | null | null | null |
import frappe
def execute():
    """Patch: add a `device` column to tabSessions if it is missing."""
    existing = frappe.db.get_table_columns("Sessions")
    if "device" in existing:
        return
    # New sessions default to 'desktop'.
    frappe.db.sql(
        "alter table tabSessions add column `device` varchar(255) default 'desktop'"
    )
| 26.571429 | 93 | 0.747312 |
import frappe
def execute():
if "device" not in frappe.db.get_table_columns("Sessions"):
frappe.db.sql("alter table tabSessions add column `device` varchar(255) default 'desktop'")
| true | true |
1c2ff1812d83c0c212d78f3ce04388489258502c | 34,819 | py | Python | unittests/test_pipeline.py | FrancisCrickInstitute/reframe | aec9ab794342de3f813c6e540d978e9a8d0e9e0a | [
"BSD-3-Clause"
] | null | null | null | unittests/test_pipeline.py | FrancisCrickInstitute/reframe | aec9ab794342de3f813c6e540d978e9a8d0e9e0a | [
"BSD-3-Clause"
] | null | null | null | unittests/test_pipeline.py | FrancisCrickInstitute/reframe | aec9ab794342de3f813c6e540d978e9a8d0e9e0a | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2016-2020 Swiss National Supercomputing Centre (CSCS/ETH Zurich)
# ReFrame Project Developers. See the top-level LICENSE file for details.
#
# SPDX-License-Identifier: BSD-3-Clause
import os
import pathlib
import pytest
import re
import reframe as rfm
import reframe.core.runtime as rt
import reframe.utility.os_ext as os_ext
import reframe.utility.sanity as sn
import unittests.fixtures as fixtures
from reframe.core.exceptions import (BuildError, PipelineError, ReframeError,
ReframeSyntaxError, PerformanceError,
SanityError)
from reframe.frontend.loader import RegressionCheckLoader
from unittests.resources.checks.hellocheck import HelloTest
def _run(test, partition, prgenv):
test.setup(partition, prgenv)
test.compile()
test.compile_wait()
test.run()
test.wait()
test.check_sanity()
test.check_performance()
test.cleanup(remove_files=True)
def load_test(testfile):
    """Load all regression checks defined in *testfile*."""
    search_path = ['unittests/resources/checks']
    loader = RegressionCheckLoader(search_path)
    return loader.load_from_file(testfile)
@pytest.fixture
def temp_runtime(tmp_path):
    # Factory fixture: returns a callable that yields a temporary ReFrame
    # runtime configured from `config_file`, with the systems prefix
    # redirected under pytest's tmp_path.
    def _temp_runtime(config_file, system=None, options={}):
        # NOTE(review): the mutable default `options={}` is shared across
        # calls and is mutated here — confirm no cross-test leakage.
        options.update({'systems/prefix': str(tmp_path)})
        with rt.temp_runtime(config_file, system, options):
            yield rt.runtime()
    yield _temp_runtime
@pytest.fixture
def generic_system(temp_runtime):
    # Temporary runtime pinned to the 'generic' system of the test config.
    runtime_gen = temp_runtime(fixtures.TEST_CONFIG_FILE, 'generic')
    yield from runtime_gen
@pytest.fixture
def testsys_system(temp_runtime):
    # Temporary runtime pinned to the 'testsys' system of the test config.
    runtime_gen = temp_runtime(fixtures.TEST_CONFIG_FILE, 'testsys')
    yield from runtime_gen
@pytest.fixture
def user_system(temp_runtime):
    # Runtime fixture for the user-provided configuration, if one is set.
    if fixtures.USER_CONFIG_FILE:
        yield from temp_runtime(fixtures.USER_CONFIG_FILE,
                                fixtures.USER_SYSTEM)
    else:
        # NOTE(review): this yields the `generic_system` fixture *function*
        # object, not an initialized runtime — presumably
        # `yield from generic_system(temp_runtime)` (or requesting the
        # fixture) was intended; confirm whether any test hits this branch.
        yield generic_system
@pytest.fixture
def hellotest():
    # A fresh HelloTest instance loaded from the canonical check file.
    checks = load_test('unittests/resources/checks/hellocheck.py')
    yield checks[0]
@pytest.fixture
def local_exec_ctx(generic_system):
partition = fixtures.partition_by_name('default')
environ = fixtures.environment_by_name('builtin-gcc', partition)
yield partition, environ
@pytest.fixture
def local_user_exec_ctx(user_system):
partition = fixtures.partition_by_scheduler('local')
if partition is None:
pytest.skip('no local jobs are supported')
try:
environ = partition.environs[0]
except IndexError:
pytest.skip('no environments configured for partition: %s' %
partition.fullname)
yield partition, environ
@pytest.fixture
def remote_exec_ctx(user_system):
partition = fixtures.partition_by_scheduler()
if partition is None:
pytest.skip('job submission not supported')
try:
environ = partition.environs[0]
except IndexError:
pytest.skip('no environments configured for partition: %s' %
partition.fullname)
yield partition, environ
@pytest.fixture
def remote_exec_ctx(user_system):
    # NOTE(review): this fixture is an exact duplicate of the
    # `remote_exec_ctx` defined immediately above and silently shadows it;
    # one of the two definitions can likely be removed.
    partition = fixtures.partition_by_scheduler()
    if partition is None:
        pytest.skip('job submission not supported')
    try:
        environ = partition.environs[0]
    except IndexError:
        pytest.skip('no environments configured for partition: %s' %
                    partition.fullname)
    yield partition, environ
@pytest.fixture
def container_remote_exec_ctx(remote_exec_ctx):
def _container_exec_ctx(platform):
partition = remote_exec_ctx[0]
if platform not in partition.container_environs.keys():
pytest.skip('%s is not configured on the system' % platform)
yield from remote_exec_ctx
return _container_exec_ctx
@pytest.fixture
def container_local_exec_ctx(local_user_exec_ctx):
def _container_exec_ctx(platform):
partition = local_user_exec_ctx[0]
if platform not in partition.container_environs.keys():
pytest.skip('%s is not configured on the system' % platform)
yield from local_user_exec_ctx
return _container_exec_ctx
def test_environ_setup(hellotest, local_exec_ctx):
    # The check's private variables must not leak into the parent process
    # environment after setup().
    hellotest.variables = {'_FOO_': '1', '_BAR_': '2'}
    hellotest.setup(*local_exec_ctx)
    for name in hellotest.variables:
        assert name not in os.environ
def test_hellocheck(hellotest, remote_exec_ctx):
_run(hellotest, *remote_exec_ctx)
def test_hellocheck_make(remote_exec_ctx):
test = load_test('unittests/resources/checks/hellocheck_make.py')[0]
_run(test, *remote_exec_ctx)
def test_hellocheck_local(hellotest, local_exec_ctx):
# Test also the prebuild/postbuild functionality
hellotest.prebuild_cmd = ['touch prebuild', 'mkdir prebuild_dir']
hellotest.postbuild_cmd = ['touch postbuild', 'mkdir postbuild_dir']
hellotest.keep_files = ['prebuild', 'postbuild',
'prebuild_dir', 'postbuild_dir']
# Force local execution of the test; just for testing .local
hellotest.local = True
_run(hellotest, *local_exec_ctx)
must_keep = [
hellotest.stdout.evaluate(),
hellotest.stderr.evaluate(),
hellotest.build_stdout.evaluate(),
hellotest.build_stderr.evaluate(),
hellotest.job.script_filename,
*hellotest.keep_files
]
for f in must_keep:
assert os.path.exists(os.path.join(hellotest.outputdir, f))
def test_hellocheck_local_prepost_run(hellotest, local_exec_ctx):
@sn.sanity_function
def stagedir(test):
return test.stagedir
# Test also the prebuild/postbuild functionality
hellotest.pre_run = ['echo prerun: `pwd`']
hellotest.post_run = ['echo postrun: `pwd`']
pre_run_path = sn.extractsingle(r'^prerun: (\S+)', hellotest.stdout, 1)
post_run_path = sn.extractsingle(r'^postrun: (\S+)', hellotest.stdout, 1)
hellotest.sanity_patterns = sn.all([
sn.assert_eq(stagedir(hellotest), pre_run_path),
sn.assert_eq(stagedir(hellotest), post_run_path),
])
_run(hellotest, *local_exec_ctx)
def test_run_only_sanity(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.RunOnlyRegressionTest):
def __init__(self):
self.executable = './hello.sh'
self.executable_opts = ['Hello, World!']
self.local = True
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
self.sanity_patterns = sn.assert_found(
r'Hello, World\!', self.stdout)
_run(MyTest(), *local_exec_ctx)
def test_run_only_no_srcdir(local_exec_ctx):
@fixtures.custom_prefix('foo/bar/')
class MyTest(rfm.RunOnlyRegressionTest):
def __init__(self):
self.executable = 'echo'
self.executable_opts = ['hello']
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
self.sanity_patterns = sn.assert_found(r'hello', self.stdout)
test = MyTest()
assert test.sourcesdir is None
_run(test, *local_exec_ctx)
def test_compile_only_failure(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.CompileOnlyRegressionTest):
def __init__(self):
self.sourcepath = 'compiler_failure.c'
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
test = MyTest()
test.setup(*local_exec_ctx)
test.compile()
with pytest.raises(BuildError):
test.compile_wait()
def test_compile_only_warning(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.RunOnlyRegressionTest):
def __init__(self):
self.build_system = 'SingleSource'
self.build_system.srcfile = 'compiler_warning.c'
self.build_system.cflags = ['-Wall']
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
self.sanity_patterns = sn.assert_found(r'warning', self.stderr)
_run(MyTest(), *local_exec_ctx)
def test_supports_system(hellotest, testsys_system):
hellotest.valid_systems = ['*']
assert hellotest.supports_system('gpu')
assert hellotest.supports_system('login')
assert hellotest.supports_system('testsys:gpu')
assert hellotest.supports_system('testsys:login')
hellotest.valid_systems = ['*:*']
assert hellotest.supports_system('gpu')
assert hellotest.supports_system('login')
assert hellotest.supports_system('testsys:gpu')
assert hellotest.supports_system('testsys:login')
hellotest.valid_systems = ['testsys']
assert hellotest.supports_system('gpu')
assert hellotest.supports_system('login')
assert hellotest.supports_system('testsys:gpu')
assert hellotest.supports_system('testsys:login')
hellotest.valid_systems = ['testsys:gpu']
assert hellotest.supports_system('gpu')
assert not hellotest.supports_system('login')
assert hellotest.supports_system('testsys:gpu')
assert not hellotest.supports_system('testsys:login')
hellotest.valid_systems = ['testsys:login']
assert not hellotest.supports_system('gpu')
assert hellotest.supports_system('login')
assert not hellotest.supports_system('testsys:gpu')
assert hellotest.supports_system('testsys:login')
hellotest.valid_systems = ['foo']
assert not hellotest.supports_system('gpu')
assert not hellotest.supports_system('login')
assert not hellotest.supports_system('testsys:gpu')
assert not hellotest.supports_system('testsys:login')
hellotest.valid_systems = ['*:gpu']
assert hellotest.supports_system('testsys:gpu')
assert hellotest.supports_system('foo:gpu')
assert not hellotest.supports_system('testsys:cpu')
assert not hellotest.supports_system('testsys:login')
hellotest.valid_systems = ['testsys:*']
assert hellotest.supports_system('testsys:login')
assert hellotest.supports_system('gpu')
assert not hellotest.supports_system('foo:gpu')
def test_supports_environ(hellotest, generic_system):
hellotest.valid_prog_environs = ['*']
assert hellotest.supports_environ('foo1')
assert hellotest.supports_environ('foo-env')
assert hellotest.supports_environ('*')
def test_sourcesdir_none(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.RegressionTest):
def __init__(self):
self.sourcesdir = None
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
with pytest.raises(ReframeError):
_run(MyTest(), *local_exec_ctx)
def test_sourcesdir_build_system(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.RegressionTest):
def __init__(self):
self.build_system = 'Make'
self.sourcepath = 'code'
self.executable = './code/hello'
self.valid_systems = ['*']
self.valid_prog_environs = ['*']
self.sanity_patterns = sn.assert_found(r'Hello, World\!',
self.stdout)
_run(MyTest(), *local_exec_ctx)
def test_sourcesdir_none_generated_sources(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.RegressionTest):
def __init__(self):
self.sourcesdir = None
self.prebuild_cmd = [
"printf '#include <stdio.h>\\n int main(){ "
"printf(\"Hello, World!\\\\n\"); return 0; }' > hello.c"
]
self.executable = './hello'
self.sourcepath = 'hello.c'
self.valid_systems = ['*']
self.valid_prog_environs = ['*']
self.sanity_patterns = sn.assert_found(r'Hello, World\!',
self.stdout)
_run(MyTest(), *local_exec_ctx)
def test_sourcesdir_none_compile_only(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.CompileOnlyRegressionTest):
def __init__(self):
self.sourcesdir = None
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
with pytest.raises(BuildError):
_run(MyTest(), *local_exec_ctx)
def test_sourcesdir_none_run_only(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.RunOnlyRegressionTest):
def __init__(self):
self.sourcesdir = None
self.executable = 'echo'
self.executable_opts = ["Hello, World!"]
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
self.sanity_patterns = sn.assert_found(r'Hello, World\!',
self.stdout)
_run(MyTest(), *local_exec_ctx)
def test_sourcepath_abs(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.CompileOnlyRegressionTest):
def __init__(self):
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
test = MyTest()
test.setup(*local_exec_ctx)
test.sourcepath = '/usr/src'
with pytest.raises(PipelineError):
test.compile()
def test_sourcepath_upref(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.CompileOnlyRegressionTest):
def __init__(self):
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
test = MyTest()
test.setup(*local_exec_ctx)
test.sourcepath = '../hellosrc'
with pytest.raises(PipelineError):
test.compile()
def test_extra_resources(testsys_system):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.local = True
@rfm.run_after('setup')
def set_resources(self):
test.extra_resources = {
'gpu': {'num_gpus_per_node': 2},
'datawarp': {'capacity': '100GB',
'stagein_src': test.stagedir}
}
test.job.options += ['--foo']
test = MyTest()
partition = fixtures.partition_by_name('gpu')
environ = partition.environment('builtin-gcc')
_run(test, partition, environ)
expected_job_options = {'--gres=gpu:2',
'#DW jobdw capacity=100GB',
'#DW stage_in source=%s' % test.stagedir,
'--foo'}
assert expected_job_options == set(test.job.options)
def test_setup_hooks(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.count = 0
@rfm.run_before('setup')
def prefoo(self):
assert self.current_environ is None
self.count += 1
@rfm.run_after('setup')
def postfoo(self):
assert self.current_environ is not None
self.count += 1
test = MyTest()
_run(test, *local_exec_ctx)
assert test.count == 2
def test_compile_hooks(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.count = 0
@rfm.run_before('compile')
def setflags(self):
self.count += 1
@rfm.run_after('compile')
def check_executable(self):
exec_file = os.path.join(self.stagedir, self.executable)
# Make sure that this hook is executed after compile_wait()
assert os.path.exists(exec_file)
test = MyTest()
_run(test, *local_exec_ctx)
assert test.count == 1
def test_run_hooks(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
@rfm.run_before('run')
def setflags(self):
self.post_run = ['echo hello > greetings.txt']
@rfm.run_after('run')
def check_executable(self):
outfile = os.path.join(self.stagedir, 'greetings.txt')
# Make sure that this hook is executed after wait()
assert os.path.exists(outfile)
_run(MyTest(), *local_exec_ctx)
def test_multiple_hooks(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.var = 0
@rfm.run_after('setup')
def x(self):
self.var += 1
@rfm.run_after('setup')
def y(self):
self.var += 1
@rfm.run_after('setup')
def z(self):
self.var += 1
test = MyTest()
_run(test, *local_exec_ctx)
assert test.var == 3
def test_stacked_hooks(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.var = 0
@rfm.run_before('setup')
@rfm.run_after('setup')
@rfm.run_after('compile')
def x(self):
self.var += 1
test = MyTest()
_run(test, *local_exec_ctx)
assert test.var == 3
def test_inherited_hooks(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class BaseTest(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.var = 0
@rfm.run_after('setup')
def x(self):
self.var += 1
class C(rfm.RegressionTest):
@rfm.run_before('run')
def y(self):
self.foo = 1
class DerivedTest(BaseTest, C):
@rfm.run_after('setup')
def z(self):
self.var += 1
class MyTest(DerivedTest):
pass
test = MyTest()
_run(test, *local_exec_ctx)
assert test.var == 2
assert test.foo == 1
def test_overriden_hooks(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class BaseTest(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.var = 0
self.foo = 0
@rfm.run_after('setup')
def x(self):
self.var += 1
@rfm.run_before('setup')
def y(self):
self.foo += 1
class DerivedTest(BaseTest):
@rfm.run_after('setup')
def x(self):
self.var += 5
class MyTest(DerivedTest):
@rfm.run_before('setup')
def y(self):
self.foo += 10
test = MyTest()
_run(test, *local_exec_ctx)
assert test.var == 5
assert test.foo == 10
def test_require_deps(local_exec_ctx):
import reframe.frontend.dependency as dependency
import reframe.frontend.executors as executors
@fixtures.custom_prefix('unittests/resources/checks')
class T0(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.x = 1
@fixtures.custom_prefix('unittests/resources/checks')
class T1(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.depends_on('T0')
@rfm.require_deps
def sety(self, T0):
self.y = T0().x + 1
@rfm.run_before('run')
@rfm.require_deps
def setz(self, T0):
self.z = T0().x + 2
cases = executors.generate_testcases([T0(), T1()])
deps = dependency.build_deps(cases)
for c in dependency.toposort(deps):
_run(*c)
for c in cases:
t = c.check
if t.name == 'T0':
assert t.x == 1
elif t.name == 'T1':
assert t.y == 2
assert t.z == 3
def test_regression_test_name():
    # A test's prefix defaults to its defining directory and its name is
    # derived from the qualified class name plus constructor arguments.
    class MyTest(rfm.RegressionTest):
        def __init__(self, a, b):
            self.a = a
            self.b = b

    test = MyTest(1, 2)
    expected_prefix = os.path.abspath(os.path.dirname(__file__))
    assert test.prefix == expected_prefix
    assert test.name == 'test_regression_test_name.<locals>.MyTest_1_2'
def test_strange_test_names():
class C:
def __init__(self, a):
self.a = a
def __repr__(self):
return 'C(%s)' % self.a
class MyTest(rfm.RegressionTest):
def __init__(self, a, b):
self.a = a
self.b = b
test = MyTest('(a*b+c)/12', C(33))
assert ('test_strange_test_names.<locals>.MyTest__a_b_c__12_C_33_' ==
test.name)
def test_name_user_inheritance():
class MyBaseTest(rfm.RegressionTest):
def __init__(self, a, b):
self.a = a
self.b = b
class MyTest(MyBaseTest):
def __init__(self):
super().__init__(1, 2)
test = MyTest()
assert 'test_name_user_inheritance.<locals>.MyTest' == test.name
def test_name_runonly_test():
class MyTest(rfm.RunOnlyRegressionTest):
def __init__(self, a, b):
self.a = a
self.b = b
test = MyTest(1, 2)
assert os.path.abspath(os.path.dirname(__file__)) == test.prefix
assert 'test_name_runonly_test.<locals>.MyTest_1_2' == test.name
def test_name_compileonly_test():
class MyTest(rfm.CompileOnlyRegressionTest):
def __init__(self, a, b):
self.a = a
self.b = b
test = MyTest(1, 2)
assert os.path.abspath(os.path.dirname(__file__)) == test.prefix
assert 'test_name_compileonly_test.<locals>.MyTest_1_2' == test.name
def test_registration_of_tests():
import sys
import unittests.resources.checks_unlisted.good as mod
checks = mod._rfm_gettests()
assert 13 == len(checks)
assert [mod.MyBaseTest(0, 0),
mod.MyBaseTest(0, 1),
mod.MyBaseTest(1, 0),
mod.MyBaseTest(1, 1),
mod.MyBaseTest(2, 0),
mod.MyBaseTest(2, 1),
mod.AnotherBaseTest(0, 0),
mod.AnotherBaseTest(0, 1),
mod.AnotherBaseTest(1, 0),
mod.AnotherBaseTest(1, 1),
mod.AnotherBaseTest(2, 0),
mod.AnotherBaseTest(2, 1),
mod.MyBaseTest(10, 20)] == checks
def _run_sanity(test, *exec_ctx, skip_perf=False):
test.setup(*exec_ctx)
test.check_sanity()
if not skip_perf:
test.check_performance()
@pytest.fixture
def dummy_gpu_exec_ctx(testsys_system):
partition = fixtures.partition_by_name('gpu')
environ = fixtures.environment_by_name('builtin-gcc', partition)
yield partition, environ
@pytest.fixture
def perf_file(tmp_path):
yield tmp_path / 'perf.out'
@pytest.fixture
def sanity_file(tmp_path):
yield tmp_path / 'sanity.out'
@pytest.fixture
def dummytest(testsys_system, perf_file, sanity_file):
class MyTest(rfm.RunOnlyRegressionTest):
def __init__(self):
self.perf_file = perf_file
self.sourcesdir = None
self.reference = {
'testsys': {
'value1': (1.4, -0.1, 0.1, None),
'value2': (1.7, -0.1, 0.1, None),
},
'testsys:gpu': {
'value3': (3.1, -0.1, 0.1, None),
}
}
self.perf_patterns = {
'value1': sn.extractsingle(
r'perf1 = (\S+)', perf_file, 1, float
),
'value2': sn.extractsingle(
r'perf2 = (\S+)', perf_file, 1, float
),
'value3': sn.extractsingle(
r'perf3 = (\S+)', perf_file, 1, float
)
}
self.sanity_patterns = sn.assert_found(
r'result = success', sanity_file
)
yield MyTest()
def test_sanity_success(dummytest, sanity_file, perf_file, dummy_gpu_exec_ctx):
sanity_file.write_text('result = success\n')
perf_file.write_text('perf1 = 1.3\n'
'perf2 = 1.8\n'
'perf3 = 3.3\n')
_run_sanity(dummytest, *dummy_gpu_exec_ctx)
def test_sanity_failure(dummytest, sanity_file, dummy_gpu_exec_ctx):
sanity_file.write_text('result = failure\n')
with pytest.raises(SanityError):
_run_sanity(dummytest, *dummy_gpu_exec_ctx, skip_perf=True)
def test_sanity_failure_noassert(dummytest, sanity_file, dummy_gpu_exec_ctx):
dummytest.sanity_patterns = sn.findall(r'result = success', sanity_file)
sanity_file.write_text('result = failure\n')
with pytest.raises(SanityError):
_run_sanity(dummytest, *dummy_gpu_exec_ctx, skip_perf=True)
def test_sanity_multiple_patterns(dummytest, sanity_file, dummy_gpu_exec_ctx):
sanity_file.write_text('result1 = success\n'
'result2 = success\n')
# Simulate a pure sanity test; reset the perf_patterns
dummytest.perf_patterns = None
dummytest.sanity_patterns = sn.assert_eq(
sn.count(sn.findall(r'result\d = success', sanity_file)), 2
)
_run_sanity(dummytest, *dummy_gpu_exec_ctx, skip_perf=True)
# Require more patterns to be present
dummytest.sanity_patterns = sn.assert_eq(
sn.count(sn.findall(r'result\d = success', sanity_file)), 3
)
with pytest.raises(SanityError):
_run_sanity(dummytest, *dummy_gpu_exec_ctx, skip_perf=True)
def test_sanity_multiple_files(dummytest, tmp_path, dummy_gpu_exec_ctx):
file0 = tmp_path / 'out1.txt'
file1 = tmp_path / 'out2.txt'
file0.write_text('result = success\n')
file1.write_text('result = success\n')
dummytest.sanity_patterns = sn.all([
sn.assert_found(r'result = success', file0),
sn.assert_found(r'result = success', file1)
])
_run_sanity(dummytest, *dummy_gpu_exec_ctx, skip_perf=True)
def test_performance_failure(dummytest, sanity_file,
perf_file, dummy_gpu_exec_ctx):
sanity_file.write_text('result = success\n')
perf_file.write_text('perf1 = 1.0\n'
'perf2 = 1.8\n'
'perf3 = 3.3\n')
with pytest.raises(PerformanceError):
_run_sanity(dummytest, *dummy_gpu_exec_ctx)
def test_reference_unknown_tag(dummytest, sanity_file,
                               perf_file, dummy_gpu_exec_ctx):
    """A reference tag without a matching perf pattern must raise SanityError."""
    sanity_file.write_text('result = success\n')
    perf_file.write_text(''.join(['perf1 = 1.3\n',
                                  'perf2 = 1.8\n',
                                  'perf3 = 3.3\n']))
    # 'foo' does not correspond to any key of dummytest.perf_patterns
    testsys_refs = {
        'value1': (1.4, -0.1, 0.1, None),
        'value2': (1.7, -0.1, 0.1, None),
        'foo': (3.1, -0.1, 0.1, None),
    }
    dummytest.reference = {'testsys': testsys_refs}
    with pytest.raises(SanityError):
        _run_sanity(dummytest, *dummy_gpu_exec_ctx)
def test_reference_unknown_system(dummytest, sanity_file,
                                  perf_file, dummy_gpu_exec_ctx):
    """References scoped to other partitions do not break a run on 'gpu'."""
    sanity_file.write_text('result = success\n')
    perf_file.write_text(''.join(['perf1 = 1.3\n',
                                  'perf2 = 1.8\n',
                                  'perf3 = 3.3\n']))
    # None of these scopes match the partition the test runs on
    login_refs = {
        'value1': (1.4, -0.1, 0.1, None),
        'value3': (3.1, -0.1, 0.1, None),
    }
    login2_refs = {
        'value2': (1.7, -0.1, 0.1, None)
    }
    dummytest.reference = {'testsys:login': login_refs,
                           'testsys:login2': login2_refs}
    _run_sanity(dummytest, *dummy_gpu_exec_ctx)
def test_reference_empty(dummytest, sanity_file,
                         perf_file, dummy_gpu_exec_ctx):
    """With an empty reference dict the performance stage must still pass."""
    sanity_file.write_text('result = success\n')
    perf_file.write_text(''.join(['perf1 = 1.3\n',
                                  'perf2 = 1.8\n',
                                  'perf3 = 3.3\n']))
    dummytest.reference = {}
    partition, environ = dummy_gpu_exec_ctx
    _run_sanity(dummytest, partition, environ)
def test_reference_default(dummytest, sanity_file,
                           perf_file, dummy_gpu_exec_ctx):
    """References under the '*' wildcard scope apply on any system."""
    sanity_file.write_text('result = success\n')
    perf_file.write_text(''.join(['perf1 = 1.3\n',
                                  'perf2 = 1.8\n',
                                  'perf3 = 3.3\n']))
    bounds = (-0.1, 0.1, None)
    dummytest.reference = {
        '*': {
            'value1': (1.4, *bounds),
            'value2': (1.7, *bounds),
            'value3': (3.1, *bounds),
        }
    }
    _run_sanity(dummytest, *dummy_gpu_exec_ctx)
def test_reference_tag_resolution(dummytest, sanity_file,
                                  perf_file, dummy_gpu_exec_ctx):
    """System-scoped references work together with the '*' fallback scope."""
    sanity_file.write_text('result = success\n')
    perf_file.write_text(''.join(['perf1 = 1.3\n',
                                  'perf2 = 1.8\n',
                                  'perf3 = 3.3\n']))
    testsys_refs = {
        'value1': (1.4, -0.1, 0.1, None),
        'value2': (1.7, -0.1, 0.1, None),
    }
    # value3 is only reachable through the wildcard scope
    fallback_refs = {
        'value3': (3.1, -0.1, 0.1, None),
    }
    dummytest.reference = {'testsys': testsys_refs, '*': fallback_refs}
    _run_sanity(dummytest, *dummy_gpu_exec_ctx)
def test_performance_invalid_value(dummytest, sanity_file,
                                   perf_file, dummy_gpu_exec_ctx):
    """A performance value that is not numeric must be rejected."""
    sanity_file.write_text('result = success\n')
    perf_file.write_text('perf1 = 1.3\n'
                         'perf2 = foo\n'
                         'perf3 = 3.3\n')

    def extract(patt, conv):
        return sn.extractsingle(patt, perf_file, 1, conv)

    # value2 is extracted as a string ('foo'), which cannot be checked
    # against a numeric reference
    dummytest.perf_patterns = {
        'value1': extract(r'perf1 = (\S+)', float),
        'value2': extract(r'perf2 = (\S+)', str),
        'value3': extract(r'perf3 = (\S+)', float),
    }
    with pytest.raises(SanityError, match='not a number'):
        _run_sanity(dummytest, *dummy_gpu_exec_ctx)
def test_performance_var_evaluation(dummytest, sanity_file,
                                    perf_file, dummy_gpu_exec_ctx):
    """All performance variables are evaluated even if an early one fails.

    The extraction function has a side effect (it appends the evaluated tag
    to a log file), so after ``check_performance()`` raises we can verify
    that every variable was still evaluated.
    """
    logfile = 'perf.log'

    @sn.sanity_function
    def extract_perf(patt, tag):
        val = sn.evaluate(
            sn.extractsingle(patt, perf_file, tag, float)
        )
        # Side effect: record that this variable was evaluated.  Evaluation
        # happens inside the stage directory, hence the relative path.
        # (Use the `logfile` variable instead of repeating the literal.)
        with open(logfile, 'a') as fp:
            fp.write('%s=%s' % (tag, val))

        return val

    sanity_file.write_text('result = success\n')
    perf_file.write_text('perf1 = 1.0\n'
                         'perf2 = 1.8\n'
                         'perf3 = 3.3\n')
    dummytest.perf_patterns = {
        'value1': extract_perf(r'perf1 = (?P<v1>\S+)', 'v1'),
        'value2': extract_perf(r'perf2 = (?P<v2>\S+)', 'v2'),
        'value3': extract_perf(r'perf3 = (?P<v3>\S+)', 'v3')
    }
    # perf1 == 1.0 is out of tolerance, so the performance check fails...
    with pytest.raises(PerformanceError):
        _run_sanity(dummytest, *dummy_gpu_exec_ctx)

    # ...but all three variables must nonetheless have been logged.  Use a
    # separate name for the absolute path so `logfile` (captured by the
    # deferred sanity function above) is never rebound.
    logpath = os.path.join(dummytest.stagedir, logfile)
    with open(logpath) as fp:
        log_output = fp.read()

    assert 'v1' in log_output
    assert 'v2' in log_output
    assert 'v3' in log_output
@pytest.fixture
def container_test(tmp_path):
    # Factory fixture: returns a callable that builds a run-only regression
    # test executing a few shell commands inside a container of the given
    # platform/image and sanity-checking their output.
    def _container_test(platform, image):
        @fixtures.custom_prefix(tmp_path)
        class ContainerTest(rfm.RunOnlyRegressionTest):
            def __init__(self):
                self.name = 'container_test'
                self.valid_prog_environs = ['*']
                self.valid_systems = ['*']
                self.container_platform = platform
                self.container_platform.image = image
                # Commands executed inside the container
                self.container_platform.commands = [
                    'pwd', 'ls', 'cat /etc/os-release'
                ]
                self.container_platform.workdir = '/workdir'
                # `foo` is created on the host stage directory, which is
                # expected to be mounted under the container workdir
                self.pre_run = ['touch foo']
                self.sanity_patterns = sn.all([
                    # `pwd` must report the container workdir
                    sn.assert_found(
                        r'^' + self.container_platform.workdir, self.stdout),
                    # `ls` must show the file created by pre_run
                    sn.assert_found(r'^foo', self.stdout),
                    # /etc/os-release must identify the ubuntu:18.04 image
                    sn.assert_found(
                        r'18\.04\.\d+ LTS \(Bionic Beaver\)', self.stdout),
                ])
        return ContainerTest()
    yield _container_test
def _cray_cle_version():
    """Return the Cray CLE release string, or ``None`` if it cannot be read."""
    completed = os_ext.run_command('cat /etc/opt/cray/release/cle-release')
    matched = re.match(r'^RELEASE=(\S+)', completed.stdout)
    return matched.group(1) if matched else None
def test_with_singularity(container_test, container_remote_exec_ctx):
    """Run the container test through Singularity (not supported on CLE6)."""
    cle_version = _cray_cle_version()
    if cle_version is not None and cle_version.startswith('6.0'):
        pytest.skip('test not supported on Cray CLE6')

    test = container_test('Singularity', 'docker://ubuntu:18.04')
    _run(test, *container_remote_exec_ctx('Singularity'))
def test_with_shifter(container_test, container_remote_exec_ctx):
    """Run the container test through the Shifter platform."""
    test = container_test('Shifter', 'ubuntu:18.04')
    _run(test, *container_remote_exec_ctx('Shifter'))
def test_with_sarus(container_test, container_remote_exec_ctx):
    """Run the container test through the Sarus platform."""
    test = container_test('Sarus', 'ubuntu:18.04')
    _run(test, *container_remote_exec_ctx('Sarus'))
def test_with_docker(container_test, container_local_exec_ctx):
    """Run the container test through the Docker platform (local only)."""
    test = container_test('Docker', 'ubuntu:18.04')
    _run(test, *container_local_exec_ctx('Docker'))
def test_unknown_container_platform(container_test, local_exec_ctx):
    """Requesting a nonexistent container platform must raise ValueError."""
    test = container_test('foo', 'ubuntu:18.04')
    with pytest.raises(ValueError):
        _run(test, *local_exec_ctx)
def test_not_configured_container_platform(container_test, local_exec_ctx):
    """Using a platform the partition has not configured must fail."""
    partition, environ = local_exec_ctx
    # Pick any supported platform that is missing from the partition config
    candidates = ['Docker', 'Singularity', 'Sarus', 'ShifterNG']
    platform = next(
        (cp for cp in candidates
         if cp not in partition.container_environs.keys()),
        None
    )
    if platform is None:
        pytest.skip('cannot find a supported platform that is not configured')

    with pytest.raises(PipelineError):
        _run(container_test(platform, 'ubuntu:18.04'), *local_exec_ctx)
| 31.827239 | 79 | 0.616502 |
import os
import pathlib
import pytest
import re
import reframe as rfm
import reframe.core.runtime as rt
import reframe.utility.os_ext as os_ext
import reframe.utility.sanity as sn
import unittests.fixtures as fixtures
from reframe.core.exceptions import (BuildError, PipelineError, ReframeError,
ReframeSyntaxError, PerformanceError,
SanityError)
from reframe.frontend.loader import RegressionCheckLoader
from unittests.resources.checks.hellocheck import HelloTest
def _run(test, partition, prgenv):
test.setup(partition, prgenv)
test.compile()
test.compile_wait()
test.run()
test.wait()
test.check_sanity()
test.check_performance()
test.cleanup(remove_files=True)
def load_test(testfile):
loader = RegressionCheckLoader(['unittests/resources/checks'])
return loader.load_from_file(testfile)
@pytest.fixture
def temp_runtime(tmp_path):
def _temp_runtime(config_file, system=None, options={}):
options.update({'systems/prefix': str(tmp_path)})
with rt.temp_runtime(config_file, system, options):
yield rt.runtime()
yield _temp_runtime
@pytest.fixture
def generic_system(temp_runtime):
yield from temp_runtime(fixtures.TEST_CONFIG_FILE, 'generic')
@pytest.fixture
def testsys_system(temp_runtime):
yield from temp_runtime(fixtures.TEST_CONFIG_FILE, 'testsys')
@pytest.fixture
def user_system(temp_runtime):
    # Yield the user-configured runtime when a user config file is available;
    # otherwise fall back to a generic system.
    if fixtures.USER_CONFIG_FILE:
        yield from temp_runtime(fixtures.USER_CONFIG_FILE,
                                fixtures.USER_SYSTEM)
    else:
        # NOTE(review): this yields the `generic_system` fixture *function*
        # object itself, not an activated runtime -- presumably it should
        # request the fixture or `yield from temp_runtime(...)` with the
        # generic config.  Confirm against callers before relying on the
        # fallback path.
        yield generic_system
@pytest.fixture
def hellotest():
yield load_test('unittests/resources/checks/hellocheck.py')[0]
@pytest.fixture
def local_exec_ctx(generic_system):
partition = fixtures.partition_by_name('default')
environ = fixtures.environment_by_name('builtin-gcc', partition)
yield partition, environ
@pytest.fixture
def local_user_exec_ctx(user_system):
partition = fixtures.partition_by_scheduler('local')
if partition is None:
pytest.skip('no local jobs are supported')
try:
environ = partition.environs[0]
except IndexError:
pytest.skip('no environments configured for partition: %s' %
partition.fullname)
yield partition, environ
@pytest.fixture
def remote_exec_ctx(user_system):
    """Yield a ``(partition, environ)`` pair supporting remote job submission.

    Skips the test when no partition with a remote scheduler exists or when
    the selected partition has no environments configured.

    Note: this fixture was previously defined twice back-to-back with
    identical bodies; the second definition shadowed the first, so the
    duplicate has been removed.
    """
    partition = fixtures.partition_by_scheduler()
    if partition is None:
        pytest.skip('job submission not supported')

    try:
        environ = partition.environs[0]
    except IndexError:
        pytest.skip('no environments configured for partition: %s' %
                    partition.fullname)

    yield partition, environ
@pytest.fixture
def container_remote_exec_ctx(remote_exec_ctx):
def _container_exec_ctx(platform):
partition = remote_exec_ctx[0]
if platform not in partition.container_environs.keys():
pytest.skip('%s is not configured on the system' % platform)
yield from remote_exec_ctx
return _container_exec_ctx
@pytest.fixture
def container_local_exec_ctx(local_user_exec_ctx):
def _container_exec_ctx(platform):
partition = local_user_exec_ctx[0]
if platform not in partition.container_environs.keys():
pytest.skip('%s is not configured on the system' % platform)
yield from local_user_exec_ctx
return _container_exec_ctx
def test_environ_setup(hellotest, local_exec_ctx):
hellotest.variables = {'_FOO_': '1', '_BAR_': '2'}
hellotest.setup(*local_exec_ctx)
for k in hellotest.variables.keys():
assert k not in os.environ
def test_hellocheck(hellotest, remote_exec_ctx):
_run(hellotest, *remote_exec_ctx)
def test_hellocheck_make(remote_exec_ctx):
test = load_test('unittests/resources/checks/hellocheck_make.py')[0]
_run(test, *remote_exec_ctx)
def test_hellocheck_local(hellotest, local_exec_ctx):
hellotest.prebuild_cmd = ['touch prebuild', 'mkdir prebuild_dir']
hellotest.postbuild_cmd = ['touch postbuild', 'mkdir postbuild_dir']
hellotest.keep_files = ['prebuild', 'postbuild',
'prebuild_dir', 'postbuild_dir']
hellotest.local = True
_run(hellotest, *local_exec_ctx)
must_keep = [
hellotest.stdout.evaluate(),
hellotest.stderr.evaluate(),
hellotest.build_stdout.evaluate(),
hellotest.build_stderr.evaluate(),
hellotest.job.script_filename,
*hellotest.keep_files
]
for f in must_keep:
assert os.path.exists(os.path.join(hellotest.outputdir, f))
def test_hellocheck_local_prepost_run(hellotest, local_exec_ctx):
@sn.sanity_function
def stagedir(test):
return test.stagedir
hellotest.pre_run = ['echo prerun: `pwd`']
hellotest.post_run = ['echo postrun: `pwd`']
pre_run_path = sn.extractsingle(r'^prerun: (\S+)', hellotest.stdout, 1)
post_run_path = sn.extractsingle(r'^postrun: (\S+)', hellotest.stdout, 1)
hellotest.sanity_patterns = sn.all([
sn.assert_eq(stagedir(hellotest), pre_run_path),
sn.assert_eq(stagedir(hellotest), post_run_path),
])
_run(hellotest, *local_exec_ctx)
def test_run_only_sanity(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.RunOnlyRegressionTest):
def __init__(self):
self.executable = './hello.sh'
self.executable_opts = ['Hello, World!']
self.local = True
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
self.sanity_patterns = sn.assert_found(
r'Hello, World\!', self.stdout)
_run(MyTest(), *local_exec_ctx)
def test_run_only_no_srcdir(local_exec_ctx):
@fixtures.custom_prefix('foo/bar/')
class MyTest(rfm.RunOnlyRegressionTest):
def __init__(self):
self.executable = 'echo'
self.executable_opts = ['hello']
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
self.sanity_patterns = sn.assert_found(r'hello', self.stdout)
test = MyTest()
assert test.sourcesdir is None
_run(test, *local_exec_ctx)
def test_compile_only_failure(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.CompileOnlyRegressionTest):
def __init__(self):
self.sourcepath = 'compiler_failure.c'
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
test = MyTest()
test.setup(*local_exec_ctx)
test.compile()
with pytest.raises(BuildError):
test.compile_wait()
def test_compile_only_warning(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.RunOnlyRegressionTest):
def __init__(self):
self.build_system = 'SingleSource'
self.build_system.srcfile = 'compiler_warning.c'
self.build_system.cflags = ['-Wall']
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
self.sanity_patterns = sn.assert_found(r'warning', self.stderr)
_run(MyTest(), *local_exec_ctx)
def test_supports_system(hellotest, testsys_system):
hellotest.valid_systems = ['*']
assert hellotest.supports_system('gpu')
assert hellotest.supports_system('login')
assert hellotest.supports_system('testsys:gpu')
assert hellotest.supports_system('testsys:login')
hellotest.valid_systems = ['*:*']
assert hellotest.supports_system('gpu')
assert hellotest.supports_system('login')
assert hellotest.supports_system('testsys:gpu')
assert hellotest.supports_system('testsys:login')
hellotest.valid_systems = ['testsys']
assert hellotest.supports_system('gpu')
assert hellotest.supports_system('login')
assert hellotest.supports_system('testsys:gpu')
assert hellotest.supports_system('testsys:login')
hellotest.valid_systems = ['testsys:gpu']
assert hellotest.supports_system('gpu')
assert not hellotest.supports_system('login')
assert hellotest.supports_system('testsys:gpu')
assert not hellotest.supports_system('testsys:login')
hellotest.valid_systems = ['testsys:login']
assert not hellotest.supports_system('gpu')
assert hellotest.supports_system('login')
assert not hellotest.supports_system('testsys:gpu')
assert hellotest.supports_system('testsys:login')
hellotest.valid_systems = ['foo']
assert not hellotest.supports_system('gpu')
assert not hellotest.supports_system('login')
assert not hellotest.supports_system('testsys:gpu')
assert not hellotest.supports_system('testsys:login')
hellotest.valid_systems = ['*:gpu']
assert hellotest.supports_system('testsys:gpu')
assert hellotest.supports_system('foo:gpu')
assert not hellotest.supports_system('testsys:cpu')
assert not hellotest.supports_system('testsys:login')
hellotest.valid_systems = ['testsys:*']
assert hellotest.supports_system('testsys:login')
assert hellotest.supports_system('gpu')
assert not hellotest.supports_system('foo:gpu')
def test_supports_environ(hellotest, generic_system):
hellotest.valid_prog_environs = ['*']
assert hellotest.supports_environ('foo1')
assert hellotest.supports_environ('foo-env')
assert hellotest.supports_environ('*')
def test_sourcesdir_none(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.RegressionTest):
def __init__(self):
self.sourcesdir = None
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
with pytest.raises(ReframeError):
_run(MyTest(), *local_exec_ctx)
def test_sourcesdir_build_system(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.RegressionTest):
def __init__(self):
self.build_system = 'Make'
self.sourcepath = 'code'
self.executable = './code/hello'
self.valid_systems = ['*']
self.valid_prog_environs = ['*']
self.sanity_patterns = sn.assert_found(r'Hello, World\!',
self.stdout)
_run(MyTest(), *local_exec_ctx)
def test_sourcesdir_none_generated_sources(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.RegressionTest):
def __init__(self):
self.sourcesdir = None
self.prebuild_cmd = [
"printf '#include <stdio.h>\\n int main(){ "
"printf(\"Hello, World!\\\\n\"); return 0; }' > hello.c"
]
self.executable = './hello'
self.sourcepath = 'hello.c'
self.valid_systems = ['*']
self.valid_prog_environs = ['*']
self.sanity_patterns = sn.assert_found(r'Hello, World\!',
self.stdout)
_run(MyTest(), *local_exec_ctx)
def test_sourcesdir_none_compile_only(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.CompileOnlyRegressionTest):
def __init__(self):
self.sourcesdir = None
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
with pytest.raises(BuildError):
_run(MyTest(), *local_exec_ctx)
def test_sourcesdir_none_run_only(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.RunOnlyRegressionTest):
def __init__(self):
self.sourcesdir = None
self.executable = 'echo'
self.executable_opts = ["Hello, World!"]
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
self.sanity_patterns = sn.assert_found(r'Hello, World\!',
self.stdout)
_run(MyTest(), *local_exec_ctx)
def test_sourcepath_abs(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.CompileOnlyRegressionTest):
def __init__(self):
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
test = MyTest()
test.setup(*local_exec_ctx)
test.sourcepath = '/usr/src'
with pytest.raises(PipelineError):
test.compile()
def test_sourcepath_upref(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(rfm.CompileOnlyRegressionTest):
def __init__(self):
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
test = MyTest()
test.setup(*local_exec_ctx)
test.sourcepath = '../hellosrc'
with pytest.raises(PipelineError):
test.compile()
def test_extra_resources(testsys_system):
    """Extra scheduler resources and job options must end up in job.options."""
    @fixtures.custom_prefix('unittests/resources/checks')
    class MyTest(HelloTest):
        def __init__(self):
            super().__init__()
            self.name = type(self).__name__
            self.executable = os.path.join('.', self.name)
            self.local = True

        @rfm.run_after('setup')
        def set_resources(self):
            # Use `self`, not the enclosing `test` variable: the hook runs
            # on the instance, so this removes a fragile closure reference
            # to a name that is only bound after the class definition.
            self.extra_resources = {
                'gpu': {'num_gpus_per_node': 2},
                'datawarp': {'capacity': '100GB',
                             'stagein_src': self.stagedir}
            }
            self.job.options += ['--foo']

    test = MyTest()
    partition = fixtures.partition_by_name('gpu')
    environ = partition.environment('builtin-gcc')
    _run(test, partition, environ)
    expected_job_options = {'--gres=gpu:2',
                            '#DW jobdw capacity=100GB',
                            '#DW stage_in source=%s' % test.stagedir,
                            '--foo'}
    assert expected_job_options == set(test.job.options)
def test_setup_hooks(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.count = 0
@rfm.run_before('setup')
def prefoo(self):
assert self.current_environ is None
self.count += 1
@rfm.run_after('setup')
def postfoo(self):
assert self.current_environ is not None
self.count += 1
test = MyTest()
_run(test, *local_exec_ctx)
assert test.count == 2
def test_compile_hooks(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.count = 0
@rfm.run_before('compile')
def setflags(self):
self.count += 1
@rfm.run_after('compile')
def check_executable(self):
exec_file = os.path.join(self.stagedir, self.executable)
assert os.path.exists(exec_file)
test = MyTest()
_run(test, *local_exec_ctx)
assert test.count == 1
def test_run_hooks(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
@rfm.run_before('run')
def setflags(self):
self.post_run = ['echo hello > greetings.txt']
@rfm.run_after('run')
def check_executable(self):
outfile = os.path.join(self.stagedir, 'greetings.txt')
assert os.path.exists(outfile)
_run(MyTest(), *local_exec_ctx)
def test_multiple_hooks(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.var = 0
@rfm.run_after('setup')
def x(self):
self.var += 1
@rfm.run_after('setup')
def y(self):
self.var += 1
@rfm.run_after('setup')
def z(self):
self.var += 1
test = MyTest()
_run(test, *local_exec_ctx)
assert test.var == 3
def test_stacked_hooks(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class MyTest(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.var = 0
@rfm.run_before('setup')
@rfm.run_after('setup')
@rfm.run_after('compile')
def x(self):
self.var += 1
test = MyTest()
_run(test, *local_exec_ctx)
assert test.var == 3
def test_inherited_hooks(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class BaseTest(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.var = 0
@rfm.run_after('setup')
def x(self):
self.var += 1
class C(rfm.RegressionTest):
@rfm.run_before('run')
def y(self):
self.foo = 1
class DerivedTest(BaseTest, C):
@rfm.run_after('setup')
def z(self):
self.var += 1
class MyTest(DerivedTest):
pass
test = MyTest()
_run(test, *local_exec_ctx)
assert test.var == 2
assert test.foo == 1
def test_overriden_hooks(local_exec_ctx):
@fixtures.custom_prefix('unittests/resources/checks')
class BaseTest(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.var = 0
self.foo = 0
@rfm.run_after('setup')
def x(self):
self.var += 1
@rfm.run_before('setup')
def y(self):
self.foo += 1
class DerivedTest(BaseTest):
@rfm.run_after('setup')
def x(self):
self.var += 5
class MyTest(DerivedTest):
@rfm.run_before('setup')
def y(self):
self.foo += 10
test = MyTest()
_run(test, *local_exec_ctx)
assert test.var == 5
assert test.foo == 10
def test_require_deps(local_exec_ctx):
import reframe.frontend.dependency as dependency
import reframe.frontend.executors as executors
@fixtures.custom_prefix('unittests/resources/checks')
class T0(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.x = 1
@fixtures.custom_prefix('unittests/resources/checks')
class T1(HelloTest):
def __init__(self):
super().__init__()
self.name = type(self).__name__
self.executable = os.path.join('.', self.name)
self.depends_on('T0')
@rfm.require_deps
def sety(self, T0):
self.y = T0().x + 1
@rfm.run_before('run')
@rfm.require_deps
def setz(self, T0):
self.z = T0().x + 2
cases = executors.generate_testcases([T0(), T1()])
deps = dependency.build_deps(cases)
for c in dependency.toposort(deps):
_run(*c)
for c in cases:
t = c.check
if t.name == 'T0':
assert t.x == 1
elif t.name == 'T1':
assert t.y == 2
assert t.z == 3
def test_regression_test_name():
class MyTest(rfm.RegressionTest):
def __init__(self, a, b):
self.a = a
self.b = b
test = MyTest(1, 2)
assert os.path.abspath(os.path.dirname(__file__)) == test.prefix
assert 'test_regression_test_name.<locals>.MyTest_1_2' == test.name
def test_strange_test_names():
class C:
def __init__(self, a):
self.a = a
def __repr__(self):
return 'C(%s)' % self.a
class MyTest(rfm.RegressionTest):
def __init__(self, a, b):
self.a = a
self.b = b
test = MyTest('(a*b+c)/12', C(33))
assert ('test_strange_test_names.<locals>.MyTest__a_b_c__12_C_33_' ==
test.name)
def test_name_user_inheritance():
class MyBaseTest(rfm.RegressionTest):
def __init__(self, a, b):
self.a = a
self.b = b
class MyTest(MyBaseTest):
def __init__(self):
super().__init__(1, 2)
test = MyTest()
assert 'test_name_user_inheritance.<locals>.MyTest' == test.name
def test_name_runonly_test():
class MyTest(rfm.RunOnlyRegressionTest):
def __init__(self, a, b):
self.a = a
self.b = b
test = MyTest(1, 2)
assert os.path.abspath(os.path.dirname(__file__)) == test.prefix
assert 'test_name_runonly_test.<locals>.MyTest_1_2' == test.name
def test_name_compileonly_test():
class MyTest(rfm.CompileOnlyRegressionTest):
def __init__(self, a, b):
self.a = a
self.b = b
test = MyTest(1, 2)
assert os.path.abspath(os.path.dirname(__file__)) == test.prefix
assert 'test_name_compileonly_test.<locals>.MyTest_1_2' == test.name
def test_registration_of_tests():
import sys
import unittests.resources.checks_unlisted.good as mod
checks = mod._rfm_gettests()
assert 13 == len(checks)
assert [mod.MyBaseTest(0, 0),
mod.MyBaseTest(0, 1),
mod.MyBaseTest(1, 0),
mod.MyBaseTest(1, 1),
mod.MyBaseTest(2, 0),
mod.MyBaseTest(2, 1),
mod.AnotherBaseTest(0, 0),
mod.AnotherBaseTest(0, 1),
mod.AnotherBaseTest(1, 0),
mod.AnotherBaseTest(1, 1),
mod.AnotherBaseTest(2, 0),
mod.AnotherBaseTest(2, 1),
mod.MyBaseTest(10, 20)] == checks
def _run_sanity(test, *exec_ctx, skip_perf=False):
test.setup(*exec_ctx)
test.check_sanity()
if not skip_perf:
test.check_performance()
@pytest.fixture
def dummy_gpu_exec_ctx(testsys_system):
partition = fixtures.partition_by_name('gpu')
environ = fixtures.environment_by_name('builtin-gcc', partition)
yield partition, environ
@pytest.fixture
def perf_file(tmp_path):
yield tmp_path / 'perf.out'
@pytest.fixture
def sanity_file(tmp_path):
yield tmp_path / 'sanity.out'
@pytest.fixture
def dummytest(testsys_system, perf_file, sanity_file):
class MyTest(rfm.RunOnlyRegressionTest):
def __init__(self):
self.perf_file = perf_file
self.sourcesdir = None
self.reference = {
'testsys': {
'value1': (1.4, -0.1, 0.1, None),
'value2': (1.7, -0.1, 0.1, None),
},
'testsys:gpu': {
'value3': (3.1, -0.1, 0.1, None),
}
}
self.perf_patterns = {
'value1': sn.extractsingle(
r'perf1 = (\S+)', perf_file, 1, float
),
'value2': sn.extractsingle(
r'perf2 = (\S+)', perf_file, 1, float
),
'value3': sn.extractsingle(
r'perf3 = (\S+)', perf_file, 1, float
)
}
self.sanity_patterns = sn.assert_found(
r'result = success', sanity_file
)
yield MyTest()
def test_sanity_success(dummytest, sanity_file, perf_file, dummy_gpu_exec_ctx):
sanity_file.write_text('result = success\n')
perf_file.write_text('perf1 = 1.3\n'
'perf2 = 1.8\n'
'perf3 = 3.3\n')
_run_sanity(dummytest, *dummy_gpu_exec_ctx)
def test_sanity_failure(dummytest, sanity_file, dummy_gpu_exec_ctx):
sanity_file.write_text('result = failure\n')
with pytest.raises(SanityError):
_run_sanity(dummytest, *dummy_gpu_exec_ctx, skip_perf=True)
def test_sanity_failure_noassert(dummytest, sanity_file, dummy_gpu_exec_ctx):
dummytest.sanity_patterns = sn.findall(r'result = success', sanity_file)
sanity_file.write_text('result = failure\n')
with pytest.raises(SanityError):
_run_sanity(dummytest, *dummy_gpu_exec_ctx, skip_perf=True)
def test_sanity_multiple_patterns(dummytest, sanity_file, dummy_gpu_exec_ctx):
sanity_file.write_text('result1 = success\n'
'result2 = success\n')
dummytest.perf_patterns = None
dummytest.sanity_patterns = sn.assert_eq(
sn.count(sn.findall(r'result\d = success', sanity_file)), 2
)
_run_sanity(dummytest, *dummy_gpu_exec_ctx, skip_perf=True)
dummytest.sanity_patterns = sn.assert_eq(
sn.count(sn.findall(r'result\d = success', sanity_file)), 3
)
with pytest.raises(SanityError):
_run_sanity(dummytest, *dummy_gpu_exec_ctx, skip_perf=True)
def test_sanity_multiple_files(dummytest, tmp_path, dummy_gpu_exec_ctx):
file0 = tmp_path / 'out1.txt'
file1 = tmp_path / 'out2.txt'
file0.write_text('result = success\n')
file1.write_text('result = success\n')
dummytest.sanity_patterns = sn.all([
sn.assert_found(r'result = success', file0),
sn.assert_found(r'result = success', file1)
])
_run_sanity(dummytest, *dummy_gpu_exec_ctx, skip_perf=True)
def test_performance_failure(dummytest, sanity_file,
perf_file, dummy_gpu_exec_ctx):
sanity_file.write_text('result = success\n')
perf_file.write_text('perf1 = 1.0\n'
'perf2 = 1.8\n'
'perf3 = 3.3\n')
with pytest.raises(PerformanceError):
_run_sanity(dummytest, *dummy_gpu_exec_ctx)
def test_reference_unknown_tag(dummytest, sanity_file,
perf_file, dummy_gpu_exec_ctx):
sanity_file.write_text('result = success\n')
perf_file.write_text('perf1 = 1.3\n'
'perf2 = 1.8\n'
'perf3 = 3.3\n')
dummytest.reference = {
'testsys': {
'value1': (1.4, -0.1, 0.1, None),
'value2': (1.7, -0.1, 0.1, None),
'foo': (3.1, -0.1, 0.1, None),
}
}
with pytest.raises(SanityError):
_run_sanity(dummytest, *dummy_gpu_exec_ctx)
def test_reference_unknown_system(dummytest, sanity_file,
perf_file, dummy_gpu_exec_ctx):
sanity_file.write_text('result = success\n')
perf_file.write_text('perf1 = 1.3\n'
'perf2 = 1.8\n'
'perf3 = 3.3\n')
dummytest.reference = {
'testsys:login': {
'value1': (1.4, -0.1, 0.1, None),
'value3': (3.1, -0.1, 0.1, None),
},
'testsys:login2': {
'value2': (1.7, -0.1, 0.1, None)
}
}
_run_sanity(dummytest, *dummy_gpu_exec_ctx)
def test_reference_empty(dummytest, sanity_file,
perf_file, dummy_gpu_exec_ctx):
sanity_file.write_text('result = success\n')
perf_file.write_text('perf1 = 1.3\n'
'perf2 = 1.8\n'
'perf3 = 3.3\n')
dummytest.reference = {}
_run_sanity(dummytest, *dummy_gpu_exec_ctx)
def test_reference_default(dummytest, sanity_file,
perf_file, dummy_gpu_exec_ctx):
sanity_file.write_text('result = success\n')
perf_file.write_text('perf1 = 1.3\n'
'perf2 = 1.8\n'
'perf3 = 3.3\n')
dummytest.reference = {
'*': {
'value1': (1.4, -0.1, 0.1, None),
'value2': (1.7, -0.1, 0.1, None),
'value3': (3.1, -0.1, 0.1, None),
}
}
_run_sanity(dummytest, *dummy_gpu_exec_ctx)
def test_reference_tag_resolution(dummytest, sanity_file,
perf_file, dummy_gpu_exec_ctx):
sanity_file.write_text('result = success\n')
perf_file.write_text('perf1 = 1.3\n'
'perf2 = 1.8\n'
'perf3 = 3.3\n')
dummytest.reference = {
'testsys': {
'value1': (1.4, -0.1, 0.1, None),
'value2': (1.7, -0.1, 0.1, None),
},
'*': {
'value3': (3.1, -0.1, 0.1, None),
}
}
_run_sanity(dummytest, *dummy_gpu_exec_ctx)
def test_performance_invalid_value(dummytest, sanity_file,
perf_file, dummy_gpu_exec_ctx):
sanity_file.write_text('result = success\n')
perf_file.write_text('perf1 = 1.3\n'
'perf2 = foo\n'
'perf3 = 3.3\n')
dummytest.perf_patterns = {
'value1': sn.extractsingle(r'perf1 = (\S+)', perf_file, 1, float),
'value2': sn.extractsingle(r'perf2 = (\S+)', perf_file, 1, str),
'value3': sn.extractsingle(r'perf3 = (\S+)', perf_file, 1, float)
}
with pytest.raises(SanityError, match='not a number'):
_run_sanity(dummytest, *dummy_gpu_exec_ctx)
def test_performance_var_evaluation(dummytest, sanity_file,
perf_file, dummy_gpu_exec_ctx):
logfile = 'perf.log'
@sn.sanity_function
def extract_perf(patt, tag):
val = sn.evaluate(
sn.extractsingle(patt, perf_file, tag, float)
)
with open('perf.log', 'a') as fp:
fp.write('%s=%s' % (tag, val))
return val
sanity_file.write_text('result = success\n')
perf_file.write_text('perf1 = 1.0\n'
'perf2 = 1.8\n'
'perf3 = 3.3\n')
dummytest.perf_patterns = {
'value1': extract_perf(r'perf1 = (?P<v1>\S+)', 'v1'),
'value2': extract_perf(r'perf2 = (?P<v2>\S+)', 'v2'),
'value3': extract_perf(r'perf3 = (?P<v3>\S+)', 'v3')
}
with pytest.raises(PerformanceError) as cm:
_run_sanity(dummytest, *dummy_gpu_exec_ctx)
logfile = os.path.join(dummytest.stagedir, logfile)
with open(logfile) as fp:
log_output = fp.read()
assert 'v1' in log_output
assert 'v2' in log_output
assert 'v3' in log_output
@pytest.fixture
def container_test(tmp_path):
def _container_test(platform, image):
@fixtures.custom_prefix(tmp_path)
class ContainerTest(rfm.RunOnlyRegressionTest):
def __init__(self):
self.name = 'container_test'
self.valid_prog_environs = ['*']
self.valid_systems = ['*']
self.container_platform = platform
self.container_platform.image = image
self.container_platform.commands = [
'pwd', 'ls', 'cat /etc/os-release'
]
self.container_platform.workdir = '/workdir'
self.pre_run = ['touch foo']
self.sanity_patterns = sn.all([
sn.assert_found(
r'^' + self.container_platform.workdir, self.stdout),
sn.assert_found(r'^foo', self.stdout),
sn.assert_found(
r'18\.04\.\d+ LTS \(Bionic Beaver\)', self.stdout),
])
return ContainerTest()
yield _container_test
def _cray_cle_version():
completed = os_ext.run_command('cat /etc/opt/cray/release/cle-release')
matched = re.match(r'^RELEASE=(\S+)', completed.stdout)
if matched is None:
return None
return matched.group(1)
def test_with_singularity(container_test, container_remote_exec_ctx):
cle_version = _cray_cle_version()
if cle_version is not None and cle_version.startswith('6.0'):
pytest.skip('test not supported on Cray CLE6')
_run(container_test('Singularity', 'docker://ubuntu:18.04'),
*container_remote_exec_ctx('Singularity'))
def test_with_shifter(container_test, container_remote_exec_ctx):
    """Run the container test through Shifter."""
    test = container_test('Shifter', 'ubuntu:18.04')
    _run(test, *container_remote_exec_ctx('Shifter'))
def test_with_sarus(container_test, container_remote_exec_ctx):
    """Run the container test through Sarus."""
    test = container_test('Sarus', 'ubuntu:18.04')
    _run(test, *container_remote_exec_ctx('Sarus'))
def test_with_docker(container_test, container_local_exec_ctx):
    """Run the container test through Docker on the local partition."""
    test = container_test('Docker', 'ubuntu:18.04')
    _run(test, *container_local_exec_ctx('Docker'))
def test_unknown_container_platform(container_test, local_exec_ctx):
    """An unrecognized container platform name must raise ``ValueError``."""
    test = container_test('foo', 'ubuntu:18.04')
    with pytest.raises(ValueError):
        _run(test, *local_exec_ctx)
def test_not_configured_container_platform(container_test, local_exec_ctx):
    """Running on a partition without the chosen platform configured must
    fail the pipeline.

    Improvements over the original: the redundant ``.keys()`` in the
    membership test is dropped (dict membership already checks keys), and
    the manual search loop is replaced with ``next()`` over a generator.
    """
    partition, environ = local_exec_ctx
    # Pick any supported platform that this partition does not configure.
    platform = next(
        (cp for cp in ('Docker', 'Singularity', 'Sarus', 'ShifterNG')
         if cp not in partition.container_environs),
        None
    )
    if platform is None:
        pytest.skip('cannot find a supported platform that is not configured')

    with pytest.raises(PipelineError):
        _run(container_test(platform, 'ubuntu:18.04'), *local_exec_ctx)
| true | true |
1c2ff2f8bc879c16f78556a674ed8c6aeb2eef07 | 27,688 | py | Python | analysis.py | ElectronicNose/Electronic-Nose | 3e79711a7701d352ef5c1c2151c535c5b576b71e | [
"MIT"
] | 1 | 2019-10-08T05:12:52.000Z | 2019-10-08T05:12:52.000Z | analysis.py | ElectronicNose/Electronic-Nose | 3e79711a7701d352ef5c1c2151c535c5b576b71e | [
"MIT"
] | null | null | null | analysis.py | ElectronicNose/Electronic-Nose | 3e79711a7701d352ef5c1c2151c535c5b576b71e | [
"MIT"
] | null | null | null | #analysis files
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QFileDialog
from analysis_gui import Ui_Analysis
import numpy as np
import matplotlib,math,csv
matplotlib.use('Qt5Agg')
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
class MyMplCanvas(FigureCanvas):
    """Ultimately, this is a QWidget (as well as a FigureCanvasAgg, etc.)."""
    def __init__(self, parent=None):
        # Build the Figure/Axes pair first, then hand the figure to the
        # FigureCanvas base class, which performs the Qt widget setup.
        fig = Figure()
        self.axes = fig.add_subplot(111)
        # Subclass hook: lets derived canvases draw an initial plot before
        # the widget is realized.
        self.compute_initial_figure()
        FigureCanvas.__init__(self, fig)
        self.setParent(parent)
        # Let the canvas grow/shrink with its parent layout.
        FigureCanvas.setSizePolicy(self,
                                   QtWidgets.QSizePolicy.Expanding,
                                   QtWidgets.QSizePolicy.Expanding)
        FigureCanvas.updateGeometry(self)
    def compute_initial_figure(self):
        # Default implementation draws nothing; subclasses may override.
        pass
class Radar(FigureCanvas):
    """Polar 'radar' (spider) chart canvas with one spoke per title."""
    def __init__(self, titles, rect=None, parent=None):
        fig = Figure()
        if rect is None:
            rect = [0.05, 0.05, 0.8, 0.8]
        # One spoke per title, spread evenly over 360 degrees starting at 90.
        self.n = len(titles)
        self.angles = np.arange(90, 90 + 360, 360.0 / self.n)
        self.angles = [a % 360 for a in self.angles]
        # One overlaid polar axes per spoke so each spoke can carry its own
        # radial grid labels at its own angle.
        self.axes = [fig.add_axes(rect, projection="polar", label="axes%d" % i)
                     for i in range(self.n)]
        # All plotting goes through the first axes; the rest only display
        # their radial grids.
        self.ax = self.axes[0]
        self.ax.set_thetagrids(self.angles,labels=titles, fontsize=14)
        for ax in self.axes[1:]:
            ax.patch.set_visible(False)
            ax.grid("off")
            ax.xaxis.set_visible(False)
        for ax, angle in zip(self.axes, self.angles):
            ax.set_rgrids([0.2,0.4,0.6,0.8,1.0], angle=angle)
            ax.spines["polar"].set_visible(False)
            ax.set_ylim(auto=True)
            ax.set_xlim(auto=True)
        # Qt widget setup happens last, once the figure is fully built.
        FigureCanvas.__init__(self, fig)
        self.setParent(parent)
    def plot(self, values, *args, **kw):
        # Append the first point at the end to close the polygon.
        angle = np.deg2rad(np.r_[self.angles, self.angles[0]])
        values = np.r_[values, values[0]]
        self.ax.plot(angle, values, *args, **kw)
class Analysis(QtWidgets.QMainWindow, Ui_Analysis):
    """Main window of the e-nose analysis tool.

    Loads a FOX text export (18 sensor channels x 121 samples) or a CSV
    file, and renders the data as an XY plot, a radar plot, a box plot and
    a table.  The per-channel analyses (peak detection, rise/fall time,
    FWHM) operate on the min-max normalized trace of channel 1.

    Fixes over the original: the FOX file handle is no longer leaked
    (``with`` block), the 18-fold copy-pasted per-channel code is replaced
    by channel loops, the min/max of each trace is computed once instead of
    per sample, ``calculate_sd`` no longer shadows the builtin ``list``,
    and unused locals (``rel_tol``, ``peak_values``) are gone.
    """

    NUM_SENSORS = 18   # channels in a FOX record
    NUM_SAMPLES = 121  # samples per FOX record

    def __init__(self, parent=None):
        super(Analysis, self).__init__(parent)
        self.setupUi(self)
        # Host widgets, one per tab.
        self.XY_widget = QtWidgets.QWidget(self.tab_XY)
        self.Radar_widget = QtWidgets.QWidget(self.tab_Radar)
        self.Box_widget = QtWidgets.QWidget(self.tab_Box)
        self.Table_widget = QtWidgets.QWidget(self.tab_Table)
        # XY canvas plus its navigation toolbar.
        self.XY_Layout = QtWidgets.QVBoxLayout(self.XY_widget)
        self.XY = MyMplCanvas(self.XY_widget)
        self.XY_Layout.addWidget(self.XY)
        self.mpl_toolbar = NavigationToolbar(self.XY, self.XY_widget)
        self.XY_Layout.addWidget(self.mpl_toolbar)
        # Box-plot canvas plus its navigation toolbar.
        self.Box_Layout = QtWidgets.QVBoxLayout(self.Box_widget)
        self.box = MyMplCanvas(self.Box_widget)
        self.Box_Layout.addWidget(self.box)
        self.box_toolbar = NavigationToolbar(self.box, self.Box_widget)
        self.Box_Layout.addWidget(self.box_toolbar)
        # Menu actions.
        self.actionOpen.triggered.connect(self.open)
        self.actionMax_min.triggered.connect(self.max_min)
        self.actionStandardization_M_0_S_1.triggered.connect(self.standardization)
        self.actionBaseline_Correction.triggered.connect(self.baseline)
        self.actionPeak_Detection.triggered.connect(self.peak_detection)
        self.actionFWHM.triggered.connect(self.FWHM)
        self.actionRise_Time.triggered.connect(self.rise_time)
        self.actionFall_Time.triggered.connect(self.fall_time)
        self._reset_data()

    def _reset_data(self):
        """Clear all loaded sensor data."""
        self.data = []
        self.sensor_name = []
        self.sensor_sn = []
        self.time = []
        # signals[ch] / normalized[ch]: raw and min-max-scaled trace of
        # channel ch (0-based).
        self.signals = [[] for _ in range(self.NUM_SENSORS)]
        self.normalized = [[] for _ in range(self.NUM_SENSORS)]

    def open(self):
        """Prompt for a data file and load it (FOX *.txt or CSV)."""
        filename = QFileDialog.getOpenFileName(
            self, 'Open', filter="CSV Files (*.csv);;FOX Files (*.txt)",
            initialFilter="CSV Files (*.csv)")
        if filename[0] == '':
            print("Cancel")
        elif filename[1] == 'FOX Files (*.txt)':
            self._load_fox(filename[0])
        elif filename[1] == "CSV Files (*.csv)":
            self._load_csv(filename[0])

    def _load_fox(self, path):
        """Parse a FOX text export, plot the raw traces, normalize them and
        refresh the radar and box views."""
        self._reset_data()
        # The original leaked the file handle; 'with' closes it reliably.
        with open(path) as fp:
            lines = fp.readlines()
        for i, line in enumerate(lines):
            if line.startswith("[SENSOR NAME]"):
                self.sensor_name = lines[i + 1].split()
            if line.startswith("[SENSOR SN]"):
                self.sensor_sn = lines[i + 1].split()
            if line.startswith("[SENSOR DATA]"):
                self.data = [lines[i + 1 + k].split()
                             for k in range(self.NUM_SAMPLES)]
        # Column 0 is the timestamp; columns 1..18 are the channel values.
        for row in self.data:
            self.time.append(row[0])
            for ch in range(self.NUM_SENSORS):
                self.signals[ch].append(float(row[ch + 1]))
        # Raw XY plot of every channel.
        self.XY.axes.cla()
        for ch in range(self.NUM_SENSORS):
            self.XY.axes.plot(self.time, self.signals[ch],
                              label=self.sensor_name[ch])
        self.XY.axes.set_xlabel("Time")
        self.XY.axes.set_ylabel("Impedance")
        self.XY.axes.legend(loc='best')
        self.XY.draw()
        self.menuNormalization.setEnabled(True)
        # Min-max normalization to [0, 1]; min/max hoisted out of the loop
        # (the original recomputed them for every single sample).
        for ch, trace in enumerate(self.signals):
            lo, hi = min(trace), max(trace)
            self.normalized[ch] = [(v - lo) / (hi - lo) for v in trace]
        self.radar_plot()
        self.box_plot()

    def _load_csv(self, path):
        """Show a CSV file in the table tab: two columns for the first
        three (header) rows, then 64 data columns per row."""
        with open(path, 'r') as csvfile:
            data = list(csv.reader(csvfile))
        self.tableWidget.setRowCount(len(data))
        self.tableWidget.setColumnCount(64)
        for i in range(3):
            for j in range(2):
                self.tableWidget.setItem(
                    i, j, QtWidgets.QTableWidgetItem(data[i][j]))
        for i in range(3, len(data)):
            for j in range(64):
                self.tableWidget.setItem(
                    i, j, QtWidgets.QTableWidgetItem(data[i][j]))

    def max_min(self):
        """Plot the min-max normalized channel-1 trace and enable the
        peak/timing analyses."""
        self.XY.axes.cla()
        self.XY.axes.plot(self.time, self.normalized[0],
                          label=self.sensor_name[0])
        self.XY.axes.set_xlabel("Time")
        self.XY.axes.set_ylabel("Impedance")
        self.XY.axes.legend(loc='best')
        self.XY.draw()
        self.actionPeak_Detection.setEnabled(True)
        self.actionRise_Time.setEnabled(True)
        self.actionFall_Time.setEnabled(True)
        self.actionFWHM.setEnabled(True)

    def standardization(self):
        """Plot channel 1 standardized to zero mean and unit variance.

        The original computed z-scores for all 18 channels but plotted
        only channel 1; only that channel is computed here.
        """
        trace = self.signals[0]
        mean = sum(trace) / len(trace)
        sd = self.calculate_sd(trace, mean)
        z1 = [(v - mean) / sd for v in trace]
        self.XY.axes.cla()
        self.XY.axes.plot(self.time, z1, label=self.sensor_name[0])
        self.XY.axes.set_xlabel("Time")
        self.XY.axes.set_ylabel("Impedance")
        self.XY.axes.legend(loc='best')
        self.XY.draw()

    def calculate_sd(self, values, mean):
        """Return the sample standard deviation (n-1 denominator) of
        *values* around *mean*.

        The parameter was renamed from ``list``, which shadowed the
        builtin of the same name.
        """
        var = sum((v - mean) ** 2 for v in values) / (len(values) - 1)
        return math.sqrt(var)

    def baseline(self):
        """Placeholder: baseline correction (peakutils-based) is not
        wired in yet."""

    def _argmax_last(self, trace):
        """Return the index of the *last* occurrence of max(trace),
        matching the original scan-and-overwrite behavior."""
        peak = max(trace)
        idx = 0
        for k, v in enumerate(trace):
            if v == peak:
                idx = k
        return idx

    def peak_detection(self):
        """Mark local maxima of the normalized channel-1 trace.

        A sample is a candidate peak where the first difference changes
        sign from positive to negative; it is drawn only if it exceeds 0.5
        and the next candidate is at least 5 samples away.
        """
        trace = self.normalized[0]
        diffs = [trace[k + 1] - trace[k] for k in range(len(trace) - 1)]
        self.s1_indexes = [k + 1 for k in range(len(diffs) - 1)
                           if diffs[k] > 0 and diffs[k + 1] < 0]
        for a, b in zip(self.s1_indexes, self.s1_indexes[1:]):
            if trace[a] > 0.5 and b - a >= 5:
                self.XY.axes.scatter(self.time[a], trace[a], c='red')
        self.XY.draw()
        self.actionRise_Time.setEnabled(True)

    def rise_time(self):
        """Annotate the XY plot with the 10%%-90%% rise time (in samples)
        of the normalized channel-1 trace."""
        trace = self.normalized[0]
        peak = max(trace)
        max_index = self._argmax_last(trace)
        abs_tol = 0.1  # absolute tolerance for matching the 10%/90% levels
        upper_limit = lower_limit = 0
        for k in range(max_index):
            if abs(trace[k] - 0.9 * peak) <= abs_tol:
                upper_limit = k
            if abs(trace[k] - 0.1 * peak) <= abs_tol:
                lower_limit = k
        self.XY.axes.text(100, 0.9,
                          "Rise Time = " + str(upper_limit - lower_limit) + 's')
        self.XY.draw()

    def fall_time(self):
        """Annotate the XY plot with the 90%%-10%% fall time (in samples)
        of the normalized channel-1 trace."""
        trace = self.normalized[0]
        peak = max(trace)
        max_index = self._argmax_last(trace)
        abs_tol = 0.1
        upper_limit = lower_limit = 0
        for k in range(max_index, len(trace)):
            if abs(trace[k] - 0.9 * peak) <= abs_tol:
                lower_limit = k
            if abs(trace[k] - 0.1 * peak) <= abs_tol:
                upper_limit = k
                break
        self.XY.axes.text(100, 0.8,
                          "Fall Time = " + str(upper_limit - lower_limit) + 's')
        self.XY.draw()

    def FWHM(self):
        """Draw and annotate the full width at half maximum of the
        normalized channel-1 trace."""
        trace = self.normalized[0]
        peak = max(trace)
        max_index = self._argmax_last(trace)
        abs_tol = 0.1
        upper_limit = lower_limit = 0
        for k in range(max_index):
            if abs(trace[k] - 0.5 * peak) <= abs_tol:
                lower_limit = k
        for k in range(max_index, len(trace)):
            if abs(trace[k] - 0.5 * peak) <= abs_tol:
                upper_limit = k
                break
        self.XY.axes.plot([lower_limit, upper_limit],
                          [trace[lower_limit], trace[upper_limit]], c='red')
        self.XY.axes.text(100, 0.7,
                          "FWHM = " + str(upper_limit - lower_limit) + 's')
        self.XY.draw()

    def radar_plot(self):
        """Create the radar canvas and draw one polygon per time sample."""
        self.Radar_Layout = QtWidgets.QVBoxLayout(self.Radar_widget)
        self.radar = Radar(self.sensor_name, rect=None,
                           parent=self.Radar_widget)
        self.Radar_Layout.addWidget(self.radar)
        self.radar_toolbar = NavigationToolbar(self.radar, self.Radar_widget)
        self.Radar_Layout.addWidget(self.radar_toolbar)
        for i in range(self.NUM_SAMPLES):
            self.radar.plot([trace[i] for trace in self.normalized])
        self.radar.draw()
        self.actionRadar_Plot.setEnabled(False)

    def box_plot(self):
        """Refresh the box-plot tab: one box per normalized channel."""
        self.box.axes.cla()
        self.box.axes.boxplot(self.normalized, labels=self.sensor_name)
        self.box.axes.set_ylabel("Impedance")
        self.box.draw()
| 44.3008 | 436 | 0.556992 |
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QFileDialog
from analysis_gui import Ui_Analysis
import numpy as np
import matplotlib,math,csv
matplotlib.use('Qt5Agg')
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
class MyMplCanvas(FigureCanvas):
    """Matplotlib canvas that doubles as a Qt widget."""
    def __init__(self, parent=None):
        # Build the Figure/Axes pair first, then hand the figure to the
        # FigureCanvas base class, which performs the Qt widget setup.
        fig = Figure()
        self.axes = fig.add_subplot(111)
        # Subclass hook for drawing an initial plot.
        self.compute_initial_figure()
        FigureCanvas.__init__(self, fig)
        self.setParent(parent)
        # Let the canvas grow/shrink with its parent layout.
        FigureCanvas.setSizePolicy(self,
                                   QtWidgets.QSizePolicy.Expanding,
                                   QtWidgets.QSizePolicy.Expanding)
        FigureCanvas.updateGeometry(self)
    def compute_initial_figure(self):
        # Default implementation draws nothing; subclasses may override.
        pass
class Radar(FigureCanvas):
    """Polar 'radar' (spider) chart canvas with one spoke per title."""
    def __init__(self, titles, rect=None, parent=None):
        fig = Figure()
        if rect is None:
            rect = [0.05, 0.05, 0.8, 0.8]
        # One spoke per title, spread evenly over 360 degrees starting at 90.
        self.n = len(titles)
        self.angles = np.arange(90, 90 + 360, 360.0 / self.n)
        self.angles = [a % 360 for a in self.angles]
        # One overlaid polar axes per spoke so each spoke can carry its own
        # radial grid labels at its own angle.
        self.axes = [fig.add_axes(rect, projection="polar", label="axes%d" % i)
                         for i in range(self.n)]
        # All plotting goes through the first axes; the rest only display
        # their radial grids.
        self.ax = self.axes[0]
        self.ax.set_thetagrids(self.angles,labels=titles, fontsize=14)
        for ax in self.axes[1:]:
            ax.patch.set_visible(False)
            ax.grid("off")
            ax.xaxis.set_visible(False)
        for ax, angle in zip(self.axes, self.angles):
            ax.set_rgrids([0.2,0.4,0.6,0.8,1.0], angle=angle)
            ax.spines["polar"].set_visible(False)
            ax.set_ylim(auto=True)
            ax.set_xlim(auto=True)
        # Qt widget setup happens last, once the figure is fully built.
        FigureCanvas.__init__(self, fig)
        self.setParent(parent)
    def plot(self, values, *args, **kw):
        # Append the first point at the end to close the polygon.
        angle = np.deg2rad(np.r_[self.angles, self.angles[0]])
        values = np.r_[values, values[0]]
        self.ax.plot(angle, values, *args, **kw)
class Analysis(QtWidgets.QMainWindow, Ui_Analysis):
def __init__(self,parent = None):
super(Analysis,self).__init__(parent)
self.setupUi(self)
self.XY_widget = QtWidgets.QWidget(self.tab_XY)
self.Radar_widget = QtWidgets.QWidget(self.tab_Radar)
self.Box_widget = QtWidgets.QWidget(self.tab_Box)
self.Table_widget = QtWidgets.QWidget(self.tab_Table)
self.XY_Layout = QtWidgets.QVBoxLayout(self.XY_widget)
self.XY = MyMplCanvas(self.XY_widget)
self.XY_Layout.addWidget(self.XY)
self.mpl_toolbar = NavigationToolbar(self.XY, self.XY_widget)
self.XY_Layout.addWidget(self.mpl_toolbar)
self.Box_Layout = QtWidgets.QVBoxLayout(self.Box_widget)
self.box = MyMplCanvas(self.Box_widget)
self.Box_Layout.addWidget(self.box)
self.box_toolbar = NavigationToolbar(self.box, self.Box_widget)
self.Box_Layout.addWidget(self.box_toolbar)
self.actionOpen.triggered.connect(self.open)
self.actionMax_min.triggered.connect(self.max_min)
self.actionStandardization_M_0_S_1.triggered.connect(self.standardization)
self.actionBaseline_Correction.triggered.connect(self.baseline)
self.actionPeak_Detection.triggered.connect(self.peak_detection)
self.actionFWHM.triggered.connect(self.FWHM)
self.actionRise_Time.triggered.connect(self.rise_time)
self.actionFall_Time.triggered.connect(self.fall_time)
self.sensor_name = []
self.sensor_sn = []
self.time = []
self.s1, self.s2, self.s3, self.s4, self.s5 = [], [], [], [], []
self.s6, self.s7, self.s8, self.s9, self.s10 = [], [], [], [], []
self.s11, self.s12, self.s13, self.s14, self.s15 = [], [], [], [], []
self.s16, self.s17, self.s18 = [], [], []
def open(self):
self.data = []
self.sensor_name = []
self.sensor_sn = []
self.time = []
self.s1, self.s2, self.s3, self.s4, self.s5 = [], [], [], [], []
self.s6, self.s7, self.s8, self.s9, self.s10 = [], [], [], [], []
self.s11, self.s12, self.s13, self.s14, self.s15 = [], [], [], [], []
self.s16, self.s17, self.s18 = [], [], []
self.s1_normalized = []
self.s2_normalized = []
self.s3_normalized = []
self.s4_normalized = []
self.s5_normalized = []
self.s6_normalized = []
self.s7_normalized = []
self.s8_normalized = []
self.s9_normalized = []
self.s10_normalized = []
self.s11_normalized = []
self.s12_normalized = []
self.s13_normalized = []
self.s14_normalized = []
self.s15_normalized = []
self.s16_normalized = []
self.s17_normalized = []
self.s18_normalized = []
filename = QFileDialog.getOpenFileName(self, 'Open',filter="CSV Files (*.csv);;FOX Files (*.txt)",
initialFilter= "CSV Files (*.csv)")
if filename[0]=='':
print("Cancel")
elif filename[1]=='FOX Files (*.txt)':
file = open(filename[0])
lines = file.readlines()
for i in range(len(lines)):
if lines[i].startswith("[SENSOR NAME]"):
i += 1
self.sensor_name = lines[i].split()
if lines[i].startswith("[SENSOR SN]"):
i += 1
self.sensor_sn = lines[i].split()
if lines[i].startswith("[SENSOR DATA]"):
j = i + 1
self.data = []
for i in range(121):
self.data.append(lines[j].split())
j += 1
print(self.sensor_name)
print(self.sensor_sn)
print(self.data)
for i in range(len(self.data)):
for j in range(19):
if j==0:
self.time.append(self.data[i][j])
if j==1:
self.s1.append(float(self.data[i][j]))
if j==2:
self.s2.append(float(self.data[i][j]))
if j==3:
self.s3.append(float(self.data[i][j]))
if j==4:
self.s4.append(float(self.data[i][j]))
if j==5:
self.s5.append(float(self.data[i][j]))
if j==6:
self.s6.append(float(self.data[i][j]))
if j==7:
self.s7.append(float(self.data[i][j]))
if j==8:
self.s8.append(float(self.data[i][j]))
if j==9:
self.s9.append(float(self.data[i][j]))
if j==10:
self.s10.append(float(self.data[i][j]))
if j==11:
self.s11.append(float(self.data[i][j]))
if j==12:
self.s12.append(float(self.data[i][j]))
if j==13:
self.s13.append(float(self.data[i][j]))
if j==14:
self.s14.append(float(self.data[i][j]))
if j==15:
self.s15.append(float(self.data[i][j]))
if j==16:
self.s16.append(float(self.data[i][j]))
if j==17:
self.s17.append(float(self.data[i][j]))
if j==18:
self.s18.append(float(self.data[i][j]))
self.XY.axes.cla()
self.XY.axes.plot(self.time, self.s1,label=self.sensor_name[0])
self.XY.axes.plot(self.time, self.s2,label=self.sensor_name[1])
self.XY.axes.plot(self.time, self.s3,label=self.sensor_name[2])
self.XY.axes.plot(self.time, self.s4,label=self.sensor_name[3])
self.XY.axes.plot(self.time, self.s5,label=self.sensor_name[4])
self.XY.axes.plot(self.time, self.s6,label=self.sensor_name[5])
self.XY.axes.plot(self.time, self.s7,label=self.sensor_name[6])
self.XY.axes.plot(self.time, self.s8,label=self.sensor_name[7])
self.XY.axes.plot(self.time, self.s9,label=self.sensor_name[8])
self.XY.axes.plot(self.time, self.s10,label=self.sensor_name[9])
self.XY.axes.plot(self.time, self.s11,label=self.sensor_name[10])
self.XY.axes.plot(self.time, self.s12,label=self.sensor_name[11])
self.XY.axes.plot(self.time, self.s13,label=self.sensor_name[12])
self.XY.axes.plot(self.time, self.s14,label=self.sensor_name[13])
self.XY.axes.plot(self.time, self.s15,label=self.sensor_name[14])
self.XY.axes.plot(self.time, self.s16,label=self.sensor_name[15])
self.XY.axes.plot(self.time, self.s17,label=self.sensor_name[16])
self.XY.axes.plot(self.time, self.s18,label=self.sensor_name[17])
self.XY.axes.set_xlabel("Time")
self.XY.axes.set_ylabel("Impedance")
self.XY.axes.legend(loc='best')
self.XY.draw()
self.menuNormalization.setEnabled(True)
for item in self.s1:
self.s1_normalized.append((item - min(self.s1)) / (max(self.s1) - min(self.s1)))
for item in self.s2:
self.s2_normalized.append((item - min(self.s2)) / (max(self.s2) - min(self.s2)))
for item in self.s3:
self.s3_normalized.append((item - min(self.s3)) / (max(self.s3) - min(self.s3)))
for item in self.s4:
self.s4_normalized.append((item - min(self.s4)) / (max(self.s4) - min(self.s4)))
for item in self.s5:
self.s5_normalized.append((item - min(self.s5)) / (max(self.s5) - min(self.s5)))
for item in self.s6:
self.s6_normalized.append((item - min(self.s6)) / (max(self.s6) - min(self.s6)))
for item in self.s7:
self.s7_normalized.append((item - min(self.s7)) / (max(self.s7) - min(self.s7)))
for item in self.s8:
self.s8_normalized.append((item - min(self.s8)) / (max(self.s8) - min(self.s8)))
for item in self.s9:
self.s9_normalized.append((item - min(self.s9)) / (max(self.s9) - min(self.s9)))
for item in self.s10:
self.s10_normalized.append((item - min(self.s10)) / (max(self.s10) - min(self.s10)))
for item in self.s11:
self.s11_normalized.append((item - min(self.s11)) / (max(self.s11) - min(self.s11)))
for item in self.s12:
self.s12_normalized.append((item - min(self.s12)) / (max(self.s12) - min(self.s12)))
for item in self.s13:
self.s13_normalized.append((item - min(self.s13)) / (max(self.s13) - min(self.s13)))
for item in self.s14:
self.s14_normalized.append((item - min(self.s14)) / (max(self.s14) - min(self.s14)))
for item in self.s15:
self.s15_normalized.append((item - min(self.s15)) / (max(self.s15) - min(self.s15)))
for item in self.s16:
self.s16_normalized.append((item - min(self.s16)) / (max(self.s16) - min(self.s16)))
for item in self.s17:
self.s17_normalized.append((item - min(self.s17)) / (max(self.s17) - min(self.s17)))
for item in self.s18:
self.s18_normalized.append((item - min(self.s18)) / (max(self.s18) - min(self.s18)))
self.radar_plot()
self.box_plot()
elif filename[1] == "CSV Files (*.csv)":
with open(filename[0], 'r') as csvfile:
lines = csv.reader(csvfile)
data = list(lines)
self.tableWidget.setRowCount(len(data))
self.tableWidget.setColumnCount(64)
for i in range(3):
for j in range(2):
self.tableWidget.setItem(i,j,QtWidgets.QTableWidgetItem(data[i][j]))
for i in range(3,len(data)):
for j in range(64):
self.tableWidget.setItem(i, j, QtWidgets.QTableWidgetItem(data[i][j]))
    def max_min(self):
        """Redraw the XY canvas with the min-max normalized sensor-1 trace
        and enable the peak-analysis menu actions.

        Assumes ``self.s1_normalized`` and ``self.time`` were already filled
        by the file-loading code.
        """
        self.XY.axes.cla()
        self.XY.axes.plot(self.time, self.s1_normalized, label=self.sensor_name[0])
        self.XY.axes.set_xlabel("Time")
        self.XY.axes.set_ylabel("Impedance")
        self.XY.axes.legend(loc='best')
        self.XY.draw()
        # The peak-based analyses operate on the normalized trace, so they are
        # unlocked only once it has been plotted.
        self.actionPeak_Detection.setEnabled(True)
        self.actionRise_Time.setEnabled(True)
        self.actionFall_Time.setEnabled(True)
        self.actionFWHM.setEnabled(True)
def standardization(self):
z1,z2,z3,z4,z5,z6,z7,z8,z9,z10,z11,z12,z13,z14,z15,z16,z17,z18 = [],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]
m1 = sum(self.s1) / len(self.s1)
m2 = sum(self.s2) / len(self.s2)
m3 = sum(self.s3) / len(self.s3)
m4 = sum(self.s4) / len(self.s4)
m5 = sum(self.s5) / len(self.s5)
m6 = sum(self.s6) / len(self.s6)
m7 = sum(self.s7) / len(self.s7)
m8 = sum(self.s8) / len(self.s8)
m9 = sum(self.s9) / len(self.s9)
m10 = sum(self.s10) / len(self.s10)
m11 = sum(self.s11) / len(self.s11)
m12 = sum(self.s12) / len(self.s12)
m13 = sum(self.s13) / len(self.s13)
m14 = sum(self.s14) / len(self.s14)
m15 = sum(self.s15) / len(self.s15)
m16 = sum(self.s16) / len(self.s16)
m17 = sum(self.s17) / len(self.s17)
m18 = sum(self.s18) / len(self.s18)
sd1 = self.calculate_sd(self.s1, m1)
sd2 = self.calculate_sd(self.s2, m2)
sd3 = self.calculate_sd(self.s3, m3)
sd4 = self.calculate_sd(self.s4, m4)
sd5 = self.calculate_sd(self.s5, m5)
sd6 = self.calculate_sd(self.s6, m6)
sd7 = self.calculate_sd(self.s7, m7)
sd8 = self.calculate_sd(self.s8, m8)
sd9 = self.calculate_sd(self.s9, m9)
sd10 = self.calculate_sd(self.s10, m10)
sd11 = self.calculate_sd(self.s11, m11)
sd12 = self.calculate_sd(self.s12, m12)
sd13 = self.calculate_sd(self.s13, m13)
sd14 = self.calculate_sd(self.s14, m14)
sd15 = self.calculate_sd(self.s15, m15)
sd16 = self.calculate_sd(self.s16, m16)
sd17 = self.calculate_sd(self.s17, m17)
sd18 = self.calculate_sd(self.s18, m18)
for item in self.s1:
z1.append((item-m1)/sd1)
for item in self.s2:
z2.append((item-m2)/sd2)
for item in self.s3:
z3.append((item-m3)/sd3)
for item in self.s4:
z4.append((item-m4)/sd4)
for item in self.s5:
z5.append((item-m5)/sd5)
for item in self.s6:
z6.append((item-m6)/sd6)
for item in self.s7:
z7.append((item-m7)/sd7)
for item in self.s8:
z8.append((item-m8)/sd8)
for item in self.s9:
z9.append((item-m9)/sd9)
for item in self.s10:
z10.append((item-m10)/sd10)
for item in self.s11:
z11.append((item-m11)/sd11)
for item in self.s12:
z12.append((item-m12)/sd12)
for item in self.s13:
z13.append((item-m13)/sd13)
for item in self.s14:
z14.append((item-m14)/sd14)
for item in self.s15:
z15.append((item-m15)/sd15)
for item in self.s16:
z16.append((item-m16)/sd16)
for item in self.s17:
z17.append((item-m17)/sd17)
for item in self.s18:
z18.append((item-m18)/sd18)
self.XY.axes.cla()
self.XY.axes.plot(self.time, z1, label=self.sensor_name[0])
self.XY.axes.set_xlabel("Time")
self.XY.axes.set_ylabel("Impedance")
self.XY.axes.legend(loc='best')
self.XY.draw()
def calculate_sd(self,list,mean):
sd = 0.0
for item in list:
sd += (item-mean) ** 2
sd = sd/(len(list)-1)
sd = sd ** (1/2)
return sd
def baseline(self):
def peak_detection(self):
s1_diff = []
self.s1_indexes = []
for i in range(len(self.s1_normalized)-1):
s1_diff.append(self.s1_normalized[i+1]-self.s1_normalized[i])
print("diff=" + str(s1_diff))
print(len(s1_diff))
for i in range(len(s1_diff)-1):
if s1_diff[i]>0 and s1_diff[i+1]<0:
self.s1_indexes.append(i+1)
print(self.s1_indexes)
for i in range(len(self.s1_indexes)-1):
if self.s1_normalized[self.s1_indexes[i]]>0.5 and (self.s1_indexes[i+1]-self.s1_indexes[i])>=5:
self.XY.axes.scatter(self.time[self.s1_indexes[i]], self.s1_normalized[self.s1_indexes[i]],c='red')
self.XY.draw()
self.actionRise_Time.setEnabled(True)
def rise_time(self):
upper_limit = 0
lower_limit = 0
max_index = 0
rel_tol = 0.05
abs_tol = 0.1
peak_values = []
for i in range(len(self.s1_normalized)):
if self.s1_normalized[i]==max(self.s1_normalized):
max_index = i
print("max index=" + str(max_index))
for i in range(max_index):
if abs(self.s1_normalized[i]-0.9*max(self.s1_normalized)) <= abs_tol:
upper_limit = i
if abs(self.s1_normalized[i]-0.1*max(self.s1_normalized)) <= abs_tol:
lower_limit = i
print(upper_limit)
print(lower_limit)
self.XY.axes.text(100,0.9,"Rise Time = " + str(upper_limit-lower_limit)+'s')
self.XY.draw()
def fall_time(self):
upper_limit = 0
lower_limit = 0
max_index = 0
rel_tol = 0.05
abs_tol = 0.1
for i in range(len(self.s1_normalized)):
if self.s1_normalized[i]==max(self.s1_normalized):
max_index = i
print("max index="+ str(max_index))
for i in range(max_index,len(self.s1_normalized)):
if abs(self.s1_normalized[i] - 0.9 * max(self.s1_normalized)) <= abs_tol:
lower_limit = i
if abs(self.s1_normalized[i] - 0.1 * max(self.s1_normalized)) <= abs_tol:
upper_limit = i
break
print(upper_limit)
print(lower_limit)
self.XY.axes.text(100,0.8,"Fall Time = " + str(upper_limit - lower_limit) + 's')
self.XY.draw()
def FWHM(self):
upper_limit = 0
lower_limit = 0
max_index = 0
rel_tol = 0.15
abs_tol = 0.1
for i in range(len(self.s1_normalized)):
if self.s1_normalized[i] == max(self.s1_normalized):
max_index = i
print("max index=" + str(max_index))
for i in range(max_index):
if abs(self.s1_normalized[i] - 0.5 * max(self.s1_normalized)) <= abs_tol:
lower_limit = i
for i in range(max_index, len(self.s1_normalized)):
if abs(self.s1_normalized[i] - 0.5 * max(self.s1_normalized)) <= abs_tol:
upper_limit = i
break
print(upper_limit)
print(lower_limit)
x = [lower_limit,upper_limit]
y = [self.s1_normalized[lower_limit],self.s1_normalized[upper_limit]]
self.XY.axes.plot(x,y,c='red')
self.XY.axes.text(100,0.7, "FWHM = " + str(upper_limit - lower_limit) + 's')
self.XY.draw()
def radar_plot(self):
titles = self.sensor_name
self.Radar_Layout = QtWidgets.QVBoxLayout(self.Radar_widget)
self.radar = Radar(titles, rect=None, parent=self.Radar_widget)
self.Radar_Layout.addWidget(self.radar)
self.radar_toolbar = NavigationToolbar(self.radar, self.Radar_widget)
self.Radar_Layout.addWidget(self.radar_toolbar)
for i in range(121):
self.radar.plot([self.s1_normalized[i],self.s2_normalized[i],self.s3_normalized[i],self.s4_normalized[i],self.s5_normalized[i],self.s6_normalized[i],self.s7_normalized[i],self.s8_normalized[i],self.s9_normalized[i],self.s10_normalized[i],self.s11_normalized[i],self.s12_normalized[i],self.s13_normalized[i],self.s14_normalized[i],self.s15_normalized[i],self.s16_normalized[i],self.s17_normalized[i],self.s18_normalized[i]])
self.radar.draw()
self.actionRadar_Plot.setEnabled(False)
def box_plot(self):
labels = self.sensor_name
data = [self.s1_normalized,self.s2_normalized,self.s3_normalized,self.s4_normalized,self.s5_normalized,self.s6_normalized,self.s7_normalized,self.s8_normalized,self.s9_normalized,self.s10_normalized,self.s11_normalized,self.s12_normalized,self.s13_normalized,self.s14_normalized,self.s15_normalized,self.s16_normalized,self.s17_normalized,self.s18_normalized]
self.box.axes.cla()
self.box.axes.boxplot(data,labels=labels)
self.box.axes.set_ylabel("Impedance")
self.box.draw()
| true | true |
1c2ff38c1946798381749e366e028267adeefd55 | 2,459 | py | Python | batch_script/Bootstrap_Energy.py | DavidAce/2Component_GL | b0821956ebe1d65355b2afd954b099ed18b9ad54 | [
"MIT"
] | null | null | null | batch_script/Bootstrap_Energy.py | DavidAce/2Component_GL | b0821956ebe1d65355b2afd954b099ed18b9ad54 | [
"MIT"
] | null | null | null | batch_script/Bootstrap_Energy.py | DavidAce/2Component_GL | b0821956ebe1d65355b2afd954b099ed18b9ad54 | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
import sys
import os
import math
from statsmodels.graphics.tsaplots import plot_acf
import statsmodels.api as sm
from statsmodels.tsa.stattools import acf
import scipy.integrate as integrate
import random
import h5py
beta_low=float(sys.argv[1])
beta_high=float(sys.argv[2])
nbeta=int(sys.argv[3])
h=float(sys.argv[4])
e=float(sys.argv[5])
transient_time=float(sys.argv[6])
tau_max=float(sys.argv[7])
transient_time=int(transient_time)
tau_max=int(tau_max)
beta=np.zeros((nbeta))
if( (h).is_integer()): h=int(h)
L=[]
for ind in range(8, len(sys.argv)):
L.append(int(sys.argv[ind]))
block_size=20*tau_max
plt.rc('text', usetex=True)
plt.rc('font', family='serif', size=18)
plt.rc('text.latex', preamble=r'\usepackage{bm}')
fig, ax1 = plt.subplots(2, 1, figsize=(9,12))
ax1[0].set_title("h=%s; e=%s" %(h, e))
ax1[0].set_xlabel(r"$\beta$")
ax1[0].set_ylabel(r"$E/V$")
ax1[1].set_xlabel(r"$\beta$")
ax1[1].set_ylabel(r"$C_{v}$")
for l in range(len(L)):
BASEDIR=("/home/ilaria/Desktop/MultiComponents_SC/Output_2C/L%d_e%s_h%s_bmin%s_bmax%s" %(L[l], e, h, beta_low, beta_high))
Cv_mean=np.zeros((nbeta))
Cv_err=np.zeros((nbeta))
E_mean=np.zeros((nbeta))
E_err=np.zeros((nbeta))
for b in range(nbeta):
beta[b]=beta_low +b*(beta_high -beta_low)/(nbeta-1)
#fileE=("%s/beta_%d/Energy.npy" %(BASEDIR, b))
#E=np.load(fileE)
file=h5py.File('%s/beta_%d/Output.h5' %(BASEDIR, b), 'r')
E=np.asarray(file['Measurements']['E'])
#cut of the transient regime:
E=E[transient_time:]
# print(len(E))
E_mean[b]=np.mean(E)/(L[l]**3)
E_err[b]=np.sqrt(np.var(E/(L[l]**3))/(len(E)-1))
nblocks=int(len(E)/block_size)
# print(nblocks)
varE_resampling=np.zeros((nblocks))
for block in range(nblocks):
# <E²> - <E>²
varE_resampling[block]=np.var(np.random.choice(E, size=block_size))
Cv_mean[b]=beta[b]*np.var(E)/(L[l]**3)
Cv_err[b]= (beta[b]/(L[l]**3))*np.sqrt(np.var(varE_resampling)/(nblocks-1))
ax1[0].plot(beta, E_mean, '-')
ax1[0].errorbar(beta, E_mean, yerr=E_err, capsize=2,label="L=%s" %L[l])
ax1[1].plot(beta, Cv_mean, '-')
ax1[1].errorbar(beta, Cv_mean, yerr=Cv_err, capsize=2)
ax1[0].legend(loc="best")
plt.tight_layout()
plt.show()
| 26.44086 | 127 | 0.629118 | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
import sys
import os
import math
from statsmodels.graphics.tsaplots import plot_acf
import statsmodels.api as sm
from statsmodels.tsa.stattools import acf
import scipy.integrate as integrate
import random
import h5py
beta_low=float(sys.argv[1])
beta_high=float(sys.argv[2])
nbeta=int(sys.argv[3])
h=float(sys.argv[4])
e=float(sys.argv[5])
transient_time=float(sys.argv[6])
tau_max=float(sys.argv[7])
transient_time=int(transient_time)
tau_max=int(tau_max)
beta=np.zeros((nbeta))
if( (h).is_integer()): h=int(h)
L=[]
for ind in range(8, len(sys.argv)):
L.append(int(sys.argv[ind]))
block_size=20*tau_max
plt.rc('text', usetex=True)
plt.rc('font', family='serif', size=18)
plt.rc('text.latex', preamble=r'\usepackage{bm}')
fig, ax1 = plt.subplots(2, 1, figsize=(9,12))
ax1[0].set_title("h=%s; e=%s" %(h, e))
ax1[0].set_xlabel(r"$\beta$")
ax1[0].set_ylabel(r"$E/V$")
ax1[1].set_xlabel(r"$\beta$")
ax1[1].set_ylabel(r"$C_{v}$")
for l in range(len(L)):
BASEDIR=("/home/ilaria/Desktop/MultiComponents_SC/Output_2C/L%d_e%s_h%s_bmin%s_bmax%s" %(L[l], e, h, beta_low, beta_high))
Cv_mean=np.zeros((nbeta))
Cv_err=np.zeros((nbeta))
E_mean=np.zeros((nbeta))
E_err=np.zeros((nbeta))
for b in range(nbeta):
beta[b]=beta_low +b*(beta_high -beta_low)/(nbeta-1)
file=h5py.File('%s/beta_%d/Output.h5' %(BASEDIR, b), 'r')
E=np.asarray(file['Measurements']['E'])
E=E[transient_time:]
E_mean[b]=np.mean(E)/(L[l]**3)
E_err[b]=np.sqrt(np.var(E/(L[l]**3))/(len(E)-1))
nblocks=int(len(E)/block_size)
varE_resampling=np.zeros((nblocks))
for block in range(nblocks):
varE_resampling[block]=np.var(np.random.choice(E, size=block_size))
Cv_mean[b]=beta[b]*np.var(E)/(L[l]**3)
Cv_err[b]= (beta[b]/(L[l]**3))*np.sqrt(np.var(varE_resampling)/(nblocks-1))
ax1[0].plot(beta, E_mean, '-')
ax1[0].errorbar(beta, E_mean, yerr=E_err, capsize=2,label="L=%s" %L[l])
ax1[1].plot(beta, Cv_mean, '-')
ax1[1].errorbar(beta, Cv_mean, yerr=Cv_err, capsize=2)
ax1[0].legend(loc="best")
plt.tight_layout()
plt.show()
| true | true |
1c2ff3c29461205a7f9fc39f271e6cb0d572c236 | 1,774 | py | Python | fedrec/utilities/registry.py | ruchirgarg05/RecoEdge | 5986af75bcd97087662ff4b1927925f7afb05ee1 | [
"Apache-2.0"
] | 68 | 2021-06-20T07:54:48.000Z | 2022-02-19T16:11:01.000Z | fedrec/utilities/registry.py | ruchirgarg05/RecoEdge | 5986af75bcd97087662ff4b1927925f7afb05ee1 | [
"Apache-2.0"
] | 100 | 2021-06-24T13:33:24.000Z | 2022-02-23T10:30:27.000Z | fedrec/utilities/registry.py | ruchirgarg05/RecoEdge | 5986af75bcd97087662ff4b1927925f7afb05ee1 | [
"Apache-2.0"
] | 38 | 2021-07-13T12:16:24.000Z | 2022-02-26T05:08:28.000Z | import collections
import collections.abc
import inspect
import sys
# Global registry: LOOKUP_DICT[kind][name] -> registered object.
LOOKUP_DICT = collections.defaultdict(dict)


def load(kind, name):
    """Return a decorator registering its target under ``kind``/``name``.

    Args:
        kind: registry namespace (e.g. "model", "optimizer").
        name: key inside that namespace.

    Raises:
        LookupError: at decoration time, if ``name`` is already registered
            under ``kind``.
    """
    registry = LOOKUP_DICT[kind]

    def decorator(obj):
        if name in registry:
            # The original format string took two arguments but had only one
            # placeholder, so the kind was silently dropped from the message.
            raise LookupError('{} already present in {}'.format(name, kind))
        registry[name] = obj
        return obj

    return decorator
def lookup(kind, name):
    """Return the object registered under ``kind``/``name``.

    ``name`` may also be a mapping with a ``'name'`` key (i.e. a config
    dict), in which case that entry is used as the key.

    Raises:
        KeyError: if the kind or the name is unknown (both cases now carry a
            descriptive message; previously an unknown name raised a bare
            KeyError).
    """
    if isinstance(name, collections.abc.Mapping):
        name = name['name']
    if kind not in LOOKUP_DICT:
        raise KeyError('Nothing registered under "{}"'.format(kind))
    registry = LOOKUP_DICT[kind]
    if name not in registry:
        raise KeyError('"{}" not registered under "{}"'.format(name, kind))
    return registry[name]
def construct(kind, config, unused_keys=(), **kwargs):
    """Instantiate the registered callable selected by ``config``.

    ``config`` is either the registered name itself or a mapping containing
    a ``'name'`` key; the remaining config entries (plus ``kwargs``) become
    constructor arguments.  The ``'name'`` key is always treated as unused.
    """
    if isinstance(config, str):
        config = {'name': config}
    target = lookup(kind, config)
    return instantiate(target, config, unused_keys + ('name',), **kwargs)
def instantiate(callable, config, unused_keys=(), **kwargs):
    """Call ``callable`` with keyword arguments drawn from ``config``.

    Entries of ``config`` are overridden by ``kwargs``.  Positional-only and
    ``*args`` parameters are rejected with ValueError.  If the target accepts
    ``**kwargs``, everything is forwarded as-is; otherwise keys unknown to the
    signature are dropped, and those not whitelisted in ``unused_keys``
    additionally trigger a warning on stderr.
    """
    merged = dict(config)
    merged.update(kwargs)
    signature = inspect.signature(callable)
    params = signature.parameters
    rejected_kinds = (inspect.Parameter.POSITIONAL_ONLY,
                      inspect.Parameter.VAR_POSITIONAL)
    for name, param in params.items():
        if param.kind in rejected_kinds:
            raise ValueError('Unsupported kind for param {}: {}'.format(
                name, param.kind))
    accepts_var_kwargs = any(p.kind == inspect.Parameter.VAR_KEYWORD
                             for p in params.values())
    if accepts_var_kwargs:
        return callable(**merged)
    missing = {}
    for key in list(merged):
        if key in params:
            continue
        if key not in unused_keys:
            missing[key] = merged[key]
        del merged[key]
    if missing:
        print('WARNING {}: superfluous {}'.format(
            callable, missing), file=sys.stderr)
    return callable(**merged)
| 28.15873 | 72 | 0.619504 | import collections
import collections.abc
import inspect
import sys
LOOKUP_DICT = collections.defaultdict(dict)
def load(kind, name):
registry = LOOKUP_DICT[kind]
def decorator(obj):
if name in registry:
raise LookupError('{} already present'.format(name, kind))
registry[name] = obj
return obj
return decorator
def lookup(kind, name):
if isinstance(name, collections.abc.Mapping):
name = name['name']
if kind not in LOOKUP_DICT:
raise KeyError('Nothing registered under "{}"'.format(kind))
return LOOKUP_DICT[kind][name]
def construct(kind, config, unused_keys=(), **kwargs):
if isinstance(config, str):
config = {'name': config}
return instantiate(
lookup(kind, config),
config,
unused_keys + ('name',),
**kwargs)
def instantiate(callable, config, unused_keys=(), **kwargs):
merged = {**config, **kwargs}
signature = inspect.signature(callable)
for name, param in signature.parameters.items():
if param.kind in (inspect.Parameter.POSITIONAL_ONLY,
inspect.Parameter.VAR_POSITIONAL):
raise ValueError('Unsupported kind for param {}: {}'.format(
name, param.kind))
if any(param.kind == inspect.Parameter.VAR_KEYWORD
for param in signature.parameters.values()):
return callable(**merged)
missing = {}
for key in list(merged.keys()):
if key not in signature.parameters:
if key not in unused_keys:
missing[key] = merged[key]
merged.pop(key)
if missing:
print('WARNING {}: superfluous {}'.format(
callable, missing), file=sys.stderr)
return callable(**merged)
| true | true |
1c2ff4247fcd37d7a23fe72dfd5f9801afa595fa | 19,358 | py | Python | src/model.py | GuillaumeAI/gia-style-transfer-tf2 | 543e4e3434b87612bff6bb901c6ce4026069fa15 | [
"MIT"
] | 16 | 2019-05-21T22:28:30.000Z | 2022-03-07T19:29:08.000Z | src/model.py | GuillaumeAI/gia-style-transfer-tf2 | 543e4e3434b87612bff6bb901c6ce4026069fa15 | [
"MIT"
] | 4 | 2021-09-01T07:23:48.000Z | 2022-02-26T12:15:40.000Z | src/model.py | GuillaumeAI/gia-style-transfer-tf2 | 543e4e3434b87612bff6bb901c6ce4026069fa15 | [
"MIT"
] | 5 | 2020-02-08T10:08:58.000Z | 2021-06-03T17:47:32.000Z | from absl import logging, flags
import tensorflow as tf
from tensorflow import keras
from tensorflow.python.keras.utils import tf_utils
# Define model flags
FLAGS = flags.FLAGS
flags.DEFINE_enum("increase_size_layer_type", "default", ["default", "unpool", "deconv"],
"Type of layer to use in the decoder part. Default use the type specified in research paper (unpool for adaptive st, deconv for cartoon GAN)")
flags.DEFINE_enum("norm_layer", "default", ["default", "instance_norm", "batch_norm"],
"Type of layer to use for normalization. Default use the type specified in research paper (instance_norm for adaptive st, batch_norm for cartoon GAN)")
flags.DEFINE_bool("mobilenet", False, "Build model with mobilenet optimization (depthwise convolution...)")
flags.DEFINE_enum("model", "default", ["default", "adaptive_st", "cartoon_gan"],
"Model topology to use. If default then use the topology corresponding to training_method")
flags.DEFINE_integer("n_filter_generator", 32, "Number of filters in first conv layer of generator (encoder-decoder)")
flags.DEFINE_integer("n_filter_discriminator", 64, "Number of filters in first conv layer of discriminator")
flags.DEFINE_float("l2_reg", 0.001, "l2 regularization weigh to apply")
flags.DEFINE_integer("transformer_kernel_size", 10, "Size of kernel we apply to the input_tensor in the transformer model if using adaptive_st training method")
#################
# Custom layers #
#################
class UnpoolLayer(keras.layers.Layer):
    """2x nearest-neighbour spatial upsampling ("unpooling") layer."""
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
    @tf_utils.shape_type_conversion
    def compute_output_shape(self, input_shape):
        # Height and width double; batch and channel dims are unchanged.
        return input_shape[0], input_shape[1] * 2, input_shape[2] * 2, input_shape[3]
    def get_config(self):
        # No extra hyper-parameters: the base config is sufficient.
        base_config = super(UnpoolLayer, self).get_config()
        return base_config
    @classmethod
    def from_config(cls, config):
        return cls(**config)
    def build(self, input_shape):
        super().build(input_shape)
    def call(self, inputs):
        # Resize to twice the *runtime* spatial size, so dynamic shapes work.
        return tf.image.resize(inputs, tf.shape(inputs)[1:3] * 2, method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
class InstanceNormLayer(keras.layers.Layer):
    """Instance normalization with learnable per-channel scale and offset.

    See:
    https://github.com/keras-team/keras-contrib/blob/master/keras_contrib/layers/normalization.py
    """
    def __init__(self, **kwargs):
        # Small constant keeping the variance denominator away from zero.
        self.epsilon = 1e-5
        super().__init__(**kwargs)
    def compute_output_shape(self, input_shape):
        # Normalization is shape-preserving.
        shape = tf.TensorShape(input_shape).as_list()
        return tf.TensorShape([shape[0], shape[1], shape[2], shape[3]])
    def build(self, input_shape):
        depth = (input_shape[3],)
        self.scale = self.add_weight(shape=depth,
                                     name='gamma',
                                     initializer=keras.initializers.get('ones'))
        # BUG FIX: the offset weight was also named 'gamma', colliding with
        # the scale weight; 'beta' is the conventional name for the offset.
        # NOTE(review): this changes the variable name, which may matter when
        # restoring name-based checkpoints saved with the old code.
        self.offset = self.add_weight(shape=depth,
                                      name='beta',
                                      initializer=keras.initializers.get('zeros'))
        super().build(input_shape)
    def call(self, inputs):
        # Normalize over the spatial axes (per sample, per channel), then
        # apply the learned affine transform.
        mean, variance = tf.nn.moments(inputs, axes=[1, 2], keepdims=True)
        inv = tf.math.rsqrt(variance + self.epsilon)
        normalized = (inputs - mean) * inv
        return self.scale * normalized + self.offset
class ReflectPadLayer(keras.layers.Layer):
    """Pad height and width by ``pad_size`` pixels on each side.

    NOTE(review): despite the class name, tf.pad is called with mode
    "SYMMETRIC" (edge pixels repeated), not "REFLECT" -- confirm which
    behaviour was intended.
    """
    def __init__(self, pad_size, **kwargs):
        # Number of pixels added on every spatial border.
        self.pad_size = pad_size
        super().__init__(**kwargs)
    def compute_output_shape(self, input_shape):
        # Spatial dims grow by 2 * pad_size; batch/channel dims are unchanged.
        shape = tf.TensorShape(input_shape).as_list()
        return tf.TensorShape([shape[0], shape[1] + self.pad_size * 2, shape[2] + self.pad_size * 2, shape[3]])
    def build(self, input_shape):
        super().build(input_shape)
    def call(self, inputs):
        # Pad only the two spatial axes.
        return tf.pad(inputs, [[0, 0], [self.pad_size, self.pad_size], [self.pad_size, self.pad_size], [0, 0]], "SYMMETRIC")
class CenterLayer(keras.layers.Layer):
    """Affine map sending [0, 1] activations onto [-1, 1]."""
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
    def build(self, input_shape):
        super().build(input_shape)
    def call(self, inputs):
        # x in [0, 1]  ->  2x - 1 in [-1, 1].
        return inputs * 2. - 1.
##############
# Custom ops #
##############
def relu6(x):
    """ReLU activation clipped at 6, as used by the MobileNet family."""
    return tf.keras.activations.relu(x, max_value=6)
def inverted_res_block(inputs, stride, in_channels, out_channels, norm_layer, expansion=1):
    """MobileNetV2-style inverted residual block (expand -> depthwise -> project).

    Args:
        inputs: input feature tensor.
        stride: 1 (shape-preserving) or 2 (downsampling with symmetric pad).
        in_channels / out_channels: channel counts before/after the block.
        norm_layer: normalization layer class to instantiate.
        expansion: channel multiplier for the pointwise expansion.
    Returns: output tensor; a residual Add is applied only when stride == 1
        and in_channels == out_channels (shapes must match).
    """
    x = inputs
    # Pointwise expansion.
    x = tf.keras.layers.Conv2D(expansion * in_channels, kernel_size=1, padding='same', use_bias=False, activation=None)(x)
    x = norm_layer()(x)
    x = tf.keras.layers.ReLU(6.)(x)
    # Depthwise convolution; strided variants get symmetric padding first.
    if stride == 2:
        x = norm_layer()(x)
        x = ReflectPadLayer(1)(x)
    # BUG FIX: the original wrote "padding='valid' if stride == 1 else
    # 'valid'" (both branches identical).  With stride 1 a 3x3 VALID conv
    # shrinks the feature map by 2 pixels, which would break the residual
    # Add below whenever in_channels == out_channels; 'same' keeps shapes
    # aligned for stride 1, while stride 2 keeps 'valid' (pad added above).
    x = tf.keras.layers.DepthwiseConv2D(kernel_size=3, strides=stride, activation=None, use_bias=False, padding='same' if stride == 1 else 'valid')(x)
    x = norm_layer()(x)
    x = tf.keras.layers.ReLU(6.)(x)
    # Linear projection back down to out_channels (no activation).
    x = tf.keras.layers.Conv2D(out_channels, kernel_size=1, padding='same', use_bias=False, activation=None)(x)
    x = norm_layer()(x)
    if in_channels == out_channels and stride == 1:
        return keras.layers.Add()([inputs, x])
    return x
def make_models():
    """build all the models based on the arguments provided via absl
    Returns: encoder_model, decoder_model, discriminator_model
    """
    # Route on --model, falling back to --training_method when "default".
    # (FLAGS.training_method is declared elsewhere in the project.)
    if FLAGS.model == "default" and FLAGS.training_method == "adaptive_st" or FLAGS.model == "adaptive_st":
        logging.info("define adaptive_st model")
        if FLAGS.norm_layer == "instance_norm" or FLAGS.norm_layer == "default":
            norm_layer = InstanceNormLayer
        else:
            norm_layer = tf.keras.layers.BatchNormalization
            logging.warning("Use unusual norm layer for this model")
        if FLAGS.increase_size_layer_type == "default" or FLAGS.increase_size_layer_type == "unpool":
            increase_size_layer = UnpoolLayer
        else:
            # Non-default upsampling is declared but not wired in yet: the
            # assignment is unused and the branch raises immediately.
            increase_size_layer = tf.keras.layers.Conv2DTranspose
            raise Exception("Not yet implemented")
        discriminator_model = make_discriminator_model_adaptive_style_transfer(norm_layer)
        if not FLAGS.mobilenet:
            encoder_model = make_encoder_model_adaptive_style_transfer(norm_layer)
            decoder_model = make_decoder_model_adaptive_style_transfer(encoder_model.output_shape[1:], norm_layer)
        else:
            logging.info("Use mobilenet version")
            encoder_model = make_encoder_model_mobilenet(norm_layer)
            decoder_model = make_decoder_model_mobilenet(encoder_model.output_shape[1:], norm_layer)
    else:
        logging.info("define cartoon_gan model")
        if FLAGS.norm_layer == "batch_norm" or FLAGS.norm_layer == "default":
            norm_layer = tf.keras.layers.BatchNormalization
        else:
            norm_layer = InstanceNormLayer
            logging.warning("Use unusual norm layer for this model")
        if FLAGS.increase_size_layer_type == "default" or FLAGS.increase_size_layer_type == "deconv":
            increase_size_layer = tf.keras.layers.Conv2DTranspose
        else:
            # Same as above: unpool upsampling for cartoon GAN is unsupported.
            increase_size_layer = UnpoolLayer
            raise Exception("Not yet implemented")
        encoder_model = make_encoder_model_cartoon(norm_layer)
        decoder_model = make_decoder_model_cartoon(encoder_model.output_shape[1:], norm_layer)
        discriminator_model = make_discriminator_model_cartoon(norm_layer)
    return encoder_model, decoder_model, discriminator_model
def make_encoder_model_adaptive_style_transfer(norm_layer):
    """encoder model following https://arxiv.org/pdf/1807.10201.pdf
    Returns: encoder model
    """
    model = keras.Sequential(name="Encoder")
    model.add(norm_layer(input_shape=(FLAGS.image_size, FLAGS.image_size, 3), dtype=tf.float32))
    model.add(ReflectPadLayer(15))
    # (filters, stride) per stage: one stride-1 stem, then four stride-2
    # stages doubling the channel count from the second onwards.
    stages = [(FLAGS.n_filter_generator, 1),
              (FLAGS.n_filter_generator, 2),
              (FLAGS.n_filter_generator * 2, 2),
              (FLAGS.n_filter_generator * 4, 2),
              (FLAGS.n_filter_generator * 8, 2)]
    for filters, stride in stages:
        model.add(keras.layers.Conv2D(filters, 3, stride, 'VALID', kernel_regularizer=keras.regularizers.l2(FLAGS.l2_reg)))
        model.add(norm_layer())
        model.add(keras.layers.Activation("relu"))
    return model
def make_decoder_model_adaptive_style_transfer(input_shape, norm_layer):
    """decoder model following https://arxiv.org/pdf/1807.10201.pdf
    Returns: decoder model
    """
    x = keras.layers.Input(shape=input_shape, dtype=tf.float32)
    inputs = x
    def residual_block(x, dim, kernel_size=3, s=1):
        # Two pad+conv+norm layers with a skip connection; padding keeps the
        # spatial size constant so the Add is shape-compatible.
        pad = int((kernel_size - 1) / 2)
        y = ReflectPadLayer(pad)(x)
        y = keras.layers.Activation("relu")(norm_layer()(keras.layers.Conv2D(dim, kernel_size, s, 'VALID', kernel_regularizer=keras.regularizers.l2(FLAGS.l2_reg))(y)))
        y = ReflectPadLayer(pad)(y)
        y = norm_layer()(keras.layers.Conv2D(dim, kernel_size, s, 'VALID', kernel_regularizer=keras.regularizers.l2(FLAGS.l2_reg))(y))
        return keras.layers.Add()([x, y])
    # Now stack 9 residual blocks
    num_kernels = FLAGS.n_filter_generator * 8
    for i in range(9):
        x = residual_block(x, num_kernels)
    # Decode image: 4 stages of 2x unpooling, halving the channel count.
    for i in range(4):
        x = UnpoolLayer()(x)
        x = keras.layers.Conv2D(FLAGS.n_filter_generator * 2 ** (3 - i), 3, 1, "SAME", kernel_regularizer=keras.regularizers.l2(FLAGS.l2_reg))(x)
        x = keras.layers.Activation("relu")(norm_layer()(x))
    # Final 7x7 conv to RGB, squashed to [0, 1] then recentered to [-1, 1].
    x = ReflectPadLayer(3)(x)
    x = keras.layers.Activation("sigmoid")(keras.layers.Conv2D(3, 7, 1, "VALID", kernel_regularizer=keras.regularizers.l2(FLAGS.l2_reg))(x))
    x = CenterLayer()(x)
    model = keras.Model(inputs=inputs, outputs=x, name="Decoder")
    return model
def make_encoder_model_mobilenet(norm_layer):
    """Mobilenet-flavoured encoder using depthwise separable convolutions.

    Args:
        norm_layer: normalization layer class selected by the flags.
    Returns: encoder keras.Model.
    """
    x = keras.layers.Input(shape=(FLAGS.image_size, FLAGS.image_size, 3), dtype=tf.float32)
    inputs = x
    x = norm_layer()(x)
    x = ReflectPadLayer(15)(x)
    def add_conv(n_filter_new, strides, x):
        # Depthwise separable stage: depthwise 3x3 then pointwise 1x1.
        x = keras.layers.DepthwiseConv2D(3, strides=strides, use_bias=False)(x)
        # CONSISTENCY FIX: use the norm_layer passed in (selected by
        # --norm_layer) instead of hard-coding InstanceNormLayer, which
        # silently ignored the flag.
        x = norm_layer()(x)
        x = tf.keras.layers.Activation(relu6)(x)
        x = keras.layers.Conv2D(n_filter_new, 1, 1, activation=relu6, use_bias=False, kernel_regularizer=keras.regularizers.l2(FLAGS.l2_reg))(x)
        x = norm_layer()(x)
        x = tf.keras.layers.Activation(relu6)(x)
        return x
    # First stage is a plain (full) convolution.
    x = keras.layers.Conv2D(FLAGS.n_filter_generator, 3, 1, 'VALID', kernel_regularizer=keras.regularizers.l2(FLAGS.l2_reg))(x)
    x = norm_layer()(x)
    x = keras.layers.Activation(relu6)(x)
    # Four downsampling separable stages, doubling channels after the first.
    x = add_conv(FLAGS.n_filter_generator, 2, x)
    x = add_conv(FLAGS.n_filter_generator * 2, 2, x)
    x = add_conv(FLAGS.n_filter_generator * 4, 2, x)
    x = add_conv(FLAGS.n_filter_generator * 8, 2, x)
    model = keras.Model(inputs=inputs, outputs=x, name="Encoder")
    return model
def make_decoder_model_mobilenet(input_shape, norm_layer):
    """Mobilenet-flavoured decoder: inverted-residual trunk + unpool upsampling.

    Args:
        input_shape: encoder output shape (excluding the batch dimension).
        norm_layer: normalization layer class selected by the flags.
    Returns: decoder keras.Model mapping features back to a [-1, 1] RGB image.
    """
    x = keras.layers.Input(shape=input_shape, dtype=tf.float32)
    inputs = x
    # Residual trunk: 9 iterations of two stride-1 inverted residual blocks.
    # (Unused locals kernel_size/pad from the original were removed.)
    num_kernels = FLAGS.n_filter_generator * 8
    for i in range(9):
        x = inverted_res_block(x, 1, num_kernels, num_kernels, norm_layer)
        x = inverted_res_block(x, 1, num_kernels, num_kernels, norm_layer)
    # Decode image: 4 stages of 2x unpooling, halving the channel count.
    for i in range(4):
        x = UnpoolLayer()(x)
        x = inverted_res_block(x, 1, FLAGS.n_filter_generator * 2 ** (3 - i + 1), FLAGS.n_filter_generator * 2 ** (3 - i), norm_layer)
    # Final 7x7 conv to RGB, squashed to [0, 1] then recentered to [-1, 1].
    x = ReflectPadLayer(3)(x)
    x = keras.layers.Activation("sigmoid")(keras.layers.Conv2D(3, 7, 1, "VALID", kernel_regularizer=keras.regularizers.l2(FLAGS.l2_reg))(x))
    x = CenterLayer()(x)
    model = keras.Model(inputs=inputs, outputs=x, name="Decoder")
    return model
def make_discriminator_model_adaptive_style_transfer(norm_layer):
    """
    Discriminator agent, that provides us with information about image plausibility at different scales.
    Returns:
        Image estimates at different scales.
    """
    def down(tensor, filters):
        # Shared stride-2 stage: conv 5x5 -> norm -> leaky ReLU.
        conv = keras.layers.Conv2D(filters, 5, 2, kernel_regularizer=keras.regularizers.l2(FLAGS.l2_reg))(tensor)
        return keras.layers.LeakyReLU()(norm_layer()(conv))
    nf = FLAGS.n_filter_discriminator
    image = keras.layers.Input(shape=(FLAGS.image_size, FLAGS.image_size, 3), dtype=tf.float32)
    h0 = down(image, nf * 2)
    h0_pred = keras.layers.Conv2D(1, 5)(h0)
    h1 = down(h0, nf * 2)
    h1_pred = keras.layers.Conv2D(1, 10)(h1)
    h2 = down(h1, nf * 4)
    h3 = down(h2, nf * 8)
    h3_pred = keras.layers.Conv2D(1, 10)(h3)
    h4 = down(h3, nf * 8)
    h5 = down(h4, nf * 16)
    h5_pred = keras.layers.Conv2D(1, 6)(h5)
    h6 = down(h5, nf * 16)
    h6_pred = keras.layers.Conv2D(1, 3)(h6)
    # Multi-scale plausibility estimates (h2 and h4 are internal only).
    model = keras.Model(inputs=image, outputs=[h0_pred, h1_pred, h3_pred, h5_pred, h6_pred], name="Discriminator")
    return model
def make_transformer_model():
    """
    This is a simplified version of transformer block described in the paper
    https://arxiv.org/abs/1807.10201.
    Returns:
        Transformed tensor
    """
    model = keras.Sequential(name="Transformer")
    # Stride-1 'same' average pooling acts as a smoothing (low-pass) kernel
    # of size --transformer_kernel_size.
    model.add(keras.layers.AvgPool2D(FLAGS.transformer_kernel_size, strides=1, padding="same"))
    return model
def make_encoder_model_cartoon(norm_layer):
    """
    Follow the description in the paper http://openaccess.thecvf.com/content_cvpr_2018/papers/Chen_CartoonGAN_Generative_Adversarial_CVPR_2018_paper.pdf
    """
    inputs = tf.keras.Input(shape=(FLAGS.image_size, FLAGS.image_size, 3))
    # Flat convolution stage: 7x7 conv keeping the spatial size.
    features = tf.keras.layers.Conv2D(64, 7, padding="same")(inputs)
    features = norm_layer()(features)
    features = tf.keras.layers.ReLU()(features)
    # Two down-convolution stages, each halving the spatial size.
    for filters in (128, 256):
        features = tf.keras.layers.Conv2D(filters, 3, 2, padding="same")(features)
        features = tf.keras.layers.Conv2D(filters, 3, 1, padding="same")(features)
        features = norm_layer()(features)
        features = tf.keras.layers.ReLU()(features)
    return tf.keras.Model(inputs=[inputs], outputs=[features], name="Encoder")
def make_decoder_model_cartoon(input_shape, norm_layer):
    """Cartoon-GAN decoder: 8 residual blocks then two up-convolution stages."""
    inputs = tf.keras.Input(shape=input_shape)
    features = inputs
    # Residual trunk of 8 identical blocks at 256 channels.
    for _ in range(8):
        shortcut = features
        features = tf.keras.layers.Conv2D(256, 3, 1, padding="same")(features)
        features = norm_layer()(features)
        features = tf.keras.layers.ReLU()(features)
        features = tf.keras.layers.Conv2D(256, 3, 1, padding="same")(features)
        features = norm_layer()(features)
        features = tf.keras.layers.Add()([features, shortcut])
    # Two up-convolution stages, each doubling the spatial size.
    for filters in (128, 64):
        features = tf.keras.layers.Conv2DTranspose(filters, 3, 2, padding="same")(features)
        features = tf.keras.layers.Conv2D(filters, 3, 1, padding="same")(features)
        features = norm_layer()(features)
        features = tf.keras.layers.ReLU()(features)
    # Final 7x7 conv back to 3 RGB channels (no activation).
    features = tf.keras.layers.Conv2D(3, 7, padding="same")(features)
    return tf.keras.Model(inputs=[inputs], outputs=[features], name="Decoder")
def make_discriminator_model_cartoon(norm_layer):
    """Cartoon-GAN patch discriminator.

    Returns: keras.Model mapping an image to a 1-channel patch logit map.
    """
    x = tf.keras.Input(shape=(FLAGS.image_size, FLAGS.image_size, 3))
    x_input = x
    # Flat convolution.
    x = tf.keras.layers.Conv2D(32, 3, 1, "same")(x)
    x = tf.keras.layers.LeakyReLU(0.2)(x)
    # Down convolution stages.
    for n_filters in [64, 128]:
        x = tf.keras.layers.Conv2D(n_filters, 3, 2, "same")(x)
        x = tf.keras.layers.LeakyReLU(0.2)(x)
        x = tf.keras.layers.Conv2D(n_filters * 2, 3, 1, "same")(x)
        x = norm_layer()(x)
        x = tf.keras.layers.LeakyReLU(0.2)(x)
    x = tf.keras.layers.Conv2D(256, 3, 1, "same")(x)
    x = norm_layer()(x)
    x = tf.keras.layers.LeakyReLU(0.2)(x)
    # Final 1-channel patch logit map.
    x = tf.keras.layers.Conv2D(1, 3, 1, "same")(x)
    # BUG FIX: this model was mis-named "generator" (copy-paste); it is the
    # discriminator.
    model = tf.keras.Model(inputs=[x_input], outputs=[x], name="discriminator")
    return model
def VGG19():
    """Build VGG19 with pretrained ImageNet weights (no classification head)
    and return a feature-extractor sub-model whose output is the
    'block4_conv4' activation map (typically used for perceptual losses).

    Returns a tf.keras.Model named 'VGG' mapping an image of
    (FLAGS.image_size, FLAGS.image_size, 3) to the block4_conv4 features.
    """
    layers = tf.keras.layers
    img_input = layers.Input(shape=(FLAGS.image_size, FLAGS.image_size, 3))
    # Block 1
    x = layers.Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1')(img_input)
    x = layers.Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x)
    x = layers.MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x)
    # Block 2
    x = layers.Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1')(x)
    x = layers.Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x)
    x = layers.MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x)
    # Block 3
    x = layers.Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1')(x)
    x = layers.Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x)
    x = layers.Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x)
    x = layers.Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv4')(x)
    x = layers.MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x)
    # Block 4
    x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv1')(x)
    x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x)
    x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x)
    x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv4')(x)
    save_x = x  # tap point for the returned feature extractor
    # Block 5 -- built so the full no-top weight file loads without mismatch;
    # its activations are not part of the returned sub-model.
    x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv1')(x)
    x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x)
    x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x)
    x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv4')(x)
    # Bug fix: this architecture is VGG19, not VGG16 -- name it accordingly.
    # (The name is only metadata on this local model; weights load by topology.)
    model = tf.keras.models.Model(img_input, x, name='vgg19')
    # Load pretrained ImageNet weights (no-top variant).
    weights_path_no_top = ('https://github.com/fchollet/deep-learning-models/'
                           'releases/download/v0.1/'
                           'vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5')
    weights_path = tf.keras.utils.get_file(
        'vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5',
        weights_path_no_top,
        cache_subdir='models',
        file_hash='253f8cb515780f3b799900260a226db6')
    model.load_weights(weights_path)
    sub_model = tf.keras.models.Model(img_input, save_x, name='VGG')
    return sub_model
| 40.839662 | 171 | 0.667218 | from absl import logging, flags
import tensorflow as tf
from tensorflow import keras
from tensorflow.python.keras.utils import tf_utils
FLAGS = flags.FLAGS
flags.DEFINE_enum("increase_size_layer_type", "default", ["default", "unpool", "deconv"],
"Type of layer to use in the decoder part. Default use the type specified in research paper (unpool for adaptive st, deconv for cartoon GAN)")
flags.DEFINE_enum("norm_layer", "default", ["default", "instance_norm", "batch_norm"],
"Type of layer to use for normalization. Default use the type specified in research paper (instance_norm for adaptive st, batch_norm for cartoon GAN)")
flags.DEFINE_bool("mobilenet", False, "Build model with mobilenet optimization (depthwise convolution...)")
flags.DEFINE_enum("model", "default", ["default", "adaptive_st", "cartoon_gan"],
"Model topology to use. If default then use the topology corresponding to training_method")
flags.DEFINE_integer("n_filter_generator", 32, "Number of filters in first conv layer of generator (encoder-decoder)")
flags.DEFINE_integer("n_filter_discriminator", 64, "Number of filters in first conv layer of discriminator")
flags.DEFINE_float("l2_reg", 0.001, "l2 regularization weigh to apply")
flags.DEFINE_integer("transformer_kernel_size", 10, "Size of kernel we apply to the input_tensor in the transformer model if using adaptive_st training method")
_shape[3]
def get_config(self):
base_config = super(UnpoolLayer, self).get_config()
return base_config
@classmethod
def from_config(cls, config):
return cls(**config)
def build(self, input_shape):
super().build(input_shape)
def call(self, inputs):
return tf.image.resize(inputs, tf.shape(inputs)[1:3] * 2, method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
class InstanceNormLayer(keras.layers.Layer):
    """Instance normalization: normalizes each sample over its spatial axes
    (H, W) independently, then applies a learned per-channel scale/offset."""
    def __init__(self, **kwargs):
        # Small constant added to the variance for numerical stability.
        self.epsilon = 1e-5
        super().__init__(**kwargs)
    def compute_output_shape(self, input_shape):
        # Normalization is shape-preserving (NHWC in, NHWC out).
        shape = tf.TensorShape(input_shape).as_list()
        return tf.TensorShape([shape[0], shape[1], shape[2], shape[3]])
    def build(self, input_shape):
        # One scale and one offset per channel (last axis).
        depth = (input_shape[3],)
        self.scale = self.add_weight(shape=depth,
                                     name='gamma',
                                     initializer=keras.initializers.get('ones'))
        # NOTE(review): this offset weight is also named 'gamma' -- almost
        # certainly a copy-paste slip (conventional name is 'beta'). Left
        # unchanged here because renaming could break by-name checkpoint
        # loading; confirm before fixing.
        self.offset = self.add_weight(shape=depth,
                                      name='gamma',
                                      initializer=keras.initializers.get('zeros'))
        super().build(input_shape)
    def call(self, inputs):
        # Per-sample, per-channel moments over the spatial axes (1, 2).
        mean, variance = tf.nn.moments(inputs, axes=[1, 2], keepdims=True)
        inv = tf.math.rsqrt(variance + self.epsilon)
        normalized = (inputs - mean) * inv
        return self.scale * normalized + self.offset
class ReflectPadLayer(keras.layers.Layer):
    """Pad the spatial axes of an NHWC tensor by `pad_size` on each side.

    NOTE(review): despite the class name, tf.pad is called with mode
    "SYMMETRIC" (edge pixels mirrored including themselves), not "REFLECT" --
    confirm which mode was intended before renaming or changing it.
    """
    def __init__(self, pad_size, **kwargs):
        # Number of pixels added on every spatial border.
        self.pad_size = pad_size
        super().__init__(**kwargs)
    def compute_output_shape(self, input_shape):
        # Height and width grow by 2 * pad_size; batch and channels unchanged.
        shape = tf.TensorShape(input_shape).as_list()
        return tf.TensorShape([shape[0], shape[1] + self.pad_size * 2, shape[2] + self.pad_size * 2, shape[3]])
    def build(self, input_shape):
        super().build(input_shape)
    def call(self, inputs):
        # Pad only the two spatial axes of the NHWC input.
        return tf.pad(inputs, [[0, 0], [self.pad_size, self.pad_size], [self.pad_size, self.pad_size], [0, 0]], "SYMMETRIC")
class CenterLayer(keras.layers.Layer):
    """Affine rescaling layer mapping values in [0, 1] to [-1, 1]."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    def build(self, input_shape):
        super().build(input_shape)

    def call(self, inputs):
        # Recenter: 2 * v - 1 maps [0, 1] onto [-1, 1].
        return 2. * inputs - 1.
x = tf.keras.layers.Conv2D(expansion * in_channels, kernel_size=1, padding='same', use_bias=False, activation=None)(x)
x = norm_layer()(x)
x = tf.keras.layers.ReLU(6.)(x)
if stride == 2:
x = norm_layer()(x)
x = ReflectPadLayer(1)(x)
x = tf.keras.layers.DepthwiseConv2D(kernel_size=3, strides=stride, activation=None, use_bias=False, padding='valid' if stride == 1 else 'valid')(x)
x = norm_layer()(x)
x = tf.keras.layers.ReLU(6.)(x)
x = tf.keras.layers.Conv2D(out_channels, kernel_size=1, padding='same', use_bias=False, activation=None)(x)
x = norm_layer()(x)
if in_channels == out_channels and stride == 1:
return keras.layers.Add()([inputs, x])
return x
def make_models():
    """Construct (encoder, decoder, discriminator) according to FLAGS.

    Selects the adaptive style-transfer topology when FLAGS.model is
    "adaptive_st" (or "default" with training_method "adaptive_st"),
    otherwise the cartoon-GAN topology. Raises if a non-default
    upsampling layer type is requested (not implemented yet).
    """
    # Note operator precedence: `and` binds tighter than `or`, so this reads
    # (model == "default" and training_method == "adaptive_st") or model == "adaptive_st".
    if FLAGS.model == "default" and FLAGS.training_method == "adaptive_st" or FLAGS.model == "adaptive_st":
        logging.info("define adaptive_st model")
        if FLAGS.norm_layer == "instance_norm" or FLAGS.norm_layer == "default":
            norm_layer = InstanceNormLayer
        else:
            norm_layer = tf.keras.layers.BatchNormalization
            logging.warning("Use unusual norm layer for this model")
        if FLAGS.increase_size_layer_type == "default" or FLAGS.increase_size_layer_type == "unpool":
            increase_size_layer = UnpoolLayer
        else:
            # Deconv upsampling for adaptive_st is deliberately unsupported;
            # the assignment above is unreachable in effect.
            increase_size_layer = tf.keras.layers.Conv2DTranspose
            raise Exception("Not yet implemented")
        discriminator_model = make_discriminator_model_adaptive_style_transfer(norm_layer)
        if not FLAGS.mobilenet:
            encoder_model = make_encoder_model_adaptive_style_transfer(norm_layer)
            decoder_model = make_decoder_model_adaptive_style_transfer(encoder_model.output_shape[1:], norm_layer)
        else:
            logging.info("Use mobilenet version")
            encoder_model = make_encoder_model_mobilenet(norm_layer)
            decoder_model = make_decoder_model_mobilenet(encoder_model.output_shape[1:], norm_layer)
    else:
        logging.info("define cartoon_gan model")
        if FLAGS.norm_layer == "batch_norm" or FLAGS.norm_layer == "default":
            norm_layer = tf.keras.layers.BatchNormalization
        else:
            norm_layer = InstanceNormLayer
            logging.warning("Use unusual norm layer for this model")
        if FLAGS.increase_size_layer_type == "default" or FLAGS.increase_size_layer_type == "deconv":
            increase_size_layer = tf.keras.layers.Conv2DTranspose
        else:
            # Unpool upsampling for cartoon GAN is likewise unsupported.
            increase_size_layer = UnpoolLayer
            raise Exception("Not yet implemented")
        # NOTE(review): `increase_size_layer` is assigned but never used below;
        # the builders hard-code their upsampling layers.
        encoder_model = make_encoder_model_cartoon(norm_layer)
        decoder_model = make_decoder_model_cartoon(encoder_model.output_shape[1:], norm_layer)
        discriminator_model = make_discriminator_model_cartoon(norm_layer)
    return encoder_model, decoder_model, discriminator_model
def make_encoder_model_adaptive_style_transfer(norm_layer):
    """Build the Sequential encoder for the adaptive style-transfer model:
    input normalization, reflection padding, then five conv -> norm -> ReLU
    stages with increasing channel counts.

    norm_layer -- normalization layer class instantiated between convs.
    """
    base = FLAGS.n_filter_generator
    encoder = keras.Sequential(name="Encoder")
    encoder.add(norm_layer(input_shape=(FLAGS.image_size, FLAGS.image_size, 3), dtype=tf.float32))
    # 'VALID' convs below shrink the map; this padding compensates.
    encoder.add(ReflectPadLayer(15))
    # (channel count, stride) for each of the five conv stages.
    for n_filter, strides in [(base, 1), (base, 2), (base * 2, 2), (base * 4, 2), (base * 8, 2)]:
        encoder.add(keras.layers.Conv2D(n_filter, 3, strides, 'VALID',
                                        kernel_regularizer=keras.regularizers.l2(FLAGS.l2_reg)))
        encoder.add(norm_layer())
        encoder.add(keras.layers.Activation("relu"))
    return encoder
def make_decoder_model_adaptive_style_transfer(input_shape, norm_layer):
    """Decoder for the adaptive style-transfer model: nine reflect-padded
    residual blocks, four unpool + conv upsampling stages, then a sigmoid
    7x7 projection recentred from [0, 1] to [-1, 1].

    input_shape -- encoder output shape (without the batch axis).
    norm_layer  -- normalization layer class instantiated between convs.
    """
    inputs = keras.layers.Input(shape=input_shape, dtype=tf.float32)

    def regularized_conv(tensor, dim, kernel_size, stride):
        return keras.layers.Conv2D(dim, kernel_size, stride, 'VALID',
                                   kernel_regularizer=keras.regularizers.l2(FLAGS.l2_reg))(tensor)

    def residual_block(tensor, dim, kernel_size=3, s=1):
        # Reflect-pad so the 'VALID' convs keep the spatial size unchanged.
        pad = (kernel_size - 1) // 2
        branch = ReflectPadLayer(pad)(tensor)
        branch = regularized_conv(branch, dim, kernel_size, s)
        branch = keras.layers.Activation("relu")(norm_layer()(branch))
        branch = ReflectPadLayer(pad)(branch)
        branch = norm_layer()(regularized_conv(branch, dim, kernel_size, s))
        return keras.layers.Add()([tensor, branch])

    net = inputs
    depth = FLAGS.n_filter_generator * 8
    for _ in range(9):
        net = residual_block(net, depth)
    # Upsampling: nearest-neighbour unpool then conv, halving channels each step.
    for step in range(4):
        net = UnpoolLayer()(net)
        net = keras.layers.Conv2D(FLAGS.n_filter_generator * 2 ** (3 - step), 3, 1, "SAME",
                                  kernel_regularizer=keras.regularizers.l2(FLAGS.l2_reg))(net)
        net = keras.layers.Activation("relu")(norm_layer()(net))
    net = ReflectPadLayer(3)(net)
    net = keras.layers.Activation("sigmoid")(regularized_conv(net, 3, 7, 1))
    net = CenterLayer()(net)
    return keras.Model(inputs=inputs, outputs=net, name="Decoder")
def make_encoder_model_mobilenet(norm_layer):
    """MobileNet-flavoured encoder: an initial 3x3 conv followed by four
    depthwise-separable downsampling stages with increasing channel counts.

    norm_layer -- normalization layer class for the stem; note the
    separable stages below hard-code InstanceNormLayer instead.
    """
    x = keras.layers.Input(shape=(FLAGS.image_size, FLAGS.image_size, 3), dtype=tf.float32)
    inputs = x
    x = norm_layer()(x)
    # Large symmetric padding to compensate for the 'VALID' stem conv.
    x = ReflectPadLayer(15)(x)
    def add_conv(n_filter_new, strides, x):
        # Depthwise 3x3 then 1x1 pointwise conv (separable convolution).
        # NOTE(review): relu6 is not defined in this file's visible portion --
        # presumably a tf.nn.relu6 wrapper defined elsewhere; confirm.
        # NOTE(review): InstanceNormLayer is hard-coded here, ignoring the
        # norm_layer argument -- verify this is intentional.
        x = keras.layers.DepthwiseConv2D(3, strides=strides, use_bias=False)(x)
        x = InstanceNormLayer()(x)
        x = tf.keras.layers.Activation(relu6)(x)
        x = keras.layers.Conv2D(n_filter_new, 1, 1, activation=relu6, use_bias=False, kernel_regularizer=keras.regularizers.l2(FLAGS.l2_reg))(x)
        x = InstanceNormLayer()(x)
        x = tf.keras.layers.Activation(relu6)(x)
        return x
    # Stem convolution.
    x = keras.layers.Conv2D(FLAGS.n_filter_generator, 3, 1, 'VALID', kernel_regularizer=keras.regularizers.l2(FLAGS.l2_reg))(x)
    x = norm_layer()(x)
    x = keras.layers.Activation(relu6)(x)
    # Four stride-2 separable stages: channels grow 1x, 2x, 4x, 8x.
    x = add_conv(FLAGS.n_filter_generator, 2, x)
    x = add_conv(FLAGS.n_filter_generator * 2, 2, x)
    x = add_conv(FLAGS.n_filter_generator * 4, 2, x)
    x = add_conv(FLAGS.n_filter_generator * 8, 2, x)
    model = keras.Model(inputs=inputs, outputs=x, name="Encoder")
    return model
def make_decoder_model_mobilenet(input_shape, norm_layer):
    """MobileNet-style decoder: pairs of inverted residual blocks, then four
    unpool + inverted-residual upsampling stages, reflect padding and a
    sigmoid-activated 7x7 conv recentred from [0, 1] to [-1, 1].

    input_shape -- encoder output shape (without the batch axis).
    norm_layer  -- normalization layer class passed to the residual blocks.
    """
    # Cleanup: removed unused locals `kernel_size` and `pad` from the
    # original (computed but never referenced).
    x = keras.layers.Input(shape=input_shape, dtype=tf.float32)
    inputs = x
    num_kernels = FLAGS.n_filter_generator * 8
    # NOTE(review): each iteration stacks two inverted residual blocks,
    # i.e. 18 blocks total -- confirm this matches the intended depth.
    for i in range(9):
        x = inverted_res_block(x, 1, num_kernels, num_kernels, norm_layer)
        x = inverted_res_block(x, 1, num_kernels, num_kernels, norm_layer)
    # Upsampling: unpool then halve the channel count each step.
    for i in range(4):
        x = UnpoolLayer()(x)
        x = inverted_res_block(x, 1, FLAGS.n_filter_generator * 2 ** (3 - i + 1), FLAGS.n_filter_generator * 2 ** (3 - i), norm_layer)
    x = ReflectPadLayer(3)(x)
    x = keras.layers.Activation("sigmoid")(keras.layers.Conv2D(3, 7, 1, "VALID", kernel_regularizer=keras.regularizers.l2(FLAGS.l2_reg))(x))
    x = CenterLayer()(x)
    model = keras.Model(inputs=inputs, outputs=x, name="Decoder")
    return model
def make_discriminator_model_adaptive_style_transfer(norm_layer):
    """Multi-scale discriminator for the adaptive style-transfer model.

    A pyramid of stride-2 5x5 conv / norm / LeakyReLU stages; 1-channel
    predictions are tapped at five depths and returned together.
    """
    nf = FLAGS.n_filter_discriminator

    def down(tensor, n_filter):
        # One stride-2 downsampling stage.
        conv = keras.layers.Conv2D(n_filter, 5, 2,
                                   kernel_regularizer=keras.regularizers.l2(FLAGS.l2_reg))(tensor)
        return keras.layers.LeakyReLU()(norm_layer()(conv))

    image = keras.layers.Input(shape=(FLAGS.image_size, FLAGS.image_size, 3), dtype=tf.float32)
    h0 = down(image, nf * 2)
    h0_pred = keras.layers.Conv2D(1, 5)(h0)
    h1 = down(h0, nf * 2)
    h1_pred = keras.layers.Conv2D(1, 10)(h1)
    h2 = down(h1, nf * 4)
    h3 = down(h2, nf * 8)
    h3_pred = keras.layers.Conv2D(1, 10)(h3)
    h4 = down(h3, nf * 8)
    h5 = down(h4, nf * 16)
    h5_pred = keras.layers.Conv2D(1, 6)(h5)
    h6 = down(h5, nf * 16)
    h6_pred = keras.layers.Conv2D(1, 3)(h6)
    # h2 and h4 only feed deeper stages; no prediction is tapped there.
    return keras.Model(inputs=image,
                       outputs=[h0_pred, h1_pred, h3_pred, h5_pred, h6_pred],
                       name="Discriminator")
def make_transformer_model():
    """Transformer block: a single stride-1 average-pooling blur whose
    kernel size comes from FLAGS.transformer_kernel_size."""
    blur = keras.layers.AvgPool2D(FLAGS.transformer_kernel_size, strides=1, padding="same")
    return keras.Sequential([blur], name="Transformer")
def make_encoder_model_cartoon(norm_layer):
    """Cartoon-GAN encoder: a flat 7x7 conv stage followed by two stride-2
    down-convolution stages (128 then 256 channels).

    norm_layer -- normalization layer class instantiated between convs.
    """
    layers = tf.keras.layers
    image = tf.keras.Input(shape=(FLAGS.image_size, FLAGS.image_size, 3))
    # Flat convolution stage.
    net = layers.Conv2D(64, 7, padding="same")(image)
    net = norm_layer()(net)
    net = layers.ReLU()(net)
    # Two downsampling stages: stride-2 conv then stride-1 refinement conv.
    for width in (128, 256):
        net = layers.Conv2D(width, 3, 2, padding="same")(net)
        net = layers.Conv2D(width, 3, 1, padding="same")(net)
        net = norm_layer()(net)
        net = layers.ReLU()(net)
    return tf.keras.Model(inputs=[image], outputs=[net], name="Encoder")
def make_decoder_model_cartoon(input_shape, norm_layer):
x = tf.keras.Input(shape=input_shape)
x_input = x
for _ in range(8):
x_residual = x
x = tf.keras.layers.Conv2D(256, 3, 1, padding="same")(x)
x = norm_layer()(x)
x = tf.keras.layers.ReLU()(x)
x = tf.keras.layers.Conv2D(256, 3, 1, padding="same")(x)
x = norm_layer()(x)
x = tf.keras.layers.Add()([x, x_residual])
for n_filters in [128, 64]:
x = tf.keras.layers.Conv2DTranspose(n_filters, 3, 2, padding="same")(x)
x = tf.keras.layers.Conv2D(n_filters, 3, 1, padding="same")(x)
x = norm_layer()(x)
x = tf.keras.layers.ReLU()(x)
x = tf.keras.layers.Conv2D(3, 7, padding="same")(x)
model = tf.keras.Model(inputs=[x_input], outputs=[x], name="Decoder")
return model
def make_discriminator_model_cartoon(norm_layer):
x = tf.keras.Input(shape=(FLAGS.image_size, FLAGS.image_size, 3))
x_input = x
x = tf.keras.layers.Conv2D(32, 3, 1, "same")(x)
x = tf.keras.layers.LeakyReLU(0.2)(x)
for n_filters in [64, 128]:
x = tf.keras.layers.Conv2D(n_filters, 3, 2, "same")(x)
x = tf.keras.layers.LeakyReLU(0.2)(x)
x = tf.keras.layers.Conv2D(n_filters * 2, 3, 1, "same")(x)
x = norm_layer()(x)
x = tf.keras.layers.LeakyReLU(0.2)(x)
x = tf.keras.layers.Conv2D(256, 3, 1, "same")(x)
x = norm_layer()(x)
x = tf.keras.layers.LeakyReLU(0.2)(x)
x = tf.keras.layers.Conv2D(1, 3, 1, "same")(x)
model = tf.keras.Model(inputs=[x_input], outputs=[x], name="generator")
return model
def VGG19():
layers = tf.keras.layers
img_input = layers.Input(shape=(FLAGS.image_size, FLAGS.image_size, 3))
x = layers.Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1')(img_input)
x = layers.Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x)
x = layers.MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x)
x = layers.Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1')(x)
x = layers.Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x)
x = layers.MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x)
x = layers.Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1')(x)
x = layers.Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x)
x = layers.Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x)
x = layers.Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv4')(x)
x = layers.MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x)
x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv1')(x)
x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x)
x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x)
x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv4')(x)
save_x = x
x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv1')(x)
x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x)
x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x)
x = layers.Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv4')(x)
model = tf.keras.models.Model(img_input, x, name='vgg16')
weights_path_no_top = ('https://github.com/fchollet/deep-learning-models/'
'releases/download/v0.1/'
'vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5')
weights_path = tf.keras.utils.get_file(
'vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5',
weights_path_no_top,
cache_subdir='models',
file_hash='253f8cb515780f3b799900260a226db6')
model.load_weights(weights_path)
sub_model = tf.keras.models.Model(img_input, save_x, name='VGG')
return sub_model
| true | true |
1c2ff4a9a8a5c5b42363d7b52e47f2bf3c5fb502 | 2,127 | py | Python | chatter/core.py | ii-Python/Chatter-Server-Beta | 24ea0e5946259d659a13d1e53a0220340fcbd7e3 | [
"MIT"
] | null | null | null | chatter/core.py | ii-Python/Chatter-Server-Beta | 24ea0e5946259d659a13d1e53a0220340fcbd7e3 | [
"MIT"
] | null | null | null | chatter/core.py | ii-Python/Chatter-Server-Beta | 24ea0e5946259d659a13d1e53a0220340fcbd7e3 | [
"MIT"
] | null | null | null | # Modules
import socket
from .colors import colored
from .logging import verbose
from .config import load_config
from .client import ClientManager
# Initialization
__version__ = "1.0.32"
__author__ = "Benjamin O'Brien (iiPython)"
# Master class
class ChatterServer(object):
    """Core Chatter server: loads configuration, opens the listening
    socket and hands each incoming connection to a ClientManager."""

    def __init__(self, args):
        # CLI arguments are kept verbatim; config is filled by load_config().
        self.args = args
        self.config = {}

    def _generate_sock(self, host, port, config, max_connections = 5):
        """Create the listening TCP socket and its ClientManager."""
        verbose("Generating new socket...")
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.bind((host, port))
        sock.listen(max_connections)
        verbose("Creating client manager...")
        manager = ClientManager(config)
        verbose("Socket and client manager created.")
        return (sock, manager)

    def load_config(self):
        """Loads configuration info from config.json"""
        verbose("Began loading configuration file.")
        try:
            self.config = load_config()
        except Exception as err:
            # Best-effort: report and hand back an empty config.
            verbose("Failed while loading:", err)
            return {}
        print("Loaded configuration from config.json")
        print()
        # The server name is requested interactively at startup.
        self.config["server_name"] = input("Server name: ")
        return self.config

    def start(self):
        """Launches the Chatter server and begins an infinite response loop"""
        print(f"Chatter Server v{__version__}")
        config = self.load_config()
        print()
        print(f"Listening on {config['host']}:{config['port']}", f"with authentication code {config['code']}" if config["code"] else "")
        print()
        server, internal = self._generate_sock(config["host"], config["port"], config)
        verbose("Server started, listening for requests!")
        # Accept loop: runs until the process is terminated.
        while True:
            conn, addr = server.accept()
            verbose("Received connection from", addr)
            internal.add_client(addr, conn)
| 24.170455 | 136 | 0.615421 |
import socket
from .colors import colored
from .logging import verbose
from .config import load_config
from .client import ClientManager
__version__ = "1.0.32"
__author__ = "Benjamin O'Brien (iiPython)"
# Master class
class ChatterServer(object):
def __init__(self, args):
self.args = args
self.config = {}
def _generate_sock(self, host, port, config, max_connections = 5):
verbose("Generating new socket...")
# Create a new socket
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind((host, port))
server.listen(max_connections)
# Then create a client manager
verbose("Creating client manager...")
clients = ClientManager(config)
verbose("Socket and client manager created.")
return (server, clients)
def load_config(self):
# Load configuration
verbose("Began loading configuration file.")
try:
self.config = load_config()
except Exception as err:
verbose("Failed while loading:", err)
return {}
print("Loaded configuration from config.json")
print()
server_name = input("Server name: ")
self.config["server_name"] = server_name
# Return our data
return self.config
def start(self):
print(f"Chatter Server v{__version__}")
config = self.load_config()
print()
print(f"Listening on {config['host']}:{config['port']}", f"with authentication code {config['code']}" if config["code"] else "")
print()
# Begin master loop
server, internal = self._generate_sock(config["host"], config["port"], config)
verbose("Server started, listening for requests!")
while True:
conn, addr = server.accept()
verbose("Received connection from", addr)
internal.add_client(addr, conn)
| true | true |
1c2ff4dbfd802e4d4eba5c4159798b8dbab43d7d | 1,175 | py | Python | maximum_effort/urls.py | PaulBowden673/Projects-MP-MP4 | a4a176457560e6d5087c823f86da0a88f2ad6537 | [
"W3C",
"PostgreSQL"
] | null | null | null | maximum_effort/urls.py | PaulBowden673/Projects-MP-MP4 | a4a176457560e6d5087c823f86da0a88f2ad6537 | [
"W3C",
"PostgreSQL"
] | null | null | null | maximum_effort/urls.py | PaulBowden673/Projects-MP-MP4 | a4a176457560e6d5087c823f86da0a88f2ad6537 | [
"W3C",
"PostgreSQL"
] | null | null | null | """maximum_effort URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
# Project URL routing table; static() appends media-file serving
# (development convenience) using the configured MEDIA settings.
urlpatterns = [
    path('admin/', admin.site.urls),  # Django admin site
    path('accounts/', include('allauth.urls')),  # django-allauth authentication
    path('', include('home.urls')),
    path('products/', include('products.urls')),
    path('bag/', include('bag.urls')),
    path('checkout/', include('checkout.urls')),
    path('profile/', include('profiles.urls')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 37.903226 | 77 | 0.702128 | from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('accounts/', include('allauth.urls')),
path('', include('home.urls')),
path('products/', include('products.urls')),
path('bag/', include('bag.urls')),
path('checkout/', include('checkout.urls')),
path('profile/', include('profiles.urls')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| true | true |
1c2ff5ae08e63595101cfc142c33334519f8fd9f | 907 | py | Python | tsx/examples/rocket_native_guide.py | MatthiasJakobs/tsx | 8a686ffd0af2f9f826d9ce11349e0fa0e883e897 | [
"MIT"
] | null | null | null | tsx/examples/rocket_native_guide.py | MatthiasJakobs/tsx | 8a686ffd0af2f9f826d9ce11349e0fa0e883e897 | [
"MIT"
] | null | null | null | tsx/examples/rocket_native_guide.py | MatthiasJakobs/tsx | 8a686ffd0af2f9f826d9ce11349e0fa0e883e897 | [
"MIT"
] | null | null | null | import numpy as np
import torch
import matplotlib.pyplot as plt
from tsx.models.classifier import ROCKET
from tsx.datasets import load_itapowdem
from tsx.counterfactuals import NativeGuide
ds = load_itapowdem()
x_train, y_train = ds.torch(train=True)
x_test, y_test = ds.torch(train=False)
model = ROCKET(input_length=x_train.shape[-1], batch_size=100, n_classes=len(np.unique(y_train)))
model.fit(x_train, y_train, x_test, y_test)
cf = NativeGuide(model, x_train, y_train, distance='euclidian', batch_size=1000)
print("Original classes of input: {}".format(y_test[0:2]))
# Get two counterfactuals for each datapoint
generated_cfs = cf.generate(x_test[0:2], y_test[0:2], n=2)
plt.figure()
for i in range(len(generated_cfs)):
plt.subplot(1,2,i+1)
plt.plot(x_test[i].squeeze(), color='green')
print(generated_cfs[i][0][1].shape)
plt.plot(generated_cfs[i][0][1], color='red')
plt.show()
| 30.233333 | 97 | 0.743109 | import numpy as np
import torch
import matplotlib.pyplot as plt
from tsx.models.classifier import ROCKET
from tsx.datasets import load_itapowdem
from tsx.counterfactuals import NativeGuide
ds = load_itapowdem()
x_train, y_train = ds.torch(train=True)
x_test, y_test = ds.torch(train=False)
model = ROCKET(input_length=x_train.shape[-1], batch_size=100, n_classes=len(np.unique(y_train)))
model.fit(x_train, y_train, x_test, y_test)
cf = NativeGuide(model, x_train, y_train, distance='euclidian', batch_size=1000)
print("Original classes of input: {}".format(y_test[0:2]))
generated_cfs = cf.generate(x_test[0:2], y_test[0:2], n=2)
plt.figure()
for i in range(len(generated_cfs)):
plt.subplot(1,2,i+1)
plt.plot(x_test[i].squeeze(), color='green')
print(generated_cfs[i][0][1].shape)
plt.plot(generated_cfs[i][0][1], color='red')
plt.show()
| true | true |
1c2ff5b0c279d00d3c1a682c519050f56c123082 | 2,232 | py | Python | demo/tutorial_TMD.py | aravindhk/Vides | 65d9ea9764ddf5f6ef40e869bd31387d0e3e378f | [
"BSD-4-Clause"
] | 2 | 2021-11-03T17:24:24.000Z | 2021-12-02T06:06:50.000Z | demo/tutorial_TMD.py | aravindhk/Vides | 65d9ea9764ddf5f6ef40e869bd31387d0e3e378f | [
"BSD-4-Clause"
] | null | null | null | demo/tutorial_TMD.py | aravindhk/Vides | 65d9ea9764ddf5f6ef40e869bd31387d0e3e378f | [
"BSD-4-Clause"
] | null | null | null | from NanoTCAD_ViDES import *
import sys
from module_TMD import *
# NanoTCAD ViDES tutorial: Id-Vgs sweep of a double-gated TMD flake device.
rank = 0  # single-process run; only rank 0 writes the per-bias output files
# I create the grid
xg = nonuniformgrid(array([-2.0, 1, 0, 0.05, 2.0, 1]))
FLAKE = TMD(30.0, "n");
acc = FLAKE.acc;
# kF is computed but not used later in this script.
kF = 2 * pi / (3 * sqrt(3) * acc);
kymax = pi / FLAKE.delta;
Nky = 32.0;
dk = kymax / Nky;  # transverse k-space sampling step
FLAKE.kmax = pi / FLAKE.delta;
FLAKE.kmin = 0;
FLAKE.dk = dk;
FLAKE.dE = 0.001
grid = grid2D(xg, FLAKE.y, FLAKE.x, FLAKE.y);
savetxt("gridx.out", grid.gridx)
savetxt("gridy.out", grid.gridy)
# I take care of the solid
Oxide1 = region("hex", grid.xmin, 0, grid.ymin, grid.ymax)
Oxide1.eps = 3.9;  # SiO2-like relative permittivity
Oxide2 = region("hex", 0, grid.xmax, grid.ymin, grid.ymax)
Oxide2.eps = 3.9;
# Top and bottom gates cover the channel region (y in [10, 20]).
top_gate = gate("hex", grid.xmax, grid.xmax, 10.0, 20.0);
bottom_gate = gate("hex", grid.xmin, grid.xmin, 10.0, 20.0);
p = interface2D(grid, Oxide1, Oxide2, top_gate, bottom_gate);
# Doped source/drain reservoirs outside the gated channel.
fraction_source = 0.01
fraction_drain = 0.01
dope_reservoir(grid, p, FLAKE, fraction_source, array([-1, 1, grid.ymin, 10.0]));
dope_reservoir(grid, p, FLAKE, fraction_drain, array([-1, 1, 20.0, grid.ymax]));
# solve_init(grid,p,FLAKE);
# Gate-voltage sweep setup.
Vgmin = 0.0;
Vgmax = 1.0;
Vgstep = 0.05;
Np = int(abs(Vgmin - Vgmax) / Vgstep) + 1;
vg = zeros(Np);
current = zeros(Np);
p.underel = 0.1;
counter = 0;
Vgs = Vgmin;
# Source/drain Fermi levels: 0.1 V drain bias.
FLAKE.mu1 = -0.0
FLAKE.mu2 = -0.1
while (Vgs <= Vgmax):
    # Apply the same bias to both gates (double-gate operation).
    bottom_gate.Ef = -Vgs;
    set_gate(p, bottom_gate)
    top_gate.Ef = -Vgs;
    set_gate(p, top_gate)
    # Convergence tolerances for the Poisson / self-consistent loops.
    p.normpoisson = 1e-1;
    p.normd = 5e-3;
    solve_self_consistent(grid, p, FLAKE);
    vg[counter] = Vgs;
    current[counter] = FLAKE.current();
    # I save the output files
    if (rank == 0):
        string = "./datiout/Phi%s.out" % Vgs;
        savetxt(string, p.Phi);
        string = "./datiout/ncar%s.out" % Vgs;
        savetxt(string, p.free_charge);
        a = [FLAKE.E, FLAKE.T];
        string = "./datiout/T%s.out" % Vgs;
        savetxt(string, transpose(a));
        string = "./datiout/jayn%s.out" % Vgs;
        fp = open(string, "w");
        string2 = "%s" % current[counter];
        fp.write(string2);
        fp.close();
    counter = counter + 1;
    Vgs = Vgs + Vgstep;
# Final transfer characteristic and plot.
tempo = [vg, current]
savetxt("./datiout/idvgs.out", transpose(tempo));
plot(vg[:counter], current[:counter])
show()
| 24.527473 | 81 | 0.611111 | from NanoTCAD_ViDES import *
import sys
from module_TMD import *
rank = 0
xg = nonuniformgrid(array([-2.0, 1, 0, 0.05, 2.0, 1]))
FLAKE = TMD(30.0, "n");
acc = FLAKE.acc;
kF = 2 * pi / (3 * sqrt(3) * acc);
kymax = pi / FLAKE.delta;
Nky = 32.0;
dk = kymax / Nky;
FLAKE.kmax = pi / FLAKE.delta;
FLAKE.kmin = 0;
FLAKE.dk = dk;
FLAKE.dE = 0.001
grid = grid2D(xg, FLAKE.y, FLAKE.x, FLAKE.y);
savetxt("gridx.out", grid.gridx)
savetxt("gridy.out", grid.gridy)
Oxide1 = region("hex", grid.xmin, 0, grid.ymin, grid.ymax)
Oxide1.eps = 3.9;
Oxide2 = region("hex", 0, grid.xmax, grid.ymin, grid.ymax)
Oxide2.eps = 3.9;
top_gate = gate("hex", grid.xmax, grid.xmax, 10.0, 20.0);
bottom_gate = gate("hex", grid.xmin, grid.xmin, 10.0, 20.0);
p = interface2D(grid, Oxide1, Oxide2, top_gate, bottom_gate);
fraction_source = 0.01
fraction_drain = 0.01
dope_reservoir(grid, p, FLAKE, fraction_source, array([-1, 1, grid.ymin, 10.0]));
dope_reservoir(grid, p, FLAKE, fraction_drain, array([-1, 1, 20.0, grid.ymax]));
Vgmin = 0.0;
Vgmax = 1.0;
Vgstep = 0.05;
Np = int(abs(Vgmin - Vgmax) / Vgstep) + 1;
vg = zeros(Np);
current = zeros(Np);
p.underel = 0.1;
counter = 0;
Vgs = Vgmin;
FLAKE.mu1 = -0.0
FLAKE.mu2 = -0.1
while (Vgs <= Vgmax):
bottom_gate.Ef = -Vgs;
set_gate(p, bottom_gate)
top_gate.Ef = -Vgs;
set_gate(p, top_gate)
p.normpoisson = 1e-1;
p.normd = 5e-3;
solve_self_consistent(grid, p, FLAKE);
vg[counter] = Vgs;
current[counter] = FLAKE.current();
if (rank == 0):
string = "./datiout/Phi%s.out" % Vgs;
savetxt(string, p.Phi);
string = "./datiout/ncar%s.out" % Vgs;
savetxt(string, p.free_charge);
a = [FLAKE.E, FLAKE.T];
string = "./datiout/T%s.out" % Vgs;
savetxt(string, transpose(a));
string = "./datiout/jayn%s.out" % Vgs;
fp = open(string, "w");
string2 = "%s" % current[counter];
fp.write(string2);
fp.close();
counter = counter + 1;
Vgs = Vgs + Vgstep;
tempo = [vg, current]
savetxt("./datiout/idvgs.out", transpose(tempo));
plot(vg[:counter], current[:counter])
show()
| true | true |
1c2ff604819a2df5c8840a388bb0d80ce8a53246 | 2,074 | py | Python | Applications/ParaView/Testing/Python/CTHAMRContour.py | mathstuf/ParaView | e867e280545ada10c4ed137f6a966d9d2f3db4cb | [
"Apache-2.0"
] | 2 | 2019-09-27T08:04:34.000Z | 2019-10-16T22:30:54.000Z | Applications/ParaView/Testing/Python/CTHAMRContour.py | mathstuf/ParaView | e867e280545ada10c4ed137f6a966d9d2f3db4cb | [
"Apache-2.0"
] | null | null | null | Applications/ParaView/Testing/Python/CTHAMRContour.py | mathstuf/ParaView | e867e280545ada10c4ed137f6a966d9d2f3db4cb | [
"Apache-2.0"
] | 5 | 2016-04-14T13:42:37.000Z | 2021-05-22T04:59:42.000Z | #/usr/bin/env python
import QtTesting
import QtTestingImage
object1 = 'pqClientMainWindow/MainControlsToolbar/actionOpenData'
QtTesting.playCommand(object1, 'activate', '')
object2 = 'pqClientMainWindow/FileOpenDialog'
QtTesting.playCommand(object2, 'filesSelected', '$PARAVIEW_DATA_ROOT/SPCTH/Dave_Karelitz_Small/spcth_a')
object3 = 'pqClientMainWindow/proxyTabDock/proxyTabWidget/qt_tabwidget_stackedwidget/objectInspector/ScrollArea/qt_scrollarea_viewport/PanelArea/Editor/CellArrayStatus/1QHeaderView0'
QtTesting.playCommand(object3, 'mousePress', '1,1,0,0,0,0')
QtTesting.playCommand(object3, 'mouseRelease', '1,0,0,0,0,0')
object4 = 'pqClientMainWindow/proxyTabDock/proxyTabWidget/qt_tabwidget_stackedwidget/objectInspector/Accept'
QtTesting.playCommand(object4, 'activate', '')
object5 = 'pqClientMainWindow/representationToolbar/displayRepresentation/comboBox'
QtTesting.playCommand(object5, 'set_string', 'Surface')
object6 = 'pqClientMainWindow/cameraToolbar/actionNegativeY'
QtTesting.playCommand(object6, 'activate', '')
object7 = 'pqClientMainWindow/variableToolbar/displayColor/Variables'
QtTesting.playCommand(object7, 'set_string', 'Pressure (dynes/cm^2^)')
object8 = 'pqClientMainWindow/menubar'
QtTesting.playCommand(object8, 'activate', 'menuFilters')
object9 = 'pqClientMainWindow/menubar/menuFilters/Alphabetical'
QtTesting.playCommand(object9, 'activate', 'AMRDualContour')
object10 = 'pqClientMainWindow/proxyTabDock/proxyTabWidget/qt_tabwidget_stackedwidget/objectInspector/ScrollArea/qt_scrollarea_viewport/PanelArea/Editor/SelectMaterialArrays/1QHeaderView0'
QtTesting.playCommand(object10, 'mousePress', '1,1,0,0,0,0')
QtTesting.playCommand(object10, 'mouseRelease', '1,0,0,0,0,0')
QtTesting.playCommand(object4, 'activate', '')
QtTesting.playCommand(object7, 'set_string', 'Pressure (dynes/cm^2^) (partial)')
# DO_IMAGE_COMPARE
snapshotWidget = 'pqClientMainWindow/centralwidget/MultiViewWidget/CoreWidget/qt_tabwidget_stackedwidget/MultiViewWidget1/Frame.0/Viewport'
QtTestingImage.compareImage(snapshotWidget, 'CTHAMRContour.png', 300, 300)
| 61 | 188 | 0.831244 |
import QtTesting
import QtTestingImage
object1 = 'pqClientMainWindow/MainControlsToolbar/actionOpenData'
QtTesting.playCommand(object1, 'activate', '')
object2 = 'pqClientMainWindow/FileOpenDialog'
QtTesting.playCommand(object2, 'filesSelected', '$PARAVIEW_DATA_ROOT/SPCTH/Dave_Karelitz_Small/spcth_a')
object3 = 'pqClientMainWindow/proxyTabDock/proxyTabWidget/qt_tabwidget_stackedwidget/objectInspector/ScrollArea/qt_scrollarea_viewport/PanelArea/Editor/CellArrayStatus/1QHeaderView0'
QtTesting.playCommand(object3, 'mousePress', '1,1,0,0,0,0')
QtTesting.playCommand(object3, 'mouseRelease', '1,0,0,0,0,0')
object4 = 'pqClientMainWindow/proxyTabDock/proxyTabWidget/qt_tabwidget_stackedwidget/objectInspector/Accept'
QtTesting.playCommand(object4, 'activate', '')
object5 = 'pqClientMainWindow/representationToolbar/displayRepresentation/comboBox'
QtTesting.playCommand(object5, 'set_string', 'Surface')
object6 = 'pqClientMainWindow/cameraToolbar/actionNegativeY'
QtTesting.playCommand(object6, 'activate', '')
object7 = 'pqClientMainWindow/variableToolbar/displayColor/Variables'
QtTesting.playCommand(object7, 'set_string', 'Pressure (dynes/cm^2^)')
object8 = 'pqClientMainWindow/menubar'
QtTesting.playCommand(object8, 'activate', 'menuFilters')
object9 = 'pqClientMainWindow/menubar/menuFilters/Alphabetical'
QtTesting.playCommand(object9, 'activate', 'AMRDualContour')
object10 = 'pqClientMainWindow/proxyTabDock/proxyTabWidget/qt_tabwidget_stackedwidget/objectInspector/ScrollArea/qt_scrollarea_viewport/PanelArea/Editor/SelectMaterialArrays/1QHeaderView0'
QtTesting.playCommand(object10, 'mousePress', '1,1,0,0,0,0')
QtTesting.playCommand(object10, 'mouseRelease', '1,0,0,0,0,0')
QtTesting.playCommand(object4, 'activate', '')
QtTesting.playCommand(object7, 'set_string', 'Pressure (dynes/cm^2^) (partial)')
snapshotWidget = 'pqClientMainWindow/centralwidget/MultiViewWidget/CoreWidget/qt_tabwidget_stackedwidget/MultiViewWidget1/Frame.0/Viewport'
QtTestingImage.compareImage(snapshotWidget, 'CTHAMRContour.png', 300, 300)
| true | true |
1c2ff621565308394041ea1c9e09a28937a17c49 | 12,515 | py | Python | jacket/tests/compute/unit/api/openstack/compute/test_tenant_networks.py | bopopescu/jacket | d7ad3147fcb43131098c2a5210847634ff5fb325 | [
"Apache-2.0"
] | null | null | null | jacket/tests/compute/unit/api/openstack/compute/test_tenant_networks.py | bopopescu/jacket | d7ad3147fcb43131098c2a5210847634ff5fb325 | [
"Apache-2.0"
] | null | null | null | jacket/tests/compute/unit/api/openstack/compute/test_tenant_networks.py | bopopescu/jacket | d7ad3147fcb43131098c2a5210847634ff5fb325 | [
"Apache-2.0"
] | 2 | 2016-08-10T02:21:49.000Z | 2020-07-24T01:57:21.000Z | # Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
from oslo_config import cfg
import webob
from jacket.api.compute.openstack.compute.legacy_v2.contrib import os_tenant_networks \
as networks
from jacket.api.compute.openstack.compute import tenant_networks \
as networks_v21
from jacket.compute import exception
from jacket.compute import test
from jacket.tests.compute.unit.api.openstack import fakes
CONF = cfg.CONF
NETWORKS = [
{
"id": 1,
"cidr": "10.20.105.0/24",
"label": "new net 1"
},
{
"id": 2,
"cidr": "10.20.105.0/24",
"label": "new net 2"
}
]
DEFAULT_NETWORK = {
"id": 3,
"cidr": "10.20.105.0/24",
"label": "default"
}
NETWORKS_WITH_DEFAULT_NET = copy.deepcopy(NETWORKS)
NETWORKS_WITH_DEFAULT_NET.append(DEFAULT_NETWORK)
DEFAULT_TENANT_ID = 1
def fake_network_api_get_all(context):
if (context.project_id == DEFAULT_TENANT_ID):
return NETWORKS_WITH_DEFAULT_NET
else:
return NETWORKS
class TenantNetworksTestV21(test.NoDBTestCase):
ctrlr = networks_v21.TenantNetworkController
validation_error = exception.ValidationError
def setUp(self):
super(TenantNetworksTestV21, self).setUp()
self.controller = self.ctrlr()
self.flags(enable_network_quota=True)
self.req = fakes.HTTPRequest.blank('')
self.original_value = CONF.use_neutron_default_nets
def tearDown(self):
super(TenantNetworksTestV21, self).tearDown()
CONF.set_override("use_neutron_default_nets", self.original_value)
def _fake_network_api_create(self, context, **kwargs):
self.assertEqual(context.project_id, kwargs['project_id'])
return NETWORKS
@mock.patch('compute.quota.QUOTAS.reserve')
@mock.patch('compute.quota.QUOTAS.rollback')
@mock.patch('compute.network.api.API.disassociate')
@mock.patch('compute.network.api.API.delete')
def _test_network_delete_exception(self, delete_ex, disassociate_ex, expex,
delete_mock, disassociate_mock,
rollback_mock, reserve_mock):
ctxt = self.req.environ['compute.context']
reserve_mock.return_value = 'rv'
if delete_mock:
delete_mock.side_effect = delete_ex
if disassociate_ex:
disassociate_mock.side_effect = disassociate_ex
self.assertRaises(expex, self.controller.delete, self.req, 1)
disassociate_mock.assert_called_once_with(ctxt, 1)
if not disassociate_ex:
delete_mock.assert_called_once_with(ctxt, 1)
rollback_mock.assert_called_once_with(ctxt, 'rv')
reserve_mock.assert_called_once_with(ctxt, networks=-1)
def test_network_delete_exception_network_not_found(self):
ex = exception.NetworkNotFound(network_id=1)
expex = webob.exc.HTTPNotFound
self._test_network_delete_exception(None, ex, expex)
def test_network_delete_exception_policy_failed(self):
ex = exception.PolicyNotAuthorized(action='dummy')
expex = webob.exc.HTTPForbidden
self._test_network_delete_exception(ex, None, expex)
def test_network_delete_exception_network_in_use(self):
ex = exception.NetworkInUse(network_id=1)
expex = webob.exc.HTTPConflict
self._test_network_delete_exception(ex, None, expex)
@mock.patch('compute.quota.QUOTAS.reserve')
@mock.patch('compute.quota.QUOTAS.commit')
@mock.patch('compute.network.api.API.delete')
@mock.patch('compute.network.api.API.disassociate')
def test_network_delete(self, disassociate_mock, delete_mock, commit_mock,
reserve_mock):
ctxt = self.req.environ['compute.context']
reserve_mock.return_value = 'rv'
res = self.controller.delete(self.req, 1)
# NOTE: on v2.1, http status code is set as wsgi_code of API
# method instead of status_int in a response object.
if isinstance(self.controller, networks_v21.TenantNetworkController):
status_int = self.controller.delete.wsgi_code
else:
status_int = res.status_int
self.assertEqual(202, status_int)
disassociate_mock.assert_called_once_with(ctxt, 1)
delete_mock.assert_called_once_with(ctxt, 1)
commit_mock.assert_called_once_with(ctxt, 'rv')
reserve_mock.assert_called_once_with(ctxt, networks=-1)
@mock.patch('compute.network.api.API.get')
def test_network_show(self, get_mock):
get_mock.return_value = NETWORKS[0]
res = self.controller.show(self.req, 1)
self.assertEqual(NETWORKS[0], res['network'])
@mock.patch('compute.network.api.API.get')
def test_network_show_not_found(self, get_mock):
ctxt = self.req.environ['compute.context']
get_mock.side_effect = exception.NetworkNotFound(network_id=1)
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.show, self.req, 1)
get_mock.assert_called_once_with(ctxt, 1)
@mock.patch('compute.network.api.API.get_all')
def _test_network_index(self, get_all_mock, default_net=True):
CONF.set_override("use_neutron_default_nets", default_net)
get_all_mock.side_effect = fake_network_api_get_all
expected = NETWORKS
if default_net is True:
self.req.environ['compute.context'].project_id = DEFAULT_TENANT_ID
expected = NETWORKS_WITH_DEFAULT_NET
res = self.controller.index(self.req)
self.assertEqual(expected, res['networks'])
def test_network_index_with_default_net(self):
self._test_network_index()
def test_network_index_without_default_net(self):
self._test_network_index(default_net=False)
@mock.patch('compute.quota.QUOTAS.reserve')
@mock.patch('compute.quota.QUOTAS.commit')
@mock.patch('compute.network.api.API.create')
def test_network_create(self, create_mock, commit_mock, reserve_mock):
ctxt = self.req.environ['compute.context']
reserve_mock.return_value = 'rv'
create_mock.side_effect = self._fake_network_api_create
body = copy.deepcopy(NETWORKS[0])
del body['id']
body = {'network': body}
res = self.controller.create(self.req, body=body)
self.assertEqual(NETWORKS[0], res['network'])
commit_mock.assert_called_once_with(ctxt, 'rv')
reserve_mock.assert_called_once_with(ctxt, networks=1)
@mock.patch('compute.quota.QUOTAS.reserve')
def test_network_create_quota_error(self, reserve_mock):
ctxt = self.req.environ['compute.context']
reserve_mock.side_effect = exception.OverQuota(overs='fake')
body = {'network': {"cidr": "10.20.105.0/24",
"label": "new net 1"}}
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=body)
reserve_mock.assert_called_once_with(ctxt, networks=1)
@mock.patch('compute.quota.QUOTAS.reserve')
@mock.patch('compute.quota.QUOTAS.rollback')
@mock.patch('compute.network.api.API.create')
def _test_network_create_exception(self, ex, expex, create_mock,
rollback_mock, reserve_mock):
ctxt = self.req.environ['compute.context']
reserve_mock.return_value = 'rv'
create_mock.side_effect = ex
body = {'network': {"cidr": "10.20.105.0/24",
"label": "new net 1"}}
self.assertRaises(expex, self.controller.create, self.req, body=body)
reserve_mock.assert_called_once_with(ctxt, networks=1)
def test_network_create_exception_policy_failed(self):
ex = exception.PolicyNotAuthorized(action='dummy')
expex = webob.exc.HTTPForbidden
self._test_network_create_exception(ex, expex)
def test_network_create_exception_conflictcidr(self):
ex = exception.CidrConflict(cidr='dummy', other='dummy')
expex = webob.exc.HTTPConflict
self._test_network_create_exception(ex, expex)
def test_network_create_exception_service_unavailable(self):
ex = Exception
expex = webob.exc.HTTPServiceUnavailable
self._test_network_create_exception(ex, expex)
def test_network_create_empty_body(self):
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body={})
def test_network_create_without_cidr(self):
body = {'network': {"label": "new net 1"}}
self.assertRaises(self.validation_error,
self.controller.create, self.req, body=body)
def test_network_create_bad_format_cidr(self):
body = {'network': {"cidr": "123",
"label": "new net 1"}}
self.assertRaises(self.validation_error,
self.controller.create, self.req, body=body)
def test_network_create_empty_network(self):
body = {'network': {}}
self.assertRaises(self.validation_error,
self.controller.create, self.req, body=body)
def test_network_create_without_label(self):
body = {'network': {"cidr": "10.20.105.0/24"}}
self.assertRaises(self.validation_error,
self.controller.create, self.req, body=body)
class TenantNetworksTestV2(TenantNetworksTestV21):
ctrlr = networks.NetworkController
validation_error = webob.exc.HTTPBadRequest
def setUp(self):
super(TenantNetworksTestV2, self).setUp()
self.req = fakes.HTTPRequest.blank('', use_admin_context=True)
def test_network_create_empty_body(self):
self.assertRaises(webob.exc.HTTPUnprocessableEntity,
self.controller.create, self.req, {})
class TenantNetworksEnforcementV21(test.NoDBTestCase):
def setUp(self):
super(TenantNetworksEnforcementV21, self).setUp()
self.controller = networks_v21.TenantNetworkController()
self.req = fakes.HTTPRequest.blank('')
def test_create_policy_failed(self):
rule_name = 'os_compute_api:os-tenant-networks'
self.policy.set_rules({rule_name: "project:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.create,
self.req, body={'network': {'label': 'test',
'cidr': '10.0.0.0/32'}})
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_index_policy_failed(self):
rule_name = 'os_compute_api:os-tenant-networks'
self.policy.set_rules({rule_name: "project:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.index,
self.req)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_delete_policy_failed(self):
rule_name = 'os_compute_api:os-tenant-networks'
self.policy.set_rules({rule_name: "project:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.delete,
self.req, fakes.FAKE_UUID)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_show_policy_failed(self):
rule_name = 'os_compute_api:os-tenant-networks'
self.policy.set_rules({rule_name: "project:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.show,
self.req, fakes.FAKE_UUID)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
| 38.389571 | 87 | 0.665521 |
import copy
import mock
from oslo_config import cfg
import webob
from jacket.api.compute.openstack.compute.legacy_v2.contrib import os_tenant_networks \
as networks
from jacket.api.compute.openstack.compute import tenant_networks \
as networks_v21
from jacket.compute import exception
from jacket.compute import test
from jacket.tests.compute.unit.api.openstack import fakes
CONF = cfg.CONF
NETWORKS = [
{
"id": 1,
"cidr": "10.20.105.0/24",
"label": "new net 1"
},
{
"id": 2,
"cidr": "10.20.105.0/24",
"label": "new net 2"
}
]
DEFAULT_NETWORK = {
"id": 3,
"cidr": "10.20.105.0/24",
"label": "default"
}
NETWORKS_WITH_DEFAULT_NET = copy.deepcopy(NETWORKS)
NETWORKS_WITH_DEFAULT_NET.append(DEFAULT_NETWORK)
DEFAULT_TENANT_ID = 1
def fake_network_api_get_all(context):
if (context.project_id == DEFAULT_TENANT_ID):
return NETWORKS_WITH_DEFAULT_NET
else:
return NETWORKS
class TenantNetworksTestV21(test.NoDBTestCase):
ctrlr = networks_v21.TenantNetworkController
validation_error = exception.ValidationError
def setUp(self):
super(TenantNetworksTestV21, self).setUp()
self.controller = self.ctrlr()
self.flags(enable_network_quota=True)
self.req = fakes.HTTPRequest.blank('')
self.original_value = CONF.use_neutron_default_nets
def tearDown(self):
super(TenantNetworksTestV21, self).tearDown()
CONF.set_override("use_neutron_default_nets", self.original_value)
def _fake_network_api_create(self, context, **kwargs):
self.assertEqual(context.project_id, kwargs['project_id'])
return NETWORKS
@mock.patch('compute.quota.QUOTAS.reserve')
@mock.patch('compute.quota.QUOTAS.rollback')
@mock.patch('compute.network.api.API.disassociate')
@mock.patch('compute.network.api.API.delete')
def _test_network_delete_exception(self, delete_ex, disassociate_ex, expex,
delete_mock, disassociate_mock,
rollback_mock, reserve_mock):
ctxt = self.req.environ['compute.context']
reserve_mock.return_value = 'rv'
if delete_mock:
delete_mock.side_effect = delete_ex
if disassociate_ex:
disassociate_mock.side_effect = disassociate_ex
self.assertRaises(expex, self.controller.delete, self.req, 1)
disassociate_mock.assert_called_once_with(ctxt, 1)
if not disassociate_ex:
delete_mock.assert_called_once_with(ctxt, 1)
rollback_mock.assert_called_once_with(ctxt, 'rv')
reserve_mock.assert_called_once_with(ctxt, networks=-1)
def test_network_delete_exception_network_not_found(self):
ex = exception.NetworkNotFound(network_id=1)
expex = webob.exc.HTTPNotFound
self._test_network_delete_exception(None, ex, expex)
def test_network_delete_exception_policy_failed(self):
ex = exception.PolicyNotAuthorized(action='dummy')
expex = webob.exc.HTTPForbidden
self._test_network_delete_exception(ex, None, expex)
def test_network_delete_exception_network_in_use(self):
ex = exception.NetworkInUse(network_id=1)
expex = webob.exc.HTTPConflict
self._test_network_delete_exception(ex, None, expex)
@mock.patch('compute.quota.QUOTAS.reserve')
@mock.patch('compute.quota.QUOTAS.commit')
@mock.patch('compute.network.api.API.delete')
@mock.patch('compute.network.api.API.disassociate')
def test_network_delete(self, disassociate_mock, delete_mock, commit_mock,
reserve_mock):
ctxt = self.req.environ['compute.context']
reserve_mock.return_value = 'rv'
res = self.controller.delete(self.req, 1)
if isinstance(self.controller, networks_v21.TenantNetworkController):
status_int = self.controller.delete.wsgi_code
else:
status_int = res.status_int
self.assertEqual(202, status_int)
disassociate_mock.assert_called_once_with(ctxt, 1)
delete_mock.assert_called_once_with(ctxt, 1)
commit_mock.assert_called_once_with(ctxt, 'rv')
reserve_mock.assert_called_once_with(ctxt, networks=-1)
@mock.patch('compute.network.api.API.get')
def test_network_show(self, get_mock):
get_mock.return_value = NETWORKS[0]
res = self.controller.show(self.req, 1)
self.assertEqual(NETWORKS[0], res['network'])
@mock.patch('compute.network.api.API.get')
def test_network_show_not_found(self, get_mock):
ctxt = self.req.environ['compute.context']
get_mock.side_effect = exception.NetworkNotFound(network_id=1)
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.show, self.req, 1)
get_mock.assert_called_once_with(ctxt, 1)
@mock.patch('compute.network.api.API.get_all')
def _test_network_index(self, get_all_mock, default_net=True):
CONF.set_override("use_neutron_default_nets", default_net)
get_all_mock.side_effect = fake_network_api_get_all
expected = NETWORKS
if default_net is True:
self.req.environ['compute.context'].project_id = DEFAULT_TENANT_ID
expected = NETWORKS_WITH_DEFAULT_NET
res = self.controller.index(self.req)
self.assertEqual(expected, res['networks'])
def test_network_index_with_default_net(self):
self._test_network_index()
def test_network_index_without_default_net(self):
self._test_network_index(default_net=False)
@mock.patch('compute.quota.QUOTAS.reserve')
@mock.patch('compute.quota.QUOTAS.commit')
@mock.patch('compute.network.api.API.create')
def test_network_create(self, create_mock, commit_mock, reserve_mock):
ctxt = self.req.environ['compute.context']
reserve_mock.return_value = 'rv'
create_mock.side_effect = self._fake_network_api_create
body = copy.deepcopy(NETWORKS[0])
del body['id']
body = {'network': body}
res = self.controller.create(self.req, body=body)
self.assertEqual(NETWORKS[0], res['network'])
commit_mock.assert_called_once_with(ctxt, 'rv')
reserve_mock.assert_called_once_with(ctxt, networks=1)
@mock.patch('compute.quota.QUOTAS.reserve')
def test_network_create_quota_error(self, reserve_mock):
ctxt = self.req.environ['compute.context']
reserve_mock.side_effect = exception.OverQuota(overs='fake')
body = {'network': {"cidr": "10.20.105.0/24",
"label": "new net 1"}}
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=body)
reserve_mock.assert_called_once_with(ctxt, networks=1)
@mock.patch('compute.quota.QUOTAS.reserve')
@mock.patch('compute.quota.QUOTAS.rollback')
@mock.patch('compute.network.api.API.create')
def _test_network_create_exception(self, ex, expex, create_mock,
rollback_mock, reserve_mock):
ctxt = self.req.environ['compute.context']
reserve_mock.return_value = 'rv'
create_mock.side_effect = ex
body = {'network': {"cidr": "10.20.105.0/24",
"label": "new net 1"}}
self.assertRaises(expex, self.controller.create, self.req, body=body)
reserve_mock.assert_called_once_with(ctxt, networks=1)
def test_network_create_exception_policy_failed(self):
ex = exception.PolicyNotAuthorized(action='dummy')
expex = webob.exc.HTTPForbidden
self._test_network_create_exception(ex, expex)
def test_network_create_exception_conflictcidr(self):
ex = exception.CidrConflict(cidr='dummy', other='dummy')
expex = webob.exc.HTTPConflict
self._test_network_create_exception(ex, expex)
def test_network_create_exception_service_unavailable(self):
ex = Exception
expex = webob.exc.HTTPServiceUnavailable
self._test_network_create_exception(ex, expex)
def test_network_create_empty_body(self):
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body={})
def test_network_create_without_cidr(self):
body = {'network': {"label": "new net 1"}}
self.assertRaises(self.validation_error,
self.controller.create, self.req, body=body)
def test_network_create_bad_format_cidr(self):
body = {'network': {"cidr": "123",
"label": "new net 1"}}
self.assertRaises(self.validation_error,
self.controller.create, self.req, body=body)
def test_network_create_empty_network(self):
body = {'network': {}}
self.assertRaises(self.validation_error,
self.controller.create, self.req, body=body)
def test_network_create_without_label(self):
body = {'network': {"cidr": "10.20.105.0/24"}}
self.assertRaises(self.validation_error,
self.controller.create, self.req, body=body)
class TenantNetworksTestV2(TenantNetworksTestV21):
ctrlr = networks.NetworkController
validation_error = webob.exc.HTTPBadRequest
def setUp(self):
super(TenantNetworksTestV2, self).setUp()
self.req = fakes.HTTPRequest.blank('', use_admin_context=True)
def test_network_create_empty_body(self):
self.assertRaises(webob.exc.HTTPUnprocessableEntity,
self.controller.create, self.req, {})
class TenantNetworksEnforcementV21(test.NoDBTestCase):
def setUp(self):
super(TenantNetworksEnforcementV21, self).setUp()
self.controller = networks_v21.TenantNetworkController()
self.req = fakes.HTTPRequest.blank('')
def test_create_policy_failed(self):
rule_name = 'os_compute_api:os-tenant-networks'
self.policy.set_rules({rule_name: "project:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.create,
self.req, body={'network': {'label': 'test',
'cidr': '10.0.0.0/32'}})
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_index_policy_failed(self):
rule_name = 'os_compute_api:os-tenant-networks'
self.policy.set_rules({rule_name: "project:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.index,
self.req)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_delete_policy_failed(self):
rule_name = 'os_compute_api:os-tenant-networks'
self.policy.set_rules({rule_name: "project:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.delete,
self.req, fakes.FAKE_UUID)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_show_policy_failed(self):
rule_name = 'os_compute_api:os-tenant-networks'
self.policy.set_rules({rule_name: "project:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.show,
self.req, fakes.FAKE_UUID)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
| true | true |
1c2ff63af1be2d0cda330784b4e1026fce04aee2 | 3,381 | py | Python | domain_trainer.py | wiatrak2/BScThesis | e5dd012fd9052e7088d8464b409dc055dbfcf840 | [
"MIT"
] | null | null | null | domain_trainer.py | wiatrak2/BScThesis | e5dd012fd9052e7088d8464b409dc055dbfcf840 | [
"MIT"
] | null | null | null | domain_trainer.py | wiatrak2/BScThesis | e5dd012fd9052e7088d8464b409dc055dbfcf840 | [
"MIT"
] | null | null | null | import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from collections import defaultdict, namedtuple
class DomainTrainer:
def __init__(self, models, optims, criterions, device, **kwargs):
self.models = models
self.optims = optims
self.criterions = criterions
self.device = device
self.history = kwargs.get('history', True)
self.log_interval = kwargs.get('log_interval', 100)
self.print_logs = kwargs.get('print_logs', True)
def _train_domain(self, loaders, gr_models, epoch, train_history):
model_d = self.models.model_d.train()
model_f = self.models.model_f.eval()
train_loader = loaders.merged_test_loader
optimizer = self.optims.optim_d
criterion_domain = self.criterions.criterion_domain
if gr_models is not None:
model_c = gr_models.model_c
model_gr = gr_models.model_d
for batch_idx, (data, domains) in enumerate(train_loader):
if train_loader.dataset.get_labels:
_, domains = domains
data, domains = data.to(self.device), domains.to(self.device)
optimizer.zero_grad()
output = model_d(model_f(data))
loss = criterion_domain(output, domains)
loss.backward()
optimizer.step()
if self.history and gr_models:
model_c_mtx = model_c.get_mtx().weight.cpu().detach().numpy()
model_d_mtx = model_d.get_mtx().weight.cpu().detach().numpy()
model_gr_mtx = model_gr.get_mtx().weight.cpu().detach().numpy()
train_history['avg_len'].append(np.mean(np.diag(model_d_mtx.dot(model_d_mtx.T))))
train_history['avg_dot'].append(np.mean(model_d_mtx.dot(model_c_mtx.T)))
train_history['avg_dot_gr'].append(np.mean(model_d_mtx.dot(model_gr_mtx.T)))
if batch_idx % self.log_interval == 0 and self.print_logs:
print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
epoch, batch_idx * len(data), len(train_loader.dataset),
100. * batch_idx / len(train_loader), loss.item()))
@staticmethod
def test_domain_pred(model, device, merged_test_loader, print_logs=True, test_history=None):
model.eval()
domain_test_loss = 0
domain_correct = 0
with torch.no_grad():
for data, target in merged_test_loader:
data = data.to(device)
if merged_test_loader.dataset.get_labels:
_, domains = target
else:
domains = target
domains = domains.to(device)
domain_out = model(data)
domain_pred = domain_out.max(1, keepdim=True)[1]
domain_correct += domain_pred.eq(domains.view_as(domain_pred)).sum().item()
domain_test_loss /= len(merged_test_loader.dataset)
if print_logs:
print('\nDomains predictor: Accuracy: {}/{} ({:.0f}%)\n'.format(
domain_correct, len(merged_test_loader.dataset),
100. * domain_correct / len(merged_test_loader.dataset)))
if test_history is not None:
test_history['acc'].append(100. * domain_correct / len(merged_test_loader.dataset))
def train(self, epochs, loaders, gr_models=None, train_history=None):
self.epochs = epochs
if train_history is None:
train_history = defaultdict(lambda:[])
for epoch in range(1, self.epochs+1):
self._train_domain(loaders, gr_models, epoch, train_history)
domain_model = nn.Sequential(self.models.model_f, self.models.model_d)
self.test_domain_pred(domain_model, self.device, loaders.merged_test_loader, print_logs=self.print_logs, test_history=train_history) | 39.313953 | 138 | 0.724933 | import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from collections import defaultdict, namedtuple
class DomainTrainer:
def __init__(self, models, optims, criterions, device, **kwargs):
self.models = models
self.optims = optims
self.criterions = criterions
self.device = device
self.history = kwargs.get('history', True)
self.log_interval = kwargs.get('log_interval', 100)
self.print_logs = kwargs.get('print_logs', True)
def _train_domain(self, loaders, gr_models, epoch, train_history):
model_d = self.models.model_d.train()
model_f = self.models.model_f.eval()
train_loader = loaders.merged_test_loader
optimizer = self.optims.optim_d
criterion_domain = self.criterions.criterion_domain
if gr_models is not None:
model_c = gr_models.model_c
model_gr = gr_models.model_d
for batch_idx, (data, domains) in enumerate(train_loader):
if train_loader.dataset.get_labels:
_, domains = domains
data, domains = data.to(self.device), domains.to(self.device)
optimizer.zero_grad()
output = model_d(model_f(data))
loss = criterion_domain(output, domains)
loss.backward()
optimizer.step()
if self.history and gr_models:
model_c_mtx = model_c.get_mtx().weight.cpu().detach().numpy()
model_d_mtx = model_d.get_mtx().weight.cpu().detach().numpy()
model_gr_mtx = model_gr.get_mtx().weight.cpu().detach().numpy()
train_history['avg_len'].append(np.mean(np.diag(model_d_mtx.dot(model_d_mtx.T))))
train_history['avg_dot'].append(np.mean(model_d_mtx.dot(model_c_mtx.T)))
train_history['avg_dot_gr'].append(np.mean(model_d_mtx.dot(model_gr_mtx.T)))
if batch_idx % self.log_interval == 0 and self.print_logs:
print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
epoch, batch_idx * len(data), len(train_loader.dataset),
100. * batch_idx / len(train_loader), loss.item()))
@staticmethod
def test_domain_pred(model, device, merged_test_loader, print_logs=True, test_history=None):
model.eval()
domain_test_loss = 0
domain_correct = 0
with torch.no_grad():
for data, target in merged_test_loader:
data = data.to(device)
if merged_test_loader.dataset.get_labels:
_, domains = target
else:
domains = target
domains = domains.to(device)
domain_out = model(data)
domain_pred = domain_out.max(1, keepdim=True)[1]
domain_correct += domain_pred.eq(domains.view_as(domain_pred)).sum().item()
domain_test_loss /= len(merged_test_loader.dataset)
if print_logs:
print('\nDomains predictor: Accuracy: {}/{} ({:.0f}%)\n'.format(
domain_correct, len(merged_test_loader.dataset),
100. * domain_correct / len(merged_test_loader.dataset)))
if test_history is not None:
test_history['acc'].append(100. * domain_correct / len(merged_test_loader.dataset))
def train(self, epochs, loaders, gr_models=None, train_history=None):
self.epochs = epochs
if train_history is None:
train_history = defaultdict(lambda:[])
for epoch in range(1, self.epochs+1):
self._train_domain(loaders, gr_models, epoch, train_history)
domain_model = nn.Sequential(self.models.model_f, self.models.model_d)
self.test_domain_pred(domain_model, self.device, loaders.merged_test_loader, print_logs=self.print_logs, test_history=train_history) | true | true |
1c2ff6667032bf51186ac6754942e176dc8d0b6c | 4,355 | py | Python | a10_neutron_lbaas/v1/handler_hm.py | hthompson6/a10-neutron-lbaas | f1639758cd3abcc6c86c8e6b64dcb0397c359621 | [
"Apache-2.0"
] | 10 | 2015-09-15T05:16:15.000Z | 2020-03-18T02:34:39.000Z | a10_neutron_lbaas/v1/handler_hm.py | hthompson6/a10-neutron-lbaas | f1639758cd3abcc6c86c8e6b64dcb0397c359621 | [
"Apache-2.0"
] | 334 | 2015-02-11T23:45:00.000Z | 2020-02-28T08:58:51.000Z | a10_neutron_lbaas/v1/handler_hm.py | hthompson6/a10-neutron-lbaas | f1639758cd3abcc6c86c8e6b64dcb0397c359621 | [
"Apache-2.0"
] | 24 | 2015-01-13T21:14:45.000Z | 2021-06-02T17:22:14.000Z | # Copyright 2014, Doug Wiegley (dougwig), A10 Networks
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import acos_client.errors as acos_errors
import logging
from a10_neutron_lbaas.acos import openstack_mappings
from a10_neutron_lbaas.v1 import handler_base_v1
from a10_neutron_lbaas.v1 import v1_context as a10
LOG = logging.getLogger(__name__)
class HealthMonitorHandler(handler_base_v1.HandlerBaseV1):
    """Maps LBaaS v1 health monitor operations onto ACOS health monitors."""

    def _name(self, hm):
        # ACOS object names are length-limited, so truncate the UUID.
        return hm['id'][0:28]

    def _set(self, c, set_method, context, hm):
        """Create or update the ACOS health monitor via the supplied client method."""
        hm_name = self._meta_name(hm)
        method = None
        url = None
        expect_code = None
        # Only HTTP(S) monitors carry request method/URL/expected-code settings.
        if hm['type'] in ['HTTP', 'HTTPS']:
            method = hm['http_method']
            url = hm['url_path']
            expect_code = hm['expected_codes']
        args = self.meta(hm, 'hm', {})
        set_method(hm_name, openstack_mappings.hm_type(c, hm['type']),
                   hm['delay'], hm['timeout'], hm['max_retries'],
                   method=method, url=url, expect_code=expect_code,
                   axapi_args=args)

    def create(self, context, hm, pool_id):
        """Create the monitor on the device and associate it with its pools."""
        h = hm.copy()
        h['pool_id'] = pool_id
        with a10.A10WriteHMStatusContext(self, context, h, action='create') as c:
            try:
                self._set(c, c.client.slb.hm.create, context, hm)
            except acos_errors.Exists:
                # Monitor already exists on the device; creation is idempotent.
                pass
            if pool_id is not None:
                c.client.slb.service_group.update(
                    self._pool_name(context, pool_id),
                    health_monitor=self._meta_name(hm))
            # Use .get() for consistency with dissociate()/_delete(): 'pools'
            # may be absent from the health monitor dict.
            for pool in hm.get('pools', []):
                if pool['pool_id'] == pool_id:
                    # Already associated above.
                    continue
                c.client.slb.service_group.update(
                    self._pool_name(context, pool['pool_id']),
                    health_monitor=self._meta_name(hm))

    def update(self, context, old_hm, hm, pool_id):
        """Push updated health monitor settings to the device."""
        h = hm.copy()
        h['pool_id'] = pool_id
        with a10.A10WriteHMStatusContext(self, context, h) as c:
            self._set(c, c.client.slb.hm.update, context, hm)

    def _dissociate(self, c, context, hm, pool_id):
        """Remove a pool association"""
        pool_name = self._pool_name(context, pool_id)
        c.client.slb.service_group.update(pool_name, health_monitor="",
                                          health_check_disable=True)

    def dissociate(self, c, context, hm, pool_id):
        """Remove a pool association, and the healthmonitor if its the last one"""
        self._dissociate(c, context, hm, pool_id)
        pools = hm.get("pools", [])
        if not any(p for p in pools if p.get("pool_id") != pool_id):
            # No other pool references this monitor; clean it up.
            self._delete_unused(c, context, hm)

    def _delete(self, c, context, hm):
        """Delete a healthmonitor and ALL its pool associations"""
        pools = hm.get("pools", [])
        for pool in pools:
            pool_id = pool.get("pool_id")
            self._dissociate(c, context, hm, pool_id)
        self._delete_unused(c, context, hm)

    def _delete_unused(self, c, context, hm):
        """Delete the monitor from the device, tolerating its absence."""
        try:
            c.client.slb.hm.delete(self._meta_name(hm))
        except acos_errors.InUse:
            LOG.error("Cannot delete a health monitor with existing associations")
            raise
        except acos_errors.NotFound:
            # Already gone; deletion is idempotent.
            pass

    def delete(self, context, hm, pool_id):
        """Delete the monitor entirely (pool_id None) or detach a single pool."""
        h = hm.copy()
        # Get the binding count to see if we need to perform disassociation
        h['pool_id'] = pool_id
        with a10.A10DeleteHMContext(self, context, h) as c:
            if pool_id is None:
                # Delete the whole healthmonitor
                self._delete(c, context, hm)
            else:
                # Disassociate this pool
                self.dissociate(c, context, hm, pool_id)
| 35.991736 | 82 | 0.60023 |
import acos_client.errors as acos_errors
import logging
from a10_neutron_lbaas.acos import openstack_mappings
from a10_neutron_lbaas.v1 import handler_base_v1
from a10_neutron_lbaas.v1 import v1_context as a10
LOG = logging.getLogger(__name__)
class HealthMonitorHandler(handler_base_v1.HandlerBaseV1):
    """Maps LBaaS v1 health monitor operations onto ACOS health monitors."""
    def _name(self, hm):
        # ACOS object names are length-limited, so truncate the UUID.
        return hm['id'][0:28]
    def _set(self, c, set_method, context, hm):
        """Create or update the ACOS health monitor via the supplied client method."""
        hm_name = self._meta_name(hm)
        method = None
        url = None
        expect_code = None
        # Only HTTP(S) monitors carry request method/URL/expected-code settings.
        if hm['type'] in ['HTTP', 'HTTPS']:
            method = hm['http_method']
            url = hm['url_path']
            expect_code = hm['expected_codes']
        args = self.meta(hm, 'hm', {})
        set_method(hm_name, openstack_mappings.hm_type(c, hm['type']),
                   hm['delay'], hm['timeout'], hm['max_retries'],
                   method=method, url=url, expect_code=expect_code,
                   axapi_args=args)
    def create(self, context, hm, pool_id):
        """Create the monitor on the device and associate it with its pools."""
        h = hm.copy()
        h['pool_id'] = pool_id
        with a10.A10WriteHMStatusContext(self, context, h, action='create') as c:
            try:
                self._set(c, c.client.slb.hm.create, context, hm)
            except acos_errors.Exists:
                # Monitor already exists on the device; creation is idempotent.
                pass
            if pool_id is not None:
                c.client.slb.service_group.update(
                    self._pool_name(context, pool_id),
                    health_monitor=self._meta_name(hm))
            # Associate any additional pools referencing this monitor.
            # NOTE(review): unlike dissociate()/_delete(), this indexes
            # hm['pools'] directly and would raise KeyError if absent.
            for pool in hm['pools']:
                if pool['pool_id'] == pool_id:
                    # Already associated above.
                    continue
                c.client.slb.service_group.update(
                    self._pool_name(context, pool['pool_id']),
                    health_monitor=self._meta_name(hm))
    def update(self, context, old_hm, hm, pool_id):
        """Push updated health monitor settings to the device."""
        h = hm.copy()
        h['pool_id'] = pool_id
        with a10.A10WriteHMStatusContext(self, context, h) as c:
            self._set(c, c.client.slb.hm.update, context, hm)
    def _dissociate(self, c, context, hm, pool_id):
        """Remove a single pool association."""
        pool_name = self._pool_name(context, pool_id)
        c.client.slb.service_group.update(pool_name, health_monitor="",
                                          health_check_disable=True)
    def dissociate(self, c, context, hm, pool_id):
        """Remove a pool association, and the monitor itself if it was the last one."""
        self._dissociate(c, context, hm, pool_id)
        pools = hm.get("pools", [])
        if not any(p for p in pools if p.get("pool_id") != pool_id):
            # No other pool references this monitor; clean it up.
            self._delete_unused(c, context, hm)
    def _delete(self, c, context, hm):
        """Delete a health monitor and ALL of its pool associations."""
        pools = hm.get("pools", [])
        for pool in pools:
            pool_id = pool.get("pool_id")
            self._dissociate(c, context, hm, pool_id)
        self._delete_unused(c, context, hm)
    def _delete_unused(self, c, context, hm):
        """Delete the monitor from the device, tolerating its absence."""
        try:
            c.client.slb.hm.delete(self._meta_name(hm))
        except acos_errors.InUse:
            LOG.error("Cannot delete a health monitor with existing associations")
            raise
        except acos_errors.NotFound:
            # Already gone; deletion is idempotent.
            pass
    def delete(self, context, hm, pool_id):
        """Delete the monitor entirely (pool_id None) or detach a single pool."""
        h = hm.copy()
        h['pool_id'] = pool_id
        with a10.A10DeleteHMContext(self, context, h) as c:
            if pool_id is None:
                # Delete the whole health monitor.
                self._delete(c, context, hm)
            else:
                # Only disassociate this pool.
                self.dissociate(c, context, hm, pool_id)
| true | true |
1c2ff740a6516ce48791949ac6dad97d7a9e429f | 5,737 | py | Python | mixcoatl/platform/relational_database_product.py | zomGreg/mixcoatl | dd8d7e206682955b251d7f858fffee56b11df8c6 | [
"Apache-2.0"
] | null | null | null | mixcoatl/platform/relational_database_product.py | zomGreg/mixcoatl | dd8d7e206682955b251d7f858fffee56b11df8c6 | [
"Apache-2.0"
] | null | null | null | mixcoatl/platform/relational_database_product.py | zomGreg/mixcoatl | dd8d7e206682955b251d7f858fffee56b11df8c6 | [
"Apache-2.0"
] | null | null | null | from mixcoatl.resource import Resource
from mixcoatl.admin.job import Job
from mixcoatl.decorators.lazy import lazy_property
from mixcoatl.utils import uncamel, camelize, camel_keys, uncamel_keys
class RelationalDatabaseProduct(Resource):
    """Represents a product with costs from a cloud relational database vendor."""
    PATH = 'platform/RelationalDatabaseProduct'
    COLLECTION_NAME = 'rdbmsProducts'
    PRIMARY_KEY = 'product_id'

    def __init__(self, product_id=None, endpoint=None, *args, **kwargs):
        Resource.__init__(self, endpoint=endpoint)
        self.__product_id = product_id

    @property
    def product_id(self):
        """`int` - The unique ID in DCM for this relational database product."""
        return self.__product_id

    @lazy_property
    def status(self):
        """`str` - The current status of the product."""
        return self.__status

    @lazy_property
    def architecture(self):
        """`str` - The underlying CPU architecture of this database server."""
        return self.__architecture

    @lazy_property
    def cloud(self):
        """`dict` - The cloud for which this product operates."""
        return self.__cloud

    @lazy_property
    def core_count(self):
        """`int` - The number of CPU cores allocated to your database environment."""
        return self.__core_count

    @lazy_property
    def cpu_in_g_hz(self):
        """`int` - The speed of the CPUs allocated to your database environment."""
        return self.__cpu_in_g_hz

    @lazy_property
    def custom_pricing(self):
        """`bool` - Indicates whether or not this pricing reflects the standard retail rates from the cloud provider."""
        return self.__custom_pricing

    @lazy_property
    def description(self):
        """`str` - A long description for this product."""
        return self.__description

    @lazy_property
    def engine(self):
        """ `enum` - The database engine represented by this product."""
        return self.__engine

    @lazy_property
    def hourly_pricing(self):
        """`dict` - The hourly rate the cloud provider charges for having a relational database provisioned."""
        return self.__hourly_pricing

    @lazy_property
    def io_pricing(self):
        """`dict` - The rate charged by the cloud provider for data going in and out of the cloud to the database."""
        return self.__io_pricing

    @lazy_property
    def io_units(self):
        """`int` - The number of I/O units reflecting in the I/O pricing."""
        return self.__io_units

    @lazy_property
    def maximum_storage_in_gb(self):
        """`int` - The amount of storage up to which you may have allocated to be reflected by this product."""
        return self.__maximum_storage_in_gb

    @lazy_property
    def memory_in_gb(self):
        """`int` - The amount of RAM allocated to this virtual database server."""
        return self.__memory_in_gb

    @lazy_property
    def minimum_storage_in_gb(self):
        """`int` - The amount of storage you must have allocated to receive the pricing reflected in this product."""
        return self.__minimum_storage_in_gb

    @lazy_property
    def name(self):
        """`str` - A user-friendly name to describe this product."""
        return self.__name

    @lazy_property
    def region(self):
        """`dict` - A region for which this product is good."""
        return self.__region

    @lazy_property
    def provider_id(self):
        """`str` - How this product is identified to the cloud provider."""
        return self.__provider_id

    @lazy_property
    def storage_pricing(self):
        """`dict` - The rate per storageUnits charged for the storage allocated for this relational database product."""
        return self.__storage_pricing

    @lazy_property
    def storage_units(self):
        """`int` - The number of storage units reflected in the storage pricing."""
        return self.__storage_units

    def reload(self):
        """Reload resource data from API calls"""
        if self.product_id is not None:
            self.load()
        elif self.current_job is None:
            self.load()
        else:
            if Job.wait_for(self.current_job):
                job = Job(self.current_job, endpoint=self.endpoint)
                # NOTE(review): this binds the name-mangled attribute
                # _RelationalDatabaseProduct__relational_database_id, which no
                # property in this class reads -- looks copied from a
                # RelationalDatabase class; confirm before relying on it.
                self.__relational_database_id = job.message
                self.load()
            else:
                return self.last_error

    @classmethod
    def all(cls, region_id, engine, endpoint=None, **kwargs):
        """Get a list of all known relational_databases

        >>> RelationalDatabaseProduct.all(region_id=100, engine='MYSQL51')
        [{'product_id':1,...},{'product_id':2,...}]

        :param region_id: the region to list products for
        :param engine: the database engine (e.g. 'MYSQL51') to list products for
        :param detail: (keyword) level of detail requested; defaults to 'basic'
        :param keys_only: (keyword) if True, return only primary-key values
        :returns: list -- a list of :class:`RelationalDatabaseProduct`
        :raises: RelationalDatabaseProductException
        """
        r = Resource(cls.PATH, endpoint=endpoint)
        params = {'regionId': region_id, 'engine': engine}
        # The original assigned request_details = 'basic' and then immediately
        # overwrote it; collapse both into a single defaulted lookup.
        r.request_details = kwargs.get('detail', 'basic')
        keys_only = kwargs.get('keys_only', False)
        x = r.get(params=params)
        if r.last_error is None:
            if keys_only is True:
                results = [i[camelize(cls.PRIMARY_KEY)] for i in x[cls.COLLECTION_NAME]]
            else:
                results = [type(cls.__name__, (object,), i) for i in uncamel_keys(x)[uncamel(cls.COLLECTION_NAME)]]
            return results
        else:
            raise RelationalDatabaseProductException(r.last_error)
class RelationalDatabaseProductException(Exception):
    """Raised when a RelationalDatabaseProduct API call fails.

    Derives from Exception rather than BaseException: BaseException is reserved
    for interpreter-level exits (SystemExit, KeyboardInterrupt), and a generic
    `except Exception` handler should be able to catch this error. Existing
    `except RelationalDatabaseProductException` / `except BaseException`
    handlers continue to work unchanged.
    """
    pass
| 34.14881 | 120 | 0.644413 | from mixcoatl.resource import Resource
from mixcoatl.admin.job import Job
from mixcoatl.decorators.lazy import lazy_property
from mixcoatl.utils import uncamel, camelize, camel_keys, uncamel_keys
class RelationalDatabaseProduct(Resource):
    """Represents a product with costs from a cloud relational database vendor."""
    PATH = 'platform/RelationalDatabaseProduct'
    COLLECTION_NAME = 'rdbmsProducts'
    PRIMARY_KEY = 'product_id'
    def __init__(self, product_id=None, endpoint=None, *args, **kwargs):
        Resource.__init__(self, endpoint=endpoint)
        self.__product_id = product_id
    @property
    def product_id(self):
        """`int` - The unique ID in DCM for this relational database product."""
        return self.__product_id
    @lazy_property
    def status(self):
        """`str` - The current status of the product."""
        return self.__status
    @lazy_property
    def architecture(self):
        """`str` - The underlying CPU architecture of this database server."""
        return self.__architecture
    @lazy_property
    def cloud(self):
        """`dict` - The cloud for which this product operates."""
        return self.__cloud
    @lazy_property
    def core_count(self):
        """`int` - The number of CPU cores allocated to the database environment."""
        return self.__core_count
    @lazy_property
    def cpu_in_g_hz(self):
        """`int` - The speed of the CPUs allocated to the database environment."""
        return self.__cpu_in_g_hz
    @lazy_property
    def custom_pricing(self):
        """`bool` - Whether this pricing differs from the provider's standard retail rates."""
        return self.__custom_pricing
    @lazy_property
    def description(self):
        """`str` - A long description for this product."""
        return self.__description
    @lazy_property
    def engine(self):
        """`enum` - The database engine represented by this product."""
        return self.__engine
    @lazy_property
    def hourly_pricing(self):
        """`dict` - The hourly rate charged for a provisioned relational database."""
        return self.__hourly_pricing
    @lazy_property
    def io_pricing(self):
        """`dict` - The rate charged for data transfer to/from the database."""
        return self.__io_pricing
    @lazy_property
    def io_units(self):
        """`int` - The number of I/O units reflected in the I/O pricing."""
        return self.__io_units
    @lazy_property
    def maximum_storage_in_gb(self):
        """`int` - The maximum storage allocation covered by this product."""
        return self.__maximum_storage_in_gb
    @lazy_property
    def memory_in_gb(self):
        """`int` - The amount of RAM allocated to this virtual database server."""
        return self.__memory_in_gb
    @lazy_property
    def minimum_storage_in_gb(self):
        """`int` - The minimum storage allocation required for this pricing."""
        return self.__minimum_storage_in_gb
    @lazy_property
    def name(self):
        """`str` - A user-friendly name to describe this product."""
        return self.__name
    @lazy_property
    def region(self):
        """`dict` - A region for which this product is good."""
        return self.__region
    @lazy_property
    def provider_id(self):
        """`str` - How this product is identified to the cloud provider."""
        return self.__provider_id
    @lazy_property
    def storage_pricing(self):
        """`dict` - The rate per storage_units charged for allocated storage."""
        return self.__storage_pricing
    @lazy_property
    def storage_units(self):
        """`int` - The number of storage units reflected in the storage pricing."""
        return self.__storage_units
    def reload(self):
        """Reload resource data from API calls, waiting on any pending job."""
        if self.product_id is not None:
            self.load()
        elif self.current_job is None:
            self.load()
        else:
            if Job.wait_for(self.current_job):
                job = Job(self.current_job, endpoint=self.endpoint)
                # NOTE(review): binds the name-mangled attribute
                # _RelationalDatabaseProduct__relational_database_id, which no
                # property here reads -- possibly copied from another class.
                self.__relational_database_id = job.message
                self.load()
            else:
                return self.last_error
    @classmethod
    def all(cls, region_id, engine, endpoint=None, **kwargs):
        """List known relational database products for a region and engine.

        Optional keyword args: 'detail' (request detail level, default 'basic')
        and 'keys_only' (return only primary-key values).

        :returns: list -- product objects or primary keys
        :raises: RelationalDatabaseProductException
        """
        r = Resource(cls.PATH, endpoint=endpoint)
        # NOTE(review): this default is immediately overwritten by the
        # if/else below; kept byte-identical here.
        r.request_details = 'basic'
        params = {'regionId': region_id, 'engine': engine}
        if 'detail' in kwargs:
            r.request_details = kwargs['detail']
        else:
            r.request_details = 'basic'
        if 'keys_only' in kwargs:
            keys_only = kwargs['keys_only']
        else:
            keys_only = False
        x = r.get(params=params)
        if r.last_error is None:
            if keys_only is True:
                results = [i[camelize(cls.PRIMARY_KEY)] for i in x[cls.COLLECTION_NAME]]
            else:
                results = [type(cls.__name__, (object,), i) for i in uncamel_keys(x)[uncamel(cls.COLLECTION_NAME)]]
            return results
        else:
            raise RelationalDatabaseProductException(r.last_error)
class RelationalDatabaseProductException(Exception):
    """Raised when a RelationalDatabaseProduct API call fails.

    Derives from Exception rather than BaseException: BaseException is reserved
    for interpreter-level exits, and `except Exception` handlers should be able
    to catch this error. Existing handlers continue to work unchanged.
    """
    pass
| true | true |
1c2ff87a59fc0949bd18ae26bc290ba7f4c77632 | 62 | py | Python | neoepiscope/version.py | jxshi/neoepiscope | 4e9b7de2f355bf1de270e17eda22d176f0bff627 | [
"MIT"
] | 18 | 2018-09-14T23:38:10.000Z | 2022-01-25T22:32:26.000Z | neoepiscope/version.py | jxshi/neoepiscope | 4e9b7de2f355bf1de270e17eda22d176f0bff627 | [
"MIT"
] | 14 | 2018-10-09T17:03:52.000Z | 2021-05-07T07:26:27.000Z | neoepiscope/version.py | jxshi/neoepiscope | 4e9b7de2f355bf1de270e17eda22d176f0bff627 | [
"MIT"
] | 18 | 2018-09-13T21:00:21.000Z | 2022-02-11T07:39:36.000Z | #!/usr/bin/env python
# coding=utf-8
# Package version string; bump on release.
version_number = "0.5.0"
| 15.5 | 24 | 0.677419 |
version_number = "0.5.0"
| true | true |
1c2ff9917e9a0fcc17fee39c454701dcfa689355 | 53,055 | py | Python | netbox/netbox/views/generic.py | mtinberg/netbox | e8d6281007a70553b044f59a3ff6ed6f9d22ea10 | [
"Apache-2.0"
] | 1 | 2022-01-20T11:33:29.000Z | 2022-01-20T11:33:29.000Z | netbox/netbox/views/generic.py | mtinberg/netbox | e8d6281007a70553b044f59a3ff6ed6f9d22ea10 | [
"Apache-2.0"
] | null | null | null | netbox/netbox/views/generic.py | mtinberg/netbox | e8d6281007a70553b044f59a3ff6ed6f9d22ea10 | [
"Apache-2.0"
] | null | null | null | import logging
import re
from copy import deepcopy
from django.contrib import messages
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist, ValidationError
from django.db import transaction, IntegrityError
from django.db.models import ManyToManyField, ProtectedError
from django.forms import Form, ModelMultipleChoiceField, MultipleHiddenInput, Textarea
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, redirect, render
from django.utils.html import escape
from django.utils.http import is_safe_url
from django.utils.safestring import mark_safe
from django.views.generic import View
from django_tables2.export import TableExport
from extras.models import ExportTemplate
from extras.signals import clear_webhooks
from utilities.error_handlers import handle_protectederror
from utilities.exceptions import AbortTransaction, PermissionsViolation
from utilities.forms import (
BootstrapMixin, BulkRenameForm, ConfirmationForm, CSVDataField, CSVFileField, ImportForm, restrict_form_fields,
)
from utilities.permissions import get_permission_for_model
from utilities.tables import paginate_table
from utilities.utils import normalize_querydict, prepare_cloned_fields
from utilities.views import GetReturnURLMixin, ObjectPermissionRequiredMixin
class ObjectView(ObjectPermissionRequiredMixin, View):
    """
    Display a single object.

    queryset: Base queryset used to look up the object
    template_name: Template to render; defaults to '<app_label>/<model_name>.html'
    """
    queryset = None
    template_name = None

    def get_required_permission(self):
        # Viewing an object requires the model-level 'view' permission.
        return get_permission_for_model(self.queryset.model, 'view')

    def get_template_name(self):
        """
        Return the configured template name, falling back to a path derived
        from the model's app label and model name.
        """
        if self.template_name is None:
            meta = self.queryset.model._meta
            return f'{meta.app_label}/{meta.model_name}.html'
        return self.template_name

    def get_extra_context(self, request, instance):
        """
        Hook for subclasses to inject additional template context for the
        object being viewed. Returns an empty dict by default.
        """
        return {}

    def get(self, request, *args, **kwargs):
        """
        Look up the requested object by the URL kwargs (PK or slug) and
        render it with any extra context.
        """
        obj = get_object_or_404(self.queryset, **kwargs)
        context = {'object': obj}
        context.update(self.get_extra_context(request, obj))
        return render(request, self.get_template_name(), context)
class ObjectListView(ObjectPermissionRequiredMixin, View):
    """
    List a series of objects.
    queryset: The queryset of objects to display. Note: Prefetching related objects is not necessary, as the
    table will prefetch objects as needed depending on the columns being displayed.
    filter: A django-filter FilterSet that is applied to the queryset
    filter_form: The form used to render filter options
    table: The django-tables2 Table used to render the objects list
    template_name: The name of the template
    """
    queryset = None
    filterset = None
    filterset_form = None
    table = None
    template_name = 'generic/object_list.html'
    # Which of the standard action buttons to render above the table.
    action_buttons = ('add', 'import', 'export')
    def get_required_permission(self):
        return get_permission_for_model(self.queryset.model, 'view')
    def get_table(self, request, permissions):
        """
        Instantiate the table for this request. The bulk-selection 'pk' column
        is shown only when the user can change or delete objects of this model.
        """
        table = self.table(self.queryset, user=request.user)
        if 'pk' in table.base_columns and (permissions['change'] or permissions['delete']):
            table.columns.show('pk')
        return table
    def export_yaml(self):
        """
        Export the queryset of objects as concatenated YAML documents.
        """
        yaml_data = [obj.to_yaml() for obj in self.queryset]
        return '---\n'.join(yaml_data)
    def export_table(self, table, columns=None):
        """
        Export all table data in CSV format.
        :param table: The Table instance to export
        :param columns: A list of specific columns to include. If not specified, all columns will be exported.
        """
        # 'pk' is a UI checkbox column; never include it in exports.
        exclude_columns = {'pk'}
        if columns:
            all_columns = [col_name for col_name, _ in table.selected_columns + table.available_columns]
            exclude_columns.update({
                col for col in all_columns if col not in columns
            })
        exporter = TableExport(
            export_format=TableExport.CSV,
            table=table,
            exclude_columns=exclude_columns
        )
        return exporter.response(
            filename=f'netbox_{self.queryset.model._meta.verbose_name_plural}.csv'
        )
    def export_template(self, template, request):
        """
        Render an ExportTemplate using the current queryset.
        :param template: ExportTemplate instance
        :param request: The current request
        """
        try:
            return template.render_to_response(self.queryset)
        except Exception as e:
            # Export templates are user-authored; report rendering errors
            # instead of raising a server error.
            messages.error(request, f"There was an error rendering the selected export template ({template.name}): {e}")
            return redirect(request.path)
    def get(self, request):
        """
        Render the filtered object list, or dispatch to one of the export
        formats when ?export= is present.
        """
        model = self.queryset.model
        content_type = ContentType.objects.get_for_model(model)
        if self.filterset:
            self.queryset = self.filterset(request.GET, self.queryset).qs
        # Compile a dictionary indicating which permissions are available to the current user for this model
        permissions = {}
        for action in ('add', 'change', 'delete', 'view'):
            perm_name = get_permission_for_model(model, action)
            permissions[action] = request.user.has_perm(perm_name)
        if 'export' in request.GET:
            # Export the current table view
            if request.GET['export'] == 'table':
                table = self.get_table(request, permissions)
                columns = [name for name, _ in table.selected_columns]
                return self.export_table(table, columns)
            # Render an ExportTemplate
            elif request.GET['export']:
                template = get_object_or_404(ExportTemplate, content_type=content_type, name=request.GET['export'])
                return self.export_template(template, request)
            # Check for YAML export support on the model
            elif hasattr(model, 'to_yaml'):
                response = HttpResponse(self.export_yaml(), content_type='text/yaml')
                filename = 'netbox_{}.yaml'.format(self.queryset.model._meta.verbose_name_plural)
                response['Content-Disposition'] = 'attachment; filename="{}"'.format(filename)
                return response
            # Fall back to default table/YAML export
            else:
                table = self.get_table(request, permissions)
                return self.export_table(table)
        # Render the objects table
        table = self.get_table(request, permissions)
        paginate_table(table, request)
        context = {
            'content_type': content_type,
            'table': table,
            'permissions': permissions,
            'action_buttons': self.action_buttons,
            'filter_form': self.filterset_form(request.GET, label_suffix='') if self.filterset_form else None,
        }
        context.update(self.extra_context())
        return render(request, self.template_name, context)
    def extra_context(self):
        # Hook for subclasses to add additional template context.
        return {}
class ObjectEditView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Create or edit a single object.
    queryset: The base queryset for the object being modified
    model_form: The form used to create or edit the object
    template_name: The name of the template
    """
    queryset = None
    model_form = None
    template_name = 'generic/object_edit.html'
    def get_required_permission(self):
        # self._permission_action is set by dispatch() to either "add" or "change" depending on whether
        # we are modifying an existing object or creating a new one.
        return get_permission_for_model(self.queryset.model, self._permission_action)
    def get_object(self, kwargs):
        """
        Return the object identified by the URL kwargs, or a new (unsaved)
        instance when neither a slug nor a PK is present.
        """
        # Look up an existing object by slug or PK, if provided.
        if 'slug' in kwargs:
            obj = get_object_or_404(self.queryset, slug=kwargs['slug'])
        elif 'pk' in kwargs:
            obj = get_object_or_404(self.queryset, pk=kwargs['pk'])
        # Otherwise, return a new instance.
        else:
            return self.queryset.model()
        # Take a snapshot of change-logged models
        if hasattr(obj, 'snapshot'):
            obj.snapshot()
        return obj
    def alter_obj(self, obj, request, url_args, url_kwargs):
        # Allow views to add extra info to an object before it is processed. For example, a parent object can be defined
        # given some parameter from the request URL.
        return obj
    def dispatch(self, request, *args, **kwargs):
        # Determine required permission based on whether we are editing an existing object
        self._permission_action = 'change' if kwargs else 'add'
        return super().dispatch(request, *args, **kwargs)
    def get(self, request, *args, **kwargs):
        """
        Render the edit form, pre-populated from query string parameters.
        """
        obj = self.alter_obj(self.get_object(kwargs), request, args, kwargs)
        initial_data = normalize_querydict(request.GET)
        form = self.model_form(instance=obj, initial=initial_data)
        # Hide form fields referencing objects the user may not view.
        restrict_form_fields(form, request.user)
        return render(request, self.template_name, {
            'obj': obj,
            'obj_type': self.queryset.model._meta.verbose_name,
            'form': form,
            'return_url': self.get_return_url(request, obj),
        })
    def post(self, request, *args, **kwargs):
        """
        Validate and save the form, enforcing object-level permissions inside
        a transaction so a disallowed save is rolled back.
        """
        logger = logging.getLogger('netbox.views.ObjectEditView')
        obj = self.alter_obj(self.get_object(kwargs), request, args, kwargs)
        form = self.model_form(
            data=request.POST,
            files=request.FILES,
            instance=obj
        )
        restrict_form_fields(form, request.user)
        if form.is_valid():
            logger.debug("Form validation was successful")
            try:
                with transaction.atomic():
                    object_created = form.instance.pk is None
                    obj = form.save()
                    # Check that the new object conforms with any assigned object-level permissions
                    if not self.queryset.filter(pk=obj.pk).first():
                        raise PermissionsViolation()
                msg = '{} {}'.format(
                    'Created' if object_created else 'Modified',
                    self.queryset.model._meta.verbose_name
                )
                logger.info(f"{msg} {obj} (PK: {obj.pk})")
                # Link to the object in the success message when possible.
                if hasattr(obj, 'get_absolute_url'):
                    msg = '{} <a href="{}">{}</a>'.format(msg, obj.get_absolute_url(), escape(obj))
                else:
                    msg = '{} {}'.format(msg, escape(obj))
                messages.success(request, mark_safe(msg))
                if '_addanother' in request.POST:
                    redirect_url = request.path
                    # If the object has clone_fields, pre-populate a new instance of the form
                    if hasattr(obj, 'clone_fields'):
                        redirect_url += f"?{prepare_cloned_fields(obj)}"
                    return redirect(redirect_url)
                return_url = self.get_return_url(request, obj)
                return redirect(return_url)
            except PermissionsViolation:
                msg = "Object save failed due to object-level permissions violation"
                logger.debug(msg)
                form.add_error(None, msg)
                # Drop any webhooks queued during the rolled-back transaction.
                clear_webhooks.send(sender=self)
        else:
            logger.debug("Form validation failed")
        # Fall through: re-render the form with its errors.
        return render(request, self.template_name, {
            'obj': obj,
            'obj_type': self.queryset.model._meta.verbose_name,
            'form': form,
            'return_url': self.get_return_url(request, obj),
        })
class ObjectDeleteView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Delete a single object.

    queryset: The base queryset for the object being deleted
    template_name: The name of the template
    """
    queryset = None
    template_name = 'generic/object_delete.html'

    def get_required_permission(self):
        return get_permission_for_model(self.queryset.model, 'delete')

    def get_object(self, kwargs):
        """
        Fetch the object identified by slug (preferred) or PK, snapshotting
        change-logged models for the changelog record.
        """
        lookup = {'slug': kwargs['slug']} if 'slug' in kwargs else {'pk': kwargs['pk']}
        instance = get_object_or_404(self.queryset, **lookup)
        if hasattr(instance, 'snapshot'):
            instance.snapshot()
        return instance

    def get(self, request, **kwargs):
        """
        Render the deletion confirmation page.
        """
        instance = self.get_object(kwargs)
        return render(request, self.template_name, {
            'obj': instance,
            'form': ConfirmationForm(initial=request.GET),
            'obj_type': self.queryset.model._meta.verbose_name,
            'return_url': self.get_return_url(request, instance),
        })

    def post(self, request, **kwargs):
        """
        Validate the confirmation form and delete the object, guarding
        against deletions blocked by protected related objects.
        """
        logger = logging.getLogger('netbox.views.ObjectDeleteView')
        instance = self.get_object(kwargs)
        form = ConfirmationForm(request.POST)

        if not form.is_valid():
            logger.debug("Form validation failed")
            return render(request, self.template_name, {
                'obj': instance,
                'form': form,
                'obj_type': self.queryset.model._meta.verbose_name,
                'return_url': self.get_return_url(request, instance),
            })

        logger.debug("Form validation was successful")
        try:
            instance.delete()
        except ProtectedError as e:
            logger.info("Caught ProtectedError while attempting to delete object")
            handle_protectederror([instance], request, e)
            return redirect(instance.get_absolute_url())

        msg = 'Deleted {} {}'.format(self.queryset.model._meta.verbose_name, instance)
        logger.info(msg)
        messages.success(request, msg)

        # Honor a caller-supplied return URL only if it is safe for this host.
        return_url = form.cleaned_data.get('return_url')
        if return_url is not None and is_safe_url(url=return_url, allowed_hosts=request.get_host()):
            return redirect(return_url)
        return redirect(self.get_return_url(request, instance))
class BulkCreateView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Create new objects in bulk.
    queryset: Base queryset for the objects being created
    form: Form class which provides the `pattern` field
    model_form: The ModelForm used to create individual objects
    pattern_target: Name of the field to be evaluated as a pattern (if any)
    template_name: The name of the template
    """
    queryset = None
    form = None
    model_form = None
    pattern_target = ''
    template_name = None

    def get_required_permission(self):
        return get_permission_for_model(self.queryset.model, 'add')

    def get(self, request):
        """
        Render the pattern form alongside the per-object model form.
        """
        # Set initial values for visible form fields from query args
        initial = {}
        for field in getattr(self.model_form._meta, 'fields', []):
            if request.GET.get(field):
                initial[field] = request.GET[field]
        form = self.form()
        model_form = self.model_form(initial=initial)
        return render(request, self.template_name, {
            'obj_type': self.model_form._meta.model._meta.verbose_name,
            'form': form,
            'model_form': model_form,
            'return_url': self.get_return_url(request),
        })

    def post(self, request):
        """
        Expand the pattern into individual objects and create them atomically;
        any validation or permission failure rolls back the whole batch.
        """
        logger = logging.getLogger('netbox.views.BulkCreateView')
        model = self.queryset.model
        form = self.form(request.POST)
        model_form = self.model_form(request.POST)
        if form.is_valid():
            logger.debug("Form validation was successful")
            pattern = form.cleaned_data['pattern']
            new_objs = []
            try:
                with transaction.atomic():
                    # Create objects from the expanded. Abort the transaction on the first validation error.
                    for value in pattern:
                        # Reinstantiate the model form each time to avoid overwriting the same instance. Use a mutable
                        # copy of the POST QueryDict so that we can update the target field value.
                        model_form = self.model_form(request.POST.copy())
                        model_form.data[self.pattern_target] = value
                        # Validate each new object independently.
                        if model_form.is_valid():
                            obj = model_form.save()
                            logger.debug(f"Created {obj} (PK: {obj.pk})")
                            new_objs.append(obj)
                        else:
                            # Copy any errors on the pattern target field to the pattern form.
                            errors = model_form.errors.as_data()
                            if errors.get(self.pattern_target):
                                form.add_error('pattern', errors[self.pattern_target])
                            # Raise an IntegrityError to break the for loop and abort the transaction.
                            raise IntegrityError()
                    # Enforce object-level permissions
                    if self.queryset.filter(pk__in=[obj.pk for obj in new_objs]).count() != len(new_objs):
                        raise PermissionsViolation
                # If we make it to this point, validation has succeeded on all new objects.
                msg = "Added {} {}".format(len(new_objs), model._meta.verbose_name_plural)
                logger.info(msg)
                messages.success(request, msg)
                if '_addanother' in request.POST:
                    return redirect(request.path)
                return redirect(self.get_return_url(request))
            except IntegrityError:
                pass
            except PermissionsViolation:
                msg = "Object creation failed due to object-level permissions violation"
                logger.debug(msg)
                form.add_error(None, msg)
                # Drop webhooks queued during the rolled-back transaction,
                # matching the behavior of ObjectEditView.
                clear_webhooks.send(sender=self)
        else:
            logger.debug("Form validation failed")
        # Fall through: re-render both forms with their errors.
        return render(request, self.template_name, {
            'form': form,
            'model_form': model_form,
            'obj_type': model._meta.verbose_name,
            'return_url': self.get_return_url(request),
        })
class ObjectImportView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Import a single object (YAML or JSON format).

    queryset: Base queryset for the objects being created
    model_form: The ModelForm used to create individual objects
    related_object_forms: A dictionary mapping of forms to be used for the creation of related (child) objects
    template_name: The name of the template
    """
    queryset = None
    model_form = None
    related_object_forms = dict()
    template_name = 'generic/object_import.html'

    def get_required_permission(self):
        # Importing creates objects, so the 'add' permission is required.
        return get_permission_for_model(self.queryset.model, 'add')

    def get(self, request):
        # Present an empty import form.
        form = ImportForm()
        return render(request, self.template_name, {
            'form': form,
            'obj_type': self.queryset.model._meta.verbose_name,
            'return_url': self.get_return_url(request),
        })

    def post(self, request):
        """
        Parse the submitted YAML/JSON data and create the described object (plus any
        related child objects) inside a single transaction. Any validation or
        permission failure aborts the transaction and is reported on the form.
        """
        logger = logging.getLogger('netbox.views.ObjectImportView')
        form = ImportForm(request.POST)

        if form.is_valid():
            logger.debug("Import form validation was successful")

            # Initialize model form
            data = form.cleaned_data['data']
            model_form = self.model_form(data)
            restrict_form_fields(model_form, request.user)

            # Assign default values for any fields which were not specified. We have to do this manually because
            # passing 'initial=' to the form on initialization merely sets default values for the widgets. Since
            # widgets are not used for YAML/JSON import, we first bind the imported data normally, then update the
            # form's data with the applicable field defaults as needed prior to form validation.
            for field_name, field in model_form.fields.items():
                if field_name not in data and hasattr(field, 'initial'):
                    model_form.data[field_name] = field.initial

            if model_form.is_valid():
                try:
                    with transaction.atomic():
                        # Save the primary object
                        obj = model_form.save()

                        # Enforce object-level permissions: if the saved object is not visible through the
                        # restricted queryset, the user was not permitted to create it.
                        if not self.queryset.filter(pk=obj.pk).first():
                            raise PermissionsViolation()

                        logger.debug(f"Created {obj} (PK: {obj.pk})")

                        # Iterate through the related object forms (if any), validating and saving each instance.
                        for field_name, related_object_form in self.related_object_forms.items():
                            # BUGFIX: this debug call was missing its f-string prefix and logged the
                            # literal "{related_object_form}" placeholder.
                            logger.debug(f"Processing form for related objects: {related_object_form}")

                            related_obj_pks = []
                            for i, rel_obj_data in enumerate(data.get(field_name, list())):
                                f = related_object_form(obj, rel_obj_data)

                                # Mirror the default-value handling applied to the primary form above.
                                for subfield_name, field in f.fields.items():
                                    if subfield_name not in rel_obj_data and hasattr(field, 'initial'):
                                        f.data[subfield_name] = field.initial

                                if f.is_valid():
                                    related_obj = f.save()
                                    related_obj_pks.append(related_obj.pk)
                                else:
                                    # Replicate errors on the related object form to the primary form for display
                                    for subfield_name, errors in f.errors.items():
                                        for err in errors:
                                            err_msg = "{}[{}] {}: {}".format(field_name, i, subfield_name, err)
                                            model_form.add_error(None, err_msg)
                                    raise AbortTransaction()

                            # Enforce object-level permissions on related objects.
                            # BUGFIX: this previously raised ObjectDoesNotExist, for which there is no
                            # matching except clause below, so the error escaped as a server error instead
                            # of being reported to the user. Raise PermissionsViolation so the handler
                            # below aborts the transaction and attaches the message to the form.
                            model = related_object_form.Meta.model
                            if model.objects.filter(pk__in=related_obj_pks).count() != len(related_obj_pks):
                                raise PermissionsViolation

                except AbortTransaction:
                    # Related-object validation failed; errors were already copied onto model_form.
                    clear_webhooks.send(sender=self)

                except PermissionsViolation:
                    msg = "Object creation failed due to object-level permissions violation"
                    logger.debug(msg)
                    form.add_error(None, msg)
                    clear_webhooks.send(sender=self)

            if not model_form.errors:
                # Success: report the new object and honor _addanother / return_url redirects.
                logger.info(f"Import object {obj} (PK: {obj.pk})")
                messages.success(request, mark_safe('Imported object: <a href="{}">{}</a>'.format(
                    obj.get_absolute_url(), obj
                )))

                if '_addanother' in request.POST:
                    return redirect(request.get_full_path())

                return_url = form.cleaned_data.get('return_url')
                if return_url is not None and is_safe_url(url=return_url, allowed_hosts=request.get_host()):
                    return redirect(return_url)
                else:
                    return redirect(self.get_return_url(request, obj))

            else:
                logger.debug("Model form validation failed")

                # Replicate model form errors for display
                for field, errors in model_form.errors.items():
                    for err in errors:
                        if field == '__all__':
                            form.add_error(None, err)
                        else:
                            form.add_error(None, "{}: {}".format(field, err))

        else:
            logger.debug("Import form validation failed")

        # Fall through: re-render the import form with any accumulated errors.
        return render(request, self.template_name, {
            'form': form,
            'obj_type': self.queryset.model._meta.verbose_name,
            'return_url': self.get_return_url(request),
        })
class BulkImportView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Import objects in bulk (CSV format).

    queryset: Base queryset for the model
    model_form: The form used to create each imported object
    table: The django-tables2 Table used to render the list of imported objects
    template_name: The name of the template
    widget_attrs: A dict of attributes to apply to the import widget (e.g. to require a session key)
    """
    queryset = None
    model_form = None
    table = None
    template_name = 'generic/object_bulk_import.html'
    widget_attrs = {}
    def _import_form(self, *args, **kwargs):
        # Build the import form class dynamically so its CSV fields can be derived from self.model_form.
        class ImportForm(BootstrapMixin, Form):
            # Raw CSV text pasted into a textarea; cleans to a (headers, records) tuple.
            csv = CSVDataField(
                from_form=self.model_form,
                widget=Textarea(attrs=self.widget_attrs)
            )
            # Alternatively, an uploaded CSV file.
            csv_file = CSVFileField(
                label="CSV file",
                from_form=self.model_form,
                required=False
            )
            def clean(self):
                # Index 1 of the cleaned 'csv' value is the list of parsed records.
                csv_rows = self.cleaned_data['csv'][1] if 'csv' in self.cleaned_data else None
                csv_file = self.files.get('csv_file')
                # Check that the user has not submitted both text data and a file
                if csv_rows and csv_file:
                    raise ValidationError(
                        "Cannot process CSV text and file attachment simultaneously. Please choose only one import "
                        "method."
                    )
        return ImportForm(*args, **kwargs)
    def _save_obj(self, obj_form, request):
        """
        Provide a hook to modify the object immediately before saving it (e.g. to encrypt secret data).
        """
        return obj_form.save()
    def get_required_permission(self):
        # Bulk import creates objects, so the 'add' permission is required.
        return get_permission_for_model(self.queryset.model, 'add')
    def get(self, request):
        # Render an empty import form along with the importable field list.
        return render(request, self.template_name, {
            'form': self._import_form(),
            'fields': self.model_form().fields,
            'obj_type': self.model_form._meta.model._meta.verbose_name,
            'return_url': self.get_return_url(request),
        })
    def post(self, request):
        """Validate the submitted CSV data and create the described objects atomically (all-or-nothing)."""
        logger = logging.getLogger('netbox.views.BulkImportView')
        new_objs = []
        form = self._import_form(request.POST, request.FILES)
        if form.is_valid():
            logger.debug("Form validation was successful")
            try:
                # Iterate through CSV data and bind each row to a new model form instance.
                with transaction.atomic():
                    # Prefer the uploaded file over pasted text (the form's clean() forbids supplying both).
                    if request.FILES:
                        headers, records = form.cleaned_data['csv_file']
                    else:
                        headers, records = form.cleaned_data['csv']
                    for row, data in enumerate(records, start=1):
                        obj_form = self.model_form(data, headers=headers)
                        restrict_form_fields(obj_form, request.user)
                        if obj_form.is_valid():
                            obj = self._save_obj(obj_form, request)
                            new_objs.append(obj)
                        else:
                            # Surface the offending row's first error on the form, then abort the transaction.
                            for field, err in obj_form.errors.items():
                                form.add_error('csv', "Row {} {}: {}".format(row, field, err[0]))
                            raise ValidationError("")
                    # Enforce object-level permissions: every created object must remain visible
                    # through the restricted queryset, or the whole import is rejected.
                    if self.queryset.filter(pk__in=[obj.pk for obj in new_objs]).count() != len(new_objs):
                        raise PermissionsViolation
                # Compile a table containing the imported objects
                obj_table = self.table(new_objs)
                if new_objs:
                    msg = 'Imported {} {}'.format(len(new_objs), new_objs[0]._meta.verbose_name_plural)
                    logger.info(msg)
                    messages.success(request, msg)
                return render(request, "import_success.html", {
                    'table': obj_table,
                    'return_url': self.get_return_url(request),
                })
            except ValidationError:
                # Row-level validation failed; errors were already recorded on the form above.
                clear_webhooks.send(sender=self)
            except PermissionsViolation:
                msg = "Object import failed due to object-level permissions violation"
                logger.debug(msg)
                form.add_error(None, msg)
                clear_webhooks.send(sender=self)
        else:
            logger.debug("Form validation failed")
        # Fall through: re-render the import form with any accumulated errors.
        return render(request, self.template_name, {
            'form': form,
            'fields': self.model_form().fields,
            'obj_type': self.model_form._meta.model._meta.verbose_name,
            'return_url': self.get_return_url(request),
        })
class BulkEditView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Edit objects in bulk.

    queryset: Custom queryset to use when retrieving objects (e.g. to select related objects)
    filterset: FilterSet to apply when editing by QuerySet (via the '_all' flag)
    table: The table used to display the objects being edited
    form: The form class used to edit objects in bulk
    template_name: The name of the template
    """
    queryset = None
    filterset = None
    table = None
    form = None
    template_name = 'generic/object_bulk_edit.html'
    def get_required_permission(self):
        # Bulk editing modifies objects, so the 'change' permission is required.
        return get_permission_for_model(self.queryset.model, 'change')
    def get(self, request):
        # Bulk edit is POST-only; a GET simply bounces back to the return URL.
        return redirect(self.get_return_url(request))
    def post(self, request, **kwargs):
        """
        Either render the bulk edit form for the selected objects, or (when '_apply' is
        present) apply the submitted changes to every selected object atomically.
        """
        logger = logging.getLogger('netbox.views.BulkEditView')
        model = self.queryset.model
        # If we are editing *all* objects in the queryset, replace the PK list with all matched objects.
        if request.POST.get('_all') and self.filterset is not None:
            pk_list = self.filterset(request.GET, self.queryset.values_list('pk', flat=True)).qs
        else:
            pk_list = request.POST.getlist('pk')
        # Include the PK list as initial data for the form
        initial_data = {'pk': pk_list}
        # Check for other contextual data needed for the form. We avoid passing all of request.GET because the
        # filter values will conflict with the bulk edit form fields.
        # TODO: Find a better way to accomplish this
        if 'device' in request.GET:
            initial_data['device'] = request.GET.get('device')
        elif 'device_type' in request.GET:
            initial_data['device_type'] = request.GET.get('device_type')
        elif 'virtual_machine' in request.GET:
            initial_data['virtual_machine'] = request.GET.get('virtual_machine')
        if '_apply' in request.POST:
            form = self.form(model, request.POST, initial=initial_data)
            restrict_form_fields(form, request.user)
            if form.is_valid():
                logger.debug("Form validation was successful")
                # Partition the form's fields into custom fields and standard model fields.
                custom_fields = form.custom_fields if hasattr(form, 'custom_fields') else []
                standard_fields = [
                    field for field in form.fields if field not in custom_fields + ['pk']
                ]
                nullified_fields = request.POST.getlist('_nullify')
                try:
                    with transaction.atomic():
                        updated_objects = []
                        for obj in self.queryset.filter(pk__in=form.cleaned_data['pk']):
                            # Take a snapshot of change-logged models
                            if hasattr(obj, 'snapshot'):
                                obj.snapshot()
                            # Update standard fields. If a field is listed in _nullify, delete its value.
                            for name in standard_fields:
                                try:
                                    model_field = model._meta.get_field(name)
                                except FieldDoesNotExist:
                                    # This form field is used to modify a field rather than set its value directly
                                    model_field = None
                                # Handle nullification
                                if name in form.nullable_fields and name in nullified_fields:
                                    if isinstance(model_field, ManyToManyField):
                                        getattr(obj, name).set([])
                                    else:
                                        # Non-nullable model fields are cleared to an empty string instead.
                                        setattr(obj, name, None if model_field.null else '')
                                # ManyToManyFields
                                elif isinstance(model_field, ManyToManyField):
                                    if form.cleaned_data[name]:
                                        getattr(obj, name).set(form.cleaned_data[name])
                                # Normal fields
                                elif name in form.changed_data:
                                    setattr(obj, name, form.cleaned_data[name])
                            # Update custom fields
                            for name in custom_fields:
                                if name in form.nullable_fields and name in nullified_fields:
                                    obj.custom_field_data[name] = None
                                elif name in form.changed_data:
                                    obj.custom_field_data[name] = form.cleaned_data[name]
                            obj.full_clean()
                            obj.save()
                            updated_objects.append(obj)
                            logger.debug(f"Saved {obj} (PK: {obj.pk})")
                            # Add/remove tags
                            if form.cleaned_data.get('add_tags', None):
                                obj.tags.add(*form.cleaned_data['add_tags'])
                            if form.cleaned_data.get('remove_tags', None):
                                obj.tags.remove(*form.cleaned_data['remove_tags'])
                        # Enforce object-level permissions: every updated object must remain
                        # visible through the restricted queryset, or the transaction is rolled back.
                        if self.queryset.filter(pk__in=[obj.pk for obj in updated_objects]).count() != len(updated_objects):
                            raise PermissionsViolation
                    if updated_objects:
                        msg = 'Updated {} {}'.format(len(updated_objects), model._meta.verbose_name_plural)
                        logger.info(msg)
                        messages.success(self.request, msg)
                    return redirect(self.get_return_url(request))
                except ValidationError as e:
                    # Raised by obj.full_clean(); 'obj' is the object that failed validation.
                    messages.error(self.request, "{} failed validation: {}".format(obj, ", ".join(e.messages)))
                    clear_webhooks.send(sender=self)
                except PermissionsViolation:
                    msg = "Object update failed due to object-level permissions violation"
                    logger.debug(msg)
                    form.add_error(None, msg)
                    clear_webhooks.send(sender=self)
            else:
                logger.debug("Form validation failed")
        else:
            # Initial display (no '_apply'): present an unbound form for the selection.
            form = self.form(model, initial=initial_data)
            restrict_form_fields(form, request.user)
        # Retrieve objects being edited
        table = self.table(self.queryset.filter(pk__in=pk_list), orderable=False)
        if not table.rows:
            messages.warning(request, "No {} were selected.".format(model._meta.verbose_name_plural))
            return redirect(self.get_return_url(request))
        return render(request, self.template_name, {
            'form': form,
            'table': table,
            'obj_type_plural': model._meta.verbose_name_plural,
            'return_url': self.get_return_url(request),
        })
class BulkRenameView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    An extendable view for renaming objects in bulk.

    Supports a two-step workflow: '_preview' computes and displays the new names
    without saving; '_apply' performs the rename inside a transaction.
    """
    queryset = None
    template_name = 'generic/object_bulk_rename.html'
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Create a new Form class from BulkRenameForm
        class _Form(BulkRenameForm):
            # Bind the PK field to this view's queryset so only permitted objects validate.
            pk = ModelMultipleChoiceField(
                queryset=self.queryset,
                widget=MultipleHiddenInput()
            )
        self.form = _Form
    def get_required_permission(self):
        # Renaming modifies objects, so the 'change' permission is required.
        return get_permission_for_model(self.queryset.model, 'change')
    def post(self, request):
        """Compute (preview) or apply find/replace renames for the selected objects."""
        logger = logging.getLogger('netbox.views.BulkRenameView')
        if '_preview' in request.POST or '_apply' in request.POST:
            form = self.form(request.POST, initial={'pk': request.POST.getlist('pk')})
            selected_objects = self.queryset.filter(pk__in=form.initial['pk'])
            if form.is_valid():
                try:
                    with transaction.atomic():
                        renamed_pks = []
                        for obj in selected_objects:
                            # Take a snapshot of change-logged models
                            if hasattr(obj, 'snapshot'):
                                obj.snapshot()
                            find = form.cleaned_data['find']
                            replace = form.cleaned_data['replace']
                            if form.cleaned_data['use_regex']:
                                try:
                                    obj.new_name = re.sub(find, replace, obj.name)
                                # Catch regex group reference errors
                                except re.error:
                                    obj.new_name = obj.name
                            else:
                                obj.new_name = obj.name.replace(find, replace)
                            renamed_pks.append(obj.pk)
                        if '_apply' in request.POST:
                            # Second step: persist the computed names.
                            for obj in selected_objects:
                                obj.name = obj.new_name
                                obj.save()
                            # Enforce constrained permissions
                            if self.queryset.filter(pk__in=renamed_pks).count() != len(selected_objects):
                                raise PermissionsViolation
                            messages.success(request, "Renamed {} {}".format(
                                len(selected_objects),
                                self.queryset.model._meta.verbose_name_plural
                            ))
                            return redirect(self.get_return_url(request))
                except PermissionsViolation:
                    msg = "Object update failed due to object-level permissions violation"
                    logger.debug(msg)
                    form.add_error(None, msg)
                    clear_webhooks.send(sender=self)
        else:
            # Initial display: unbound form for the selected PKs.
            form = self.form(initial={'pk': request.POST.getlist('pk')})
            selected_objects = self.queryset.filter(pk__in=form.initial['pk'])
        # Render the form (initial display, preview with new_name attributes set, or errors).
        return render(request, self.template_name, {
            'form': form,
            'obj_type_plural': self.queryset.model._meta.verbose_name_plural,
            'selected_objects': selected_objects,
            'return_url': self.get_return_url(request),
        })
class BulkDeleteView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Delete objects in bulk.

    queryset: Custom queryset to use when retrieving objects (e.g. to select related objects)
    filterset: FilterSet to apply when deleting by QuerySet (via the '_all' flag)
    table: The table used to display the objects being deleted
    form: The form class used to confirm the bulk deletion (defaults via get_form())
    template_name: The name of the template
    """
    queryset = None
    filterset = None
    table = None
    form = None
    template_name = 'generic/object_bulk_delete.html'
    def get_required_permission(self):
        # Bulk deletion requires the 'delete' permission.
        return get_permission_for_model(self.queryset.model, 'delete')
    def get(self, request):
        # Bulk delete is POST-only; a GET simply bounces back to the return URL.
        return redirect(self.get_return_url(request))
    def post(self, request, **kwargs):
        """Render the confirmation page, or (when '_confirm' is present) delete the selected objects."""
        logger = logging.getLogger('netbox.views.BulkDeleteView')
        model = self.queryset.model
        # Are we deleting *all* objects in the queryset or just a selected subset?
        if request.POST.get('_all'):
            qs = model.objects.all()
            if self.filterset is not None:
                qs = self.filterset(request.GET, qs).qs
            pk_list = qs.only('pk').values_list('pk', flat=True)
        else:
            pk_list = [int(pk) for pk in request.POST.getlist('pk')]
        form_cls = self.get_form()
        if '_confirm' in request.POST:
            form = form_cls(request.POST)
            if form.is_valid():
                logger.debug("Form validation was successful")
                # Delete objects
                queryset = self.queryset.filter(pk__in=pk_list)
                # Record the count before deleting; the queryset will be empty afterward.
                deleted_count = queryset.count()
                try:
                    for obj in queryset:
                        # Take a snapshot of change-logged models
                        if hasattr(obj, 'snapshot'):
                            obj.snapshot()
                        obj.delete()
                except ProtectedError as e:
                    # One of the objects is referenced by a protected foreign key; report and abort.
                    logger.info("Caught ProtectedError while attempting to delete objects")
                    handle_protectederror(queryset, request, e)
                    return redirect(self.get_return_url(request))
                msg = f"Deleted {deleted_count} {model._meta.verbose_name_plural}"
                logger.info(msg)
                messages.success(request, msg)
                return redirect(self.get_return_url(request))
            else:
                logger.debug("Form validation failed")
        else:
            # Initial display: unbound confirmation form for the selection.
            form = form_cls(initial={
                'pk': pk_list,
                'return_url': self.get_return_url(request),
            })
        # Retrieve objects being deleted
        table = self.table(self.queryset.filter(pk__in=pk_list), orderable=False)
        if not table.rows:
            messages.warning(request, "No {} were selected for deletion.".format(model._meta.verbose_name_plural))
            return redirect(self.get_return_url(request))
        return render(request, self.template_name, {
            'form': form,
            'obj_type_plural': model._meta.verbose_name_plural,
            'table': table,
            'return_url': self.get_return_url(request),
        })
    def get_form(self):
        """
        Provide a standard bulk delete form if none has been specified for the view
        """
        class BulkDeleteForm(ConfirmationForm):
            pk = ModelMultipleChoiceField(queryset=self.queryset, widget=MultipleHiddenInput)
        if self.form:
            return self.form
        return BulkDeleteForm
#
# Device/VirtualMachine components
#
# TODO: Replace with BulkCreateView
class ComponentCreateView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Add one or more components (e.g. interfaces, console ports, etc.) to a Device or VirtualMachine.

    queryset: Base queryset for the component model being created
    form: The pattern form accepting name_pattern/label_pattern expressions
    model_form: The ModelForm used to create each individual component
    template_name: The name of the template
    """
    queryset = None
    form = None
    model_form = None
    template_name = 'generic/object_edit.html'
    def get_required_permission(self):
        # Creating components requires the 'add' permission on the component model.
        return get_permission_for_model(self.queryset.model, 'add')
    def get(self, request):
        # Present an unbound pattern form, pre-populated from any GET parameters.
        form = self.form(initial=request.GET)
        return render(request, self.template_name, {
            'obj': self.queryset.model(),
            'obj_type': self.queryset.model._meta.verbose_name,
            'form': form,
            'return_url': self.get_return_url(request),
        })
    def post(self, request):
        """Validate the pattern form (creating the components as a side effect) and redirect on success."""
        logger = logging.getLogger('netbox.views.ComponentCreateView')
        form = self.form(request.POST, initial=request.GET)
        # validate_form() performs the creation and records any errors on the form.
        self.validate_form(request, form)
        if form.is_valid() and not form.errors:
            if '_addanother' in request.POST:
                return redirect(request.get_full_path())
            else:
                return redirect(self.get_return_url(request))
        return render(request, self.template_name, {
            'obj_type': self.queryset.model._meta.verbose_name,
            'form': form,
            'return_url': self.get_return_url(request),
        })
    def validate_form(self, request, form):
        """
        Validate form values and set errors on the form object as they are detected. If
        no errors are found, signal success messages.

        Expands name_pattern/label_pattern into individual component forms, validates each,
        and saves them atomically. Returns the list of created objects on success, else None.
        """
        logger = logging.getLogger('netbox.views.ComponentCreateView')
        if form.is_valid():
            new_components = []
            # Copy the POST data so each expanded component can overwrite name/label.
            data = deepcopy(request.POST)
            names = form.cleaned_data['name_pattern']
            labels = form.cleaned_data.get('label_pattern')
            for i, name in enumerate(names):
                label = labels[i] if labels else None
                # Initialize the individual component form
                data['name'] = name
                data['label'] = label
                if hasattr(form, 'get_iterative_data'):
                    data.update(form.get_iterative_data(i))
                component_form = self.model_form(data)
                if component_form.is_valid():
                    new_components.append(component_form)
                else:
                    for field, errors in component_form.errors.as_data().items():
                        # Assign errors on the child form's name/label field to name_pattern/label_pattern on the parent form
                        if field == 'name':
                            field = 'name_pattern'
                        elif field == 'label':
                            field = 'label_pattern'
                        for e in errors:
                            form.add_error(field, '{}: {}'.format(name, ', '.join(e)))
            if not form.errors:
                try:
                    with transaction.atomic():
                        # Create the new components
                        new_objs = []
                        for component_form in new_components:
                            obj = component_form.save()
                            new_objs.append(obj)
                        # Enforce object-level permissions
                        if self.queryset.filter(pk__in=[obj.pk for obj in new_objs]).count() != len(new_objs):
                            raise PermissionsViolation
                    messages.success(request, "Added {} {}".format(
                        len(new_components), self.queryset.model._meta.verbose_name_plural
                    ))
                    # Return the newly created objects so overridden post methods can use the data as needed.
                    return new_objs
                except PermissionsViolation:
                    msg = "Component creation failed due to object-level permissions violation"
                    logger.debug(msg)
                    form.add_error(None, msg)
                    clear_webhooks.send(sender=self)
        # Validation or creation failed; errors have been recorded on the form.
        return None
class BulkComponentCreateView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Add one or more components (e.g. interfaces, console ports, etc.) to a set of Devices or VirtualMachines.

    parent_model: The model to which components are being added (e.g. Device)
    parent_field: The name of the component field referencing the parent object
    form: The pattern form accepting name_pattern/label_pattern expressions
    queryset: Base queryset for the component model being created
    model_form: The ModelForm used to create each individual component
    filterset: FilterSet applied when selecting parents via the '_all' flag
    table: The table used to display the selected parent objects
    template_name: The name of the template
    """
    parent_model = None
    parent_field = None
    form = None
    queryset = None
    model_form = None
    filterset = None
    table = None
    template_name = 'generic/object_bulk_add_component.html'
    def get_required_permission(self):
        # NOTE(review): permission is hard-coded to the 'dcim' app rather than derived from the
        # component model's own app_label — presumably all users of this view are dcim models.
        return f'dcim.add_{self.queryset.model._meta.model_name}'
    def post(self, request):
        """Expand the submitted name/label patterns and create components on every selected parent atomically."""
        logger = logging.getLogger('netbox.views.BulkComponentCreateView')
        parent_model_name = self.parent_model._meta.verbose_name_plural
        model_name = self.queryset.model._meta.verbose_name_plural
        # Are we editing *all* objects in the queryset or just a selected subset?
        if request.POST.get('_all') and self.filterset is not None:
            pk_list = [obj.pk for obj in self.filterset(request.GET, self.parent_model.objects.only('pk')).qs]
        else:
            pk_list = [int(pk) for pk in request.POST.getlist('pk')]
        selected_objects = self.parent_model.objects.filter(pk__in=pk_list)
        if not selected_objects:
            messages.warning(request, "No {} were selected.".format(self.parent_model._meta.verbose_name_plural))
            return redirect(self.get_return_url(request))
        table = self.table(selected_objects)
        if '_create' in request.POST:
            form = self.form(request.POST)
            if form.is_valid():
                logger.debug("Form validation was successful")
                new_components = []
                data = deepcopy(form.cleaned_data)
                try:
                    with transaction.atomic():
                        # Create the expanded set of components on each selected parent object.
                        for obj in data['pk']:
                            names = data['name_pattern']
                            labels = data['label_pattern'] if 'label_pattern' in data else None
                            for i, name in enumerate(names):
                                label = labels[i] if labels else None
                                component_data = {
                                    self.parent_field: obj.pk,
                                    'name': name,
                                    'label': label
                                }
                                # Merge in the remaining cleaned form data (untouched keys win over nothing;
                                # note this intentionally overlays component_data with shared form values).
                                component_data.update(data)
                                component_form = self.model_form(component_data)
                                if component_form.is_valid():
                                    instance = component_form.save()
                                    logger.debug(f"Created {instance} on {instance.parent_object}")
                                    new_components.append(instance)
                                else:
                                    # Copy each child form error onto the parent form, prefixed with the
                                    # parent object and component name for context.
                                    for field, errors in component_form.errors.as_data().items():
                                        for e in errors:
                                            form.add_error(field, '{} {}: {}'.format(obj, name, ', '.join(e)))
                        # Enforce object-level permissions
                        if self.queryset.filter(pk__in=[obj.pk for obj in new_components]).count() != len(new_components):
                            raise PermissionsViolation
                except IntegrityError:
                    # Database constraint violation; the transaction has been rolled back.
                    clear_webhooks.send(sender=self)
                except PermissionsViolation:
                    msg = "Component creation failed due to object-level permissions violation"
                    logger.debug(msg)
                    form.add_error(None, msg)
                    clear_webhooks.send(sender=self)
                if not form.errors:
                    msg = "Added {} {} to {} {}.".format(
                        len(new_components),
                        model_name,
                        len(form.cleaned_data['pk']),
                        parent_model_name
                    )
                    logger.info(msg)
                    messages.success(request, msg)
                    return redirect(self.get_return_url(request))
            else:
                logger.debug("Form validation failed")
        else:
            # Initial display: unbound form for the selected parent PKs.
            form = self.form(initial={'pk': pk_list})
        return render(request, self.template_name, {
            'form': form,
            'parent_model_name': parent_model_name,
            'model_name': model_name,
            'table': table,
            'return_url': self.get_return_url(request),
        })
import logging
import re
from copy import deepcopy
from django.contrib import messages
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist, ValidationError
from django.db import transaction, IntegrityError
from django.db.models import ManyToManyField, ProtectedError
from django.forms import Form, ModelMultipleChoiceField, MultipleHiddenInput, Textarea
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, redirect, render
from django.utils.html import escape
from django.utils.http import is_safe_url
from django.utils.safestring import mark_safe
from django.views.generic import View
from django_tables2.export import TableExport
from extras.models import ExportTemplate
from extras.signals import clear_webhooks
from utilities.error_handlers import handle_protectederror
from utilities.exceptions import AbortTransaction, PermissionsViolation
from utilities.forms import (
BootstrapMixin, BulkRenameForm, ConfirmationForm, CSVDataField, CSVFileField, ImportForm, restrict_form_fields,
)
from utilities.permissions import get_permission_for_model
from utilities.tables import paginate_table
from utilities.utils import normalize_querydict, prepare_cloned_fields
from utilities.views import GetReturnURLMixin, ObjectPermissionRequiredMixin
class ObjectView(ObjectPermissionRequiredMixin, View):
    """
    Retrieve and display a single object.

    queryset: Base queryset used to look up the object
    template_name: Explicit template to render; derived from the model when None
    """
    queryset = None
    template_name = None

    def get_required_permission(self):
        # Viewing an object requires the 'view' permission on its model.
        return get_permission_for_model(self.queryset.model, 'view')

    def get_template_name(self):
        """Return the configured template name, or derive '<app_label>/<model_name>.html'."""
        if self.template_name is None:
            opts = self.queryset.model._meta
            return '{}/{}.html'.format(opts.app_label, opts.model_name)
        return self.template_name

    def get_extra_context(self, request, instance):
        """Hook for subclasses to inject additional template context."""
        return {}

    def get(self, request, *args, **kwargs):
        """Resolve the requested object (404 if not found/visible) and render its template."""
        instance = get_object_or_404(self.queryset, **kwargs)
        context = {'object': instance}
        context.update(self.get_extra_context(request, instance))
        return render(request, self.get_template_name(), context)
class ObjectListView(ObjectPermissionRequiredMixin, View):
    """
    List a series of objects, with optional CSV/YAML export.

    queryset: Base queryset for the model
    filterset: FilterSet applied to the queryset from request.GET
    filterset_form: The form rendered for filtering the list
    table: The django-tables2 Table used to render the object list
    template_name: The name of the template
    action_buttons: The action buttons displayed above the table
    """
    queryset = None
    filterset = None
    filterset_form = None
    table = None
    template_name = 'generic/object_list.html'
    action_buttons = ('add', 'import', 'export')
    def get_required_permission(self):
        # Listing requires the 'view' permission on the model.
        return get_permission_for_model(self.queryset.model, 'view')
    def get_table(self, request, permissions):
        """Instantiate the table, showing the PK column only if the user can change/delete objects."""
        table = self.table(self.queryset, user=request.user)
        if 'pk' in table.base_columns and (permissions['change'] or permissions['delete']):
            table.columns.show('pk')
        return table
    def export_yaml(self):
        """Serialize the queryset as a multi-document YAML string (one document per object)."""
        yaml_data = [obj.to_yaml() for obj in self.queryset]
        return '---\n'.join(yaml_data)
    def export_table(self, table, columns=None):
        """
        Export the table as CSV. When 'columns' is given, all other columns are excluded;
        the 'pk' checkbox column is never exported.
        """
        exclude_columns = {'pk'}
        if columns:
            all_columns = [col_name for col_name, _ in table.selected_columns + table.available_columns]
            exclude_columns.update({
                col for col in all_columns if col not in columns
            })
        exporter = TableExport(
            export_format=TableExport.CSV,
            table=table,
            exclude_columns=exclude_columns
        )
        return exporter.response(
            filename=f'netbox_{self.queryset.model._meta.verbose_name_plural}.csv'
        )
    def export_template(self, template, request):
        """Render a user-defined ExportTemplate against the queryset, reporting any rendering error."""
        try:
            return template.render_to_response(self.queryset)
        except Exception as e:
            messages.error(request, f"There was an error rendering the selected export template ({template.name}): {e}")
            return redirect(request.path)
    def get(self, request):
        """Render the (optionally filtered) object list, or an export if '?export=' is present."""
        model = self.queryset.model
        content_type = ContentType.objects.get_for_model(model)
        if self.filterset:
            self.queryset = self.filterset(request.GET, self.queryset).qs
        # Compile the user's permissions on this model for template/table use.
        permissions = {}
        for action in ('add', 'change', 'delete', 'view'):
            perm_name = get_permission_for_model(model, action)
            permissions[action] = request.user.has_perm(perm_name)
        if 'export' in request.GET:
            # 'export=table' exports the current table columns as CSV.
            if request.GET['export'] == 'table':
                table = self.get_table(request, permissions)
                columns = [name for name, _ in table.selected_columns]
                return self.export_table(table, columns)
            # A non-empty value names a saved ExportTemplate.
            elif request.GET['export']:
                template = get_object_or_404(ExportTemplate, content_type=content_type, name=request.GET['export'])
                return self.export_template(template, request)
            # An empty 'export' falls back to YAML when the model supports it.
            elif hasattr(model, 'to_yaml'):
                response = HttpResponse(self.export_yaml(), content_type='text/yaml')
                filename = 'netbox_{}.yaml'.format(self.queryset.model._meta.verbose_name_plural)
                response['Content-Disposition'] = 'attachment; filename="{}"'.format(filename)
                return response
            # Otherwise export all table data as CSV.
            else:
                table = self.get_table(request, permissions)
                return self.export_table(table)
        table = self.get_table(request, permissions)
        paginate_table(table, request)
        context = {
            'content_type': content_type,
            'table': table,
            'permissions': permissions,
            'action_buttons': self.action_buttons,
            'filter_form': self.filterset_form(request.GET, label_suffix='') if self.filterset_form else None,
        }
        context.update(self.extra_context())
        return render(request, self.template_name, context)
    def extra_context(self):
        """Hook for subclasses to inject additional template context."""
        return {}
class ObjectEditView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Create or edit a single object.

    queryset: Base queryset for the object being modified
    model_form: The ModelForm used to create/edit the object
    template_name: The name of the template
    """
    queryset = None
    model_form = None
    template_name = 'generic/object_edit.html'
    def get_required_permission(self):
        # 'change' when editing an existing object, 'add' when creating (set in dispatch()).
        return get_permission_for_model(self.queryset.model, self._permission_action)
    def get_object(self, kwargs):
        """Look up the object by slug or pk, or return a new unsaved instance when neither is given."""
        if 'slug' in kwargs:
            obj = get_object_or_404(self.queryset, slug=kwargs['slug'])
        elif 'pk' in kwargs:
            obj = get_object_or_404(self.queryset, pk=kwargs['pk'])
        else:
            return self.queryset.model()
        # Take a snapshot of change-logged models for change logging.
        if hasattr(obj, 'snapshot'):
            obj.snapshot()
        return obj
    def alter_obj(self, obj, request, url_args, url_kwargs):
        """Hook for subclasses to modify the object before it is bound to the form."""
        return obj
    def dispatch(self, request, *args, **kwargs):
        # URL kwargs identify an existing object (edit); no kwargs means creation (add).
        self._permission_action = 'change' if kwargs else 'add'
        return super().dispatch(request, *args, **kwargs)
    def get(self, request, *args, **kwargs):
        """Render the edit form, pre-populated from GET parameters where applicable."""
        obj = self.alter_obj(self.get_object(kwargs), request, args, kwargs)
        initial_data = normalize_querydict(request.GET)
        form = self.model_form(instance=obj, initial=initial_data)
        restrict_form_fields(form, request.user)
        return render(request, self.template_name, {
            'obj': obj,
            'obj_type': self.queryset.model._meta.verbose_name,
            'form': form,
            'return_url': self.get_return_url(request, obj),
        })
    def post(self, request, *args, **kwargs):
        """Validate the submitted form and save the object, enforcing object-level permissions."""
        logger = logging.getLogger('netbox.views.ObjectEditView')
        obj = self.alter_obj(self.get_object(kwargs), request, args, kwargs)
        form = self.model_form(
            data=request.POST,
            files=request.FILES,
            instance=obj
        )
        restrict_form_fields(form, request.user)
        if form.is_valid():
            logger.debug("Form validation was successful")
            try:
                with transaction.atomic():
                    # A null PK before saving means this is a creation, not a modification.
                    object_created = form.instance.pk is None
                    obj = form.save()
                    # Enforce object-level permissions: the saved object must be visible
                    # through the restricted queryset, else roll back.
                    if not self.queryset.filter(pk=obj.pk).first():
                        raise PermissionsViolation()
                msg = '{} {}'.format(
                    'Created' if object_created else 'Modified',
                    self.queryset.model._meta.verbose_name
                )
                logger.info(f"{msg} {obj} (PK: {obj.pk})")
                # Link to the object in the success message when it has a detail view.
                if hasattr(obj, 'get_absolute_url'):
                    msg = '{} <a href="{}">{}</a>'.format(msg, obj.get_absolute_url(), escape(obj))
                else:
                    msg = '{} {}'.format(msg, escape(obj))
                messages.success(request, mark_safe(msg))
                if '_addanother' in request.POST:
                    redirect_url = request.path
                    # Pre-populate the next form with this object's cloneable field values.
                    if hasattr(obj, 'clone_fields'):
                        redirect_url += f"?{prepare_cloned_fields(obj)}"
                    return redirect(redirect_url)
                return_url = self.get_return_url(request, obj)
                return redirect(return_url)
            except PermissionsViolation:
                msg = "Object save failed due to object-level permissions violation"
                logger.debug(msg)
                form.add_error(None, msg)
                clear_webhooks.send(sender=self)
        else:
            logger.debug("Form validation failed")
        # Fall through: re-render the form with errors.
        return render(request, self.template_name, {
            'obj': obj,
            'obj_type': self.queryset.model._meta.verbose_name,
            'form': form,
            'return_url': self.get_return_url(request, obj),
        })
class ObjectDeleteView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Delete a single object after confirmation.

    queryset: Base queryset for the object being deleted
    template_name: The name of the template
    """
    queryset = None
    template_name = 'generic/object_delete.html'
    def get_required_permission(self):
        # Deletion requires the 'delete' permission on the model.
        return get_permission_for_model(self.queryset.model, 'delete')
    def get_object(self, kwargs):
        """Look up the object by slug (preferred) or pk; 404 if not found/visible."""
        if 'slug' in kwargs:
            obj = get_object_or_404(self.queryset, slug=kwargs['slug'])
        else:
            obj = get_object_or_404(self.queryset, pk=kwargs['pk'])
        # Take a snapshot of change-logged models for change logging.
        if hasattr(obj, 'snapshot'):
            obj.snapshot()
        return obj
    def get(self, request, **kwargs):
        """Render the deletion confirmation form."""
        obj = self.get_object(kwargs)
        form = ConfirmationForm(initial=request.GET)
        return render(request, self.template_name, {
            'obj': obj,
            'form': form,
            'obj_type': self.queryset.model._meta.verbose_name,
            'return_url': self.get_return_url(request, obj),
        })
    def post(self, request, **kwargs):
        """Delete the object once the confirmation form validates."""
        logger = logging.getLogger('netbox.views.ObjectDeleteView')
        obj = self.get_object(kwargs)
        form = ConfirmationForm(request.POST)
        if form.is_valid():
            logger.debug("Form validation was successful")
            try:
                obj.delete()
            except ProtectedError as e:
                # The object is referenced by a protected foreign key; report and bounce back.
                logger.info("Caught ProtectedError while attempting to delete object")
                handle_protectederror([obj], request, e)
                return redirect(obj.get_absolute_url())
            msg = 'Deleted {} {}'.format(self.queryset.model._meta.verbose_name, obj)
            logger.info(msg)
            messages.success(request, msg)
            # Honor a safe user-supplied return_url, falling back to the view's default.
            return_url = form.cleaned_data.get('return_url')
            if return_url is not None and is_safe_url(url=return_url, allowed_hosts=request.get_host()):
                return redirect(return_url)
            else:
                return redirect(self.get_return_url(request, obj))
        else:
            logger.debug("Form validation failed")
        # Fall through: re-render the confirmation form with errors.
        return render(request, self.template_name, {
            'obj': obj,
            'form': form,
            'obj_type': self.queryset.model._meta.verbose_name,
            'return_url': self.get_return_url(request, obj),
        })
class BulkCreateView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Create multiple objects at once by expanding a user-supplied pattern into
    individual values (one object per value).

    Attributes:
        queryset: Base queryset for the model being created; also used to
            enforce object-level permissions on the new objects.
        form: Form class providing the `pattern` field.
        model_form: ModelForm used to validate and save each individual object.
        pattern_target: Name of the model form field that receives each
            expanded pattern value.
        template_name: Template rendering the creation form.
    """
    queryset = None
    form = None
    model_form = None
    pattern_target = ''
    template_name = None
    def get_required_permission(self):
        # Creation requires the model's "add" permission.
        return get_permission_for_model(self.queryset.model, 'add')
    def get(self, request):
        """Render the pattern form, pre-populated from query string params."""
        # Seed initial model form data from any matching GET parameters.
        initial = {}
        for field in getattr(self.model_form._meta, 'fields', []):
            if request.GET.get(field):
                initial[field] = request.GET[field]
        form = self.form()
        model_form = self.model_form(initial=initial)
        return render(request, self.template_name, {
            'obj_type': self.model_form._meta.model._meta.verbose_name,
            'form': form,
            'model_form': model_form,
            'return_url': self.get_return_url(request),
        })
    def post(self, request):
        """Expand the pattern and create one object per value, atomically."""
        logger = logging.getLogger('netbox.views.BulkCreateView')
        model = self.queryset.model
        form = self.form(request.POST)
        model_form = self.model_form(request.POST)
        if form.is_valid():
            logger.debug("Form validation was successful")
            pattern = form.cleaned_data['pattern']
            new_objs = []
            try:
                # Wrap the whole batch in one transaction so that any failure
                # rolls back every object created so far.
                with transaction.atomic():
                    for value in pattern:
                        # Re-bind a fresh model form with the pattern target
                        # field overridden by the current expanded value.
                        model_form = self.model_form(request.POST.copy())
                        model_form.data[self.pattern_target] = value
                        if model_form.is_valid():
                            obj = model_form.save()
                            logger.debug(f"Created {obj} (PK: {obj.pk})")
                            new_objs.append(obj)
                        else:
                            # Copy any errors on the pattern target field onto
                            # the pattern form field, then raise IntegrityError
                            # purely to trigger a rollback of the transaction.
                            errors = model_form.errors.as_data()
                            if errors.get(self.pattern_target):
                                form.add_error('pattern', errors[self.pattern_target])
                            raise IntegrityError()
                    # Enforce object-level permissions: every new object must be
                    # visible through the (restricted) queryset.
                    if self.queryset.filter(pk__in=[obj.pk for obj in new_objs]).count() != len(new_objs):
                        raise PermissionsViolation
                    msg = "Added {} {}".format(len(new_objs), model._meta.verbose_name_plural)
                    logger.info(msg)
                    messages.success(request, msg)
                    if '_addanother' in request.POST:
                        return redirect(request.path)
                    return redirect(self.get_return_url(request))
            except IntegrityError:
                # Raised above only to roll back; form errors are already set.
                pass
            except PermissionsViolation:
                msg = "Object creation failed due to object-level permissions violation"
                logger.debug(msg)
                form.add_error(None, msg)
        else:
            logger.debug("Form validation failed")
        return render(request, self.template_name, {
            'form': form,
            'model_form': model_form,
            'obj_type': model._meta.verbose_name,
            'return_url': self.get_return_url(request),
        })
class ObjectImportView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Import a single object (with optional related child objects) from
    structured data submitted via the import form.

    Attributes:
        queryset: Base queryset for the model being imported; also used to
            enforce object-level permissions on the new object.
        model_form: ModelForm used to validate and save the primary object.
        related_object_forms: Mapping of data field names to the form classes
            used to validate/save each related child object.
        template_name: Template rendering the import form.
    """
    queryset = None
    model_form = None
    related_object_forms = dict()
    template_name = 'generic/object_import.html'

    def get_required_permission(self):
        # Importing creates objects, so the "add" permission is required.
        return get_permission_for_model(self.queryset.model, 'add')

    def get(self, request):
        """Render an empty import form."""
        form = ImportForm()
        return render(request, self.template_name, {
            'form': form,
            'obj_type': self.queryset.model._meta.verbose_name,
            'return_url': self.get_return_url(request),
        })

    def post(self, request):
        """Validate the submitted data and create the object (and children)."""
        logger = logging.getLogger('netbox.views.ObjectImportView')
        form = ImportForm(request.POST)
        if form.is_valid():
            logger.debug("Import form validation was successful")
            data = form.cleaned_data['data']
            model_form = self.model_form(data)
            restrict_form_fields(model_form, request.user)

            # Assign applicable field defaults as needed prior to form validation.
            for field_name, field in model_form.fields.items():
                if field_name not in data and hasattr(field, 'initial'):
                    model_form.data[field_name] = field.initial

            if model_form.is_valid():
                try:
                    # All saves happen in one transaction so a failure on any
                    # related object rolls back the primary object as well.
                    with transaction.atomic():
                        # Save the primary object
                        obj = model_form.save()

                        # Enforce object-level permissions
                        if not self.queryset.filter(pk=obj.pk).first():
                            raise PermissionsViolation()

                        logger.debug(f"Created {obj} (PK: {obj.pk})")

                        # Iterate through the related object forms (if any), validating and saving each instance.
                        for field_name, related_object_form in self.related_object_forms.items():
                            # Fixed: this debug call was missing its f-prefix and
                            # logged the literal placeholder text.
                            logger.debug(f"Processing form for related objects: {related_object_form}")
                            related_obj_pks = []
                            for i, rel_obj_data in enumerate(data.get(field_name, list())):
                                f = related_object_form(obj, rel_obj_data)

                                # Apply field defaults on the child form, mirroring
                                # the primary form handling above.
                                for subfield_name, field in f.fields.items():
                                    if subfield_name not in rel_obj_data and hasattr(field, 'initial'):
                                        f.data[subfield_name] = field.initial

                                if f.is_valid():
                                    related_obj = f.save()
                                    related_obj_pks.append(related_obj.pk)
                                else:
                                    # Replicate errors on the related object form to the primary form for display
                                    for subfield_name, errors in f.errors.items():
                                        for err in errors:
                                            err_msg = "{}[{}] {}: {}".format(field_name, i, subfield_name, err)
                                            model_form.add_error(None, err_msg)
                                    raise AbortTransaction()

                            # Enforce object-level permissions on related objects.
                            # Fixed: this previously raised ObjectDoesNotExist, which is
                            # not caught below and escaped as a server error; raising
                            # PermissionsViolation matches the primary-object check and
                            # surfaces the failure as a form error instead.
                            model = related_object_form.Meta.model
                            if model.objects.filter(pk__in=related_obj_pks).count() != len(related_obj_pks):
                                raise PermissionsViolation

                except AbortTransaction:
                    # Child validation failed; errors were copied onto model_form.
                    # Discard webhooks queued for the rolled-back changes.
                    clear_webhooks.send(sender=self)

                except PermissionsViolation:
                    msg = "Object creation failed due to object-level permissions violation"
                    logger.debug(msg)
                    form.add_error(None, msg)
                    clear_webhooks.send(sender=self)

            if not model_form.errors:
                logger.info(f"Import object {obj} (PK: {obj.pk})")
                messages.success(request, mark_safe('Imported object: <a href="{}">{}</a>'.format(
                    obj.get_absolute_url(), obj
                )))

                if '_addanother' in request.POST:
                    return redirect(request.get_full_path())

                # Honor a caller-supplied return URL only if it is safe (same host).
                return_url = form.cleaned_data.get('return_url')
                if return_url is not None and is_safe_url(url=return_url, allowed_hosts=request.get_host()):
                    return redirect(return_url)
                else:
                    return redirect(self.get_return_url(request, obj))

            else:
                logger.debug("Model form validation failed")

                # Replicate model form errors for display
                for field, errors in model_form.errors.items():
                    for err in errors:
                        if field == '__all__':
                            form.add_error(None, err)
                        else:
                            form.add_error(None, "{}: {}".format(field, err))

        else:
            logger.debug("Import form validation failed")

        return render(request, self.template_name, {
            'form': form,
            'obj_type': self.queryset.model._meta.verbose_name,
            'return_url': self.get_return_url(request),
        })
class BulkImportView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Import multiple objects from CSV data (pasted text or an uploaded file).

    Attributes:
        queryset: Base queryset for the model being imported; also used to
            enforce object-level permissions on the new objects.
        model_form: CSV-aware ModelForm used for each imported row.
        table: Table class used to display the successfully imported objects.
        template_name: Template rendering the import form.
        widget_attrs: Extra HTML attributes applied to the CSV textarea widget.
    """
    queryset = None
    model_form = None
    table = None
    template_name = 'generic/object_bulk_import.html'
    widget_attrs = {}
    def _import_form(self, *args, **kwargs):
        """Build (and instantiate) an ImportForm bound to this view's model form."""
        class ImportForm(BootstrapMixin, Form):
            # Pasted CSV text.
            csv = CSVDataField(
                from_form=self.model_form,
                widget=Textarea(attrs=self.widget_attrs)
            )
            # Uploaded CSV file (alternative to pasted text).
            csv_file = CSVFileField(
                label="CSV file",
                from_form=self.model_form,
                required=False
            )
            def clean(self):
                csv_rows = self.cleaned_data['csv'][1] if 'csv' in self.cleaned_data else None
                csv_file = self.files.get('csv_file')
                # Check that the user has not submitted both text data and a file
                if csv_rows and csv_file:
                    raise ValidationError(
                        "Cannot process CSV text and file attachment simultaneously. Please choose only one import "
                        "method."
                    )
        return ImportForm(*args, **kwargs)
    def _save_obj(self, obj_form, request):
        """
        Save a single imported object. Subclasses may override to customize
        persistence (e.g. to attach additional context from the request).
        """
        return obj_form.save()
    def get_required_permission(self):
        # Importing creates objects, so the "add" permission is required.
        return get_permission_for_model(self.queryset.model, 'add')
    def get(self, request):
        """Render an empty CSV import form."""
        return render(request, self.template_name, {
            'form': self._import_form(),
            'fields': self.model_form().fields,
            'obj_type': self.model_form._meta.model._meta.verbose_name,
            'return_url': self.get_return_url(request),
        })
    def post(self, request):
        """Parse the CSV data and create one object per row, atomically."""
        logger = logging.getLogger('netbox.views.BulkImportView')
        new_objs = []
        form = self._import_form(request.POST, request.FILES)
        if form.is_valid():
            logger.debug("Form validation was successful")
            try:
                # Iterate through CSV data and bind each row to a new model form instance.
                with transaction.atomic():
                    # Prefer the uploaded file when present; otherwise use
                    # the pasted CSV text.
                    if request.FILES:
                        headers, records = form.cleaned_data['csv_file']
                    else:
                        headers, records = form.cleaned_data['csv']
                    for row, data in enumerate(records, start=1):
                        obj_form = self.model_form(data, headers=headers)
                        restrict_form_fields(obj_form, request.user)
                        if obj_form.is_valid():
                            obj = self._save_obj(obj_form, request)
                            new_objs.append(obj)
                        else:
                            # Report the first error per field with its row number,
                            # then raise ValidationError to roll back the batch.
                            for field, err in obj_form.errors.items():
                                form.add_error('csv', "Row {} {}: {}".format(row, field, err[0]))
                            raise ValidationError("")
                    # Enforce object-level permissions
                    if self.queryset.filter(pk__in=[obj.pk for obj in new_objs]).count() != len(new_objs):
                        raise PermissionsViolation
                # Compile a table containing the imported objects
                obj_table = self.table(new_objs)
                if new_objs:
                    msg = 'Imported {} {}'.format(len(new_objs), new_objs[0]._meta.verbose_name_plural)
                    logger.info(msg)
                    messages.success(request, msg)
                return render(request, "import_success.html", {
                    'table': obj_table,
                    'return_url': self.get_return_url(request),
                })
            except ValidationError:
                # Row-level validation failed; discard queued webhooks for the
                # rolled-back changes. Errors were already copied onto the form.
                clear_webhooks.send(sender=self)
            except PermissionsViolation:
                msg = "Object import failed due to object-level permissions violation"
                logger.debug(msg)
                form.add_error(None, msg)
                clear_webhooks.send(sender=self)
        else:
            logger.debug("Form validation failed")
        return render(request, self.template_name, {
            'form': form,
            'fields': self.model_form().fields,
            'obj_type': self.model_form._meta.model._meta.verbose_name,
            'return_url': self.get_return_url(request),
        })
class BulkEditView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Edit multiple objects of the same type at once.

    Attributes:
        queryset: Base queryset for the model being edited; also used to
            enforce object-level permissions on the updated objects.
        filterset: Optional FilterSet used to resolve "_all" selections.
        table: Table class used to display the objects being edited.
        form: Bulk edit form class (instantiated with the model as first arg).
        template_name: Template rendering the bulk edit form.
    """
    queryset = None
    filterset = None
    table = None
    form = None
    template_name = 'generic/object_bulk_edit.html'
    def get_required_permission(self):
        # Bulk editing requires the model's "change" permission.
        return get_permission_for_model(self.queryset.model, 'change')
    def get(self, request):
        # Bulk edits are POST-only; a bare GET simply returns to the list.
        return redirect(self.get_return_url(request))
    def post(self, request, **kwargs):
        """Apply the submitted bulk changes to all selected objects."""
        logger = logging.getLogger('netbox.views.BulkEditView')
        model = self.queryset.model
        # If we are editing *all* objects in the queryset, replace the PK list with all matched objects.
        if request.POST.get('_all') and self.filterset is not None:
            pk_list = self.filterset(request.GET, self.queryset.values_list('pk', flat=True)).qs
        else:
            pk_list = request.POST.getlist('pk')
        # Include the PK list as initial data for the form
        initial_data = {'pk': pk_list}
        # Check for other contextual data needed for the form. We avoid passing all of request.GET because the
        # filter values will conflict with the bulk edit form fields.
        # TODO: Find a better way to accomplish this
        if 'device' in request.GET:
            initial_data['device'] = request.GET.get('device')
        elif 'device_type' in request.GET:
            initial_data['device_type'] = request.GET.get('device_type')
        elif 'virtual_machine' in request.GET:
            initial_data['virtual_machine'] = request.GET.get('virtual_machine')
        if '_apply' in request.POST:
            form = self.form(model, request.POST, initial=initial_data)
            restrict_form_fields(form, request.user)
            if form.is_valid():
                logger.debug("Form validation was successful")
                # Partition form fields into custom fields and standard model
                # fields; "pk" is bookkeeping only and is excluded.
                custom_fields = form.custom_fields if hasattr(form, 'custom_fields') else []
                standard_fields = [
                    field for field in form.fields if field not in custom_fields + ['pk']
                ]
                # Fields the user explicitly asked to clear.
                nullified_fields = request.POST.getlist('_nullify')
                try:
                    # Update all selected objects in a single transaction so a
                    # validation failure on any object rolls back the batch.
                    with transaction.atomic():
                        updated_objects = []
                        for obj in self.queryset.filter(pk__in=form.cleaned_data['pk']):
                            # Take a snapshot of change-logged models
                            if hasattr(obj, 'snapshot'):
                                obj.snapshot()
                            # Update standard fields. If a field is listed in _nullify, delete its value.
                            for name in standard_fields:
                                try:
                                    model_field = model._meta.get_field(name)
                                except FieldDoesNotExist:
                                    # This form field is used to modify a field rather than set its value directly
                                    model_field = None
                                # Handle nullification
                                if name in form.nullable_fields and name in nullified_fields:
                                    if isinstance(model_field, ManyToManyField):
                                        getattr(obj, name).set([])
                                    else:
                                        # Use empty string for non-nullable model fields.
                                        setattr(obj, name, None if model_field.null else '')
                                # ManyToManyFields
                                elif isinstance(model_field, ManyToManyField):
                                    if form.cleaned_data[name]:
                                        getattr(obj, name).set(form.cleaned_data[name])
                                # Normal fields
                                elif name in form.changed_data:
                                    setattr(obj, name, form.cleaned_data[name])
                            # Update custom fields
                            for name in custom_fields:
                                if name in form.nullable_fields and name in nullified_fields:
                                    obj.custom_field_data[name] = None
                                elif name in form.changed_data:
                                    obj.custom_field_data[name] = form.cleaned_data[name]
                            # Validate and persist; full_clean() may raise
                            # ValidationError, aborting the whole batch.
                            obj.full_clean()
                            obj.save()
                            updated_objects.append(obj)
                            logger.debug(f"Saved {obj} (PK: {obj.pk})")
                            # Add/remove tags
                            if form.cleaned_data.get('add_tags', None):
                                obj.tags.add(*form.cleaned_data['add_tags'])
                            if form.cleaned_data.get('remove_tags', None):
                                obj.tags.remove(*form.cleaned_data['remove_tags'])
                        # Enforce object-level permissions
                        if self.queryset.filter(pk__in=[obj.pk for obj in updated_objects]).count() != len(updated_objects):
                            raise PermissionsViolation
                    if updated_objects:
                        msg = 'Updated {} {}'.format(len(updated_objects), model._meta.verbose_name_plural)
                        logger.info(msg)
                        messages.success(self.request, msg)
                    return redirect(self.get_return_url(request))
                except ValidationError as e:
                    # "obj" is the object whose full_clean() failed.
                    messages.error(self.request, "{} failed validation: {}".format(obj, ", ".join(e.messages)))
                    clear_webhooks.send(sender=self)
                except PermissionsViolation:
                    msg = "Object update failed due to object-level permissions violation"
                    logger.debug(msg)
                    form.add_error(None, msg)
                    clear_webhooks.send(sender=self)
            else:
                logger.debug("Form validation failed")
        else:
            # First visit (selection submitted, "_apply" not yet clicked):
            # render an unbound form seeded with the selected PKs.
            form = self.form(model, initial=initial_data)
            restrict_form_fields(form, request.user)
        # Retrieve objects being edited
        table = self.table(self.queryset.filter(pk__in=pk_list), orderable=False)
        if not table.rows:
            messages.warning(request, "No {} were selected.".format(model._meta.verbose_name_plural))
            return redirect(self.get_return_url(request))
        return render(request, self.template_name, {
            'form': form,
            'table': table,
            'obj_type_plural': model._meta.verbose_name_plural,
            'return_url': self.get_return_url(request),
        })
class BulkRenameView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Rename multiple objects at once using find/replace (optionally regex),
    with a preview step before applying the changes.

    Attributes:
        queryset: Base queryset for the model being renamed; also used to
            enforce object-level permissions on the renamed objects.
        template_name: Template rendering the rename preview/confirmation.
    """
    queryset = None
    template_name = 'generic/object_bulk_rename.html'
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Create a new Form class from BulkRenameForm
        class _Form(BulkRenameForm):
            # Hidden field carrying the selected objects; bound to this
            # view's queryset so arbitrary PKs cannot be injected.
            pk = ModelMultipleChoiceField(
                queryset=self.queryset,
                widget=MultipleHiddenInput()
            )
        self.form = _Form
    def get_required_permission(self):
        # Renaming modifies objects, so the "change" permission is required.
        return get_permission_for_model(self.queryset.model, 'change')
    def post(self, request):
        """
        Compute new names for the selected objects. "_preview" only displays
        the proposed names; "_apply" also saves them.
        """
        logger = logging.getLogger('netbox.views.BulkRenameView')
        if '_preview' in request.POST or '_apply' in request.POST:
            form = self.form(request.POST, initial={'pk': request.POST.getlist('pk')})
            selected_objects = self.queryset.filter(pk__in=form.initial['pk'])
            if form.is_valid():
                try:
                    # Apply renames atomically so a permissions failure rolls
                    # back every saved rename.
                    with transaction.atomic():
                        renamed_pks = []
                        for obj in selected_objects:
                            # Take a snapshot of change-logged models
                            if hasattr(obj, 'snapshot'):
                                obj.snapshot()
                            find = form.cleaned_data['find']
                            replace = form.cleaned_data['replace']
                            if form.cleaned_data['use_regex']:
                                try:
                                    obj.new_name = re.sub(find, replace, obj.name)
                                # Catch regex group reference errors
                                except re.error:
                                    obj.new_name = obj.name
                            else:
                                obj.new_name = obj.name.replace(find, replace)
                            renamed_pks.append(obj.pk)
                        if '_apply' in request.POST:
                            for obj in selected_objects:
                                obj.name = obj.new_name
                                obj.save()
                            # Enforce constrained permissions
                            if self.queryset.filter(pk__in=renamed_pks).count() != len(selected_objects):
                                raise PermissionsViolation
                            messages.success(request, "Renamed {} {}".format(
                                len(selected_objects),
                                self.queryset.model._meta.verbose_name_plural
                            ))
                            return redirect(self.get_return_url(request))
                except PermissionsViolation:
                    msg = "Object update failed due to object-level permissions violation"
                    logger.debug(msg)
                    form.add_error(None, msg)
                    clear_webhooks.send(sender=self)
        else:
            # Initial selection: render an unbound form seeded with the PKs.
            form = self.form(initial={'pk': request.POST.getlist('pk')})
            selected_objects = self.queryset.filter(pk__in=form.initial['pk'])
        return render(request, self.template_name, {
            'form': form,
            'obj_type_plural': self.queryset.model._meta.verbose_name_plural,
            'selected_objects': selected_objects,
            'return_url': self.get_return_url(request),
        })
class BulkDeleteView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Delete multiple objects of the same type at once, after confirmation.

    Attributes:
        queryset: Base queryset for the model being deleted.
        filterset: Optional FilterSet used to resolve "_all" selections.
        table: Table class used to display the objects pending deletion.
        form: Optional custom confirmation form; defaults to a generated
            ConfirmationForm subclass (see get_form()).
        template_name: Template rendering the confirmation page.
    """
    queryset = None
    filterset = None
    table = None
    form = None
    template_name = 'generic/object_bulk_delete.html'
    def get_required_permission(self):
        # Bulk deletion requires the model's "delete" permission.
        return get_permission_for_model(self.queryset.model, 'delete')
    def get(self, request):
        # Bulk deletes are POST-only; a bare GET simply returns to the list.
        return redirect(self.get_return_url(request))
    def post(self, request, **kwargs):
        """Delete the selected objects once "_confirm" has been submitted."""
        logger = logging.getLogger('netbox.views.BulkDeleteView')
        model = self.queryset.model
        # Are we deleting *all* objects in the queryset or just a selected subset?
        if request.POST.get('_all'):
            qs = model.objects.all()
            if self.filterset is not None:
                qs = self.filterset(request.GET, qs).qs
            pk_list = qs.only('pk').values_list('pk', flat=True)
        else:
            pk_list = [int(pk) for pk in request.POST.getlist('pk')]
        form_cls = self.get_form()
        if '_confirm' in request.POST:
            form = form_cls(request.POST)
            if form.is_valid():
                logger.debug("Form validation was successful")
                # Delete objects
                queryset = self.queryset.filter(pk__in=pk_list)
                # Count before deleting; the queryset is empty afterwards.
                deleted_count = queryset.count()
                try:
                    for obj in queryset:
                        # Take a snapshot of change-logged models
                        if hasattr(obj, 'snapshot'):
                            obj.snapshot()
                        obj.delete()
                except ProtectedError as e:
                    # Deletion blocked by protected FK references; report which
                    # dependent objects prevented it.
                    logger.info("Caught ProtectedError while attempting to delete objects")
                    handle_protectederror(queryset, request, e)
                    return redirect(self.get_return_url(request))
                msg = f"Deleted {deleted_count} {model._meta.verbose_name_plural}"
                logger.info(msg)
                messages.success(request, msg)
                return redirect(self.get_return_url(request))
            else:
                logger.debug("Form validation failed")
        else:
            # First visit: render the confirmation form seeded with the PKs.
            form = form_cls(initial={
                'pk': pk_list,
                'return_url': self.get_return_url(request),
            })
        # Retrieve objects being deleted
        table = self.table(self.queryset.filter(pk__in=pk_list), orderable=False)
        if not table.rows:
            messages.warning(request, "No {} were selected for deletion.".format(model._meta.verbose_name_plural))
            return redirect(self.get_return_url(request))
        return render(request, self.template_name, {
            'form': form,
            'obj_type_plural': model._meta.verbose_name_plural,
            'table': table,
            'return_url': self.get_return_url(request),
        })
    def get_form(self):
        """
        Return the confirmation form class: self.form if provided, otherwise
        a ConfirmationForm subclass whose hidden "pk" field is bound to this
        view's queryset.
        """
        class BulkDeleteForm(ConfirmationForm):
            pk = ModelMultipleChoiceField(queryset=self.queryset, widget=MultipleHiddenInput)
        if self.form:
            return self.form
        return BulkDeleteForm
#
# Device/VirtualMachine components
#
# TODO: Replace with BulkCreateView
class ComponentCreateView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Add one or more components (e.g. interfaces, console ports) to a single
    Device or VirtualMachine by expanding name/label patterns.

    Attributes:
        queryset: Base queryset for the component model; also used to enforce
            object-level permissions on the new components.
        form: Form class providing "name_pattern" (and optionally
            "label_pattern") fields.
        model_form: ModelForm used to validate and save each component.
        template_name: Template rendering the creation form.
    """
    queryset = None
    form = None
    model_form = None
    template_name = 'generic/object_edit.html'
    def get_required_permission(self):
        # Creating components requires the model's "add" permission.
        return get_permission_for_model(self.queryset.model, 'add')
    def get(self, request):
        """Render the component creation form, seeded from GET parameters."""
        form = self.form(initial=request.GET)
        return render(request, self.template_name, {
            'obj': self.queryset.model(),
            'obj_type': self.queryset.model._meta.verbose_name,
            'form': form,
            'return_url': self.get_return_url(request),
        })
    def post(self, request):
        """Validate the pattern form and create the components."""
        logger = logging.getLogger('netbox.views.ComponentCreateView')
        form = self.form(request.POST, initial=request.GET)
        # validate_form() performs the actual creation and attaches any
        # errors to the form.
        self.validate_form(request, form)
        if form.is_valid() and not form.errors:
            if '_addanother' in request.POST:
                return redirect(request.get_full_path())
            else:
                return redirect(self.get_return_url(request))
        return render(request, self.template_name, {
            'obj_type': self.queryset.model._meta.verbose_name,
            'form': form,
            'return_url': self.get_return_url(request),
        })
    def validate_form(self, request, form):
        """
        Validate the pattern form, instantiate a model form per expanded name,
        and (if everything validates) save all components atomically.

        Returns the list of created objects on success, or None if any error
        occurred (errors are recorded on the form).
        """
        logger = logging.getLogger('netbox.views.ComponentCreateView')
        if form.is_valid():
            new_components = []
            data = deepcopy(request.POST)
            names = form.cleaned_data['name_pattern']
            labels = form.cleaned_data.get('label_pattern')
            for i, name in enumerate(names):
                # Labels pair positionally with names when a label pattern
                # was provided.
                label = labels[i] if labels else None
                # Initialize the individual component form
                data['name'] = name
                data['label'] = label
                if hasattr(form, 'get_iterative_data'):
                    data.update(form.get_iterative_data(i))
                component_form = self.model_form(data)
                if component_form.is_valid():
                    new_components.append(component_form)
                else:
                    for field, errors in component_form.errors.as_data().items():
                        # Assign errors on the child form's name/label field to name_pattern/label_pattern on the parent form
                        if field == 'name':
                            field = 'name_pattern'
                        elif field == 'label':
                            field = 'label_pattern'
                        for e in errors:
                            form.add_error(field, '{}: {}'.format(name, ', '.join(e)))
            if not form.errors:
                try:
                    # Save all components in one transaction so a permissions
                    # failure rolls back the entire batch.
                    with transaction.atomic():
                        new_objs = []
                        for component_form in new_components:
                            obj = component_form.save()
                            new_objs.append(obj)
                        # Enforce object-level permissions on the new components.
                        if self.queryset.filter(pk__in=[obj.pk for obj in new_objs]).count() != len(new_objs):
                            raise PermissionsViolation
                        messages.success(request, "Added {} {}".format(
                            len(new_components), self.queryset.model._meta.verbose_name_plural
                        ))
                        return new_objs
                except PermissionsViolation:
                    msg = "Component creation failed due to object-level permissions violation"
                    logger.debug(msg)
                    form.add_error(None, msg)
                    clear_webhooks.send(sender=self)
        return None
class BulkComponentCreateView(GetReturnURLMixin, ObjectPermissionRequiredMixin, View):
    """
    Add one or more components (e.g. interfaces) to each of multiple selected
    parent objects (Devices or VirtualMachines) in one operation.

    Attributes:
        parent_model: Model class of the parent objects (e.g. Device).
        parent_field: Name of the component form field referencing the parent.
        form: Bulk creation form providing "pk" and pattern fields.
        queryset: Base queryset for the component model; also used to enforce
            object-level permissions on the new components.
        model_form: ModelForm used to validate and save each component.
        filterset: Optional FilterSet used to resolve "_all" selections.
        table: Table class used to display the selected parent objects.
        template_name: Template rendering the bulk creation form.
    """
    parent_model = None
    parent_field = None
    form = None
    queryset = None
    model_form = None
    filterset = None
    table = None
    template_name = 'generic/object_bulk_add_component.html'
    def get_required_permission(self):
        # Permission name is built directly (components live in the dcim app).
        return f'dcim.add_{self.queryset.model._meta.model_name}'
    def post(self, request):
        """Create the requested components on every selected parent object."""
        logger = logging.getLogger('netbox.views.BulkComponentCreateView')
        parent_model_name = self.parent_model._meta.verbose_name_plural
        model_name = self.queryset.model._meta.verbose_name_plural
        # Resolve the parent selection: either everything matched by the
        # filterset ("_all") or the explicitly checked PKs.
        if request.POST.get('_all') and self.filterset is not None:
            pk_list = [obj.pk for obj in self.filterset(request.GET, self.parent_model.objects.only('pk')).qs]
        else:
            pk_list = [int(pk) for pk in request.POST.getlist('pk')]
        selected_objects = self.parent_model.objects.filter(pk__in=pk_list)
        if not selected_objects:
            messages.warning(request, "No {} were selected.".format(self.parent_model._meta.verbose_name_plural))
            return redirect(self.get_return_url(request))
        table = self.table(selected_objects)
        if '_create' in request.POST:
            form = self.form(request.POST)
            if form.is_valid():
                logger.debug("Form validation was successful")
                new_components = []
                data = deepcopy(form.cleaned_data)
                try:
                    # Create every component for every parent in a single
                    # transaction so any failure rolls back the whole batch.
                    with transaction.atomic():
                        for obj in data['pk']:
                            names = data['name_pattern']
                            labels = data['label_pattern'] if 'label_pattern' in data else None
                            for i, name in enumerate(names):
                                # Labels pair positionally with names.
                                label = labels[i] if labels else None
                                component_data = {
                                    self.parent_field: obj.pk,
                                    'name': name,
                                    'label': label
                                }
                                # Merge the remaining cleaned form data into the
                                # per-component payload.
                                component_data.update(data)
                                component_form = self.model_form(component_data)
                                if component_form.is_valid():
                                    instance = component_form.save()
                                    logger.debug(f"Created {instance} on {instance.parent_object}")
                                    new_components.append(instance)
                                else:
                                    for field, errors in component_form.errors.as_data().items():
                                        for e in errors:
                                            form.add_error(field, '{} {}: {}'.format(obj, name, ', '.join(e)))
                        # Enforce object-level permissions on the new components.
                        if self.queryset.filter(pk__in=[obj.pk for obj in new_components]).count() != len(new_components):
                            raise PermissionsViolation
                except IntegrityError:
                    # Database-level constraint failure; discard queued webhooks
                    # for the rolled-back changes.
                    clear_webhooks.send(sender=self)
                except PermissionsViolation:
                    msg = "Component creation failed due to object-level permissions violation"
                    logger.debug(msg)
                    form.add_error(None, msg)
                    clear_webhooks.send(sender=self)
                if not form.errors:
                    msg = "Added {} {} to {} {}.".format(
                        len(new_components),
                        model_name,
                        len(form.cleaned_data['pk']),
                        parent_model_name
                    )
                    logger.info(msg)
                    messages.success(request, msg)
                    return redirect(self.get_return_url(request))
            else:
                logger.debug("Form validation failed")
        else:
            # First visit: render an unbound form seeded with the parent PKs.
            form = self.form(initial={'pk': pk_list})
        return render(request, self.template_name, {
            'form': form,
            'parent_model_name': parent_model_name,
            'model_name': model_name,
            'table': table,
            'return_url': self.get_return_url(request),
        })
| true | true |
1c2ff9b1862ba7075c4b45778d02b951166dc9c9 | 7,084 | py | Python | chaingreen/cmds/configure.py | todortron/chaingreen-test | 9054bfd79812ebd7fb3a3d341c03dbadea990fd0 | [
"Apache-2.0"
] | null | null | null | chaingreen/cmds/configure.py | todortron/chaingreen-test | 9054bfd79812ebd7fb3a3d341c03dbadea990fd0 | [
"Apache-2.0"
] | null | null | null | chaingreen/cmds/configure.py | todortron/chaingreen-test | 9054bfd79812ebd7fb3a3d341c03dbadea990fd0 | [
"Apache-2.0"
] | null | null | null | from pathlib import Path
from typing import Dict
import click
from chaingreen.util.config import load_config, save_config, str2bool
from chaingreen.util.default_root import DEFAULT_ROOT_PATH
def configure(
root_path: Path,
set_farmer_peer: str,
set_node_introducer: str,
set_fullnode_port: str,
set_harvester_port: str,
set_log_level: str,
enable_upnp: str,
set_outbound_peer_count: str,
set_peer_count: str,
testnet: str,
):
config: Dict = load_config(DEFAULT_ROOT_PATH, "config.yaml")
change_made = False
if set_node_introducer:
try:
if set_node_introducer.index(":"):
host, port = (
":".join(set_node_introducer.split(":")[:-1]),
set_node_introducer.split(":")[-1],
)
config["full_node"]["introducer_peer"]["host"] = host
config["full_node"]["introducer_peer"]["port"] = int(port)
config["introducer"]["port"] = int(port)
print("Node introducer updated")
change_made = True
except ValueError:
print("Node introducer address must be in format [IP:Port]")
if set_farmer_peer:
try:
if set_farmer_peer.index(":"):
host, port = (
":".join(set_farmer_peer.split(":")[:-1]),
set_farmer_peer.split(":")[-1],
)
config["full_node"]["farmer_peer"]["host"] = host
config["full_node"]["farmer_peer"]["port"] = int(port)
config["harvester"]["farmer_peer"]["host"] = host
config["harvester"]["farmer_peer"]["port"] = int(port)
print("Farmer peer updated, make sure your harvester has the proper cert installed")
change_made = True
except ValueError:
print("Farmer address must be in format [IP:Port]")
if set_fullnode_port:
config["full_node"]["port"] = int(set_fullnode_port)
config["full_node"]["introducer_peer"]["port"] = int(set_fullnode_port)
config["farmer"]["full_node_peer"]["port"] = int(set_fullnode_port)
config["timelord"]["full_node_peer"]["port"] = int(set_fullnode_port)
config["wallet"]["full_node_peer"]["port"] = int(set_fullnode_port)
config["wallet"]["introducer_peer"]["port"] = int(set_fullnode_port)
config["introducer"]["port"] = int(set_fullnode_port)
print("Default full node port updated")
change_made = True
if set_harvester_port:
config["harvester"]["port"] = int(set_harvester_port)
config["farmer"]["harvester_peer"]["port"] = int(set_harvester_port)
print("Default harvester port updated")
change_made = True
if set_log_level:
levels = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"]
if set_log_level in levels:
config["logging"]["log_level"] = set_log_level
print(f"Logging level updated. Check {DEFAULT_ROOT_PATH}/log/debug.log")
change_made = True
else:
print(f"Logging level not updated. Use one of: {levels}")
if enable_upnp is not None:
config["full_node"]["enable_upnp"] = str2bool(enable_upnp)
if str2bool(enable_upnp):
print("uPnP enabled")
else:
print("uPnP disabled")
change_made = True
if set_outbound_peer_count is not None:
config["full_node"]["target_outbound_peer_count"] = int(set_outbound_peer_count)
print("Target outbound peer count updated")
change_made = True
if set_peer_count is not None:
config["full_node"]["target_peer_count"] = int(set_peer_count)
print("Target peer count updated")
change_made = True
if testnet is not None:
print("Setting Testnet")
testnet_port = "58744"
testnet_introducer = "testnet0.introducer.chaingreen.org"
testnet = "testnet0"
config["full_node"]["port"] = int(testnet_port)
config["full_node"]["introducer_peer"]["port"] = int(testnet_port)
config["farmer"]["full_node_peer"]["port"] = int(testnet_port)
config["timelord"]["full_node_peer"]["port"] = int(testnet_port)
config["wallet"]["full_node_peer"]["port"] = int(testnet_port)
config["wallet"]["introducer_peer"]["port"] = int(testnet_port)
config["introducer"]["port"] = int(testnet_port)
config["full_node"]["introducer_peer"]["host"] = testnet_introducer
config["selected_network"] = testnet
config["harvester"]["selected_network"] = testnet
config["pool"]["selected_network"] = testnet
config["farmer"]["selected_network"] = testnet
config["timelord"]["selected_network"] = testnet
config["full_node"]["selected_network"] = testnet
config["ui"]["selected_network"] = testnet
config["introducer"]["selected_network"] = testnet
config["wallet"]["selected_network"] = testnet
print("Default full node port, introducer and network setting updated")
change_made = True
if change_made:
print("Restart any running chaingreen services for changes to take effect")
save_config(root_path, "config.yaml", config)
return 0
@click.command("configure", short_help="Modify configuration")
@click.option(
    "--testnet",
    "-t",
    help="configures for connection to testnet",
    type=click.Choice(["true", "t", "false", "f"]),
)
@click.option("--set-node-introducer", help="Set the introducer for node - IP:Port", type=str)
@click.option("--set-farmer-peer", help="Set the farmer peer for harvester - IP:Port", type=str)
@click.option(
    "--set-fullnode-port",
    help="Set the port to use for the fullnode, useful for testing",
    type=str,
)
@click.option(
    "--set-harvester-port",
    help="Set the port to use for the harvester, useful for testing",
    type=str,
)
@click.option(
    "--set-log-level",
    "--log-level",
    "-log-level",
    help="Set the instance log level",
    type=click.Choice(["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"]),
)
@click.option(
    "--enable-upnp",
    "--upnp",
    "-upnp",
    help="Enable or disable uPnP",
    type=click.Choice(["true", "t", "false", "f"]),
)
@click.option(
    "--set_outbound-peer-count",
    help="Update the target outbound peer count (default 8)",
    type=str,
)
@click.option("--set-peer-count", help="Update the target peer count (default 80)", type=str)
@click.pass_context
def configure_cmd(
    ctx,
    set_farmer_peer,
    set_node_introducer,
    set_fullnode_port,
    set_harvester_port,
    set_log_level,
    enable_upnp,
    set_outbound_peer_count,
    set_peer_count,
    testnet,
):
    """CLI entry point: delegate all option values to configure()."""
    # Pass everything by keyword so the mapping from CLI option to
    # configuration setting is explicit.
    configure(
        root_path=ctx.obj["root_path"],
        set_farmer_peer=set_farmer_peer,
        set_node_introducer=set_node_introducer,
        set_fullnode_port=set_fullnode_port,
        set_harvester_port=set_harvester_port,
        set_log_level=set_log_level,
        enable_upnp=enable_upnp,
        set_outbound_peer_count=set_outbound_peer_count,
        set_peer_count=set_peer_count,
        testnet=testnet,
    )
| 38.291892 | 100 | 0.621118 | from pathlib import Path
from typing import Dict
import click
from chaingreen.util.config import load_config, save_config, str2bool
from chaingreen.util.default_root import DEFAULT_ROOT_PATH
def configure(
root_path: Path,
set_farmer_peer: str,
set_node_introducer: str,
set_fullnode_port: str,
set_harvester_port: str,
set_log_level: str,
enable_upnp: str,
set_outbound_peer_count: str,
set_peer_count: str,
testnet: str,
):
config: Dict = load_config(DEFAULT_ROOT_PATH, "config.yaml")
change_made = False
if set_node_introducer:
try:
if set_node_introducer.index(":"):
host, port = (
":".join(set_node_introducer.split(":")[:-1]),
set_node_introducer.split(":")[-1],
)
config["full_node"]["introducer_peer"]["host"] = host
config["full_node"]["introducer_peer"]["port"] = int(port)
config["introducer"]["port"] = int(port)
print("Node introducer updated")
change_made = True
except ValueError:
print("Node introducer address must be in format [IP:Port]")
if set_farmer_peer:
try:
if set_farmer_peer.index(":"):
host, port = (
":".join(set_farmer_peer.split(":")[:-1]),
set_farmer_peer.split(":")[-1],
)
config["full_node"]["farmer_peer"]["host"] = host
config["full_node"]["farmer_peer"]["port"] = int(port)
config["harvester"]["farmer_peer"]["host"] = host
config["harvester"]["farmer_peer"]["port"] = int(port)
print("Farmer peer updated, make sure your harvester has the proper cert installed")
change_made = True
except ValueError:
print("Farmer address must be in format [IP:Port]")
if set_fullnode_port:
config["full_node"]["port"] = int(set_fullnode_port)
config["full_node"]["introducer_peer"]["port"] = int(set_fullnode_port)
config["farmer"]["full_node_peer"]["port"] = int(set_fullnode_port)
config["timelord"]["full_node_peer"]["port"] = int(set_fullnode_port)
config["wallet"]["full_node_peer"]["port"] = int(set_fullnode_port)
config["wallet"]["introducer_peer"]["port"] = int(set_fullnode_port)
config["introducer"]["port"] = int(set_fullnode_port)
print("Default full node port updated")
change_made = True
if set_harvester_port:
config["harvester"]["port"] = int(set_harvester_port)
config["farmer"]["harvester_peer"]["port"] = int(set_harvester_port)
print("Default harvester port updated")
change_made = True
if set_log_level:
levels = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"]
if set_log_level in levels:
config["logging"]["log_level"] = set_log_level
print(f"Logging level updated. Check {DEFAULT_ROOT_PATH}/log/debug.log")
change_made = True
else:
print(f"Logging level not updated. Use one of: {levels}")
if enable_upnp is not None:
config["full_node"]["enable_upnp"] = str2bool(enable_upnp)
if str2bool(enable_upnp):
print("uPnP enabled")
else:
print("uPnP disabled")
change_made = True
if set_outbound_peer_count is not None:
config["full_node"]["target_outbound_peer_count"] = int(set_outbound_peer_count)
print("Target outbound peer count updated")
change_made = True
if set_peer_count is not None:
config["full_node"]["target_peer_count"] = int(set_peer_count)
print("Target peer count updated")
change_made = True
if testnet is not None:
print("Setting Testnet")
testnet_port = "58744"
testnet_introducer = "testnet0.introducer.chaingreen.org"
testnet = "testnet0"
config["full_node"]["port"] = int(testnet_port)
config["full_node"]["introducer_peer"]["port"] = int(testnet_port)
config["farmer"]["full_node_peer"]["port"] = int(testnet_port)
config["timelord"]["full_node_peer"]["port"] = int(testnet_port)
config["wallet"]["full_node_peer"]["port"] = int(testnet_port)
config["wallet"]["introducer_peer"]["port"] = int(testnet_port)
config["introducer"]["port"] = int(testnet_port)
config["full_node"]["introducer_peer"]["host"] = testnet_introducer
config["selected_network"] = testnet
config["harvester"]["selected_network"] = testnet
config["pool"]["selected_network"] = testnet
config["farmer"]["selected_network"] = testnet
config["timelord"]["selected_network"] = testnet
config["full_node"]["selected_network"] = testnet
config["ui"]["selected_network"] = testnet
config["introducer"]["selected_network"] = testnet
config["wallet"]["selected_network"] = testnet
print("Default full node port, introducer and network setting updated")
change_made = True
if change_made:
print("Restart any running chaingreen services for changes to take effect")
save_config(root_path, "config.yaml", config)
return 0
@click.command("configure", short_help="Modify configuration")
@click.option(
"--testnet",
"-t",
help="configures for connection to testnet",
type=click.Choice(["true", "t", "false", "f"]),
)
@click.option("--set-node-introducer", help="Set the introducer for node - IP:Port", type=str)
@click.option("--set-farmer-peer", help="Set the farmer peer for harvester - IP:Port", type=str)
@click.option(
"--set-fullnode-port",
help="Set the port to use for the fullnode, useful for testing",
type=str,
)
@click.option(
"--set-harvester-port",
help="Set the port to use for the harvester, useful for testing",
type=str,
)
@click.option(
"--set-log-level",
"--log-level",
"-log-level",
help="Set the instance log level",
type=click.Choice(["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"]),
)
@click.option(
"--enable-upnp",
"--upnp",
"-upnp",
help="Enable or disable uPnP",
type=click.Choice(["true", "t", "false", "f"]),
)
@click.option(
"--set_outbound-peer-count",
help="Update the target outbound peer count (default 8)",
type=str,
)
@click.option("--set-peer-count", help="Update the target peer count (default 80)", type=str)
@click.pass_context
def configure_cmd(
ctx,
set_farmer_peer,
set_node_introducer,
set_fullnode_port,
set_harvester_port,
set_log_level,
enable_upnp,
set_outbound_peer_count,
set_peer_count,
testnet,
):
configure(
ctx.obj["root_path"],
set_farmer_peer,
set_node_introducer,
set_fullnode_port,
set_harvester_port,
set_log_level,
enable_upnp,
set_outbound_peer_count,
set_peer_count,
testnet,
)
| true | true |
1c2ff9f8f2ebf0991614c3a3a089c07f0b618eb9 | 9,702 | py | Python | benchmarks/f3_wrong_hints/scaling_ltl_infinite_state/16-extending_bound_1.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 3 | 2021-04-23T23:29:26.000Z | 2022-03-23T10:00:30.000Z | benchmarks/f3_wrong_hints/scaling_ltl_infinite_state/16-extending_bound_1.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | null | null | null | benchmarks/f3_wrong_hints/scaling_ltl_infinite_state/16-extending_bound_1.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 1 | 2021-11-17T22:02:56.000Z | 2021-11-17T22:02:56.000Z | from typing import Tuple, FrozenSet
from collections import Iterable
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_integer_type, msat_get_rational_type, msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or
from mathsat import msat_make_leq, msat_make_equal
from mathsat import msat_make_number, msat_make_plus
from pysmt.environment import Environment as PysmtEnv
import pysmt.typing as types
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next, symb_to_next
from hint import Hint, Location
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
geq = msat_make_geq(menv, arg0, arg1)
return msat_make_not(menv, geq)
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
leq = msat_make_leq(menv, arg0, arg1)
return msat_make_not(menv, leq)
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
n_arg0 = msat_make_not(menv, arg0)
return msat_make_or(menv, n_arg0, arg1)
def check_ltl(menv: msat_env, enc: LTLEncoder) -> Tuple[Iterable, msat_term,
msat_term, msat_term]:
assert menv
assert isinstance(menv, msat_env)
assert enc
assert isinstance(enc, LTLEncoder)
bool_type = msat_get_bool_type(menv)
real_type = msat_get_rational_type(menv)
i = msat_declare_function(menv, "i", real_type)
i = msat_make_constant(menv, i)
r = msat_declare_function(menv, "r", real_type)
r = msat_make_constant(menv, r)
l = msat_declare_function(menv, "l", real_type)
l = msat_make_constant(menv, l)
inc_i = msat_declare_function(menv, "inc_i", bool_type)
inc_i = msat_make_constant(menv, inc_i)
x_i = msat_declare_function(menv, name_next("i"), real_type)
x_i = msat_make_constant(menv, x_i)
x_r = msat_declare_function(menv, name_next("r"), real_type)
x_r = msat_make_constant(menv, x_r)
x_l = msat_declare_function(menv, name_next("l"), real_type)
x_l = msat_make_constant(menv, x_l)
x_inc_i = msat_declare_function(menv, name_next("inc_i"), bool_type)
x_inc_i = msat_make_constant(menv, x_inc_i)
curr2next = {i: x_i, r: x_r, l: x_l, inc_i: x_inc_i}
zero = msat_make_number(menv, "0")
one = msat_make_number(menv, "1")
r_gt_0 = msat_make_gt(menv, r, zero)
r_lt_l = msat_make_lt(menv, r, l)
i_geq_0 = msat_make_geq(menv, i, zero)
init = msat_make_and(menv, r_gt_0, r_lt_l)
init = msat_make_and(menv, init,
msat_make_and(menv, i_geq_0,
msat_make_not(menv, inc_i)))
init = msat_make_and(menv, init, msat_make_gt(menv, l, zero))
# r' = r
trans = msat_make_equal(menv, x_r, r)
# i < l -> ((inc_i' & i' = i + 1) | (!inc_i' & i' = i)) & l' = l
i_lt_l = msat_make_lt(menv, i, l)
x_i_eq_i_p_1 = msat_make_and(menv, x_inc_i,
msat_make_equal(menv, x_i,
msat_make_plus(menv, i, one)))
x_i_eq_i = msat_make_and(menv, msat_make_not(menv, x_inc_i),
msat_make_equal(menv, x_i, i))
x_i_eq_i_p_1_or_i = msat_make_or(menv, x_i_eq_i_p_1, x_i_eq_i)
x_l_eq_l = msat_make_equal(menv, x_l, l)
x_i_eq_i_p_1_or_i_and_x_l_eq_l = msat_make_and(menv, x_i_eq_i_p_1_or_i,
x_l_eq_l)
trans = msat_make_and(menv, trans,
msat_make_impl(menv, i_lt_l,
x_i_eq_i_p_1_or_i_and_x_l_eq_l))
# i >= l -> i' = 0 & l' = l + 1 & !inc_i'
i_geq_l = msat_make_geq(menv, i, l)
x_i_eq_0 = msat_make_equal(menv, x_i, zero)
x_l_eq_l_p_1 = msat_make_equal(menv, x_l, msat_make_plus(menv, l, one))
x_i_eq_0_and_x_l_eq_l_p_1 = msat_make_and(menv,
msat_make_and(menv, x_i_eq_0,
x_l_eq_l_p_1),
msat_make_not(menv, x_inc_i))
trans = msat_make_and(menv, trans,
msat_make_impl(menv, i_geq_l,
x_i_eq_0_and_x_l_eq_l_p_1))
# (G F inc_i) -> ! G F r > i
G_F_x_i_gt_i = enc.make_G(enc.make_F(inc_i))
r_gt_i = msat_make_gt(menv, r, i)
n_G_F_r_gt_i = msat_make_not(menv, enc.make_G(enc.make_F(r_gt_i)))
ltl = msat_make_impl(menv, G_F_x_i_gt_i, n_G_F_r_gt_i)
return TermMap(curr2next), init, trans, ltl
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
i = mgr.Symbol("i", types.REAL)
r = mgr.Symbol("r", types.REAL)
l = mgr.Symbol("l", types.REAL)
inc_i = mgr.Symbol("inc_i", types.BOOL)
symbs = frozenset([i, r, l, inc_i])
x_i = symb_to_next(mgr, i)
x_r = symb_to_next(mgr, r)
x_l = symb_to_next(mgr, l)
x_inc_i = symb_to_next(mgr, inc_i)
res = []
n0 = mgr.Real(0)
n1 = mgr.Real(1)
loc = Location(env, mgr.GE(r, n0))
loc.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
h_r = Hint("h_r0", env, frozenset([r]), symbs)
h_r.set_locs([loc])
res.append(h_r)
loc = Location(env, mgr.GE(l, n0))
loc.set_progress(0, mgr.Equals(x_l, mgr.Plus(l, n1)))
h_l = Hint("h_l0", env, frozenset([l]), symbs)
h_l.set_locs([loc])
res.append(h_l)
loc = Location(env, inc_i)
loc.set_progress(0, x_inc_i)
h_inc = Hint("h_inc0", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc])
res.append(h_inc)
stutter = mgr.Equals(x_i, i)
loc = Location(env, mgr.LE(i, n0), stutterT=stutter)
loc.set_progress(0, mgr.Equals(x_i, mgr.Minus(i, n1)))
h_i = Hint("h_i1", env, frozenset([i]), symbs)
h_i.set_locs([loc])
res.append(h_i)
loc = Location(env, mgr.LE(r, n0))
loc.set_progress(0, mgr.Equals(x_r, mgr.Minus(r, n1)))
h_r = Hint("h_r1", env, frozenset([r]), symbs)
h_r.set_locs([loc])
res.append(h_r)
loc = Location(env, mgr.LE(l, n0))
loc.set_progress(0, mgr.Equals(x_l, mgr.Minus(l, n1)))
h_l = Hint("h_l1", env, frozenset([l]), symbs)
h_l.set_locs([loc])
res.append(h_l)
loc = Location(env, mgr.Not(inc_i))
loc.set_progress(0, mgr.Not(x_inc_i))
h_inc = Hint("h_inc1", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc])
res.append(h_inc)
loc0 = Location(env, mgr.GE(i, n0))
loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
loc1 = Location(env, mgr.GE(i, n0))
loc1.set_progress(0, mgr.Equals(x_i, i))
h_i = Hint("h_i2", env, frozenset([i]), symbs)
h_i.set_locs([loc0, loc1])
res.append(h_i)
loc0 = Location(env, mgr.Not(inc_i))
loc0.set_progress(1, x_inc_i)
loc1 = Location(env, inc_i)
loc1.set_progress(0, mgr.Not(x_inc_i))
h_inc = Hint("h_inc2", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc0, loc1])
res.append(h_inc)
loc0 = Location(env, mgr.GE(i, n0), mgr.GE(l, n0),
stutterT=mgr.Equals(x_i, mgr.Plus(i, l)))
loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
loc1 = Location(env, mgr.GE(i, n0))
loc1.set_progress(0, mgr.Equals(x_i, i))
h_i = Hint("h_i3", env, frozenset([i]), symbs)
h_i.set_locs([loc0, loc1])
res.append(h_i)
loc0 = Location(env, mgr.GE(r, n0), mgr.GE(i, n0),
stutterT=mgr.Equals(x_r, mgr.Plus(r, i)))
loc0.set_progress(1, mgr.Equals(x_r, r))
loc1 = Location(env, mgr.GE(r, n0))
loc1.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
h_r = Hint("h_r3", env, frozenset([r]), symbs)
h_r.set_locs([loc0, loc1])
res.append(h_r)
loc0 = Location(env, mgr.GE(l, n0), mgr.GE(r, n0),
stutterT=mgr.Equals(x_l, mgr.Plus(l, r)))
loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
loc1 = Location(env, mgr.GE(l, n0))
loc1.set_progress(0, mgr.Equals(x_l, l))
h_l = Hint("h_l3", env, frozenset([l]), symbs)
h_l.set_locs([loc0, loc1])
res.append(h_l)
loc0 = Location(env, mgr.Not(inc_i))
loc0.set_progress(1, x_inc_i)
loc1 = Location(env, inc_i, stutterT=x_inc_i)
loc1.set_progress(0, mgr.Not(x_inc_i))
h_inc = Hint("h_inc3", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc0, loc1])
res.append(h_inc)
loc0 = Location(env, mgr.GE(r, n0))
loc0.set_progress(1, mgr.Equals(x_r, r))
loc1 = Location(env, mgr.GE(r, n0))
loc1.set_progress(2, mgr.Equals(x_r, mgr.Plus(r, n1)))
loc2 = Location(env, mgr.GE(r, n0))
loc2.set_progress(0, mgr.Equals(x_r, r))
h_r = Hint("h_r4", env, frozenset([r]), symbs)
h_r.set_locs([loc0, loc1, loc2])
res.append(h_r)
loc0 = Location(env, mgr.GE(l, n0))
loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
loc1 = Location(env, mgr.GE(l, n0))
loc1.set_progress(2, mgr.Equals(x_l, l))
loc2 = Location(env, mgr.GE(l, n0))
loc2.set_progress(0, mgr.Equals(x_l, l))
h_l = Hint("h_l4", env, frozenset([l]), symbs)
h_l.set_locs([loc0, loc1, loc2])
res.append(h_l)
loc0 = Location(env, mgr.Not(inc_i))
loc0.set_progress(1, x_inc_i)
loc1 = Location(env, inc_i, stutterT=x_inc_i)
loc1.set_progress(2, mgr.Not(x_inc_i))
loc2 = Location(env, mgr.Not(inc_i))
loc2.set_progress(0, mgr.Not(x_inc_i))
h_inc = Hint("h_inc4", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc0, loc1, loc2])
res.append(h_inc)
return frozenset(res)
| 35.408759 | 89 | 0.625335 | from typing import Tuple, FrozenSet
from collections import Iterable
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_integer_type, msat_get_rational_type, msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or
from mathsat import msat_make_leq, msat_make_equal
from mathsat import msat_make_number, msat_make_plus
from pysmt.environment import Environment as PysmtEnv
import pysmt.typing as types
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next, symb_to_next
from hint import Hint, Location
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
geq = msat_make_geq(menv, arg0, arg1)
return msat_make_not(menv, geq)
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
leq = msat_make_leq(menv, arg0, arg1)
return msat_make_not(menv, leq)
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
n_arg0 = msat_make_not(menv, arg0)
return msat_make_or(menv, n_arg0, arg1)
def check_ltl(menv: msat_env, enc: LTLEncoder) -> Tuple[Iterable, msat_term,
msat_term, msat_term]:
assert menv
assert isinstance(menv, msat_env)
assert enc
assert isinstance(enc, LTLEncoder)
bool_type = msat_get_bool_type(menv)
real_type = msat_get_rational_type(menv)
i = msat_declare_function(menv, "i", real_type)
i = msat_make_constant(menv, i)
r = msat_declare_function(menv, "r", real_type)
r = msat_make_constant(menv, r)
l = msat_declare_function(menv, "l", real_type)
l = msat_make_constant(menv, l)
inc_i = msat_declare_function(menv, "inc_i", bool_type)
inc_i = msat_make_constant(menv, inc_i)
x_i = msat_declare_function(menv, name_next("i"), real_type)
x_i = msat_make_constant(menv, x_i)
x_r = msat_declare_function(menv, name_next("r"), real_type)
x_r = msat_make_constant(menv, x_r)
x_l = msat_declare_function(menv, name_next("l"), real_type)
x_l = msat_make_constant(menv, x_l)
x_inc_i = msat_declare_function(menv, name_next("inc_i"), bool_type)
x_inc_i = msat_make_constant(menv, x_inc_i)
curr2next = {i: x_i, r: x_r, l: x_l, inc_i: x_inc_i}
zero = msat_make_number(menv, "0")
one = msat_make_number(menv, "1")
r_gt_0 = msat_make_gt(menv, r, zero)
r_lt_l = msat_make_lt(menv, r, l)
i_geq_0 = msat_make_geq(menv, i, zero)
init = msat_make_and(menv, r_gt_0, r_lt_l)
init = msat_make_and(menv, init,
msat_make_and(menv, i_geq_0,
msat_make_not(menv, inc_i)))
init = msat_make_and(menv, init, msat_make_gt(menv, l, zero))
trans = msat_make_equal(menv, x_r, r)
# i < l -> ((inc_i' & i' = i + 1) | (!inc_i' & i' = i)) & l' = l
i_lt_l = msat_make_lt(menv, i, l)
x_i_eq_i_p_1 = msat_make_and(menv, x_inc_i,
msat_make_equal(menv, x_i,
msat_make_plus(menv, i, one)))
x_i_eq_i = msat_make_and(menv, msat_make_not(menv, x_inc_i),
msat_make_equal(menv, x_i, i))
x_i_eq_i_p_1_or_i = msat_make_or(menv, x_i_eq_i_p_1, x_i_eq_i)
x_l_eq_l = msat_make_equal(menv, x_l, l)
x_i_eq_i_p_1_or_i_and_x_l_eq_l = msat_make_and(menv, x_i_eq_i_p_1_or_i,
x_l_eq_l)
trans = msat_make_and(menv, trans,
msat_make_impl(menv, i_lt_l,
x_i_eq_i_p_1_or_i_and_x_l_eq_l))
i_geq_l = msat_make_geq(menv, i, l)
x_i_eq_0 = msat_make_equal(menv, x_i, zero)
x_l_eq_l_p_1 = msat_make_equal(menv, x_l, msat_make_plus(menv, l, one))
x_i_eq_0_and_x_l_eq_l_p_1 = msat_make_and(menv,
msat_make_and(menv, x_i_eq_0,
x_l_eq_l_p_1),
msat_make_not(menv, x_inc_i))
trans = msat_make_and(menv, trans,
msat_make_impl(menv, i_geq_l,
x_i_eq_0_and_x_l_eq_l_p_1))
# (G F inc_i) -> ! G F r > i
G_F_x_i_gt_i = enc.make_G(enc.make_F(inc_i))
r_gt_i = msat_make_gt(menv, r, i)
n_G_F_r_gt_i = msat_make_not(menv, enc.make_G(enc.make_F(r_gt_i)))
ltl = msat_make_impl(menv, G_F_x_i_gt_i, n_G_F_r_gt_i)
return TermMap(curr2next), init, trans, ltl
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
i = mgr.Symbol("i", types.REAL)
r = mgr.Symbol("r", types.REAL)
l = mgr.Symbol("l", types.REAL)
inc_i = mgr.Symbol("inc_i", types.BOOL)
symbs = frozenset([i, r, l, inc_i])
x_i = symb_to_next(mgr, i)
x_r = symb_to_next(mgr, r)
x_l = symb_to_next(mgr, l)
x_inc_i = symb_to_next(mgr, inc_i)
res = []
n0 = mgr.Real(0)
n1 = mgr.Real(1)
loc = Location(env, mgr.GE(r, n0))
loc.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
h_r = Hint("h_r0", env, frozenset([r]), symbs)
h_r.set_locs([loc])
res.append(h_r)
loc = Location(env, mgr.GE(l, n0))
loc.set_progress(0, mgr.Equals(x_l, mgr.Plus(l, n1)))
h_l = Hint("h_l0", env, frozenset([l]), symbs)
h_l.set_locs([loc])
res.append(h_l)
loc = Location(env, inc_i)
loc.set_progress(0, x_inc_i)
h_inc = Hint("h_inc0", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc])
res.append(h_inc)
stutter = mgr.Equals(x_i, i)
loc = Location(env, mgr.LE(i, n0), stutterT=stutter)
loc.set_progress(0, mgr.Equals(x_i, mgr.Minus(i, n1)))
h_i = Hint("h_i1", env, frozenset([i]), symbs)
h_i.set_locs([loc])
res.append(h_i)
loc = Location(env, mgr.LE(r, n0))
loc.set_progress(0, mgr.Equals(x_r, mgr.Minus(r, n1)))
h_r = Hint("h_r1", env, frozenset([r]), symbs)
h_r.set_locs([loc])
res.append(h_r)
loc = Location(env, mgr.LE(l, n0))
loc.set_progress(0, mgr.Equals(x_l, mgr.Minus(l, n1)))
h_l = Hint("h_l1", env, frozenset([l]), symbs)
h_l.set_locs([loc])
res.append(h_l)
loc = Location(env, mgr.Not(inc_i))
loc.set_progress(0, mgr.Not(x_inc_i))
h_inc = Hint("h_inc1", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc])
res.append(h_inc)
loc0 = Location(env, mgr.GE(i, n0))
loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
loc1 = Location(env, mgr.GE(i, n0))
loc1.set_progress(0, mgr.Equals(x_i, i))
h_i = Hint("h_i2", env, frozenset([i]), symbs)
h_i.set_locs([loc0, loc1])
res.append(h_i)
loc0 = Location(env, mgr.Not(inc_i))
loc0.set_progress(1, x_inc_i)
loc1 = Location(env, inc_i)
loc1.set_progress(0, mgr.Not(x_inc_i))
h_inc = Hint("h_inc2", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc0, loc1])
res.append(h_inc)
loc0 = Location(env, mgr.GE(i, n0), mgr.GE(l, n0),
stutterT=mgr.Equals(x_i, mgr.Plus(i, l)))
loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
loc1 = Location(env, mgr.GE(i, n0))
loc1.set_progress(0, mgr.Equals(x_i, i))
h_i = Hint("h_i3", env, frozenset([i]), symbs)
h_i.set_locs([loc0, loc1])
res.append(h_i)
loc0 = Location(env, mgr.GE(r, n0), mgr.GE(i, n0),
stutterT=mgr.Equals(x_r, mgr.Plus(r, i)))
loc0.set_progress(1, mgr.Equals(x_r, r))
loc1 = Location(env, mgr.GE(r, n0))
loc1.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
h_r = Hint("h_r3", env, frozenset([r]), symbs)
h_r.set_locs([loc0, loc1])
res.append(h_r)
loc0 = Location(env, mgr.GE(l, n0), mgr.GE(r, n0),
stutterT=mgr.Equals(x_l, mgr.Plus(l, r)))
loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
loc1 = Location(env, mgr.GE(l, n0))
loc1.set_progress(0, mgr.Equals(x_l, l))
h_l = Hint("h_l3", env, frozenset([l]), symbs)
h_l.set_locs([loc0, loc1])
res.append(h_l)
loc0 = Location(env, mgr.Not(inc_i))
loc0.set_progress(1, x_inc_i)
loc1 = Location(env, inc_i, stutterT=x_inc_i)
loc1.set_progress(0, mgr.Not(x_inc_i))
h_inc = Hint("h_inc3", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc0, loc1])
res.append(h_inc)
loc0 = Location(env, mgr.GE(r, n0))
loc0.set_progress(1, mgr.Equals(x_r, r))
loc1 = Location(env, mgr.GE(r, n0))
loc1.set_progress(2, mgr.Equals(x_r, mgr.Plus(r, n1)))
loc2 = Location(env, mgr.GE(r, n0))
loc2.set_progress(0, mgr.Equals(x_r, r))
h_r = Hint("h_r4", env, frozenset([r]), symbs)
h_r.set_locs([loc0, loc1, loc2])
res.append(h_r)
loc0 = Location(env, mgr.GE(l, n0))
loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
loc1 = Location(env, mgr.GE(l, n0))
loc1.set_progress(2, mgr.Equals(x_l, l))
loc2 = Location(env, mgr.GE(l, n0))
loc2.set_progress(0, mgr.Equals(x_l, l))
h_l = Hint("h_l4", env, frozenset([l]), symbs)
h_l.set_locs([loc0, loc1, loc2])
res.append(h_l)
loc0 = Location(env, mgr.Not(inc_i))
loc0.set_progress(1, x_inc_i)
loc1 = Location(env, inc_i, stutterT=x_inc_i)
loc1.set_progress(2, mgr.Not(x_inc_i))
loc2 = Location(env, mgr.Not(inc_i))
loc2.set_progress(0, mgr.Not(x_inc_i))
h_inc = Hint("h_inc4", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc0, loc1, loc2])
res.append(h_inc)
return frozenset(res)
| true | true |
1c2ffaeb75554bbb8fc96f57950559d0ff84f295 | 28,917 | py | Python | electrum_dash/gui/qt/util.py | thephez/electrum-dash | bbf0ba1cc22feebc26d78b5d3b338251a41ca323 | [
"MIT"
] | null | null | null | electrum_dash/gui/qt/util.py | thephez/electrum-dash | bbf0ba1cc22feebc26d78b5d3b338251a41ca323 | [
"MIT"
] | null | null | null | electrum_dash/gui/qt/util.py | thephez/electrum-dash | bbf0ba1cc22feebc26d78b5d3b338251a41ca323 | [
"MIT"
] | null | null | null | import asyncio
import os.path
import time
import sys
import platform
import queue
import traceback
from functools import partial, lru_cache
from typing import NamedTuple, Callable, Optional, TYPE_CHECKING, Union, List, Dict
from PyQt5.QtGui import (QFont, QColor, QCursor, QPixmap, QStandardItem,
QPalette, QIcon)
from PyQt5.QtCore import (Qt, QPersistentModelIndex, QModelIndex, pyqtSignal,
QCoreApplication, QItemSelectionModel, QThread,
QSortFilterProxyModel, QSize, QLocale)
from PyQt5.QtWidgets import (QPushButton, QLabel, QMessageBox, QHBoxLayout,
QAbstractItemView, QVBoxLayout, QLineEdit,
QStyle, QDialog, QGroupBox, QButtonGroup, QRadioButton,
QFileDialog, QWidget, QToolButton, QTreeView, QPlainTextEdit,
QHeaderView, QApplication, QToolTip, QTreeWidget, QStyledItemDelegate)
from electrum_dash.i18n import _, languages
from electrum_dash.util import (FileImportFailed, FileExportFailed,
resource_path)
from electrum_dash.paymentrequest import PR_UNPAID, PR_PAID, PR_EXPIRED
if TYPE_CHECKING:
from .main_window import ElectrumWindow
if platform.system() == 'Windows':
if platform.release() in ['7', '8', '10']:
MONOSPACE_FONT = 'Consolas'
else:
MONOSPACE_FONT = 'Lucida Console'
elif platform.system() == 'Darwin':
MONOSPACE_FONT = 'Menlo'
else:
MONOSPACE_FONT = 'monospace'
dialogs = []
pr_icons = {
PR_UNPAID:"unpaid.png",
PR_PAID:"confirmed.png",
PR_EXPIRED:"expired.png"
}
pr_tooltips = {
PR_UNPAID:_('Pending'),
PR_PAID:_('Paid'),
PR_EXPIRED:_('Expired')
}
expiration_values = [
(_('1 hour'), 60*60),
(_('1 day'), 24*60*60),
(_('1 week'), 7*24*60*60),
(_('Never'), None)
]
class EnterButton(QPushButton):
def __init__(self, text, func):
QPushButton.__init__(self, text)
self.func = func
self.clicked.connect(func)
def keyPressEvent(self, e):
if e.key() == Qt.Key_Return:
self.func()
class ThreadedButton(QPushButton):
def __init__(self, text, task, on_success=None, on_error=None):
QPushButton.__init__(self, text)
self.task = task
self.on_success = on_success
self.on_error = on_error
self.clicked.connect(self.run_task)
def run_task(self):
self.setEnabled(False)
self.thread = TaskThread(self)
self.thread.add(self.task, self.on_success, self.done, self.on_error)
def done(self):
self.setEnabled(True)
self.thread.stop()
class WWLabel(QLabel):
def __init__ (self, text="", parent=None):
QLabel.__init__(self, text, parent)
self.setWordWrap(True)
class HelpLabel(QLabel):
def __init__(self, text, help_text):
QLabel.__init__(self, text)
self.help_text = help_text
self.app = QCoreApplication.instance()
self.font = QFont()
def mouseReleaseEvent(self, x):
QMessageBox.information(self, 'Help', self.help_text)
def enterEvent(self, event):
self.font.setUnderline(True)
self.setFont(self.font)
self.app.setOverrideCursor(QCursor(Qt.PointingHandCursor))
return QLabel.enterEvent(self, event)
def leaveEvent(self, event):
self.font.setUnderline(False)
self.setFont(self.font)
self.app.setOverrideCursor(QCursor(Qt.ArrowCursor))
return QLabel.leaveEvent(self, event)
class HelpButton(QPushButton):
def __init__(self, text):
QPushButton.__init__(self, '?')
self.help_text = text
self.setFocusPolicy(Qt.NoFocus)
self.setFixedWidth(20)
self.clicked.connect(self.onclick)
def onclick(self):
QMessageBox.information(self, 'Help', self.help_text)
class InfoButton(QPushButton):
def __init__(self, text):
QPushButton.__init__(self, 'Info')
self.help_text = text
self.setFocusPolicy(Qt.NoFocus)
self.setFixedWidth(60)
self.clicked.connect(self.onclick)
def onclick(self):
QMessageBox.information(self, 'Info', self.help_text)
class Buttons(QHBoxLayout):
def __init__(self, *buttons):
QHBoxLayout.__init__(self)
self.addStretch(1)
for b in buttons:
self.addWidget(b)
class CloseButton(QPushButton):
def __init__(self, dialog):
QPushButton.__init__(self, _("Close"))
self.clicked.connect(dialog.close)
self.setDefault(True)
class CopyButton(QPushButton):
def __init__(self, text_getter, app):
QPushButton.__init__(self, _("Copy"))
self.clicked.connect(lambda: app.clipboard().setText(text_getter()))
class CopyCloseButton(QPushButton):
def __init__(self, text_getter, app, dialog):
QPushButton.__init__(self, _("Copy and Close"))
self.clicked.connect(lambda: app.clipboard().setText(text_getter()))
self.clicked.connect(dialog.close)
self.setDefault(True)
class OkButton(QPushButton):
def __init__(self, dialog, label=None):
QPushButton.__init__(self, label or _("OK"))
self.clicked.connect(dialog.accept)
self.setDefault(True)
class CancelButton(QPushButton):
def __init__(self, dialog, label=None):
QPushButton.__init__(self, label or _("Cancel"))
self.clicked.connect(dialog.reject)
class MessageBoxMixin(object):
def top_level_window_recurse(self, window=None, test_func=None):
window = window or self
classes = (WindowModalDialog, QMessageBox)
if test_func is None:
test_func = lambda x: True
for n, child in enumerate(window.children()):
# Test for visibility as old closed dialogs may not be GC-ed.
# Only accept children that confirm to test_func.
if isinstance(child, classes) and child.isVisible() \
and test_func(child):
return self.top_level_window_recurse(child, test_func=test_func)
return window
def top_level_window(self, test_func=None):
return self.top_level_window_recurse(test_func)
def question(self, msg, parent=None, title=None, icon=None):
Yes, No = QMessageBox.Yes, QMessageBox.No
return self.msg_box(icon or QMessageBox.Question,
parent, title or '',
msg, buttons=Yes|No, defaultButton=No) == Yes
def show_warning(self, msg, parent=None, title=None, **kwargs):
return self.msg_box(QMessageBox.Warning, parent,
title or _('Warning'), msg, **kwargs)
def show_error(self, msg, parent=None, **kwargs):
return self.msg_box(QMessageBox.Warning, parent,
_('Error'), msg, **kwargs)
def show_critical(self, msg, parent=None, title=None, **kwargs):
return self.msg_box(QMessageBox.Critical, parent,
title or _('Critical Error'), msg, **kwargs)
def show_message(self, msg, parent=None, title=None, **kwargs):
return self.msg_box(QMessageBox.Information, parent,
title or _('Information'), msg, **kwargs)
def msg_box(self, icon, parent, title, text, buttons=QMessageBox.Ok,
defaultButton=QMessageBox.NoButton, rich_text=False):
parent = parent or self.top_level_window()
if type(icon) is QPixmap:
d = QMessageBox(QMessageBox.Information, title, str(text), buttons, parent)
d.setIconPixmap(icon)
else:
d = QMessageBox(icon, title, str(text), buttons, parent)
d.setWindowModality(Qt.WindowModal)
d.setDefaultButton(defaultButton)
if rich_text:
d.setTextInteractionFlags(Qt.TextSelectableByMouse| Qt.LinksAccessibleByMouse)
d.setTextFormat(Qt.RichText)
else:
d.setTextInteractionFlags(Qt.TextSelectableByMouse)
d.setTextFormat(Qt.PlainText)
return d.exec_()
class WindowModalDialog(QDialog, MessageBoxMixin):
'''Handy wrapper; window modal dialogs are better for our multi-window
daemon model as other wallet windows can still be accessed.'''
def __init__(self, parent, title=None):
QDialog.__init__(self, parent)
self.setWindowModality(Qt.WindowModal)
if title:
self.setWindowTitle(title)
class WaitingDialog(WindowModalDialog):
'''Shows a please wait dialog whilst running a task. It is not
necessary to maintain a reference to this dialog.'''
def __init__(self, parent, message, task, on_success=None, on_error=None):
assert parent
if isinstance(parent, MessageBoxMixin):
parent = parent.top_level_window()
WindowModalDialog.__init__(self, parent, _("Please wait"))
vbox = QVBoxLayout(self)
vbox.addWidget(QLabel(message))
self.accepted.connect(self.on_accepted)
self.show()
self.thread = TaskThread(self)
self.thread.finished.connect(self.deleteLater) # see #3956
self.thread.add(task, on_success, self.accept, on_error)
def wait(self):
self.thread.wait()
def on_accepted(self):
self.thread.stop()
def line_dialog(parent, title, label, ok_label, default=None):
dialog = WindowModalDialog(parent, title)
dialog.setMinimumWidth(500)
l = QVBoxLayout()
dialog.setLayout(l)
l.addWidget(QLabel(label))
txt = QLineEdit()
if default:
txt.setText(default)
l.addWidget(txt)
l.addLayout(Buttons(CancelButton(dialog), OkButton(dialog, ok_label)))
if dialog.exec_():
return txt.text()
def text_dialog(parent, title, header_layout, ok_label, default=None, allow_multi=False):
from .qrtextedit import ScanQRTextEdit
dialog = WindowModalDialog(parent, title)
dialog.setMinimumWidth(600)
l = QVBoxLayout()
dialog.setLayout(l)
if isinstance(header_layout, str):
l.addWidget(QLabel(header_layout))
else:
l.addLayout(header_layout)
txt = ScanQRTextEdit(allow_multi=allow_multi)
if default:
txt.setText(default)
l.addWidget(txt)
l.addLayout(Buttons(CancelButton(dialog), OkButton(dialog, ok_label)))
if dialog.exec_():
return txt.toPlainText()
class ChoicesLayout(object):
    """A group of radio buttons inside a QGroupBox, wrapped in a layout.

    Messages longer than 50 characters are shown as a word-wrapped
    label above the box instead of being used as the box title.
    """

    def __init__(self, msg, choices, on_clicked=None, checked_index=0):
        outer = QVBoxLayout()
        if len(msg) > 50:
            outer.addWidget(WWLabel(msg))
            msg = ""
        box = QGroupBox(msg)
        outer.addWidget(box)
        inner = QVBoxLayout()
        box.setLayout(inner)
        self.group = QButtonGroup()
        for index, caption in enumerate(choices):
            btn = QRadioButton(box)
            btn.setText(caption)
            inner.addWidget(btn)
            self.group.addButton(btn)
            self.group.setId(btn, index)
            if index == checked_index:
                btn.setChecked(True)
        if on_clicked:
            # callback receives this ChoicesLayout as its first argument
            self.group.buttonClicked.connect(partial(on_clicked, self))
        self.vbox = outer

    def layout(self):
        """Return the layout to embed into a dialog."""
        return self.vbox

    def selected_index(self):
        """Return the index of the currently checked radio button."""
        return self.group.checkedId()
def address_field(addresses):
    """Build a line edit pre-filled with the first address, plus a button
    that cycles through *addresses*.

    Returns (hbox, address_e).
    """
    hbox = QHBoxLayout()
    address_e = QLineEdit()
    if addresses and len(addresses) > 0:
        address_e.setText(addresses[0])
    else:
        addresses = []

    def cycle_address():
        try:
            nxt = (addresses.index(str(address_e.text())) + 1) % len(addresses)
            address_e.setText(addresses[nxt])
        except ValueError:
            # the user may have edited the field to something that is not
            # one of our addresses (or not an address at all): reset
            if addresses and len(addresses) > 0:
                address_e.setText(addresses[0])

    button = QPushButton(_('Address'))
    button.clicked.connect(cycle_address)
    hbox.addWidget(button)
    hbox.addWidget(address_e)
    return hbox, address_e
def filename_field(parent, config, defaultname, select_msg):
    """Build a CSV/JSON format chooser and a filename entry with a picker.

    Returns (vbox, filename_e, b1) where b1 is the CSV radio button.
    """
    vbox = QVBoxLayout()
    vbox.addWidget(QLabel(_("Format")))
    gb = QGroupBox("format", parent)
    b1 = QRadioButton(gb)
    b1.setText(_("CSV"))
    b1.setChecked(True)
    b2 = QRadioButton(gb)
    b2.setText(_("json"))
    vbox.addWidget(b1)
    vbox.addWidget(b2)

    hbox = QHBoxLayout()
    # start in the last-used I/O directory, falling back to $HOME
    directory = config.get('io_dir', os.path.expanduser('~'))
    filename_e = QLineEdit()
    filename_e.setText(os.path.join(directory, defaultname))

    def pick_file():
        current = filename_e.text()
        if current.endswith(".csv"):
            name_filter = "*.csv"
        elif current.endswith(".json"):
            name_filter = "*.json"
        else:
            name_filter = None
        chosen, __ = QFileDialog.getSaveFileName(None, select_msg, current, name_filter)
        if chosen:
            filename_e.setText(chosen)

    button = QPushButton(_('File'))
    button.clicked.connect(pick_file)
    hbox.addWidget(button)
    hbox.addWidget(filename_e)
    vbox.addLayout(hbox)

    def set_csv(v):
        # keep the filename extension in sync with the chosen format
        text = filename_e.text()
        text = text.replace(".json", ".csv") if v else text.replace(".csv", ".json")
        filename_e.setText(text)

    b1.clicked.connect(lambda: set_csv(True))
    b2.clicked.connect(lambda: set_csv(False))
    return vbox, filename_e, b1
class ElectrumItemDelegate(QStyledItemDelegate):
    """Item delegate for MyTreeView that tracks which cell is being edited
    and routes committed edits back through the view's on_edited() hook."""
    def __init__(self, tv):
        super().__init__(tv)
        # the MyTreeView this delegate serves
        self.tv = tv
        # QPersistentModelIndex of the cell currently being edited, or None
        self.opened = None
        def on_closeEditor(editor: QLineEdit, hint):
            # editing finished (committed or cancelled): clear the marker
            self.opened = None
        def on_commitData(editor: QLineEdit):
            new_text = editor.text()
            idx = QModelIndex(self.opened)
            row, col = idx.row(), idx.column()
            _prior_text, user_role = self.tv.text_txid_from_coordinate(row, col)
            # check that we didn't forget to set UserRole on an editable field
            assert user_role is not None, (row, col)
            self.tv.on_edited(idx, user_role, new_text)
        self.closeEditor.connect(on_closeEditor)
        self.commitData.connect(on_commitData)
    def createEditor(self, parent, option, idx):
        # remember the cell being edited so commit/close handlers (and the
        # view's keyPressEvent) can refer back to it
        self.opened = QPersistentModelIndex(idx)
        return super().createEditor(parent, option, idx)
class MyTreeView(QTreeView):
    """Base tree view for the wallet's list panes.

    Provides: a custom context menu, in-place label editing via
    ElectrumItemDelegate, case-insensitive text filtering of rows, and
    an optional show/hide toolbar.

    NOTE(review): hide_row() reads ``self.filter_columns``, which is never
    assigned in this class -- presumably subclasses define it; confirm
    before reusing this class standalone.
    """
    def __init__(self, parent: 'ElectrumWindow', create_menu, stretch_column=None, editable_columns=None):
        super().__init__(parent)
        self.parent = parent
        self.config = self.parent.config
        # column that absorbs extra horizontal space (see update_headers)
        self.stretch_column = stretch_column
        self.setContextMenuPolicy(Qt.CustomContextMenu)
        self.customContextMenuRequested.connect(create_menu)
        self.setUniformRowHeights(True)
        # Control which columns are editable; default: just the stretch column
        if editable_columns is None:
            editable_columns = {stretch_column}
        else:
            editable_columns = set(editable_columns)
        self.editable_columns = editable_columns
        self.setItemDelegate(ElectrumItemDelegate(self))
        self.current_filter = ""
        self.setRootIsDecorated(False)  # remove left margin
        self.toolbar_shown = False
        # When figuring out the size of columns, Qt by default looks at
        # the first 1000 rows (at least if resize mode is QHeaderView.ResizeToContents).
        # This would be REALLY SLOW, and it's not perfect anyway.
        # So to speed the UI up considerably, set it to
        # only look at as many rows as currently visible.
        self.header().setResizeContentsPrecision(0)
    def set_editability(self, items):
        """Mark each item editable iff its column index is in editable_columns."""
        for idx, i in enumerate(items):
            i.setEditable(idx in self.editable_columns)
    def selected_in_column(self, column: int):
        """Return the selected indexes restricted to *column*."""
        items = self.selectionModel().selectedIndexes()
        return list(x for x in items if x.column() == column)
    def current_item_user_role(self, col) -> Optional[QStandardItem]:
        """Return the Qt.UserRole data of the current row at *col*, or None."""
        idx = self.selectionModel().currentIndex()
        idx = idx.sibling(idx.row(), col)
        item = self.model().itemFromIndex(idx)
        if item:
            return item.data(Qt.UserRole)
    def set_current_idx(self, set_current: QPersistentModelIndex):
        """Select the given persistent index as the current item (if valid)."""
        if set_current:
            assert isinstance(set_current, QPersistentModelIndex)
            assert set_current.isValid()
            self.selectionModel().select(QModelIndex(set_current), QItemSelectionModel.SelectCurrent)
    def update_headers(self, headers: Union[List[str], Dict[int, str]]):
        # headers is either a list of column names, or a dict: (col_idx->col_name)
        if not isinstance(headers, dict):  # convert to dict
            headers = dict(enumerate(headers))
        col_names = [headers[col_idx] for col_idx in sorted(headers.keys())]
        model = self.model()
        model.setHorizontalHeaderLabels(col_names)
        self.header().setStretchLastSection(False)
        self.header().setDefaultAlignment(Qt.AlignCenter)
        for col_idx in headers:
            # stretch_column absorbs extra width; others size to contents
            sm = QHeaderView.Stretch if col_idx == self.stretch_column else QHeaderView.ResizeToContents
            self.header().setSectionResizeMode(col_idx, sm)
    def keyPressEvent(self, event):
        # while an in-place editor is open, let it handle all keys
        if self.itemDelegate().opened:
            return
        # F2 / Return act like item activation (shows the context menu)
        if event.key() in [ Qt.Key_F2, Qt.Key_Return ]:
            self.on_activated(self.selectionModel().currentIndex())
            return
        super().keyPressEvent(event)
    def on_activated(self, idx):
        # on 'enter' we show the menu
        pt = self.visualRect(idx).bottomLeft()
        pt.setX(50)
        self.customContextMenuRequested.emit(pt)
    def edit(self, idx, trigger=QAbstractItemView.AllEditTriggers, event=None):
        """
        this is to prevent:
            edit: editing failed
        from inside qt
        """
        return super().edit(idx, trigger, event)
    def on_edited(self, idx: QModelIndex, user_role, text):
        """Persist an edited label (keyed by the item's UserRole data) and
        refresh the views that display labels."""
        self.parent.wallet.set_label(user_role, text)
        self.parent.history_model.refresh('on_edited in MyTreeView')
        self.parent.update_completions()
    def should_hide(self, row):
        """
        row_num is for self.model(). So if there is a proxy, it is the row number
        in that! Subclasses may override; hide_row() treats a None return
        as "no date filter active".
        """
        return False
    def text_txid_from_coordinate(self, row_num, column):
        """Return (display text, Qt.UserRole data) of the cell at
        (row_num, column). Must not be called through a proxy model."""
        assert not isinstance(self.model(), QSortFilterProxyModel)
        idx = self.model().index(row_num, column)
        item = self.model().itemFromIndex(idx)
        user_role = item.data(Qt.UserRole)
        return item.text(), user_role
    def hide_row(self, row_num):
        """
        row_num is for self.model(). So if there is a proxy, it is the row number
        in that!
        """
        should_hide = self.should_hide(row_num)
        if not self.current_filter and should_hide is None:
            # no filters at all, neither date nor search
            self.setRowHidden(row_num, QModelIndex(), False)
            return
        for column in self.filter_columns:
            txt, _ = self.text_txid_from_coordinate(row_num, column)
            txt = txt.lower()
            if self.current_filter in txt:
                # the filter matched, but the date filter might apply
                self.setRowHidden(row_num, QModelIndex(), bool(should_hide))
                break
        else:
            # we did not find the filter in any columns, hide the item
            self.setRowHidden(row_num, QModelIndex(), True)
    def filter(self, p):
        """Set the (case-insensitive) search filter and re-filter all rows."""
        p = p.lower()
        self.current_filter = p
        self.hide_rows()
    def hide_rows(self):
        """Apply the current filters to every row of the model."""
        for row in range(self.model().rowCount()):
            self.hide_row(row)
    def create_toolbar(self, config=None):
        """Build the (initially hidden) toolbar from the subclass's buttons
        plus a close ('x') button; returns the toolbar layout."""
        hbox = QHBoxLayout()
        buttons = self.get_toolbar_buttons()
        for b in buttons:
            b.setVisible(False)
            hbox.addWidget(b)
        hide_button = QPushButton('x')
        hide_button.setVisible(False)
        hide_button.pressed.connect(lambda: self.show_toolbar(False, config))
        self.toolbar_buttons = buttons + (hide_button,)
        hbox.addStretch()
        hbox.addWidget(hide_button)
        return hbox
    def save_toolbar_state(self, state, config):
        pass # implemented in subclasses
    def show_toolbar(self, state, config=None):
        """Show or hide the toolbar; persists the state when config is given."""
        if state == self.toolbar_shown:
            return
        self.toolbar_shown = state
        if config:
            self.save_toolbar_state(state, config)
        for b in self.toolbar_buttons:
            b.setVisible(state)
        if not state:
            self.on_hide_toolbar()
    def toggle_toolbar(self, config=None):
        self.show_toolbar(not self.toolbar_shown, config)
class ButtonsWidget(QWidget):
    """Base widget that overlays small tool buttons along its
    bottom-right edge (used by ButtonsLineEdit / ButtonsTextEdit)."""

    def __init__(self):
        super(QWidget, self).__init__()
        self.buttons = []

    def resizeButtons(self):
        """Re-anchor the overlay buttons to the bottom-right corner."""
        frame = self.style().pixelMetric(QStyle.PM_DefaultFrameWidth)
        x_edge = self.rect().right() - frame
        y_edge = self.rect().bottom() - frame
        for btn in self.buttons:
            hint = btn.sizeHint()
            x_edge -= hint.width()
            btn.move(x_edge, y_edge - hint.height())

    def addButton(self, icon_name, on_click, tooltip):
        """Create, style, and register an overlay button; returns it."""
        btn = QToolButton(self)
        btn.setIcon(read_QIcon(icon_name))
        # 3px (button sz - icon sz), 2px borders, 2px padding
        icon_px = QLineEdit().sizeHint().height() - 7
        btn.setIconSize(QSize(icon_px, icon_px))
        btn.setCursor(QCursor(Qt.PointingHandCursor))
        btn.setStyleSheet("QToolButton { border: none; hover {border: 1px} pressed {border: 1px} padding: 0px; }")
        btn.setVisible(True)
        btn.setToolTip(tooltip)
        btn.clicked.connect(on_click)
        self.buttons.append(btn)
        return btn

    def addCopyButton(self, app):
        """Add the standard copy-to-clipboard overlay button."""
        self.app = app
        self.addButton("copy.png", self.on_copy, _("Copy to clipboard"))

    def on_copy(self):
        self.app.clipboard().setText(self.text())
        QToolTip.showText(QCursor.pos(), _("Text copied to clipboard"), self)
class ButtonsLineEdit(QLineEdit, ButtonsWidget):
    """QLineEdit with overlay tool buttons (see ButtonsWidget)."""

    def __init__(self, text=None):
        QLineEdit.__init__(self, text)
        self.buttons = []

    def resizeEvent(self, e):
        # keep the overlay buttons anchored after every resize
        result = QLineEdit.resizeEvent(self, e)
        self.resizeButtons()
        return result
class ButtonsTextEdit(QPlainTextEdit, ButtonsWidget):
    """QPlainTextEdit with overlay tool buttons (see ButtonsWidget)."""

    def __init__(self, text=None):
        QPlainTextEdit.__init__(self, text)
        # expose the QLineEdit-like API expected by ButtonsWidget.on_copy
        self.setText = self.setPlainText
        self.text = self.toPlainText
        self.buttons = []

    def resizeEvent(self, e):
        # keep the overlay buttons anchored after every resize
        result = QPlainTextEdit.resizeEvent(self, e)
        self.resizeButtons()
        return result
class TaskThread(QThread):
    '''Thread that runs background tasks. Callbacks are guaranteed
    to happen in the context of its parent.'''
    class Task(NamedTuple):
        # the callable to run on the worker thread, plus its callbacks
        task: Callable
        cb_success: Optional[Callable]
        cb_done: Optional[Callable]
        cb_error: Optional[Callable]
    # (result, cb_done, cb_result) -- emitted from the worker thread;
    # connected to on_done so callbacks run in the parent's thread
    doneSig = pyqtSignal(object, object, object)
    def __init__(self, parent, on_error=None):
        super(TaskThread, self).__init__(parent)
        # default error callback used by add() when none is given
        self.on_error = on_error
        self.tasks = queue.Queue()
        self.doneSig.connect(self.on_done)
        # the thread starts itself; callers only add() tasks
        self.start()
    def add(self, task, on_success=None, on_done=None, on_error=None):
        # queue a task; on completion on_done() runs first, then
        # on_success(result) or on_error(exc_info) -- see on_done()
        on_error = on_error or self.on_error
        self.tasks.put(TaskThread.Task(task, on_success, on_done, on_error))
    def run(self):
        # worker loop: a falsy sentinel (None, put by stop()) ends the thread
        while True:
            task = self.tasks.get()  # type: TaskThread.Task
            if not task:
                break
            try:
                result = task.task()
                self.doneSig.emit(result, task.cb_done, task.cb_success)
            except BaseException:
                # report the full exc_info tuple to the error callback
                self.doneSig.emit(sys.exc_info(), task.cb_done, task.cb_error)
    def on_done(self, result, cb_done, cb_result):
        # This runs in the parent's thread.
        if cb_done:
            cb_done()
        if cb_result:
            cb_result(result)
    def stop(self):
        # enqueue the sentinel; run() exits after draining earlier tasks
        self.tasks.put(None)
class ColorSchemeItem:
    """A (light, dark) color pair that resolves against the active scheme."""

    def __init__(self, fg_color, bg_color):
        self.colors = (fg_color, bg_color)

    def _get_color(self, background):
        # the index flips when the app-wide dark scheme is active
        return self.colors[(int(background) + int(ColorScheme.dark_scheme)) % 2]

    def as_stylesheet(self, background=False):
        """Return a Qt stylesheet snippet setting (background-)color."""
        prefix = "background-" if background else ""
        return "QWidget {{ {}color:{}; }}".format(prefix, self._get_color(background))

    def as_color(self, background=False):
        """Return the resolved color as a QColor."""
        return QColor(self._get_color(background))
class ColorScheme:
    """Semantic color palette; switches to dark variants once a dark
    widget background is detected (see update_from_widget)."""

    dark_scheme = False

    GREEN = ColorSchemeItem("#117c11", "#8af296")
    YELLOW = ColorSchemeItem("#897b2a", "#ffff00")
    RED = ColorSchemeItem("#7c1111", "#f18c8c")
    BLUE = ColorSchemeItem("#123b7c", "#8cb3f2")
    DEFAULT = ColorSchemeItem("#818181", "white")

    @staticmethod
    def has_dark_background(widget):
        """True if the widget's background RGB sum is below half the max."""
        rgb = widget.palette().color(QPalette.Background).getRgb()[0:3]
        return sum(rgb) < (255*3/2)

    @staticmethod
    def update_from_widget(widget, force_dark=False):
        """Latch the global dark_scheme flag on (never back off)."""
        if force_dark or ColorScheme.has_dark_background(widget):
            ColorScheme.dark_scheme = True
class AcceptFileDragDrop:
    """Mixin that accepts drag-and-drop of local files with a given suffix.

    The host class must be a QWidget and implement onFileAdded(path).
    """

    def __init__(self, file_type=""):
        assert isinstance(self, QWidget)
        self.setAcceptDrops(True)
        self.file_type = file_type

    def validateEvent(self, event):
        """Accept the event iff every dragged URL is a matching local file."""
        mime = event.mimeData()
        if not mime.hasUrls():
            event.ignore()
            return False
        for url in mime.urls():
            if not url.toLocalFile().endswith(self.file_type):
                event.ignore()
                return False
        event.accept()
        return True

    def dragEnterEvent(self, event):
        self.validateEvent(event)

    def dragMoveEvent(self, event):
        if self.validateEvent(event):
            event.setDropAction(Qt.CopyAction)

    def dropEvent(self, event):
        if self.validateEvent(event):
            for url in event.mimeData().urls():
                self.onFileAdded(url.toLocalFile())

    def onFileAdded(self, fn):
        # subclasses must handle the dropped file path
        raise NotImplementedError()
def import_meta_gui(electrum_window, title, importer, on_success):
    """Ask the user for a JSON file and run *importer* on it.

    Shows an error dialog on FileImportFailed; otherwise shows a success
    message and calls on_success().
    """
    filter_ = "JSON (*.json);;All files (*)"
    filename = electrum_window.getOpenFileName(_("Open {} file").format(title), filter_)
    if not filename:
        return
    try:
        importer(filename)
    except FileImportFailed as e:
        electrum_window.show_critical(str(e))
        return
    electrum_window.show_message(_("Your {} were successfully imported").format(title))
    on_success()
def export_meta_gui(electrum_window, title, exporter):
    """Ask the user for a destination file and run *exporter* on it.

    Shows an error dialog on FileExportFailed, otherwise a confirmation.
    """
    filter_ = "JSON (*.json);;All files (*)"
    filename = electrum_window.getSaveFileName(
        _("Select file to save your {}").format(title),
        'electrum_{}.json'.format(title), filter_)
    if not filename:
        return
    try:
        exporter(filename)
    except FileExportFailed as e:
        electrum_window.show_critical(str(e))
        return
    electrum_window.show_message(_("Your {0} were exported to '{1}'")
                                 .format(title, str(filename)))
def get_parent_main_window(widget):
    """Returns a reference to the ElectrumWindow this widget belongs to.

    Walks up the parent chain (bounded at 100 levels as a cycle guard);
    returns None if no ElectrumWindow ancestor is found.
    """
    from .main_window import ElectrumWindow
    # fix: the original used `_` as the loop variable, shadowing the i18n
    # translation function `_` imported at module level
    for _depth in range(100):
        if widget is None:
            return None
        if isinstance(widget, ElectrumWindow):
            return widget
        widget = widget.parentWidget()
    return None
def icon_path(icon_basename):
    """Return the absolute resource path of a bundled GUI icon file."""
    return resource_path('gui', 'icons', icon_basename)
# cache icons: the same basenames are requested repeatedly by the lists
@lru_cache(maxsize=1000)
def read_QIcon(icon_basename):
    """Return a (cached) QIcon for the given bundled icon basename."""
    return QIcon(icon_path(icon_basename))
def get_default_language():
    """Return the system locale name if a translation exists for it,
    otherwise fall back to 'en_UK'."""
    name = QLocale.system().name()
    if name in languages:
        return name
    return 'en_UK'
class FromList(QTreeWidget):
    """A compact, header-less, two-column tree with a context menu."""

    def __init__(self, parent, create_menu):
        super().__init__(parent)
        self.setHeaderHidden(True)
        self.setMaximumHeight(300)
        self.setContextMenuPolicy(Qt.CustomContextMenu)
        self.customContextMenuRequested.connect(create_menu)
        self.setUniformRowHeights(True)
        # remove left margin
        self.setRootIsDecorated(False)
        self.setColumnCount(2)
        self.header().setStretchLastSection(False)
        for col in (0, 1):
            self.header().setSectionResizeMode(col, QHeaderView.ResizeToContents)
if __name__ == "__main__":
    # Minimal manual smoke test for WaitingDialog.
    app = QApplication([])
    # fix: WaitingDialog asserts a non-None parent, so give it one, and it
    # has no start() method -- its TaskThread self-starts in __init__
    parent = QWidget()
    WaitingDialog(parent, 'testing ...',
                  lambda: [time.sleep(1)],
                  lambda x: QMessageBox.information(None, 'done', "done"))
    app.exec_()
| 34.180851 | 124 | 0.63869 | import asyncio
import os.path
import time
import sys
import platform
import queue
import traceback
from functools import partial, lru_cache
from typing import NamedTuple, Callable, Optional, TYPE_CHECKING, Union, List, Dict
from PyQt5.QtGui import (QFont, QColor, QCursor, QPixmap, QStandardItem,
QPalette, QIcon)
from PyQt5.QtCore import (Qt, QPersistentModelIndex, QModelIndex, pyqtSignal,
QCoreApplication, QItemSelectionModel, QThread,
QSortFilterProxyModel, QSize, QLocale)
from PyQt5.QtWidgets import (QPushButton, QLabel, QMessageBox, QHBoxLayout,
QAbstractItemView, QVBoxLayout, QLineEdit,
QStyle, QDialog, QGroupBox, QButtonGroup, QRadioButton,
QFileDialog, QWidget, QToolButton, QTreeView, QPlainTextEdit,
QHeaderView, QApplication, QToolTip, QTreeWidget, QStyledItemDelegate)
from electrum_dash.i18n import _, languages
from electrum_dash.util import (FileImportFailed, FileExportFailed,
resource_path)
from electrum_dash.paymentrequest import PR_UNPAID, PR_PAID, PR_EXPIRED
if TYPE_CHECKING:
from .main_window import ElectrumWindow
if platform.system() == 'Windows':
if platform.release() in ['7', '8', '10']:
MONOSPACE_FONT = 'Consolas'
else:
MONOSPACE_FONT = 'Lucida Console'
elif platform.system() == 'Darwin':
MONOSPACE_FONT = 'Menlo'
else:
MONOSPACE_FONT = 'monospace'
dialogs = []
pr_icons = {
PR_UNPAID:"unpaid.png",
PR_PAID:"confirmed.png",
PR_EXPIRED:"expired.png"
}
pr_tooltips = {
PR_UNPAID:_('Pending'),
PR_PAID:_('Paid'),
PR_EXPIRED:_('Expired')
}
expiration_values = [
(_('1 hour'), 60*60),
(_('1 day'), 24*60*60),
(_('1 week'), 7*24*60*60),
(_('Never'), None)
]
class EnterButton(QPushButton):
def __init__(self, text, func):
QPushButton.__init__(self, text)
self.func = func
self.clicked.connect(func)
def keyPressEvent(self, e):
if e.key() == Qt.Key_Return:
self.func()
class ThreadedButton(QPushButton):
def __init__(self, text, task, on_success=None, on_error=None):
QPushButton.__init__(self, text)
self.task = task
self.on_success = on_success
self.on_error = on_error
self.clicked.connect(self.run_task)
def run_task(self):
self.setEnabled(False)
self.thread = TaskThread(self)
self.thread.add(self.task, self.on_success, self.done, self.on_error)
def done(self):
self.setEnabled(True)
self.thread.stop()
class WWLabel(QLabel):
def __init__ (self, text="", parent=None):
QLabel.__init__(self, text, parent)
self.setWordWrap(True)
class HelpLabel(QLabel):
def __init__(self, text, help_text):
QLabel.__init__(self, text)
self.help_text = help_text
self.app = QCoreApplication.instance()
self.font = QFont()
def mouseReleaseEvent(self, x):
QMessageBox.information(self, 'Help', self.help_text)
def enterEvent(self, event):
self.font.setUnderline(True)
self.setFont(self.font)
self.app.setOverrideCursor(QCursor(Qt.PointingHandCursor))
return QLabel.enterEvent(self, event)
def leaveEvent(self, event):
self.font.setUnderline(False)
self.setFont(self.font)
self.app.setOverrideCursor(QCursor(Qt.ArrowCursor))
return QLabel.leaveEvent(self, event)
class HelpButton(QPushButton):
def __init__(self, text):
QPushButton.__init__(self, '?')
self.help_text = text
self.setFocusPolicy(Qt.NoFocus)
self.setFixedWidth(20)
self.clicked.connect(self.onclick)
def onclick(self):
QMessageBox.information(self, 'Help', self.help_text)
class InfoButton(QPushButton):
def __init__(self, text):
QPushButton.__init__(self, 'Info')
self.help_text = text
self.setFocusPolicy(Qt.NoFocus)
self.setFixedWidth(60)
self.clicked.connect(self.onclick)
def onclick(self):
QMessageBox.information(self, 'Info', self.help_text)
class Buttons(QHBoxLayout):
def __init__(self, *buttons):
QHBoxLayout.__init__(self)
self.addStretch(1)
for b in buttons:
self.addWidget(b)
class CloseButton(QPushButton):
def __init__(self, dialog):
QPushButton.__init__(self, _("Close"))
self.clicked.connect(dialog.close)
self.setDefault(True)
class CopyButton(QPushButton):
def __init__(self, text_getter, app):
QPushButton.__init__(self, _("Copy"))
self.clicked.connect(lambda: app.clipboard().setText(text_getter()))
class CopyCloseButton(QPushButton):
def __init__(self, text_getter, app, dialog):
QPushButton.__init__(self, _("Copy and Close"))
self.clicked.connect(lambda: app.clipboard().setText(text_getter()))
self.clicked.connect(dialog.close)
self.setDefault(True)
class OkButton(QPushButton):
def __init__(self, dialog, label=None):
QPushButton.__init__(self, label or _("OK"))
self.clicked.connect(dialog.accept)
self.setDefault(True)
class CancelButton(QPushButton):
def __init__(self, dialog, label=None):
QPushButton.__init__(self, label or _("Cancel"))
self.clicked.connect(dialog.reject)
class MessageBoxMixin(object):
def top_level_window_recurse(self, window=None, test_func=None):
window = window or self
classes = (WindowModalDialog, QMessageBox)
if test_func is None:
test_func = lambda x: True
for n, child in enumerate(window.children()):
if isinstance(child, classes) and child.isVisible() \
and test_func(child):
return self.top_level_window_recurse(child, test_func=test_func)
return window
def top_level_window(self, test_func=None):
return self.top_level_window_recurse(test_func)
def question(self, msg, parent=None, title=None, icon=None):
Yes, No = QMessageBox.Yes, QMessageBox.No
return self.msg_box(icon or QMessageBox.Question,
parent, title or '',
msg, buttons=Yes|No, defaultButton=No) == Yes
def show_warning(self, msg, parent=None, title=None, **kwargs):
return self.msg_box(QMessageBox.Warning, parent,
title or _('Warning'), msg, **kwargs)
def show_error(self, msg, parent=None, **kwargs):
return self.msg_box(QMessageBox.Warning, parent,
_('Error'), msg, **kwargs)
def show_critical(self, msg, parent=None, title=None, **kwargs):
return self.msg_box(QMessageBox.Critical, parent,
title or _('Critical Error'), msg, **kwargs)
def show_message(self, msg, parent=None, title=None, **kwargs):
return self.msg_box(QMessageBox.Information, parent,
title or _('Information'), msg, **kwargs)
def msg_box(self, icon, parent, title, text, buttons=QMessageBox.Ok,
defaultButton=QMessageBox.NoButton, rich_text=False):
parent = parent or self.top_level_window()
if type(icon) is QPixmap:
d = QMessageBox(QMessageBox.Information, title, str(text), buttons, parent)
d.setIconPixmap(icon)
else:
d = QMessageBox(icon, title, str(text), buttons, parent)
d.setWindowModality(Qt.WindowModal)
d.setDefaultButton(defaultButton)
if rich_text:
d.setTextInteractionFlags(Qt.TextSelectableByMouse| Qt.LinksAccessibleByMouse)
d.setTextFormat(Qt.RichText)
else:
d.setTextInteractionFlags(Qt.TextSelectableByMouse)
d.setTextFormat(Qt.PlainText)
return d.exec_()
class WindowModalDialog(QDialog, MessageBoxMixin):
def __init__(self, parent, title=None):
QDialog.__init__(self, parent)
self.setWindowModality(Qt.WindowModal)
if title:
self.setWindowTitle(title)
class WaitingDialog(WindowModalDialog):
def __init__(self, parent, message, task, on_success=None, on_error=None):
assert parent
if isinstance(parent, MessageBoxMixin):
parent = parent.top_level_window()
WindowModalDialog.__init__(self, parent, _("Please wait"))
vbox = QVBoxLayout(self)
vbox.addWidget(QLabel(message))
self.accepted.connect(self.on_accepted)
self.show()
self.thread = TaskThread(self)
self.thread.finished.connect(self.deleteLater) self.thread.add(task, on_success, self.accept, on_error)
def wait(self):
self.thread.wait()
def on_accepted(self):
self.thread.stop()
def line_dialog(parent, title, label, ok_label, default=None):
dialog = WindowModalDialog(parent, title)
dialog.setMinimumWidth(500)
l = QVBoxLayout()
dialog.setLayout(l)
l.addWidget(QLabel(label))
txt = QLineEdit()
if default:
txt.setText(default)
l.addWidget(txt)
l.addLayout(Buttons(CancelButton(dialog), OkButton(dialog, ok_label)))
if dialog.exec_():
return txt.text()
def text_dialog(parent, title, header_layout, ok_label, default=None, allow_multi=False):
from .qrtextedit import ScanQRTextEdit
dialog = WindowModalDialog(parent, title)
dialog.setMinimumWidth(600)
l = QVBoxLayout()
dialog.setLayout(l)
if isinstance(header_layout, str):
l.addWidget(QLabel(header_layout))
else:
l.addLayout(header_layout)
txt = ScanQRTextEdit(allow_multi=allow_multi)
if default:
txt.setText(default)
l.addWidget(txt)
l.addLayout(Buttons(CancelButton(dialog), OkButton(dialog, ok_label)))
if dialog.exec_():
return txt.toPlainText()
class ChoicesLayout(object):
def __init__(self, msg, choices, on_clicked=None, checked_index=0):
vbox = QVBoxLayout()
if len(msg) > 50:
vbox.addWidget(WWLabel(msg))
msg = ""
gb2 = QGroupBox(msg)
vbox.addWidget(gb2)
vbox2 = QVBoxLayout()
gb2.setLayout(vbox2)
self.group = group = QButtonGroup()
for i,c in enumerate(choices):
button = QRadioButton(gb2)
button.setText(c)
vbox2.addWidget(button)
group.addButton(button)
group.setId(button, i)
if i==checked_index:
button.setChecked(True)
if on_clicked:
group.buttonClicked.connect(partial(on_clicked, self))
self.vbox = vbox
def layout(self):
return self.vbox
def selected_index(self):
return self.group.checkedId()
def address_field(addresses):
hbox = QHBoxLayout()
address_e = QLineEdit()
if addresses and len(addresses) > 0:
address_e.setText(addresses[0])
else:
addresses = []
def func():
try:
i = addresses.index(str(address_e.text())) + 1
i = i % len(addresses)
address_e.setText(addresses[i])
except ValueError:
if addresses and len(addresses) > 0:
address_e.setText(addresses[0])
button = QPushButton(_('Address'))
button.clicked.connect(func)
hbox.addWidget(button)
hbox.addWidget(address_e)
return hbox, address_e
def filename_field(parent, config, defaultname, select_msg):
vbox = QVBoxLayout()
vbox.addWidget(QLabel(_("Format")))
gb = QGroupBox("format", parent)
b1 = QRadioButton(gb)
b1.setText(_("CSV"))
b1.setChecked(True)
b2 = QRadioButton(gb)
b2.setText(_("json"))
vbox.addWidget(b1)
vbox.addWidget(b2)
hbox = QHBoxLayout()
directory = config.get('io_dir', os.path.expanduser('~'))
path = os.path.join( directory, defaultname )
filename_e = QLineEdit()
filename_e.setText(path)
def func():
text = filename_e.text()
_filter = "*.csv" if text.endswith(".csv") else "*.json" if text.endswith(".json") else None
p, __ = QFileDialog.getSaveFileName(None, select_msg, text, _filter)
if p:
filename_e.setText(p)
button = QPushButton(_('File'))
button.clicked.connect(func)
hbox.addWidget(button)
hbox.addWidget(filename_e)
vbox.addLayout(hbox)
def set_csv(v):
text = filename_e.text()
text = text.replace(".json",".csv") if v else text.replace(".csv",".json")
filename_e.setText(text)
b1.clicked.connect(lambda: set_csv(True))
b2.clicked.connect(lambda: set_csv(False))
return vbox, filename_e, b1
class ElectrumItemDelegate(QStyledItemDelegate):
def __init__(self, tv):
super().__init__(tv)
self.tv = tv
self.opened = None
def on_closeEditor(editor: QLineEdit, hint):
self.opened = None
def on_commitData(editor: QLineEdit):
new_text = editor.text()
idx = QModelIndex(self.opened)
row, col = idx.row(), idx.column()
_prior_text, user_role = self.tv.text_txid_from_coordinate(row, col)
# check that we didn't forget to set UserRole on an editable field
assert user_role is not None, (row, col)
self.tv.on_edited(idx, user_role, new_text)
self.closeEditor.connect(on_closeEditor)
self.commitData.connect(on_commitData)
def createEditor(self, parent, option, idx):
self.opened = QPersistentModelIndex(idx)
return super().createEditor(parent, option, idx)
class MyTreeView(QTreeView):
def __init__(self, parent: 'ElectrumWindow', create_menu, stretch_column=None, editable_columns=None):
super().__init__(parent)
self.parent = parent
self.config = self.parent.config
self.stretch_column = stretch_column
self.setContextMenuPolicy(Qt.CustomContextMenu)
self.customContextMenuRequested.connect(create_menu)
self.setUniformRowHeights(True)
if editable_columns is None:
editable_columns = {stretch_column}
else:
editable_columns = set(editable_columns)
self.editable_columns = editable_columns
self.setItemDelegate(ElectrumItemDelegate(self))
self.current_filter = ""
self.setRootIsDecorated(False)
self.toolbar_shown = False
# So to speed the UI up considerably, set it to
# only look at as many rows as currently visible.
self.header().setResizeContentsPrecision(0)
def set_editability(self, items):
for idx, i in enumerate(items):
i.setEditable(idx in self.editable_columns)
def selected_in_column(self, column: int):
items = self.selectionModel().selectedIndexes()
return list(x for x in items if x.column() == column)
def current_item_user_role(self, col) -> Optional[QStandardItem]:
idx = self.selectionModel().currentIndex()
idx = idx.sibling(idx.row(), col)
item = self.model().itemFromIndex(idx)
if item:
return item.data(Qt.UserRole)
def set_current_idx(self, set_current: QPersistentModelIndex):
if set_current:
assert isinstance(set_current, QPersistentModelIndex)
assert set_current.isValid()
self.selectionModel().select(QModelIndex(set_current), QItemSelectionModel.SelectCurrent)
def update_headers(self, headers: Union[List[str], Dict[int, str]]):
# headers is either a list of column names, or a dict: (col_idx->col_name)
if not isinstance(headers, dict): # convert to dict
headers = dict(enumerate(headers))
col_names = [headers[col_idx] for col_idx in sorted(headers.keys())]
model = self.model()
model.setHorizontalHeaderLabels(col_names)
self.header().setStretchLastSection(False)
self.header().setDefaultAlignment(Qt.AlignCenter)
for col_idx in headers:
sm = QHeaderView.Stretch if col_idx == self.stretch_column else QHeaderView.ResizeToContents
self.header().setSectionResizeMode(col_idx, sm)
def keyPressEvent(self, event):
if self.itemDelegate().opened:
return
if event.key() in [ Qt.Key_F2, Qt.Key_Return ]:
self.on_activated(self.selectionModel().currentIndex())
return
super().keyPressEvent(event)
def on_activated(self, idx):
# on 'enter' we show the menu
pt = self.visualRect(idx).bottomLeft()
pt.setX(50)
self.customContextMenuRequested.emit(pt)
def edit(self, idx, trigger=QAbstractItemView.AllEditTriggers, event=None):
return super().edit(idx, trigger, event)
def on_edited(self, idx: QModelIndex, user_role, text):
self.parent.wallet.set_label(user_role, text)
self.parent.history_model.refresh('on_edited in MyTreeView')
self.parent.update_completions()
def should_hide(self, row):
return False
def text_txid_from_coordinate(self, row_num, column):
assert not isinstance(self.model(), QSortFilterProxyModel)
idx = self.model().index(row_num, column)
item = self.model().itemFromIndex(idx)
user_role = item.data(Qt.UserRole)
return item.text(), user_role
def hide_row(self, row_num):
should_hide = self.should_hide(row_num)
if not self.current_filter and should_hide is None:
# no filters at all, neither date nor search
self.setRowHidden(row_num, QModelIndex(), False)
return
for column in self.filter_columns:
txt, _ = self.text_txid_from_coordinate(row_num, column)
txt = txt.lower()
if self.current_filter in txt:
# the filter matched, but the date filter might apply
self.setRowHidden(row_num, QModelIndex(), bool(should_hide))
break
else:
# we did not find the filter in any columns, hide the item
self.setRowHidden(row_num, QModelIndex(), True)
def filter(self, p):
p = p.lower()
self.current_filter = p
self.hide_rows()
def hide_rows(self):
for row in range(self.model().rowCount()):
self.hide_row(row)
def create_toolbar(self, config=None):
hbox = QHBoxLayout()
buttons = self.get_toolbar_buttons()
for b in buttons:
b.setVisible(False)
hbox.addWidget(b)
hide_button = QPushButton('x')
hide_button.setVisible(False)
hide_button.pressed.connect(lambda: self.show_toolbar(False, config))
self.toolbar_buttons = buttons + (hide_button,)
hbox.addStretch()
hbox.addWidget(hide_button)
return hbox
def save_toolbar_state(self, state, config):
pass # implemented in subclasses
def show_toolbar(self, state, config=None):
if state == self.toolbar_shown:
return
self.toolbar_shown = state
if config:
self.save_toolbar_state(state, config)
for b in self.toolbar_buttons:
b.setVisible(state)
if not state:
self.on_hide_toolbar()
def toggle_toolbar(self, config=None):
self.show_toolbar(not self.toolbar_shown, config)
class ButtonsWidget(QWidget):
def __init__(self):
super(QWidget, self).__init__()
self.buttons = []
def resizeButtons(self):
frameWidth = self.style().pixelMetric(QStyle.PM_DefaultFrameWidth)
x = self.rect().right() - frameWidth
y = self.rect().bottom() - frameWidth
for button in self.buttons:
sz = button.sizeHint()
x -= sz.width()
button.move(x, y - sz.height())
def addButton(self, icon_name, on_click, tooltip):
button = QToolButton(self)
button.setIcon(read_QIcon(icon_name))
iconSize = QLineEdit().sizeHint().height() - 7 # 3px (button sz - icon sz), 2px borders, 2px padding
button.setIconSize(QSize(iconSize, iconSize))
button.setCursor(QCursor(Qt.PointingHandCursor))
button.setStyleSheet("QToolButton { border: none; hover {border: 1px} pressed {border: 1px} padding: 0px; }")
button.setVisible(True)
button.setToolTip(tooltip)
button.clicked.connect(on_click)
self.buttons.append(button)
return button
def addCopyButton(self, app):
self.app = app
self.addButton("copy.png", self.on_copy, _("Copy to clipboard"))
def on_copy(self):
self.app.clipboard().setText(self.text())
QToolTip.showText(QCursor.pos(), _("Text copied to clipboard"), self)
class ButtonsLineEdit(QLineEdit, ButtonsWidget):
def __init__(self, text=None):
QLineEdit.__init__(self, text)
self.buttons = []
def resizeEvent(self, e):
o = QLineEdit.resizeEvent(self, e)
self.resizeButtons()
return o
class ButtonsTextEdit(QPlainTextEdit, ButtonsWidget):
def __init__(self, text=None):
QPlainTextEdit.__init__(self, text)
self.setText = self.setPlainText
self.text = self.toPlainText
self.buttons = []
def resizeEvent(self, e):
o = QPlainTextEdit.resizeEvent(self, e)
self.resizeButtons()
return o
class TaskThread(QThread):
class Task(NamedTuple):
task: Callable
cb_success: Optional[Callable]
cb_done: Optional[Callable]
cb_error: Optional[Callable]
doneSig = pyqtSignal(object, object, object)
def __init__(self, parent, on_error=None):
super(TaskThread, self).__init__(parent)
self.on_error = on_error
self.tasks = queue.Queue()
self.doneSig.connect(self.on_done)
self.start()
def add(self, task, on_success=None, on_done=None, on_error=None):
on_error = on_error or self.on_error
self.tasks.put(TaskThread.Task(task, on_success, on_done, on_error))
def run(self):
while True:
task = self.tasks.get() # type: TaskThread.Task
if not task:
break
try:
result = task.task()
self.doneSig.emit(result, task.cb_done, task.cb_success)
except BaseException:
self.doneSig.emit(sys.exc_info(), task.cb_done, task.cb_error)
def on_done(self, result, cb_done, cb_result):
# This runs in the parent's thread.
if cb_done:
cb_done()
if cb_result:
cb_result(result)
def stop(self):
self.tasks.put(None)
class ColorSchemeItem:
def __init__(self, fg_color, bg_color):
self.colors = (fg_color, bg_color)
def _get_color(self, background):
return self.colors[(int(background) + int(ColorScheme.dark_scheme)) % 2]
def as_stylesheet(self, background=False):
css_prefix = "background-" if background else ""
color = self._get_color(background)
return "QWidget {{ {}color:{}; }}".format(css_prefix, color)
def as_color(self, background=False):
color = self._get_color(background)
return QColor(color)
class ColorScheme:
dark_scheme = False
GREEN = ColorSchemeItem("#117c11", "#8af296")
YELLOW = ColorSchemeItem("#897b2a", "#ffff00")
RED = ColorSchemeItem("#7c1111", "#f18c8c")
BLUE = ColorSchemeItem("#123b7c", "#8cb3f2")
DEFAULT = ColorSchemeItem("#818181", "white")
@staticmethod
def has_dark_background(widget):
brightness = sum(widget.palette().color(QPalette.Background).getRgb()[0:3])
return brightness < (255*3/2)
@staticmethod
def update_from_widget(widget, force_dark=False):
if force_dark or ColorScheme.has_dark_background(widget):
ColorScheme.dark_scheme = True
class AcceptFileDragDrop:
def __init__(self, file_type=""):
assert isinstance(self, QWidget)
self.setAcceptDrops(True)
self.file_type = file_type
def validateEvent(self, event):
if not event.mimeData().hasUrls():
event.ignore()
return False
for url in event.mimeData().urls():
if not url.toLocalFile().endswith(self.file_type):
event.ignore()
return False
event.accept()
return True
def dragEnterEvent(self, event):
self.validateEvent(event)
def dragMoveEvent(self, event):
if self.validateEvent(event):
event.setDropAction(Qt.CopyAction)
def dropEvent(self, event):
if self.validateEvent(event):
for url in event.mimeData().urls():
self.onFileAdded(url.toLocalFile())
def onFileAdded(self, fn):
raise NotImplementedError()
def import_meta_gui(electrum_window, title, importer, on_success):
filter_ = "JSON (*.json);;All files (*)"
filename = electrum_window.getOpenFileName(_("Open {} file").format(title), filter_)
if not filename:
return
try:
importer(filename)
except FileImportFailed as e:
electrum_window.show_critical(str(e))
else:
electrum_window.show_message(_("Your {} were successfully imported").format(title))
on_success()
def export_meta_gui(electrum_window, title, exporter):
filter_ = "JSON (*.json);;All files (*)"
filename = electrum_window.getSaveFileName(_("Select file to save your {}").format(title),
'electrum_{}.json'.format(title), filter_)
if not filename:
return
try:
exporter(filename)
except FileExportFailed as e:
electrum_window.show_critical(str(e))
else:
electrum_window.show_message(_("Your {0} were exported to '{1}'")
.format(title, str(filename)))
def get_parent_main_window(widget):
from .main_window import ElectrumWindow
for _ in range(100):
if widget is None:
return None
if not isinstance(widget, ElectrumWindow):
widget = widget.parentWidget()
else:
return widget
return None
def icon_path(icon_basename):
return resource_path('gui', 'icons', icon_basename)
@lru_cache(maxsize=1000)
def read_QIcon(icon_basename):
return QIcon(icon_path(icon_basename))
def get_default_language():
name = QLocale.system().name()
return name if name in languages else 'en_UK'
class FromList(QTreeWidget):
def __init__(self, parent, create_menu):
super().__init__(parent)
self.setHeaderHidden(True)
self.setMaximumHeight(300)
self.setContextMenuPolicy(Qt.CustomContextMenu)
self.customContextMenuRequested.connect(create_menu)
self.setUniformRowHeights(True)
self.setRootIsDecorated(False)
self.setColumnCount(2)
self.header().setStretchLastSection(False)
sm = QHeaderView.ResizeToContents
self.header().setSectionResizeMode(0, sm)
self.header().setSectionResizeMode(1, sm)
if __name__ == "__main__":
app = QApplication([])
t = WaitingDialog(None, 'testing ...', lambda: [time.sleep(1)], lambda x: QMessageBox.information(None, 'done', "done"))
t.start()
app.exec_()
| true | true |
1c2ffc104a4e49aee23eb6a73b5f88a7f6995938 | 414 | py | Python | ontask/migrations/0018_auto_20180428_1425.py | pinheiroo27/ontask_b | 23fee8caf4e1c5694a710a77f3004ca5d9effeac | [
"MIT"
] | 33 | 2017-12-02T04:09:24.000Z | 2021-11-07T08:41:57.000Z | ontask/migrations/0018_auto_20180428_1425.py | pinheiroo27/ontask_b | 23fee8caf4e1c5694a710a77f3004ca5d9effeac | [
"MIT"
] | 189 | 2017-11-16T04:06:29.000Z | 2022-03-11T23:35:59.000Z | ontask/migrations/0018_auto_20180428_1425.py | pinheiroo27/ontask_b | 23fee8caf4e1c5694a710a77f3004ca5d9effeac | [
"MIT"
] | 30 | 2017-11-30T03:35:44.000Z | 2022-01-31T03:08:08.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.12 on 2018-04-28 04:25
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('ontask', '0017_auto_20180417_1557'),
]
operations = [
migrations.AlterModelOptions(
name='column',
options={'ordering': ('-is_key',)},
),
]
| 20.7 | 49 | 0.608696 |
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('ontask', '0017_auto_20180417_1557'),
]
operations = [
migrations.AlterModelOptions(
name='column',
options={'ordering': ('-is_key',)},
),
]
| true | true |
1c2ffc8083c9d545f112d5983231edff751831f7 | 3,172 | py | Python | tests/test_record.py | cyemeng/python-little_r | 13aa985c9fd89106acc6260e6c4eeb4eb99111af | [
"BSD-3-Clause"
] | 7 | 2018-03-19T01:39:37.000Z | 2022-01-09T09:19:30.000Z | tests/test_record.py | cyemeng/python-little_r | 13aa985c9fd89106acc6260e6c4eeb4eb99111af | [
"BSD-3-Clause"
] | null | null | null | tests/test_record.py | cyemeng/python-little_r | 13aa985c9fd89106acc6260e6c4eeb4eb99111af | [
"BSD-3-Clause"
] | 4 | 2020-03-20T09:19:59.000Z | 2022-01-09T07:49:50.000Z | import unittest
from datetime import datetime
from little_r import Record
class TestRecord(unittest.TestCase):
def create_sample_record(self, **kwargs):
'''
Creates a toy record.abs
'''
return Record('TestName', 100, 50, None, datetime(2017, 1, 1, 18, 30, 0), **kwargs)
def test_getitem(self):
r = self.create_sample_record(temperature=100.0)
self.assertEqual(r['temperature'], 100.0)
def test_setitem(self):
r = Record('TestName', 100, 50, None, '2017-01-01')
r['temperature'] = 100.0
self.assertEqual(r['temperature'], 100.0)
def test_setitem_ignores_unknown(self):
r = self.create_sample_record()
with self.assertRaises(KeyError):
r['something'] = 100.0
def test_date_format(self):
r = self.create_sample_record()
self.assertEqual(r.get_formated_time(), '20170101183000')
def test_end_of_record(self):
r = self.create_sample_record()
self.assertEqual(r.end_of_message_line(), ' 1 0 0')
def test_data_field_all_empty(self):
r = self.create_sample_record()
expected_output = '-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0'
self.assertEqual(r.data_record(), expected_output)
def test_data_field_one_set(self):
r = self.create_sample_record()
r['temperature'] = 453.14999
expected_output = '-888888.00000 0-888888.00000 0 453.14999 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0'
self.assertEqual(r.data_record(), expected_output)
def test_closing_line(self):
r = self.create_sample_record()
expected_output = '-777777.00000 0-777777.00000 0 1.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0'
self.assertEqual(r.data_closing_line(), expected_output)
def test_generate_header(self):
self.maxDiff = None
r = Record('Chieti 14.181 42.377 ', 42.377, 14.181, None, datetime(2011, 10, 25, 6, 30, 0))
# expected_output = ' 42.37700 14.18100Chieti 14.181 42.377 xxxxxxxxxxxxxxxxxxxSURFACE DATA FROM MY DATABASExxxxxxxxxxxFM-12 SYNOPxxxxxxxxxxxxxxxxxxxxxxxxxxxxxI did itxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx -888888.00000 6 0 0 0 0 F F F -888888 -888888 20111025063000-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0'
# Just check the lenght
self.assertEqual(len(r.message_header()), 600)
if __name__ == '__main__':
unittest.main()
| 38.216867 | 630 | 0.613178 | import unittest
from datetime import datetime
from little_r import Record
class TestRecord(unittest.TestCase):
def create_sample_record(self, **kwargs):
return Record('TestName', 100, 50, None, datetime(2017, 1, 1, 18, 30, 0), **kwargs)
def test_getitem(self):
r = self.create_sample_record(temperature=100.0)
self.assertEqual(r['temperature'], 100.0)
def test_setitem(self):
r = Record('TestName', 100, 50, None, '2017-01-01')
r['temperature'] = 100.0
self.assertEqual(r['temperature'], 100.0)
def test_setitem_ignores_unknown(self):
r = self.create_sample_record()
with self.assertRaises(KeyError):
r['something'] = 100.0
def test_date_format(self):
r = self.create_sample_record()
self.assertEqual(r.get_formated_time(), '20170101183000')
def test_end_of_record(self):
r = self.create_sample_record()
self.assertEqual(r.end_of_message_line(), ' 1 0 0')
def test_data_field_all_empty(self):
r = self.create_sample_record()
expected_output = '-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0'
self.assertEqual(r.data_record(), expected_output)
def test_data_field_one_set(self):
r = self.create_sample_record()
r['temperature'] = 453.14999
expected_output = '-888888.00000 0-888888.00000 0 453.14999 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0'
self.assertEqual(r.data_record(), expected_output)
def test_closing_line(self):
r = self.create_sample_record()
expected_output = '-777777.00000 0-777777.00000 0 1.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0-888888.00000 0'
self.assertEqual(r.data_closing_line(), expected_output)
def test_generate_header(self):
self.maxDiff = None
r = Record('Chieti 14.181 42.377 ', 42.377, 14.181, None, datetime(2011, 10, 25, 6, 30, 0))
self.assertEqual(len(r.message_header()), 600)
if __name__ == '__main__':
unittest.main()
| true | true |
1c2ffd8667949f55ea8e8b7327d6c14a76c3d52e | 3,425 | py | Python | gtts/utils.py | scottgigante/gTTS | f17c6d5affc68464f2fc4498cc51b61e8cb704d1 | [
"MIT"
] | null | null | null | gtts/utils.py | scottgigante/gTTS | f17c6d5affc68464f2fc4498cc51b61e8cb704d1 | [
"MIT"
] | null | null | null | gtts/utils.py | scottgigante/gTTS | f17c6d5affc68464f2fc4498cc51b61e8cb704d1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from .tokenizer.symbols import ALL_PUNC as punc
from string import whitespace as ws
import re
_ALL_PUNC_OR_SPACE = re.compile(u"^[{}]*$".format(re.escape(punc + ws)))
"""Regex that matches if an entire line is only comprised
of whitespace and punctuation
"""
def _minimize(the_string, delim, max_size):
"""Recursively split a string in the largest chunks
possible from the highest position of a delimiter all the way
to a maximum size
Args:
the_string (string): The string to split.
delim (string): The delimiter to split on.
max_size (int): The maximum size of a chunk.
Returns:
list: the minimized string in tokens
Every chunk size will be at minimum ``the_string[0:idx]`` where ``idx``
is the highest index of ``delim`` found in ``the_string``; and at maximum
``the_string[0:max_size]`` if no ``delim`` was found in ``the_string``.
In the latter case, the split will occur at ``the_string[max_size]``
which can be any character. The function runs itself again on the rest of
``the_string`` (``the_string[idx:]``) until no chunk is larger than
``max_size``.
"""
# Remove `delim` from start of `the_string`
# i.e. prevent a recursive infinite loop on `the_string[0:0]`
# if `the_string` starts with `delim` and is larger than `max_size`
if the_string.startswith(delim):
the_string = the_string[_len(delim):]
if _len(the_string) > max_size:
try:
# Find the highest index of `delim` in `the_string[0:max_size]`
# i.e. `the_string` will be cut in half on `delim` index
idx = the_string.rindex(delim, 0, max_size)
except ValueError:
# `delim` not found in `the_string`, index becomes `max_size`
# i.e. `the_string` will be cut in half arbitrarily on `max_size`
idx = max_size
# Call itself again for `the_string[idx:]`
return [the_string[:idx]] + \
_minimize(the_string[idx:], delim, max_size)
else:
return [the_string]
def _len(text):
"""Same as ``len(text)`` for a string but that decodes
``text`` first in Python 2.x
Args:
text (string): String to get the size of.
Returns:
int: The size of the string.
"""
try:
# Python 2
return len(unicode(text))
except NameError: # pragma: no cover
# Python 3
return len(text)
def _clean_tokens(tokens):
"""Clean a list of strings
Args:
tokens (list): A list of strings (tokens) to clean.
Returns:
list: Stripped strings ``tokens`` without the original elements
that only consisted of whitespace and/or punctuation characters.
"""
return [t.strip() for t in tokens if not _ALL_PUNC_OR_SPACE.match(t)]
def _translate_url(tld="com", path=""):
"""Generates a Google Translate URL
Args:
tld (string): Top-level domain for the Google Translate host,
i.e ``https://translate.google.<tld>``. Default is ``com``.
path: (string): A path to append to the Google Translate host,
i.e ``https://translate.google.com/<path>``. Default is ``""``.
Returns:
string: A Google Translate URL `https://translate.google.<tld>/path`
"""
_GOOGLE_TTS_URL = "https://translate.google.{}/{}"
return _GOOGLE_TTS_URL.format(tld, path) | 33.578431 | 77 | 0.633577 |
from .tokenizer.symbols import ALL_PUNC as punc
from string import whitespace as ws
import re
_ALL_PUNC_OR_SPACE = re.compile(u"^[{}]*$".format(re.escape(punc + ws)))
def _minimize(the_string, delim, max_size):
if the_string.startswith(delim):
the_string = the_string[_len(delim):]
if _len(the_string) > max_size:
try:
idx = the_string.rindex(delim, 0, max_size)
except ValueError:
idx = max_size
return [the_string[:idx]] + \
_minimize(the_string[idx:], delim, max_size)
else:
return [the_string]
def _len(text):
try:
return len(unicode(text))
except NameError:
return len(text)
def _clean_tokens(tokens):
return [t.strip() for t in tokens if not _ALL_PUNC_OR_SPACE.match(t)]
def _translate_url(tld="com", path=""):
_GOOGLE_TTS_URL = "https://translate.google.{}/{}"
return _GOOGLE_TTS_URL.format(tld, path) | true | true |
1c2ffe7d14d648ccda0d61bed0666a3e5eefeaf2 | 191 | py | Python | streaming/urls.py | joelsegoviacrespo/control_aforo_migrado | be90d1d45a20f735e7ef20449c4ab91ca05b5d85 | [
"MIT"
] | null | null | null | streaming/urls.py | joelsegoviacrespo/control_aforo_migrado | be90d1d45a20f735e7ef20449c4ab91ca05b5d85 | [
"MIT"
] | null | null | null | streaming/urls.py | joelsegoviacrespo/control_aforo_migrado | be90d1d45a20f735e7ef20449c4ab91ca05b5d85 | [
"MIT"
] | null | null | null | # -*- encoding: utf-8 -*-
from django.urls import path
from django.conf.urls import url
from streaming import views
urlpatterns = [
path('', views.streaming, name='streaming'),
] | 23.875 | 56 | 0.675393 |
from django.urls import path
from django.conf.urls import url
from streaming import views
urlpatterns = [
path('', views.streaming, name='streaming'),
] | true | true |
1c2fff320c8eda3af4138c91c695d6aa49179dc9 | 5,681 | py | Python | tests/djinn_regression_example.py | LLNL/DJINN | 3dd40a7f3bd179e1e51f7f7a9991faa457b9546a | [
"BSD-Source-Code"
] | 32 | 2018-07-20T00:00:31.000Z | 2022-02-18T15:35:04.000Z | tests/djinn_regression_example.py | LLNL/DJINN | 3dd40a7f3bd179e1e51f7f7a9991faa457b9546a | [
"BSD-Source-Code"
] | 3 | 2019-09-15T04:34:19.000Z | 2021-06-01T16:41:23.000Z | tests/djinn_regression_example.py | LLNL/DJINN | 3dd40a7f3bd179e1e51f7f7a9991faa457b9546a | [
"BSD-Source-Code"
] | 9 | 2018-07-22T21:02:25.000Z | 2021-04-04T04:57:37.000Z | ###############################################################################
# Copyright (c) 2018, Lawrence Livermore National Security, LLC.
#
# Produced at the Lawrence Livermore National Laboratory
#
# Written by K. Humbird (humbird1@llnl.gov), L. Peterson (peterson76@llnl.gov).
#
# LLNL-CODE-754815
#
# All rights reserved.
#
# This file is part of DJINN.
#
# For details, see github.com/LLNL/djinn.
#
# For details about use and distribution, please read DJINN/LICENSE .
###############################################################################
###############################################################################
# Demo script for DJINN
# Below, each function available in DJINN is demonstrated for the
# Boston housing dataset. Please see comments and djinn docs for
# details on each function.
###############################################################################
import numpy as np
import matplotlib.pyplot as plt
import sklearn
try: from sklearn.model_selection import train_test_split
except: from sklearn.cross_validation import train_test_split
from sklearn import datasets
from djinn import djinn
print(sklearn.__version__)
'''
NOTE: for the boston housing data you can expect test
MSE~10-20, Mean Abs Err~3-4, Exp.Var.~0.8+
when using get_hyperparameters() function
'''
#Load the data, split into training/testing groups
d=datasets.load_boston()
X=d.data
Y=d.target
x_train,x_test,y_train,y_test=train_test_split(X, Y, test_size=0.2, random_state=1)
print("djinn example")
modelname="reg_djinn_test" # name the model
ntrees=1 # number of trees = number of neural nets in ensemble
maxdepth=4 # max depth of tree -- optimize this for each data set
dropout_keep=1.0 # dropout typically set to 1 for non-Bayesian models
#initialize the model
model=djinn.DJINN_Regressor(ntrees,maxdepth,dropout_keep)
# find optimal settings: this function returns dict with hyper-parameters
# each djinn function accepts random seeds for reproducible behavior
optimal=model.get_hyperparameters(x_train, y_train, random_state=1)
batchsize=optimal['batch_size']
learnrate=optimal['learn_rate']
epochs=optimal['epochs']
# train the model with hyperparameters determined above
model.train(x_train,y_train,epochs=epochs,learn_rate=learnrate, batch_size=batchsize,
display_step=1, save_files=True, file_name=modelname,
save_model=True,model_name=modelname, random_state=1)
# *note there is a function model.fit(x_train,y_train, ... ) that wraps
# get_hyperparameters() and train(), so that you do not have to manually
# pass hyperparameters to train(). However, get_hyperparameters() can
# be expensive, so I recommend running it once per dataset and using those
# hyperparameter values in train() to save computational time
# make predictions
m=model.predict(x_test) #returns the median prediction if more than one tree
#evaluate results
mse=sklearn.metrics.mean_squared_error(y_test,m)
mabs=sklearn.metrics.mean_absolute_error(y_test,m)
exvar=sklearn.metrics.explained_variance_score(y_test,m)
print('MSE',mse)
print('M Abs Err',mabs)
print('Expl. Var.',exvar)
#close model
model.close_model()
print("Reload model and continue training for 20 epochs")
# reload model; can also open it using cPickle.load()
model2=djinn.load(model_name="reg_djinn_test")
#continue training for 20 epochs using same learning rate, etc as before
model2.continue_training(x_train, y_train, 20, learnrate, batchsize, random_state=1)
#make updated predictions
m2=model2.predict(x_test)
#evaluate results
mse2=sklearn.metrics.mean_squared_error(y_test,m2)
mabs2=sklearn.metrics.mean_absolute_error(y_test,m2)
exvar2=sklearn.metrics.explained_variance_score(y_test,m2)
print('MSE',mse2)
print('M Abs Err',mabs2)
print('Expl. Var.',exvar2)
# Bayesian formulation with dropout. Recommend dropout keep
# probability ~0.95, 5-10 trees.
print("Bayesian djinn example")
ntrees=3
dropout_keep=0.95
modelname="reg_bdjinn_test"
# initialize a model
bmodel=djinn.DJINN_Regressor(ntrees,maxdepth,dropout_keep)
# "fit()" does what get_hyperparameters + train does, in one step:
bmodel.fit(x_train,y_train, display_step=1, save_files=True, file_name=modelname,
save_model=True,model_name=modelname, random_state=1)
# evaluate: niters is the number of times you evaluate the network for
# a single sample. higher niters = better resolved distribution of predictions
niters=100
bl,bm,bu,results=bmodel.bayesian_predict(x_test,n_iters=niters, random_state=1)
# bayesian_predict returns 25, 50, 75 percentile and results dict with all predictions
# evaluate performance on median predictions
mse=sklearn.metrics.mean_squared_error(y_test,bm)
mabs=sklearn.metrics.mean_absolute_error(y_test,bm)
exvar=sklearn.metrics.explained_variance_score(y_test,bm)
print('MSE',mse)
print('M Abs Err',mabs)
print('Expl. Var.',exvar)
# make a pretty plot
yerrors = np.column_stack((bm-bl, bu-bm)).reshape((2,bl.shape[0]))
g=np.linspace(np.min(y_test),np.max(y_test),10)
fig, axs = plt.subplots(1,1, figsize=(8,8), facecolor='w', edgecolor='k')
fig.subplots_adjust(hspace = .15, wspace=.1)
sc=axs.scatter(y_test, bm, linewidth=0,s=6,
alpha=0.8, c='#68d1ca')
a,b,c=axs.errorbar(y_test, bm, yerr=yerrors, marker='',ls='',zorder=0,
alpha=0.5, ecolor='black')
axs.set_xlabel("True")
axs.set_ylabel("B-DJINN Prediction")
axs.plot(g,g,color='red')
plt.show()
# collect_tree_predictions gathers predictions in results dict
# in a more intuitive way for easy plotting, etc
p=bmodel.collect_tree_predictions(results['predictions'])
| 36.416667 | 86 | 0.718007 | true | true | |
1c2fffe06ce7975caa39d9f22dfdacf3c33a8984 | 596 | py | Python | examples/chat/server/wsgi.py | maxmzd/django-eventstream | 20c5bf68263505f3a4ae4abdc5df71e9790b992c | [
"MIT"
] | null | null | null | examples/chat/server/wsgi.py | maxmzd/django-eventstream | 20c5bf68263505f3a4ae4abdc5df71e9790b992c | [
"MIT"
] | null | null | null | examples/chat/server/wsgi.py | maxmzd/django-eventstream | 20c5bf68263505f3a4ae4abdc5df71e9790b992c | [
"MIT"
] | null | null | null | """
WSGI config for server project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__)))))
import dotenv
from django.core.wsgi import get_wsgi_application
dotenv.read_dotenv(os.path.join(os.path.dirname(os.path.dirname(__file__)), '.env'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "server.settings")
application = get_wsgi_application()
| 25.913043 | 93 | 0.778523 |
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__)))))
import dotenv
from django.core.wsgi import get_wsgi_application
dotenv.read_dotenv(os.path.join(os.path.dirname(os.path.dirname(__file__)), '.env'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "server.settings")
application = get_wsgi_application()
| true | true |
1c300254f128afd98e51a0416faca1fd3b346d12 | 1,315 | py | Python | tests/integration/fixtures/__init__.py | ghga-de/datameta-client | f7900027af9d7d1eff23594de79e90e75baa123a | [
"Apache-2.0"
] | 1 | 2021-07-20T12:59:09.000Z | 2021-07-20T12:59:09.000Z | tests/integration/fixtures/__init__.py | ghga-de/datameta-client | f7900027af9d7d1eff23594de79e90e75baa123a | [
"Apache-2.0"
] | 11 | 2021-03-17T20:27:27.000Z | 2021-04-07T16:22:55.000Z | tests/integration/fixtures/__init__.py | ghga-de/datameta-client | f7900027af9d7d1eff23594de79e90e75baa123a | [
"Apache-2.0"
] | null | null | null | import os
import json
from random import choice
from string import ascii_letters, digits
from copy import deepcopy
base_dir = os.path.dirname(os.path.abspath(__file__))
# metadataset fixtures:
metadataset_record_json_path = os.path.join(
base_dir,
"test_metadataset_record.json"
)
with open(metadataset_record_json_path, "r") as json_file:
metadataset_record = json.load(json_file)
# change #ID to not fail global uniqueness constraint:
#! to be removed:
def replace_ID(mset):
mset_ = deepcopy(mset)
mset_["#ID"] = (
"".join(choice(ascii_letters).upper() for _ in range(2)) +
"".join(str(choice(digits)) for _ in range(2))
)
return mset_
from datameta_client import metadatasets
def get_fresh_metadataset_id():
mset = replace_ID(metadataset_record)
return [
metadatasets.stage(
metadata_json=mset
)["id"]["uuid"]
]
#! to be removed end
# file fixtures:
storage_path = os.getenv("DATAMETA_STORAGE_PATH")
def get_content(path:str):
with open(path, "r") as test_file:
return test_file.read()
files = [
{
"name": name,
"path": os.path.join(base_dir, name),
"content": get_content(os.path.join(base_dir, name))
}
for name in os.listdir(base_dir)
if "test_file_" in name
]
| 24.351852 | 66 | 0.674525 | import os
import json
from random import choice
from string import ascii_letters, digits
from copy import deepcopy
base_dir = os.path.dirname(os.path.abspath(__file__))
metadataset_record_json_path = os.path.join(
base_dir,
"test_metadataset_record.json"
)
with open(metadataset_record_json_path, "r") as json_file:
metadataset_record = json.load(json_file)
py(mset)
mset_["#ID"] = (
"".join(choice(ascii_letters).upper() for _ in range(2)) +
"".join(str(choice(digits)) for _ in range(2))
)
return mset_
from datameta_client import metadatasets
def get_fresh_metadataset_id():
mset = replace_ID(metadataset_record)
return [
metadatasets.stage(
metadata_json=mset
)["id"]["uuid"]
]
storage_path = os.getenv("DATAMETA_STORAGE_PATH")
def get_content(path:str):
with open(path, "r") as test_file:
return test_file.read()
files = [
{
"name": name,
"path": os.path.join(base_dir, name),
"content": get_content(os.path.join(base_dir, name))
}
for name in os.listdir(base_dir)
if "test_file_" in name
]
| true | true |
1c30030b10dc952c82faa3f440dd0221a612d20e | 90 | py | Python | py-flask/handlers.py | boscoh/rpcseed | 4b9040c57f3d273a96f62cb1db931f2105fe0945 | [
"MIT"
] | 1 | 2021-05-08T12:38:03.000Z | 2021-05-08T12:38:03.000Z | py-flask/handlers.py | boscoh/rpcseed | 4b9040c57f3d273a96f62cb1db931f2105fe0945 | [
"MIT"
] | null | null | null | py-flask/handlers.py | boscoh/rpcseed | 4b9040c57f3d273a96f62cb1db931f2105fe0945 | [
"MIT"
] | null | null | null | config = {}
def setConfig(k, v):
config[k] = v
def getConfig():
return config
| 9 | 20 | 0.577778 | config = {}
def setConfig(k, v):
config[k] = v
def getConfig():
return config
| true | true |
1c30049f639b04b25fc3ecb5c6ec0973f6e2a51a | 1,687 | py | Python | donkeycar/tests/test_tub.py | walzimmer/donkey | b73596400040ce881c0a319b0a4cc3cbd05226da | [
"MIT"
] | null | null | null | donkeycar/tests/test_tub.py | walzimmer/donkey | b73596400040ce881c0a319b0a4cc3cbd05226da | [
"MIT"
] | null | null | null | donkeycar/tests/test_tub.py | walzimmer/donkey | b73596400040ce881c0a319b0a4cc3cbd05226da | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import tempfile
import unittest
from donkeycar.parts.datastore import TubWriter, Tub
import os
import pytest
#fixtures
from .setup import tub, tub_path
def test_tub_load(tub, tub_path):
"""Tub loads from existing tub path."""
t = Tub(tub_path)
assert t is not None
def test_tub_update_df(tub):
""" Tub updats its dataframe """
tub.update_df()
assert len(tub.df) == 128
def test_tub_add_record(tub):
"""Tub can save a record and then retrieve it."""
import numpy as np
#img_arr = np.zeros((120,160))
img_arr = np.zeros((240, 320))
x=123
y=90
rec_in = {'cam/image_array': img_arr, 'user/angle': x, 'user/throttle':y}
rec_index = tub.put_record(rec_in)
rec_out = tub.get_record(rec_index)
assert rec_in.keys() == rec_out.keys()
class TestTubWriter(unittest.TestCase):
def setUp(self):
tempfolder = tempfile.TemporaryDirectory()
self.path = os.path.join(tempfolder.name, 'new')
self.inputs = ['name', 'age', 'pic']
self.types = ['str', 'float', 'str']
def test_tub_create(self):
tub = TubWriter(self.path, inputs=self.inputs, types=self.types)
def test_tub_path(self):
tub = TubWriter(self.path, inputs=self.inputs, types=self.types)
tub.run('will', 323, 'asdfasdf')
def test_make_paths_absolute(self):
tub = Tub(self.path, inputs=['file_path'], types=['image'])
rel_file_name = 'test.jpg'
record_dict = {'file_path': rel_file_name}
abs_record_dict = tub.make_record_paths_absolute(record_dict)
assert abs_record_dict['file_path'] == os.path.join(self.path, rel_file_name)
| 27.209677 | 85 | 0.656194 |
import tempfile
import unittest
from donkeycar.parts.datastore import TubWriter, Tub
import os
import pytest
from .setup import tub, tub_path
def test_tub_load(tub, tub_path):
t = Tub(tub_path)
assert t is not None
def test_tub_update_df(tub):
tub.update_df()
assert len(tub.df) == 128
def test_tub_add_record(tub):
import numpy as np
img_arr = np.zeros((240, 320))
x=123
y=90
rec_in = {'cam/image_array': img_arr, 'user/angle': x, 'user/throttle':y}
rec_index = tub.put_record(rec_in)
rec_out = tub.get_record(rec_index)
assert rec_in.keys() == rec_out.keys()
class TestTubWriter(unittest.TestCase):
def setUp(self):
tempfolder = tempfile.TemporaryDirectory()
self.path = os.path.join(tempfolder.name, 'new')
self.inputs = ['name', 'age', 'pic']
self.types = ['str', 'float', 'str']
def test_tub_create(self):
tub = TubWriter(self.path, inputs=self.inputs, types=self.types)
def test_tub_path(self):
tub = TubWriter(self.path, inputs=self.inputs, types=self.types)
tub.run('will', 323, 'asdfasdf')
def test_make_paths_absolute(self):
tub = Tub(self.path, inputs=['file_path'], types=['image'])
rel_file_name = 'test.jpg'
record_dict = {'file_path': rel_file_name}
abs_record_dict = tub.make_record_paths_absolute(record_dict)
assert abs_record_dict['file_path'] == os.path.join(self.path, rel_file_name)
| true | true |
1c3004d1094d9967cccd10a343c6ff1e02fc35ab | 401 | py | Python | main_project/wsgi.py | rohithb/oneeightyone | c626ae1a8d91ac5650a9af7b9050c3522f0cde71 | [
"MIT"
] | null | null | null | main_project/wsgi.py | rohithb/oneeightyone | c626ae1a8d91ac5650a9af7b9050c3522f0cde71 | [
"MIT"
] | null | null | null | main_project/wsgi.py | rohithb/oneeightyone | c626ae1a8d91ac5650a9af7b9050c3522f0cde71 | [
"MIT"
] | null | null | null | """
WSGI config for main_project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os

from django.core.wsgi import get_wsgi_application

# Point Django at the project settings unless the environment already names
# a settings module.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "main_project.settings")

# Module-level WSGI callable imported by servers (gunicorn, mod_wsgi, ...).
application = get_wsgi_application()
| 23.588235 | 78 | 0.790524 |
import os
from django.core.wsgi import get_wsgi_application
# Default the settings module; an existing environment value wins.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "main_project.settings")
# WSGI entry point used by application servers.
application = get_wsgi_application()
| true | true |
1c3006d83d52a83c9deb255edfa04d5bb5aa7d84 | 8,148 | py | Python | src/saml2/authn.py | HaToHo/pysaml2 | be15a1e9c5f28cddb28b095f8da720a9f029c5c7 | [
"Apache-2.0"
] | null | null | null | src/saml2/authn.py | HaToHo/pysaml2 | be15a1e9c5f28cddb28b095f8da720a9f029c5c7 | [
"Apache-2.0"
] | null | null | null | src/saml2/authn.py | HaToHo/pysaml2 | be15a1e9c5f28cddb28b095f8da720a9f029c5c7 | [
"Apache-2.0"
] | null | null | null | import logging
import six
import time
import ldap
from saml2 import SAMLError
from saml2.aes import AESCipher
from saml2.httputil import Response
from saml2.httputil import make_cookie
from saml2.httputil import Redirect
from saml2.httputil import Unauthorized
from saml2.httputil import parse_cookie
from six.moves.urllib.parse import urlencode, parse_qs, urlsplit
__author__ = 'rolandh'
logger = logging.getLogger(__name__)
class AuthnFailure(SAMLError):
    """Raised when user authentication fails."""
    pass
class EncodeError(SAMLError):
    """Raised when URL parameter encoding is given invalid input."""
    pass
class UserAuthnMethod(object):
    """Base class for user authentication methods.

    Subclasses implement __call__ (render the authentication challenge),
    authenticated_as (map a cookie to a user) and verify (check the
    submitted credentials).
    """

    def __init__(self, srv):
        self.srv = srv

    def __call__(self, *args, **kwargs):
        # The original ``raise NotImplemented`` raised the NotImplemented
        # singleton, which is a TypeError on Python 3 ("exceptions must
        # derive from BaseException"); NotImplementedError is the correct
        # exception for an abstract method.
        raise NotImplementedError()

    def authenticated_as(self, **kwargs):
        raise NotImplementedError()

    def verify(self, **kwargs):
        raise NotImplementedError()
def is_equal(a, b):
    """Constant-time comparison of two equal-length str/bytes values.

    Scans every position instead of short-circuiting on the first
    difference, so the running time does not leak where a mismatch is
    (timing-attack resistant credential check).

    :param a: str or bytes
    :param b: str or bytes
    :return: True if a and b are equal
    """
    if len(a) != len(b):
        return False

    result = 0
    for x, y in zip(a, b):
        # On Python 3 iterating bytes yields ints while iterating str
        # yields 1-character strings; the bare XOR only worked for ints.
        # Normalize so both input types are supported.
        if not isinstance(x, int):
            x, y = ord(x), ord(y)
        result |= x ^ y
    return result == 0
def url_encode_params(params=None):
    """URL-encode a dict, expanding list values into repeated keys.

    :param params: dict of parameter name -> value (or list of values)
    :return: urlencoded query string
    :raises EncodeError: if params is not a dict
    """
    if not isinstance(params, dict):
        raise EncodeError("You must pass in a dictionary!")

    pairs = []
    for key, value in params.items():
        if isinstance(value, list):
            pairs.extend((key, item) for item in value)
        else:
            pairs.append((key, value))

    return urlencode(pairs)
def create_return_url(base, query, **kwargs):
    """
    Add a query string plus extra parameters to a base URL which may contain
    a query part already.

    :param base: redirect_uri may contain a query part, no fragment allowed.
    :param query: Old query part as a string
    :param kwargs: extra query parameters
    :return: the combined URL as a string
    """
    def _merge(query_string, target):
        # Fold the parameters of query_string into target, accumulating
        # values for keys present on both sides.  This was duplicated
        # verbatim twice in the original body.
        for key, values in parse_qs(query_string).items():
            if key in target:
                if isinstance(target[key], six.string_types):
                    target[key] = [target[key]]
                target[key].extend(values)
            else:
                target[key] = values

    part = urlsplit(base)
    if part.fragment:
        raise ValueError("Base URL contained parts it shouldn't")

    _merge(query, kwargs)

    if part.query:
        # base already carries a query: merge it in as well and strip it
        # from the prefix so it is not emitted twice.
        _merge(part.query, kwargs)
        _pre = base.split("?")[0]
    else:
        _pre = base

    logger.debug("kwargs: %s" % kwargs)

    return "%s?%s" % (_pre, url_encode_params(kwargs))
class UsernamePasswordMako(UserAuthnMethod):
    """Do user authentication using the normal username password form
    using Mako as template system"""
    # Name of the cookie that carries the encrypted login token.
    cookie_name = "userpassmako"
    def __init__(self, srv, mako_template, template_lookup, pwd, return_to):
        """
        :param srv: The server instance (provides symkey, iv and seed)
        :param mako_template: Which Mako template to use
        :param template_lookup: Mako TemplateLookup used to resolve templates
        :param pwd: Username/password dictionary-like database
        :param return_to: Where to send the user after authentication
        """
        UserAuthnMethod.__init__(self, srv)
        self.mako_template = mako_template
        self.template_lookup = template_lookup
        self.passwd = pwd
        self.return_to = return_to
        # Maps encrypted login token -> issue timestamp for live sessions.
        self.active = {}
        self.query_param = "upm_answer"
        self.aes = AESCipher(self.srv.symkey, srv.iv)
    def __call__(self, cookie=None, policy_url=None, logo_url=None,
                 query="", **kwargs):
        """
        Put up the login form rendered from the Mako template.
        """
        if cookie:
            headers = [cookie]
        else:
            headers = []
        resp = Response(headers=headers)
        argv = {"login": "",
                "password": "",
                "action": "verify",
                "policy_url": policy_url,
                "logo_url": logo_url,
                "query": query}
        logger.info("do_authentication argv: %s" % argv)
        mte = self.template_lookup.get_template(self.mako_template)
        resp.message = mte.render(**argv)
        return resp
    def _verify(self, pwd, user):
        # Constant-time password comparison; raises AssertionError on a
        # wrong password and KeyError for an unknown user.
        assert is_equal(pwd, self.passwd[user])
    def verify(self, request, **kwargs):
        """
        Verifies that the given username and password was correct.

        :param request: Either the query part of a URL, a urlencoded body of
            a HTTP message, or an already-parsed dict.
        :param kwargs: Catch whatever else is sent.
        :return: Redirect (with login cookie) back to where ever the base
            application wants the user after authentication; Unauthorized on
            bad credentials.
        """
        #logger.debug("verify(%s)" % request)
        if isinstance(request, six.string_types):
            _dict = parse_qs(request)
        elif isinstance(request, dict):
            _dict = request
        else:
            raise ValueError("Wrong type of input")
        # verify username and password
        try:
            self._verify(_dict["password"][0], _dict["login"][0])
            timestamp = str(int(time.mktime(time.gmtime())))
            info = self.aes.encrypt("::".join([_dict["login"][0], timestamp]))
            self.active[info] = timestamp
            cookie = make_cookie(self.cookie_name, info, self.srv.seed)
            return_to = create_return_url(self.return_to, _dict["query"][0],
                                          **{self.query_param: "true"})
            resp = Redirect(return_to, headers=[cookie])
        except (AssertionError, KeyError):
            resp = Unauthorized("Unknown user or wrong password")
        return resp
    def authenticated_as(self, cookie=None, **kwargs):
        """Return {"uid": ...} for a valid login cookie, otherwise None."""
        if cookie is None:
            return None
        else:
            logger.debug("kwargs: %s" % kwargs)
            try:
                info, timestamp = parse_cookie(self.cookie_name,
                                               self.srv.seed, cookie)
                if self.active[info] == timestamp:
                    uid, _ts = self.aes.decrypt(info).split("::")
                    if timestamp == _ts:
                        return {"uid": uid}
            # NOTE(review): any decryption/parsing failure is treated as
            # "not authenticated" rather than propagated.
            except Exception:
                pass
        return None
    def done(self, areq):
        """Return False while the auth round-trip query parameter is set."""
        try:
            _ = areq[self.query_param]
            return False
        except KeyError:
            return True
class SocialService(UserAuthnMethod):
    """Delegate user authentication to an external social login backend."""

    def __init__(self, social):
        super(SocialService, self).__init__(None)
        self.social = social

    def __call__(self, server_env, cookie=None, sid="", query="", **kwargs):
        """Start the authentication flow at the social service."""
        return self.social.begin(server_env, cookie, sid, query)

    def callback(self, server_env, cookie=None, sid="", query="", **kwargs):
        """Complete the flow when the social service redirects back."""
        return self.social.callback(server_env, cookie, sid, query, **kwargs)
class AuthnMethodChooser(object):
    """Pick one authentication method out of the configured set."""

    def __init__(self, methods=None):
        self.methods = methods

    def __call__(self, **kwargs):
        if not self.methods:
            raise SAMLError("No authentication methods defined")
        if len(self.methods) == 1:
            return self.methods[0]
        # TODO: choosing between several methods is not implemented yet;
        # fall through and return None.
class LDAPAuthn(UsernamePasswordMako):
    """Username/password form authentication backed by an LDAP simple bind."""
    def __init__(self, srv, ldapsrv, return_to,
                 dn_pattern, mako_template, template_lookup):
        """
        :param srv: The server instance
        :param ldapsrv: Which LDAP server to use (LDAP URL)
        :param return_to: Where to send the user after authentication
        :param dn_pattern: %-pattern producing the bind DN from the username
        :param mako_template: Which Mako template to use for the login form
        :param template_lookup: Mako TemplateLookup for resolving templates
        """
        # No local password database (pwd=None): verification is delegated
        # to the LDAP server in _verify below.
        UsernamePasswordMako.__init__(self, srv, mako_template, template_lookup,
                                      None, return_to)
        self.ldap = ldap.initialize(ldapsrv)
        self.ldap.protocol_version = 3
        self.ldap.set_option(ldap.OPT_REFERRALS, 0)
        self.dn_pattern = dn_pattern
    def _verify(self, pwd, user):
        """
        Verifies the username and password against the LDAP server.

        :param pwd: The password
        :param user: The username
        :raises AssertionError: if the LDAP verification failed.
        """
        _dn = self.dn_pattern % user
        # Any LDAP failure is mapped to AssertionError so that
        # UsernamePasswordMako.verify treats it as bad credentials.
        try:
            self.ldap.simple_bind_s(_dn, pwd)
        except Exception:
            raise AssertionError()
| 30.863636 | 80 | 0.592415 | import logging
import six
import time
import ldap
from saml2 import SAMLError
from saml2.aes import AESCipher
from saml2.httputil import Response
from saml2.httputil import make_cookie
from saml2.httputil import Redirect
from saml2.httputil import Unauthorized
from saml2.httputil import parse_cookie
from six.moves.urllib.parse import urlencode, parse_qs, urlsplit
__author__ = 'rolandh'
logger = logging.getLogger(__name__)
class AuthnFailure(SAMLError):
    """Raised when user authentication fails."""
    pass
class EncodeError(SAMLError):
    """Raised when URL parameter encoding is given invalid input."""
    pass
class UserAuthnMethod(object):
    """Base class for user authentication methods.

    Subclasses implement __call__ (render the authentication challenge),
    authenticated_as (map a cookie to a user) and verify (check the
    submitted credentials).
    """

    def __init__(self, srv):
        self.srv = srv

    def __call__(self, *args, **kwargs):
        # ``raise NotImplemented`` raised the NotImplemented singleton,
        # which is a TypeError on Python 3; NotImplementedError is the
        # correct exception for an abstract method.
        raise NotImplementedError()

    def authenticated_as(self, **kwargs):
        raise NotImplementedError()

    def verify(self, **kwargs):
        raise NotImplementedError()
def is_equal(a, b):
    """Constant-time comparison of two equal-length str/bytes values.

    Scans every position instead of short-circuiting on the first
    difference, so the running time does not leak where a mismatch is.

    :param a: str or bytes
    :param b: str or bytes
    :return: True if a and b are equal
    """
    if len(a) != len(b):
        return False

    result = 0
    for x, y in zip(a, b):
        # On Python 3 iterating bytes yields ints but iterating str yields
        # 1-character strings; the bare XOR only worked for ints.
        if not isinstance(x, int):
            x, y = ord(x), ord(y)
        result |= x ^ y
    return result == 0
def url_encode_params(params=None):
    """URL-encode a dict, expanding list values into repeated keys.

    :param params: dict of parameter name -> value (or list of values)
    :return: urlencoded query string
    :raises EncodeError: if params is not a dict
    """
    if not isinstance(params, dict):
        raise EncodeError("You must pass in a dictionary!")

    pairs = []
    for name, val in params.items():
        if isinstance(val, list):
            for item in val:
                pairs.append((name, item))
        else:
            pairs.append((name, val))

    return urlencode(pairs)
def create_return_url(base, query, **kwargs):
    """Combine a base URL, an old query string and extra parameters.

    :param base: redirect_uri; may contain a query part, no fragment allowed.
    :param query: old query part as a string
    :param kwargs: extra query parameters
    :return: the combined URL as a string
    :raises ValueError: if base contains a fragment
    """
    def _merge(query_string, target):
        # Fold the parameters of query_string into target, accumulating
        # values for keys present on both sides.  This loop was duplicated
        # verbatim twice in the original body.
        for key, values in parse_qs(query_string).items():
            if key in target:
                if isinstance(target[key], six.string_types):
                    target[key] = [target[key]]
                target[key].extend(values)
            else:
                target[key] = values

    part = urlsplit(base)
    if part.fragment:
        raise ValueError("Base URL contained parts it shouldn't")

    _merge(query, kwargs)

    if part.query:
        # base already carries a query: merge it in and strip it from the
        # prefix so it is not emitted twice.
        _merge(part.query, kwargs)
        _pre = base.split("?")[0]
    else:
        _pre = base

    logger.debug("kwargs: %s" % kwargs)

    return "%s?%s" % (_pre, url_encode_params(kwargs))
class UsernamePasswordMako(UserAuthnMethod):
    """Username/password form authentication rendered with Mako templates."""
    # Name of the cookie that carries the encrypted login token.
    cookie_name = "userpassmako"
    def __init__(self, srv, mako_template, template_lookup, pwd, return_to):
        """
        :param srv: The server instance (provides symkey, iv and seed)
        :param mako_template: Which Mako template to use
        :param template_lookup: Mako TemplateLookup used to resolve templates
        :param pwd: Username/password dictionary-like database
        :param return_to: Where to send the user after authentication
        """
        UserAuthnMethod.__init__(self, srv)
        self.mako_template = mako_template
        self.template_lookup = template_lookup
        self.passwd = pwd
        self.return_to = return_to
        # Maps encrypted login token -> issue timestamp for live sessions.
        self.active = {}
        self.query_param = "upm_answer"
        self.aes = AESCipher(self.srv.symkey, srv.iv)
    def __call__(self, cookie=None, policy_url=None, logo_url=None,
                 query="", **kwargs):
        """Put up the login form rendered from the Mako template."""
        if cookie:
            headers = [cookie]
        else:
            headers = []
        resp = Response(headers=headers)
        argv = {"login": "",
                "password": "",
                "action": "verify",
                "policy_url": policy_url,
                "logo_url": logo_url,
                "query": query}
        logger.info("do_authentication argv: %s" % argv)
        mte = self.template_lookup.get_template(self.mako_template)
        resp.message = mte.render(**argv)
        return resp
    def _verify(self, pwd, user):
        # Constant-time password comparison; raises AssertionError on a
        # wrong password and KeyError for an unknown user.
        assert is_equal(pwd, self.passwd[user])
    def verify(self, request, **kwargs):
        """Verify the submitted username/password.

        :param request: query string, urlencoded body, or parsed dict
        :return: Redirect (with login cookie) on success, Unauthorized on
            bad credentials
        """
        #logger.debug("verify(%s)" % request)
        if isinstance(request, six.string_types):
            _dict = parse_qs(request)
        elif isinstance(request, dict):
            _dict = request
        else:
            raise ValueError("Wrong type of input")
        # verify username and password
        try:
            self._verify(_dict["password"][0], _dict["login"][0])
            timestamp = str(int(time.mktime(time.gmtime())))
            info = self.aes.encrypt("::".join([_dict["login"][0], timestamp]))
            self.active[info] = timestamp
            cookie = make_cookie(self.cookie_name, info, self.srv.seed)
            return_to = create_return_url(self.return_to, _dict["query"][0],
                                          **{self.query_param: "true"})
            resp = Redirect(return_to, headers=[cookie])
        except (AssertionError, KeyError):
            resp = Unauthorized("Unknown user or wrong password")
        return resp
    def authenticated_as(self, cookie=None, **kwargs):
        """Return {"uid": ...} for a valid login cookie, otherwise None."""
        if cookie is None:
            return None
        else:
            logger.debug("kwargs: %s" % kwargs)
            try:
                info, timestamp = parse_cookie(self.cookie_name,
                                               self.srv.seed, cookie)
                if self.active[info] == timestamp:
                    uid, _ts = self.aes.decrypt(info).split("::")
                    if timestamp == _ts:
                        return {"uid": uid}
            # NOTE(review): any decryption/parsing failure is treated as
            # "not authenticated" rather than propagated.
            except Exception:
                pass
        return None
    def done(self, areq):
        """Return False while the auth round-trip query parameter is set."""
        try:
            _ = areq[self.query_param]
            return False
        except KeyError:
            return True
class SocialService(UserAuthnMethod):
    """Delegate user authentication to an external social login backend."""

    def __init__(self, social):
        super(SocialService, self).__init__(None)
        self.social = social

    def __call__(self, server_env, cookie=None, sid="", query="", **kwargs):
        """Start the authentication flow at the social service."""
        return self.social.begin(server_env, cookie, sid, query)

    def callback(self, server_env, cookie=None, sid="", query="", **kwargs):
        """Complete the flow when the social service redirects back."""
        return self.social.callback(server_env, cookie, sid, query, **kwargs)
class AuthnMethodChooser(object):
    """Pick one authentication method out of the configured set."""

    def __init__(self, methods=None):
        self.methods = methods

    def __call__(self, **kwargs):
        if not self.methods:
            raise SAMLError("No authentication methods defined")
        if len(self.methods) == 1:
            return self.methods[0]
        # TODO: choosing between several methods is not implemented yet;
        # fall through and return None.
class LDAPAuthn(UsernamePasswordMako):
    """Username/password form authentication backed by an LDAP simple bind."""
    def __init__(self, srv, ldapsrv, return_to,
                 dn_pattern, mako_template, template_lookup):
        """
        :param srv: The server instance
        :param ldapsrv: Which LDAP server to use (LDAP URL)
        :param return_to: Where to send the user after authentication
        :param dn_pattern: %-pattern producing the bind DN from the username
        :param mako_template: Which Mako template to use for the login form
        :param template_lookup: Mako TemplateLookup for resolving templates
        """
        # No local password database (pwd=None): verification is delegated
        # to the LDAP server in _verify below.
        UsernamePasswordMako.__init__(self, srv, mako_template, template_lookup,
                                      None, return_to)
        self.ldap = ldap.initialize(ldapsrv)
        self.ldap.protocol_version = 3
        self.ldap.set_option(ldap.OPT_REFERRALS, 0)
        self.dn_pattern = dn_pattern
    def _verify(self, pwd, user):
        """Verify the username/password against the LDAP server.

        :raises AssertionError: if the LDAP bind failed
        """
        _dn = self.dn_pattern % user
        # Any LDAP failure is mapped to AssertionError so that
        # UsernamePasswordMako.verify treats it as bad credentials.
        try:
            self.ldap.simple_bind_s(_dn, pwd)
        except Exception:
            raise AssertionError()
| true | true |
1c3006de92a94f2615760f2a4b6c4585a5947d34 | 54,344 | py | Python | cinder/volume/storwize_svc.py | alexpilotti/cinder | df2f070604dad61738ccd3113016f76f2af20cae | [
"Apache-2.0"
] | null | null | null | cinder/volume/storwize_svc.py | alexpilotti/cinder | df2f070604dad61738ccd3113016f76f2af20cae | [
"Apache-2.0"
] | null | null | null | cinder/volume/storwize_svc.py | alexpilotti/cinder | df2f070604dad61738ccd3113016f76f2af20cae | [
"Apache-2.0"
] | null | null | null | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 IBM, Inc.
# Copyright (c) 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Authors:
# Ronen Kat <ronenkat@il.ibm.com>
# Avishay Traeger <avishay@il.ibm.com>
"""
Volume driver for IBM Storwize V7000 and SVC storage systems.
Notes:
1. If you specify both a password and a key file, this driver will use the
key file only.
2. When using a key file for authentication, it is up to the user or
system administrator to store the private key in a safe manner.
3. The defaults for creating volumes are "-rsize 2% -autoexpand
-grainsize 256 -warning 0". These can be changed in the configuration
file or by using volume types(recommended only for advanced users).
Limitations:
1. The driver was not tested with SVC or clustered configurations of Storwize
V7000.
2. The driver expects CLI output in English, error messages may be in a
localized format.
"""
import random
import re
import string
import time
from cinder import exception
from cinder import flags
from cinder.openstack.common import cfg
from cinder.openstack.common import excutils
from cinder.openstack.common import log as logging
from cinder.volume import san
LOG = logging.getLogger(__name__)

# Driver configuration options.  The defaults implement the recommended
# "-rsize 2% -autoexpand -grainsize 256 -warning 0" volume creation
# parameters described in the module docstring.
storwize_svc_opts = [
    cfg.StrOpt('storwize_svc_volpool_name',
               default='volpool',
               help='Storage system storage pool for volumes'),
    cfg.StrOpt('storwize_svc_vol_rsize',
               default='2%',
               help='Storage system space-efficiency parameter for volumes'),
    cfg.StrOpt('storwize_svc_vol_warning',
               default='0',
               help='Storage system threshold for volume capacity warnings'),
    cfg.BoolOpt('storwize_svc_vol_autoexpand',
                default=True,
                help='Storage system autoexpand parameter for volumes '
                     '(True/False)'),
    cfg.StrOpt('storwize_svc_vol_grainsize',
               default='256',
               help='Storage system grain size parameter for volumes '
                    '(32/64/128/256)'),
    cfg.BoolOpt('storwize_svc_vol_compression',
                default=False,
                help='Storage system compression option for volumes'),
    cfg.BoolOpt('storwize_svc_vol_easytier',
                default=True,
                help='Enable Easy Tier for volumes'),
    cfg.StrOpt('storwize_svc_flashcopy_timeout',
               default='120',
               help='Maximum number of seconds to wait for FlashCopy to be '
                    'prepared. Maximum value is 600 seconds (10 minutes).'),
]

# Register the options on the global flag object so they are validated in
# _check_flags and read throughout the driver.
FLAGS = flags.FLAGS
FLAGS.register_opts(storwize_svc_opts)
class StorwizeSVCDriver(san.SanISCSIDriver):
"""IBM Storwize V7000 and SVC iSCSI volume driver."""
    def __init__(self, *args, **kwargs):
        super(StorwizeSVCDriver, self).__init__(*args, **kwargs)
        # Per-IP-version iSCSI node configuration; filled in by
        # check_for_setup_error().
        self.iscsi_ipv4_conf = None
        self.iscsi_ipv6_conf = None

        # Build cleanup translation tables for host names to follow valid
        # host names for Storwize V7000 and SVC storage systems: every
        # ASCII character outside [alnum, space, '.', '-', '_'] maps to '-'.
        invalid_ch_in_host = ''
        for num in range(0, 128):
            ch = chr(num)
            if ((not ch.isalnum()) and (ch != ' ') and (ch != '.')
                and (ch != '-') and (ch != '_')):
                invalid_ch_in_host = invalid_ch_in_host + ch
        # Python 2 byte-string translation table (string.maketrans).
        self._string_host_name_filter = string.maketrans(invalid_ch_in_host,
                                                '-' * len(invalid_ch_in_host))

        # Equivalent mapping for unicode host names (dict for str.translate).
        self._unicode_host_name_filter = dict((ord(unicode(char)), u'-')
                                              for char in invalid_ch_in_host)
def _get_hdr_dic(self, header, row, delim):
"""Return CLI row data as a dictionary indexed by names from header.
Create a dictionary object from the data row string using the header
string. The strings are converted to columns using the delimiter in
delim.
"""
attributes = header.split(delim)
values = row.split(delim)
self._driver_assert(len(values) == len(attributes),
_('_get_hdr_dic: attribute headers and values do not match.\n '
'Headers: %(header)s\n Values: %(row)s')
% {'header': str(header),
'row': str(row)})
dic = {}
for attribute, value in map(None, attributes, values):
dic[attribute] = value
return dic
def _driver_assert(self, assert_condition, exception_message):
"""Internal assertion mechanism for CLI output."""
if not assert_condition:
LOG.error(exception_message)
raise exception.VolumeBackendAPIException(data=exception_message)
    def check_for_setup_error(self):
        """Check that we have all configuration details from the storage.

        Runs three CLI queries against the array and fails fast if any of
        them disagrees with the driver configuration:
          1. lsmdiskgrp     - the configured storage pool must exist
          2. svcinfo lsnode - collect each node's iSCSI name
          3. lsportip       - collect each node's iSCSI IPv4/IPv6 addresses
        On success, populates self.iscsi_ipv4_conf / self.iscsi_ipv6_conf.

        :raises InvalidInput: if the configured pool does not exist
        :raises VolumeBackendAPIException: on unexpected CLI output
        """
        LOG.debug(_('enter: check_for_setup_error'))

        # Validate that the pool exists
        ssh_cmd = 'lsmdiskgrp -delim ! -nohdr'
        out, err = self._run_ssh(ssh_cmd)
        self._driver_assert(len(out) > 0,
            _('check_for_setup_error: failed with unexpected CLI output.\n '
              'Command: %(cmd)s\n stdout: %(out)s\n stderr: %(err)s')
            % {'cmd': ssh_cmd,
               'out': str(out),
               'err': str(err)})
        search_text = '!%s!' % FLAGS.storwize_svc_volpool_name
        if search_text not in out:
            raise exception.InvalidInput(
                reason=(_('pool %s doesn\'t exist')
                        % FLAGS.storwize_svc_volpool_name))

        storage_nodes = {}
        # Get the iSCSI names of the Storwize/SVC nodes
        ssh_cmd = 'svcinfo lsnode -delim !'
        out, err = self._run_ssh(ssh_cmd)
        self._driver_assert(len(out) > 0,
            _('check_for_setup_error: failed with unexpected CLI output.\n '
              'Command: %(cmd)s\n stdout: %(out)s\n stderr: %(err)s')
            % {'cmd': ssh_cmd,
               'out': str(out),
               'err': str(err)})

        nodes = out.strip().split('\n')
        self._driver_assert(len(nodes) > 0,
            _('check_for_setup_error: failed with unexpected CLI output.\n '
              'Command: %(cmd)s\n stdout: %(out)s\n stderr: %(err)s')
            % {'cmd': ssh_cmd,
               'out': str(out),
               'err': str(err)})
        header = nodes.pop(0)
        for node_line in nodes:
            try:
                node_data = self._get_hdr_dic(header, node_line, '!')
            except exception.VolumeBackendAPIException as e:
                with excutils.save_and_reraise_exception():
                    LOG.error(_('check_for_setup_error: '
                                'failed with unexpected CLI output.\n '
                                'Command: %(cmd)s\n '
                                'stdout: %(out)s\n stderr: %(err)s\n')
                              % {'cmd': ssh_cmd,
                                 'out': str(out),
                                 'err': str(err)})
            node = {}
            # Nodes without an iSCSI name are skipped (not stored).
            try:
                node['id'] = node_data['id']
                node['name'] = node_data['name']
                node['iscsi_name'] = node_data['iscsi_name']
                node['status'] = node_data['status']
                node['ipv4'] = []
                node['ipv6'] = []
                if node['iscsi_name'] != '':
                    storage_nodes[node['id']] = node
            except KeyError as e:
                LOG.error(_('Did not find expected column name in '
                            'svcinfo lsnode: %s') % str(e))
                exception_message = (
                    _('check_for_setup_error: Unexpected CLI output.\n '
                      'Details: %(msg)s\n'
                      'Command: %(cmd)s\n '
                      'stdout: %(out)s\n stderr: %(err)s')
                    % {'msg': str(e),
                       'cmd': ssh_cmd,
                       'out': str(out),
                       'err': str(err)})
                raise exception.VolumeBackendAPIException(
                        data=exception_message)

        # Get the iSCSI IP addresses of the Storwize/SVC nodes
        ssh_cmd = 'lsportip -delim !'
        out, err = self._run_ssh(ssh_cmd)
        self._driver_assert(len(out) > 0,
            _('check_for_setup_error: failed with unexpected CLI output.\n '
              'Command: %(cmd)s\n '
              'stdout: %(out)s\n stderr: %(err)s')
            % {'cmd': ssh_cmd,
               'out': str(out),
               'err': str(err)})

        portips = out.strip().split('\n')
        self._driver_assert(len(portips) > 0,
            _('check_for_setup_error: failed with unexpected CLI output.\n '
              'Command: %(cmd)s\n stdout: %(out)s\n stderr: %(err)s')
            % {'cmd': ssh_cmd,
               'out': str(out),
               'err': str(err)})
        header = portips.pop(0)
        for portip_line in portips:
            try:
                port_data = self._get_hdr_dic(header, portip_line, '!')
            except exception.VolumeBackendAPIException as e:
                with excutils.save_and_reraise_exception():
                    LOG.error(_('check_for_setup_error: '
                                'failed with unexpected CLI output.\n '
                                'Command: %(cmd)s\n '
                                'stdout: %(out)s\n stderr: %(err)s\n')
                              % {'cmd': ssh_cmd,
                                 'out': str(out),
                                 'err': str(err)})
            try:
                port_node_id = port_data['node_id']
                port_ipv4 = port_data['IP_address']
                port_ipv6 = port_data['IP_address_6']
            except KeyError as e:
                LOG.error(_('Did not find expected column name in '
                            'lsportip: %s') % str(e))
                exception_message = (
                    _('check_for_setup_error: Unexpected CLI output.\n '
                      'Details: %(msg)s\n'
                      'Command: %(cmd)s\n '
                      'stdout: %(out)s\n stderr: %(err)s')
                    % {'msg': str(e),
                       'cmd': ssh_cmd,
                       'out': str(out),
                       'err': str(err)})
                raise exception.VolumeBackendAPIException(
                        data=exception_message)

            # Attach the port addresses to the node collected above.
            if port_node_id in storage_nodes:
                node = storage_nodes[port_node_id]
                if len(port_ipv4) > 0:
                    node['ipv4'].append(port_ipv4)
                if len(port_ipv6) > 0:
                    node['ipv6'].append(port_ipv6)
            else:
                raise exception.VolumeBackendAPIException(
                    data=_('check_for_setup_error: '
                           'fail to storage configuration: unknown '
                           'storage node %(node_id)s from CLI output.\n '
                           'stdout: %(out)s\n stderr: %(err)s\n')
                    % {'node_id': port_node_id,
                       'out': str(out),
                       'err': str(err)})

        iscsi_ipv4_conf = []
        iscsi_ipv6_conf = []
        for node_key in storage_nodes:
            node = storage_nodes[node_key]
            if 'ipv4' in node and len(node['iscsi_name']) > 0:
                iscsi_ipv4_conf.append({'iscsi_name': node['iscsi_name'],
                                        'ip': node['ipv4'],
                                        'node_id': node['id']})
            if 'ipv6' in node and len(node['iscsi_name']) > 0:
                iscsi_ipv6_conf.append({'iscsi_name': node['iscsi_name'],
                                        'ip': node['ipv6'],
                                        'node_id': node['id']})
            if (len(node['ipv4']) == 0) and (len(node['ipv6']) == 0):
                raise exception.VolumeBackendAPIException(
                    data=_('check_for_setup_error: '
                           'fail to storage configuration: storage '
                           'node %s has no IP addresses configured')
                    % node['id'])

        # Make sure we have at least one IPv4 address with a iSCSI name
        # TODO(ronenkat) need to expand this to support IPv6
        self._driver_assert(len(iscsi_ipv4_conf) > 0,
            _('could not obtain IP address and iSCSI name from the storage. '
              'Please verify that the storage is configured for iSCSI.\n '
              'Storage nodes: %(nodes)s\n portips: %(portips)s')
            % {'nodes': nodes, 'portips': portips})

        self.iscsi_ipv4_conf = iscsi_ipv4_conf
        self.iscsi_ipv6_conf = iscsi_ipv6_conf

        LOG.debug(_('leave: check_for_setup_error'))
def _check_num_perc(self, value):
"""Return True if value is either a number or a percentage."""
if value.endswith('%'):
value = value[0:-1]
return value.isdigit()
def _check_flags(self):
"""Ensure that the flags are set properly."""
required_flags = ['san_ip', 'san_ssh_port', 'san_login',
'storwize_svc_volpool_name']
for flag in required_flags:
if not getattr(FLAGS, flag, None):
raise exception.InvalidInput(
reason=_('%s is not set') % flag)
# Ensure that either password or keyfile were set
if not (FLAGS.san_password or FLAGS.san_private_key):
raise exception.InvalidInput(
reason=_('Password or SSH private key is required for '
'authentication: set either san_password or '
'san_private_key option'))
# Check that rsize is a number or percentage
rsize = FLAGS.storwize_svc_vol_rsize
if not self._check_num_perc(rsize) and (rsize != '-1'):
raise exception.InvalidInput(
reason=_('Illegal value specified for storwize_svc_vol_rsize: '
'set to either a number or a percentage'))
# Check that warning is a number or percentage
warning = FLAGS.storwize_svc_vol_warning
if not self._check_num_perc(warning):
raise exception.InvalidInput(
reason=_('Illegal value specified for '
'storwize_svc_vol_warning: '
'set to either a number or a percentage'))
# Check that grainsize is 32/64/128/256
grainsize = FLAGS.storwize_svc_vol_grainsize
if grainsize not in ['32', '64', '128', '256']:
raise exception.InvalidInput(
reason=_('Illegal value specified for '
'storwize_svc_vol_grainsize: set to either '
'\'32\', \'64\', \'128\', or \'256\''))
# Check that flashcopy_timeout is numeric and 32/64/128/256
flashcopy_timeout = FLAGS.storwize_svc_flashcopy_timeout
if not (flashcopy_timeout.isdigit() and int(flashcopy_timeout) > 0 and
int(flashcopy_timeout) <= 600):
raise exception.InvalidInput(
reason=_('Illegal value %s specified for '
'storwize_svc_flashcopy_timeout: '
'valid values are between 0 and 600')
% flashcopy_timeout)
# Check that rsize is set
volume_compression = FLAGS.storwize_svc_vol_compression
if ((volume_compression == True) and
(FLAGS.storwize_svc_vol_rsize == '-1')):
raise exception.InvalidInput(
reason=_('If compression is set to True, rsize must '
'also be set (not equal to -1)'))
    def do_setup(self, context):
        """Validate the flags.

        :param context: request context (unused here)
        """
        LOG.debug(_('enter: do_setup'))
        self._check_flags()
        LOG.debug(_('leave: do_setup'))
    def create_volume(self, volume):
        """Create a new volume - uses the internal method.

        :param volume: volume model with at least 'name' and 'size' (GB)
        """
        return self._create_volume(volume, units='gb')
def _create_volume(self, volume, units='gb'):
"""Create a new volume."""
name = volume['name']
model_update = None
LOG.debug(_('enter: create_volume: volume %s ') % name)
size = int(volume['size'])
if FLAGS.storwize_svc_vol_autoexpand == True:
autoex = '-autoexpand'
else:
autoex = ''
if FLAGS.storwize_svc_vol_easytier == True:
easytier = '-easytier on'
else:
easytier = '-easytier off'
# Set space-efficient options
if FLAGS.storwize_svc_vol_rsize.strip() == '-1':
ssh_cmd_se_opt = ''
else:
ssh_cmd_se_opt = ('-rsize %(rsize)s %(autoex)s -warning %(warn)s' %
{'rsize': FLAGS.storwize_svc_vol_rsize,
'autoex': autoex,
'warn': FLAGS.storwize_svc_vol_warning})
if FLAGS.storwize_svc_vol_compression:
ssh_cmd_se_opt = ssh_cmd_se_opt + ' -compressed'
else:
ssh_cmd_se_opt = ssh_cmd_se_opt + (' -grainsize %(grain)s' %
{'grain': FLAGS.storwize_svc_vol_grainsize})
ssh_cmd = ('mkvdisk -name %(name)s -mdiskgrp %(mdiskgrp)s '
'-iogrp 0 -size %(size)s -unit '
'%(unit)s %(easytier)s %(ssh_cmd_se_opt)s'
% {'name': name,
'mdiskgrp': FLAGS.storwize_svc_volpool_name,
'size': size, 'unit': units, 'easytier': easytier,
'ssh_cmd_se_opt': ssh_cmd_se_opt})
out, err = self._run_ssh(ssh_cmd)
self._driver_assert(len(out.strip()) > 0,
_('create volume %(name)s - did not find '
'success message in CLI output.\n '
'stdout: %(out)s\n stderr: %(err)s')
% {'name': name, 'out': str(out), 'err': str(err)})
# Ensure that the output is as expected
match_obj = re.search('Virtual Disk, id \[([0-9]+)\], '
'successfully created', out)
# Make sure we got a "successfully created" message with vdisk id
self._driver_assert(match_obj is not None,
_('create volume %(name)s - did not find '
'success message in CLI output.\n '
'stdout: %(out)s\n stderr: %(err)s')
% {'name': name, 'out': str(out), 'err': str(err)})
LOG.debug(_('leave: create_volume: volume %(name)s ') % {'name': name})
    def delete_volume(self, volume):
        # Public entry point: delegate with force disabled.
        self._delete_volume(volume, False)
def _delete_volume(self, volume, force_opt):
"""Driver entry point for destroying existing volumes."""
name = volume['name']
LOG.debug(_('enter: delete_volume: volume %(name)s ') % {'name': name})
if force_opt:
force_flag = '-force'
else:
force_flag = ''
volume_defined = self._is_volume_defined(name)
# Try to delete volume only if found on the storage
if volume_defined:
out, err = self._run_ssh('rmvdisk %(force)s %(name)s'
% {'force': force_flag,
'name': name})
# No output should be returned from rmvdisk
self._driver_assert(len(out.strip()) == 0,
_('delete volume %(name)s - non empty output from CLI.\n '
'stdout: %(out)s\n stderr: %(err)s')
% {'name': name,
'out': str(out),
'err': str(err)})
else:
# Log that volume does not exist
LOG.info(_('warning: tried to delete volume %(name)s but '
'it does not exist.') % {'name': name})
LOG.debug(_('leave: delete_volume: volume %(name)s ') % {'name': name})
    def ensure_export(self, context, volume):
        """Check that the volume exists on the storage.

        The system does not "export" volumes as a Linux iSCSI target does,
        and therefore we just check that the volume exists on the storage.
        """
        volume_defined = self._is_volume_defined(volume['name'])
        if not volume_defined:
            # Only logs; a missing volume is reported but ensure_export
            # itself never raises.
            LOG.error(_('ensure_export: volume %s not found on storage')
                      % volume['name'])
def create_export(self, context, volume):
model_update = None
return model_update
    def remove_export(self, context, volume):
        # Nothing to undo: create_export does not allocate anything.
        pass
    def initialize_connection(self, volume, connector):
        """Perform the necessary work so that an iSCSI connection can be made.

        To be able to create an iSCSI connection from a given iSCSI name to a
        volume, we must:
        1. Translate the given iSCSI name to a host name
        2. Create new host on the storage system if it does not yet exist
        3. Map the volume to the host if it is not already done
        4. Return iSCSI properties, including the IP address of the preferred
           node for this volume and the LUN number.

        :returns: dict with 'driver_volume_type' and iSCSI 'data' properties
        """
        LOG.debug(_('enter: initialize_connection: volume %(vol)s with '
                    'connector %(conn)s') % {'vol': str(volume),
                    'conn': str(connector)})

        initiator_name = connector['initiator']
        volume_name = volume['name']

        host_name = self._get_host_from_iscsiname(initiator_name)
        # Check if a host is defined for the iSCSI initiator name
        if host_name is None:
            # Host does not exist - add a new host to Storwize/SVC
            host_name = self._create_new_host('host%s' % initiator_name,
                                                initiator_name)
            # Verify that create_new_host succeeded
            self._driver_assert(host_name is not None,
                _('_create_new_host failed to return the host name.'))

        lun_id = self._map_vol_to_host(volume_name, host_name)

        # Get preferred path
        # Only IPv4 for now because lack of OpenStack support
        # TODO(ronenkat): Add support for IPv6
        volume_attributes = self._get_volume_attributes(volume_name)
        if (volume_attributes is not None and
                'preferred_node_id' in volume_attributes):
            preferred_node = volume_attributes['preferred_node_id']
            preferred_node_entry = None
            for node in self.iscsi_ipv4_conf:
                if node['node_id'] == preferred_node:
                    preferred_node_entry = node
                    break
            # Fall back to the first configured node if the preferred one
            # was not found in the iSCSI configuration.
            if preferred_node_entry is None:
                preferred_node_entry = self.iscsi_ipv4_conf[0]
                LOG.error(_('initialize_connection: did not find preferred '
                            'node %(node)s for volume %(vol)s in iSCSI '
                            'configuration') % {'node': preferred_node,
                            'vol': volume_name})
        else:
            # Get 1st node
            preferred_node_entry = self.iscsi_ipv4_conf[0]
            LOG.error(
                _('initialize_connection: did not find a preferred node '
                  'for volume %s in iSCSI configuration') % volume_name)

        properties = {}
        # We didn't use iSCSI discover, as in server-based iSCSI
        properties['target_discovered'] = False
        # We take the first IP address for now. Ideally, OpenStack will
        # support multipath for improved performance.
        properties['target_portal'] = ('%s:%s' %
                (preferred_node_entry['ip'][0], '3260'))
        properties['target_iqn'] = preferred_node_entry['iscsi_name']
        properties['target_lun'] = lun_id
        properties['volume_id'] = volume['id']

        LOG.debug(_('leave: initialize_connection:\n volume: %(vol)s\n '
                    'connector %(conn)s\n properties: %(prop)s')
                  % {'vol': str(volume),
                     'conn': str(connector),
                     'prop': str(properties)})

        return {'driver_volume_type': 'iscsi', 'data': properties, }
    def terminate_connection(self, volume, connector):
        """Cleanup after an iSCSI connection has been terminated.
        When we clean up a terminated connection between a given iSCSI name
        and volume, we:
        1. Translate the given iSCSI name to a host name
        2. Remove the volume-to-host mapping if it exists
        3. Delete the host if it has no more mappings (hosts are created
        automatically by this driver when mappings are created)

        :param volume: volume dict; only 'name' is read here
        :param connector: connector dict; only 'initiator' (iSCSI IQN) is read
        """
        LOG.debug(_('enter: terminate_connection: volume %(vol)s with '
                    'connector %(conn)s') % {'vol': str(volume),
                                             'conn': str(connector)})
        vol_name = volume['name']
        initiator_name = connector['initiator']
        host_name = self._get_host_from_iscsiname(initiator_name)
        # Verify that _get_host_from_iscsiname returned the host.
        # This should always succeed as we terminate an existing connection.
        self._driver_assert(host_name is not None,
            _('_get_host_from_iscsiname failed to return the host name '
              'for iscsi name %s') % initiator_name)
        # Check if vdisk-host mapping exists, remove if it does
        mapping_data = self._get_hostvdisk_mappings(host_name)
        if vol_name in mapping_data:
            out, err = self._run_ssh('rmvdiskhostmap -host %s %s'
                                     % (host_name, vol_name))
            # Verify CLI behaviour - no output is returned from
            # rmvdiskhostmap
            self._driver_assert(len(out.strip()) == 0,
                _('delete mapping of volume %(vol)s to host %(host)s '
                  '- non empty output from CLI.\n '
                  'stdout: %(out)s\n stderr: %(err)s')
                % {'vol': vol_name,
                   'host': host_name,
                   'out': str(out),
                   'err': str(err)})
            # Keep the local copy in sync so the emptiness check below is
            # accurate without re-querying the storage.
            del mapping_data[vol_name]
        else:
            # Unexpected but non-fatal: connection teardown proceeds anyway.
            LOG.error(_('terminate_connection: no mapping of volume '
                        '%(vol)s to host %(host)s found') %
                      {'vol': vol_name, 'host': host_name})
        # If this host has no more mappings, delete it
        if not mapping_data:
            self._delete_host(host_name)
        LOG.debug(_('leave: terminate_connection: volume %(vol)s with '
                    'connector %(conn)s') % {'vol': str(volume),
                                             'conn': str(connector)})
def _flashcopy_cleanup(self, fc_map_id, source, target):
"""Clean up a failed FlashCopy operation."""
try:
out, err = self._run_ssh('stopfcmap -force %s' % fc_map_id)
out, err = self._run_ssh('rmfcmap -force %s' % fc_map_id)
except exception.ProcessExecutionError as e:
LOG.error(_('_run_flashcopy: fail to cleanup failed FlashCopy '
'mapping %(fc_map_id)% '
'from %(source)s to %(target)s.\n'
'stdout: %(out)s\n stderr: %(err)s')
% {'fc_map_id': fc_map_id,
'source': source,
'target': target,
'out': e.stdout,
'err': e.stderr})
    def _run_flashcopy(self, source, target):
        """Create a FlashCopy mapping from the source to the target.

        Creates, prepares, polls until 'prepared', then starts the mapping.
        On any failure the partially-created mapping is cleaned up via
        _flashcopy_cleanup before the exception propagates.

        :param source: source vdisk name
        :param target: target vdisk name (must already exist)
        :raises: VolumeBackendAPIException on CLI/assert failures,
                 InvalidSnapshot if the mapping never becomes 'prepared'
                 within storwize_svc_flashcopy_timeout seconds.
        """
        LOG.debug(
            _('enter: _run_flashcopy: execute FlashCopy from source '
              '%(source)s to target %(target)s') % {'source': source,
                                                    'target': target})
        # -autodelete removes the mapping once the copy completes;
        # -cleanrate 0 defers background cleaning.
        fc_map_cli_cmd = ('mkfcmap -source %s -target %s -autodelete '
                          '-cleanrate 0' % (source, target))
        out, err = self._run_ssh(fc_map_cli_cmd)
        self._driver_assert(len(out.strip()) > 0,
            _('create FC mapping from %(source)s to %(target)s - '
              'did not find success message in CLI output.\n'
              ' stdout: %(out)s\n stderr: %(err)s\n')
            % {'source': source,
               'target': target,
               'out': str(out),
               'err': str(err)})
        # Ensure that the output is as expected
        match_obj = re.search('FlashCopy Mapping, id \[([0-9]+)\], '
                              'successfully created', out)
        # Make sure we got a "successfully created" message with vdisk id
        self._driver_assert(match_obj is not None,
            _('create FC mapping from %(source)s to %(target)s - '
              'did not find success message in CLI output.\n'
              ' stdout: %(out)s\n stderr: %(err)s\n')
            % {'source': source,
               'target': target,
               'out': str(out),
               'err': str(err)})
        try:
            fc_map_id = match_obj.group(1)
            self._driver_assert(fc_map_id is not None,
                _('create FC mapping from %(source)s to %(target)s - '
                  'did not find mapping id in CLI output.\n'
                  ' stdout: %(out)s\n stderr: %(err)s\n')
                % {'source': source,
                   'target': target,
                   'out': str(out),
                   'err': str(err)})
        except IndexError:
            # group(1) missing entirely - report via the same assert path.
            self._driver_assert(False,
                _('create FC mapping from %(source)s to %(target)s - '
                  'did not find mapping id in CLI output.\n'
                  ' stdout: %(out)s\n stderr: %(err)s\n')
                % {'source': source,
                   'target': target,
                   'out': str(out),
                   'err': str(err)})
        try:
            out, err = self._run_ssh('prestartfcmap %s' % fc_map_id)
        except exception.ProcessExecutionError as e:
            with excutils.save_and_reraise_exception():
                LOG.error(_('_run_flashcopy: fail to prepare FlashCopy '
                            'from %(source)s to %(target)s.\n'
                            'stdout: %(out)s\n stderr: %(err)s')
                          % {'source': source,
                             'target': target,
                             'out': e.stdout,
                             'err': e.stderr})
                self._flashcopy_cleanup(fc_map_id, source, target)
        mapping_ready = False
        wait_time = 5
        # Allow waiting of up to timeout (set as parameter)
        # NOTE(review): Python 2 integer division; retries = timeout // 5.
        max_retries = (int(FLAGS.storwize_svc_flashcopy_timeout)
                       / wait_time) + 1
        for try_number in range(1, max_retries):
            mapping_attributes = self._get_flashcopy_mapping_attributes(
                fc_map_id)
            # Mapping disappeared or is malformed: fall through to the
            # not-ready error handling below.
            if (mapping_attributes is None or
                    'status' not in mapping_attributes):
                break
            if mapping_attributes['status'] == 'prepared':
                mapping_ready = True
                break
            elif mapping_attributes['status'] != 'preparing':
                # Unexpected mapping status
                exception_msg = (_('unexecpted mapping status %(status)s '
                                   'for mapping %(id)s. Attributes: '
                                   '%(attr)s')
                                 % {'status': mapping_attributes['status'],
                                    'id': fc_map_id,
                                    'attr': mapping_attributes})
                raise exception.VolumeBackendAPIException(
                    data=exception_msg)
            # Need to wait for mapping to be prepared, wait a few seconds
            time.sleep(wait_time)
        if not mapping_ready:
            exception_msg = (_('mapping %(id)s prepare failed to complete '
                               'within the alloted %(to)s seconds timeout. '
                               'Terminating') % {'id': fc_map_id,
                                                 'to': FLAGS.storwize_svc_flashcopy_timeout})
            LOG.error(_('_run_flashcopy: fail to start FlashCopy '
                        'from %(source)s to %(target)s with '
                        'exception %(ex)s')
                      % {'source': source,
                         'target': target,
                         'ex': exception_msg})
            self._flashcopy_cleanup(fc_map_id, source, target)
            raise exception.InvalidSnapshot(
                reason=_('_run_flashcopy: %s') % exception_msg)
        try:
            out, err = self._run_ssh('startfcmap %s' % fc_map_id)
        except exception.ProcessExecutionError as e:
            with excutils.save_and_reraise_exception():
                LOG.error(_('_run_flashcopy: fail to start FlashCopy '
                            'from %(source)s to %(target)s.\n'
                            'stdout: %(out)s\n stderr: %(err)s')
                          % {'source': source,
                             'target': target,
                             'out': e.stdout,
                             'err': e.stderr})
                self._flashcopy_cleanup(fc_map_id, source, target)
        LOG.debug(_('leave: _run_flashcopy: FlashCopy started from '
                    '%(source)s to %(target)s') % {'source': source,
                                                   'target': target})
    def create_volume_from_snapshot(self, volume, snapshot):
        """Create a new volume from a snapshot, using FlashCopy.

        The target volume must not exist yet; it is created with the
        snapshot's exact capacity in bytes, then populated by a FlashCopy
        from the snapshot vdisk. On FlashCopy failure the newly-created
        target volume is deleted before re-raising.
        """
        source_volume = snapshot['name']
        tgt_volume = volume['name']
        LOG.debug(_('enter: create_volume_from_snapshot: snapshot %(tgt)s '
                    'from volume %(src)s') % {'tgt': tgt_volume,
                                              'src': source_volume})
        src_volume_attributes = self._get_volume_attributes(source_volume)
        if src_volume_attributes is None:
            exception_msg = (_('create_volume_from_snapshot: source volume %s '
                               'does not exist') % source_volume)
            LOG.error(exception_msg)
            raise exception.SnapshotNotFound(exception_msg,
                                             volume_id=source_volume)
        self._driver_assert('capacity' in src_volume_attributes,
            _('create_volume_from_snapshot: cannot get source '
              'volume %(src)s capacity from volume attributes '
              '%(attr)s') % {'src': source_volume,
                             'attr': src_volume_attributes})
        src_volume_size = src_volume_attributes['capacity']
        tgt_volume_attributes = self._get_volume_attributes(tgt_volume)
        # Does the snapshot target exist?
        if tgt_volume_attributes is not None:
            exception_msg = (_('create_volume_from_snapshot: target volume %s '
                               'already exists, cannot create') % tgt_volume)
            LOG.error(exception_msg)
            raise exception.InvalidSnapshot(reason=exception_msg)
        # Create the target with the source's capacity; units='b' because
        # lsvdisk was run with -bytes above.
        snapshot_volume = {}
        snapshot_volume['name'] = tgt_volume
        snapshot_volume['size'] = src_volume_size
        self._create_volume(snapshot_volume, units='b')
        try:
            self._run_flashcopy(source_volume, tgt_volume)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Clean up newly-created snapshot if the FlashCopy failed
                self._delete_volume(snapshot_volume, True)
        LOG.debug(
            _('leave: create_volume_from_snapshot: %s created successfully')
            % tgt_volume)
    def create_snapshot(self, snapshot):
        """Create a new snapshot using FlashCopy.

        If the target vdisk does not exist it is created with the source's
        byte capacity; if it already exists its capacity must match exactly.
        A volume created here is deleted again if the FlashCopy fails.
        """
        src_volume = snapshot['volume_name']
        tgt_volume = snapshot['name']
        # Flag to keep track of whether we created the target volume, so
        # the except-clause below only cleans up what this call created.
        tgt_volume_created = False
        LOG.debug(_('enter: create_snapshot: snapshot %(tgt)s from '
                    'volume %(src)s') % {'tgt': tgt_volume,
                                         'src': src_volume})
        src_volume_attributes = self._get_volume_attributes(src_volume)
        if src_volume_attributes is None:
            exception_msg = (
                _('create_snapshot: source volume %s does not exist')
                % src_volume)
            LOG.error(exception_msg)
            raise exception.VolumeNotFound(exception_msg,
                                           volume_id=src_volume)
        self._driver_assert('capacity' in src_volume_attributes,
            _('create_volume_from_snapshot: cannot get source '
              'volume %(src)s capacity from volume attributes '
              '%(attr)s') % {'src': src_volume,
                             'attr': src_volume_attributes})
        source_volume_size = src_volume_attributes['capacity']
        tgt_volume_attributes = self._get_volume_attributes(tgt_volume)
        # Does the snapshot target exist?
        snapshot_volume = {}
        if tgt_volume_attributes is None:
            # No, create a new snapshot volume
            snapshot_volume['name'] = tgt_volume
            snapshot_volume['size'] = source_volume_size
            self._create_volume(snapshot_volume, units='b')
            tgt_volume_created = True
        else:
            # Yes, target exists, verify exact same size as source
            self._driver_assert('capacity' in tgt_volume_attributes,
                _('create_volume_from_snapshot: cannot get source '
                  'volume %(src)s capacity from volume attributes '
                  '%(attr)s') % {'src': tgt_volume,
                                 'attr': tgt_volume_attributes})
            target_volume_size = tgt_volume_attributes['capacity']
            if target_volume_size != source_volume_size:
                exception_msg = (
                    _('create_snapshot: source %(src)s and target '
                      'volume %(tgt)s have different capacities '
                      '(source:%(ssize)s target:%(tsize)s)') %
                    {'src': src_volume,
                     'tgt': tgt_volume,
                     'ssize': source_volume_size,
                     'tsize': target_volume_size})
                LOG.error(exception_msg)
                raise exception.InvalidSnapshot(reason=exception_msg)
        try:
            self._run_flashcopy(src_volume, tgt_volume)
        except exception.InvalidSnapshot:
            with excutils.save_and_reraise_exception():
                # Clean up newly-created snapshot if the FlashCopy failed
                if tgt_volume_created:
                    self._delete_volume(snapshot_volume, True)
        LOG.debug(_('leave: create_snapshot: %s created successfully')
                  % tgt_volume)
    def delete_snapshot(self, snapshot):
        """Delete a snapshot from the storage (without the force option)."""
        self._delete_snapshot(snapshot, False)
def _delete_snapshot(self, snapshot, force_opt):
"""Delete a snapshot from the storage."""
LOG.debug(_('enter: delete_snapshot: snapshot %s') % snapshot)
snapshot_defined = self._is_volume_defined(snapshot['name'])
if snapshot_defined:
if force_opt:
self._delete_volume(snapshot, force_opt)
else:
self.delete_volume(snapshot)
LOG.debug(_('leave: delete_snapshot: snapshot %s') % snapshot)
    def _get_host_from_iscsiname(self, iscsi_name):
        """List the hosts defined in the storage.
        Return the host name with the given iSCSI name, or None if there is
        no host name with that iSCSI name.
        """
        LOG.debug(_('enter: _get_host_from_iscsiname: iSCSI initiator %s')
                  % iscsi_name)
        # Get list of host in the storage
        ssh_cmd = 'lshost -delim !'
        out, err = self._run_ssh(ssh_cmd)
        # No hosts defined at all: nothing can match.
        if (len(out.strip()) == 0):
            return None
        err_msg = _('_get_host_from_iscsiname: '
                    'failed with unexpected CLI output.\n'
                    ' command: %(cmd)s\n stdout: %(out)s\n '
                    'stderr: %(err)s') % {'cmd': ssh_cmd,
                                          'out': str(out),
                                          'err': str(err)}
        host_lines = out.strip().split('\n')
        self._driver_assert(len(host_lines) > 0, err_msg)
        # First line is the column header; locate the 'name' column.
        header = host_lines.pop(0).split('!')
        self._driver_assert('name' in header, err_msg)
        name_index = header.index('name')
        hosts = map(lambda x: x.split('!')[name_index], host_lines)
        hostname = None
        # For each host, get its details and check for its iSCSI name
        for host in hosts:
            ssh_cmd = 'lshost -delim ! %s' % host
            out, err = self._run_ssh(ssh_cmd)
            self._driver_assert(len(out) > 0,
                _('_get_host_from_iscsiname: '
                  'Unexpected response from CLI output. '
                  'Command: %(cmd)s\n stdout: %(out)s\n stderr: %(err)s')
                % {'cmd': ssh_cmd,
                   'out': str(out),
                   'err': str(err)})
            for attrib_line in out.split('\n'):
                # If '!' not found, return the string and two empty strings
                attrib_name, foo, attrib_value = attrib_line.partition('!')
                if attrib_name == 'iscsi_name':
                    if iscsi_name == attrib_value:
                        hostname = host
                        break
            # A match in the inner loop ends the outer search as well.
            if hostname is not None:
                break
        LOG.debug(_('leave: _get_host_from_iscsiname: iSCSI initiator %s')
                  % iscsi_name)
        return hostname
    def _create_new_host(self, host_name, initiator_name):
        """Create a new host on the storage system.
        We modify the given host name, replace any invalid characters and
        adding a random suffix to avoid conflicts due to the translation. The
        host is associated with the given iSCSI initiator name.

        Returns the (sanitized, suffixed) host name actually created.
        """
        LOG.debug(_('enter: _create_new_host: host %(name)s with iSCSI '
                    'initiator %(init)s') % {'name': host_name,
                                             'init': initiator_name})
        # str and unicode need different translation-table forms
        # (built once in __init__).
        if isinstance(host_name, unicode):
            host_name = host_name.translate(self._unicode_host_name_filter)
        elif isinstance(host_name, str):
            host_name = host_name.translate(self._string_host_name_filter)
        else:
            msg = _('_create_new_host: cannot clean host name. Host name '
                    'is not unicode or string')
            LOG.error(msg)
            raise exception.NoValidHost(reason=msg)
        # Add 5 digit random suffix to the host name to avoid
        # conflicts in host names after removing invalid characters
        # for Storwize/SVC names
        host_name = '%s_%s' % (host_name, random.randint(10000, 99999))
        out, err = self._run_ssh('mkhost -name "%s" -iscsiname "%s"'
                                 % (host_name, initiator_name))
        self._driver_assert(len(out.strip()) > 0 and
                            'successfully created' in out,
            _('create host %(name)s with iSCSI initiator %(init)s - '
              'did not find success message in CLI output.\n '
              'stdout: %(out)s\n stderr: %(err)s\n')
            % {'name': host_name,
               'init': initiator_name,
               'out': str(out),
               'err': str(err)})
        LOG.debug(_('leave: _create_new_host: host %(host)s with iSCSI '
                    'initiator %(init)s') % {'host': host_name,
                                             'init': initiator_name})
        return host_name
def _delete_host(self, host_name):
"""Delete a host and associated iSCSI initiator name."""
LOG.debug(_('enter: _delete_host: host %s ') % host_name)
# Check if host exists on system, expect to find the host
is_defined = self._is_host_defined(host_name)
if is_defined:
# Delete host
out, err = self._run_ssh('rmhost %s ' % host_name)
else:
LOG.info(_('warning: tried to delete host %(name)s but '
'it does not exist.') % {'name': host_name})
LOG.debug(_('leave: _delete_host: host %s ') % host_name)
def _is_volume_defined(self, volume_name):
"""Check if volume is defined."""
LOG.debug(_('enter: _is_volume_defined: volume %s ') % volume_name)
volume_attributes = self._get_volume_attributes(volume_name)
LOG.debug(_('leave: _is_volume_defined: volume %(vol)s with %(str)s ')
% {'vol': volume_name,
'str': volume_attributes is not None})
if volume_attributes is None:
return False
else:
return True
    def _is_host_defined(self, host_name):
        """Check if a host is defined on the storage.

        Returns True when lshost reports exactly this host, False otherwise.
        """
        LOG.debug(_('enter: _is_host_defined: host %s ') % host_name)
        # Get list of hosts with the name %host_name%
        # We expect zero or one line if host does not exist,
        # two lines if it does exist, otherwise error
        out, err = self._run_ssh('lshost -filtervalue name=%s -delim !'
                                 % host_name)
        if len(out.strip()) == 0:
            return False
        lines = out.strip().split('\n')
        self._driver_assert(len(lines) <= 2,
            _('_is_host_defined: Unexpected response from CLI output.\n '
              'stdout: %(out)s\n stderr: %(err)s\n')
            % {'out': str(out),
               'err': str(err)})
        if len(lines) == 2:
            # Header line + data line: parse and cross-check the host name.
            host_info = self._get_hdr_dic(lines[0], lines[1], '!')
            host_name_from_storage = host_info['name']
            # Make sure we got the data for the right host
            self._driver_assert(host_name_from_storage == host_name,
                _('Data received for host %(host1)s instead of host '
                  '%(host2)s.\n '
                  'stdout: %(out)s\n stderr: %(err)s\n')
                % {'host1': host_name_from_storage,
                   'host2': host_name,
                   'out': str(out),
                   'err': str(err)})
        else:  # 0 or 1 lines
            host_name_from_storage = None
        LOG.debug(_('leave: _is_host_defined: host %(host)s with %(str)s ') % {
            'host': host_name,
            'str': host_name_from_storage is not None})
        if host_name_from_storage is None:
            return False
        else:
            return True
def _get_hostvdisk_mappings(self, host_name):
"""Return the defined storage mappings for a host."""
return_data = {}
ssh_cmd = 'lshostvdiskmap -delim ! %s' % host_name
out, err = self._run_ssh(ssh_cmd)
mappings = out.strip().split('\n')
if len(mappings) > 0:
header = mappings.pop(0)
for mapping_line in mappings:
mapping_data = self._get_hdr_dic(header, mapping_line, '!')
return_data[mapping_data['vdisk_name']] = mapping_data
return return_data
def _map_vol_to_host(self, volume_name, host_name):
"""Create a mapping between a volume to a host."""
LOG.debug(_('enter: _map_vol_to_host: volume %(vol)s to '
'host %(host)s') % {'vol': volume_name,
'host': host_name})
# Check if this volume is already mapped to this host
mapping_data = self._get_hostvdisk_mappings(host_name)
mapped_flag = False
result_lun = '-1'
if volume_name in mapping_data:
mapped_flag = True
result_lun = mapping_data[volume_name]['SCSI_id']
else:
lun_used = []
for k, v in mapping_data.iteritems():
lun_used.append(int(v['SCSI_id']))
lun_used.sort()
# Assume all luns are taken to this point, and then try to find
# an unused one
result_lun = str(len(lun_used))
for index, n in enumerate(lun_used):
if n > index:
result_lun = str(index)
# Volume is not mapped to host, create a new LUN
if not mapped_flag:
out, err = self._run_ssh('mkvdiskhostmap -host %s -scsi %s %s'
% (host_name, result_lun, volume_name))
self._driver_assert(len(out.strip()) > 0 and
'successfully created' in out,
_('_map_vol_to_host: mapping host %(host)s to '
'volume %(vol)s with LUN '
'%(lun)s - did not find success message in CLI output. '
'stdout: %(out)s\n stderr: %(err)s\n')
% {'host': host_name,
'vol': volume_name,
'lun': result_lun,
'out': str(out),
'err': str(err)})
LOG.debug(_('leave: _map_vol_to_host: LUN %(lun)s, volume %(vol)s, '
'host %(host)s') % {'lun': result_lun, 'vol': volume_name,
'host': host_name})
return result_lun
    def _get_flashcopy_mapping_attributes(self, fc_map_id):
        """Return the attributes of a FlashCopy mapping.
        Returns the attributes for the specified FlashCopy mapping, or
        None if the mapping does not exist.
        An exception is raised if the information from system can not
        be parsed or matched to a single FlashCopy mapping (this case
        should not happen under normal conditions).
        """
        LOG.debug(_('enter: _get_flashcopy_mapping_attributes: mapping %s')
                  % fc_map_id)
        # Query only the mapping with the given id.
        fc_ls_map_cmd = ('lsfcmap -filtervalue id=%s -delim !' % fc_map_id)
        out, err = self._run_ssh(fc_ls_map_cmd)
        self._driver_assert(len(out) > 0,
            _('_get_flashcopy_mapping_attributes: '
              'Unexpected response from CLI output. '
              'Command: %(cmd)s\n stdout: %(out)s\n stderr: %(err)s')
            % {'cmd': fc_ls_map_cmd,
               'out': str(out),
               'err': str(err)})
        # Get list of FlashCopy mappings
        # We expect zero or one line if mapping does not exist,
        # two lines if it does exist, otherwise error
        lines = out.strip().split('\n')
        self._driver_assert(len(lines) <= 2,
            _('_get_flashcopy_mapping_attributes: '
              'Unexpected response from CLI output. '
              'Command: %(cmd)s\n stdout: %(out)s\n stderr: %(err)s')
            % {'cmd': fc_ls_map_cmd,
               'out': str(out),
               'err': str(err)})
        if len(lines) == 2:
            # Header line + data line: parse into an attribute dict.
            attributes = self._get_hdr_dic(lines[0], lines[1], '!')
        else:  # 0 or 1 lines
            attributes = None
        LOG.debug(_('leave: _get_flashcopy_mapping_attributes: mapping '
                    '%(id)s, attributes %(attr)s') %
                  {'id': fc_map_id,
                   'attr': attributes})
        return attributes
def _get_volume_attributes(self, volume_name):
"""Return volume attributes, or None if volume does not exist
Exception is raised if the information from system can not be
parsed/matched to a single volume.
"""
LOG.debug(_('enter: _get_volume_attributes: volume %s')
% volume_name)
# Get the lunid to be used
try:
ssh_cmd = 'lsvdisk -bytes -delim ! %s ' % volume_name
out, err = self._run_ssh(ssh_cmd)
except exception.ProcessExecutionError as e:
# Didn't get details from the storage, return None
LOG.error(_('CLI Exception output:\n command: %(cmd)s\n '
'stdout: %(out)s\n stderr: %(err)s') %
{'cmd': ssh_cmd,
'out': e.stdout,
'err': e.stderr})
return None
self._driver_assert(len(out) > 0,
('_get_volume_attributes: '
'Unexpected response from CLI output. '
'Command: %(cmd)s\n stdout: %(out)s\n stderr: %(err)s')
% {'cmd': ssh_cmd,
'out': str(out),
'err': str(err)})
attributes = {}
for attrib_line in out.split('\n'):
# If '!' not found, return the string and two empty strings
attrib_name, foo, attrib_value = attrib_line.partition('!')
if attrib_name is not None and attrib_name.strip() > 0:
attributes[attrib_name] = attrib_value
LOG.debug(_('leave: _get_volume_attributes:\n volume %(vol)s\n '
'attributes: %(attr)s')
% {'vol': volume_name,
'attr': str(attributes)})
return attributes
| 44.146223 | 79 | 0.533159 |
import random
import re
import string
import time
from cinder import exception
from cinder import flags
from cinder.openstack.common import cfg
from cinder.openstack.common import excutils
from cinder.openstack.common import log as logging
from cinder.volume import san
LOG = logging.getLogger(__name__)
# Configuration options understood by the Storwize/SVC driver; registered
# on the global FLAGS object below and read throughout the driver.
storwize_svc_opts = [
    cfg.StrOpt('storwize_svc_volpool_name',
               default='volpool',
               help='Storage system storage pool for volumes'),
    cfg.StrOpt('storwize_svc_vol_rsize',
               default='2%',
               help='Storage system space-efficiency parameter for volumes'),
    cfg.StrOpt('storwize_svc_vol_warning',
               default='0',
               help='Storage system threshold for volume capacity warnings'),
    cfg.BoolOpt('storwize_svc_vol_autoexpand',
                default=True,
                help='Storage system autoexpand parameter for volumes '
                     '(True/False)'),
    cfg.StrOpt('storwize_svc_vol_grainsize',
               default='256',
               help='Storage system grain size parameter for volumes '
                    '(32/64/128/256)'),
    cfg.BoolOpt('storwize_svc_vol_compression',
                default=False,
                help='Storage system compression option for volumes'),
    cfg.BoolOpt('storwize_svc_vol_easytier',
                default=True,
                help='Enable Easy Tier for volumes'),
    cfg.StrOpt('storwize_svc_flashcopy_timeout',
               default='120',
               help='Maximum number of seconds to wait for FlashCopy to be '
                    'prepared. Maximum value is 600 seconds (10 minutes).'),
]
FLAGS = flags.FLAGS
FLAGS.register_opts(storwize_svc_opts)
class StorwizeSVCDriver(san.SanISCSIDriver):
    def __init__(self, *args, **kwargs):
        """Initialize the driver and build host-name sanitizing tables."""
        super(StorwizeSVCDriver, self).__init__(*args, **kwargs)
        # Filled in later by check_for_setup_error() with per-node iSCSI
        # target configuration.
        self.iscsi_ipv4_conf = None
        self.iscsi_ipv6_conf = None
        # Collect every ASCII character that is NOT acceptable in a
        # Storwize/SVC host name (anything but alnum, ' ', '.', '-', '_').
        invalid_ch_in_host = ''
        for num in range(0, 128):
            ch = chr(num)
            if ((not ch.isalnum()) and (ch != ' ') and (ch != '.')
                and (ch != '-') and (ch != '_')):
                invalid_ch_in_host = invalid_ch_in_host + ch
        # Translation tables mapping each invalid character to '-': the
        # str form uses string.maketrans, the unicode form a codepoint dict
        # (the two str.translate variants take different table types).
        self._string_host_name_filter = string.maketrans(invalid_ch_in_host,
                                            '-' * len(invalid_ch_in_host))
        self._unicode_host_name_filter = dict((ord(unicode(char)), u'-')
                                        for char in invalid_ch_in_host)
def _get_hdr_dic(self, header, row, delim):
attributes = header.split(delim)
values = row.split(delim)
self._driver_assert(len(values) == len(attributes),
_('_get_hdr_dic: attribute headers and values do not match.\n '
'Headers: %(header)s\n Values: %(row)s')
% {'header': str(header),
'row': str(row)})
dic = {}
for attribute, value in map(None, attributes, values):
dic[attribute] = value
return dic
    def _driver_assert(self, assert_condition, exception_message):
        """Log *exception_message* and raise when *assert_condition* is falsy.

        Used instead of the built-in assert so the check is never stripped
        under -O and always raises VolumeBackendAPIException.
        """
        if not assert_condition:
            LOG.error(exception_message)
            raise exception.VolumeBackendAPIException(data=exception_message)
    def check_for_setup_error(self):
        """Validate storage configuration and cache the iSCSI topology.

        Verifies the configured pool exists, then collects per-node iSCSI
        names (lsnode) and port IP addresses (lsportip), storing the result
        in self.iscsi_ipv4_conf / self.iscsi_ipv6_conf.
        """
        LOG.debug(_('enter: check_for_setup_error'))
        ssh_cmd = 'lsmdiskgrp -delim ! -nohdr'
        out, err = self._run_ssh(ssh_cmd)
        self._driver_assert(len(out) > 0,
            _('check_for_setup_error: failed with unexpected CLI output.\n '
              'Command: %(cmd)s\n stdout: %(out)s\n stderr: %(err)s')
            % {'cmd': ssh_cmd,
               'out': str(out),
               'err': str(err)})
        # The configured pool must appear as a '!'-delimited field.
        search_text = '!%s!' % FLAGS.storwize_svc_volpool_name
        if search_text not in out:
            raise exception.InvalidInput(
                reason=(_('pool %s doesn\'t exist')
                        % FLAGS.storwize_svc_volpool_name))
        storage_nodes = {}
        # Get the iSCSI names of the Storwize/SVC nodes
        ssh_cmd = 'svcinfo lsnode -delim !'
        out, err = self._run_ssh(ssh_cmd)
        self._driver_assert(len(out) > 0,
            _('check_for_setup_error: failed with unexpected CLI output.\n '
              'Command: %(cmd)s\n stdout: %(out)s\n stderr: %(err)s')
            % {'cmd': ssh_cmd,
               'out': str(out),
               'err': str(err)})
        nodes = out.strip().split('\n')
        self._driver_assert(len(nodes) > 0,
            _('check_for_setup_error: failed with unexpected CLI output.\n '
              'Command: %(cmd)s\n stdout: %(out)s\n stderr: %(err)s')
            % {'cmd': ssh_cmd,
               'out': str(out),
               'err': str(err)})
        header = nodes.pop(0)
        for node_line in nodes:
            try:
                node_data = self._get_hdr_dic(header, node_line, '!')
            except exception.VolumeBackendAPIException as e:
                with excutils.save_and_reraise_exception():
                    LOG.error(_('check_for_setup_error: '
                                'failed with unexpected CLI output.\n '
                                'Command: %(cmd)s\n '
                                'stdout: %(out)s\n stderr: %(err)s\n')
                              % {'cmd': ssh_cmd,
                                 'out': str(out),
                                 'err': str(err)})
            # (the except above always re-raises, so node_data is bound here)
            node = {}
            try:
                node['id'] = node_data['id']
                node['name'] = node_data['name']
                node['iscsi_name'] = node_data['iscsi_name']
                node['status'] = node_data['status']
                node['ipv4'] = []
                node['ipv6'] = []
                # Only nodes that actually expose an iSCSI name are kept.
                if node['iscsi_name'] != '':
                    storage_nodes[node['id']] = node
            except KeyError as e:
                LOG.error(_('Did not find expected column name in '
                            'svcinfo lsnode: %s') % str(e))
                exception_message = (
                    _('check_for_setup_error: Unexpected CLI output.\n '
                      'Details: %(msg)s\n'
                      'Command: %(cmd)s\n '
                      'stdout: %(out)s\n stderr: %(err)s')
                    % {'msg': str(e),
                       'cmd': ssh_cmd,
                       'out': str(out),
                       'err': str(err)})
                raise exception.VolumeBackendAPIException(
                    data=exception_message)
        # Get the iSCSI IP addresses of the Storwize/SVC nodes
        ssh_cmd = 'lsportip -delim !'
        out, err = self._run_ssh(ssh_cmd)
        self._driver_assert(len(out) > 0,
            _('check_for_setup_error: failed with unexpected CLI output.\n '
              'Command: %(cmd)s\n '
              'stdout: %(out)s\n stderr: %(err)s')
            % {'cmd': ssh_cmd,
               'out': str(out),
               'err': str(err)})
        portips = out.strip().split('\n')
        self._driver_assert(len(portips) > 0,
            _('check_for_setup_error: failed with unexpected CLI output.\n '
              'Command: %(cmd)s\n stdout: %(out)s\n stderr: %(err)s')
            % {'cmd': ssh_cmd,
               'out': str(out),
               'err': str(err)})
        header = portips.pop(0)
        for portip_line in portips:
            try:
                port_data = self._get_hdr_dic(header, portip_line, '!')
            except exception.VolumeBackendAPIException as e:
                with excutils.save_and_reraise_exception():
                    LOG.error(_('check_for_setup_error: '
                                'failed with unexpected CLI output.\n '
                                'Command: %(cmd)s\n '
                                'stdout: %(out)s\n stderr: %(err)s\n')
                              % {'cmd': ssh_cmd,
                                 'out': str(out),
                                 'err': str(err)})
            try:
                port_node_id = port_data['node_id']
                port_ipv4 = port_data['IP_address']
                port_ipv6 = port_data['IP_address_6']
            except KeyError as e:
                LOG.error(_('Did not find expected column name in '
                            'lsportip: %s') % str(e))
                exception_message = (
                    _('check_for_setup_error: Unexpected CLI output.\n '
                      'Details: %(msg)s\n'
                      'Command: %(cmd)s\n '
                      'stdout: %(out)s\n stderr: %(err)s')
                    % {'msg': str(e),
                       'cmd': ssh_cmd,
                       'out': str(out),
                       'err': str(err)})
                raise exception.VolumeBackendAPIException(
                    data=exception_message)
            # Attach the port addresses to the owning node's record.
            if port_node_id in storage_nodes:
                node = storage_nodes[port_node_id]
                if len(port_ipv4) > 0:
                    node['ipv4'].append(port_ipv4)
                if len(port_ipv6) > 0:
                    node['ipv6'].append(port_ipv6)
            else:
                raise exception.VolumeBackendAPIException(
                    data=_('check_for_setup_error: '
                           'fail to storage configuration: unknown '
                           'storage node %(node_id)s from CLI output.\n '
                           'stdout: %(out)s\n stderr: %(err)s\n')
                    % {'node_id': port_node_id,
                       'out': str(out),
                       'err': str(err)})
        iscsi_ipv4_conf = []
        iscsi_ipv6_conf = []
        for node_key in storage_nodes:
            node = storage_nodes[node_key]
            if 'ipv4' in node and len(node['iscsi_name']) > 0:
                iscsi_ipv4_conf.append({'iscsi_name': node['iscsi_name'],
                                        'ip': node['ipv4'],
                                        'node_id': node['id']})
            if 'ipv6' in node and len(node['iscsi_name']) > 0:
                iscsi_ipv6_conf.append({'iscsi_name': node['iscsi_name'],
                                        'ip': node['ipv6'],
                                        'node_id': node['id']})
            if (len(node['ipv4']) == 0) and (len(node['ipv6']) == 0):
                raise exception.VolumeBackendAPIException(
                    data=_('check_for_setup_error: '
                           'fail to storage configuration: storage '
                           'node %s has no IP addresses configured')
                    % node['id'])
        # Make sure we have at least one IPv4 address with a iSCSI name
        # TODO(ronenkat) need to expand this to support IPv6
        self._driver_assert(len(iscsi_ipv4_conf) > 0,
            _('could not obtain IP address and iSCSI name from the storage. '
              'Please verify that the storage is configured for iSCSI.\n '
              'Storage nodes: %(nodes)s\n portips: %(portips)s')
            % {'nodes': nodes, 'portips': portips})
        self.iscsi_ipv4_conf = iscsi_ipv4_conf
        self.iscsi_ipv6_conf = iscsi_ipv6_conf
        LOG.debug(_('leave: check_for_setup_error'))
def _check_num_perc(self, value):
if value.endswith('%'):
value = value[0:-1]
return value.isdigit()
def _check_flags(self):
required_flags = ['san_ip', 'san_ssh_port', 'san_login',
'storwize_svc_volpool_name']
for flag in required_flags:
if not getattr(FLAGS, flag, None):
raise exception.InvalidInput(
reason=_('%s is not set') % flag)
# Ensure that either password or keyfile were set
if not (FLAGS.san_password or FLAGS.san_private_key):
raise exception.InvalidInput(
reason=_('Password or SSH private key is required for '
'authentication: set either san_password or '
'san_private_key option'))
# Check that rsize is a number or percentage
rsize = FLAGS.storwize_svc_vol_rsize
if not self._check_num_perc(rsize) and (rsize != '-1'):
raise exception.InvalidInput(
reason=_('Illegal value specified for storwize_svc_vol_rsize: '
'set to either a number or a percentage'))
# Check that warning is a number or percentage
warning = FLAGS.storwize_svc_vol_warning
if not self._check_num_perc(warning):
raise exception.InvalidInput(
reason=_('Illegal value specified for '
'storwize_svc_vol_warning: '
'set to either a number or a percentage'))
# Check that grainsize is 32/64/128/256
grainsize = FLAGS.storwize_svc_vol_grainsize
if grainsize not in ['32', '64', '128', '256']:
raise exception.InvalidInput(
reason=_('Illegal value specified for '
'storwize_svc_vol_grainsize: set to either '
'\'32\', \'64\', \'128\', or \'256\''))
# Check that flashcopy_timeout is numeric and 32/64/128/256
flashcopy_timeout = FLAGS.storwize_svc_flashcopy_timeout
if not (flashcopy_timeout.isdigit() and int(flashcopy_timeout) > 0 and
int(flashcopy_timeout) <= 600):
raise exception.InvalidInput(
reason=_('Illegal value %s specified for '
'storwize_svc_flashcopy_timeout: '
'valid values are between 0 and 600')
% flashcopy_timeout)
# Check that rsize is set
volume_compression = FLAGS.storwize_svc_vol_compression
if ((volume_compression == True) and
(FLAGS.storwize_svc_vol_rsize == '-1')):
raise exception.InvalidInput(
reason=_('If compression is set to True, rsize must '
'also be set (not equal to -1)'))
    def do_setup(self, context):
        """Validate the configuration flags; no storage connection is made."""
        LOG.debug(_('enter: do_setup'))
        self._check_flags()
        LOG.debug(_('leave: do_setup'))
    def create_volume(self, volume):
        """Create a new volume; the requested size is taken in gigabytes."""
        return self._create_volume(volume, units='gb')
def _create_volume(self, volume, units='gb'):
name = volume['name']
model_update = None
LOG.debug(_('enter: create_volume: volume %s ') % name)
size = int(volume['size'])
if FLAGS.storwize_svc_vol_autoexpand == True:
autoex = '-autoexpand'
else:
autoex = ''
if FLAGS.storwize_svc_vol_easytier == True:
easytier = '-easytier on'
else:
easytier = '-easytier off'
# Set space-efficient options
if FLAGS.storwize_svc_vol_rsize.strip() == '-1':
ssh_cmd_se_opt = ''
else:
ssh_cmd_se_opt = ('-rsize %(rsize)s %(autoex)s -warning %(warn)s' %
{'rsize': FLAGS.storwize_svc_vol_rsize,
'autoex': autoex,
'warn': FLAGS.storwize_svc_vol_warning})
if FLAGS.storwize_svc_vol_compression:
ssh_cmd_se_opt = ssh_cmd_se_opt + ' -compressed'
else:
ssh_cmd_se_opt = ssh_cmd_se_opt + (' -grainsize %(grain)s' %
{'grain': FLAGS.storwize_svc_vol_grainsize})
ssh_cmd = ('mkvdisk -name %(name)s -mdiskgrp %(mdiskgrp)s '
'-iogrp 0 -size %(size)s -unit '
'%(unit)s %(easytier)s %(ssh_cmd_se_opt)s'
% {'name': name,
'mdiskgrp': FLAGS.storwize_svc_volpool_name,
'size': size, 'unit': units, 'easytier': easytier,
'ssh_cmd_se_opt': ssh_cmd_se_opt})
out, err = self._run_ssh(ssh_cmd)
self._driver_assert(len(out.strip()) > 0,
_('create volume %(name)s - did not find '
'success message in CLI output.\n '
'stdout: %(out)s\n stderr: %(err)s')
% {'name': name, 'out': str(out), 'err': str(err)})
# Ensure that the output is as expected
match_obj = re.search('Virtual Disk, id \[([0-9]+)\], '
'successfully created', out)
# Make sure we got a "successfully created" message with vdisk id
self._driver_assert(match_obj is not None,
_('create volume %(name)s - did not find '
'success message in CLI output.\n '
'stdout: %(out)s\n stderr: %(err)s')
% {'name': name, 'out': str(out), 'err': str(err)})
LOG.debug(_('leave: create_volume: volume %(name)s ') % {'name': name})
def delete_volume(self, volume):
self._delete_volume(volume, False)
def _delete_volume(self, volume, force_opt):
name = volume['name']
LOG.debug(_('enter: delete_volume: volume %(name)s ') % {'name': name})
if force_opt:
force_flag = '-force'
else:
force_flag = ''
volume_defined = self._is_volume_defined(name)
# Try to delete volume only if found on the storage
if volume_defined:
out, err = self._run_ssh('rmvdisk %(force)s %(name)s'
% {'force': force_flag,
'name': name})
# No output should be returned from rmvdisk
self._driver_assert(len(out.strip()) == 0,
_('delete volume %(name)s - non empty output from CLI.\n '
'stdout: %(out)s\n stderr: %(err)s')
% {'name': name,
'out': str(out),
'err': str(err)})
else:
# Log that volume does not exist
LOG.info(_('warning: tried to delete volume %(name)s but '
'it does not exist.') % {'name': name})
LOG.debug(_('leave: delete_volume: volume %(name)s ') % {'name': name})
def ensure_export(self, context, volume):
volume_defined = self._is_volume_defined(volume['name'])
if not volume_defined:
LOG.error(_('ensure_export: volume %s not found on storage')
% volume['name'])
def create_export(self, context, volume):
model_update = None
return model_update
def remove_export(self, context, volume):
pass
def initialize_connection(self, volume, connector):
LOG.debug(_('enter: initialize_connection: volume %(vol)s with '
'connector %(conn)s') % {'vol': str(volume),
'conn': str(connector)})
initiator_name = connector['initiator']
volume_name = volume['name']
host_name = self._get_host_from_iscsiname(initiator_name)
# Check if a host is defined for the iSCSI initiator name
if host_name is None:
# Host does not exist - add a new host to Storwize/SVC
host_name = self._create_new_host('host%s' % initiator_name,
initiator_name)
# Verify that create_new_host succeeded
self._driver_assert(host_name is not None,
_('_create_new_host failed to return the host name.'))
lun_id = self._map_vol_to_host(volume_name, host_name)
# Get preferred path
# Only IPv4 for now because lack of OpenStack support
# TODO(ronenkat): Add support for IPv6
volume_attributes = self._get_volume_attributes(volume_name)
if (volume_attributes is not None and
'preferred_node_id' in volume_attributes):
preferred_node = volume_attributes['preferred_node_id']
preferred_node_entry = None
for node in self.iscsi_ipv4_conf:
if node['node_id'] == preferred_node:
preferred_node_entry = node
break
if preferred_node_entry is None:
preferred_node_entry = self.iscsi_ipv4_conf[0]
LOG.error(_('initialize_connection: did not find preferred '
'node %(node)s for volume %(vol)s in iSCSI '
'configuration') % {'node': preferred_node,
'vol': volume_name})
else:
# Get 1st node
preferred_node_entry = self.iscsi_ipv4_conf[0]
LOG.error(
_('initialize_connection: did not find a preferred node '
'for volume %s in iSCSI configuration') % volume_name)
properties = {}
# We didn't use iSCSI discover, as in server-based iSCSI
properties['target_discovered'] = False
properties['target_portal'] = ('%s:%s' %
(preferred_node_entry['ip'][0], '3260'))
properties['target_iqn'] = preferred_node_entry['iscsi_name']
properties['target_lun'] = lun_id
properties['volume_id'] = volume['id']
LOG.debug(_('leave: initialize_connection:\n volume: %(vol)s\n '
'connector %(conn)s\n properties: %(prop)s')
% {'vol': str(volume),
'conn': str(connector),
'prop': str(properties)})
return {'driver_volume_type': 'iscsi', 'data': properties, }
def terminate_connection(self, volume, connector):
LOG.debug(_('enter: terminate_connection: volume %(vol)s with '
'connector %(conn)s') % {'vol': str(volume),
'conn': str(connector)})
vol_name = volume['name']
initiator_name = connector['initiator']
host_name = self._get_host_from_iscsiname(initiator_name)
self._driver_assert(host_name is not None,
_('_get_host_from_iscsiname failed to return the host name '
'for iscsi name %s') % initiator_name)
mapping_data = self._get_hostvdisk_mappings(host_name)
if vol_name in mapping_data:
out, err = self._run_ssh('rmvdiskhostmap -host %s %s'
% (host_name, vol_name))
self._driver_assert(len(out.strip()) == 0,
_('delete mapping of volume %(vol)s to host %(host)s '
'- non empty output from CLI.\n '
'stdout: %(out)s\n stderr: %(err)s')
% {'vol': vol_name,
'host': host_name,
'out': str(out),
'err': str(err)})
del mapping_data[vol_name]
else:
LOG.error(_('terminate_connection: no mapping of volume '
'%(vol)s to host %(host)s found') %
{'vol': vol_name, 'host': host_name})
if not mapping_data:
self._delete_host(host_name)
LOG.debug(_('leave: terminate_connection: volume %(vol)s with '
'connector %(conn)s') % {'vol': str(volume),
'conn': str(connector)})
def _flashcopy_cleanup(self, fc_map_id, source, target):
try:
out, err = self._run_ssh('stopfcmap -force %s' % fc_map_id)
out, err = self._run_ssh('rmfcmap -force %s' % fc_map_id)
except exception.ProcessExecutionError as e:
LOG.error(_('_run_flashcopy: fail to cleanup failed FlashCopy '
'mapping %(fc_map_id)% '
'from %(source)s to %(target)s.\n'
'stdout: %(out)s\n stderr: %(err)s')
% {'fc_map_id': fc_map_id,
'source': source,
'target': target,
'out': e.stdout,
'err': e.stderr})
def _run_flashcopy(self, source, target):
LOG.debug(
_('enter: _run_flashcopy: execute FlashCopy from source '
'%(source)s to target %(target)s') % {'source': source,
'target': target})
fc_map_cli_cmd = ('mkfcmap -source %s -target %s -autodelete '
'-cleanrate 0' % (source, target))
out, err = self._run_ssh(fc_map_cli_cmd)
self._driver_assert(len(out.strip()) > 0,
_('create FC mapping from %(source)s to %(target)s - '
'did not find success message in CLI output.\n'
' stdout: %(out)s\n stderr: %(err)s\n')
% {'source': source,
'target': target,
'out': str(out),
'err': str(err)})
match_obj = re.search('FlashCopy Mapping, id \[([0-9]+)\], '
'successfully created', out)
self._driver_assert(match_obj is not None,
_('create FC mapping from %(source)s to %(target)s - '
'did not find success message in CLI output.\n'
' stdout: %(out)s\n stderr: %(err)s\n')
% {'source': source,
'target': target,
'out': str(out),
'err': str(err)})
try:
fc_map_id = match_obj.group(1)
self._driver_assert(fc_map_id is not None,
_('create FC mapping from %(source)s to %(target)s - '
'did not find mapping id in CLI output.\n'
' stdout: %(out)s\n stderr: %(err)s\n')
% {'source': source,
'target': target,
'out': str(out),
'err': str(err)})
except IndexError:
self._driver_assert(False,
_('create FC mapping from %(source)s to %(target)s - '
'did not find mapping id in CLI output.\n'
' stdout: %(out)s\n stderr: %(err)s\n')
% {'source': source,
'target': target,
'out': str(out),
'err': str(err)})
try:
out, err = self._run_ssh('prestartfcmap %s' % fc_map_id)
except exception.ProcessExecutionError as e:
with excutils.save_and_reraise_exception():
LOG.error(_('_run_flashcopy: fail to prepare FlashCopy '
'from %(source)s to %(target)s.\n'
'stdout: %(out)s\n stderr: %(err)s')
% {'source': source,
'target': target,
'out': e.stdout,
'err': e.stderr})
self._flashcopy_cleanup(fc_map_id, source, target)
mapping_ready = False
wait_time = 5
max_retries = (int(FLAGS.storwize_svc_flashcopy_timeout)
/ wait_time) + 1
for try_number in range(1, max_retries):
mapping_attributes = self._get_flashcopy_mapping_attributes(
fc_map_id)
if (mapping_attributes is None or
'status' not in mapping_attributes):
break
if mapping_attributes['status'] == 'prepared':
mapping_ready = True
break
elif mapping_attributes['status'] != 'preparing':
exception_msg = (_('unexecpted mapping status %(status)s '
'for mapping %(id)s. Attributes: '
'%(attr)s')
% {'status': mapping_attributes['status'],
'id': fc_map_id,
'attr': mapping_attributes})
raise exception.VolumeBackendAPIException(
data=exception_msg)
time.sleep(wait_time)
if not mapping_ready:
exception_msg = (_('mapping %(id)s prepare failed to complete '
'within the alloted %(to)s seconds timeout. '
'Terminating') % {'id': fc_map_id,
'to': FLAGS.storwize_svc_flashcopy_timeout})
LOG.error(_('_run_flashcopy: fail to start FlashCopy '
'from %(source)s to %(target)s with '
'exception %(ex)s')
% {'source': source,
'target': target,
'ex': exception_msg})
self._flashcopy_cleanup(fc_map_id, source, target)
raise exception.InvalidSnapshot(
reason=_('_run_flashcopy: %s') % exception_msg)
try:
out, err = self._run_ssh('startfcmap %s' % fc_map_id)
except exception.ProcessExecutionError as e:
with excutils.save_and_reraise_exception():
LOG.error(_('_run_flashcopy: fail to start FlashCopy '
'from %(source)s to %(target)s.\n'
'stdout: %(out)s\n stderr: %(err)s')
% {'source': source,
'target': target,
'out': e.stdout,
'err': e.stderr})
self._flashcopy_cleanup(fc_map_id, source, target)
LOG.debug(_('leave: _run_flashcopy: FlashCopy started from '
'%(source)s to %(target)s') % {'source': source,
'target': target})
def create_volume_from_snapshot(self, volume, snapshot):
source_volume = snapshot['name']
tgt_volume = volume['name']
LOG.debug(_('enter: create_volume_from_snapshot: snapshot %(tgt)s '
'from volume %(src)s') % {'tgt': tgt_volume,
'src': source_volume})
src_volume_attributes = self._get_volume_attributes(source_volume)
if src_volume_attributes is None:
exception_msg = (_('create_volume_from_snapshot: source volume %s '
'does not exist') % source_volume)
LOG.error(exception_msg)
raise exception.SnapshotNotFound(exception_msg,
volume_id=source_volume)
self._driver_assert('capacity' in src_volume_attributes,
_('create_volume_from_snapshot: cannot get source '
'volume %(src)s capacity from volume attributes '
'%(attr)s') % {'src': source_volume,
'attr': src_volume_attributes})
src_volume_size = src_volume_attributes['capacity']
tgt_volume_attributes = self._get_volume_attributes(tgt_volume)
if tgt_volume_attributes is not None:
exception_msg = (_('create_volume_from_snapshot: target volume %s '
'already exists, cannot create') % tgt_volume)
LOG.error(exception_msg)
raise exception.InvalidSnapshot(reason=exception_msg)
snapshot_volume = {}
snapshot_volume['name'] = tgt_volume
snapshot_volume['size'] = src_volume_size
self._create_volume(snapshot_volume, units='b')
try:
self._run_flashcopy(source_volume, tgt_volume)
except Exception:
with excutils.save_and_reraise_exception():
self._delete_volume(snapshot_volume, True)
LOG.debug(
_('leave: create_volume_from_snapshot: %s created successfully')
% tgt_volume)
def create_snapshot(self, snapshot):
src_volume = snapshot['volume_name']
tgt_volume = snapshot['name']
tgt_volume_created = False
LOG.debug(_('enter: create_snapshot: snapshot %(tgt)s from '
'volume %(src)s') % {'tgt': tgt_volume,
'src': src_volume})
src_volume_attributes = self._get_volume_attributes(src_volume)
if src_volume_attributes is None:
exception_msg = (
_('create_snapshot: source volume %s does not exist')
% src_volume)
LOG.error(exception_msg)
raise exception.VolumeNotFound(exception_msg,
volume_id=src_volume)
self._driver_assert('capacity' in src_volume_attributes,
_('create_volume_from_snapshot: cannot get source '
'volume %(src)s capacity from volume attributes '
'%(attr)s') % {'src': src_volume,
'attr': src_volume_attributes})
source_volume_size = src_volume_attributes['capacity']
tgt_volume_attributes = self._get_volume_attributes(tgt_volume)
snapshot_volume = {}
if tgt_volume_attributes is None:
snapshot_volume['name'] = tgt_volume
snapshot_volume['size'] = source_volume_size
self._create_volume(snapshot_volume, units='b')
tgt_volume_created = True
else:
self._driver_assert('capacity' in tgt_volume_attributes,
_('create_volume_from_snapshot: cannot get source '
'volume %(src)s capacity from volume attributes '
'%(attr)s') % {'src': tgt_volume,
'attr': tgt_volume_attributes})
target_volume_size = tgt_volume_attributes['capacity']
if target_volume_size != source_volume_size:
exception_msg = (
_('create_snapshot: source %(src)s and target '
'volume %(tgt)s have different capacities '
'(source:%(ssize)s target:%(tsize)s)') %
{'src': src_volume,
'tgt': tgt_volume,
'ssize': source_volume_size,
'tsize': target_volume_size})
LOG.error(exception_msg)
raise exception.InvalidSnapshot(reason=exception_msg)
try:
self._run_flashcopy(src_volume, tgt_volume)
except exception.InvalidSnapshot:
with excutils.save_and_reraise_exception():
if tgt_volume_created:
self._delete_volume(snapshot_volume, True)
LOG.debug(_('leave: create_snapshot: %s created successfully')
% tgt_volume)
def delete_snapshot(self, snapshot):
self._delete_snapshot(snapshot, False)
def _delete_snapshot(self, snapshot, force_opt):
LOG.debug(_('enter: delete_snapshot: snapshot %s') % snapshot)
snapshot_defined = self._is_volume_defined(snapshot['name'])
if snapshot_defined:
if force_opt:
self._delete_volume(snapshot, force_opt)
else:
self.delete_volume(snapshot)
LOG.debug(_('leave: delete_snapshot: snapshot %s') % snapshot)
def _get_host_from_iscsiname(self, iscsi_name):
LOG.debug(_('enter: _get_host_from_iscsiname: iSCSI initiator %s')
% iscsi_name)
ssh_cmd = 'lshost -delim !'
out, err = self._run_ssh(ssh_cmd)
if (len(out.strip()) == 0):
return None
err_msg = _('_get_host_from_iscsiname: '
'failed with unexpected CLI output.\n'
' command: %(cmd)s\n stdout: %(out)s\n '
'stderr: %(err)s') % {'cmd': ssh_cmd,
'out': str(out),
'err': str(err)}
host_lines = out.strip().split('\n')
self._driver_assert(len(host_lines) > 0, err_msg)
header = host_lines.pop(0).split('!')
self._driver_assert('name' in header, err_msg)
name_index = header.index('name')
hosts = map(lambda x: x.split('!')[name_index], host_lines)
hostname = None
for host in hosts:
ssh_cmd = 'lshost -delim ! %s' % host
out, err = self._run_ssh(ssh_cmd)
self._driver_assert(len(out) > 0,
_('_get_host_from_iscsiname: '
'Unexpected response from CLI output. '
'Command: %(cmd)s\n stdout: %(out)s\n stderr: %(err)s')
% {'cmd': ssh_cmd,
'out': str(out),
'err': str(err)})
for attrib_line in out.split('\n'):
attrib_name, foo, attrib_value = attrib_line.partition('!')
if attrib_name == 'iscsi_name':
if iscsi_name == attrib_value:
hostname = host
break
if hostname is not None:
break
LOG.debug(_('leave: _get_host_from_iscsiname: iSCSI initiator %s')
% iscsi_name)
return hostname
def _create_new_host(self, host_name, initiator_name):
LOG.debug(_('enter: _create_new_host: host %(name)s with iSCSI '
'initiator %(init)s') % {'name': host_name,
'init': initiator_name})
if isinstance(host_name, unicode):
host_name = host_name.translate(self._unicode_host_name_filter)
elif isinstance(host_name, str):
host_name = host_name.translate(self._string_host_name_filter)
else:
msg = _('_create_new_host: cannot clean host name. Host name '
'is not unicode or string')
LOG.error(msg)
raise exception.NoValidHost(reason=msg)
host_name = '%s_%s' % (host_name, random.randint(10000, 99999))
out, err = self._run_ssh('mkhost -name "%s" -iscsiname "%s"'
% (host_name, initiator_name))
self._driver_assert(len(out.strip()) > 0 and
'successfully created' in out,
_('create host %(name)s with iSCSI initiator %(init)s - '
'did not find success message in CLI output.\n '
'stdout: %(out)s\n stderr: %(err)s\n')
% {'name': host_name,
'init': initiator_name,
'out': str(out),
'err': str(err)})
LOG.debug(_('leave: _create_new_host: host %(host)s with iSCSI '
'initiator %(init)s') % {'host': host_name,
'init': initiator_name})
return host_name
def _delete_host(self, host_name):
LOG.debug(_('enter: _delete_host: host %s ') % host_name)
is_defined = self._is_host_defined(host_name)
if is_defined:
out, err = self._run_ssh('rmhost %s ' % host_name)
else:
LOG.info(_('warning: tried to delete host %(name)s but '
'it does not exist.') % {'name': host_name})
LOG.debug(_('leave: _delete_host: host %s ') % host_name)
def _is_volume_defined(self, volume_name):
LOG.debug(_('enter: _is_volume_defined: volume %s ') % volume_name)
volume_attributes = self._get_volume_attributes(volume_name)
LOG.debug(_('leave: _is_volume_defined: volume %(vol)s with %(str)s ')
% {'vol': volume_name,
'str': volume_attributes is not None})
if volume_attributes is None:
return False
else:
return True
def _is_host_defined(self, host_name):
LOG.debug(_('enter: _is_host_defined: host %s ') % host_name)
out, err = self._run_ssh('lshost -filtervalue name=%s -delim !'
% host_name)
if len(out.strip()) == 0:
return False
lines = out.strip().split('\n')
self._driver_assert(len(lines) <= 2,
_('_is_host_defined: Unexpected response from CLI output.\n '
'stdout: %(out)s\n stderr: %(err)s\n')
% {'out': str(out),
'err': str(err)})
if len(lines) == 2:
host_info = self._get_hdr_dic(lines[0], lines[1], '!')
host_name_from_storage = host_info['name']
self._driver_assert(host_name_from_storage == host_name,
_('Data received for host %(host1)s instead of host '
'%(host2)s.\n '
'stdout: %(out)s\n stderr: %(err)s\n')
% {'host1': host_name_from_storage,
'host2': host_name,
'out': str(out),
'err': str(err)})
else:
host_name_from_storage = None
LOG.debug(_('leave: _is_host_defined: host %(host)s with %(str)s ') % {
'host': host_name,
'str': host_name_from_storage is not None})
if host_name_from_storage is None:
return False
else:
return True
def _get_hostvdisk_mappings(self, host_name):
return_data = {}
ssh_cmd = 'lshostvdiskmap -delim ! %s' % host_name
out, err = self._run_ssh(ssh_cmd)
mappings = out.strip().split('\n')
if len(mappings) > 0:
header = mappings.pop(0)
for mapping_line in mappings:
mapping_data = self._get_hdr_dic(header, mapping_line, '!')
return_data[mapping_data['vdisk_name']] = mapping_data
return return_data
def _map_vol_to_host(self, volume_name, host_name):
LOG.debug(_('enter: _map_vol_to_host: volume %(vol)s to '
'host %(host)s') % {'vol': volume_name,
'host': host_name})
mapping_data = self._get_hostvdisk_mappings(host_name)
mapped_flag = False
result_lun = '-1'
if volume_name in mapping_data:
mapped_flag = True
result_lun = mapping_data[volume_name]['SCSI_id']
else:
lun_used = []
for k, v in mapping_data.iteritems():
lun_used.append(int(v['SCSI_id']))
lun_used.sort()
result_lun = str(len(lun_used))
for index, n in enumerate(lun_used):
if n > index:
result_lun = str(index)
if not mapped_flag:
out, err = self._run_ssh('mkvdiskhostmap -host %s -scsi %s %s'
% (host_name, result_lun, volume_name))
self._driver_assert(len(out.strip()) > 0 and
'successfully created' in out,
_('_map_vol_to_host: mapping host %(host)s to '
'volume %(vol)s with LUN '
'%(lun)s - did not find success message in CLI output. '
'stdout: %(out)s\n stderr: %(err)s\n')
% {'host': host_name,
'vol': volume_name,
'lun': result_lun,
'out': str(out),
'err': str(err)})
LOG.debug(_('leave: _map_vol_to_host: LUN %(lun)s, volume %(vol)s, '
'host %(host)s') % {'lun': result_lun, 'vol': volume_name,
'host': host_name})
return result_lun
def _get_flashcopy_mapping_attributes(self, fc_map_id):
LOG.debug(_('enter: _get_flashcopy_mapping_attributes: mapping %s')
% fc_map_id)
fc_ls_map_cmd = ('lsfcmap -filtervalue id=%s -delim !' % fc_map_id)
out, err = self._run_ssh(fc_ls_map_cmd)
self._driver_assert(len(out) > 0,
_('_get_flashcopy_mapping_attributes: '
'Unexpected response from CLI output. '
'Command: %(cmd)s\n stdout: %(out)s\n stderr: %(err)s')
% {'cmd': fc_ls_map_cmd,
'out': str(out),
'err': str(err)})
lines = out.strip().split('\n')
self._driver_assert(len(lines) <= 2,
_('_get_flashcopy_mapping_attributes: '
'Unexpected response from CLI output. '
'Command: %(cmd)s\n stdout: %(out)s\n stderr: %(err)s')
% {'cmd': fc_ls_map_cmd,
'out': str(out),
'err': str(err)})
if len(lines) == 2:
attributes = self._get_hdr_dic(lines[0], lines[1], '!')
else:
attributes = None
LOG.debug(_('leave: _get_flashcopy_mapping_attributes: mapping '
'%(id)s, attributes %(attr)s') %
{'id': fc_map_id,
'attr': attributes})
return attributes
def _get_volume_attributes(self, volume_name):
LOG.debug(_('enter: _get_volume_attributes: volume %s')
% volume_name)
try:
ssh_cmd = 'lsvdisk -bytes -delim ! %s ' % volume_name
out, err = self._run_ssh(ssh_cmd)
except exception.ProcessExecutionError as e:
LOG.error(_('CLI Exception output:\n command: %(cmd)s\n '
'stdout: %(out)s\n stderr: %(err)s') %
{'cmd': ssh_cmd,
'out': e.stdout,
'err': e.stderr})
return None
self._driver_assert(len(out) > 0,
('_get_volume_attributes: '
'Unexpected response from CLI output. '
'Command: %(cmd)s\n stdout: %(out)s\n stderr: %(err)s')
% {'cmd': ssh_cmd,
'out': str(out),
'err': str(err)})
attributes = {}
for attrib_line in out.split('\n'):
# If '!' not found, return the string and two empty strings
attrib_name, foo, attrib_value = attrib_line.partition('!')
if attrib_name is not None and attrib_name.strip() > 0:
attributes[attrib_name] = attrib_value
LOG.debug(_('leave: _get_volume_attributes:\n volume %(vol)s\n '
'attributes: %(attr)s')
% {'vol': volume_name,
'attr': str(attributes)})
return attributes
| true | true |
1c3006ed4c173fcdda961ab21b61e392548c69c5 | 8,547 | py | Python | homework_08/teste_diversidade.py | ufpa-organization-repositories/evolutionary-computing | e16786f9619e2b357b94ab91ff3a7b352e6a0d92 | [
"MIT"
] | null | null | null | homework_08/teste_diversidade.py | ufpa-organization-repositories/evolutionary-computing | e16786f9619e2b357b94ab91ff3a7b352e6a0d92 | [
"MIT"
] | null | null | null | homework_08/teste_diversidade.py | ufpa-organization-repositories/evolutionary-computing | e16786f9619e2b357b94ab91ff3a7b352e6a0d92 | [
"MIT"
] | null | null | null | populacao = [[0.7187463206122816, ['011101001110001100100110', '100101001111000000001110']], [0.68102618759073, ['100101111000011110111011', '100101100010100110100100']], [0.6342542148451269, ['100111110001101100101110', '011011100011000111001000']], [0.6241986174388695, ['010111001001101001111110', '100001010010000101100001']], [0.6067886961900137, ['011011011110111011011100', '011100010010110010010000']], [0.5928225700022598, ['011111101011110001000011', '101001111011001001101100']], [0.584219196618116, ['100011111000000001001010', '010100100110111011001000']], [0.5833582107194295, ['010101101111111100111101', '100110010010010001010100']], [0.5472229567081915, ['011100100110110001111000', '101101100011010101110001']], [0.5329211648083871, ['100100101010000001101111', '001111100001100110011011']], [0.5326154278718859, ['011001010110101111001110', '100001011110110101010101']], [0.5282310619742139, ['100010111100101111100000', '110001110111011011111101']], [0.5214442858967496, ['010011101000111000101010', '101110011101010101101100']], [0.5196969217927273, ['101010011101000100110110', '001110111000010110001011']], [0.5172587209761046, ['010110001010010011100101', '001111110001101001011100']], [0.5145249560525812, ['110100100010111010010010', '010111111011100100101001']], [0.5132246833711624, ['110100111101000111000011', '011110101101011111100011']], [0.5127319356114696, ['101011000010001010100101', '001011101101110100011000']], [0.5123261530855516, ['010110111100011111101001', '001011101110000010010011']], [0.511792586739197, ['100111100011011101110001', '110010011101110001001101']], [0.5113004549955176, ['001111111000000110111101', '010011101110101110101001']], [0.5090593740706316, ['010000110011010001010010', '101110111000100101001100']], [0.5087896635485242, ['100001100010010011011000', '110001110100111010101110']], [0.5084744316358049, ['111000100010000101110111', '011010111100101110000001']], [0.5070905144251526, 
['000110111011100011110111', '100111101101011111010011']], [0.5068205853526966, ['100100001110011111110000', '000101000111111011110110']], [0.5064607453442134, ['010010000111000111010110', '000111011111010101011100']], [0.5063263806377141, ['101100000111110010010011', '000110101001010001111000']], [0.5062346533156891, ['001011101100010101111101', '101000010010001110111111']], [0.5056703090925954, ['010100010111101101001100', '001100001110011000001111']], [0.5053438812924628, ['010100110011111100000101', '001000001110010000100010']], [0.505334187929147, ['100100011001100010001111', '000101010101111110010110']], [0.5049561175396251, ['101111010111010011100101', '001011000010111000000110']], [0.5042455849934674, ['111000101001110011001000', '101011101101110100010100']], [0.5041036223873756, ['010000011110000000110011', '111000110100010100000010']], [0.5037768518327882, ['100011011011111100010100', '000011111010001111001110']], [0.5037167968409552, ['111001000001111110010010', '001011101100010010010000']], [0.5036727677825407, ['011111010010010111110011', '111111000011011000100100']], [0.5035205973510747, ['111011000110001110110010', '101111001010100100101110']], [0.5033318890765699, ['000110011010101010101100', '110001100101001111011101']], [0.5032842065445811, ['110100011111100001110001', '000111010101011100101001']], [0.5032758702411534, ['110101100111001111111010', '001110001110110110000111']], [0.5032675929341657, ['000101001011001101000000', '001100100110110100100011']], [0.5029612751578049, ['010010111011010100111010', '000111010001011011111101']], [0.502740799162989, ['110000010100010111110110', '100000011101101000010100']], [0.5024640860265372, ['111111001111011111100001', '011101110000100000111000']], [0.5022702065127201, ['101111011001111011100110', '011010100101111000110111']], [0.5021512426803183, ['111011011011100010010100', '010101100101000110110101']], [0.5018871104159476, ['000100100101011110100011', '111100000101010100101111']], [0.5018462352965699, 
['000000000101101101000000', '001011001010001100110111']], [0.5015561651248678, ['011101110111011001100111', '000000101101101100100000']], [0.501304728374417, ['101100001010100010110100', '001001101111101111011010']], [0.5012761169335559, ['101110111011110011111111', '111101110101101100000101']], [0.5009980399693558, ['010000011000011110001001', '110011111111010100110101']], [0.5009297316560789, ['111110101001011001010100', '111100110101011001110110']], [0.5008536157286538, ['111110001111000101100101', '000101110000000100101110']], [0.5007199352252496, ['101110101010111000101111', '101110110101101101001000']], [0.5006006776568536, ['000000100110001100001101', '111010111111010000010010']], [0.5005003983582097, ['110111101111101100011000', '001000100000010111100111']], [0.5004555945275694, ['000111011000011010101110', '101111001010100111010000']], [0.5003551274646579, ['100100110010110111011011', '110001101101010000010111']], [0.5002938326873326, ['001100111010111100011011', '111110011110100110101010']], [0.49994994549836447, ['000110101000001000010011', '001111001110111100010111']], [0.49961102429402826, ['111101011001010011001110', '111100011110101010010011']], [0.4989678975527242, ['101100101011011101110001', '011110001000101100001110']], [0.49891505125833413, ['001000001100101010011101', '111000000110110000010001']], [0.4988469295160771, ['101010101110011011011101', '000011100000001101100101']], [0.49825199163698425, ['000101010000010101001111', '001011010000111000101100']], [0.4980846796618078, ['100100111001001110100111', '000010111100101010011101']], [0.49795649100195205, ['100111110100110110010101', '111101011101010000110001']], [0.49794375436360994, ['001011010100001001001101', '111001000101001001001011']], [0.4978695994946748, ['110100011011101010001111', '110101001111000000000100']], [0.4974906338726385, ['101010111110111110100111', '110110010001111111000010']], [0.4971979064536933, ['111011110111011100111000', '100101110110011010100110']], 
[0.49669039383694424, ['010000101010010100110110', '000110011100110000111010']], [0.49657481845943346, ['110001000111101010101100', '110101011111010011000000']], [0.49651493142138214, ['111011110101110110001000', '010000101101010100100101']], [0.4958188916169864, ['110001110001100011101010', '111000111000001110100111']], [0.49324295192733053, ['000100011000011111011000', '011110000101101000000001']], [0.493108959350183, ['000100010101001110011010', '100000000101100010100111']], [0.4921658613458329, ['110101011000100010010010', '101100011100101001101001']], [0.4921006356769773, ['010010010000000111011000', '110101100010001111111110']], [0.4919531422253398, ['111000010101011011011111', '100010111110110011010000']], [0.49182007177827647, ['111001000100100001010011', '011010010100111101100101']], [0.491708056867424, ['011010100001110111011001', '110111111001111001101101']], [0.4885509974363045, ['100101101001100000101111', '110110111000101000101001']], [0.47787048805875665, ['101001111100000001101001', '110000111000110001101000']], [0.4753911295047857, ['010101111010110011011101', '101010110000111011111101']], [0.46980604588344066, ['110001011010111110111001', '100010101010000101110100']], [0.4678475431692213, ['101101110110000100110010', '010110101110011000010110']], [0.44893377098383047, ['101001111110110011011111', '101010100011110010110000']], [0.4375619418936983, ['010111110000010110101101', '010101010001001010010100']], [0.4235820879484121, ['011000111011001011101010', '101010011010010000101011']], [0.4145945206988787, ['100110000010001001101010', '011000110010101000011110']], [0.3806746119644495, ['100001001101001000011100', '010111101011000011011101']], [0.3686907763703363, ['011100110110101110010000', '100101100010011101111010']], [0.3477816576486814, ['011000000101110110010011', '011100100101010100011111']], [0.16263560725056742, ['100011100100001110111011', '011111011110000111001100']], [0.10465748888686299, ['100010110100001000011000', 
'011101110111001101110000']], [0.10351800098674369, ['011101100001111011010000', '011101011110101000010111']]]
melhor = [0, 0, 0]
pior = [1, 1, 1]
print(melhor)
for i in range(populacao.__len__()):
print(i, populacao[i])
if populacao[i][0] > melhor[0]: melhor = populacao[i]
if populacao[i][0] < pior[0]: pior = populacao[i]
print(melhor, pior)
diversidade_hamming_x = 0
diversidade_hamming_y = 0
for i in range(24):
diversidade_hamming_x += abs(int(melhor[1][0][i]) - int(pior[1][0][i]))
diversidade_hamming_y += abs(int(melhor[1][1][i]) - int(pior[1][1][i]))
print(diversidade_hamming_x, diversidade_hamming_y) | 474.833333 | 8,020 | 0.803323 | populacao = [[0.7187463206122816, ['011101001110001100100110', '100101001111000000001110']], [0.68102618759073, ['100101111000011110111011', '100101100010100110100100']], [0.6342542148451269, ['100111110001101100101110', '011011100011000111001000']], [0.6241986174388695, ['010111001001101001111110', '100001010010000101100001']], [0.6067886961900137, ['011011011110111011011100', '011100010010110010010000']], [0.5928225700022598, ['011111101011110001000011', '101001111011001001101100']], [0.584219196618116, ['100011111000000001001010', '010100100110111011001000']], [0.5833582107194295, ['010101101111111100111101', '100110010010010001010100']], [0.5472229567081915, ['011100100110110001111000', '101101100011010101110001']], [0.5329211648083871, ['100100101010000001101111', '001111100001100110011011']], [0.5326154278718859, ['011001010110101111001110', '100001011110110101010101']], [0.5282310619742139, ['100010111100101111100000', '110001110111011011111101']], [0.5214442858967496, ['010011101000111000101010', '101110011101010101101100']], [0.5196969217927273, ['101010011101000100110110', '001110111000010110001011']], [0.5172587209761046, ['010110001010010011100101', '001111110001101001011100']], [0.5145249560525812, ['110100100010111010010010', '010111111011100100101001']], [0.5132246833711624, ['110100111101000111000011', '011110101101011111100011']], [0.5127319356114696, ['101011000010001010100101', '001011101101110100011000']], [0.5123261530855516, ['010110111100011111101001', '001011101110000010010011']], [0.511792586739197, ['100111100011011101110001', '110010011101110001001101']], [0.5113004549955176, ['001111111000000110111101', '010011101110101110101001']], [0.5090593740706316, ['010000110011010001010010', '101110111000100101001100']], [0.5087896635485242, ['100001100010010011011000', '110001110100111010101110']], [0.5084744316358049, ['111000100010000101110111', 
'011010111100101110000001']], [0.5070905144251526, ['000110111011100011110111', '100111101101011111010011']], [0.5068205853526966, ['100100001110011111110000', '000101000111111011110110']], [0.5064607453442134, ['010010000111000111010110', '000111011111010101011100']], [0.5063263806377141, ['101100000111110010010011', '000110101001010001111000']], [0.5062346533156891, ['001011101100010101111101', '101000010010001110111111']], [0.5056703090925954, ['010100010111101101001100', '001100001110011000001111']], [0.5053438812924628, ['010100110011111100000101', '001000001110010000100010']], [0.505334187929147, ['100100011001100010001111', '000101010101111110010110']], [0.5049561175396251, ['101111010111010011100101', '001011000010111000000110']], [0.5042455849934674, ['111000101001110011001000', '101011101101110100010100']], [0.5041036223873756, ['010000011110000000110011', '111000110100010100000010']], [0.5037768518327882, ['100011011011111100010100', '000011111010001111001110']], [0.5037167968409552, ['111001000001111110010010', '001011101100010010010000']], [0.5036727677825407, ['011111010010010111110011', '111111000011011000100100']], [0.5035205973510747, ['111011000110001110110010', '101111001010100100101110']], [0.5033318890765699, ['000110011010101010101100', '110001100101001111011101']], [0.5032842065445811, ['110100011111100001110001', '000111010101011100101001']], [0.5032758702411534, ['110101100111001111111010', '001110001110110110000111']], [0.5032675929341657, ['000101001011001101000000', '001100100110110100100011']], [0.5029612751578049, ['010010111011010100111010', '000111010001011011111101']], [0.502740799162989, ['110000010100010111110110', '100000011101101000010100']], [0.5024640860265372, ['111111001111011111100001', '011101110000100000111000']], [0.5022702065127201, ['101111011001111011100110', '011010100101111000110111']], [0.5021512426803183, ['111011011011100010010100', '010101100101000110110101']], [0.5018871104159476, ['000100100101011110100011', 
'111100000101010100101111']], [0.5018462352965699, ['000000000101101101000000', '001011001010001100110111']], [0.5015561651248678, ['011101110111011001100111', '000000101101101100100000']], [0.501304728374417, ['101100001010100010110100', '001001101111101111011010']], [0.5012761169335559, ['101110111011110011111111', '111101110101101100000101']], [0.5009980399693558, ['010000011000011110001001', '110011111111010100110101']], [0.5009297316560789, ['111110101001011001010100', '111100110101011001110110']], [0.5008536157286538, ['111110001111000101100101', '000101110000000100101110']], [0.5007199352252496, ['101110101010111000101111', '101110110101101101001000']], [0.5006006776568536, ['000000100110001100001101', '111010111111010000010010']], [0.5005003983582097, ['110111101111101100011000', '001000100000010111100111']], [0.5004555945275694, ['000111011000011010101110', '101111001010100111010000']], [0.5003551274646579, ['100100110010110111011011', '110001101101010000010111']], [0.5002938326873326, ['001100111010111100011011', '111110011110100110101010']], [0.49994994549836447, ['000110101000001000010011', '001111001110111100010111']], [0.49961102429402826, ['111101011001010011001110', '111100011110101010010011']], [0.4989678975527242, ['101100101011011101110001', '011110001000101100001110']], [0.49891505125833413, ['001000001100101010011101', '111000000110110000010001']], [0.4988469295160771, ['101010101110011011011101', '000011100000001101100101']], [0.49825199163698425, ['000101010000010101001111', '001011010000111000101100']], [0.4980846796618078, ['100100111001001110100111', '000010111100101010011101']], [0.49795649100195205, ['100111110100110110010101', '111101011101010000110001']], [0.49794375436360994, ['001011010100001001001101', '111001000101001001001011']], [0.4978695994946748, ['110100011011101010001111', '110101001111000000000100']], [0.4974906338726385, ['101010111110111110100111', '110110010001111111000010']], [0.4971979064536933, 
['111011110111011100111000', '100101110110011010100110']], [0.49669039383694424, ['010000101010010100110110', '000110011100110000111010']], [0.49657481845943346, ['110001000111101010101100', '110101011111010011000000']], [0.49651493142138214, ['111011110101110110001000', '010000101101010100100101']], [0.4958188916169864, ['110001110001100011101010', '111000111000001110100111']], [0.49324295192733053, ['000100011000011111011000', '011110000101101000000001']], [0.493108959350183, ['000100010101001110011010', '100000000101100010100111']], [0.4921658613458329, ['110101011000100010010010', '101100011100101001101001']], [0.4921006356769773, ['010010010000000111011000', '110101100010001111111110']], [0.4919531422253398, ['111000010101011011011111', '100010111110110011010000']], [0.49182007177827647, ['111001000100100001010011', '011010010100111101100101']], [0.491708056867424, ['011010100001110111011001', '110111111001111001101101']], [0.4885509974363045, ['100101101001100000101111', '110110111000101000101001']], [0.47787048805875665, ['101001111100000001101001', '110000111000110001101000']], [0.4753911295047857, ['010101111010110011011101', '101010110000111011111101']], [0.46980604588344066, ['110001011010111110111001', '100010101010000101110100']], [0.4678475431692213, ['101101110110000100110010', '010110101110011000010110']], [0.44893377098383047, ['101001111110110011011111', '101010100011110010110000']], [0.4375619418936983, ['010111110000010110101101', '010101010001001010010100']], [0.4235820879484121, ['011000111011001011101010', '101010011010010000101011']], [0.4145945206988787, ['100110000010001001101010', '011000110010101000011110']], [0.3806746119644495, ['100001001101001000011100', '010111101011000011011101']], [0.3686907763703363, ['011100110110101110010000', '100101100010011101111010']], [0.3477816576486814, ['011000000101110110010011', '011100100101010100011111']], [0.16263560725056742, ['100011100100001110111011', '011111011110000111001100']], 
[0.10465748888686299, ['100010110100001000011000', '011101110111001101110000']], [0.10351800098674369, ['011101100001111011010000', '011101011110101000010111']]]
# Scan the population (entries of the form [fitness, [chrom_x, chrom_y]],
# where each chromosome is a '0'/'1' bitstring) for the fittest ("melhor")
# and least fit ("pior") individuals.
melhor = [0, 0, 0]  # sentinel below any real fitness value
pior = [1, 1, 1]    # sentinel above any real fitness value
print(melhor)
for i, individuo in enumerate(populacao):
    print(i, individuo)
    if individuo[0] > melhor[0]: melhor = individuo
    if individuo[0] < pior[0]: pior = individuo
print(melhor, pior)
# Hamming distance between the best and worst chromosomes, per axis.
# zip generalizes the original fixed range(24) to any chromosome length.
diversidade_hamming_x = sum(bx != px for bx, px in zip(melhor[1][0], pior[1][0]))
diversidade_hamming_y = sum(by != py for by, py in zip(melhor[1][1], pior[1][1]))
print(diversidade_hamming_x, diversidade_hamming_y) | true | true |
1c300766ce7df5c0b0d884a7c65d71f8177fd028 | 4,172 | py | Python | nfr_badge/src/nfr_badge.py | rrajj/gha-checklist-badges | 77785b93eadf6dc833d30b0e41f04f9b1e31aad8 | [
"MIT"
] | null | null | null | nfr_badge/src/nfr_badge.py | rrajj/gha-checklist-badges | 77785b93eadf6dc833d30b0e41f04f9b1e31aad8 | [
"MIT"
] | null | null | null | nfr_badge/src/nfr_badge.py | rrajj/gha-checklist-badges | 77785b93eadf6dc833d30b0e41f04f9b1e31aad8 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import typing
import re
import os
import argparse
import urllib.parse
class Checklist:
    """A markdown checklist file.

    Counts GitHub-style task-list items: ``- [x]`` (checked) and
    ``- [ ]`` (unchecked).
    """

    # Matches one task-list item; group 1 is 'x' (checked) or ' ' (unchecked).
    ITEM_RE = re.compile(r"- \[([ x])\] ", re.MULTILINE)

    def __init__(self, fname: str):
        self.fname = fname
        self.content = self.get_file_content()

    def get_name(self) -> str:
        """Return the checklist name (file basename without extension)."""
        return Checklist.get_name_from_fname(self.fname)

    def get_file_content(self) -> str:
        """Read and return the whole checklist file."""
        with open(self.fname, 'r') as f:
            return f.read()

    def get_counters(self) -> typing.Tuple[int, int, int]:
        """Return ``(checked, unchecked, total)`` item counts for this file."""
        return Checklist.get_counters_from_content(self.content)

    @staticmethod
    def get_counters_from_content(content: str) -> typing.Tuple[int, int, int]:
        """Count task-list items in markdown text.

        Returns ``(checked, unchecked, total)``; the regex only matches
        'x' or ' ' marks, so total is always their sum.
        """
        marks = [m.group(1) for m in Checklist.ITEM_RE.finditer(content)]
        checked = marks.count('x')
        unchecked = marks.count(' ')
        return checked, unchecked, checked + unchecked

    @staticmethod
    def get_name_from_fname(fname: str) -> str:
        """Return the basename of *fname* without its extension."""
        return os.path.splitext(os.path.basename(fname))[0]
class Badge:
    """Helpers for building shields.io badge URLs and embedding them in markdown."""

    @staticmethod
    def new_common_badge_url(badge_name: str, badge_value: str, badge_color: str):
        """Return a static shields.io badge URL.

        Hyphens in *badge_name* are escaped as ``--`` per the shields.io
        path convention; name and value are percent-encoded.
        """
        return (
            "https://img.shields.io/badge/%s-%s-%s" % (
                urllib.parse.quote(badge_name.replace('-', '--'), safe=''),
                urllib.parse.quote(badge_value, safe=''),
                badge_color
            )
        )

    @staticmethod
    def new_progress_badge_url(badge_name: str, total: int, passed: int) -> str:
        """Return a badge URL showing ``passed/total`` as a percentage.

        Color thresholds: >=70% green, >=30% yellow, otherwise red.
        """
        progress = int(passed / total * 100)
        color = "green" if (progress >= 70) else "yellow" if (progress >= 30) else "red"
        return Badge.new_common_badge_url(
            badge_name=badge_name,
            badge_value="%d%% %d/%d" % (
                progress,
                passed,
                total
            ),
            badge_color=color
        )

    @staticmethod
    def content_update_badge_url(content: str, badge_name: str, new_badge_url: str):
        """Update the URL of the ``![badge_name](...)`` image in *content*.

        If the badge is not present yet, a new badge image is inserted at
        the end of the first line of *content*.
        """
        regex = r"(\!\[%s\])\((.*?)\)" % badge_name
        subst = "\\1(%s)" % new_badge_url
        result = re.sub(regex, subst, content, count=0, flags=re.MULTILINE)
        found = re.findall(regex, content, re.MULTILINE)
        if len(found) > 0:
            return result
        # Badge not present: append it to the first line.
        # NOTE(review): searching content[1:] preserves the original off-by-one
        # slicing (insertion lands just before the first newline for typical
        # files); kept as-is to avoid changing insertion positions.
        new_line = content[1:].find("\n")
        # BUG FIX: the original built " \n" % (badge_name, new_badge_url),
        # which raises TypeError because the template has no placeholders.
        # Build the markdown image the replacement regex expects instead.
        badge_md = "![%s](%s) \n" % (badge_name, new_badge_url)
        return content[:new_line + 1] + badge_md + content[new_line + 1:]
class ReadmeMD:
    """A README file mirrored in memory.

    The file is read once at construction time; ``write_content`` persists
    new text to disk and keeps the in-memory copy in sync.
    """

    def __init__(self, fname: str):
        self.fname = fname
        self.content = self.get_content()

    def get_content(self) -> str:
        """Re-read the backing file and return its text."""
        return ReadmeMD.get_file_content(self.fname)

    def write_content(self, content: str):
        """Write *content* to the backing file and cache it on the instance."""
        ReadmeMD.write_file_content(self.fname, content)
        self.content = content

    @staticmethod
    def get_file_content(fname: str) -> str:
        """Return the full text of the file at *fname*."""
        with open(fname, 'r') as handle:
            text = handle.read()
        return text

    @staticmethod
    def write_file_content(fname, content: str):
        """Overwrite the file at *fname* with *content*."""
        with open(fname, 'w') as handle:
            handle.write(content)
def get_cli_args():
    """Parse command-line arguments.

    Returns a ``(readme, fnames)`` pair: the output readme file path and
    the list of checklist file paths.
    """
    parser = argparse.ArgumentParser(description='NFR')
    parser.add_argument('--readme', type=str, required=True, help='Output readme file')
    parser.add_argument('fnames', type=str, nargs='+', help='Checklist files')
    parsed = parser.parse_args()
    return parsed.readme, parsed.fnames
def main():
    """Entry point: refresh one progress badge per checklist in the readme.

    For each checklist file given on the command line, count its checked
    and total items and rewrite (or insert) the matching ``![<name>](...)``
    badge in the readme. The readme file is rewritten once per checklist.
    """
    readme_fname, fnames = get_cli_args()
    readme = ReadmeMD(readme_fname)
    for fname in fnames:
        checklist = Checklist(fname)
        # The badge name is the checklist's basename without extension.
        mdname = checklist.get_name()
        # Only checked and total are needed for the progress badge.
        checked, _, total = checklist.get_counters()
        readme.write_content(
            content=Badge.content_update_badge_url(
                content=readme.content,
                badge_name=mdname,
                new_badge_url=Badge.new_progress_badge_url(
                    badge_name=mdname,
                    total=total,
                    passed=checked,
                )
            )
        )


if __name__== "__main__":
    main()
| 29.8 | 113 | 0.575024 |
import typing
import re
import os
import argparse
import urllib.parse
class Checklist:
def __init__(self, fname: str):
self.fname = fname
self.content = self.get_file_content()
def get_name(self) -> str:
return Checklist.get_name_from_fname(self.fname)
def get_file_content(self) -> str:
with open(self.fname, 'r') as f:
return f.read()
def get_counters(self) -> (int, int, int):
return Checklist.get_counters_from_content(self.content)
@staticmethod
def get_counters_from_content(content: str) -> (int, int, int):
regex = r"- \[([ x])\] "
matches = re.finditer(regex, content, re.MULTILINE)
checked=int(0)
unchecked=int(0)
for _, match in enumerate(matches, start=1):
if match.group(1) == 'x':
checked+=1
continue
if match.group(1) == ' ':
unchecked+=1
continue
return checked, unchecked, (checked + unchecked)
@staticmethod
def get_name_from_fname(fname: str) -> str:
bn = os.path.basename(fname)
return os.path.splitext(bn)[0]
class Badge:
@staticmethod
def new_common_badge_url(badge_name: str, badge_value: str, badge_color: str):
return (
"https://img.shields.io/badge/%s-%s-%s" % (
urllib.parse.quote(badge_name.replace('-', '--'), safe=''),
urllib.parse.quote(badge_value, safe=''),
badge_color
)
)
@staticmethod
def new_progress_badge_url(badge_name: str, total: int, passed: int ) -> str:
progress = int(passed/total*100)
color = "green" if (progress>=70) else "yellow" if (progress>=30) else "red"
return Badge.new_common_badge_url(
badge_name=badge_name,
badge_value="%d%% %d/%d" % (
progress,
passed,
total
),
badge_color=color
)
@staticmethod
def content_update_badge_url(content: str, badge_name: str, new_badge_url: str):
regex = r"(\!\[%s\])\((.*?)\)" % badge_name
subst = "\\1(%s)" % new_badge_url
result = re.sub(regex, subst, content, 0, re.MULTILINE)
finded = re.findall(regex, content, re.MULTILINE)
new_line = content[1 : ].find("\n")
if len(finded) > 0:
return result
else:
return content[:new_line + 1] + " \n" % (badge_name, new_badge_url) + content[new_line + 1:]
class ReadmeMD:
def __init__(self, fname: str):
self.fname = fname
self.content = self.get_content()
def get_content(self) -> str:
return ReadmeMD.get_file_content(self.fname)
def write_content(self, content: str):
ReadmeMD.write_file_content(self.fname, content)
self.content=content
@staticmethod
def get_file_content(fname: str) -> str:
with open(fname, 'r') as f:
return f.read()
@staticmethod
def write_file_content(fname, content: str):
with open(fname, 'w') as f:
f.write(content)
def get_cli_args():
parser = argparse.ArgumentParser(description='NFR')
parser.add_argument('--readme', type=str, required=True, help='Output readme file')
parser.add_argument('fnames', type=str, nargs='+', help='Checklist files')
args = parser.parse_args()
return args.readme, args.fnames
def main():
readme_fname, fnames = get_cli_args()
readme = ReadmeMD(readme_fname)
for fname in fnames:
checklist = Checklist(fname)
mdname = checklist.get_name()
checked, _, total = checklist.get_counters()
readme.write_content(
content=Badge.content_update_badge_url(
content=readme.content,
badge_name=mdname,
new_badge_url=Badge.new_progress_badge_url(
badge_name=mdname,
total=total,
passed=checked,
)
)
)
if __name__== "__main__":
main()
| true | true |
1c3007fd1cfdf2397111890fc42efd0a2462af0b | 8,732 | py | Python | easyreg/seg_unet.py | norveclibalikci/easyreg-mirror | a16254733fe957cc4024923f8dce91412966a189 | [
"Apache-2.0"
] | null | null | null | easyreg/seg_unet.py | norveclibalikci/easyreg-mirror | a16254733fe957cc4024923f8dce91412966a189 | [
"Apache-2.0"
] | null | null | null | easyreg/seg_unet.py | norveclibalikci/easyreg-mirror | a16254733fe957cc4024923f8dce91412966a189 | [
"Apache-2.0"
] | null | null | null | from .modules import Seg_resid
from .utils import *
import torch.nn as nn
from data_pre.partition import partition
class SegUnet(nn.Module):
    """Patch-based segmentation network wrapping a ``Seg_resid`` U-Net.

    Training runs directly on pre-cut patches. At test time predictions are
    made patch-by-patch (in mini-batches) and re-assembled into a full-image
    label map via the ``partition`` helper. Optionally, test-time
    augmentation ensembling averages softmax predictions over pre-computed
    warped copies of the test image, each mapped back through its inverse
    transform.
    """

    def __init__(self, opt=None):
        super(SegUnet, self).__init__()
        # ``opt`` is a mermaid-style parameter dict: indexing with a
        # (key, default, doc-string) tuple/list returns the stored value
        # or registers/returns the default.
        self.opt = opt
        seg_opt = opt['tsk_set'][('seg',{},"settings for seg task")]
        self.is_train = opt['tsk_set']["train"]
        self.num_class = seg_opt['class_num',-1,"the num of class"]
        use_bn = seg_opt["use_bn", True, "use the batch normalization"]
        patch_sz = opt['dataset']['seg']['patch_size',[-1,-1,-1],"the size of input patch"]
        overlap_sz = opt['dataset']['seg']['partition']['overlap_size',[-1,-1,-1],"the size of input patch"]
        # The partition helper expects ITK ordering, so reverse the axes.
        patch_sz_itk = list(np.flipud(np.array(patch_sz)))
        overlap_sz_itk = list(np.flipud(np.array(overlap_sz)))
        # Full-image size; must be set via set_img_sz() before any assembly.
        self.img_sz = None
        self.unet = Seg_resid(self.num_class,bn=use_bn)
        self.print_count = 0
        self.partition = partition(opt['dataset']['seg']['partition'],patch_sz_itk,overlap_sz_itk)
        self.ensemble_during_the_test = opt['tsk_set']['seg'][("ensemble_during_the_test",False,"do test phase ensemble, which needs the test phase data augmentation already done")]

    def set_loss_fn(self, loss_fn):
        """Set the loss function object (must provide ``get_loss(output, gt)``)."""
        self.loss_fn = loss_fn

    def get_loss(self, output, gt):
        """Evaluate the configured loss on a prediction/ground-truth pair."""
        loss = self.loss_fn.get_loss(output,gt)
        return loss

    def check_if_update_lr(self):
        """Learning-rate schedule hook; this model never requests an update."""
        return False, None

    def set_img_sz(self, img_sz):
        """Record the full-image size used when assembling patch predictions."""
        self.img_sz = img_sz

    def forward(self, input, is_train=True):
        """Run the network.

        In training mode *input* is a batch of patches and the raw network
        output is returned. Otherwise prediction runs without gradients and
        the patches are assembled into a full-image result, using test-time
        ensembling when configured.
        """
        if is_train:
            output = self.unet(input)
        else:
            with torch.no_grad():
                if not self.is_train and self.ensemble_during_the_test:
                    output = self.get_assemble_ensemble(input)
                else:
                    output = self.get_assemble_pred(input)
        self.print_count += 1
        return output

    def get_assemble_pred(self, input, split_size=8):
        """Predict all patches (``split_size`` at a time) and assemble the
        arg-max label map for the full image."""
        output = []
        input_split = torch.split(input, split_size)
        for input_sub in input_split:
            res = self.forward(input_sub)
            if isinstance(res, list):
                # Some networks return intermediate outputs; keep the last.
                res = res[-1]
            output.append(res.detach().cpu())
        pred_patched = torch.cat(output, dim=0)
        # Arg-max over the class channel turns logits into hard labels.
        pred_patched = torch.max(pred_patched.data, 1)[1]
        output_np = self.partition.assemble(pred_patched,image_size=self.img_sz)
        return output_np

    def set_file_path(self, file_path, fname):
        """Record the source file path/name of the current test image
        (used to locate its augmented copies during ensembling)."""
        self.file_path =file_path
        self.fname = fname

    def get_assemble_pred_for_ensemble(self, input, split_size=8):
        """Like get_assemble_pred, but return the stacked raw (pre-argmax)
        patch outputs so they can be softmaxed and averaged later."""
        output = []
        input_split = torch.split(input, split_size)
        for input_sub in input_split:
            res = self.forward(input_sub)
            if isinstance(res, list):
                res = res[-1]
            output.append(res.detach().cpu())
        pred_patched = torch.cat(output, dim=0)
        return pred_patched

    def get_assemble_ensemble(self, input):
        """Test-time-augmentation ensemble prediction.

        Reads pre-computed warped copies of the current test image (and the
        matching inverse transforms) from disk, predicts each copy
        patch-wise, warps the softmax maps back to the original frame with
        the inverse transform, sums them (optionally including a weighted
        prediction on the unwarped image), and returns the arg-max labels.
        """
        import os
        from .reg_data_utils import read_txt_into_list, get_file_name
        from tools.image_rescale import save_image_with_given_reference
        import SimpleITK as sitk
        import torch
        import numpy as np
        from glob import glob
        from copy import deepcopy
        from mermaid.utils import compute_warped_image_multiNC
        patch_sz = self.opt['dataset']['seg']['patch_size', [-1, -1, -1], "the size of input patch"]
        overlap_sz = self.opt['dataset']['seg']['partition']['overlap_size', [-1, -1, -1], "the size of input patch"]
        option_p = self.opt['dataset']['seg'][('partition', {}, "settings for the partition")]
        patch_sz_itk = list(np.flipud(np.array(patch_sz)))
        overlap_sz_itk = list(np.flipud(np.array(overlap_sz)))
        # Fresh partitioner for the augmented copies (independent state).
        corr_partition_pool = deepcopy(partition(option_p, patch_sz_itk, overlap_sz_itk))
        def compute_warped_image_label(input, warped_pth, warped_type,inv_phi_pth,inv_switcher,num_max=50,weight_for_orig_img=0):
            # Collect the warped copies belonging to the current test image
            # (files whose name starts with "<fname>_") and pair each with
            # its inverse transform via the filename switcher.
            warped_pth_list = glob(os.path.join(warped_pth, warped_type))
            num_max = min(len(warped_pth_list),num_max)
            inv_phi_pth_list = [pth.replace(warped_pth,inv_phi_pth).replace(*inv_switcher) for pth in warped_pth_list]
            f = lambda pth: sitk.GetArrayFromImage(sitk.ReadImage(pth))
            fname = get_file_name(self.fname[0])
            f_warped = lambda x: get_file_name(x).find(fname+'_') == 0
            warped_sub_list = list(filter(f_warped, warped_pth_list))
            inv_phi_sub_list = list(filter(f_warped, inv_phi_pth_list))
            warped_sub_list = warped_sub_list[:num_max]
            inv_phi_sub_list = inv_phi_sub_list[:num_max]
            num_aug = len(warped_sub_list)
            warped_list = [f(pth) for pth in warped_sub_list]
            inv_phi_list = [f(pth) for pth in inv_phi_sub_list]
            # Stack as (num_aug, 1, D, H, W) and rescale intensities to [-1, 1].
            warped_img = np.stack(warped_list, 0)[:,None]
            #warped_img = torch.Tensor(warped_img)*2-1.
            warped_img = self.normalize_input(warped_img,None)#self.file_path[0][0])
            warped_img = torch.Tensor(warped_img)
            # Inverse maps come in ITK layout; transpose to torch layout.
            inv_phi = np.stack(inv_phi_list, 0)
            inv_phi = np.transpose(inv_phi, (0, 4, 3, 2, 1))
            inv_phi = torch.Tensor(inv_phi)
            img_input_sz = self.opt["dataset"]["img_after_resize"]
            differ_sz = any(np.array(warped_img.shape[2:]) != np.array(img_input_sz))
            sz = np.array(self.img_sz)
            # Normalized [0, 1] spacing used by compute_warped_image_multiNC.
            spacing = 1. / (sz - 1)
            output_np = np.zeros([1, self.num_class] + self.img_sz)
            if weight_for_orig_img!=0:
                # Seed the accumulator with a weighted prediction on the
                # original (unwarped) input.
                tzero_img = self.get_assemble_pred_for_ensemble(input)
                tzero_pred = self.partition.assemble_multi_torch(tzero_img, image_size=self.img_sz)
                output_np = tzero_pred.cpu().numpy() * float(round(weight_for_orig_img*num_aug))
            for i in range(num_aug):
                if differ_sz:
                    # Resample image and inverse map to the working size.
                    # NOTE(review): target [1, 3] + img_sz for a 1-channel
                    # image vs [1, 1] + img_sz for the map looks inconsistent
                    # — confirm resample_image's expected size argument.
                    warped_img_cur, _ = resample_image(warped_img[i:i+1].cuda(), [1, 1, 1], [1, 3] + self.img_sz)
                    inv_phi_cur, _ = resample_image(inv_phi[i:i+1].cuda(), [1, 1, 1], [1, 1] + self.img_sz)
                    warped_img_cur = warped_img_cur.detach().cpu()
                    inv_phi_cur = inv_phi_cur.detach().cpu()
                else:
                    warped_img_cur = warped_img[i:i+1]
                    inv_phi_cur = inv_phi[i:i+1]
                # Cut the warped copy into patches, predict, re-assemble,
                # softmax, then warp the probability map back to the
                # original frame and accumulate.
                sample = {"image":[warped_img_cur[0,0].numpy()]}
                sample_p =corr_partition_pool(sample)
                pred_patched = self.get_assemble_pred_for_ensemble(torch.Tensor(sample_p["image"]).cuda())
                pred_patched = self.partition.assemble_multi_torch(pred_patched, image_size=self.img_sz)
                pred_patched = torch.nn.functional.softmax(pred_patched,1)
                pred_patched = compute_warped_image_multiNC(pred_patched.cuda(), inv_phi_cur.cuda(),spacing, spline_order=1, zero_boundary=True)
                output_np += pred_patched.cpu().numpy()
            # Arg-max over accumulated (unnormalized) probabilities.
            res = torch.max(torch.Tensor(output_np), 1)[1]
            return res[None]
        seg_ensemble_opt = self.opt['tsk_set']['seg'][("seg_ensemble",{},"settings of test phase data ensemble")]
        warped_pth = seg_ensemble_opt[("warped_pth", None,"the folder path containing the warped image from the original image")]
        inv_phi_pth = seg_ensemble_opt[("inv_phi_pth",None,"the folder path containing the inverse transformation")]
        warped_type = seg_ensemble_opt[("warped_type","*_warped.nii.gz","the suffix of the augmented data")]
        inv_switcher = seg_ensemble_opt[("inv_switcher",["_warped.nii.gz","_inv_phi.nii.gz"],"the fname switcher from warped image to inverse transformation map")]
        num_max = seg_ensemble_opt[("num_max",20,"max num of augmentation for per test image")]
        weight_for_orig_img = seg_ensemble_opt[("weight_for_orig_img",0.0,"the weight of original image")]
        output_np = compute_warped_image_label(input, warped_pth, warped_type,inv_phi_pth,inv_switcher,num_max=num_max,weight_for_orig_img=weight_for_orig_img)
        return output_np

    def normalize_input(self,img,refer_img_path):
        """Linearly rescale *img* intensities to [-1, 1].

        The min/max are taken from the reference image at *refer_img_path*
        if given, otherwise from *img* itself.
        """
        import SimpleITK as sitk
        if refer_img_path is not None:
            refer_img = sitk.GetArrayFromImage(sitk.ReadImage(refer_img_path))
        else:
            refer_img = img
        min_intensity = refer_img.min()
        max_intensity = refer_img.max()
        normalized_img = (img - refer_img.min()) / (max_intensity - min_intensity)
        normalized_img = normalized_img * 2 - 1
        return normalized_img
| 46.946237 | 181 | 0.636853 | from .modules import Seg_resid
from .utils import *
import torch.nn as nn
from data_pre.partition import partition
class SegUnet(nn.Module):
def __init__(self, opt=None):
super(SegUnet, self).__init__()
self.opt = opt
seg_opt = opt['tsk_set'][('seg',{},"settings for seg task")]
self.is_train = opt['tsk_set']["train"]
self.num_class = seg_opt['class_num',-1,"the num of class"]
use_bn = seg_opt["use_bn", True, "use the batch normalization"]
patch_sz = opt['dataset']['seg']['patch_size',[-1,-1,-1],"the size of input patch"]
overlap_sz = opt['dataset']['seg']['partition']['overlap_size',[-1,-1,-1],"the size of input patch"]
patch_sz_itk = list(np.flipud(np.array(patch_sz)))
overlap_sz_itk = list(np.flipud(np.array(overlap_sz)))
self.img_sz = None
self.unet = Seg_resid(self.num_class,bn=use_bn)
self.print_count = 0
self.partition = partition(opt['dataset']['seg']['partition'],patch_sz_itk,overlap_sz_itk)
self.ensemble_during_the_test = opt['tsk_set']['seg'][("ensemble_during_the_test",False,"do test phase ensemble, which needs the test phase data augmentation already done")]
def set_loss_fn(self, loss_fn):
self.loss_fn = loss_fn
def get_loss(self, output, gt):
loss = self.loss_fn.get_loss(output,gt)
return loss
def check_if_update_lr(self):
return False, None
def set_img_sz(self, img_sz):
self.img_sz = img_sz
def forward(self, input, is_train=True):
if is_train:
output = self.unet(input)
else:
with torch.no_grad():
if not self.is_train and self.ensemble_during_the_test:
output = self.get_assemble_ensemble(input)
else:
output = self.get_assemble_pred(input)
self.print_count += 1
return output
def get_assemble_pred(self, input, split_size=8):
output = []
input_split = torch.split(input, split_size)
for input_sub in input_split:
res = self.forward(input_sub)
if isinstance(res, list):
res = res[-1]
output.append(res.detach().cpu())
pred_patched = torch.cat(output, dim=0)
pred_patched = torch.max(pred_patched.data, 1)[1]
output_np = self.partition.assemble(pred_patched,image_size=self.img_sz)
return output_np
def set_file_path(self, file_path, fname):
self.file_path =file_path
self.fname = fname
def get_assemble_pred_for_ensemble(self, input, split_size=8):
output = []
input_split = torch.split(input, split_size)
for input_sub in input_split:
res = self.forward(input_sub)
if isinstance(res, list):
res = res[-1]
output.append(res.detach().cpu())
pred_patched = torch.cat(output, dim=0)
return pred_patched
def get_assemble_ensemble(self, input):
import os
from .reg_data_utils import read_txt_into_list, get_file_name
from tools.image_rescale import save_image_with_given_reference
import SimpleITK as sitk
import torch
import numpy as np
from glob import glob
from copy import deepcopy
from mermaid.utils import compute_warped_image_multiNC
patch_sz = self.opt['dataset']['seg']['patch_size', [-1, -1, -1], "the size of input patch"]
overlap_sz = self.opt['dataset']['seg']['partition']['overlap_size', [-1, -1, -1], "the size of input patch"]
option_p = self.opt['dataset']['seg'][('partition', {}, "settings for the partition")]
patch_sz_itk = list(np.flipud(np.array(patch_sz)))
overlap_sz_itk = list(np.flipud(np.array(overlap_sz)))
corr_partition_pool = deepcopy(partition(option_p, patch_sz_itk, overlap_sz_itk))
def compute_warped_image_label(input, warped_pth, warped_type,inv_phi_pth,inv_switcher,num_max=50,weight_for_orig_img=0):
warped_pth_list = glob(os.path.join(warped_pth, warped_type))
num_max = min(len(warped_pth_list),num_max)
inv_phi_pth_list = [pth.replace(warped_pth,inv_phi_pth).replace(*inv_switcher) for pth in warped_pth_list]
f = lambda pth: sitk.GetArrayFromImage(sitk.ReadImage(pth))
fname = get_file_name(self.fname[0])
f_warped = lambda x: get_file_name(x).find(fname+'_') == 0
warped_sub_list = list(filter(f_warped, warped_pth_list))
inv_phi_sub_list = list(filter(f_warped, inv_phi_pth_list))
warped_sub_list = warped_sub_list[:num_max]
inv_phi_sub_list = inv_phi_sub_list[:num_max]
num_aug = len(warped_sub_list)
warped_list = [f(pth) for pth in warped_sub_list]
inv_phi_list = [f(pth) for pth in inv_phi_sub_list]
warped_img = np.stack(warped_list, 0)[:,None]
warped_img = self.normalize_input(warped_img,None)
warped_img = torch.Tensor(warped_img)
inv_phi = np.stack(inv_phi_list, 0)
inv_phi = np.transpose(inv_phi, (0, 4, 3, 2, 1))
inv_phi = torch.Tensor(inv_phi)
img_input_sz = self.opt["dataset"]["img_after_resize"]
differ_sz = any(np.array(warped_img.shape[2:]) != np.array(img_input_sz))
sz = np.array(self.img_sz)
spacing = 1. / (sz - 1)
output_np = np.zeros([1, self.num_class] + self.img_sz)
if weight_for_orig_img!=0:
tzero_img = self.get_assemble_pred_for_ensemble(input)
tzero_pred = self.partition.assemble_multi_torch(tzero_img, image_size=self.img_sz)
output_np = tzero_pred.cpu().numpy() * float(round(weight_for_orig_img*num_aug))
for i in range(num_aug):
if differ_sz:
warped_img_cur, _ = resample_image(warped_img[i:i+1].cuda(), [1, 1, 1], [1, 3] + self.img_sz)
inv_phi_cur, _ = resample_image(inv_phi[i:i+1].cuda(), [1, 1, 1], [1, 1] + self.img_sz)
warped_img_cur = warped_img_cur.detach().cpu()
inv_phi_cur = inv_phi_cur.detach().cpu()
else:
warped_img_cur = warped_img[i:i+1]
inv_phi_cur = inv_phi[i:i+1]
sample = {"image":[warped_img_cur[0,0].numpy()]}
sample_p =corr_partition_pool(sample)
pred_patched = self.get_assemble_pred_for_ensemble(torch.Tensor(sample_p["image"]).cuda())
pred_patched = self.partition.assemble_multi_torch(pred_patched, image_size=self.img_sz)
pred_patched = torch.nn.functional.softmax(pred_patched,1)
pred_patched = compute_warped_image_multiNC(pred_patched.cuda(), inv_phi_cur.cuda(),spacing, spline_order=1, zero_boundary=True)
output_np += pred_patched.cpu().numpy()
res = torch.max(torch.Tensor(output_np), 1)[1]
return res[None]
seg_ensemble_opt = self.opt['tsk_set']['seg'][("seg_ensemble",{},"settings of test phase data ensemble")]
warped_pth = seg_ensemble_opt[("warped_pth", None,"the folder path containing the warped image from the original image")]
inv_phi_pth = seg_ensemble_opt[("inv_phi_pth",None,"the folder path containing the inverse transformation")]
warped_type = seg_ensemble_opt[("warped_type","*_warped.nii.gz","the suffix of the augmented data")]
inv_switcher = seg_ensemble_opt[("inv_switcher",["_warped.nii.gz","_inv_phi.nii.gz"],"the fname switcher from warped image to inverse transformation map")]
num_max = seg_ensemble_opt[("num_max",20,"max num of augmentation for per test image")]
weight_for_orig_img = seg_ensemble_opt[("weight_for_orig_img",0.0,"the weight of original image")]
output_np = compute_warped_image_label(input, warped_pth, warped_type,inv_phi_pth,inv_switcher,num_max=num_max,weight_for_orig_img=weight_for_orig_img)
return output_np
def normalize_input(self,img,refer_img_path):
import SimpleITK as sitk
if refer_img_path is not None:
refer_img = sitk.GetArrayFromImage(sitk.ReadImage(refer_img_path))
else:
refer_img = img
min_intensity = refer_img.min()
max_intensity = refer_img.max()
normalized_img = (img - refer_img.min()) / (max_intensity - min_intensity)
normalized_img = normalized_img * 2 - 1
return normalized_img
| true | true |
1c30094813589d02854b0a9a96e186cc2ccb5a7f | 554 | py | Python | sendpdf/templatetags/pdftags.py | kutakitu/django-sendpdf | 9acf779a0ff476020926802c8615ece2932c98fc | [
"MIT"
] | 1 | 2021-02-03T02:53:49.000Z | 2021-02-03T02:53:49.000Z | sendpdf/templatetags/pdftags.py | kutakitu/django-sendpdf | 9acf779a0ff476020926802c8615ece2932c98fc | [
"MIT"
] | 7 | 2020-06-05T16:50:49.000Z | 2021-09-22T17:38:26.000Z | sendpdf/templatetags/pdftags.py | kutakitu/django-sendpdf | 9acf779a0ff476020926802c8615ece2932c98fc | [
"MIT"
] | null | null | null | """Template tags for the app"""
# pylint: disable=invalid-name
import os
from django import template
register = template.Library()
_DIR = os.path.abspath(os.path.dirname(__file__))
@register.simple_tag
def pdf_static(static_url, pdf=None):
    """Resolve a static asset reference for both HTML and PDF rendering.

    When *pdf* is given, return the asset's on-disk location inside this
    app's bundled ``static`` directory so the PDF renderer can read it
    directly; otherwise return the normal ``/static/...`` URL.
    """
    if pdf is None:
        return "/static/{}".format(static_url)
    return os.path.join(_DIR, os.pardir, "static", static_url)
| 23.083333 | 70 | 0.629964 |
import os
from django import template
register = template.Library()
_DIR = os.path.abspath(os.path.dirname(__file__))
@register.simple_tag
def pdf_static(static_url, pdf=None):
if pdf is not None:
file = os.path.join(
_DIR,
os.pardir,
"static",
static_url
)
return file
return "/static/{}".format(static_url)
| true | true |
1c300b0716c3271a0bbfefb3bb87eee655696e0d | 13,947 | py | Python | idaes/apps/caprese/util.py | carldlaird/idaes-pse | cc7a32ca9fa788f483fa8ef85f3d1186ef4a596f | [
"RSA-MD"
] | 112 | 2019-02-11T23:16:36.000Z | 2022-03-23T20:59:57.000Z | idaes/apps/caprese/util.py | carldlaird/idaes-pse | cc7a32ca9fa788f483fa8ef85f3d1186ef4a596f | [
"RSA-MD"
] | 621 | 2019-03-01T14:44:12.000Z | 2022-03-31T19:49:25.000Z | idaes/apps/caprese/util.py | carldlaird/idaes-pse | cc7a32ca9fa788f483fa8ef85f3d1186ef4a596f | [
"RSA-MD"
] | 154 | 2019-02-01T23:46:33.000Z | 2022-03-23T15:07:10.000Z | # -*- coding: utf-8 -*-
#################################################################################
# The Institute for the Design of Advanced Energy Systems Integrated Platform
# Framework (IDAES IP) was produced under the DOE Institute for the
# Design of Advanced Energy Systems (IDAES), and is copyright (c) 2018-2021
# by the software owners: The Regents of the University of California, through
# Lawrence Berkeley National Laboratory, National Technology & Engineering
# Solutions of Sandia, LLC, Carnegie Mellon University, West Virginia University
# Research Corporation, et al. All rights reserved.
#
# Please see the files COPYRIGHT.md and LICENSE.md for full copyright and
# license information.
#################################################################################
"""
A module of helper functions for working with flattened DAE models.
"""
from pyomo.environ import (
Constraint,
Var,
TerminationCondition,
SolverFactory,
)
from pyomo.common.collections import ComponentSet, ComponentMap
from pyomo.dae.flatten import flatten_dae_components
from pyomo.dae.set_utils import is_in_block_indexed_by
from pyomo.core.expr.visitor import identify_variables
from pyomo.core.base.constraint import _ConstraintData
from pyomo.core.base.block import _BlockData
from idaes.core.util.model_statistics import degrees_of_freedom
from idaes.core.util.dyn_utils import (
get_activity_dict,
deactivate_model_at,
get_implicit_index_of_set,
get_fixed_dict,
deactivate_constraints_unindexed_by,
)
from idaes.apps.caprese.common.config import NoiseBoundOption
import idaes.logger as idaeslog
import random
__author__ = "Robert Parker"
class CachedVarsContext(object):
    """ This is a simple class to cache the values of variables,
    for instance while a solve is performed, so that they may be loaded
    later.

    For example, to set values of disturbances different than those
    already loaded at t0 when solving for the steady state setpoint:

    >>> ctrl = nmpc.controller
    >>> disturbances = [ctrl.FIXED_BLOCK[0].var, ctrl.FIXED_BLOCK[1].var]
    >>> with CachedVarsContext(disturbances, [t0]):
    >>>     for d in disturbances:
    >>>         d[t0].set_value(1.5)
    >>>     ctrl.solve_setpoint(solver)

    """
    def __init__(self, varlist, tlist):
        # Accept a single time point as well as a list/tuple of them.
        # (The original `type(tlist) is not list` check wrapped a tuple in a
        # list, which then failed when the tuple was used as a subscript.)
        if not isinstance(tlist, (list, tuple)):
            tlist = [tlist]
        self.n_t = len(tlist)
        self.vars = varlist
        self.tlist = tlist
        # cache[i][j] holds vars[i][tlist[j]].value, filled on entry.
        self.cache = [[None] * self.n_t for _ in self.vars]

    def __enter__(self):
        # Snapshot the current values so they can be restored on exit.
        for i, var in enumerate(self.vars):
            for j, t in enumerate(self.tlist):
                self.cache[i][j] = var[t].value
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Restore the snapshotted values, even if an exception occurred.
        for i, var in enumerate(self.vars):
            for j, t in enumerate(self.tlist):
                var[t].set_value(self.cache[i][j])
def initialize_by_element_in_range(model, time, t_start, t_end,
time_linking_vars=[],
dae_vars=[],
max_linking_range=0,
**kwargs):
"""Function for solving a square model, time element-by-time element,
between specified start and end times.
Args:
model : Flowsheet model to solve
t_start : Beginning of timespan over which to solve
t_end : End of timespan over which to solve
Kwargs:
solver : Solver option used to solve portions of the square model
outlvl : idaes.logger output level
"""
solver = kwargs.pop('solver', SolverFactory('ipopt'))
outlvl = kwargs.pop('outlvl', idaeslog.NOTSET)
init_log = idaeslog.getInitLogger('nmpc', outlvl)
solver_log = idaeslog.getSolveLogger('nmpc', outlvl)
solve_initial_conditions = kwargs.pop('solve_initial_conditions', False)
#TODO: Move to docstring
# Variables that will be fixed for time points outside the finite element
# when constraints for a finite element are activated.
# For a "normal" process, these should just be differential variables
# (and maybe derivative variables). For a process with a (PID) controller,
# these should also include variables used by the controller.
# If these variables are not specified,
# Timespan over which these variables will be fixed, counting backwards
# from the first time point in the finite element (which will always be
# fixed)
# Should I specify max_linking_range as an integer number of finite
# elements, an integer number of time points, or a float in actual time
# units? Go with latter for now.
assert t_start in time.get_finite_elements()
assert t_end in time.get_finite_elements()
#assert degrees_of_freedom(model) == 0
# No need to check dof here as we will check right before each solve
#dae_vars = kwargs.pop('dae_vars', [])
if not dae_vars:
scalar_vars, dae_vars = flatten_dae_components(model, time, ctype=Var)
for var in scalar_vars:
var.fix()
deactivate_constraints_unindexed_by(model, time)
ncp = time.get_discretization_info()['ncp']
fe_in_range = [i for i, fe in enumerate(time.get_finite_elements())
if fe >= t_start and fe <= t_end]
t_in_range = [t for t in time if t >= t_start and t <= t_end]
fe_in_range.pop(0)
n_fe_in_range = len(fe_in_range)
was_originally_active = get_activity_dict(model)
was_originally_fixed = get_fixed_dict(model)
# Deactivate model
if not solve_initial_conditions:
time_list = [t for t in time]
deactivated = deactivate_model_at(model, time, time_list,
outlvl=idaeslog.ERROR)
else:
time_list = [t for t in time if t != time.first()]
deactivated = deactivate_model_at(model, time, time_list,
outlvl=idaeslog.ERROR)
assert degrees_of_freedom(model) == 0
with idaeslog.solver_log(solver_log, level=idaeslog.DEBUG) as slc:
results = solver.solve(model, tee=slc.tee)
if results.solver.termination_condition == TerminationCondition.optimal:
pass
else:
raise ValueError(
'Failed to solve for consistent initial conditions.'
)
deactivated[time.first()] = deactivate_model_at(model, time,
time.first(),
outlvl=idaeslog.ERROR)[time.first()]
# "Integration" loop
for i in fe_in_range:
t_prev = time.at((i-1)*ncp+1)
fe = [time.at(k) for k in range((i-1)*ncp+2, i*ncp+2)]
con_list = []
for t in fe:
# These will be fixed vars in constraints at t
# Probably not necessary to record at what t
# they occur
for comp in deactivated[t]:
if was_originally_active[id(comp)]:
comp.activate()
if not time_linking_vars:
if isinstance(comp, _ConstraintData):
con_list.append(comp)
elif isinstance(comp, _BlockData):
# Active here should be independent of whether block
# was active
con_list.extend(
list(comp.component_data_objects(Constraint,
active=True)))
if not time_linking_vars:
fixed_vars = []
for con in con_list:
for var in identify_variables(con.expr,
include_fixed=False):
# use var_locator/ComponentMap to get index somehow
t_idx = get_implicit_index_of_set(var, time)
if t_idx is None:
assert not is_in_block_indexed_by(var, time)
continue
if t_idx <= t_prev:
fixed_vars.append(var)
var.fix()
else:
fixed_vars = []
time_range = [t for t in time
if t_prev - t <= max_linking_range
and t <= t_prev]
time_range = [t_prev]
for _slice in time_linking_vars:
for t in time_range:
#if not _slice[t].fixed:
_slice[t].fix()
fixed_vars.append(_slice[t])
# Here I assume that the only variables that can appear in
# constraints at a different (later) time index are derivatives
# and differential variables (they do so in the discretization
# equations) and that they only participate at t_prev.
#
# This is not the case for, say, PID controllers, in which case
# I should pass in a list of "complicating variables," then fix
# them at all time points outside the finite element.
#
# Alternative solution is to identify_variables in each constraint
# that is activated and fix those belonging to a previous finite
# element. (Should not encounter variables belonging to a future
# finite element.)
# ^ This option is easier, less efficient
#
# In either case need to record whether variable was previously fixed
# so I know if I should unfix it or not.
for t in fe:
for _slice in dae_vars:
if not _slice[t].fixed:
# Fixed DAE variables are time-dependent disturbances,
# whose values should not be altered by this function.
_slice[t].set_value(_slice[t_prev].value)
assert degrees_of_freedom(model) == 0
with idaeslog.solver_log(solver_log, level=idaeslog.DEBUG) as slc:
results = solver.solve(model, tee=slc.tee)
if results.solver.termination_condition == TerminationCondition.optimal:
pass
else:
raise ValueError(
'Failed to solve for finite element %s' %i
)
for t in fe:
for comp in deactivated[t]:
comp.deactivate()
for var in fixed_vars:
if not was_originally_fixed[id(var)]:
var.unfix()
for t in time:
for comp in deactivated[t]:
if was_originally_active[id(comp)]:
comp.activate()
def get_violated_bounds(val, bounds):
""" This function tests a value against a lower and an upper bound,
returning which if either is violated, as well as a direction
that the value needs to move for feasibility.
Arguments:
val: Value to be tested
bounds: Tuple containing the lower, upper bounds
"""
lower = bounds[0]
upper = bounds[1]
if upper is not None:
if val > upper:
return (upper, -1)
if lower is not None:
if val < lower:
return (lower, 1)
return (None, 0)
class MaxDiscardError(Exception):
pass
def apply_noise(val_list, noise_params, noise_function):
"""
Applies noise to each value in a list of values and returns the result.
Noise is generated by a user-provided function that maps a value and
parameters to a random value.
"""
result = []
for val, params in zip(val_list, noise_params):
if type(params) is not tuple:
# better be a scalar
params = (params,)
result.append(noise_function(val, *params))
return result
def apply_bounded_noise_discard(val, params, noise_function, bounds,
max_number_discards):
i = 0
while i <= max_number_discards:
newval = noise_function(val, *params)
violated_bound, direction = get_violated_bounds(newval, bounds)
if violated_bound is None:
return newval
else:
# Discard.
i += 1
# This could be caught by the caller to raise a more useful
# error that includes the variable whose noise violates a
# bound.
raise MaxDiscardError(
'Max number of discards exceeded when applying noise.')
def apply_bounded_noise_push(val, params, noise_function, bounds,
bound_push):
newval = noise_function(val, *params)
violated_bound, direction = get_violated_bounds(newval, bounds)
if not violated_bound:
return newval
return violated_bound + bound_push*direction
def apply_bounded_noise_fail(val, params, noise_function, bounds):
newval = noise_function(val, *params)
violated_bound, direction = get_violated_bounds(newval, bounds)
if violated_bound:
raise RuntimeError(
'Applying noise caused a bound to be violated')
return newval
def apply_noise_with_bounds(val_list, noise_params, noise_function, bound_list,
bound_option=NoiseBoundOption.DISCARD, max_number_discards=5,
bound_push=0.0):
result = []
for val, params, bounds in zip(val_list, noise_params, bound_list):
if type(params) is not tuple:
# better be a scalar
# better check: if type(params) not in {sequence_types}...
params = (params,)
if bound_option == NoiseBoundOption.DISCARD:
newval = apply_bounded_noise_discard(val, params, noise_function,
bounds, max_number_discards)
elif bound_option == NoiseBoundOption.PUSH:
newval = apply_bounded_noise_push(val, params, noise_function,
bounds, bound_push)
elif bound_option == NoiseBoundOption.FAIL:
newval = apply_bounded_noise_fail(val, params, noise_function,
bounds)
else:
raise RuntimeError(
'Bound violation option not recognized')
result.append(newval)
return result
| 38.210959 | 81 | 0.617839 |
d(var)]:
var.unfix()
for t in time:
for comp in deactivated[t]:
if was_originally_active[id(comp)]:
comp.activate()
def get_violated_bounds(val, bounds):
    """Check ``val`` against a ``(lower, upper)`` bounds pair.

    Returns ``(bound, direction)``: the violated bound together with the
    sign in which ``val`` must move to become feasible, or ``(None, 0)``
    when no bound is violated.
    """
    lower, upper = bounds
    if upper is not None and val > upper:
        return (upper, -1)
    if lower is not None and val < lower:
        return (lower, 1)
    return (None, 0)
class MaxDiscardError(Exception):
    """Raised when noise sampling exceeds the allowed number of discards."""
    pass
def apply_noise(val_list, noise_params, noise_function):
    """Apply ``noise_function`` to every value in ``val_list``.

    ``noise_params`` supplies the extra arguments for each value; an entry
    that is not a tuple is treated as a single scalar parameter.
    """
    def _as_tuple(params):
        # Scalars are wrapped; tuples are splatted into the noise function.
        return params if type(params) is tuple else (params,)

    return [
        noise_function(value, *_as_tuple(params))
        for value, params in zip(val_list, noise_params)
    ]
def apply_bounded_noise_discard(val, params, noise_function, bounds,
                                max_number_discards):
    """Sample noise for ``val``, discarding out-of-bounds samples.

    Re-samples until the noisy value respects ``bounds``; after more than
    ``max_number_discards`` discarded samples, raises MaxDiscardError.
    """
    for _attempt in range(max_number_discards + 1):
        candidate = noise_function(val, *params)
        violated_bound, _direction = get_violated_bounds(candidate, bounds)
        if violated_bound is None:
            return candidate
    # Every allowed sample violated a bound. The caller may catch this and
    # re-raise with more context (e.g. which variable's noise failed).
    raise MaxDiscardError(
        'Max number of discards exceeded when applying noise.')
def apply_bounded_noise_push(val, params, noise_function, bounds,
                             bound_push):
    """Apply noise to ``val``; push an out-of-bounds result back inside.

    Returns the noisy value if it respects ``bounds``; otherwise returns
    the violated bound shifted into the feasible region by ``bound_push``.
    """
    newval = noise_function(val, *params)
    violated_bound, direction = get_violated_bounds(newval, bounds)
    # Bug fix: compare with None explicitly. A violated bound of 0 is
    # falsy, so the previous ``if not violated_bound`` accepted infeasible
    # values whenever the violated bound happened to be zero.
    # get_violated_bounds signals "no violation" specifically with (None, 0).
    if violated_bound is None:
        return newval
    return violated_bound + bound_push*direction
def apply_bounded_noise_fail(val, params, noise_function, bounds):
    """Apply noise to ``val``; raise RuntimeError on any bound violation."""
    newval = noise_function(val, *params)
    violated_bound, direction = get_violated_bounds(newval, bounds)
    # Bug fix: explicit None check. A violated bound equal to 0 is falsy,
    # so the previous truthiness test silently accepted values violating a
    # zero bound. (None, 0) is the documented "no violation" result.
    if violated_bound is not None:
        raise RuntimeError(
            'Applying noise caused a bound to be violated')
    return newval
def apply_noise_with_bounds(val_list, noise_params, noise_function, bound_list,
        bound_option=NoiseBoundOption.DISCARD, max_number_discards=5,
        bound_push=0.0):
    """Apply noise to each value while honouring its individual bounds.

    ``bound_option`` selects the violation policy: DISCARD re-samples (up
    to ``max_number_discards`` times), PUSH clips to the violated bound
    offset by ``bound_push``, FAIL raises immediately.
    """
    noisy_values = []
    for value, params, bounds in zip(val_list, noise_params, bound_list):
        if type(params) is not tuple:
            # A bare scalar parameter; normalize to a one-tuple.
            params = (params,)
        if bound_option == NoiseBoundOption.DISCARD:
            new_value = apply_bounded_noise_discard(
                value, params, noise_function, bounds, max_number_discards)
        elif bound_option == NoiseBoundOption.PUSH:
            new_value = apply_bounded_noise_push(
                value, params, noise_function, bounds, bound_push)
        elif bound_option == NoiseBoundOption.FAIL:
            new_value = apply_bounded_noise_fail(
                value, params, noise_function, bounds)
        else:
            raise RuntimeError(
                'Bound violation option not recognized')
        noisy_values.append(new_value)
    return noisy_values
| true | true |
1c300b9e48fb395b37683f0bf7739d58f7492cb3 | 107 | py | Python | lab15/account/admin.py | alejo8591/angular-labs | 5c55b966e832e5261554da3f41fc8786bab8dce6 | [
"MIT"
] | null | null | null | lab15/account/admin.py | alejo8591/angular-labs | 5c55b966e832e5261554da3f41fc8786bab8dce6 | [
"MIT"
] | null | null | null | lab15/account/admin.py | alejo8591/angular-labs | 5c55b966e832e5261554da3f41fc8786bab8dce6 | [
"MIT"
] | null | null | null | from django.contrib import admin
from account.models import UserProfile
# Expose UserProfile in the Django admin site using the default ModelAdmin.
admin.site.register(UserProfile)
| 17.833333 | 38 | 0.841121 | from django.contrib import admin
from account.models import UserProfile
admin.site.register(UserProfile)
| true | true |
1c300ea645b005cc85266fe54fb45e174b556b13 | 2,900 | py | Python | tests/utils/tqdm_test.py | medecau/sciencebeam-utils | 0253139f17c4208ccacdedf6d2c4eb2b062b7721 | [
"MIT"
] | 2 | 2019-07-17T14:53:07.000Z | 2021-09-15T04:47:47.000Z | tests/utils/tqdm_test.py | medecau/sciencebeam-utils | 0253139f17c4208ccacdedf6d2c4eb2b062b7721 | [
"MIT"
] | 108 | 2018-07-24T15:20:54.000Z | 2022-03-28T16:57:39.000Z | tests/utils/tqdm_test.py | medecau/sciencebeam-utils | 0253139f17c4208ccacdedf6d2c4eb2b062b7721 | [
"MIT"
] | 2 | 2020-02-07T10:58:48.000Z | 2021-09-01T10:15:32.000Z | from __future__ import absolute_import
import logging
import sys
from io import StringIO
from sciencebeam_utils.utils.tqdm import (
redirect_logging_to_tqdm,
tqdm_with_logging_redirect,
TqdmLoggingHandler
)
LOGGER = logging.getLogger(__name__)
class TestRedirectLoggingToTqdm:
    """Tests for the ``redirect_logging_to_tqdm`` context manager."""
    def test_should_add_and_remove_tqdm_handler(self):
        """A bare logger gains exactly one TqdmLoggingHandler, then loses it."""
        logger = logging.Logger('test')
        with redirect_logging_to_tqdm(logger=logger):
            assert len(logger.handlers) == 1
            assert isinstance(logger.handlers[0], TqdmLoggingHandler)
        assert not logger.handlers
    def test_should_remove_and_restore_console_handlers(self):
        """Handlers bound to stderr and stdout are swapped out, then restored."""
        logger = logging.Logger('test')
        stderr_console_handler = logging.StreamHandler(sys.stderr)
        # Bug fix: this handler was accidentally created on sys.stderr, so
        # the stdout case promised by the test name was never exercised.
        stdout_console_handler = logging.StreamHandler(sys.stdout)
        logger.handlers = [stderr_console_handler, stdout_console_handler]
        with redirect_logging_to_tqdm(logger=logger):
            assert len(logger.handlers) == 1
            assert isinstance(logger.handlers[0], TqdmLoggingHandler)
        assert logger.handlers == [stderr_console_handler, stdout_console_handler]
    def test_should_inherit_console_logger_formatter(self):
        """The tqdm handler adopts an existing console handler's formatter."""
        logger = logging.Logger('test')
        formatter = logging.Formatter('custom: %(message)s')
        console_handler = logging.StreamHandler(sys.stderr)
        console_handler.setFormatter(formatter)
        logger.handlers = [console_handler]
        with redirect_logging_to_tqdm(logger=logger):
            assert logger.handlers[0].formatter == formatter
    def test_should_not_remove_stream_handlers_not_fot_stdout_or_stderr(self):
        # NOTE: "fot" typo kept in the method name to preserve the test id.
        """Handlers on arbitrary streams are left in place."""
        logger = logging.Logger('test')
        stream_handler = logging.StreamHandler(StringIO())
        logger.addHandler(stream_handler)
        with redirect_logging_to_tqdm(logger=logger):
            assert len(logger.handlers) == 2
            assert logger.handlers[0] == stream_handler
            assert isinstance(logger.handlers[1], TqdmLoggingHandler)
        assert logger.handlers == [stream_handler]
class TestTqdmWithLoggingRedirect:
    """Tests for the ``tqdm_with_logging_redirect`` convenience wrapper."""
    def test_should_add_and_remove_handler_from_root_logger_by_default(self):
        """Without an explicit logger, the root logger is patched and restored."""
        original_handlers = list(logging.root.handlers)
        with tqdm_with_logging_redirect(total=1) as pbar:
            assert isinstance(logging.root.handlers[-1], TqdmLoggingHandler)
            LOGGER.info('test')
            pbar.update(1)
        assert logging.root.handlers == original_handlers
    def test_should_add_and_remove_handler_from_custom_logger(self):
        """A custom logger gets exactly one tqdm handler for the bar's lifetime."""
        logger = logging.Logger('test')
        with tqdm_with_logging_redirect(total=1, logger=logger) as pbar:
            assert len(logger.handlers) == 1
            assert isinstance(logger.handlers[0], TqdmLoggingHandler)
            LOGGER.info('test')
            pbar.update(1)
        assert not logger.handlers
| 40.277778 | 82 | 0.713793 | from __future__ import absolute_import
import logging
import sys
from io import StringIO
from sciencebeam_utils.utils.tqdm import (
redirect_logging_to_tqdm,
tqdm_with_logging_redirect,
TqdmLoggingHandler
)
LOGGER = logging.getLogger(__name__)
class TestRedirectLoggingToTqdm:
    """Tests for the ``redirect_logging_to_tqdm`` context manager."""
    def test_should_add_and_remove_tqdm_handler(self):
        """A bare logger gains exactly one TqdmLoggingHandler, then loses it."""
        logger = logging.Logger('test')
        with redirect_logging_to_tqdm(logger=logger):
            assert len(logger.handlers) == 1
            assert isinstance(logger.handlers[0], TqdmLoggingHandler)
        assert not logger.handlers
    def test_should_remove_and_restore_console_handlers(self):
        """Handlers bound to stderr and stdout are swapped out, then restored."""
        logger = logging.Logger('test')
        stderr_console_handler = logging.StreamHandler(sys.stderr)
        # Bug fix: this handler was accidentally created on sys.stderr, so
        # the stdout case promised by the test name was never exercised.
        stdout_console_handler = logging.StreamHandler(sys.stdout)
        logger.handlers = [stderr_console_handler, stdout_console_handler]
        with redirect_logging_to_tqdm(logger=logger):
            assert len(logger.handlers) == 1
            assert isinstance(logger.handlers[0], TqdmLoggingHandler)
        assert logger.handlers == [stderr_console_handler, stdout_console_handler]
    def test_should_inherit_console_logger_formatter(self):
        """The tqdm handler adopts an existing console handler's formatter."""
        logger = logging.Logger('test')
        formatter = logging.Formatter('custom: %(message)s')
        console_handler = logging.StreamHandler(sys.stderr)
        console_handler.setFormatter(formatter)
        logger.handlers = [console_handler]
        with redirect_logging_to_tqdm(logger=logger):
            assert logger.handlers[0].formatter == formatter
    def test_should_not_remove_stream_handlers_not_fot_stdout_or_stderr(self):
        # NOTE: "fot" typo kept in the method name to preserve the test id.
        """Handlers on arbitrary streams are left in place."""
        logger = logging.Logger('test')
        stream_handler = logging.StreamHandler(StringIO())
        logger.addHandler(stream_handler)
        with redirect_logging_to_tqdm(logger=logger):
            assert len(logger.handlers) == 2
            assert logger.handlers[0] == stream_handler
            assert isinstance(logger.handlers[1], TqdmLoggingHandler)
        assert logger.handlers == [stream_handler]
class TestTqdmWithLoggingRedirect:
    """Tests for the ``tqdm_with_logging_redirect`` convenience wrapper."""
    def test_should_add_and_remove_handler_from_root_logger_by_default(self):
        """The root logger is patched by default and restored on exit."""
        handlers_before = list(logging.root.handlers)
        with tqdm_with_logging_redirect(total=1) as pbar:
            assert isinstance(logging.root.handlers[-1], TqdmLoggingHandler)
            LOGGER.info('test')
            pbar.update(1)
        assert logging.root.handlers == handlers_before
    def test_should_add_and_remove_handler_from_custom_logger(self):
        """An explicitly passed logger gets exactly one tqdm handler."""
        custom_logger = logging.Logger('test')
        with tqdm_with_logging_redirect(total=1, logger=custom_logger) as pbar:
            assert len(custom_logger.handlers) == 1
            assert isinstance(custom_logger.handlers[0], TqdmLoggingHandler)
            LOGGER.info('test')
            pbar.update(1)
        assert not custom_logger.handlers
| true | true |
1c300eaca3c75f0dc625939b862d2b27551f5399 | 717 | py | Python | src/twoSumII/search.py | rajitbanerjee/leetcode | 720fcdd88d371e2d6592ceec8370a6760a77bb89 | [
"CC0-1.0"
] | null | null | null | src/twoSumII/search.py | rajitbanerjee/leetcode | 720fcdd88d371e2d6592ceec8370a6760a77bb89 | [
"CC0-1.0"
] | null | null | null | src/twoSumII/search.py | rajitbanerjee/leetcode | 720fcdd88d371e2d6592ceec8370a6760a77bb89 | [
"CC0-1.0"
] | 1 | 2021-04-28T18:17:55.000Z | 2021-04-28T18:17:55.000Z | from typing import List
class Solution:
    """LeetCode 167: Two Sum II on a sorted input array."""

    def twoSum(self, numbers: List[int], target: int) -> List[int]:
        """Return 1-based indices of the pair summing to ``target``.

        For each position, binary-search the complement in the suffix to
        its right. Returns ``[-1]`` when no pair exists.
        """
        for first, value in enumerate(numbers):
            complement = target - value
            lo, hi = first + 1, len(numbers) - 1
            while lo <= hi:
                probe = lo + (hi - lo) // 2
                if numbers[probe] == complement:
                    return [first + 1, probe + 1]
                if numbers[probe] < complement:
                    lo = probe + 1
                else:
                    hi = probe - 1
        return [-1]
if __name__ == '__main__':
    # Ad-hoc demonstration using the sample case from the problem statement.
    nums = [2, 7, 11, 15]
    target = 9
    print(f"Input: numbers = {nums}, target = {target}")
    print(f"Output: {Solution().twoSum(nums, target)}")
| 29.875 | 67 | 0.470014 | from typing import List
class Solution:
    """Two Sum II: find a pair in a sorted array adding up to the target."""

    def twoSum(self, numbers: List[int], target: int) -> List[int]:
        """Return the 1-based index pair summing to ``target``, or ``[-1]``.

        Binary-searches ``target - numbers[i]`` within the elements after
        position ``i``.
        """
        for idx, current in enumerate(numbers):
            wanted = target - current
            low = idx + 1
            high = len(numbers) - 1
            while low <= high:
                middle = low + (high - low) // 2
                candidate = numbers[middle]
                if candidate == wanted:
                    return [idx + 1, middle + 1]
                elif candidate < wanted:
                    low = middle + 1
                else:
                    high = middle - 1
        return [-1]
if __name__ == '__main__':
    # Ad-hoc demonstration using the sample case from the problem statement.
    nums = [2, 7, 11, 15]
    target = 9
    print(f"Input: numbers = {nums}, target = {target}")
    print(f"Output: {Solution().twoSum(nums, target)}")
| true | true |
1c3010de71681acb956f98ba650a1d48f13db2b5 | 136 | py | Python | app/tools/__init__.py | staneyffer/my_blog | db6220eddb591da5e6368744c9b86eca2cd9696e | [
"MIT"
] | null | null | null | app/tools/__init__.py | staneyffer/my_blog | db6220eddb591da5e6368744c9b86eca2cd9696e | [
"MIT"
] | 3 | 2020-03-24T15:41:12.000Z | 2021-02-02T21:43:38.000Z | app/tools/__init__.py | staneyffer/my_blog | db6220eddb591da5e6368744c9b86eca2cd9696e | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from flask import Blueprint
# The "tools" blueprint; the modules imported below attach their routes and
# Jinja helpers to it.
tools = Blueprint('tools', __name__)
# Imported for side effects (handler registration); placed after the
# blueprint is created to avoid a circular import.
from . import views, jinja_keys, pagination
| 17 | 43 | 0.705882 |
from flask import Blueprint
tools = Blueprint('tools', __name__)
from . import views, jinja_keys, pagination
| true | true |
1c3010f1d6e8270f52a5282e388f1f5fc39129db | 1,728 | py | Python | solum/api/handlers/service_handler.py | devdattakulkarni/test-solum | 4e9ddb82d217116aa2c30a6f2581080cbdfae325 | [
"Apache-2.0"
] | null | null | null | solum/api/handlers/service_handler.py | devdattakulkarni/test-solum | 4e9ddb82d217116aa2c30a6f2581080cbdfae325 | [
"Apache-2.0"
] | null | null | null | solum/api/handlers/service_handler.py | devdattakulkarni/test-solum | 4e9ddb82d217116aa2c30a6f2581080cbdfae325 | [
"Apache-2.0"
] | null | null | null | # Copyright 2014 - Rackspace
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from solum.api.handlers import handler
from solum import objects
class ServiceHandler(handler.Handler):
    """Fulfills a request on the service resource.

    Thin CRUD layer over the ``Service`` registry object; ``self.context``
    carries the request's user/tenant credentials for every operation.
    """
    def get(self, id):
        """Return the service identified by the UUID ``id``."""
        return objects.registry.Service.get_by_uuid(self.context, id)
    def update(self, id, data):
        """Apply the field values in ``data`` to service ``id`` and persist."""
        updated = objects.registry.Service.update_and_save(self.context,
                                                           id, data)
        return updated
    def delete(self, id):
        """Delete the service identified by the UUID ``id``."""
        db_obj = objects.registry.Service.get_by_uuid(self.context, id)
        db_obj.destroy(self.context)
    def create(self, data):
        """Create and persist a new service from the field values in ``data``.

        A fresh UUID is generated; ownership comes from the request context.
        """
        db_obj = objects.registry.Service()
        db_obj.update(data)
        db_obj.uuid = str(uuid.uuid4())
        db_obj.user_id = self.context.user
        db_obj.project_id = self.context.tenant
        db_obj.create(self.context)
        return db_obj
    def get_all(self):
        """Return all services visible in this context."""
        return objects.registry.ServiceList.get_all(self.context)
| 33.230769 | 75 | 0.66088 |
import uuid
from solum.api.handlers import handler
from solum import objects
class ServiceHandler(handler.Handler):
    """Fulfills a request on the service resource.

    Delegates persistence to ``objects.registry``; the request context
    carries the acting user and tenant.
    """
    def get(self, id):
        """Return the service identified by UUID ``id``."""
        return objects.registry.Service.get_by_uuid(self.context, id)

    def update(self, id, data):
        """Persist the field values in ``data`` onto service ``id``."""
        return objects.registry.Service.update_and_save(
            self.context, id, data)

    def delete(self, id):
        """Remove the service identified by UUID ``id``."""
        service = objects.registry.Service.get_by_uuid(self.context, id)
        service.destroy(self.context)

    def create(self, data):
        """Create, populate and persist a new service record."""
        service = objects.registry.Service()
        service.update(data)
        service.uuid = str(uuid.uuid4())
        service.user_id = self.context.user
        service.project_id = self.context.tenant
        service.create(self.context)
        return service

    def get_all(self):
        """Return every service visible in this context."""
        return objects.registry.ServiceList.get_all(self.context)
| true | true |
1c3010ff357f572a4fba07a968d31746208dc1e6 | 3,463 | py | Python | codechef_competition/nov_2020_long_challenge/ADADISH.py | souvikb07/DS-Algo-and-CP | 6333e8f409fb7e6e7a0c42a91d5cf98c7a4d29a2 | [
"MIT"
] | null | null | null | codechef_competition/nov_2020_long_challenge/ADADISH.py | souvikb07/DS-Algo-and-CP | 6333e8f409fb7e6e7a0c42a91d5cf98c7a4d29a2 | [
"MIT"
] | null | null | null | codechef_competition/nov_2020_long_challenge/ADADISH.py | souvikb07/DS-Algo-and-CP | 6333e8f409fb7e6e7a0c42a91d5cf98c7a4d29a2 | [
"MIT"
] | null | null | null | """
Ada and Dishes
Chef Ada is preparing N dishes (numbered 1 through N). For each valid i, it takes Ci minutes to prepare the i-th dish. The dishes can be prepared in any order.
Ada has a kitchen with two identical burners. For each valid i, to prepare the i-th dish, she puts it on one of the burners and after Ci minutes,
removes it from this burner; the dish may not be removed from the burner before those Ci minutes pass, because otherwise it cools down and gets spoiled.
Any two dishes may be prepared simultaneously, however, no two dishes may be on the same burner at the same time. Ada may remove a dish from a burner and
put another dish on the same burner at the same time.
What is the minimum time needed to prepare all dishes, i.e. reach the state where all dishes are prepared?
Input
The first line of the input contains a single integer T denoting the number of test cases. The description of T test cases follows.
The first line of each test case contains a single integer N.
The second line contains N space-separated integers C1,C2,…,CN.
Output
For each test case, print a single line containing one integer ― the minimum number of minutes needed to prepare all dishes.
Constraints
1≤T≤1,000
1≤N≤4
1≤Ci≤5 for each valid i
Subtasks
Subtask #1 (1 points): C1=C2=…=CN
Subtask #2 (99 points): original constraints
Example Input
3
3
2 2 2
3
1 2 3
4
2 3 4 5
Example Output
4
3
7
Explanation
Example case 1: Place the first two dishes on the burners, wait for two minutes, remove both dishes and prepare the last one on one burner.
Example case 2: Place the first and third dish on the burners. When the first dish is prepared, remove it and put the second dish on the same burner.
Example case 3: Place the third and fourth dish on the burners. When the third dish is prepared, remove it and put the second dish on the same burner.
Similarly, replace the fourth dish (when it is prepared) by the first dish on the other burner.
"""
# Scratch version of the greedy: two burners, always cooking the longest
# remaining dishes first; uses a hard-coded sample instead of stdin.
num_dishes=5
time_dishes=[2,3,4,5,1]
if num_dishes == 1:
    # One dish: the answer is simply its own cooking time.
    print(time_dishes[0])
else:
    time_dishes.sort(reverse=True)
    # print(time_dishes)
    # burners holds the remaining cook time of each dish currently cooking;
    # i indexes the next dish to start; time is the elapsed total.
    burners=[]
    i=0
    time = 0
    while i<=num_dishes:
        if i==0 and len(burners)==0:
            # Start: put the two longest dishes on the two burners.
            burners.extend(time_dishes[:2])
            print(burners)
            min_time = min(burners)
            print(min_time)
            # Advance the clock to the first completion and drop that dish.
            burners = [x - min_time for x in burners]
            print(burners)
            time = time+min_time
            print(time)
            # NOTE(review): if both burners finish simultaneously only one
            # zero is removed here — confirm that is intended.
            burners.remove(0)
            print(burners)
            i+=2
        elif i<num_dishes and len(burners)==1:
            print('step 2')
            # A burner is free: start the next dish, then advance the clock
            # to the next completion as above.
            burners.append(time_dishes[i])
            min_time = min(burners)
            burners = [x - min_time for x in burners]
            print(burners)
            time = time+min_time
            print(time)
            burners.remove(0)
            print(burners)
            print('not empty',burners)
            print(i)
            i+=1
            print(i)
            print('num dish', num_dishes)
            #break
        elif i==num_dishes and len(burners)==1:
            # Every dish has started; wait for the last one to finish.
            time= time+burners[0]
            print('step final', time)
            i+=1
    print('final time', time)
# cook your dish here
for _ in range(int(input())):
num_dishes = int(input())
time_dishes = list(map(int, input().split()))
if num_dishes == 1:
print(time_dishes[0])
else:
time_dishes.sort(reverse=True)
print(time_dishes) | 33.95098 | 159 | 0.663298 |
# Scratch version of the greedy: two burners, always cooking the longest
# remaining dishes first; uses a hard-coded sample instead of stdin.
num_dishes=5
time_dishes=[2,3,4,5,1]
if num_dishes == 1:
    # One dish: the answer is simply its own cooking time.
    print(time_dishes[0])
else:
    time_dishes.sort(reverse=True)
    # burners holds the remaining cook time of each dish currently cooking;
    # i indexes the next dish to start; time is the elapsed total.
    burners=[]
    i=0
    time = 0
    while i<=num_dishes:
        if i==0 and len(burners)==0:
            # Start: put the two longest dishes on the two burners.
            burners.extend(time_dishes[:2])
            print(burners)
            min_time = min(burners)
            print(min_time)
            # Advance the clock to the first completion and drop that dish.
            burners = [x - min_time for x in burners]
            print(burners)
            time = time+min_time
            print(time)
            # NOTE(review): if both burners finish simultaneously only one
            # zero is removed here — confirm that is intended.
            burners.remove(0)
            print(burners)
            i+=2
        elif i<num_dishes and len(burners)==1:
            print('step 2')
            # A burner is free: start the next dish, then advance the clock
            # to the next completion as above.
            burners.append(time_dishes[i])
            min_time = min(burners)
            burners = [x - min_time for x in burners]
            print(burners)
            time = time+min_time
            print(time)
            burners.remove(0)
            print(burners)
            print('not empty',burners)
            print(i)
            i+=1
            print(i)
            print('num dish', num_dishes)
        elif i==num_dishes and len(burners)==1:
            # Every dish has started; wait for the last one to finish.
            time= time+burners[0]
            print('step final', time)
            i+=1
    print('final time', time)
for _ in range(int(input())):
num_dishes = int(input())
time_dishes = list(map(int, input().split()))
if num_dishes == 1:
print(time_dishes[0])
else:
time_dishes.sort(reverse=True)
print(time_dishes) | true | true |
1c3011732b84a4b6984e1fbbcbbc0b70fadf6693 | 1,571 | py | Python | utils/exceptions.py | whateverfw/Fixtures-Builder | 39fa1631f2822eb08059047c1aa56016b79ce670 | [
"MIT"
] | 2 | 2020-07-03T12:25:53.000Z | 2020-07-08T20:42:49.000Z | utils/exceptions.py | whateverfw/Fixtures-Builder | 39fa1631f2822eb08059047c1aa56016b79ce670 | [
"MIT"
] | null | null | null | utils/exceptions.py | whateverfw/Fixtures-Builder | 39fa1631f2822eb08059047c1aa56016b79ce670 | [
"MIT"
] | null | null | null | class InputError(Exception):
def __init__(self) -> None:
self.message = f'''
Expected 2 arguments, got 1
You should run script via: <script_name> <file_name>
e.g. >> main.py somename.csv
'''
def __str__(self) -> str:
return self.message
class FileTypeError(Exception):
    """Raised when the input file's extension is not supported."""
    def __init__(self, allowed_extensions) -> None:
        self.message = f'''
        This type is not supported.
        Should be one of the following {allowed_extensions}
        '''
        # Hand the message to Exception so args/repr/pickling preserve it.
        super().__init__(self.message)
    def __str__(self) -> str:
        return self.message
class ModeTypeError(Exception):
    """Raised when the requested input mode is not supported."""
    def __init__(self, allowed_modes) -> None:
        self.message = f'''
        This input mode is not supported.
        Should be one of the following {allowed_modes}
        '''
        # Hand the message to Exception so args/repr/pickling preserve it.
        super().__init__(self.message)
    def __str__(self) -> str:
        return self.message
class MissingFileError(Exception):
    """Raised when the named input file is absent from the project directory."""
    def __init__(self, file) -> None:
        self.message = f'''
        File {file} was not found in project directory.
        Remember that file should be placed in the same directory as <main.py>
        '''
        # Hand the message to Exception so args/repr/pickling preserve it.
        super().__init__(self.message)
    def __str__(self) -> str:
        return self.message
class InvalidColumnsAmountError(Exception):
    """Raised when more columns are requested than the CSV file contains."""
    def __init__(self, user_columns_count, data_columns_count) -> None:
        self.message = f'''
        Number of columns that you specified - {user_columns_count} is greater than
        number of columns in csv file - {data_columns_count}
        '''
        # Hand the message to Exception so args/repr/pickling preserve it.
        super().__init__(self.message)
    def __str__(self) -> str:
        return self.message
class InputError(Exception):
    """Raised when the script is launched without the input-file argument."""
    def __init__(self) -> None:
        # Plain string: the former f-string had no placeholders.
        self.message = '''
        Expected 2 arguments, got 1
        You should run script via: <script_name> <file_name>
        e.g. >> main.py somename.csv
        '''
        # Hand the message to Exception so args/repr/pickling preserve it.
        super().__init__(self.message)
    def __str__(self) -> str:
        return self.message
class FileTypeError(Exception):
    """Raised when the input file's extension is not supported."""
    def __init__(self, allowed_extensions) -> None:
        self.message = f'''
        This type is not supported.
        Should be one of the following {allowed_extensions}
        '''
        # Hand the message to Exception so args/repr/pickling preserve it.
        super().__init__(self.message)
    def __str__(self) -> str:
        return self.message
class ModeTypeError(Exception):
    """Raised when the requested input mode is not supported."""
    def __init__(self, allowed_modes) -> None:
        self.message = f'''
        This input mode is not supported.
        Should be one of the following {allowed_modes}
        '''
        # Hand the message to Exception so args/repr/pickling preserve it.
        super().__init__(self.message)
    def __str__(self) -> str:
        return self.message
class MissingFileError(Exception):
    """Raised when the named input file is absent from the project directory."""
    def __init__(self, file) -> None:
        self.message = f'''
        File {file} was not found in project directory.
        Remember that file should be placed in the same directory as <main.py>
        '''
        # Hand the message to Exception so args/repr/pickling preserve it.
        super().__init__(self.message)
    def __str__(self) -> str:
        return self.message
class InvalidColumnsAmountError(Exception):
    """Raised when more columns are requested than the CSV file contains."""
    def __init__(self, user_columns_count, data_columns_count) -> None:
        self.message = f'''
        Number of columns that you specified - {user_columns_count} is greater than
        number of columns in csv file - {data_columns_count}
        '''
        # Hand the message to Exception so args/repr/pickling preserve it.
        super().__init__(self.message)
    def __str__(self) -> str:
        return self.message
| true | true |
1c30117d7ec4fe8e22b3743475611d1c2b53cd5f | 1,669 | py | Python | odoons/utils/config.py | jiksaa/odoons | 6845b5cd1ad73a1d44f04772b63431c5a54cfca7 | [
"MIT"
] | null | null | null | odoons/utils/config.py | jiksaa/odoons | 6845b5cd1ad73a1d44f04772b63431c5a54cfca7 | [
"MIT"
] | 3 | 2021-10-05T14:29:51.000Z | 2021-10-17T21:52:41.000Z | odoons/utils/config.py | jiksaa/odoons | 6845b5cd1ad73a1d44f04772b63431c5a54cfca7 | [
"MIT"
] | null | null | null | import os
from configparser import ConfigParser, ExtendedInterpolation
from ruamel.yaml import YAML
# Per-addon configuration key: whether to install the addon's requirements
# (presumably via pip — confirm against the addon-handling code).
ADDONS_REQ_INSTALL_CONFIG = "install-requirements"
# Keys recognized in the top-level ``options`` mapping of an odoons file.
OPT_APPLY_REQS = "apply-requirements"
OPT_INSTALL_ODOO = "install-odoo-command"
OPT_CONF_TEMPLATE = "config-template"
OPT_CONF_DIR = "config-directory"
OPT_BIN_DIR = "bin-directory"
# Values assumed when an option is not present in the configuration file.
DEFAULT_OPTIONS = {
    OPT_APPLY_REQS: True,
    OPT_INSTALL_ODOO: True,
    OPT_CONF_TEMPLATE: "odoo.cfg.template",
    OPT_CONF_DIR: "etc",
    OPT_BIN_DIR: "bin",
}
def load_odoons_config(path):
    """Load and validate an odoons YAML configuration file.

    Returns a dict with the required ``odoo`` section plus ``options`` and
    ``addons`` (defaulting to an empty dict/list respectively).

    Raises ValueError if the file is empty or lacks an ``odoons`` section.
    """
    yaml = YAML(typ="safe")
    with open(path, "r") as file:
        odoons_file = yaml.load(file)
    # ``yaml.load`` returns None for an empty file; the old membership test
    # then raised a confusing TypeError. ValueError is a backward-compatible
    # narrowing of the former bare Exception (callers catching Exception
    # still catch it).
    if not odoons_file or "odoons" not in odoons_file:
        raise ValueError("missing odoons section in {}".format(path))
    section = odoons_file["odoons"]
    return {
        'odoo': section["odoo"],
        "options": section.get("options", {}),
        "addons": section.get("addons", []),
    }
def get_config_parser():
    """Build a lenient ``ConfigParser``.

    Uses ``${section:option}`` extended interpolation and non-strict
    parsing (duplicate sections/options are tolerated).
    """
    parser = ConfigParser(
        interpolation=ExtendedInterpolation(),
        strict=False,
    )
    return parser
def get_git_addons_path(conf):
    """Return the absolute addons path for an addons configuration entry.

    ``conf`` follows the addons configuration structure. A truthy
    ``standalone`` entry means the Git repository holds a single standalone
    module, so the returned path points at that module's subdirectory
    instead of the repository root.
    """
    standalone = conf.get("standalone")
    if standalone:
        return os.path.abspath(os.path.join(conf["path"], standalone))
    return os.path.abspath(conf["path"])
| 29.280702 | 86 | 0.705812 | import os
from configparser import ConfigParser, ExtendedInterpolation
from ruamel.yaml import YAML
# Per-addon configuration key: whether to install the addon's requirements
# (presumably via pip — confirm against the addon-handling code).
ADDONS_REQ_INSTALL_CONFIG = "install-requirements"
# Keys recognized in the top-level ``options`` mapping of an odoons file.
OPT_APPLY_REQS = "apply-requirements"
OPT_INSTALL_ODOO = "install-odoo-command"
OPT_CONF_TEMPLATE = "config-template"
OPT_CONF_DIR = "config-directory"
OPT_BIN_DIR = "bin-directory"
# Values assumed when an option is not present in the configuration file.
DEFAULT_OPTIONS = {
    OPT_APPLY_REQS: True,
    OPT_INSTALL_ODOO: True,
    OPT_CONF_TEMPLATE: "odoo.cfg.template",
    OPT_CONF_DIR: "etc",
    OPT_BIN_DIR: "bin",
}
def load_odoons_config(path):
    """Read an odoons YAML file and return its odoo/options/addons parts."""
    parser = YAML(typ="safe")
    with open(path, "r") as file:
        document = parser.load(file)
    if "odoons" not in document:
        raise Exception("missing odoons section in {}".format(path))
    section = document["odoons"]
    return {
        'odoo': section["odoo"],
        "options": section.get("options", {}),
        "addons": section.get("addons", []),
    }
def get_config_parser():
return ConfigParser(interpolation=ExtendedInterpolation(), strict=False)
def get_git_addons_path(conf):
abspath = os.path.abspath(conf["path"])
if "standalone" in conf and conf["standalone"]:
abspath = os.path.abspath(os.path.join(conf["path"], conf["standalone"]))
return abspath
| true | true |
1c3011a1aef962c0d178f92f5ead1609884ae9b8 | 6,176 | py | Python | embedded_gremlin/predicate/base.py | CODAIT/embedded-gremlin | cdeb3b21204fee257b3e53f1afbd3efdca20a29b | [
"Apache-2.0"
] | null | null | null | embedded_gremlin/predicate/base.py | CODAIT/embedded-gremlin | cdeb3b21204fee257b3e53f1afbd3efdca20a29b | [
"Apache-2.0"
] | 1 | 2020-08-18T20:51:13.000Z | 2020-08-18T21:56:33.000Z | embedded_gremlin/predicate/base.py | CODAIT/embedded-gremlin | cdeb3b21204fee257b3e53f1afbd3efdca20a29b | [
"Apache-2.0"
] | 1 | 2020-08-18T21:57:21.000Z | 2020-08-18T21:57:21.000Z | #
# Copyright (c) 2020 IBM Corp.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# base.py
#
# Abstract base classes for predicates.
from abc import ABC
from typing import Iterator
import pandas as pd
import numpy as np
from typing import *
from embedded_gremlin.traversal.base import GraphTraversal
class VertexPredicate:
    """Boolean predicate evaluated over individual vertices of the graph.

    Subclasses implement :meth:`__call__`; this base class handles the
    recursive propagation of alias binding and ``by`` modulation through
    the expression tree formed by the child predicates.
    """

    def __init__(self, *children: "VertexPredicate"):
        # Child predicates form the expression tree below this node.
        self._children = children

    def __call__(self, vertices: pd.DataFrame) -> np.ndarray:
        """Evaluate the predicate.

        :param vertices: DataFrame of vertices on which to apply the predicate
        :return: numpy Boolean mask containing `True` for each row that
            satisfies the predicate
        """
        raise NotImplementedError("Subclasses must implement this method.")

    def bind_aliases(self, parent: GraphTraversal) -> None:
        """Recursively bind aliases for this node and all of its children.

        :param parent: Tail node in the current path. Must already be
            computed.
        """
        self.bind_aliases_self(parent)
        for child in self._children:
            child.bind_aliases(parent)

    def bind_aliases_self(self, parent: GraphTraversal) -> None:
        """Hook for subclasses that reference other nodes of the path.

        :param parent: Tail node in the current path. Must already be
            computed.
        """
        pass

    def modulate(self, modulator: Iterator[str]) -> None:
        """Recursively apply ``by`` modulators to the expression tree.

        :param modulator: Infinite iterator backed by a circular buffer of
            string-valued modulators, consumed round-robin by the tree
            rooted at this node.
        """
        self.modulate_self(modulator)
        for child in self._children:
            child.modulate(modulator)

    def modulate_self(self, modulator: Iterator[str]) -> None:
        """Hook for subclasses that consume elements of the modulators stream.

        :param modulator: Infinite iterator backed by a circular buffer of
            string-valued modulators.
        """
        pass
class ColumnPredicate(VertexPredicate, ABC):
    """VertexPredicate that reads a single, late-bound column.

    The target column is supplied by a ``by`` modulator (as in a Gremlin
    ``has`` step) rather than being fixed at construction time.
    """

    def __init__(self):
        VertexPredicate.__init__(self)
        # Filled in by modulate_self() once the modulators are known.
        self._target_col = None

    def modulate_self(self, modulator: Iterator[str]) -> None:
        # Consume exactly one modulator: the name of the column to test.
        self._target_col = next(modulator)

    @property
    def target_col(self) -> str:
        """Name of the column on which this predicate will be applied."""
        return self._target_col
class BinaryPredicate(VertexPredicate, ABC):
    """
    Abstract base class for Gremlin binary predicates.

    Compares a column of the current vertices (left input) against either a
    column of an aliased vertex set or a scalar-valued step (right input).
    Both column names are selected late, via ``by`` modulators.
    """
    def __init__(self, target_alias: str, *children: "BinaryPredicate"):
        """
        :param target_alias: Name of the second vertex to compare against.
        :param *children: Optional set of child predicates for propagating bindings
        """
        VertexPredicate.__init__(self, *children)
        self._target_alias = target_alias
        # Column names below are filled in by modulate_self(); the right
        # input Series is filled in by bind_aliases_self().
        self._left_col = None  # Type: str
        self._right_col = None  # Type: str
        self._right_input = None  # Type: pd.Series
    def bind_aliases_self(self, parent: GraphTraversal) -> None:
        """Resolve the right-hand input from the alias on the current path.

        If a right column name was modulated, the alias is treated as a
        vertex set and that column is extracted; otherwise the alias is
        assumed to name a scalar-valued step.
        """
        if self._right_col is not None:
            vertices = parent.alias_to_vertices(self._target_alias)
            self._right_input = vertices[self._right_col]
        else:
            # self._target_alias references a scalar-valued step
            _, self._right_input = parent.alias_to_step(self._target_alias)
        # The inputs are views on the vertices tables, so we need to reset the
        # Pandas indexes to prevent any vectorized operations in the subclass
        # from matching rows based on misaligned indexes.
        self._right_input = self._right_input.reset_index(drop=True)
    def modulate_self(self, modulator: Iterator[str]) -> None:
        # Consumes two modulators: left (current vertices) column, then
        # right (aliased) column. The right one may be None (scalar step).
        self._left_col = next(modulator)
        self._right_col = next(modulator)
        if self._left_col is None:
            raise ValueError(f"Attempted to modulate first input column of {self} to "
                             f"`None`. Currently only string values are supported, "
                             f"because the first (left) input of a binary predicate "
                             f"must be of type vertex.")
    def left_input(self, vertices: pd.DataFrame) -> pd.Series:
        """
        :param vertices: DataFrame of vertices on which to apply the predicate,
        same as eponymous argument to `VertexPredicate.__call__()`.
        :return: First input to the binary predicate, taking into account any `by`
        modulators applied to this predicate.
        """
        return vertices[self._left_col]
    def right_input(self) -> pd.Series:
        """
        :return: Second input to the binary predicate, taking into account the
        values of `self.target_alias` and any `by` modulators applied to this
        predicate.
        """
        return self._right_input
    @property
    def target_alias(self) -> str:
        """
        :return: Name of the alias that the predicate uses to acquire the second
        argument to the binary predicate
        """
        return self._target_alias
| 35.291429 | 86 | 0.655602 |
from abc import ABC
from typing import Iterator
import pandas as pd
import numpy as np
from typing import *
from embedded_gremlin.traversal.base import GraphTraversal
class VertexPredicate:
def __init__(self, *children: "VertexPredicate"):
self._children = children
def __call__(self, vertices: pd.DataFrame) -> np.ndarray:
raise NotImplementedError("Subclasses must implement this method.")
def bind_aliases(self, parent: GraphTraversal) -> None:
self.bind_aliases_self(parent)
for c in self._children:
c.bind_aliases(parent)
def bind_aliases_self(self, parent: GraphTraversal) -> None:
pass
def modulate(self, modulator: Iterator[str]) -> None:
self.modulate_self(modulator)
for c in self._children:
c.modulate(modulator)
def modulate_self(self, modulator: Iterator[str]) -> None:
pass
class ColumnPredicate(VertexPredicate, ABC):
def __init__(self):
VertexPredicate.__init__(self)
self._target_col = None
def modulate_self(self, modulator: Iterator[str]) -> None:
self._target_col = next(modulator)
@property
def target_col(self) -> str:
return self._target_col
class BinaryPredicate(VertexPredicate, ABC):
def __init__(self, target_alias: str, *children: "BinaryPredicate"):
VertexPredicate.__init__(self, *children)
self._target_alias = target_alias
self._left_col = None
self._right_col = None
self._right_input = None
def bind_aliases_self(self, parent: GraphTraversal) -> None:
if self._right_col is not None:
vertices = parent.alias_to_vertices(self._target_alias)
self._right_input = vertices[self._right_col]
else:
_, self._right_input = parent.alias_to_step(self._target_alias)
self._right_input = self._right_input.reset_index(drop=True)
def modulate_self(self, modulator: Iterator[str]) -> None:
self._left_col = next(modulator)
self._right_col = next(modulator)
if self._left_col is None:
raise ValueError(f"Attempted to modulate first input column of {self} to "
f"`None`. Currently only string values are supported, "
f"because the first (left) input of a binary predicate "
f"must be of type vertex.")
def left_input(self, vertices: pd.DataFrame) -> pd.Series:
return vertices[self._left_col]
def right_input(self) -> pd.Series:
return self._right_input
@property
def target_alias(self) -> str:
return self._target_alias
| true | true |
1c3011a5b4d5c4eb50c3cd4b0799bbb5202d5c1f | 2,029 | py | Python | api/tests/test_elections.py | gsbevilaqua/elections | 3675cbac34bdc691cb9530a05c857cf7bbdd6bb3 | [
"BSD-3-Clause"
] | null | null | null | api/tests/test_elections.py | gsbevilaqua/elections | 3675cbac34bdc691cb9530a05c857cf7bbdd6bb3 | [
"BSD-3-Clause"
] | null | null | null | api/tests/test_elections.py | gsbevilaqua/elections | 3675cbac34bdc691cb9530a05c857cf7bbdd6bb3 | [
"BSD-3-Clause"
] | null | null | null | import os,sys,inspect
current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(current_dir)
sys.path.insert(0, parent_dir)
from Elections import Elections
from nose.tools import assert_equals
elec = None
elec2 = None
def setup_module(module):
    """Build the module-level Elections fixtures shared by all tests.

    ``elec`` is a bare 4-candidate election (not yet populated);
    ``elec2`` is a fully initialised 5-candidate election with a single
    voter profile whose scores [10, 5, 0, -5, -10] are relied upon by
    test_calculate_mean().
    """
    global elec, elec2
    print ("") # this is to get a newline after the dots
    print ("Setup...")
    elec = Elections(1000, [0, 0, 0, 0], 1)
    elec.reset()
    elec2 = Elections(1000, [0, 0, 0, 0, 0], 1, voter_profiles = [{"pop_percentage": 100, "scores": [10, 5, 0, -5, -10]}])
    elec2.reset()
    elec2.create_candidates()
    elec2.create_voters()
def teardown_module(module):
    """Nose teardown hook; nothing to release beyond logging."""
    print ("")
    print ("Teardown...")
def test_create_candidates():
    """create_candidates() must register one candidate and one vote slot per seed entry."""
    global elec
    elec.create_candidates()
    assert_equals(len(elec.candidates), 4)
    assert_equals(len(elec.votes), 4)
def test_create_voters():
    """create_voters() must create exactly the requested number of voters.

    Also exercises the profile-based path: two 50% profiles must still sum
    to the full population.
    """
    global elec
    elec.create_voters()
    assert_equals(len(elec.voters), 1000)
    # Same check with explicit voter profiles (50/50 split).
    elec3 = Elections(1000, [0, 0, 0, 0], 1, voter_profiles = [{"pop_percentage": 50, "scores": [-10, -5, 5, 10]}, {"pop_percentage": 50, "scores": [6, 3, 1, 0]}])
    elec3.reset()
    elec3.create_voters()
    assert_equals(len(elec3.voters), 1000)
def test_account_for_coalitions():
    """_account_for_coalitions() must not change the number of voters."""
    global elec2
    length = len(elec2.voters)
    elec2._account_for_coalitions()
    assert_equals(len(elec2.voters), length)
def test_sort_ranks():
    """sort_ranks() must populate the sorted_voters structure."""
    global elec2
    elec2.sort_ranks()
    assert len(elec2.sorted_voters) > 0
def test_sort_candidates():
    """sort_candidates() must preserve the number of candidates."""
    global elec
    sor = elec.sort_candidates(elec.candidates)
    assert_equals(len(elec.candidates), len(sor))
def test_calculate_mean():
    """calculate_mean() must echo the single profile's score per candidate.

    elec2 has exactly one voter profile with scores [10, 5, 0, -5, -10],
    so the mean for candidate i is that score. NOTE(review): the second
    tuple element is presumably an approval ratio and the third a flag --
    confirm against Elections.calculate_mean().
    """
    global elec2
    assert_equals(elec2.calculate_mean([0]), (10, 1.0, False))
    assert_equals(elec2.calculate_mean([1]), (5, 1.0, False))
    assert_equals(elec2.calculate_mean([2]), (0, 0.0, False))
    assert_equals(elec2.calculate_mean([3]), (-5, 0.0, False))
    assert_equals(elec2.calculate_mean([4]), (-10, 0.0, False))
| 31.215385 | 163 | 0.678167 | import os,sys,inspect
current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(current_dir)
sys.path.insert(0, parent_dir)
from Elections import Elections
from nose.tools import assert_equals
elec = None
elec2 = None
def setup_module(module):
global elec, elec2
print ("")
print ("Setup...")
elec = Elections(1000, [0, 0, 0, 0], 1)
elec.reset()
elec2 = Elections(1000, [0, 0, 0, 0, 0], 1, voter_profiles = [{"pop_percentage": 100, "scores": [10, 5, 0, -5, -10]}])
elec2.reset()
elec2.create_candidates()
elec2.create_voters()
def teardown_module(module):
print ("")
print ("Teardown...")
def test_create_candidates():
global elec
elec.create_candidates()
assert_equals(len(elec.candidates), 4)
assert_equals(len(elec.votes), 4)
def test_create_voters():
global elec
elec.create_voters()
assert_equals(len(elec.voters), 1000)
elec3 = Elections(1000, [0, 0, 0, 0], 1, voter_profiles = [{"pop_percentage": 50, "scores": [-10, -5, 5, 10]}, {"pop_percentage": 50, "scores": [6, 3, 1, 0]}])
elec3.reset()
elec3.create_voters()
assert_equals(len(elec3.voters), 1000)
def test_account_for_coalitions():
global elec2
length = len(elec2.voters)
elec2._account_for_coalitions()
assert_equals(len(elec2.voters), length)
def test_sort_ranks():
global elec2
elec2.sort_ranks()
assert len(elec2.sorted_voters) > 0
def test_sort_candidates():
global elec
sor = elec.sort_candidates(elec.candidates)
assert_equals(len(elec.candidates), len(sor))
def test_calculate_mean():
global elec2
assert_equals(elec2.calculate_mean([0]), (10, 1.0, False))
assert_equals(elec2.calculate_mean([1]), (5, 1.0, False))
assert_equals(elec2.calculate_mean([2]), (0, 0.0, False))
assert_equals(elec2.calculate_mean([3]), (-5, 0.0, False))
assert_equals(elec2.calculate_mean([4]), (-10, 0.0, False))
| true | true |
1c3012c4521b2052ec93c1b4ceba47deebfcdad3 | 5,646 | py | Python | common/bin/db_connections/influxdb.py | frankovacevich/aleph | 9b01dcabf3c074e8617e50fffd35c9ee1960eab6 | [
"MIT"
] | null | null | null | common/bin/db_connections/influxdb.py | frankovacevich/aleph | 9b01dcabf3c074e8617e50fffd35c9ee1960eab6 | [
"MIT"
] | null | null | null | common/bin/db_connections/influxdb.py | frankovacevich/aleph | 9b01dcabf3c074e8617e50fffd35c9ee1960eab6 | [
"MIT"
] | null | null | null | """
"""
from influxdb import InfluxDBClient
import datetime
import math
from dateutil.tz import tzutc
class InfluxDBConnection:
    """Thin wrapper around :class:`influxdb.InfluxDBClient`.

    Writes are buffered in memory and flushed in batches of
    ``buffer_size`` points; reads build InfluxQL queries by hand.
    """

    def __init__(self, username, password, database, server="localhost", port=8086):
        self.server = server
        self.port = port
        self.username = username
        self.password = password
        self.database = database
        # Created lazily by connect().
        self.client = None
        # Pending points waiting to be flushed to the server.
        self.data_buffer = []
        self.buffer_size = 15000
        return

    # Connect to database
    def connect(self):
        """Open the connection to the InfluxDB server."""
        self.client = InfluxDBClient(self.server, self.port, self.username, self.password, self.database)
        return

    def close(self):
        """Close the underlying client connection."""
        self.client.close()
        return

    # ==========================================================================
    # Operations (save, get, delete)
    # ==========================================================================

    def save_data(self, key, data):
        """Buffer one record under measurement ``key``; flush when full.

        ``data`` must contain a ``"t"`` timestamp entry. NaN/inf floats are
        dropped (Influx rejects them) and ints are coerced to float so a
        field keeps a single type. If a batch write fails, every buffered
        point is retried individually (best effort) and the exception is
        re-raised.
        """
        try:
            fields = {}
            for name in data:
                value = data[name]
                # Influx rejects NaN/inf values; skip those fields entirely.
                if isinstance(value, float) and (math.isinf(value) or math.isnan(value)):
                    continue
                # Store ints as floats so a field's type stays consistent.
                if isinstance(value, int):
                    value = float(value)
                fields[name] = value
            item = {"measurement": key, "tags": {}, "fields": fields, "time": data["t"]}
            self.data_buffer.append(item)
            if len(self.data_buffer) >= self.buffer_size:
                self.client.write_points(self.data_buffer, database=self.database)
                self.data_buffer.clear()
        except Exception:
            # Batch write failed: retry the points one by one so a single
            # bad point does not lose the whole buffer, then re-raise.
            for item in self.data_buffer:
                try:
                    self.client.write_points([item], database=self.database)
                except Exception:
                    pass
            self.data_buffer.clear()
            raise
        return

    def get_data(self, key, field, since, until, count):
        """Return up to ``count`` records of ``key`` between two datetimes.

        ``field`` may be ``"*"``, a single field name, or a list of field
        names. Results are ordered by time descending and the influx
        ``time`` string is converted back to a datetime under key ``"t"``.
        """
        if key not in self.get_keys():
            return []

        since_t = since.strftime('%Y-%m-%d %H:%M:%S')
        until_t = until.strftime('%Y-%m-%d %H:%M:%S')

        # Build the projection and a filter that drops rows where every
        # requested field is NULL.
        if field == "*":
            fields_str = "*"
            null_filter = ""
        else:
            if isinstance(field, str):
                field = [field]
            field_ids = [x for x in field]
            fields_str = ",".join(field_ids)
            null_filter = " AND (" + "".join([x + ' IS NOT NULL OR ' for x in field_ids])[0:-4] + ")"

        # Bug fix: null_filter was previously built but never inserted into
        # the query (the string had an empty "" placeholder); the leftover
        # debug print(query) was removed as well.
        query = ("SELECT " + fields_str + " FROM \"" + key + "\""
                 " WHERE time >= '" + since_t + "' AND time <= '" + until_t + "'"
                 + null_filter +
                 " ORDER BY time DESC LIMIT " + str(count))

        q = list(self.client.query(query))
        if len(q) == 0:
            return []

        result = q[0]
        for r in result:
            r["t"] = datetime.datetime.strptime(r["time"], "%Y-%m-%dT%H:%M:%SZ")
            r.pop("time")
        return result

    def get_data_by_id(self, key, id_):
        # should not be implemented on influx since update queries are not optimized
        # use another db engine to store id'd data
        raise Exception("Invalid method")

    def delete_data(self, key, since, until):
        """Delete every record of ``key`` between the two datetimes."""
        since_t = since.strftime('%Y-%m-%d %H:%M:%S')
        until_t = until.strftime('%Y-%m-%d %H:%M:%S')
        query = "DELETE FROM \"" + key + "\" WHERE time >= '" + str(since_t) + "' AND time <= '" + str(until_t) + "'"
        q = list(self.client.query(query))
        return q

    def delete_data_by_id(self, key, id_):
        # should not be implemented on influx since update queries are not optimized
        # use another db engine to store id'd data
        raise Exception("Invalid method")

    # ==========================================================================
    # Get keys and fields
    # ==========================================================================

    def get_keys(self):
        """Return the names of all measurements in the database."""
        query = "SHOW MEASUREMENTS ON " + self.database
        q = self.client.query(query)
        if len(q) == 0:
            return []
        return [x["name"] for x in list(q)[0]]

    def get_fields(self, key):
        """Return the field names of measurement ``key``."""
        query = 'SHOW FIELD KEYS ON ' + self.database + ' FROM "' + key + '"'
        q = self.client.query(query)
        if len(q) == 0:
            return []
        return [x["fieldKey"] for x in list(q)[0]]

    def remove_key(self, key):
        """Drop the measurement ``key`` and all of its data."""
        self.client.drop_measurement(key)
        return

    def remove_field(self, key, field):
        raise Exception("Invalid method")

    def rename_key(self, key, new_key):
        """Copy measurement ``key`` into ``new_key``, then drop the original."""
        query = f'SELECT * INTO "{new_key}" FROM "{key}"'
        q = self.client.query(query)
        query = f'DROP MEASUREMENT "{key}"'
        q = self.client.query(query)
        return

    def rename_field(self, key, field, new_field):
        raise Exception("Invalid method")

    # ==========================================================================
    # Metadata
    # ==========================================================================

    def get_metadata(self, key):
        raise Exception("Invalid method")

    def set_metadata(self, key, field, alias, description):
        raise Exception("Invalid method")

    # ==========================================================================
    # Other
    # ==========================================================================

    def count_all_records(self):
        """Return ``{measurement: record count}`` plus a ``__total__`` entry."""
        # Bug fix: this used to call the non-existent self.get_all_keys(),
        # which raised AttributeError (the method is named get_keys).
        keys = self.get_keys()
        counts = {}
        total = 0
        for key in keys:
            query = 'SELECT COUNT(t) FROM "' + key + '"'
            q = self.client.query(query)
            count = list(q)[0][0]["count"]
            total += count
            counts[key] = count
        counts["__total__"] = total
        return counts
| 32.448276 | 175 | 0.481049 |
from influxdb import InfluxDBClient
import datetime
import math
from dateutil.tz import tzutc
class InfluxDBConnection:
def __init__(self, username, password, database, server="localhost", port=8086):
self.server = server
self.port = port
self.username = username
self.password = password
self.database = database
self.client = None
self.data_buffer = []
self.buffer_size = 15000
return
def connect(self):
self.client = InfluxDBClient(self.server, self.port, self.username, self.password, self.database)
return
def close(self):
self.client.close()
return
def save_data(self, key, data):
try:
dat = {}
for x in data:
y = data[x]
if isinstance(y, float) and (math.isinf(y) or math.isnan(y)): continue
if isinstance(y, int): y = float(y)
dat[x] = y
item = {"measurement": key, "tags": {}, "fields": dat, "time": data["t"]}
self.data_buffer.append(item)
if len(self.data_buffer) >= self.buffer_size:
self.client.write_points(self.data_buffer, database=self.database)
self.data_buffer.clear()
except:
for item in self.data_buffer:
try:
self.client.write_points([item], database=self.database)
except:
pass
self.data_buffer.clear()
raise
return
def get_data(self, key, field, since, until, count):
if key not in self.get_keys(): return []
since_t = since.strftime('%Y-%m-%d %H:%M:%S')
until_t = until.strftime('%Y-%m-%d %H:%M:%S')
if field == "*":
fields_str = "*"
null_filter = ""
else:
if isinstance(field, str): field = [field]
field_ids = [x for x in field]
fields_str = ",".join(field_ids)
null_filter = " AND (" + "".join([x + ' IS NOT NULL OR ' for x in field_ids])[0:-4] + ")"
query = "SELECT " + fields_str + " FROM \"" + key + "\" WHERE time >= '" + since_t + "' AND time <= '" + until_t + "'" + "" + " ORDER BY time DESC LIMIT " + str(count)
print(query)
q = list(self.client.query(query))
if len(q) == 0: return []
result = q[0]
for r in result:
r["t"] = datetime.datetime.strptime(r["time"], "%Y-%m-%dT%H:%M:%SZ")
r.pop("time")
return result
def get_data_by_id(self, key, id_):
raise Exception("Invalid method")
def delete_data(self, key, since, until):
since_t = since.strftime('%Y-%m-%d %H:%M:%S')
until_t = until.strftime('%Y-%m-%d %H:%M:%S')
query = "DELETE FROM \"" + key + "\" WHERE time >= '" + str(since_t) + "' AND time <= '" + str(until_t) + "'"
q = list(self.client.query(query))
return q
def delete_data_by_id(self, key, id_):
# should not be implemented on influx since update queries are not optimized
# use another db engine to store id'd data
raise Exception("Invalid method")
def get_keys(self):
query = "SHOW MEASUREMENTS ON " + self.database
q = self.client.query(query)
if len(q) == 0: return []
return [x["name"] for x in list(q)[0]]
def get_fields(self, key):
query = 'SHOW FIELD KEYS ON ' + self.database + ' FROM "' + key + '"'
q = self.client.query(query)
if len(q) == 0: return []
return [x["fieldKey"] for x in list(q)[0]]
def remove_key(self, key):
self.client.drop_measurement(key)
return
def remove_field(self, key, field):
raise Exception("Invalid method")
def rename_key(self, key, new_key):
query = f'SELECT * INTO "{new_key}" FROM "{key}"'
q = self.client.query(query)
query = f'DROP MEASUREMENT "{key}"'
q = self.client.query(query)
return
def rename_field(self, key, field, new_field):
raise Exception("Invalid method")
def get_metadata(self, key):
raise Exception("Invalid method")
def set_metadata(self, key, field, alias, description):
raise Exception("Invalid method")
def count_all_records(self):
keys = self.get_all_keys()
all = {}
total = 0
for key in keys:
query = 'SELECT COUNT(t) FROM "' + key + '"'
q = self.client.query(query)
count = list(q)[0][0]["count"]
total += count
all[key] = count
all["__total__"] = total
return all
| true | true |
1c3013983f195866dba486e32c2df98ef07bea20 | 6,474 | py | Python | sunpy/visualization/wcsaxes_compat.py | Naman9639/sunpy | 24c0cfbd9b03d7f9554bc86036fac2b78a5fcc56 | [
"BSD-2-Clause"
] | null | null | null | sunpy/visualization/wcsaxes_compat.py | Naman9639/sunpy | 24c0cfbd9b03d7f9554bc86036fac2b78a5fcc56 | [
"BSD-2-Clause"
] | null | null | null | sunpy/visualization/wcsaxes_compat.py | Naman9639/sunpy | 24c0cfbd9b03d7f9554bc86036fac2b78a5fcc56 | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Helpers and Functions to make WCSAxes work in SunPy
"""
import matplotlib.pyplot as plt
import astropy.units as u
try:
from astropy.visualization import wcsaxes
except ImportError:
raise ImportError("Astropy >= 1.3 is required to use SunPy")
# Force is put here to enable disabling all checks in this module. It should
# only be used by tests and other such hacks.
_FORCE_NO_WCSAXES = False
__all__ = ['is_wcsaxes']
def is_wcsaxes(axes):
    """
    Test a matplotlib Axes object to see if it is an instance of WCSAxes.

    Parameters
    ----------
    axes : `matplotlib.axes` Object
        Axes to test

    Returns
    -------
    result : `bool`
        Result of the test
    """
    if _FORCE_NO_WCSAXES:
        # Test hook: pretend WCSAxes support is unavailable.
        return False
    return isinstance(axes, wcsaxes.WCSAxes)
def gca_wcs(wcs, fig=None, slices=None):
    """
    Get the current axes, and return a WCSAxes if possible.

    Parameters
    ----------
    wcs : `astropy.wcs.WCS`
        A `~astropy.wcs.WCS` object used to create a new axes.
    fig : `matplotlib.figure.Figure`
        The figure in which to check for the axes; defaults to the
        current figure.
    slices : `tuple`
        Passed to `~astropy.visualization.wcsaxes.WCSAxes` to describe
        which two dimensions of the `~astropy.wcs.WCS` object are being
        plotted.

    Returns
    -------
    ax : `matplotlib.axes.Axes` or `~astropy.visualization.wcsaxes.WCSAxes`
        object. The current axes, or a new one if created.
    """
    if not fig:
        fig = plt.gcf()

    # Only create a projection-aware axes when the figure is still empty
    # (and WCSAxes is not disabled); otherwise reuse the current axes.
    if len(fig.get_axes()) or _FORCE_NO_WCSAXES:
        return plt.gca()
    return plt.gca(projection=wcs, slices=slices)
def get_world_transform(axes):
    """
    Get the transformation to world coordinates.

    For a `~astropy.visualization.wcsaxes.WCSAxes` instance this returns
    the transform to the ``'world'`` coordinates; for any other axes the
    matplotlib data transform is returned, on the assumption that the data
    coordinates already are world coordinates.

    Parameters
    ----------
    axes : `~astropy.visualization.wcsaxes.WCSAxes` or `~matplotlib.axes.Axes`
        object. The axes to get the transform from.

    Returns
    -------
    transform : `~matplotlib.transforms.CompositeGenericTransform`
        The transformation object.
    """
    return axes.get_transform('world') if is_wcsaxes(axes) else axes.transData
def solar_coord_type_from_ctype(ctype):
    """
    Classify a WCS ctype as a longitude, latitude or scalar coordinate.

    Returns a ``(coord_type, coord_wrap)`` tuple: helioprojective and
    heliographic longitudes ('HPLN'/'HGLN') wrap at 180 degrees, other
    longitudes get no wrap, and anything that is not an LN/LT axis is a
    plain scalar.
    """
    axis_kind = ctype[2:4]
    if axis_kind == 'LT':
        return 'latitude', None
    if axis_kind != 'LN':
        return 'scalar', None
    wrap = 180. if ctype[:4] in ['HPLN', 'HGLN'] else None
    return 'longitude', wrap
def default_wcs_ticks(axes, units, ctypes):
    """
    Set the default ticks and coordinate types on a solar WCSAxes plot.

    Parameters
    ----------
    axes : `~astropy.visualization.wcsaxes.WCSAxes`
        The axes to configure; a `TypeError` is raised otherwise.
    units : `tuple`
        Units of the x and y world coordinates, in that order.
    ctypes : `tuple`
        WCS ctype strings of the x and y world coordinates, in that order.
    """
    if not isinstance(axes, wcsaxes.WCSAxes):
        raise TypeError("This axes is not a WCSAxes")

    x = axes.coords[0]
    y = axes.coords[1]

    # White inward-pointing ticks stay visible on top of image data.
    if x.ticks.get_tick_out() == 'in':
        x.set_ticks(color='white')
    if y.ticks.get_tick_out() == 'in':
        y.set_ticks(color='white')

    x.set_ticks_position('bl')
    y.set_ticks_position('bl')

    xtype = solar_coord_type_from_ctype(ctypes[0])
    ytype = solar_coord_type_from_ctype(ctypes[1])
    x.set_coord_type(*xtype)
    y.set_coord_type(*ytype)

    if xtype[0] == 'scalar':
        x.set_major_formatter('x.x')
    elif units[0] is u.deg:
        x.set_major_formatter('d.d')
    elif units[0] is u.arcsec:
        x.set_major_formatter('s.s')
    else:
        x.set_major_formatter('x.x')

    if ytype[0] == 'scalar':
        # Bug fix: this branch previously set the *x* axis formatter
        # (x.set_major_formatter) instead of the y axis one.
        y.set_major_formatter('x.x')
    elif units[1] is u.deg:
        y.set_major_formatter('d.d')
    elif units[1] is u.arcsec:
        y.set_major_formatter('s.s')
    else:
        y.set_major_formatter('x.x')
def default_wcs_grid(axes, units, ctypes):
    """
    Apply some default wcsaxes grid formatting.

    Runs the default tick setup, then draws a faint dotted white world
    coordinate grid.

    Parameters
    ----------
    axes : `~astropy.visualization.wcsaxes.WCSAxes` object.
        The `~astropy.visualization.wcsaxes.WCSAxes` object to draw the world
        coordinate grid on.
    units : `tuple`
        The axes units axes x y order.
    ctypes : `tuple`
        WCS ctype strings of the x and y axes, in that order.
    """
    default_wcs_ticks(axes, units, ctypes)
    axes.coords.grid(color='white', alpha=0.6,
                     linestyle='dotted', linewidth=0.5)
@u.quantity_input(grid_spacing=u.deg)
def wcsaxes_heliographic_overlay(axes, grid_spacing=10*u.deg, **kwargs):
    """
    Create a heliographic (Stonyhurst) overlay using wcsaxes.

    Also draws a grid and labels the top/right axes.

    Parameters
    ----------
    axes : `~astropy.visualization.wcsaxes.WCSAxes` object.
        The `~astropy.visualization.wcsaxes.WCSAxes` object to create the HGS overlay on.
    grid_spacing: `~astropy.units.Quantity`
        Spacing for longitude and latitude grid in degrees; either one
        value for both axes or a (longitude, latitude) pair.

    Returns
    -------
    overlay : `~astropy.visualization.wcsaxes.WCSAxes` overlay
        The overlay object.

    Notes
    -----
    Keywords are passed to `~astropy.visualization.wcsaxes.coordinates_map.CoordinatesMap.grid`.
    """
    # Unpack the spacing into per-axis values.
    if isinstance(grid_spacing, u.Quantity) and grid_spacing.size == 1:
        lon_space = lat_space = grid_spacing
    elif grid_spacing.size == 2:
        lon_space, lat_space = grid_spacing
    else:
        raise ValueError("grid_spacing must be a Quantity of length one or two.")

    overlay = axes.get_coords_overlay('heliographic_stonyhurst')
    lon, lat = overlay[0], overlay[1]

    lon.coord_wrap = 180
    lon.set_major_formatter('dd')

    lon.set_axislabel('Solar Longitude', minpad=0.8)
    lat.set_axislabel('Solar Latitude', minpad=0.9)
    lon.set_ticks_position('tr')
    lat.set_ticks_position('tr')

    grid_kw = dict(color='white', zorder=100, alpha=0.5)
    grid_kw.update(kwargs)
    lon.set_ticks(spacing=lon_space, color=grid_kw['color'])
    lat.set_ticks(spacing=lat_space, color=grid_kw['color'])
    overlay.grid(**grid_kw)

    # NOTE(review): matplotlib Text objects are always truthy, so this
    # branch runs whenever a title artist exists -- presumably intended to
    # test for a non-empty title; confirm before changing.
    if axes.title:
        x, y = axes.title.get_position()
        axes.title.set_position([x, y + 0.08])

    return overlay
| 25.690476 | 96 | 0.637319 |
import matplotlib.pyplot as plt
import astropy.units as u
try:
from astropy.visualization import wcsaxes
except ImportError:
raise ImportError("Astropy >= 1.3 is required to use SunPy")
_FORCE_NO_WCSAXES = False
__all__ = ['is_wcsaxes']
def is_wcsaxes(axes):
if not _FORCE_NO_WCSAXES:
return isinstance(axes, wcsaxes.WCSAxes)
else:
return False
def gca_wcs(wcs, fig=None, slices=None):
if not fig:
fig = plt.gcf()
if not len(fig.get_axes()):
if not _FORCE_NO_WCSAXES:
ax = plt.gca(projection=wcs, slices=slices)
else:
ax = plt.gca()
else:
ax = plt.gca()
return ax
def get_world_transform(axes):
if is_wcsaxes(axes):
transform = axes.get_transform('world')
else:
transform = axes.transData
return transform
def solar_coord_type_from_ctype(ctype):
if ctype[2:4] == 'LN':
if ctype[:4] in ['HPLN', 'HGLN']:
return 'longitude', 180.
return 'longitude', None
elif ctype[2:4] == 'LT':
return 'latitude', None
else:
return 'scalar', None
def default_wcs_ticks(axes, units, ctypes):
if not isinstance(axes, wcsaxes.WCSAxes):
raise TypeError("This axes is not a WCSAxes")
x = axes.coords[0]
y = axes.coords[1]
if x.ticks.get_tick_out() == 'in':
x.set_ticks(color='white')
if y.ticks.get_tick_out() == 'in':
y.set_ticks(color='white')
x.set_ticks_position('bl')
y.set_ticks_position('bl')
xtype = solar_coord_type_from_ctype(ctypes[0])
ytype = solar_coord_type_from_ctype(ctypes[1])
x.set_coord_type(*xtype)
y.set_coord_type(*ytype)
if xtype[0] == 'scalar':
x.set_major_formatter('x.x')
elif units[0] is u.deg:
x.set_major_formatter('d.d')
elif units[0] is u.arcsec:
x.set_major_formatter('s.s')
else:
x.set_major_formatter('x.x')
if ytype[0] == 'scalar':
x.set_major_formatter('x.x')
elif units[1] is u.deg:
y.set_major_formatter('d.d')
elif units[1] is u.arcsec:
y.set_major_formatter('s.s')
else:
y.set_major_formatter('x.x')
def default_wcs_grid(axes, units, ctypes):
default_wcs_ticks(axes, units, ctypes)
axes.coords.grid(color='white', alpha=0.6, linestyle='dotted',
linewidth=0.5)
@u.quantity_input(grid_spacing=u.deg)
def wcsaxes_heliographic_overlay(axes, grid_spacing=10*u.deg, **kwargs):
if isinstance(grid_spacing, u.Quantity) and grid_spacing.size == 1:
lon_space = lat_space = grid_spacing
elif grid_spacing.size == 2:
lon_space, lat_space = grid_spacing
else:
raise ValueError("grid_spacing must be a Quantity of length one or two.")
overlay = axes.get_coords_overlay('heliographic_stonyhurst')
lon = overlay[0]
lat = overlay[1]
lon.coord_wrap = 180
lon.set_major_formatter('dd')
lon.set_axislabel('Solar Longitude', minpad=0.8)
lat.set_axislabel('Solar Latitude', minpad=0.9)
lon.set_ticks_position('tr')
lat.set_ticks_position('tr')
grid_kw = {'color': 'white', 'zorder': 100, 'alpha': 0.5}
grid_kw.update(kwargs)
lon.set_ticks(spacing=lon_space, color=grid_kw['color'])
lat.set_ticks(spacing=lat_space, color=grid_kw['color'])
overlay.grid(**grid_kw)
if axes.title:
x, y = axes.title.get_position()
axes.title.set_position([x, y + 0.08])
return overlay
| true | true |
1c3013bd2cebdebd315d8f3d429bbca5d0355513 | 2,852 | py | Python | tests/components/demo/test_geo_location.py | OpenPeerPower/openpeerpower | 940a04a88e8f78e2d010dc912ad6905ae363503c | [
"Apache-2.0"
] | null | null | null | tests/components/demo/test_geo_location.py | OpenPeerPower/openpeerpower | 940a04a88e8f78e2d010dc912ad6905ae363503c | [
"Apache-2.0"
] | null | null | null | tests/components/demo/test_geo_location.py | OpenPeerPower/openpeerpower | 940a04a88e8f78e2d010dc912ad6905ae363503c | [
"Apache-2.0"
] | 1 | 2019-04-24T14:10:08.000Z | 2019-04-24T14:10:08.000Z | """The tests for the demo platform."""
import unittest
from unittest.mock import patch
from openpeerpower.components import geo_location
from openpeerpower.components.demo.geo_location import (
DEFAULT_UNIT_OF_MEASUREMENT,
DEFAULT_UPDATE_INTERVAL,
NUMBER_OF_DEMO_DEVICES,
)
from openpeerpower.setup import setup_component
import openpeerpower.util.dt as dt_util
from tests.common import (
assert_setup_component,
fire_time_changed,
get_test_open_peer_power,
)
CONFIG = {geo_location.DOMAIN: [{"platform": "demo"}]}
class TestDemoPlatform(unittest.TestCase):
    """Test the demo platform."""
    def setUp(self):
        """Initialize values for this testcase class."""
        self.opp = get_test_open_peer_power()
    def tearDown(self):
        """Stop everything that was started."""
        self.opp.stop()
    def test_setup_platform(self):
        """Test setup of demo platform via configuration.

        Checks that the expected number of demo geolocation entities is
        created close to the configured home coordinates, and that the
        timed update keeps the entity count constant while replacing
        entity state.
        """
        utcnow = dt_util.utcnow()
        # Patching 'utcnow' to gain more control over the timed update.
        with patch("openpeerpower.util.dt.utcnow", return_value=utcnow):
            with assert_setup_component(1, geo_location.DOMAIN):
                assert setup_component(self.opp, geo_location.DOMAIN, CONFIG)
            self.opp.block_till_done()
            # In this test, one zone and geolocation entities have been
            # generated.
            all_states = [
                self.opp.states.get(entity_id)
                for entity_id in self.opp.states.entity_ids(geo_location.DOMAIN)
            ]
            assert len(all_states) == NUMBER_OF_DEMO_DEVICES
            for state in all_states:
                # Check a single device's attributes.
                if state.domain != geo_location.DOMAIN:
                    # ignore home zone state
                    continue
                # Demo events are expected within ~1 degree of home.
                assert (
                    abs(state.attributes["latitude"] - self.opp.config.latitude) < 1.0
                )
                assert (
                    abs(state.attributes["longitude"] - self.opp.config.longitude) < 1.0
                )
                assert (
                    state.attributes["unit_of_measurement"]
                    == DEFAULT_UNIT_OF_MEASUREMENT
                )
            # Update (replaces 1 device).
            fire_time_changed(self.opp, utcnow + DEFAULT_UPDATE_INTERVAL)
            self.opp.block_till_done()
            # Get all states again, ensure that the number of states is still
            # the same, but the lists are different.
            all_states_updated = [
                self.opp.states.get(entity_id)
                for entity_id in self.opp.states.entity_ids(geo_location.DOMAIN)
            ]
            assert len(all_states_updated) == NUMBER_OF_DEMO_DEVICES
            assert all_states != all_states_updated
| 36.564103 | 88 | 0.615007 | import unittest
from unittest.mock import patch
from openpeerpower.components import geo_location
from openpeerpower.components.demo.geo_location import (
DEFAULT_UNIT_OF_MEASUREMENT,
DEFAULT_UPDATE_INTERVAL,
NUMBER_OF_DEMO_DEVICES,
)
from openpeerpower.setup import setup_component
import openpeerpower.util.dt as dt_util
from tests.common import (
assert_setup_component,
fire_time_changed,
get_test_open_peer_power,
)
CONFIG = {geo_location.DOMAIN: [{"platform": "demo"}]}
class TestDemoPlatform(unittest.TestCase):
def setUp(self):
self.opp = get_test_open_peer_power()
def tearDown(self):
self.opp.stop()
def test_setup_platform(self):
utcnow = dt_util.utcnow()
with patch("openpeerpower.util.dt.utcnow", return_value=utcnow):
with assert_setup_component(1, geo_location.DOMAIN):
assert setup_component(self.opp, geo_location.DOMAIN, CONFIG)
self.opp.block_till_done()
all_states = [
self.opp.states.get(entity_id)
for entity_id in self.opp.states.entity_ids(geo_location.DOMAIN)
]
assert len(all_states) == NUMBER_OF_DEMO_DEVICES
for state in all_states:
if state.domain != geo_location.DOMAIN:
# ignore home zone state
continue
assert (
abs(state.attributes["latitude"] - self.opp.config.latitude) < 1.0
)
assert (
abs(state.attributes["longitude"] - self.opp.config.longitude) < 1.0
)
assert (
state.attributes["unit_of_measurement"]
== DEFAULT_UNIT_OF_MEASUREMENT
)
# Update (replaces 1 device).
fire_time_changed(self.opp, utcnow + DEFAULT_UPDATE_INTERVAL)
self.opp.block_till_done()
# Get all states again, ensure that the number of states is still
# the same, but the lists are different.
all_states_updated = [
self.opp.states.get(entity_id)
for entity_id in self.opp.states.entity_ids(geo_location.DOMAIN)
]
assert len(all_states_updated) == NUMBER_OF_DEMO_DEVICES
assert all_states != all_states_updated
| true | true |
1c30141ccdd08330d522e5467a59ee6418280bb7 | 35,375 | py | Python | bindings/python/crocoddyl/utils/quadruped.py | pFernbach/crocoddyl | cbf81a329e3abaf4ce1b4a8fab1431f93cd9a5c8 | [
"BSD-3-Clause"
] | null | null | null | bindings/python/crocoddyl/utils/quadruped.py | pFernbach/crocoddyl | cbf81a329e3abaf4ce1b4a8fab1431f93cd9a5c8 | [
"BSD-3-Clause"
] | null | null | null | bindings/python/crocoddyl/utils/quadruped.py | pFernbach/crocoddyl | cbf81a329e3abaf4ce1b4a8fab1431f93cd9a5c8 | [
"BSD-3-Clause"
] | null | null | null | import crocoddyl
import pinocchio
import numpy as np
class SimpleQuadrupedalGaitProblem:
def __init__(self, rmodel, lfFoot, rfFoot, lhFoot, rhFoot):
self.rmodel = rmodel
self.rdata = rmodel.createData()
self.state = crocoddyl.StateMultibody(self.rmodel)
self.actuation = crocoddyl.ActuationModelFloatingBase(self.state)
# Getting the frame id for all the legs
self.lfFootId = self.rmodel.getFrameId(lfFoot)
self.rfFootId = self.rmodel.getFrameId(rfFoot)
self.lhFootId = self.rmodel.getFrameId(lhFoot)
self.rhFootId = self.rmodel.getFrameId(rhFoot)
# Defining default state
q0 = self.rmodel.referenceConfigurations["standing"]
self.rmodel.defaultState = np.concatenate([q0, np.zeros(self.rmodel.nv)])
self.firstStep = True
# Defining the friction coefficient and normal
self.mu = 0.7
self.nsurf = np.array([0., 0., 1.])
def createCoMProblem(self, x0, comGoTo, timeStep, numKnots):
""" Create a shooting problem for a CoM forward/backward task.
:param x0: initial state
:param comGoTo: initial CoM motion
:param timeStep: step time for each knot
:param numKnots: number of knots per each phase
:return shooting problem
"""
# Compute the current foot positions
q0 = x0[:self.rmodel.nq]
pinocchio.forwardKinematics(self.rmodel, self.rdata, q0)
pinocchio.updateFramePlacements(self.rmodel, self.rdata)
com0 = pinocchio.centerOfMass(self.rmodel, self.rdata, q0)
# Defining the action models along the time instances
comModels = []
# Creating the action model for the CoM task
comForwardModels = [
self.createSwingFootModel(
timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
) for k in range(numKnots)
]
comForwardTermModel = self.createSwingFootModel(timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
com0 + np.array([comGoTo, 0., 0.]))
comForwardTermModel.differential.costs.costs['comTrack'].weight = 1e6
comBackwardModels = [
self.createSwingFootModel(
timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
) for k in range(numKnots)
]
comBackwardTermModel = self.createSwingFootModel(timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
com0 + np.array([-comGoTo, 0., 0.]))
comBackwardTermModel.differential.costs.costs['comTrack'].weight = 1e6
# Adding the CoM tasks
comModels += comForwardModels + [comForwardTermModel]
comModels += comBackwardModels + [comBackwardTermModel]
# Defining the shooting problem
problem = crocoddyl.ShootingProblem(x0, comModels, comModels[-1])
return problem
def createCoMGoalProblem(self, x0, comGoTo, timeStep, numKnots):
""" Create a shooting problem for a CoM position goal task.
:param x0: initial state
:param comGoTo: CoM position change target
:param timeStep: step time for each knot
:param numKnots: number of knots per each phase
:return shooting problem
"""
# Compute the current foot positions
q0 = self.rmodel.referenceConfigurations["standing"]
pinocchio.forwardKinematics(self.rmodel, self.rdata, q0)
pinocchio.updateFramePlacements(self.rmodel, self.rdata)
com0 = pinocchio.centerOfMass(self.rmodel, self.rdata, q0)
# Defining the action models along the time instances
comModels = []
# Creating the action model for the CoM task
comForwardModels = [
self.createSwingFootModel(
timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
) for k in range(numKnots)
]
comForwardTermModel = self.createSwingFootModel(timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
com0 + np.array([comGoTo, 0., 0.]))
comForwardTermModel.differential.costs.costs['comTrack'].weight = 1e6
# Adding the CoM tasks
comModels += comForwardModels + [comForwardTermModel]
# Defining the shooting problem
problem = crocoddyl.ShootingProblem(x0, comModels, comModels[-1])
return problem
def createWalkingProblem(self, x0, stepLength, stepHeight, timeStep, stepKnots, supportKnots):
""" Create a shooting problem for a simple walking gait.
:param x0: initial state
:param stepLength: step length
:param stepHeight: step height
:param timeStep: step time for each knot
:param stepKnots: number of knots for step phases
:param supportKnots: number of knots for double support phases
:return shooting problem
"""
# Compute the current foot positions
q0 = x0[:self.rmodel.nq]
pinocchio.forwardKinematics(self.rmodel, self.rdata, q0)
pinocchio.updateFramePlacements(self.rmodel, self.rdata)
rfFootPos0 = self.rdata.oMf[self.rfFootId].translation
rhFootPos0 = self.rdata.oMf[self.rhFootId].translation
lfFootPos0 = self.rdata.oMf[self.lfFootId].translation
lhFootPos0 = self.rdata.oMf[self.lhFootId].translation
comRef = (rfFootPos0 + rhFootPos0 + lfFootPos0 + lhFootPos0) / 4
comRef[2] = np.asscalar(pinocchio.centerOfMass(self.rmodel, self.rdata, q0)[2])
# Defining the action models along the time instances
loco3dModel = []
doubleSupport = [
self.createSwingFootModel(
timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
) for k in range(supportKnots)
]
if self.firstStep is True:
rhStep = self.createFootstepModels(comRef, [rhFootPos0], 0.5 * stepLength, stepHeight, timeStep, stepKnots,
[self.lfFootId, self.rfFootId, self.lhFootId], [self.rhFootId])
rfStep = self.createFootstepModels(comRef, [rfFootPos0], 0.5 * stepLength, stepHeight, timeStep, stepKnots,
[self.lfFootId, self.lhFootId, self.rhFootId], [self.rfFootId])
self.firstStep = False
else:
rhStep = self.createFootstepModels(comRef, [rhFootPos0], stepLength, stepHeight, timeStep, stepKnots,
[self.lfFootId, self.rfFootId, self.lhFootId], [self.rhFootId])
rfStep = self.createFootstepModels(comRef, [rfFootPos0], stepLength, stepHeight, timeStep, stepKnots,
[self.lfFootId, self.lhFootId, self.rhFootId], [self.rfFootId])
lhStep = self.createFootstepModels(comRef, [lhFootPos0], stepLength, stepHeight, timeStep, stepKnots,
[self.lfFootId, self.rfFootId, self.rhFootId], [self.lhFootId])
lfStep = self.createFootstepModels(comRef, [lfFootPos0], stepLength, stepHeight, timeStep, stepKnots,
[self.rfFootId, self.lhFootId, self.rhFootId], [self.lfFootId])
loco3dModel += doubleSupport + rhStep + rfStep
loco3dModel += doubleSupport + lhStep + lfStep
problem = crocoddyl.ShootingProblem(x0, loco3dModel, loco3dModel[-1])
return problem
def createTrottingProblem(self, x0, stepLength, stepHeight, timeStep, stepKnots, supportKnots):
""" Create a shooting problem for a simple trotting gait.
:param x0: initial state
:param stepLength: step length
:param stepHeight: step height
:param timeStep: step time for each knot
:param stepKnots: number of knots for step phases
:param supportKnots: number of knots for double support phases
:return shooting problem
"""
# Compute the current foot positions
q0 = x0[:self.rmodel.nq]
pinocchio.forwardKinematics(self.rmodel, self.rdata, q0)
pinocchio.updateFramePlacements(self.rmodel, self.rdata)
rfFootPos0 = self.rdata.oMf[self.rfFootId].translation
rhFootPos0 = self.rdata.oMf[self.rhFootId].translation
lfFootPos0 = self.rdata.oMf[self.lfFootId].translation
lhFootPos0 = self.rdata.oMf[self.lhFootId].translation
comRef = (rfFootPos0 + rhFootPos0 + lfFootPos0 + lhFootPos0) / 4
comRef[2] = np.asscalar(pinocchio.centerOfMass(self.rmodel, self.rdata, q0)[2])
# Defining the action models along the time instances
loco3dModel = []
doubleSupport = [
self.createSwingFootModel(
timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
) for k in range(supportKnots)
]
if self.firstStep is True:
rflhStep = self.createFootstepModels(comRef, [rfFootPos0, lhFootPos0], 0.5 * stepLength, stepHeight,
timeStep, stepKnots, [self.lfFootId, self.rhFootId],
[self.rfFootId, self.lhFootId])
self.firstStep = False
else:
rflhStep = self.createFootstepModels(comRef, [rfFootPos0, lhFootPos0], stepLength, stepHeight, timeStep,
stepKnots, [self.lfFootId, self.rhFootId],
[self.rfFootId, self.lhFootId])
lfrhStep = self.createFootstepModels(comRef, [lfFootPos0, rhFootPos0], stepLength, stepHeight, timeStep,
stepKnots, [self.rfFootId, self.lhFootId], [self.lfFootId, self.rhFootId])
loco3dModel += doubleSupport + rflhStep
loco3dModel += doubleSupport + lfrhStep
problem = crocoddyl.ShootingProblem(x0, loco3dModel, loco3dModel[-1])
return problem
def createPacingProblem(self, x0, stepLength, stepHeight, timeStep, stepKnots, supportKnots):
""" Create a shooting problem for a simple pacing gait.
:param x0: initial state
:param stepLength: step length
:param stepHeight: step height
:param timeStep: step time for each knot
:param stepKnots: number of knots for step phases
:param supportKnots: number of knots for double support phases
:return shooting problem
"""
# Compute the current foot positions
q0 = x0[:self.rmodel.nq]
pinocchio.forwardKinematics(self.rmodel, self.rdata, q0)
pinocchio.updateFramePlacements(self.rmodel, self.rdata)
rfFootPos0 = self.rdata.oMf[self.rfFootId].translation
rhFootPos0 = self.rdata.oMf[self.rhFootId].translation
lfFootPos0 = self.rdata.oMf[self.lfFootId].translation
lhFootPos0 = self.rdata.oMf[self.lhFootId].translation
comRef = (rfFootPos0 + rhFootPos0 + lfFootPos0 + lhFootPos0) / 4
comRef[2] = np.asscalar(pinocchio.centerOfMass(self.rmodel, self.rdata, q0)[2])
# Defining the action models along the time instances
loco3dModel = []
doubleSupport = [
self.createSwingFootModel(
timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
) for k in range(supportKnots)
]
if self.firstStep is True:
rightSteps = self.createFootstepModels(comRef, [rfFootPos0, rhFootPos0], 0.5 * stepLength, stepHeight,
timeStep, stepKnots, [self.lfFootId, self.lhFootId],
[self.rfFootId, self.rhFootId])
self.firstStep = False
else:
rightSteps = self.createFootstepModels(comRef, [rfFootPos0, rhFootPos0], stepLength, stepHeight, timeStep,
stepKnots, [self.lfFootId, self.lhFootId],
[self.rfFootId, self.rhFootId])
leftSteps = self.createFootstepModels(comRef, [lfFootPos0, lhFootPos0], stepLength, stepHeight, timeStep,
stepKnots, [self.rfFootId, self.rhFootId],
[self.lfFootId, self.lhFootId])
loco3dModel += doubleSupport + rightSteps
loco3dModel += doubleSupport + leftSteps
problem = crocoddyl.ShootingProblem(x0, loco3dModel, loco3dModel[-1])
return problem
def createBoundingProblem(self, x0, stepLength, stepHeight, timeStep, stepKnots, supportKnots):
""" Create a shooting problem for a simple bounding gait.
:param x0: initial state
:param stepLength: step length
:param stepHeight: step height
:param timeStep: step time for each knot
:param stepKnots: number of knots for step phases
:param supportKnots: number of knots for double support phases
:return shooting problem
"""
# Compute the current foot positions
q0 = x0[:self.rmodel.nq]
pinocchio.forwardKinematics(self.rmodel, self.rdata, q0)
pinocchio.updateFramePlacements(self.rmodel, self.rdata)
rfFootPos0 = self.rdata.oMf[self.rfFootId].translation
rhFootPos0 = self.rdata.oMf[self.rhFootId].translation
lfFootPos0 = self.rdata.oMf[self.lfFootId].translation
lhFootPos0 = self.rdata.oMf[self.lhFootId].translation
comRef = (rfFootPos0 + rhFootPos0 + lfFootPos0 + lhFootPos0) / 4
comRef[2] = np.asscalar(pinocchio.centerOfMass(self.rmodel, self.rdata, q0)[2])
# Defining the action models along the time instances
loco3dModel = []
doubleSupport = [
self.createSwingFootModel(timeStep, [self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId])
for k in range(supportKnots)
]
hindSteps = self.createFootstepModels(comRef, [lfFootPos0, rfFootPos0], stepLength, stepHeight, timeStep,
stepKnots, [self.lhFootId, self.rhFootId],
[self.lfFootId, self.rfFootId])
frontSteps = self.createFootstepModels(comRef, [lhFootPos0, rhFootPos0], stepLength, stepHeight, timeStep,
stepKnots, [self.lfFootId, self.rfFootId],
[self.lhFootId, self.rhFootId])
loco3dModel += doubleSupport + hindSteps
loco3dModel += doubleSupport + frontSteps
problem = crocoddyl.ShootingProblem(x0, loco3dModel, loco3dModel[-1])
return problem
def createJumpingProblem(self, x0, jumpHeight, jumpLength, timeStep, groundKnots, flyingKnots):
q0 = x0[:self.rmodel.nq]
pinocchio.forwardKinematics(self.rmodel, self.rdata, q0)
pinocchio.updateFramePlacements(self.rmodel, self.rdata)
rfFootPos0 = self.rdata.oMf[self.rfFootId].translation
rhFootPos0 = self.rdata.oMf[self.rhFootId].translation
lfFootPos0 = self.rdata.oMf[self.lfFootId].translation
lhFootPos0 = self.rdata.oMf[self.lhFootId].translation
df = jumpLength[2] - rfFootPos0[2]
rfFootPos0[2] = 0.
rhFootPos0[2] = 0.
lfFootPos0[2] = 0.
lhFootPos0[2] = 0.
comRef = (rfFootPos0 + rhFootPos0 + lfFootPos0 + lhFootPos0) / 4
comRef[2] = np.asscalar(pinocchio.centerOfMass(self.rmodel, self.rdata, q0)[2])
loco3dModel = []
takeOff = [
self.createSwingFootModel(
timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
) for k in range(groundKnots)
]
flyingUpPhase = [
self.createSwingFootModel(
timeStep, [],
np.array([jumpLength[0], jumpLength[1], jumpLength[2] + jumpHeight]) * (k + 1) / flyingKnots + comRef)
for k in range(flyingKnots)
]
flyingDownPhase = []
for k in range(flyingKnots):
flyingDownPhase += [self.createSwingFootModel(timeStep, [])]
f0 = jumpLength
footTask = [
crocoddyl.FramePlacement(self.lfFootId, pinocchio.SE3(np.eye(3), lfFootPos0 + f0)),
crocoddyl.FramePlacement(self.rfFootId, pinocchio.SE3(np.eye(3), rfFootPos0 + f0)),
crocoddyl.FramePlacement(self.lhFootId, pinocchio.SE3(np.eye(3), lhFootPos0 + f0)),
crocoddyl.FramePlacement(self.rhFootId, pinocchio.SE3(np.eye(3), rhFootPos0 + f0))
]
landingPhase = [
self.createFootSwitchModel([self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId], footTask, False)
]
f0[2] = df
landed = [
self.createSwingFootModel(timeStep, [self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
comTask=comRef + f0) for k in range(groundKnots)
]
loco3dModel += takeOff
loco3dModel += flyingUpPhase
loco3dModel += flyingDownPhase
loco3dModel += landingPhase
loco3dModel += landed
problem = crocoddyl.ShootingProblem(x0, loco3dModel, loco3dModel[-1])
return problem
def createFootstepModels(self, comPos0, feetPos0, stepLength, stepHeight, timeStep, numKnots, supportFootIds,
swingFootIds):
""" Action models for a footstep phase.
:param comPos0, initial CoM position
:param feetPos0: initial position of the swinging feet
:param stepLength: step length
:param stepHeight: step height
:param timeStep: time step
:param numKnots: number of knots for the footstep phase
:param supportFootIds: Ids of the supporting feet
:param swingFootIds: Ids of the swinging foot
:return footstep action models
"""
numLegs = len(supportFootIds) + len(swingFootIds)
comPercentage = float(len(swingFootIds)) / numLegs
# Action models for the foot swing
footSwingModel = []
for k in range(numKnots):
swingFootTask = []
for i, p in zip(swingFootIds, feetPos0):
# Defining a foot swing task given the step length
# resKnot = numKnots % 2
phKnots = numKnots / 2
if k < phKnots:
dp = np.array([stepLength * (k + 1) / numKnots, 0., stepHeight * k / phKnots])
elif k == phKnots:
dp = np.array([stepLength * (k + 1) / numKnots, 0., stepHeight])
else:
dp = np.array(
[stepLength * (k + 1) / numKnots, 0., stepHeight * (1 - float(k - phKnots) / phKnots)])
tref = p + dp
swingFootTask += [crocoddyl.FramePlacement(i, pinocchio.SE3(np.eye(3), tref))]
comTask = np.array([stepLength * (k + 1) / numKnots, 0., 0.]) * comPercentage + comPos0
footSwingModel += [
self.createSwingFootModel(timeStep, supportFootIds, comTask=comTask, swingFootTask=swingFootTask)
]
# Action model for the foot switch
footSwitchModel = self.createFootSwitchModel(supportFootIds, swingFootTask)
# Updating the current foot position for next step
comPos0 += [stepLength * comPercentage, 0., 0.]
for p in feetPos0:
p += [stepLength, 0., 0.]
return footSwingModel + [footSwitchModel]
def createSwingFootModel(self, timeStep, supportFootIds, comTask=None, swingFootTask=None):
""" Action model for a swing foot phase.
:param timeStep: step duration of the action model
:param supportFootIds: Ids of the constrained feet
:param comTask: CoM task
:param swingFootTask: swinging foot task
:return action model for a swing foot phase
"""
# Creating a 3D multi-contact model, and then including the supporting
# foot
contactModel = crocoddyl.ContactModelMultiple(self.state, self.actuation.nu)
for i in supportFootIds:
xref = crocoddyl.FrameTranslation(i, np.array([0., 0., 0.]))
supportContactModel = crocoddyl.ContactModel3D(self.state, xref, self.actuation.nu, np.array([0., 50.]))
contactModel.addContact(self.rmodel.frames[i].name + "_contact", supportContactModel)
# Creating the cost model for a contact phase
costModel = crocoddyl.CostModelSum(self.state, self.actuation.nu)
if isinstance(comTask, np.ndarray):
comTrack = crocoddyl.CostModelCoMPosition(self.state, comTask, self.actuation.nu)
costModel.addCost("comTrack", comTrack, 1e6)
for i in supportFootIds:
cone = crocoddyl.FrictionCone(self.nsurf, self.mu, 4, False)
frictionCone = crocoddyl.CostModelContactFrictionCone(
self.state, crocoddyl.ActivationModelQuadraticBarrier(crocoddyl.ActivationBounds(cone.lb, cone.ub)),
crocoddyl.FrameFrictionCone(i, cone), self.actuation.nu)
costModel.addCost(self.rmodel.frames[i].name + "_frictionCone", frictionCone, 1e1)
if swingFootTask is not None:
for i in swingFootTask:
xref = crocoddyl.FrameTranslation(i.id, i.placement.translation)
footTrack = crocoddyl.CostModelFrameTranslation(self.state, xref, self.actuation.nu)
costModel.addCost(self.rmodel.frames[i.id].name + "_footTrack", footTrack, 1e6)
stateWeights = np.array([0.] * 3 + [500.] * 3 + [0.01] * (self.rmodel.nv - 6) + [10.] * 6 + [1.] *
(self.rmodel.nv - 6))
stateReg = crocoddyl.CostModelState(self.state, crocoddyl.ActivationModelWeightedQuad(stateWeights**2),
self.rmodel.defaultState, self.actuation.nu)
ctrlReg = crocoddyl.CostModelControl(self.state, self.actuation.nu)
costModel.addCost("stateReg", stateReg, 1e1)
costModel.addCost("ctrlReg", ctrlReg, 1e-1)
lb = np.concatenate([self.state.lb[1:self.state.nv + 1], self.state.lb[-self.state.nv:]])
ub = np.concatenate([self.state.ub[1:self.state.nv + 1], self.state.ub[-self.state.nv:]])
stateBounds = crocoddyl.CostModelState(
self.state, crocoddyl.ActivationModelQuadraticBarrier(crocoddyl.ActivationBounds(lb, ub)),
0 * self.rmodel.defaultState, self.actuation.nu)
costModel.addCost("stateBounds", stateBounds, 1e3)
# Creating the action model for the KKT dynamics with simpletic Euler
# integration scheme
dmodel = crocoddyl.DifferentialActionModelContactFwdDynamics(self.state, self.actuation, contactModel,
costModel, 0., True)
model = crocoddyl.IntegratedActionModelEuler(dmodel, timeStep)
return model
def createFootSwitchModel(self, supportFootIds, swingFootTask, pseudoImpulse=False):
""" Action model for a foot switch phase.
:param supportFootIds: Ids of the constrained feet
:param swingFootTask: swinging foot task
:param pseudoImpulse: true for pseudo-impulse models, otherwise it uses the impulse model
:return action model for a foot switch phase
"""
if pseudoImpulse:
return self.createPseudoImpulseModel(supportFootIds, swingFootTask)
else:
return self.createImpulseModel(supportFootIds, swingFootTask)
def createPseudoImpulseModel(self, supportFootIds, swingFootTask):
""" Action model for pseudo-impulse models.
A pseudo-impulse model consists of adding high-penalty cost for the contact velocities.
:param supportFootIds: Ids of the constrained feet
:param swingFootTask: swinging foot task
:return pseudo-impulse differential action model
"""
# Creating a 3D multi-contact model, and then including the supporting
# foot
contactModel = crocoddyl.ContactModelMultiple(self.state, self.actuation.nu)
for i in supportFootIds:
xref = crocoddyl.FrameTranslation(i, np.array([0., 0., 0.]))
supportContactModel = crocoddyl.ContactModel3D(self.state, xref, self.actuation.nu, np.array([0., 50.]))
contactModel.addContact(self.rmodel.frames[i].name + "_contact", supportContactModel)
# Creating the cost model for a contact phase
costModel = crocoddyl.CostModelSum(self.state, self.actuation.nu)
for i in supportFootIds:
cone = crocoddyl.FrictionCone(self.nsurf, self.mu, 4, False)
frictionCone = crocoddyl.CostModelContactFrictionCone(
self.state, crocoddyl.ActivationModelQuadraticBarrier(crocoddyl.ActivationBounds(cone.lb, cone.ub)),
crocoddyl.FrameFrictionCone(i, cone), self.actuation.nu)
costModel.addCost(self.rmodel.frames[i].name + "_frictionCone", frictionCone, 1e1)
if swingFootTask is not None:
for i in swingFootTask:
xref = crocoddyl.FrameTranslation(i.frame, i.oMf.translation)
vref = crocoddyl.FrameMotion(i.frame, pinocchio.Motion.Zero())
footTrack = crocoddyl.CostModelFrameTranslation(self.state, xref, self.actuation.nu)
impulseFootVelCost = crocoddyl.CostModelFrameVelocity(self.state, vref, self.actuation.nu)
costModel.addCost(self.rmodel.frames[i.frame].name + "_footTrack", footTrack, 1e7)
costModel.addCost(self.rmodel.frames[i.frame].name + "_impulseVel", impulseFootVelCost, 1e6)
stateWeights = np.array([0.] * 3 + [500.] * 3 + [0.01] * (self.rmodel.nv - 6) + [10.] * self.rmodel.nv)
stateReg = crocoddyl.CostModelState(self.state, crocoddyl.ActivationModelWeightedQuad(stateWeights**2),
self.rmodel.defaultState, self.actuation.nu)
ctrlReg = crocoddyl.CostModelControl(self.state, self.actuation.nu)
costModel.addCost("stateReg", stateReg, 1e1)
costModel.addCost("ctrlReg", ctrlReg, 1e-3)
# Creating the action model for the KKT dynamics with simpletic Euler
# integration scheme
dmodel = crocoddyl.DifferentialActionModelContactFwdDynamics(self.state, self.actuation, contactModel,
costModel, 0., True)
model = crocoddyl.IntegratedActionModelEuler(dmodel, 0.)
return model
def createImpulseModel(self, supportFootIds, swingFootTask, JMinvJt_damping=1e-12, r_coeff=0.0):
""" Action model for impulse models.
An impulse model consists of describing the impulse dynamics against a set of contacts.
:param supportFootIds: Ids of the constrained feet
:param swingFootTask: swinging foot task
:return impulse action model
"""
# Creating a 3D multi-contact model, and then including the supporting foot
impulseModel = crocoddyl.ImpulseModelMultiple(self.state)
for i in supportFootIds:
supportContactModel = crocoddyl.ImpulseModel3D(self.state, i)
impulseModel.addImpulse(self.rmodel.frames[i].name + "_impulse", supportContactModel)
# Creating the cost model for a contact phase
costModel = crocoddyl.CostModelSum(self.state, 0)
if swingFootTask is not None:
for i in swingFootTask:
xref = crocoddyl.FrameTranslation(i.id, i.placement.translation)
footTrack = crocoddyl.CostModelFrameTranslation(self.state, xref, 0)
costModel.addCost(self.rmodel.frames[i.id].name + "_footTrack", footTrack, 1e7)
stateWeights = np.array([1.] * 6 + [10.] * (self.rmodel.nv - 6) + [10.] * self.rmodel.nv)
stateReg = crocoddyl.CostModelState(self.state, crocoddyl.ActivationModelWeightedQuad(stateWeights**2),
self.rmodel.defaultState, 0)
costModel.addCost("stateReg", stateReg, 1e1)
# Creating the action model for the KKT dynamics with simpletic Euler
# integration scheme
model = crocoddyl.ActionModelImpulseFwdDynamics(self.state, impulseModel, costModel)
model.JMinvJt_damping = JMinvJt_damping
model.r_coeff = r_coeff
return model
def plotSolution(solver, bounds=True, figIndex=1, figTitle="", show=True):
import matplotlib.pyplot as plt
xs, us = [], []
if bounds:
us_lb, us_ub = [], []
xs_lb, xs_ub = [], []
if isinstance(solver, list):
rmodel = solver[0].problem.runningModels[0].state.pinocchio
for s in solver:
xs.extend(s.xs[:-1])
us.extend(s.us)
if bounds:
models = s.problem.runningModels.tolist() + [s.problem.terminalModel]
for m in models:
us_lb += [m.u_lb]
us_ub += [m.u_ub]
xs_lb += [m.state.lb]
xs_ub += [m.state.ub]
else:
rmodel = solver.problem.runningModels[0].state.pinocchio
xs, us = solver.xs, solver.us
if bounds:
models = solver.problem.runningModels.tolist() + [solver.problem.terminalModel]
for m in models:
us_lb += [m.u_lb]
us_ub += [m.u_ub]
xs_lb += [m.state.lb]
xs_ub += [m.state.ub]
# Getting the state and control trajectories
nx, nq, nu = xs[0].shape[0], rmodel.nq, us[0].shape[0]
X = [0.] * nx
U = [0.] * nu
if bounds:
U_LB = [0.] * nu
U_UB = [0.] * nu
X_LB = [0.] * nx
X_UB = [0.] * nx
for i in range(nx):
X[i] = [np.asscalar(x[i]) for x in xs]
if bounds:
X_LB[i] = [np.asscalar(x[i]) for x in xs_lb]
X_UB[i] = [np.asscalar(x[i]) for x in xs_ub]
for i in range(nu):
U[i] = [np.asscalar(u[i]) if u.shape[0] != 0 else 0 for u in us]
if bounds:
U_LB[i] = [np.asscalar(u[i]) if u.shape[0] != 0 else np.nan for u in us_lb]
U_UB[i] = [np.asscalar(u[i]) if u.shape[0] != 0 else np.nan for u in us_ub]
# Plotting the joint positions, velocities and torques
plt.figure(figIndex)
plt.suptitle(figTitle)
legJointNames = ['HAA', 'HFE', 'KFE']
# LF foot
plt.subplot(4, 3, 1)
plt.title('joint position [rad]')
[plt.plot(X[k], label=legJointNames[i]) for i, k in enumerate(range(7, 10))]
if bounds:
[plt.plot(X_LB[k], '--r') for i, k in enumerate(range(7, 10))]
[plt.plot(X_UB[k], '--r') for i, k in enumerate(range(7, 10))]
plt.ylabel('LF')
plt.legend()
plt.subplot(4, 3, 2)
plt.title('joint velocity [rad/s]')
[plt.plot(X[k], label=legJointNames[i]) for i, k in enumerate(range(nq + 6, nq + 9))]
if bounds:
[plt.plot(X_LB[k], '--r') for i, k in enumerate(range(nq + 6, nq + 9))]
[plt.plot(X_UB[k], '--r') for i, k in enumerate(range(nq + 6, nq + 9))]
plt.ylabel('LF')
plt.legend()
plt.subplot(4, 3, 3)
plt.title('joint torque [Nm]')
[plt.plot(U[k], label=legJointNames[i]) for i, k in enumerate(range(0, 3))]
if bounds:
[plt.plot(U_LB[k], '--r') for i, k in enumerate(range(0, 3))]
[plt.plot(U_UB[k], '--r') for i, k in enumerate(range(0, 3))]
plt.ylabel('LF')
plt.legend()
# LH foot
plt.subplot(4, 3, 4)
[plt.plot(X[k], label=legJointNames[i]) for i, k in enumerate(range(10, 13))]
if bounds:
[plt.plot(X_LB[k], '--r') for i, k in enumerate(range(10, 13))]
[plt.plot(X_UB[k], '--r') for i, k in enumerate(range(10, 13))]
plt.ylabel('LH')
plt.legend()
plt.subplot(4, 3, 5)
[plt.plot(X[k], label=legJointNames[i]) for i, k in enumerate(range(nq + 9, nq + 12))]
if bounds:
[plt.plot(X_LB[k], '--r') for i, k in enumerate(range(nq + 9, nq + 12))]
[plt.plot(X_UB[k], '--r') for i, k in enumerate(range(nq + 9, nq + 12))]
plt.ylabel('LH')
plt.legend()
plt.subplot(4, 3, 6)
[plt.plot(U[k], label=legJointNames[i]) for i, k in enumerate(range(3, 6))]
if bounds:
[plt.plot(U_LB[k], '--r') for i, k in enumerate(range(3, 6))]
[plt.plot(U_UB[k], '--r') for i, k in enumerate(range(3, 6))]
plt.ylabel('LH')
plt.legend()
# RF foot
plt.subplot(4, 3, 7)
[plt.plot(X[k], label=legJointNames[i]) for i, k in enumerate(range(13, 16))]
if bounds:
[plt.plot(X_LB[k], '--r') for i, k in enumerate(range(13, 16))]
[plt.plot(X_UB[k], '--r') for i, k in enumerate(range(13, 16))]
plt.ylabel('RF')
plt.legend()
plt.subplot(4, 3, 8)
[plt.plot(X[k], label=legJointNames[i]) for i, k in enumerate(range(nq + 12, nq + 15))]
if bounds:
[plt.plot(X_LB[k], '--r') for i, k in enumerate(range(nq + 12, nq + 15))]
[plt.plot(X_UB[k], '--r') for i, k in enumerate(range(nq + 12, nq + 15))]
plt.ylabel('RF')
plt.legend()
plt.subplot(4, 3, 9)
[plt.plot(U[k], label=legJointNames[i]) for i, k in enumerate(range(6, 9))]
if bounds:
[plt.plot(U_LB[k], '--r') for i, k in enumerate(range(6, 9))]
[plt.plot(U_UB[k], '--r') for i, k in enumerate(range(6, 9))]
plt.ylabel('RF')
plt.legend()
# RH foot
plt.subplot(4, 3, 10)
[plt.plot(X[k], label=legJointNames[i]) for i, k in enumerate(range(16, 19))]
if bounds:
[plt.plot(X_LB[k], '--r') for i, k in enumerate(range(16, 19))]
[plt.plot(X_UB[k], '--r') for i, k in enumerate(range(16, 19))]
plt.ylabel('RH')
plt.xlabel('knots')
plt.legend()
plt.subplot(4, 3, 11)
[plt.plot(X[k], label=legJointNames[i]) for i, k in enumerate(range(nq + 15, nq + 18))]
if bounds:
[plt.plot(X_LB[k], '--r') for i, k in enumerate(range(nq + 15, nq + 18))]
[plt.plot(X_UB[k], '--r') for i, k in enumerate(range(nq + 15, nq + 18))]
plt.ylabel('RH')
plt.xlabel('knots')
plt.legend()
plt.subplot(4, 3, 12)
[plt.plot(U[k], label=legJointNames[i]) for i, k in enumerate(range(9, 12))]
if bounds:
[plt.plot(U_LB[k], '--r') for i, k in enumerate(range(9, 12))]
[plt.plot(U_UB[k], '--r') for i, k in enumerate(range(9, 12))]
plt.ylabel('RH')
plt.legend()
plt.xlabel('knots')
plt.figure(figIndex + 1)
plt.suptitle(figTitle)
rdata = rmodel.createData()
Cx = []
Cy = []
for x in xs:
q = x[:nq]
c = pinocchio.centerOfMass(rmodel, rdata, q)
Cx.append(np.asscalar(c[0]))
Cy.append(np.asscalar(c[1]))
plt.plot(Cx, Cy)
plt.title('CoM position')
plt.xlabel('x [m]')
plt.ylabel('y [m]')
plt.grid(True)
if show:
plt.show()
| 48.860497 | 119 | 0.607322 | import crocoddyl
import pinocchio
import numpy as np
class SimpleQuadrupedalGaitProblem:
def __init__(self, rmodel, lfFoot, rfFoot, lhFoot, rhFoot):
self.rmodel = rmodel
self.rdata = rmodel.createData()
self.state = crocoddyl.StateMultibody(self.rmodel)
self.actuation = crocoddyl.ActuationModelFloatingBase(self.state)
self.lfFootId = self.rmodel.getFrameId(lfFoot)
self.rfFootId = self.rmodel.getFrameId(rfFoot)
self.lhFootId = self.rmodel.getFrameId(lhFoot)
self.rhFootId = self.rmodel.getFrameId(rhFoot)
q0 = self.rmodel.referenceConfigurations["standing"]
self.rmodel.defaultState = np.concatenate([q0, np.zeros(self.rmodel.nv)])
self.firstStep = True
self.mu = 0.7
self.nsurf = np.array([0., 0., 1.])
def createCoMProblem(self, x0, comGoTo, timeStep, numKnots):
q0 = x0[:self.rmodel.nq]
pinocchio.forwardKinematics(self.rmodel, self.rdata, q0)
pinocchio.updateFramePlacements(self.rmodel, self.rdata)
com0 = pinocchio.centerOfMass(self.rmodel, self.rdata, q0)
comModels = []
comForwardModels = [
self.createSwingFootModel(
timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
) for k in range(numKnots)
]
comForwardTermModel = self.createSwingFootModel(timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
com0 + np.array([comGoTo, 0., 0.]))
comForwardTermModel.differential.costs.costs['comTrack'].weight = 1e6
comBackwardModels = [
self.createSwingFootModel(
timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
) for k in range(numKnots)
]
comBackwardTermModel = self.createSwingFootModel(timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
com0 + np.array([-comGoTo, 0., 0.]))
comBackwardTermModel.differential.costs.costs['comTrack'].weight = 1e6
comModels += comForwardModels + [comForwardTermModel]
comModels += comBackwardModels + [comBackwardTermModel]
problem = crocoddyl.ShootingProblem(x0, comModels, comModels[-1])
return problem
def createCoMGoalProblem(self, x0, comGoTo, timeStep, numKnots):
q0 = self.rmodel.referenceConfigurations["standing"]
pinocchio.forwardKinematics(self.rmodel, self.rdata, q0)
pinocchio.updateFramePlacements(self.rmodel, self.rdata)
com0 = pinocchio.centerOfMass(self.rmodel, self.rdata, q0)
comModels = []
comForwardModels = [
self.createSwingFootModel(
timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
) for k in range(numKnots)
]
comForwardTermModel = self.createSwingFootModel(timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
com0 + np.array([comGoTo, 0., 0.]))
comForwardTermModel.differential.costs.costs['comTrack'].weight = 1e6
comModels += comForwardModels + [comForwardTermModel]
problem = crocoddyl.ShootingProblem(x0, comModels, comModels[-1])
return problem
def createWalkingProblem(self, x0, stepLength, stepHeight, timeStep, stepKnots, supportKnots):
q0 = x0[:self.rmodel.nq]
pinocchio.forwardKinematics(self.rmodel, self.rdata, q0)
pinocchio.updateFramePlacements(self.rmodel, self.rdata)
rfFootPos0 = self.rdata.oMf[self.rfFootId].translation
rhFootPos0 = self.rdata.oMf[self.rhFootId].translation
lfFootPos0 = self.rdata.oMf[self.lfFootId].translation
lhFootPos0 = self.rdata.oMf[self.lhFootId].translation
comRef = (rfFootPos0 + rhFootPos0 + lfFootPos0 + lhFootPos0) / 4
comRef[2] = np.asscalar(pinocchio.centerOfMass(self.rmodel, self.rdata, q0)[2])
loco3dModel = []
doubleSupport = [
self.createSwingFootModel(
timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
) for k in range(supportKnots)
]
if self.firstStep is True:
rhStep = self.createFootstepModels(comRef, [rhFootPos0], 0.5 * stepLength, stepHeight, timeStep, stepKnots,
[self.lfFootId, self.rfFootId, self.lhFootId], [self.rhFootId])
rfStep = self.createFootstepModels(comRef, [rfFootPos0], 0.5 * stepLength, stepHeight, timeStep, stepKnots,
[self.lfFootId, self.lhFootId, self.rhFootId], [self.rfFootId])
self.firstStep = False
else:
rhStep = self.createFootstepModels(comRef, [rhFootPos0], stepLength, stepHeight, timeStep, stepKnots,
[self.lfFootId, self.rfFootId, self.lhFootId], [self.rhFootId])
rfStep = self.createFootstepModels(comRef, [rfFootPos0], stepLength, stepHeight, timeStep, stepKnots,
[self.lfFootId, self.lhFootId, self.rhFootId], [self.rfFootId])
lhStep = self.createFootstepModels(comRef, [lhFootPos0], stepLength, stepHeight, timeStep, stepKnots,
[self.lfFootId, self.rfFootId, self.rhFootId], [self.lhFootId])
lfStep = self.createFootstepModels(comRef, [lfFootPos0], stepLength, stepHeight, timeStep, stepKnots,
[self.rfFootId, self.lhFootId, self.rhFootId], [self.lfFootId])
loco3dModel += doubleSupport + rhStep + rfStep
loco3dModel += doubleSupport + lhStep + lfStep
problem = crocoddyl.ShootingProblem(x0, loco3dModel, loco3dModel[-1])
return problem
def createTrottingProblem(self, x0, stepLength, stepHeight, timeStep, stepKnots, supportKnots):
q0 = x0[:self.rmodel.nq]
pinocchio.forwardKinematics(self.rmodel, self.rdata, q0)
pinocchio.updateFramePlacements(self.rmodel, self.rdata)
rfFootPos0 = self.rdata.oMf[self.rfFootId].translation
rhFootPos0 = self.rdata.oMf[self.rhFootId].translation
lfFootPos0 = self.rdata.oMf[self.lfFootId].translation
lhFootPos0 = self.rdata.oMf[self.lhFootId].translation
comRef = (rfFootPos0 + rhFootPos0 + lfFootPos0 + lhFootPos0) / 4
comRef[2] = np.asscalar(pinocchio.centerOfMass(self.rmodel, self.rdata, q0)[2])
loco3dModel = []
doubleSupport = [
self.createSwingFootModel(
timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
) for k in range(supportKnots)
]
if self.firstStep is True:
rflhStep = self.createFootstepModels(comRef, [rfFootPos0, lhFootPos0], 0.5 * stepLength, stepHeight,
timeStep, stepKnots, [self.lfFootId, self.rhFootId],
[self.rfFootId, self.lhFootId])
self.firstStep = False
else:
rflhStep = self.createFootstepModels(comRef, [rfFootPos0, lhFootPos0], stepLength, stepHeight, timeStep,
stepKnots, [self.lfFootId, self.rhFootId],
[self.rfFootId, self.lhFootId])
lfrhStep = self.createFootstepModels(comRef, [lfFootPos0, rhFootPos0], stepLength, stepHeight, timeStep,
stepKnots, [self.rfFootId, self.lhFootId], [self.lfFootId, self.rhFootId])
loco3dModel += doubleSupport + rflhStep
loco3dModel += doubleSupport + lfrhStep
problem = crocoddyl.ShootingProblem(x0, loco3dModel, loco3dModel[-1])
return problem
def createPacingProblem(self, x0, stepLength, stepHeight, timeStep, stepKnots, supportKnots):
q0 = x0[:self.rmodel.nq]
pinocchio.forwardKinematics(self.rmodel, self.rdata, q0)
pinocchio.updateFramePlacements(self.rmodel, self.rdata)
rfFootPos0 = self.rdata.oMf[self.rfFootId].translation
rhFootPos0 = self.rdata.oMf[self.rhFootId].translation
lfFootPos0 = self.rdata.oMf[self.lfFootId].translation
lhFootPos0 = self.rdata.oMf[self.lhFootId].translation
comRef = (rfFootPos0 + rhFootPos0 + lfFootPos0 + lhFootPos0) / 4
comRef[2] = np.asscalar(pinocchio.centerOfMass(self.rmodel, self.rdata, q0)[2])
loco3dModel = []
doubleSupport = [
self.createSwingFootModel(
timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
) for k in range(supportKnots)
]
if self.firstStep is True:
rightSteps = self.createFootstepModels(comRef, [rfFootPos0, rhFootPos0], 0.5 * stepLength, stepHeight,
timeStep, stepKnots, [self.lfFootId, self.lhFootId],
[self.rfFootId, self.rhFootId])
self.firstStep = False
else:
rightSteps = self.createFootstepModels(comRef, [rfFootPos0, rhFootPos0], stepLength, stepHeight, timeStep,
stepKnots, [self.lfFootId, self.lhFootId],
[self.rfFootId, self.rhFootId])
leftSteps = self.createFootstepModels(comRef, [lfFootPos0, lhFootPos0], stepLength, stepHeight, timeStep,
stepKnots, [self.rfFootId, self.rhFootId],
[self.lfFootId, self.lhFootId])
loco3dModel += doubleSupport + rightSteps
loco3dModel += doubleSupport + leftSteps
problem = crocoddyl.ShootingProblem(x0, loco3dModel, loco3dModel[-1])
return problem
def createBoundingProblem(self, x0, stepLength, stepHeight, timeStep, stepKnots, supportKnots):
q0 = x0[:self.rmodel.nq]
pinocchio.forwardKinematics(self.rmodel, self.rdata, q0)
pinocchio.updateFramePlacements(self.rmodel, self.rdata)
rfFootPos0 = self.rdata.oMf[self.rfFootId].translation
rhFootPos0 = self.rdata.oMf[self.rhFootId].translation
lfFootPos0 = self.rdata.oMf[self.lfFootId].translation
lhFootPos0 = self.rdata.oMf[self.lhFootId].translation
comRef = (rfFootPos0 + rhFootPos0 + lfFootPos0 + lhFootPos0) / 4
comRef[2] = np.asscalar(pinocchio.centerOfMass(self.rmodel, self.rdata, q0)[2])
loco3dModel = []
doubleSupport = [
self.createSwingFootModel(timeStep, [self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId])
for k in range(supportKnots)
]
hindSteps = self.createFootstepModels(comRef, [lfFootPos0, rfFootPos0], stepLength, stepHeight, timeStep,
stepKnots, [self.lhFootId, self.rhFootId],
[self.lfFootId, self.rfFootId])
frontSteps = self.createFootstepModels(comRef, [lhFootPos0, rhFootPos0], stepLength, stepHeight, timeStep,
stepKnots, [self.lfFootId, self.rfFootId],
[self.lhFootId, self.rhFootId])
loco3dModel += doubleSupport + hindSteps
loco3dModel += doubleSupport + frontSteps
problem = crocoddyl.ShootingProblem(x0, loco3dModel, loco3dModel[-1])
return problem
def createJumpingProblem(self, x0, jumpHeight, jumpLength, timeStep, groundKnots, flyingKnots):
q0 = x0[:self.rmodel.nq]
pinocchio.forwardKinematics(self.rmodel, self.rdata, q0)
pinocchio.updateFramePlacements(self.rmodel, self.rdata)
rfFootPos0 = self.rdata.oMf[self.rfFootId].translation
rhFootPos0 = self.rdata.oMf[self.rhFootId].translation
lfFootPos0 = self.rdata.oMf[self.lfFootId].translation
lhFootPos0 = self.rdata.oMf[self.lhFootId].translation
df = jumpLength[2] - rfFootPos0[2]
rfFootPos0[2] = 0.
rhFootPos0[2] = 0.
lfFootPos0[2] = 0.
lhFootPos0[2] = 0.
comRef = (rfFootPos0 + rhFootPos0 + lfFootPos0 + lhFootPos0) / 4
comRef[2] = np.asscalar(pinocchio.centerOfMass(self.rmodel, self.rdata, q0)[2])
loco3dModel = []
takeOff = [
self.createSwingFootModel(
timeStep,
[self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
) for k in range(groundKnots)
]
flyingUpPhase = [
self.createSwingFootModel(
timeStep, [],
np.array([jumpLength[0], jumpLength[1], jumpLength[2] + jumpHeight]) * (k + 1) / flyingKnots + comRef)
for k in range(flyingKnots)
]
flyingDownPhase = []
for k in range(flyingKnots):
flyingDownPhase += [self.createSwingFootModel(timeStep, [])]
f0 = jumpLength
footTask = [
crocoddyl.FramePlacement(self.lfFootId, pinocchio.SE3(np.eye(3), lfFootPos0 + f0)),
crocoddyl.FramePlacement(self.rfFootId, pinocchio.SE3(np.eye(3), rfFootPos0 + f0)),
crocoddyl.FramePlacement(self.lhFootId, pinocchio.SE3(np.eye(3), lhFootPos0 + f0)),
crocoddyl.FramePlacement(self.rhFootId, pinocchio.SE3(np.eye(3), rhFootPos0 + f0))
]
landingPhase = [
self.createFootSwitchModel([self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId], footTask, False)
]
f0[2] = df
landed = [
self.createSwingFootModel(timeStep, [self.lfFootId, self.rfFootId, self.lhFootId, self.rhFootId],
comTask=comRef + f0) for k in range(groundKnots)
]
loco3dModel += takeOff
loco3dModel += flyingUpPhase
loco3dModel += flyingDownPhase
loco3dModel += landingPhase
loco3dModel += landed
problem = crocoddyl.ShootingProblem(x0, loco3dModel, loco3dModel[-1])
return problem
def createFootstepModels(self, comPos0, feetPos0, stepLength, stepHeight, timeStep, numKnots, supportFootIds,
swingFootIds):
numLegs = len(supportFootIds) + len(swingFootIds)
comPercentage = float(len(swingFootIds)) / numLegs
footSwingModel = []
for k in range(numKnots):
swingFootTask = []
for i, p in zip(swingFootIds, feetPos0):
phKnots = numKnots / 2
if k < phKnots:
dp = np.array([stepLength * (k + 1) / numKnots, 0., stepHeight * k / phKnots])
elif k == phKnots:
dp = np.array([stepLength * (k + 1) / numKnots, 0., stepHeight])
else:
dp = np.array(
[stepLength * (k + 1) / numKnots, 0., stepHeight * (1 - float(k - phKnots) / phKnots)])
tref = p + dp
swingFootTask += [crocoddyl.FramePlacement(i, pinocchio.SE3(np.eye(3), tref))]
comTask = np.array([stepLength * (k + 1) / numKnots, 0., 0.]) * comPercentage + comPos0
footSwingModel += [
self.createSwingFootModel(timeStep, supportFootIds, comTask=comTask, swingFootTask=swingFootTask)
]
footSwitchModel = self.createFootSwitchModel(supportFootIds, swingFootTask)
comPos0 += [stepLength * comPercentage, 0., 0.]
for p in feetPos0:
p += [stepLength, 0., 0.]
return footSwingModel + [footSwitchModel]
def createSwingFootModel(self, timeStep, supportFootIds, comTask=None, swingFootTask=None):
contactModel = crocoddyl.ContactModelMultiple(self.state, self.actuation.nu)
for i in supportFootIds:
xref = crocoddyl.FrameTranslation(i, np.array([0., 0., 0.]))
supportContactModel = crocoddyl.ContactModel3D(self.state, xref, self.actuation.nu, np.array([0., 50.]))
contactModel.addContact(self.rmodel.frames[i].name + "_contact", supportContactModel)
costModel = crocoddyl.CostModelSum(self.state, self.actuation.nu)
if isinstance(comTask, np.ndarray):
comTrack = crocoddyl.CostModelCoMPosition(self.state, comTask, self.actuation.nu)
costModel.addCost("comTrack", comTrack, 1e6)
for i in supportFootIds:
cone = crocoddyl.FrictionCone(self.nsurf, self.mu, 4, False)
frictionCone = crocoddyl.CostModelContactFrictionCone(
self.state, crocoddyl.ActivationModelQuadraticBarrier(crocoddyl.ActivationBounds(cone.lb, cone.ub)),
crocoddyl.FrameFrictionCone(i, cone), self.actuation.nu)
costModel.addCost(self.rmodel.frames[i].name + "_frictionCone", frictionCone, 1e1)
if swingFootTask is not None:
for i in swingFootTask:
xref = crocoddyl.FrameTranslation(i.id, i.placement.translation)
footTrack = crocoddyl.CostModelFrameTranslation(self.state, xref, self.actuation.nu)
costModel.addCost(self.rmodel.frames[i.id].name + "_footTrack", footTrack, 1e6)
stateWeights = np.array([0.] * 3 + [500.] * 3 + [0.01] * (self.rmodel.nv - 6) + [10.] * 6 + [1.] *
(self.rmodel.nv - 6))
stateReg = crocoddyl.CostModelState(self.state, crocoddyl.ActivationModelWeightedQuad(stateWeights**2),
self.rmodel.defaultState, self.actuation.nu)
ctrlReg = crocoddyl.CostModelControl(self.state, self.actuation.nu)
costModel.addCost("stateReg", stateReg, 1e1)
costModel.addCost("ctrlReg", ctrlReg, 1e-1)
lb = np.concatenate([self.state.lb[1:self.state.nv + 1], self.state.lb[-self.state.nv:]])
ub = np.concatenate([self.state.ub[1:self.state.nv + 1], self.state.ub[-self.state.nv:]])
stateBounds = crocoddyl.CostModelState(
self.state, crocoddyl.ActivationModelQuadraticBarrier(crocoddyl.ActivationBounds(lb, ub)),
0 * self.rmodel.defaultState, self.actuation.nu)
costModel.addCost("stateBounds", stateBounds, 1e3)
dmodel = crocoddyl.DifferentialActionModelContactFwdDynamics(self.state, self.actuation, contactModel,
costModel, 0., True)
model = crocoddyl.IntegratedActionModelEuler(dmodel, timeStep)
return model
def createFootSwitchModel(self, supportFootIds, swingFootTask, pseudoImpulse=False):
if pseudoImpulse:
return self.createPseudoImpulseModel(supportFootIds, swingFootTask)
else:
return self.createImpulseModel(supportFootIds, swingFootTask)
def createPseudoImpulseModel(self, supportFootIds, swingFootTask):
contactModel = crocoddyl.ContactModelMultiple(self.state, self.actuation.nu)
for i in supportFootIds:
xref = crocoddyl.FrameTranslation(i, np.array([0., 0., 0.]))
supportContactModel = crocoddyl.ContactModel3D(self.state, xref, self.actuation.nu, np.array([0., 50.]))
contactModel.addContact(self.rmodel.frames[i].name + "_contact", supportContactModel)
costModel = crocoddyl.CostModelSum(self.state, self.actuation.nu)
for i in supportFootIds:
cone = crocoddyl.FrictionCone(self.nsurf, self.mu, 4, False)
frictionCone = crocoddyl.CostModelContactFrictionCone(
self.state, crocoddyl.ActivationModelQuadraticBarrier(crocoddyl.ActivationBounds(cone.lb, cone.ub)),
crocoddyl.FrameFrictionCone(i, cone), self.actuation.nu)
costModel.addCost(self.rmodel.frames[i].name + "_frictionCone", frictionCone, 1e1)
if swingFootTask is not None:
for i in swingFootTask:
xref = crocoddyl.FrameTranslation(i.frame, i.oMf.translation)
vref = crocoddyl.FrameMotion(i.frame, pinocchio.Motion.Zero())
footTrack = crocoddyl.CostModelFrameTranslation(self.state, xref, self.actuation.nu)
impulseFootVelCost = crocoddyl.CostModelFrameVelocity(self.state, vref, self.actuation.nu)
costModel.addCost(self.rmodel.frames[i.frame].name + "_footTrack", footTrack, 1e7)
costModel.addCost(self.rmodel.frames[i.frame].name + "_impulseVel", impulseFootVelCost, 1e6)
stateWeights = np.array([0.] * 3 + [500.] * 3 + [0.01] * (self.rmodel.nv - 6) + [10.] * self.rmodel.nv)
stateReg = crocoddyl.CostModelState(self.state, crocoddyl.ActivationModelWeightedQuad(stateWeights**2),
self.rmodel.defaultState, self.actuation.nu)
ctrlReg = crocoddyl.CostModelControl(self.state, self.actuation.nu)
costModel.addCost("stateReg", stateReg, 1e1)
costModel.addCost("ctrlReg", ctrlReg, 1e-3)
dmodel = crocoddyl.DifferentialActionModelContactFwdDynamics(self.state, self.actuation, contactModel,
costModel, 0., True)
model = crocoddyl.IntegratedActionModelEuler(dmodel, 0.)
return model
def createImpulseModel(self, supportFootIds, swingFootTask, JMinvJt_damping=1e-12, r_coeff=0.0):
impulseModel = crocoddyl.ImpulseModelMultiple(self.state)
for i in supportFootIds:
supportContactModel = crocoddyl.ImpulseModel3D(self.state, i)
impulseModel.addImpulse(self.rmodel.frames[i].name + "_impulse", supportContactModel)
costModel = crocoddyl.CostModelSum(self.state, 0)
if swingFootTask is not None:
for i in swingFootTask:
xref = crocoddyl.FrameTranslation(i.id, i.placement.translation)
footTrack = crocoddyl.CostModelFrameTranslation(self.state, xref, 0)
costModel.addCost(self.rmodel.frames[i.id].name + "_footTrack", footTrack, 1e7)
stateWeights = np.array([1.] * 6 + [10.] * (self.rmodel.nv - 6) + [10.] * self.rmodel.nv)
stateReg = crocoddyl.CostModelState(self.state, crocoddyl.ActivationModelWeightedQuad(stateWeights**2),
self.rmodel.defaultState, 0)
costModel.addCost("stateReg", stateReg, 1e1)
model = crocoddyl.ActionModelImpulseFwdDynamics(self.state, impulseModel, costModel)
model.JMinvJt_damping = JMinvJt_damping
model.r_coeff = r_coeff
return model
def plotSolution(solver, bounds=True, figIndex=1, figTitle="", show=True):
import matplotlib.pyplot as plt
xs, us = [], []
if bounds:
us_lb, us_ub = [], []
xs_lb, xs_ub = [], []
if isinstance(solver, list):
rmodel = solver[0].problem.runningModels[0].state.pinocchio
for s in solver:
xs.extend(s.xs[:-1])
us.extend(s.us)
if bounds:
models = s.problem.runningModels.tolist() + [s.problem.terminalModel]
for m in models:
us_lb += [m.u_lb]
us_ub += [m.u_ub]
xs_lb += [m.state.lb]
xs_ub += [m.state.ub]
else:
rmodel = solver.problem.runningModels[0].state.pinocchio
xs, us = solver.xs, solver.us
if bounds:
models = solver.problem.runningModels.tolist() + [solver.problem.terminalModel]
for m in models:
us_lb += [m.u_lb]
us_ub += [m.u_ub]
xs_lb += [m.state.lb]
xs_ub += [m.state.ub]
nx, nq, nu = xs[0].shape[0], rmodel.nq, us[0].shape[0]
X = [0.] * nx
U = [0.] * nu
if bounds:
U_LB = [0.] * nu
U_UB = [0.] * nu
X_LB = [0.] * nx
X_UB = [0.] * nx
for i in range(nx):
X[i] = [np.asscalar(x[i]) for x in xs]
if bounds:
X_LB[i] = [np.asscalar(x[i]) for x in xs_lb]
X_UB[i] = [np.asscalar(x[i]) for x in xs_ub]
for i in range(nu):
U[i] = [np.asscalar(u[i]) if u.shape[0] != 0 else 0 for u in us]
if bounds:
U_LB[i] = [np.asscalar(u[i]) if u.shape[0] != 0 else np.nan for u in us_lb]
U_UB[i] = [np.asscalar(u[i]) if u.shape[0] != 0 else np.nan for u in us_ub]
plt.figure(figIndex)
plt.suptitle(figTitle)
legJointNames = ['HAA', 'HFE', 'KFE']
plt.subplot(4, 3, 1)
plt.title('joint position [rad]')
[plt.plot(X[k], label=legJointNames[i]) for i, k in enumerate(range(7, 10))]
if bounds:
[plt.plot(X_LB[k], '--r') for i, k in enumerate(range(7, 10))]
[plt.plot(X_UB[k], '--r') for i, k in enumerate(range(7, 10))]
plt.ylabel('LF')
plt.legend()
plt.subplot(4, 3, 2)
plt.title('joint velocity [rad/s]')
[plt.plot(X[k], label=legJointNames[i]) for i, k in enumerate(range(nq + 6, nq + 9))]
if bounds:
[plt.plot(X_LB[k], '--r') for i, k in enumerate(range(nq + 6, nq + 9))]
[plt.plot(X_UB[k], '--r') for i, k in enumerate(range(nq + 6, nq + 9))]
plt.ylabel('LF')
plt.legend()
plt.subplot(4, 3, 3)
plt.title('joint torque [Nm]')
[plt.plot(U[k], label=legJointNames[i]) for i, k in enumerate(range(0, 3))]
if bounds:
[plt.plot(U_LB[k], '--r') for i, k in enumerate(range(0, 3))]
[plt.plot(U_UB[k], '--r') for i, k in enumerate(range(0, 3))]
plt.ylabel('LF')
plt.legend()
plt.subplot(4, 3, 4)
[plt.plot(X[k], label=legJointNames[i]) for i, k in enumerate(range(10, 13))]
if bounds:
[plt.plot(X_LB[k], '--r') for i, k in enumerate(range(10, 13))]
[plt.plot(X_UB[k], '--r') for i, k in enumerate(range(10, 13))]
plt.ylabel('LH')
plt.legend()
plt.subplot(4, 3, 5)
[plt.plot(X[k], label=legJointNames[i]) for i, k in enumerate(range(nq + 9, nq + 12))]
if bounds:
[plt.plot(X_LB[k], '--r') for i, k in enumerate(range(nq + 9, nq + 12))]
[plt.plot(X_UB[k], '--r') for i, k in enumerate(range(nq + 9, nq + 12))]
plt.ylabel('LH')
plt.legend()
plt.subplot(4, 3, 6)
[plt.plot(U[k], label=legJointNames[i]) for i, k in enumerate(range(3, 6))]
if bounds:
[plt.plot(U_LB[k], '--r') for i, k in enumerate(range(3, 6))]
[plt.plot(U_UB[k], '--r') for i, k in enumerate(range(3, 6))]
plt.ylabel('LH')
plt.legend()
plt.subplot(4, 3, 7)
[plt.plot(X[k], label=legJointNames[i]) for i, k in enumerate(range(13, 16))]
if bounds:
[plt.plot(X_LB[k], '--r') for i, k in enumerate(range(13, 16))]
[plt.plot(X_UB[k], '--r') for i, k in enumerate(range(13, 16))]
plt.ylabel('RF')
plt.legend()
plt.subplot(4, 3, 8)
[plt.plot(X[k], label=legJointNames[i]) for i, k in enumerate(range(nq + 12, nq + 15))]
if bounds:
[plt.plot(X_LB[k], '--r') for i, k in enumerate(range(nq + 12, nq + 15))]
[plt.plot(X_UB[k], '--r') for i, k in enumerate(range(nq + 12, nq + 15))]
plt.ylabel('RF')
plt.legend()
plt.subplot(4, 3, 9)
[plt.plot(U[k], label=legJointNames[i]) for i, k in enumerate(range(6, 9))]
if bounds:
[plt.plot(U_LB[k], '--r') for i, k in enumerate(range(6, 9))]
[plt.plot(U_UB[k], '--r') for i, k in enumerate(range(6, 9))]
plt.ylabel('RF')
plt.legend()
plt.subplot(4, 3, 10)
[plt.plot(X[k], label=legJointNames[i]) for i, k in enumerate(range(16, 19))]
if bounds:
[plt.plot(X_LB[k], '--r') for i, k in enumerate(range(16, 19))]
[plt.plot(X_UB[k], '--r') for i, k in enumerate(range(16, 19))]
plt.ylabel('RH')
plt.xlabel('knots')
plt.legend()
plt.subplot(4, 3, 11)
[plt.plot(X[k], label=legJointNames[i]) for i, k in enumerate(range(nq + 15, nq + 18))]
if bounds:
[plt.plot(X_LB[k], '--r') for i, k in enumerate(range(nq + 15, nq + 18))]
[plt.plot(X_UB[k], '--r') for i, k in enumerate(range(nq + 15, nq + 18))]
plt.ylabel('RH')
plt.xlabel('knots')
plt.legend()
plt.subplot(4, 3, 12)
[plt.plot(U[k], label=legJointNames[i]) for i, k in enumerate(range(9, 12))]
if bounds:
[plt.plot(U_LB[k], '--r') for i, k in enumerate(range(9, 12))]
[plt.plot(U_UB[k], '--r') for i, k in enumerate(range(9, 12))]
plt.ylabel('RH')
plt.legend()
plt.xlabel('knots')
plt.figure(figIndex + 1)
plt.suptitle(figTitle)
rdata = rmodel.createData()
Cx = []
Cy = []
for x in xs:
q = x[:nq]
c = pinocchio.centerOfMass(rmodel, rdata, q)
Cx.append(np.asscalar(c[0]))
Cy.append(np.asscalar(c[1]))
plt.plot(Cx, Cy)
plt.title('CoM position')
plt.xlabel('x [m]')
plt.ylabel('y [m]')
plt.grid(True)
if show:
plt.show()
| true | true |
1c3014f133ce0417cca626cf3e8e7c76faf83681 | 455 | py | Python | plotly/validators/histogram2dcontour/_zsrc.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | [
"MIT"
] | 12 | 2020-04-18T18:10:22.000Z | 2021-12-06T10:11:15.000Z | plotly/validators/histogram2dcontour/_zsrc.py | Vesauza/plotly.py | e53e626d59495d440341751f60aeff73ff365c28 | [
"MIT"
] | 27 | 2020-04-28T21:23:12.000Z | 2021-06-25T15:36:38.000Z | plotly/validators/histogram2dcontour/_zsrc.py | Vesauza/plotly.py | e53e626d59495d440341751f60aeff73ff365c28 | [
"MIT"
] | 6 | 2020-04-18T23:07:08.000Z | 2021-11-18T07:53:06.000Z | import _plotly_utils.basevalidators
class ZsrcValidator(_plotly_utils.basevalidators.SrcValidator):
    # Auto-generated validator for the `zsrc` attribute (the source
    # reference for `z` data) of `histogram2dcontour` traces.
    def __init__(
        self, plotly_name='zsrc', parent_name='histogram2dcontour', **kwargs
    ):
        # `edit_type` and `role` default to 'none'/'info' unless the caller
        # overrides them; any remaining kwargs pass through to SrcValidator.
        super(ZsrcValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop('edit_type', 'none'),
            role=kwargs.pop('role', 'info'),
            **kwargs
        )
| 28.4375 | 76 | 0.630769 | import _plotly_utils.basevalidators
class ZsrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Validator for the `zsrc` attribute of `histogram2dcontour` traces."""

    def __init__(self, plotly_name='zsrc',
                 parent_name='histogram2dcontour', **kwargs):
        # Pop the overridable defaults out of kwargs before forwarding the
        # remainder to the base SrcValidator.
        edit_type = kwargs.pop('edit_type', 'none')
        role = kwargs.pop('role', 'info')
        super(ZsrcValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            role=role,
            **kwargs
        )
| true | true |
1c3016aefbeb82d751c16adfb3029bb4fb855ea9 | 665 | py | Python | tests/src/metrics/recall_vs_loc.py | bellwethers-in-se/issueCloseTime | e5e00c9625da0793dc8e7985fd88b0ca0b35f7d3 | [
"MIT"
] | 9 | 2017-07-27T10:32:48.000Z | 2021-07-01T11:51:51.000Z | tests/src/metrics/recall_vs_loc.py | bellwethers-in-se/issueCloseTime | e5e00c9625da0793dc8e7985fd88b0ca0b35f7d3 | [
"MIT"
] | 11 | 2016-03-15T16:27:47.000Z | 2019-09-05T02:25:08.000Z | tests/src/metrics/recall_vs_loc.py | bellwethers-in-se/issueCloseTime | e5e00c9625da0793dc8e7985fd88b0ca0b35f7d3 | [
"MIT"
] | 5 | 2017-01-28T22:45:34.000Z | 2019-12-04T13:15:10.000Z | from __future__ import print_function, division
import numpy as np
from pdb import set_trace
def get_curve(loc, actual, predicted):
    """Build a recall-vs-effort curve by ranking instances on `loc`.

    Instances are sorted by ascending `loc` (presumably lines of code --
    confirm with callers); walking the ranking, recall (Pd) is accumulated
    over the ground-truth positives seen so far.

    :param loc: per-instance effort measure used to order the predictions.
    :param actual: ground-truth labels (1 == positive).
    :param predicted: predicted labels (1 == positive).
    :return: tuple ``(recall, loc_points, auc)`` where `recall` holds integer
        percentages, `loc_points` the sorted loc values, and `auc` the area
        under the curve divided by 100.
    """
    # Compute the ranking once instead of re-running argsort per array.
    order = np.argsort(loc)
    sorted_loc = np.array(loc)[order]
    sorted_act = np.array(actual)[order]
    sorted_prd = np.array(predicted)[order]
    # Do not shadow the `loc` parameter: accumulate curve points separately.
    recall, loc_points = [], []
    tp, fn, Pd = 0, 0, 0
    for a, p, l in zip(sorted_act, sorted_prd, sorted_loc):
        tp += 1 if a == 1 and p == 1 else 0
        fn += 1 if a == 1 and p == 0 else 0
        # Recall so far; keep the previous value until a positive appears.
        Pd = tp / (tp + fn) if (tp + fn) > 0 else Pd
        loc_points.append(l)
        recall.append(int(Pd * 100))
    # np.trapz was removed in NumPy 2.0 in favour of np.trapezoid; support both.
    _trapezoid = getattr(np, 'trapezoid', getattr(np, 'trapz', None))
    auc = _trapezoid(recall, loc_points) / 100
    return recall, loc_points, auc
| 30.227273 | 59 | 0.598496 | from __future__ import print_function, division
import numpy as np
from pdb import set_trace
def get_curve(loc, actual, predicted):
    """Build a recall-vs-effort curve by ranking instances on `loc`.

    Instances are sorted by ascending `loc` (presumably lines of code --
    confirm with callers); recall (Pd) is accumulated while walking the
    ranking, and the area under the resulting curve is returned as well.
    """
    sorted_loc = np.array(loc)[np.argsort(loc)]
    sorted_act = np.array(actual)[np.argsort(loc)]
    sorted_prd = np.array(predicted)[np.argsort(loc)]
    # NOTE(review): this rebinding shadows the `loc` parameter (it has
    # already been consumed above, but the reuse is easy to misread).
    recall, loc = [], []
    tp, fn, Pd = 0, 0, 0
    for a, p, l in zip(sorted_act, sorted_prd, sorted_loc):
        tp += 1 if a == 1 and p == 1 else 0
        fn += 1 if a == 1 and p == 0 else 0
        # Recall so far; retains the previous value until a positive is seen.
        Pd = tp / (tp + fn) if (tp + fn) > 0 else Pd
        loc.append(l)
        recall.append(int(Pd * 100))
    # NOTE(review): np.trapz was removed in NumPy 2.0 (renamed np.trapezoid);
    # this line breaks on NumPy >= 2.0.
    auc = np.trapz(recall, loc) / 100
    return recall, loc, auc
| true | true |
1c3016bf5a6af05377f952d07ae5fcdd8087d536 | 1,357 | py | Python | dash_docs/reusable_components/WorkspaceBlurb.py | jbampton/dash-docs | 33348057877ebd33622b514fc6073038c431976a | [
"MIT"
] | null | null | null | dash_docs/reusable_components/WorkspaceBlurb.py | jbampton/dash-docs | 33348057877ebd33622b514fc6073038c431976a | [
"MIT"
] | null | null | null | dash_docs/reusable_components/WorkspaceBlurb.py | jbampton/dash-docs | 33348057877ebd33622b514fc6073038c431976a | [
"MIT"
] | null | null | null | import dash_core_components as dcc
import dash_html_components as html
def image(vendor):
    """Return an `html.A` logo tile linking to plotly.com/dash/<vendor>.

    `vendor` (e.g. 'AWS', 'Azure') is interpolated into the logo image
    path, the label text, and the target URL.
    """
    return html.A(
        [
            html.Img(
                src='/assets/images/dds/{}-logo.png'.format(vendor)
            ),
            html.Div(
                'Install Dash Enterprise on {}'.format(vendor),
                className='label'
            )
        ],
        href='https://plotly.com/dash/{}'.format(vendor),
        # NOTE(review): the CSS class is 'azure' for every vendor -- styling
        # appears vendor-independent; confirm against the stylesheet.
        className='azure'
    )
WorkspaceBlurb = html.Div([
html.Blockquote([dcc.Markdown(
'''
Dash Enterprise is the fastest way to write & deploy Dash apps and
Jupyter notebooks. Dash Enterprise can be installed on the Kubernetes
services of
[AWS](https://go.plotly.com/dash-aws),
[Azure](https://go.plotly.com/dash-azure),
GCP,
or an
[on-premise Linux Server](https://plotly.com/dash/on-premises-linux/?utm_source=docs&utm_medium=workspace&utm_campaign=nov&utm_content=linux).
10% of the Fortune 500 uses Dash Enterprise to productionize AI and data science apps.
[Find out if your company is using Dash Enterprise](https://go.plotly.com/company-lookup)
'''),
html.Div([
image('Azure'),
html.Div(className='spacer'),
image('AWS'),
], className='azure-parent')
])
])
| 31.55814 | 150 | 0.582903 | import dash_core_components as dcc
import dash_html_components as html
def image(vendor):
    """Return a logo tile (`html.A`) pointing at the vendor's Dash page."""
    # Build the two children up front, then wrap them in the anchor.
    logo = html.Img(
        src='/assets/images/dds/{}-logo.png'.format(vendor)
    )
    label = html.Div(
        'Install Dash Enterprise on {}'.format(vendor),
        className='label'
    )
    return html.A(
        [logo, label],
        href='https://plotly.com/dash/{}'.format(vendor),
        className='azure'
    )
WorkspaceBlurb = html.Div([
html.Blockquote([dcc.Markdown(
'''
Dash Enterprise is the fastest way to write & deploy Dash apps and
Jupyter notebooks. Dash Enterprise can be installed on the Kubernetes
services of
[AWS](https://go.plotly.com/dash-aws),
[Azure](https://go.plotly.com/dash-azure),
GCP,
or an
[on-premise Linux Server](https://plotly.com/dash/on-premises-linux/?utm_source=docs&utm_medium=workspace&utm_campaign=nov&utm_content=linux).
10% of the Fortune 500 uses Dash Enterprise to productionize AI and data science apps.
[Find out if your company is using Dash Enterprise](https://go.plotly.com/company-lookup)
'''),
html.Div([
image('Azure'),
html.Div(className='spacer'),
image('AWS'),
], className='azure-parent')
])
])
| true | true |
1c3018d33d41f38ee26f5532c311979cf53838ae | 554 | py | Python | waffle/migrations/0004_update_everyone_nullbooleanfield.py | DavidCain/django-waffle | 006626cac66af6034b88d60712e87509b792cc97 | [
"BSD-3-Clause"
] | 313 | 2015-01-04T11:47:41.000Z | 2018-10-31T09:47:28.000Z | waffle/migrations/0004_update_everyone_nullbooleanfield.py | DavidCain/django-waffle | 006626cac66af6034b88d60712e87509b792cc97 | [
"BSD-3-Clause"
] | 167 | 2015-01-02T18:10:17.000Z | 2018-11-03T18:11:06.000Z | waffle/migrations/0004_update_everyone_nullbooleanfield.py | DavidCain/django-waffle | 006626cac66af6034b88d60712e87509b792cc97 | [
"BSD-3-Clause"
] | 138 | 2015-01-06T21:10:04.000Z | 2018-10-11T12:44:57.000Z | # Generated by Django 3.0.9 on 2020-08-18 04:55
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter `Flag.everyone` to a null-capable `BooleanField`.

    Auto-generated (Django 3.0.9). Per the migration filename this replaces
    the deprecated NullBooleanField; only the field definition changes, so
    no data migration is required.
    """
    dependencies = [
        ('waffle', '0003_update_strings_for_i18n'),
    ]
    operations = [
        migrations.AlterField(
            model_name='flag',
            name='everyone',
            # null=True keeps the tri-state semantics described in help_text
            # (Yes / No / Unknown).
            field=models.BooleanField(blank=True, help_text='Flip this flag on (Yes) or off (No) for everyone, overriding all other settings. Leave as Unknown to use normally.', null=True, verbose_name='Everyone'),
        ),
    ]
| 29.157895 | 214 | 0.649819 |
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: changes `Flag.everyone` to a
    # BooleanField with null=True (tri-state: Yes / No / Unknown).
    dependencies = [
        ('waffle', '0003_update_strings_for_i18n'),
    ]
    operations = [
        migrations.AlterField(
            model_name='flag',
            name='everyone',
            field=models.BooleanField(blank=True, help_text='Flip this flag on (Yes) or off (No) for everyone, overriding all other settings. Leave as Unknown to use normally.', null=True, verbose_name='Everyone'),
        ),
    ]
| true | true |
1c30192485317796fd46fb75d6de82b58e94da2a | 4,717 | py | Python | src/rogerthat/bizz/debugging.py | goubertbrent/oca-backend | b9f59cc02568aecb55d4b54aec05245790ea25fd | [
"Apache-2.0"
] | null | null | null | src/rogerthat/bizz/debugging.py | goubertbrent/oca-backend | b9f59cc02568aecb55d4b54aec05245790ea25fd | [
"Apache-2.0"
] | null | null | null | src/rogerthat/bizz/debugging.py | goubertbrent/oca-backend | b9f59cc02568aecb55d4b54aec05245790ea25fd | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2020 Green Valley Belgium NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.7@@
import base64
from google.appengine.ext import deferred, db
from mcfw.properties import azzert
from mcfw.rpc import returns, arguments
from rogerthat.bizz.system import start_log_forwarding, delete_xmpp_account
from rogerthat.consts import SCHEDULED_QUEUE
from rogerthat.dal.mobile import get_mobile_by_id, get_user_active_mobiles
from rogerthat.models import StartDebuggingRequest, CurrentlyForwardingLogs
from rogerthat.rpc import users
from rogerthat.settings import get_server_settings
from rogerthat.utils import channel, now, try_or_defer
from rogerthat.utils.crypto import encrypt_value, md5
from rogerthat.utils.transactions import on_trans_committed
@returns(CurrentlyForwardingLogs)
@arguments(app_user=users.User, timeout=(int, long))
def start_admin_debugging(app_user, timeout):
    """Start forwarding app_user's mobile logs to the current admin session.

    Records a StartDebuggingRequest and schedules an automatic stop after
    `timeout` minutes, all inside one cross-group (XG) transaction.

    Args:
        app_user (users.User): user whose (single) active mobile is debugged.
        timeout (int|long): minutes until the session is stopped automatically.
    Returns:
        CurrentlyForwardingLogs: forwarding state created by start_log_forwarding.
    """
    mobiles = list(get_user_active_mobiles(app_user))
    # Debugging only makes sense when exactly one mobile is active.
    azzert(len(mobiles) == 1)
    settings = get_server_settings()
    # Opaque forwarding target id: admin's email encrypted with the server secret.
    jid = base64.b64encode(encrypt_value(md5(settings.secret), users.get_current_user().email().encode('utf8')))
    password = None  # GAE channel API target needs no XMPP password
    type_ = CurrentlyForwardingLogs.TYPE_GAE_CHANNEL_API
    def trans():
        # Persist the request and schedule the auto-stop task transactionally,
        # so the stop task only exists if the request was actually stored.
        debug_request = StartDebuggingRequest(key=StartDebuggingRequest.create_key(app_user, jid),
                                              timestamp=now())
        db.put_async(debug_request)
        deferred.defer(stop_debugging, app_user, jid, debug_request=debug_request, notify_user=False,
                       _countdown=timeout * 60, _transactional=True, _queue=SCHEDULED_QUEUE)
        return start_log_forwarding(app_user, jid, xmpp_target_password=password, type_=type_)
    xg_on = db.create_transaction_options(xg=True)  # entities span entity groups
    return db.run_in_transaction_options(xg_on, trans)
@returns()
@arguments(app_user=users.User, mobile_id=unicode)
def start_debugging(app_user, mobile_id):
    """Start forwarding logs of one of app_user's mobiles to an XMPP target.

    The session is auto-stopped after 30 minutes via a deferred task that is
    enqueued inside the same cross-group transaction as the request record.

    Args:
        app_user (users.User): owner of the mobile being debugged.
        mobile_id (unicode): id of the mobile to forward logs from.
    """
    settings = get_server_settings()
    domain = settings.jabberDomain
    # XMPP resource encodes the debugged user's email (base64) for routing.
    target_jid = "kick.%s/debug:%s" % (domain, base64.b64encode(app_user.email().encode('utf-8')))
    def trans(mobile):
        # Store the request and schedule the 30-minute auto-stop transactionally.
        debug_request = StartDebuggingRequest(key=StartDebuggingRequest.create_key(app_user, mobile_id),
                                              timestamp=now())
        db.put_async(debug_request)
        start_log_forwarding(app_user, target_jid, mobile=mobile)
        deferred.defer(stop_debugging, app_user, mobile_id, debug_request=debug_request,
                       _countdown=30 * 60, _transactional=True, _queue=SCHEDULED_QUEUE)
    xg_on = db.create_transaction_options(xg=True)  # entities span entity groups
    db.run_in_transaction_options(xg_on, trans, get_mobile_by_id(mobile_id))
@returns()
@arguments(app_user=users.User, mobile_id=unicode, debug_request=StartDebuggingRequest, notify_user=bool)
def stop_debugging(app_user, mobile_id, debug_request=None, notify_user=True):
    """Stop a log-forwarding/debug session for app_user's mobile.

    Called either by the scheduled timeout task (debug_request given) or when
    the user closes the debug dialog in the web UI (debug_request is None).
    The timestamp comparison below makes a stale timeout task a no-op when a
    newer session has since been started under the same key.

    Args:
        app_user (users.User): owner of the debugged mobile.
        mobile_id (unicode): mobile id (or encrypted jid for admin sessions).
        debug_request (StartDebuggingRequest): original request, when timed out.
        notify_user (bool): push a 'stopped' message to the web UI when True.
    """
    # debug_request is not None when debug session timed out
    def trans(mobile):
        stopped = False
        debug_request_from_ds = db.get(StartDebuggingRequest.create_key(app_user, mobile_id))
        if debug_request_from_ds:
            # Only stop if this is still the same session that was started.
            if not debug_request or debug_request.timestamp == debug_request_from_ds.timestamp:
                db.delete_async(debug_request_from_ds)
                start_log_forwarding(app_user, None, mobile)  # target_jid=None ==> will stop log forwarding
                stopped = True
                # 'dbg_' targets own a temporary XMPP account; clean it up
                # after the transaction commits (side effect, not datastore).
                if debug_request_from_ds.target_id.startswith('dbg_'):
                    on_trans_committed(try_or_defer, delete_xmpp_account, debug_request_from_ds.target_id, None)
        return stopped
    # stop debugging session after timeout, or when user closed the debugging dialog in the web UI
    xg_on = db.create_transaction_options(xg=True)
    stopped = db.run_in_transaction_options(xg_on, trans, get_mobile_by_id(mobile_id))
    if stopped and notify_user:
        channel.send_message(app_user, 'rogerthat.settings.stopped_debugging')
@returns()
@arguments(app_user=users.User, message=unicode)
def forward_log(app_user, message):
    """Push a single captured log line to the user's web channel."""
    # silent=True keeps the forwarded line itself out of the server log
    # (we must not slog while forwarding logs).
    payload = dict(message=message, silent=True)
    channel.send_message(app_user, 'rogerthat.settings.log', **payload)
| 44.084112 | 112 | 0.744541 |
import base64
from google.appengine.ext import deferred, db
from mcfw.properties import azzert
from mcfw.rpc import returns, arguments
from rogerthat.bizz.system import start_log_forwarding, delete_xmpp_account
from rogerthat.consts import SCHEDULED_QUEUE
from rogerthat.dal.mobile import get_mobile_by_id, get_user_active_mobiles
from rogerthat.models import StartDebuggingRequest, CurrentlyForwardingLogs
from rogerthat.rpc import users
from rogerthat.settings import get_server_settings
from rogerthat.utils import channel, now, try_or_defer
from rogerthat.utils.crypto import encrypt_value, md5
from rogerthat.utils.transactions import on_trans_committed
@returns(CurrentlyForwardingLogs)
@arguments(app_user=users.User, timeout=(int, long))
def start_admin_debugging(app_user, timeout):
mobiles = list(get_user_active_mobiles(app_user))
azzert(len(mobiles) == 1)
settings = get_server_settings()
jid = base64.b64encode(encrypt_value(md5(settings.secret), users.get_current_user().email().encode('utf8')))
password = None
type_ = CurrentlyForwardingLogs.TYPE_GAE_CHANNEL_API
def trans():
debug_request = StartDebuggingRequest(key=StartDebuggingRequest.create_key(app_user, jid),
timestamp=now())
db.put_async(debug_request)
deferred.defer(stop_debugging, app_user, jid, debug_request=debug_request, notify_user=False,
_countdown=timeout * 60, _transactional=True, _queue=SCHEDULED_QUEUE)
return start_log_forwarding(app_user, jid, xmpp_target_password=password, type_=type_)
xg_on = db.create_transaction_options(xg=True)
return db.run_in_transaction_options(xg_on, trans)
@returns()
@arguments(app_user=users.User, mobile_id=unicode)
def start_debugging(app_user, mobile_id):
settings = get_server_settings()
domain = settings.jabberDomain
target_jid = "kick.%s/debug:%s" % (domain, base64.b64encode(app_user.email().encode('utf-8')))
def trans(mobile):
debug_request = StartDebuggingRequest(key=StartDebuggingRequest.create_key(app_user, mobile_id),
timestamp=now())
db.put_async(debug_request)
start_log_forwarding(app_user, target_jid, mobile=mobile)
deferred.defer(stop_debugging, app_user, mobile_id, debug_request=debug_request,
_countdown=30 * 60, _transactional=True, _queue=SCHEDULED_QUEUE)
xg_on = db.create_transaction_options(xg=True)
db.run_in_transaction_options(xg_on, trans, get_mobile_by_id(mobile_id))
@returns()
@arguments(app_user=users.User, mobile_id=unicode, debug_request=StartDebuggingRequest, notify_user=bool)
def stop_debugging(app_user, mobile_id, debug_request=None, notify_user=True):
def trans(mobile):
stopped = False
debug_request_from_ds = db.get(StartDebuggingRequest.create_key(app_user, mobile_id))
if debug_request_from_ds:
if not debug_request or debug_request.timestamp == debug_request_from_ds.timestamp:
db.delete_async(debug_request_from_ds)
start_log_forwarding(app_user, None, mobile)
stopped = True
if debug_request_from_ds.target_id.startswith('dbg_'):
on_trans_committed(try_or_defer, delete_xmpp_account, debug_request_from_ds.target_id, None)
return stopped
xg_on = db.create_transaction_options(xg=True)
stopped = db.run_in_transaction_options(xg_on, trans, get_mobile_by_id(mobile_id))
if stopped and notify_user:
channel.send_message(app_user, 'rogerthat.settings.stopped_debugging')
@returns()
@arguments(app_user=users.User, message=unicode)
def forward_log(app_user, message):
channel.send_message(app_user, 'rogerthat.settings.log', message=message, silent=True)
| true | true |
1c301ba946adcd8381d52d5601d669c6459a142b | 405 | py | Python | rand_param_envs/gym/envs/classic_control/__init__.py | erinaldi/MetaRL | 6dfb8d2e63a1802ca7ef9c28f6ab1a758d07f871 | [
"MIT"
] | 24 | 2021-03-24T07:14:52.000Z | 2022-03-17T08:15:44.000Z | rand_param_envs/gym/envs/classic_control/__init__.py | erinaldi/MetaRL | 6dfb8d2e63a1802ca7ef9c28f6ab1a758d07f871 | [
"MIT"
] | 12 | 2021-02-02T22:53:59.000Z | 2022-03-12T00:41:30.000Z | rand_param_envs/gym/envs/classic_control/__init__.py | erinaldi/MetaRL | 6dfb8d2e63a1802ca7ef9c28f6ab1a758d07f871 | [
"MIT"
] | 6 | 2021-04-12T18:49:47.000Z | 2021-09-07T05:33:22.000Z | from rand_param_envs.gym.envs.classic_control.cartpole import CartPoleEnv
from rand_param_envs.gym.envs.classic_control.mountain_car import MountainCarEnv
from rand_param_envs.gym.envs.classic_control.continuous_mountain_car import Continuous_MountainCarEnv
from rand_param_envs.gym.envs.classic_control.pendulum import PendulumEnv
from rand_param_envs.gym.envs.classic_control.acrobot import AcrobotEnv
| 57.857143 | 102 | 0.898765 | from rand_param_envs.gym.envs.classic_control.cartpole import CartPoleEnv
from rand_param_envs.gym.envs.classic_control.mountain_car import MountainCarEnv
from rand_param_envs.gym.envs.classic_control.continuous_mountain_car import Continuous_MountainCarEnv
from rand_param_envs.gym.envs.classic_control.pendulum import PendulumEnv
from rand_param_envs.gym.envs.classic_control.acrobot import AcrobotEnv
| true | true |
1c301c0af1297402d729bdd00ef91beeec46e0f8 | 1,294 | py | Python | doc/source/cookbook/halo_profiler.py | cphyc/yt_astro_analysis | 68fbf5370d716734fcd31a95cbd108efff46980a | [
"BSD-3-Clause-Clear"
] | null | null | null | doc/source/cookbook/halo_profiler.py | cphyc/yt_astro_analysis | 68fbf5370d716734fcd31a95cbd108efff46980a | [
"BSD-3-Clause-Clear"
] | null | null | null | doc/source/cookbook/halo_profiler.py | cphyc/yt_astro_analysis | 68fbf5370d716734fcd31a95cbd108efff46980a | [
"BSD-3-Clause-Clear"
] | null | null | null | import yt
from yt.extensions.astro_analysis.halo_analysis.api import HaloCatalog
# Load the data set with the full simulation information
# and rockstar halos
data_ds = yt.load('Enzo_64/RD0006/RedshiftOutput0006')
halos_ds = yt.load('rockstar_halos/halos_0.0.bin')
# Instantiate a catalog using those two parameter files
hc = HaloCatalog(data_ds=data_ds, halos_ds=halos_ds)
# Filter out less massive halos
hc.add_filter("quantity_value", "particle_mass", ">", 1e14, "Msun")
# This recipe creates a spherical data container, computes
# radial profiles, and calculates r_200 and M_200.
hc.add_recipe("calculate_virial_quantities", ["radius", "matter_mass"])
# Create a sphere container with radius 5x r_200.
field_params = dict(virial_radius=('quantity', 'radius_200'))
hc.add_callback('sphere', radius_field='radius_200', factor=5,
field_parameters=field_params)
# Compute profiles of T vs. r/r_200
hc.add_callback('profile', ['virial_radius_fraction'],
[('gas', 'temperature')],
storage='virial_profiles',
weight_field='cell_mass',
accumulation=False, output_dir='profiles')
# Save the profiles
hc.add_callback("save_profiles", storage="virial_profiles",
output_dir="profiles")
hc.create()
| 35.944444 | 71 | 0.722566 | import yt
from yt.extensions.astro_analysis.halo_analysis.api import HaloCatalog
data_ds = yt.load('Enzo_64/RD0006/RedshiftOutput0006')
halos_ds = yt.load('rockstar_halos/halos_0.0.bin')
hc = HaloCatalog(data_ds=data_ds, halos_ds=halos_ds)
hc.add_filter("quantity_value", "particle_mass", ">", 1e14, "Msun")
hc.add_recipe("calculate_virial_quantities", ["radius", "matter_mass"])
field_params = dict(virial_radius=('quantity', 'radius_200'))
hc.add_callback('sphere', radius_field='radius_200', factor=5,
field_parameters=field_params)
hc.add_callback('profile', ['virial_radius_fraction'],
[('gas', 'temperature')],
storage='virial_profiles',
weight_field='cell_mass',
accumulation=False, output_dir='profiles')
hc.add_callback("save_profiles", storage="virial_profiles",
output_dir="profiles")
hc.create()
| true | true |
1c301ce7b9000ab96cc831f7a72d44f6394b3e08 | 10,714 | py | Python | plugins/Channel/test.py | atr000/Limnoria | 1f60a9487ca4114f040135fb14cabc155a041918 | [
"BSD-3-Clause"
] | null | null | null | plugins/Channel/test.py | atr000/Limnoria | 1f60a9487ca4114f040135fb14cabc155a041918 | [
"BSD-3-Clause"
] | null | null | null | plugins/Channel/test.py | atr000/Limnoria | 1f60a9487ca4114f040135fb14cabc155a041918 | [
"BSD-3-Clause"
] | 1 | 2021-01-23T21:20:57.000Z | 2021-01-23T21:20:57.000Z | ###
# Copyright (c) 2002-2005, Jeremiah Fincher
# Copyright (c) 2009, James Vega
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from supybot.test import *
import supybot.conf as conf
import supybot.ircdb as ircdb
import supybot.ircmsgs as ircmsgs
class ChannelTestCase(ChannelPluginTestCase):
    """Functional tests for the Channel plugin: channel capabilities,
    plugin enable/disable, channel modes (op/halfop/voice), bans,
    ignores and nick listing."""
    plugins = ('Channel', 'User')
    def setUp(self):
        # Seed the test channel with two extra nicks used throughout.
        super(ChannelTestCase, self).setUp()
        self.irc.state.channels[self.channel].addUser('foo')
        self.irc.state.channels[self.channel].addUser('bar')
    def testLobotomies(self):
        """A fresh channel reports that it has no lobotomies."""
        self.assertRegexp('lobotomy list', 'not.*any')
    ## def testCapabilities(self):
    ##     self.prefix = 'foo!bar@baz'
    ##     self.irc.feedMsg(ircmsgs.privmsg(self.irc.nick, 'register foo bar',
    ##                                      prefix=self.prefix))
    ##     u = ircdb.users.getUser(0)
    ##     u.addCapability('%s.op' % self.channel)
    ##     ircdb.users.setUser(u)
    ##     self.assertNotError(' ')
    ##     self.assertResponse('user capabilities foo', '[]')
    ##     self.assertNotError('channel addcapability foo op')
    ##     self.assertRegexp('channel capabilities foo', 'op')
    ##     self.assertNotError('channel removecapability foo op')
    ##     self.assertResponse('user capabilities foo', '[]')
    def testCapabilities(self):
        """channel capability set/unset round-trips; double unset errors."""
        self.assertNotError('channel capability list')
        self.assertNotError('channel capability set -foo')
        self.assertNotError('channel capability unset -foo')
        self.assertError('channel capability unset -foo')
        self.assertNotError('channel capability set -foo bar baz')
        self.assertRegexp('channel capability list', 'baz')
        self.assertNotError('channel capability unset -foo baz')
        self.assertError('channel capability unset baz')
    def testEnableDisable(self):
        """Disabling a plugin/command adds the anti-capability; enabling removes it."""
        self.assertNotRegexp('channel capability list', '-Channel')
        self.assertError('channel enable channel')
        self.assertNotError('channel disable channel')
        self.assertRegexp('channel capability list', '-Channel')
        self.assertNotError('channel enable channel')
        self.assertNotRegexp('channel capability list', '-Channel')
        self.assertNotError('channel disable channel nicks')
        self.assertRegexp('channel capability list', '-Channel.nicks')
        self.assertNotError('channel enable channel nicks')
        self.assertNotRegexp('channel capability list', '-Channel.nicks')
        self.assertNotRegexp('channel capability list', 'nicks')
        self.assertNotError('channel disable nicks')
        self.assertRegexp('channel capability list', 'nicks')
        self.assertNotError('channel enable nicks')
        self.assertError('channel disable invalidPlugin')
        self.assertError('channel disable channel invalidCommand')
    def testUnban(self):
        """unban requires ops and emits a MODE -b for the given hostmask."""
        self.assertError('unban foo!bar@baz')
        self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
        m = self.getMsg('unban foo!bar@baz')
        self.assertEqual(m.command, 'MODE')
        self.assertEqual(m.args, (self.channel, '-b', 'foo!bar@baz'))
        self.assertNoResponse(' ', 2)
    def testErrorsWithoutOps(self):
        """Mode-changing commands error unless the bot has ops."""
        for s in 'op deop halfop dehalfop voice devoice kick invite'.split():
            self.assertError('%s foo' % s)
            self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
            self.assertNotError('%s foo' % s)
            self.irc.feedMsg(ircmsgs.deop(self.channel, self.nick))
    def testWontDeItself(self):
        """The bot refuses to deop/dehalfop/devoice itself."""
        for s in 'deop dehalfop devoice'.split():
            self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
            self.assertError('%s %s' % (s, self.nick))
    def testOp(self):
        """op emits +o MODEs, batching per the server's MODES limit."""
        self.assertError('op')
        self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
        self.assertNotError('op')
        m = self.getMsg('op foo')
        self.failUnless(m.command == 'MODE' and
                        m.args == (self.channel, '+o', 'foo'))
        m = self.getMsg('op foo bar')
        self.failUnless(m.command == 'MODE' and
                        m.args == (self.channel, '+o', 'foo'))
        m = self.irc.takeMsg()
        self.failUnless(m.command == 'MODE' and
                        m.args == (self.channel, '+o', 'bar'))
        # With MODES=2 the server accepts two mode changes per message.
        self.irc.state.supported['MODES'] = 2
        m = self.getMsg('op foo bar')
        try:
            self.failUnless(m.command == 'MODE' and
                            m.args == (self.channel, '+oo', 'foo', 'bar'))
        finally:
            self.irc.state.supported['MODES'] = 1
    def testHalfOp(self):
        """halfop emits +h MODEs for one or more nicks."""
        self.assertError('halfop')
        self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
        self.assertNotError('halfop')
        m = self.getMsg('halfop foo')
        self.failUnless(m.command == 'MODE' and
                        m.args == (self.channel, '+h', 'foo'))
        m = self.getMsg('halfop foo bar')
        self.failUnless(m.command == 'MODE' and
                        m.args == (self.channel, '+h', 'foo'))
        m = self.irc.takeMsg()
        self.failUnless(m.command == 'MODE' and
                        m.args == (self.channel, '+h', 'bar'))
    def testVoice(self):
        """voice emits +v MODEs for one or more nicks."""
        self.assertError('voice')
        self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
        self.assertNotError('voice')
        m = self.getMsg('voice foo')
        self.failUnless(m.command == 'MODE' and
                        m.args == (self.channel, '+v', 'foo'))
        m = self.getMsg('voice foo bar')
        self.failUnless(m.command == 'MODE' and
                        m.args == (self.channel, '+v', 'foo'))
        m = self.irc.takeMsg()
        self.failUnless(m.command == 'MODE' and
                        m.args == (self.channel, '+v', 'bar'))
    def assertBan(self, query, hostmask, **kwargs):
        # Helper: assert `query` produces a ban on `hostmask` then a KICK.
        m = self.getMsg(query, **kwargs)
        self.assertEqual(m, ircmsgs.ban(self.channel, hostmask))
        m = self.getMsg(' ')
        self.assertEqual(m.command, 'KICK')
    ## def testKban(self):
    ##     self.irc.prefix = 'something!else@somehwere.else'
    ##     self.irc.nick = 'something'
    ##     self.irc.feedMsg(ircmsgs.join(self.channel,
    ##                                   prefix='foobar!user@host.domain.tld'))
    ##     self.assertError('kban foobar')
    ##     self.irc.feedMsg(ircmsgs.op(self.channel, self.irc.nick))
    ##     self.assertError('kban foobar -1')
    ##     self.assertBan('kban foobar', '*!*@*.domain.tld')
    ##     self.assertBan('kban --exact foobar', 'foobar!user@host.domain.tld')
    ##     self.assertBan('kban --host foobar', '*!*@host.domain.tld')
    ##     self.assertBan('kban --user foobar', '*!user@*')
    ##     self.assertBan('kban --nick foobar', 'foobar!*@*')
    ##     self.assertBan('kban --nick --user foobar', 'foobar!user@*')
    ##     self.assertBan('kban --nick --host foobar',
    ##                    'foobar!*@host.domain.tld')
    ##     self.assertBan('kban --user --host foobar', '*!user@host.domain.tld')
    ##     self.assertBan('kban --nick --user --host foobar',
    ##                    'foobar!user@host.domain.tld')
    ##     self.assertNotRegexp('kban adlkfajsdlfkjsd', 'KeyError')
    ##     self.assertNotRegexp('kban foobar time', 'ValueError')
    ##     self.assertError('kban %s' % self.irc.nick)
    def testBan(self):
        """ban add/remove works; strictRfc rejects invalid hostmasks."""
        origban = conf.supybot.protocols.irc.banmask()
        try:
            conf.supybot.protocols.irc.banmask.setValue(['exact'])
            self.assertNotError('ban add foo!bar@baz')
            self.assertNotError('ban remove foo!bar@baz')
            orig = conf.supybot.protocols.irc.strictRfc()
            try:
                conf.supybot.protocols.irc.strictRfc.setValue(True)
                # something wonky is going on here. irc.error (src/Channel.py|449)
                # is being called but the assert is failing
                self.assertError('ban add not!a.hostmask')
                self.assertNotRegexp('ban add not!a.hostmask', 'KeyError')
            finally:
                conf.supybot.protocols.irc.strictRfc.setValue(orig)
        finally:
            conf.supybot.protocols.irc.banmask.setValue(origban)
    def testIgnore(self):
        """channel ignore add/list/remove round-trips under both banmask styles."""
        orig = conf.supybot.protocols.irc.banmask()
        def ignore(given, expect=None):
            # Add `given`, expect it listed as `expect`, then remove it again.
            if expect is None:
                expect = given
            self.assertNotError('channel ignore add %s' % given)
            self.assertResponse('channel ignore list', "'%s'" % expect)
            self.assertNotError('channel ignore remove %s' % expect)
            self.assertRegexp('channel ignore list', 'not currently')
        try:
            ignore('foo!bar@baz', '*!bar@baz')
            ignore('foo!*@*')
            conf.supybot.protocols.irc.banmask.setValue(['exact'])
            ignore('foo!bar@baz')
            ignore('foo!*@*')
            self.assertError('ban add not!a.hostmask')
        finally:
            conf.supybot.protocols.irc.banmask.setValue(orig)
    def testNicks(self):
        """channel nicks lists (and counts) the users in the channel."""
        self.assertResponse('channel nicks', 'bar, foo, and test')
        self.assertResponse('channel nicks --count', '3')
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| 46.380952 | 82 | 0.617416 |
rom supybot.test import *
import supybot.conf as conf
import supybot.ircdb as ircdb
import supybot.ircmsgs as ircmsgs
class ChannelTestCase(ChannelPluginTestCase):
plugins = ('Channel', 'User')
def setUp(self):
super(ChannelTestCase, self).setUp()
self.irc.state.channels[self.channel].addUser('foo')
self.irc.state.channels[self.channel].addUser('bar')
def testLobotomies(self):
self.assertRegexp('lobotomy list', 'not.*any')
ble channel')
self.assertNotError('channel disable channel')
self.assertRegexp('channel capability list', '-Channel')
self.assertNotError('channel enable channel')
self.assertNotRegexp('channel capability list', '-Channel')
self.assertNotError('channel disable channel nicks')
self.assertRegexp('channel capability list', '-Channel.nicks')
self.assertNotError('channel enable channel nicks')
self.assertNotRegexp('channel capability list', '-Channel.nicks')
self.assertNotRegexp('channel capability list', 'nicks')
self.assertNotError('channel disable nicks')
self.assertRegexp('channel capability list', 'nicks')
self.assertNotError('channel enable nicks')
self.assertError('channel disable invalidPlugin')
self.assertError('channel disable channel invalidCommand')
def testUnban(self):
self.assertError('unban foo!bar@baz')
self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
m = self.getMsg('unban foo!bar@baz')
self.assertEqual(m.command, 'MODE')
self.assertEqual(m.args, (self.channel, '-b', 'foo!bar@baz'))
self.assertNoResponse(' ', 2)
def testErrorsWithoutOps(self):
for s in 'op deop halfop dehalfop voice devoice kick invite'.split():
self.assertError('%s foo' % s)
self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
self.assertNotError('%s foo' % s)
self.irc.feedMsg(ircmsgs.deop(self.channel, self.nick))
def testWontDeItself(self):
for s in 'deop dehalfop devoice'.split():
self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
self.assertError('%s %s' % (s, self.nick))
def testOp(self):
self.assertError('op')
self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
self.assertNotError('op')
m = self.getMsg('op foo')
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+o', 'foo'))
m = self.getMsg('op foo bar')
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+o', 'foo'))
m = self.irc.takeMsg()
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+o', 'bar'))
self.irc.state.supported['MODES'] = 2
m = self.getMsg('op foo bar')
try:
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+oo', 'foo', 'bar'))
finally:
self.irc.state.supported['MODES'] = 1
def testHalfOp(self):
self.assertError('halfop')
self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
self.assertNotError('halfop')
m = self.getMsg('halfop foo')
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+h', 'foo'))
m = self.getMsg('halfop foo bar')
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+h', 'foo'))
m = self.irc.takeMsg()
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+h', 'bar'))
def testVoice(self):
self.assertError('voice')
self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
self.assertNotError('voice')
m = self.getMsg('voice foo')
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+v', 'foo'))
m = self.getMsg('voice foo bar')
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+v', 'foo'))
m = self.irc.takeMsg()
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+v', 'bar'))
def assertBan(self, query, hostmask, **kwargs):
m = self.getMsg(query, **kwargs)
self.assertEqual(m, ircmsgs.ban(self.channel, hostmask))
m = self.getMsg(' ')
self.assertEqual(m.command, 'KICK')
ignore('foo!bar@baz', '*!bar@baz')
ignore('foo!*@*')
conf.supybot.protocols.irc.banmask.setValue(['exact'])
ignore('foo!bar@baz')
ignore('foo!*@*')
self.assertError('ban add not!a.hostmask')
finally:
conf.supybot.protocols.irc.banmask.setValue(orig)
def testNicks(self):
self.assertResponse('channel nicks', 'bar, foo, and test')
self.assertResponse('channel nicks --count', '3')
| true | true |
1c301d0241b470c8b887c9d429844e8d2f0f4a77 | 5,954 | py | Python | cloud/cdk/baseline_cdk/resources/cfnres_iot_fleet.py | MartinMReed/aws-iot-baseline | 61bdc51708e6f4480d0117a43f0adde5f6a63506 | [
"MIT"
] | 1 | 2021-12-31T05:05:30.000Z | 2021-12-31T05:05:30.000Z | cloud/cdk/baseline_cdk/resources/cfnres_iot_fleet.py | nelsestu/thing-expert | 2e105d718c386258d8efdb329ea60da1072ffbe8 | [
"MIT"
] | null | null | null | cloud/cdk/baseline_cdk/resources/cfnres_iot_fleet.py | nelsestu/thing-expert | 2e105d718c386258d8efdb329ea60da1072ffbe8 | [
"MIT"
] | 1 | 2021-04-05T23:44:12.000Z | 2021-04-05T23:44:12.000Z | import os
import zipfile
from aws_cdk import aws_cloudformation
from aws_cdk import aws_iam
from aws_cdk import aws_lambda
from aws_cdk import core
from aws_cdk.core import RemovalPolicy
from baseline_cdk.resources import cfnres_log_group
from baseline_cdk.util import cdk
from baseline_cdk.util.hash import file_sha1
from baseline_cdk.util.os import shell
from baseline_cdk.util.zip import exclude_pycache
from baseline_cdk.util.zip import zip_all
lambda_type = 'cfnres-iot-fleet'
def create_layer_zip() -> str:
    """Build the Lambda layer zip (pip dependencies) for the iot-fleet handler.

    On first run, installs requirements.txt into a python3.7 site-packages
    layout via the aws-lambda-pip.sh helper (skipped when the layer dir
    already exists), then zips it under the 'python' prefix Lambda layers
    expect.

    Returns:
        str: path to the generated layer zip file.
    """
    this_dir = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
    cloud_dir = os.path.abspath(f'{this_dir}/../../..')
    layer_dir = f'{cdk.outdir}/{cdk.app_name}/lambda-{lambda_type}-layer'
    layer_zip = f'{cdk.outdir}/{cdk.app_name}/lambda-{lambda_type}-layer.zip'
    if not os.path.exists(layer_dir):
        os.makedirs(layer_dir)
        shell(f'bash {cloud_dir}/scripts/aws-lambda-pip.sh'
              f' -pyver 3.7'
              f' -out "{layer_dir}/python/lib/python3.7/site-packages"'
              f' -req "{this_dir}/cfnres/iot_fleet/requirements.txt"')
    # 'zf' (not 'zip') so the builtin zip() is not shadowed.
    with zipfile.ZipFile(layer_zip, 'w', zipfile.ZIP_DEFLATED) as zf:
        zip_all(zf, f'{layer_dir}', exclude_pycache, path='python')
    return layer_zip
def create_lambda_zip() -> str:
    """Zip the custom-resource handler (index.py) for Lambda deployment.

    Returns:
        str: path to the generated function zip file.
    """
    this_dir = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
    lambda_zip = f'{cdk.outdir}/{cdk.app_name}/lambda-{lambda_type}.zip'
    # 'zf' (not 'zip') so the builtin zip() is not shadowed.
    with zipfile.ZipFile(lambda_zip, 'w', zipfile.ZIP_DEFLATED) as zf:
        zf.write(f'{this_dir}/cfnres/iot_fleet/index.py', arcname='index.py')
    return lambda_zip
def create_lambda(stack: core.Stack, iot_scope: core.Construct) -> aws_lambda.CfnFunction:
    """Create the iot-fleet custom-resource Lambda: role, layer, function, log group.

    Uploads the layer and handler zips as file assets, grants the execution
    role the IoT/EventBridge/Lambda permissions the handler needs, and wires
    a 7-day-retention log group.

    Args:
        stack: the CDK stack (used for asset upload and account id).
        iot_scope: parent construct for the 'FleetLambda' scope.
    Returns:
        aws_lambda.CfnFunction: the created function resource.
    """
    lambda_scope = core.Construct(iot_scope, 'FleetLambda')
    layer_zip = create_layer_zip()
    lambda_zip = create_lambda_zip()
    # Upload both zips to the CDK asset bucket.
    layer_asset = stack.synthesizer.add_file_asset(
        file_name=layer_zip,
        packaging=core.FileAssetPackaging.FILE,
        source_hash=file_sha1(layer_zip)
    )
    lambda_asset = stack.synthesizer.add_file_asset(
        file_name=lambda_zip,
        packaging=core.FileAssetPackaging.FILE,
        source_hash=file_sha1(lambda_zip)
    )
    lambda_role = aws_iam.CfnRole(
        lambda_scope, 'ExecutionRole',
        role_name=f'{cdk.app_name}-lambda-{lambda_type}',
        assume_role_policy_document={
            'Version': '2012-10-17',
            'Statement': [{
                'Effect': 'Allow',
                'Action': 'sts:AssumeRole',
                'Principal': {
                    'Service': 'lambda.amazonaws.com',
                    # When debugging, also let account principals assume this role.
                    **({'AWS': f'arn:aws:iam::{stack.account}:root'} if cdk.debug_lambda_roles else {})
                }
            }]
        },
        managed_policy_arns=[
            'arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole'
        ],
        policies=[aws_iam.CfnRole.PolicyProperty(
            policy_name=f'{cdk.app_name}-lambda-{lambda_type}',
            policy_document={
                'Version': '2012-10-17',
                'Statement': [{
                    'Effect': 'Allow',
                    # Permissions to clean up things/certificates and manage
                    # the EventBridge rule + Lambda permission the handler uses.
                    'Action': [
                        'iot:UpdateCertificate',
                        'iot:DeleteCertificate',
                        'iot:DeleteThing',
                        'iot:ListAttachedPolicies',
                        'iot:DetachPolicy',
                        'iot:ListThingPrincipals',
                        'iot:DetachThingPrincipal',
                        'iot:ListThings',
                        'iot:ListPrincipalThings',
                        'iot:ListTagsForResource',
                        'events:PutRule',
                        'events:DeleteRule',
                        'events:PutTargets',
                        'events:RemoveTargets',
                        'lambda:AddPermission',
                        'lambda:RemovePermission'
                    ],
                    'Resource': '*'
                }]
            }
        )]
    )
    lambda_layer = aws_lambda.CfnLayerVersion(
        lambda_scope, 'Layer',
        layer_name=f'{cdk.app_name}-{lambda_type}',
        compatible_runtimes=['python3.7'],
        content=aws_lambda.CfnLayerVersion.ContentProperty(
            s3_bucket=layer_asset.bucket_name,
            s3_key=layer_asset.object_key
        )
    )
    lambda_function = aws_lambda.CfnFunction(
        lambda_scope, 'Function',
        function_name=f'{cdk.app_name}-{lambda_type}',
        runtime='python3.7',
        code=aws_lambda.CfnFunction.CodeProperty(
            s3_bucket=lambda_asset.bucket_name,
            s3_key=lambda_asset.object_key
        ),
        handler='index.handle',
        layers=[lambda_layer.ref],
        memory_size=128,
        timeout=600,
        role=lambda_role.attr_arn
    )
    lambda_function.add_depends_on(lambda_role)
    # use a custom CfnLogGroup to avoid errors if the group still exists (failed deployment)
    cfnres_log_group.CfnLogGroup(
        stack, lambda_scope, 'LogGroup',
        log_group_name=f'/aws/lambda/{lambda_function.ref}',
        retention_in_days=7,
    )
    return lambda_function
def create(stack: core.Stack) -> None:
    """Create the Custom::IotFleet CloudFormation custom resource.

    Builds the backing Lambda and registers a custom resource under the
    existing 'Iot' scope that manages the fleet's thing type and cleanup.

    Args:
        stack: the CDK stack containing the 'Iot' scope.
    """
    iot_scope: core.Construct = cdk.find_resource(stack, 'Iot')
    lambda_function = create_lambda(stack, iot_scope)
    custom_resource = aws_cloudformation.CfnCustomResource(
        iot_scope, 'Fleet',
        service_token=lambda_function.attr_arn
    )
    # CfnCustomResource defaults to AWS::CloudFormation::CustomResource;
    # override the type name and pass handler properties.
    custom_resource.add_override('Type', 'Custom::IotFleet')
    custom_resource.add_override('Properties.ThingTypeName', cdk.app_name)
    custom_resource.add_override('Properties.ThingRemovalPolicy', RemovalPolicy.DESTROY)
    custom_resource.add_depends_on(lambda_function)
    # make sure this runs first when destroying the stack
    for child in iot_scope.node.children:
        if isinstance(child, core.CfnResource):
            custom_resource.add_depends_on(child)
| 34.416185 | 103 | 0.620591 | import os
import zipfile
from aws_cdk import aws_cloudformation
from aws_cdk import aws_iam
from aws_cdk import aws_lambda
from aws_cdk import core
from aws_cdk.core import RemovalPolicy
from baseline_cdk.resources import cfnres_log_group
from baseline_cdk.util import cdk
from baseline_cdk.util.hash import file_sha1
from baseline_cdk.util.os import shell
from baseline_cdk.util.zip import exclude_pycache
from baseline_cdk.util.zip import zip_all
lambda_type = 'cfnres-iot-fleet'
def create_layer_zip() -> str:
this_dir = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
cloud_dir = os.path.abspath(f'{this_dir}/../../..')
layer_dir = f'{cdk.outdir}/{cdk.app_name}/lambda-{lambda_type}-layer'
layer_zip = f'{cdk.outdir}/{cdk.app_name}/lambda-{lambda_type}-layer.zip'
if not os.path.exists(layer_dir):
os.makedirs(layer_dir)
shell(f'bash {cloud_dir}/scripts/aws-lambda-pip.sh'
f' -pyver 3.7'
f' -out "{layer_dir}/python/lib/python3.7/site-packages"'
f' -req "{this_dir}/cfnres/iot_fleet/requirements.txt"')
with zipfile.ZipFile(layer_zip, 'w', zipfile.ZIP_DEFLATED) as zip:
zip_all(zip, f'{layer_dir}', exclude_pycache, path='python')
return layer_zip
def create_lambda_zip() -> str:
this_dir = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
lambda_zip = f'{cdk.outdir}/{cdk.app_name}/lambda-{lambda_type}.zip'
with zipfile.ZipFile(lambda_zip, 'w', zipfile.ZIP_DEFLATED) as zip:
zip.write(f'{this_dir}/cfnres/iot_fleet/index.py', arcname='index.py')
return lambda_zip
def create_lambda(stack: core.Stack, iot_scope: core.Construct) -> aws_lambda.CfnFunction:
lambda_scope = core.Construct(iot_scope, 'FleetLambda')
layer_zip = create_layer_zip()
lambda_zip = create_lambda_zip()
layer_asset = stack.synthesizer.add_file_asset(
file_name=layer_zip,
packaging=core.FileAssetPackaging.FILE,
source_hash=file_sha1(layer_zip)
)
lambda_asset = stack.synthesizer.add_file_asset(
file_name=lambda_zip,
packaging=core.FileAssetPackaging.FILE,
source_hash=file_sha1(lambda_zip)
)
lambda_role = aws_iam.CfnRole(
lambda_scope, 'ExecutionRole',
role_name=f'{cdk.app_name}-lambda-{lambda_type}',
assume_role_policy_document={
'Version': '2012-10-17',
'Statement': [{
'Effect': 'Allow',
'Action': 'sts:AssumeRole',
'Principal': {
'Service': 'lambda.amazonaws.com',
**({'AWS': f'arn:aws:iam::{stack.account}:root'} if cdk.debug_lambda_roles else {})
}
}]
},
managed_policy_arns=[
'arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole'
],
policies=[aws_iam.CfnRole.PolicyProperty(
policy_name=f'{cdk.app_name}-lambda-{lambda_type}',
policy_document={
'Version': '2012-10-17',
'Statement': [{
'Effect': 'Allow',
'Action': [
'iot:UpdateCertificate',
'iot:DeleteCertificate',
'iot:DeleteThing',
'iot:ListAttachedPolicies',
'iot:DetachPolicy',
'iot:ListThingPrincipals',
'iot:DetachThingPrincipal',
'iot:ListThings',
'iot:ListPrincipalThings',
'iot:ListTagsForResource',
'events:PutRule',
'events:DeleteRule',
'events:PutTargets',
'events:RemoveTargets',
'lambda:AddPermission',
'lambda:RemovePermission'
],
'Resource': '*'
}]
}
)]
)
lambda_layer = aws_lambda.CfnLayerVersion(
lambda_scope, 'Layer',
layer_name=f'{cdk.app_name}-{lambda_type}',
compatible_runtimes=['python3.7'],
content=aws_lambda.CfnLayerVersion.ContentProperty(
s3_bucket=layer_asset.bucket_name,
s3_key=layer_asset.object_key
)
)
lambda_function = aws_lambda.CfnFunction(
lambda_scope, 'Function',
function_name=f'{cdk.app_name}-{lambda_type}',
runtime='python3.7',
code=aws_lambda.CfnFunction.CodeProperty(
s3_bucket=lambda_asset.bucket_name,
s3_key=lambda_asset.object_key
),
handler='index.handle',
layers=[lambda_layer.ref],
memory_size=128,
timeout=600,
role=lambda_role.attr_arn
)
lambda_function.add_depends_on(lambda_role)
cfnres_log_group.CfnLogGroup(
stack, lambda_scope, 'LogGroup',
log_group_name=f'/aws/lambda/{lambda_function.ref}',
retention_in_days=7,
)
return lambda_function
def create(stack: core.Stack) -> None:
    """Attach the Custom::IotFleet custom resource (and its Lambda) to the Iot scope.

    The custom resource is made to depend on every other CfnResource in the
    Iot scope so that, when the stack is destroyed, CloudFormation deletes it
    first and the Lambda can clean up the fleet before the IoT resources go.
    """
    iot_scope: core.Construct = cdk.find_resource(stack, 'Iot')
    lambda_function = create_lambda(stack, iot_scope)
    custom_resource = aws_cloudformation.CfnCustomResource(
        iot_scope, 'Fleet',
        service_token=lambda_function.attr_arn
    )
    custom_resource.add_override('Type', 'Custom::IotFleet')
    custom_resource.add_override('Properties.ThingTypeName', cdk.app_name)
    custom_resource.add_override('Properties.ThingRemovalPolicy', RemovalPolicy.DESTROY)
    custom_resource.add_depends_on(lambda_function)
    # Depending on all siblings makes this run first when destroying the stack.
    for child in iot_scope.node.children:
        if isinstance(child, core.CfnResource):
            custom_resource.add_depends_on(child)
| true | true |
1c301e36e501a99e2df8a590d3a6447d4a83d534 | 422 | py | Python | cadastros/migrations/0006_pcvaga_data_criacao.py | lhggomes/vagas | 84b429d642f1703d067ce12735875a8b95769b3f | [
"Unlicense"
] | null | null | null | cadastros/migrations/0006_pcvaga_data_criacao.py | lhggomes/vagas | 84b429d642f1703d067ce12735875a8b95769b3f | [
"Unlicense"
] | null | null | null | cadastros/migrations/0006_pcvaga_data_criacao.py | lhggomes/vagas | 84b429d642f1703d067ce12735875a8b95769b3f | [
"Unlicense"
] | null | null | null | # Generated by Django 3.1.4 on 2020-12-20 02:30
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the ``data_criacao`` DateField to the ``pcvaga`` model.

    The default is the callable ``datetime.date.today``, so it is evaluated
    per-row at insert time rather than frozen at migration time.
    """

    dependencies = [
        ('cadastros', '0005_auto_20201208_2339'),
    ]
    operations = [
        migrations.AddField(
            model_name='pcvaga',
            name='data_criacao',
            field=models.DateField(default=datetime.date.today),
        ),
    ]
| 21.1 | 64 | 0.623223 |
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cadastros', '0005_auto_20201208_2339'),
]
operations = [
migrations.AddField(
model_name='pcvaga',
name='data_criacao',
field=models.DateField(default=datetime.date.today),
),
]
| true | true |
1c301f3501a192ccb0b9ef84509c69d14ee42689 | 68 | py | Python | psi/data/api.py | bburan/psiexperiment | 9b70f7f0b4a4379d8c3fc463e1df272153afd247 | [
"MIT"
] | 5 | 2016-05-26T13:46:00.000Z | 2020-03-03T13:07:47.000Z | psi/data/api.py | bburan/psiexperiment | 9b70f7f0b4a4379d8c3fc463e1df272153afd247 | [
"MIT"
] | 2 | 2018-04-17T15:06:35.000Z | 2019-03-25T18:13:10.000Z | psi/data/api.py | psiexperiment/psiexperiment | 2701558e1d0637b8a5d6762912dfb5c183f3be87 | [
"MIT"
] | 3 | 2020-04-17T15:03:36.000Z | 2022-01-14T23:19:29.000Z | from .plots import FFTContainer, PlotContainer, ResultPlot, ViewBox
| 34 | 67 | 0.838235 | from .plots import FFTContainer, PlotContainer, ResultPlot, ViewBox
| true | true |
1c301f6cb8cae54e2b472abd80d09c9c70abf531 | 13,322 | py | Python | ctapipe/reco/HillasReconstructor.py | mpecimotika/ctapipe | ffd7930921f7139b761fbf1208da16dd302e97a6 | [
"BSD-3-Clause"
] | null | null | null | ctapipe/reco/HillasReconstructor.py | mpecimotika/ctapipe | ffd7930921f7139b761fbf1208da16dd302e97a6 | [
"BSD-3-Clause"
] | null | null | null | ctapipe/reco/HillasReconstructor.py | mpecimotika/ctapipe | ffd7930921f7139b761fbf1208da16dd302e97a6 | [
"BSD-3-Clause"
] | null | null | null | """
Line-intersection-based fitting.
Contact: Tino Michael <Tino.Michael@cea.fr>
"""
from ctapipe.reco.reco_algorithms import Reconstructor
from ctapipe.io.containers import ReconstructedShowerContainer
from ctapipe.coordinates import HorizonFrame, CameraFrame, GroundFrame, TiltedGroundFrame, project_to_ground
from astropy.coordinates import SkyCoord, spherical_to_cartesian, cartesian_to_spherical
from itertools import combinations
import numpy as np
from astropy import units as u
__all__ = ['HillasReconstructor', 'TooFewTelescopesException', 'HillasPlane']
class TooFewTelescopesException(Exception):
pass
def angle(v1, v2):
    """Compute the angle between two vectors in Cartesian coordinates.

    Parameters
    ----------
    v1 : numpy array
    v2 : numpy array

    Returns
    -------
    The angle between v1 and v2 in radians (plain float, clipped so that
    rounding error cannot push the cosine outside [-1, 1]).
    """
    cos_theta = v1.dot(v2) / (np.linalg.norm(v1) * np.linalg.norm(v2))
    return np.arccos(np.clip(cos_theta, -1.0, 1.0))
def normalise(vec):
    """Set the length of the vector to 1 without changing its direction.

    A zero vector is returned unchanged. The previous implementation caught
    ZeroDivisionError, but NumPy float division by 0.0 emits a RuntimeWarning
    and yields NaN/inf instead of raising, so the fallback never triggered and
    a zero vector produced a NaN-filled array. The norm is now checked
    explicitly.

    Parameters
    ----------
    vec : numpy array

    Returns
    -------
    numpy array with the same direction but length of 1
    (or `vec` itself if its norm is 0)
    """
    norm = np.linalg.norm(vec)
    if norm == 0:
        return vec
    return vec / norm
def line_line_intersection_3d(uvw_vectors, origins):
    """Least-squares intersection point of many lines in 3D.

    Each line is given by a unit direction vector and a point on it.
    See https://en.wikipedia.org/wiki/Line%E2%80%93line_intersection
    """
    projectors = []
    rhs_terms = []
    for direction, point in zip(uvw_vectors, origins):
        column = direction.reshape((3, 1))
        # (n n^T - I) projects onto the plane orthogonal to the line direction
        projector = (column @ column.T) - np.eye(3)
        projectors.append(projector)
        rhs_terms.append(projector @ point)
    coeff = np.array(projectors).sum(axis=0)
    rhs = np.array(rhs_terms).sum(axis=0)
    return np.linalg.inv(coeff) @ rhs
class HillasReconstructor(Reconstructor):
"""
class that reconstructs the direction of an atmospheric shower
using a simple hillas parametrisation of the camera images it
provides a direction estimate in two steps and an estimate for the
shower's impact position on the ground.
so far, it does neither provide an energy estimator nor an
uncertainty on the reconstructed parameters
"""
def __init__(self, config=None, tool=None, **kwargs):
super().__init__(config=config, parent=tool, **kwargs)
self.hillas_planes = {}
def predict(self, hillas_dict, inst, pointing_alt, pointing_az):
'''
The function you want to call for the reconstruction of the
event. It takes care of setting up the event and consecutively
calls the functions for the direction and core position
reconstruction. Shower parameters not reconstructed by this
class are set to np.nan
Parameters
-----------
hillas_dict: dict
dictionary with telescope IDs as key and
HillasParametersContainer instances as values
inst : ctapipe.io.InstrumentContainer
instrumental description
pointing_alt: dict[astropy.coordinates.Angle]
dict mapping telescope ids to pointing altitude
pointing_az: dict[astropy.coordinates.Angle]
dict mapping telescope ids to pointing azimuth
Raises
------
TooFewTelescopesException
if len(hillas_dict) < 2
'''
# stereoscopy needs at least two telescopes
if len(hillas_dict) < 2:
raise TooFewTelescopesException(
"need at least two telescopes, have {}"
.format(len(hillas_dict)))
self.initialize_hillas_planes(
hillas_dict,
inst.subarray,
pointing_alt,
pointing_az
)
# algebraic direction estimate
direction, err_est_dir = self.estimate_direction()
alt = u.Quantity(list(pointing_alt.values()))
az = u.Quantity(list(pointing_az.values()))
if np.any(alt != alt[0]) or np.any(az != az[0]):
raise ValueError('Divergent pointing not supported')
pointing_direction = SkyCoord(alt=alt[0], az=az[0], frame='altaz')
# core position estimate using a geometric approach
core_pos = self.estimate_core_position(hillas_dict, pointing_direction)
# container class for reconstructed showers
result = ReconstructedShowerContainer()
_, lat, lon = cartesian_to_spherical(*direction)
# estimate max height of shower
h_max = self.estimate_h_max()
# astropy's coordinates system rotates counter-clockwise.
# Apparently we assume it to be clockwise.
result.alt, result.az = lat, -lon
result.core_x = core_pos[0]
result.core_y = core_pos[1]
result.core_uncert = np.nan
result.tel_ids = [h for h in hillas_dict.keys()]
result.average_size = np.mean([h.intensity for h in hillas_dict.values()])
result.is_valid = True
result.alt_uncert = err_est_dir
result.az_uncert = np.nan
result.h_max = h_max
result.h_max_uncert = np.nan
result.goodness_of_fit = np.nan
return result
def initialize_hillas_planes(
self,
hillas_dict,
subarray,
pointing_alt,
pointing_az
):
"""
creates a dictionary of :class:`.HillasPlane` from a dictionary of
hillas
parameters
Parameters
----------
hillas_dict : dictionary
dictionary of hillas moments
subarray : ctapipe.instrument.SubarrayDescription
subarray information
tel_phi, tel_theta : dictionaries
dictionaries of the orientation angles of the telescopes
needs to contain at least the same keys as in `hillas_dict`
"""
self.hillas_planes = {}
for tel_id, moments in hillas_dict.items():
# we just need any point on the main shower axis a bit away from the cog
p2_x = moments.x + 0.1 * u.m * np.cos(moments.psi)
p2_y = moments.y + 0.1 * u.m * np.sin(moments.psi)
focal_length = subarray.tel[tel_id].optics.equivalent_focal_length
pointing = SkyCoord(
alt=pointing_alt[tel_id],
az=pointing_az[tel_id],
frame='altaz'
)
hf = HorizonFrame(
array_direction=pointing,
pointing_direction=pointing
)
cf = CameraFrame(
focal_length=focal_length,
array_direction=pointing,
pointing_direction=pointing,
)
cog_coord = SkyCoord(x=moments.x, y=moments.y, frame=cf)
cog_coord = cog_coord.transform_to(hf)
p2_coord = SkyCoord(x=p2_x, y=p2_y, frame=cf)
p2_coord = p2_coord.transform_to(hf)
circle = HillasPlane(
p1=cog_coord,
p2=p2_coord,
telescope_position=subarray.positions[tel_id],
weight=moments.intensity * (moments.length / moments.width),
)
self.hillas_planes[tel_id] = circle
def estimate_direction(self):
"""calculates the origin of the gamma as the weighted average
direction of the intersections of all hillas planes
Returns
-------
gamma : shape (3) numpy array
direction of origin of the reconstructed shower as a 3D vector
crossings : shape (n,3) list
an error esimate
"""
crossings = []
for perm in combinations(self.hillas_planes.values(), 2):
n1, n2 = perm[0].norm, perm[1].norm
# cross product automatically weighs in the angle between
# the two vectors: narrower angles have less impact,
# perpendicular vectors have the most
crossing = np.cross(n1, n2)
# two great circles cross each other twice (one would be
# the origin, the other one the direction of the gamma) it
# doesn't matter which we pick but it should at least be
# consistent: make sure to always take the "upper" solution
if crossing[2] < 0:
crossing *= -1
crossings.append(crossing * perm[0].weight * perm[1].weight)
result = normalise(np.sum(crossings, axis=0))
off_angles = [angle(result, cross) for cross in crossings] * u.rad
err_est_dir = np.average(
off_angles,
weights=[len(cross) for cross in crossings]
)
return result, err_est_dir
def estimate_core_position(self, hillas_dict, pointing_direction):
'''
Estimate the core position by intersection the major ellipse lines of each telescope.
Parameters
-----------
hillas_dict: dict[HillasContainer]
dictionary of hillas moments
pointing_direction: SkyCoord[AltAz]
Pointing direction of the array
Returns
-----------
core_x: u.Quantity
estimated x position of impact
core_y: u.Quantity
estimated y position of impact
'''
psi = u.Quantity([h.psi for h in hillas_dict.values()])
z = np.zeros(len(psi))
uvw_vectors = np.column_stack([np.cos(psi).value, np.sin(psi).value, z])
tilted_frame = TiltedGroundFrame(pointing_direction=pointing_direction)
ground_frame = GroundFrame(pointing_direction=pointing_direction)
positions = [
(
SkyCoord(*plane.pos, frame=ground_frame)
.transform_to(tilted_frame)
.cartesian.xyz
)
for plane in self.hillas_planes.values()
]
core_position = line_line_intersection_3d(uvw_vectors, positions)
core_pos_tilted = SkyCoord(
x=core_position[0] * u.m,
y=core_position[1] * u.m,
frame=tilted_frame
)
core_pos = project_to_ground(core_pos_tilted)
return core_pos.x, core_pos.y
def estimate_h_max(self):
'''
Estimate the max height by intersecting the lines of the cog directions of each telescope.
Parameters
-----------
hillas_dict : dictionary
dictionary of hillas moments
subarray : ctapipe.instrument.SubarrayDescription
subarray information
Returns
-----------
astropy.unit.Quantity
the estimated max height
'''
uvw_vectors = np.array([plane.a for plane in self.hillas_planes.values()])
positions = [plane.pos for plane in self.hillas_planes.values()]
# not sure if its better to return the length of the vector of the z component
return np.linalg.norm(line_line_intersection_3d(uvw_vectors, positions)) * u.m
class HillasPlane:
    """A tiny helper class collecting the geometry of one telescope's great circle.

    Stores the vectors a, b, and c.

    These vectors are Euclidean [x, y, z] where positive z values point towards
    the sky and x and y are parallel to the ground.
    """

    def __init__(self, p1, p2, telescope_position, weight=1):
        """The constructor takes two coordinates in the horizontal
        frame (alt, az) which define a plane perpendicular
        to the camera.

        Parameters
        -----------
        p1: astropy.coordinates.SkyCoord
            One of two direction vectors which define the plane.
            This coordinate has to be defined in the ctapipe.coordinates.HorizonFrame
        p2: astropy.coordinates.SkyCoord
            One of two direction vectors which define the plane.
            This coordinate has to be defined in the ctapipe.coordinates.HorizonFrame
        telescope_position: np.array(3)
            Position of the telescope on the ground
        weight : float, optional
            weight of this plane for later use during the reconstruction

        Notes
        -----
        c: numpy.ndarray(3)
            c = (a x b) x a, so a and c form an orthogonal base for the
            great circle
            (only orthonormal if a and b are of unit-length)
        norm: numpy.ndarray(3)
            normal vector of the circle's plane,
            perpendicular to a, b and c
        """
        self.pos = telescope_position

        # astropy's coordinates system rotates counter clockwise. Apparently we assume it to
        # be clockwise, hence the sign flip on the azimuth.
        self.a = np.array(spherical_to_cartesian(1, p1.alt, -p1.az)).ravel()
        self.b = np.array(spherical_to_cartesian(1, p2.alt, -p2.az)).ravel()

        # a and c form an orthogonal basis for the great circle
        # not really necessary since the norm can be calculated
        # with a and b just as well
        self.c = np.cross(np.cross(self.a, self.b), self.a)
        # normal vector for the plane defined by the great circle
        self.norm = normalise(np.cross(self.a, self.c))
        # some weight for this circle
        # (put e.g. uncertainty on the Hillas parameters
        # or number of PE in here)
        self.weight = weight
| 33.388471 | 108 | 0.617325 |
from ctapipe.reco.reco_algorithms import Reconstructor
from ctapipe.io.containers import ReconstructedShowerContainer
from ctapipe.coordinates import HorizonFrame, CameraFrame, GroundFrame, TiltedGroundFrame, project_to_ground
from astropy.coordinates import SkyCoord, spherical_to_cartesian, cartesian_to_spherical
from itertools import combinations
import numpy as np
from astropy import units as u
__all__ = ['HillasReconstructor', 'TooFewTelescopesException', 'HillasPlane']
class TooFewTelescopesException(Exception):
pass
def angle(v1, v2):
norm = np.linalg.norm(v1) * np.linalg.norm(v2)
return np.arccos(np.clip(v1.dot(v2) / norm, -1.0, 1.0))
def normalise(vec):
try:
return vec / np.linalg.norm(vec)
except ZeroDivisionError:
return vec
def line_line_intersection_3d(uvw_vectors, origins):
C = []
S = []
for n, pos in zip(uvw_vectors, origins):
n = n.reshape((3, 1))
norm_matrix = (n @ n.T) - np.eye(3)
C.append(norm_matrix @ pos)
S.append(norm_matrix)
S = np.array(S).sum(axis=0)
C = np.array(C).sum(axis=0)
return np.linalg.inv(S) @ C
class HillasReconstructor(Reconstructor):
def __init__(self, config=None, tool=None, **kwargs):
super().__init__(config=config, parent=tool, **kwargs)
self.hillas_planes = {}
def predict(self, hillas_dict, inst, pointing_alt, pointing_az):
if len(hillas_dict) < 2:
raise TooFewTelescopesException(
"need at least two telescopes, have {}"
.format(len(hillas_dict)))
self.initialize_hillas_planes(
hillas_dict,
inst.subarray,
pointing_alt,
pointing_az
)
direction, err_est_dir = self.estimate_direction()
alt = u.Quantity(list(pointing_alt.values()))
az = u.Quantity(list(pointing_az.values()))
if np.any(alt != alt[0]) or np.any(az != az[0]):
raise ValueError('Divergent pointing not supported')
pointing_direction = SkyCoord(alt=alt[0], az=az[0], frame='altaz')
core_pos = self.estimate_core_position(hillas_dict, pointing_direction)
result = ReconstructedShowerContainer()
_, lat, lon = cartesian_to_spherical(*direction)
h_max = self.estimate_h_max()
# Apparently we assume it to be clockwise.
result.alt, result.az = lat, -lon
result.core_x = core_pos[0]
result.core_y = core_pos[1]
result.core_uncert = np.nan
result.tel_ids = [h for h in hillas_dict.keys()]
result.average_size = np.mean([h.intensity for h in hillas_dict.values()])
result.is_valid = True
result.alt_uncert = err_est_dir
result.az_uncert = np.nan
result.h_max = h_max
result.h_max_uncert = np.nan
result.goodness_of_fit = np.nan
return result
def initialize_hillas_planes(
self,
hillas_dict,
subarray,
pointing_alt,
pointing_az
):
self.hillas_planes = {}
for tel_id, moments in hillas_dict.items():
# we just need any point on the main shower axis a bit away from the cog
p2_x = moments.x + 0.1 * u.m * np.cos(moments.psi)
p2_y = moments.y + 0.1 * u.m * np.sin(moments.psi)
focal_length = subarray.tel[tel_id].optics.equivalent_focal_length
pointing = SkyCoord(
alt=pointing_alt[tel_id],
az=pointing_az[tel_id],
frame='altaz'
)
hf = HorizonFrame(
array_direction=pointing,
pointing_direction=pointing
)
cf = CameraFrame(
focal_length=focal_length,
array_direction=pointing,
pointing_direction=pointing,
)
cog_coord = SkyCoord(x=moments.x, y=moments.y, frame=cf)
cog_coord = cog_coord.transform_to(hf)
p2_coord = SkyCoord(x=p2_x, y=p2_y, frame=cf)
p2_coord = p2_coord.transform_to(hf)
circle = HillasPlane(
p1=cog_coord,
p2=p2_coord,
telescope_position=subarray.positions[tel_id],
weight=moments.intensity * (moments.length / moments.width),
)
self.hillas_planes[tel_id] = circle
def estimate_direction(self):
crossings = []
for perm in combinations(self.hillas_planes.values(), 2):
n1, n2 = perm[0].norm, perm[1].norm
# cross product automatically weighs in the angle between
# the two vectors: narrower angles have less impact,
# perpendicular vectors have the most
crossing = np.cross(n1, n2)
# two great circles cross each other twice (one would be
# the origin, the other one the direction of the gamma) it
# doesn't matter which we pick but it should at least be
if crossing[2] < 0:
crossing *= -1
crossings.append(crossing * perm[0].weight * perm[1].weight)
result = normalise(np.sum(crossings, axis=0))
off_angles = [angle(result, cross) for cross in crossings] * u.rad
err_est_dir = np.average(
off_angles,
weights=[len(cross) for cross in crossings]
)
return result, err_est_dir
def estimate_core_position(self, hillas_dict, pointing_direction):
psi = u.Quantity([h.psi for h in hillas_dict.values()])
z = np.zeros(len(psi))
uvw_vectors = np.column_stack([np.cos(psi).value, np.sin(psi).value, z])
tilted_frame = TiltedGroundFrame(pointing_direction=pointing_direction)
ground_frame = GroundFrame(pointing_direction=pointing_direction)
positions = [
(
SkyCoord(*plane.pos, frame=ground_frame)
.transform_to(tilted_frame)
.cartesian.xyz
)
for plane in self.hillas_planes.values()
]
core_position = line_line_intersection_3d(uvw_vectors, positions)
core_pos_tilted = SkyCoord(
x=core_position[0] * u.m,
y=core_position[1] * u.m,
frame=tilted_frame
)
core_pos = project_to_ground(core_pos_tilted)
return core_pos.x, core_pos.y
def estimate_h_max(self):
uvw_vectors = np.array([plane.a for plane in self.hillas_planes.values()])
positions = [plane.pos for plane in self.hillas_planes.values()]
return np.linalg.norm(line_line_intersection_3d(uvw_vectors, positions)) * u.m
class HillasPlane:
def __init__(self, p1, p2, telescope_position, weight=1):
self.pos = telescope_position
# be clockwise
self.a = np.array(spherical_to_cartesian(1, p1.alt, -p1.az)).ravel()
self.b = np.array(spherical_to_cartesian(1, p2.alt, -p2.az)).ravel()
# a and c form an orthogonal basis for the great circle
# not really necessary since the norm can be calculated
# with a and b just as well
self.c = np.cross(np.cross(self.a, self.b), self.a)
# normal vector for the plane defined by the great circle
self.norm = normalise(np.cross(self.a, self.c))
# some weight for this circle
# (put e.g. uncertainty on the Hillas parameters
# or number of PE in here)
self.weight = weight
| true | true |
1c301fc86aa3e71ef49092f8fcb7561db23b5210 | 91 | py | Python | plugins/googleanalytics/kpireport_googleanalytics/__init__.py | diurnalist/kpireporter | b3ce9ca52567405557ea12f45c1a7fda076d746a | [
"BlueOak-1.0.0",
"Apache-2.0"
] | 9 | 2021-05-17T05:32:46.000Z | 2022-03-16T22:49:26.000Z | plugins/googleanalytics/kpireport_googleanalytics/__init__.py | diurnalist/kpireporter | b3ce9ca52567405557ea12f45c1a7fda076d746a | [
"BlueOak-1.0.0",
"Apache-2.0"
] | 4 | 2020-10-10T23:38:20.000Z | 2020-11-08T22:41:24.000Z | plugins/googleanalytics/kpireport_googleanalytics/__init__.py | diurnalist/kpireporter | b3ce9ca52567405557ea12f45c1a7fda076d746a | [
"BlueOak-1.0.0",
"Apache-2.0"
] | 1 | 2021-01-12T02:49:04.000Z | 2021-01-12T02:49:04.000Z | from .datasource import GoogleAnalyticsDatasource
__all__ = ["GoogleAnalyticsDatasource"]
| 22.75 | 49 | 0.846154 | from .datasource import GoogleAnalyticsDatasource
__all__ = ["GoogleAnalyticsDatasource"]
| true | true |
1c302026c6f18a359f53fe88062140ac1d30261b | 890 | py | Python | tests/test_table_metadata.py | Synicix/DJ-GUI-API | 9afcf242f5f62c7d03c219380bf8490b46257f58 | [
"MIT"
] | null | null | null | tests/test_table_metadata.py | Synicix/DJ-GUI-API | 9afcf242f5f62c7d03c219380bf8490b46257f58 | [
"MIT"
] | null | null | null | tests/test_table_metadata.py | Synicix/DJ-GUI-API | 9afcf242f5f62c7d03c219380bf8490b46257f58 | [
"MIT"
] | null | null | null | from . import SCHEMA_PREFIX, client, token, connection, schemas_simple
def test_definition(token, client, schemas_simple):
    """REST table definitions resolve references across schemas.

    TableB's definition should contain a ``<schema>.TableA`` reference within
    the same schema; DiffTableB (second schema) should reference the first
    schema's table by its backtick-quoted ``database``.``#table_a`` name.
    """
    simple1, simple2 = schemas_simple
    REST_definition = client.post('/get_table_definition',
                                  headers=dict(Authorization=f'Bearer {token}'),
                                  json=dict(schemaName=simple1.database,
                                            tableName='TableB')).data
    assert f'{simple1.database}.TableA' in REST_definition.decode('utf-8')
    REST_definition = client.post('/get_table_definition',
                                  headers=dict(Authorization=f'Bearer {token}'),
                                  json=dict(schemaName=simple2.database,
                                            tableName='DiffTableB')).data
    assert f'`{simple1.database}`.`#table_a`' in REST_definition.decode('utf-8')
| 52.352941 | 80 | 0.573034 | from . import SCHEMA_PREFIX, client, token, connection, schemas_simple
def test_definition(token, client, schemas_simple):
simple1, simple2 = schemas_simple
REST_definition = client.post('/get_table_definition',
headers=dict(Authorization=f'Bearer {token}'),
json=dict(schemaName=simple1.database,
tableName='TableB')).data
assert f'{simple1.database}.TableA' in REST_definition.decode('utf-8')
REST_definition = client.post('/get_table_definition',
headers=dict(Authorization=f'Bearer {token}'),
json=dict(schemaName=simple2.database,
tableName='DiffTableB')).data
assert f'`{simple1.database}`.`#table_a`' in REST_definition.decode('utf-8')
| true | true |
1c3022538e0d8a8157a20448e2808121b074799c | 4,841 | py | Python | app.py | parzibyte/camara_vigilancia_flask_python_opencv | ef4e186066128fbf4aac7bf9b5073443c1a432ea | [
"MIT"
] | 2 | 2021-03-04T11:39:13.000Z | 2021-10-09T00:25:01.000Z | app.py | parzibyte/camara_vigilancia_flask_python_opencv | ef4e186066128fbf4aac7bf9b5073443c1a432ea | [
"MIT"
] | null | null | null | app.py | parzibyte/camara_vigilancia_flask_python_opencv | ef4e186066128fbf4aac7bf9b5073443c1a432ea | [
"MIT"
] | null | null | null | """
____ _____ _ _ _
| _ \ | __ \ (_) | | |
| |_) |_ _ | |__) |_ _ _ __ _____| |__ _ _| |_ ___
| _ <| | | | | ___/ _` | '__|_ / | '_ \| | | | __/ _ \
| |_) | |_| | | | | (_| | | / /| | |_) | |_| | || __/
|____/ \__, | |_| \__,_|_| /___|_|_.__/ \__, |\__\___|
__/ | __/ |
|___/ |___/
____________________________________
/ Si necesitas ayuda, contáctame en \
\ https://parzibyte.me /
------------------------------------
\ ^__^
\ (oo)\_______
(__)\ )\/\
||----w |
|| ||
Creado por Parzibyte (https://parzibyte.me).
------------------------------------------------------------------------------------------------
Si el código es útil para ti, puedes agradecerme siguiéndome: https://parzibyte.me/blog/sigueme/
Y compartiendo mi blog con tus amigos
También tengo canal de YouTube: https://www.youtube.com/channel/UCroP4BTWjfM0CkGB6AFUoBg?sub_confirmation=1
------------------------------------------------------------------------------------------------
"""
import cv2
from flask import Flask, Response, abort, jsonify, render_template

import utiles
app = Flask(__name__)
# Si tienes varias cámaras puedes acceder a ellas en 1, 2, etcétera (en lugar de 0)
camara = cv2.VideoCapture(0)
"""
Configuraciones de vídeo
"""
FRAMES_VIDEO = 20.0
RESOLUCION_VIDEO = (640, 480)
# Marca de agua
# https://docs.opencv.org/master/d6/d6e/group__imgproc__draw.html#ga5126f47f883d730f633d74f07456c576
UBICACION_FECHA_HORA = (0, 15)
FUENTE_FECHA_Y_HORA = cv2.FONT_HERSHEY_PLAIN
ESCALA_FUENTE = 1
COLOR_FECHA_HORA = (255, 255, 255)
GROSOR_TEXTO = 1
TIPO_LINEA_TEXTO = cv2.LINE_AA
# El código de 4 dígitos. En windows me parece que se soporta el XVID
fourcc = cv2.VideoWriter_fourcc(*'XVID')
archivo_video = None
grabando = False
def agregar_fecha_hora_frame(frame):
    """Draw the current date/time watermark onto `frame` in place."""
    cv2.putText(frame, utiles.fecha_y_hora(), UBICACION_FECHA_HORA, FUENTE_FECHA_Y_HORA,
                ESCALA_FUENTE, COLOR_FECHA_HORA, GROSOR_TEXTO, TIPO_LINEA_TEXTO)
# Una función generadora para stremear la cámara
# https://flask.palletsprojects.com/en/1.1.x/patterns/streaming/
def generador_frames():
    """Generator that yields camera frames as multipart/x-mixed-replace chunks."""
    while True:
        ok, imagen = obtener_frame_camara()
        if not ok:
            # Camera read failed: end the stream.
            break
        else:
            # Wrap the JPEG bytes as one multipart HTTP chunk
            yield b"--frame\r\nContent-Type: image/jpeg\r\n\r\n" + imagen + b"\r\n"
def obtener_frame_camara():
    """Grab one camera frame, watermark it, record it if active, JPEG-encode it.

    Returns
    -------
    (True, jpeg_bytes) on success, or (False, None) if the camera read failed.
    """
    ok, frame = camara.read()
    if not ok:
        return False, None
    agregar_fecha_hora_frame(frame)
    # Write the frame into the video file while a recording is in progress
    if grabando and archivo_video is not None:
        archivo_video.write(frame)
    # Encode the frame as JPEG
    _, bufer = cv2.imencode(".jpg", frame)
    imagen = bufer.tobytes()
    return True, imagen
# Cuando visiten la ruta
@app.route("/streaming_camara")
def streaming_camara():
    """Stream the camera as an MJPEG multipart response."""
    return Response(generador_frames(), mimetype='multipart/x-mixed-replace; boundary=frame')
# Cuando toman la foto
@app.route("/tomar_foto_descargar")
def descargar_foto():
    """Capture one frame and return it as an attachment download (foto.jpg).

    Fix: `abort` was used without being imported from Flask, so the error path
    crashed with NameError instead of returning HTTP 500. `abort` raises an
    HTTPException, so the unreachable `return` after it is also removed.
    """
    ok, frame = obtener_frame_camara()
    if not ok:
        abort(500)
    respuesta = Response(frame)
    respuesta.headers["Content-Type"] = "image/jpeg"
    respuesta.headers["Content-Transfer-Encoding"] = "Binary"
    respuesta.headers["Content-Disposition"] = "attachment; filename=\"foto.jpg\""
    return respuesta
@app.route("/tomar_foto_guardar")
def guardar_foto():
    """Capture one frame, watermark it and save it server-side as <uuid>.jpg.

    Responds with JSON {ok, nombre_foto}; the filename is returned even when
    the capture failed (ok is False and no file was written).
    """
    nombre_foto = utiles.obtener_uuid() + ".jpg"
    ok, frame = camara.read()
    if ok:
        agregar_fecha_hora_frame(frame)
        cv2.imwrite(nombre_foto, frame)

    return jsonify({
        "ok": ok,
        "nombre_foto": nombre_foto,
    })
# Cuando visiten /, servimos el index.html
@app.route('/')
def index():
    """Serve the main page (index.html)."""
    return render_template("index.html")
@app.route("/comenzar_grabacion")
def comenzar_grabacion():
    """Start recording to a new timestamped AVI file.

    Returns JSON false (no-op) if a recording is already in progress,
    JSON true otherwise. Frames are written by obtener_frame_camara().
    """
    global grabando
    global archivo_video
    if grabando and archivo_video:
        return jsonify(False)
    nombre = utiles.fecha_y_hora_para_nombre_archivo() + ".avi"
    archivo_video = cv2.VideoWriter(
        nombre, fourcc, FRAMES_VIDEO, RESOLUCION_VIDEO)
    grabando = True
    return jsonify(True)
@app.route("/detener_grabacion")
def detener_grabacion():
    """Stop the current recording and close the video file.

    Returns JSON false (no-op) if no recording is in progress,
    JSON true after releasing the writer.
    """
    global grabando
    global archivo_video
    if not grabando or not archivo_video:
        return jsonify(False)
    grabando = False
    archivo_video.release()
    archivo_video = None
    return jsonify(True)
@app.route("/estado_grabacion")
def estado_grabacion():
    """Return the recording flag as JSON (true while recording)."""
    return jsonify(grabando)
if __name__ == "__main__":
app.run(debug=True, host="0.0.0.0")
| 29.882716 | 107 | 0.606899 |
import cv2
import utiles
from flask import Flask, render_template, Response, jsonify
app = Flask(__name__)
camara = cv2.VideoCapture(0)
FRAMES_VIDEO = 20.0
RESOLUCION_VIDEO = (640, 480)
UBICACION_FECHA_HORA = (0, 15)
FUENTE_FECHA_Y_HORA = cv2.FONT_HERSHEY_PLAIN
ESCALA_FUENTE = 1
COLOR_FECHA_HORA = (255, 255, 255)
GROSOR_TEXTO = 1
TIPO_LINEA_TEXTO = cv2.LINE_AA
fourcc = cv2.VideoWriter_fourcc(*'XVID')
archivo_video = None
grabando = False
def agregar_fecha_hora_frame(frame):
cv2.putText(frame, utiles.fecha_y_hora(), UBICACION_FECHA_HORA, FUENTE_FECHA_Y_HORA,
ESCALA_FUENTE, COLOR_FECHA_HORA, GROSOR_TEXTO, TIPO_LINEA_TEXTO)
def generador_frames():
while True:
ok, imagen = obtener_frame_camara()
if not ok:
break
else:
yield b"--frame\r\nContent-Type: image/jpeg\r\n\r\n" + imagen + b"\r\n"
def obtener_frame_camara():
ok, frame = camara.read()
if not ok:
return False, None
agregar_fecha_hora_frame(frame)
if grabando and archivo_video is not None:
archivo_video.write(frame)
_, bufer = cv2.imencode(".jpg", frame)
imagen = bufer.tobytes()
return True, imagen
@app.route("/streaming_camara")
def streaming_camara():
return Response(generador_frames(), mimetype='multipart/x-mixed-replace; boundary=frame')
@app.route("/tomar_foto_descargar")
def descargar_foto():
ok, frame = obtener_frame_camara()
if not ok:
abort(500)
return
respuesta = Response(frame)
respuesta.headers["Content-Type"] = "image/jpeg"
respuesta.headers["Content-Transfer-Encoding"] = "Binary"
respuesta.headers["Content-Disposition"] = "attachment; filename=\"foto.jpg\""
return respuesta
@app.route("/tomar_foto_guardar")
def guardar_foto():
nombre_foto = utiles.obtener_uuid() + ".jpg"
ok, frame = camara.read()
if ok:
agregar_fecha_hora_frame(frame)
cv2.imwrite(nombre_foto, frame)
return jsonify({
"ok": ok,
"nombre_foto": nombre_foto,
})
@app.route('/')
def index():
return render_template("index.html")
@app.route("/comenzar_grabacion")
def comenzar_grabacion():
global grabando
global archivo_video
if grabando and archivo_video:
return jsonify(False)
nombre = utiles.fecha_y_hora_para_nombre_archivo() + ".avi"
archivo_video = cv2.VideoWriter(
nombre, fourcc, FRAMES_VIDEO, RESOLUCION_VIDEO)
grabando = True
return jsonify(True)
@app.route("/detener_grabacion")
def detener_grabacion():
global grabando
global archivo_video
if not grabando or not archivo_video:
return jsonify(False)
grabando = False
archivo_video.release()
archivo_video = None
return jsonify(True)
@app.route("/estado_grabacion")
def estado_grabacion():
    """Return the current recording state as a JSON boolean."""
    return jsonify(grabando)
if __name__ == "__main__":
    # host="0.0.0.0" listens on all interfaces so other devices on the
    # network can reach the stream; debug=True enables Flask's reloader.
    app.run(debug=True, host="0.0.0.0")
| true | true |
1c3023b6c0572515d2d8db0cd59ac6cbd42856ee | 728 | py | Python | memex-datawake-stream/src/datawakeio/data_connector.py | fritojay/Datawake-Legacy | 67fe147ee20a540ebe32d5d7a4fdb97708c0bb2e | [
"Apache-2.0"
] | 5 | 2015-11-11T18:54:21.000Z | 2018-03-03T22:25:12.000Z | memex-datawake-stream/src/datawakeio/data_connector.py | fritojay/Datawake-Legacy | 67fe147ee20a540ebe32d5d7a4fdb97708c0bb2e | [
"Apache-2.0"
] | null | null | null | memex-datawake-stream/src/datawakeio/data_connector.py | fritojay/Datawake-Legacy | 67fe147ee20a540ebe32d5d7a4fdb97708c0bb2e | [
"Apache-2.0"
] | 3 | 2015-10-29T10:22:52.000Z | 2017-09-01T19:34:21.000Z | class ExtractedDataConnector:
    def __init__(self):
        # Abstract base; concrete connectors own their connection state.
        pass
    def open(self):
        """Open the backing datastore connection (subclass responsibility)."""
        raise NotImplementedError("Implement open()")
    def close(self):
        """Close the backing datastore connection (subclass responsibility)."""
        raise NotImplementedError("Implement close()")
    def _check_conn(self):
        """Validate the underlying connection (subclass responsibility)."""
        raise NotImplementedError("Implement _checkConn()")
    def insert_entities(self, url, entity_type, entity_values):
        """Store extracted entity values of one type for a URL (subclass responsibility)."""
        raise NotImplementedError("Implement insertEntities()")
    def insert_domain_entities(self, domain,url, entity_type, entity_values):
        """Store domain-scoped entity values for a URL (subclass responsibility)."""
        raise NotImplementedError("Implement insertDomainEntities()")
    def get_domain_entity_matches(self, domain, type, values):
        """Return which of *values* already exist for *domain* (subclass responsibility)."""
        # NOTE(review): "type" shadows the builtin; kept for caller compatibility.
        raise NotImplementedError("Implement getEntityMatches()")
| 20.8 | 77 | 0.70467 | class ExtractedDataConnector:
def __init__(self):
pass
def open(self):
raise NotImplementedError("Implement open()")
def close(self):
raise NotImplementedError("Implement close()")
def _check_conn(self):
raise NotImplementedError("Implement _checkConn()")
def insert_entities(self, url, entity_type, entity_values):
raise NotImplementedError("Implement insertEntities()")
def insert_domain_entities(self, domain,url, entity_type, entity_values):
raise NotImplementedError("Implement insertDomainEntities()")
def get_domain_entity_matches(self, domain, type, values):
raise NotImplementedError("Implement getEntityMatches()")
| true | true |
1c3023eb4b1e14ac3f341041f735ae9f153653cd | 20,551 | py | Python | v0.5/classification_and_detection/python/main.py | goldgeisser/inference | 42c68883f6f60bf6e0d0253c1bb797a285e2d5f2 | [
"Apache-2.0"
] | null | null | null | v0.5/classification_and_detection/python/main.py | goldgeisser/inference | 42c68883f6f60bf6e0d0253c1bb797a285e2d5f2 | [
"Apache-2.0"
] | null | null | null | v0.5/classification_and_detection/python/main.py | goldgeisser/inference | 42c68883f6f60bf6e0d0253c1bb797a285e2d5f2 | [
"Apache-2.0"
] | 1 | 2018-11-20T16:59:07.000Z | 2018-11-20T16:59:07.000Z | """
mlperf inference benchmarking tool
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import array
import collections
import json
import logging
import os
import threading
import time
from queue import Queue
import mlperf_loadgen as lg
import numpy as np
import dataset
import imagenet
import coco
logging.basicConfig(level=logging.INFO)
log = logging.getLogger("main")
NANO_SEC = 1e9
MILLI_SEC = 1000
# pylint: disable=missing-docstring
# the datasets we support
SUPPORTED_DATASETS = {
"imagenet":
(imagenet.Imagenet, dataset.pre_process_vgg, dataset.PostProcessCommon(offset=-1),
{"image_size": [224, 224, 3]}),
"imagenet_mobilenet":
(imagenet.Imagenet, dataset.pre_process_mobilenet, dataset.PostProcessArgMax(offset=-1),
{"image_size": [224, 224, 3]}),
"coco-300":
(coco.Coco, dataset.pre_process_coco_mobilenet, coco.PostProcessCoco(),
{"image_size": [300, 300, 3]}),
"coco-300-pt":
(coco.Coco, dataset.pre_process_coco_pt_mobilenet, coco.PostProcessCocoPt(False,0.3),
{"image_size": [300, 300, 3]}),
"coco-1200":
(coco.Coco, dataset.pre_process_coco_resnet34, coco.PostProcessCoco(),
{"image_size": [1200, 1200, 3]}),
"coco-1200-onnx":
(coco.Coco, dataset.pre_process_coco_resnet34, coco.PostProcessCocoOnnx(),
{"image_size": [1200, 1200, 3]}),
"coco-1200-pt":
(coco.Coco, dataset.pre_process_coco_resnet34, coco.PostProcessCocoPt(True,0.05),
{"image_size": [1200, 1200, 3]}),
"coco-1200-tf":
(coco.Coco, dataset.pre_process_coco_resnet34, coco.PostProcessCocoTf(),
{"image_size": [1200, 1200, 3]}),
}
# pre-defined command line options so simplify things. They are used as defaults and can be
# overwritten from command line
DEFAULT_LATENCY = "0.100"
LATENCY_RESNET50 = "0.015"
LATENCY_MOBILENET = "0.010"
LATENCY_SSD_MOBILENET = "0.010"
# FIXME: change once final value is known
LATENCY_SSD_RESNET34 = "0.100"
SUPPORTED_PROFILES = {
"defaults": {
"dataset": "imagenet",
"backend": "tensorflow",
"cache": 0,
"queries-single": 1024,
"queries-multi": 24576,
"max-latency": DEFAULT_LATENCY,
"max-batchsize": 32,
},
# resnet
"resnet50-tf": {
"inputs": "input_tensor:0",
"outputs": "ArgMax:0",
"dataset": "imagenet",
"backend": "tensorflow",
"max-latency": LATENCY_RESNET50,
},
"resnet50-onnxruntime": {
"dataset": "imagenet",
"outputs": "ArgMax:0",
"backend": "onnxruntime",
"max-latency": LATENCY_RESNET50,
},
# mobilenet
"mobilenet-tf": {
"inputs": "input:0",
"outputs": "MobilenetV1/Predictions/Reshape_1:0",
"dataset": "imagenet_mobilenet",
"backend": "tensorflow",
"max-latency": LATENCY_MOBILENET,
},
"mobilenet-onnxruntime": {
"dataset": "imagenet_mobilenet",
"outputs": "MobilenetV1/Predictions/Reshape_1:0",
"backend": "onnxruntime",
"max-latency": LATENCY_MOBILENET,
},
# ssd-mobilenet
"ssd-mobilenet-tf": {
"inputs": "image_tensor:0",
"outputs": "num_detections:0,detection_boxes:0,detection_scores:0,detection_classes:0",
"dataset": "coco-300",
"backend": "tensorflow",
"max-latency": LATENCY_SSD_MOBILENET,
},
"ssd-mobilenet-pytorch": {
"inputs": "image",
"outputs": "bboxes,labels,scores",
"dataset": "coco-300-pt",
"backend": "pytorch-native",
"max-latency": LATENCY_SSD_MOBILENET,
},
"ssd-mobilenet-onnxruntime": {
"dataset": "coco-300",
"outputs": "num_detections:0,detection_boxes:0,detection_scores:0,detection_classes:0",
"backend": "onnxruntime",
"data-format": "NHWC",
"max-latency": LATENCY_SSD_MOBILENET,
},
# ssd-resnet34
"ssd-resnet34-tf": {
"inputs": "image:0",
"outputs": "detection_bboxes:0,detection_classes:0,detection_scores:0",
"dataset": "coco-1200-tf",
"backend": "tensorflow",
"data-format": "NCHW",
"max-latency": LATENCY_SSD_RESNET34,
},
"ssd-resnet34-pytorch": {
"inputs": "image",
"outputs": "bboxes,labels,scores",
"dataset": "coco-1200-pt",
"backend": "pytorch-native",
"max-latency": LATENCY_SSD_RESNET34,
},
"ssd-resnet34-onnxruntime": {
"dataset": "coco-1200-onnx",
"inputs": "image",
"outputs": "bboxes,labels,scores",
"backend": "onnxruntime",
"data-format": "NCHW",
"max-batchsize": 1,
"max-latency": LATENCY_SSD_RESNET34,
},
"ssd-resnet34-onnxruntime-tf": {
"dataset": "coco-1200-tf",
"inputs": "image:0",
"outputs": "detection_bboxes:0,detection_classes:0,detection_scores:0",
"backend": "onnxruntime",
"data-format": "NHWC",
"max-latency": LATENCY_SSD_RESNET34,
},
}
SCENARIO_MAP = {
"SingleStream": lg.TestScenario.SingleStream,
"MultiStream": lg.TestScenario.MultiStream,
"Server": lg.TestScenario.Server,
"Offline": lg.TestScenario.Offline,
}
last_timeing = []
def get_args():
    """Parse the command line.

    Values from the selected ``--profile`` (on top of the "defaults"
    profile) act as fallbacks for any option not given explicitly.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--dataset", choices=SUPPORTED_DATASETS.keys(), help="dataset")
    parser.add_argument("--dataset-path", required=True, help="path to the dataset")
    parser.add_argument("--dataset-list", help="path to the dataset list")
    parser.add_argument("--data-format", choices=["NCHW", "NHWC"], help="data format")
    parser.add_argument("--profile", choices=SUPPORTED_PROFILES.keys(), help="standard profiles")
    parser.add_argument("--scenario", default="SingleStream",
                        help="mlperf benchmark scenario, list of " + str(list(SCENARIO_MAP.keys())))
    parser.add_argument("--queries-single", type=int, default=1024,
                        help="mlperf number of queries for SingleStream")
    parser.add_argument("--queries-offline", type=int, default=24576,
                        help="mlperf number of queries for Offline")
    parser.add_argument("--queries-multi", type=int, default=24576,
                        help="mlperf number of queries for MultiStream,Server")
    parser.add_argument("--max-batchsize", type=int,
                        help="max batch size in a single inference")
    parser.add_argument("--model", required=True, help="model file")
    parser.add_argument("--output", help="test results")
    parser.add_argument("--inputs", help="model inputs")
    parser.add_argument("--outputs", help="model outputs")
    parser.add_argument("--backend", help="runtime to use")
    parser.add_argument("--threads", default=os.cpu_count(), type=int, help="threads")
    parser.add_argument("--time", type=int, help="time to scan in seconds")
    parser.add_argument("--count", type=int, help="dataset items to use")
    parser.add_argument("--qps", type=int, default=10, help="target qps estimate")
    parser.add_argument("--max-latency", type=str, help="mlperf max latency in 99pct tile")
    parser.add_argument("--cache", type=int, default=0, help="use cache")
    parser.add_argument("--accuracy", action="store_true", help="enable accuracy pass")
    parser.add_argument("--find-peak-performance", action="store_true", help="enable finding peak performance pass")
    args = parser.parse_args()
    # don't use defaults in argparser. Instead we default to a dict, override that with a profile
    # and take this as default unless command line give
    # Copy the defaults so repeated calls never mutate the shared
    # module-level profile table (the old code updated it in place).
    defaults = dict(SUPPORTED_PROFILES["defaults"])
    if args.profile:
        defaults.update(SUPPORTED_PROFILES[args.profile])
    for k, v in defaults.items():
        kc = k.replace("-", "_")
        if getattr(args, kc) is None:
            setattr(args, kc, v)
    if args.inputs:
        args.inputs = args.inputs.split(",")
    if args.outputs:
        args.outputs = args.outputs.split(",")
    if args.max_latency:
        args.max_latency = [float(i) for i in args.max_latency.split(",")]
    try:
        args.scenario = [SCENARIO_MAP[scenario] for scenario in args.scenario.split(",")]
    except KeyError:
        # Only an unknown scenario name can fail here; a bare except also
        # swallowed SystemExit/KeyboardInterrupt before.
        parser.error("valid scenarios:" + str(list(SCENARIO_MAP.keys())))
    return args
def get_backend(backend):
    """Instantiate the inference backend named *backend*.

    Each backend module is imported lazily inside its branch so that only
    the selected backend's dependencies need to be installed.

    Raises:
        ValueError: if *backend* is not a recognized backend name.
    """
    if backend == "tensorflow":
        from backend_tf import BackendTensorflow
        return BackendTensorflow()
    if backend == "onnxruntime":
        from backend_onnxruntime import BackendOnnxruntime
        return BackendOnnxruntime()
    if backend == "null":
        from backend_null import BackendNull
        return BackendNull()
    if backend == "pytorch":
        from backend_pytorch import BackendPytorch
        return BackendPytorch()
    if backend == "pytorch-native":
        from backend_pytorch_native import BackendPytorchNative
        return BackendPytorchNative()
    if backend == "tflite":
        from backend_tflite import BackendTflite
        return BackendTflite()
    raise ValueError("unknown backend: " + backend)
class Item:
    """A unit of work queued for processing by the thread pool.

    Bundles the loadgen query ids, the dataset indices, the preprocessed
    image batch and labels, plus the creation timestamp used later for
    latency bookkeeping.
    """
    def __init__(self, query_id, content_id, img, label=None):
        # Record the enqueue time; run_one_item measures latency against it.
        self.start = time.time()
        self.query_id = query_id
        self.content_id = content_id
        self.img = img
        self.label = label
class RunnerBase:
    """Synchronous query runner: executes each loadgen query inline.

    Threaded subclasses (e.g. QueueRunner) override handle_tasks/enqueue/
    finish to dispatch work to worker threads instead.
    """
    def __init__(self, model, ds, threads, post_proc=None, max_batchsize=128):
        self.ds = ds
        self.model = model
        self.post_process = post_proc
        self.threads = threads
        # Accuracy accumulation stays off until start_run() enables it.
        # (The old code assigned this flag twice; the duplicate is removed.)
        self.take_accuracy = False
        self.max_batchsize = max_batchsize
        self.result_timing = []
    def handle_tasks(self, tasks_queue):
        # Hook for threaded subclasses; the synchronous base runs work inline.
        pass
    def start_run(self, result_dict, take_accuracy):
        """Reset per-run state and arm the post-processor for a new run."""
        self.result_dict = result_dict
        self.result_timing = []
        self.take_accuracy = take_accuracy
        self.post_process.start()
    def run_one_item(self, qitem):
        """Predict one batch, post-process it, and answer loadgen."""
        # run the prediction
        processed_results = []
        try:
            results = self.model.predict({self.model.inputs[0]: qitem.img})
            processed_results = self.post_process(results, qitem.content_id, qitem.label, self.result_dict)
            if self.take_accuracy:
                self.post_process.add_results(processed_results)
            self.result_timing.append(time.time() - qitem.start)
        except Exception as ex:  # pylint: disable=broad-except
            src = [self.ds.get_item_loc(i) for i in qitem.content_id]
            log.error("thread: failed on contentid=%s, %s", src, ex)
            # since post_process will not run, fake empty responses
            processed_results = [[]] * len(qitem.query_id)
        finally:
            # Always complete the samples, even on failure, or loadgen hangs.
            response_array_refs = []
            response = []
            for idx, query_id in enumerate(qitem.query_id):
                response_array = array.array("B", np.array(processed_results[idx], np.float32).tobytes())
                # Keep a reference so the buffer stays alive until loadgen
                # has consumed it.
                response_array_refs.append(response_array)
                bi = response_array.buffer_info()
                response.append(lg.QuerySampleResponse(query_id, bi[0], bi[1]))
            lg.QuerySamplesComplete(response)
    def enqueue(self, query_samples):
        """Run query_samples inline, splitting into max_batchsize chunks."""
        idx = [q.index for q in query_samples]
        query_id = [q.id for q in query_samples]
        if len(query_samples) < self.max_batchsize:
            data, label = self.ds.get_samples(idx)
            self.run_one_item(Item(query_id, idx, data, label))
        else:
            bs = self.max_batchsize
            for i in range(0, len(idx), bs):
                data, label = self.ds.get_samples(idx[i:i+bs])
                self.run_one_item(Item(query_id[i:i+bs], idx[i:i+bs], data, label))
    def finish(self):
        # Nothing to clean up in the synchronous runner.
        pass
class QueueRunner(RunnerBase):
    """Threaded runner: enqueue() posts batches that worker threads drain."""
    def __init__(self, model, ds, threads, post_proc=None, max_batchsize=128):
        super().__init__(model, ds, threads, post_proc, max_batchsize)
        # A bounded queue applies back-pressure when workers fall behind.
        self.tasks = Queue(maxsize=threads * 4)
        self.workers = []
        self.result_dict = {}
        for _ in range(self.threads):
            worker = threading.Thread(target=self.handle_tasks, args=(self.tasks,))
            worker.daemon = True
            self.workers.append(worker)
            worker.start()
    def handle_tasks(self, tasks_queue):
        """Worker loop: process queued items until a None sentinel arrives."""
        while True:
            qitem = tasks_queue.get()
            if qitem is None:
                # None is the shutdown sentinel posted by finish().
                tasks_queue.task_done()
                return
            self.run_one_item(qitem)
            tasks_queue.task_done()
    def enqueue(self, query_samples):
        """Split query_samples into batches and post them on the task queue."""
        idx = [q.index for q in query_samples]
        query_id = [q.id for q in query_samples]
        if len(query_samples) < self.max_batchsize:
            data, label = self.ds.get_samples(idx)
            self.tasks.put(Item(query_id, idx, data, label))
            return
        bs = self.max_batchsize
        for start in range(0, len(idx), bs):
            stop = start + bs
            data, label = self.ds.get_samples(idx[start:stop])
            self.tasks.put(Item(query_id[start:stop], idx[start:stop], data, label))
    def finish(self):
        # Post one sentinel per worker, then wait for all of them to exit.
        for _ in self.workers:
            self.tasks.put(None)
        for worker in self.workers:
            worker.join()
def add_results(final_results, name, result_dict, result_list, took, show_accuracy=False):
    """Summarize one run's latencies into ``final_results[name]`` and print it.

    Args:
        final_results: dict accumulating per-run summaries.
        name: key under which this run's summary is stored.
        result_dict: counters filled during the run ("good", "total" and
            optionally "mAP").
        result_list: per-query latencies in seconds.
        took: wall-clock duration of the run in seconds.
        show_accuracy: also report accuracy (and mAP when present).
    """
    percentiles = [50., 80., 90., 95., 99., 99.9]
    buckets = np.percentile(result_list, percentiles).tolist()
    buckets_str = ",".join("{}:{:.4f}".format(p, b) for p, b in zip(percentiles, buckets))
    if result_dict["total"] == 0:
        # Fall back to the number of measured queries when the
        # post-processor did not count totals itself.
        result_dict["total"] = len(result_list)
    summary = {
        "took": took,
        "mean": np.mean(result_list),
        "percentiles": {str(p): b for p, b in zip(percentiles, buckets)},
        "qps": len(result_list) / took,
        "count": len(result_list),
        "good_items": result_dict["good"],
        "total_items": result_dict["total"],
    }
    acc_str = ""
    if show_accuracy:
        summary["accuracy"] = 100. * result_dict["good"] / result_dict["total"]
        acc_str = ", acc={:.3f}%".format(summary["accuracy"])
        if "mAP" in result_dict:
            summary["mAP"] = 100. * result_dict["mAP"]
            acc_str += ", mAP={:.3f}%".format(summary["mAP"])
    final_results[name] = summary
    # One-line human-readable report to stdout.
    print("{} qps={:.2f}, mean={:.4f}, time={:.3f}{}, queries={}, tiles={}".format(
        name, summary["qps"], summary["mean"], took, acc_str,
        len(result_list), buckets_str))
def main():
    """Entry point: build dataset and backend, then drive loadgen scenarios."""
    global last_timeing
    args = get_args()
    log.info(args)
    # find backend
    backend = get_backend(args.backend)
    # override image format if given
    image_format = args.data_format if args.data_format else backend.image_format()
    # --count applies to accuracy mode only and can be used to limit the number of images
    # for testing. For perf model we always limit count to 200.
    count = args.count
    if not count:
        if not args.accuracy:
            count = 200
    # dataset to use
    wanted_dataset, pre_proc, post_proc, kwargs = SUPPORTED_DATASETS[args.dataset]
    ds = wanted_dataset(data_path=args.dataset_path,
                        image_list=args.dataset_list,
                        name=args.dataset,
                        image_format=image_format,
                        pre_process=pre_proc,
                        use_cache=args.cache,
                        count=count, **kwargs)
    # load model to backend
    model = backend.load(args.model, inputs=args.inputs, outputs=args.outputs)
    final_results = {
        "runtime": model.name(),
        "version": model.version(),
        "time": int(time.time()),
        "cmdline": str(args),
    }
    #
    # make one pass over the dataset to validate accuracy
    #
    count = ds.get_item_count()
    # warmup
    ds.load_query_samples([0])
    for _ in range(5):
        img, _ = ds.get_samples([0])
        _ = backend.predict({backend.inputs[0]: img})
    ds.unload_query_samples(None)
    for scenario in args.scenario:
        # SingleStream runs queries inline (RunnerBase); all other
        # scenarios dispatch to worker threads (QueueRunner).
        runner_map = {
            lg.TestScenario.SingleStream: RunnerBase,
            lg.TestScenario.MultiStream: QueueRunner,
            lg.TestScenario.Server: QueueRunner,
            lg.TestScenario.Offline: QueueRunner
        }
        runner = runner_map[scenario](model, ds, args.threads, post_proc=post_proc, max_batchsize=args.max_batchsize)
        def issue_queries(query_samples):
            runner.enqueue(query_samples)
        def flush_queries(): pass
        def process_latencies(latencies_ns):
            # called by loadgen to show us the recorded latencies
            global last_timeing
            last_timeing = [t / NANO_SEC for t in latencies_ns]
        settings = lg.TestSettings()
        settings.scenario = scenario
        settings.mode = lg.TestMode.PerformanceOnly
        if args.accuracy:
            settings.mode = lg.TestMode.AccuracyOnly
        if args.find_peak_performance:
            settings.mode = lg.TestMode.FindPeakPerformance
        if args.time:
            # override the time we want to run
            settings.min_duration_ms = args.time * MILLI_SEC
            settings.max_duration_ms = args.time * MILLI_SEC
        if args.qps:
            qps = float(args.qps)
            settings.server_target_qps = qps
            settings.offline_expected_qps = qps
        if scenario == lg.TestScenario.SingleStream:
            settings.min_query_count = args.queries_single
            settings.max_query_count = args.queries_single
        elif scenario == lg.TestScenario.MultiStream:
            settings.min_query_count = args.queries_multi
            settings.max_query_count = args.queries_multi
            # NOTE(review): samples-per-query is hard-coded to 4 here —
            # confirm against the current mlperf rules before changing.
            settings.multi_stream_samples_per_query = 4
        elif scenario == lg.TestScenario.Server:
            # Server sweeps over one run per requested target latency below.
            max_latency = args.max_latency
        elif scenario == lg.TestScenario.Offline:
            settings.min_query_count = args.queries_offline
            settings.max_query_count = args.queries_offline
        sut = lg.ConstructSUT(issue_queries, flush_queries, process_latencies)
        qsl = lg.ConstructQSL(count, min(count, 1000), ds.load_query_samples, ds.unload_query_samples)
        if scenario == lg.TestScenario.Server:
            for target_latency in max_latency:
                log.info("starting {}, latency={}".format(scenario, target_latency))
                settings.server_target_latency_ns = int(target_latency * NANO_SEC)
                result_dict = {"good": 0, "total": 0, "scenario": str(scenario)}
                runner.start_run(result_dict, args.accuracy)
                lg.StartTest(sut, qsl, settings)
                # NOTE(review): if loadgen did not call process_latencies,
                # fall back to the runner's locally measured timings.
                if not last_timeing:
                    last_timeing = runner.result_timing
                if args.accuracy:
                    post_proc.finalize(result_dict, ds, output_dir=os.path.dirname(args.output))
                add_results(final_results, "{}-{}".format(scenario, target_latency),
                            result_dict, last_timeing, time.time() - ds.last_loaded, args.accuracy)
        else:
            log.info("starting {}".format(scenario))
            result_dict = {"good": 0, "total": 0, "scenario": str(scenario)}
            runner.start_run(result_dict, args.accuracy)
            lg.StartTest(sut, qsl, settings)
            if not last_timeing:
                last_timeing = runner.result_timing
            if args.accuracy:
                post_proc.finalize(result_dict, ds, output_dir=os.path.dirname(args.output))
            add_results(final_results, "{}".format(scenario),
                        result_dict, last_timeing, time.time() - ds.last_loaded, args.accuracy)
        runner.finish()
        lg.DestroyQSL(qsl)
        lg.DestroySUT(sut)
    #
    # write final results
    #
    if args.output:
        with open(args.output, "w") as f:
            json.dump(final_results, f, sort_keys=True, indent=4)
if __name__ == "__main__":
    # Allow the benchmark to be invoked directly as a script.
    main()
| 36.895871 | 117 | 0.618753 |
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import array
import collections
import json
import logging
import os
import threading
import time
from queue import Queue
import mlperf_loadgen as lg
import numpy as np
import dataset
import imagenet
import coco
logging.basicConfig(level=logging.INFO)
log = logging.getLogger("main")
NANO_SEC = 1e9
MILLI_SEC = 1000
SUPPORTED_DATASETS = {
"imagenet":
(imagenet.Imagenet, dataset.pre_process_vgg, dataset.PostProcessCommon(offset=-1),
{"image_size": [224, 224, 3]}),
"imagenet_mobilenet":
(imagenet.Imagenet, dataset.pre_process_mobilenet, dataset.PostProcessArgMax(offset=-1),
{"image_size": [224, 224, 3]}),
"coco-300":
(coco.Coco, dataset.pre_process_coco_mobilenet, coco.PostProcessCoco(),
{"image_size": [300, 300, 3]}),
"coco-300-pt":
(coco.Coco, dataset.pre_process_coco_pt_mobilenet, coco.PostProcessCocoPt(False,0.3),
{"image_size": [300, 300, 3]}),
"coco-1200":
(coco.Coco, dataset.pre_process_coco_resnet34, coco.PostProcessCoco(),
{"image_size": [1200, 1200, 3]}),
"coco-1200-onnx":
(coco.Coco, dataset.pre_process_coco_resnet34, coco.PostProcessCocoOnnx(),
{"image_size": [1200, 1200, 3]}),
"coco-1200-pt":
(coco.Coco, dataset.pre_process_coco_resnet34, coco.PostProcessCocoPt(True,0.05),
{"image_size": [1200, 1200, 3]}),
"coco-1200-tf":
(coco.Coco, dataset.pre_process_coco_resnet34, coco.PostProcessCocoTf(),
{"image_size": [1200, 1200, 3]}),
}
DEFAULT_LATENCY = "0.100"
LATENCY_RESNET50 = "0.015"
LATENCY_MOBILENET = "0.010"
LATENCY_SSD_MOBILENET = "0.010"
LATENCY_SSD_RESNET34 = "0.100"
SUPPORTED_PROFILES = {
"defaults": {
"dataset": "imagenet",
"backend": "tensorflow",
"cache": 0,
"queries-single": 1024,
"queries-multi": 24576,
"max-latency": DEFAULT_LATENCY,
"max-batchsize": 32,
},
"resnet50-tf": {
"inputs": "input_tensor:0",
"outputs": "ArgMax:0",
"dataset": "imagenet",
"backend": "tensorflow",
"max-latency": LATENCY_RESNET50,
},
"resnet50-onnxruntime": {
"dataset": "imagenet",
"outputs": "ArgMax:0",
"backend": "onnxruntime",
"max-latency": LATENCY_RESNET50,
},
"mobilenet-tf": {
"inputs": "input:0",
"outputs": "MobilenetV1/Predictions/Reshape_1:0",
"dataset": "imagenet_mobilenet",
"backend": "tensorflow",
"max-latency": LATENCY_MOBILENET,
},
"mobilenet-onnxruntime": {
"dataset": "imagenet_mobilenet",
"outputs": "MobilenetV1/Predictions/Reshape_1:0",
"backend": "onnxruntime",
"max-latency": LATENCY_MOBILENET,
},
"ssd-mobilenet-tf": {
"inputs": "image_tensor:0",
"outputs": "num_detections:0,detection_boxes:0,detection_scores:0,detection_classes:0",
"dataset": "coco-300",
"backend": "tensorflow",
"max-latency": LATENCY_SSD_MOBILENET,
},
"ssd-mobilenet-pytorch": {
"inputs": "image",
"outputs": "bboxes,labels,scores",
"dataset": "coco-300-pt",
"backend": "pytorch-native",
"max-latency": LATENCY_SSD_MOBILENET,
},
"ssd-mobilenet-onnxruntime": {
"dataset": "coco-300",
"outputs": "num_detections:0,detection_boxes:0,detection_scores:0,detection_classes:0",
"backend": "onnxruntime",
"data-format": "NHWC",
"max-latency": LATENCY_SSD_MOBILENET,
},
"ssd-resnet34-tf": {
"inputs": "image:0",
"outputs": "detection_bboxes:0,detection_classes:0,detection_scores:0",
"dataset": "coco-1200-tf",
"backend": "tensorflow",
"data-format": "NCHW",
"max-latency": LATENCY_SSD_RESNET34,
},
"ssd-resnet34-pytorch": {
"inputs": "image",
"outputs": "bboxes,labels,scores",
"dataset": "coco-1200-pt",
"backend": "pytorch-native",
"max-latency": LATENCY_SSD_RESNET34,
},
"ssd-resnet34-onnxruntime": {
"dataset": "coco-1200-onnx",
"inputs": "image",
"outputs": "bboxes,labels,scores",
"backend": "onnxruntime",
"data-format": "NCHW",
"max-batchsize": 1,
"max-latency": LATENCY_SSD_RESNET34,
},
"ssd-resnet34-onnxruntime-tf": {
"dataset": "coco-1200-tf",
"inputs": "image:0",
"outputs": "detection_bboxes:0,detection_classes:0,detection_scores:0",
"backend": "onnxruntime",
"data-format": "NHWC",
"max-latency": LATENCY_SSD_RESNET34,
},
}
SCENARIO_MAP = {
"SingleStream": lg.TestScenario.SingleStream,
"MultiStream": lg.TestScenario.MultiStream,
"Server": lg.TestScenario.Server,
"Offline": lg.TestScenario.Offline,
}
last_timeing = []
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument("--dataset", choices=SUPPORTED_DATASETS.keys(), help="dataset")
parser.add_argument("--dataset-path", required=True, help="path to the dataset")
parser.add_argument("--dataset-list", help="path to the dataset list")
parser.add_argument("--data-format", choices=["NCHW", "NHWC"], help="data format")
parser.add_argument("--profile", choices=SUPPORTED_PROFILES.keys(), help="standard profiles")
parser.add_argument("--scenario", default="SingleStream",
help="mlperf benchmark scenario, list of " + str(list(SCENARIO_MAP.keys())))
parser.add_argument("--queries-single", type=int, default=1024,
help="mlperf number of queries for SingleStream")
parser.add_argument("--queries-offline", type=int, default=24576,
help="mlperf number of queries for Offline")
parser.add_argument("--queries-multi", type=int, default=24576,
help="mlperf number of queries for MultiStream,Server")
parser.add_argument("--max-batchsize", type=int,
help="max batch size in a single inference")
parser.add_argument("--model", required=True, help="model file")
parser.add_argument("--output", help="test results")
parser.add_argument("--inputs", help="model inputs")
parser.add_argument("--outputs", help="model outputs")
parser.add_argument("--backend", help="runtime to use")
parser.add_argument("--threads", default=os.cpu_count(), type=int, help="threads")
parser.add_argument("--time", type=int, help="time to scan in seconds")
parser.add_argument("--count", type=int, help="dataset items to use")
parser.add_argument("--qps", type=int, default=10, help="target qps estimate")
parser.add_argument("--max-latency", type=str, help="mlperf max latency in 99pct tile")
parser.add_argument("--cache", type=int, default=0, help="use cache")
parser.add_argument("--accuracy", action="store_true", help="enable accuracy pass")
parser.add_argument("--find-peak-performance", action="store_true", help="enable finding peak performance pass")
args = parser.parse_args()
# and take this as default unless command line give
defaults = SUPPORTED_PROFILES["defaults"]
if args.profile:
profile = SUPPORTED_PROFILES[args.profile]
defaults.update(profile)
for k, v in defaults.items():
kc = k.replace("-", "_")
if getattr(args, kc) is None:
setattr(args, kc, v)
if args.inputs:
args.inputs = args.inputs.split(",")
if args.outputs:
args.outputs = args.outputs.split(",")
if args.max_latency:
args.max_latency = [float(i) for i in args.max_latency.split(",")]
try:
args.scenario = [SCENARIO_MAP[scenario] for scenario in args.scenario.split(",")]
except:
parser.error("valid scanarios:" + str(list(SCENARIO_MAP.keys())))
return args
def get_backend(backend):
if backend == "tensorflow":
from backend_tf import BackendTensorflow
backend = BackendTensorflow()
elif backend == "onnxruntime":
from backend_onnxruntime import BackendOnnxruntime
backend = BackendOnnxruntime()
elif backend == "null":
from backend_null import BackendNull
backend = BackendNull()
elif backend == "pytorch":
from backend_pytorch import BackendPytorch
backend = BackendPytorch()
elif backend == "pytorch-native":
from backend_pytorch_native import BackendPytorchNative
backend = BackendPytorchNative()
elif backend == "tflite":
from backend_tflite import BackendTflite
backend = BackendTflite()
else:
raise ValueError("unknown backend: " + backend)
return backend
class Item:
def __init__(self, query_id, content_id, img, label=None):
self.query_id = query_id
self.content_id = content_id
self.img = img
self.label = label
self.start = time.time()
class RunnerBase:
def __init__(self, model, ds, threads, post_proc=None, max_batchsize=128):
self.take_accuracy = False
self.ds = ds
self.model = model
self.post_process = post_proc
self.threads = threads
self.take_accuracy = False
self.max_batchsize = max_batchsize
self.result_timing = []
def handle_tasks(self, tasks_queue):
pass
def start_run(self, result_dict, take_accuracy):
self.result_dict = result_dict
self.result_timing = []
self.take_accuracy = take_accuracy
self.post_process.start()
def run_one_item(self, qitem):
# run the prediction
processed_results = []
try:
results = self.model.predict({self.model.inputs[0]: qitem.img})
processed_results = self.post_process(results, qitem.content_id, qitem.label, self.result_dict)
if self.take_accuracy:
self.post_process.add_results(processed_results)
self.result_timing.append(time.time() - qitem.start)
except Exception as ex: # pylint: disable=broad-except
src = [self.ds.get_item_loc(i) for i in qitem.content_id]
log.error("thread: failed on contentid=%s, %s", src, ex)
# since post_process will not run, fake empty responses
processed_results = [[]] * len(qitem.query_id)
finally:
response_array_refs = []
response = []
for idx, query_id in enumerate(qitem.query_id):
response_array = array.array("B", np.array(processed_results[idx], np.float32).tobytes())
response_array_refs.append(response_array)
bi = response_array.buffer_info()
response.append(lg.QuerySampleResponse(query_id, bi[0], bi[1]))
lg.QuerySamplesComplete(response)
def enqueue(self, query_samples):
idx = [q.index for q in query_samples]
query_id = [q.id for q in query_samples]
if len(query_samples) < self.max_batchsize:
data, label = self.ds.get_samples(idx)
self.run_one_item(Item(query_id, idx, data, label))
else:
bs = self.max_batchsize
for i in range(0, len(idx), bs):
data, label = self.ds.get_samples(idx[i:i+bs])
self.run_one_item(Item(query_id[i:i+bs], idx[i:i+bs], data, label))
def finish(self):
pass
class QueueRunner(RunnerBase):
def __init__(self, model, ds, threads, post_proc=None, max_batchsize=128):
super().__init__(model, ds, threads, post_proc, max_batchsize)
self.tasks = Queue(maxsize=threads * 4)
self.workers = []
self.result_dict = {}
for _ in range(self.threads):
worker = threading.Thread(target=self.handle_tasks, args=(self.tasks,))
worker.daemon = True
self.workers.append(worker)
worker.start()
def handle_tasks(self, tasks_queue):
while True:
qitem = tasks_queue.get()
if qitem is None:
# None in the queue indicates the parent want us to exit
tasks_queue.task_done()
break
self.run_one_item(qitem)
tasks_queue.task_done()
def enqueue(self, query_samples):
idx = [q.index for q in query_samples]
query_id = [q.id for q in query_samples]
if len(query_samples) < self.max_batchsize:
data, label = self.ds.get_samples(idx)
self.tasks.put(Item(query_id, idx, data, label))
else:
bs = self.max_batchsize
for i in range(0, len(idx), bs):
ie = i + bs
data, label = self.ds.get_samples(idx[i:ie])
self.tasks.put(Item(query_id[i:ie], idx[i:ie], data, label))
def finish(self):
# exit all threads
for _ in self.workers:
self.tasks.put(None)
for worker in self.workers:
worker.join()
def add_results(final_results, name, result_dict, result_list, took, show_accuracy=False):
percentiles = [50., 80., 90., 95., 99., 99.9]
buckets = np.percentile(result_list, percentiles).tolist()
buckets_str = ",".join(["{}:{:.4f}".format(p, b) for p, b in zip(percentiles, buckets)])
if result_dict["total"] == 0:
result_dict["total"] = len(result_list)
# this is what we record for each run
result = {
"took": took,
"mean": np.mean(result_list),
"percentiles": {str(k): v for k, v in zip(percentiles, buckets)},
"qps": len(result_list) / took,
"count": len(result_list),
"good_items": result_dict["good"],
"total_items": result_dict["total"],
}
acc_str = ""
if show_accuracy:
result["accuracy"] = 100. * result_dict["good"] / result_dict["total"]
acc_str = ", acc={:.3f}%".format(result["accuracy"])
if "mAP" in result_dict:
result["mAP"] = 100. * result_dict["mAP"]
acc_str += ", mAP={:.3f}%".format(result["mAP"])
# add the result to the result dict
final_results[name] = result
# to stdout
print("{} qps={:.2f}, mean={:.4f}, time={:.3f}{}, queries={}, tiles={}".format(
name, result["qps"], result["mean"], took, acc_str,
len(result_list), buckets_str))
def main():
    """Entry point: build dataset + backend from CLI args, run the selected
    MLPerf LoadGen scenario(s), and write the aggregated results to
    ``args.output`` as JSON."""
    global last_timeing
    args = get_args()
    log.info(args)

    # find backend
    backend = get_backend(args.backend)

    # override image format if given
    image_format = args.data_format if args.data_format else backend.image_format()

    # --count applies to accuracy mode only and can be used to limit the number of images
    # for testing. For perf model we always limit count to 200.
    count = args.count
    if not count:
        if not args.accuracy:
            count = 200

    # dataset to use
    wanted_dataset, pre_proc, post_proc, kwargs = SUPPORTED_DATASETS[args.dataset]
    ds = wanted_dataset(data_path=args.dataset_path,
                        image_list=args.dataset_list,
                        name=args.dataset,
                        image_format=image_format,
                        pre_process=pre_proc,
                        use_cache=args.cache,
                        count=count, **kwargs)
    # load model to backend
    model = backend.load(args.model, inputs=args.inputs, outputs=args.outputs)
    final_results = {
        "runtime": model.name(),
        "version": model.version(),
        "time": int(time.time()),
        "cmdline": str(args),
    }

    #
    # make one pass over the dataset to validate accuracy
    #
    count = ds.get_item_count()

    # warmup: a few predictions so lazy backend initialization is not timed
    ds.load_query_samples([0])
    for _ in range(5):
        img, _ = ds.get_samples([0])
        _ = backend.predict({backend.inputs[0]: img})
    ds.unload_query_samples(None)

    for scenario in args.scenario:
        # SingleStream issues queries synchronously; the other scenarios go
        # through the threaded QueueRunner.
        runner_map = {
            lg.TestScenario.SingleStream: RunnerBase,
            lg.TestScenario.MultiStream: QueueRunner,
            lg.TestScenario.Server: QueueRunner,
            lg.TestScenario.Offline: QueueRunner
        }
        runner = runner_map[scenario](model, ds, args.threads, post_proc=post_proc, max_batchsize=args.max_batchsize)

        # Callbacks handed to LoadGen below.
        def issue_queries(query_samples):
            runner.enqueue(query_samples)

        def flush_queries(): pass

        def process_latencies(latencies_ns):
            # called by loadgen to show us the recorded latencies
            global last_timeing
            last_timeing = [t / NANO_SEC for t in latencies_ns]

        settings = lg.TestSettings()
        settings.scenario = scenario
        settings.mode = lg.TestMode.PerformanceOnly
        if args.accuracy:
            settings.mode = lg.TestMode.AccuracyOnly
        if args.find_peak_performance:
            settings.mode = lg.TestMode.FindPeakPerformance

        if args.time:
            # override the time we want to run
            settings.min_duration_ms = args.time * MILLI_SEC
            settings.max_duration_ms = args.time * MILLI_SEC

        if args.qps:
            qps = float(args.qps)
            settings.server_target_qps = qps
            settings.offline_expected_qps = qps

        # per-scenario query counts / latency targets
        if scenario == lg.TestScenario.SingleStream:
            settings.min_query_count = args.queries_single
            settings.max_query_count = args.queries_single
        elif scenario == lg.TestScenario.MultiStream:
            settings.min_query_count = args.queries_multi
            settings.max_query_count = args.queries_multi
            settings.multi_stream_samples_per_query = 4
        elif scenario == lg.TestScenario.Server:
            max_latency = args.max_latency
        elif scenario == lg.TestScenario.Offline:
            settings.min_query_count = args.queries_offline
            settings.max_query_count = args.queries_offline

        sut = lg.ConstructSUT(issue_queries, flush_queries, process_latencies)
        qsl = lg.ConstructQSL(count, min(count, 1000), ds.load_query_samples, ds.unload_query_samples)

        if scenario == lg.TestScenario.Server:
            # Server mode does one test per requested target latency.
            for target_latency in max_latency:
                log.info("starting {}, latency={}".format(scenario, target_latency))
                settings.server_target_latency_ns = int(target_latency * NANO_SEC)

                result_dict = {"good": 0, "total": 0, "scenario": str(scenario)}
                runner.start_run(result_dict, args.accuracy)
                lg.StartTest(sut, qsl, settings)

                # In accuracy mode process_latencies may not fire; fall back
                # to the timings the runner collected itself.
                if not last_timeing:
                    last_timeing = runner.result_timing
                if args.accuracy:
                    post_proc.finalize(result_dict, ds, output_dir=os.path.dirname(args.output))
                add_results(final_results, "{}-{}".format(scenario, target_latency),
                            result_dict, last_timeing, time.time() - ds.last_loaded, args.accuracy)
        else:
            log.info("starting {}".format(scenario))
            result_dict = {"good": 0, "total": 0, "scenario": str(scenario)}
            runner.start_run(result_dict, args.accuracy)
            lg.StartTest(sut, qsl, settings)

            if not last_timeing:
                last_timeing = runner.result_timing
            if args.accuracy:
                post_proc.finalize(result_dict, ds, output_dir=os.path.dirname(args.output))
            add_results(final_results, "{}".format(scenario),
                        result_dict, last_timeing, time.time() - ds.last_loaded, args.accuracy)

        runner.finish()
        lg.DestroyQSL(qsl)
        lg.DestroySUT(sut)

    #
    # write final results
    #
    if args.output:
        with open(args.output, "w") as f:
            json.dump(final_results, f, sort_keys=True, indent=4)


if __name__ == "__main__":
    main()
| true | true |
1c302425028a7aa9aebab9f105a5f7dcc50bb75b | 65 | py | Python | hobo/__main__.py | mikewaters/hobo | d3a5376b55a99096488f3f2ad8e42b533de73114 | [
"MIT"
] | null | null | null | hobo/__main__.py | mikewaters/hobo | d3a5376b55a99096488f3f2ad8e42b533de73114 | [
"MIT"
] | 4 | 2015-05-07T14:06:44.000Z | 2015-05-08T17:03:27.000Z | hobo/__main__.py | mikewaters/hobo | d3a5376b55a99096488f3f2ad8e42b533de73114 | [
"MIT"
] | null | null | null | import sys
from hobo.cli import main
sys.exit(0 if main() else 1) | 21.666667 | 28 | 0.753846 | import sys
from hobo.cli import main
sys.exit(0 if main() else 1) | true | true |
1c302609075c3022a8f0051bb2399c8416c54fdc | 7,961 | py | Python | FClip/postprocess.py | Delay-Xili/F-Clip | ea5a7b2ddba8f4baf57e62962b479d8f0447bd65 | [
"MIT"
] | 93 | 2021-04-22T03:20:27.000Z | 2022-03-27T02:21:49.000Z | FClip/postprocess.py | Delay-Xili/F-Clip | ea5a7b2ddba8f4baf57e62962b479d8f0447bd65 | [
"MIT"
] | 10 | 2021-04-23T09:30:37.000Z | 2022-02-28T10:24:41.000Z | FClip/postprocess.py | Delay-Xili/F-Clip | ea5a7b2ddba8f4baf57e62962b479d8f0447bd65 | [
"MIT"
] | 9 | 2021-04-22T05:21:26.000Z | 2022-03-17T07:57:45.000Z | import numpy as np
import torch
def pline(x1, y1, x2, y2, x, y):
    """Squared L2 distance from point (x, y) to the *infinite* line through
    (x1, y1) and (x2, y2)."""
    vx = x2 - x1
    vy = y2 - y1
    norm_sq = vx * vx + vy * vy
    # Projection parameter of the point onto the (unclamped) line; the max()
    # guards against a degenerate zero-length direction vector.
    t = ((x - x1) * vx + (y - y1) * vy) / max(1e-9, float(norm_sq))
    rx = x1 + t * vx - x
    ry = y1 + t * vy - y
    return rx * rx + ry * ry
def psegment(x1, y1, x2, y2, x, y):
    """Squared L2 distance from point (x, y) to the *segment* (x1, y1)-(x2, y2).

    Unlike pline(), the projection parameter u is clamped to [0, 1] so the
    nearest point is constrained to lie on the segment.
    """
    px = x2 - x1
    py = y2 - y1
    dd = px * px + py * py
    # Guard the division for a degenerate (zero-length) segment, consistent
    # with pline()/plambda(); previously this raised ZeroDivisionError when
    # dd == 0.
    u = max(min(((x - x1) * px + (y - y1) * py) / max(1e-9, float(dd)), 1), 0)
    dx = x1 + u * px - x
    dy = y1 + u * py - y
    return dx * dx + dy * dy
def plambda(x1, y1, x2, y2, x, y):
    """Projection parameter of point (x, y) onto the line (x1, y1)-(x2, y2).

    Returns t such that (x1, y1) + t * ((x2, y2) - (x1, y1)) is the foot of
    the perpendicular from (x, y): 0 maps to the first endpoint, 1 to the
    second.  The max() guards against a zero-length direction vector.
    """
    dir_x = x2 - x1
    dir_y = y2 - y1
    length_sq = dir_x * dir_x + dir_y * dir_y
    dot = (x - x1) * dir_x + (y - y1) * dir_y
    return dot / max(1e-9, float(length_sq))
def postprocess(lines, scores, threshold=0.01, tol=1e9, do_clip=False):
    """Greedy deduplication / trimming of line segments.

    Lines are visited in the given order (callers are expected to pass them
    sorted best-first); each candidate (p, q) is compared against every
    already-kept line.  If the two lines run within `threshold` of each other
    (squared-distance test via pline), the kept line's projection interval
    [lambda_a, lambda_b] on the candidate — padded by `tol` on both sides —
    is used to shrink the candidate's surviving sub-interval [start, end],
    or drop it entirely.

    NOTE(review): `do_clip` is accepted but never used in this body.

    :return: (kept_lines, kept_scores) as numpy arrays.
    """
    nlines, nscores = [], []
    for (p, q), score in zip(lines, scores):
        # [start, end] is the surviving portion of segment p->q in its own
        # parameterization (0 at p, 1 at q).
        start, end = 0, 1
        for a, b in nlines:
            if (
                min(
                    max(pline(*p, *q, *a), pline(*p, *q, *b)),
                    max(pline(*a, *b, *p), pline(*a, *b, *q)),
                )
                > threshold ** 2
            ):
                continue
            # Projection of the kept line's endpoints onto the candidate.
            lambda_a = plambda(*p, *q, *a)
            lambda_b = plambda(*p, *q, *b)
            if lambda_a > lambda_b:
                lambda_a, lambda_b = lambda_b, lambda_a

            lambda_a -= tol
            lambda_b += tol

            # case 1: skip (if not do_clip)
            # current line cover the existent line which has higher score.
            if start < lambda_a and lambda_b < end:
                # start = 10  # drop
                continue

            # not intersect
            # no overlap
            if lambda_b < start or lambda_a > end:
                continue

            # cover
            # the current line be covered by line with higher score
            if lambda_a <= start and end <= lambda_b:
                start = 10  # drop
                break

            # case 2 & 3: partial overlap — trim the candidate's interval.
            if lambda_a <= start and start <= lambda_b:
                start = lambda_b
            if lambda_a <= end and end <= lambda_b:
                end = lambda_a

            if start >= end:
                break

        if start >= end:
            continue

        nlines.append(np.array([p + (q - p) * start, p + (q - p) * end]))
        nscores.append(score)
    return np.array(nlines), np.array(nscores)
def acc_postprocess(gtlines, lines, scores, threshold=1, is_cover=True, is_becover=True, overlap_fraction=0):
    """Drop lines that duplicate earlier lines in `gtlines` (numpy version).

    Two lines are "nearby" when the smaller of their mutual max-endpoint
    squared distances is below `threshold`.  For a nearby pair, the later
    line is marked for removal when the earlier line covers it (`is_cover`),
    is covered by it (`is_becover`), or when their projection intervals
    overlap by more than `overlap_fraction` (only checked if > 0).

    :param gtlines: (N, 2, 2) reference endpoints (often the same array as `lines`).
    :param lines: (N, 2, 2) candidate line endpoints.
    :param scores: (N,) per-line scores, filtered alongside `lines`.
    :return: (kept_lines, kept_scores).
    """
    lines1 = gtlines.copy()
    lines2 = lines.copy()

    def get_u_line(_lines, points):
        """N lines to M points u score and l2 distance"""
        p = _lines[:, 1] - _lines[:, 0]  # (N, 2)
        dd = np.sum(p ** 2, -1)  # (N,)
        pv1 = points[:, None] - _lines[:, 0][None]  # (M, N, 2)
        inner_u = np.sum(pv1 * p[None], -1)  # (M, N)
        u = inner_u / np.clip(dd[None], 1e-9, 1e9)  # (M, N)
        v1p = - points[:, None] + _lines[:, 0][None]  # (M, N, 2)
        d = v1p + u[:, :, None] * p[None]  # (M, N ,2)
        pline = np.sum(d ** 2, -1)  # (M, N)
        return u.transpose(), pline.transpose()  # (N, M)

    lambda_a12, pqa12 = get_u_line(lines1, lines2[:, 0])
    lambda_b12, pqb12 = get_u_line(lines1, lines2[:, 1])
    dis12 = np.concatenate([pqa12[:, :, None], pqb12[:, :, None]], -1)
    dist12 = np.amax(dis12, -1)

    lambda_a21, pqa21 = get_u_line(lines2, lines1[:, 0])
    lambda_b21, pqb21 = get_u_line(lines2, lines1[:, 1])
    dis21 = np.concatenate([pqa21[:, :, None], pqb21[:, :, None]], -1)
    dist21 = np.amax(dis21, -1)

    # Symmetric distance: the smaller of the two directed distances.
    distmin = np.concatenate([dist12[:, :, None], np.transpose(dist21)[:, :, None]], -1)
    distm = np.amin(distmin, -1)

    mask = distm < threshold
    if (lines1 == lines2).all():
        # Self-comparison: ignore each line's zero-distance match with itself.
        # `np.bool` was removed in NumPy 1.24; use the builtin `bool` dtype.
        diag = np.eye(len(mask)).astype(bool)
        mask[diag] = False

    k = 0
    hit = np.zeros((len(mask),)).astype(bool)
    lambda_a = lambda_a12  # each row means all u for one line
    lambda_b = lambda_b12
    while k < len(mask) - 2:
        if hit[k]:
            k += 1
            continue
        else:
            cline = mask[k, k+1:]
            cline_ab = np.concatenate([lambda_a[k, k+1:][None], lambda_b[k, k+1:][None]], 0)
            cline_a = np.amin(cline_ab, 0)
            cline_b = np.amax(cline_ab, 0)

            # Interval classification of later lines against line k.
            cover = (cline_a > 0) & (cline_b < 1)
            be_covered = (cline_a < 0) & (cline_b > 1)
            overlap1 = (cline_a < 0) & (cline_b > overlap_fraction)
            overlap2 = (cline_a < 1 - overlap_fraction) & (cline_b > 1)
            overlap = overlap1 | overlap2

            if is_cover:
                remove = cover
            else:
                remove = np.zeros_like(cover).astype(bool)
            if is_becover:
                remove = remove | be_covered
            if overlap_fraction > 0:
                remove = remove | overlap

            hit[k+1:] = hit[k+1:] | (cline & remove)
            k += 1

    drop = ~hit
    return lines[drop], scores[drop]
def acc_postprocess_torch(gtlines, lines, scores, threshold=1, is_cover=True, is_becover=True, overlap_fraction=0):
    """Torch counterpart of acc_postprocess(): drop lines duplicating earlier ones.

    Same algorithm as the numpy version, operating on tensors so it can run
    on whatever device the inputs live on.

    :param gtlines: (N, 2, 2) reference endpoints (often the same tensor as `lines`).
    :param lines: (N, 2, 2) candidate line endpoints.
    :param scores: (N,) per-line scores, filtered alongside `lines`.
    :return: (kept_lines, kept_scores).
    """
    lines1 = gtlines.clone()
    lines2 = lines.clone()

    def get_u_line(_lines, points):
        """N lines to M points u score and l2 distance"""
        p = _lines[:, 1] - _lines[:, 0]  # (N, 2)
        dd = (p ** 2).sum(-1)  # (N,)
        pv1 = points[:, None] - _lines[:, 0][None]  # (M, N, 2)
        inner_u = torch.sum(pv1 * p[None], -1)  # (M, N)
        u = inner_u / dd[None].clamp(1e-9, 1e9)  # (M, N)
        v1p = - points[:, None] + _lines[:, 0][None]  # (M, N, 2)
        d = v1p + u[:, :, None] * p[None]  # (M, N ,2)
        pline = (d ** 2).sum(-1)  # (M, N)
        return u.transpose(0, 1), pline.transpose(0, 1)  # (N, M)

    lambda_a12, pqa12 = get_u_line(lines1, lines2[:, 0])
    lambda_b12, pqb12 = get_u_line(lines1, lines2[:, 1])
    dis12 = torch.cat([pqa12[:, :, None], pqb12[:, :, None]], -1)
    dist12, _ = torch.max(dis12, -1)

    lambda_a21, pqa21 = get_u_line(lines2, lines1[:, 0])
    lambda_b21, pqb21 = get_u_line(lines2, lines1[:, 1])
    dis21 = torch.cat([pqa21[:, :, None], pqb21[:, :, None]], -1)
    dist21, _ = torch.max(dis21, -1)

    # Symmetric distance: the smaller of the two directed distances.
    distmin = torch.cat([dist12[:, :, None], dist21.transpose(0, 1)[:, :, None]], -1)
    distm, _ = torch.min(distmin, -1)

    mask = distm < threshold
    if (lines1 == lines2).all():
        # Self-comparison: ignore each line's zero-distance match with itself.
        # Create the diagonal on the same device as `mask`; a CPU eye() would
        # fail to index a CUDA tensor when inputs live on the GPU.
        diag = torch.eye(len(mask), device=mask.device).bool()
        mask[diag] = False

    k = 0
    hit = torch.zeros((len(mask),), device=mask.device).bool()
    lambda_a = lambda_a12  # each row means all u for one line
    lambda_b = lambda_b12
    while k < len(mask) - 2:
        if hit[k]:
            k += 1
            continue
        else:
            cline = mask[k, k+1:]
            cline_ab = torch.cat([lambda_a[k, k+1:][None], lambda_b[k, k+1:][None]], 0)
            cline_a, _ = torch.min(cline_ab, 0)
            cline_b, _ = torch.max(cline_ab, 0)

            # Interval classification of later lines against line k.
            cover = (cline_a > 0) & (cline_b < 1)
            be_covered = (cline_a < 0) & (cline_b > 1)
            overlap1 = (cline_a < 0) & (cline_b > overlap_fraction)
            overlap2 = (cline_a < 1 - overlap_fraction) & (cline_b > 1)
            overlap = overlap1 | overlap2

            if is_cover:
                remove = cover
            else:
                remove = torch.zeros_like(cover).bool()
            if is_becover:
                remove = remove | be_covered
            if overlap_fraction > 0:
                remove = remove | overlap

            hit[k+1:] = hit[k+1:] | (cline & remove)
            k += 1

    drop = ~hit
    return lines[drop], scores[drop]
| 32.361789 | 115 | 0.495415 | import numpy as np
import torch
def pline(x1, y1, x2, y2, x, y):
px = x2 - x1
py = y2 - y1
dd = px * px + py * py
u = ((x - x1) * px + (y - y1) * py) / max(1e-9, float(dd))
dx = x1 + u * px - x
dy = y1 + u * py - y
return dx * dx + dy * dy
def psegment(x1, y1, x2, y2, x, y):
px = x2 - x1
py = y2 - y1
dd = px * px + py * py
u = max(min(((x - x1) * px + (y - y1) * py) / float(dd), 1), 0)
dx = x1 + u * px - x
dy = y1 + u * py - y
return dx * dx + dy * dy
def plambda(x1, y1, x2, y2, x, y):
px = x2 - x1
py = y2 - y1
dd = px * px + py * py
return ((x - x1) * px + (y - y1) * py) / max(1e-9, float(dd))
def postprocess(lines, scores, threshold=0.01, tol=1e9, do_clip=False):
nlines, nscores = [], []
for (p, q), score in zip(lines, scores):
start, end = 0, 1
for a, b in nlines:
if (
min(
max(pline(*p, *q, *a), pline(*p, *q, *b)),
max(pline(*a, *b, *p), pline(*a, *b, *q)),
)
> threshold ** 2
):
continue
lambda_a = plambda(*p, *q, *a)
lambda_b = plambda(*p, *q, *b)
if lambda_a > lambda_b:
lambda_a, lambda_b = lambda_b, lambda_a
lambda_a -= tol
lambda_b += tol
if start < lambda_a and lambda_b < end:
continue
if lambda_b < start or lambda_a > end:
continue
if lambda_a <= start and end <= lambda_b:
start = 10
break
if lambda_a <= start and start <= lambda_b:
start = lambda_b
if lambda_a <= end and end <= lambda_b:
end = lambda_a
if start >= end:
break
if start >= end:
continue
nlines.append(np.array([p + (q - p) * start, p + (q - p) * end]))
nscores.append(score)
return np.array(nlines), np.array(nscores)
def acc_postprocess(gtlines, lines, scores, threshold=1, is_cover=True, is_becover=True, overlap_fraction=0):
lines1 = gtlines.copy()
lines2 = lines.copy()
def get_u_line(_lines, points):
p = _lines[:, 1] - _lines[:, 0]
dd = np.sum(p ** 2, -1)
pv1 = points[:, None] - _lines[:, 0][None]
inner_u = np.sum(pv1 * p[None], -1)
u = inner_u / np.clip(dd[None], 1e-9, 1e9)
v1p = - points[:, None] + _lines[:, 0][None]
d = v1p + u[:, :, None] * p[None]
pline = np.sum(d ** 2, -1)
return u.transpose(), pline.transpose()
lambda_a12, pqa12 = get_u_line(lines1, lines2[:, 0])
lambda_b12, pqb12 = get_u_line(lines1, lines2[:, 1])
dis12 = np.concatenate([pqa12[:, :, None], pqb12[:, :, None]], -1)
dist12 = np.amax(dis12, -1)
lambda_a21, pqa21 = get_u_line(lines2, lines1[:, 0])
lambda_b21, pqb21 = get_u_line(lines2, lines1[:, 1])
dis21 = np.concatenate([pqa21[:, :, None], pqb21[:, :, None]], -1)
dist21 = np.amax(dis21, -1)
distmin = np.concatenate([dist12[:, :, None], np.transpose(dist21)[:, :, None]], -1)
distm = np.amin(distmin, -1)
mask = distm < threshold
if (lines1 == lines2).all():
diag = np.eye(len(mask)).astype(np.bool)
mask[diag] = False
k = 0
hit = np.zeros((len(mask),)).astype(np.bool)
lambda_a = lambda_a12
lambda_b = lambda_b12
while k < len(mask) - 2:
if hit[k]:
k += 1
continue
else:
cline = mask[k, k+1:]
cline_ab = np.concatenate([lambda_a[k, k+1:][None], lambda_b[k, k+1:][None]], 0)
cline_a = np.amin(cline_ab, 0)
cline_b = np.amax(cline_ab, 0)
cover = (cline_a > 0) & (cline_b < 1)
be_covered = (cline_a < 0) & (cline_b > 1)
overlap1 = (cline_a < 0) & (cline_b > overlap_fraction)
overlap2 = (cline_a < 1 - overlap_fraction) & (cline_b > 1)
overlap = overlap1 | overlap2
if is_cover:
remove = cover
else:
remove = np.zeros_like(cover).astype(np.bool)
if is_becover:
remove = remove | be_covered
if overlap_fraction > 0:
remove = remove | overlap
hit[k+1:] = hit[k+1:] | (cline & remove)
k += 1
drop = ~hit
return lines[drop], scores[drop]
def acc_postprocess_torch(gtlines, lines, scores, threshold=1, is_cover=True, is_becover=True, overlap_fraction=0):
lines1 = gtlines.clone()
lines2 = lines.clone()
def get_u_line(_lines, points):
p = _lines[:, 1] - _lines[:, 0]
dd = (p ** 2).sum(-1)
pv1 = points[:, None] - _lines[:, 0][None]
inner_u = torch.sum(pv1 * p[None], -1)
u = inner_u / dd[None].clamp(1e-9, 1e9)
v1p = - points[:, None] + _lines[:, 0][None]
d = v1p + u[:, :, None] * p[None]
pline = (d ** 2).sum(-1)
return u.transpose(0, 1), pline.transpose(0, 1)
lambda_a12, pqa12 = get_u_line(lines1, lines2[:, 0])
lambda_b12, pqb12 = get_u_line(lines1, lines2[:, 1])
dis12 = torch.cat([pqa12[:, :, None], pqb12[:, :, None]], -1)
dist12, _ = torch.max(dis12, -1)
lambda_a21, pqa21 = get_u_line(lines2, lines1[:, 0])
lambda_b21, pqb21 = get_u_line(lines2, lines1[:, 1])
dis21 = torch.cat([pqa21[:, :, None], pqb21[:, :, None]], -1)
dist21, _ = torch.max(dis21, -1)
distmin = torch.cat([dist12[:, :, None], dist21.transpose(0, 1)[:, :, None]], -1)
distm, _ = torch.min(distmin, -1)
mask = distm < threshold
if (lines1 == lines2).all():
diag = torch.eye(len(mask)).bool()
mask[diag] = False
k = 0
hit = torch.zeros((len(mask),), device=mask.device).bool()
lambda_a = lambda_a12
lambda_b = lambda_b12
while k < len(mask) - 2:
if hit[k]:
k += 1
continue
else:
cline = mask[k, k+1:]
cline_ab = torch.cat([lambda_a[k, k+1:][None], lambda_b[k, k+1:][None]], 0)
cline_a, _ = torch.min(cline_ab, 0)
cline_b, _ = torch.max(cline_ab, 0)
cover = (cline_a > 0) & (cline_b < 1)
be_covered = (cline_a < 0) & (cline_b > 1)
overlap1 = (cline_a < 0) & (cline_b > overlap_fraction)
overlap2 = (cline_a < 1 - overlap_fraction) & (cline_b > 1)
overlap = overlap1 | overlap2
if is_cover:
remove = cover
else:
remove = torch.zeros_like(cover).bool()
if is_becover:
remove = remove | be_covered
if overlap_fraction > 0:
remove = remove | overlap
hit[k+1:] = hit[k+1:] | (cline & remove)
k += 1
drop = ~hit
return lines[drop], scores[drop]
| true | true |
1c3026900b6f2bd8aa4de380eff71a596a25b011 | 950 | py | Python | forms.py | HarrySng/itoi | b2b23dea542299c3c0156b5e32994a154323b881 | [
"MIT"
] | null | null | null | forms.py | HarrySng/itoi | b2b23dea542299c3c0156b5e32994a154323b881 | [
"MIT"
] | null | null | null | forms.py | HarrySng/itoi | b2b23dea542299c3c0156b5e32994a154323b881 | [
"MIT"
] | null | null | null | from flask_wtf import FlaskForm, Form
from flask_wtf.file import FileField, FileRequired
from wtforms import StringField, IntegerField, TextAreaField, DateTimeField, BooleanField, SelectField, PasswordField, SubmitField
from wtforms.validators import DataRequired, Optional
class loginForm(FlaskForm):
    """Login form: user id, password, and organization key (all required)."""
    mngrID = StringField('Enter User ID:', validators=[DataRequired()])
    pswd = PasswordField('Enter password:', validators=[DataRequired()])
    key = StringField('Enter Org Key', validators=[DataRequired()])
    submit = SubmitField('Login')
class signupForm(FlaskForm):
    """Manager registration form.

    NOTE(review): ``cpswd`` ("confirm password") carries no
    ``EqualTo('pswd')`` validator here, so the password match is presumably
    checked in the view -- confirm.
    """
    mngrID = StringField('Enter Manager ID', validators=[DataRequired()])
    pswd = PasswordField('Enter password:', validators=[DataRequired()])
    cpswd = PasswordField('Confirm password:', validators=[DataRequired()])
    emailID = StringField('Enter Email ID', validators=[DataRequired()])
    key = StringField('Enter Org Key', validators=[DataRequired()])
    submit = SubmitField('Register')
from flask_wtf.file import FileField, FileRequired
from wtforms import StringField, IntegerField, TextAreaField, DateTimeField, BooleanField, SelectField, PasswordField, SubmitField
from wtforms.validators import DataRequired, Optional
class loginForm(FlaskForm):
mngrID = StringField('Enter User ID:', validators=[DataRequired()])
pswd = PasswordField('Enter password:', validators=[DataRequired()])
key = StringField('Enter Org Key', validators=[DataRequired()])
submit = SubmitField('Login')
class signupForm(FlaskForm):
mngrID = StringField('Enter Manager ID', validators=[DataRequired()])
pswd = PasswordField('Enter password:', validators=[DataRequired()])
cpswd = PasswordField('Confirm password:', validators=[DataRequired()])
emailID = StringField('Enter Email ID', validators=[DataRequired()])
key = StringField('Enter Org Key', validators=[DataRequired()])
submit = SubmitField('Register') | true | true |
1c30272f6c0631c8d6b19553eea6f9d98f4e42c1 | 1,635 | py | Python | data_loader.py | Deepika1108/Facial-Expression-Recognition | 7e37c0a5e69d1e8d5af429af4a43c70371c9cecf | [
"MIT"
] | 87 | 2019-12-02T03:21:08.000Z | 2022-02-28T05:15:33.000Z | data_loader.py | Deepika1108/Facial-Expression-Recognition | 7e37c0a5e69d1e8d5af429af4a43c70371c9cecf | [
"MIT"
] | 12 | 2019-12-14T05:56:01.000Z | 2021-11-29T02:51:32.000Z | data_loader.py | Deepika1108/Facial-Expression-Recognition | 7e37c0a5e69d1e8d5af429af4a43c70371c9cecf | [
"MIT"
] | 23 | 2019-12-16T04:56:39.000Z | 2022-03-14T03:15:52.000Z | import torch
import os
from PIL import Image
import random
import numpy as np
import pickle
import torchvision.transforms as transforms
from .ckplus_res import CKPlusResDataset
from .mmi_res import MMIResDataset
from .affectnet import AffectNetDataset
from .base_dataset import BaseDataset
def create_dataloader(opt):
    """Build and initialize a DataLoader for the given options object."""
    loader = DataLoader()
    loader.initialize(opt)
    return loader
class DataLoader:
    """Wraps a dataset implementation (chosen from opt.data_root) in a
    torch.utils.data.DataLoader and caps iteration at opt.max_dataset_size
    samples."""

    def name(self):
        """Loader name derived from the wrapped dataset's name."""
        return self.dataset.name() + "_Loader"

    def create_dataset(self):
        """Instantiate and initialize the dataset implementation whose name
        matches the basename of opt.data_root."""
        # specify which dataset to load here
        loaded_dataset = os.path.basename(self.opt.data_root.strip('/'))
        if 'CK' in loaded_dataset:
            dataset = CKPlusResDataset()
        elif 'MMI' in loaded_dataset:
            dataset = MMIResDataset()
        elif 'Affect' in loaded_dataset:
            dataset = AffectNetDataset()
        else:
            dataset = BaseDataset()
        dataset.initialize(self.opt)
        return dataset

    # Backward-compatible alias for the original (misspelled) method name.
    create_datase = create_dataset

    def initialize(self, opt):
        """Store options, build the dataset, and wrap it in a torch DataLoader."""
        self.opt = opt
        self.dataset = self.create_dataset()
        self.dataloader = torch.utils.data.DataLoader(
            self.dataset,
            batch_size=opt.batch_size,
            shuffle=not opt.serial_batches,
            num_workers=int(opt.n_threads)
        )

    def __len__(self):
        """Number of samples, capped at opt.max_dataset_size."""
        return min(len(self.dataset), self.opt.max_dataset_size)

    def __iter__(self):
        """Yield batches, stopping once max_dataset_size samples were produced."""
        for i, data in enumerate(self.dataloader):
            if i * self.opt.batch_size >= self.opt.max_dataset_size:
                break
            yield data
import os
from PIL import Image
import random
import numpy as np
import pickle
import torchvision.transforms as transforms
from .ckplus_res import CKPlusResDataset
from .mmi_res import MMIResDataset
from .affectnet import AffectNetDataset
from .base_dataset import BaseDataset
def create_dataloader(opt):
data_loader = DataLoader()
data_loader.initialize(opt)
return data_loader
class DataLoader:
def name(self):
return self.dataset.name() + "_Loader"
def create_datase(self):
loaded_dataset = os.path.basename(self.opt.data_root.strip('/'))
if 'CK' in loaded_dataset:
dataset = CKPlusResDataset()
elif 'MMI' in loaded_dataset:
dataset = MMIResDataset()
elif 'Affect' in loaded_dataset:
dataset = AffectNetDataset()
else:
dataset = BaseDataset()
dataset.initialize(self.opt)
return dataset
def initialize(self, opt):
self.opt = opt
self.dataset = self.create_datase()
self.dataloader = torch.utils.data.DataLoader(
self.dataset,
batch_size=opt.batch_size,
shuffle=not opt.serial_batches,
num_workers=int(opt.n_threads)
)
def __len__(self):
return min(len(self.dataset), self.opt.max_dataset_size)
def __iter__(self):
for i, data in enumerate(self.dataloader):
if i * self.opt.batch_size >= self.opt.max_dataset_size:
break
yield data | true | true |
1c30273cd6a8c1868c1f8b91a8863bd016068a59 | 1,130 | py | Python | backend/storage.py | lakshbhasin/VoterValidation | 2249f387046b5039b650e3b5c63b9d3d3b7aea8f | [
"Apache-2.0"
] | 6 | 2018-11-02T14:10:54.000Z | 2020-07-14T02:38:03.000Z | backend/storage.py | lakshbhasin/VoterValidation | 2249f387046b5039b650e3b5c63b9d3d3b7aea8f | [
"Apache-2.0"
] | 2 | 2021-06-10T20:56:46.000Z | 2021-06-10T20:58:54.000Z | backend/storage.py | lakshbhasin/VoterValidation | 2249f387046b5039b650e3b5c63b9d3d3b7aea8f | [
"Apache-2.0"
] | null | null | null | """
Adapted from
http://condopilot.com/blog/web/how-setup-gzip-compressor-and-aws-s3-django/
"""
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from backend.settings import STATICFILES_LOCATION
class CachedS3BotoStorage(S3BotoStorage):
    """
    S3 storage backend that caches files locally (on the same server as the
    Django application), too (so we can see what static files have changed
    and whether a new compressed version needs to be created).
    """
    # S3 key prefix under which the static files are stored.
    location = STATICFILES_LOCATION

    def __init__(self, *args, **kwargs):
        super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
        # Local mirror used by django-compressor to track file changes.
        self.local_storage = get_storage_class(
            "compressor.storage.CompressorFileStorage")()

    def save(self, name, content):
        """
        Save to S3 and mirror the file locally.

        A workaround to save non-gzipped content locally: the S3 save
        presumably replaces content.file with a gzipped stream (per the
        class docstring), so the original file object is captured first
        and restored before writing the local copy -- confirm against the
        S3BotoStorage implementation in use.
        """
        non_gzipped_file_content = content.file
        name = super(CachedS3BotoStorage, self).save(name, content)
        content.file = non_gzipped_file_content
        # NOTE(review): _save is a non-public API of the local storage backend.
        self.local_storage._save(name, content)
        return name
| 34.242424 | 75 | 0.709735 |
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from backend.settings import STATICFILES_LOCATION
class CachedS3BotoStorage(S3BotoStorage):
location = STATICFILES_LOCATION
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
non_gzipped_file_content = content.file
name = super(CachedS3BotoStorage, self).save(name, content)
content.file = non_gzipped_file_content
self.local_storage._save(name, content)
return name
| true | true |
1c3028185d8c41fdc39aa16889e8966955df26ad | 730 | py | Python | anees/migrations/0038_auto_20200928_0507.py | ashish2020kashyap/cessini | 9713fd76d2e31a95266ec69da2abc98424a46e52 | [
"MIT"
] | null | null | null | anees/migrations/0038_auto_20200928_0507.py | ashish2020kashyap/cessini | 9713fd76d2e31a95266ec69da2abc98424a46e52 | [
"MIT"
] | null | null | null | anees/migrations/0038_auto_20200928_0507.py | ashish2020kashyap/cessini | 9713fd76d2e31a95266ec69da2abc98424a46e52 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.1 on 2020-09-28 05:07
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: retargets CampMail's `camp` and
    # `customer_mail` relations.

    dependencies = [
        ('anees', '0037_auto_20200928_0504'),
    ]

    operations = [
        migrations.AlterField(
            model_name='campmail',
            name='camp',
            # NOTE(review): null=True is documented as having no effect on
            # ManyToManyField in Django -- confirm intent.
            field=models.ManyToManyField(blank=True, null=True, related_name='emailings', to='anees.Email'),
        ),
        migrations.AlterField(
            model_name='campmail',
            name='customer_mail',
            # SET_NULL keeps CampMail rows when the referenced campaign is deleted.
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='customermail', to='anees.campaign'),
        ),
    ]
| 29.2 | 143 | 0.634247 |
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('anees', '0037_auto_20200928_0504'),
]
operations = [
migrations.AlterField(
model_name='campmail',
name='camp',
field=models.ManyToManyField(blank=True, null=True, related_name='emailings', to='anees.Email'),
),
migrations.AlterField(
model_name='campmail',
name='customer_mail',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='customermail', to='anees.campaign'),
),
]
| true | true |
1c3028b675a02c0d60e802724ad657e807832f03 | 11,058 | py | Python | msgraph/cli/command_modules/groups/azext_groups/vendored_sdks/groups/operations/_groups_onenote_section_groups_sections_pages_parent_notebook_sections_operations.py | microsoftgraph/msgraph-cli-archived | 489f70bf4ede1ce67b84bfb31e66da3e4db76062 | [
"MIT"
] | null | null | null | msgraph/cli/command_modules/groups/azext_groups/vendored_sdks/groups/operations/_groups_onenote_section_groups_sections_pages_parent_notebook_sections_operations.py | microsoftgraph/msgraph-cli-archived | 489f70bf4ede1ce67b84bfb31e66da3e4db76062 | [
"MIT"
] | 22 | 2022-03-29T22:54:37.000Z | 2022-03-29T22:55:27.000Z | msgraph/cli/command_modules/groups/azext_groups/vendored_sdks/groups/operations/_groups_onenote_section_groups_sections_pages_parent_notebook_sections_operations.py | microsoftgraph/msgraph-cli-archived | 489f70bf4ede1ce67b84bfb31e66da3e4db76062 | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class GroupsOnenoteSectionGroupsSectionsPagesParentNotebookSectionsOperations(object):
"""GroupsOnenoteSectionGroupsSectionsPagesParentNotebookSectionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~groups.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
    def __init__(self, client, config, serializer, deserializer):
        # Store the pipeline client, configuration, and (de)serializers shared
        # by every operation method in this group.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    # NOTE: This body is AutoRest-generated; regenerating the client will
    # overwrite manual edits.
    def copy_to_notebook(
        self,
        group_id,  # type: str
        section_group_id,  # type: str
        onenote_section_id,  # type: str
        onenote_page_id,  # type: str
        onenote_section_id1,  # type: str
        body,  # type: "models.Paths1Ku8ThmGroupsGroupIdOnenoteSectiongroupsSectiongroupIdSectionsOnenotesectionIdPagesOnenotepageIdParentnotebookSectionsOnenotesectionId1MicrosoftGraphCopytonotebookPostRequestbodyContentApplicationJsonSchema
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.MicrosoftGraphOnenoteOperation"
        """Invoke action copyToNotebook.

        Invoke action copyToNotebook.

        :param group_id: key: id of group.
        :type group_id: str
        :param section_group_id: key: id of sectionGroup.
        :type section_group_id: str
        :param onenote_section_id: key: id of onenoteSection.
        :type onenote_section_id: str
        :param onenote_page_id: key: id of onenotePage.
        :type onenote_page_id: str
        :param onenote_section_id1: key: id of onenoteSection.
        :type onenote_section_id1: str
        :param body: Action parameters.
        :type body: ~groups.models.Paths1Ku8ThmGroupsGroupIdOnenoteSectiongroupsSectiongroupIdSectionsOnenotesectionIdPagesOnenotepageIdParentnotebookSectionsOnenotesectionId1MicrosoftGraphCopytonotebookPostRequestbodyContentApplicationJsonSchema
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: MicrosoftGraphOnenoteOperation, or the result of cls(response)
        :rtype: ~groups.models.MicrosoftGraphOnenoteOperation
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphOnenoteOperation"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self.copy_to_notebook.metadata['url']  # type: ignore
        path_format_arguments = {
            'group-id': self._serialize.url("group_id", group_id, 'str'),
            'sectionGroup-id': self._serialize.url("section_group_id", section_group_id, 'str'),
            'onenoteSection-id': self._serialize.url("onenote_section_id", onenote_section_id, 'str'),
            'onenotePage-id': self._serialize.url("onenote_page_id", onenote_page_id, 'str'),
            'onenoteSection-id1': self._serialize.url("onenote_section_id1", onenote_section_id1, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # Serialize the action parameters as the JSON request body and POST.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'Paths1Ku8ThmGroupsGroupIdOnenoteSectiongroupsSectiongroupIdSectionsOnenotesectionIdPagesOnenotepageIdParentnotebookSectionsOnenotesectionId1MicrosoftGraphCopytonotebookPostRequestbodyContentApplicationJsonSchema')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # Anything other than 200 is surfaced as an HttpResponseError with the
        # deserialized OData error payload attached.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('MicrosoftGraphOnenoteOperation', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    copy_to_notebook.metadata = {'url': '/groups/{group-id}/onenote/sectionGroups/{sectionGroup-id}/sections/{onenoteSection-id}/pages/{onenotePage-id}/parentNotebook/sections/{onenoteSection-id1}/microsoft.graph.copyToNotebook'}  # type: ignore
def copy_to_section_group(
self,
group_id, # type: str
section_group_id, # type: str
onenote_section_id, # type: str
onenote_page_id, # type: str
onenote_section_id1, # type: str
body, # type: "models.Paths17T903XGroupsGroupIdOnenoteSectiongroupsSectiongroupIdSectionsOnenotesectionIdPagesOnenotepageIdParentnotebookSectionsOnenotesectionId1MicrosoftGraphCopytosectiongroupPostRequestbodyContentApplicationJsonSchema"
**kwargs # type: Any
):
# type: (...) -> "models.MicrosoftGraphOnenoteOperation"
"""Invoke action copyToSectionGroup.
Invoke action copyToSectionGroup.
:param group_id: key: id of group.
:type group_id: str
:param section_group_id: key: id of sectionGroup.
:type section_group_id: str
:param onenote_section_id: key: id of onenoteSection.
:type onenote_section_id: str
:param onenote_page_id: key: id of onenotePage.
:type onenote_page_id: str
:param onenote_section_id1: key: id of onenoteSection.
:type onenote_section_id1: str
:param body: Action parameters.
:type body: ~groups.models.Paths17T903XGroupsGroupIdOnenoteSectiongroupsSectiongroupIdSectionsOnenotesectionIdPagesOnenotepageIdParentnotebookSectionsOnenotesectionId1MicrosoftGraphCopytosectiongroupPostRequestbodyContentApplicationJsonSchema
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MicrosoftGraphOnenoteOperation, or the result of cls(response)
:rtype: ~groups.models.MicrosoftGraphOnenoteOperation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphOnenoteOperation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.copy_to_section_group.metadata['url'] # type: ignore
path_format_arguments = {
'group-id': self._serialize.url("group_id", group_id, 'str'),
'sectionGroup-id': self._serialize.url("section_group_id", section_group_id, 'str'),
'onenoteSection-id': self._serialize.url("onenote_section_id", onenote_section_id, 'str'),
'onenotePage-id': self._serialize.url("onenote_page_id", onenote_page_id, 'str'),
'onenoteSection-id1': self._serialize.url("onenote_section_id1", onenote_section_id1, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'Paths17T903XGroupsGroupIdOnenoteSectiongroupsSectiongroupIdSectionsOnenotesectionIdPagesOnenotepageIdParentnotebookSectionsOnenotesectionId1MicrosoftGraphCopytosectiongroupPostRequestbodyContentApplicationJsonSchema')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('MicrosoftGraphOnenoteOperation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
copy_to_section_group.metadata = {'url': '/groups/{group-id}/onenote/sectionGroups/{sectionGroup-id}/sections/{onenoteSection-id}/pages/{onenotePage-id}/parentNotebook/sections/{onenoteSection-id1}/microsoft.graph.copyToSectionGroup'} # type: ignore
| 54.205882 | 268 | 0.714324 |
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
if TYPE_CHECKING:
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class GroupsOnenoteSectionGroupsSectionsPagesParentNotebookSectionsOperations(object):
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def copy_to_notebook(
self,
group_id,
section_group_id,
onenote_section_id,
onenote_page_id,
onenote_section_id1,
body,
**kwargs
):
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
url = self.copy_to_notebook.metadata['url']
path_format_arguments = {
'group-id': self._serialize.url("group_id", group_id, 'str'),
'sectionGroup-id': self._serialize.url("section_group_id", section_group_id, 'str'),
'onenoteSection-id': self._serialize.url("onenote_section_id", onenote_section_id, 'str'),
'onenotePage-id': self._serialize.url("onenote_page_id", onenote_page_id, 'str'),
'onenoteSection-id1': self._serialize.url("onenote_section_id1", onenote_section_id1, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
header_parameters = {}
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {}
body_content = self._serialize.body(body, 'Paths1Ku8ThmGroupsGroupIdOnenoteSectiongroupsSectiongroupIdSectionsOnenotesectionIdPagesOnenotepageIdParentnotebookSectionsOnenotesectionId1MicrosoftGraphCopytonotebookPostRequestbodyContentApplicationJsonSchema')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('MicrosoftGraphOnenoteOperation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
copy_to_notebook.metadata = {'url': '/groups/{group-id}/onenote/sectionGroups/{sectionGroup-id}/sections/{onenoteSection-id}/pages/{onenotePage-id}/parentNotebook/sections/{onenoteSection-id1}/microsoft.graph.copyToNotebook'}
def copy_to_section_group(
self,
group_id,
section_group_id,
onenote_section_id,
onenote_page_id,
onenote_section_id1,
body,
**kwargs
):
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
url = self.copy_to_section_group.metadata['url']
path_format_arguments = {
'group-id': self._serialize.url("group_id", group_id, 'str'),
'sectionGroup-id': self._serialize.url("section_group_id", section_group_id, 'str'),
'onenoteSection-id': self._serialize.url("onenote_section_id", onenote_section_id, 'str'),
'onenotePage-id': self._serialize.url("onenote_page_id", onenote_page_id, 'str'),
'onenoteSection-id1': self._serialize.url("onenote_section_id1", onenote_section_id1, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
header_parameters = {}
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {}
body_content = self._serialize.body(body, 'Paths17T903XGroupsGroupIdOnenoteSectiongroupsSectiongroupIdSectionsOnenotesectionIdPagesOnenotepageIdParentnotebookSectionsOnenotesectionId1MicrosoftGraphCopytosectiongroupPostRequestbodyContentApplicationJsonSchema')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('MicrosoftGraphOnenoteOperation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
copy_to_section_group.metadata = {'url': '/groups/{group-id}/onenote/sectionGroups/{sectionGroup-id}/sections/{onenoteSection-id}/pages/{onenotePage-id}/parentNotebook/sections/{onenoteSection-id1}/microsoft.graph.copyToSectionGroup'}
| true | true |
1c3028bb8bd3f560b3d10a49db839605d65b5e44 | 4,631 | py | Python | aptl3/tests/test_am.py | matteoterruzzi/aptl3 | 680ab58ffa79d0eee293729d36f677a588350519 | [
"MIT"
] | null | null | null | aptl3/tests/test_am.py | matteoterruzzi/aptl3 | 680ab58ffa79d0eee293729d36f677a588350519 | [
"MIT"
] | null | null | null | aptl3/tests/test_am.py | matteoterruzzi/aptl3 | 680ab58ffa79d0eee293729d36f677a588350519 | [
"MIT"
] | null | null | null | import itertools
import time
from typing import Any
from ..am import ActorSystem, MapActor, Actor
def double(x):
return 2 * x
def test_empty_system():
with ActorSystem(maxsize=0, use_multiprocessing=False) as s:
s.start()
print(s.status)
print(s.status)
def test_simple_actor():
with ActorSystem(maxsize=0, use_multiprocessing=False) as s:
s.add_thread('str', MapActor(str, 'main'))
s.add_mailbox('main')
s.start()
s.tell('str', 42)
out = s.ask('main')
assert out == '42'
print(s.status)
print(s.status)
def test_mixed_actors():
n = 100
with ActorSystem(maxsize=n, use_multiprocessing=True) as s:
s.add_process("double", MapActor(double, "str"), pool=2)
s.add_thread("str", MapActor(str, "main"), pool=2)
s.add_mailbox("main")
s.start()
for _ in range(3):
for __ in range(n):
s.tell("double", 21)
out = [s.ask("main") for _ in range(n)]
assert out == ["42"] * n, out
assert s.pending == 0
def test_actor_system_exit():
n = 100
with ActorSystem(maxsize=0) as s:
s.add_thread("str", MapActor(str, "sleep"))
s.add_thread("sleep", MapActor(lambda x: [x, time.sleep(0.01)][0], "main"), pool=10)
s.add_collector("main")
s.start()
for _ in range(n):
s.tell("str", 42)
assert not s.is_alive, repr(s)
assert s.pending == 0, s.status
back = 0
for got in s.ask_available('main'):
assert got == "42"
back += 1
assert back == n, (back, n, s.status)
print(s.status)
def test_actor_system_stop_exit():
delay = 0.01
n = 100
_tic = time.perf_counter()
with ActorSystem(maxsize=0) as s:
s.add_thread("sleep", MapActor(time.sleep, "main"))
s.add_collector("main")
s.start()
for _ in range(n):
s.tell("sleep", delay)
assert s.ask("main") is None # will wait 1 * delay
s.stop()
print(s.status)
print() # this will appear in the backtrace of s.__exit__
# will wait 1 * delay while finishing the job that was started before s.stop()
assert s.pending == 0, s.status
# 'main' collector is expected to contain 0 or 1 msg at this point.
print(s.status)
_elapsed = time.perf_counter() - _tic
print(f'elapsed: {_elapsed*1000:.0f}ms')
assert _elapsed < 10 * delay, _elapsed # NOTE: 2 * delay is expected; 4 * delay should be enough on a slow system.
def test_actor_system_join_pending():
for test_i in range(100):
n = 100
with ActorSystem(maxsize=0) as s:
s.add_thread('input', MapActor(str, 'main'))
s.add_collector('main')
s.start()
for i in range(n):
s.tell('input', i)
if test_i % 2 == 0:
s.finish() # It should have no effect. Let's skip it in half of the tests.
s.join_pending()
print('before stop', s.status)
s.stop()
print('after exit ', s.status)
assert list(map(s.ask, itertools.repeat('main', n))) == list(map(str, range(n)))
def test_actor_system_request_finish_batch():
n = 100
class BatchingActor(Actor):
def receive(self, msg: Any):
_batch = list(itertools.chain(
[msg],
self.ask_available(None, block=not self.finish_requested),
))
return 'main', _batch
with ActorSystem(maxsize=0) as s:
s.add_thread('batch', BatchingActor())
s.add_collector('main')
s.start()
for i in range(n):
s.tell('batch', i)
print('before finish:', s.status)
s.finish()
print('before exit:', s.status)
print()
print('exited:', s.status)
num_batches = 0
concatenated = []
while len(concatenated) < n:
batch = s.ask('main')
print('batch:', batch)
assert 1 <= len(batch) <= n, batch
concatenated.extend(batch)
num_batches += 1
assert concatenated == list(range(n)), concatenated
assert num_batches == 1 # finish is requested after all `tell`
if __name__ == '__main__':
import multiprocessing
multiprocessing.set_start_method('spawn') # fork is default on unix, but windows uses spawn, which pickles actors.
test_empty_system()
test_simple_actor()
test_actor_system_request_finish_batch()
test_actor_system_join_pending()
test_actor_system_stop_exit()
test_actor_system_exit()
test_mixed_actors()
| 30.267974 | 119 | 0.585403 | import itertools
import time
from typing import Any
from ..am import ActorSystem, MapActor, Actor
def double(x):
return 2 * x
def test_empty_system():
with ActorSystem(maxsize=0, use_multiprocessing=False) as s:
s.start()
print(s.status)
print(s.status)
def test_simple_actor():
with ActorSystem(maxsize=0, use_multiprocessing=False) as s:
s.add_thread('str', MapActor(str, 'main'))
s.add_mailbox('main')
s.start()
s.tell('str', 42)
out = s.ask('main')
assert out == '42'
print(s.status)
print(s.status)
def test_mixed_actors():
n = 100
with ActorSystem(maxsize=n, use_multiprocessing=True) as s:
s.add_process("double", MapActor(double, "str"), pool=2)
s.add_thread("str", MapActor(str, "main"), pool=2)
s.add_mailbox("main")
s.start()
for _ in range(3):
for __ in range(n):
s.tell("double", 21)
out = [s.ask("main") for _ in range(n)]
assert out == ["42"] * n, out
assert s.pending == 0
def test_actor_system_exit():
n = 100
with ActorSystem(maxsize=0) as s:
s.add_thread("str", MapActor(str, "sleep"))
s.add_thread("sleep", MapActor(lambda x: [x, time.sleep(0.01)][0], "main"), pool=10)
s.add_collector("main")
s.start()
for _ in range(n):
s.tell("str", 42)
assert not s.is_alive, repr(s)
assert s.pending == 0, s.status
back = 0
for got in s.ask_available('main'):
assert got == "42"
back += 1
assert back == n, (back, n, s.status)
print(s.status)
def test_actor_system_stop_exit():
delay = 0.01
n = 100
_tic = time.perf_counter()
with ActorSystem(maxsize=0) as s:
s.add_thread("sleep", MapActor(time.sleep, "main"))
s.add_collector("main")
s.start()
for _ in range(n):
s.tell("sleep", delay)
assert s.ask("main") is None
s.stop()
print(s.status)
print()
assert s.pending == 0, s.status
print(s.status)
_elapsed = time.perf_counter() - _tic
print(f'elapsed: {_elapsed*1000:.0f}ms')
assert _elapsed < 10 * delay, _elapsed
def test_actor_system_join_pending():
for test_i in range(100):
n = 100
with ActorSystem(maxsize=0) as s:
s.add_thread('input', MapActor(str, 'main'))
s.add_collector('main')
s.start()
for i in range(n):
s.tell('input', i)
if test_i % 2 == 0:
s.finish()
s.join_pending()
print('before stop', s.status)
s.stop()
print('after exit ', s.status)
assert list(map(s.ask, itertools.repeat('main', n))) == list(map(str, range(n)))
def test_actor_system_request_finish_batch():
n = 100
class BatchingActor(Actor):
def receive(self, msg: Any):
_batch = list(itertools.chain(
[msg],
self.ask_available(None, block=not self.finish_requested),
))
return 'main', _batch
with ActorSystem(maxsize=0) as s:
s.add_thread('batch', BatchingActor())
s.add_collector('main')
s.start()
for i in range(n):
s.tell('batch', i)
print('before finish:', s.status)
s.finish()
print('before exit:', s.status)
print()
print('exited:', s.status)
num_batches = 0
concatenated = []
while len(concatenated) < n:
batch = s.ask('main')
print('batch:', batch)
assert 1 <= len(batch) <= n, batch
concatenated.extend(batch)
num_batches += 1
assert concatenated == list(range(n)), concatenated
assert num_batches == 1 # finish is requested after all `tell`
if __name__ == '__main__':
import multiprocessing
multiprocessing.set_start_method('spawn') # fork is default on unix, but windows uses spawn, which pickles actors.
test_empty_system()
test_simple_actor()
test_actor_system_request_finish_batch()
test_actor_system_join_pending()
test_actor_system_stop_exit()
test_actor_system_exit()
test_mixed_actors()
| true | true |
1c3029102b8a50c6094ed7e50625d167e115f7b7 | 467 | py | Python | omtool/visualizer/draw_parameters.py | Kraysent/OMTool | abb293ee359720d622ed0c4ecdf90967171007c8 | [
"Apache-2.0"
] | null | null | null | omtool/visualizer/draw_parameters.py | Kraysent/OMTool | abb293ee359720d622ed0c4ecdf90967171007c8 | [
"Apache-2.0"
] | 51 | 2021-12-05T13:31:51.000Z | 2022-03-27T16:05:04.000Z | omtool/visualizer/draw_parameters.py | Kraysent/OMTool | abb293ee359720d622ed0c4ecdf90967171007c8 | [
"Apache-2.0"
] | null | null | null | from dataclasses import dataclass
from typing import Any, Tuple
import matplotlib as mpl
@dataclass
class DrawParameters:
id: str
markersize: float = 0.1
linestyle: str = 'None'
color: str = 'b'
marker: str = 'o'
is_density_plot: bool = False
resolution: int = 100
extent: Tuple[int, int, int, int] = (0, 100, 0, 100)
cmap: str = 'ocean_r'
cmapnorm: Any = mpl.colors.LogNorm()
label: str = None
channel: str = 'b'
| 22.238095 | 56 | 0.631692 | from dataclasses import dataclass
from typing import Any, Tuple
import matplotlib as mpl
@dataclass
class DrawParameters:
id: str
markersize: float = 0.1
linestyle: str = 'None'
color: str = 'b'
marker: str = 'o'
is_density_plot: bool = False
resolution: int = 100
extent: Tuple[int, int, int, int] = (0, 100, 0, 100)
cmap: str = 'ocean_r'
cmapnorm: Any = mpl.colors.LogNorm()
label: str = None
channel: str = 'b'
| true | true |
1c30295399e59dfc6f9d4cba1970aea2e3d81c57 | 1,422 | py | Python | data_analysis.py | PatriceC/MLProjectISDP2020 | 64e83824690ccde2714d915c70fb00b20aa66a42 | [
"MIT"
] | 1 | 2021-01-23T01:04:00.000Z | 2021-01-23T01:04:00.000Z | data_analysis.py | cor3ntino/Time-Series-Prediction-with-Deep-Learning-for-Road-Trafic-Data | e8eefdf2e630a53e09f88550357b67732f2bccd0 | [
"MIT"
] | null | null | null | data_analysis.py | cor3ntino/Time-Series-Prediction-with-Deep-Learning-for-Road-Trafic-Data | e8eefdf2e630a53e09f88550357b67732f2bccd0 | [
"MIT"
] | 1 | 2021-01-19T16:57:27.000Z | 2021-01-19T16:57:27.000Z | # -*- coding: utf-8 -*-
"""
Created on Fri Nov 27 19:19:45 2020
@author: Patrice CHANOL & Corentin MORVAN--CHAUMEIL
"""
import pandas as pd
import matplotlib.pyplot as plt
# %% Load Data
data_load = pd.read_csv('./Radar_Traffic_Counts.csv')
data_load = data_load.drop(columns=['Time Bin', 'location_name'])
data_load['Direction'] = data_load['Direction'].astype('category').cat.codes
# %% Select set
col = ['Direction', 'location_latitude', 'location_longitude',
'Year', 'Month', 'Day of Week', 'Day', 'Hour']
data_pd = data_load.groupby(col)['Volume'].sum().reset_index()
data_pd['Date'] = pd.to_datetime(data_pd[['Year', 'Month', 'Day', 'Hour']])
data_pd.index = data_pd['Date']
data_pd_0 = data_pd[data_pd['Direction'] == 0].sort_values(['Year', 'Month', 'Day', 'Hour'])
# data_pd_0 = data_pd[(data_pd['Direction'] == 0) & (data_pd['location_latitude']==30.268652000000003) & (data_pd['location_longitude']==-97.759929)].sort_values(['Year', 'Month', 'Day', 'Hour'])
plt.figure(0)
data_pd_0[(data_pd_0['Date'] >= '2018-07-09') & (data_pd_0['Date'] <= '2018-08-10')]['Volume'].plot(label='Mois du 09/07/18 au 10/08/18')
data_pd_0[(data_pd_0['Date'] >= '2018-07-09') & (data_pd_0['Date'] < '2018-07-16')]['Volume'].plot(label='Semaine 09/07 du 15/07')
data_pd_0.loc['2018-07-16', 'Volume'].plot(label='Journée du 16/07')
plt.ylabel("Volume")
plt.title("Du 09/07/18 au 10/08/18")
plt.legend()
plt.show()
| 36.461538 | 195 | 0.668776 |
import pandas as pd
import matplotlib.pyplot as plt
data_load = pd.read_csv('./Radar_Traffic_Counts.csv')
data_load = data_load.drop(columns=['Time Bin', 'location_name'])
data_load['Direction'] = data_load['Direction'].astype('category').cat.codes
col = ['Direction', 'location_latitude', 'location_longitude',
'Year', 'Month', 'Day of Week', 'Day', 'Hour']
data_pd = data_load.groupby(col)['Volume'].sum().reset_index()
data_pd['Date'] = pd.to_datetime(data_pd[['Year', 'Month', 'Day', 'Hour']])
data_pd.index = data_pd['Date']
data_pd_0 = data_pd[data_pd['Direction'] == 0].sort_values(['Year', 'Month', 'Day', 'Hour'])
plt.figure(0)
data_pd_0[(data_pd_0['Date'] >= '2018-07-09') & (data_pd_0['Date'] <= '2018-08-10')]['Volume'].plot(label='Mois du 09/07/18 au 10/08/18')
data_pd_0[(data_pd_0['Date'] >= '2018-07-09') & (data_pd_0['Date'] < '2018-07-16')]['Volume'].plot(label='Semaine 09/07 du 15/07')
data_pd_0.loc['2018-07-16', 'Volume'].plot(label='Journée du 16/07')
plt.ylabel("Volume")
plt.title("Du 09/07/18 au 10/08/18")
plt.legend()
plt.show()
| true | true |
1c3029a9d34293ddf7ec3cf7a9c97844a4285f5f | 22,629 | py | Python | homeassistant/const.py | SergioBPereira/core | 4501906da369e23b304857b8a3512798696f26a0 | [
"Apache-2.0"
] | null | null | null | homeassistant/const.py | SergioBPereira/core | 4501906da369e23b304857b8a3512798696f26a0 | [
"Apache-2.0"
] | 62 | 2020-11-17T06:56:37.000Z | 2022-03-23T18:24:45.000Z | homeassistant/const.py | SergioBPereira/core | 4501906da369e23b304857b8a3512798696f26a0 | [
"Apache-2.0"
] | 2 | 2021-01-26T07:41:33.000Z | 2021-01-26T07:45:11.000Z | """Constants used by Home Assistant components."""
from __future__ import annotations
from typing import Final
MAJOR_VERSION: Final = 2021
MINOR_VERSION: Final = 11
PATCH_VERSION: Final = "0.dev0"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 8, 0)
# Truthy date string triggers showing related deprecation warning messages.
REQUIRED_NEXT_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)
REQUIRED_NEXT_PYTHON_HA_RELEASE: Final = "2022.1"
# Format for platform files
PLATFORM_FORMAT: Final = "{platform}.{domain}"
# Can be used to specify a catch all when registering state or event listeners.
MATCH_ALL: Final = "*"
# Entity target all constant
ENTITY_MATCH_NONE: Final = "none"
ENTITY_MATCH_ALL: Final = "all"
# If no name is specified
DEVICE_DEFAULT_NAME: Final = "Unnamed Device"
# Max characters for data stored in the recorder (changes to these limits would require
# a database migration)
MAX_LENGTH_EVENT_EVENT_TYPE: Final = 64
MAX_LENGTH_EVENT_ORIGIN: Final = 32
MAX_LENGTH_EVENT_CONTEXT_ID: Final = 36
MAX_LENGTH_STATE_DOMAIN: Final = 64
MAX_LENGTH_STATE_ENTITY_ID: Final = 255
MAX_LENGTH_STATE_STATE: Final = 255
# Sun events
SUN_EVENT_SUNSET: Final = "sunset"
SUN_EVENT_SUNRISE: Final = "sunrise"
# #### CONFIG ####
CONF_ABOVE: Final = "above"
CONF_ACCESS_TOKEN: Final = "access_token"
CONF_ADDRESS: Final = "address"
CONF_AFTER: Final = "after"
CONF_ALIAS: Final = "alias"
CONF_ALLOWLIST_EXTERNAL_URLS: Final = "allowlist_external_urls"
CONF_API_KEY: Final = "api_key"
CONF_API_TOKEN: Final = "api_token"
CONF_API_VERSION: Final = "api_version"
CONF_ARMING_TIME: Final = "arming_time"
CONF_AT: Final = "at"
CONF_ATTRIBUTE: Final = "attribute"
CONF_AUTH_MFA_MODULES: Final = "auth_mfa_modules"
CONF_AUTH_PROVIDERS: Final = "auth_providers"
CONF_AUTHENTICATION: Final = "authentication"
CONF_BASE: Final = "base"
CONF_BEFORE: Final = "before"
CONF_BELOW: Final = "below"
CONF_BINARY_SENSORS: Final = "binary_sensors"
CONF_BRIGHTNESS: Final = "brightness"
CONF_BROADCAST_ADDRESS: Final = "broadcast_address"
CONF_BROADCAST_PORT: Final = "broadcast_port"
CONF_CHOOSE: Final = "choose"
CONF_CLIENT_ID: Final = "client_id"
CONF_CLIENT_SECRET: Final = "client_secret"
CONF_CODE: Final = "code"
CONF_COLOR_TEMP: Final = "color_temp"
CONF_COMMAND: Final = "command"
CONF_COMMAND_CLOSE: Final = "command_close"
CONF_COMMAND_OFF: Final = "command_off"
CONF_COMMAND_ON: Final = "command_on"
CONF_COMMAND_OPEN: Final = "command_open"
CONF_COMMAND_STATE: Final = "command_state"
CONF_COMMAND_STOP: Final = "command_stop"
CONF_CONDITION: Final = "condition"
CONF_CONDITIONS: Final = "conditions"
CONF_CONTINUE_ON_TIMEOUT: Final = "continue_on_timeout"
CONF_COUNT: Final = "count"
CONF_COVERS: Final = "covers"
CONF_CURRENCY: Final = "currency"
CONF_CUSTOMIZE: Final = "customize"
CONF_CUSTOMIZE_DOMAIN: Final = "customize_domain"
CONF_CUSTOMIZE_GLOB: Final = "customize_glob"
CONF_DEFAULT: Final = "default"
CONF_DELAY: Final = "delay"
CONF_DELAY_TIME: Final = "delay_time"
CONF_DESCRIPTION: Final = "description"
CONF_DEVICE: Final = "device"
CONF_DEVICES: Final = "devices"
CONF_DEVICE_CLASS: Final = "device_class"
CONF_DEVICE_ID: Final = "device_id"
CONF_DISARM_AFTER_TRIGGER: Final = "disarm_after_trigger"
CONF_DISCOVERY: Final = "discovery"
CONF_DISKS: Final = "disks"
CONF_DISPLAY_CURRENCY: Final = "display_currency"
CONF_DISPLAY_OPTIONS: Final = "display_options"
CONF_DOMAIN: Final = "domain"
CONF_DOMAINS: Final = "domains"
CONF_EFFECT: Final = "effect"
CONF_ELEVATION: Final = "elevation"
CONF_EMAIL: Final = "email"
CONF_ENTITIES: Final = "entities"
CONF_ENTITY_CATEGORY: Final = "entity_category"
CONF_ENTITY_ID: Final = "entity_id"
CONF_ENTITY_NAMESPACE: Final = "entity_namespace"
CONF_ENTITY_PICTURE_TEMPLATE: Final = "entity_picture_template"
CONF_EVENT: Final = "event"
CONF_EVENT_DATA: Final = "event_data"
CONF_EVENT_DATA_TEMPLATE: Final = "event_data_template"
CONF_EXCLUDE: Final = "exclude"
CONF_EXTERNAL_URL: Final = "external_url"
CONF_FILENAME: Final = "filename"
CONF_FILE_PATH: Final = "file_path"
CONF_FOR: Final = "for"
CONF_FORCE_UPDATE: Final = "force_update"
CONF_FRIENDLY_NAME: Final = "friendly_name"
CONF_FRIENDLY_NAME_TEMPLATE: Final = "friendly_name_template"
CONF_HEADERS: Final = "headers"
CONF_HOST: Final = "host"
CONF_HOSTS: Final = "hosts"
CONF_HS: Final = "hs"
CONF_ICON: Final = "icon"
CONF_ICON_TEMPLATE: Final = "icon_template"
CONF_ID: Final = "id"
CONF_INCLUDE: Final = "include"
CONF_INTERNAL_URL: Final = "internal_url"
CONF_IP_ADDRESS: Final = "ip_address"
CONF_LATITUDE: Final = "latitude"
CONF_LEGACY_TEMPLATES: Final = "legacy_templates"
CONF_LIGHTS: Final = "lights"
CONF_LONGITUDE: Final = "longitude"
CONF_MAC: Final = "mac"
CONF_MAXIMUM: Final = "maximum"
CONF_MEDIA_DIRS: Final = "media_dirs"
CONF_METHOD: Final = "method"
CONF_MINIMUM: Final = "minimum"
CONF_MODE: Final = "mode"
CONF_MONITORED_CONDITIONS: Final = "monitored_conditions"
CONF_MONITORED_VARIABLES: Final = "monitored_variables"
CONF_NAME: Final = "name"
CONF_OFFSET: Final = "offset"
CONF_OPTIMISTIC: Final = "optimistic"
CONF_PACKAGES: Final = "packages"
CONF_PARAMS: Final = "params"
CONF_PASSWORD: Final = "password"
CONF_PATH: Final = "path"
CONF_PAYLOAD: Final = "payload"
CONF_PAYLOAD_OFF: Final = "payload_off"
CONF_PAYLOAD_ON: Final = "payload_on"
CONF_PENDING_TIME: Final = "pending_time"
CONF_PIN: Final = "pin"
CONF_PLATFORM: Final = "platform"
CONF_PORT: Final = "port"
CONF_PREFIX: Final = "prefix"
CONF_PROFILE_NAME: Final = "profile_name"
CONF_PROTOCOL: Final = "protocol"
CONF_PROXY_SSL: Final = "proxy_ssl"
CONF_QUOTE: Final = "quote"
CONF_RADIUS: Final = "radius"
CONF_RECIPIENT: Final = "recipient"
CONF_REGION: Final = "region"
CONF_REPEAT: Final = "repeat"
CONF_RESOURCE: Final = "resource"
CONF_RESOURCES: Final = "resources"
CONF_RESOURCE_TEMPLATE: Final = "resource_template"
CONF_RGB: Final = "rgb"
CONF_ROOM: Final = "room"
CONF_SCAN_INTERVAL: Final = "scan_interval"
CONF_SCENE: Final = "scene"
CONF_SELECTOR: Final = "selector"
CONF_SENDER: Final = "sender"
CONF_SENSORS: Final = "sensors"
CONF_SENSOR_TYPE: Final = "sensor_type"
CONF_SEQUENCE: Final = "sequence"
CONF_SERVICE: Final = "service"
CONF_SERVICE_DATA: Final = "data"
CONF_SERVICE_TEMPLATE: Final = "service_template"
CONF_SHOW_ON_MAP: Final = "show_on_map"
CONF_SLAVE: Final = "slave"
CONF_SOURCE: Final = "source"
CONF_SSL: Final = "ssl"
CONF_STATE: Final = "state"
CONF_STATE_TEMPLATE: Final = "state_template"
CONF_STRUCTURE: Final = "structure"
CONF_SWITCHES: Final = "switches"
CONF_TARGET: Final = "target"
CONF_TEMPERATURE_UNIT: Final = "temperature_unit"
CONF_TIMEOUT: Final = "timeout"
CONF_TIME_ZONE: Final = "time_zone"
CONF_TOKEN: Final = "token"
CONF_TRIGGER_TIME: Final = "trigger_time"
CONF_TTL: Final = "ttl"
CONF_TYPE: Final = "type"
CONF_UNIQUE_ID: Final = "unique_id"
CONF_UNIT_OF_MEASUREMENT: Final = "unit_of_measurement"
CONF_UNIT_SYSTEM: Final = "unit_system"
CONF_UNTIL: Final = "until"
CONF_URL: Final = "url"
CONF_USERNAME: Final = "username"
CONF_VALUE_TEMPLATE: Final = "value_template"
CONF_VARIABLES: Final = "variables"
CONF_VERIFY_SSL: Final = "verify_ssl"
CONF_WAIT_FOR_TRIGGER: Final = "wait_for_trigger"
CONF_WAIT_TEMPLATE: Final = "wait_template"
CONF_WEBHOOK_ID: Final = "webhook_id"
CONF_WEEKDAY: Final = "weekday"
CONF_WHILE: Final = "while"
CONF_WHITELIST: Final = "whitelist"
CONF_ALLOWLIST_EXTERNAL_DIRS: Final = "allowlist_external_dirs"
LEGACY_CONF_WHITELIST_EXTERNAL_DIRS: Final = "whitelist_external_dirs"
CONF_WHITE_VALUE: Final = "white_value"
CONF_XY: Final = "xy"
CONF_ZONE: Final = "zone"
# #### EVENTS ####
EVENT_CALL_SERVICE: Final = "call_service"
EVENT_COMPONENT_LOADED: Final = "component_loaded"
EVENT_CORE_CONFIG_UPDATE: Final = "core_config_updated"
EVENT_HOMEASSISTANT_CLOSE: Final = "homeassistant_close"
EVENT_HOMEASSISTANT_START: Final = "homeassistant_start"
EVENT_HOMEASSISTANT_STARTED: Final = "homeassistant_started"
EVENT_HOMEASSISTANT_STOP: Final = "homeassistant_stop"
EVENT_HOMEASSISTANT_FINAL_WRITE: Final = "homeassistant_final_write"
EVENT_LOGBOOK_ENTRY: Final = "logbook_entry"
EVENT_SERVICE_REGISTERED: Final = "service_registered"
EVENT_SERVICE_REMOVED: Final = "service_removed"
EVENT_STATE_CHANGED: Final = "state_changed"
EVENT_THEMES_UPDATED: Final = "themes_updated"
EVENT_TIMER_OUT_OF_SYNC: Final = "timer_out_of_sync"
EVENT_TIME_CHANGED: Final = "time_changed"
# #### DEVICE CLASSES ####
DEVICE_CLASS_AQI: Final = "aqi"
DEVICE_CLASS_BATTERY: Final = "battery"
DEVICE_CLASS_CO: Final = "carbon_monoxide"
DEVICE_CLASS_CO2: Final = "carbon_dioxide"
DEVICE_CLASS_CURRENT: Final = "current"
DEVICE_CLASS_DATE: Final = "date"
DEVICE_CLASS_ENERGY: Final = "energy"
DEVICE_CLASS_HUMIDITY: Final = "humidity"
DEVICE_CLASS_ILLUMINANCE: Final = "illuminance"
DEVICE_CLASS_MONETARY: Final = "monetary"
DEVICE_CLASS_NITROGEN_DIOXIDE = "nitrogen_dioxide"
DEVICE_CLASS_NITROGEN_MONOXIDE = "nitrogen_monoxide"
DEVICE_CLASS_NITROUS_OXIDE = "nitrous_oxide"
DEVICE_CLASS_OZONE: Final = "ozone"
DEVICE_CLASS_POWER_FACTOR: Final = "power_factor"
DEVICE_CLASS_POWER: Final = "power"
DEVICE_CLASS_PM25: Final = "pm25"
DEVICE_CLASS_PM1: Final = "pm1"
DEVICE_CLASS_PM10: Final = "pm10"
DEVICE_CLASS_PRESSURE: Final = "pressure"
DEVICE_CLASS_SIGNAL_STRENGTH: Final = "signal_strength"
DEVICE_CLASS_SULPHUR_DIOXIDE = "sulphur_dioxide"
DEVICE_CLASS_TEMPERATURE: Final = "temperature"
DEVICE_CLASS_TIMESTAMP: Final = "timestamp"
DEVICE_CLASS_VOLTAGE: Final = "voltage"
DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS = "volatile_organic_compounds"
DEVICE_CLASS_GAS: Final = "gas"
# #### STATES ####
# Entity state value strings (STATE_*).
# Generic on/off and presence states
STATE_ON: Final = "on"
STATE_OFF: Final = "off"
STATE_HOME: Final = "home"
STATE_NOT_HOME: Final = "not_home"
STATE_UNKNOWN: Final = "unknown"
# Cover / opening states
STATE_OPEN: Final = "open"
STATE_OPENING: Final = "opening"
STATE_CLOSED: Final = "closed"
STATE_CLOSING: Final = "closing"
# Media player states
STATE_PLAYING: Final = "playing"
STATE_PAUSED: Final = "paused"
STATE_IDLE: Final = "idle"
STATE_STANDBY: Final = "standby"
# Alarm control panel states
STATE_ALARM_DISARMED: Final = "disarmed"
STATE_ALARM_ARMED_HOME: Final = "armed_home"
STATE_ALARM_ARMED_AWAY: Final = "armed_away"
STATE_ALARM_ARMED_NIGHT: Final = "armed_night"
STATE_ALARM_ARMED_VACATION: Final = "armed_vacation"
STATE_ALARM_ARMED_CUSTOM_BYPASS: Final = "armed_custom_bypass"
STATE_ALARM_PENDING: Final = "pending"
STATE_ALARM_ARMING: Final = "arming"
STATE_ALARM_DISARMING: Final = "disarming"
STATE_ALARM_TRIGGERED: Final = "triggered"
# Lock states
STATE_LOCKED: Final = "locked"
STATE_UNLOCKED: Final = "unlocked"
STATE_LOCKING: Final = "locking"
STATE_UNLOCKING: Final = "unlocking"
STATE_JAMMED: Final = "jammed"
# Availability / health states
STATE_UNAVAILABLE: Final = "unavailable"
STATE_OK: Final = "ok"
STATE_PROBLEM: Final = "problem"
# #### STATE AND EVENT ATTRIBUTES ####
# Attribute key strings (ATTR_*) used in state objects and event payloads.
# Attribution
ATTR_ATTRIBUTION: Final = "attribution"
# Credentials
ATTR_CREDENTIALS: Final = "credentials"
# Contains time-related attributes
ATTR_NOW: Final = "now"
ATTR_DATE: Final = "date"
ATTR_TIME: Final = "time"
ATTR_SECONDS: Final = "seconds"
# Contains domain, service for a SERVICE_CALL event
ATTR_DOMAIN: Final = "domain"
ATTR_SERVICE: Final = "service"
ATTR_SERVICE_DATA: Final = "service_data"
# IDs
ATTR_ID: Final = "id"
# Name
ATTR_NAME: Final = "name"
# Contains one string or a list of strings, each being an entity id
ATTR_ENTITY_ID: Final = "entity_id"
# Contains one string or a list of strings, each being an area id
ATTR_AREA_ID: Final = "area_id"
# Contains one string, the device ID
ATTR_DEVICE_ID: Final = "device_id"
# String with a friendly name for the entity
ATTR_FRIENDLY_NAME: Final = "friendly_name"
# A picture to represent entity
ATTR_ENTITY_PICTURE: Final = "entity_picture"
ATTR_IDENTIFIERS: Final = "identifiers"
# Icon to use in the frontend
ATTR_ICON: Final = "icon"
# The unit of measurement if applicable
ATTR_UNIT_OF_MEASUREMENT: Final = "unit_of_measurement"
# NOTE(review): these two CONF_* unit-system values sit among the ATTR_*
# constants — placement looks historical; verify before relocating.
CONF_UNIT_SYSTEM_METRIC: Final = "metric"
CONF_UNIT_SYSTEM_IMPERIAL: Final = "imperial"
# Electrical attributes
ATTR_VOLTAGE: Final = "voltage"
# Location of the device/sensor
ATTR_LOCATION: Final = "location"
ATTR_MODE: Final = "mode"
# Device registry metadata
ATTR_MANUFACTURER: Final = "manufacturer"
ATTR_MODEL: Final = "model"
ATTR_SW_VERSION: Final = "sw_version"
# Battery status
ATTR_BATTERY_CHARGING: Final = "battery_charging"
ATTR_BATTERY_LEVEL: Final = "battery_level"
ATTR_WAKEUP: Final = "wake_up_interval"
# For devices which support a code attribute
ATTR_CODE: Final = "code"
ATTR_CODE_FORMAT: Final = "code_format"
# For calling a device specific command
ATTR_COMMAND: Final = "command"
# For devices which support an armed state
ATTR_ARMED: Final = "device_armed"
# For devices which support a locked state
ATTR_LOCKED: Final = "locked"
# For sensors that support 'tripping', e.g. motion and door sensors
ATTR_TRIPPED: Final = "device_tripped"
# For sensors that support 'tripping' this holds the most recent
# time the device was tripped
ATTR_LAST_TRIP_TIME: Final = "last_tripped_time"
# For all entities, this holds whether or not the entity should be hidden
ATTR_HIDDEN: Final = "hidden"
# Location of the entity
ATTR_LATITUDE: Final = "latitude"
ATTR_LONGITUDE: Final = "longitude"
# Accuracy of location in meters
ATTR_GPS_ACCURACY: Final = "gps_accuracy"
# If state is assumed
ATTR_ASSUMED_STATE: Final = "assumed_state"
ATTR_STATE: Final = "state"
ATTR_EDITABLE: Final = "editable"
ATTR_OPTION: Final = "option"
# The entity has been restored with restore state
ATTR_RESTORED: Final = "restored"
# Bitfield of supported component features for the entity
ATTR_SUPPORTED_FEATURES: Final = "supported_features"
# Class of device within its domain
ATTR_DEVICE_CLASS: Final = "device_class"
# Temperature attribute
ATTR_TEMPERATURE: Final = "temperature"
# #### UNITS OF MEASUREMENT ####
# Display strings for units of measurement.
# Power units
POWER_WATT: Final = "W"
POWER_KILO_WATT: Final = "kW"
POWER_VOLT_AMPERE: Final = "VA"
# Energy units
ENERGY_WATT_HOUR: Final = "Wh"
ENERGY_KILO_WATT_HOUR: Final = "kWh"
# Electric_current units
ELECTRIC_CURRENT_MILLIAMPERE: Final = "mA"
ELECTRIC_CURRENT_AMPERE: Final = "A"
# Electric_potential units
ELECTRIC_POTENTIAL_MILLIVOLT: Final = "mV"
ELECTRIC_POTENTIAL_VOLT: Final = "V"
# Degree units
DEGREE: Final = "°"
# Currency units
CURRENCY_EURO: Final = "€"
CURRENCY_DOLLAR: Final = "$"
CURRENCY_CENT: Final = "¢"
# Temperature units
TEMP_CELSIUS: Final = "°C"
TEMP_FAHRENHEIT: Final = "°F"
TEMP_KELVIN: Final = "K"
# Time units
TIME_MICROSECONDS: Final = "μs"
TIME_MILLISECONDS: Final = "ms"
TIME_SECONDS: Final = "s"
TIME_MINUTES: Final = "min"
TIME_HOURS: Final = "h"
TIME_DAYS: Final = "d"
TIME_WEEKS: Final = "w"
# NOTE: "m" is also the LENGTH_METERS string below — context disambiguates.
TIME_MONTHS: Final = "m"
TIME_YEARS: Final = "y"
# Length units
LENGTH_MILLIMETERS: Final = "mm"
LENGTH_CENTIMETERS: Final = "cm"
LENGTH_METERS: Final = "m"
LENGTH_KILOMETERS: Final = "km"
LENGTH_INCHES: Final = "in"
LENGTH_FEET: Final = "ft"
LENGTH_YARD: Final = "yd"
LENGTH_MILES: Final = "mi"
# Frequency units
FREQUENCY_HERTZ: Final = "Hz"
FREQUENCY_MEGAHERTZ: Final = "MHz"
FREQUENCY_GIGAHERTZ: Final = "GHz"
# Pressure units
PRESSURE_PA: Final = "Pa"
PRESSURE_HPA: Final = "hPa"
PRESSURE_KPA: Final = "kPa"
PRESSURE_BAR: Final = "bar"
PRESSURE_MBAR: Final = "mbar"
PRESSURE_INHG: Final = "inHg"
PRESSURE_PSI: Final = "psi"
# Sound pressure units
SOUND_PRESSURE_DB: Final = "dB"
SOUND_PRESSURE_WEIGHTED_DBA: Final = "dBa"
# Volume units
VOLUME_LITERS: Final = "L"
VOLUME_MILLILITERS: Final = "mL"
VOLUME_CUBIC_METERS: Final = "m³"
VOLUME_CUBIC_FEET: Final = "ft³"
VOLUME_GALLONS: Final = "gal"
VOLUME_FLUID_OUNCE: Final = "fl. oz."
# Volume Flow Rate units
VOLUME_FLOW_RATE_CUBIC_METERS_PER_HOUR: Final = "m³/h"
VOLUME_FLOW_RATE_CUBIC_FEET_PER_MINUTE: Final = "ft³/m"
# Area units
AREA_SQUARE_METERS: Final = "m²"
# Mass units
MASS_GRAMS: Final = "g"
MASS_KILOGRAMS: Final = "kg"
MASS_MILLIGRAMS: Final = "mg"
MASS_MICROGRAMS: Final = "µg"
MASS_OUNCES: Final = "oz"
MASS_POUNDS: Final = "lb"
# Conductivity units
CONDUCTIVITY: Final = "µS/cm"
# Light units
LIGHT_LUX: Final = "lx"
# UV Index units
UV_INDEX: Final = "UV index"
# Percentage units
PERCENTAGE: Final = "%"
# Irradiation units
IRRADIATION_WATTS_PER_SQUARE_METER: Final = "W/m²"
IRRADIATION_BTUS_PER_HOUR_SQUARE_FOOT: Final = "BTU/(h×ft²)"
# Precipitation units
PRECIPITATION_MILLIMETERS_PER_HOUR: Final = "mm/h"
PRECIPITATION_INCHES: Final = "in"
PRECIPITATION_INCHES_PER_HOUR: Final = "in/h"
# Concentration units
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: Final = "µg/m³"
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER: Final = "mg/m³"
# NOTE(review): this constant uses Greek small mu (μ) while the two above use
# the micro sign (µ) — visually identical but different code points; confirm
# before normalizing, since the strings may be persisted.
CONCENTRATION_MICROGRAMS_PER_CUBIC_FOOT: Final = "μg/ft³"
CONCENTRATION_PARTS_PER_CUBIC_METER: Final = "p/m³"
CONCENTRATION_PARTS_PER_MILLION: Final = "ppm"
CONCENTRATION_PARTS_PER_BILLION: Final = "ppb"
# Speed units
SPEED_MILLIMETERS_PER_DAY: Final = "mm/d"
SPEED_INCHES_PER_DAY: Final = "in/d"
SPEED_METERS_PER_SECOND: Final = "m/s"
SPEED_INCHES_PER_HOUR: Final = "in/h"
SPEED_KILOMETERS_PER_HOUR: Final = "km/h"
SPEED_MILES_PER_HOUR: Final = "mph"
# Signal_strength units
SIGNAL_STRENGTH_DECIBELS: Final = "dB"
SIGNAL_STRENGTH_DECIBELS_MILLIWATT: Final = "dBm"
# Data units (decimal prefixes, then binary prefixes)
DATA_BITS: Final = "bit"
DATA_KILOBITS: Final = "kbit"
DATA_MEGABITS: Final = "Mbit"
DATA_GIGABITS: Final = "Gbit"
DATA_BYTES: Final = "B"
DATA_KILOBYTES: Final = "kB"
DATA_MEGABYTES: Final = "MB"
DATA_GIGABYTES: Final = "GB"
DATA_TERABYTES: Final = "TB"
DATA_PETABYTES: Final = "PB"
DATA_EXABYTES: Final = "EB"
DATA_ZETTABYTES: Final = "ZB"
DATA_YOTTABYTES: Final = "YB"
DATA_KIBIBYTES: Final = "KiB"
DATA_MEBIBYTES: Final = "MiB"
DATA_GIBIBYTES: Final = "GiB"
DATA_TEBIBYTES: Final = "TiB"
DATA_PEBIBYTES: Final = "PiB"
DATA_EXBIBYTES: Final = "EiB"
DATA_ZEBIBYTES: Final = "ZiB"
DATA_YOBIBYTES: Final = "YiB"
# Data_rate units
DATA_RATE_BITS_PER_SECOND: Final = "bit/s"
DATA_RATE_KILOBITS_PER_SECOND: Final = "kbit/s"
DATA_RATE_MEGABITS_PER_SECOND: Final = "Mbit/s"
DATA_RATE_GIGABITS_PER_SECOND: Final = "Gbit/s"
DATA_RATE_BYTES_PER_SECOND: Final = "B/s"
DATA_RATE_KILOBYTES_PER_SECOND: Final = "kB/s"
DATA_RATE_MEGABYTES_PER_SECOND: Final = "MB/s"
DATA_RATE_GIGABYTES_PER_SECOND: Final = "GB/s"
DATA_RATE_KIBIBYTES_PER_SECOND: Final = "KiB/s"
DATA_RATE_MEBIBYTES_PER_SECOND: Final = "MiB/s"
DATA_RATE_GIBIBYTES_PER_SECOND: Final = "GiB/s"
# #### SERVICES ####
# Service name strings (SERVICE_*).
SERVICE_HOMEASSISTANT_STOP: Final = "stop"
SERVICE_HOMEASSISTANT_RESTART: Final = "restart"
SERVICE_TURN_ON: Final = "turn_on"
SERVICE_TURN_OFF: Final = "turn_off"
SERVICE_TOGGLE: Final = "toggle"
SERVICE_RELOAD: Final = "reload"
# Media player services
SERVICE_VOLUME_UP: Final = "volume_up"
SERVICE_VOLUME_DOWN: Final = "volume_down"
SERVICE_VOLUME_MUTE: Final = "volume_mute"
SERVICE_VOLUME_SET: Final = "volume_set"
SERVICE_MEDIA_PLAY_PAUSE: Final = "media_play_pause"
SERVICE_MEDIA_PLAY: Final = "media_play"
SERVICE_MEDIA_PAUSE: Final = "media_pause"
SERVICE_MEDIA_STOP: Final = "media_stop"
SERVICE_MEDIA_NEXT_TRACK: Final = "media_next_track"
SERVICE_MEDIA_PREVIOUS_TRACK: Final = "media_previous_track"
SERVICE_MEDIA_SEEK: Final = "media_seek"
SERVICE_REPEAT_SET: Final = "repeat_set"
SERVICE_SHUFFLE_SET: Final = "shuffle_set"
# Alarm control panel services
SERVICE_ALARM_DISARM: Final = "alarm_disarm"
SERVICE_ALARM_ARM_HOME: Final = "alarm_arm_home"
SERVICE_ALARM_ARM_AWAY: Final = "alarm_arm_away"
SERVICE_ALARM_ARM_NIGHT: Final = "alarm_arm_night"
SERVICE_ALARM_ARM_VACATION: Final = "alarm_arm_vacation"
SERVICE_ALARM_ARM_CUSTOM_BYPASS: Final = "alarm_arm_custom_bypass"
SERVICE_ALARM_TRIGGER: Final = "alarm_trigger"
# Lock services
SERVICE_LOCK: Final = "lock"
SERVICE_UNLOCK: Final = "unlock"
# Cover services
SERVICE_OPEN: Final = "open"
SERVICE_CLOSE: Final = "close"
SERVICE_CLOSE_COVER: Final = "close_cover"
SERVICE_CLOSE_COVER_TILT: Final = "close_cover_tilt"
SERVICE_OPEN_COVER: Final = "open_cover"
SERVICE_OPEN_COVER_TILT: Final = "open_cover_tilt"
SERVICE_SAVE_PERSISTENT_STATES: Final = "save_persistent_states"
SERVICE_SET_COVER_POSITION: Final = "set_cover_position"
SERVICE_SET_COVER_TILT_POSITION: Final = "set_cover_tilt_position"
SERVICE_STOP_COVER: Final = "stop_cover"
SERVICE_STOP_COVER_TILT: Final = "stop_cover_tilt"
SERVICE_TOGGLE_COVER_TILT: Final = "toggle_cover_tilt"
SERVICE_SELECT_OPTION: Final = "select_option"
# #### API / REMOTE ####
SERVER_PORT: Final = 8123
# URL templates; "{}" placeholders are filled positionally via str.format.
URL_ROOT: Final = "/"
URL_API: Final = "/api/"
URL_API_STREAM: Final = "/api/stream"
URL_API_CONFIG: Final = "/api/config"
URL_API_DISCOVERY_INFO: Final = "/api/discovery_info"
URL_API_STATES: Final = "/api/states"
URL_API_STATES_ENTITY: Final = "/api/states/{}"
URL_API_EVENTS: Final = "/api/events"
URL_API_EVENTS_EVENT: Final = "/api/events/{}"
URL_API_SERVICES: Final = "/api/services"
URL_API_SERVICES_SERVICE: Final = "/api/services/{}/{}"
URL_API_COMPONENTS: Final = "/api/components"
URL_API_ERROR_LOG: Final = "/api/error_log"
URL_API_LOG_OUT: Final = "/api/log_out"
URL_API_TEMPLATE: Final = "/api/template"
# Standard HTTP status codes
HTTP_OK: Final = 200
HTTP_CREATED: Final = 201
HTTP_ACCEPTED: Final = 202
HTTP_MOVED_PERMANENTLY: Final = 301
HTTP_BAD_REQUEST: Final = 400
HTTP_UNAUTHORIZED: Final = 401
HTTP_FORBIDDEN: Final = 403
HTTP_NOT_FOUND: Final = 404
HTTP_METHOD_NOT_ALLOWED: Final = 405
HTTP_UNPROCESSABLE_ENTITY: Final = 422
HTTP_TOO_MANY_REQUESTS: Final = 429
HTTP_INTERNAL_SERVER_ERROR: Final = 500
HTTP_BAD_GATEWAY: Final = 502
HTTP_SERVICE_UNAVAILABLE: Final = 503
# HTTP authentication scheme identifiers
HTTP_BASIC_AUTHENTICATION: Final = "basic"
HTTP_BEARER_AUTHENTICATION: Final = "bearer_token"
HTTP_DIGEST_AUTHENTICATION: Final = "digest"
HTTP_HEADER_X_REQUESTED_WITH: Final = "X-Requested-With"
# Content types; CONTENT_TYPE_MULTIPART's "{}" takes the boundary string.
CONTENT_TYPE_JSON: Final = "application/json"
CONTENT_TYPE_MULTIPART: Final = "multipart/x-mixed-replace; boundary={}"
CONTENT_TYPE_TEXT_PLAIN: Final = "text/plain"
# The exit code to send to request a restart
RESTART_EXIT_CODE: Final = 100
UNIT_NOT_RECOGNIZED_TEMPLATE: Final = "{} is not a recognized {} unit."
# Unit-system category keys
LENGTH: Final = "length"
MASS: Final = "mass"
PRESSURE: Final = "pressure"
VOLUME: Final = "volume"
TEMPERATURE: Final = "temperature"
SPEED_MS: Final = "speed_ms"
ILLUMINANCE: Final = "illuminance"
WEEKDAYS: Final[list[str]] = ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]
# The degree of precision for platforms
PRECISION_WHOLE: Final = 1
PRECISION_HALVES: Final = 0.5
PRECISION_TENTHS: Final = 0.1
# Static list of entities that will never be exposed to
# cloud, alexa, or google_home components
CLOUD_NEVER_EXPOSED_ENTITIES: Final[list[str]] = ["group.all_locks"]
# The ID of the Home Assistant Cast App
CAST_APP_ID_HOMEASSISTANT: Final = "B12CE3CA"
# Entity category identifiers
ENTITY_CATEGORY_CONFIG: Final = "config"
ENTITY_CATEGORY_DIAGNOSTIC: Final = "diagnostic"
from __future__ import annotations
from typing import Final
MAJOR_VERSION: Final = 2021
MINOR_VERSION: Final = 11
PATCH_VERSION: Final = "0.dev0"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 8, 0)
REQUIRED_NEXT_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)
REQUIRED_NEXT_PYTHON_HA_RELEASE: Final = "2022.1"
PLATFORM_FORMAT: Final = "{platform}.{domain}"
MATCH_ALL: Final = "*"
ENTITY_MATCH_NONE: Final = "none"
ENTITY_MATCH_ALL: Final = "all"
DEVICE_DEFAULT_NAME: Final = "Unnamed Device"
MAX_LENGTH_EVENT_EVENT_TYPE: Final = 64
MAX_LENGTH_EVENT_ORIGIN: Final = 32
MAX_LENGTH_EVENT_CONTEXT_ID: Final = 36
MAX_LENGTH_STATE_DOMAIN: Final = 64
MAX_LENGTH_STATE_ENTITY_ID: Final = 255
MAX_LENGTH_STATE_STATE: Final = 255
SUN_EVENT_SUNSET: Final = "sunset"
SUN_EVENT_SUNRISE: Final = "sunrise"
CONF_ABOVE: Final = "above"
CONF_ACCESS_TOKEN: Final = "access_token"
CONF_ADDRESS: Final = "address"
CONF_AFTER: Final = "after"
CONF_ALIAS: Final = "alias"
CONF_ALLOWLIST_EXTERNAL_URLS: Final = "allowlist_external_urls"
CONF_API_KEY: Final = "api_key"
CONF_API_TOKEN: Final = "api_token"
CONF_API_VERSION: Final = "api_version"
CONF_ARMING_TIME: Final = "arming_time"
CONF_AT: Final = "at"
CONF_ATTRIBUTE: Final = "attribute"
CONF_AUTH_MFA_MODULES: Final = "auth_mfa_modules"
CONF_AUTH_PROVIDERS: Final = "auth_providers"
CONF_AUTHENTICATION: Final = "authentication"
CONF_BASE: Final = "base"
CONF_BEFORE: Final = "before"
CONF_BELOW: Final = "below"
CONF_BINARY_SENSORS: Final = "binary_sensors"
CONF_BRIGHTNESS: Final = "brightness"
CONF_BROADCAST_ADDRESS: Final = "broadcast_address"
CONF_BROADCAST_PORT: Final = "broadcast_port"
CONF_CHOOSE: Final = "choose"
CONF_CLIENT_ID: Final = "client_id"
CONF_CLIENT_SECRET: Final = "client_secret"
CONF_CODE: Final = "code"
CONF_COLOR_TEMP: Final = "color_temp"
CONF_COMMAND: Final = "command"
CONF_COMMAND_CLOSE: Final = "command_close"
CONF_COMMAND_OFF: Final = "command_off"
CONF_COMMAND_ON: Final = "command_on"
CONF_COMMAND_OPEN: Final = "command_open"
CONF_COMMAND_STATE: Final = "command_state"
CONF_COMMAND_STOP: Final = "command_stop"
CONF_CONDITION: Final = "condition"
CONF_CONDITIONS: Final = "conditions"
CONF_CONTINUE_ON_TIMEOUT: Final = "continue_on_timeout"
CONF_COUNT: Final = "count"
CONF_COVERS: Final = "covers"
CONF_CURRENCY: Final = "currency"
CONF_CUSTOMIZE: Final = "customize"
CONF_CUSTOMIZE_DOMAIN: Final = "customize_domain"
CONF_CUSTOMIZE_GLOB: Final = "customize_glob"
CONF_DEFAULT: Final = "default"
CONF_DELAY: Final = "delay"
CONF_DELAY_TIME: Final = "delay_time"
CONF_DESCRIPTION: Final = "description"
CONF_DEVICE: Final = "device"
CONF_DEVICES: Final = "devices"
CONF_DEVICE_CLASS: Final = "device_class"
CONF_DEVICE_ID: Final = "device_id"
CONF_DISARM_AFTER_TRIGGER: Final = "disarm_after_trigger"
CONF_DISCOVERY: Final = "discovery"
CONF_DISKS: Final = "disks"
CONF_DISPLAY_CURRENCY: Final = "display_currency"
CONF_DISPLAY_OPTIONS: Final = "display_options"
CONF_DOMAIN: Final = "domain"
CONF_DOMAINS: Final = "domains"
CONF_EFFECT: Final = "effect"
CONF_ELEVATION: Final = "elevation"
CONF_EMAIL: Final = "email"
CONF_ENTITIES: Final = "entities"
CONF_ENTITY_CATEGORY: Final = "entity_category"
CONF_ENTITY_ID: Final = "entity_id"
CONF_ENTITY_NAMESPACE: Final = "entity_namespace"
CONF_ENTITY_PICTURE_TEMPLATE: Final = "entity_picture_template"
CONF_EVENT: Final = "event"
CONF_EVENT_DATA: Final = "event_data"
CONF_EVENT_DATA_TEMPLATE: Final = "event_data_template"
CONF_EXCLUDE: Final = "exclude"
CONF_EXTERNAL_URL: Final = "external_url"
CONF_FILENAME: Final = "filename"
CONF_FILE_PATH: Final = "file_path"
CONF_FOR: Final = "for"
CONF_FORCE_UPDATE: Final = "force_update"
CONF_FRIENDLY_NAME: Final = "friendly_name"
CONF_FRIENDLY_NAME_TEMPLATE: Final = "friendly_name_template"
CONF_HEADERS: Final = "headers"
CONF_HOST: Final = "host"
CONF_HOSTS: Final = "hosts"
CONF_HS: Final = "hs"
CONF_ICON: Final = "icon"
CONF_ICON_TEMPLATE: Final = "icon_template"
CONF_ID: Final = "id"
CONF_INCLUDE: Final = "include"
CONF_INTERNAL_URL: Final = "internal_url"
CONF_IP_ADDRESS: Final = "ip_address"
CONF_LATITUDE: Final = "latitude"
CONF_LEGACY_TEMPLATES: Final = "legacy_templates"
CONF_LIGHTS: Final = "lights"
CONF_LONGITUDE: Final = "longitude"
CONF_MAC: Final = "mac"
CONF_MAXIMUM: Final = "maximum"
CONF_MEDIA_DIRS: Final = "media_dirs"
CONF_METHOD: Final = "method"
CONF_MINIMUM: Final = "minimum"
CONF_MODE: Final = "mode"
CONF_MONITORED_CONDITIONS: Final = "monitored_conditions"
CONF_MONITORED_VARIABLES: Final = "monitored_variables"
CONF_NAME: Final = "name"
CONF_OFFSET: Final = "offset"
CONF_OPTIMISTIC: Final = "optimistic"
CONF_PACKAGES: Final = "packages"
CONF_PARAMS: Final = "params"
CONF_PASSWORD: Final = "password"
CONF_PATH: Final = "path"
CONF_PAYLOAD: Final = "payload"
CONF_PAYLOAD_OFF: Final = "payload_off"
CONF_PAYLOAD_ON: Final = "payload_on"
CONF_PENDING_TIME: Final = "pending_time"
CONF_PIN: Final = "pin"
CONF_PLATFORM: Final = "platform"
CONF_PORT: Final = "port"
CONF_PREFIX: Final = "prefix"
CONF_PROFILE_NAME: Final = "profile_name"
CONF_PROTOCOL: Final = "protocol"
CONF_PROXY_SSL: Final = "proxy_ssl"
CONF_QUOTE: Final = "quote"
CONF_RADIUS: Final = "radius"
CONF_RECIPIENT: Final = "recipient"
CONF_REGION: Final = "region"
CONF_REPEAT: Final = "repeat"
CONF_RESOURCE: Final = "resource"
CONF_RESOURCES: Final = "resources"
CONF_RESOURCE_TEMPLATE: Final = "resource_template"
CONF_RGB: Final = "rgb"
CONF_ROOM: Final = "room"
CONF_SCAN_INTERVAL: Final = "scan_interval"
CONF_SCENE: Final = "scene"
CONF_SELECTOR: Final = "selector"
CONF_SENDER: Final = "sender"
CONF_SENSORS: Final = "sensors"
CONF_SENSOR_TYPE: Final = "sensor_type"
CONF_SEQUENCE: Final = "sequence"
CONF_SERVICE: Final = "service"
CONF_SERVICE_DATA: Final = "data"
CONF_SERVICE_TEMPLATE: Final = "service_template"
CONF_SHOW_ON_MAP: Final = "show_on_map"
CONF_SLAVE: Final = "slave"
CONF_SOURCE: Final = "source"
CONF_SSL: Final = "ssl"
CONF_STATE: Final = "state"
CONF_STATE_TEMPLATE: Final = "state_template"
CONF_STRUCTURE: Final = "structure"
CONF_SWITCHES: Final = "switches"
CONF_TARGET: Final = "target"
CONF_TEMPERATURE_UNIT: Final = "temperature_unit"
CONF_TIMEOUT: Final = "timeout"
CONF_TIME_ZONE: Final = "time_zone"
CONF_TOKEN: Final = "token"
CONF_TRIGGER_TIME: Final = "trigger_time"
CONF_TTL: Final = "ttl"
CONF_TYPE: Final = "type"
CONF_UNIQUE_ID: Final = "unique_id"
CONF_UNIT_OF_MEASUREMENT: Final = "unit_of_measurement"
CONF_UNIT_SYSTEM: Final = "unit_system"
CONF_UNTIL: Final = "until"
CONF_URL: Final = "url"
CONF_USERNAME: Final = "username"
CONF_VALUE_TEMPLATE: Final = "value_template"
CONF_VARIABLES: Final = "variables"
CONF_VERIFY_SSL: Final = "verify_ssl"
CONF_WAIT_FOR_TRIGGER: Final = "wait_for_trigger"
CONF_WAIT_TEMPLATE: Final = "wait_template"
CONF_WEBHOOK_ID: Final = "webhook_id"
CONF_WEEKDAY: Final = "weekday"
CONF_WHILE: Final = "while"
CONF_WHITELIST: Final = "whitelist"
CONF_ALLOWLIST_EXTERNAL_DIRS: Final = "allowlist_external_dirs"
LEGACY_CONF_WHITELIST_EXTERNAL_DIRS: Final = "whitelist_external_dirs"
CONF_WHITE_VALUE: Final = "white_value"
CONF_XY: Final = "xy"
CONF_ZONE: Final = "zone"
EVENT_CALL_SERVICE: Final = "call_service"
EVENT_COMPONENT_LOADED: Final = "component_loaded"
EVENT_CORE_CONFIG_UPDATE: Final = "core_config_updated"
EVENT_HOMEASSISTANT_CLOSE: Final = "homeassistant_close"
EVENT_HOMEASSISTANT_START: Final = "homeassistant_start"
EVENT_HOMEASSISTANT_STARTED: Final = "homeassistant_started"
EVENT_HOMEASSISTANT_STOP: Final = "homeassistant_stop"
EVENT_HOMEASSISTANT_FINAL_WRITE: Final = "homeassistant_final_write"
EVENT_LOGBOOK_ENTRY: Final = "logbook_entry"
EVENT_SERVICE_REGISTERED: Final = "service_registered"
EVENT_SERVICE_REMOVED: Final = "service_removed"
EVENT_STATE_CHANGED: Final = "state_changed"
EVENT_THEMES_UPDATED: Final = "themes_updated"
EVENT_TIMER_OUT_OF_SYNC: Final = "timer_out_of_sync"
EVENT_TIME_CHANGED: Final = "time_changed"
DEVICE_CLASS_AQI: Final = "aqi"
DEVICE_CLASS_BATTERY: Final = "battery"
DEVICE_CLASS_CO: Final = "carbon_monoxide"
DEVICE_CLASS_CO2: Final = "carbon_dioxide"
DEVICE_CLASS_CURRENT: Final = "current"
DEVICE_CLASS_DATE: Final = "date"
DEVICE_CLASS_ENERGY: Final = "energy"
DEVICE_CLASS_HUMIDITY: Final = "humidity"
DEVICE_CLASS_ILLUMINANCE: Final = "illuminance"
DEVICE_CLASS_MONETARY: Final = "monetary"
DEVICE_CLASS_NITROGEN_DIOXIDE = "nitrogen_dioxide"
DEVICE_CLASS_NITROGEN_MONOXIDE = "nitrogen_monoxide"
DEVICE_CLASS_NITROUS_OXIDE = "nitrous_oxide"
DEVICE_CLASS_OZONE: Final = "ozone"
DEVICE_CLASS_POWER_FACTOR: Final = "power_factor"
DEVICE_CLASS_POWER: Final = "power"
DEVICE_CLASS_PM25: Final = "pm25"
DEVICE_CLASS_PM1: Final = "pm1"
DEVICE_CLASS_PM10: Final = "pm10"
DEVICE_CLASS_PRESSURE: Final = "pressure"
DEVICE_CLASS_SIGNAL_STRENGTH: Final = "signal_strength"
DEVICE_CLASS_SULPHUR_DIOXIDE = "sulphur_dioxide"
DEVICE_CLASS_TEMPERATURE: Final = "temperature"
DEVICE_CLASS_TIMESTAMP: Final = "timestamp"
DEVICE_CLASS_VOLTAGE: Final = "voltage"
DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS = "volatile_organic_compounds"
DEVICE_CLASS_GAS: Final = "gas"
STATE_ON: Final = "on"
STATE_OFF: Final = "off"
STATE_HOME: Final = "home"
STATE_NOT_HOME: Final = "not_home"
STATE_UNKNOWN: Final = "unknown"
STATE_OPEN: Final = "open"
STATE_OPENING: Final = "opening"
STATE_CLOSED: Final = "closed"
STATE_CLOSING: Final = "closing"
STATE_PLAYING: Final = "playing"
STATE_PAUSED: Final = "paused"
STATE_IDLE: Final = "idle"
STATE_STANDBY: Final = "standby"
STATE_ALARM_DISARMED: Final = "disarmed"
STATE_ALARM_ARMED_HOME: Final = "armed_home"
STATE_ALARM_ARMED_AWAY: Final = "armed_away"
STATE_ALARM_ARMED_NIGHT: Final = "armed_night"
STATE_ALARM_ARMED_VACATION: Final = "armed_vacation"
STATE_ALARM_ARMED_CUSTOM_BYPASS: Final = "armed_custom_bypass"
STATE_ALARM_PENDING: Final = "pending"
STATE_ALARM_ARMING: Final = "arming"
STATE_ALARM_DISARMING: Final = "disarming"
STATE_ALARM_TRIGGERED: Final = "triggered"
STATE_LOCKED: Final = "locked"
STATE_UNLOCKED: Final = "unlocked"
STATE_LOCKING: Final = "locking"
STATE_UNLOCKING: Final = "unlocking"
STATE_JAMMED: Final = "jammed"
STATE_UNAVAILABLE: Final = "unavailable"
STATE_OK: Final = "ok"
STATE_PROBLEM: Final = "problem"
ATTR_ATTRIBUTION: Final = "attribution"
ATTR_CREDENTIALS: Final = "credentials"
ATTR_NOW: Final = "now"
ATTR_DATE: Final = "date"
ATTR_TIME: Final = "time"
ATTR_SECONDS: Final = "seconds"
ATTR_DOMAIN: Final = "domain"
ATTR_SERVICE: Final = "service"
ATTR_SERVICE_DATA: Final = "service_data"
ATTR_ID: Final = "id"
ATTR_NAME: Final = "name"
ATTR_ENTITY_ID: Final = "entity_id"
ATTR_AREA_ID: Final = "area_id"
ATTR_DEVICE_ID: Final = "device_id"
ATTR_FRIENDLY_NAME: Final = "friendly_name"
ATTR_ENTITY_PICTURE: Final = "entity_picture"
ATTR_IDENTIFIERS: Final = "identifiers"
ATTR_ICON: Final = "icon"
ATTR_UNIT_OF_MEASUREMENT: Final = "unit_of_measurement"
CONF_UNIT_SYSTEM_METRIC: Final = "metric"
CONF_UNIT_SYSTEM_IMPERIAL: Final = "imperial"
ATTR_VOLTAGE: Final = "voltage"
ATTR_LOCATION: Final = "location"
ATTR_MODE: Final = "mode"
ATTR_MANUFACTURER: Final = "manufacturer"
ATTR_MODEL: Final = "model"
ATTR_SW_VERSION: Final = "sw_version"
ATTR_BATTERY_CHARGING: Final = "battery_charging"
ATTR_BATTERY_LEVEL: Final = "battery_level"
ATTR_WAKEUP: Final = "wake_up_interval"
ATTR_CODE: Final = "code"
ATTR_CODE_FORMAT: Final = "code_format"
ATTR_COMMAND: Final = "command"
ATTR_ARMED: Final = "device_armed"
ATTR_LOCKED: Final = "locked"
ATTR_TRIPPED: Final = "device_tripped"
ATTR_LAST_TRIP_TIME: Final = "last_tripped_time"
ATTR_HIDDEN: Final = "hidden"
# Location of the entity
ATTR_LATITUDE: Final = "latitude"
ATTR_LONGITUDE: Final = "longitude"
# Accuracy of location in meters
ATTR_GPS_ACCURACY: Final = "gps_accuracy"
# If state is assumed
ATTR_ASSUMED_STATE: Final = "assumed_state"
ATTR_STATE: Final = "state"
ATTR_EDITABLE: Final = "editable"
ATTR_OPTION: Final = "option"
# The entity has been restored with restore state
ATTR_RESTORED: Final = "restored"
# Bitfield of supported component features for the entity
ATTR_SUPPORTED_FEATURES: Final = "supported_features"
# Class of device within its domain
ATTR_DEVICE_CLASS: Final = "device_class"
# Temperature attribute
ATTR_TEMPERATURE: Final = "temperature"
# #### UNITS OF MEASUREMENT ####
# Power units
POWER_WATT: Final = "W"
POWER_KILO_WATT: Final = "kW"
POWER_VOLT_AMPERE: Final = "VA"
# Energy units
ENERGY_WATT_HOUR: Final = "Wh"
ENERGY_KILO_WATT_HOUR: Final = "kWh"
# Electric_current units
ELECTRIC_CURRENT_MILLIAMPERE: Final = "mA"
ELECTRIC_CURRENT_AMPERE: Final = "A"
# Electric_potential units
ELECTRIC_POTENTIAL_MILLIVOLT: Final = "mV"
ELECTRIC_POTENTIAL_VOLT: Final = "V"
# Degree units
DEGREE: Final = "°"
# Currency units
CURRENCY_EURO: Final = "€"
CURRENCY_DOLLAR: Final = "$"
CURRENCY_CENT: Final = "¢"
# Temperature units
TEMP_CELSIUS: Final = "°C"
TEMP_FAHRENHEIT: Final = "°F"
TEMP_KELVIN: Final = "K"
# Time units
TIME_MICROSECONDS: Final = "μs"
TIME_MILLISECONDS: Final = "ms"
TIME_SECONDS: Final = "s"
TIME_MINUTES: Final = "min"
TIME_HOURS: Final = "h"
TIME_DAYS: Final = "d"
TIME_WEEKS: Final = "w"
TIME_MONTHS: Final = "m"
TIME_YEARS: Final = "y"
# Length units
LENGTH_MILLIMETERS: Final = "mm"
LENGTH_CENTIMETERS: Final = "cm"
LENGTH_METERS: Final = "m"
LENGTH_KILOMETERS: Final = "km"
LENGTH_INCHES: Final = "in"
LENGTH_FEET: Final = "ft"
LENGTH_YARD: Final = "yd"
LENGTH_MILES: Final = "mi"
# Frequency units
FREQUENCY_HERTZ: Final = "Hz"
FREQUENCY_MEGAHERTZ: Final = "MHz"
FREQUENCY_GIGAHERTZ: Final = "GHz"
# Pressure units
PRESSURE_PA: Final = "Pa"
PRESSURE_HPA: Final = "hPa"
PRESSURE_KPA: Final = "kPa"
PRESSURE_BAR: Final = "bar"
PRESSURE_MBAR: Final = "mbar"
PRESSURE_INHG: Final = "inHg"
PRESSURE_PSI: Final = "psi"
# Sound pressure units
SOUND_PRESSURE_DB: Final = "dB"
SOUND_PRESSURE_WEIGHTED_DBA: Final = "dBa"
# Volume units
VOLUME_LITERS: Final = "L"
VOLUME_MILLILITERS: Final = "mL"
VOLUME_CUBIC_METERS: Final = "m³"
VOLUME_CUBIC_FEET: Final = "ft³"
VOLUME_GALLONS: Final = "gal"
VOLUME_FLUID_OUNCE: Final = "fl. oz."
# Volume Flow Rate units
VOLUME_FLOW_RATE_CUBIC_METERS_PER_HOUR: Final = "m³/h"
VOLUME_FLOW_RATE_CUBIC_FEET_PER_MINUTE: Final = "ft³/m"
# Area units
AREA_SQUARE_METERS: Final = "m²"
# Mass units
MASS_GRAMS: Final = "g"
MASS_KILOGRAMS: Final = "kg"
MASS_MILLIGRAMS: Final = "mg"
MASS_MICROGRAMS: Final = "µg"
MASS_OUNCES: Final = "oz"
MASS_POUNDS: Final = "lb"
# Conductivity units
CONDUCTIVITY: Final = "µS/cm"
# Light units
LIGHT_LUX: Final = "lx"
# UV Index units
UV_INDEX: Final = "UV index"
# Percentage units
PERCENTAGE: Final = "%"
# Irradiation units
IRRADIATION_WATTS_PER_SQUARE_METER: Final = "W/m²"
IRRADIATION_BTUS_PER_HOUR_SQUARE_FOOT: Final = "BTU/(h×ft²)"
# Precipitation units
PRECIPITATION_MILLIMETERS_PER_HOUR: Final = "mm/h"
PRECIPITATION_INCHES: Final = "in"
PRECIPITATION_INCHES_PER_HOUR: Final = "in/h"
# Concentration units
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: Final = "µg/m³"
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER: Final = "mg/m³"
CONCENTRATION_MICROGRAMS_PER_CUBIC_FOOT: Final = "μg/ft³"
CONCENTRATION_PARTS_PER_CUBIC_METER: Final = "p/m³"
CONCENTRATION_PARTS_PER_MILLION: Final = "ppm"
CONCENTRATION_PARTS_PER_BILLION: Final = "ppb"
# Speed units
SPEED_MILLIMETERS_PER_DAY: Final = "mm/d"
SPEED_INCHES_PER_DAY: Final = "in/d"
SPEED_METERS_PER_SECOND: Final = "m/s"
SPEED_INCHES_PER_HOUR: Final = "in/h"
SPEED_KILOMETERS_PER_HOUR: Final = "km/h"
SPEED_MILES_PER_HOUR: Final = "mph"
# Signal_strength units
SIGNAL_STRENGTH_DECIBELS: Final = "dB"
SIGNAL_STRENGTH_DECIBELS_MILLIWATT: Final = "dBm"
# Data units
DATA_BITS: Final = "bit"
DATA_KILOBITS: Final = "kbit"
DATA_MEGABITS: Final = "Mbit"
DATA_GIGABITS: Final = "Gbit"
DATA_BYTES: Final = "B"
DATA_KILOBYTES: Final = "kB"
DATA_MEGABYTES: Final = "MB"
DATA_GIGABYTES: Final = "GB"
DATA_TERABYTES: Final = "TB"
DATA_PETABYTES: Final = "PB"
DATA_EXABYTES: Final = "EB"
DATA_ZETTABYTES: Final = "ZB"
DATA_YOTTABYTES: Final = "YB"
DATA_KIBIBYTES: Final = "KiB"
DATA_MEBIBYTES: Final = "MiB"
DATA_GIBIBYTES: Final = "GiB"
DATA_TEBIBYTES: Final = "TiB"
DATA_PEBIBYTES: Final = "PiB"
DATA_EXBIBYTES: Final = "EiB"
DATA_ZEBIBYTES: Final = "ZiB"
DATA_YOBIBYTES: Final = "YiB"
# Data_rate units
DATA_RATE_BITS_PER_SECOND: Final = "bit/s"
DATA_RATE_KILOBITS_PER_SECOND: Final = "kbit/s"
DATA_RATE_MEGABITS_PER_SECOND: Final = "Mbit/s"
DATA_RATE_GIGABITS_PER_SECOND: Final = "Gbit/s"
DATA_RATE_BYTES_PER_SECOND: Final = "B/s"
DATA_RATE_KILOBYTES_PER_SECOND: Final = "kB/s"
DATA_RATE_MEGABYTES_PER_SECOND: Final = "MB/s"
DATA_RATE_GIGABYTES_PER_SECOND: Final = "GB/s"
DATA_RATE_KIBIBYTES_PER_SECOND: Final = "KiB/s"
DATA_RATE_MEBIBYTES_PER_SECOND: Final = "MiB/s"
DATA_RATE_GIBIBYTES_PER_SECOND: Final = "GiB/s"
# #### SERVICES ####
SERVICE_HOMEASSISTANT_STOP: Final = "stop"
SERVICE_HOMEASSISTANT_RESTART: Final = "restart"
SERVICE_TURN_ON: Final = "turn_on"
SERVICE_TURN_OFF: Final = "turn_off"
SERVICE_TOGGLE: Final = "toggle"
SERVICE_RELOAD: Final = "reload"
SERVICE_VOLUME_UP: Final = "volume_up"
SERVICE_VOLUME_DOWN: Final = "volume_down"
SERVICE_VOLUME_MUTE: Final = "volume_mute"
SERVICE_VOLUME_SET: Final = "volume_set"
SERVICE_MEDIA_PLAY_PAUSE: Final = "media_play_pause"
SERVICE_MEDIA_PLAY: Final = "media_play"
SERVICE_MEDIA_PAUSE: Final = "media_pause"
SERVICE_MEDIA_STOP: Final = "media_stop"
SERVICE_MEDIA_NEXT_TRACK: Final = "media_next_track"
SERVICE_MEDIA_PREVIOUS_TRACK: Final = "media_previous_track"
SERVICE_MEDIA_SEEK: Final = "media_seek"
SERVICE_REPEAT_SET: Final = "repeat_set"
SERVICE_SHUFFLE_SET: Final = "shuffle_set"
SERVICE_ALARM_DISARM: Final = "alarm_disarm"
SERVICE_ALARM_ARM_HOME: Final = "alarm_arm_home"
SERVICE_ALARM_ARM_AWAY: Final = "alarm_arm_away"
SERVICE_ALARM_ARM_NIGHT: Final = "alarm_arm_night"
SERVICE_ALARM_ARM_VACATION: Final = "alarm_arm_vacation"
SERVICE_ALARM_ARM_CUSTOM_BYPASS: Final = "alarm_arm_custom_bypass"
SERVICE_ALARM_TRIGGER: Final = "alarm_trigger"
SERVICE_LOCK: Final = "lock"
SERVICE_UNLOCK: Final = "unlock"
SERVICE_OPEN: Final = "open"
SERVICE_CLOSE: Final = "close"
SERVICE_CLOSE_COVER: Final = "close_cover"
SERVICE_CLOSE_COVER_TILT: Final = "close_cover_tilt"
SERVICE_OPEN_COVER: Final = "open_cover"
SERVICE_OPEN_COVER_TILT: Final = "open_cover_tilt"
SERVICE_SAVE_PERSISTENT_STATES: Final = "save_persistent_states"
SERVICE_SET_COVER_POSITION: Final = "set_cover_position"
SERVICE_SET_COVER_TILT_POSITION: Final = "set_cover_tilt_position"
SERVICE_STOP_COVER: Final = "stop_cover"
SERVICE_STOP_COVER_TILT: Final = "stop_cover_tilt"
SERVICE_TOGGLE_COVER_TILT: Final = "toggle_cover_tilt"
SERVICE_SELECT_OPTION: Final = "select_option"
# #### API / REMOTE ####
SERVER_PORT: Final = 8123
URL_ROOT: Final = "/"
URL_API: Final = "/api/"
URL_API_STREAM: Final = "/api/stream"
URL_API_CONFIG: Final = "/api/config"
URL_API_DISCOVERY_INFO: Final = "/api/discovery_info"
URL_API_STATES: Final = "/api/states"
URL_API_STATES_ENTITY: Final = "/api/states/{}"
URL_API_EVENTS: Final = "/api/events"
URL_API_EVENTS_EVENT: Final = "/api/events/{}"
URL_API_SERVICES: Final = "/api/services"
URL_API_SERVICES_SERVICE: Final = "/api/services/{}/{}"
URL_API_COMPONENTS: Final = "/api/components"
URL_API_ERROR_LOG: Final = "/api/error_log"
URL_API_LOG_OUT: Final = "/api/log_out"
URL_API_TEMPLATE: Final = "/api/template"
HTTP_OK: Final = 200
HTTP_CREATED: Final = 201
HTTP_ACCEPTED: Final = 202
HTTP_MOVED_PERMANENTLY: Final = 301
HTTP_BAD_REQUEST: Final = 400
HTTP_UNAUTHORIZED: Final = 401
HTTP_FORBIDDEN: Final = 403
HTTP_NOT_FOUND: Final = 404
HTTP_METHOD_NOT_ALLOWED: Final = 405
HTTP_UNPROCESSABLE_ENTITY: Final = 422
HTTP_TOO_MANY_REQUESTS: Final = 429
HTTP_INTERNAL_SERVER_ERROR: Final = 500
HTTP_BAD_GATEWAY: Final = 502
HTTP_SERVICE_UNAVAILABLE: Final = 503
HTTP_BASIC_AUTHENTICATION: Final = "basic"
HTTP_BEARER_AUTHENTICATION: Final = "bearer_token"
HTTP_DIGEST_AUTHENTICATION: Final = "digest"
HTTP_HEADER_X_REQUESTED_WITH: Final = "X-Requested-With"
CONTENT_TYPE_JSON: Final = "application/json"
CONTENT_TYPE_MULTIPART: Final = "multipart/x-mixed-replace; boundary={}"
CONTENT_TYPE_TEXT_PLAIN: Final = "text/plain"
# The exit code to send to request a restart
RESTART_EXIT_CODE: Final = 100
UNIT_NOT_RECOGNIZED_TEMPLATE: Final = "{} is not a recognized {} unit."
LENGTH: Final = "length"
MASS: Final = "mass"
PRESSURE: Final = "pressure"
VOLUME: Final = "volume"
TEMPERATURE: Final = "temperature"
SPEED_MS: Final = "speed_ms"
ILLUMINANCE: Final = "illuminance"
WEEKDAYS: Final[list[str]] = ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]
# The degree of precision for platforms
PRECISION_WHOLE: Final = 1
PRECISION_HALVES: Final = 0.5
PRECISION_TENTHS: Final = 0.1
# Static list of entities that will never be exposed to
# cloud, alexa, or google_home components
CLOUD_NEVER_EXPOSED_ENTITIES: Final[list[str]] = ["group.all_locks"]
# The ID of the Home Assistant Cast App
CAST_APP_ID_HOMEASSISTANT: Final = "B12CE3CA"
ENTITY_CATEGORY_CONFIG: Final = "config"
ENTITY_CATEGORY_DIAGNOSTIC: Final = "diagnostic"
| true | true |
1c302afc00f8455b99cd49949b9ddea989f339e9 | 12,495 | py | Python | cinder/backup/drivers/swift.py | alexpilotti/cinder-ci-fixes | c0ed2ab8cc6b1197e426cd6c58c3b582624d1cfd | [
"Apache-2.0"
] | null | null | null | cinder/backup/drivers/swift.py | alexpilotti/cinder-ci-fixes | c0ed2ab8cc6b1197e426cd6c58c3b582624d1cfd | [
"Apache-2.0"
] | null | null | null | cinder/backup/drivers/swift.py | alexpilotti/cinder-ci-fixes | c0ed2ab8cc6b1197e426cd6c58c3b582624d1cfd | [
"Apache-2.0"
] | null | null | null | # Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2014 TrilioData, Inc
# Copyright (c) 2015 EMC Corporation
# Copyright (C) 2015 Kevin Fox <kevin@efox.cc>
# Copyright (C) 2015 Tom Barron <tpb@dyncloud.net>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of a backup service that uses Swift as the backend
**Related Flags**
:backup_swift_url: The URL of the Swift endpoint (default: None, use catalog).
:swift_catalog_info: Info to match when looking for swift in the service
                     catalog.
:backup_swift_object_size: The size in bytes of the Swift objects used
for volume backups (default: 52428800).
:backup_swift_retry_attempts: The number of retries to make for Swift
operations (default: 10).
:backup_swift_retry_backoff: The backoff time in seconds between retrying
failed Swift operations (default: 10).
:backup_compression_algorithm: Compression algorithm to use for volume
backups. Supported options are:
None (to disable), zlib and bz2 (default: zlib)
"""
import hashlib
import socket
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import timeutils
import six
from swiftclient import client as swift
from cinder.backup import chunkeddriver
from cinder import exception
from cinder.i18n import _
from cinder.i18n import _LE
LOG = logging.getLogger(__name__)
# Configuration options this driver registers; values are read from the
# [DEFAULT] section of cinder.conf at driver construction time.
swiftbackup_service_opts = [
    cfg.StrOpt('backup_swift_url',
               default=None,
               help='The URL of the Swift endpoint'),
    cfg.StrOpt('swift_catalog_info',
               default='object-store:swift:publicURL',
               help='Info to match when looking for swift in the service '
                    'catalog. Format is: separated values of the form: '
                    '<service_type>:<service_name>:<endpoint_type> - '
                    'Only used if backup_swift_url is unset'),
    cfg.StrOpt('backup_swift_auth',
               default='per_user',
               help='Swift authentication mechanism'),
    cfg.StrOpt('backup_swift_auth_version',
               default='1',
               help='Swift authentication version. Specify "1" for auth 1.0'
                    ', or "2" for auth 2.0'),
    cfg.StrOpt('backup_swift_tenant',
               default=None,
               help='Swift tenant/account name. Required when connecting'
                    ' to an auth 2.0 system'),
    cfg.StrOpt('backup_swift_user',
               default=None,
               help='Swift user name'),
    cfg.StrOpt('backup_swift_key',
               default=None,
               help='Swift key for authentication'),
    cfg.StrOpt('backup_swift_container',
               default='volumebackups',
               help='The default Swift container to use'),
    cfg.IntOpt('backup_swift_object_size',
               default=52428800,
               help='The size in bytes of Swift backup objects'),
    cfg.IntOpt('backup_swift_block_size',
               default=32768,
               help='The size in bytes that changes are tracked '
                    'for incremental backups. backup_swift_object_size '
                    'has to be multiple of backup_swift_block_size.'),
    cfg.IntOpt('backup_swift_retry_attempts',
               default=3,
               help='The number of retries to make for Swift operations'),
    cfg.IntOpt('backup_swift_retry_backoff',
               default=2,
               help='The backoff time in seconds between Swift retries'),
    cfg.BoolOpt('backup_swift_enable_progress_timer',
                default=True,
                help='Enable or Disable the timer to send the periodic '
                     'progress notifications to Ceilometer when backing '
                     'up the volume to the Swift backend storage. The '
                     'default value is True to enable the timer.'),
]
# Register on the global config object so CONF.<opt> works below.
CONF = cfg.CONF
CONF.register_opts(swiftbackup_service_opts)
class SwiftBackupDriver(chunkeddriver.ChunkedBackupDriver):
    """Provides backup, restore and delete of backup objects within Swift."""

    def __init__(self, context, db_driver=None):
        """Initialize the driver and open a Swift connection.

        The Swift endpoint is taken from 'backup_swift_url' when set;
        otherwise it is looked up in the request context's service
        catalog using 'swift_catalog_info'.

        :param context: request context (auth token, service catalog).
        :param db_driver: optional DB driver forwarded to the base class.
        :raises exception.BackupDriverException: when no endpoint can be
            determined or 'swift_catalog_info' is malformed.
        :raises exception.ParameterNotFound: when 'single_user' auth is
            configured without 'backup_swift_user'.
        """
        chunk_size_bytes = CONF.backup_swift_object_size
        sha_block_size_bytes = CONF.backup_swift_block_size
        backup_default_container = CONF.backup_swift_container
        enable_progress_timer = CONF.backup_swift_enable_progress_timer
        super(SwiftBackupDriver, self).__init__(context, chunk_size_bytes,
                                                sha_block_size_bytes,
                                                backup_default_container,
                                                enable_progress_timer,
                                                db_driver)
        if CONF.backup_swift_url is None:
            self.swift_url = None
            info = CONF.swift_catalog_info
            try:
                # service_name is unpacked only to validate the
                # three-field format; it is not used below.
                service_type, service_name, endpoint_type = info.split(':')
            except ValueError:
                raise exception.BackupDriverException(_(
                    "Failed to parse the configuration option "
                    "'swift_catalog_info', must be in the form "
                    "<service_type>:<service_name>:<endpoint_type>"))
            for entry in context.service_catalog:
                if entry.get('type') == service_type:
                    # NOTE(review): if several catalog entries match, the
                    # last match wins and only the first endpoint is used.
                    self.swift_url = entry.get(
                        'endpoints')[0].get(endpoint_type)
        else:
            self.swift_url = '%s%s' % (CONF.backup_swift_url,
                                       context.project_id)
        if self.swift_url is None:
            raise exception.BackupDriverException(_(
                "Could not determine which Swift endpoint to use. This can "
                " either be set in the service catalog or with the "
                " cinder.conf config option 'backup_swift_url'."))
        LOG.debug("Using swift URL %s", self.swift_url)
        self.swift_attempts = CONF.backup_swift_retry_attempts
        self.swift_backoff = CONF.backup_swift_retry_backoff
        LOG.debug('Connect to %s in "%s" mode', CONF.backup_swift_url,
                  CONF.backup_swift_auth)
        if CONF.backup_swift_auth == 'single_user':
            if CONF.backup_swift_user is None:
                LOG.error(_LE("single_user auth mode enabled, "
                              "but %(param)s not set"),
                          {'param': 'backup_swift_user'})
                raise exception.ParameterNotFound(param='backup_swift_user')
            self.conn = swift.Connection(
                authurl=CONF.backup_swift_url,
                auth_version=CONF.backup_swift_auth_version,
                tenant_name=CONF.backup_swift_tenant,
                user=CONF.backup_swift_user,
                key=CONF.backup_swift_key,
                retries=self.swift_attempts,
                starting_backoff=self.swift_backoff)
        else:
            # Per-user mode: reuse the caller's token against the
            # endpoint resolved above.
            self.conn = swift.Connection(retries=self.swift_attempts,
                                         preauthurl=self.swift_url,
                                         preauthtoken=self.context.auth_token,
                                         starting_backoff=self.swift_backoff)

    class SwiftObjectWriter:
        """Buffers chunk data in memory and uploads it on close()."""

        def __init__(self, container, object_name, conn):
            self.container = container
            self.object_name = object_name
            self.conn = conn
            self.data = ''

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            self.close()

        def write(self, data):
            # Data is only sent to Swift when close() is called.
            self.data += data

        def close(self):
            """Upload the buffered data and verify the stored MD5.

            :returns: hex MD5 digest of the uploaded data.
            :raises exception.SwiftConnectionFailed: on socket errors.
            :raises exception.InvalidBackup: when Swift's etag does not
                match the locally computed MD5.
            """
            reader = six.StringIO(self.data)
            try:
                # BUGFIX: compute the length from the buffered string;
                # StringIO objects do not reliably expose a '.len'
                # attribute.
                etag = self.conn.put_object(self.container, self.object_name,
                                            reader,
                                            content_length=len(self.data))
            except socket.error as err:
                raise exception.SwiftConnectionFailed(reason=err)
            LOG.debug('swift MD5 for %(object_name)s: %(etag)s',
                      {'object_name': self.object_name, 'etag': etag, })
            md5 = hashlib.md5(self.data).hexdigest()
            LOG.debug('backup MD5 for %(object_name)s: %(md5)s',
                      {'object_name': self.object_name, 'md5': md5})
            if etag != md5:
                # BUGFIX: interpolate the message with '%'; the previous
                # code built a (message, dict) tuple by mistake, so the
                # reason passed to InvalidBackup was not a string.
                err = _('error writing object to swift, MD5 of object in '
                        'swift %(etag)s is not the same as MD5 of object sent '
                        'to swift %(md5)s') % {'etag': etag, 'md5': md5}
                raise exception.InvalidBackup(reason=err)
            return md5

    class SwiftObjectReader:
        """Reads a whole backup object from Swift."""

        def __init__(self, container, object_name, conn):
            self.container = container
            self.object_name = object_name
            self.conn = conn

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            pass

        def read(self):
            try:
                (_resp, body) = self.conn.get_object(self.container,
                                                     self.object_name)
            except socket.error as err:
                raise exception.SwiftConnectionFailed(reason=err)
            return body

    def put_container(self, container):
        """Create the container if needed. No failure if it pre-exists."""
        try:
            self.conn.put_container(container)
        except socket.error as err:
            raise exception.SwiftConnectionFailed(reason=err)
        return

    def get_container_entries(self, container, prefix):
        """Get container entry names that match *prefix*."""
        try:
            swift_objects = self.conn.get_container(container,
                                                    prefix=prefix,
                                                    full_listing=True)[1]
        except socket.error as err:
            raise exception.SwiftConnectionFailed(reason=err)
        swift_object_names = [swift_obj['name'] for swift_obj in swift_objects]
        return swift_object_names

    def get_object_writer(self, container, object_name, extra_metadata=None):
        """Return a writer that stores one chunk of volume data in Swift."""
        return self.SwiftObjectWriter(container, object_name, self.conn)

    def get_object_reader(self, container, object_name, extra_metadata=None):
        """Return a reader that retrieves one chunk of backup data."""
        return self.SwiftObjectReader(container, object_name, self.conn)

    def delete_object(self, container, object_name):
        """Delete a backup object from the Swift object store."""
        try:
            self.conn.delete_object(container, object_name)
        except socket.error as err:
            raise exception.SwiftConnectionFailed(reason=err)

    def _generate_object_name_prefix(self, backup):
        """Generate a prefix of the form
        volume_<vol_id>/<timestamp>/az_<az>_backup_<backup_id>.
        """
        az = 'az_%s' % self.az
        backup_name = '%s_backup_%s' % (az, backup['id'])
        volume = 'volume_%s' % (backup['volume_id'])
        timestamp = timeutils.strtime(fmt="%Y%m%d%H%M%S")
        prefix = volume + '/' + timestamp + '/' + backup_name
        LOG.debug('generate_object_name_prefix: %s', prefix)
        return prefix

    def update_container_name(self, backup, container):
        """Use the container name as provided - don't update."""
        return container

    def get_extra_metadata(self, backup, volume):
        """Swift driver does not use any extra metadata."""
        return None
def get_backup_driver(context):
    """Return a Swift backup driver instance for the given context."""
    driver = SwiftBackupDriver(context)
    return driver
| 43.842105 | 82 | 0.595118 |
import hashlib
import socket
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import timeutils
import six
from swiftclient import client as swift
from cinder.backup import chunkeddriver
from cinder import exception
from cinder.i18n import _
from cinder.i18n import _LE
LOG = logging.getLogger(__name__)
swiftbackup_service_opts = [
cfg.StrOpt('backup_swift_url',
default=None,
help='The URL of the Swift endpoint'),
cfg.StrOpt('swift_catalog_info',
default='object-store:swift:publicURL',
help='Info to match when looking for swift in the service '
'catalog. Format is: separated values of the form: '
'<service_type>:<service_name>:<endpoint_type> - '
'Only used if backup_swift_url is unset'),
cfg.StrOpt('backup_swift_auth',
default='per_user',
help='Swift authentication mechanism'),
cfg.StrOpt('backup_swift_auth_version',
default='1',
help='Swift authentication version. Specify "1" for auth 1.0'
', or "2" for auth 2.0'),
cfg.StrOpt('backup_swift_tenant',
default=None,
help='Swift tenant/account name. Required when connecting'
' to an auth 2.0 system'),
cfg.StrOpt('backup_swift_user',
default=None,
help='Swift user name'),
cfg.StrOpt('backup_swift_key',
default=None,
help='Swift key for authentication'),
cfg.StrOpt('backup_swift_container',
default='volumebackups',
help='The default Swift container to use'),
cfg.IntOpt('backup_swift_object_size',
default=52428800,
help='The size in bytes of Swift backup objects'),
cfg.IntOpt('backup_swift_block_size',
default=32768,
help='The size in bytes that changes are tracked '
'for incremental backups. backup_swift_object_size '
'has to be multiple of backup_swift_block_size.'),
cfg.IntOpt('backup_swift_retry_attempts',
default=3,
help='The number of retries to make for Swift operations'),
cfg.IntOpt('backup_swift_retry_backoff',
default=2,
help='The backoff time in seconds between Swift retries'),
cfg.BoolOpt('backup_swift_enable_progress_timer',
default=True,
help='Enable or Disable the timer to send the periodic '
'progress notifications to Ceilometer when backing '
'up the volume to the Swift backend storage. The '
'default value is True to enable the timer.'),
]
CONF = cfg.CONF
CONF.register_opts(swiftbackup_service_opts)
class SwiftBackupDriver(chunkeddriver.ChunkedBackupDriver):
    """Provides backup, restore and delete of backup objects within Swift."""

    def __init__(self, context, db_driver=None):
        """Initialize the driver and open a Swift connection.

        The Swift endpoint is taken from 'backup_swift_url' when set;
        otherwise it is looked up in the request context's service
        catalog using 'swift_catalog_info'.

        :param context: request context (auth token, service catalog).
        :param db_driver: optional DB driver forwarded to the base class.
        :raises exception.BackupDriverException: when no endpoint can be
            determined or 'swift_catalog_info' is malformed.
        :raises exception.ParameterNotFound: when 'single_user' auth is
            configured without 'backup_swift_user'.
        """
        chunk_size_bytes = CONF.backup_swift_object_size
        sha_block_size_bytes = CONF.backup_swift_block_size
        backup_default_container = CONF.backup_swift_container
        enable_progress_timer = CONF.backup_swift_enable_progress_timer
        super(SwiftBackupDriver, self).__init__(context, chunk_size_bytes,
                                                sha_block_size_bytes,
                                                backup_default_container,
                                                enable_progress_timer,
                                                db_driver)
        if CONF.backup_swift_url is None:
            self.swift_url = None
            info = CONF.swift_catalog_info
            try:
                # service_name is unpacked only to validate the
                # three-field format; it is not used below.
                service_type, service_name, endpoint_type = info.split(':')
            except ValueError:
                raise exception.BackupDriverException(_(
                    "Failed to parse the configuration option "
                    "'swift_catalog_info', must be in the form "
                    "<service_type>:<service_name>:<endpoint_type>"))
            for entry in context.service_catalog:
                if entry.get('type') == service_type:
                    # NOTE(review): if several catalog entries match, the
                    # last match wins and only the first endpoint is used.
                    self.swift_url = entry.get(
                        'endpoints')[0].get(endpoint_type)
        else:
            self.swift_url = '%s%s' % (CONF.backup_swift_url,
                                       context.project_id)
        if self.swift_url is None:
            raise exception.BackupDriverException(_(
                "Could not determine which Swift endpoint to use. This can "
                " either be set in the service catalog or with the "
                " cinder.conf config option 'backup_swift_url'."))
        LOG.debug("Using swift URL %s", self.swift_url)
        self.swift_attempts = CONF.backup_swift_retry_attempts
        self.swift_backoff = CONF.backup_swift_retry_backoff
        LOG.debug('Connect to %s in "%s" mode', CONF.backup_swift_url,
                  CONF.backup_swift_auth)
        if CONF.backup_swift_auth == 'single_user':
            if CONF.backup_swift_user is None:
                LOG.error(_LE("single_user auth mode enabled, "
                              "but %(param)s not set"),
                          {'param': 'backup_swift_user'})
                raise exception.ParameterNotFound(param='backup_swift_user')
            self.conn = swift.Connection(
                authurl=CONF.backup_swift_url,
                auth_version=CONF.backup_swift_auth_version,
                tenant_name=CONF.backup_swift_tenant,
                user=CONF.backup_swift_user,
                key=CONF.backup_swift_key,
                retries=self.swift_attempts,
                starting_backoff=self.swift_backoff)
        else:
            # Per-user mode: reuse the caller's token against the
            # endpoint resolved above.
            self.conn = swift.Connection(retries=self.swift_attempts,
                                         preauthurl=self.swift_url,
                                         preauthtoken=self.context.auth_token,
                                         starting_backoff=self.swift_backoff)

    class SwiftObjectWriter:
        """Buffers chunk data in memory and uploads it on close()."""

        def __init__(self, container, object_name, conn):
            self.container = container
            self.object_name = object_name
            self.conn = conn
            self.data = ''

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            self.close()

        def write(self, data):
            # Data is only sent to Swift when close() is called.
            self.data += data

        def close(self):
            """Upload the buffered data and verify the stored MD5.

            :returns: hex MD5 digest of the uploaded data.
            :raises exception.SwiftConnectionFailed: on socket errors.
            :raises exception.InvalidBackup: when Swift's etag does not
                match the locally computed MD5.
            """
            reader = six.StringIO(self.data)
            try:
                # BUGFIX: compute the length from the buffered string;
                # StringIO objects do not reliably expose a '.len'
                # attribute.
                etag = self.conn.put_object(self.container, self.object_name,
                                            reader,
                                            content_length=len(self.data))
            except socket.error as err:
                raise exception.SwiftConnectionFailed(reason=err)
            LOG.debug('swift MD5 for %(object_name)s: %(etag)s',
                      {'object_name': self.object_name, 'etag': etag, })
            md5 = hashlib.md5(self.data).hexdigest()
            LOG.debug('backup MD5 for %(object_name)s: %(md5)s',
                      {'object_name': self.object_name, 'md5': md5})
            if etag != md5:
                # BUGFIX: interpolate the message with '%'; the previous
                # code built a (message, dict) tuple by mistake, so the
                # reason passed to InvalidBackup was not a string.
                err = _('error writing object to swift, MD5 of object in '
                        'swift %(etag)s is not the same as MD5 of object sent '
                        'to swift %(md5)s') % {'etag': etag, 'md5': md5}
                raise exception.InvalidBackup(reason=err)
            return md5

    class SwiftObjectReader:
        """Reads a whole backup object from Swift."""

        def __init__(self, container, object_name, conn):
            self.container = container
            self.object_name = object_name
            self.conn = conn

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            pass

        def read(self):
            try:
                (_resp, body) = self.conn.get_object(self.container,
                                                     self.object_name)
            except socket.error as err:
                raise exception.SwiftConnectionFailed(reason=err)
            return body

    def put_container(self, container):
        """Create the container if needed. No failure if it pre-exists."""
        try:
            self.conn.put_container(container)
        except socket.error as err:
            raise exception.SwiftConnectionFailed(reason=err)
        return

    def get_container_entries(self, container, prefix):
        """Get container entry names that match *prefix*."""
        try:
            swift_objects = self.conn.get_container(container,
                                                    prefix=prefix,
                                                    full_listing=True)[1]
        except socket.error as err:
            raise exception.SwiftConnectionFailed(reason=err)
        swift_object_names = [swift_obj['name'] for swift_obj in swift_objects]
        return swift_object_names

    def get_object_writer(self, container, object_name, extra_metadata=None):
        """Return a writer that stores one chunk of volume data in Swift."""
        return self.SwiftObjectWriter(container, object_name, self.conn)

    def get_object_reader(self, container, object_name, extra_metadata=None):
        """Return a reader that retrieves one chunk of backup data."""
        return self.SwiftObjectReader(container, object_name, self.conn)

    def delete_object(self, container, object_name):
        """Delete a backup object from the Swift object store."""
        try:
            self.conn.delete_object(container, object_name)
        except socket.error as err:
            raise exception.SwiftConnectionFailed(reason=err)

    def _generate_object_name_prefix(self, backup):
        """Generate a prefix of the form
        volume_<vol_id>/<timestamp>/az_<az>_backup_<backup_id>.
        """
        az = 'az_%s' % self.az
        backup_name = '%s_backup_%s' % (az, backup['id'])
        volume = 'volume_%s' % (backup['volume_id'])
        timestamp = timeutils.strtime(fmt="%Y%m%d%H%M%S")
        prefix = volume + '/' + timestamp + '/' + backup_name
        LOG.debug('generate_object_name_prefix: %s', prefix)
        return prefix

    def update_container_name(self, backup, container):
        """Use the container name as provided - don't update."""
        return container

    def get_extra_metadata(self, backup, volume):
        """Swift driver does not use any extra metadata."""
        return None
def get_backup_driver(context):
    """Return a Swift backup driver instance for the given context."""
    driver = SwiftBackupDriver(context)
    return driver
| true | true |
1c302b36a64e07516c35eec3aeae4bd8e31bf0eb | 14,805 | py | Python | sdk/python/pulumi_azure_native/redhatopenshift/v20200430/_inputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/redhatopenshift/v20200430/_inputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/redhatopenshift/v20200430/_inputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'APIServerProfileArgs',
'ClusterProfileArgs',
'ConsoleProfileArgs',
'IngressProfileArgs',
'MasterProfileArgs',
'NetworkProfileArgs',
'ServicePrincipalProfileArgs',
'WorkerProfileArgs',
]
@pulumi.input_type
class APIServerProfileArgs:
    def __init__(__self__, *,
                 ip: Optional[pulumi.Input[str]] = None,
                 url: Optional[pulumi.Input[str]] = None,
                 visibility: Optional[pulumi.Input[str]] = None):
        """
        APIServerProfile represents an API server profile.
        :param pulumi.Input[str] ip: The IP of the cluster API server (immutable).
        :param pulumi.Input[str] url: The URL to access the cluster API server (immutable).
        :param pulumi.Input[str] visibility: API server visibility (immutable).
        """
        # Only set the properties that were actually supplied.
        for prop_name, prop_value in (('ip', ip),
                                      ('url', url),
                                      ('visibility', visibility)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def ip(self) -> Optional[pulumi.Input[str]]:
        """The IP of the cluster API server (immutable)."""
        return pulumi.get(self, "ip")

    @ip.setter
    def ip(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ip", value)

    @property
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[str]]:
        """The URL to access the cluster API server (immutable)."""
        return pulumi.get(self, "url")

    @url.setter
    def url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url", value)

    @property
    @pulumi.getter
    def visibility(self) -> Optional[pulumi.Input[str]]:
        """API server visibility (immutable)."""
        return pulumi.get(self, "visibility")

    @visibility.setter
    def visibility(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "visibility", value)
@pulumi.input_type
class ClusterProfileArgs:
    def __init__(__self__, *,
                 domain: Optional[pulumi.Input[str]] = None,
                 pull_secret: Optional[pulumi.Input[str]] = None,
                 resource_group_id: Optional[pulumi.Input[str]] = None,
                 version: Optional[pulumi.Input[str]] = None):
        """
        ClusterProfile represents a cluster profile.
        :param pulumi.Input[str] domain: The domain for the cluster (immutable).
        :param pulumi.Input[str] pull_secret: The pull secret for the cluster (immutable).
        :param pulumi.Input[str] resource_group_id: The ID of the cluster resource group (immutable).
        :param pulumi.Input[str] version: The version of the cluster (immutable).
        """
        # Only set the properties that were actually supplied.
        for prop_name, prop_value in (('domain', domain),
                                      ('pull_secret', pull_secret),
                                      ('resource_group_id', resource_group_id),
                                      ('version', version)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def domain(self) -> Optional[pulumi.Input[str]]:
        """The domain for the cluster (immutable)."""
        return pulumi.get(self, "domain")

    @domain.setter
    def domain(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "domain", value)

    @property
    @pulumi.getter(name="pullSecret")
    def pull_secret(self) -> Optional[pulumi.Input[str]]:
        """The pull secret for the cluster (immutable)."""
        return pulumi.get(self, "pull_secret")

    @pull_secret.setter
    def pull_secret(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "pull_secret", value)

    @property
    @pulumi.getter(name="resourceGroupId")
    def resource_group_id(self) -> Optional[pulumi.Input[str]]:
        """The ID of the cluster resource group (immutable)."""
        return pulumi.get(self, "resource_group_id")

    @resource_group_id.setter
    def resource_group_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_id", value)

    @property
    @pulumi.getter
    def version(self) -> Optional[pulumi.Input[str]]:
        """The version of the cluster (immutable)."""
        return pulumi.get(self, "version")

    @version.setter
    def version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "version", value)
@pulumi.input_type
class ConsoleProfileArgs:
    def __init__(__self__, *,
                 url: Optional[pulumi.Input[str]] = None):
        """
        ConsoleProfile represents a console profile.
        :param pulumi.Input[str] url: The URL to access the cluster console (immutable).
        """
        # Leave the property unset when no value was supplied.
        if url is None:
            return
        pulumi.set(__self__, "url", url)

    @property
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[str]]:
        """The URL to access the cluster console (immutable)."""
        return pulumi.get(self, "url")

    @url.setter
    def url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url", value)
@pulumi.input_type
class IngressProfileArgs:
    def __init__(__self__, *,
                 ip: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 visibility: Optional[pulumi.Input[str]] = None):
        """
        IngressProfile represents an ingress profile.
        :param pulumi.Input[str] ip: The IP of the ingress (immutable).
        :param pulumi.Input[str] name: The ingress profile name. Must be "default" (immutable).
        :param pulumi.Input[str] visibility: Ingress visibility (immutable).
        """
        # Only set the properties that were actually supplied.
        for prop_name, prop_value in (('ip', ip),
                                      ('name', name),
                                      ('visibility', visibility)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def ip(self) -> Optional[pulumi.Input[str]]:
        """The IP of the ingress (immutable)."""
        return pulumi.get(self, "ip")

    @ip.setter
    def ip(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ip", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """The ingress profile name. Must be "default" (immutable)."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def visibility(self) -> Optional[pulumi.Input[str]]:
        """Ingress visibility (immutable)."""
        return pulumi.get(self, "visibility")

    @visibility.setter
    def visibility(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "visibility", value)
@pulumi.input_type
class MasterProfileArgs:
    def __init__(__self__, *,
                 subnet_id: Optional[pulumi.Input[str]] = None,
                 vm_size: Optional[pulumi.Input[str]] = None):
        """
        MasterProfile represents a master profile.
        :param pulumi.Input[str] subnet_id: The Azure resource ID of the master subnet (immutable).
        :param pulumi.Input[str] vm_size: The size of the master VMs (immutable).
        """
        # Only set the properties that were actually supplied.
        for prop_name, prop_value in (('subnet_id', subnet_id),
                                      ('vm_size', vm_size)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> Optional[pulumi.Input[str]]:
        """The Azure resource ID of the master subnet (immutable)."""
        return pulumi.get(self, "subnet_id")

    @subnet_id.setter
    def subnet_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subnet_id", value)

    @property
    @pulumi.getter(name="vmSize")
    def vm_size(self) -> Optional[pulumi.Input[str]]:
        """The size of the master VMs (immutable)."""
        return pulumi.get(self, "vm_size")

    @vm_size.setter
    def vm_size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vm_size", value)
@pulumi.input_type
class NetworkProfileArgs:
    def __init__(__self__, *,
                 pod_cidr: Optional[pulumi.Input[str]] = None,
                 service_cidr: Optional[pulumi.Input[str]] = None):
        """
        NetworkProfile represents a network profile.
        :param pulumi.Input[str] pod_cidr: The CIDR used for OpenShift/Kubernetes Pods (immutable).
        :param pulumi.Input[str] service_cidr: The CIDR used for OpenShift/Kubernetes Services (immutable).
        """
        # Only set the properties that were actually supplied.
        for prop_name, prop_value in (('pod_cidr', pod_cidr),
                                      ('service_cidr', service_cidr)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="podCidr")
    def pod_cidr(self) -> Optional[pulumi.Input[str]]:
        """The CIDR used for OpenShift/Kubernetes Pods (immutable)."""
        return pulumi.get(self, "pod_cidr")

    @pod_cidr.setter
    def pod_cidr(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "pod_cidr", value)

    @property
    @pulumi.getter(name="serviceCidr")
    def service_cidr(self) -> Optional[pulumi.Input[str]]:
        """The CIDR used for OpenShift/Kubernetes Services (immutable)."""
        return pulumi.get(self, "service_cidr")

    @service_cidr.setter
    def service_cidr(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "service_cidr", value)
@pulumi.input_type
class ServicePrincipalProfileArgs:
    def __init__(__self__, *,
                 client_id: Optional[pulumi.Input[str]] = None,
                 client_secret: Optional[pulumi.Input[str]] = None):
        """
        ServicePrincipalProfile represents a service principal profile.
        :param pulumi.Input[str] client_id: The client ID used for the cluster (immutable).
        :param pulumi.Input[str] client_secret: The client secret used for the cluster (immutable).
        """
        # Only set the properties that were actually supplied.
        for prop_name, prop_value in (('client_id', client_id),
                                      ('client_secret', client_secret)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="clientId")
    def client_id(self) -> Optional[pulumi.Input[str]]:
        """The client ID used for the cluster (immutable)."""
        return pulumi.get(self, "client_id")

    @client_id.setter
    def client_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_id", value)

    @property
    @pulumi.getter(name="clientSecret")
    def client_secret(self) -> Optional[pulumi.Input[str]]:
        """The client secret used for the cluster (immutable)."""
        return pulumi.get(self, "client_secret")

    @client_secret.setter
    def client_secret(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_secret", value)
@pulumi.input_type
class WorkerProfileArgs:
    def __init__(__self__, *,
                 count: Optional[pulumi.Input[int]] = None,
                 disk_size_gb: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 subnet_id: Optional[pulumi.Input[str]] = None,
                 vm_size: Optional[pulumi.Input[str]] = None):
        """
        WorkerProfile represents a worker profile.
        :param pulumi.Input[int] count: The number of worker VMs. Must be between 3 and 20 (immutable).
        :param pulumi.Input[int] disk_size_gb: The disk size of the worker VMs. Must be 128 or greater (immutable).
        :param pulumi.Input[str] name: The worker profile name. Must be "worker" (immutable).
        :param pulumi.Input[str] subnet_id: The Azure resource ID of the worker subnet (immutable).
        :param pulumi.Input[str] vm_size: The size of the worker VMs (immutable).
        """
        # Only set the properties that were actually supplied.
        for prop_name, prop_value in (('count', count),
                                      ('disk_size_gb', disk_size_gb),
                                      ('name', name),
                                      ('subnet_id', subnet_id),
                                      ('vm_size', vm_size)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def count(self) -> Optional[pulumi.Input[int]]:
        """The number of worker VMs. Must be between 3 and 20 (immutable)."""
        return pulumi.get(self, "count")

    @count.setter
    def count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "count", value)

    @property
    @pulumi.getter(name="diskSizeGB")
    def disk_size_gb(self) -> Optional[pulumi.Input[int]]:
        """The disk size of the worker VMs. Must be 128 or greater (immutable)."""
        return pulumi.get(self, "disk_size_gb")

    @disk_size_gb.setter
    def disk_size_gb(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "disk_size_gb", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """The worker profile name. Must be "worker" (immutable)."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> Optional[pulumi.Input[str]]:
        """The Azure resource ID of the worker subnet (immutable)."""
        return pulumi.get(self, "subnet_id")

    @subnet_id.setter
    def subnet_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subnet_id", value)

    @property
    @pulumi.getter(name="vmSize")
    def vm_size(self) -> Optional[pulumi.Input[str]]:
        """The size of the worker VMs (immutable)."""
        return pulumi.get(self, "vm_size")

    @vm_size.setter
    def vm_size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vm_size", value)
| 33.80137 | 116 | 0.614725 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'APIServerProfileArgs',
'ClusterProfileArgs',
'ConsoleProfileArgs',
'IngressProfileArgs',
'MasterProfileArgs',
'NetworkProfileArgs',
'ServicePrincipalProfileArgs',
'WorkerProfileArgs',
]
@pulumi.input_type
class APIServerProfileArgs:
    def __init__(__self__, *,
                 ip: Optional[pulumi.Input[str]] = None,
                 url: Optional[pulumi.Input[str]] = None,
                 visibility: Optional[pulumi.Input[str]] = None):
        """
        APIServerProfile represents an API server profile.
        :param pulumi.Input[str] ip: The IP of the cluster API server (immutable).
        :param pulumi.Input[str] url: The URL to access the cluster API server (immutable).
        :param pulumi.Input[str] visibility: API server visibility (immutable).
        """
        # Only set the properties that were actually supplied.
        for prop_name, prop_value in (('ip', ip),
                                      ('url', url),
                                      ('visibility', visibility)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def ip(self) -> Optional[pulumi.Input[str]]:
        """The IP of the cluster API server (immutable)."""
        return pulumi.get(self, "ip")

    @ip.setter
    def ip(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ip", value)

    @property
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[str]]:
        """The URL to access the cluster API server (immutable)."""
        return pulumi.get(self, "url")

    @url.setter
    def url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url", value)

    @property
    @pulumi.getter
    def visibility(self) -> Optional[pulumi.Input[str]]:
        """API server visibility (immutable)."""
        return pulumi.get(self, "visibility")

    @visibility.setter
    def visibility(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "visibility", value)
@pulumi.input_type
class ClusterProfileArgs:
    def __init__(__self__, *,
                 domain: Optional[pulumi.Input[str]] = None,
                 pull_secret: Optional[pulumi.Input[str]] = None,
                 resource_group_id: Optional[pulumi.Input[str]] = None,
                 version: Optional[pulumi.Input[str]] = None):
        """
        ClusterProfile represents a cluster profile.
        :param pulumi.Input[str] domain: The domain for the cluster (immutable).
        :param pulumi.Input[str] pull_secret: The pull secret for the cluster (immutable).
        :param pulumi.Input[str] resource_group_id: The ID of the cluster resource group (immutable).
        :param pulumi.Input[str] version: The version of the cluster (immutable).
        """
        # Only set the properties that were actually supplied.
        for prop_name, prop_value in (('domain', domain),
                                      ('pull_secret', pull_secret),
                                      ('resource_group_id', resource_group_id),
                                      ('version', version)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def domain(self) -> Optional[pulumi.Input[str]]:
        """The domain for the cluster (immutable)."""
        return pulumi.get(self, "domain")

    @domain.setter
    def domain(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "domain", value)

    @property
    @pulumi.getter(name="pullSecret")
    def pull_secret(self) -> Optional[pulumi.Input[str]]:
        """The pull secret for the cluster (immutable)."""
        return pulumi.get(self, "pull_secret")

    @pull_secret.setter
    def pull_secret(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "pull_secret", value)

    @property
    @pulumi.getter(name="resourceGroupId")
    def resource_group_id(self) -> Optional[pulumi.Input[str]]:
        """The ID of the cluster resource group (immutable)."""
        return pulumi.get(self, "resource_group_id")

    @resource_group_id.setter
    def resource_group_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_id", value)

    @property
    @pulumi.getter
    def version(self) -> Optional[pulumi.Input[str]]:
        """The version of the cluster (immutable)."""
        return pulumi.get(self, "version")

    @version.setter
    def version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "version", value)
@pulumi.input_type
class ConsoleProfileArgs:
    def __init__(__self__, *,
                 url: Optional[pulumi.Input[str]] = None):
        """
        ConsoleProfile represents a console profile.
        :param pulumi.Input[str] url: The URL to access the cluster console (immutable).
        """
        # Leave the property unset when no value was supplied.
        if url is None:
            return
        pulumi.set(__self__, "url", url)

    @property
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[str]]:
        """The URL to access the cluster console (immutable)."""
        return pulumi.get(self, "url")

    @url.setter
    def url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url", value)
@pulumi.input_type
class IngressProfileArgs:
    def __init__(__self__, *,
                 ip: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 visibility: Optional[pulumi.Input[str]] = None):
        """
        IngressProfile represents an ingress profile.
        :param pulumi.Input[str] ip: The IP of the ingress (immutable).
        :param pulumi.Input[str] name: The ingress profile name. Must be "default" (immutable).
        :param pulumi.Input[str] visibility: Ingress visibility (immutable).
        """
        # Only set the properties that were actually supplied.
        for prop_name, prop_value in (('ip', ip),
                                      ('name', name),
                                      ('visibility', visibility)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def ip(self) -> Optional[pulumi.Input[str]]:
        """The IP of the ingress (immutable)."""
        return pulumi.get(self, "ip")

    @ip.setter
    def ip(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ip", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """The ingress profile name. Must be "default" (immutable)."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def visibility(self) -> Optional[pulumi.Input[str]]:
        """Ingress visibility (immutable)."""
        return pulumi.get(self, "visibility")

    @visibility.setter
    def visibility(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "visibility", value)
@pulumi.input_type
class MasterProfileArgs:
    """Input properties for a master profile (``subnet_id``, ``vm_size``)."""
    def __init__(__self__, *,
                 subnet_id: Optional[pulumi.Input[str]] = None,
                 vm_size: Optional[pulumi.Input[str]] = None):
        # Only register inputs that were actually supplied.
        if subnet_id is not None:
            pulumi.set(__self__, "subnet_id", subnet_id)
        if vm_size is not None:
            pulumi.set(__self__, "vm_size", vm_size)
    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "subnet_id")
    @subnet_id.setter
    def subnet_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subnet_id", value)
    @property
    @pulumi.getter(name="vmSize")
    def vm_size(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "vm_size")
    @vm_size.setter
    def vm_size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vm_size", value)
@pulumi.input_type
class NetworkProfileArgs:
    """Input properties for a network profile (pod and service CIDR ranges)."""
    def __init__(__self__, *,
                 pod_cidr: Optional[pulumi.Input[str]] = None,
                 service_cidr: Optional[pulumi.Input[str]] = None):
        # Only register inputs that were actually supplied.
        if pod_cidr is not None:
            pulumi.set(__self__, "pod_cidr", pod_cidr)
        if service_cidr is not None:
            pulumi.set(__self__, "service_cidr", service_cidr)
    @property
    @pulumi.getter(name="podCidr")
    def pod_cidr(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "pod_cidr")
    @pod_cidr.setter
    def pod_cidr(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "pod_cidr", value)
    @property
    @pulumi.getter(name="serviceCidr")
    def service_cidr(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "service_cidr")
    @service_cidr.setter
    def service_cidr(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "service_cidr", value)
@pulumi.input_type
class ServicePrincipalProfileArgs:
    """Input properties for a service principal (client id and secret)."""
    def __init__(__self__, *,
                 client_id: Optional[pulumi.Input[str]] = None,
                 client_secret: Optional[pulumi.Input[str]] = None):
        # Only register inputs that were actually supplied.
        if client_id is not None:
            pulumi.set(__self__, "client_id", client_id)
        if client_secret is not None:
            pulumi.set(__self__, "client_secret", client_secret)
    @property
    @pulumi.getter(name="clientId")
    def client_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "client_id")
    @client_id.setter
    def client_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_id", value)
    @property
    @pulumi.getter(name="clientSecret")
    def client_secret(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "client_secret")
    @client_secret.setter
    def client_secret(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_secret", value)
@pulumi.input_type
class WorkerProfileArgs:
    """Input properties for a worker profile (count, disk size, name, subnet, VM size)."""
    def __init__(__self__, *,
                 count: Optional[pulumi.Input[int]] = None,
                 disk_size_gb: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 subnet_id: Optional[pulumi.Input[str]] = None,
                 vm_size: Optional[pulumi.Input[str]] = None):
        # Only register inputs that were actually supplied.
        if count is not None:
            pulumi.set(__self__, "count", count)
        if disk_size_gb is not None:
            pulumi.set(__self__, "disk_size_gb", disk_size_gb)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if subnet_id is not None:
            pulumi.set(__self__, "subnet_id", subnet_id)
        if vm_size is not None:
            pulumi.set(__self__, "vm_size", vm_size)
    @property
    @pulumi.getter
    def count(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "count")
    @count.setter
    def count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "count", value)
    @property
    @pulumi.getter(name="diskSizeGB")
    def disk_size_gb(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "disk_size_gb")
    @disk_size_gb.setter
    def disk_size_gb(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "disk_size_gb", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "subnet_id")
    @subnet_id.setter
    def subnet_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subnet_id", value)
    @property
    @pulumi.getter(name="vmSize")
    def vm_size(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "vm_size")
    @vm_size.setter
    def vm_size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vm_size", value)
| true | true |
1c302b5bad3090eca8357dc6c7bb7abb57c6c22b | 471 | py | Python | terminator/display.py | radarsh/terminator | 4979d9f57d4d2206114e401c4b7013db5f187f70 | [
"Apache-2.0"
] | 9 | 2015-02-27T16:21:26.000Z | 2020-03-19T07:47:18.000Z | terminator/display.py | radarsh/bashmon | 4979d9f57d4d2206114e401c4b7013db5f187f70 | [
"Apache-2.0"
] | 9 | 2015-03-02T09:46:09.000Z | 2017-09-29T18:03:41.000Z | terminator/display.py | radarsh/bashmon | 4979d9f57d4d2206114e401c4b7013db5f187f70 | [
"Apache-2.0"
] | 5 | 2015-03-13T14:14:34.000Z | 2019-09-12T07:11:52.000Z | import os
from platform import system
import terminator.formatter as formatter
import terminator.arguments as arguments
def term_width():
    """Terminal width in columns; an explicit ``arguments.terminal_width``
    override takes precedence over the detected size."""
    override = arguments.terminal_width
    if override:
        return override
    return os.get_terminal_size().columns
def clear_screen():
    """Clear the terminal using the platform-appropriate shell command."""
    command = 'cls' if system() == 'Windows' else 'clear'
    os.system(command)
def repaint(jobs):
    """Clear the terminal and redraw the formatted display for every job."""
    clear_screen()
    for job in jobs:
        # One Formatter per job renders that job's display block.
        _formatter = formatter.Formatter(job)
        print(_formatter.job_display())
from platform import system
import terminator.formatter as formatter
import terminator.arguments as arguments
def term_width():
return arguments.terminal_width or os.get_terminal_size().columns
def clear_screen():
if system() == 'Windows':
os.system('cls')
else:
os.system('clear')
def repaint(jobs):
clear_screen()
for job in jobs:
_formatter = formatter.Formatter(job)
print(_formatter.job_display()) | true | true |
1c302bc8c309a1d8c1b579797a0a5f35ea31626b | 351 | py | Python | test/linkage-agent/manual/time_test_manual_test_10k-x-6.py | greshje/linkage-agent-tools | bc3a041c74cfe96950a0e3c011dd6a0185ad912d | [
"Apache-2.0"
] | 1 | 2020-06-25T19:57:56.000Z | 2020-06-25T19:57:56.000Z | test/linkage-agent/manual/time_test_manual_test_10k-x-6.py | greshje/linkage-agent-tools | bc3a041c74cfe96950a0e3c011dd6a0185ad912d | [
"Apache-2.0"
] | 1 | 2021-10-01T15:13:15.000Z | 2021-10-01T15:13:15.000Z | test/linkage-agent/manual/time_test_manual_test_10k-x-6.py | greshje/linkage-agent-tools | bc3a041c74cfe96950a0e3c011dd6a0185ad912d | [
"Apache-2.0"
] | null | null | null | # ------
#
# This test is not part of the automated test suite. It is intended to be
# run manually after the data folder has been copied to C:\test.
#
# This module exists primarily to enable debugging of the process called by the cmd line
#
# ------
import time_test as tt
if __name__ == "__main__":
    # Run the timing test against the locally staged 10k-x-6 data set
    # (expects the data to have been copied to C:\test beforehand).
    tt.run_test("C:\\test\\test-set-10k-x-6")
| 25.071429 | 88 | 0.68661 |
import time_test as tt
if __name__ == "__main__":
tt.run_test("C:\\test\\test-set-10k-x-6")
| true | true |
1c302bf040036bd5a89e90813fd96cc42d886097 | 5,141 | py | Python | peeringdb_server/templatetags/util.py | WEBZCC/peeringdb | 9f74fb8a1148eb613179f9fb16bb5d17b86f9c04 | [
"BSD-2-Clause"
] | null | null | null | peeringdb_server/templatetags/util.py | WEBZCC/peeringdb | 9f74fb8a1148eb613179f9fb16bb5d17b86f9c04 | [
"BSD-2-Clause"
] | null | null | null | peeringdb_server/templatetags/util.py | WEBZCC/peeringdb | 9f74fb8a1148eb613179f9fb16bb5d17b86f9c04 | [
"BSD-2-Clause"
] | null | null | null | from django import template
from django.utils.translation import ugettext_lazy as _
from django.utils.safestring import mark_safe
import datetime
from peeringdb_server.models import (
InternetExchange,
Network,
Facility,
Organization,
PARTNERSHIP_LEVELS,
format_speed,
)
from peeringdb_server.views import DoNotRender
from peeringdb_server.org_admin_views import permission_ids
from peeringdb_server.inet import RdapException
from django_countries import countries
from django_namespace_perms.util import get_permission_flag
import tld
import random
import markdown
import bleach
# Plain {code: name} mapping built once at import from django-countries.
countries_dict = dict(countries)
# Template filter registry for this module.
register = template.Library()
@register.filter
def editable_list_value(row):
    """Return the translated display text for an editable-list row."""
    if row.get("value") or row.get("value_label"):
        # Prefer the explicit label, falling back to the raw value.
        display = row.get("value_label", row.get("value"))
        return _(display)
    if row.get("blank") and row.get("value") == "":
        return row.get("blank")
    return ""
@register.filter
def shuffle(val):
    """Return the items of ``val`` as a new list in random order."""
    items = list(val)
    random.shuffle(items)
    return items
@register.filter
def blank_sub(val, row):
    """Replace an empty value with the row's ``blank`` placeholder, if any."""
    placeholder = row.get("blank")
    if val == "" and placeholder:
        return placeholder
    return val
@register.filter
def org_permission_id_xl(org, id):
    """Translate a permission id into its label for ``org`` (None if unknown)."""
    return permission_ids(org).get(id)
@register.filter
def check_perms(v, op):
    """True when permission bitmask ``v`` contains every bit of flag ``op``."""
    flg = get_permission_flag(op)
    # all bits of the flag must be present in v
    return v & flg == flg
@register.filter
def user_org_group(org, user):
    """Return the user's role in the organization: "admin", "member" or ""."""
    if org.admin_usergroup.user_set.filter(id=user.id).exists():
        return "admin"
    elif org.usergroup.user_set.filter(id=user.id).exists():
        return "member"
    return ""
@register.filter
def ownership_warning(org, user):
    """Return an HTML warning span when the user looks unaffiliated with ``org``.

    The user counts as affiliated when either (a) the domain part of their
    email equals the TLD of one of the organization's URLs, or (b) they
    validate an RDAP relationship against one of the org's collected RDAP
    records.  Returns "" when affiliated.
    """
    email_domain = user.email.split("@")[1]
    b = False
    # (a) compare the email domain against the top-level domain of each org URL
    for url in [tld.get_tld(u) for u in org.urls]:
        if email_domain == url:
            b = True
            break
    if not b:
        # (b) fall back to RDAP relationship validation
        for rdap in list(org.rdap_collect.values()):
            try:
                if user.validate_rdap_relationship(rdap):
                    b = True
                    break
            except RdapException as exc:
                # we don't need to do anything with the rdap exception here, as it
                # will be raised appropriately when the request is sent off
                pass
    if not b:
        return mark_safe(
            '<span class="attention">{}</span>'.format(
                _(
                    "Your email address does not match the domain information we have on file for this organization."
                )
            )
        )
    return ""
@register.filter
def long_country_name(v):
    """Return the full country name for a country code or Country object.

    Accepts either a country code string (looked up in ``countries_dict``,
    falling back to the input itself when unknown) or any object exposing
    a ``.name`` attribute (e.g. a django-countries Country).
    """
    # isinstance instead of an exact type() check so that str subclasses
    # (e.g. Django SafeString or lazy strings coerced to str) are treated
    # as country codes instead of falling through to ``.name``.
    if isinstance(v, str):
        return countries_dict.get(v, v)
    return v.name
@register.filter
def as_bool(v):
    """Coerce a template value to bool; falsy values and the string "0" map to False."""
    return bool(v) and v != "0"
@register.filter
def fallback(a, b):
    """Return ``a`` unless it is falsy, in which case return ``b``."""
    return a if a else b
@register.filter
def is_dict(value):
    """True only for plain ``dict`` instances (subclasses excluded)."""
    return type(value) is dict
@register.filter
def is_bool(value):
    """True when ``value`` is a bool, i.e. exactly True or False."""
    return value is True or value is False
@register.filter
def is_none(value):
    """Return True when ``value`` is None.

    Bug fix: the previous body was ``type(value) is None``, which is always
    False because ``type()`` returns a class object, never None.
    """
    return value is None
@register.filter
def none_blank(value):
    """Map None to an empty string; pass every other value through."""
    return "" if value is None else value
@register.filter
def dont_render(value):
    """True when ``value`` is exactly a ``DoNotRender`` sentinel instance."""
    return type(value) is DoNotRender
@register.filter
def age(dt):
    """Render how long ago ``dt`` was, as a coarse "<N> <unit> ago" string."""
    elapsed = (datetime.datetime.now().replace(tzinfo=dt.tzinfo) - dt).total_seconds()
    # (upper bound in seconds, divisor, translated unit label)
    for limit, divisor, unit in (
        (60, 1, _("seconds ago")),
        (3600, 60, _("minutes ago")),
        (86400, 3600, _("hours ago")),
    ):
        if elapsed < limit:
            return "%d %s" % (elapsed / divisor, unit)
    return "%d %s" % (elapsed / 86400, _("days ago"))
@register.filter
def ref_tag(value):
    """Return the handleref tag for an instance or for a model-class name.

    Accepts either an object exposing ``_handleref`` or one of the four
    known class names as a string; anything else yields "unknown".
    """
    if hasattr(value, "_handleref"):
        return value._handleref.tag
    elif value == "InternetExchange":
        return InternetExchange.handleref.tag
    elif value == "Network":
        return Network.handleref.tag
    elif value == "Facility":
        return Facility.handleref.tag
    elif value == "Organization":
        return Organization.handleref.tag
    return "unknown"
@register.filter
def autocomplete_preload_net(value):
    """Expand a comma-separated list of network ids into "id;name" pairs.

    Only networks with status "ok" are included; returns "" for empty input.
    """
    if not value:
        return ""
    qset = Network.objects.filter(status="ok", id__in=value.split(","))
    return ",".join([f"{net.id};{net.name}" for net in qset])
@register.filter
def pretty_speed(value):
    """Format a numeric speed via ``format_speed``; pass through on bad input."""
    if not value:
        return ""
    try:
        return format_speed(value)
    except ValueError:
        # Unparseable value: show it verbatim rather than erroring the template.
        return value
@register.filter
def partnership_label(level):
    """Human-readable label for a partnership level constant ("Unknown" if absent)."""
    return dict(PARTNERSHIP_LEVELS).get(level, "Unknown")
@register.filter
def render_markdown(value):
    """Render markdown to HTML, bleach-sanitized to a tag/protocol whitelist."""
    # Tags allowed to survive sanitization; anything else produced by the
    # markdown conversion is stripped by bleach.
    markdown_tags = [
        "h1",
        "h2",
        "h3",
        "h4",
        "h5",
        "h6",
        "b",
        "i",
        "strong",
        "em",
        "tt",
        "p",
        "br",
        "span",
        "div",
        "blockquote",
        "code",
        "hr",
        "ul",
        "ol",
        "li",
        "dd",
        "dt",
        "a",
    ]
    return bleach.clean(
        markdown.markdown(value), tags=markdown_tags, protocols=["http", "https"]
    )
| 21.876596 | 117 | 0.60922 | from django import template
from django.utils.translation import ugettext_lazy as _
from django.utils.safestring import mark_safe
import datetime
from peeringdb_server.models import (
InternetExchange,
Network,
Facility,
Organization,
PARTNERSHIP_LEVELS,
format_speed,
)
from peeringdb_server.views import DoNotRender
from peeringdb_server.org_admin_views import permission_ids
from peeringdb_server.inet import RdapException
from django_countries import countries
from django_namespace_perms.util import get_permission_flag
import tld
import random
import markdown
import bleach
countries_dict = dict(countries)
register = template.Library()
@register.filter
def editable_list_value(row):
if row.get("value") or row.get("value_label"):
return _(row.get("value_label", row.get("value")))
elif row.get("blank") and row.get("value") == "":
return row.get("blank")
return ""
@register.filter
def shuffle(val):
rmp = [r for r in val]
random.shuffle(rmp)
return rmp
@register.filter
def blank_sub(val, row):
if val == "" and row.get("blank"):
return row.get("blank")
return val
@register.filter
def org_permission_id_xl(org, id):
return permission_ids(org).get(id)
@register.filter
def check_perms(v, op):
flg = get_permission_flag(op)
return v & flg == flg
@register.filter
def user_org_group(org, user):
if org.admin_usergroup.user_set.filter(id=user.id).exists():
return "admin"
elif org.usergroup.user_set.filter(id=user.id).exists():
return "member"
return ""
@register.filter
def ownership_warning(org, user):
email_domain = user.email.split("@")[1]
b = False
for url in [tld.get_tld(u) for u in org.urls]:
if email_domain == url:
b = True
break
if not b:
for rdap in list(org.rdap_collect.values()):
try:
if user.validate_rdap_relationship(rdap):
b = True
break
except RdapException as exc:
# be raised apropriately when the request is sent off
pass
if not b:
return mark_safe(
'<span class="attention">{}</span>'.format(
_(
"Your email address does not match the domain information we have on file for this organization."
)
)
)
return ""
@register.filter
def long_country_name(v):
if type(v) == str:
return countries_dict.get(v, v)
else:
return v.name
@register.filter
def as_bool(v):
if not v or v == "0":
return False
return True
@register.filter
def fallback(a, b):
if not a:
return b
return a
@register.filter
def is_dict(value):
return type(value) == dict
@register.filter
def is_bool(value):
return type(value) == bool
@register.filter
def is_none(value):
    """Return True when ``value`` is None.

    Bug fix: ``type(value) is None`` was always False because ``type()``
    returns a class object, never None.
    """
    return value is None
@register.filter
def none_blank(value):
if value is None:
return ""
return value
@register.filter
def dont_render(value):
return type(value) is DoNotRender
@register.filter
def age(dt):
seconds = (datetime.datetime.now().replace(tzinfo=dt.tzinfo) - dt).total_seconds()
if seconds < 60:
return "%d %s" % (seconds, _("seconds ago"))
elif seconds < 3600:
return "%d %s" % (seconds / 60, _("minutes ago"))
elif seconds < 86400:
return "%d %s" % (seconds / 3600, _("hours ago"))
else:
return "%d %s" % (seconds / 86400, _("days ago"))
@register.filter
def ref_tag(value):
if hasattr(value, "_handleref"):
return value._handleref.tag
elif value == "InternetExchange":
return InternetExchange.handleref.tag
elif value == "Network":
return Network.handleref.tag
elif value == "Facility":
return Facility.handleref.tag
elif value == "Organization":
return Organization.handleref.tag
return "unknown"
@register.filter
def autocomplete_preload_net(value):
if not value:
return ""
qset = Network.objects.filter(status="ok", id__in=value.split(","))
return ",".join([f"{net.id};{net.name}" for net in qset])
@register.filter
def pretty_speed(value):
if not value:
return ""
try:
return format_speed(value)
except ValueError:
return value
@register.filter
def partnership_label(level):
return dict(PARTNERSHIP_LEVELS).get(level, "Unknown")
@register.filter
def render_markdown(value):
markdown_tags = [
"h1",
"h2",
"h3",
"h4",
"h5",
"h6",
"b",
"i",
"strong",
"em",
"tt",
"p",
"br",
"span",
"div",
"blockquote",
"code",
"hr",
"ul",
"ol",
"li",
"dd",
"dt",
"a",
]
return bleach.clean(
markdown.markdown(value), tags=markdown_tags, protocols=["http", "https"]
)
| true | true |
1c302c7b2dd41ddcf61c38ff298d35f5958f5fbe | 1,663 | py | Python | setup.py | massarom/pypackinit | c5860e60b6eaa5d66fab53e3795fd524618ca9da | [
"MIT"
] | null | null | null | setup.py | massarom/pypackinit | c5860e60b6eaa5d66fab53e3795fd524618ca9da | [
"MIT"
] | null | null | null | setup.py | massarom/pypackinit | c5860e60b6eaa5d66fab53e3795fd524618ca9da | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Example setup.py file
This example setup file is adapted from https://github.com/pypa/sampleproject
and is not supposed to be and exahustive list of options accepted by the
setuptools' function `setup`. You should read the respective documentation,
which can be found at https://setuptools.readthedocs.io/en/latest/setuptools.html
The original License and Copyright for this setup file follows:
Copyright (c) 2016 The Python Packaging Authority (PyPA)
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from setuptools import setup, find_packages
# Expose the single ``pypackinit`` module and register the ``ppinit``
# console script, which dispatches to ``pypackinit.main``.
setup(
    py_modules=["pypackinit"],
    entry_points={"console_scripts": ["ppinit = pypackinit:main"]},
)
| 44.945946 | 81 | 0.790138 |
from setuptools import setup, find_packages
setup(
py_modules=["pypackinit"],
entry_points={"console_scripts": ["ppinit = pypackinit:main"]},
)
| true | true |
1c302dc39798038ca1e819bf63f1a0d88093d9b5 | 2,279 | py | Python | test_service.py | michhar/azureml-keras-yolov3-custom | 3f52a48bf865a221e70432cfcb6e6afa73c3b001 | [
"MIT"
] | 12 | 2019-11-20T21:19:22.000Z | 2021-11-09T11:32:35.000Z | test_service.py | michhar/azureml-keras-yolov3-custom | 3f52a48bf865a221e70432cfcb6e6afa73c3b001 | [
"MIT"
] | 9 | 2019-11-16T21:07:07.000Z | 2022-02-10T02:14:16.000Z | test_service.py | michhar/azureml-keras-yolov3-custom | 3f52a48bf865a221e70432cfcb6e6afa73c3b001 | [
"MIT"
] | 6 | 2019-11-22T03:42:58.000Z | 2021-08-28T03:51:21.000Z | """
Use Python requests to call web service
Set an environment variable SCORING_URI to the scoring URL before running and
set environment variable WEBSERVICE_KEY to the webservice key if in the cloud (omit
if testing a local service).
"""
import requests
from requests.exceptions import HTTPError
import numpy as np
import matplotlib.pyplot as plt
import argparse
import os
import json
from PIL import Image
from io import BytesIO
import base64
def arg_parse():
    """Parse command-line options; returns a namespace with ``image``."""
    description = 'This script is for calling the HTTP REST API for Azure ML cloud service'
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument("--image", dest='image', type=str,
                        help="Image file")
    return parser.parse_args()
def get_service(ws, name):
    """Return the workspace webservice whose name matches, or None."""
    for service_name, service in ws.webservices.items():
        if service_name == name:
            return service
    return None
def main(img_file):
    """Call the Azure ML webservice with an image file and print the result.

    Reads the endpoint from the SCORING_URI environment variable and the
    bearer token from WEBSERVICE_KEY (empty for a local service).
    """
    scoring_uri = os.getenv('SCORING_URI', '')
    # Construct the input-data json string
    # input_data = plt.imread(img_file)
    # input_data = {"data": [input_data.tolist()] }
    pil_img = Image.open(img_file)
    buff = BytesIO()
    # Re-encode as JPEG and base64 so the payload is JSON-serializable.
    pil_img.save(buff, format="JPEG")
    new_image_string = json.dumps({'data': base64.b64encode(buff.getvalue()).decode("utf-8")})
    # Keep a copy of the request body on disk for debugging.
    with open('request.txt', 'w') as f:
        f.write(new_image_string)
    headers = {'Content-Type':'application/json', 'Authorization': 'Bearer ' + os.getenv('WEBSERVICE_KEY', '')}
    try:
        resp = requests.post(scoring_uri,
                             new_image_string,
                             headers=headers)
        # If the response was successful, no Exception will be raised
        resp.raise_for_status()
    except HTTPError as http_err:
        print(f'HTTP error occurred: {http_err}')
    except Exception as err:
        print(f'Other error occurred: {err}')
    else:
        print('Success!')
        print(resp.text)
if __name__ == "__main__":
    # Parse --image from the command line and post it to the service.
    args = arg_parse()
    main(args.image)
| 29.597403 | 111 | 0.66301 | import requests
from requests.exceptions import HTTPError
import numpy as np
import matplotlib.pyplot as plt
import argparse
import os
import json
from PIL import Image
from io import BytesIO
import base64
def arg_parse():
parser = argparse.ArgumentParser(
description='This script is for calling the HTTP REST API for Azure ML cloud service')
parser.add_argument("--image", dest='image',
help="Image file", type=str)
return parser.parse_args()
def get_service(ws, name):
webservices = ws.webservices
service = None
for servicename, webservice in webservices.items():
if name == servicename:
service = webservice
break
return service
def main(img_file):
scoring_uri = os.getenv('SCORING_URI', '')
pil_img = Image.open(img_file)
buff = BytesIO()
pil_img.save(buff, format="JPEG")
new_image_string = json.dumps({'data': base64.b64encode(buff.getvalue()).decode("utf-8")})
with open('request.txt', 'w') as f:
f.write(new_image_string)
headers = {'Content-Type':'application/json', 'Authorization': 'Bearer ' + os.getenv('WEBSERVICE_KEY', '')}
try:
resp = requests.post(scoring_uri,
new_image_string,
headers=headers)
resp.raise_for_status()
except HTTPError as http_err:
print(f'HTTP error occurred: {http_err}')
except Exception as err:
print(f'Other error occurred: {err}')
else:
print('Success!')
print(resp.text)
if __name__ == "__main__":
args = arg_parse()
main(args.image)
| true | true |
1c302e6be5a1a45bc57efd3bec3653f92d393f2b | 654 | py | Python | p9.py | JasonD1997/Machine-Learning-Programs | 81e54ccaf2ca4bd7e2d79ed0e538f13cb355c0bf | [
"MIT"
] | null | null | null | p9.py | JasonD1997/Machine-Learning-Programs | 81e54ccaf2ca4bd7e2d79ed0e538f13cb355c0bf | [
"MIT"
] | null | null | null | p9.py | JasonD1997/Machine-Learning-Programs | 81e54ccaf2ca4bd7e2d79ed0e538f13cb355c0bf | [
"MIT"
] | null | null | null | from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import classification_report,confusion_matrix
from sklearn import datasets
# Load the bundled iris dataset and split features from labels.
iris=datasets.load_iris()
iris_data=iris.data
iris_labels=iris.target
print(iris_data)
print(iris_labels)
# Hold out 30% of the samples for evaluation.
x_train,x_test,y_train,y_test=train_test_split(iris_data,iris_labels,test_size=0.30)
# k-nearest-neighbours classifier with k=5.
classifier=KNeighborsClassifier(n_neighbors=5)
classifier.fit(x_train,y_train)
y_pred=classifier.predict(x_test)
print('confusion matrix is as follows')
print(confusion_matrix(y_test,y_pred))
print('Accuracy metrics')
print(classification_report(y_test,y_pred))
| 34.421053 | 84 | 0.860856 | from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import classification_report,confusion_matrix
from sklearn import datasets
iris=datasets.load_iris()
iris_data=iris.data
iris_labels=iris.target
print(iris_data)
print(iris_labels)
x_train,x_test,y_train,y_test=train_test_split(iris_data,iris_labels,test_size=0.30)
classifier=KNeighborsClassifier(n_neighbors=5)
classifier.fit(x_train,y_train)
y_pred=classifier.predict(x_test)
print('confusion matrix is as follows')
print(confusion_matrix(y_test,y_pred))
print('Accuracy metrics')
print(classification_report(y_test,y_pred))
| true | true |
1c30304757f5dcc3fcbb4ba2ba9ecb42d0cd0d8d | 1,922 | py | Python | gpfit/xfoil/constraint_set.py | appliedopt/gpfit | 3c8025f12ba5360fdeb71c270e55d4c93e1676fd | [
"MIT"
] | 6 | 2017-12-10T11:27:03.000Z | 2021-12-28T18:12:32.000Z | gpfit/xfoil/constraint_set.py | appliedopt/gpfit | 3c8025f12ba5360fdeb71c270e55d4c93e1676fd | [
"MIT"
] | 49 | 2017-10-16T17:15:55.000Z | 2021-10-19T06:32:27.000Z | gpfit/xfoil/constraint_set.py | appliedopt/gpfit | 3c8025f12ba5360fdeb71c270e55d4c93e1676fd | [
"MIT"
] | 1 | 2017-12-10T08:47:26.000Z | 2017-12-10T08:47:26.000Z | """xfoil constraint set"""
import numpy as np
from gpfit.constraint_set import FitConstraintSet
from .wrapper import xfoil_comparison
# pylint: disable=too-many-arguments
class XfoilFit(FitConstraintSet):
    """Special FitConstraintSet that can post-solve compare result to XFOIL

    Arguments (in addition to the arguments to FitConstraintSet)
    ---------
    airfoil: airfoil of fitted data
        str (e.g. "xxx.dat", "naca xxxx"); False disables the comparison
    """

    def __init__(
        self, fitdata, ivar=None, dvars=None, name="", err_margin=None, airfoil=False
    ):
        super().__init__(
            fitdata, ivar=ivar, dvars=dvars, name=name, err_margin=err_margin
        )
        # Airfoil identifier used for the post-solve XFOIL cross-check.
        self.airfoil = airfoil

    def process_result(self, result):
        """
        if data comes from Xfoil and airfoil is provided check against xfoil
        """
        super().process_result(result)
        # No sensitivity entry for this fit's margin factor: nothing to check.
        if self.mfac not in result["sensitivities"]["constants"]:
            return
        # NOTE(review): near-zero mfac sensitivity is treated as "fit not
        # active at the optimum", so the comparison is skipped — confirm.
        if np.amax([abs(result["sensitivities"]["constants"][self.mfac])]) < 1e-5:
            return
        if not self.airfoil:
            return
        # Pull lift coefficient and Reynolds number out of the fit's
        # independent variables by name (``re`` is the Reynolds number here,
        # not the regex module).
        cl, re = 0.0, 0.0
        for dvar in self.dvars:
            if "Re" in str(dvar):
                re = result(dvar)
            if "C_L" in str(dvar):
                cl = result(dvar)
        cd = result(self.ivar)
        # Broadcast a scalar CL across a vector of Reynolds numbers.
        if not hasattr(cl, "__len__") and hasattr(re, "__len__"):
            cl = [cl]*len(re)
        err, cdx = xfoil_comparison(self.airfoil, cl, re, cd)
        # Warn for every point whose drag disagrees with XFOIL by more than 5%.
        ind = np.where(err > 0.05)[0]
        for i in ind:
            modelstr = ", ".join(self.ivar.descr["models"])
            msg = (
                f"Drag error for {modelstr} is {err[i]:.2f}. Re={re[i]:.1f};"
                f" CL={cl[i]:.4f}; Xfoil cd={cd[i]:.6f}, GP sol cd={cdx[i]:.6f}"
            )
            print(f"Warning: {msg}")
| 32.576271 | 85 | 0.540583 | import numpy as np
from gpfit.constraint_set import FitConstraintSet
from .wrapper import xfoil_comparison
class XfoilFit(FitConstraintSet):
def __init__(
self, fitdata, ivar=None, dvars=None, name="", err_margin=None, airfoil=False
):
super().__init__(
fitdata, ivar=ivar, dvars=dvars, name=name, err_margin=err_margin
)
self.airfoil = airfoil
def process_result(self, result):
super().process_result(result)
if self.mfac not in result["sensitivities"]["constants"]:
return
if np.amax([abs(result["sensitivities"]["constants"][self.mfac])]) < 1e-5:
return
if not self.airfoil:
return
cl, re = 0.0, 0.0
for dvar in self.dvars:
if "Re" in str(dvar):
re = result(dvar)
if "C_L" in str(dvar):
cl = result(dvar)
cd = result(self.ivar)
if not hasattr(cl, "__len__") and hasattr(re, "__len__"):
cl = [cl]*len(re)
err, cdx = xfoil_comparison(self.airfoil, cl, re, cd)
ind = np.where(err > 0.05)[0]
for i in ind:
modelstr = ", ".join(self.ivar.descr["models"])
msg = (
f"Drag error for {modelstr} is {err[i]:.2f}. Re={re[i]:.1f};"
f" CL={cl[i]:.4f}; Xfoil cd={cd[i]:.6f}, GP sol cd={cdx[i]:.6f}"
)
print(f"Warning: {msg}")
| true | true |
1c30319c69766804d9361859d13d841c20dbd931 | 2,816 | py | Python | src/sn7_baseline_prep_funcs.py | ordinaryname/CosmiQ_SN7_Baseline | db486f834b5f4a0a917098c63c1bbfe432350789 | [
"Apache-2.0"
] | null | null | null | src/sn7_baseline_prep_funcs.py | ordinaryname/CosmiQ_SN7_Baseline | db486f834b5f4a0a917098c63c1bbfe432350789 | [
"Apache-2.0"
] | null | null | null | src/sn7_baseline_prep_funcs.py | ordinaryname/CosmiQ_SN7_Baseline | db486f834b5f4a0a917098c63c1bbfe432350789 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 25 14:11:02 2020
@author: avanetten
"""
import multiprocessing
import pandas as pd
import numpy as np
import skimage
import gdal
import os
import solaris as sol
from solaris.raster.image import create_multiband_geotiff
from solaris.utils.core import _check_gdf_load
def map_wrapper(x):
    """Unpack ``(func, *args)`` and call ``func(*args)`` — lets a
    single-argument pool ``map`` dispatch arbitrary calls."""
    func, *args = x
    return func(*args)
def make_geojsons_and_masks(name_root, image_path, json_path,
                            output_path_mask, output_path_mask_fbc=None):
    '''
    Create training masks for one image/label pair.

    Writes a one-channel footprint mask to ``output_path_mask`` and, when
    ``output_path_mask_fbc`` is given, a three-channel fbc
    (footprint, boundary, raw) mask as well.  Images that fail to load are
    skipped; empty label files produce all-zero masks that keep the image's
    projection and geotransform.
    '''
    print("  name_root:", name_root)
    # filter out null geoms (this is always a worthy check)
    gdf_tmp = _check_gdf_load(json_path)
    if len(gdf_tmp) == 0:
        gdf_nonull = gdf_tmp
    else:
        gdf_nonull = gdf_tmp[gdf_tmp.geometry.notnull()]
    try:
        im_tmp = skimage.io.imread(image_path)
    except Exception:
        # Was a bare ``except:``; Exception keeps the best-effort skip
        # without also swallowing KeyboardInterrupt/SystemExit.
        print("Error loading image %s, skipping..." %(image_path))
        return
    # handle empty geojsons: emit all-zero masks that keep georeferencing
    if len(gdf_nonull) == 0:
        # mask 1 has 1 channel, mask_fbc has 3 channels
        print("    Empty labels for name_root!", name_root)
        im = gdal.Open(image_path)
        proj = im.GetProjection()
        geo = im.GetGeoTransform()
        # assumes a multi-band array of shape (bands, rows, cols) — TODO
        # confirm behavior for single-band imagery.
        im = im.ReadAsArray()
        # set masks to 0 everywhere
        mask_arr = np.zeros((1, im.shape[1], im.shape[2]))
        create_multiband_geotiff(mask_arr, output_path_mask, proj, geo)
        if output_path_mask_fbc:
            mask_arr = np.zeros((3, im.shape[1], im.shape[2]))
            create_multiband_geotiff(mask_arr, output_path_mask_fbc, proj, geo)
        return
    # make masks (single channel)
    # https://github.com/CosmiQ/solaris/blob/master/docs/tutorials/notebooks/api_masks_tutorial.ipynb
    f_mask = sol.vector.mask.df_to_px_mask(df=gdf_nonull, out_file=output_path_mask,
                                           channels=['footprint'],
                                           reference_im=image_path,
                                           shape=(im_tmp.shape[0], im_tmp.shape[1]))
    # three channel mask (contact channel, if used, takes forever)
    # https://github.com/CosmiQ/solaris/blob/master/docs/tutorials/notebooks/api_masks_tutorial.ipynb
    if output_path_mask_fbc:
        fbc_mask = sol.vector.mask.df_to_px_mask(df=gdf_nonull, out_file=output_path_mask_fbc,
                                                 channels=['band1', 'band2', 'band3'],
                                                 reference_im=image_path,
                                                 boundary_width=4, meters=True,
                                                 shape=(im_tmp.shape[0], im_tmp.shape[1]))
    return
| 34.341463 | 101 | 0.610085 |
import multiprocessing
import pandas as pd
import numpy as np
import skimage
import gdal
import os
import solaris as sol
from solaris.raster.image import create_multiband_geotiff
from solaris.utils.core import _check_gdf_load
def map_wrapper(x):
return x[0](*(x[1:]))
def make_geojsons_and_masks(name_root, image_path, json_path,
output_path_mask, output_path_mask_fbc=None):
print(" name_root:", name_root)
gdf_tmp = _check_gdf_load(json_path)
if len(gdf_tmp) == 0:
gdf_nonull = gdf_tmp
else:
gdf_nonull = gdf_tmp[gdf_tmp.geometry.notnull()]
try:
im_tmp = skimage.io.imread(image_path)
except:
print("Error loading image %s, skipping..." %(image_path))
return
if len(gdf_nonull) == 0:
print(" Empty labels for name_root!", name_root)
im = gdal.Open(image_path)
proj = im.GetProjection()
geo = im.GetGeoTransform()
im = im.ReadAsArray()
mask_arr = np.zeros((1, im.shape[1], im.shape[2]))
create_multiband_geotiff(mask_arr, output_path_mask, proj, geo)
if output_path_mask_fbc:
mask_arr = np.zeros((3, im.shape[1], im.shape[2]))
create_multiband_geotiff(mask_arr, output_path_mask_fbc, proj, geo)
return
f_mask = sol.vector.mask.df_to_px_mask(df=gdf_nonull, out_file=output_path_mask,
channels=['footprint'],
reference_im=image_path,
shape=(im_tmp.shape[0], im_tmp.shape[1]))
if output_path_mask_fbc:
fbc_mask = sol.vector.mask.df_to_px_mask(df=gdf_nonull, out_file=output_path_mask_fbc,
channels=['band1', 'band2', 'band3'],
reference_im=image_path,
boundary_width=4, meters=True,
shape=(im_tmp.shape[0], im_tmp.shape[1]))
return
| true | true |
1c303396e9863a9c89da3336cb44e63eaf8e0bf8 | 1,526 | py | Python | comment/models.py | abdukhashimov/django-rest-blog-2 | ae12c24f95b3a8f216c85a8f32c47e215118ce07 | [
"MIT"
] | null | null | null | comment/models.py | abdukhashimov/django-rest-blog-2 | ae12c24f95b3a8f216c85a8f32c47e215118ce07 | [
"MIT"
] | null | null | null | comment/models.py | abdukhashimov/django-rest-blog-2 | ae12c24f95b3a8f216c85a8f32c47e215118ce07 | [
"MIT"
] | null | null | null | from django.db import models
from post.models import Post
from django.contrib.auth import get_user_model
from django.contrib.admin.utils import NestedObjects
class CommentManager(models.Manager):
    """Manager adding parent/child lookups for threaded comments."""

    def get_parent_comment(self, id):
        # A "parent" comment is one with the given id that has no parent
        # of its own, i.e. a top-level comment.
        qs = super(CommentManager, self).get_queryset()
        return qs.filter(id=id, parent_id__isnull=True)

    def get_child_comment(self, id):
        # Use the admin NestedObjects collector to gather every object
        # that would be cascade-deleted with this comment — that is, the
        # comment's entire (recursive) reply subtree — then drop the
        # comment itself from the result.
        parents = Comment.objects.filter(id=id)
        collector = NestedObjects(using='default')
        collector.collect(parents)
        root = parents[0]
        descendants = collector.data[root.__class__]
        descendants.remove(root)
        return descendants
class Comment(models.Model):
    """A user's comment on a Post.
    A comment may reply to another comment through the self-referential
    ``parent`` foreign key; top-level comments have ``parent`` set to None.
    """
    user = models.ForeignKey(get_user_model(), on_delete=models.CASCADE)
    comment = models.TextField()
    created_at = models.DateTimeField(auto_now_add=True)
    # Nullable self-FK: a non-null parent marks this comment as a reply.
    parent = models.ForeignKey(
        'self', related_name='reply', null=True, blank=True,
        on_delete=models.CASCADE
    )
    post = models.ForeignKey(
        Post, related_name='comments', on_delete=models.CASCADE)
    class Meta:
        # Newest comments first.
        ordering = ['-created_at', ]
    objects = CommentManager()
    def __str__(self):
        if self.parent is None:
            return "{}'s comment".format(str(self.user))
        return "{}'s reply".format(str(self.user))
    @property
    def child_comments(self):
        # Entire (recursive) reply subtree of this comment; see
        # CommentManager.get_child_comment.
        return Comment.objects.get_child_comment(self.id)
| 31.142857 | 72 | 0.680865 | from django.db import models
from post.models import Post
from django.contrib.auth import get_user_model
from django.contrib.admin.utils import NestedObjects
class CommentManager(models.Manager):
    # Manager adding parent/child lookups for threaded comments.
    def get_parent_comment(self, id):
        # Returns the comment only if it is top-level (no parent set).
        return super(
            CommentManager, self
        ).get_queryset().filter(id=id, parent_id__isnull=True)
    def get_child_comment(self, id):
        # Collect everything that would cascade-delete with this comment
        # (i.e. its whole reply subtree), then remove the comment itself.
        parents = Comment.objects.filter(id=id)
        collector = NestedObjects(using='default')
        collector.collect(parents)
        collector.data[parents[0].__class__].remove(parents[0])
        return collector.data[parents[0].__class__]
class Comment(models.Model):
    # A user's comment on a Post; the nullable self-FK ``parent`` makes
    # replies possible — top-level comments have parent=None.
    user = models.ForeignKey(get_user_model(), on_delete=models.CASCADE)
    comment = models.TextField()
    created_at = models.DateTimeField(auto_now_add=True)
    parent = models.ForeignKey(
        'self', related_name='reply', null=True, blank=True,
        on_delete=models.CASCADE
    )
    post = models.ForeignKey(
        Post, related_name='comments', on_delete=models.CASCADE)
    class Meta:
        # Newest comments first.
        ordering = ['-created_at', ]
    objects = CommentManager()
    def __str__(self):
        if self.parent is None:
            return "{}'s comment".format(str(self.user))
        return "{}'s reply".format(str(self.user))
    @property
    def child_comments(self):
        # Entire reply subtree, via the custom manager.
        return Comment.objects.get_child_comment(self.id)
| true | true |
1c303427d85e89b55d8a992ea275e68e7213ab06 | 1,532 | py | Python | src/dictionaries/cuhk/setup_data.py | aaronhktan/jyut-dict | d1a3f22af224f35cde312b7e664d3e392c8af3db | [
"MIT"
] | 52 | 2019-01-20T20:40:15.000Z | 2022-03-11T01:29:34.000Z | src/dictionaries/cuhk/setup_data.py | aaronhktan/jyut-dict | d1a3f22af224f35cde312b7e664d3e392c8af3db | [
"MIT"
] | 58 | 2019-01-30T06:35:53.000Z | 2022-02-20T03:33:18.000Z | src/dictionaries/cuhk/setup_data.py | aaronhktan/jyut-dict | d1a3f22af224f35cde312b7e664d3e392c8af3db | [
"MIT"
] | 1 | 2021-08-03T03:46:36.000Z | 2021-08-03T03:46:36.000Z | from bs4 import BeautifulSoup
import requests
import sys
# Scraper: collects per-word page links from the CUHK hanyu terms site
# and writes them (word<TAB>url per line) to the file named in argv[1].
if len(sys.argv) != 2:
    print("Usage: python3 script.py output_filename.txt")
    sys.exit(1)
r = requests.get("https://apps.itsc.cuhk.edu.hk/hanyu/Page/Terms.aspx")
soup = BeautifulSoup(r.text, "html.parser")
# Extract links to each category
main_panel = soup.find("div", id="MainContent_panelTermsIndex")
main_table = main_panel.find("tbody").find("tr")
columns = main_table.find_all(
    "td"
)  # Yes, each column in the table is a <td>. Why? I don't know either.
category_links = []
for column in columns:
    rows = column.find_all("td")
    for row in rows:
        # Only cells containing an anchor point at a category page.
        if row.find("a"):
            category_links.append(
                "https://apps.itsc.cuhk.edu.hk/hanyu/Page/" + row.find("a").get("href")
            )
# Extract links for each word in each category
word_links = []
for category_link in category_links:
    print(f"Getting data for category {category_link}...")
    r = requests.get(category_link)
    soup = BeautifulSoup(r.text, "html.parser")
    main_table = soup.find("div", id="MainContent_panelTermsQuery").find("table")
    word_link_elems = main_table.find_all("a")
    for word_link_elem in word_link_elems:
        # One "word<TAB>absolute-url" line per entry.
        word_links.append(
            word_link_elem.get_text()
            + "\t"
            + "https://apps.itsc.cuhk.edu.hk/hanyu/Page/"
            + word_link_elem.get("href")
            + "\n"
        )
# Write all links to file
with open(sys.argv[1], "w") as output_file:
    output_file.writelines(word_links)
| 30.64 | 87 | 0.650783 | from bs4 import BeautifulSoup
import requests
import sys
# Scraper: collects per-word page links from the CUHK hanyu terms site
# and writes them (word<TAB>url per line) to the file named in argv[1].
if len(sys.argv) != 2:
    print("Usage: python3 script.py output_filename.txt")
    sys.exit(1)
r = requests.get("https://apps.itsc.cuhk.edu.hk/hanyu/Page/Terms.aspx")
soup = BeautifulSoup(r.text, "html.parser")
# Locate the category index table; each column is (oddly) a <td>.
main_panel = soup.find("div", id="MainContent_panelTermsIndex")
main_table = main_panel.find("tbody").find("tr")
columns = main_table.find_all(
    "td"
)
category_links = []
for column in columns:
    rows = column.find_all("td")
    for row in rows:
        # Only cells containing an anchor point at a category page.
        if row.find("a"):
            category_links.append(
                "https://apps.itsc.cuhk.edu.hk/hanyu/Page/" + row.find("a").get("href")
            )
# Extract links for each word in each category
word_links = []
for category_link in category_links:
    print(f"Getting data for category {category_link}...")
    r = requests.get(category_link)
    soup = BeautifulSoup(r.text, "html.parser")
    main_table = soup.find("div", id="MainContent_panelTermsQuery").find("table")
    word_link_elems = main_table.find_all("a")
    for word_link_elem in word_link_elems:
        # One "word<TAB>absolute-url" line per entry.
        word_links.append(
            word_link_elem.get_text()
            + "\t"
            + "https://apps.itsc.cuhk.edu.hk/hanyu/Page/"
            + word_link_elem.get("href")
            + "\n"
        )
# Write all links to file
with open(sys.argv[1], "w") as output_file:
    output_file.writelines(word_links)
| true | true |
1c3034837475e5316df55c05060f253699b61e3a | 207,999 | py | Python | ryu/ofproto/ofproto_v1_4_parser.py | alanquillin/ryu | 5552348c7c0a425313e2d5085d40d9729c8cc95e | [
"Apache-2.0"
] | null | null | null | ryu/ofproto/ofproto_v1_4_parser.py | alanquillin/ryu | 5552348c7c0a425313e2d5085d40d9729c8cc95e | [
"Apache-2.0"
] | null | null | null | ryu/ofproto/ofproto_v1_4_parser.py | alanquillin/ryu | 5552348c7c0a425313e2d5085d40d9729c8cc95e | [
"Apache-2.0"
] | null | null | null | # Copyright (C) 2012, 2013, 2014 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2012, 2013 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Decoder/Encoder implementations of OpenFlow 1.4.
"""
import six
import struct
from ryu.lib import addrconv
from ryu.lib.pack_utils import msg_pack_into
from ryu import utils
from ryu.ofproto.ofproto_parser import StringifyMixin, MsgBase, MsgInMsgBase, msg_str_attr
from ryu.ofproto import ether
from ryu.ofproto import nx_actions
from ryu.ofproto import ofproto_parser
from ryu.ofproto import ofproto_common
from ryu.ofproto import ofproto_v1_4 as ofproto
_MSG_PARSERS = {}
def _set_msg_type(msg_type):
def _set_cls_msg_type(cls):
cls.cls_msg_type = msg_type
return cls
return _set_cls_msg_type
def _register_parser(cls):
    """Class decorator: register ``cls.parser`` in the module parser table."""
    msg_type = cls.cls_msg_type
    # The class must have been stamped by _set_msg_type first, and each
    # message type may have only one registered parser.
    assert msg_type is not None
    assert msg_type not in _MSG_PARSERS
    _MSG_PARSERS[msg_type] = cls.parser
    return cls
@ofproto_parser.register_msg_parser(ofproto.OFP_VERSION)
def msg_parser(datapath, version, msg_type, msg_len, xid, buf):
    # Dispatch to the parser registered for msg_type by @_register_parser.
    # NOTE(review): an unregistered msg_type makes parser None and the
    # call below raise TypeError.
    parser = _MSG_PARSERS.get(msg_type)
    return parser(datapath, version, msg_type, msg_len, xid, buf)
@_register_parser
@_set_msg_type(ofproto.OFPT_HELLO)
class OFPHello(MsgBase):
    """
    Hello message

    When connection is started, the hello message is exchanged between a
    switch and a controller.

    This message is handled by the Ryu framework, so the Ryu application
    do not need to process this typically.

    ========== =========================================================
    Attribute  Description
    ========== =========================================================
    elements   list of ``OFPHelloElemVersionBitmap`` instance
    ========== =========================================================
    """
    def __init__(self, datapath, elements=None):
        elements = elements if elements else []
        super(OFPHello, self).__init__(datapath)
        self.elements = elements
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPHello, cls).parser(datapath, version, msg_type,
                                          msg_len, xid, buf)
        # Walk the variable-length hello elements that follow the header.
        offset = ofproto.OFP_HELLO_HEADER_SIZE
        elems = []
        while offset < msg.msg_len:
            # Every element starts with a common (type, length) header.
            type_, length = struct.unpack_from(
                ofproto.OFP_HELLO_ELEM_HEADER_PACK_STR, msg.buf, offset)
            # better to register Hello Element classes but currently
            # Only VerisonBitmap is supported so let's be simple.
            if type_ == ofproto.OFPHET_VERSIONBITMAP:
                elem = OFPHelloElemVersionBitmap.parser(msg.buf, offset)
                elems.append(elem)
            # Unknown element types are skipped by advancing ``length``.
            offset += length
        msg.elements = elems
        return msg
class OFPHelloElemVersionBitmap(StringifyMixin):
    """
    Version bitmap Hello Element

    ========== =========================================================
    Attribute  Description
    ========== =========================================================
    versions   list of versions of OpenFlow protocol a device supports
    ========== =========================================================
    """
    def __init__(self, versions, type_=None, length=None):
        super(OFPHelloElemVersionBitmap, self).__init__()
        # ``type_``/``length`` parameters are accepted for jsondict
        # round-tripping; type is fixed and length is filled by parser().
        self.type = ofproto.OFPHET_VERSIONBITMAP
        self.length = None
        self._bitmaps = None
        self.versions = versions
    @classmethod
    def parser(cls, buf, offset):
        """Parse a versionbitmap hello element starting at ``offset``.

        The element body is a sequence of 32-bit words; bit n of word i
        marks support for wire version (i * 32 + n).
        """
        type_, length = struct.unpack_from(
            ofproto.OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_PACK_STR,
            buf, offset)
        assert type_ == ofproto.OFPHET_VERSIONBITMAP
        bitmaps_len = (length -
                       ofproto.OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_SIZE)
        offset += ofproto.OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_SIZE
        bitmaps = []
        while bitmaps_len >= 4:
            bitmap = struct.unpack_from('!I', buf, offset)
            bitmaps.append(bitmap[0])
            offset += 4
            bitmaps_len -= 4
        # Bug fix: range(31) skipped the most significant bit of every
        # 32-bit word, so versions 31, 63, ... could never be reported.
        versions = [i * 32 + shift
                    for i, bitmap in enumerate(bitmaps)
                    for shift in range(32) if bitmap & (1 << shift)]
        elem = cls(versions)
        elem.length = length
        elem._bitmaps = bitmaps
        return elem
@_register_parser
@_set_msg_type(ofproto.OFPT_ECHO_REQUEST)
class OFPEchoRequest(MsgBase):
    """
    Echo request message

    This message is handled by the Ryu framework, so the Ryu application
    do not need to process this typically.

    ========== =========================================================
    Attribute  Description
    ========== =========================================================
    data       An arbitrary length data
    ========== =========================================================

    Example::

        def send_echo_request(self, datapath, data):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser

            req = ofp_parser.OFPEchoRequest(datapath, data)
            datapath.send_msg(req)

        @set_ev_cls(ofp_event.EventOFPEchoRequest,
                    [HANDSHAKE_DISPATCHER, CONFIG_DISPATCHER, MAIN_DISPATCHER])
        def echo_request_handler(self, ev):
            self.logger.debug('OFPEchoRequest received: data=%s',
                              utils.hex_array(ev.msg.data))
    """
    def __init__(self, datapath, data=None):
        super(OFPEchoRequest, self).__init__(datapath)
        # Optional opaque payload; the peer echoes it back unchanged.
        self.data = data
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPEchoRequest, cls).parser(datapath, version, msg_type,
                                                msg_len, xid, buf)
        # Everything after the common OpenFlow header is the payload.
        msg.data = msg.buf[ofproto.OFP_HEADER_SIZE:]
        return msg
    def _serialize_body(self):
        # Payload is optional on requests (cf. OFPEchoReply, which asserts).
        if self.data is not None:
            self.buf += self.data
@_register_parser
@_set_msg_type(ofproto.OFPT_ERROR)
class OFPErrorMsg(MsgBase):
    """
    Error message

    The switch notifies controller of problems by this message.

    ========== =========================================================
    Attribute  Description
    ========== =========================================================
    type       High level type of error
    code       Details depending on the type
    data       Variable length data depending on the type and code
    ========== =========================================================

    ``type`` attribute corresponds to ``type_`` parameter of __init__.

    Types and codes are defined in ``ryu.ofproto.ofproto``.

    ============================= ===========
    Type                          Code
    ============================= ===========
    OFPET_HELLO_FAILED            OFPHFC_*
    OFPET_BAD_REQUEST             OFPBRC_*
    OFPET_BAD_ACTION              OFPBAC_*
    OFPET_BAD_INSTRUCTION         OFPBIC_*
    OFPET_BAD_MATCH               OFPBMC_*
    OFPET_FLOW_MOD_FAILED         OFPFMFC_*
    OFPET_GROUP_MOD_FAILED        OFPGMFC_*
    OFPET_PORT_MOD_FAILED         OFPPMFC_*
    OFPET_TABLE_MOD_FAILED        OFPTMFC_*
    OFPET_QUEUE_OP_FAILED         OFPQOFC_*
    OFPET_SWITCH_CONFIG_FAILED    OFPSCFC_*
    OFPET_ROLE_REQUEST_FAILED     OFPRRFC_*
    OFPET_METER_MOD_FAILED        OFPMMFC_*
    OFPET_TABLE_FEATURES_FAILED   OFPTFFC_*
    OFPET_EXPERIMENTER            N/A
    ============================= ===========

    Example::

        @set_ev_cls(ofp_event.EventOFPErrorMsg,
                    [HANDSHAKE_DISPATCHER, CONFIG_DISPATCHER, MAIN_DISPATCHER])
        def error_msg_handler(self, ev):
            msg = ev.msg

            self.logger.debug('OFPErrorMsg received: type=0x%02x code=0x%02x '
                              'message=%s',
                              msg.type, msg.code, utils.hex_array(msg.data))
    """
    def __init__(self, datapath, type_=None, code=None, data=None):
        super(OFPErrorMsg, self).__init__(datapath)
        self.type = type_
        self.code = code
        self.data = data
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        # Peek at the error type first: experimenter errors use a
        # different wire layout and are delegated to their own class.
        type_, = struct.unpack_from('!H', six.binary_type(buf),
                                    ofproto.OFP_HEADER_SIZE)
        if type_ == ofproto.OFPET_EXPERIMENTER:
            return OFPErrorExperimenterMsg.parser(datapath, version, msg_type,
                                                  msg_len, xid, buf)
        msg = super(OFPErrorMsg, cls).parser(datapath, version, msg_type,
                                             msg_len, xid, buf)
        msg.type, msg.code = struct.unpack_from(
            ofproto.OFP_ERROR_MSG_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        # Remainder of the message is type/code-dependent payload.
        msg.data = msg.buf[ofproto.OFP_ERROR_MSG_SIZE:]
        return msg
    def _serialize_body(self):
        assert self.data is not None
        msg_pack_into(ofproto.OFP_ERROR_MSG_PACK_STR, self.buf,
                      ofproto.OFP_HEADER_SIZE, self.type, self.code)
        self.buf += self.data
class OFPErrorExperimenterMsg(MsgBase):
    """Experimenter error message (OFPET_EXPERIMENTER).

    Instantiated by OFPErrorMsg.parser when the error type field equals
    OFPET_EXPERIMENTER; not registered in the parser table itself.
    """
    def __init__(self, datapath, type_=None, exp_type=None, experimenter=None,
                 data=None):
        super(OFPErrorExperimenterMsg, self).__init__(datapath)
        # ``type_`` parameter is ignored: the type is fixed by definition.
        self.type = ofproto.OFPET_EXPERIMENTER
        self.exp_type = exp_type
        self.experimenter = experimenter
        self.data = data
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        # NOTE(review): this mutates the *class* attribute cls_msg_type
        # on every parse; harmless here since msg_type is always
        # OFPT_ERROR, but it is shared state.
        cls.cls_msg_type = msg_type
        msg = super(OFPErrorExperimenterMsg, cls).parser(
            datapath, version, msg_type, msg_len, xid, buf)
        msg.type, msg.exp_type, msg.experimenter = struct.unpack_from(
            ofproto.OFP_ERROR_EXPERIMENTER_MSG_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        # Remainder is experimenter-defined payload.
        msg.data = msg.buf[ofproto.OFP_ERROR_EXPERIMENTER_MSG_SIZE:]
        return msg
    def _serialize_body(self):
        assert self.data is not None
        msg_pack_into(ofproto.OFP_ERROR_EXPERIMENTER_MSG_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE,
                      self.type, self.exp_type, self.experimenter)
        self.buf += self.data
@_register_parser
@_set_msg_type(ofproto.OFPT_ECHO_REPLY)
class OFPEchoReply(MsgBase):
    """
    Echo reply message

    This message is handled by the Ryu framework, so the Ryu application
    do not need to process this typically.

    ========== =========================================================
    Attribute  Description
    ========== =========================================================
    data       An arbitrary length data
    ========== =========================================================

    Example::

        def send_echo_reply(self, datapath, data):
            ofp_parser = datapath.ofproto_parser

            reply = ofp_parser.OFPEchoReply(datapath, data)
            datapath.send_msg(reply)

        @set_ev_cls(ofp_event.EventOFPEchoReply,
                    [HANDSHAKE_DISPATCHER, CONFIG_DISPATCHER, MAIN_DISPATCHER])
        def echo_reply_handler(self, ev):
            self.logger.debug('OFPEchoReply received: data=%s',
                              utils.hex_array(ev.msg.data))
    """
    def __init__(self, datapath, data=None):
        super(OFPEchoReply, self).__init__(datapath)
        # Payload mirrored back from the corresponding echo request.
        self.data = data
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPEchoReply, cls).parser(datapath, version, msg_type,
                                              msg_len, xid, buf)
        msg.data = msg.buf[ofproto.OFP_HEADER_SIZE:]
        return msg
    def _serialize_body(self):
        # Unlike OFPEchoRequest, a reply requires data to be set.
        assert self.data is not None
        self.buf += self.data
@_set_msg_type(ofproto.OFPT_FEATURES_REQUEST)
class OFPFeaturesRequest(MsgBase):
    """
    Features request message

    The controller sends a feature request to the switch upon session
    establishment.

    This message is handled by the Ryu framework, so the Ryu application
    do not need to process this typically.

    Example::

        def send_features_request(self, datapath):
            ofp_parser = datapath.ofproto_parser

            req = ofp_parser.OFPFeaturesRequest(datapath)
            datapath.send_msg(req)
    """
    def __init__(self, datapath):
        # Body-less message: the OpenFlow header alone carries the type.
        super(OFPFeaturesRequest, self).__init__(datapath)
@_register_parser
@_set_msg_type(ofproto.OFPT_EXPERIMENTER)
class OFPExperimenter(MsgBase):
    """
    Experimenter extension message

    ============= =========================================================
    Attribute     Description
    ============= =========================================================
    experimenter  Experimenter ID
    exp_type      Experimenter defined
    data          Experimenter defined arbitrary additional data
    ============= =========================================================
    """
    def __init__(self, datapath, experimenter=None, exp_type=None, data=None):
        super(OFPExperimenter, self).__init__(datapath)
        self.experimenter = experimenter
        self.exp_type = exp_type
        self.data = data
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPExperimenter, cls).parser(datapath, version,
                                                 msg_type, msg_len,
                                                 xid, buf)
        (msg.experimenter, msg.exp_type) = struct.unpack_from(
            ofproto.OFP_EXPERIMENTER_HEADER_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        # Remainder of the message is experimenter-defined payload.
        msg.data = msg.buf[ofproto.OFP_EXPERIMENTER_HEADER_SIZE:]
        return msg
    def _serialize_body(self):
        assert self.data is not None
        msg_pack_into(ofproto.OFP_EXPERIMENTER_HEADER_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE,
                      self.experimenter, self.exp_type)
        self.buf += self.data
@_register_parser
@_set_msg_type(ofproto.OFPT_FEATURES_REPLY)
class OFPSwitchFeatures(MsgBase):
    """
    Features reply message

    The switch responds with a features reply message to a features
    request.

    This message is handled by the Ryu framework, so the Ryu application
    do not need to process this typically.

    Example::

        @set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)
        def switch_features_handler(self, ev):
            msg = ev.msg

            self.logger.debug('OFPSwitchFeatures received: '
                              'datapath_id=0x%016x n_buffers=%d '
                              'n_tables=%d auxiliary_id=%d '
                              'capabilities=0x%08x',
                              msg.datapath_id, msg.n_buffers, msg.n_tables,
                              msg.auxiliary_id, msg.capabilities)
    """
    def __init__(self, datapath, datapath_id=None, n_buffers=None,
                 n_tables=None, auxiliary_id=None, capabilities=None):
        super(OFPSwitchFeatures, self).__init__(datapath)
        self.datapath_id = datapath_id
        self.n_buffers = n_buffers
        self.n_tables = n_tables
        self.auxiliary_id = auxiliary_id
        self.capabilities = capabilities
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPSwitchFeatures, cls).parser(datapath, version, msg_type,
                                                   msg_len, xid, buf)
        # _reserved holds the wire-format padding word; kept only so the
        # unpack consumes the whole fixed-size struct.
        (msg.datapath_id,
         msg.n_buffers,
         msg.n_tables,
         msg.auxiliary_id,
         msg.capabilities,
         msg._reserved) = struct.unpack_from(
            ofproto.OFP_SWITCH_FEATURES_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        return msg
@_set_msg_type(ofproto.OFPT_GET_CONFIG_REQUEST)
class OFPGetConfigRequest(MsgBase):
    """
    Get config request message

    The controller sends a get config request to query configuration
    parameters in the switch.

    Example::

        def send_get_config_request(self, datapath):
            ofp_parser = datapath.ofproto_parser

            req = ofp_parser.OFPGetConfigRequest(datapath)
            datapath.send_msg(req)
    """
    def __init__(self, datapath):
        # Body-less message: the OpenFlow header alone carries the type.
        super(OFPGetConfigRequest, self).__init__(datapath)
@_register_parser
@_set_msg_type(ofproto.OFPT_GET_CONFIG_REPLY)
class OFPGetConfigReply(MsgBase):
    """
    Get config reply message

    The switch responds to a configuration request with a get config reply
    message.

    ============= =========================================================
    Attribute     Description
    ============= =========================================================
    flags         Bitmap of the following flags.

                  | OFPC_FRAG_NORMAL
                  | OFPC_FRAG_DROP
                  | OFPC_FRAG_REASM
    miss_send_len Max bytes of new flow that datapath should send to the
                  controller
    ============= =========================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPGetConfigReply, MAIN_DISPATCHER)
        def get_config_reply_handler(self, ev):
            msg = ev.msg
            dp = msg.datapath
            ofp = dp.ofproto
            flags = []

            if msg.flags & ofp.OFPC_FRAG_NORMAL:
                flags.append('NORMAL')
            if msg.flags & ofp.OFPC_FRAG_DROP:
                flags.append('DROP')
            if msg.flags & ofp.OFPC_FRAG_REASM:
                flags.append('REASM')
            self.logger.debug('OFPGetConfigReply received: '
                              'flags=%s miss_send_len=%d',
                              ','.join(flags), msg.miss_send_len)
    """
    def __init__(self, datapath, flags=None, miss_send_len=None):
        super(OFPGetConfigReply, self).__init__(datapath)
        self.flags = flags
        self.miss_send_len = miss_send_len
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPGetConfigReply, cls).parser(datapath, version, msg_type,
                                                   msg_len, xid, buf)
        msg.flags, msg.miss_send_len = struct.unpack_from(
            ofproto.OFP_SWITCH_CONFIG_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        return msg
@_set_msg_type(ofproto.OFPT_SET_CONFIG)
class OFPSetConfig(MsgBase):
    """
    Set config request message

    The controller sends a set config request message to set configuraion
    parameters.

    ============= =========================================================
    Attribute     Description
    ============= =========================================================
    flags         Bitmap of the following flags.

                  | OFPC_FRAG_NORMAL
                  | OFPC_FRAG_DROP
                  | OFPC_FRAG_REASM
    miss_send_len Max bytes of new flow that datapath should send to the
                  controller
    ============= =========================================================

    Example::

        def send_set_config(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser

            req = ofp_parser.OFPSetConfig(datapath, ofp.OFPC_FRAG_NORMAL, 256)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, miss_send_len=0):
        super(OFPSetConfig, self).__init__(datapath)
        self.flags = flags
        self.miss_send_len = miss_send_len
    def _serialize_body(self):
        # Guard against callers explicitly passing None; defaults are 0.
        assert self.flags is not None
        assert self.miss_send_len is not None
        msg_pack_into(ofproto.OFP_SWITCH_CONFIG_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE,
                      self.flags, self.miss_send_len)
class OFPMatch(StringifyMixin):
    """
    Flow Match Structure

    This class is implementation of the flow match structure having
    compose/query API.

    You can define the flow match by the keyword arguments.
    The following arguments are available.

    ================ =============== ==================================
    Argument         Value           Description
    ================ =============== ==================================
    in_port          Integer 32bit   Switch input port
    in_phy_port      Integer 32bit   Switch physical input port
    metadata         Integer 64bit   Metadata passed between tables
    eth_dst          MAC address     Ethernet destination address
    eth_src          MAC address     Ethernet source address
    eth_type         Integer 16bit   Ethernet frame type
    vlan_vid         Integer 16bit   VLAN id
    vlan_pcp         Integer 8bit    VLAN priority
    ip_dscp          Integer 8bit    IP DSCP (6 bits in ToS field)
    ip_ecn           Integer 8bit    IP ECN (2 bits in ToS field)
    ip_proto         Integer 8bit    IP protocol
    ipv4_src         IPv4 address    IPv4 source address
    ipv4_dst         IPv4 address    IPv4 destination address
    tcp_src          Integer 16bit   TCP source port
    tcp_dst          Integer 16bit   TCP destination port
    udp_src          Integer 16bit   UDP source port
    udp_dst          Integer 16bit   UDP destination port
    sctp_src         Integer 16bit   SCTP source port
    sctp_dst         Integer 16bit   SCTP destination port
    icmpv4_type      Integer 8bit    ICMP type
    icmpv4_code      Integer 8bit    ICMP code
    arp_op           Integer 16bit   ARP opcode
    arp_spa          IPv4 address    ARP source IPv4 address
    arp_tpa          IPv4 address    ARP target IPv4 address
    arp_sha          MAC address     ARP source hardware address
    arp_tha          MAC address     ARP target hardware address
    ipv6_src         IPv6 address    IPv6 source address
    ipv6_dst         IPv6 address    IPv6 destination address
    ipv6_flabel      Integer 32bit   IPv6 Flow Label
    icmpv6_type      Integer 8bit    ICMPv6 type
    icmpv6_code      Integer 8bit    ICMPv6 code
    ipv6_nd_target   IPv6 address    Target address for ND
    ipv6_nd_sll      MAC address     Source link-layer for ND
    ipv6_nd_tll      MAC address     Target link-layer for ND
    mpls_label       Integer 32bit   MPLS label
    mpls_tc          Integer 8bit    MPLS TC
    mpls_bos         Integer 8bit    MPLS BoS bit
    pbb_isid         Integer 24bit   PBB I-SID
    tunnel_id        Integer 64bit   Logical Port Metadata
    ipv6_exthdr      Integer 16bit   IPv6 Extension Header pseudo-field
    pbb_uca          Integer 8bit    PBB UCA header field
    tcp_flags        Integer 16bit   TCP flags
                                     (EXT-109 ONF Extension)
    actset_output    Integer 32bit   Output port from action set metadata
                                     (EXT-233 ONF Extension)
    ================ =============== ==================================

    Example::

        >>> # compose
        >>> match = parser.OFPMatch(
        ...     in_port=1,
        ...     eth_type=0x86dd,
        ...     ipv6_src=('2001:db8:bd05:1d2:288a:1fc0:1:10ee',
        ...               'ffff:ffff:ffff:ffff::'),
        ...     ipv6_dst='2001:db8:bd05:1d2:288a:1fc0:1:10ee')
        >>> # query
        >>> if 'ipv6_src' in match:
        ...     print match['ipv6_src']
        ...
        ('2001:db8:bd05:1d2:288a:1fc0:1:10ee', 'ffff:ffff:ffff:ffff::')

    .. Note::

        For the list of the supported Nicira experimenter matches,
        please refer to :ref:`ryu.ofproto.nx_match <nx_match_structures>`.

    .. Note::

        For VLAN id match field, special values are defined in OpenFlow Spec.

        1) Packets with and without a VLAN tag

            - Example::

                match = parser.OFPMatch()

            - Packet Matching

                ====================== =====
                non-VLAN-tagged        MATCH
                VLAN-tagged(vlan_id=3) MATCH
                VLAN-tagged(vlan_id=5) MATCH
                ====================== =====

        2) Only packets without a VLAN tag

            - Example::

                match = parser.OFPMatch(vlan_vid=0x0000)

            - Packet Matching

                ====================== =====
                non-VLAN-tagged        MATCH
                VLAN-tagged(vlan_id=3) x
                VLAN-tagged(vlan_id=5) x
                ====================== =====

        3) Only packets with a VLAN tag regardless of its value

            - Example::

                match = parser.OFPMatch(vlan_vid=(0x1000, 0x1000))

            - Packet Matching

                ====================== =====
                non-VLAN-tagged        x
                VLAN-tagged(vlan_id=3) MATCH
                VLAN-tagged(vlan_id=5) MATCH
                ====================== =====

        4) Only packets with VLAN tag and VID equal

            - Example::

                match = parser.OFPMatch(vlan_vid=(0x1000 | 3))

            - Packet Matching

                ====================== =====
                non-VLAN-tagged        x
                VLAN-tagged(vlan_id=3) MATCH
                VLAN-tagged(vlan_id=5) x
                ====================== =====
    """
    def __init__(self, type_=None, length=None, _ordered_fields=None,
                 **kwargs):
        super(OFPMatch, self).__init__()
        self.type = ofproto.OFPMT_OXM
        self.length = length
        if _ordered_fields is not None:
            # Internal path (parser/from_jsondict): fields are already in
            # wire order; user kwargs must not be mixed in.
            assert not kwargs
            self._fields2 = _ordered_fields
        else:
            # Normalize user-friendly values (e.g. string addresses) and
            # convert to (oxm_type, value, mask) triples.
            kwargs = dict(ofproto.oxm_normalize_user(k, v) for
                          (k, v) in kwargs.items())
            fields = [ofproto.oxm_from_user(k, v) for (k, v)
                      in kwargs.items()]
            # assumption: sorting by OXM type values makes fields
            # meet ordering requirements (eg. eth_type before ipv4_src)
            fields.sort(
                key=lambda x: x[0][0] if isinstance(x[0], tuple) else x[0])
            self._fields2 = [ofproto.oxm_to_user(n, v, m) for (n, v, m)
                             in fields]
    @classmethod
    def parser(cls, buf, offset):
        """
        Returns an object which is generated from a buffer including the
        expression of the wire protocol of the flow match.
        """
        match = OFPMatch()
        type_, length = struct.unpack_from('!HH', buf, offset)
        match.type = type_
        match.length = length
        # ofp_match adjustment
        offset += 4
        length -= 4
        # Consume TLV-encoded OXM fields until the match body is exhausted.
        fields = []
        while length > 0:
            n, value, mask, field_len = ofproto.oxm_parse(buf, offset)
            k, uv = ofproto.oxm_to_user(n, value, mask)
            fields.append((k, uv))
            offset += field_len
            length -= field_len
        match._fields2 = fields
        return match
    def serialize(self, buf, offset):
        """
        Outputs the expression of the wire protocol of the flow match into
        the buf.
        Returns the output length.
        """
        fields = [ofproto.oxm_from_user(k, uv) for (k, uv)
                  in self._fields2]
        hdr_pack_str = '!HH'
        field_offset = offset + struct.calcsize(hdr_pack_str)
        for (n, value, mask) in fields:
            field_offset += ofproto.oxm_serialize(n, value, mask, buf,
                                                  field_offset)
        # Header is written after the fields so the real length is known.
        length = field_offset - offset
        msg_pack_into(hdr_pack_str, buf, offset, ofproto.OFPMT_OXM, length)
        self.length = length
        # The match struct is padded to a multiple of 8 bytes; the pad is
        # not included in the length field.
        pad_len = utils.round_up(length, 8) - length
        msg_pack_into("%dx" % pad_len, buf, field_offset)
        return length + pad_len
    def __getitem__(self, key):
        # Note: each lookup rebuilds a dict from the ordered field list.
        return dict(self._fields2)[key]
    def __contains__(self, key):
        return key in dict(self._fields2)
    def iteritems(self):
        return iter(dict(self._fields2).items())
    def items(self):
        return self._fields2
    def get(self, key, default=None):
        return dict(self._fields2).get(key, default)
    def stringify_attrs(self):
        yield "oxm_fields", dict(self._fields2)
    def to_jsondict(self):
        """
        Returns a dict expressing the flow match.
        """
        body = {"oxm_fields": [ofproto.oxm_to_jsondict(k, uv) for k, uv
                               in self._fields2],
                "length": self.length,
                "type": self.type}
        return {self.__class__.__name__: body}
    @classmethod
    def from_jsondict(cls, dict_):
        """
        Returns an object which is generated from a dict.
        Exception raises:
        KeyError -- Unknown match field is defined in dict
        """
        fields = [ofproto.oxm_from_jsondict(f) for f
                  in dict_['oxm_fields']]
        return OFPMatch(_ordered_fields=fields)
class OFPPropUnknown(StringifyMixin):
    # Fallback container used by OFPPropBase.parse for property types
    # without a registered subclass; keeps the raw bytes so nothing is
    # silently dropped.
    def __init__(self, type_=None, length=None, buf=None):
        # type_/length are ignored here; OFPPropBase.parse assigns them
        # onto the instance after construction.
        self.buf = buf
    @classmethod
    def parser(cls, buf):
        return cls(buf=buf)
class OFPPropBase(StringifyMixin):
    """Common base for TLV-style OpenFlow properties.
    Subclasses provide a ``_TYPES`` registry (type constant -> subclass)
    populated via :meth:`register_type`, and implement ``serialize_body``.
    """
    _PACK_STR = '!HH'
    # _TYPES = {} must be an attribute of subclass
    def __init__(self, type_, length=None):
        self.type = type_
        self.length = length
    @classmethod
    def register_type(cls, type_):
        # Decorator factory: map a property type constant to its subclass.
        def _register_type(subcls):
            cls._TYPES[type_] = subcls
            return subcls
        return _register_type
    @classmethod
    def parse(cls, buf):
        # Peek the common (type, length) header, dispatch to the
        # registered subclass parser, and return (property, rest-of-buf).
        # Unknown types fall back to OFPPropUnknown.
        (type_, length) = struct.unpack_from(cls._PACK_STR, buf, 0)
        # Properties are padded to 8-byte boundaries on the wire.
        rest = buf[utils.round_up(length, 8):]
        try:
            subcls = cls._TYPES[type_]
        except KeyError:
            subcls = OFPPropUnknown
        prop = subcls.parser(buf)
        prop.type = type_
        prop.length = length
        return prop, rest
    @classmethod
    def get_rest(cls, buf):
        # Strip the common header and return just the property payload.
        (type_, length) = struct.unpack_from(cls._PACK_STR, buf, 0)
        offset = struct.calcsize(cls._PACK_STR)
        return buf[offset:length]
    def serialize(self):
        # Body
        # serialize_body should be implemented by subclass
        body = bytearray()
        body += self.serialize_body()
        # fixup
        self.length = len(body) + struct.calcsize(self._PACK_STR)
        # Header
        buf = bytearray()
        msg_pack_into(self._PACK_STR, buf, 0, self.type, self.length)
        buf += body
        # Pad
        pad_len = utils.round_up(self.length, 8) - self.length
        msg_pack_into("%dx" % pad_len, buf, len(buf))
        return buf
class OFPPropCommonExperimenter4ByteData(StringifyMixin):
    """Common base for experimenter properties whose payload is a
    sequence of 32-bit words following (experimenter id, exp_type).
    """
    _PACK_STR = '!HHII'
    _EXPERIMENTER_DATA_PACK_STR = '!I'
    _EXPERIMENTER_DATA_SIZE = 4

    def __init__(self, type_=None, length=None, experimenter=None,
                 exp_type=None, data=None):
        self.type = type_
        self.length = length
        self.experimenter = experimenter
        self.exp_type = exp_type
        # Bug fix: the default used to be the literal ``bytearray()``,
        # a single mutable object created once at class-definition time
        # and shared by every instance constructed without ``data``.
        self.data = bytearray() if data is None else data

    @classmethod
    def parser(cls, buf):
        # Fixed header: (type, length, experimenter, exp_type); the rest
        # of the property (up to ``length``) is a list of 32-bit words.
        (type_, length, experimenter, exp_type) = struct.unpack_from(
            ofproto.OFP_PROP_EXPERIMENTER_PACK_STR, buf, 0)
        rest = buf[ofproto.OFP_PROP_EXPERIMENTER_SIZE:length]
        data = []
        while rest:
            (d,) = struct.unpack_from(
                cls._EXPERIMENTER_DATA_PACK_STR, rest, 0)
            data.append(d)
            rest = rest[cls._EXPERIMENTER_DATA_SIZE:]
        return cls(type_, length, experimenter, exp_type, data)

    def serialize(self):
        # Pack each 32-bit data word first, so the header can carry the
        # recomputed total length.
        offset = 0
        bin_data = bytearray()
        for d in self.data:
            msg_pack_into(self._EXPERIMENTER_DATA_PACK_STR,
                          bin_data, offset, d)
            offset += self._EXPERIMENTER_DATA_SIZE
        # fixup
        self.length = struct.calcsize(self._PACK_STR)
        self.length += len(bin_data)
        buf = bytearray()
        msg_pack_into(self._PACK_STR, buf,
                      0, self.type, self.length, self.experimenter,
                      self.exp_type)
        buf += bin_data
        # Pad to an 8-byte boundary; the pad is excluded from length.
        pad_len = utils.round_up(self.length, 8) - self.length
        msg_pack_into("%dx" % pad_len, buf, len(buf))
        return buf
class OFPPortDescProp(OFPPropBase):
    # Registry of port description property classes, keyed by property
    # type; populated by the OFPPropBase.register_type decorator below.
    _TYPES = {}
@OFPPortDescProp.register_type(ofproto.OFPPDPT_ETHERNET)
class OFPPortDescPropEthernet(OFPPortDescProp):
    """Ethernet port description property: current/advertised/supported/
    peer feature bitmaps and current/max speeds."""

    def __init__(self, type_=None, length=None, curr=None, advertised=None,
                 supported=None, peer=None, curr_speed=None, max_speed=None):
        self.type = type_
        self.length = length
        self.curr = curr
        self.advertised = advertised
        self.supported = supported
        self.peer = peer
        self.curr_speed = curr_speed
        self.max_speed = max_speed

    @classmethod
    def parser(cls, buf):
        # The pack string's field order matches __init__'s positional
        # parameters, so the unpacked tuple can be splatted directly.
        fields = struct.unpack_from(
            ofproto.OFP_PORT_DESC_PROP_ETHERNET_PACK_STR, buf, 0)
        return cls(*fields)
@OFPPortDescProp.register_type(ofproto.OFPPDPT_OPTICAL)
class OFPPortDescPropOptical(OFPPortDescProp):
    """Optical port description property: supported feature bitmap plus
    tx/rx frequency (lambda) ranges, grid spacing and tx power range."""

    def __init__(self, type_=None, length=None, supported=None,
                 tx_min_freq_lmda=None, tx_max_freq_lmda=None,
                 tx_grid_freq_lmda=None, rx_min_freq_lmda=None,
                 rx_max_freq_lmda=None, rx_grid_freq_lmda=None,
                 tx_pwr_min=None, tx_pwr_max=None):
        self.type = type_
        self.length = length
        self.supported = supported
        self.tx_min_freq_lmda = tx_min_freq_lmda
        self.tx_max_freq_lmda = tx_max_freq_lmda
        self.tx_grid_freq_lmda = tx_grid_freq_lmda
        self.rx_min_freq_lmda = rx_min_freq_lmda
        self.rx_max_freq_lmda = rx_max_freq_lmda
        self.rx_grid_freq_lmda = rx_grid_freq_lmda
        self.tx_pwr_min = tx_pwr_min
        self.tx_pwr_max = tx_pwr_max

    @classmethod
    def parser(cls, buf):
        # Unpack order matches __init__'s positional parameter order.
        fields = struct.unpack_from(
            ofproto.OFP_PORT_DESC_PROP_OPTICAL_PACK_STR, buf, 0)
        return cls(*fields)
@OFPPortDescProp.register_type(ofproto.OFPPDPT_EXPERIMENTER)
class OFPPortDescPropExperimenter(OFPPropCommonExperimenter4ByteData):
    # Experimenter port description property; the wire format is handled
    # entirely by OFPPropCommonExperimenter4ByteData.
    pass
class OFPTableModProp(OFPPropBase):
    # Registry of table mod property classes, keyed by property type;
    # populated by the OFPPropBase.register_type decorator below.
    _TYPES = {}
@OFPTableModProp.register_type(ofproto.OFPTMPT_EVICTION)
class OFPTableModPropEviction(OFPTableModProp):
    """Eviction table mod property (OFPTMPT_EVICTION)."""

    def __init__(self, type_=None, length=None, flags=None):
        self.type = type_
        self.length = length
        self.flags = flags

    @classmethod
    def parser(cls, buf):
        # Unpack order matches __init__'s positional parameter order.
        return cls(*struct.unpack_from(
            ofproto.OFP_TABLE_MOD_PROP_EVICTION_PACK_STR, buf, 0))

    def serialize(self):
        # fixup: this property has a fixed wire size
        self.length = ofproto.OFP_TABLE_MOD_PROP_EVICTION_SIZE
        serialized = bytearray()
        msg_pack_into(ofproto.OFP_TABLE_MOD_PROP_EVICTION_PACK_STR,
                      serialized, 0, self.type, self.length, self.flags)
        return serialized
@OFPTableModProp.register_type(ofproto.OFPTMPT_VACANCY)
class OFPTableModPropVacancy(OFPTableModProp):
    """Vacancy table mod property (OFPTMPT_VACANCY): thresholds for
    vacancy events plus the current vacancy value."""

    def __init__(self, type_=None, length=None, vacancy_down=None,
                 vacancy_up=None, vacancy=None):
        self.type = type_
        self.length = length
        self.vacancy_down = vacancy_down
        self.vacancy_up = vacancy_up
        self.vacancy = vacancy

    @classmethod
    def parser(cls, buf):
        # Unpack order matches __init__'s positional parameter order.
        return cls(*struct.unpack_from(
            ofproto.OFP_TABLE_MOD_PROP_VACANCY_PACK_STR, buf, 0))

    def serialize(self):
        # fixup: this property has a fixed wire size
        self.length = ofproto.OFP_TABLE_MOD_PROP_VACANCY_SIZE
        serialized = bytearray()
        msg_pack_into(ofproto.OFP_TABLE_MOD_PROP_VACANCY_PACK_STR,
                      serialized, 0, self.type, self.length,
                      self.vacancy_down, self.vacancy_up, self.vacancy)
        return serialized
@OFPTableModProp.register_type(ofproto.OFPTMPT_EXPERIMENTER)
class OFPTableModPropExperimenter(OFPPropCommonExperimenter4ByteData):
    # Experimenter table mod property; the wire format is handled
    # entirely by OFPPropCommonExperimenter4ByteData.
    pass
class OFPQueueDescProp(OFPPropBase):
    # Registry of queue description property classes, keyed by property
    # type; populated by the OFPPropBase.register_type decorator below.
    _TYPES = {}
@OFPQueueDescProp.register_type(ofproto.OFPQDPT_MIN_RATE)
class OFPQueueDescPropMinRate(OFPQueueDescProp):
    """Minimum datarate guarantee queue property (OFPQDPT_MIN_RATE)."""

    def __init__(self, type_=None, length=None, rate=None):
        self.type = type_
        self.length = length
        self.rate = rate

    @classmethod
    def parser(cls, buf):
        # Unpack order matches __init__'s positional parameter order.
        return cls(*struct.unpack_from(
            ofproto.OFP_QUEUE_DESC_PROP_MIN_RATE_PACK_STR, buf, 0))
@OFPQueueDescProp.register_type(ofproto.OFPQDPT_MAX_RATE)
class OFPQueueDescPropMaxRate(OFPQueueDescProp):
    """Maximum datarate queue property (OFPQDPT_MAX_RATE)."""

    def __init__(self, type_=None, length=None, rate=None):
        self.type = type_
        self.length = length
        self.rate = rate

    @classmethod
    def parser(cls, buf):
        # Unpack order matches __init__'s positional parameter order.
        return cls(*struct.unpack_from(
            ofproto.OFP_QUEUE_DESC_PROP_MAX_RATE_PACK_STR, buf, 0))
@OFPQueueDescProp.register_type(ofproto.OFPQDPT_EXPERIMENTER)
class OFPQueueDescPropExperimenter(OFPPropCommonExperimenter4ByteData):
    # Experimenter queue description property; the wire format is
    # handled entirely by OFPPropCommonExperimenter4ByteData.
    pass
class OFPRoleProp(OFPPropBase):
    # Registry of role property classes, keyed by property type;
    # populated by the OFPPropBase.register_type decorator below.
    _TYPES = {}
@OFPRoleProp.register_type(ofproto.OFPRPT_EXPERIMENTER)
class OFPRolePropExperimenter(OFPPropCommonExperimenter4ByteData):
    # Experimenter role property; the wire format is handled entirely
    # by OFPPropCommonExperimenter4ByteData.
    pass
class OFPBundleProp(OFPPropBase):
    # Registry of bundle property classes, keyed by property type;
    # populated by the OFPPropBase.register_type decorator below.
    _TYPES = {}
# Consistency fix: register under the bundle property enum
# OFPBPT_EXPERIMENTER rather than the role property enum
# OFPRPT_EXPERIMENTER.  The original only worked because both
# constants happen to be 0xFFFF.
@OFPBundleProp.register_type(ofproto.OFPBPT_EXPERIMENTER)
class OFPBundlePropExperimenter(OFPPropCommonExperimenter4ByteData):
    # Experimenter bundle property; the wire format is handled entirely
    # by OFPPropCommonExperimenter4ByteData.
    pass
@_register_parser
@_set_msg_type(ofproto.OFPT_PACKET_IN)
class OFPPacketIn(MsgBase):
    """
    Packet-In message

    The switch sends a received packet to the controller with this
    message.

    ============= =========================================================
    Attribute     Description
    ============= =========================================================
    buffer_id     ID assigned by datapath
    total_len     Full length of frame
    reason        Reason packet is being sent.

                  | OFPR_TABLE_MISS
                  | OFPR_APPLY_ACTION
                  | OFPR_INVALID_TTL
                  | OFPR_ACTION_SET
                  | OFPR_GROUP
                  | OFPR_PACKET_OUT
    table_id      ID of the table that was looked up
    cookie        Cookie of the flow entry that was looked up
    match         Instance of ``OFPMatch``
    data          Ethernet frame
    ============= =========================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
        def packet_in_handler(self, ev):
            msg = ev.msg
            dp = msg.datapath
            ofp = dp.ofproto
            if msg.reason == ofp.OFPR_TABLE_MISS:
                reason = 'TABLE MISS'
            elif msg.reason == ofp.OFPR_APPLY_ACTION:
                reason = 'APPLY ACTION'
            elif msg.reason == ofp.OFPR_INVALID_TTL:
                reason = 'INVALID TTL'
            elif msg.reason == ofp.OFPR_ACTION_SET:
                reason = 'ACTION SET'
            elif msg.reason == ofp.OFPR_GROUP:
                reason = 'GROUP'
            elif msg.reason == ofp.OFPR_PACKET_OUT:
                reason = 'PACKET OUT'
            else:
                reason = 'unknown'
            self.logger.debug('OFPPacketIn received: '
                              'buffer_id=%x total_len=%d reason=%s '
                              'table_id=%d cookie=%d match=%s data=%s',
                              msg.buffer_id, msg.total_len, reason,
                              msg.table_id, msg.cookie, msg.match,
                              utils.hex_array(msg.data))
    """
    def __init__(self, datapath, buffer_id=None, total_len=None, reason=None,
                 table_id=None, cookie=None, match=None, data=None):
        super(OFPPacketIn, self).__init__(datapath)
        self.buffer_id = buffer_id
        self.total_len = total_len
        self.reason = reason
        self.table_id = table_id
        self.cookie = cookie
        self.match = match
        self.data = data
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPPacketIn, cls).parser(datapath, version, msg_type,
                                             msg_len, xid, buf)
        # Fixed-size header fields after the common OpenFlow header.
        (msg.buffer_id, msg.total_len, msg.reason,
         msg.table_id, msg.cookie) = struct.unpack_from(
            ofproto.OFP_PACKET_IN_PACK_STR,
            msg.buf, ofproto.OFP_HEADER_SIZE)
        # The variable-length match starts where the placeholder match
        # in the fixed-size struct begins.
        msg.match = OFPMatch.parser(msg.buf, ofproto.OFP_PACKET_IN_SIZE -
                                    ofproto.OFP_MATCH_SIZE)
        match_len = utils.round_up(msg.match.length, 8)
        # The frame data follows the padded match plus 2 pad bytes
        # (the explicit pad[2] between match and data in the spec).
        msg.data = msg.buf[(ofproto.OFP_PACKET_IN_SIZE -
                            ofproto.OFP_MATCH_SIZE + match_len + 2):]
        if msg.total_len < len(msg.data):
            # discard padding for 8-byte alignment of OFP packet
            msg.data = msg.data[:msg.total_len]
        return msg
@_register_parser
@_set_msg_type(ofproto.OFPT_FLOW_REMOVED)
class OFPFlowRemoved(MsgBase):
    """
    Flow removed message

    When flow entries time out or are deleted, the switch notifies the
    controller with this message.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    cookie           Opaque controller-issued identifier
    priority         Priority level of flow entry
    reason           One of the following values.

                     | OFPRR_IDLE_TIMEOUT
                     | OFPRR_HARD_TIMEOUT
                     | OFPRR_DELETE
                     | OFPRR_GROUP_DELETE
                     | OFPRR_METER_DELETE
                     | OFPRR_EVICTION
    table_id         ID of the table
    duration_sec     Time flow was alive in seconds
    duration_nsec    Time flow was alive in nanoseconds beyond duration_sec
    idle_timeout     Idle timeout from original flow mod
    hard_timeout     Hard timeout from original flow mod
    packet_count     Number of packets that was associated with the flow
    byte_count       Number of bytes that was associated with the flow
    match            Instance of ``OFPMatch``
    ================ ======================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPFlowRemoved, MAIN_DISPATCHER)
        def flow_removed_handler(self, ev):
            msg = ev.msg
            dp = msg.datapath
            ofp = dp.ofproto
            if msg.reason == ofp.OFPRR_IDLE_TIMEOUT:
                reason = 'IDLE TIMEOUT'
            elif msg.reason == ofp.OFPRR_HARD_TIMEOUT:
                reason = 'HARD TIMEOUT'
            elif msg.reason == ofp.OFPRR_DELETE:
                reason = 'DELETE'
            elif msg.reason == ofp.OFPRR_GROUP_DELETE:
                reason = 'GROUP DELETE'
            else:
                reason = 'unknown'
            self.logger.debug('OFPFlowRemoved received: '
                              'cookie=%d priority=%d reason=%s table_id=%d '
                              'duration_sec=%d duration_nsec=%d '
                              'idle_timeout=%d hard_timeout=%d '
                              'packet_count=%d byte_count=%d match.fields=%s',
                              msg.cookie, msg.priority, reason, msg.table_id,
                              msg.duration_sec, msg.duration_nsec,
                              msg.idle_timeout, msg.hard_timeout,
                              msg.packet_count, msg.byte_count, msg.match)
    """
    def __init__(self, datapath, cookie=None, priority=None, reason=None,
                 table_id=None, duration_sec=None, duration_nsec=None,
                 idle_timeout=None, hard_timeout=None, packet_count=None,
                 byte_count=None, match=None):
        super(OFPFlowRemoved, self).__init__(datapath)
        self.cookie = cookie
        self.priority = priority
        self.reason = reason
        self.table_id = table_id
        self.duration_sec = duration_sec
        self.duration_nsec = duration_nsec
        self.idle_timeout = idle_timeout
        self.hard_timeout = hard_timeout
        self.packet_count = packet_count
        self.byte_count = byte_count
        self.match = match
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPFlowRemoved, cls).parser(datapath, version, msg_type,
                                                msg_len, xid, buf)
        # Fixed fields after the common OpenFlow header.
        (msg.cookie, msg.priority, msg.reason,
         msg.table_id, msg.duration_sec, msg.duration_nsec,
         msg.idle_timeout, msg.hard_timeout, msg.packet_count,
         msg.byte_count) = struct.unpack_from(
            ofproto.OFP_FLOW_REMOVED_PACK_STR0,
            msg.buf, ofproto.OFP_HEADER_SIZE)
        # The variable-length match starts where the placeholder match
        # in the fixed-size struct begins.
        offset = (ofproto.OFP_FLOW_REMOVED_SIZE - ofproto.OFP_MATCH_SIZE)
        msg.match = OFPMatch.parser(msg.buf, offset)
        return msg
class OFPPort(StringifyMixin):
    """
    Description of a port

    ========== =========================================================
    Attribute  Description
    ========== =========================================================
    port_no    Port number and it uniquely identifies a port within
               a switch.
    length     Length of ofp_port (excluding padding).
    hw_addr    MAC address for the port.
    name       Null-terminated string containing a human-readable name
               for the interface.
    config     Bitmap of port configration flags.

               | OFPPC_PORT_DOWN
               | OFPPC_NO_RECV
               | OFPPC_NO_FWD
               | OFPPC_NO_PACKET_IN
    state      Bitmap of port state flags.

               | OFPPS_LINK_DOWN
               | OFPPS_BLOCKED
               | OFPPS_LIVE
    properties List of ``OFPPortDescProp`` subclass instance
    ========== =========================================================
    """
    _TYPE = {
        'ascii': [
            'hw_addr',
        ],
        'utf-8': [
            # OF spec is unclear about the encoding of name.
            # we assume UTF-8, which is used by OVS.
            'name',
        ]
    }
    def __init__(self, port_no=None, length=None, hw_addr=None, name=None,
                 config=None, state=None, properties=None):
        super(OFPPort, self).__init__()
        self.port_no = port_no
        self.length = length
        self.hw_addr = hw_addr
        self.name = name
        self.config = config
        self.state = state
        self.properties = properties
    @classmethod
    def parser(cls, buf, offset):
        """Parse one ofp_port structure at ``offset`` in ``buf``."""
        (port_no, length, hw_addr, name, config, state) = struct.unpack_from(
            ofproto.OFP_PORT_PACK_STR, buf, offset)
        hw_addr = addrconv.mac.bin_to_text(hw_addr)
        # name is a fixed-size, NUL-padded field on the wire.
        name = name.rstrip(b'\0')
        props = []
        # Property TLVs fill the rest of the structure, up to `length`.
        rest = buf[offset + ofproto.OFP_PORT_SIZE:offset + length]
        while rest:
            p, rest = OFPPortDescProp.parse(rest)
            props.append(p)
        ofpport = cls(port_no, length, hw_addr, name, config, state, props)
        return ofpport
class OFPTableDesc(StringifyMixin):
    """Description of a flow table; body entry of OFPMP_TABLE_DESC
    multipart replies."""

    def __init__(self, length=None, table_id=None, config=None,
                 properties=None):
        super(OFPTableDesc, self).__init__()
        self.table_id = table_id
        self.length = length
        self.config = config
        self.properties = properties

    @classmethod
    def parser(cls, buf, offset):
        """Parse one ofp_table_desc structure at ``offset`` in ``buf``."""
        (length, table_id, config) = struct.unpack_from(
            ofproto.OFP_TABLE_DESC_PACK_STR, buf, offset)
        # Property TLVs fill the remainder of the structure.
        rest = buf[offset + ofproto.OFP_TABLE_DESC_SIZE:offset + length]
        prop_list = []
        while rest:
            prop, rest = OFPTableModProp.parse(rest)
            prop_list.append(prop)
        return cls(length, table_id, config, prop_list)
class OFPQueueDesc(StringifyMixin):
    """Description of a queue; body entry of OFPMP_QUEUE_DESC multipart
    replies."""

    def __init__(self, port_no=None, queue_id=None, len_=None,
                 properties=None):
        super(OFPQueueDesc, self).__init__()
        self.port_no = port_no
        self.queue_id = queue_id
        self.len = len_
        self.properties = properties

    @classmethod
    def parser(cls, buf, offset):
        """Parse one ofp_queue_desc structure at ``offset`` in ``buf``."""
        (port_no, queue_id, len_) = struct.unpack_from(
            ofproto.OFP_QUEUE_DESC_PACK_STR, buf, offset)
        # Property TLVs fill the remainder of the structure.
        rest = buf[offset + ofproto.OFP_QUEUE_DESC_SIZE:offset + len_]
        prop_list = []
        while rest:
            prop, rest = OFPQueueDescProp.parse(rest)
            prop_list.append(prop)
        return cls(port_no, queue_id, len_, prop_list)
def _set_stats_type(stats_type, stats_body_cls):
def _set_cls_stats_type(cls):
cls.cls_stats_type = stats_type
cls.cls_stats_body_cls = stats_body_cls
return cls
return _set_cls_stats_type
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPMultipartRequest(MsgBase):
    # Base class for OFPMP_* multipart request messages.  Subclasses get
    # cls_stats_type set via the _set_stats_type decorator and override
    # _serialize_stats_body to append their request-specific body.
    def __init__(self, datapath, flags):
        super(OFPMultipartRequest, self).__init__(datapath)
        self.type = self.__class__.cls_stats_type
        self.flags = flags
    def _serialize_stats_body(self):
        # Default: no request body beyond the multipart header.
        pass
    def _serialize_body(self):
        # Multipart header (type, flags) right after the OFP header,
        # then the subclass-specific body.
        msg_pack_into(ofproto.OFP_MULTIPART_REQUEST_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE,
                      self.type, self.flags)
        self._serialize_stats_body()
@_register_parser
@_set_msg_type(ofproto.OFPT_METER_MOD)
class OFPMeterMod(MsgBase):
    """
    Meter modification message

    The controller sends this message to modify the meter.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    command          One of the following values.

                     | OFPMC_ADD
                     | OFPMC_MODIFY
                     | OFPMC_DELETE
    flags            Bitmap of the following flags.

                     | OFPMF_KBPS
                     | OFPMF_PKTPS
                     | OFPMF_BURST
                     | OFPMF_STATS
    meter_id         Meter instance
    bands            list of the following class instance.

                     | OFPMeterBandDrop
                     | OFPMeterBandDscpRemark
                     | OFPMeterBandExperimenter
    ================ ======================================================
    """
    def __init__(self, datapath, command=ofproto.OFPMC_ADD,
                 flags=ofproto.OFPMF_KBPS, meter_id=1, bands=None):
        bands = bands if bands else []
        super(OFPMeterMod, self).__init__(datapath)
        self.command = command
        self.flags = flags
        self.meter_id = meter_id
        self.bands = bands
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPMeterMod, cls).parser(
            datapath, version, msg_type, msg_len, xid, buf)
        (msg.command, msg.flags, msg.meter_id) = struct.unpack_from(
            ofproto.OFP_METER_MOD_PACK_STR, buf, ofproto.OFP_HEADER_SIZE)
        # Meter bands fill the rest of the message.
        offset = ofproto.OFP_METER_MOD_SIZE
        msg.bands = []
        while offset < msg.msg_len:
            band = OFPMeterBandHeader.parser(buf, offset)
            msg.bands.append(band)
            offset += band.len
        return msg
    def _serialize_body(self):
        msg_pack_into(ofproto.OFP_METER_MOD_PACK_STR, self.buf,
                      ofproto.OFP_HEADER_SIZE,
                      self.command, self.flags, self.meter_id)
        # Serialize each band in place right after the fixed header.
        offset = ofproto.OFP_METER_MOD_SIZE
        for b in self.bands:
            b.serialize(self.buf, offset)
            offset += b.len
@_set_msg_type(ofproto.OFPT_TABLE_MOD)
class OFPTableMod(MsgBase):
    """
    Flow table configuration message

    The controller sends this message to configure table state.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    table_id         ID of the table (OFPTT_ALL indicates all tables)
    config           Bitmap of configuration flags.

                     | OFPTC_EVICTION
                     | OFPTC_VACANCY_EVENTS
    properties       List of ``OFPTableModProp`` subclass instance
    ================ ======================================================

    Example::

        def send_table_mod(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser

            properties = [ofp_parser.OFPTableModPropEviction(flags=0)]
            req = ofp_parser.OFPTableMod(datapath, 1, ofp.OFPTC_EVICTION,
                                         properties)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, table_id, config, properties):
        super(OFPTableMod, self).__init__(datapath)
        self.table_id = table_id
        self.config = config
        self.properties = properties
    def _serialize_body(self):
        # Serialize the property TLVs first, then the fixed header,
        # then append the properties after it.
        props_buf = bytearray()
        for p in self.properties:
            props_buf += p.serialize()
        msg_pack_into(ofproto.OFP_TABLE_MOD_PACK_STR, self.buf,
                      ofproto.OFP_HEADER_SIZE,
                      self.table_id, self.config)
        self.buf += props_buf
@_register_parser
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPMultipartReply(MsgBase):
    # Base class for OFPMP_* multipart reply messages.
    # Maps a multipart (stats) type constant to the reply subclass that
    # handles it; populated by register_stats_type below.
    _STATS_MSG_TYPES = {}
    @staticmethod
    def register_stats_type(body_single_struct=False):
        # Decorator: register a reply subclass for its cls_stats_type.
        # body_single_struct=True means the body is one structure rather
        # than a list (e.g. OFPDescStatsReply).
        def _register_stats_type(cls):
            assert cls.cls_stats_type is not None
            assert cls.cls_stats_type not in OFPMultipartReply._STATS_MSG_TYPES
            assert cls.cls_stats_body_cls is not None
            cls.cls_body_single_struct = body_single_struct
            OFPMultipartReply._STATS_MSG_TYPES[cls.cls_stats_type] = cls
            return cls
        return _register_stats_type
    def __init__(self, datapath, body=None, flags=None):
        super(OFPMultipartReply, self).__init__(datapath)
        self.body = body
        self.flags = flags
    @classmethod
    def parser_stats_body(cls, buf, msg_len, offset):
        # Parse body entries back-to-back until the end of the message.
        body_cls = cls.cls_stats_body_cls
        body = []
        while offset < msg_len:
            entry = body_cls.parser(buf, offset)
            body.append(entry)
            offset += entry.length
        if cls.cls_body_single_struct:
            return body[0]
        return body
    @classmethod
    def parser_stats(cls, datapath, version, msg_type, msg_len, xid, buf):
        # Call MsgBase.parser via __func__ to avoid dispatching back
        # into OFPMultipartReply.parser (which would recurse).
        msg = MsgBase.parser.__func__(
            cls, datapath, version, msg_type, msg_len, xid, buf)
        msg.body = msg.parser_stats_body(msg.buf, msg.msg_len,
                                         ofproto.OFP_MULTIPART_REPLY_SIZE)
        return msg
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        # Peek at the multipart type to find the concrete reply class.
        type_, flags = struct.unpack_from(
            ofproto.OFP_MULTIPART_REPLY_PACK_STR, six.binary_type(buf),
            ofproto.OFP_HEADER_SIZE)
        stats_type_cls = cls._STATS_MSG_TYPES.get(type_)
        msg = super(OFPMultipartReply, stats_type_cls).parser(
            datapath, version, msg_type, msg_len, xid, buf)
        msg.type = type_
        msg.flags = flags
        offset = ofproto.OFP_MULTIPART_REPLY_SIZE
        body = []
        while offset < msg_len:
            b = stats_type_cls.cls_stats_body_cls.parser(msg.buf, offset)
            body.append(b)
            # Body classes expose their size as either `length` or `len`.
            offset += b.length if hasattr(b, 'length') else b.len
        if stats_type_cls.cls_body_single_struct:
            msg.body = body[0]
        else:
            msg.body = body
        return msg
class OFPDescStats(ofproto_parser.namedtuple('OFPDescStats', (
        'mfr_desc', 'hw_desc', 'sw_desc', 'serial_num', 'dp_desc'))):
    # Body of OFPMP_DESC multipart replies: free-form description
    # strings identifying the switch.
    _TYPE = {
        'ascii': [
            'mfr_desc',
            'hw_desc',
            'sw_desc',
            'serial_num',
            'dp_desc',
        ]
    }
    @classmethod
    def parser(cls, buf, offset):
        desc = struct.unpack_from(ofproto.OFP_DESC_PACK_STR,
                                  buf, offset)
        desc = list(desc)
        # Fields are fixed-size, NUL-padded strings on the wire.
        desc = [x.rstrip(b'\0') for x in desc]
        stats = cls(*desc)
        # ofp_desc has a fixed size; record it for the reply body loop.
        stats.length = ofproto.OFP_DESC_SIZE
        return stats
@_set_stats_type(ofproto.OFPMP_DESC, OFPDescStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPDescStatsRequest(OFPMultipartRequest):
    """
    Description statistics request message

    The controller uses this message to query description of the switch.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    ================ ======================================================

    Example::

        def send_desc_stats_request(self, datapath):
            ofp_parser = datapath.ofproto_parser

            req = ofp_parser.OFPDescStatsRequest(datapath, 0)
            datapath.send_msg(req)
    """
    # type_ is unused; accepted for signature symmetry with other
    # request classes.
    def __init__(self, datapath, flags=0, type_=None):
        super(OFPDescStatsRequest, self).__init__(datapath, flags)
@OFPMultipartReply.register_stats_type(body_single_struct=True)
@_set_stats_type(ofproto.OFPMP_DESC, OFPDescStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPDescStatsReply(OFPMultipartReply):
    """
    Description statistics reply message

    The switch responds with this message to a description statistics
    request.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             Instance of ``OFPDescStats``
    ================ ======================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPDescStatsReply, MAIN_DISPATCHER)
        def desc_stats_reply_handler(self, ev):
            body = ev.msg.body

            self.logger.debug('DescStats: mfr_desc=%s hw_desc=%s sw_desc=%s '
                              'serial_num=%s dp_desc=%s',
                              body.mfr_desc, body.hw_desc, body.sw_desc,
                              body.serial_num, body.dp_desc)
    """
    # type_ is unused; accepted for signature symmetry with the parser.
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPDescStatsReply, self).__init__(datapath, **kwargs)
class OFPTableFeaturesStats(StringifyMixin):
    """Body entry of OFPMP_TABLE_FEATURES multipart messages: a table's
    identity, limits and the list of feature property TLVs."""
    _TYPE = {
        'utf-8': [
            # OF spec is unclear about the encoding of name.
            # we assume UTF-8.
            'name',
        ]
    }
    def __init__(self, table_id=None, name=None, metadata_match=None,
                 metadata_write=None, config=None, max_entries=None,
                 properties=None, length=None):
        super(OFPTableFeaturesStats, self).__init__()
        # Bug fix: the original discarded the ``length`` argument
        # (``self.length = None``).  Store it; the default (None)
        # preserves the previous behavior, and both parser() and
        # serialize() still overwrite it with the real wire length.
        self.length = length
        self.table_id = table_id
        self.name = name
        self.metadata_match = metadata_match
        self.metadata_write = metadata_write
        self.config = config
        self.max_entries = max_entries
        self.properties = properties
    @classmethod
    def parser(cls, buf, offset):
        """Parse one ofp_table_features structure at ``offset``."""
        table_features = cls()
        (table_features.length, table_features.table_id,
         name, table_features.metadata_match,
         table_features.metadata_write, table_features.config,
         table_features.max_entries
         ) = struct.unpack_from(ofproto.OFP_TABLE_FEATURES_PACK_STR,
                                buf, offset)
        # name is a fixed-size, NUL-padded field on the wire.
        table_features.name = name.rstrip(b'\0')
        # Property TLVs fill the remainder of the structure.
        props = []
        rest = buf[offset + ofproto.OFP_TABLE_FEATURES_SIZE:
                   offset + table_features.length]
        while rest:
            p, rest = OFPTableFeatureProp.parse(rest)
            props.append(p)
        table_features.properties = props
        return table_features
    def serialize(self):
        """Serialize this structure; fixes up ``self.length`` first."""
        # fixup
        bin_props = bytearray()
        for p in self.properties:
            bin_props += p.serialize()
        self.length = ofproto.OFP_TABLE_FEATURES_SIZE + len(bin_props)
        buf = bytearray()
        msg_pack_into(ofproto.OFP_TABLE_FEATURES_PACK_STR, buf, 0,
                      self.length, self.table_id, self.name,
                      self.metadata_match, self.metadata_write,
                      self.config, self.max_entries)
        return buf + bin_props
class OFPTableFeatureProp(OFPPropBase):
    # Registry of table feature property classes, keyed by property
    # type; populated by the OFPPropBase.register_type decorator below.
    _TYPES = {}
class OFPInstructionId(StringifyMixin):
    """Instruction ID entry carried in table features instruction
    properties."""
    _PACK_STR = '!HH'  # type, len

    def __init__(self, type_, len_=None):
        self.type = type_
        self.len = len_
        # XXX experimenter instruction ids are not supported

    @classmethod
    def parse(cls, buf):
        """Parse one id from the head of ``buf``; return (id, rest)."""
        type_, len_ = struct.unpack_from(cls._PACK_STR,
                                         six.binary_type(buf), 0)
        return cls(type_=type_, len_=len_), buf[len_:]

    def serialize(self):
        # fixup: a plain id is exactly one (type, len) header long
        self.len = struct.calcsize(self._PACK_STR)
        serialized = bytearray()
        msg_pack_into(self._PACK_STR, serialized, 0, self.type, self.len)
        return serialized
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_INSTRUCTIONS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_INSTRUCTIONS_MISS)
class OFPTableFeaturePropInstructions(OFPTableFeatureProp):
    """Table features property listing supported instruction ids."""

    def __init__(self, type_=None, length=None, instruction_ids=None):
        super(OFPTableFeaturePropInstructions, self).__init__(type_, length)
        self.instruction_ids = instruction_ids if instruction_ids else []

    @classmethod
    def parser(cls, buf):
        ids = []
        rest = cls.get_rest(buf)
        while rest:
            one_id, rest = OFPInstructionId.parse(rest)
            ids.append(one_id)
        return cls(instruction_ids=ids)

    def serialize_body(self):
        ids_bin = bytearray()
        for instruction_id in self.instruction_ids:
            ids_bin += instruction_id.serialize()
        return ids_bin
# Implementation note: While OpenFlow 1.3.2 shares the same ofp_action_header
# for flow_mod and table_features, we have separate classes. We named this
# class to match with OpenFlow 1.4's name. (ofp_action_id)
class OFPActionId(StringifyMixin):
    """Action ID entry carried in table features action properties
    (named after OpenFlow 1.4's ofp_action_id)."""
    _PACK_STR = '!HH'  # type, len

    def __init__(self, type_, len_=None):
        self.type = type_
        self.len = len_
        # XXX experimenter action ids are not supported

    @classmethod
    def parse(cls, buf):
        """Parse one id from the head of ``buf``; return (id, rest)."""
        type_, len_ = struct.unpack_from(cls._PACK_STR,
                                         six.binary_type(buf), 0)
        return cls(type_=type_, len_=len_), buf[len_:]

    def serialize(self):
        # fixup: a plain id is exactly one (type, len) header long
        self.len = struct.calcsize(self._PACK_STR)
        serialized = bytearray()
        msg_pack_into(self._PACK_STR, serialized, 0, self.type, self.len)
        return serialized
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_WRITE_ACTIONS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_WRITE_ACTIONS_MISS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_APPLY_ACTIONS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_APPLY_ACTIONS_MISS)
class OFPTableFeaturePropActions(OFPTableFeatureProp):
    """Table features property listing supported action ids."""

    def __init__(self, type_=None, length=None, action_ids=None):
        super(OFPTableFeaturePropActions, self).__init__(type_, length)
        self.action_ids = action_ids if action_ids else []

    @classmethod
    def parser(cls, buf):
        ids = []
        rest = cls.get_rest(buf)
        while rest:
            one_id, rest = OFPActionId.parse(rest)
            ids.append(one_id)
        return cls(action_ids=ids)

    def serialize_body(self):
        ids_bin = bytearray()
        for action_id in self.action_ids:
            ids_bin += action_id.serialize()
        return ids_bin
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_NEXT_TABLES)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_NEXT_TABLES_MISS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_TABLE_SYNC_FROM)
class OFPTableFeaturePropNextTables(OFPTableFeatureProp):
    """Table features property carrying a list of table ids (next
    tables / table sync sources)."""
    _TABLE_ID_PACK_STR = '!B'

    def __init__(self, type_=None, length=None, table_ids=None):
        super(OFPTableFeaturePropNextTables, self).__init__(type_, length)
        self.table_ids = table_ids if table_ids else []

    @classmethod
    def parser(cls, buf):
        rest = cls.get_rest(buf)
        id_size = struct.calcsize(cls._TABLE_ID_PACK_STR)
        ids = []
        while rest:
            (table_id,) = struct.unpack_from(cls._TABLE_ID_PACK_STR,
                                             six.binary_type(rest), 0)
            ids.append(table_id)
            rest = rest[id_size:]
        return cls(table_ids=ids)

    def serialize_body(self):
        ids_bin = bytearray()
        for table_id in self.table_ids:
            one = bytearray()
            msg_pack_into(self._TABLE_ID_PACK_STR, one, 0, table_id)
            ids_bin += one
        return ids_bin
# Implementation note: OFPOxmId is specific to this implementation.
# It does not have a corresponding structure in the specification.
# (the specification uses plain uint32_t for non-experimenter OXMs
# and uint64_t for experimenter OXMs.)
#
# I have taken a look at some of the software switch implementations,
# but they all look broken or incomplete. according to the spec,
# oxm_hasmask should be 1 if a switch supports masking for the type.
# the right value for oxm_length is not clear from the spec.
# update: OpenFlow 1.3.3 "clarified" that oxm_length here is the payload
# length. it's still unclear if it should be doubled for hasmask or not,
# though.
# ofsoftswitch13
# oxm_hasmask always 0
# oxm_length same as ofp_match etc (as without mask)
# linc/of_protocol
# oxm_hasmask always 0
# oxm_length always 0
# ovs:
# seems in flux as of writing this [20141003]
class OFPOxmId(StringifyMixin):
    # OXM ID entry for table features OXM properties.  See the long
    # implementation note above for the oxm_length caveats.
    _PACK_STR = '!I'  # oxm header
    _EXPERIMENTER_ID_PACK_STR = '!I'
    _TYPE = {
        'ascii': [
            'type',
        ],
    }
    def __init__(self, type_, hasmask=False, length=None):
        self.type = type_
        self.hasmask = hasmask
        self.length = length
    @classmethod
    def parse(cls, buf):
        """Parse one OXM id from the head of ``buf``; return (id, rest).

        Returns an OFPExperimenterOxmId when the oxm class is
        OFPXMC_EXPERIMENTER, otherwise an OFPOxmId.
        """
        (oxm,) = struct.unpack_from(cls._PACK_STR, six.binary_type(buf), 0)
        # oxm (32 bit) == class (16) | field (7) | hasmask (1) | length (8)
        # in case of experimenter OXMs, another 32 bit value
        # (experimenter id) follows.
        (type_, _v) = ofproto.oxm_to_user(oxm >> (1 + 8), None, None)
        rest = buf[struct.calcsize(cls._PACK_STR):]
        hasmask = ofproto.oxm_tlv_header_extract_hasmask(oxm)
        length = oxm & 0xff  # XXX see the comment on OFPOxmId
        class_ = oxm >> (7 + 1 + 8)
        if class_ == ofproto.OFPXMC_EXPERIMENTER:
            (exp_id,) = struct.unpack_from(cls._EXPERIMENTER_ID_PACK_STR,
                                           six.binary_type(rest), 0)
            rest = rest[struct.calcsize(cls._EXPERIMENTER_ID_PACK_STR):]
            subcls = OFPExperimenterOxmId
            return subcls(type_=type_, exp_id=exp_id, hasmask=hasmask,
                          length=length), rest
        else:
            return cls(type_=type_, hasmask=hasmask, length=length), rest
    def serialize(self):
        # fixup
        self.length = 0  # XXX see the comment on OFPOxmId
        (n, _v, _m) = ofproto.oxm_from_user(self.type, None)
        # Rebuild the 32 bit oxm header: (class|field) << 9 | hasmask << 8
        # | length.
        oxm = (n << (1 + 8)) | (self.hasmask << 8) | self.length
        buf = bytearray()
        msg_pack_into(self._PACK_STR, buf, 0, oxm)
        # n >> 7 is the oxm class; experimenter ids must use the
        # OFPExperimenterOxmId subclass.
        assert n >> 7 != ofproto.OFPXMC_EXPERIMENTER
        return buf
class OFPExperimenterOxmId(OFPOxmId):
    """Experimenter OXM ID: an OXM id followed by a 32 bit experimenter
    id on the wire."""
    def __init__(self, type_, exp_id, hasmask=False, length=None):
        super(OFPExperimenterOxmId, self).__init__(type_=type_,
                                                   hasmask=hasmask,
                                                   length=length)
        self.exp_id = exp_id
    def serialize(self):
        buf = super(OFPExperimenterOxmId, self).serialize()
        msg_pack_into(self._EXPERIMENTER_ID_PACK_STR, buf,
                      struct.calcsize(self._PACK_STR), self.exp_id)
        # Bug fix: the original fell off the end and returned None,
        # which broke callers that do ``bin_ids += oxm_id.serialize()``.
        return buf
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_MATCH)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_WILDCARDS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_WRITE_SETFIELD)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_WRITE_SETFIELD_MISS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_APPLY_SETFIELD)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_APPLY_SETFIELD_MISS)
class OFPTableFeaturePropOxm(OFPTableFeatureProp):
    """Table features property listing supported OXM ids (match,
    wildcard and set-field capabilities)."""

    def __init__(self, type_=None, length=None, oxm_ids=None):
        super(OFPTableFeaturePropOxm, self).__init__(type_, length)
        self.oxm_ids = oxm_ids if oxm_ids else []

    @classmethod
    def parser(cls, buf):
        ids = []
        rest = cls.get_rest(buf)
        while rest:
            one_id, rest = OFPOxmId.parse(rest)
            ids.append(one_id)
        return cls(oxm_ids=ids)

    def serialize_body(self):
        ids_bin = bytearray()
        for oxm_id in self.oxm_ids:
            ids_bin += oxm_id.serialize()
        return ids_bin
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_EXPERIMENTER)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_EXPERIMENTER_MISS)
class OFPTableFeaturePropExperimenter(OFPPropCommonExperimenter4ByteData):
    # Experimenter table features property; the wire format is handled
    # entirely by OFPPropCommonExperimenter4ByteData.
    pass
@_set_stats_type(ofproto.OFPMP_TABLE_FEATURES, OFPTableFeaturesStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPTableFeaturesStatsRequest(OFPMultipartRequest):
    """
    Table features statistics request message

    The controller uses this message to query table features.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPTableFeaturesStats`` instances.
                     The default is [].
    ================ ======================================================
    """
    # type_ is unused; accepted for signature symmetry with other
    # request classes.
    def __init__(self, datapath, flags=0, body=None, type_=None):
        body = body if body else []
        super(OFPTableFeaturesStatsRequest, self).__init__(datapath, flags)
        self.body = body
    def _serialize_stats_body(self):
        # Append each table features structure after the multipart header.
        bin_body = bytearray()
        for p in self.body:
            bin_body += p.serialize()
        self.buf += bin_body
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_TABLE_FEATURES, OFPTableFeaturesStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPTableFeaturesStatsReply(OFPMultipartReply):
    """
    Table features statistics reply message

    The switch responds with this message to a table features statistics
    request.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPTableFeaturesStats`` instance
    ================ ======================================================
    """
    # type_ is unused; accepted for signature symmetry with the parser.
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPTableFeaturesStatsReply, self).__init__(datapath, **kwargs)
@_set_stats_type(ofproto.OFPMP_PORT_DESC, OFPPort)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPPortDescStatsRequest(OFPMultipartRequest):
    """
    Port description request message

    The controller uses this message to query description of all the ports.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    ================ ======================================================

    Example::

        def send_port_desc_stats_request(self, datapath):
            ofp_parser = datapath.ofproto_parser

            req = ofp_parser.OFPPortDescStatsRequest(datapath, 0)
            datapath.send_msg(req)
    """
    # type_ is unused; accepted for signature symmetry with other
    # request classes.
    def __init__(self, datapath, flags=0, type_=None):
        super(OFPPortDescStatsRequest, self).__init__(datapath, flags)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_PORT_DESC, OFPPort)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPPortDescStatsReply(OFPMultipartReply):
    """
    Port description reply message
    The switch responds with this message to a port description request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPPort`` instance
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPPortDescStatsReply, MAIN_DISPATCHER)
        def port_desc_stats_reply_handler(self, ev):
            ports = []
            for p in ev.msg.body:
                ports.append('port_no=%d hw_addr=%s name=%s config=0x%08x '
                             'state=0x%08x properties=%s' %
                             (p.port_no, p.hw_addr,
                              p.name, p.config, p.state, repr(p.properties)))
            self.logger.debug('OFPPortDescStatsReply received: %s', ports)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        # Body parsing is done generically by OFPMultipartReply.
        super(OFPPortDescStatsReply, self).__init__(datapath, **kwargs)
@_set_stats_type(ofproto.OFPMP_TABLE_DESC, OFPTableDesc)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPTableDescStatsRequest(OFPMultipartRequest):
    """
    Table description request message
    The controller uses this message to query description of all the tables.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    ================ ======================================================
    Example::
        def send_table_desc_stats_request(self, datapath):
            ofp_parser = datapath.ofproto_parser
            req = ofp_parser.OFPTableDescStatsRequest(datapath, 0)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, type_=None):
        # This request carries no body beyond the common multipart header.
        super(OFPTableDescStatsRequest, self).__init__(datapath, flags)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_TABLE_DESC, OFPTableDesc)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPTableDescStatsReply(OFPMultipartReply):
    """
    Table description reply message
    The switch responds with this message to a table description request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPTableDesc`` instance
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPTableDescStatsReply, MAIN_DISPATCHER)
        def table_desc_stats_reply_handler(self, ev):
            tables = []
            for p in ev.msg.body:
                tables.append('table_id=%d config=0x%08x properties=%s' %
                              (p.table_id, p.config, repr(p.properties)))
            self.logger.debug('OFPTableDescStatsReply received: %s', tables)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        # Body parsing is done generically by OFPMultipartReply.
        super(OFPTableDescStatsReply, self).__init__(datapath, **kwargs)
@_set_stats_type(ofproto.OFPMP_QUEUE_DESC, OFPQueueDesc)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPQueueDescStatsRequest(OFPMultipartRequest):
    """
    Queue description request message

    The controller uses this message to query description of all the queues.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    port_no          Port number to read (OFPP_ANY for all ports)
    queue_id         ID of queue to read (OFPQ_ALL for all queues)
    ================ ======================================================

    Example::

        def send_queue_desc_stats_request(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            req = ofp_parser.OFPQueueDescStatsRequest(datapath, 0,
                                                      ofp.OFPP_ANY,
                                                      ofp.OFPQ_ALL)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, port_no=ofproto.OFPP_ANY,
                 queue_id=ofproto.OFPQ_ALL, type_=None):
        super(OFPQueueDescStatsRequest, self).__init__(datapath, flags)
        self.port_no = port_no
        self.queue_id = queue_id

    def _serialize_stats_body(self):
        # The body is a single ofp_queue_desc_request record packed right
        # after the common multipart-request header.
        body_offset = ofproto.OFP_MULTIPART_REQUEST_SIZE
        msg_pack_into(ofproto.OFP_QUEUE_DESC_REQUEST_PACK_STR, self.buf,
                      body_offset, self.port_no, self.queue_id)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_QUEUE_DESC, OFPQueueDesc)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPQueueDescStatsReply(OFPMultipartReply):
    """
    Queue description reply message
    The switch responds with this message to a queue description request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPQueueDesc`` instance
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPQueueDescStatsReply, MAIN_DISPATCHER)
        def queue_desc_stats_reply_handler(self, ev):
            queues = []
            for q in ev.msg.body:
                queues.append('port_no=%d queue_id=0x%08x properties=%s' %
                              (q.port_no, q.queue_id, repr(q.properties)))
            self.logger.debug('OFPQueueDescStatsReply received: %s', queues)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        # Body parsing is done generically by OFPMultipartReply.
        super(OFPQueueDescStatsReply, self).__init__(datapath, **kwargs)
class OFPQueueStatsProp(OFPPropBase):
    # Base for queue-stats properties; _TYPES maps property type codes to
    # the concrete classes registered via register_type().
    _TYPES = {}
@OFPQueueStatsProp.register_type(ofproto.OFPQSPT_EXPERIMENTER)
class OFPQueueStatsPropExperimenter(OFPPropCommonExperimenter4ByteData):
    # Experimenter queue-stats property; encode/decode inherited from base.
    pass
class OFPQueueStats(StringifyMixin):
    """One per-queue statistics entry of an OFPMP_QUEUE_STATS reply body."""

    def __init__(self, length=None, port_no=None, queue_id=None,
                 tx_bytes=None, tx_packets=None, tx_errors=None,
                 duration_sec=None, duration_nsec=None, properties=None):
        super(OFPQueueStats, self).__init__()
        self.length = length
        self.port_no = port_no
        self.queue_id = queue_id
        self.tx_bytes = tx_bytes
        self.tx_packets = tx_packets
        self.tx_errors = tx_errors
        self.duration_sec = duration_sec
        self.duration_nsec = duration_nsec
        self.properties = properties

    @classmethod
    def parser(cls, buf, offset):
        # Fixed-size header first; its first field is the total entry length.
        fields = struct.unpack_from(ofproto.OFP_QUEUE_STATS_PACK_STR,
                                    buf, offset)
        entry_len = fields[0]
        # Properties fill the rest of the entry, up to entry_len bytes.
        props = []
        prop_buf = buf[offset + ofproto.OFP_QUEUE_STATS_SIZE:
                       offset + entry_len]
        while prop_buf:
            prop, prop_buf = OFPQueueStatsProp.parse(prop_buf)
            props.append(prop)
        return cls(*fields, properties=props)
@_set_stats_type(ofproto.OFPMP_QUEUE_STATS, OFPQueueStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPQueueStatsRequest(OFPMultipartRequest):
    """
    Queue statistics request message

    The controller uses this message to query queue statictics.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    port_no          Port number to read
    queue_id         ID of queue to read
    ================ ======================================================

    Example::

        def send_queue_stats_request(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            req = ofp_parser.OFPQueueStatsRequest(datapath, 0, ofp.OFPP_ANY,
                                                  ofp.OFPQ_ALL)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, port_no=ofproto.OFPP_ANY,
                 queue_id=ofproto.OFPQ_ALL, type_=None):
        super(OFPQueueStatsRequest, self).__init__(datapath, flags)
        self.port_no = port_no
        self.queue_id = queue_id

    def _serialize_stats_body(self):
        # The body is a single ofp_queue_stats_request record packed right
        # after the common multipart-request header.
        body_offset = ofproto.OFP_MULTIPART_REQUEST_SIZE
        msg_pack_into(ofproto.OFP_QUEUE_STATS_REQUEST_PACK_STR, self.buf,
                      body_offset, self.port_no, self.queue_id)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_QUEUE_STATS, OFPQueueStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPQueueStatsReply(OFPMultipartReply):
    """
    Queue statistics reply message
    The switch responds with this message to a queue statistics
    request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPQueueStats`` instance
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPQueueStatsReply, MAIN_DISPATCHER)
        def queue_stats_reply_handler(self, ev):
            queues = []
            for stat in ev.msg.body:
                queues.append('port_no=%d queue_id=%d '
                              'tx_bytes=%d tx_packets=%d tx_errors=%d '
                              'duration_sec=%d duration_nsec=%d '
                              'properties=%s' %
                              (stat.port_no, stat.queue_id,
                               stat.tx_bytes, stat.tx_packets, stat.tx_errors,
                               stat.duration_sec, stat.duration_nsec,
                               repr(stat.properties)))
            self.logger.debug('QueueStats: %s', queues)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        # Body parsing is done generically by OFPMultipartReply.
        super(OFPQueueStatsReply, self).__init__(datapath, **kwargs)
class OFPBucketCounter(StringifyMixin):
    """Packet/byte counters for a single bucket of a group."""

    def __init__(self, packet_count, byte_count):
        super(OFPBucketCounter, self).__init__()
        self.packet_count = packet_count
        self.byte_count = byte_count

    @classmethod
    def parser(cls, buf, offset):
        # One fixed-size ofp_bucket_counter record.
        counters = struct.unpack_from(ofproto.OFP_BUCKET_COUNTER_PACK_STR,
                                      buf, offset)
        return cls(*counters)
class OFPGroupStats(StringifyMixin):
    """Statistics entry for one group in an OFPMP_GROUP reply body."""

    def __init__(self, length=None, group_id=None, ref_count=None,
                 packet_count=None, byte_count=None, duration_sec=None,
                 duration_nsec=None, bucket_stats=None):
        super(OFPGroupStats, self).__init__()
        self.length = length
        self.group_id = group_id
        self.ref_count = ref_count
        self.packet_count = packet_count
        self.byte_count = byte_count
        self.duration_sec = duration_sec
        self.duration_nsec = duration_nsec
        self.bucket_stats = bucket_stats

    @classmethod
    def parser(cls, buf, offset):
        fields = struct.unpack_from(ofproto.OFP_GROUP_STATS_PACK_STR,
                                    buf, offset)
        stats = cls(*fields)
        # Per-bucket counters follow the fixed part, up to the entry's
        # advertised total length.
        end = offset + stats.length
        offset += ofproto.OFP_GROUP_STATS_SIZE
        stats.bucket_stats = []
        while offset < end:
            stats.bucket_stats.append(OFPBucketCounter.parser(buf, offset))
            offset += ofproto.OFP_BUCKET_COUNTER_SIZE
        return stats
@_set_stats_type(ofproto.OFPMP_GROUP, OFPGroupStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPGroupStatsRequest(OFPMultipartRequest):
    """
    Group statistics request message
    The controller uses this message to query statistics of one or more
    groups.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    group_id         ID of group to read (OFPG_ALL to all groups)
    ================ ======================================================
    Example::
        def send_group_stats_request(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            req = ofp_parser.OFPGroupStatsRequest(datapath, 0, ofp.OFPG_ALL)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, group_id=ofproto.OFPG_ALL,
                 type_=None):
        super(OFPGroupStatsRequest, self).__init__(datapath, flags)
        self.group_id = group_id
    def _serialize_stats_body(self):
        # The body is a single ofp_group_stats_request record placed right
        # after the common multipart-request header.
        msg_pack_into(ofproto.OFP_GROUP_STATS_REQUEST_PACK_STR,
                      self.buf,
                      ofproto.OFP_MULTIPART_REQUEST_SIZE,
                      self.group_id)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_GROUP, OFPGroupStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPGroupStatsReply(OFPMultipartReply):
    """
    Group statistics reply message
    The switch responds with this message to a group statistics request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPGroupStats`` instance
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPGroupStatsReply, MAIN_DISPATCHER)
        def group_stats_reply_handler(self, ev):
            groups = []
            for stat in ev.msg.body:
                groups.append('length=%d group_id=%d '
                              'ref_count=%d packet_count=%d byte_count=%d '
                              'duration_sec=%d duration_nsec=%d' %
                              (stat.length, stat.group_id,
                               stat.ref_count, stat.packet_count,
                               stat.byte_count, stat.duration_sec,
                               stat.duration_nsec))
            self.logger.debug('GroupStats: %s', groups)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        # Body parsing is done generically by OFPMultipartReply.
        super(OFPGroupStatsReply, self).__init__(datapath, **kwargs)
class OFPGroupDescStats(StringifyMixin):
    """Description of one group in an OFPMP_GROUP_DESC reply body.

    ``length`` is the total on-wire size of the entry; it is filled in by
    ``parser`` and may be None for locally constructed instances.
    """

    def __init__(self, type_=None, group_id=None, buckets=None, length=None):
        super(OFPGroupDescStats, self).__init__()
        # Bug fix: ``length`` was previously accepted but silently
        # discarded, leaving the attribute unset until parser() ran.
        self.length = length
        self.type = type_
        self.group_id = group_id
        self.buckets = buckets

    @classmethod
    def parser(cls, buf, offset):
        stats = cls()
        (stats.length, stats.type, stats.group_id) = struct.unpack_from(
            ofproto.OFP_GROUP_DESC_STATS_PACK_STR, buf, offset)
        offset += ofproto.OFP_GROUP_DESC_STATS_SIZE
        # Buckets fill the remainder of the entry, up to ``length`` bytes.
        stats.buckets = []
        length = ofproto.OFP_GROUP_DESC_STATS_SIZE
        while length < stats.length:
            bucket = OFPBucket.parser(buf, offset)
            stats.buckets.append(bucket)
            offset += bucket.len
            length += bucket.len
        return stats
@_set_stats_type(ofproto.OFPMP_GROUP_DESC, OFPGroupDescStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPGroupDescStatsRequest(OFPMultipartRequest):
    """
    Group description request message
    The controller uses this message to list the set of groups on a switch.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    ================ ======================================================
    Example::
        def send_group_desc_stats_request(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            req = ofp_parser.OFPGroupDescStatsRequest(datapath, 0)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, type_=None):
        # This request carries no body beyond the common multipart header.
        super(OFPGroupDescStatsRequest, self).__init__(datapath, flags)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_GROUP_DESC, OFPGroupDescStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPGroupDescStatsReply(OFPMultipartReply):
    """
    Group description reply message
    The switch responds with this message to a group description request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPGroupDescStats`` instance
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPGroupDescStatsReply, MAIN_DISPATCHER)
        def group_desc_stats_reply_handler(self, ev):
            descs = []
            for stat in ev.msg.body:
                descs.append('length=%d type=%d group_id=%d '
                             'buckets=%s' %
                             (stat.length, stat.type, stat.group_id,
                              stat.buckets))
            self.logger.debug('GroupDescStats: %s', descs)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        # Body parsing is done generically by OFPMultipartReply.
        super(OFPGroupDescStatsReply, self).__init__(datapath, **kwargs)
class OFPGroupFeaturesStats(ofproto_parser.namedtuple('OFPGroupFeaturesStats',
                            ('types', 'capabilities', 'max_groups',
                             'actions'))):
    @classmethod
    def parser(cls, buf, offset):
        # Wire layout: types, capabilities, then four max_groups words and
        # four actions words (one per group type).
        fields = struct.unpack_from(
            ofproto.OFP_GROUP_FEATURES_PACK_STR, buf, offset)
        stats = cls(fields[0], fields[1],
                    list(fields[2:6]), list(fields[6:10]))
        stats.length = ofproto.OFP_GROUP_FEATURES_SIZE
        return stats
@_set_stats_type(ofproto.OFPMP_GROUP_FEATURES, OFPGroupFeaturesStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPGroupFeaturesStatsRequest(OFPMultipartRequest):
    """
    Group features request message
    The controller uses this message to list the capabilities of groups on
    a switch.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    ================ ======================================================
    Example::
        def send_group_features_stats_request(self, datapath):
            ofp_parser = datapath.ofproto_parser
            req = ofp_parser.OFPGroupFeaturesStatsRequest(datapath, 0)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, type_=None):
        # This request carries no body beyond the common multipart header.
        super(OFPGroupFeaturesStatsRequest, self).__init__(datapath, flags)
@OFPMultipartReply.register_stats_type(body_single_struct=True)
@_set_stats_type(ofproto.OFPMP_GROUP_FEATURES, OFPGroupFeaturesStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPGroupFeaturesStatsReply(OFPMultipartReply):
    """
    Group features reply message
    The switch responds with this message to a group features request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             Instance of ``OFPGroupFeaturesStats``
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPGroupFeaturesStatsReply, MAIN_DISPATCHER)
        def group_features_stats_reply_handler(self, ev):
            body = ev.msg.body
            self.logger.debug('GroupFeaturesStats: types=%d '
                              'capabilities=0x%08x max_groups=%s '
                              'actions=%s',
                              body.types, body.capabilities,
                              body.max_groups, body.actions)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        # body_single_struct=True: the body is one struct, not a list.
        super(OFPGroupFeaturesStatsReply, self).__init__(datapath, **kwargs)
class OFPMeterBandStats(StringifyMixin):
    """In-band packet/byte counters for one band of a meter."""

    def __init__(self, packet_band_count, byte_band_count):
        super(OFPMeterBandStats, self).__init__()
        self.packet_band_count = packet_band_count
        self.byte_band_count = byte_band_count

    @classmethod
    def parser(cls, buf, offset):
        # One fixed-size ofp_meter_band_stats record.
        counts = struct.unpack_from(
            ofproto.OFP_METER_BAND_STATS_PACK_STR, buf, offset)
        return cls(*counts)
class OFPMeterStats(StringifyMixin):
    """Statistics entry for one meter in an OFPMP_METER reply body."""

    def __init__(self, meter_id=None, flow_count=None, packet_in_count=None,
                 byte_in_count=None, duration_sec=None, duration_nsec=None,
                 band_stats=None, len_=None):
        super(OFPMeterStats, self).__init__()
        self.meter_id = meter_id
        # Bug fix: ``len_`` used to be accepted but ignored (self.len was
        # unconditionally 0).  Keep 0 as default for backward compatibility.
        self.len = 0 if len_ is None else len_
        self.flow_count = flow_count
        self.packet_in_count = packet_in_count
        self.byte_in_count = byte_in_count
        self.duration_sec = duration_sec
        self.duration_nsec = duration_nsec
        self.band_stats = band_stats

    @classmethod
    def parser(cls, buf, offset):
        meter_stats = cls()
        (meter_stats.meter_id, meter_stats.len,
         meter_stats.flow_count, meter_stats.packet_in_count,
         meter_stats.byte_in_count, meter_stats.duration_sec,
         meter_stats.duration_nsec) = struct.unpack_from(
            ofproto.OFP_METER_STATS_PACK_STR, buf, offset)
        offset += ofproto.OFP_METER_STATS_SIZE
        # Per-band counters follow the fixed part, up to ``len`` bytes.
        meter_stats.band_stats = []
        length = ofproto.OFP_METER_STATS_SIZE
        while length < meter_stats.len:
            band_stats = OFPMeterBandStats.parser(buf, offset)
            meter_stats.band_stats.append(band_stats)
            offset += ofproto.OFP_METER_BAND_STATS_SIZE
            length += ofproto.OFP_METER_BAND_STATS_SIZE
        return meter_stats
@_set_stats_type(ofproto.OFPMP_METER, OFPMeterStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPMeterStatsRequest(OFPMultipartRequest):
    """
    Meter statistics request message
    The controller uses this message to query statistics for one or more
    meters.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    meter_id         ID of meter to read (OFPM_ALL to all meters)
    ================ ======================================================
    Example::
        def send_meter_stats_request(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            req = ofp_parser.OFPMeterStatsRequest(datapath, 0, ofp.OFPM_ALL)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, meter_id=ofproto.OFPM_ALL,
                 type_=None):
        super(OFPMeterStatsRequest, self).__init__(datapath, flags)
        self.meter_id = meter_id
    def _serialize_stats_body(self):
        # The body is a single ofp_meter_multipart_request record placed
        # right after the common multipart-request header.
        msg_pack_into(ofproto.OFP_METER_MULTIPART_REQUEST_PACK_STR,
                      self.buf,
                      ofproto.OFP_MULTIPART_REQUEST_SIZE,
                      self.meter_id)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_METER, OFPMeterStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPMeterStatsReply(OFPMultipartReply):
    """
    Meter statistics reply message
    The switch responds with this message to a meter statistics request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPMeterStats`` instance
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPMeterStatsReply, MAIN_DISPATCHER)
        def meter_stats_reply_handler(self, ev):
            meters = []
            for stat in ev.msg.body:
                meters.append('meter_id=0x%08x len=%d flow_count=%d '
                              'packet_in_count=%d byte_in_count=%d '
                              'duration_sec=%d duration_nsec=%d '
                              'band_stats=%s' %
                              (stat.meter_id, stat.len, stat.flow_count,
                               stat.packet_in_count, stat.byte_in_count,
                               stat.duration_sec, stat.duration_nsec,
                               stat.band_stats))
            self.logger.debug('MeterStats: %s', meters)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        # Body parsing is done generically by OFPMultipartReply.
        super(OFPMeterStatsReply, self).__init__(datapath, **kwargs)
class OFPMeterBand(StringifyMixin):
    # Common base for meter bands: every band starts with (type, len).
    def __init__(self, type_, len_):
        super(OFPMeterBand, self).__init__()
        self.type = type_
        self.len = len_
class OFPMeterBandHeader(OFPMeterBand):
    # Maps OFPMBT_* type codes to their concrete band classes.
    _METER_BAND = {}
    @staticmethod
    def register_meter_band_type(type_, len_):
        # Class decorator: register cls as the parser for ``type_`` and
        # stamp the wire type/length onto the class for __init__/parser.
        def _register_meter_band_type(cls):
            OFPMeterBandHeader._METER_BAND[type_] = cls
            cls.cls_meter_band_type = type_
            cls.cls_meter_band_len = len_
            return cls
        return _register_meter_band_type
    def __init__(self):
        # Type and length come from the class attributes set at
        # registration time, so subclasses need not pass them.
        cls = self.__class__
        super(OFPMeterBandHeader, self).__init__(cls.cls_meter_band_type,
                                                 cls.cls_meter_band_len)
    @classmethod
    def parser(cls, buf, offset):
        # Peek at the common band header, then delegate to the concrete
        # class registered for this band type.
        type_, len_, _rate, _burst_size = struct.unpack_from(
            ofproto.OFP_METER_BAND_HEADER_PACK_STR, buf, offset)
        cls_ = cls._METER_BAND[type_]
        assert cls_.cls_meter_band_len == len_
        return cls_.parser(buf, offset)
@OFPMeterBandHeader.register_meter_band_type(
    ofproto.OFPMBT_DROP, ofproto.OFP_METER_BAND_DROP_SIZE)
class OFPMeterBandDrop(OFPMeterBandHeader):
    """Meter band that drops packets exceeding ``rate``."""

    def __init__(self, rate=0, burst_size=0, type_=None, len_=None):
        super(OFPMeterBandDrop, self).__init__()
        self.rate = rate
        self.burst_size = burst_size

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_METER_BAND_DROP_PACK_STR, buf, offset,
                      self.type, self.len, self.rate, self.burst_size)

    @classmethod
    def parser(cls, buf, offset):
        # (type, len, rate, burst_size)
        band = struct.unpack_from(
            ofproto.OFP_METER_BAND_DROP_PACK_STR, buf, offset)
        assert band[0] == cls.cls_meter_band_type
        assert band[1] == cls.cls_meter_band_len
        return cls(*band[2:])
@OFPMeterBandHeader.register_meter_band_type(
    ofproto.OFPMBT_DSCP_REMARK,
    ofproto.OFP_METER_BAND_DSCP_REMARK_SIZE)
class OFPMeterBandDscpRemark(OFPMeterBandHeader):
    """Meter band that increases the DSCP drop precedence of packets
    exceeding ``rate``."""

    def __init__(self, rate=0, burst_size=0, prec_level=0,
                 type_=None, len_=None):
        super(OFPMeterBandDscpRemark, self).__init__()
        self.rate = rate
        self.burst_size = burst_size
        self.prec_level = prec_level

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_METER_BAND_DSCP_REMARK_PACK_STR, buf,
                      offset, self.type, self.len, self.rate,
                      self.burst_size, self.prec_level)

    @classmethod
    def parser(cls, buf, offset):
        # (type, len, rate, burst_size, prec_level)
        band = struct.unpack_from(
            ofproto.OFP_METER_BAND_DSCP_REMARK_PACK_STR, buf, offset)
        assert band[0] == cls.cls_meter_band_type
        assert band[1] == cls.cls_meter_band_len
        return cls(*band[2:])
@OFPMeterBandHeader.register_meter_band_type(
    ofproto.OFPMBT_EXPERIMENTER,
    ofproto.OFP_METER_BAND_EXPERIMENTER_SIZE)
class OFPMeterBandExperimenter(OFPMeterBandHeader):
    """Experimenter-defined meter band."""

    def __init__(self, rate=0, burst_size=0, experimenter=None,
                 type_=None, len_=None):
        super(OFPMeterBandExperimenter, self).__init__()
        self.rate = rate
        self.burst_size = burst_size
        self.experimenter = experimenter

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_METER_BAND_EXPERIMENTER_PACK_STR, buf,
                      offset, self.type, self.len, self.rate,
                      self.burst_size, self.experimenter)

    @classmethod
    def parser(cls, buf, offset):
        # (type, len, rate, burst_size, experimenter)
        band = struct.unpack_from(
            ofproto.OFP_METER_BAND_EXPERIMENTER_PACK_STR, buf, offset)
        assert band[0] == cls.cls_meter_band_type
        assert band[1] == cls.cls_meter_band_len
        return cls(*band[2:])
class OFPMeterConfigStats(StringifyMixin):
    """Configuration of one meter in an OFPMP_METER_CONFIG reply body."""

    def __init__(self, flags=None, meter_id=None, bands=None, length=None):
        super(OFPMeterConfigStats, self).__init__()
        # Bug fix: ``length`` was previously accepted but discarded
        # (the attribute was unconditionally set to None).
        self.length = length
        self.flags = flags
        self.meter_id = meter_id
        self.bands = bands

    @classmethod
    def parser(cls, buf, offset):
        meter_config = cls()
        (meter_config.length, meter_config.flags,
         meter_config.meter_id) = struct.unpack_from(
            ofproto.OFP_METER_CONFIG_PACK_STR, buf, offset)
        offset += ofproto.OFP_METER_CONFIG_SIZE
        # Bands fill the remainder of the entry, up to ``length`` bytes.
        meter_config.bands = []
        length = ofproto.OFP_METER_CONFIG_SIZE
        while length < meter_config.length:
            band = OFPMeterBandHeader.parser(buf, offset)
            meter_config.bands.append(band)
            offset += band.len
            length += band.len
        return meter_config
@_set_stats_type(ofproto.OFPMP_METER_CONFIG, OFPMeterConfigStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPMeterConfigStatsRequest(OFPMultipartRequest):
    """
    Meter configuration statistics request message
    The controller uses this message to query configuration for one or more
    meters.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    meter_id         ID of meter to read (OFPM_ALL to all meters)
    ================ ======================================================
    Example::
        def send_meter_config_stats_request(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            req = ofp_parser.OFPMeterConfigStatsRequest(datapath, 0,
                                                        ofp.OFPM_ALL)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, meter_id=ofproto.OFPM_ALL,
                 type_=None):
        super(OFPMeterConfigStatsRequest, self).__init__(datapath, flags)
        self.meter_id = meter_id
    def _serialize_stats_body(self):
        # The body is a single ofp_meter_multipart_request record placed
        # right after the common multipart-request header.
        msg_pack_into(ofproto.OFP_METER_MULTIPART_REQUEST_PACK_STR,
                      self.buf,
                      ofproto.OFP_MULTIPART_REQUEST_SIZE,
                      self.meter_id)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_METER_CONFIG, OFPMeterConfigStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPMeterConfigStatsReply(OFPMultipartReply):
    """
    Meter configuration statistics reply message
    The switch responds with this message to a meter configuration
    statistics request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPMeterConfigStats`` instance
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPMeterConfigStatsReply, MAIN_DISPATCHER)
        def meter_config_stats_reply_handler(self, ev):
            configs = []
            for stat in ev.msg.body:
                configs.append('length=%d flags=0x%04x meter_id=0x%08x '
                               'bands=%s' %
                               (stat.length, stat.flags, stat.meter_id,
                                stat.bands))
            self.logger.debug('MeterConfigStats: %s', configs)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        # Body parsing is done generically by OFPMultipartReply.
        super(OFPMeterConfigStatsReply, self).__init__(datapath, **kwargs)
class OFPMeterFeaturesStats(ofproto_parser.namedtuple('OFPMeterFeaturesStats',
                            ('max_meter', 'band_types', 'capabilities',
                             'max_bands', 'max_color'))):
    @classmethod
    def parser(cls, buf, offset):
        # One fixed-size ofp_meter_features record.
        features = struct.unpack_from(
            ofproto.OFP_METER_FEATURES_PACK_STR, buf, offset)
        stats = cls(*features)
        stats.length = ofproto.OFP_METER_FEATURES_SIZE
        return stats
@_set_stats_type(ofproto.OFPMP_METER_FEATURES, OFPMeterFeaturesStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPMeterFeaturesStatsRequest(OFPMultipartRequest):
    """
    Meter features statistics request message
    The controller uses this message to query the set of features of the
    metering subsystem.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    ================ ======================================================
    Example::
        def send_meter_features_stats_request(self, datapath):
            ofp_parser = datapath.ofproto_parser
            req = ofp_parser.OFPMeterFeaturesStatsRequest(datapath, 0)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, type_=None):
        # This request carries no body beyond the common multipart header.
        super(OFPMeterFeaturesStatsRequest, self).__init__(datapath, flags)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_METER_FEATURES, OFPMeterFeaturesStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPMeterFeaturesStatsReply(OFPMultipartReply):
    """
    Meter features statistics reply message
    The switch responds with this message to a meter features statistics
    request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPMeterFeaturesStats`` instance
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPMeterFeaturesStatsReply, MAIN_DISPATCHER)
        def meter_features_stats_reply_handler(self, ev):
            features = []
            for stat in ev.msg.body:
                features.append('max_meter=%d band_types=0x%08x '
                                'capabilities=0x%08x max_bands=%d '
                                'max_color=%d' %
                                (stat.max_meter, stat.band_types,
                                 stat.capabilities, stat.max_bands,
                                 stat.max_color))
            self.logger.debug('MeterFeaturesStats: %s', features)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        # Body parsing is done generically by OFPMultipartReply.
        super(OFPMeterFeaturesStatsReply, self).__init__(datapath, **kwargs)
class OFPFlowUpdate(StringifyMixin):
    # Common base for flow-monitor updates: every update starts with
    # (length, event).
    def __init__(self, length, event):
        super(OFPFlowUpdate, self).__init__()
        self.length = length
        self.event = event
class OFPFlowUpdateHeader(OFPFlowUpdate):
    # Maps OFPFME_* event codes to their concrete update classes.
    _EVENT = {}
    @staticmethod
    def register_flow_update_event(event, length):
        # Class decorator: register cls as the parser for ``event`` and
        # stamp the wire event/length onto the class.
        def _register_flow_update_event(cls):
            OFPFlowUpdateHeader._EVENT[event] = cls
            cls.cls_flow_update_event = event
            cls.cls_flow_update_length = length
            return cls
        return _register_flow_update_event
    def __init__(self, length=None, event=None):
        # The event code always comes from the registered class attribute.
        cls = self.__class__
        super(OFPFlowUpdateHeader, self).__init__(length,
                                                  cls.cls_flow_update_event)
        self.length = length
    @classmethod
    def parser(cls, buf, offset):
        # Peek at the common update header, then delegate to the concrete
        # class registered for this event.
        length, event = struct.unpack_from(
            ofproto.OFP_FLOW_UPDATE_HEADER_PACK_STR, buf, offset)
        cls_ = cls._EVENT[event]
        return cls_.parser(buf, offset)
@OFPFlowUpdateHeader.register_flow_update_event(
    ofproto.OFPFME_INITIAL, ofproto.OFP_FLOW_UPDATE_FULL_SIZE)
@OFPFlowUpdateHeader.register_flow_update_event(
    ofproto.OFPFME_ADDED, ofproto.OFP_FLOW_UPDATE_FULL_SIZE)
@OFPFlowUpdateHeader.register_flow_update_event(
    ofproto.OFPFME_REMOVED, ofproto.OFP_FLOW_UPDATE_FULL_SIZE)
@OFPFlowUpdateHeader.register_flow_update_event(
    ofproto.OFPFME_MODIFIED, ofproto.OFP_FLOW_UPDATE_FULL_SIZE)
class OFPFlowUpdateFull(OFPFlowUpdateHeader):
    # Full flow update: carries the complete flow entry (match plus
    # instructions).  Used for INITIAL/ADDED/REMOVED/MODIFIED events.
    # NOTE: only the last decorator's event/length survive as the class
    # attributes; ``event`` passed to __init__/parser distinguishes them.
    def __init__(self, length=None, event=None, table_id=None, reason=None,
                 idle_timeout=None, hard_timeout=None, priority=None,
                 cookie=None, match=None, instructions=None):
        instructions = instructions if instructions else []
        super(OFPFlowUpdateFull, self).__init__(length, event)
        self.table_id = table_id
        self.reason = reason
        self.idle_timeout = idle_timeout
        self.hard_timeout = hard_timeout
        self.priority = priority
        self.cookie = cookie
        self.match = match
        # REMOVED updates never carry instructions.
        assert (event != ofproto.OFPFME_REMOVED or len(instructions) == 0)
        for i in instructions:
            assert isinstance(i, OFPInstruction)
        self.instructions = instructions
    @classmethod
    def parser(cls, buf, offset):
        (length, event, table_id, reason, idle_timeout, hard_timeout, priority,
         cookie) = struct.unpack_from(ofproto.OFP_FLOW_UPDATE_FULL_0_PACK_STR,
                                      buf, offset)
        offset += ofproto.OFP_FLOW_UPDATE_FULL_0_SIZE
        assert cls.cls_flow_update_length <= length
        assert cls.cls_flow_update_event == event
        match = OFPMatch.parser(buf, offset)
        # The match is padded to an 8-byte boundary on the wire.
        match_length = utils.round_up(match.length, 8)
        offset += match_length
        # Instructions occupy whatever remains of the update.
        inst_length = (length - ofproto.OFP_FLOW_UPDATE_FULL_0_SIZE -
                       match_length)
        instructions = []
        while inst_length > 0:
            inst = OFPInstruction.parser(buf, offset)
            instructions.append(inst)
            offset += inst.len
            inst_length -= inst.len
        return cls(length, event, table_id, reason, idle_timeout,
                   hard_timeout, priority, cookie, match, instructions)
@OFPFlowUpdateHeader.register_flow_update_event(
    ofproto.OFPFME_ABBREV, ofproto.OFP_FLOW_UPDATE_ABBREV_SIZE)
class OFPFlowUpdateAbbrev(OFPFlowUpdateHeader):
    """Abbreviated flow monitor update (OFPFME_ABBREV).

    Carries only the xid of the flow mod request that caused the change.
    """
    def __init__(self, length=None, event=None, xid=None):
        super(OFPFlowUpdateAbbrev, self).__init__(length, event)
        self.xid = xid
    @classmethod
    def parser(cls, buf, offset):
        # Fixed-size message: both length and event must match the values
        # stamped on the class by the registration decorator.
        length, event, xid = struct.unpack_from(
            ofproto.OFP_FLOW_UPDATE_ABBREV_PACK_STR, buf, offset)
        assert cls.cls_flow_update_length == length
        assert cls.cls_flow_update_event == event
        return cls(length, event, xid)
@OFPFlowUpdateHeader.register_flow_update_event(
    ofproto.OFPFME_PAUSED, ofproto.OFP_FLOW_UPDATE_PAUSED_SIZE)
@OFPFlowUpdateHeader.register_flow_update_event(
    ofproto.OFPFME_RESUMED, ofproto.OFP_FLOW_UPDATE_PAUSED_SIZE)
class OFPFlowUpdatePaused(OFPFlowUpdateHeader):
    """Paused/resumed flow monitor update (OFPFME_PAUSED/OFPFME_RESUMED).

    Carries no payload beyond the common update header.
    """
    @classmethod
    def parser(cls, buf, offset):
        """Parse one paused/resumed update starting at buf[offset]."""
        length, event = struct.unpack_from(
            ofproto.OFP_FLOW_UPDATE_PAUSED_PACK_STR, buf, offset)
        assert cls.cls_flow_update_length == length
        # BUGFIX: this class is registered for both PAUSED and RESUMED, but
        # cls.cls_flow_update_event holds only the last-applied decorator's
        # code, so the previous `assert cls.cls_flow_update_event == event`
        # wrongly rejected one of the two valid events.  Accept either
        # registered code.
        assert event in [ofproto.OFPFME_PAUSED, ofproto.OFPFME_RESUMED]
        return cls(length, event)
class OFPFlowMonitorRequestBase(OFPMultipartRequest):
    """Common state and serialization for OFPMP_FLOW_MONITOR requests."""
    def __init__(self, datapath, flags, monitor_id, out_port, out_group,
                 monitor_flags, table_id, command, match):
        super(OFPFlowMonitorRequestBase, self).__init__(datapath, flags)
        self.monitor_id = monitor_id
        self.out_port = out_port
        self.out_group = out_group
        self.monitor_flags = monitor_flags
        self.table_id = table_id
        self.command = command
        self.match = match
    def _serialize_stats_body(self):
        # Fixed fields first, then the variable-length match appended after
        # the fixed part of the request body.
        offset = ofproto.OFP_MULTIPART_REQUEST_SIZE
        msg_pack_into(ofproto.OFP_FLOW_MONITOR_REQUEST_0_PACK_STR, self.buf,
                      offset, self.monitor_id, self.out_port, self.out_group,
                      self.monitor_flags, self.table_id, self.command)
        offset += ofproto.OFP_FLOW_MONITOR_REQUEST_0_SIZE
        self.match.serialize(self.buf, offset)
@_set_stats_type(ofproto.OFPMP_FLOW_MONITOR, OFPFlowUpdateHeader)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPFlowMonitorRequest(OFPFlowMonitorRequestBase):
    """
    Flow monitor request message
    The controller uses this message to query flow monitors.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    monitor_id       Controller-assigned ID for this monitor
    out_port         Require matching entries to include this as an output
                     port
    out_group        Require matching entries to include this as an output
                     group
    monitor_flags    Bitmap of the following flags.
                     | OFPFMF_INITIAL
                     | OFPFMF_ADD
                     | OFPFMF_REMOVED
                     | OFPFMF_MODIFY
                     | OFPFMF_INSTRUCTIONS
                     | OFPFMF_NO_ABBREV
                     | OFPFMF_ONLY_OWN
    table_id         ID of table to monitor
    command          One of the following values.
                     | OFPFMC_ADD
                     | OFPFMC_MODIFY
                     | OFPFMC_DELETE
    match            Instance of ``OFPMatch``
    ================ ======================================================
    Example::
        def send_flow_monitor_request(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            monitor_flags = [ofp.OFPFMF_INITIAL, ofp.OFPFMF_ONLY_OWN]
            match = ofp_parser.OFPMatch(in_port=1)
            req = ofp_parser.OFPFlowMonitorRequest(datapath, 0, 10000,
                                                   ofp.OFPP_ANY, ofp.OFPG_ANY,
                                                   monitor_flags,
                                                   ofp.OFPTT_ALL,
                                                   ofp.OFPFMC_ADD, match)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, monitor_id=0,
                 out_port=ofproto.OFPP_ANY, out_group=ofproto.OFPG_ANY,
                 monitor_flags=0, table_id=ofproto.OFPTT_ALL,
                 command=ofproto.OFPFMC_ADD, match=None, type_=None):
        # Default to an empty (match-all) match; type_ is accepted for
        # parser-signature compatibility but unused here.
        if match is None:
            match = OFPMatch()
        super(OFPFlowMonitorRequest, self).__init__(datapath, flags,
                                                    monitor_id, out_port,
                                                    out_group, monitor_flags,
                                                    table_id, command, match)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_FLOW_MONITOR, OFPFlowUpdateHeader)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPFlowMonitorReply(OFPMultipartReply):
    """
    Flow monitor reply message
    The switch responds with this message to a flow monitor request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of list of the following class instance.
                     | OFPFlowUpdateFull
                     | OFPFlowUpdateAbbrev
                     | OFPFlowUpdatePaused
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPFlowMonitorReply, MAIN_DISPATCHER)
        def flow_monitor_reply_handler(self, ev):
            msg = ev.msg
            dp = msg.datapath
            ofp = dp.ofproto
            flow_updates = []
            for update in msg.body:
                update_str = 'length=%d event=%d' % (update.length,
                                                     update.event)
                if (update.event == ofp.OFPFME_INITIAL or
                        update.event == ofp.OFPFME_ADDED or
                        update.event == ofp.OFPFME_REMOVED or
                        update.event == ofp.OFPFME_MODIFIED):
                    update_str += ('table_id=%d reason=%d idle_timeout=%d '
                                   'hard_timeout=%d priority=%d cookie=%d '
                                   'match=%d instructions=%s' %
                                   (update.table_id, update.reason,
                                    update.idle_timeout, update.hard_timeout,
                                    update.priority, update.cookie,
                                    update.match, update.instructions))
                elif update.event == ofp.OFPFME_ABBREV:
                    update_str += 'xid=%d' % (update.xid)
                flow_updates.append(update_str)
            self.logger.debug('FlowUpdates: %s', flow_updates)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        # type_ is accepted for parser-signature compatibility but unused here.
        super(OFPFlowMonitorReply, self).__init__(datapath, **kwargs)
class OFPExperimenterMultipart(ofproto_parser.namedtuple(
        'OFPExperimenterMultipart',
        ('experimenter', 'exp_type', 'data'))):
    """
    The body of OFPExperimenterStatsReply multipart messages.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    experimenter     Experimenter ID
    exp_type         Experimenter defined
    data             Experimenter defined additional data
    ================ ======================================================
    """
    @classmethod
    def parser(cls, buf, offset):
        # Fixed header fields, then everything after the header is opaque
        # experimenter data.
        args = struct.unpack_from(
            ofproto.OFP_EXPERIMENTER_MULTIPART_HEADER_PACK_STR, buf,
            offset)
        args = list(args)
        args.append(buf[offset +
                        ofproto.OFP_EXPERIMENTER_MULTIPART_HEADER_SIZE:])
        stats = cls(*args)
        # NOTE(review): OFP_METER_FEATURES_SIZE looks like a copy-paste
        # from the meter-features parser rather than the real body length;
        # confirm against the multipart reply framing before relying on
        # `length` here.
        stats.length = ofproto.OFP_METER_FEATURES_SIZE
        return stats
    def serialize(self):
        # Pack the fixed header, then append the opaque data verbatim.
        buf = bytearray()
        msg_pack_into(ofproto.OFP_EXPERIMENTER_MULTIPART_HEADER_PACK_STR,
                      buf, 0,
                      self.experimenter, self.exp_type)
        return buf + self.data
class OFPExperimenterStatsRequestBase(OFPMultipartRequest):
    """Common state for OFPMP_EXPERIMENTER multipart requests."""
    def __init__(self, datapath, flags,
                 experimenter, exp_type,
                 type_=None):
        # type_ is accepted for parser-signature compatibility but unused here.
        super(OFPExperimenterStatsRequestBase, self).__init__(datapath, flags)
        self.experimenter = experimenter
        self.exp_type = exp_type
@_set_stats_type(ofproto.OFPMP_EXPERIMENTER, OFPExperimenterMultipart)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPExperimenterStatsRequest(OFPExperimenterStatsRequestBase):
    """
    Experimenter multipart request message
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    experimenter     Experimenter ID
    exp_type         Experimenter defined
    data             Experimenter defined additional data
    ================ ======================================================
    """
    def __init__(self, datapath, flags,
                 experimenter, exp_type, data,
                 type_=None):
        super(OFPExperimenterStatsRequest, self).__init__(datapath, flags,
                                                          experimenter,
                                                          exp_type, type_)
        self.data = data
    def _serialize_stats_body(self):
        # Delegate wire formatting to the shared multipart body class.
        body = OFPExperimenterMultipart(experimenter=self.experimenter,
                                        exp_type=self.exp_type,
                                        data=self.data)
        self.buf += body.serialize()
@OFPMultipartReply.register_stats_type(body_single_struct=True)
@_set_stats_type(ofproto.OFPMP_EXPERIMENTER, OFPExperimenterMultipart)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPExperimenterStatsReply(OFPMultipartReply):
    """
    Experimenter multipart reply message
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             An ``OFPExperimenterMultipart`` instance
    ================ ======================================================
    """
    def __init__(self, datapath, type_=None, **kwargs):
        # type_ is accepted for parser-signature compatibility but unused here.
        super(OFPExperimenterStatsReply, self).__init__(datapath, **kwargs)
class OFPFlowStats(StringifyMixin):
    """One entry in the body of an individual flow statistics reply."""
    def __init__(self, table_id=None, duration_sec=None, duration_nsec=None,
                 priority=None, idle_timeout=None, hard_timeout=None,
                 flags=None, importance=None, cookie=None, packet_count=None,
                 byte_count=None, match=None, instructions=None,
                 length=None):
        super(OFPFlowStats, self).__init__()
        self.table_id = table_id
        self.duration_sec = duration_sec
        self.duration_nsec = duration_nsec
        self.priority = priority
        self.idle_timeout = idle_timeout
        self.hard_timeout = hard_timeout
        self.flags = flags
        self.importance = importance
        self.cookie = cookie
        self.packet_count = packet_count
        self.byte_count = byte_count
        self.match = match
        self.instructions = instructions
        self.length = length
    @classmethod
    def parser(cls, buf, offset):
        """Parse one flow stats entry starting at buf[offset]."""
        flow_stats = cls()
        (flow_stats.length, flow_stats.table_id,
         flow_stats.duration_sec, flow_stats.duration_nsec,
         flow_stats.priority, flow_stats.idle_timeout,
         flow_stats.hard_timeout, flow_stats.flags,
         flow_stats.importance, flow_stats.cookie,
         flow_stats.packet_count,
         flow_stats.byte_count) = struct.unpack_from(
            ofproto.OFP_FLOW_STATS_0_PACK_STR, buf, offset)
        offset += ofproto.OFP_FLOW_STATS_0_SIZE
        flow_stats.match = OFPMatch.parser(buf, offset)
        # The match is padded to a multiple of 8 bytes on the wire.
        match_length = utils.round_up(flow_stats.match.length, 8)
        # Instructions occupy whatever remains of `length` after the fixed
        # part and the padded match (OFP_FLOW_STATS_SIZE already includes a
        # minimal OFP_MATCH_SIZE, hence the subtraction).
        inst_length = (flow_stats.length - (ofproto.OFP_FLOW_STATS_SIZE -
                                            ofproto.OFP_MATCH_SIZE +
                                            match_length))
        offset += match_length
        instructions = []
        while inst_length > 0:
            inst = OFPInstruction.parser(buf, offset)
            instructions.append(inst)
            offset += inst.len
            inst_length -= inst.len
        flow_stats.instructions = instructions
        return flow_stats
class OFPFlowStatsRequestBase(OFPMultipartRequest):
    """Common state and serialization for flow/aggregate stats requests."""
    def __init__(self, datapath, flags, table_id, out_port, out_group,
                 cookie, cookie_mask, match):
        super(OFPFlowStatsRequestBase, self).__init__(datapath, flags)
        self.table_id = table_id
        self.out_port = out_port
        self.out_group = out_group
        self.cookie = cookie
        self.cookie_mask = cookie_mask
        self.match = match
    def _serialize_stats_body(self):
        # Fixed fields first, then the variable-length match appended after
        # the fixed part of the request body.
        offset = ofproto.OFP_MULTIPART_REQUEST_SIZE
        msg_pack_into(ofproto.OFP_FLOW_STATS_REQUEST_0_PACK_STR,
                      self.buf, offset, self.table_id, self.out_port,
                      self.out_group, self.cookie, self.cookie_mask)
        offset += ofproto.OFP_FLOW_STATS_REQUEST_0_SIZE
        self.match.serialize(self.buf, offset)
@_set_stats_type(ofproto.OFPMP_FLOW, OFPFlowStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPFlowStatsRequest(OFPFlowStatsRequestBase):
    """
    Individual flow statistics request message
    The controller uses this message to query individual flow statistics.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    table_id         ID of table to read
    out_port         Require matching entries to include this as an output
                     port
    out_group        Require matching entries to include this as an output
                     group
    cookie           Require matching entries to contain this cookie value
    cookie_mask      Mask used to restrict the cookie bits that must match
    match            Instance of ``OFPMatch``
    ================ ======================================================
    Example::
        def send_flow_stats_request(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            cookie = cookie_mask = 0
            match = ofp_parser.OFPMatch(in_port=1)
            req = ofp_parser.OFPFlowStatsRequest(datapath, 0,
                                                 ofp.OFPTT_ALL,
                                                 ofp.OFPP_ANY, ofp.OFPG_ANY,
                                                 cookie, cookie_mask,
                                                 match)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags=0, table_id=ofproto.OFPTT_ALL,
                 out_port=ofproto.OFPP_ANY,
                 out_group=ofproto.OFPG_ANY,
                 cookie=0, cookie_mask=0, match=None, type_=None):
        # Default to an empty (match-all) match; type_ is accepted for
        # parser-signature compatibility but unused here.
        if match is None:
            match = OFPMatch()
        super(OFPFlowStatsRequest, self).__init__(datapath, flags, table_id,
                                                  out_port, out_group,
                                                  cookie, cookie_mask, match)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_FLOW, OFPFlowStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPFlowStatsReply(OFPMultipartReply):
    """
    Individual flow statistics reply message
    The switch responds with this message to an individual flow statistics
    request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPFlowStats`` instance
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPFlowStatsReply, MAIN_DISPATCHER)
        def flow_stats_reply_handler(self, ev):
            flows = []
            for stat in ev.msg.body:
                flows.append('table_id=%s '
                             'duration_sec=%d duration_nsec=%d '
                             'priority=%d '
                             'idle_timeout=%d hard_timeout=%d flags=0x%04x '
                             'importance=%d cookie=%d packet_count=%d '
                             'byte_count=%d match=%s instructions=%s' %
                             (stat.table_id,
                              stat.duration_sec, stat.duration_nsec,
                              stat.priority,
                              stat.idle_timeout, stat.hard_timeout,
                              stat.flags, stat.importance,
                              stat.cookie, stat.packet_count, stat.byte_count,
                              stat.match, stat.instructions))
            self.logger.debug('FlowStats: %s', flows)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        # type_ is accepted for parser-signature compatibility but unused here.
        super(OFPFlowStatsReply, self).__init__(datapath, **kwargs)
class OFPAggregateStats(ofproto_parser.namedtuple('OFPAggregateStats', (
        'packet_count', 'byte_count', 'flow_count'))):
    """Body of an aggregate flow statistics reply (fixed-size record)."""
    @classmethod
    def parser(cls, buf, offset):
        """Unpack one aggregate stats record starting at buf[offset]."""
        fields = struct.unpack_from(
            ofproto.OFP_AGGREGATE_STATS_REPLY_PACK_STR, buf, offset)
        reply = cls(*fields)
        # The record has a fixed wire size; record it for the reply parser.
        reply.length = ofproto.OFP_AGGREGATE_STATS_REPLY_SIZE
        return reply
@_set_stats_type(ofproto.OFPMP_AGGREGATE, OFPAggregateStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPAggregateStatsRequest(OFPFlowStatsRequestBase):
    """
    Aggregate flow statistics request message
    The controller uses this message to query aggregate flow statictics.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    table_id         ID of table to read
    out_port         Require matching entries to include this as an output
                     port
    out_group        Require matching entries to include this as an output
                     group
    cookie           Require matching entries to contain this cookie value
    cookie_mask      Mask used to restrict the cookie bits that must match
    match            Instance of ``OFPMatch``
    ================ ======================================================
    Example::
        def send_aggregate_stats_request(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            cookie = cookie_mask = 0
            match = ofp_parser.OFPMatch(in_port=1)
            req = ofp_parser.OFPAggregateStatsRequest(datapath, 0,
                                                      ofp.OFPTT_ALL,
                                                      ofp.OFPP_ANY,
                                                      ofp.OFPG_ANY,
                                                      cookie, cookie_mask,
                                                      match)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags, table_id, out_port, out_group,
                 cookie, cookie_mask, match, type_=None):
        # type_ is accepted for parser-signature compatibility but unused here.
        super(OFPAggregateStatsRequest, self).__init__(datapath,
                                                       flags,
                                                       table_id,
                                                       out_port,
                                                       out_group,
                                                       cookie,
                                                       cookie_mask,
                                                       match)
@OFPMultipartReply.register_stats_type(body_single_struct=True)
@_set_stats_type(ofproto.OFPMP_AGGREGATE, OFPAggregateStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPAggregateStatsReply(OFPMultipartReply):
    """
    Aggregate flow statistics reply message
    The switch responds with this message to an aggregate flow statistics
    request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             Instance of ``OFPAggregateStats``
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPAggregateStatsReply, MAIN_DISPATCHER)
        def aggregate_stats_reply_handler(self, ev):
            body = ev.msg.body
            self.logger.debug('AggregateStats: packet_count=%d byte_count=%d '
                              'flow_count=%d',
                              body.packet_count, body.byte_count,
                              body.flow_count)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        # type_ is accepted for parser-signature compatibility but unused here.
        super(OFPAggregateStatsReply, self).__init__(datapath, **kwargs)
class OFPTableStats(ofproto_parser.namedtuple('OFPTableStats', (
        'table_id', 'active_count', 'lookup_count',
        'matched_count'))):
    """One entry in the body of a table statistics reply."""
    @classmethod
    def parser(cls, buf, offset):
        """Unpack one table stats record starting at buf[offset]."""
        fields = struct.unpack_from(ofproto.OFP_TABLE_STATS_PACK_STR,
                                    buf, offset)
        entry = cls(*fields)
        # The record has a fixed wire size; record it for the reply parser.
        entry.length = ofproto.OFP_TABLE_STATS_SIZE
        return entry
@_set_stats_type(ofproto.OFPMP_TABLE, OFPTableStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPTableStatsRequest(OFPMultipartRequest):
    """
    Table statistics request message
    The controller uses this message to query flow table statictics.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    ================ ======================================================
    Example::
        def send_table_stats_request(self, datapath):
            ofp_parser = datapath.ofproto_parser
            req = ofp_parser.OFPTableStatsRequest(datapath, 0)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags, type_=None):
        # type_ is accepted for parser-signature compatibility but unused here.
        super(OFPTableStatsRequest, self).__init__(datapath, flags)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_TABLE, OFPTableStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPTableStatsReply(OFPMultipartReply):
    """
    Table statistics reply message
    The switch responds with this message to a table statistics request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPTableStats`` instance
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPTableStatsReply, MAIN_DISPATCHER)
        def table_stats_reply_handler(self, ev):
            tables = []
            for stat in ev.msg.body:
                tables.append('table_id=%d active_count=%d lookup_count=%d '
                              ' matched_count=%d' %
                              (stat.table_id, stat.active_count,
                               stat.lookup_count, stat.matched_count))
            self.logger.debug('TableStats: %s', tables)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        # type_ is accepted for parser-signature compatibility but unused here.
        super(OFPTableStatsReply, self).__init__(datapath, **kwargs)
class OFPPortStatsProp(OFPPropBase):
    """Base class for port stats properties; subclasses register per type."""
    _TYPES = {}
@OFPPortStatsProp.register_type(ofproto.OFPPSPT_ETHERNET)
class OFPPortStatsPropEthernet(OFPPortStatsProp):
    """Ethernet-specific port stats property (OFPPSPT_ETHERNET)."""
    def __init__(self, type_=None, length=None, rx_frame_err=None,
                 rx_over_err=None, rx_crc_err=None, collisions=None):
        self.type = type_
        self.length = length
        self.rx_frame_err = rx_frame_err
        self.rx_over_err = rx_over_err
        self.rx_crc_err = rx_crc_err
        self.collisions = collisions
    @classmethod
    def parser(cls, buf):
        """Unpack one ethernet property from the start of buf."""
        # The pack string yields fields in the same order as __init__'s
        # positional parameters, so construct the instance directly.
        fields = struct.unpack_from(
            ofproto.OFP_PORT_STATS_PROP_ETHERNET_PACK_STR, buf, 0)
        return cls(*fields)
@OFPPortStatsProp.register_type(ofproto.OFPPSPT_OPTICAL)
class OFPPortStatsPropOptical(OFPPortStatsProp):
    """Optical-specific port stats property (OFPPSPT_OPTICAL)."""
    def __init__(self, type_=None, length=None, flags=None,
                 tx_freq_lmda=None, tx_offset=None, tx_grid_span=None,
                 rx_freq_lmda=None, rx_offset=None, rx_grid_span=None,
                 tx_pwr=None, rx_pwr=None, bias_current=None,
                 temperature=None):
        self.type = type_
        self.length = length
        self.flags = flags
        self.tx_freq_lmda = tx_freq_lmda
        self.tx_offset = tx_offset
        self.tx_grid_span = tx_grid_span
        self.rx_freq_lmda = rx_freq_lmda
        self.rx_offset = rx_offset
        self.rx_grid_span = rx_grid_span
        self.tx_pwr = tx_pwr
        self.rx_pwr = rx_pwr
        self.bias_current = bias_current
        self.temperature = temperature
    @classmethod
    def parser(cls, buf):
        # Unpack the fixed-layout property directly into the instance
        # attributes, in wire order.
        optical = cls()
        (optical.type, optical.length, optical.flags,
         optical.tx_freq_lmda, optical.tx_offset, optical.tx_grid_span,
         optical.rx_freq_lmda, optical.rx_offset, optical.rx_grid_span,
         optical.tx_pwr, optical.rx_pwr, optical.bias_current,
         optical.temperature) = struct.unpack_from(
            ofproto.OFP_PORT_STATS_PROP_OPTICAL_PACK_STR, buf, 0)
        return optical
@OFPPortStatsProp.register_type(ofproto.OFPPSPT_EXPERIMENTER)
class OFPPortStatsPropExperimenter(OFPPropCommonExperimenter4ByteData):
    """Experimenter port stats property; wire format handled by the base."""
    pass
class OFPPortStats(StringifyMixin):
    """One entry in the body of a port statistics reply."""
    def __init__(self, length=None, port_no=None, duration_sec=None,
                 duration_nsec=None, rx_packets=None, tx_packets=None,
                 rx_bytes=None, tx_bytes=None, rx_dropped=None,
                 tx_dropped=None, rx_errors=None, tx_errors=None,
                 properties=None):
        super(OFPPortStats, self).__init__()
        self.length = length
        self.port_no = port_no
        self.duration_sec = duration_sec
        self.duration_nsec = duration_nsec
        self.rx_packets = rx_packets
        self.tx_packets = tx_packets
        self.rx_bytes = rx_bytes
        self.tx_bytes = tx_bytes
        self.rx_dropped = rx_dropped
        self.tx_dropped = tx_dropped
        self.rx_errors = rx_errors
        self.tx_errors = tx_errors
        self.properties = properties
    @classmethod
    def parser(cls, buf, offset):
        """Parse one port stats entry starting at buf[offset]."""
        (length, port_no, duration_sec, duration_nsec, rx_packets,
         tx_packets, rx_bytes, tx_bytes, rx_dropped, tx_dropped,
         rx_errors, tx_errors) = struct.unpack_from(
            ofproto.OFP_PORT_STATS_PACK_STR, buf, offset)
        props = []
        # Properties fill the remainder of this entry, up to `length`.
        rest = buf[offset + ofproto.OFP_PORT_STATS_SIZE:offset + length]
        while rest:
            p, rest = OFPPortStatsProp.parse(rest)
            props.append(p)
        stats = cls(length, port_no, duration_sec, duration_nsec, rx_packets,
                    tx_packets, rx_bytes, tx_bytes, rx_dropped, tx_dropped,
                    rx_errors, tx_errors, props)
        return stats
@_set_stats_type(ofproto.OFPMP_PORT_STATS, OFPPortStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPPortStatsRequest(OFPMultipartRequest):
    """
    Port statistics request message
    The controller uses this message to query information about ports
    statistics.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    flags            Zero or ``OFPMPF_REQ_MORE``
    port_no          Port number to read (OFPP_ANY to all ports)
    ================ ======================================================
    Example::
        def send_port_stats_request(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            req = ofp_parser.OFPPortStatsRequest(datapath, 0, ofp.OFPP_ANY)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, flags, port_no, type_=None):
        # type_ is accepted for parser-signature compatibility but unused here.
        super(OFPPortStatsRequest, self).__init__(datapath, flags)
        self.port_no = port_no
    def _serialize_stats_body(self):
        msg_pack_into(ofproto.OFP_PORT_STATS_REQUEST_PACK_STR,
                      self.buf,
                      ofproto.OFP_MULTIPART_REQUEST_SIZE,
                      self.port_no)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_PORT_STATS, OFPPortStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPPortStatsReply(OFPMultipartReply):
    """
    Port statistics reply message
    The switch responds with this message to a port statistics request.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPPortStats`` instance
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPPortStatsReply, MAIN_DISPATCHER)
        def port_stats_reply_handler(self, ev):
            ports = []
            for stat in ev.msg.body:
                ports.append((stat.length, stat.port_no,
                              stat.duration_sec, stat.duration_nsec,
                              stat.rx_packets, stat.tx_packets,
                              stat.rx_bytes, stat.tx_bytes,
                              stat.rx_dropped, stat.tx_dropped,
                              stat.rx_errors, stat.tx_errors,
                              repr(stat.properties)))
            self.logger.debug('PortStats: %s', ports)
    """
    def __init__(self, datapath, type_=None, **kwargs):
        # type_ is accepted for parser-signature compatibility but unused here.
        super(OFPPortStatsReply, self).__init__(datapath, **kwargs)
@_set_msg_type(ofproto.OFPT_BARRIER_REQUEST)
class OFPBarrierRequest(MsgBase):
    """
    Barrier request message
    The controller sends this message to ensure message dependencies have
    been met or receive notifications for completed operations.
    Example::
        def send_barrier_request(self, datapath):
            ofp_parser = datapath.ofproto_parser
            req = ofp_parser.OFPBarrierRequest(datapath)
            datapath.send_msg(req)
    """
    def __init__(self, datapath):
        super(OFPBarrierRequest, self).__init__(datapath)
@_register_parser
@_set_msg_type(ofproto.OFPT_BARRIER_REPLY)
class OFPBarrierReply(MsgBase):
    """
    Barrier reply message
    The switch responds with this message to a barrier request.
    Example::
        @set_ev_cls(ofp_event.EventOFPBarrierReply, MAIN_DISPATCHER)
        def barrier_reply_handler(self, ev):
            self.logger.debug('OFPBarrierReply received')
    """
    def __init__(self, datapath):
        super(OFPBarrierReply, self).__init__(datapath)
@_register_parser
@_set_msg_type(ofproto.OFPT_PORT_STATUS)
class OFPPortStatus(MsgBase):
    """
    Port status message
    The switch notifies controller of change of ports.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    reason           One of the following values.
                     | OFPPR_ADD
                     | OFPPR_DELETE
                     | OFPPR_MODIFY
    desc             instance of ``OFPPort``
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPPortStatus, MAIN_DISPATCHER)
        def port_status_handler(self, ev):
            msg = ev.msg
            dp = msg.datapath
            ofp = dp.ofproto
            if msg.reason == ofp.OFPPR_ADD:
                reason = 'ADD'
            elif msg.reason == ofp.OFPPR_DELETE:
                reason = 'DELETE'
            elif msg.reason == ofp.OFPPR_MODIFY:
                reason = 'MODIFY'
            else:
                reason = 'unknown'
            self.logger.debug('OFPPortStatus received: reason=%s desc=%s',
                              reason, msg.desc)
    """
    def __init__(self, datapath, reason=None, desc=None):
        super(OFPPortStatus, self).__init__(datapath)
        self.reason = reason
        self.desc = desc
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPPortStatus, cls).parser(datapath, version, msg_type,
                                               msg_len, xid, buf)
        # Reason byte follows the common header; the port description
        # starts at the fixed offset defined by the protocol.
        msg.reason = struct.unpack_from(
            ofproto.OFP_PORT_STATUS_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)[0]
        msg.desc = OFPPort.parser(msg.buf, ofproto.OFP_PORT_STATUS_DESC_OFFSET)
        return msg
@_register_parser
@_set_msg_type(ofproto.OFPT_ROLE_STATUS)
class OFPRoleStatus(MsgBase):
    """
    Role status message
    The switch notifies controller of change of role.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    role             One of the following values.
                     | OFPCR_ROLE_NOCHANGE
                     | OFPCR_ROLE_EQUAL
                     | OFPCR_ROLE_MASTER
    reason           One of the following values.
                     | OFPCRR_MASTER_REQUEST
                     | OFPCRR_CONFIG
                     | OFPCRR_EXPERIMENTER
    generation_id    Master Election Generation ID
    properties       List of ``OFPRoleProp`` subclass instance
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPRoleStatus, MAIN_DISPATCHER)
        def role_status_handler(self, ev):
            msg = ev.msg
            dp = msg.datapath
            ofp = dp.ofproto
            if msg.role == ofp.OFPCR_ROLE_NOCHANGE:
                role = 'ROLE NOCHANGE'
            elif msg.role == ofp.OFPCR_ROLE_EQUAL:
                role = 'ROLE EQUAL'
            elif msg.role == ofp.OFPCR_ROLE_MASTER:
                role = 'ROLE MASTER'
            else:
                role = 'unknown'
            if msg.reason == ofp.OFPCRR_MASTER_REQUEST:
                reason = 'MASTER REQUEST'
            elif msg.reason == ofp.OFPCRR_CONFIG:
                reason = 'CONFIG'
            elif msg.reason == ofp.OFPCRR_EXPERIMENTER:
                reason = 'EXPERIMENTER'
            else:
                reason = 'unknown'
            self.logger.debug('OFPRoleStatus received: role=%s reason=%s '
                              'generation_id=%d properties=%s', role, reason,
                              msg.generation_id, repr(msg.properties))
    """
    def __init__(self, datapath, role=None, reason=None,
                 generation_id=None, properties=None):
        super(OFPRoleStatus, self).__init__(datapath)
        self.role = role
        self.reason = reason
        self.generation_id = generation_id
        self.properties = properties
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPRoleStatus, cls).parser(datapath, version, msg_type,
                                               msg_len, xid, buf)
        (msg.role, msg.reason, msg.generation_id) = struct.unpack_from(
            ofproto.OFP_ROLE_STATUS_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        # Role properties fill the rest of the message.
        msg.properties = []
        rest = msg.buf[ofproto.OFP_ROLE_STATUS_SIZE:]
        while rest:
            p, rest = OFPRoleProp.parse(rest)
            msg.properties.append(p)
        return msg
@_register_parser
@_set_msg_type(ofproto.OFPT_TABLE_STATUS)
class OFPTableStatus(MsgBase):
    """
    Table status message
    The switch notifies controller of change of table status.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    reason           One of the following values.
                     | OFPTR_VACANCY_DOWN
                     | OFPTR_VACANCY_UP
    table            ``OFPTableDesc`` instance
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPTableStatus, MAIN_DISPATCHER)
        def table(self, ev):
            msg = ev.msg
            dp = msg.datapath
            ofp = dp.ofproto
            if msg.reason == ofp.OFPTR_VACANCY_DOWN:
                reason = 'VACANCY_DOWN'
            elif msg.reason == ofp.OFPTR_VACANCY_UP:
                reason = 'VACANCY_UP'
            else:
                reason = 'unknown'
            self.logger.debug('OFPTableStatus received: reason=%s '
                              'table_id=%d config=0x%08x properties=%s',
                              reason, msg.table.table_id, msg.table.config,
                              repr(msg.table.properties))
    """
    def __init__(self, datapath, reason=None, table=None):
        super(OFPTableStatus, self).__init__(datapath)
        self.reason = reason
        self.table = table
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPTableStatus, cls).parser(datapath, version, msg_type,
                                                msg_len, xid, buf)
        # Reason byte follows the common header; the table description
        # follows the fixed part of the message.
        (msg.reason,) = struct.unpack_from(ofproto.OFP_TABLE_STATUS_0_PACK_STR,
                                           msg.buf, ofproto.OFP_HEADER_SIZE)
        msg.table = OFPTableDesc.parser(msg.buf,
                                        ofproto.OFP_TABLE_STATUS_0_SIZE)
        return msg
@_register_parser
@_set_msg_type(ofproto.OFPT_REQUESTFORWARD)
class OFPRequestForward(MsgInMsgBase):
    """
    Forwarded request message
    The switch forwards request messages from one controller to other
    controllers.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    request          ``OFPGroupMod`` or ``OFPMeterMod`` instance
    ================ ======================================================
    Example::
        @set_ev_cls(ofp_event.EventOFPRequestForward, MAIN_DISPATCHER)
        def request_forward_handler(self, ev):
            msg = ev.msg
            dp = msg.datapath
            ofp = dp.ofproto
            if msg.request.msg_type == ofp.OFPT_GROUP_MOD:
                self.logger.debug(
                    'OFPRequestForward received: request=OFPGroupMod('
                    'command=%d, type=%d, group_id=%d, buckets=%s)',
                    msg.request.command, msg.request.type,
                    msg.request.group_id, msg.request.buckets)
            elif msg.request.msg_type == ofp.OFPT_METER_MOD:
                self.logger.debug(
                    'OFPRequestForward received: request=OFPMeterMod('
                    'command=%d, flags=%d, meter_id=%d, bands=%s)',
                    msg.request.command, msg.request.flags,
                    msg.request.meter_id, msg.request.bands)
            else:
                self.logger.debug(
                    'OFPRequestForward received: request=Unknown')
    """
    def __init__(self, datapath, request=None):
        super(OFPRequestForward, self).__init__(datapath)
        self.request = request
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPRequestForward, cls).parser(
            datapath, version, msg_type, msg_len, xid, buf)
        # The payload is itself a complete OpenFlow message: re-parse it
        # from its own header.
        req_buf = buf[ofproto.OFP_HEADER_SIZE:]
        (_ver, _type, _len, _xid) = ofproto_parser.header(req_buf)
        msg.request = ofproto_parser.msg(
            datapath, _ver, _type, _len, _xid, req_buf)
        return msg
    def _serialize_body(self):
        # Only group/meter mods may be forwarded; embed the serialized
        # inner message after this message's own header.
        assert isinstance(self.request, (OFPGroupMod, OFPMeterMod))
        self.request.serialize()
        self.buf += self.request.buf
@_set_msg_type(ofproto.OFPT_PACKET_OUT)
class OFPPacketOut(MsgBase):
    """
    Packet-Out message
    The controller uses this message to send a packet out through the
    switch.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    buffer_id        ID assigned by datapath (OFP_NO_BUFFER if none)
    in_port          Packet's input port or ``OFPP_CONTROLLER``
    actions          list of OpenFlow action class
    data             Packet data
    ================ ======================================================
    Example::
        def send_packet_out(self, datapath, buffer_id, in_port):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            actions = [ofp_parser.OFPActionOutput(ofp.OFPP_FLOOD, 0)]
            req = ofp_parser.OFPPacketOut(datapath, buffer_id,
                                          in_port, actions)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, buffer_id=None, in_port=None, actions=None,
                 data=None, actions_len=None):
        # in_port is mandatory; use OFPP_CONTROLLER for packets that
        # originate at the controller.
        assert in_port is not None
        super(OFPPacketOut, self).__init__(datapath)
        self.buffer_id = buffer_id
        self.in_port = in_port
        # actions_len is recomputed during serialization; the constructor
        # argument is accepted only for interface compatibility.
        self.actions_len = 0
        self.actions = actions
        self.data = data
    def _serialize_body(self):
        self.actions_len = 0
        offset = ofproto.OFP_PACKET_OUT_SIZE
        # Serialize the actions first so the total actions length is known
        # before the fixed-size header fields are packed below.
        for a in self.actions:
            a.serialize(self.buf, offset)
            offset += a.len
            self.actions_len += a.len
        if self.data is not None:
            # Raw packet data may only be attached when no switch buffer
            # is referenced (0xffffffff == OFP_NO_BUFFER).
            assert self.buffer_id == 0xffffffff
            self.buf += self.data
        msg_pack_into(ofproto.OFP_PACKET_OUT_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE,
                      self.buffer_id, self.in_port, self.actions_len)
@_set_msg_type(ofproto.OFPT_FLOW_MOD)
class OFPFlowMod(MsgBase):
    """
    Modify Flow entry message
    The controller sends this message to modify the flow table.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    cookie           Opaque controller-issued identifier
    cookie_mask      Mask used to restrict the cookie bits that must match
                     when the command is ``OFPFC_MODIFY*`` or
                     ``OFPFC_DELETE*``
    table_id         ID of the table to put the flow in
    command          One of the following values.
                     | OFPFC_ADD
                     | OFPFC_MODIFY
                     | OFPFC_MODIFY_STRICT
                     | OFPFC_DELETE
                     | OFPFC_DELETE_STRICT
    idle_timeout     Idle time before discarding (seconds)
    hard_timeout     Max time before discarding (seconds)
    priority         Priority level of flow entry
    buffer_id        Buffered packet to apply to (or OFP_NO_BUFFER)
    out_port         For ``OFPFC_DELETE*`` commands, require matching
                     entries to include this as an output port
    out_group        For ``OFPFC_DELETE*`` commands, require matching
                     entries to include this as an output group
    flags            Bitmap of the following flags.
                     | OFPFF_SEND_FLOW_REM
                     | OFPFF_CHECK_OVERLAP
                     | OFPFF_RESET_COUNTS
                     | OFPFF_NO_PKT_COUNTS
                     | OFPFF_NO_BYT_COUNTS
    importance       Eviction precedence
    match            Instance of ``OFPMatch``
    instructions     list of ``OFPInstruction*`` instance
    ================ ======================================================
    Example::
        def send_flow_mod(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            cookie = cookie_mask = 0
            table_id = 0
            idle_timeout = hard_timeout = 0
            priority = 32768
            buffer_id = ofp.OFP_NO_BUFFER
            importance = 0
            match = ofp_parser.OFPMatch(in_port=1, eth_dst='ff:ff:ff:ff:ff:ff')
            actions = [ofp_parser.OFPActionOutput(ofp.OFPP_NORMAL, 0)]
            inst = [ofp_parser.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS,
                                                     actions)]
            req = ofp_parser.OFPFlowMod(datapath, cookie, cookie_mask,
                                        table_id, ofp.OFPFC_ADD,
                                        idle_timeout, hard_timeout,
                                        priority, buffer_id,
                                        ofp.OFPP_ANY, ofp.OFPG_ANY,
                                        ofp.OFPFF_SEND_FLOW_REM,
                                        importance,
                                        match, inst)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, cookie=0, cookie_mask=0, table_id=0,
                 command=ofproto.OFPFC_ADD,
                 idle_timeout=0, hard_timeout=0,
                 priority=ofproto.OFP_DEFAULT_PRIORITY,
                 buffer_id=ofproto.OFP_NO_BUFFER,
                 out_port=0, out_group=0, flags=0, importance=0,
                 match=None,
                 instructions=None):
        # Normalize None to a fresh list (avoids a shared mutable default).
        instructions = instructions if instructions else []
        super(OFPFlowMod, self).__init__(datapath)
        self.cookie = cookie
        self.cookie_mask = cookie_mask
        self.table_id = table_id
        self.command = command
        self.idle_timeout = idle_timeout
        self.hard_timeout = hard_timeout
        self.priority = priority
        self.buffer_id = buffer_id
        self.out_port = out_port
        self.out_group = out_group
        self.flags = flags
        self.importance = importance
        # An empty (match-all) OFPMatch is substituted when none is given.
        if match is None:
            match = OFPMatch()
        assert isinstance(match, OFPMatch)
        self.match = match
        for i in instructions:
            assert isinstance(i, OFPInstruction)
        self.instructions = instructions
    def _serialize_body(self):
        msg_pack_into(ofproto.OFP_FLOW_MOD_PACK_STR0, self.buf,
                      ofproto.OFP_HEADER_SIZE,
                      self.cookie, self.cookie_mask, self.table_id,
                      self.command, self.idle_timeout, self.hard_timeout,
                      self.priority, self.buffer_id, self.out_port,
                      self.out_group, self.flags, self.importance)
        # The match field is variable-length, so it (and the instructions
        # after it) start where the fixed part of the flow_mod ends.
        offset = (ofproto.OFP_FLOW_MOD_SIZE -
                  ofproto.OFP_MATCH_SIZE)
        match_len = self.match.serialize(self.buf, offset)
        offset += match_len
        for inst in self.instructions:
            inst.serialize(self.buf, offset)
            offset += inst.len
class OFPInstruction(StringifyMixin):
    """Base class for OpenFlow instructions.

    Concrete instruction classes register themselves into
    ``_INSTRUCTION_TYPES`` via ``register_instruction_type`` so that
    ``parser`` can dispatch on the wire-format ``type`` field.
    """
    # Maps wire instruction type -> instruction class.
    _INSTRUCTION_TYPES = {}

    @staticmethod
    def register_instruction_type(types):
        """Class decorator registering *cls* for each type in *types*."""
        def _register_instruction_type(cls):
            for type_ in types:
                OFPInstruction._INSTRUCTION_TYPES[type_] = cls
            return cls
        return _register_instruction_type

    @classmethod
    def parser(cls, buf, offset):
        """Parse one instruction from *buf* at *offset* and return it."""
        (type_, len_) = struct.unpack_from('!HH', buf, offset)
        cls_ = cls._INSTRUCTION_TYPES.get(type_)
        # Fail loudly on an unregistered instruction type instead of the
        # obscure AttributeError from calling .parser on None
        # (consistent with OFPAction.parser).
        assert cls_ is not None, 'unknown instruction type %d' % type_
        return cls_.parser(buf, offset)
@OFPInstruction.register_instruction_type([ofproto.OFPIT_GOTO_TABLE])
class OFPInstructionGotoTable(OFPInstruction):
    """
    Goto table instruction

    Directs the packet to ``table_id`` as the next table in the
    processing pipeline.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    table_id         Next table
    ================ ======================================================
    """

    def __init__(self, table_id, type_=None, len_=None):
        super(OFPInstructionGotoTable, self).__init__()
        # type/len are fixed for this instruction; the type_/len_
        # parameters exist only for interface compatibility.
        self.type = ofproto.OFPIT_GOTO_TABLE
        self.len = ofproto.OFP_INSTRUCTION_GOTO_TABLE_SIZE
        self.table_id = table_id

    @classmethod
    def parser(cls, buf, offset):
        _type, _len, table_id = struct.unpack_from(
            ofproto.OFP_INSTRUCTION_GOTO_TABLE_PACK_STR, buf, offset)
        return cls(table_id)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_INSTRUCTION_GOTO_TABLE_PACK_STR,
                      buf, offset,
                      self.type, self.len, self.table_id)
@OFPInstruction.register_instruction_type([ofproto.OFPIT_WRITE_METADATA])
class OFPInstructionWriteMetadata(OFPInstruction):
    """
    Write metadata instruction

    Writes the masked metadata value into the packet's metadata field.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    metadata         Metadata value to write
    metadata_mask    Metadata write bitmask
    ================ ======================================================
    """

    def __init__(self, metadata, metadata_mask, type_=None, len_=None):
        super(OFPInstructionWriteMetadata, self).__init__()
        # Fixed type/length for this instruction kind.
        self.type = ofproto.OFPIT_WRITE_METADATA
        self.len = ofproto.OFP_INSTRUCTION_WRITE_METADATA_SIZE
        self.metadata = metadata
        self.metadata_mask = metadata_mask

    @classmethod
    def parser(cls, buf, offset):
        _type, _len, metadata, metadata_mask = struct.unpack_from(
            ofproto.OFP_INSTRUCTION_WRITE_METADATA_PACK_STR, buf, offset)
        return cls(metadata, metadata_mask)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_INSTRUCTION_WRITE_METADATA_PACK_STR,
                      buf, offset,
                      self.type, self.len,
                      self.metadata, self.metadata_mask)
@OFPInstruction.register_instruction_type([ofproto.OFPIT_WRITE_ACTIONS,
                                           ofproto.OFPIT_APPLY_ACTIONS,
                                           ofproto.OFPIT_CLEAR_ACTIONS])
class OFPInstructionActions(OFPInstruction):
    """
    Actions instruction
    This instruction writes/applies/clears the actions.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    type             One of following values.
                     | OFPIT_WRITE_ACTIONS
                     | OFPIT_APPLY_ACTIONS
                     | OFPIT_CLEAR_ACTIONS
    actions          list of OpenFlow action class
    ================ ======================================================
    ``type`` attribute corresponds to ``type_`` parameter of __init__.
    """
    def __init__(self, type_, actions=None, len_=None):
        # Bug fix: the default actions=None previously raised TypeError in
        # the validation loop below.  Normalize to a fresh list, consistent
        # with OFPFlowMod/OFPGroupMod in this module.
        actions = actions if actions else []
        super(OFPInstructionActions, self).__init__()
        self.type = type_
        for a in actions:
            assert isinstance(a, OFPAction)
        self.actions = actions
    @classmethod
    def parser(cls, buf, offset):
        (type_, len_) = struct.unpack_from(
            ofproto.OFP_INSTRUCTION_ACTIONS_PACK_STR,
            buf, offset)
        offset += ofproto.OFP_INSTRUCTION_ACTIONS_SIZE
        actions = []
        # Walk the variable-length action list; each action carries its
        # own length used to advance through the buffer.
        actions_len = len_ - ofproto.OFP_INSTRUCTION_ACTIONS_SIZE
        while actions_len > 0:
            a = OFPAction.parser(buf, offset)
            actions.append(a)
            actions_len -= a.len
            offset += a.len
        inst = cls(type_, actions)
        # Preserve the length read off the wire.
        inst.len = len_
        return inst
    def serialize(self, buf, offset):
        # Serialize actions first so the total instruction length (with
        # padding to an 8-byte boundary) can be computed before the
        # instruction header is packed.
        action_offset = offset + ofproto.OFP_INSTRUCTION_ACTIONS_SIZE
        if self.actions:
            for a in self.actions:
                a.serialize(buf, action_offset)
                action_offset += a.len
        self.len = action_offset - offset
        pad_len = utils.round_up(self.len, 8) - self.len
        msg_pack_into("%dx" % pad_len, buf, action_offset)
        self.len += pad_len
        msg_pack_into(ofproto.OFP_INSTRUCTION_ACTIONS_PACK_STR,
                      buf, offset, self.type, self.len)
@OFPInstruction.register_instruction_type([ofproto.OFPIT_METER])
class OFPInstructionMeter(OFPInstruction):
    """
    Meter instruction

    Applies the meter identified by ``meter_id`` to the packet.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    meter_id         Meter instance
    ================ ======================================================
    """

    def __init__(self, meter_id=1, type_=None, len_=None):
        super(OFPInstructionMeter, self).__init__()
        # Fixed type/length for this instruction kind.
        self.type = ofproto.OFPIT_METER
        self.len = ofproto.OFP_INSTRUCTION_METER_SIZE
        self.meter_id = meter_id

    @classmethod
    def parser(cls, buf, offset):
        _type, _len, meter_id = struct.unpack_from(
            ofproto.OFP_INSTRUCTION_METER_PACK_STR, buf, offset)
        return cls(meter_id)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_INSTRUCTION_METER_PACK_STR,
                      buf, offset,
                      self.type, self.len, self.meter_id)
class OFPActionHeader(StringifyMixin):
    """Common header (wire type and length) shared by all actions."""

    def __init__(self, type_, len_):
        self.type = type_
        self.len = len_

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_HEADER_PACK_STR,
                      buf, offset,
                      self.type, self.len)
class OFPAction(OFPActionHeader):
    """Base class for OpenFlow actions with a type-dispatching parser."""
    # Maps wire action type -> action class (filled by the decorator).
    _ACTION_TYPES = {}

    @staticmethod
    def register_action_type(type_, len_):
        """Class decorator recording the wire type/length on the class
        and registering it for ``parser`` dispatch."""
        def _register_action_type(action_cls):
            action_cls.cls_action_type = type_
            action_cls.cls_action_len = len_
            OFPAction._ACTION_TYPES[type_] = action_cls
            return action_cls
        return _register_action_type

    def __init__(self):
        # Seed the header from the per-class registered type/length.
        klass = self.__class__
        super(OFPAction, self).__init__(klass.cls_action_type,
                                        klass.cls_action_len)

    @classmethod
    def parser(cls, buf, offset):
        """Parse one action from *buf* at *offset* and return it."""
        (type_, len_) = struct.unpack_from(
            ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
        action_cls = cls._ACTION_TYPES.get(type_)
        assert action_cls is not None
        return action_cls.parser(buf, offset)
@OFPAction.register_action_type(ofproto.OFPAT_OUTPUT,
                                ofproto.OFP_ACTION_OUTPUT_SIZE)
class OFPActionOutput(OFPAction):
    """
    Output action

    Forwards the packet out of the given switch port.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    port             Output port
    max_len          Max length to send to controller
    ================ ======================================================
    """

    def __init__(self, port, max_len=ofproto.OFPCML_MAX,
                 type_=None, len_=None):
        super(OFPActionOutput, self).__init__()
        self.port = port
        self.max_len = max_len

    @classmethod
    def parser(cls, buf, offset):
        _type, _len, port, max_len = struct.unpack_from(
            ofproto.OFP_ACTION_OUTPUT_PACK_STR, buf, offset)
        return cls(port, max_len)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_OUTPUT_PACK_STR,
                      buf, offset,
                      self.type, self.len, self.port, self.max_len)
@OFPAction.register_action_type(ofproto.OFPAT_GROUP,
                                ofproto.OFP_ACTION_GROUP_SIZE)
class OFPActionGroup(OFPAction):
    """
    Group action

    Processes the packet through the group identified by ``group_id``.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    group_id         Group identifier
    ================ ======================================================
    """

    def __init__(self, group_id=0, type_=None, len_=None):
        super(OFPActionGroup, self).__init__()
        self.group_id = group_id

    @classmethod
    def parser(cls, buf, offset):
        _type, _len, group_id = struct.unpack_from(
            ofproto.OFP_ACTION_GROUP_PACK_STR, buf, offset)
        return cls(group_id)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_GROUP_PACK_STR,
                      buf, offset,
                      self.type, self.len, self.group_id)
@OFPAction.register_action_type(ofproto.OFPAT_SET_QUEUE,
                                ofproto.OFP_ACTION_SET_QUEUE_SIZE)
class OFPActionSetQueue(OFPAction):
    """
    Set queue action

    Maps the flow to an already-configured queue on a port by setting
    the queue id used for the packets.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    queue_id         Queue ID for the packets
    ================ ======================================================
    """

    def __init__(self, queue_id, type_=None, len_=None):
        super(OFPActionSetQueue, self).__init__()
        self.queue_id = queue_id

    @classmethod
    def parser(cls, buf, offset):
        _type, _len, queue_id = struct.unpack_from(
            ofproto.OFP_ACTION_SET_QUEUE_PACK_STR, buf, offset)
        return cls(queue_id)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_SET_QUEUE_PACK_STR,
                      buf, offset,
                      self.type, self.len, self.queue_id)
@OFPAction.register_action_type(ofproto.OFPAT_SET_MPLS_TTL,
                                ofproto.OFP_ACTION_MPLS_TTL_SIZE)
class OFPActionSetMplsTtl(OFPAction):
    """
    Set MPLS TTL action

    Sets the packet's MPLS TTL to ``mpls_ttl``.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    mpls_ttl         MPLS TTL
    ================ ======================================================
    """

    def __init__(self, mpls_ttl, type_=None, len_=None):
        super(OFPActionSetMplsTtl, self).__init__()
        self.mpls_ttl = mpls_ttl

    @classmethod
    def parser(cls, buf, offset):
        _type, _len, mpls_ttl = struct.unpack_from(
            ofproto.OFP_ACTION_MPLS_TTL_PACK_STR, buf, offset)
        return cls(mpls_ttl)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_MPLS_TTL_PACK_STR,
                      buf, offset,
                      self.type, self.len, self.mpls_ttl)
@OFPAction.register_action_type(ofproto.OFPAT_DEC_MPLS_TTL,
                                ofproto.OFP_ACTION_HEADER_SIZE)
class OFPActionDecMplsTtl(OFPAction):
    """
    Decrement MPLS TTL action

    Decrements the packet's MPLS TTL.
    """

    def __init__(self, type_=None, len_=None):
        super(OFPActionDecMplsTtl, self).__init__()

    @classmethod
    def parser(cls, buf, offset):
        # Header-only action: nothing beyond type/length to decode.
        struct.unpack_from(ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
        return cls()
@OFPAction.register_action_type(ofproto.OFPAT_SET_NW_TTL,
                                ofproto.OFP_ACTION_NW_TTL_SIZE)
class OFPActionSetNwTtl(OFPAction):
    """
    Set IP TTL action

    Sets the packet's IP TTL to ``nw_ttl``.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    nw_ttl           IP TTL
    ================ ======================================================
    """

    def __init__(self, nw_ttl, type_=None, len_=None):
        super(OFPActionSetNwTtl, self).__init__()
        self.nw_ttl = nw_ttl

    @classmethod
    def parser(cls, buf, offset):
        _type, _len, nw_ttl = struct.unpack_from(
            ofproto.OFP_ACTION_NW_TTL_PACK_STR, buf, offset)
        return cls(nw_ttl)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_NW_TTL_PACK_STR,
                      buf, offset,
                      self.type, self.len, self.nw_ttl)
@OFPAction.register_action_type(ofproto.OFPAT_DEC_NW_TTL,
                                ofproto.OFP_ACTION_HEADER_SIZE)
class OFPActionDecNwTtl(OFPAction):
    """
    Decrement IP TTL action

    Decrements the packet's IP TTL.
    """

    def __init__(self, type_=None, len_=None):
        super(OFPActionDecNwTtl, self).__init__()

    @classmethod
    def parser(cls, buf, offset):
        # Header-only action: nothing beyond type/length to decode.
        struct.unpack_from(ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
        return cls()
@OFPAction.register_action_type(ofproto.OFPAT_COPY_TTL_OUT,
                                ofproto.OFP_ACTION_HEADER_SIZE)
class OFPActionCopyTtlOut(OFPAction):
    """
    Copy TTL Out action

    Copies the TTL from the next-to-outermost header with TTL to the
    outermost header with TTL.
    """

    def __init__(self, type_=None, len_=None):
        super(OFPActionCopyTtlOut, self).__init__()

    @classmethod
    def parser(cls, buf, offset):
        # Header-only action: nothing beyond type/length to decode.
        struct.unpack_from(ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
        return cls()
@OFPAction.register_action_type(ofproto.OFPAT_COPY_TTL_IN,
                                ofproto.OFP_ACTION_HEADER_SIZE)
class OFPActionCopyTtlIn(OFPAction):
    """
    Copy TTL In action

    Copies the TTL from the outermost header with TTL to the
    next-to-outermost header with TTL.
    """

    def __init__(self, type_=None, len_=None):
        super(OFPActionCopyTtlIn, self).__init__()

    @classmethod
    def parser(cls, buf, offset):
        # Header-only action: nothing beyond type/length to decode.
        struct.unpack_from(ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
        return cls()
@OFPAction.register_action_type(ofproto.OFPAT_PUSH_VLAN,
                                ofproto.OFP_ACTION_PUSH_SIZE)
class OFPActionPushVlan(OFPAction):
    """
    Push VLAN action

    Pushes a new VLAN tag onto the packet.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    ethertype        Ether type.  The default is 802.1Q. (0x8100)
    ================ ======================================================
    """

    def __init__(self, ethertype=ether.ETH_TYPE_8021Q, type_=None, len_=None):
        super(OFPActionPushVlan, self).__init__()
        self.ethertype = ethertype

    @classmethod
    def parser(cls, buf, offset):
        _type, _len, ethertype = struct.unpack_from(
            ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset)
        return cls(ethertype)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_PUSH_PACK_STR,
                      buf, offset,
                      self.type, self.len, self.ethertype)
@OFPAction.register_action_type(ofproto.OFPAT_PUSH_MPLS,
                                ofproto.OFP_ACTION_PUSH_SIZE)
class OFPActionPushMpls(OFPAction):
    """
    Push MPLS action

    Pushes a new MPLS header onto the packet.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    ethertype        Ether type
    ================ ======================================================
    """

    def __init__(self, ethertype=ether.ETH_TYPE_MPLS, type_=None, len_=None):
        super(OFPActionPushMpls, self).__init__()
        self.ethertype = ethertype

    @classmethod
    def parser(cls, buf, offset):
        _type, _len, ethertype = struct.unpack_from(
            ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset)
        return cls(ethertype)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_PUSH_PACK_STR,
                      buf, offset,
                      self.type, self.len, self.ethertype)
@OFPAction.register_action_type(ofproto.OFPAT_POP_VLAN,
                                ofproto.OFP_ACTION_HEADER_SIZE)
class OFPActionPopVlan(OFPAction):
    """
    Pop VLAN action

    Pops the outermost VLAN tag from the packet.
    """

    def __init__(self, type_=None, len_=None):
        super(OFPActionPopVlan, self).__init__()

    @classmethod
    def parser(cls, buf, offset):
        # Header-only action: nothing beyond type/length to decode.
        struct.unpack_from(ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
        return cls()
@OFPAction.register_action_type(ofproto.OFPAT_POP_MPLS,
                                ofproto.OFP_ACTION_POP_MPLS_SIZE)
class OFPActionPopMpls(OFPAction):
    """
    Pop MPLS action

    Pops the outermost MPLS header from the packet; ``ethertype``
    gives the Ether type of the payload that is exposed.
    """

    def __init__(self, ethertype=ether.ETH_TYPE_IP, type_=None, len_=None):
        super(OFPActionPopMpls, self).__init__()
        self.ethertype = ethertype

    @classmethod
    def parser(cls, buf, offset):
        _type, _len, ethertype = struct.unpack_from(
            ofproto.OFP_ACTION_POP_MPLS_PACK_STR, buf, offset)
        return cls(ethertype)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_POP_MPLS_PACK_STR,
                      buf, offset,
                      self.type, self.len, self.ethertype)
@OFPAction.register_action_type(ofproto.OFPAT_SET_FIELD,
                                ofproto.OFP_ACTION_SET_FIELD_SIZE)
class OFPActionSetField(OFPAction):
    """
    Set field action
    This action modifies a header field in the packet.
    The set of keywords available for this is same as OFPMatch.
    Example::
        set_field = OFPActionSetField(eth_src="00:00:00:00:00:00")
    """
    def __init__(self, field=None, **kwargs):
        # NOTE(review): the positional ``field`` argument is not used
        # here; the field is given as exactly one keyword argument
        # (OXM field name -> value), as in OFPMatch.
        super(OFPActionSetField, self).__init__()
        assert len(kwargs) == 1
        key = list(kwargs.keys())[0]
        value = kwargs[key]
        assert isinstance(key, (str, six.text_type))
        assert not isinstance(value, tuple)  # no mask
        self.key = key
        self.value = value
    @classmethod
    def parser(cls, buf, offset):
        (type_, len_) = struct.unpack_from(
            ofproto.OFP_ACTION_SET_FIELD_PACK_STR, buf, offset)
        # The OXM TLV starts right after the 4-byte action header;
        # convert it back to the user-visible (name, value) form.
        (n, value, mask, _len) = ofproto.oxm_parse(buf, offset + 4)
        k, uv = ofproto.oxm_to_user(n, value, mask)
        action = cls(**{k: uv})
        action.len = len_
        return action
    def serialize(self, buf, offset):
        n, value, mask = ofproto.oxm_from_user(self.key, self.value)
        len_ = ofproto.oxm_serialize(n, value, mask, buf, offset + 4)
        # Total action length is padded up to an 8-byte boundary.
        self.len = utils.round_up(4 + len_, 8)
        msg_pack_into('!HH', buf, offset, self.type, self.len)
        pad_len = self.len - (4 + len_)
        msg_pack_into("%dx" % pad_len, buf, offset + 4 + len_)
    def to_jsondict(self):
        # Serialize the single (key, value) pair via the OXM helper so
        # the JSON form round-trips through from_jsondict below.
        return {
            self.__class__.__name__: {
                'field': ofproto.oxm_to_jsondict(self.key, self.value),
                "len": self.len,
                "type": self.type
            }
        }
    @classmethod
    def from_jsondict(cls, dict_):
        k, v = ofproto.oxm_from_jsondict(dict_['field'])
        return OFPActionSetField(**{k: v})
    def stringify_attrs(self):
        yield (self.key, self.value)
@OFPAction.register_action_type(ofproto.OFPAT_PUSH_PBB,
                                ofproto.OFP_ACTION_PUSH_SIZE)
class OFPActionPushPbb(OFPAction):
    """
    Push PBB action

    Pushes a new PBB header onto the packet.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    ethertype        Ether type
    ================ ======================================================
    """

    def __init__(self, ethertype, type_=None, len_=None):
        super(OFPActionPushPbb, self).__init__()
        self.ethertype = ethertype

    @classmethod
    def parser(cls, buf, offset):
        _type, _len, ethertype = struct.unpack_from(
            ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset)
        return cls(ethertype)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_PUSH_PACK_STR,
                      buf, offset,
                      self.type, self.len, self.ethertype)
@OFPAction.register_action_type(ofproto.OFPAT_POP_PBB,
                                ofproto.OFP_ACTION_HEADER_SIZE)
class OFPActionPopPbb(OFPAction):
    """
    Pop PBB action

    Pops the outermost PBB service instance header from the packet.
    """

    def __init__(self, type_=None, len_=None):
        super(OFPActionPopPbb, self).__init__()

    @classmethod
    def parser(cls, buf, offset):
        # Header-only action: nothing beyond type/length to decode.
        struct.unpack_from(ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
        return cls()
@OFPAction.register_action_type(
    ofproto.OFPAT_EXPERIMENTER,
    ofproto.OFP_ACTION_EXPERIMENTER_HEADER_SIZE)
class OFPActionExperimenter(OFPAction):
    """
    Experimenter action
    This action is an extensible action for the experimenter.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    experimenter     Experimenter ID
    ================ ======================================================
    .. Note::
        For the list of the supported Nicira experimenter actions,
        please refer to :ref:`ryu.ofproto.nx_actions <nx_actions_structures>`.
    """
    def __init__(self, experimenter):
        super(OFPActionExperimenter, self).__init__()
        self.type = ofproto.OFPAT_EXPERIMENTER
        self.experimenter = experimenter
        # Experimenter actions are variable-length; len is filled in
        # by parser() or by a subclass before serialization.
        self.len = None
    @classmethod
    def parser(cls, buf, offset):
        (type_, len_, experimenter) = struct.unpack_from(
            ofproto.OFP_ACTION_EXPERIMENTER_HEADER_PACK_STR, buf, offset)
        # Payload following the experimenter header, up to the action's
        # declared length.
        data = buf[(offset + ofproto.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                    ): offset + len_]
        # Nicira extension actions get decoded fully; any other
        # experimenter ID is kept as an opaque payload wrapper.
        if experimenter == ofproto_common.NX_EXPERIMENTER_ID:
            obj = NXAction.parse(data)
        else:
            obj = OFPActionExperimenterUnknown(experimenter, data)
        obj.len = len_
        return obj
    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_EXPERIMENTER_HEADER_PACK_STR,
                      buf, offset, self.type, self.len, self.experimenter)
class OFPActionExperimenterUnknown(OFPActionExperimenter):
    """Catch-all for experimenter actions with an unrecognized ID.

    Created by ``OFPActionExperimenter.parser``; ``data`` holds the raw
    action payload that follows the experimenter header.
    """
    def __init__(self, experimenter, data=None, type_=None, len_=None):
        super(OFPActionExperimenterUnknown,
              self).__init__(experimenter=experimenter)
        self.data = data
    def serialize(self, buf, offset):
        # fixup
        data = self.data
        if data is None:
            data = bytearray()
        self.len = (utils.round_up(len(data), 8) +
                    ofproto.OFP_ACTION_EXPERIMENTER_HEADER_SIZE)
        super(OFPActionExperimenterUnknown, self).serialize(buf, offset)
        # Bug fix: pack the local ``data`` (normalized above, never None)
        # rather than ``self.data``, so serializing with data=None no
        # longer raises TypeError on len(None).
        msg_pack_into('!%ds' % len(data),
                      buf,
                      offset + ofproto.OFP_ACTION_EXPERIMENTER_HEADER_SIZE,
                      data)
@_register_parser
@_set_msg_type(ofproto.OFPT_GROUP_MOD)
class OFPGroupMod(MsgBase):
    """
    Modify group entry message
    The controller sends this message to modify the group table.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    command          One of the following values.
                     | OFPGC_ADD
                     | OFPGC_MODIFY
                     | OFPGC_DELETE
    type             One of the following values.
                     | OFPGT_ALL
                     | OFPGT_SELECT
                     | OFPGT_INDIRECT
                     | OFPGT_FF
    group_id         Group identifier
    buckets          list of ``OFPBucket``
    ================ ======================================================
    ``type`` attribute corresponds to ``type_`` parameter of __init__.
    Example::
        def send_group_mod(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            port = 1
            max_len = 2000
            actions = [ofp_parser.OFPActionOutput(port, max_len)]
            weight = 100
            watch_port = 0
            watch_group = 0
            buckets = [ofp_parser.OFPBucket(weight, watch_port, watch_group,
                                            actions)]
            group_id = 1
            req = ofp_parser.OFPGroupMod(datapath, ofp.OFPGC_ADD,
                                         ofp.OFPGT_SELECT, group_id, buckets)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, command=ofproto.OFPGC_ADD,
                 type_=ofproto.OFPGT_ALL, group_id=0, buckets=None):
        # Normalize None to a fresh list (avoids a shared mutable default).
        buckets = buckets if buckets else []
        super(OFPGroupMod, self).__init__(datapath)
        self.command = command
        self.type = type_
        self.group_id = group_id
        self.buckets = buckets
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPGroupMod, cls).parser(
            datapath, version, msg_type, msg_len, xid, buf)
        (msg.command, msg.type, msg.group_id) = struct.unpack_from(
            ofproto.OFP_GROUP_MOD_PACK_STR, buf, ofproto.OFP_HEADER_SIZE)
        offset = ofproto.OFP_GROUP_MOD_SIZE
        msg.buckets = []
        # Buckets are variable-length; walk to the end of the message,
        # advancing by each bucket's own length field.
        while offset < msg.msg_len:
            bucket = OFPBucket.parser(buf, offset)
            msg.buckets.append(bucket)
            offset += bucket.len
        return msg
    def _serialize_body(self):
        msg_pack_into(ofproto.OFP_GROUP_MOD_PACK_STR, self.buf,
                      ofproto.OFP_HEADER_SIZE,
                      self.command, self.type, self.group_id)
        offset = ofproto.OFP_GROUP_MOD_SIZE
        for b in self.buckets:
            b.serialize(self.buf, offset)
            offset += b.len
class OFPPortModProp(OFPPropBase):
    # Base class for OFPPortMod properties.  _TYPES maps property type
    # to the concrete property class (per-subclass registry).
    _TYPES = {}
class OFPPortModPropEthernet(OFPPortModProp):
    """Ethernet property of OFPPortMod carrying the advertised features."""

    def __init__(self, type_=None, length=None, advertise=None):
        self.type = type_
        self.advertise = advertise

    def serialize(self):
        # fixup: the property length is fixed by the pack format.
        self.length = struct.calcsize(
            ofproto.OFP_PORT_MOD_PROP_ETHERNET_PACK_STR)
        buf = bytearray()
        msg_pack_into(ofproto.OFP_PORT_MOD_PROP_ETHERNET_PACK_STR,
                      buf, 0,
                      self.type, self.length, self.advertise)
        return buf
class OFPPortModPropOptical(OFPPortModProp):
    """Optical transport property of OFPPortMod."""

    def __init__(self, type_=None, length=None, configure=None,
                 freq_lmda=None, fl_offset=None, grid_span=None,
                 tx_pwr=None):
        self.type = type_
        self.length = length
        self.configure = configure
        self.freq_lmda = freq_lmda
        self.fl_offset = fl_offset
        self.grid_span = grid_span
        self.tx_pwr = tx_pwr

    def serialize(self):
        # fixup: the property length is fixed by the pack format.
        self.length = struct.calcsize(
            ofproto.OFP_PORT_MOD_PROP_OPTICAL_PACK_STR)
        buf = bytearray()
        msg_pack_into(ofproto.OFP_PORT_MOD_PROP_OPTICAL_PACK_STR,
                      buf, 0,
                      self.type, self.length, self.configure,
                      self.freq_lmda, self.fl_offset, self.grid_span,
                      self.tx_pwr)
        return buf
class OFPPortModPropExperimenter(OFPPropCommonExperimenter4ByteData):
    # Experimenter port-mod property; all behavior is inherited from
    # OFPPropCommonExperimenter4ByteData.
    pass
@_set_msg_type(ofproto.OFPT_PORT_MOD)
class OFPPortMod(MsgBase):
    """
    Port modification message
    The controller sends this message to modify the behavior of the port.
    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    port_no          Port number to modify
    hw_addr          The hardware address that must be the same as hw_addr
                     of ``OFPPort`` of ``OFPSwitchFeatures``
    config           Bitmap of configuration flags.
                     | OFPPC_PORT_DOWN
                     | OFPPC_NO_RECV
                     | OFPPC_NO_FWD
                     | OFPPC_NO_PACKET_IN
    mask             Bitmap of configuration flags above to be changed
    properties       List of ``OFPPortModProp`` subclass instance
    ================ ======================================================
    Example::
        def send_port_mod(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            port_no = 3
            hw_addr = 'fa:c8:e8:76:1d:7e'
            config = 0
            mask = (ofp.OFPPC_PORT_DOWN | ofp.OFPPC_NO_RECV |
                    ofp.OFPPC_NO_FWD | ofp.OFPPC_NO_PACKET_IN)
            advertise = (ofp.OFPPF_10MB_HD | ofp.OFPPF_100MB_FD |
                         ofp.OFPPF_1GB_FD | ofp.OFPPF_COPPER |
                         ofp.OFPPF_AUTONEG | ofp.OFPPF_PAUSE |
                         ofp.OFPPF_PAUSE_ASYM)
            properties = [ofp_parser.OFPPortModPropEthernet(advertise)]
            req = ofp_parser.OFPPortMod(datapath, port_no, hw_addr, config,
                                        mask, properties)
            datapath.send_msg(req)
    """
    # hw_addr is stringified as ASCII text (e.g. 'aa:bb:cc:dd:ee:ff').
    _TYPE = {
        'ascii': [
            'hw_addr',
        ]
    }
    def __init__(self, datapath, port_no=0, hw_addr='00:00:00:00:00:00',
                 config=0, mask=0, properties=None):
        super(OFPPortMod, self).__init__(datapath)
        self.port_no = port_no
        self.hw_addr = hw_addr
        self.config = config
        self.mask = mask
        self.properties = properties or []
    def _serialize_body(self):
        # Properties serialize themselves into standalone byte strings,
        # appended after the fixed-size port_mod header.
        bin_props = bytearray()
        for p in self.properties:
            bin_props += p.serialize()
        msg_pack_into(ofproto.OFP_PORT_MOD_PACK_STR, self.buf,
                      ofproto.OFP_HEADER_SIZE,
                      self.port_no, addrconv.mac.text_to_bin(self.hw_addr),
                      self.config,
                      self.mask)
        self.buf += bin_props
class OFPBucket(StringifyMixin):
    # One bucket of an OFPGroupMod: a weighted action list with an
    # optional liveness watch port/group (used by fast-failover groups).
    def __init__(self, weight=0, watch_port=ofproto.OFPP_ANY,
                 watch_group=ofproto.OFPG_ANY, actions=None, len_=None):
        super(OFPBucket, self).__init__()
        self.weight = weight
        self.watch_port = watch_port
        self.watch_group = watch_group
        # NOTE(review): actions is stored as-is; serialize() iterates it,
        # so callers are expected to pass a list (parser always does).
        self.actions = actions
    @classmethod
    def parser(cls, buf, offset):
        (len_, weight, watch_port, watch_group) = struct.unpack_from(
            ofproto.OFP_BUCKET_PACK_STR, buf, offset)
        msg = cls(weight, watch_port, watch_group, [])
        msg.len = len_
        # Decode the trailing variable-length action list, bounded by
        # the bucket's own length field.
        length = ofproto.OFP_BUCKET_SIZE
        offset += ofproto.OFP_BUCKET_SIZE
        while length < msg.len:
            action = OFPAction.parser(buf, offset)
            msg.actions.append(action)
            offset += action.len
            length += action.len
        return msg
    def serialize(self, buf, offset):
        # Serialize actions first so the bucket length (rounded up to an
        # 8-byte boundary) is known before the header is packed.
        action_offset = offset + ofproto.OFP_BUCKET_SIZE
        action_len = 0
        for a in self.actions:
            a.serialize(buf, action_offset)
            action_offset += a.len
            action_len += a.len
        self.len = utils.round_up(ofproto.OFP_BUCKET_SIZE + action_len, 8)
        msg_pack_into(ofproto.OFP_BUCKET_PACK_STR, buf, offset,
                      self.len, self.weight, self.watch_port,
                      self.watch_group)
@_set_msg_type(ofproto.OFPT_ROLE_REQUEST)
class OFPRoleRequest(MsgBase):
    """
    Role request message

    Sent by the controller to change its role towards the switch.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    role             One of the following values.
                     | OFPCR_ROLE_NOCHANGE
                     | OFPCR_ROLE_EQUAL
                     | OFPCR_ROLE_MASTER
                     | OFPCR_ROLE_SLAVE
    generation_id    Master Election Generation ID
    ================ ======================================================

    Example::

        def send_role_request(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            req = ofp_parser.OFPRoleRequest(datapath, ofp.OFPCR_ROLE_EQUAL, 0)
            datapath.send_msg(req)
    """

    def __init__(self, datapath, role=None, generation_id=None):
        super(OFPRoleRequest, self).__init__(datapath)
        self.role = role
        self.generation_id = generation_id

    def _serialize_body(self):
        # Both fields are mandatory on the wire.
        assert self.role is not None
        assert self.generation_id is not None
        msg_pack_into(ofproto.OFP_ROLE_REQUEST_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE,
                      self.role, self.generation_id)
@_register_parser
@_set_msg_type(ofproto.OFPT_ROLE_REPLY)
class OFPRoleReply(MsgBase):
    """
    Role reply message

    The switch responds with this message to a role request.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    role             One of the following values.
                     | OFPCR_ROLE_NOCHANGE
                     | OFPCR_ROLE_EQUAL
                     | OFPCR_ROLE_MASTER
                     | OFPCR_ROLE_SLAVE
    generation_id    Master Election Generation ID
    ================ ======================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPRoleReply, MAIN_DISPATCHER)
        def role_reply_handler(self, ev):
            msg = ev.msg
            dp = msg.datapath
            ofp = dp.ofproto
            if msg.role == ofp.OFPCR_ROLE_NOCHANGE:
                role = 'NOCHANGE'
            elif msg.role == ofp.OFPCR_ROLE_EQUAL:
                role = 'EQUAL'
            elif msg.role == ofp.OFPCR_ROLE_MASTER:
                role = 'MASTER'
            elif msg.role == ofp.OFPCR_ROLE_SLAVE:
                role = 'SLAVE'
            else:
                role = 'unknown'
            self.logger.debug('OFPRoleReply received: '
                              'role=%s generation_id=%d',
                              role, msg.generation_id)
    """
    def __init__(self, datapath, role=None, generation_id=None):
        super(OFPRoleReply, self).__init__(datapath)
        self.role = role
        self.generation_id = generation_id
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPRoleReply, cls).parser(datapath, version,
                                              msg_type, msg_len, xid,
                                              buf)
        # request and reply share the same body layout, hence the
        # OFP_ROLE_REQUEST pack string
        (msg.role, msg.generation_id) = struct.unpack_from(
            ofproto.OFP_ROLE_REQUEST_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        return msg
class OFPAsyncConfigProp(OFPPropBase):
    # Base for async-config properties; per-type subclasses register
    # themselves here via OFPPropBase.register_type.
    _TYPES = {}
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_PACKET_IN_SLAVE)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_PACKET_IN_MASTER)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_PORT_STATUS_SLAVE)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_PORT_STATUS_MASTER)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_FLOW_REMOVED_SLAVE)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_FLOW_REMOVED_MASTER)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_ROLE_STATUS_SLAVE)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_ROLE_STATUS_MASTER)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_TABLE_STATUS_SLAVE)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_TABLE_STATUS_MASTER)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_REQUESTFORWARD_SLAVE)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_REQUESTFORWARD_MASTER)
class OFPAsyncConfigPropReasons(OFPAsyncConfigProp):
    """Async-config property carrying a reason bitmask.

    The same class handles every OFPACPT_* reason-mask property; the
    ``type`` attribute distinguishes which one an instance represents.
    """
    def __init__(self, type_=None, length=None, mask=None):
        self.type = type_
        self.length = length
        self.mask = mask
    @classmethod
    def parser(cls, buf):
        reasons = cls()
        (reasons.type, reasons.length, reasons.mask) = struct.unpack_from(
            ofproto.OFP_ASYNC_CONFIG_PROP_REASONS_PACK_STR, buf, 0)
        return reasons
    def serialize(self):
        # fixup
        self.length = ofproto.OFP_ASYNC_CONFIG_PROP_REASONS_SIZE
        buf = bytearray()
        msg_pack_into(ofproto.OFP_ASYNC_CONFIG_PROP_REASONS_PACK_STR, buf, 0,
                      self.type, self.length, self.mask)
        return buf
# NOTE(review): registered under OFPTFPT_EXPERIMENTER_* (table-feature
# property constants) rather than OFPACPT_* ones — presumably the numeric
# values coincide (0xfffe/0xffff); verify against ofproto_v1_4.
@OFPAsyncConfigProp.register_type(ofproto.OFPTFPT_EXPERIMENTER_SLAVE)
@OFPAsyncConfigProp.register_type(ofproto.OFPTFPT_EXPERIMENTER_MASTER)
class OFPAsyncConfigPropExperimenter(OFPPropCommonExperimenter4ByteData):
    """Experimenter async-config property."""
    pass
@_set_msg_type(ofproto.OFPT_GET_ASYNC_REQUEST)
class OFPGetAsyncRequest(MsgBase):
    """
    Get asynchronous configuration request message

    The controller uses this message to query the asynchronous message.

    This message carries no body beyond the OpenFlow header.

    Example::

        def send_get_async_request(self, datapath):
            ofp_parser = datapath.ofproto_parser
            req = ofp_parser.OFPGetAsyncRequest(datapath)
            datapath.send_msg(req)
    """
    def __init__(self, datapath):
        super(OFPGetAsyncRequest, self).__init__(datapath)
@_register_parser
@_set_msg_type(ofproto.OFPT_GET_ASYNC_REPLY)
class OFPGetAsyncReply(MsgBase):
    """
    Get asynchronous configuration reply message

    The switch responds with this message to a get asynchronous
    configuration request.

    ================== ====================================================
    Attribute          Description
    ================== ====================================================
    properties         List of ``OFPAsyncConfigProp`` subclass instances
    ================== ====================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPGetAsyncReply, MAIN_DISPATCHER)
        def get_async_reply_handler(self, ev):
            msg = ev.msg
            self.logger.debug('OFPGetAsyncReply received: '
                              'properties=%s', repr(msg.properties))
    """
    def __init__(self, datapath, properties=None):
        super(OFPGetAsyncReply, self).__init__(datapath)
        self.properties = properties
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPGetAsyncReply, cls).parser(datapath, version,
                                                  msg_type, msg_len,
                                                  xid, buf)
        msg.properties = []
        # the body is a plain list of TLV properties
        rest = msg.buf[ofproto.OFP_HEADER_SIZE:]
        while rest:
            p, rest = OFPAsyncConfigProp.parse(rest)
            msg.properties.append(p)
        return msg
@_set_msg_type(ofproto.OFPT_SET_ASYNC)
class OFPSetAsync(MsgBase):
    """
    Set asynchronous configuration message

    The controller sends this message to set the asynchronous messages that
    it wants to receive on a given OpenFlow channel.

    ================== ====================================================
    Attribute          Description
    ================== ====================================================
    properties         List of ``OFPAsyncConfigProp`` subclass instances
    ================== ====================================================

    Example::

        def send_set_async(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            properties = [ofp_parser.OFPAsyncConfigPropReasons(
                              8, ofp_parser.OFPACPT_PACKET_IN_SLAVE,
                              (ofp_parser.OFPR_APPLY_ACTION |
                               ofp_parser.OFPR_INVALID_TTL)),
                          ofp_parser.OFPAsyncConfigPropExperimenter(
                              ofp.OFPTFPT_EXPERIMENTER_MASTER,
                              16, 100, 2, bytearray())]
            req = ofp_parser.OFPSetAsync(datapath, properties)
            datapath.send_msg(req)
    """
    def __init__(self, datapath, properties=None):
        super(OFPSetAsync, self).__init__(datapath)
        self.properties = properties
    def _serialize_body(self):
        # body is just the concatenation of the serialized properties
        bin_props = bytearray()
        for p in self.properties:
            bin_props += p.serialize()
        self.buf += bin_props
@_set_msg_type(ofproto.OFPT_BUNDLE_CONTROL)
class OFPBundleCtrlMsg(MsgBase):
    """
    Bundle control message

    The controller uses this message to create, destroy and commit bundles

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    bundle_id        Id of the bundle
    type             One of the following values.
                     | OFPBCT_OPEN_REQUEST
                     | OFPBCT_OPEN_REPLY
                     | OFPBCT_CLOSE_REQUEST
                     | OFPBCT_CLOSE_REPLY
                     | OFPBCT_COMMIT_REQUEST
                     | OFPBCT_COMMIT_REPLY
                     | OFPBCT_DISCARD_REQUEST
                     | OFPBCT_DISCARD_REPLY
    flags            Bitmap of the following flags.
                     | OFPBF_ATOMIC
                     | OFPBF_ORDERED
    properties       List of ``OFPBundleProp`` subclass instance
    ================ ======================================================

    Example::

        def send_bundle_control(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            req = ofp_parser.OFPBundleCtrlMsg(datapath, 7,
                                              ofp.OFPBCT_OPEN_REQUEST,
                                              [ofp.OFPBF_ATOMIC], [])
            datapath.send_msg(req)
    """
    def __init__(self, datapath, bundle_id, type_, flags, properties):
        super(OFPBundleCtrlMsg, self).__init__(datapath)
        self.bundle_id = bundle_id
        self.type = type_
        self.flags = flags
        self.properties = properties
    def _serialize_body(self):
        # fixed header fields followed by the serialized TLV properties
        bin_props = bytearray()
        for p in self.properties:
            bin_props += p.serialize()
        msg_pack_into(ofproto.OFP_BUNDLE_CTRL_MSG_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE, self.bundle_id,
                      self.type, self.flags)
        self.buf += bin_props
@_set_msg_type(ofproto.OFPT_BUNDLE_ADD_MESSAGE)
class OFPBundleAddMsg(MsgInMsgBase):
    """
    Bundle add message

    The controller uses this message to add an inner message to a bundle

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    bundle_id        Id of the bundle
    flags            Bitmap of the following flags.
                     | OFPBF_ATOMIC
                     | OFPBF_ORDERED
    message          ``MsgBase`` subclass instance
    properties       List of ``OFPBundleProp`` subclass instance
    ================ ======================================================

    Example::

        def send_bundle_add_message(self, datapath):
            ofp = datapath.ofproto
            ofp_parser = datapath.ofproto_parser
            msg = ofp_parser.OFPRoleRequest(datapath, ofp.OFPCR_ROLE_EQUAL, 0)
            req = ofp_parser.OFPBundleAddMsg(datapath, 7, [ofp.OFPBF_ATOMIC],
                                             msg, [])
            datapath.send_msg(req)
    """
    def __init__(self, datapath, bundle_id, flags, message, properties):
        super(OFPBundleAddMsg, self).__init__(datapath)
        self.bundle_id = bundle_id
        self.flags = flags
        self.message = message
        self.properties = properties
    def _serialize_body(self):
        # The xid of the inner message must be the same as
        # that of the outer message (OF1.4.0 7.3.9.2)
        if self.message.xid != self.xid:
            self.message.set_xid(self.xid)
        # Message
        self.message.serialize()
        tail_buf = self.message.buf
        # Pad the inner message to 8 bytes, but only when properties follow
        if len(self.properties) > 0:
            message_len = len(tail_buf)
            pad_len = utils.round_up(message_len, 8) - message_len
            msg_pack_into("%dx" % pad_len, tail_buf, message_len)
        # Properties
        for p in self.properties:
            tail_buf += p.serialize()
        # Head
        msg_pack_into(ofproto.OFP_BUNDLE_ADD_MSG_0_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE, self.bundle_id,
                      self.flags)
        # Finish
        self.buf += tail_buf
# Populate this module (and the matching ofproto module) with the
# generated Nicira extension action classes.
nx_actions.generate(
    'ryu.ofproto.ofproto_v1_4',
    'ryu.ofproto.ofproto_v1_4_parser'
)
| 36.74894 | 90 | 0.568157 |
import six
import struct
from ryu.lib import addrconv
from ryu.lib.pack_utils import msg_pack_into
from ryu import utils
from ryu.ofproto.ofproto_parser import StringifyMixin, MsgBase, MsgInMsgBase, msg_str_attr
from ryu.ofproto import ether
from ryu.ofproto import nx_actions
from ryu.ofproto import ofproto_parser
from ryu.ofproto import ofproto_common
from ryu.ofproto import ofproto_v1_4 as ofproto
_MSG_PARSERS = {}
def _set_msg_type(msg_type):
def _set_cls_msg_type(cls):
cls.cls_msg_type = msg_type
return cls
return _set_cls_msg_type
def _register_parser(cls):
    """Class decorator: register ``cls.parser`` for the class's message type.

    The class must already carry ``cls_msg_type`` (set by ``_set_msg_type``)
    and the type must not have been registered before.
    """
    msg_type = cls.cls_msg_type
    assert msg_type is not None
    assert msg_type not in _MSG_PARSERS
    _MSG_PARSERS[msg_type] = cls.parser
    return cls
@ofproto_parser.register_msg_parser(ofproto.OFP_VERSION)
def msg_parser(datapath, version, msg_type, msg_len, xid, buf):
    """Dispatch a raw OF1.4 message to the parser registered for its type."""
    parser = _MSG_PARSERS.get(msg_type)
    # NOTE(review): an unregistered msg_type yields parser=None and a
    # TypeError on the call below rather than a protocol-level error.
    return parser(datapath, version, msg_type, msg_len, xid, buf)
@_register_parser
@_set_msg_type(ofproto.OFPT_HELLO)
class OFPHello(MsgBase):
    """OFPT_HELLO message; ``elements`` holds the parsed hello elements."""
    def __init__(self, datapath, elements=None):
        elements = elements if elements else []
        super(OFPHello, self).__init__(datapath)
        self.elements = elements
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPHello, cls).parser(datapath, version, msg_type,
                                          msg_len, xid, buf)
        offset = ofproto.OFP_HELLO_HEADER_SIZE
        elems = []
        while offset < msg.msg_len:
            type_, length = struct.unpack_from(
                ofproto.OFP_HELLO_ELEM_HEADER_PACK_STR, msg.buf, offset)
            # only the versionbitmap element is understood; other
            # element types are skipped by advancing past their length
            if type_ == ofproto.OFPHET_VERSIONBITMAP:
                elem = OFPHelloElemVersionBitmap.parser(msg.buf, offset)
                elems.append(elem)
            offset += length
        msg.elements = elems
        return msg
class OFPHelloElemVersionBitmap(StringifyMixin):
    """Version bitmap element of an OFPT_HELLO message.

    ``versions`` is the list of OpenFlow version numbers whose bits are
    set in the received bitmaps.
    """
    def __init__(self, versions, type_=None, length=None):
        super(OFPHelloElemVersionBitmap, self).__init__()
        self.type = ofproto.OFPHET_VERSIONBITMAP
        self.length = None
        self._bitmaps = None
        self.versions = versions
    @classmethod
    def parser(cls, buf, offset):
        """Parse the versionbitmap hello element at *offset* in *buf*."""
        type_, length = struct.unpack_from(
            ofproto.OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_PACK_STR,
            buf, offset)
        assert type_ == ofproto.OFPHET_VERSIONBITMAP
        bitmaps_len = (length -
                       ofproto.OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_SIZE)
        offset += ofproto.OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_SIZE
        bitmaps = []
        while bitmaps_len >= 4:
            bitmap = struct.unpack_from('!I', buf, offset)
            bitmaps.append(bitmap[0])
            offset += 4
            bitmaps_len -= 4
        # Bit n of bitmap word i encodes version (i * 32 + n).
        # BUGFIX: was range(31), which silently dropped the most
        # significant bit of every 32-bit bitmap word.
        versions = [i * 32 + shift
                    for i, bitmap in enumerate(bitmaps)
                    for shift in range(32) if bitmap & (1 << shift)]
        elem = cls(versions)
        elem.length = length
        elem._bitmaps = bitmaps
        return elem
@_register_parser
@_set_msg_type(ofproto.OFPT_ECHO_REQUEST)
class OFPEchoRequest(MsgBase):
    """OFPT_ECHO_REQUEST; ``data`` is an arbitrary opaque payload."""
    def __init__(self, datapath, data=None):
        super(OFPEchoRequest, self).__init__(datapath)
        self.data = data
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPEchoRequest, cls).parser(datapath, version, msg_type,
                                                msg_len, xid, buf)
        msg.data = msg.buf[ofproto.OFP_HEADER_SIZE:]
        return msg
    def _serialize_body(self):
        # payload is optional on an echo request
        if self.data is not None:
            self.buf += self.data
@_register_parser
@_set_msg_type(ofproto.OFPT_ERROR)
class OFPErrorMsg(MsgBase):
    """OFPT_ERROR message with ``type``, ``code`` and opaque ``data``."""
    def __init__(self, datapath, type_=None, code=None, data=None):
        super(OFPErrorMsg, self).__init__(datapath)
        self.type = type_
        self.code = code
        self.data = data
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        # peek at the error type first: experimenter errors have a
        # different body layout and their own message class
        type_, = struct.unpack_from('!H', six.binary_type(buf),
                                    ofproto.OFP_HEADER_SIZE)
        if type_ == ofproto.OFPET_EXPERIMENTER:
            return OFPErrorExperimenterMsg.parser(datapath, version, msg_type,
                                                  msg_len, xid, buf)
        msg = super(OFPErrorMsg, cls).parser(datapath, version, msg_type,
                                             msg_len, xid, buf)
        msg.type, msg.code = struct.unpack_from(
            ofproto.OFP_ERROR_MSG_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        msg.data = msg.buf[ofproto.OFP_ERROR_MSG_SIZE:]
        return msg
    def _serialize_body(self):
        assert self.data is not None
        msg_pack_into(ofproto.OFP_ERROR_MSG_PACK_STR, self.buf,
                      ofproto.OFP_HEADER_SIZE, self.type, self.code)
        self.buf += self.data
class OFPErrorExperimenterMsg(MsgBase):
    """Experimenter error message (OFPET_EXPERIMENTER body layout)."""
    def __init__(self, datapath, type_=None, exp_type=None, experimenter=None,
                 data=None):
        super(OFPErrorExperimenterMsg, self).__init__(datapath)
        self.type = ofproto.OFPET_EXPERIMENTER
        self.exp_type = exp_type
        self.experimenter = experimenter
        self.data = data
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        # this class is not registered itself; it is reached via
        # OFPErrorMsg.parser, so record the incoming msg_type here
        cls.cls_msg_type = msg_type
        msg = super(OFPErrorExperimenterMsg, cls).parser(
            datapath, version, msg_type, msg_len, xid, buf)
        msg.type, msg.exp_type, msg.experimenter = struct.unpack_from(
            ofproto.OFP_ERROR_EXPERIMENTER_MSG_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        msg.data = msg.buf[ofproto.OFP_ERROR_EXPERIMENTER_MSG_SIZE:]
        return msg
    def _serialize_body(self):
        assert self.data is not None
        msg_pack_into(ofproto.OFP_ERROR_EXPERIMENTER_MSG_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE,
                      self.type, self.exp_type, self.experimenter)
        self.buf += self.data
@_register_parser
@_set_msg_type(ofproto.OFPT_ECHO_REPLY)
class OFPEchoReply(MsgBase):
    """OFPT_ECHO_REPLY; ``data`` must echo the request's payload."""
    def __init__(self, datapath, data=None):
        super(OFPEchoReply, self).__init__(datapath)
        self.data = data
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPEchoReply, cls).parser(datapath, version, msg_type,
                                              msg_len, xid, buf)
        msg.data = msg.buf[ofproto.OFP_HEADER_SIZE:]
        return msg
    def _serialize_body(self):
        # unlike OFPEchoRequest, data is mandatory here
        assert self.data is not None
        self.buf += self.data
@_set_msg_type(ofproto.OFPT_FEATURES_REQUEST)
class OFPFeaturesRequest(MsgBase):
    """OFPT_FEATURES_REQUEST; header-only message with no body."""
    def __init__(self, datapath):
        super(OFPFeaturesRequest, self).__init__(datapath)
@_register_parser
@_set_msg_type(ofproto.OFPT_EXPERIMENTER)
class OFPExperimenter(MsgBase):
    """OFPT_EXPERIMENTER message: experimenter id, subtype and raw data."""
    def __init__(self, datapath, experimenter=None, exp_type=None, data=None):
        super(OFPExperimenter, self).__init__(datapath)
        self.experimenter = experimenter
        self.exp_type = exp_type
        self.data = data
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPExperimenter, cls).parser(datapath, version,
                                                 msg_type, msg_len,
                                                 xid, buf)
        (msg.experimenter, msg.exp_type) = struct.unpack_from(
            ofproto.OFP_EXPERIMENTER_HEADER_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        msg.data = msg.buf[ofproto.OFP_EXPERIMENTER_HEADER_SIZE:]
        return msg
    def _serialize_body(self):
        assert self.data is not None
        msg_pack_into(ofproto.OFP_EXPERIMENTER_HEADER_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE,
                      self.experimenter, self.exp_type)
        self.buf += self.data
@_register_parser
@_set_msg_type(ofproto.OFPT_FEATURES_REPLY)
class OFPSwitchFeatures(MsgBase):
    """OFPT_FEATURES_REPLY: datapath id, buffer/table counts, capabilities."""
    def __init__(self, datapath, datapath_id=None, n_buffers=None,
                 n_tables=None, auxiliary_id=None, capabilities=None):
        super(OFPSwitchFeatures, self).__init__(datapath)
        self.datapath_id = datapath_id
        self.n_buffers = n_buffers
        self.n_tables = n_tables
        self.auxiliary_id = auxiliary_id
        self.capabilities = capabilities
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPSwitchFeatures, cls).parser(datapath, version, msg_type,
                                                   msg_len, xid, buf)
        # the trailing reserved field is kept but not exposed publicly
        (msg.datapath_id,
         msg.n_buffers,
         msg.n_tables,
         msg.auxiliary_id,
         msg.capabilities,
         msg._reserved) = struct.unpack_from(
            ofproto.OFP_SWITCH_FEATURES_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        return msg
@_set_msg_type(ofproto.OFPT_GET_CONFIG_REQUEST)
class OFPGetConfigRequest(MsgBase):
    """OFPT_GET_CONFIG_REQUEST; header-only message with no body."""
    def __init__(self, datapath):
        super(OFPGetConfigRequest, self).__init__(datapath)
@_register_parser
@_set_msg_type(ofproto.OFPT_GET_CONFIG_REPLY)
class OFPGetConfigReply(MsgBase):
    """OFPT_GET_CONFIG_REPLY carrying the switch's ``flags`` and
    ``miss_send_len`` configuration."""
    def __init__(self, datapath, flags=None, miss_send_len=None):
        super(OFPGetConfigReply, self).__init__(datapath)
        self.flags = flags
        self.miss_send_len = miss_send_len
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPGetConfigReply, cls).parser(datapath, version, msg_type,
                                                   msg_len, xid, buf)
        msg.flags, msg.miss_send_len = struct.unpack_from(
            ofproto.OFP_SWITCH_CONFIG_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        return msg
@_set_msg_type(ofproto.OFPT_SET_CONFIG)
class OFPSetConfig(MsgBase):
    """OFPT_SET_CONFIG setting the switch's ``flags`` and ``miss_send_len``."""
    def __init__(self, datapath, flags=0, miss_send_len=0):
        super(OFPSetConfig, self).__init__(datapath)
        self.flags = flags
        self.miss_send_len = miss_send_len
    def _serialize_body(self):
        assert self.flags is not None
        assert self.miss_send_len is not None
        msg_pack_into(ofproto.OFP_SWITCH_CONFIG_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE,
                      self.flags, self.miss_send_len)
class OFPMatch(StringifyMixin):
    """Flow match structure (OXM TLV based).

    Match fields are given either as keyword arguments (user-friendly
    names/values, normalized and sorted) or as a pre-ordered list via
    ``_ordered_fields``. Internally fields are kept as a list of
    ``(name, value-or-(value, mask))`` pairs in ``_fields2``.
    """
    def __init__(self, type_=None, length=None, _ordered_fields=None,
                 **kwargs):
        super(OFPMatch, self).__init__()
        self.type = ofproto.OFPMT_OXM
        self.length = length
        if _ordered_fields is not None:
            # caller supplies the exact field order; kwargs are not allowed
            assert not kwargs
            self._fields2 = _ordered_fields
        else:
            kwargs = dict(ofproto.oxm_normalize_user(k, v) for
                          (k, v) in kwargs.items())
            fields = [ofproto.oxm_from_user(k, v) for (k, v)
                      in kwargs.items()]
            # assumption: sorting by OXM type values makes fields
            # meet ordering requirements (eg. eth_type before ipv4_src)
            fields.sort(
                key=lambda x: x[0][0] if isinstance(x[0], tuple) else x[0])
            self._fields2 = [ofproto.oxm_to_user(n, v, m) for (n, v, m)
                             in fields]
    @classmethod
    def parser(cls, buf, offset):
        """Parse an ofp_match (header plus OXM TLVs) at *offset*."""
        match = OFPMatch()
        type_, length = struct.unpack_from('!HH', buf, offset)
        match.type = type_
        match.length = length
        # ofp_match adjustment
        offset += 4
        length -= 4
        fields = []
        while length > 0:
            n, value, mask, field_len = ofproto.oxm_parse(buf, offset)
            k, uv = ofproto.oxm_to_user(n, value, mask)
            fields.append((k, uv))
            offset += field_len
            length -= field_len
        match._fields2 = fields
        return match
    def serialize(self, buf, offset):
        """Pack the match into *buf*; returns the padded total length."""
        fields = [ofproto.oxm_from_user(k, uv) for (k, uv)
                  in self._fields2]
        hdr_pack_str = '!HH'
        field_offset = offset + struct.calcsize(hdr_pack_str)
        for (n, value, mask) in fields:
            field_offset += ofproto.oxm_serialize(n, value, mask, buf,
                                                  field_offset)
        length = field_offset - offset
        msg_pack_into(hdr_pack_str, buf, offset, ofproto.OFPMT_OXM, length)
        self.length = length
        # the match is padded to an 8-byte boundary on the wire
        pad_len = utils.round_up(length, 8) - length
        msg_pack_into("%dx" % pad_len, buf, field_offset)
        return length + pad_len
    def __getitem__(self, key):
        return dict(self._fields2)[key]
    def __contains__(self, key):
        return key in dict(self._fields2)
    def iteritems(self):
        return iter(dict(self._fields2).items())
    def items(self):
        return self._fields2
    def get(self, key, default=None):
        return dict(self._fields2).get(key, default)
    def stringify_attrs(self):
        yield "oxm_fields", dict(self._fields2)
    def to_jsondict(self):
        """Return a JSON-serializable dict representation of the match."""
        body = {"oxm_fields": [ofproto.oxm_to_jsondict(k, uv) for k, uv
                               in self._fields2],
                "length": self.length,
                "type": self.type}
        return {self.__class__.__name__: body}
    @classmethod
    def from_jsondict(cls, dict_):
        """Rebuild an OFPMatch from ``to_jsondict`` output."""
        fields = [ofproto.oxm_from_jsondict(f) for f
                  in dict_['oxm_fields']]
        return OFPMatch(_ordered_fields=fields)
class OFPPropUnknown(StringifyMixin):
    """Fallback property holder used when a type is not registered;
    keeps the raw bytes in ``buf``."""
    def __init__(self, type_=None, length=None, buf=None):
        self.buf = buf
    @classmethod
    def parser(cls, buf):
        return cls(buf=buf)
class OFPPropBase(StringifyMixin):
    """Base class for TLV properties (type/length header + body)."""
    _PACK_STR = '!HH'
    # _TYPES = {} must be an attribute of subclass
    def __init__(self, type_, length=None):
        self.type = type_
        self.length = length
    @classmethod
    def register_type(cls, type_):
        """Return a decorator registering a subclass for *type_*."""
        def _register_type(subcls):
            cls._TYPES[type_] = subcls
            return subcls
        return _register_type
    @classmethod
    def parse(cls, buf):
        """Parse one property from the head of *buf*.

        Returns ``(prop, rest)`` where *rest* starts after the 8-byte
        aligned end of this property. Unknown types fall back to
        OFPPropUnknown rather than raising.
        """
        (type_, length) = struct.unpack_from(cls._PACK_STR, buf, 0)
        rest = buf[utils.round_up(length, 8):]
        try:
            subcls = cls._TYPES[type_]
        except KeyError:
            subcls = OFPPropUnknown
        prop = subcls.parser(buf)
        prop.type = type_
        prop.length = length
        return prop, rest
    @classmethod
    def get_rest(cls, buf):
        """Return the property body (bytes after the TLV header)."""
        (type_, length) = struct.unpack_from(cls._PACK_STR, buf, 0)
        offset = struct.calcsize(cls._PACK_STR)
        return buf[offset:length]
    def serialize(self):
        # Body
        # serialize_body should be implemented by subclass
        body = bytearray()
        body += self.serialize_body()
        # fixup
        self.length = len(body) + struct.calcsize(self._PACK_STR)
        # Header
        buf = bytearray()
        msg_pack_into(self._PACK_STR, buf, 0, self.type, self.length)
        buf += body
        # Pad
        pad_len = utils.round_up(self.length, 8) - self.length
        msg_pack_into("%dx" % pad_len, buf, len(buf))
        return buf
class OFPPropCommonExperimenter4ByteData(StringifyMixin):
    """Common base for experimenter properties whose payload is a
    sequence of 4-byte words (kept in ``data`` as a list of ints)."""
    _PACK_STR = '!HHII'
    _EXPERIMENTER_DATA_PACK_STR = '!I'
    _EXPERIMENTER_DATA_SIZE = 4
    def __init__(self, type_=None, length=None, experimenter=None,
                 exp_type=None, data=None):
        self.type = type_
        self.length = length
        self.experimenter = experimenter
        self.exp_type = exp_type
        # BUGFIX: the default was ``data=bytearray()``, a mutable default
        # argument shared by every instance created without *data*;
        # create a fresh buffer per instance instead.
        self.data = bytearray() if data is None else data
    @classmethod
    def parser(cls, buf):
        """Parse the property, splitting the payload into 4-byte words."""
        (type_, length, experimenter, exp_type) = struct.unpack_from(
            ofproto.OFP_PROP_EXPERIMENTER_PACK_STR, buf, 0)
        rest = buf[ofproto.OFP_PROP_EXPERIMENTER_SIZE:length]
        data = []
        while rest:
            (d,) = struct.unpack_from(
                cls._EXPERIMENTER_DATA_PACK_STR, rest, 0)
            data.append(d)
            rest = rest[cls._EXPERIMENTER_DATA_SIZE:]
        return cls(type_, length, experimenter, exp_type, data)
    def serialize(self):
        """Serialize the property; pads to an 8-byte boundary."""
        offset = 0
        bin_data = bytearray()
        for d in self.data:
            msg_pack_into(self._EXPERIMENTER_DATA_PACK_STR,
                          bin_data, offset, d)
            offset += self._EXPERIMENTER_DATA_SIZE
        # fixup
        self.length = struct.calcsize(self._PACK_STR)
        self.length += len(bin_data)
        buf = bytearray()
        msg_pack_into(self._PACK_STR, buf,
                      0, self.type, self.length, self.experimenter,
                      self.exp_type)
        buf += bin_data
        # Pad
        pad_len = utils.round_up(self.length, 8) - self.length
        msg_pack_into("%dx" % pad_len, buf, len(buf))
        return buf
class OFPPortDescProp(OFPPropBase):
    # Base for port-description properties; per-type subclass registry.
    _TYPES = {}
@OFPPortDescProp.register_type(ofproto.OFPPDPT_ETHERNET)
class OFPPortDescPropEthernet(OFPPortDescProp):
    """Ethernet port-description property (link features and speeds)."""
    def __init__(self, type_=None, length=None, curr=None, advertised=None,
                 supported=None, peer=None, curr_speed=None, max_speed=None):
        self.type = type_
        self.length = length
        self.curr = curr
        self.advertised = advertised
        self.supported = supported
        self.peer = peer
        self.curr_speed = curr_speed
        self.max_speed = max_speed
    @classmethod
    def parser(cls, buf):
        # local name shadows the module-level `ether` import intentionally
        ether = cls()
        (ether.type, ether.length, ether.curr,
         ether.advertised, ether.supported,
         ether.peer, ether.curr_speed, ether.max_speed) = struct.unpack_from(
            ofproto.OFP_PORT_DESC_PROP_ETHERNET_PACK_STR, buf, 0)
        return ether
@OFPPortDescProp.register_type(ofproto.OFPPDPT_OPTICAL)
class OFPPortDescPropOptical(OFPPortDescProp):
    """Optical port-description property (tx/rx frequency and power)."""
    def __init__(self, type_=None, length=None, supported=None,
                 tx_min_freq_lmda=None, tx_max_freq_lmda=None,
                 tx_grid_freq_lmda=None, rx_min_freq_lmda=None,
                 rx_max_freq_lmda=None, rx_grid_freq_lmda=None,
                 tx_pwr_min=None, tx_pwr_max=None):
        self.type = type_
        self.length = length
        self.supported = supported
        self.tx_min_freq_lmda = tx_min_freq_lmda
        self.tx_max_freq_lmda = tx_max_freq_lmda
        self.tx_grid_freq_lmda = tx_grid_freq_lmda
        self.rx_min_freq_lmda = rx_min_freq_lmda
        self.rx_max_freq_lmda = rx_max_freq_lmda
        self.rx_grid_freq_lmda = rx_grid_freq_lmda
        self.tx_pwr_min = tx_pwr_min
        self.tx_pwr_max = tx_pwr_max
    @classmethod
    def parser(cls, buf):
        optical = cls()
        (optical.type, optical.length, optical.supported,
         optical.tx_min_freq_lmda, optical.tx_max_freq_lmda,
         optical.tx_grid_freq_lmda, optical.rx_min_freq_lmda,
         optical.rx_max_freq_lmda, optical.rx_grid_freq_lmda,
         optical.tx_pwr_min, optical.tx_pwr_max) = struct.unpack_from(
            ofproto.OFP_PORT_DESC_PROP_OPTICAL_PACK_STR, buf, 0)
        return optical
@OFPPortDescProp.register_type(ofproto.OFPPDPT_EXPERIMENTER)
class OFPPortDescPropExperimenter(OFPPropCommonExperimenter4ByteData):
    """Experimenter port-description property."""
    pass
class OFPTableModProp(OFPPropBase):
    # Base for table-mod properties; per-type subclass registry.
    _TYPES = {}
@OFPTableModProp.register_type(ofproto.OFPTMPT_EVICTION)
class OFPTableModPropEviction(OFPTableModProp):
    """Table-mod eviction property (``flags`` bitmap)."""
    def __init__(self, type_=None, length=None, flags=None):
        self.type = type_
        self.length = length
        self.flags = flags
    @classmethod
    def parser(cls, buf):
        eviction = cls()
        (eviction.type, eviction.length, eviction.flags) = struct.unpack_from(
            ofproto.OFP_TABLE_MOD_PROP_EVICTION_PACK_STR, buf, 0)
        return eviction
    def serialize(self):
        # fixup
        self.length = ofproto.OFP_TABLE_MOD_PROP_EVICTION_SIZE
        buf = bytearray()
        msg_pack_into(ofproto.OFP_TABLE_MOD_PROP_EVICTION_PACK_STR, buf, 0,
                      self.type, self.length, self.flags)
        return buf
@OFPTableModProp.register_type(ofproto.OFPTMPT_VACANCY)
class OFPTableModPropVacancy(OFPTableModProp):
    """Table-mod vacancy property (down/up thresholds and current value)."""
    def __init__(self, type_=None, length=None, vacancy_down=None,
                 vacancy_up=None, vacancy=None):
        self.type = type_
        self.length = length
        self.vacancy_down = vacancy_down
        self.vacancy_up = vacancy_up
        self.vacancy = vacancy
    @classmethod
    def parser(cls, buf):
        vacancy = cls()
        (vacancy.type, vacancy.length, vacancy.vacancy_down,
         vacancy.vacancy_up, vacancy.vacancy) = struct.unpack_from(
            ofproto.OFP_TABLE_MOD_PROP_VACANCY_PACK_STR, buf, 0)
        return vacancy
    def serialize(self):
        # fixup
        self.length = ofproto.OFP_TABLE_MOD_PROP_VACANCY_SIZE
        buf = bytearray()
        msg_pack_into(ofproto.OFP_TABLE_MOD_PROP_VACANCY_PACK_STR, buf, 0,
                      self.type, self.length, self.vacancy_down,
                      self.vacancy_up, self.vacancy)
        return buf
@OFPTableModProp.register_type(ofproto.OFPTMPT_EXPERIMENTER)
class OFPTableModPropExperimenter(OFPPropCommonExperimenter4ByteData):
    """Experimenter table-mod property."""
    pass
class OFPQueueDescProp(OFPPropBase):
    # Base for queue-description properties; per-type subclass registry.
    _TYPES = {}
@OFPQueueDescProp.register_type(ofproto.OFPQDPT_MIN_RATE)
class OFPQueueDescPropMinRate(OFPQueueDescProp):
    """Queue minimum-rate property."""
    def __init__(self, type_=None, length=None, rate=None):
        self.type = type_
        self.length = length
        self.rate = rate
    @classmethod
    def parser(cls, buf):
        minrate = cls()
        (minrate.type, minrate.length, minrate.rate) = struct.unpack_from(
            ofproto.OFP_QUEUE_DESC_PROP_MIN_RATE_PACK_STR, buf, 0)
        return minrate
@OFPQueueDescProp.register_type(ofproto.OFPQDPT_MAX_RATE)
class OFPQueueDescPropMaxRate(OFPQueueDescProp):
    """Queue maximum-rate property."""
    def __init__(self, type_=None, length=None, rate=None):
        self.type = type_
        self.length = length
        self.rate = rate
    @classmethod
    def parser(cls, buf):
        maxrate = cls()
        (maxrate.type, maxrate.length, maxrate.rate) = struct.unpack_from(
            ofproto.OFP_QUEUE_DESC_PROP_MAX_RATE_PACK_STR, buf, 0)
        return maxrate
@OFPQueueDescProp.register_type(ofproto.OFPQDPT_EXPERIMENTER)
class OFPQueueDescPropExperimenter(OFPPropCommonExperimenter4ByteData):
    """Experimenter queue-description property."""
    pass
class OFPRoleProp(OFPPropBase):
    # Base for role-status properties; per-type subclass registry.
    _TYPES = {}
@OFPRoleProp.register_type(ofproto.OFPRPT_EXPERIMENTER)
class OFPRolePropExperimenter(OFPPropCommonExperimenter4ByteData):
    """Experimenter role-status property."""
    pass
class OFPBundleProp(OFPPropBase):
    # Base for bundle properties; per-type subclass registry.
    _TYPES = {}
# NOTE(review): registered under OFPRPT_EXPERIMENTER (a role-property
# constant) rather than a bundle-property constant — presumably the
# numeric values coincide; verify against ofproto_v1_4.
@OFPBundleProp.register_type(ofproto.OFPRPT_EXPERIMENTER)
class OFPBundlePropExperimenter(OFPPropCommonExperimenter4ByteData):
    """Experimenter bundle property."""
    pass
@_register_parser
@_set_msg_type(ofproto.OFPT_PACKET_IN)
class OFPPacketIn(MsgBase):
    """OFPT_PACKET_IN: a packet (or its prefix) forwarded to the controller."""
    def __init__(self, datapath, buffer_id=None, total_len=None, reason=None,
                 table_id=None, cookie=None, match=None, data=None):
        super(OFPPacketIn, self).__init__(datapath)
        self.buffer_id = buffer_id
        self.total_len = total_len
        self.reason = reason
        self.table_id = table_id
        self.cookie = cookie
        self.match = match
        self.data = data
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPPacketIn, cls).parser(datapath, version, msg_type,
                                             msg_len, xid, buf)
        (msg.buffer_id, msg.total_len, msg.reason,
         msg.table_id, msg.cookie) = struct.unpack_from(
            ofproto.OFP_PACKET_IN_PACK_STR,
            msg.buf, ofproto.OFP_HEADER_SIZE)
        msg.match = OFPMatch.parser(msg.buf, ofproto.OFP_PACKET_IN_SIZE -
                                    ofproto.OFP_MATCH_SIZE)
        match_len = utils.round_up(msg.match.length, 8)
        # frame data begins after the padded match plus 2 bytes
        # (the pad field between match and data in ofp_packet_in)
        msg.data = msg.buf[(ofproto.OFP_PACKET_IN_SIZE -
                            ofproto.OFP_MATCH_SIZE + match_len + 2):]
        if msg.total_len < len(msg.data):
            # discard padding for 8-byte alignment of OFP packet
            msg.data = msg.data[:msg.total_len]
        return msg
@_register_parser
@_set_msg_type(ofproto.OFPT_FLOW_REMOVED)
class OFPFlowRemoved(MsgBase):
    """OFPT_FLOW_REMOVED: notification that a flow entry was removed."""
    def __init__(self, datapath, cookie=None, priority=None, reason=None,
                 table_id=None, duration_sec=None, duration_nsec=None,
                 idle_timeout=None, hard_timeout=None, packet_count=None,
                 byte_count=None, match=None):
        super(OFPFlowRemoved, self).__init__(datapath)
        self.cookie = cookie
        self.priority = priority
        self.reason = reason
        self.table_id = table_id
        self.duration_sec = duration_sec
        self.duration_nsec = duration_nsec
        self.idle_timeout = idle_timeout
        self.hard_timeout = hard_timeout
        self.packet_count = packet_count
        self.byte_count = byte_count
        self.match = match
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPFlowRemoved, cls).parser(datapath, version, msg_type,
                                                msg_len, xid, buf)
        (msg.cookie, msg.priority, msg.reason,
         msg.table_id, msg.duration_sec, msg.duration_nsec,
         msg.idle_timeout, msg.hard_timeout, msg.packet_count,
         msg.byte_count) = struct.unpack_from(
            ofproto.OFP_FLOW_REMOVED_PACK_STR0,
            msg.buf, ofproto.OFP_HEADER_SIZE)
        # the match sits at the tail of the fixed-size part
        offset = (ofproto.OFP_FLOW_REMOVED_SIZE - ofproto.OFP_MATCH_SIZE)
        msg.match = OFPMatch.parser(msg.buf, offset)
        return msg
class OFPPort(StringifyMixin):
    """Description of one switch port (ofp_port) with its properties."""
    _TYPE = {
        'ascii': [
            'hw_addr',
        ],
        'utf-8': [
            # OF spec is unclear about the encoding of name.
            # we assumes UTF-8, which is used by OVS.
            'name',
        ]
    }
    def __init__(self, port_no=None, length=None, hw_addr=None, name=None,
                 config=None, state=None, properties=None):
        super(OFPPort, self).__init__()
        self.port_no = port_no
        self.length = length
        self.hw_addr = hw_addr
        self.name = name
        self.config = config
        self.state = state
        self.properties = properties
    @classmethod
    def parser(cls, buf, offset):
        """Parse one ofp_port at *offset*, including trailing properties."""
        (port_no, length, hw_addr, name, config, state) = struct.unpack_from(
            ofproto.OFP_PORT_PACK_STR, buf, offset)
        hw_addr = addrconv.mac.bin_to_text(hw_addr)
        # name is NUL-padded on the wire
        name = name.rstrip(b'\0')
        props = []
        rest = buf[offset + ofproto.OFP_PORT_SIZE:offset + length]
        while rest:
            p, rest = OFPPortDescProp.parse(rest)
            props.append(p)
        ofpport = cls(port_no, length, hw_addr, name, config, state, props)
        return ofpport
class OFPTableDesc(StringifyMixin):
    """Description of one flow table (ofp_table_desc) with its properties."""
    def __init__(self, length=None, table_id=None, config=None,
                 properties=None):
        super(OFPTableDesc, self).__init__()
        self.table_id = table_id
        self.length = length
        self.config = config
        self.properties = properties
    @classmethod
    def parser(cls, buf, offset):
        """Parse one ofp_table_desc at *offset*, including properties."""
        (length, table_id, config) = struct.unpack_from(
            ofproto.OFP_TABLE_DESC_PACK_STR, buf, offset)
        props = []
        rest = buf[offset + ofproto.OFP_TABLE_DESC_SIZE:offset + length]
        while rest:
            p, rest = OFPTableModProp.parse(rest)
            props.append(p)
        ofptabledesc = cls(length, table_id, config, props)
        return ofptabledesc
class OFPQueueDesc(StringifyMixin):
    """Description of a packet queue (struct ofp_queue_desc)."""
    def __init__(self, port_no=None, queue_id=None, len_=None,
                 properties=None):
        super(OFPQueueDesc, self).__init__()
        self.port_no = port_no
        self.queue_id = queue_id
        self.len = len_
        self.properties = properties
    @classmethod
    def parser(cls, buf, offset):
        """Parse one ofp_queue_desc structure at *offset* in *buf*."""
        (port_no, queue_id, len_) = struct.unpack_from(
            ofproto.OFP_QUEUE_DESC_PACK_STR, buf, offset)
        props = []
        # Properties fill the remainder of the structure, up to 'len_'.
        rest = buf[offset + ofproto.OFP_QUEUE_DESC_SIZE:offset + len_]
        while rest:
            p, rest = OFPQueueDescProp.parse(rest)
            props.append(p)
        ofpqueuedesc = cls(port_no, queue_id, len_, props)
        return ofpqueuedesc
def _set_stats_type(stats_type, stats_body_cls):
def _set_cls_stats_type(cls):
cls.cls_stats_type = stats_type
cls.cls_stats_body_cls = stats_body_cls
return cls
return _set_cls_stats_type
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPMultipartRequest(MsgBase):
    """Base class for OFPT_MULTIPART_REQUEST messages.

    Concrete subclasses get cls_stats_type via the _set_stats_type
    decorator and override _serialize_stats_body() to append a body.
    """
    def __init__(self, datapath, flags):
        super(OFPMultipartRequest, self).__init__(datapath)
        # cls_stats_type is attached by the _set_stats_type decorator.
        self.type = self.__class__.cls_stats_type
        self.flags = flags
    def _serialize_stats_body(self):
        # Default: no request body.  Subclasses override as needed.
        pass
    def _serialize_body(self):
        msg_pack_into(ofproto.OFP_MULTIPART_REQUEST_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE,
                      self.type, self.flags)
        self._serialize_stats_body()
@_register_parser
@_set_msg_type(ofproto.OFPT_METER_MOD)
class OFPMeterMod(MsgBase):
    """OFPT_METER_MOD message: add, modify or delete a meter.

    Carries the command, flags, meter id and a list of meter bands.
    """
    def __init__(self, datapath, command=ofproto.OFPMC_ADD,
                 flags=ofproto.OFPMF_KBPS, meter_id=1, bands=None):
        bands = bands if bands else []
        super(OFPMeterMod, self).__init__(datapath)
        self.command = command
        self.flags = flags
        self.meter_id = meter_id
        self.bands = bands
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        """Parse the fixed fields, then the trailing list of bands."""
        msg = super(OFPMeterMod, cls).parser(
            datapath, version, msg_type, msg_len, xid, buf)
        (msg.command, msg.flags, msg.meter_id) = struct.unpack_from(
            ofproto.OFP_METER_MOD_PACK_STR, buf, ofproto.OFP_HEADER_SIZE)
        offset = ofproto.OFP_METER_MOD_SIZE
        msg.bands = []
        # Bands occupy the rest of the message.
        while offset < msg.msg_len:
            band = OFPMeterBandHeader.parser(buf, offset)
            msg.bands.append(band)
            offset += band.len
        return msg
    def _serialize_body(self):
        msg_pack_into(ofproto.OFP_METER_MOD_PACK_STR, self.buf,
                      ofproto.OFP_HEADER_SIZE,
                      self.command, self.flags, self.meter_id)
        offset = ofproto.OFP_METER_MOD_SIZE
        for b in self.bands:
            b.serialize(self.buf, offset)
            offset += b.len
@_set_msg_type(ofproto.OFPT_TABLE_MOD)
class OFPTableMod(MsgBase):
    """OFPT_TABLE_MOD message: configure a flow table."""
    def __init__(self, datapath, table_id, config, properties):
        super(OFPTableMod, self).__init__(datapath)
        self.table_id = table_id
        self.config = config
        self.properties = properties
    def _serialize_body(self):
        # Serialize properties first so they can simply be appended
        # after the fixed part.
        props_buf = bytearray()
        for p in self.properties:
            props_buf += p.serialize()
        msg_pack_into(ofproto.OFP_TABLE_MOD_PACK_STR, self.buf,
                      ofproto.OFP_HEADER_SIZE,
                      self.table_id, self.config)
        self.buf += props_buf
@_register_parser
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPMultipartReply(MsgBase):
    """Base class for OFPT_MULTIPART_REPLY messages.

    Concrete reply classes register themselves per multipart type via
    register_stats_type(); parser() dispatches on the wire type field.
    """
    # Maps OFPMP_* type code -> registered reply class.
    _STATS_MSG_TYPES = {}
    @staticmethod
    def register_stats_type(body_single_struct=False):
        """Return a decorator registering a reply class for its type.

        body_single_struct=True means the body is a single structure
        rather than a list (e.g. OFPDescStatsReply).
        """
        def _register_stats_type(cls):
            assert cls.cls_stats_type is not None
            assert cls.cls_stats_type not in OFPMultipartReply._STATS_MSG_TYPES
            assert cls.cls_stats_body_cls is not None
            cls.cls_body_single_struct = body_single_struct
            OFPMultipartReply._STATS_MSG_TYPES[cls.cls_stats_type] = cls
            return cls
        return _register_stats_type
    def __init__(self, datapath, body=None, flags=None):
        super(OFPMultipartReply, self).__init__(datapath)
        self.body = body
        self.flags = flags
    @classmethod
    def parser_stats_body(cls, buf, msg_len, offset):
        """Parse the body as a list of cls_stats_body_cls entries.

        NOTE(review): this path assumes every entry exposes 'length';
        parser() below also tolerates entries exposing 'len'.
        """
        body_cls = cls.cls_stats_body_cls
        body = []
        while offset < msg_len:
            entry = body_cls.parser(buf, offset)
            body.append(entry)
            offset += entry.length
        if cls.cls_body_single_struct:
            return body[0]
        return body
    @classmethod
    def parser_stats(cls, datapath, version, msg_type, msg_len, xid, buf):
        """Parse header then body via parser_stats_body()."""
        msg = MsgBase.parser.__func__(
            cls, datapath, version, msg_type, msg_len, xid, buf)
        msg.body = msg.parser_stats_body(msg.buf, msg.msg_len,
                                         ofproto.OFP_MULTIPART_REPLY_SIZE)
        return msg
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        """Dispatch to the reply class registered for the type field."""
        type_, flags = struct.unpack_from(
            ofproto.OFP_MULTIPART_REPLY_PACK_STR, six.binary_type(buf),
            ofproto.OFP_HEADER_SIZE)
        stats_type_cls = cls._STATS_MSG_TYPES.get(type_)
        msg = super(OFPMultipartReply, stats_type_cls).parser(
            datapath, version, msg_type, msg_len, xid, buf)
        msg.type = type_
        msg.flags = flags
        offset = ofproto.OFP_MULTIPART_REPLY_SIZE
        body = []
        while offset < msg_len:
            b = stats_type_cls.cls_stats_body_cls.parser(msg.buf, offset)
            body.append(b)
            # Body classes are inconsistent: some carry 'length',
            # others 'len'.
            offset += b.length if hasattr(b, 'length') else b.len
        if stats_type_cls.cls_body_single_struct:
            msg.body = body[0]
        else:
            msg.body = body
        return msg
class OFPDescStats(ofproto_parser.namedtuple('OFPDescStats', (
        'mfr_desc', 'hw_desc', 'sw_desc', 'serial_num', 'dp_desc'))):
    """Body of an OFPMP_DESC reply: free-form switch description strings."""
    _TYPE = {
        'ascii': [
            'mfr_desc',
            'hw_desc',
            'sw_desc',
            'serial_num',
            'dp_desc',
        ]
    }
    @classmethod
    def parser(cls, buf, offset):
        """Parse one ofp_desc structure at *offset* in *buf*."""
        desc = struct.unpack_from(ofproto.OFP_DESC_PACK_STR,
                                  buf, offset)
        desc = list(desc)
        # Strings are NUL-padded on the wire.
        desc = [x.rstrip(b'\0') for x in desc]
        stats = cls(*desc)
        stats.length = ofproto.OFP_DESC_SIZE
        return stats
@_set_stats_type(ofproto.OFPMP_DESC, OFPDescStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPDescStatsRequest(OFPMultipartRequest):
    """OFPMP_DESC multipart request; carries no body."""
    def __init__(self, datapath, flags=0, type_=None):
        super(OFPDescStatsRequest, self).__init__(datapath, flags)
@OFPMultipartReply.register_stats_type(body_single_struct=True)
@_set_stats_type(ofproto.OFPMP_DESC, OFPDescStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPDescStatsReply(OFPMultipartReply):
    """Reply to OFPDescStatsRequest; body is a single OFPDescStats."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPDescStatsReply, self).__init__(datapath, **kwargs)
class OFPTableFeaturesStats(StringifyMixin):
    """Body entry of an OFPMP_TABLE_FEATURES multipart message.

    Describes one flow table: its id, name, metadata masks, config,
    capacity and a list of OFPTableFeatureProp properties.
    """
    _TYPE = {
        'utf-8': [
            # OF spec is unclear about the encoding of name.
            # we assumes UTF-8.
            'name',
        ]
    }
    def __init__(self, table_id=None, name=None, metadata_match=None,
                 metadata_write=None, config=None, max_entries=None,
                 properties=None, length=None):
        super(OFPTableFeaturesStats, self).__init__()
        # Bug fix: the 'length' argument used to be silently discarded
        # (self.length was unconditionally set to None).
        self.length = length
        self.table_id = table_id
        self.name = name
        self.metadata_match = metadata_match
        self.metadata_write = metadata_write
        self.config = config
        self.max_entries = max_entries
        self.properties = properties
    @classmethod
    def parser(cls, buf, offset):
        """Parse one ofp_table_features structure at *offset* in *buf*."""
        table_features = cls()
        (table_features.length, table_features.table_id,
         name, table_features.metadata_match,
         table_features.metadata_write, table_features.config,
         table_features.max_entries
         ) = struct.unpack_from(ofproto.OFP_TABLE_FEATURES_PACK_STR,
                                buf, offset)
        # The name field is NUL-padded on the wire.
        table_features.name = name.rstrip(b'\0')
        props = []
        # Properties fill the remainder of the structure, up to 'length'.
        rest = buf[offset + ofproto.OFP_TABLE_FEATURES_SIZE:
                   offset + table_features.length]
        while rest:
            p, rest = OFPTableFeatureProp.parse(rest)
            props.append(p)
        table_features.properties = props
        return table_features
    def serialize(self):
        """Serialize to bytes; recomputes self.length from the props."""
        # fixup
        bin_props = bytearray()
        for p in self.properties:
            bin_props += p.serialize()
        self.length = ofproto.OFP_TABLE_FEATURES_SIZE + len(bin_props)
        buf = bytearray()
        msg_pack_into(ofproto.OFP_TABLE_FEATURES_PACK_STR, buf, 0,
                      self.length, self.table_id, self.name,
                      self.metadata_match, self.metadata_write,
                      self.config, self.max_entries)
        return buf + bin_props
class OFPTableFeatureProp(OFPPropBase):
    """Base class for table-features properties; subclass registry."""
    _TYPES = {}
class OFPInstructionId(StringifyMixin):
    """Instruction id entry used inside table-features properties."""
    _PACK_STR = '!HH'  # type, len
    def __init__(self, type_, len_=None):
        self.type = type_
        self.len = len_
        # XXX experimenter instruction ids are not handled here.
    @classmethod
    def parse(cls, buf):
        """Parse one entry from the head of *buf*; return (id, rest)."""
        (type_, len_,) = struct.unpack_from(cls._PACK_STR, six.binary_type(buf), 0)
        rest = buf[len_:]
        return cls(type_=type_, len_=len_), rest
    def serialize(self):
        # fixup: the wire length is always the fixed header size.
        self.len = struct.calcsize(self._PACK_STR)
        buf = bytearray()
        msg_pack_into(self._PACK_STR, buf, 0, self.type, self.len)
        return buf
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_INSTRUCTIONS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_INSTRUCTIONS_MISS)
class OFPTableFeaturePropInstructions(OFPTableFeatureProp):
    """Table-features property listing supported instruction ids."""
    def __init__(self, type_=None, length=None, instruction_ids=None):
        instruction_ids = instruction_ids if instruction_ids else []
        super(OFPTableFeaturePropInstructions, self).__init__(type_, length)
        self.instruction_ids = instruction_ids
    @classmethod
    def parser(cls, buf):
        """Parse the property payload into OFPInstructionId entries."""
        rest = cls.get_rest(buf)
        ids = []
        while rest:
            i, rest = OFPInstructionId.parse(rest)
            ids.append(i)
        return cls(instruction_ids=ids)
    def serialize_body(self):
        bin_ids = bytearray()
        for i in self.instruction_ids:
            bin_ids += i.serialize()
        return bin_ids
# Implementation note: While OpenFlow 1.3.2 shares the same ofp_action_header
# for flow_mod and table_features, we have separate classes. We named this
# class to match with OpenFlow 1.4's name. (ofp_action_id)
class OFPActionId(StringifyMixin):
    """Action id entry used inside table-features properties."""
    _PACK_STR = '!HH'  # type, len
    def __init__(self, type_, len_=None):
        self.type = type_
        self.len = len_
    @classmethod
    def parse(cls, buf):
        """Parse one entry from the head of *buf*; return (id, rest)."""
        (type_, len_,) = struct.unpack_from(cls._PACK_STR, six.binary_type(buf), 0)
        rest = buf[len_:]
        return cls(type_=type_, len_=len_), rest
    def serialize(self):
        # fixup: the wire length is always the fixed header size.
        self.len = struct.calcsize(self._PACK_STR)
        buf = bytearray()
        msg_pack_into(self._PACK_STR, buf, 0, self.type, self.len)
        return buf
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_WRITE_ACTIONS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_WRITE_ACTIONS_MISS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_APPLY_ACTIONS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_APPLY_ACTIONS_MISS)
class OFPTableFeaturePropActions(OFPTableFeatureProp):
    """Table-features property listing supported action ids."""
    def __init__(self, type_=None, length=None, action_ids=None):
        action_ids = action_ids if action_ids else []
        super(OFPTableFeaturePropActions, self).__init__(type_, length)
        self.action_ids = action_ids
    @classmethod
    def parser(cls, buf):
        """Parse the property payload into OFPActionId entries."""
        rest = cls.get_rest(buf)
        ids = []
        while rest:
            i, rest = OFPActionId.parse(rest)
            ids.append(i)
        return cls(action_ids=ids)
    def serialize_body(self):
        bin_ids = bytearray()
        for i in self.action_ids:
            bin_ids += i.serialize()
        return bin_ids
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_NEXT_TABLES)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_NEXT_TABLES_MISS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_TABLE_SYNC_FROM)
class OFPTableFeaturePropNextTables(OFPTableFeatureProp):
    """Table-features property listing table ids (one byte each)."""
    _TABLE_ID_PACK_STR = '!B'
    def __init__(self, type_=None, length=None, table_ids=None):
        table_ids = table_ids if table_ids else []
        super(OFPTableFeaturePropNextTables, self).__init__(type_, length)
        self.table_ids = table_ids
    @classmethod
    def parser(cls, buf):
        """Parse the property payload into a list of table ids."""
        rest = cls.get_rest(buf)
        ids = []
        while rest:
            (i,) = struct.unpack_from(cls._TABLE_ID_PACK_STR, six.binary_type(rest), 0)
            rest = rest[struct.calcsize(cls._TABLE_ID_PACK_STR):]
            ids.append(i)
        return cls(table_ids=ids)
    def serialize_body(self):
        bin_ids = bytearray()
        for i in self.table_ids:
            bin_id = bytearray()
            msg_pack_into(self._TABLE_ID_PACK_STR, bin_id, 0, i)
            bin_ids += bin_id
        return bin_ids
# NOTE: implementations disagree about how oxm_hasmask/oxm_length are
# encoded in ofp_oxm_id entries:
# ofsoftswitch13
#   oxm_hasmask  always 0
#   oxm_length   same as ofp_match etc (as without mask)
# linc/of_protocol
#   oxm_hasmask  always 0
#   oxm_length   always 0
# ovs:
#   seems in flux as of writing this [20141003]
class OFPOxmId(StringifyMixin):
    """OXM id entry used inside table-features properties."""
    _PACK_STR = '!I'  # oxm header
    _EXPERIMENTER_ID_PACK_STR = '!I'
    _TYPE = {
        'ascii': [
            'type',
        ],
    }
    def __init__(self, type_, hasmask=False, length=None):
        self.type = type_
        self.hasmask = hasmask
        self.length = length
    @classmethod
    def parse(cls, buf):
        """Parse one OXM id from the head of *buf*; return (id, rest).

        Dispatches to OFPExperimenterOxmId for experimenter-class OXMs.
        """
        (oxm,) = struct.unpack_from(cls._PACK_STR, six.binary_type(buf), 0)
        # oxm (32 bit) == class (16) | field (7) | hasmask (1) | length (8)
        # in case of experimenter OXMs, another 32 bit value
        # (experimenter id) follows.
        (type_, _v) = ofproto.oxm_to_user(oxm >> (1 + 8), None, None)
        rest = buf[struct.calcsize(cls._PACK_STR):]
        hasmask = ofproto.oxm_tlv_header_extract_hasmask(oxm)
        length = oxm & 0xff  # XXX see the comment on OFPOxmId
        class_ = oxm >> (7 + 1 + 8)
        if class_ == ofproto.OFPXMC_EXPERIMENTER:
            (exp_id,) = struct.unpack_from(cls._EXPERIMENTER_ID_PACK_STR,
                                           six.binary_type(rest), 0)
            rest = rest[struct.calcsize(cls._EXPERIMENTER_ID_PACK_STR):]
            subcls = OFPExperimenterOxmId
            return subcls(type_=type_, exp_id=exp_id, hasmask=hasmask,
                          length=length), rest
        else:
            return cls(type_=type_, hasmask=hasmask, length=length), rest
    def serialize(self):
        # fixup
        self.length = 0  # XXX see the comment on OFPOxmId
        (n, _v, _m) = ofproto.oxm_from_user(self.type, None)
        oxm = (n << (1 + 8)) | (self.hasmask << 8) | self.length
        buf = bytearray()
        msg_pack_into(self._PACK_STR, buf, 0, oxm)
        assert n >> 7 != ofproto.OFPXMC_EXPERIMENTER
        return buf
class OFPExperimenterOxmId(OFPOxmId):
    """OXM id for experimenter-class OXMs (carries an experimenter id)."""
    def __init__(self, type_, exp_id, hasmask=False, length=None):
        super(OFPExperimenterOxmId, self).__init__(type_=type_,
                                                   hasmask=hasmask,
                                                   length=length)
        self.exp_id = exp_id
    def serialize(self):
        """Serialize the OXM header plus the experimenter id.

        Bug fix: the buffer was built but never returned, so callers
        concatenating i.serialize() (e.g. OFPTableFeaturePropOxm.
        serialize_body) received None instead of bytes.
        """
        buf = super(OFPExperimenterOxmId, self).serialize()
        msg_pack_into(self._EXPERIMENTER_ID_PACK_STR, buf,
                      struct.calcsize(self._PACK_STR), self.exp_id)
        return buf
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_MATCH)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_WILDCARDS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_WRITE_SETFIELD)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_WRITE_SETFIELD_MISS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_APPLY_SETFIELD)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_APPLY_SETFIELD_MISS)
class OFPTableFeaturePropOxm(OFPTableFeatureProp):
    """Table-features property listing supported OXM ids."""
    def __init__(self, type_=None, length=None, oxm_ids=None):
        oxm_ids = oxm_ids if oxm_ids else []
        super(OFPTableFeaturePropOxm, self).__init__(type_, length)
        self.oxm_ids = oxm_ids
    @classmethod
    def parser(cls, buf):
        """Parse the property payload into OFPOxmId entries."""
        rest = cls.get_rest(buf)
        ids = []
        while rest:
            i, rest = OFPOxmId.parse(rest)
            ids.append(i)
        return cls(oxm_ids=ids)
    def serialize_body(self):
        bin_ids = bytearray()
        for i in self.oxm_ids:
            bin_ids += i.serialize()
        return bin_ids
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_EXPERIMENTER)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_EXPERIMENTER_MISS)
class OFPTableFeaturePropExperimenter(OFPPropCommonExperimenter4ByteData):
    """Experimenter table-features property; payload handled by base."""
    pass
@_set_stats_type(ofproto.OFPMP_TABLE_FEATURES, OFPTableFeaturesStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPTableFeaturesStatsRequest(OFPMultipartRequest):
    """OFPMP_TABLE_FEATURES request; body is OFPTableFeaturesStats list."""
    def __init__(self, datapath, flags=0, body=None, type_=None):
        body = body if body else []
        super(OFPTableFeaturesStatsRequest, self).__init__(datapath, flags)
        self.body = body
    def _serialize_stats_body(self):
        bin_body = bytearray()
        for p in self.body:
            bin_body += p.serialize()
        self.buf += bin_body
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_TABLE_FEATURES, OFPTableFeaturesStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPTableFeaturesStatsReply(OFPMultipartReply):
    """Reply; body is a list of OFPTableFeaturesStats."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPTableFeaturesStatsReply, self).__init__(datapath, **kwargs)
@_set_stats_type(ofproto.OFPMP_PORT_DESC, OFPPort)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPPortDescStatsRequest(OFPMultipartRequest):
    """OFPMP_PORT_DESC multipart request; carries no body."""
    def __init__(self, datapath, flags=0, type_=None):
        super(OFPPortDescStatsRequest, self).__init__(datapath, flags)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_PORT_DESC, OFPPort)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPPortDescStatsReply(OFPMultipartReply):
    """Reply; body is a list of OFPPort."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPPortDescStatsReply, self).__init__(datapath, **kwargs)
@_set_stats_type(ofproto.OFPMP_TABLE_DESC, OFPTableDesc)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPTableDescStatsRequest(OFPMultipartRequest):
    """OFPMP_TABLE_DESC multipart request; carries no body."""
    def __init__(self, datapath, flags=0, type_=None):
        super(OFPTableDescStatsRequest, self).__init__(datapath, flags)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_TABLE_DESC, OFPTableDesc)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPTableDescStatsReply(OFPMultipartReply):
    """Reply; body is a list of OFPTableDesc."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPTableDescStatsReply, self).__init__(datapath, **kwargs)
@_set_stats_type(ofproto.OFPMP_QUEUE_DESC, OFPQueueDesc)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPQueueDescStatsRequest(OFPMultipartRequest):
    """OFPMP_QUEUE_DESC request, filtered by port and queue id."""
    def __init__(self, datapath, flags=0, port_no=ofproto.OFPP_ANY,
                 queue_id=ofproto.OFPQ_ALL, type_=None):
        super(OFPQueueDescStatsRequest, self).__init__(datapath, flags)
        self.port_no = port_no
        self.queue_id = queue_id
    def _serialize_stats_body(self):
        msg_pack_into(ofproto.OFP_QUEUE_DESC_REQUEST_PACK_STR,
                      self.buf,
                      ofproto.OFP_MULTIPART_REQUEST_SIZE,
                      self.port_no, self.queue_id)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_QUEUE_DESC, OFPQueueDesc)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPQueueDescStatsReply(OFPMultipartReply):
    """Reply; body is a list of OFPQueueDesc."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPQueueDescStatsReply, self).__init__(datapath, **kwargs)
class OFPQueueStatsProp(OFPPropBase):
    """Base class for queue-stats properties; subclass registry."""
    _TYPES = {}
@OFPQueueStatsProp.register_type(ofproto.OFPQSPT_EXPERIMENTER)
class OFPQueueStatsPropExperimenter(OFPPropCommonExperimenter4ByteData):
    """Experimenter queue-stats property; payload handled by base."""
    pass
class OFPQueueStats(StringifyMixin):
    """Body entry of an OFPMP_QUEUE_STATS reply."""
    def __init__(self, length=None, port_no=None, queue_id=None,
                 tx_bytes=None, tx_packets=None, tx_errors=None,
                 duration_sec=None, duration_nsec=None, properties=None):
        super(OFPQueueStats, self).__init__()
        self.length = length
        self.port_no = port_no
        self.queue_id = queue_id
        self.tx_bytes = tx_bytes
        self.tx_packets = tx_packets
        self.tx_errors = tx_errors
        self.duration_sec = duration_sec
        self.duration_nsec = duration_nsec
        self.properties = properties
    @classmethod
    def parser(cls, buf, offset):
        """Parse one ofp_queue_stats structure at *offset* in *buf*."""
        (length, port_no, queue_id, tx_bytes, tx_packets, tx_errors,
         duration_sec, duration_nsec) = struct.unpack_from(
            ofproto.OFP_QUEUE_STATS_PACK_STR, buf, offset)
        props = []
        # Properties fill the remainder of the structure, up to 'length'.
        rest = buf[offset + ofproto.OFP_QUEUE_STATS_SIZE:offset + length]
        while rest:
            p, rest = OFPQueueStatsProp.parse(rest)
            props.append(p)
        stats = cls(length, port_no, queue_id, tx_bytes, tx_packets, tx_errors,
                    duration_sec, duration_nsec, props)
        return stats
@_set_stats_type(ofproto.OFPMP_QUEUE_STATS, OFPQueueStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPQueueStatsRequest(OFPMultipartRequest):
    """OFPMP_QUEUE_STATS request, filtered by port and queue id."""
    def __init__(self, datapath, flags=0, port_no=ofproto.OFPP_ANY,
                 queue_id=ofproto.OFPQ_ALL, type_=None):
        super(OFPQueueStatsRequest, self).__init__(datapath, flags)
        self.port_no = port_no
        self.queue_id = queue_id
    def _serialize_stats_body(self):
        msg_pack_into(ofproto.OFP_QUEUE_STATS_REQUEST_PACK_STR,
                      self.buf,
                      ofproto.OFP_MULTIPART_REQUEST_SIZE,
                      self.port_no, self.queue_id)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_QUEUE_STATS, OFPQueueStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPQueueStatsReply(OFPMultipartReply):
    """Reply; body is a list of OFPQueueStats."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPQueueStatsReply, self).__init__(datapath, **kwargs)
class OFPBucketCounter(StringifyMixin):
    """Per-bucket packet/byte counters inside group statistics."""
    def __init__(self, packet_count, byte_count):
        super(OFPBucketCounter, self).__init__()
        self.packet_count = packet_count
        self.byte_count = byte_count
    @classmethod
    def parser(cls, buf, offset):
        """Parse one ofp_bucket_counter structure at *offset* in *buf*."""
        packet_count, byte_count = struct.unpack_from(
            ofproto.OFP_BUCKET_COUNTER_PACK_STR, buf, offset)
        return cls(packet_count, byte_count)
class OFPGroupStats(StringifyMixin):
    """Body entry of an OFPMP_GROUP reply."""
    def __init__(self, length=None, group_id=None, ref_count=None,
                 packet_count=None, byte_count=None, duration_sec=None,
                 duration_nsec=None, bucket_stats=None):
        super(OFPGroupStats, self).__init__()
        self.length = length
        self.group_id = group_id
        self.ref_count = ref_count
        self.packet_count = packet_count
        self.byte_count = byte_count
        self.duration_sec = duration_sec
        self.duration_nsec = duration_nsec
        self.bucket_stats = bucket_stats
    @classmethod
    def parser(cls, buf, offset):
        """Parse fixed fields, then the trailing bucket counters."""
        group = struct.unpack_from(ofproto.OFP_GROUP_STATS_PACK_STR,
                                   buf, offset)
        group_stats = cls(*group)
        group_stats.bucket_stats = []
        # Bucket counters occupy the rest of the entry, up to 'length'.
        total_len = group_stats.length + offset
        offset += ofproto.OFP_GROUP_STATS_SIZE
        while total_len > offset:
            b = OFPBucketCounter.parser(buf, offset)
            group_stats.bucket_stats.append(b)
            offset += ofproto.OFP_BUCKET_COUNTER_SIZE
        return group_stats
@_set_stats_type(ofproto.OFPMP_GROUP, OFPGroupStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPGroupStatsRequest(OFPMultipartRequest):
    """OFPMP_GROUP request, filtered by group id."""
    def __init__(self, datapath, flags=0, group_id=ofproto.OFPG_ALL,
                 type_=None):
        super(OFPGroupStatsRequest, self).__init__(datapath, flags)
        self.group_id = group_id
    def _serialize_stats_body(self):
        msg_pack_into(ofproto.OFP_GROUP_STATS_REQUEST_PACK_STR,
                      self.buf,
                      ofproto.OFP_MULTIPART_REQUEST_SIZE,
                      self.group_id)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_GROUP, OFPGroupStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPGroupStatsReply(OFPMultipartReply):
    """Reply; body is a list of OFPGroupStats."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPGroupStatsReply, self).__init__(datapath, **kwargs)
class OFPGroupDescStats(StringifyMixin):
    """Body entry of an OFPMP_GROUP_DESC reply."""
    def __init__(self, type_=None, group_id=None, buckets=None, length=None):
        super(OFPGroupDescStats, self).__init__()
        self.type = type_
        self.group_id = group_id
        self.buckets = buckets
        # Bug fix: the 'length' argument was accepted but never stored;
        # parser() assigned self.length directly, leaving instances
        # built via __init__ without the attribute.
        self.length = length
    @classmethod
    def parser(cls, buf, offset):
        """Parse fixed fields, then the trailing list of buckets."""
        stats = cls()
        (stats.length, stats.type, stats.group_id) = struct.unpack_from(
            ofproto.OFP_GROUP_DESC_STATS_PACK_STR, buf, offset)
        offset += ofproto.OFP_GROUP_DESC_STATS_SIZE
        stats.buckets = []
        # Buckets occupy the rest of the entry, up to stats.length.
        length = ofproto.OFP_GROUP_DESC_STATS_SIZE
        while length < stats.length:
            bucket = OFPBucket.parser(buf, offset)
            stats.buckets.append(bucket)
            offset += bucket.len
            length += bucket.len
        return stats
@_set_stats_type(ofproto.OFPMP_GROUP_DESC, OFPGroupDescStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPGroupDescStatsRequest(OFPMultipartRequest):
    """OFPMP_GROUP_DESC multipart request; carries no body."""
    def __init__(self, datapath, flags=0, type_=None):
        super(OFPGroupDescStatsRequest, self).__init__(datapath, flags)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_GROUP_DESC, OFPGroupDescStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPGroupDescStatsReply(OFPMultipartReply):
    """Reply; body is a list of OFPGroupDescStats."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPGroupDescStatsReply, self).__init__(datapath, **kwargs)
class OFPGroupFeaturesStats(ofproto_parser.namedtuple('OFPGroupFeaturesStats',
                            ('types', 'capabilities', 'max_groups',
                             'actions'))):
    """Body of an OFPMP_GROUP_FEATURES reply (single structure)."""
    @classmethod
    def parser(cls, buf, offset):
        """Parse the ofp_group_features structure at *offset* in *buf*."""
        group_features = struct.unpack_from(
            ofproto.OFP_GROUP_FEATURES_PACK_STR, buf, offset)
        types = group_features[0]
        capabilities = group_features[1]
        # max_groups and actions are per-group-type arrays of 4 values.
        max_groups = list(group_features[2:6])
        actions = list(group_features[6:10])
        stats = cls(types, capabilities, max_groups, actions)
        stats.length = ofproto.OFP_GROUP_FEATURES_SIZE
        return stats
@_set_stats_type(ofproto.OFPMP_GROUP_FEATURES, OFPGroupFeaturesStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPGroupFeaturesStatsRequest(OFPMultipartRequest):
    """OFPMP_GROUP_FEATURES multipart request; carries no body."""
    def __init__(self, datapath, flags=0, type_=None):
        super(OFPGroupFeaturesStatsRequest, self).__init__(datapath, flags)
@OFPMultipartReply.register_stats_type(body_single_struct=True)
@_set_stats_type(ofproto.OFPMP_GROUP_FEATURES, OFPGroupFeaturesStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPGroupFeaturesStatsReply(OFPMultipartReply):
    """Reply; body is a single OFPGroupFeaturesStats."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPGroupFeaturesStatsReply, self).__init__(datapath, **kwargs)
class OFPMeterBandStats(StringifyMixin):
    """Per-band packet/byte counters inside meter statistics."""
    def __init__(self, packet_band_count, byte_band_count):
        super(OFPMeterBandStats, self).__init__()
        self.packet_band_count = packet_band_count
        self.byte_band_count = byte_band_count
    @classmethod
    def parser(cls, buf, offset):
        """Parse one ofp_meter_band_stats structure at *offset*."""
        band_stats = struct.unpack_from(
            ofproto.OFP_METER_BAND_STATS_PACK_STR, buf, offset)
        return cls(*band_stats)
class OFPMeterStats(StringifyMixin):
    """Body entry of an OFPMP_METER reply."""
    def __init__(self, meter_id=None, flow_count=None, packet_in_count=None,
                 byte_in_count=None, duration_sec=None, duration_nsec=None,
                 band_stats=None, len_=None):
        super(OFPMeterStats, self).__init__()
        self.meter_id = meter_id
        # Bug fix: the 'len_' argument used to be ignored and self.len
        # was unconditionally set to 0.  Keep 0 as the default so
        # OFPMeterStats() behaves as before.
        self.len = len_ if len_ is not None else 0
        self.flow_count = flow_count
        self.packet_in_count = packet_in_count
        self.byte_in_count = byte_in_count
        self.duration_sec = duration_sec
        self.duration_nsec = duration_nsec
        self.band_stats = band_stats
    @classmethod
    def parser(cls, buf, offset):
        """Parse fixed fields, then the trailing band-stats entries."""
        meter_stats = cls()
        (meter_stats.meter_id, meter_stats.len,
         meter_stats.flow_count, meter_stats.packet_in_count,
         meter_stats.byte_in_count, meter_stats.duration_sec,
         meter_stats.duration_nsec) = struct.unpack_from(
            ofproto.OFP_METER_STATS_PACK_STR, buf, offset)
        offset += ofproto.OFP_METER_STATS_SIZE
        meter_stats.band_stats = []
        # Band stats occupy the rest of the entry, up to meter_stats.len.
        length = ofproto.OFP_METER_STATS_SIZE
        while length < meter_stats.len:
            band_stats = OFPMeterBandStats.parser(buf, offset)
            meter_stats.band_stats.append(band_stats)
            offset += ofproto.OFP_METER_BAND_STATS_SIZE
            length += ofproto.OFP_METER_BAND_STATS_SIZE
        return meter_stats
@_set_stats_type(ofproto.OFPMP_METER, OFPMeterStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPMeterStatsRequest(OFPMultipartRequest):
    """OFPMP_METER request, filtered by meter id."""
    def __init__(self, datapath, flags=0, meter_id=ofproto.OFPM_ALL,
                 type_=None):
        super(OFPMeterStatsRequest, self).__init__(datapath, flags)
        self.meter_id = meter_id
    def _serialize_stats_body(self):
        msg_pack_into(ofproto.OFP_METER_MULTIPART_REQUEST_PACK_STR,
                      self.buf,
                      ofproto.OFP_MULTIPART_REQUEST_SIZE,
                      self.meter_id)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_METER, OFPMeterStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPMeterStatsReply(OFPMultipartReply):
    """Reply; body is a list of OFPMeterStats."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPMeterStatsReply, self).__init__(datapath, **kwargs)
class OFPMeterBand(StringifyMixin):
    """Common base holding a meter band's type and wire length."""
    def __init__(self, type_, len_):
        super(OFPMeterBand, self).__init__()
        self.type = type_
        self.len = len_
class OFPMeterBandHeader(OFPMeterBand):
    """Meter band base with a registry keyed on the band type code."""
    # Maps OFPMBT_* type code -> registered band class.
    _METER_BAND = {}
    @staticmethod
    def register_meter_band_type(type_, len_):
        """Return a decorator registering a band class for *type_*."""
        def _register_meter_band_type(cls):
            OFPMeterBandHeader._METER_BAND[type_] = cls
            cls.cls_meter_band_type = type_
            cls.cls_meter_band_len = len_
            return cls
        return _register_meter_band_type
    def __init__(self):
        # Type and length come from the registered class attributes.
        cls = self.__class__
        super(OFPMeterBandHeader, self).__init__(cls.cls_meter_band_type,
                                                 cls.cls_meter_band_len)
    @classmethod
    def parser(cls, buf, offset):
        """Peek at the band header and dispatch to the registered class."""
        type_, len_, _rate, _burst_size = struct.unpack_from(
            ofproto.OFP_METER_BAND_HEADER_PACK_STR, buf, offset)
        cls_ = cls._METER_BAND[type_]
        assert cls_.cls_meter_band_len == len_
        return cls_.parser(buf, offset)
@OFPMeterBandHeader.register_meter_band_type(
    ofproto.OFPMBT_DROP, ofproto.OFP_METER_BAND_DROP_SIZE)
class OFPMeterBandDrop(OFPMeterBandHeader):
    """OFPMBT_DROP band: drop packets above the rate."""
    def __init__(self, rate=0, burst_size=0, type_=None, len_=None):
        super(OFPMeterBandDrop, self).__init__()
        self.rate = rate
        self.burst_size = burst_size
    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_METER_BAND_DROP_PACK_STR, buf, offset,
                      self.type, self.len, self.rate, self.burst_size)
    @classmethod
    def parser(cls, buf, offset):
        """Parse one drop band at *offset* in *buf*."""
        type_, len_, rate, burst_size = struct.unpack_from(
            ofproto.OFP_METER_BAND_DROP_PACK_STR, buf, offset)
        assert cls.cls_meter_band_type == type_
        assert cls.cls_meter_band_len == len_
        return cls(rate, burst_size)
@OFPMeterBandHeader.register_meter_band_type(
    ofproto.OFPMBT_DSCP_REMARK,
    ofproto.OFP_METER_BAND_DSCP_REMARK_SIZE)
class OFPMeterBandDscpRemark(OFPMeterBandHeader):
    """OFPMBT_DSCP_REMARK band: remark DSCP above the rate."""
    def __init__(self, rate=0, burst_size=0, prec_level=0,
                 type_=None, len_=None):
        super(OFPMeterBandDscpRemark, self).__init__()
        self.rate = rate
        self.burst_size = burst_size
        self.prec_level = prec_level
    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_METER_BAND_DSCP_REMARK_PACK_STR, buf,
                      offset, self.type, self.len, self.rate,
                      self.burst_size, self.prec_level)
    @classmethod
    def parser(cls, buf, offset):
        """Parse one DSCP-remark band at *offset* in *buf*."""
        type_, len_, rate, burst_size, prec_level = struct.unpack_from(
            ofproto.OFP_METER_BAND_DSCP_REMARK_PACK_STR, buf, offset)
        assert cls.cls_meter_band_type == type_
        assert cls.cls_meter_band_len == len_
        return cls(rate, burst_size, prec_level)
@OFPMeterBandHeader.register_meter_band_type(
    ofproto.OFPMBT_EXPERIMENTER,
    ofproto.OFP_METER_BAND_EXPERIMENTER_SIZE)
class OFPMeterBandExperimenter(OFPMeterBandHeader):
    """OFPMBT_EXPERIMENTER band: experimenter-defined behavior."""
    def __init__(self, rate=0, burst_size=0, experimenter=None,
                 type_=None, len_=None):
        super(OFPMeterBandExperimenter, self).__init__()
        self.rate = rate
        self.burst_size = burst_size
        self.experimenter = experimenter
    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_METER_BAND_EXPERIMENTER_PACK_STR, buf,
                      offset, self.type, self.len, self.rate,
                      self.burst_size, self.experimenter)
    @classmethod
    def parser(cls, buf, offset):
        """Parse one experimenter band at *offset* in *buf*."""
        type_, len_, rate, burst_size, experimenter = struct.unpack_from(
            ofproto.OFP_METER_BAND_EXPERIMENTER_PACK_STR, buf, offset)
        assert cls.cls_meter_band_type == type_
        assert cls.cls_meter_band_len == len_
        return cls(rate, burst_size, experimenter)
class OFPMeterConfigStats(StringifyMixin):
    """Body entry of an OFPMP_METER_CONFIG reply."""
    def __init__(self, flags=None, meter_id=None, bands=None, length=None):
        super(OFPMeterConfigStats, self).__init__()
        # Bug fix: the 'length' argument used to be silently discarded
        # (self.length was unconditionally set to None).
        self.length = length
        self.flags = flags
        self.meter_id = meter_id
        self.bands = bands
    @classmethod
    def parser(cls, buf, offset):
        """Parse fixed fields, then the trailing list of meter bands."""
        meter_config = cls()
        (meter_config.length, meter_config.flags,
         meter_config.meter_id) = struct.unpack_from(
            ofproto.OFP_METER_CONFIG_PACK_STR, buf, offset)
        offset += ofproto.OFP_METER_CONFIG_SIZE
        meter_config.bands = []
        # Bands occupy the rest of the entry, up to meter_config.length.
        length = ofproto.OFP_METER_CONFIG_SIZE
        while length < meter_config.length:
            band = OFPMeterBandHeader.parser(buf, offset)
            meter_config.bands.append(band)
            offset += band.len
            length += band.len
        return meter_config
@_set_stats_type(ofproto.OFPMP_METER_CONFIG, OFPMeterConfigStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPMeterConfigStatsRequest(OFPMultipartRequest):
    """OFPMP_METER_CONFIG request, filtered by meter id."""
    def __init__(self, datapath, flags=0, meter_id=ofproto.OFPM_ALL,
                 type_=None):
        super(OFPMeterConfigStatsRequest, self).__init__(datapath, flags)
        self.meter_id = meter_id
    def _serialize_stats_body(self):
        msg_pack_into(ofproto.OFP_METER_MULTIPART_REQUEST_PACK_STR,
                      self.buf,
                      ofproto.OFP_MULTIPART_REQUEST_SIZE,
                      self.meter_id)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_METER_CONFIG, OFPMeterConfigStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPMeterConfigStatsReply(OFPMultipartReply):
    """Reply; body is a list of OFPMeterConfigStats."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPMeterConfigStatsReply, self).__init__(datapath, **kwargs)
class OFPMeterFeaturesStats(ofproto_parser.namedtuple('OFPMeterFeaturesStats',
                            ('max_meter', 'band_types', 'capabilities',
                             'max_bands', 'max_color'))):
    """Body of an OFPMP_METER_FEATURES reply (single structure)."""
    @classmethod
    def parser(cls, buf, offset):
        """Parse the ofp_meter_features structure at *offset* in *buf*."""
        meter_features = struct.unpack_from(
            ofproto.OFP_METER_FEATURES_PACK_STR, buf, offset)
        stats = cls(*meter_features)
        stats.length = ofproto.OFP_METER_FEATURES_SIZE
        return stats
@_set_stats_type(ofproto.OFPMP_METER_FEATURES, OFPMeterFeaturesStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPMeterFeaturesStatsRequest(OFPMultipartRequest):
    """OFPMP_METER_FEATURES multipart request; carries no body."""
    def __init__(self, datapath, flags=0, type_=None):
        super(OFPMeterFeaturesStatsRequest, self).__init__(datapath, flags)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_METER_FEATURES, OFPMeterFeaturesStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPMeterFeaturesStatsReply(OFPMultipartReply):
    """Reply; body is a list of OFPMeterFeaturesStats."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPMeterFeaturesStatsReply, self).__init__(datapath, **kwargs)
class OFPFlowUpdate(StringifyMixin):
    """Common base holding a flow-monitor update's length and event."""
    def __init__(self, length, event):
        super(OFPFlowUpdate, self).__init__()
        self.length = length
        self.event = event
class OFPFlowUpdateHeader(OFPFlowUpdate):
    """Flow-update base with a registry keyed on the event code."""
    # Maps OFPFME_* event code -> registered update class.
    _EVENT = {}
    @staticmethod
    def register_flow_update_event(event, length):
        """Return a decorator registering an update class for *event*."""
        def _register_flow_update_event(cls):
            OFPFlowUpdateHeader._EVENT[event] = cls
            cls.cls_flow_update_event = event
            cls.cls_flow_update_length = length
            return cls
        return _register_flow_update_event
    def __init__(self, length=None, event=None):
        # The event comes from the registered class attribute.
        cls = self.__class__
        super(OFPFlowUpdateHeader, self).__init__(length,
                                                  cls.cls_flow_update_event)
        self.length = length
    @classmethod
    def parser(cls, buf, offset):
        """Peek at the update header and dispatch on the event code."""
        length, event = struct.unpack_from(
            ofproto.OFP_FLOW_UPDATE_HEADER_PACK_STR, buf, offset)
        cls_ = cls._EVENT[event]
        return cls_.parser(buf, offset)
@OFPFlowUpdateHeader.register_flow_update_event(
    ofproto.OFPFME_INITIAL, ofproto.OFP_FLOW_UPDATE_FULL_SIZE)
@OFPFlowUpdateHeader.register_flow_update_event(
    ofproto.OFPFME_ADDED, ofproto.OFP_FLOW_UPDATE_FULL_SIZE)
@OFPFlowUpdateHeader.register_flow_update_event(
    ofproto.OFPFME_REMOVED, ofproto.OFP_FLOW_UPDATE_FULL_SIZE)
@OFPFlowUpdateHeader.register_flow_update_event(
    ofproto.OFPFME_MODIFIED, ofproto.OFP_FLOW_UPDATE_FULL_SIZE)
class OFPFlowUpdateFull(OFPFlowUpdateHeader):
    """Full flow-monitor update carrying the whole flow entry."""
    def __init__(self, length=None, event=None, table_id=None, reason=None,
                 idle_timeout=None, hard_timeout=None, priority=None,
                 cookie=None, match=None, instructions=None):
        instructions = instructions if instructions else []
        super(OFPFlowUpdateFull, self).__init__(length, event)
        self.table_id = table_id
        self.reason = reason
        self.idle_timeout = idle_timeout
        self.hard_timeout = hard_timeout
        self.priority = priority
        self.cookie = cookie
        self.match = match
        # REMOVED updates never carry instructions.
        assert (event != ofproto.OFPFME_REMOVED or len(instructions) == 0)
        for i in instructions:
            assert isinstance(i, OFPInstruction)
        self.instructions = instructions
    @classmethod
    def parser(cls, buf, offset):
        """Parse fixed fields, the match, then trailing instructions."""
        (length, event, table_id, reason, idle_timeout, hard_timeout, priority,
         cookie) = struct.unpack_from(ofproto.OFP_FLOW_UPDATE_FULL_0_PACK_STR,
                                      buf, offset)
        offset += ofproto.OFP_FLOW_UPDATE_FULL_0_SIZE
        assert cls.cls_flow_update_length <= length
        assert cls.cls_flow_update_event == event
        match = OFPMatch.parser(buf, offset)
        # The match is padded to an 8-byte boundary on the wire.
        match_length = utils.round_up(match.length, 8)
        offset += match_length
        inst_length = (length - ofproto.OFP_FLOW_UPDATE_FULL_0_SIZE -
                       match_length)
        instructions = []
        while inst_length > 0:
            inst = OFPInstruction.parser(buf, offset)
            instructions.append(inst)
            offset += inst.len
            inst_length -= inst.len
        return cls(length, event, table_id, reason, idle_timeout,
                   hard_timeout, priority, cookie, match, instructions)
@OFPFlowUpdateHeader.register_flow_update_event(
    ofproto.OFPFME_ABBREV, ofproto.OFP_FLOW_UPDATE_ABBREV_SIZE)
class OFPFlowUpdateAbbrev(OFPFlowUpdateHeader):
    """Abbreviated flow-update entry carrying only the triggering xid."""

    def __init__(self, length=None, event=None, xid=None):
        super(OFPFlowUpdateAbbrev, self).__init__(length, event)
        self.xid = xid

    @classmethod
    def parser(cls, buf, offset):
        """Unpack an abbreviated update and sanity-check its header."""
        fields = struct.unpack_from(
            ofproto.OFP_FLOW_UPDATE_ABBREV_PACK_STR, buf, offset)
        assert cls.cls_flow_update_length == fields[0]
        assert cls.cls_flow_update_event == fields[1]
        return cls(*fields)
@OFPFlowUpdateHeader.register_flow_update_event(
    ofproto.OFPFME_PAUSED, ofproto.OFP_FLOW_UPDATE_PAUSED_SIZE)
@OFPFlowUpdateHeader.register_flow_update_event(
    ofproto.OFPFME_RESUMED, ofproto.OFP_FLOW_UPDATE_PAUSED_SIZE)
class OFPFlowUpdatePaused(OFPFlowUpdateHeader):
    """Paused/resumed flow-update entry; header only, no body."""

    @classmethod
    def parser(cls, buf, offset):
        """Unpack a paused/resumed update and sanity-check its header."""
        length, event = struct.unpack_from(
            ofproto.OFP_FLOW_UPDATE_PAUSED_PACK_STR, buf, offset)
        assert cls.cls_flow_update_length == length
        # BUG FIX: both OFPFME_PAUSED and OFPFME_RESUMED are registered on
        # this class, but each registration overwrites
        # cls_flow_update_event, leaving only OFPFME_PAUSED (decorators
        # apply bottom-up).  The old equality assert rejected valid
        # RESUMED updates; accept either event.
        assert event in (ofproto.OFPFME_PAUSED, ofproto.OFPFME_RESUMED)
        return cls(length, event)
class OFPFlowMonitorRequestBase(OFPMultipartRequest):
    """Common state and serialization for flow-monitor requests."""
    def __init__(self, datapath, flags, monitor_id, out_port, out_group,
                 monitor_flags, table_id, command, match):
        super(OFPFlowMonitorRequestBase, self).__init__(datapath, flags)
        self.monitor_id = monitor_id
        self.out_port = out_port
        self.out_group = out_group
        self.monitor_flags = monitor_flags
        self.table_id = table_id
        self.command = command
        self.match = match
    def _serialize_stats_body(self):
        # Fixed-size request header first, then the variable-size match.
        offset = ofproto.OFP_MULTIPART_REQUEST_SIZE
        msg_pack_into(ofproto.OFP_FLOW_MONITOR_REQUEST_0_PACK_STR, self.buf,
                      offset, self.monitor_id, self.out_port, self.out_group,
                      self.monitor_flags, self.table_id, self.command)
        offset += ofproto.OFP_FLOW_MONITOR_REQUEST_0_SIZE
        self.match.serialize(self.buf, offset)
@_set_stats_type(ofproto.OFPMP_FLOW_MONITOR, OFPFlowUpdateHeader)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPFlowMonitorRequest(OFPFlowMonitorRequestBase):
    """Flow monitor request message; defaults match all tables/ports."""
    def __init__(self, datapath, flags=0, monitor_id=0,
                 out_port=ofproto.OFPP_ANY, out_group=ofproto.OFPG_ANY,
                 monitor_flags=0, table_id=ofproto.OFPTT_ALL,
                 command=ofproto.OFPFMC_ADD, match=None, type_=None):
        # Default to a wildcard match.
        if match is None:
            match = OFPMatch()
        super(OFPFlowMonitorRequest, self).__init__(datapath, flags,
                                                    monitor_id, out_port,
                                                    out_group, monitor_flags,
                                                    table_id, command, match)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_FLOW_MONITOR, OFPFlowUpdateHeader)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPFlowMonitorReply(OFPMultipartReply):
    """Flow monitor reply; body is a list of OFPFlowUpdateHeader entries."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPFlowMonitorReply, self).__init__(datapath, **kwargs)
class OFPExperimenterMultipart(ofproto_parser.namedtuple(
                               'OFPExperimenterMultipart',
                               ('experimenter', 'exp_type', 'data'))):
    """Body of an OFPMP_EXPERIMENTER multipart message."""

    @classmethod
    def parser(cls, buf, offset):
        """Unpack the experimenter header; the rest of *buf* is the data."""
        args = struct.unpack_from(
            ofproto.OFP_EXPERIMENTER_MULTIPART_HEADER_PACK_STR, buf,
            offset)
        args = list(args)
        args.append(buf[offset +
                        ofproto.OFP_EXPERIMENTER_MULTIPART_HEADER_SIZE:])
        stats = cls(*args)
        # BUG FIX: length was copy-pasted from OFPMeterFeaturesStats as
        # OFP_METER_FEATURES_SIZE; report the actual size of this body
        # (fixed experimenter header plus trailing data) instead.
        stats.length = (ofproto.OFP_EXPERIMENTER_MULTIPART_HEADER_SIZE +
                        len(stats.data))
        return stats

    def serialize(self):
        """Return the wire form: packed header followed by the raw data."""
        buf = bytearray()
        msg_pack_into(ofproto.OFP_EXPERIMENTER_MULTIPART_HEADER_PACK_STR,
                      buf, 0,
                      self.experimenter, self.exp_type)
        return buf + self.data
class OFPExperimenterStatsRequestBase(OFPMultipartRequest):
    """Common state for experimenter multipart requests."""
    def __init__(self, datapath, flags,
                 experimenter, exp_type,
                 type_=None):
        super(OFPExperimenterStatsRequestBase, self).__init__(datapath, flags)
        self.experimenter = experimenter  # experimenter (vendor) ID
        self.exp_type = exp_type          # experimenter-defined subtype
@_set_stats_type(ofproto.OFPMP_EXPERIMENTER, OFPExperimenterMultipart)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPExperimenterStatsRequest(OFPExperimenterStatsRequestBase):
    """Experimenter multipart request carrying opaque *data*."""
    def __init__(self, datapath, flags,
                 experimenter, exp_type, data,
                 type_=None):
        super(OFPExperimenterStatsRequest, self).__init__(datapath, flags,
                                                          experimenter,
                                                          exp_type, type_)
        self.data = data
    def _serialize_stats_body(self):
        # Delegate body layout to OFPExperimenterMultipart.serialize().
        body = OFPExperimenterMultipart(experimenter=self.experimenter,
                                        exp_type=self.exp_type,
                                        data=self.data)
        self.buf += body.serialize()
@OFPMultipartReply.register_stats_type(body_single_struct=True)
@_set_stats_type(ofproto.OFPMP_EXPERIMENTER, OFPExperimenterMultipart)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPExperimenterStatsReply(OFPMultipartReply):
    """Experimenter multipart reply; body is one OFPExperimenterMultipart."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPExperimenterStatsReply, self).__init__(datapath, **kwargs)
class OFPFlowStats(StringifyMixin):
    """One flow entry in an OFPMP_FLOW multipart reply body."""
    def __init__(self, table_id=None, duration_sec=None, duration_nsec=None,
                 priority=None, idle_timeout=None, hard_timeout=None,
                 flags=None, importance=None, cookie=None, packet_count=None,
                 byte_count=None, match=None, instructions=None,
                 length=None):
        super(OFPFlowStats, self).__init__()
        self.table_id = table_id
        self.duration_sec = duration_sec
        self.duration_nsec = duration_nsec
        self.priority = priority
        self.idle_timeout = idle_timeout
        self.hard_timeout = hard_timeout
        self.flags = flags
        self.importance = importance
        self.cookie = cookie
        self.packet_count = packet_count
        self.byte_count = byte_count
        self.match = match
        self.instructions = instructions
        self.length = length
    @classmethod
    def parser(cls, buf, offset):
        # Decode one flow-stats entry: fixed fields, then match, then
        # the instruction list that fills the remainder of the entry.
        flow_stats = cls()
        (flow_stats.length, flow_stats.table_id,
         flow_stats.duration_sec, flow_stats.duration_nsec,
         flow_stats.priority, flow_stats.idle_timeout,
         flow_stats.hard_timeout, flow_stats.flags,
         flow_stats.importance, flow_stats.cookie,
         flow_stats.packet_count,
         flow_stats.byte_count) = struct.unpack_from(
            ofproto.OFP_FLOW_STATS_0_PACK_STR, buf, offset)
        offset += ofproto.OFP_FLOW_STATS_0_SIZE
        flow_stats.match = OFPMatch.parser(buf, offset)
        # The match is padded to an 8-byte boundary on the wire.
        match_length = utils.round_up(flow_stats.match.length, 8)
        inst_length = (flow_stats.length - (ofproto.OFP_FLOW_STATS_SIZE -
                                            ofproto.OFP_MATCH_SIZE +
                                            match_length))
        offset += match_length
        instructions = []
        while inst_length > 0:
            inst = OFPInstruction.parser(buf, offset)
            instructions.append(inst)
            offset += inst.len
            inst_length -= inst.len
        flow_stats.instructions = instructions
        return flow_stats
class OFPFlowStatsRequestBase(OFPMultipartRequest):
    """Common filtering fields and serialization for flow stats requests."""
    def __init__(self, datapath, flags, table_id, out_port, out_group,
                 cookie, cookie_mask, match):
        super(OFPFlowStatsRequestBase, self).__init__(datapath, flags)
        self.table_id = table_id
        self.out_port = out_port
        self.out_group = out_group
        self.cookie = cookie
        self.cookie_mask = cookie_mask
        self.match = match
    def _serialize_stats_body(self):
        # Fixed request header first, then the variable-size match.
        offset = ofproto.OFP_MULTIPART_REQUEST_SIZE
        msg_pack_into(ofproto.OFP_FLOW_STATS_REQUEST_0_PACK_STR,
                      self.buf, offset, self.table_id, self.out_port,
                      self.out_group, self.cookie, self.cookie_mask)
        offset += ofproto.OFP_FLOW_STATS_REQUEST_0_SIZE
        self.match.serialize(self.buf, offset)
@_set_stats_type(ofproto.OFPMP_FLOW, OFPFlowStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPFlowStatsRequest(OFPFlowStatsRequestBase):
    """Individual flow statistics request; defaults match every flow."""
    def __init__(self, datapath, flags=0, table_id=ofproto.OFPTT_ALL,
                 out_port=ofproto.OFPP_ANY,
                 out_group=ofproto.OFPG_ANY,
                 cookie=0, cookie_mask=0, match=None, type_=None):
        # Default to a wildcard match.
        if match is None:
            match = OFPMatch()
        super(OFPFlowStatsRequest, self).__init__(datapath, flags, table_id,
                                                  out_port, out_group,
                                                  cookie, cookie_mask, match)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_FLOW, OFPFlowStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPFlowStatsReply(OFPMultipartReply):
    """Individual flow statistics reply; body is a list of OFPFlowStats."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPFlowStatsReply, self).__init__(datapath, **kwargs)
class OFPAggregateStats(ofproto_parser.namedtuple('OFPAggregateStats', (
    'packet_count', 'byte_count', 'flow_count'))):
    """Body of an OFPMP_AGGREGATE multipart reply."""

    @classmethod
    def parser(cls, buf, offset):
        """Unpack one ofp_aggregate_stats_reply struct at *offset*."""
        fields = struct.unpack_from(
            ofproto.OFP_AGGREGATE_STATS_REPLY_PACK_STR, buf, offset)
        entry = cls(*fields)
        entry.length = ofproto.OFP_AGGREGATE_STATS_REPLY_SIZE
        return entry
@_set_stats_type(ofproto.OFPMP_AGGREGATE, OFPAggregateStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPAggregateStatsRequest(OFPFlowStatsRequestBase):
    """Aggregate flow statistics request message."""

    def __init__(self, datapath, flags, table_id, out_port, out_group,
                 cookie, cookie_mask, match, type_=None):
        # All filtering arguments are stored by the shared base class.
        super(OFPAggregateStatsRequest, self).__init__(
            datapath, flags, table_id, out_port, out_group,
            cookie, cookie_mask, match)
@OFPMultipartReply.register_stats_type(body_single_struct=True)
@_set_stats_type(ofproto.OFPMP_AGGREGATE, OFPAggregateStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPAggregateStatsReply(OFPMultipartReply):
    """Aggregate flow statistics reply; body is one OFPAggregateStats."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPAggregateStatsReply, self).__init__(datapath, **kwargs)
class OFPTableStats(ofproto_parser.namedtuple('OFPTableStats', (
    'table_id', 'active_count', 'lookup_count',
    'matched_count'))):
    """Body entry of an OFPMP_TABLE multipart reply."""

    @classmethod
    def parser(cls, buf, offset):
        """Unpack one ofp_table_stats struct at *offset*."""
        fields = struct.unpack_from(ofproto.OFP_TABLE_STATS_PACK_STR,
                                    buf, offset)
        entry = cls(*fields)
        entry.length = ofproto.OFP_TABLE_STATS_SIZE
        return entry
@_set_stats_type(ofproto.OFPMP_TABLE, OFPTableStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPTableStatsRequest(OFPMultipartRequest):
    """Table statistics request message; no request-specific body."""
    def __init__(self, datapath, flags, type_=None):
        super(OFPTableStatsRequest, self).__init__(datapath, flags)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_TABLE, OFPTableStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPTableStatsReply(OFPMultipartReply):
    """Table statistics reply; body is a list of OFPTableStats."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPTableStatsReply, self).__init__(datapath, **kwargs)
class OFPPortStatsProp(OFPPropBase):
    """Base class for port-statistics properties."""
    # Registry of property type -> subclass, filled by register_type().
    _TYPES = {}
@OFPPortStatsProp.register_type(ofproto.OFPPSPT_ETHERNET)
class OFPPortStatsPropEthernet(OFPPortStatsProp):
    """Ethernet-specific port statistics property."""

    def __init__(self, type_=None, length=None, rx_frame_err=None,
                 rx_over_err=None, rx_crc_err=None, collisions=None):
        self.type = type_
        self.length = length
        self.rx_frame_err = rx_frame_err
        self.rx_over_err = rx_over_err
        self.rx_crc_err = rx_crc_err
        self.collisions = collisions

    @classmethod
    def parser(cls, buf):
        """Build the property from its wire representation in *buf*."""
        fields = struct.unpack_from(
            ofproto.OFP_PORT_STATS_PROP_ETHERNET_PACK_STR, buf, 0)
        return cls(*fields)
@OFPPortStatsProp.register_type(ofproto.OFPPSPT_OPTICAL)
class OFPPortStatsPropOptical(OFPPortStatsProp):
    """Optical-transceiver port statistics property."""
    def __init__(self, type_=None, length=None, flags=None,
                 tx_freq_lmda=None, tx_offset=None, tx_grid_span=None,
                 rx_freq_lmda=None, rx_offset=None, rx_grid_span=None,
                 tx_pwr=None, rx_pwr=None, bias_current=None,
                 temperature=None):
        self.type = type_
        self.length = length
        self.flags = flags
        self.tx_freq_lmda = tx_freq_lmda
        self.tx_offset = tx_offset
        self.tx_grid_span = tx_grid_span
        self.rx_freq_lmda = rx_freq_lmda
        self.rx_offset = rx_offset
        self.rx_grid_span = rx_grid_span
        self.tx_pwr = tx_pwr
        self.rx_pwr = rx_pwr
        self.bias_current = bias_current
        self.temperature = temperature
    @classmethod
    def parser(cls, buf):
        # Unpack all fields straight onto a fresh instance; field order
        # matches OFP_PORT_STATS_PROP_OPTICAL_PACK_STR.
        optical = cls()
        (optical.type, optical.length, optical.flags,
         optical.tx_freq_lmda, optical.tx_offset, optical.tx_grid_span,
         optical.rx_freq_lmda, optical.rx_offset, optical.rx_grid_span,
         optical.tx_pwr, optical.rx_pwr, optical.bias_current,
         optical.temperature) = struct.unpack_from(
            ofproto.OFP_PORT_STATS_PROP_OPTICAL_PACK_STR, buf, 0)
        return optical
@OFPPortStatsProp.register_type(ofproto.OFPPSPT_EXPERIMENTER)
class OFPPortStatsPropExperimenter(OFPPropCommonExperimenter4ByteData):
    """Experimenter port-statistics property; generic handling inherited."""
    pass
class OFPPortStats(StringifyMixin):
    """One port entry in an OFPMP_PORT_STATS multipart reply body."""
    def __init__(self, length=None, port_no=None, duration_sec=None,
                 duration_nsec=None, rx_packets=None, tx_packets=None,
                 rx_bytes=None, tx_bytes=None, rx_dropped=None,
                 tx_dropped=None, rx_errors=None, tx_errors=None,
                 properties=None):
        super(OFPPortStats, self).__init__()
        self.length = length
        self.port_no = port_no
        self.duration_sec = duration_sec
        self.duration_nsec = duration_nsec
        self.rx_packets = rx_packets
        self.tx_packets = tx_packets
        self.rx_bytes = rx_bytes
        self.tx_bytes = tx_bytes
        self.rx_dropped = rx_dropped
        self.tx_dropped = tx_dropped
        self.rx_errors = rx_errors
        self.tx_errors = tx_errors
        self.properties = properties
    @classmethod
    def parser(cls, buf, offset):
        # Fixed counters first; properties fill the rest of the entry.
        (length, port_no, duration_sec, duration_nsec, rx_packets,
         tx_packets, rx_bytes, tx_bytes, rx_dropped, tx_dropped,
         rx_errors, tx_errors) = struct.unpack_from(
            ofproto.OFP_PORT_STATS_PACK_STR, buf, offset)
        props = []
        rest = buf[offset + ofproto.OFP_PORT_STATS_SIZE:offset + length]
        while rest:
            p, rest = OFPPortStatsProp.parse(rest)
            props.append(p)
        stats = cls(length, port_no, duration_sec, duration_nsec, rx_packets,
                    tx_packets, rx_bytes, tx_bytes, rx_dropped, tx_dropped,
                    rx_errors, tx_errors, props)
        return stats
@_set_stats_type(ofproto.OFPMP_PORT_STATS, OFPPortStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPPortStatsRequest(OFPMultipartRequest):
    """Port statistics request for *port_no* (OFPP_ANY for all ports)."""
    def __init__(self, datapath, flags, port_no, type_=None):
        super(OFPPortStatsRequest, self).__init__(datapath, flags)
        self.port_no = port_no
    def _serialize_stats_body(self):
        msg_pack_into(ofproto.OFP_PORT_STATS_REQUEST_PACK_STR,
                      self.buf,
                      ofproto.OFP_MULTIPART_REQUEST_SIZE,
                      self.port_no)
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_PORT_STATS, OFPPortStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPPortStatsReply(OFPMultipartReply):
    """Port statistics reply; body is a list of OFPPortStats."""
    def __init__(self, datapath, type_=None, **kwargs):
        super(OFPPortStatsReply, self).__init__(datapath, **kwargs)
@_set_msg_type(ofproto.OFPT_BARRIER_REQUEST)
class OFPBarrierRequest(MsgBase):
    """Barrier request message; has no body beyond the common header."""
    def __init__(self, datapath):
        super(OFPBarrierRequest, self).__init__(datapath)
@_register_parser
@_set_msg_type(ofproto.OFPT_BARRIER_REPLY)
class OFPBarrierReply(MsgBase):
    """Barrier reply message; has no body beyond the common header."""
    def __init__(self, datapath):
        super(OFPBarrierReply, self).__init__(datapath)
@_register_parser
@_set_msg_type(ofproto.OFPT_PORT_STATUS)
class OFPPortStatus(MsgBase):
    """Asynchronous port status message."""
    def __init__(self, datapath, reason=None, desc=None):
        super(OFPPortStatus, self).__init__(datapath)
        self.reason = reason  # OFPPR_* reason code
        self.desc = desc      # OFPPort describing the affected port
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPPortStatus, cls).parser(datapath, version, msg_type,
                                               msg_len, xid, buf)
        msg.reason = struct.unpack_from(
            ofproto.OFP_PORT_STATUS_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)[0]
        msg.desc = OFPPort.parser(msg.buf, ofproto.OFP_PORT_STATUS_DESC_OFFSET)
        return msg
@_register_parser
@_set_msg_type(ofproto.OFPT_ROLE_STATUS)
class OFPRoleStatus(MsgBase):
    """Asynchronous controller-role status message."""
    def __init__(self, datapath, role=None, reason=None,
                 generation_id=None, properties=None):
        super(OFPRoleStatus, self).__init__(datapath)
        self.role = role
        self.reason = reason
        self.generation_id = generation_id
        self.properties = properties
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPRoleStatus, cls).parser(datapath, version, msg_type,
                                               msg_len, xid, buf)
        (msg.role, msg.reason, msg.generation_id) = struct.unpack_from(
            ofproto.OFP_ROLE_STATUS_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        # Trailing bytes are a list of role properties.
        msg.properties = []
        rest = msg.buf[ofproto.OFP_ROLE_STATUS_SIZE:]
        while rest:
            p, rest = OFPRoleProp.parse(rest)
            msg.properties.append(p)
        return msg
@_register_parser
@_set_msg_type(ofproto.OFPT_TABLE_STATUS)
class OFPTableStatus(MsgBase):
    """Asynchronous table status message."""
    def __init__(self, datapath, reason=None, table=None):
        super(OFPTableStatus, self).__init__(datapath)
        self.reason = reason  # OFPTR_* reason code
        self.table = table    # OFPTableDesc of the affected table
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPTableStatus, cls).parser(datapath, version, msg_type,
                                                msg_len, xid, buf)
        (msg.reason,) = struct.unpack_from(ofproto.OFP_TABLE_STATUS_0_PACK_STR,
                                           msg.buf, ofproto.OFP_HEADER_SIZE)
        msg.table = OFPTableDesc.parser(msg.buf,
                                        ofproto.OFP_TABLE_STATUS_0_SIZE)
        return msg
@_register_parser
@_set_msg_type(ofproto.OFPT_REQUESTFORWARD)
class OFPRequestForward(MsgInMsgBase):
    """Forwarded-request message: wraps another OpenFlow message."""
    def __init__(self, datapath, request=None):
        super(OFPRequestForward, self).__init__(datapath)
        self.request = request
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        msg = super(OFPRequestForward, cls).parser(
            datapath, version, msg_type, msg_len, xid, buf)
        # The payload after the common header is itself a complete
        # OpenFlow message; parse it recursively.
        req_buf = buf[ofproto.OFP_HEADER_SIZE:]
        (_ver, _type, _len, _xid) = ofproto_parser.header(req_buf)
        msg.request = ofproto_parser.msg(
            datapath, _ver, _type, _len, _xid, req_buf)
        return msg
    def _serialize_body(self):
        # Only group-mod and meter-mod requests may be forwarded.
        assert isinstance(self.request, (OFPGroupMod, OFPMeterMod))
        self.request.serialize()
        self.buf += self.request.buf
@_set_msg_type(ofproto.OFPT_PACKET_OUT)
class OFPPacketOut(MsgBase):
    """Packet-out message.

    Sends either the raw bytes in ``data`` (requires
    ``buffer_id == OFP_NO_BUFFER``) or a packet buffered on the switch,
    processed through ``actions``.
    """

    def __init__(self, datapath, buffer_id=None, in_port=None, actions=None,
                 data=None, actions_len=None):
        # in_port is mandatory for this message.
        assert in_port is not None

        super(OFPPacketOut, self).__init__(datapath)
        self.buffer_id = buffer_id
        self.in_port = in_port
        # actions_len is always recomputed in _serialize_body(); the
        # constructor argument exists only for signature symmetry.
        self.actions_len = 0
        self.actions = actions
        self.data = data

    def _serialize_body(self):
        self.actions_len = 0
        offset = ofproto.OFP_PACKET_OUT_SIZE
        for a in self.actions:
            a.serialize(self.buf, offset)
            offset += a.len
            self.actions_len += a.len

        if self.data is not None:
            # Raw payload is only valid when no switch buffer is
            # referenced.  (Named constant replaces the magic 0xffffffff,
            # matching its use elsewhere in this file.)
            assert self.buffer_id == ofproto.OFP_NO_BUFFER
            self.buf += self.data

        # Header fields are packed last, once actions_len is known.
        msg_pack_into(ofproto.OFP_PACKET_OUT_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE,
                      self.buffer_id, self.in_port, self.actions_len)
@_set_msg_type(ofproto.OFPT_FLOW_MOD)
class OFPFlowMod(MsgBase):
    """Flow table modification message (OFPT_FLOW_MOD)."""
    def __init__(self, datapath, cookie=0, cookie_mask=0, table_id=0,
                 command=ofproto.OFPFC_ADD,
                 idle_timeout=0, hard_timeout=0,
                 priority=ofproto.OFP_DEFAULT_PRIORITY,
                 buffer_id=ofproto.OFP_NO_BUFFER,
                 out_port=0, out_group=0, flags=0, importance=0,
                 match=None,
                 instructions=None):
        # Normalize to a fresh list (avoids sharing a mutable default).
        instructions = instructions if instructions else []
        super(OFPFlowMod, self).__init__(datapath)
        self.cookie = cookie
        self.cookie_mask = cookie_mask
        self.table_id = table_id
        self.command = command
        self.idle_timeout = idle_timeout
        self.hard_timeout = hard_timeout
        self.priority = priority
        self.buffer_id = buffer_id
        self.out_port = out_port
        self.out_group = out_group
        self.flags = flags
        self.importance = importance
        if match is None:
            match = OFPMatch()
        assert isinstance(match, OFPMatch)
        self.match = match
        for i in instructions:
            assert isinstance(i, OFPInstruction)
        self.instructions = instructions
    def _serialize_body(self):
        msg_pack_into(ofproto.OFP_FLOW_MOD_PACK_STR0, self.buf,
                      ofproto.OFP_HEADER_SIZE,
                      self.cookie, self.cookie_mask, self.table_id,
                      self.command, self.idle_timeout, self.hard_timeout,
                      self.priority, self.buffer_id, self.out_port,
                      self.out_group, self.flags, self.importance)
        # The fixed part ends where the embedded match begins.
        offset = (ofproto.OFP_FLOW_MOD_SIZE -
                  ofproto.OFP_MATCH_SIZE)
        match_len = self.match.serialize(self.buf, offset)
        offset += match_len
        for inst in self.instructions:
            inst.serialize(self.buf, offset)
            offset += inst.len
class OFPInstruction(StringifyMixin):
    """Base class and parser registry for OpenFlow instructions."""

    _INSTRUCTION_TYPES = {}

    @staticmethod
    def register_instruction_type(types):
        # Class decorator: register *cls* for every OFPIT_* code in *types*.
        def _register_instruction_type(cls):
            for type_ in types:
                OFPInstruction._INSTRUCTION_TYPES[type_] = cls
            return cls
        return _register_instruction_type

    @classmethod
    def parser(cls, buf, offset):
        """Peek at the (type, len) header and delegate to the subclass."""
        (type_, len_) = struct.unpack_from('!HH', buf, offset)
        cls_ = cls._INSTRUCTION_TYPES.get(type_)
        # Fail loudly on an unregistered instruction type instead of the
        # obscure AttributeError that calling .parser on None would raise
        # (mirrors the equivalent check in OFPAction.parser).
        assert cls_ is not None
        return cls_.parser(buf, offset)
@OFPInstruction.register_instruction_type([ofproto.OFPIT_GOTO_TABLE])
class OFPInstructionGotoTable(OFPInstruction):
    """Goto-table instruction (OFPIT_GOTO_TABLE)."""

    def __init__(self, table_id, type_=None, len_=None):
        super(OFPInstructionGotoTable, self).__init__()
        self.type = ofproto.OFPIT_GOTO_TABLE
        self.len = ofproto.OFP_INSTRUCTION_GOTO_TABLE_SIZE
        self.table_id = table_id

    @classmethod
    def parser(cls, buf, offset):
        """Decode the instruction; only table_id is variable."""
        fields = struct.unpack_from(
            ofproto.OFP_INSTRUCTION_GOTO_TABLE_PACK_STR, buf, offset)
        return cls(fields[2])

    def serialize(self, buf, offset):
        """Pack the instruction into *buf* at *offset*."""
        msg_pack_into(ofproto.OFP_INSTRUCTION_GOTO_TABLE_PACK_STR, buf,
                      offset, self.type, self.len, self.table_id)
@OFPInstruction.register_instruction_type([ofproto.OFPIT_WRITE_METADATA])
class OFPInstructionWriteMetadata(OFPInstruction):
    """Write-metadata instruction (OFPIT_WRITE_METADATA)."""

    def __init__(self, metadata, metadata_mask, type_=None, len_=None):
        super(OFPInstructionWriteMetadata, self).__init__()
        self.type = ofproto.OFPIT_WRITE_METADATA
        self.len = ofproto.OFP_INSTRUCTION_WRITE_METADATA_SIZE
        self.metadata = metadata
        self.metadata_mask = metadata_mask

    @classmethod
    def parser(cls, buf, offset):
        """Decode the instruction; keeps metadata and its mask."""
        fields = struct.unpack_from(
            ofproto.OFP_INSTRUCTION_WRITE_METADATA_PACK_STR, buf, offset)
        return cls(fields[2], fields[3])

    def serialize(self, buf, offset):
        """Pack the instruction into *buf* at *offset*."""
        msg_pack_into(ofproto.OFP_INSTRUCTION_WRITE_METADATA_PACK_STR, buf,
                      offset, self.type, self.len, self.metadata,
                      self.metadata_mask)
@OFPInstruction.register_instruction_type([ofproto.OFPIT_WRITE_ACTIONS,
                                           ofproto.OFPIT_APPLY_ACTIONS,
                                           ofproto.OFPIT_CLEAR_ACTIONS])
class OFPInstructionActions(OFPInstruction):
    """Write/Apply/Clear-Actions instruction wrapping a list of OFPAction."""
    def __init__(self, type_, actions=None, len_=None):
        super(OFPInstructionActions, self).__init__()
        self.type = type_
        for a in actions:
            assert isinstance(a, OFPAction)
        self.actions = actions
    @classmethod
    def parser(cls, buf, offset):
        # Decode the instruction header, then each contained action.
        (type_, len_) = struct.unpack_from(
            ofproto.OFP_INSTRUCTION_ACTIONS_PACK_STR,
            buf, offset)
        offset += ofproto.OFP_INSTRUCTION_ACTIONS_SIZE
        actions = []
        actions_len = len_ - ofproto.OFP_INSTRUCTION_ACTIONS_SIZE
        while actions_len > 0:
            a = OFPAction.parser(buf, offset)
            actions.append(a)
            actions_len -= a.len
            offset += a.len
        inst = cls(type_, actions)
        inst.len = len_
        return inst
    def serialize(self, buf, offset):
        # Serialize actions first; the header is written last, once the
        # final (8-byte padded) length is known.
        action_offset = offset + ofproto.OFP_INSTRUCTION_ACTIONS_SIZE
        if self.actions:
            for a in self.actions:
                a.serialize(buf, action_offset)
                action_offset += a.len
        self.len = action_offset - offset
        pad_len = utils.round_up(self.len, 8) - self.len
        msg_pack_into("%dx" % pad_len, buf, action_offset)
        self.len += pad_len
        msg_pack_into(ofproto.OFP_INSTRUCTION_ACTIONS_PACK_STR,
                      buf, offset, self.type, self.len)
@OFPInstruction.register_instruction_type([ofproto.OFPIT_METER])
class OFPInstructionMeter(OFPInstruction):
    """Meter instruction (OFPIT_METER): send matching packets to a meter."""

    def __init__(self, meter_id=1, type_=None, len_=None):
        super(OFPInstructionMeter, self).__init__()
        self.type = ofproto.OFPIT_METER
        self.len = ofproto.OFP_INSTRUCTION_METER_SIZE
        self.meter_id = meter_id

    @classmethod
    def parser(cls, buf, offset):
        """Decode the instruction; only meter_id is variable."""
        fields = struct.unpack_from(
            ofproto.OFP_INSTRUCTION_METER_PACK_STR, buf, offset)
        return cls(fields[2])

    def serialize(self, buf, offset):
        """Pack the instruction into *buf* at *offset*."""
        msg_pack_into(ofproto.OFP_INSTRUCTION_METER_PACK_STR, buf,
                      offset, self.type, self.len, self.meter_id)
class OFPActionHeader(StringifyMixin):
    """Common (type, len) header shared by all OpenFlow actions."""
    def __init__(self, type_, len_):
        self.type = type_
        self.len = len_
    def serialize(self, buf, offset):
        # Pack only the bare action header.
        msg_pack_into(ofproto.OFP_ACTION_HEADER_PACK_STR,
                      buf, offset, self.type, self.len)
class OFPAction(OFPActionHeader):
    """Base class and parser registry for concrete OpenFlow actions."""
    _ACTION_TYPES = {}
    @staticmethod
    def register_action_type(type_, len_):
        # Class decorator: stamp the OFPAT_* code and fixed wire length
        # onto the subclass and enter it into the parser registry.
        def _register_action_type(cls):
            cls.cls_action_type = type_
            cls.cls_action_len = len_
            OFPAction._ACTION_TYPES[cls.cls_action_type] = cls
            return cls
        return _register_action_type
    def __init__(self):
        # type/len come from the registration on the concrete class.
        cls = self.__class__
        super(OFPAction, self).__init__(cls.cls_action_type,
                                        cls.cls_action_len)
    @classmethod
    def parser(cls, buf, offset):
        # Peek at the common header and delegate to the registered class.
        type_, len_ = struct.unpack_from(
            ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
        cls_ = cls._ACTION_TYPES.get(type_)
        assert cls_ is not None
        return cls_.parser(buf, offset)
@OFPAction.register_action_type(ofproto.OFPAT_OUTPUT,
                                ofproto.OFP_ACTION_OUTPUT_SIZE)
class OFPActionOutput(OFPAction):
    """Output action: forward the packet out of *port*."""

    def __init__(self, port, max_len=ofproto.OFPCML_MAX,
                 type_=None, len_=None):
        super(OFPActionOutput, self).__init__()
        self.port = port
        self.max_len = max_len

    @classmethod
    def parser(cls, buf, offset):
        """Decode an output action; the fixed type/len fields are dropped."""
        fields = struct.unpack_from(
            ofproto.OFP_ACTION_OUTPUT_PACK_STR, buf, offset)
        return cls(fields[2], fields[3])

    def serialize(self, buf, offset):
        """Pack the action into *buf* at *offset*."""
        msg_pack_into(ofproto.OFP_ACTION_OUTPUT_PACK_STR, buf, offset,
                      self.type, self.len, self.port, self.max_len)
@OFPAction.register_action_type(ofproto.OFPAT_GROUP,
                                ofproto.OFP_ACTION_GROUP_SIZE)
class OFPActionGroup(OFPAction):
    """Group action: process the packet through *group_id*."""

    def __init__(self, group_id=0, type_=None, len_=None):
        super(OFPActionGroup, self).__init__()
        self.group_id = group_id

    @classmethod
    def parser(cls, buf, offset):
        """Decode a group action; only group_id is variable."""
        fields = struct.unpack_from(
            ofproto.OFP_ACTION_GROUP_PACK_STR, buf, offset)
        return cls(fields[2])

    def serialize(self, buf, offset):
        """Pack the action into *buf* at *offset*."""
        msg_pack_into(ofproto.OFP_ACTION_GROUP_PACK_STR, buf, offset,
                      self.type, self.len, self.group_id)
@OFPAction.register_action_type(ofproto.OFPAT_SET_QUEUE,
                                ofproto.OFP_ACTION_SET_QUEUE_SIZE)
class OFPActionSetQueue(OFPAction):
    """Set-queue action: map the packet onto *queue_id*."""

    def __init__(self, queue_id, type_=None, len_=None):
        super(OFPActionSetQueue, self).__init__()
        self.queue_id = queue_id

    @classmethod
    def parser(cls, buf, offset):
        """Decode a set-queue action; only queue_id is variable."""
        fields = struct.unpack_from(
            ofproto.OFP_ACTION_SET_QUEUE_PACK_STR, buf, offset)
        return cls(fields[2])

    def serialize(self, buf, offset):
        """Pack the action into *buf* at *offset*."""
        msg_pack_into(ofproto.OFP_ACTION_SET_QUEUE_PACK_STR, buf, offset,
                      self.type, self.len, self.queue_id)
@OFPAction.register_action_type(ofproto.OFPAT_SET_MPLS_TTL,
                                ofproto.OFP_ACTION_MPLS_TTL_SIZE)
class OFPActionSetMplsTtl(OFPAction):
    """Set-MPLS-TTL action."""

    def __init__(self, mpls_ttl, type_=None, len_=None):
        super(OFPActionSetMplsTtl, self).__init__()
        self.mpls_ttl = mpls_ttl

    @classmethod
    def parser(cls, buf, offset):
        """Decode the action; only mpls_ttl is variable."""
        fields = struct.unpack_from(
            ofproto.OFP_ACTION_MPLS_TTL_PACK_STR, buf, offset)
        return cls(fields[2])

    def serialize(self, buf, offset):
        """Pack the action into *buf* at *offset*."""
        msg_pack_into(ofproto.OFP_ACTION_MPLS_TTL_PACK_STR, buf, offset,
                      self.type, self.len, self.mpls_ttl)
@OFPAction.register_action_type(ofproto.OFPAT_DEC_MPLS_TTL,
                                ofproto.OFP_ACTION_HEADER_SIZE)
class OFPActionDecMplsTtl(OFPAction):
    """Decrement-MPLS-TTL action; carries no body beyond the header."""

    def __init__(self, type_=None, len_=None):
        super(OFPActionDecMplsTtl, self).__init__()

    @classmethod
    def parser(cls, buf, offset):
        """Consume the generic action header and return an instance."""
        struct.unpack_from(
            ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
        return cls()
@OFPAction.register_action_type(ofproto.OFPAT_SET_NW_TTL,
                                ofproto.OFP_ACTION_NW_TTL_SIZE)
class OFPActionSetNwTtl(OFPAction):
    """Set-IP-TTL action."""

    def __init__(self, nw_ttl, type_=None, len_=None):
        super(OFPActionSetNwTtl, self).__init__()
        self.nw_ttl = nw_ttl

    @classmethod
    def parser(cls, buf, offset):
        """Decode the action; only nw_ttl is variable."""
        fields = struct.unpack_from(
            ofproto.OFP_ACTION_NW_TTL_PACK_STR, buf, offset)
        return cls(fields[2])

    def serialize(self, buf, offset):
        """Pack the action into *buf* at *offset*."""
        msg_pack_into(ofproto.OFP_ACTION_NW_TTL_PACK_STR, buf, offset,
                      self.type, self.len, self.nw_ttl)
@OFPAction.register_action_type(ofproto.OFPAT_DEC_NW_TTL,
                                ofproto.OFP_ACTION_HEADER_SIZE)
class OFPActionDecNwTtl(OFPAction):
    """Decrement-IP-TTL action; carries no body beyond the header."""

    def __init__(self, type_=None, len_=None):
        super(OFPActionDecNwTtl, self).__init__()

    @classmethod
    def parser(cls, buf, offset):
        """Consume the generic action header and return an instance."""
        struct.unpack_from(
            ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
        return cls()
@OFPAction.register_action_type(ofproto.OFPAT_COPY_TTL_OUT,
                                ofproto.OFP_ACTION_HEADER_SIZE)
class OFPActionCopyTtlOut(OFPAction):
    """Copy-TTL-outwards action; carries no body beyond the header."""

    def __init__(self, type_=None, len_=None):
        super(OFPActionCopyTtlOut, self).__init__()

    @classmethod
    def parser(cls, buf, offset):
        """Consume the generic action header and return an instance."""
        struct.unpack_from(
            ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
        return cls()
@OFPAction.register_action_type(ofproto.OFPAT_COPY_TTL_IN,
                                ofproto.OFP_ACTION_HEADER_SIZE)
class OFPActionCopyTtlIn(OFPAction):
    """Copy-TTL-inwards action; carries no body beyond the header."""

    def __init__(self, type_=None, len_=None):
        super(OFPActionCopyTtlIn, self).__init__()

    @classmethod
    def parser(cls, buf, offset):
        """Consume the generic action header and return an instance."""
        struct.unpack_from(
            ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
        return cls()
@OFPAction.register_action_type(ofproto.OFPAT_PUSH_VLAN,
                                ofproto.OFP_ACTION_PUSH_SIZE)
class OFPActionPushVlan(OFPAction):
    """Push-VLAN action: add a new VLAN tag with *ethertype*."""

    def __init__(self, ethertype=ether.ETH_TYPE_8021Q, type_=None, len_=None):
        super(OFPActionPushVlan, self).__init__()
        self.ethertype = ethertype

    @classmethod
    def parser(cls, buf, offset):
        """Decode a push action; only ethertype is variable."""
        fields = struct.unpack_from(
            ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset)
        return cls(fields[2])

    def serialize(self, buf, offset):
        """Pack the action into *buf* at *offset*."""
        msg_pack_into(ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset,
                      self.type, self.len, self.ethertype)
@OFPAction.register_action_type(ofproto.OFPAT_PUSH_MPLS,
                                ofproto.OFP_ACTION_PUSH_SIZE)
class OFPActionPushMpls(OFPAction):
    """Push-MPLS action: add a new MPLS shim header with *ethertype*."""

    def __init__(self, ethertype=ether.ETH_TYPE_MPLS, type_=None, len_=None):
        super(OFPActionPushMpls, self).__init__()
        self.ethertype = ethertype

    @classmethod
    def parser(cls, buf, offset):
        """Decode a push action; only ethertype is variable."""
        fields = struct.unpack_from(
            ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset)
        return cls(fields[2])

    def serialize(self, buf, offset):
        """Pack the action into *buf* at *offset*."""
        msg_pack_into(ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset,
                      self.type, self.len, self.ethertype)
@OFPAction.register_action_type(ofproto.OFPAT_POP_VLAN,
                                ofproto.OFP_ACTION_HEADER_SIZE)
class OFPActionPopVlan(OFPAction):
    """Pop-VLAN action; carries no body beyond the header."""

    def __init__(self, type_=None, len_=None):
        super(OFPActionPopVlan, self).__init__()

    @classmethod
    def parser(cls, buf, offset):
        """Consume the generic action header and return an instance."""
        struct.unpack_from(
            ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
        return cls()
@OFPAction.register_action_type(ofproto.OFPAT_POP_MPLS,
                                ofproto.OFP_ACTION_POP_MPLS_SIZE)
class OFPActionPopMpls(OFPAction):
    """Pop-MPLS action: remove the outermost shim; *ethertype* follows."""

    def __init__(self, ethertype=ether.ETH_TYPE_IP, type_=None, len_=None):
        super(OFPActionPopMpls, self).__init__()
        self.ethertype = ethertype

    @classmethod
    def parser(cls, buf, offset):
        """Decode the action; only ethertype is variable."""
        fields = struct.unpack_from(
            ofproto.OFP_ACTION_POP_MPLS_PACK_STR, buf, offset)
        return cls(fields[2])

    def serialize(self, buf, offset):
        """Pack the action into *buf* at *offset*."""
        msg_pack_into(ofproto.OFP_ACTION_POP_MPLS_PACK_STR, buf, offset,
                      self.type, self.len, self.ethertype)
@OFPAction.register_action_type(ofproto.OFPAT_SET_FIELD,
                                ofproto.OFP_ACTION_SET_FIELD_SIZE)
class OFPActionSetField(OFPAction):
    """Set-Field action: overwrite one OXM match field of the packet.

    Instantiated as ``OFPActionSetField(eth_dst='...')`` — exactly one
    keyword argument naming the OXM field, with an unmasked value.
    """
    def __init__(self, field=None, **kwargs):
        # ``field`` is accepted only for from_jsondict compatibility; the
        # real field/value pair arrives as a single keyword argument.
        super(OFPActionSetField, self).__init__()
        assert len(kwargs) == 1
        key = list(kwargs.keys())[0]
        value = kwargs[key]
        assert isinstance(key, (str, six.text_type))
        assert not isinstance(value, tuple)  # no mask
        self.key = key
        self.value = value
    @classmethod
    def parser(cls, buf, offset):
        (type_, len_) = struct.unpack_from(
            ofproto.OFP_ACTION_SET_FIELD_PACK_STR, buf, offset)
        # The OXM TLV starts right after the 4-byte action header.
        (n, value, mask, _len) = ofproto.oxm_parse(buf, offset + 4)
        k, uv = ofproto.oxm_to_user(n, value, mask)
        action = cls(**{k: uv})
        action.len = len_
        return action
    def serialize(self, buf, offset):
        n, value, mask = ofproto.oxm_from_user(self.key, self.value)
        len_ = ofproto.oxm_serialize(n, value, mask, buf, offset + 4)
        # Total action length = 4-byte header + OXM, padded to 8 bytes.
        self.len = utils.round_up(4 + len_, 8)
        msg_pack_into('!HH', buf, offset, self.type, self.len)
        pad_len = self.len - (4 + len_)
        msg_pack_into("%dx" % pad_len, buf, offset + 4 + len_)
    def to_jsondict(self):
        return {
            self.__class__.__name__: {
                'field': ofproto.oxm_to_jsondict(self.key, self.value),
                "len": self.len,
                "type": self.type
            }
        }
    @classmethod
    def from_jsondict(cls, dict_):
        k, v = ofproto.oxm_from_jsondict(dict_['field'])
        return OFPActionSetField(**{k: v})
    def stringify_attrs(self):
        yield (self.key, self.value)
@OFPAction.register_action_type(ofproto.OFPAT_PUSH_PBB,
                                ofproto.OFP_ACTION_PUSH_SIZE)
class OFPActionPushPbb(OFPAction):
    """Push PBB action: adds a new PBB service instance header with the
    given ``ethertype``."""

    def __init__(self, ethertype, type_=None, len_=None):
        super(OFPActionPushPbb, self).__init__()
        self.ethertype = ethertype

    @classmethod
    def parser(cls, buf, offset):
        """Decode one action from the wire format at ``buf[offset:]``."""
        fields = struct.unpack_from(
            ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset)
        return cls(fields[2])

    def serialize(self, buf, offset):
        """Encode this action into ``buf`` at ``offset``."""
        msg_pack_into(ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset,
                      self.type, self.len, self.ethertype)
@OFPAction.register_action_type(ofproto.OFPAT_POP_PBB,
                                ofproto.OFP_ACTION_HEADER_SIZE)
class OFPActionPopPbb(OFPAction):
    """Pop PBB action: removes the outermost PBB service instance header."""

    def __init__(self, type_=None, len_=None):
        super(OFPActionPopPbb, self).__init__()

    @classmethod
    def parser(cls, buf, offset):
        """Decode the fixed-size action header; there is no extra payload."""
        struct.unpack_from(ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
        return cls()
@OFPAction.register_action_type(
    ofproto.OFPAT_EXPERIMENTER,
    ofproto.OFP_ACTION_EXPERIMENTER_HEADER_SIZE)
class OFPActionExperimenter(OFPAction):
    """Experimenter (vendor extension) action."""
    def __init__(self, experimenter):
        super(OFPActionExperimenter, self).__init__()
        self.type = ofproto.OFPAT_EXPERIMENTER
        self.experimenter = experimenter
        # len is unknown until the payload is serialized / parsed.
        self.len = None
    @classmethod
    def parser(cls, buf, offset):
        # Dispatch on the experimenter id: Nicira extensions get their own
        # NXAction subclass; anything else is kept as an opaque payload in
        # OFPActionExperimenterUnknown.
        (type_, len_, experimenter) = struct.unpack_from(
            ofproto.OFP_ACTION_EXPERIMENTER_HEADER_PACK_STR, buf, offset)
        data = buf[(offset + ofproto.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                    ): offset + len_]
        if experimenter == ofproto_common.NX_EXPERIMENTER_ID:
            obj = NXAction.parse(data)
        else:
            obj = OFPActionExperimenterUnknown(experimenter, data)
        obj.len = len_
        return obj
    def serialize(self, buf, offset):
        # Packs only the experimenter header; subclasses append the payload.
        msg_pack_into(ofproto.OFP_ACTION_EXPERIMENTER_HEADER_PACK_STR,
                      buf, offset, self.type, self.len, self.experimenter)
class OFPActionExperimenterUnknown(OFPActionExperimenter):
    """Experimenter action whose payload format is unknown to this parser.

    The raw payload bytes are kept verbatim in ``data`` so the action can
    be re-serialized unchanged.
    """
    def __init__(self, experimenter, data=None, type_=None, len_=None):
        super(OFPActionExperimenterUnknown,
              self).__init__(experimenter=experimenter)
        self.data = data

    def serialize(self, buf, offset):
        # fixup: compute the 8-byte-aligned total length before the header
        # is written by the parent class.
        data = self.data
        if data is None:
            data = bytearray()
        self.len = (utils.round_up(len(data), 8) +
                    ofproto.OFP_ACTION_EXPERIMENTER_HEADER_SIZE)
        super(OFPActionExperimenterUnknown, self).serialize(buf, offset)
        # Bug fix: pack the local ``data`` fallback, not ``self.data`` —
        # the latter is None when no payload was given and would crash
        # both len() and struct packing.  bytes() also converts a
        # bytearray payload to the bytes object struct's 's' format needs.
        msg_pack_into('!%ds' % len(data),
                      buf,
                      offset + ofproto.OFP_ACTION_EXPERIMENTER_HEADER_SIZE,
                      bytes(data))
@_register_parser
@_set_msg_type(ofproto.OFPT_GROUP_MOD)
class OFPGroupMod(MsgBase):
    """Modify group entry message (add/modify/delete a group table entry)."""
    def __init__(self, datapath, command=ofproto.OFPGC_ADD,
                 type_=ofproto.OFPGT_ALL, group_id=0, buckets=None):
        buckets = buckets if buckets else []
        super(OFPGroupMod, self).__init__(datapath)
        self.command = command
        self.type = type_
        self.group_id = group_id
        self.buckets = buckets
    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        """Parse the fixed header fields, then the variable bucket list."""
        msg = super(OFPGroupMod, cls).parser(
            datapath, version, msg_type, msg_len, xid, buf)
        (msg.command, msg.type, msg.group_id) = struct.unpack_from(
            ofproto.OFP_GROUP_MOD_PACK_STR, buf, ofproto.OFP_HEADER_SIZE)
        offset = ofproto.OFP_GROUP_MOD_SIZE
        msg.buckets = []
        # Buckets are variable-length; each bucket reports its own length.
        while offset < msg.msg_len:
            bucket = OFPBucket.parser(buf, offset)
            msg.buckets.append(bucket)
            offset += bucket.len
        return msg
    def _serialize_body(self):
        """Pack the fixed fields, then serialize each bucket in order."""
        msg_pack_into(ofproto.OFP_GROUP_MOD_PACK_STR, self.buf,
                      ofproto.OFP_HEADER_SIZE,
                      self.command, self.type, self.group_id)
        offset = ofproto.OFP_GROUP_MOD_SIZE
        for b in self.buckets:
            b.serialize(self.buf, offset)
            offset += b.len
class OFPPortModProp(OFPPropBase):
    """Base class for OFPPortMod properties."""
    # Registry of property-type value -> subclass; filled by register_type.
    _TYPES = {}
class OFPPortModPropEthernet(OFPPortModProp):
    """Ethernet property for OFPPortMod, carrying the advertised features
    bitmap."""

    def __init__(self, type_=None, length=None, advertise=None):
        self.type = type_
        # Store length for consistency with OFPPortModPropOptical; without
        # this, reading .length before serialize() raised AttributeError.
        # serialize() recomputes it anyway.
        self.length = length
        self.advertise = advertise

    def serialize(self):
        # fixup: the property is fixed-size, so derive length from the
        # pack string rather than trusting the constructor argument.
        self.length = struct.calcsize(
            ofproto.OFP_PORT_MOD_PROP_ETHERNET_PACK_STR)
        buf = bytearray()
        msg_pack_into(ofproto.OFP_PORT_MOD_PROP_ETHERNET_PACK_STR,
                      buf, 0, self.type, self.length, self.advertise)
        return buf
class OFPPortModPropOptical(OFPPortModProp):
    """Optical transport property for OFPPortMod (frequency/power setup)."""

    def __init__(self, type_=None, length=None, configure=None,
                 freq_lmda=None, fl_offset=None, grid_span=None,
                 tx_pwr=None):
        (self.type, self.length, self.configure, self.freq_lmda,
         self.fl_offset, self.grid_span, self.tx_pwr) = (
            type_, length, configure, freq_lmda, fl_offset, grid_span,
            tx_pwr)

    def serialize(self):
        # fixup: this property is fixed-size; recompute length here.
        self.length = struct.calcsize(
            ofproto.OFP_PORT_MOD_PROP_OPTICAL_PACK_STR)
        out = bytearray()
        msg_pack_into(ofproto.OFP_PORT_MOD_PROP_OPTICAL_PACK_STR, out, 0,
                      self.type, self.length, self.configure, self.freq_lmda,
                      self.fl_offset, self.grid_span, self.tx_pwr)
        return out
class OFPPortModPropExperimenter(OFPPropCommonExperimenter4ByteData):
    """Experimenter port-mod property; payload handled by the common base."""
    pass
@_set_msg_type(ofproto.OFPT_PORT_MOD)
class OFPPortMod(MsgBase):
    """Port modification message."""
    # presumably consumed by the stringify machinery to render hw_addr as
    # ascii text — confirm against StringifyMixin conventions.
    _TYPE = {
        'ascii': [
            'hw_addr',
        ]
    }
    def __init__(self, datapath, port_no=0, hw_addr='00:00:00:00:00:00',
                 config=0, mask=0, properties=None):
        super(OFPPortMod, self).__init__(datapath)
        self.port_no = port_no
        self.hw_addr = hw_addr
        self.config = config
        self.mask = mask
        self.properties = properties or []
    def _serialize_body(self):
        """Pack the fixed fields, then append the serialized properties."""
        bin_props = bytearray()
        for p in self.properties:
            bin_props += p.serialize()
        msg_pack_into(ofproto.OFP_PORT_MOD_PACK_STR, self.buf,
                      ofproto.OFP_HEADER_SIZE,
                      self.port_no, addrconv.mac.text_to_bin(self.hw_addr),
                      self.config,
                      self.mask)
        self.buf += bin_props
class OFPBucket(StringifyMixin):
    """One bucket of a group entry: a weighted action list with optional
    liveness watch port/group."""
    def __init__(self, weight=0, watch_port=ofproto.OFPP_ANY,
                 watch_group=ofproto.OFPG_ANY, actions=None, len_=None):
        super(OFPBucket, self).__init__()
        self.weight = weight
        self.watch_port = watch_port
        self.watch_group = watch_group
        self.actions = actions
    @classmethod
    def parser(cls, buf, offset):
        """Parse one bucket (fixed header plus trailing actions)."""
        (len_, weight, watch_port, watch_group) = struct.unpack_from(
            ofproto.OFP_BUCKET_PACK_STR, buf, offset)
        msg = cls(weight, watch_port, watch_group, [])
        msg.len = len_
        length = ofproto.OFP_BUCKET_SIZE
        offset += ofproto.OFP_BUCKET_SIZE
        # Consume actions until the bucket's own declared length is reached.
        while length < msg.len:
            action = OFPAction.parser(buf, offset)
            msg.actions.append(action)
            offset += action.len
            length += action.len
        return msg
    def serialize(self, buf, offset):
        """Serialize actions first so the total (8-byte rounded) bucket
        length is known before the header is packed."""
        action_offset = offset + ofproto.OFP_BUCKET_SIZE
        action_len = 0
        for a in self.actions:
            a.serialize(buf, action_offset)
            action_offset += a.len
            action_len += a.len
        self.len = utils.round_up(ofproto.OFP_BUCKET_SIZE + action_len, 8)
        msg_pack_into(ofproto.OFP_BUCKET_PACK_STR, buf, offset,
                      self.len, self.weight, self.watch_port,
                      self.watch_group)
@_set_msg_type(ofproto.OFPT_ROLE_REQUEST)
class OFPRoleRequest(MsgBase):
    """Role request message (controller -> switch)."""

    def __init__(self, datapath, role=None, generation_id=None):
        super(OFPRoleRequest, self).__init__(datapath)
        self.role = role
        self.generation_id = generation_id

    def _serialize_body(self):
        """Pack role and generation_id right after the common header."""
        assert self.role is not None
        assert self.generation_id is not None
        msg_pack_into(ofproto.OFP_ROLE_REQUEST_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE,
                      self.role, self.generation_id)
@_register_parser
@_set_msg_type(ofproto.OFPT_ROLE_REPLY)
class OFPRoleReply(MsgBase):
    """Role reply message (switch -> controller)."""

    def __init__(self, datapath, role=None, generation_id=None):
        super(OFPRoleReply, self).__init__(datapath)
        self.role = role
        self.generation_id = generation_id

    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        """Parse the common header, then the role/generation_id body.

        The body layout is identical to the role request, so the request
        pack string is reused.
        """
        msg = super(OFPRoleReply, cls).parser(
            datapath, version, msg_type, msg_len, xid, buf)
        msg.role, msg.generation_id = struct.unpack_from(
            ofproto.OFP_ROLE_REQUEST_PACK_STR, msg.buf,
            ofproto.OFP_HEADER_SIZE)
        return msg
class OFPAsyncConfigProp(OFPPropBase):
    """Base class for async-config properties."""
    # Registry of property-type value -> subclass; filled by register_type.
    _TYPES = {}
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_PACKET_IN_SLAVE)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_PACKET_IN_MASTER)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_PORT_STATUS_SLAVE)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_PORT_STATUS_MASTER)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_FLOW_REMOVED_SLAVE)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_FLOW_REMOVED_MASTER)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_ROLE_STATUS_SLAVE)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_ROLE_STATUS_MASTER)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_TABLE_STATUS_SLAVE)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_TABLE_STATUS_MASTER)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_REQUESTFORWARD_SLAVE)
@OFPAsyncConfigProp.register_type(ofproto.OFPACPT_REQUESTFORWARD_MASTER)
class OFPAsyncConfigPropReasons(OFPAsyncConfigProp):
    """Reason-bitmask property shared by all the OFPACPT_* types above."""
    def __init__(self, type_=None, length=None, mask=None):
        self.type = type_
        self.length = length
        self.mask = mask
    @classmethod
    def parser(cls, buf):
        """Unpack type/length/mask from the head of *buf*."""
        reasons = cls()
        (reasons.type, reasons.length, reasons.mask) = struct.unpack_from(
            ofproto.OFP_ASYNC_CONFIG_PROP_REASONS_PACK_STR, buf, 0)
        return reasons
    def serialize(self):
        # fixup: this property is fixed-size; recompute length here.
        self.length = ofproto.OFP_ASYNC_CONFIG_PROP_REASONS_SIZE
        buf = bytearray()
        msg_pack_into(ofproto.OFP_ASYNC_CONFIG_PROP_REASONS_PACK_STR, buf, 0,
                      self.type, self.length, self.mask)
        return buf
# NOTE(review): these registrations use OFPTFPT_* (table-features property)
# constants rather than OFPACPT_* ones; the numeric values may coincide,
# but confirm against the ofproto_v1_4 constant definitions.
@OFPAsyncConfigProp.register_type(ofproto.OFPTFPT_EXPERIMENTER_SLAVE)
@OFPAsyncConfigProp.register_type(ofproto.OFPTFPT_EXPERIMENTER_MASTER)
class OFPAsyncConfigPropExperimenter(OFPPropCommonExperimenter4ByteData):
    """Experimenter async-config property (opaque 4-byte-data payload)."""
    pass
@_set_msg_type(ofproto.OFPT_GET_ASYNC_REQUEST)
class OFPGetAsyncRequest(MsgBase):
    """Request the switch's asynchronous-message configuration (no body)."""
    def __init__(self, datapath):
        super(OFPGetAsyncRequest, self).__init__(datapath)
@_register_parser
@_set_msg_type(ofproto.OFPT_GET_ASYNC_REPLY)
class OFPGetAsyncReply(MsgBase):
    """Reply carrying the switch's asynchronous-message configuration."""

    def __init__(self, datapath, properties=None):
        super(OFPGetAsyncReply, self).__init__(datapath)
        self.properties = properties

    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        """Parse the common header, then the trailing property list."""
        msg = super(OFPGetAsyncReply, cls).parser(
            datapath, version, msg_type, msg_len, xid, buf)
        msg.properties = []
        rest = msg.buf[ofproto.OFP_HEADER_SIZE:]
        # Each parse() call consumes one property and returns the remainder.
        while rest:
            prop, rest = OFPAsyncConfigProp.parse(rest)
            msg.properties.append(prop)
        return msg
@_set_msg_type(ofproto.OFPT_SET_ASYNC)
class OFPSetAsync(MsgBase):
    """Set the asynchronous-message configuration of the switch."""
    def __init__(self, datapath, properties=None):
        super(OFPSetAsync, self).__init__(datapath)
        # Robustness/consistency fix: default to an empty list (as
        # OFPPortMod already does) so serializing a message created
        # without properties no longer raises TypeError on iterating None.
        self.properties = properties or []
    def _serialize_body(self):
        """Append the serialized properties after the common header."""
        bin_props = bytearray()
        for p in self.properties:
            bin_props += p.serialize()
        self.buf += bin_props
@_set_msg_type(ofproto.OFPT_BUNDLE_CONTROL)
class OFPBundleCtrlMsg(MsgBase):
    """Bundle control message (open/close/commit/discard a bundle)."""

    def __init__(self, datapath, bundle_id, type_, flags, properties):
        super(OFPBundleCtrlMsg, self).__init__(datapath)
        self.bundle_id = bundle_id
        self.type = type_
        self.flags = flags
        self.properties = properties

    def _serialize_body(self):
        """Pack the fixed fields, then append the serialized properties."""
        prop_buf = bytearray()
        for prop in self.properties:
            prop_buf += prop.serialize()
        msg_pack_into(ofproto.OFP_BUNDLE_CTRL_MSG_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE, self.bundle_id,
                      self.type, self.flags)
        self.buf += prop_buf
@_set_msg_type(ofproto.OFPT_BUNDLE_ADD_MESSAGE)
class OFPBundleAddMsg(MsgInMsgBase):
    """Wrap another OpenFlow message so it can be staged in a bundle."""
    def __init__(self, datapath, bundle_id, flags, message, properties):
        super(OFPBundleAddMsg, self).__init__(datapath)
        self.bundle_id = bundle_id
        self.flags = flags
        self.message = message
        self.properties = properties
    def _serialize_body(self):
        # The xid of the inner message must be the same as
        # that of the outer message (OF1.4.0 7.3.9.2)
        if self.message.xid != self.xid:
            self.message.set_xid(self.xid)
        # Serialize the wrapped message into its own buffer first.
        self.message.serialize()
        tail_buf = self.message.buf
        # Pad the inner message to an 8-byte boundary, but only when
        # properties follow it.
        if len(self.properties) > 0:
            message_len = len(tail_buf)
            pad_len = utils.round_up(message_len, 8) - message_len
            msg_pack_into("%dx" % pad_len, tail_buf, message_len)
        # Append the serialized properties after the (padded) message.
        for p in self.properties:
            tail_buf += p.serialize()
        # Pack this message's own fixed fields after the common header.
        msg_pack_into(ofproto.OFP_BUNDLE_ADD_MSG_0_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE, self.bundle_id,
                      self.flags)
        # Finally attach inner message + properties to the outer buffer.
        self.buf += tail_buf
# Module-level side effect: generate and register the Nicira extension
# (NXAction) classes against this OF1.4 ofproto/parser module pair.
nx_actions.generate(
    'ryu.ofproto.ofproto_v1_4',
    'ryu.ofproto.ofproto_v1_4_parser'
)
| true | true |
1c3034e026a4b7927b1b22737c8e3921a53b0a31 | 9,812 | py | Python | ymir/backend/src/ymir_app/app/api/api_v1/endpoints/projects.py | Zhang-SJ930104/ymir | dd6481be6f229ade4cf8fba64ef44a15357430c4 | [
"Apache-2.0"
] | null | null | null | ymir/backend/src/ymir_app/app/api/api_v1/endpoints/projects.py | Zhang-SJ930104/ymir | dd6481be6f229ade4cf8fba64ef44a15357430c4 | [
"Apache-2.0"
] | 1 | 2022-01-18T09:28:29.000Z | 2022-01-18T09:28:29.000Z | ymir/backend/src/ymir_app/app/api/api_v1/endpoints/projects.py | Aryalfrat/ymir | d4617ed00ef67a77ab4e1944763f608bface4be6 | [
"Apache-2.0"
] | null | null | null | import enum
import json
import time
from typing import Any
from fastapi import APIRouter, Depends, Path, Query, BackgroundTasks
from fastapi.logger import logger
from sqlalchemy.orm import Session
from app import crud, models, schemas
from app.api import deps
from app.api.errors.errors import (
ProjectNotFound,
DuplicateProjectError,
FailedToCreateProject,
FailedToConnectClickHouse,
NoDatasetPermission,
DatasetNotFound,
)
from app.config import settings
from app.constants.state import ResultState, RunningStates, TaskType, TrainingType
from app.utils.cache import CacheClient
from app.utils.clickhouse import YmirClickHouse
from app.utils.ymir_controller import ControllerClient, gen_task_hash
from app.libs.projects import setup_sample_project_in_background
from app.libs.keywords import add_keywords
from common_utils.labels import UserLabels
router = APIRouter()
class SortField(enum.Enum):
    # Columns a project listing may be ordered by (used by list_projects).
    id = "id"
    create_datetime = "create_datetime"
@router.get("/", response_model=schemas.ProjectPaginationOut)
def list_projects(
    db: Session = Depends(deps.get_db),
    current_user: models.User = Depends(deps.get_current_active_user),
    name: str = Query(None),
    start_time: int = Query(None, description="from this timestamp"),
    end_time: int = Query(None, description="to this timestamp"),
    offset: int = Query(None),
    limit: int = Query(None),
    order_by: SortField = Query(SortField.id),
    is_desc: bool = Query(True),
) -> Any:
    """
    Get paginated projects list of the current user

    filter:
    - name
    - start_time / end_time (creation-time range)

    order_by:
    - id
    - create_datetime
    """
    projects, total = crud.project.get_multi_projects(
        db,
        user_id=current_user.id,
        name=name,
        offset=offset,
        limit=limit,
        order_by=order_by.name,
        is_desc=is_desc,
        start_time=start_time,
        end_time=end_time,
    )
    return {"result": {"total": total, "items": projects}}
@router.post("/samples", response_model=schemas.ProjectOut)
def create_sample_project(
    *,
    db: Session = Depends(deps.get_db),
    current_user: models.User = Depends(deps.get_current_active_user),
    user_labels: UserLabels = Depends(deps.get_user_labels),
    controller_client: ControllerClient = Depends(deps.get_controller_client),
    background_tasks: BackgroundTasks,
    cache: CacheClient = Depends(deps.get_cache),
) -> Any:
    """
    Create sample project

    Creates a uniquely named example project for the current user,
    registers it with the controller, and defers the heavy dataset setup
    to a background task.
    """
    # Timestamp suffix keeps repeated sample projects from colliding.
    project_name = f"sample_project_{current_user.username}_{time.time()}"
    project_in = schemas.ProjectCreate(
        name=project_name,
        training_keywords=settings.SAMPLE_PROJECT_KEYWORDS,
        chunk_size=1,
        is_example=True,
    )
    project = crud.project.create_project(db, user_id=current_user.id, obj_in=project_in)
    project_task_hash = gen_task_hash(current_user.id, project.id)
    try:
        training_classes = user_labels.get_class_ids(names_or_aliases=settings.SAMPLE_PROJECT_KEYWORDS)
    except KeyError:
        # todo refactor keywords dependencies to handle ensure given keywords exist
        # Sample keywords not yet registered for this user: add them, then
        # re-fetch the label mapping and resolve the class ids again.
        add_keywords(controller_client, cache, current_user.id, settings.SAMPLE_PROJECT_KEYWORDS)
        user_labels = controller_client.get_labels_of_user(current_user.id)
        training_classes = user_labels.get_class_ids(names_or_aliases=settings.SAMPLE_PROJECT_KEYWORDS)
    try:
        resp = controller_client.create_project(
            user_id=current_user.id,
            project_id=project.id,
            task_id=project_task_hash,
            args={"training_classes": training_classes},
        )
        logger.info("[create task] controller response: %s", resp)
    except ValueError:
        # Roll back the DB record when the controller rejects the project.
        crud.project.soft_remove(db, id=project.id)
        raise FailedToCreateProject()
    background_tasks.add_task(
        setup_sample_project_in_background,
        db,
        controller_client,
        project_name=project.name,
        project_id=project.id,
        user_id=current_user.id,
        project_task_hash=project_task_hash,
    )
    return {"result": project}
@router.post("/", response_model=schemas.ProjectOut)
def create_project(
    *,
    db: Session = Depends(deps.get_db),
    current_user: models.User = Depends(deps.get_current_active_user),
    project_in: schemas.ProjectCreate,
    controller_client: ControllerClient = Depends(deps.get_controller_client),
    user_labels: UserLabels = Depends(deps.get_user_labels),
    clickhouse: YmirClickHouse = Depends(deps.get_clickhouse_client),
) -> Any:
    """
    Create project

    Creates the project record, registers it with the controller, then
    bootstraps a placeholder task, a training dataset group and an
    initial dataset, and finally records project stats in ClickHouse
    (best effort).
    """
    if crud.project.is_duplicated_name(db, user_id=current_user.id, name=project_in.name):
        raise DuplicateProjectError()
    # 1. create project first so its id can seed the controller task hash
    project = crud.project.create_project(db, user_id=current_user.id, obj_in=project_in)
    task_id = gen_task_hash(current_user.id, project.id)
    training_classes = user_labels.get_class_ids(names_or_aliases=project_in.training_keywords)
    # 2. register with the controller; roll back the DB record on failure
    try:
        resp = controller_client.create_project(
            user_id=current_user.id,
            project_id=project.id,
            task_id=task_id,
            args={"training_classes": training_classes},
        )
        logger.info("[create task] controller response: %s", resp)
    except ValueError:
        crud.project.soft_remove(db, id=project.id)
        raise FailedToCreateProject()
    # 3. create placeholder task info
    task = crud.task.create_placeholder(
        db, type_=TaskType.create_project, user_id=current_user.id, project_id=project.id
    )
    # 4. create dataset group to build dataset info
    dataset_name = f"{project_in.name}_training_dataset"
    dataset_paras = schemas.DatasetGroupCreate(name=dataset_name, project_id=project.id, user_id=current_user.id)
    dataset_group = crud.dataset_group.create_with_user_id(db, user_id=current_user.id, obj_in=dataset_paras)
    # 5. create initial dataset
    dataset_in = schemas.DatasetCreate(
        name=dataset_name,
        hash=task_id,
        dataset_group_id=dataset_group.id,
        project_id=project.id,
        user_id=current_user.id,
        source=task.type,
        result_state=ResultState.ready,
        task_id=task.id,
    )
    initial_dataset = crud.dataset.create_with_version(db, obj_in=dataset_in)
    # 6. point the project at its new dataset group / initial dataset
    project = crud.project.update_resources(
        db,
        project_id=project.id,
        project_update=schemas.ProjectUpdate(
            training_dataset_group_id=dataset_group.id, initial_training_dataset_id=initial_dataset.id
        ),
    )
    try:
        clickhouse.save_project_parameter(
            dt=project.create_datetime,
            user_id=project.user_id,
            id_=project.id,
            name=project.name,
            training_type=TrainingType(project.training_type).name,
            training_keywords=json.loads(project.training_keywords),
        )
    except FailedToConnectClickHouse:
        # clickhouse metric shouldn't block create task process
        logger.exception(
            "[create project metrics] failed to write project(%s) stats to clickhouse, continue anyway",
            project.name,
        )
    logger.info("[create project] project record created: %s", project)
    return {"result": project}
@router.get(
    "/{project_id}",
    response_model=schemas.ProjectOut,
)
def get_project(
    *,
    db: Session = Depends(deps.get_db),
    project_id: int = Path(...),
    current_user: models.User = Depends(deps.get_current_active_user),
) -> Any:
    """
    Get a project detail

    Raises ProjectNotFound when the project does not exist or does not
    belong to the current user.
    """
    project = crud.project.get_by_user_and_id(db, user_id=current_user.id, id=project_id)
    if not project:
        raise ProjectNotFound()
    return {"result": project}
@router.patch(
    "/{project_id}",
    response_model=schemas.ProjectOut,
)
def update_project(
    *,
    db: Session = Depends(deps.get_db),
    project_id: int = Path(...),
    project_update: schemas.ProjectUpdate,
    current_user: models.User = Depends(deps.get_current_active_user),
) -> Any:
    """
    Setting up a project

    Raises ProjectNotFound, DatasetNotFound or NoDatasetPermission.
    """
    project = crud.project.get_by_user_and_id(db, user_id=current_user.id, id=project_id)
    if not project:
        raise ProjectNotFound()
    if project_update.initial_training_dataset_id is not None:
        # The chosen initial training dataset must exist and belong to
        # this project's own training dataset group.
        dataset = crud.dataset.get(db, id=project_update.initial_training_dataset_id)
        if not dataset:
            raise DatasetNotFound()
        if project.training_dataset_group_id != dataset.dataset_group_id:
            raise NoDatasetPermission()
    project = crud.project.update_resources(db, project_id=project.id, project_update=project_update)
    return {"result": project}
@router.delete(
    "/{project_id}",
    response_model=schemas.ProjectOut,
)
def delete_project(
    *,
    db: Session = Depends(deps.get_db),
    project_id: int = Path(...),
    controller_client: ControllerClient = Depends(deps.get_controller_client),
    current_user: models.User = Depends(deps.get_current_active_user),
) -> Any:
    """
    Delete project, and terminate all tasks

    The project is soft-removed first; any still-running tasks of the
    project are then terminated on the controller, best effort.
    """
    project = crud.project.get_by_user_and_id(db, user_id=current_user.id, id=project_id)
    if not project:
        raise ProjectNotFound()
    project = crud.project.soft_remove(db, id=project_id)
    # Include already-deleted task records: their jobs may still be running.
    unfinished_tasks = crud.task.get_tasks_by_states(
        db,
        states=RunningStates,
        including_deleted=True,
        project_id=project_id,
    )
    for task in unfinished_tasks:
        try:
            controller_client.terminate_task(user_id=current_user.id, task_hash=task.hash, task_type=task.type)
        except Exception:
            # Termination is best effort; keep going for the other tasks.
            logger.info(f"Failed to terminate task: {task.hash} of project_id: {project_id}")
            continue
    return {"result": project}
| 33.037037 | 113 | 0.698227 | import enum
import json
import time
from typing import Any
from fastapi import APIRouter, Depends, Path, Query, BackgroundTasks
from fastapi.logger import logger
from sqlalchemy.orm import Session
from app import crud, models, schemas
from app.api import deps
from app.api.errors.errors import (
ProjectNotFound,
DuplicateProjectError,
FailedToCreateProject,
FailedToConnectClickHouse,
NoDatasetPermission,
DatasetNotFound,
)
from app.config import settings
from app.constants.state import ResultState, RunningStates, TaskType, TrainingType
from app.utils.cache import CacheClient
from app.utils.clickhouse import YmirClickHouse
from app.utils.ymir_controller import ControllerClient, gen_task_hash
from app.libs.projects import setup_sample_project_in_background
from app.libs.keywords import add_keywords
from common_utils.labels import UserLabels
router = APIRouter()
class SortField(enum.Enum):
id = "id"
create_datetime = "create_datetime"
@router.get("/", response_model=schemas.ProjectPaginationOut)
def list_projects(
db: Session = Depends(deps.get_db),
current_user: models.User = Depends(deps.get_current_active_user),
name: str = Query(None),
start_time: int = Query(None, description="from this timestamp"),
end_time: int = Query(None, description="to this timestamp"),
offset: int = Query(None),
limit: int = Query(None),
order_by: SortField = Query(SortField.id),
is_desc: bool = Query(True),
) -> Any:
projects, total = crud.project.get_multi_projects(
db,
user_id=current_user.id,
name=name,
offset=offset,
limit=limit,
order_by=order_by.name,
is_desc=is_desc,
start_time=start_time,
end_time=end_time,
)
return {"result": {"total": total, "items": projects}}
@router.post("/samples", response_model=schemas.ProjectOut)
def create_sample_project(
*,
db: Session = Depends(deps.get_db),
current_user: models.User = Depends(deps.get_current_active_user),
user_labels: UserLabels = Depends(deps.get_user_labels),
controller_client: ControllerClient = Depends(deps.get_controller_client),
background_tasks: BackgroundTasks,
cache: CacheClient = Depends(deps.get_cache),
) -> Any:
project_name = f"sample_project_{current_user.username}_{time.time()}"
project_in = schemas.ProjectCreate(
name=project_name,
training_keywords=settings.SAMPLE_PROJECT_KEYWORDS,
chunk_size=1,
is_example=True,
)
project = crud.project.create_project(db, user_id=current_user.id, obj_in=project_in)
project_task_hash = gen_task_hash(current_user.id, project.id)
try:
training_classes = user_labels.get_class_ids(names_or_aliases=settings.SAMPLE_PROJECT_KEYWORDS)
except KeyError:
add_keywords(controller_client, cache, current_user.id, settings.SAMPLE_PROJECT_KEYWORDS)
user_labels = controller_client.get_labels_of_user(current_user.id)
training_classes = user_labels.get_class_ids(names_or_aliases=settings.SAMPLE_PROJECT_KEYWORDS)
try:
resp = controller_client.create_project(
user_id=current_user.id,
project_id=project.id,
task_id=project_task_hash,
args={"training_classes": training_classes},
)
logger.info("[create task] controller response: %s", resp)
except ValueError:
crud.project.soft_remove(db, id=project.id)
raise FailedToCreateProject()
background_tasks.add_task(
setup_sample_project_in_background,
db,
controller_client,
project_name=project.name,
project_id=project.id,
user_id=current_user.id,
project_task_hash=project_task_hash,
)
return {"result": project}
@router.post("/", response_model=schemas.ProjectOut)
def create_project(
*,
db: Session = Depends(deps.get_db),
current_user: models.User = Depends(deps.get_current_active_user),
project_in: schemas.ProjectCreate,
controller_client: ControllerClient = Depends(deps.get_controller_client),
user_labels: UserLabels = Depends(deps.get_user_labels),
clickhouse: YmirClickHouse = Depends(deps.get_clickhouse_client),
) -> Any:
if crud.project.is_duplicated_name(db, user_id=current_user.id, name=project_in.name):
raise DuplicateProjectError()
project = crud.project.create_project(db, user_id=current_user.id, obj_in=project_in)
task_id = gen_task_hash(current_user.id, project.id)
training_classes = user_labels.get_class_ids(names_or_aliases=project_in.training_keywords)
try:
resp = controller_client.create_project(
user_id=current_user.id,
project_id=project.id,
task_id=task_id,
args={"training_classes": training_classes},
)
logger.info("[create task] controller response: %s", resp)
except ValueError:
crud.project.soft_remove(db, id=project.id)
raise FailedToCreateProject()
task = crud.task.create_placeholder(
db, type_=TaskType.create_project, user_id=current_user.id, project_id=project.id
)
dataset_name = f"{project_in.name}_training_dataset"
dataset_paras = schemas.DatasetGroupCreate(name=dataset_name, project_id=project.id, user_id=current_user.id)
dataset_group = crud.dataset_group.create_with_user_id(db, user_id=current_user.id, obj_in=dataset_paras)
dataset_in = schemas.DatasetCreate(
name=dataset_name,
hash=task_id,
dataset_group_id=dataset_group.id,
project_id=project.id,
user_id=current_user.id,
source=task.type,
result_state=ResultState.ready,
task_id=task.id,
)
initial_dataset = crud.dataset.create_with_version(db, obj_in=dataset_in)
project = crud.project.update_resources(
db,
project_id=project.id,
project_update=schemas.ProjectUpdate(
training_dataset_group_id=dataset_group.id, initial_training_dataset_id=initial_dataset.id
),
)
try:
clickhouse.save_project_parameter(
dt=project.create_datetime,
user_id=project.user_id,
id_=project.id,
name=project.name,
training_type=TrainingType(project.training_type).name,
training_keywords=json.loads(project.training_keywords),
)
except FailedToConnectClickHouse:
logger.exception(
"[create project metrics] failed to write project(%s) stats to clickhouse, continue anyway",
project.name,
)
logger.info("[create project] project record created: %s", project)
return {"result": project}
@router.get(
"/{project_id}",
response_model=schemas.ProjectOut,
)
def get_project(
*,
db: Session = Depends(deps.get_db),
project_id: int = Path(...),
current_user: models.User = Depends(deps.get_current_active_user),
) -> Any:
project = crud.project.get_by_user_and_id(db, user_id=current_user.id, id=project_id)
if not project:
raise ProjectNotFound()
return {"result": project}
@router.patch(
"/{project_id}",
response_model=schemas.ProjectOut,
)
def update_project(
*,
db: Session = Depends(deps.get_db),
project_id: int = Path(...),
project_update: schemas.ProjectUpdate,
current_user: models.User = Depends(deps.get_current_active_user),
) -> Any:
project = crud.project.get_by_user_and_id(db, user_id=current_user.id, id=project_id)
if not project:
raise ProjectNotFound()
if project_update.initial_training_dataset_id is not None:
dataset = crud.dataset.get(db, id=project_update.initial_training_dataset_id)
if not dataset:
raise DatasetNotFound()
if project.training_dataset_group_id != dataset.dataset_group_id:
raise NoDatasetPermission()
project = crud.project.update_resources(db, project_id=project.id, project_update=project_update)
return {"result": project}
@router.delete(
"/{project_id}",
response_model=schemas.ProjectOut,
)
def delete_project(
*,
db: Session = Depends(deps.get_db),
project_id: int = Path(...),
controller_client: ControllerClient = Depends(deps.get_controller_client),
current_user: models.User = Depends(deps.get_current_active_user),
) -> Any:
project = crud.project.get_by_user_and_id(db, user_id=current_user.id, id=project_id)
if not project:
raise ProjectNotFound()
project = crud.project.soft_remove(db, id=project_id)
unfinished_tasks = crud.task.get_tasks_by_states(
db,
states=RunningStates,
including_deleted=True,
project_id=project_id,
)
for task in unfinished_tasks:
try:
controller_client.terminate_task(user_id=current_user.id, task_hash=task.hash, task_type=task.type)
except Exception:
logger.info(f"Failed to terminate task: {task.hash} of project_id: {project_id}")
continue
return {"result": project}
| true | true |
1c30375076d1a9e4b146b86d5824aa4268b2444f | 3,190 | py | Python | code/ReID_net/scripts/postproc/crf/crf_davis.py | MTonyM/PReMVOS | 3d01f0c6156628083a4c8441b4b57622c500e04e | [
"MIT"
] | 140 | 2018-10-25T11:58:34.000Z | 2022-01-18T15:29:38.000Z | code/ReID_net/scripts/postproc/crf/crf_davis.py | MTonyM/PReMVOS | 3d01f0c6156628083a4c8441b4b57622c500e04e | [
"MIT"
] | 18 | 2018-11-21T04:48:03.000Z | 2020-09-14T09:30:56.000Z | code/ReID_net/scripts/postproc/crf/crf_davis.py | MTonyM/PReMVOS | 3d01f0c6156628083a4c8441b4b57622c500e04e | [
"MIT"
] | 32 | 2018-10-25T11:58:57.000Z | 2021-12-27T06:13:45.000Z | #!/usr/bin/env python
import pydensecrf.densecrf as dcrf
from pydensecrf.utils import unary_from_softmax
from scipy.ndimage import imread
from scipy.misc import imsave
import pickle
import numpy
import glob
import os
import matplotlib.pyplot as plt
from joblib import Parallel, delayed
import sys
# Hard-coded cluster paths: DAVIS images/annotations and the directory
# where network predictions (pickles) were forwarded.
imgs_path = "/work/mahadevan/data/DAVIS/JPEGImages/480p/"
annots_path = "/work/mahadevan/data/DAVIS/Annotations/480p/"
preds_path_prefix = "/home/mahadevan/vision/savitar/forwarded/"
def convert_path(inp):
  """Derive the image, groundtruth and CRF output paths from a prediction
  pickle path (which contains a ".../forwarded/<model>/valid/<seq>/" part)."""
  parts = inp.split("/")
  fwd_idx = parts.index("forwarded")
  seq = parts[fwd_idx + 3]
  fname = parts[-1]
  im_path = imgs_path + seq + "/" + fname.replace(".pickle", ".jpg")
  gt_path = annots_path + seq + "/" + fname.replace(".pickle", ".png")
  # Output goes to a sibling "<model>_crf" tree with .png filenames.
  parts[fwd_idx + 1] = parts[fwd_idx + 1] + "_crf"
  parts[-1] = fname.replace(".pickle", ".png")
  out_path = "/".join(parts)
  return im_path, gt_path, out_path
def mkdir_p(d):
  """Create directory *d* and any missing parents, like ``mkdir -p``.

  An already-existing directory is not an error.  Uses ``exist_ok``
  instead of catching the magic errno 17 (EEXIST) by hand.
  """
  os.makedirs(d, exist_ok=True)
def apply_crf(im, pred):
  """Refine a 2-class softmax prediction with a dense CRF.

  NOTE(review): the image size is hard-coded to 854x480 (DAVIS 480p);
  other resolutions will break the DenseCRF2D/reshape calls.
  The pairwise parameters look tuned (grid-searched?) — confirm before
  changing them.
  """
  im = numpy.ascontiguousarray(im)
  # pred arrives HxWxC; pydensecrf wants C x (H*W)-compatible layout.
  pred = numpy.ascontiguousarray(pred.swapaxes(0, 2).swapaxes(1, 2))
  d = dcrf.DenseCRF2D(854, 480, 2)  # width, height, nlabels
  unaries = unary_from_softmax(pred, scale=1.0)
  d.setUnaryEnergy(unaries)
  d.addPairwiseGaussian(sxy=0.220880737269, compat=1.24845093352)
  d.addPairwiseBilateral(sxy=22.3761305044, srgb=7.70254062277, rgbim=im, compat=1.40326787165)
  processed = d.inference(12)
  # Back to a 480x854 hard label map.
  res = numpy.argmax(processed, axis=0).reshape(480, 854)
  return res
def do_seq(seq, model, save=True):
  """CRF-post-process every prediction of sequence *seq* for *model*.

  Optionally saves the refined masks, prints per-frame IoU against the
  groundtruth, and returns the mean IoU excluding the first and last
  frames (DAVIS evaluation convention).
  """
  preds_path = preds_path_prefix + model + "/valid/"
  files = sorted(glob.glob(preds_path + seq + "/*.pickle"))
  ious = []
  for f in files:
    pred_path = f
    im_path, gt_path, out_path = convert_path(f)
    # Bug fix: open the pickle in binary mode and close it deterministically.
    # The old text-mode `pickle.load(open(path))` leaks the handle and
    # fails under Python 3.
    with open(pred_path, "rb") as pred_file:
      pred = pickle.load(pred_file)
    im = imread(im_path)
    res = apply_crf(im, pred).astype("uint8") * 255
    if save:
      dir_ = "/".join(out_path.split("/")[:-1])
      mkdir_p(dir_)
      imsave(out_path, res)
    # Compute IoU against the groundtruth annotation as well.
    groundtruth = imread(gt_path)
    I = numpy.logical_and(res == 255, groundtruth == 255).sum()
    U = numpy.logical_or(res == 255, groundtruth == 255).sum()
    IOU = float(I) / U
    ious.append(IOU)
    print(out_path, "IOU", IOU)
  # First and last frames are excluded from the mean (DAVIS convention).
  return numpy.mean(ious[1:-1])
def main():
  """Post-process the configured sequences in parallel and report IoU.

  Usage: crf_davis.py <model-name>
  """
  seqs = ["dance-twirl"]
  save = True
  assert len(sys.argv) == 2
  model = sys.argv[1]
  # One joblib worker per sequence, capped at 20.
  results = Parallel(n_jobs=20)(
      delayed(do_seq)(seq, model, save=save) for seq in seqs)
  print(results)
  print(numpy.mean(results))
if __name__ == "__main__":
main()
| 26.806723 | 118 | 0.658307 |
import pydensecrf.densecrf as dcrf
from pydensecrf.utils import unary_from_softmax
from scipy.ndimage import imread
from scipy.misc import imsave
import pickle
import numpy
import glob
import os
import matplotlib.pyplot as plt
from joblib import Parallel, delayed
import sys
imgs_path = "/work/mahadevan/data/DAVIS/JPEGImages/480p/"
annots_path = "/work/mahadevan/data/DAVIS/Annotations/480p/"
preds_path_prefix = "/home/mahadevan/vision/savitar/forwarded/"
def convert_path(inp):
sp = inp.split("/")
fwd_idx = sp.index("forwarded")
seq = sp[fwd_idx + 3]
fn = sp[-1]
im_path = imgs_path + seq + "/" + fn.replace(".pickle", ".jpg")
gt_path = annots_path + seq + "/" + fn.replace(".pickle", ".png")
sp[fwd_idx + 1] += "_crf"
sp[-1] = sp[-1].replace(".pickle", ".png")
out_path = "/".join(sp)
return im_path, gt_path, out_path
def mkdir_p(d):
try:
os.makedirs(d)
except OSError as err:
if err.errno != 17:
raise
def apply_crf(im, pred):
im = numpy.ascontiguousarray(im)
pred = numpy.ascontiguousarray(pred.swapaxes(0, 2).swapaxes(1, 2))
d = dcrf.DenseCRF2D(854, 480, 2)
unaries = unary_from_softmax(pred, scale=1.0)
d.setUnaryEnergy(unaries)
d.addPairwiseGaussian(sxy=0.220880737269, compat=1.24845093352)
d.addPairwiseBilateral(sxy=22.3761305044, srgb=7.70254062277, rgbim=im, compat=1.40326787165)
processed = d.inference(12)
res = numpy.argmax(processed, axis=0).reshape(480, 854)
return res
def do_seq(seq, model, save=True):
preds_path = preds_path_prefix + model + "/valid/"
files = sorted(glob.glob(preds_path + seq + "/*.pickle"))
ious = []
for f in files:
pred_path = f
im_path, gt_path, out_path = convert_path(f)
pred = pickle.load(open(pred_path))
im = imread(im_path)
res = apply_crf(im, pred).astype("uint8") * 255
if save:
dir_ = "/".join(out_path.split("/")[:-1])
mkdir_p(dir_)
imsave(out_path, res)
groundtruth = imread(gt_path)
I = numpy.logical_and(res == 255, groundtruth == 255).sum()
U = numpy.logical_or(res == 255, groundtruth == 255).sum()
IOU = float(I) / U
ious.append(IOU)
print(out_path, "IOU", IOU)
return numpy.mean(ious[1:-1])
def main():
seqs = ["dance-twirl"]
save = True
assert len(sys.argv) == 2
model = sys.argv[1]
ious = Parallel(n_jobs=20)(delayed(do_seq)(seq, model, save=save) for seq in seqs)
print(ious)
print(numpy.mean(ious))
if __name__ == "__main__":
main()
| true | true |
1c30385f9a3584b953548dfaf122c9c827e4f11e | 85 | py | Python | yuri/exceptions.py | kanade0404/yuri | dcea86566a176be96fac0a5db19d59595e234269 | [
"MIT"
] | null | null | null | yuri/exceptions.py | kanade0404/yuri | dcea86566a176be96fac0a5db19d59595e234269 | [
"MIT"
] | null | null | null | yuri/exceptions.py | kanade0404/yuri | dcea86566a176be96fac0a5db19d59595e234269 | [
"MIT"
] | null | null | null | class YuriException(Exception):
pass
class RouteError(YuriException):
pass
| 12.142857 | 32 | 0.741176 | class YuriException(Exception):
pass
class RouteError(YuriException):
pass
| true | true |
1c3038e5ab80989a97904ddbf9925efa252264be | 15,485 | py | Python | salt/modules/boto_cloudtrail.py | ahammond/salt | 945b21b70dbe708716d7b009a2005ef0acf76e6b | [
"Apache-2.0"
] | null | null | null | salt/modules/boto_cloudtrail.py | ahammond/salt | 945b21b70dbe708716d7b009a2005ef0acf76e6b | [
"Apache-2.0"
] | null | null | null | salt/modules/boto_cloudtrail.py | ahammond/salt | 945b21b70dbe708716d7b009a2005ef0acf76e6b | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
'''
Connection module for Amazon CloudTrail
.. versionadded:: 2016.3.0
:configuration: This module accepts explicit Lambda credentials but can also
utilize IAM roles assigned to the instance through Instance Profiles.
Dynamic credentials are then automatically obtained from AWS API and no
further configuration is necessary. More Information available at:
.. code-block:: text
http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
If IAM roles are not used you need to specify them either in a pillar or
in the minion's config file:
.. code-block:: yaml
cloudtrail.keyid: GKTADJGHEIQSXMKKRBJ08H
cloudtrail.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
A region may also be specified in the configuration:
.. code-block:: yaml
cloudtrail.region: us-east-1
If a region is not specified, the default is us-east-1.
It's also possible to specify key, keyid and region via a profile, either
as a passed in dict, or as a string to pull from pillars or minion config:
.. code-block:: yaml
myprofile:
keyid: GKTADJGHEIQSXMKKRBJ08H
key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
region: us-east-1
:depends: boto3
'''
# keep lint from choking on _get_conn and _cache_id
#pylint: disable=E0602
# Import Python libs
from __future__ import absolute_import
import logging
from distutils.version import LooseVersion as _LooseVersion # pylint: disable=import-error,no-name-in-module
# Import Salt libs
import salt.utils.boto3
import salt.utils.compat
import salt.utils
log = logging.getLogger(__name__)
# Import third party libs
# pylint: disable=import-error
try:
#pylint: disable=unused-import
import boto
import boto3
#pylint: enable=unused-import
from botocore.exceptions import ClientError
logging.getLogger('boto3').setLevel(logging.CRITICAL)
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
# pylint: enable=import-error
def __virtual__():
'''
Only load if boto libraries exist and if boto libraries are greater than
a given version.
'''
required_boto3_version = '1.2.5'
# the boto_lambda execution module relies on the connect_to_region() method
# which was added in boto 2.8.0
# https://github.com/boto/boto/commit/33ac26b416fbb48a60602542b4ce15dcc7029f12
if not HAS_BOTO:
return (False, 'The boto_cloudtrial module could not be loaded: boto libraries not found')
elif _LooseVersion(boto3.__version__) < _LooseVersion(required_boto3_version):
return (False, 'The boto_cloudtrial module could not be loaded: '
'boto version {0} or later must be installed.'.format(required_boto3_version))
else:
return True
def __init__(opts):
salt.utils.compat.pack_dunder(__name__)
if HAS_BOTO:
__utils__['boto3.assign_funcs'](__name__, 'cloudtrail')
def exists(Name,
region=None, key=None, keyid=None, profile=None):
'''
Given a trail name, check to see if the given trail exists.
Returns True if the given trail exists and returns False if the given
trail does not exist.
CLI Example:
.. code-block:: bash
salt myminion boto_cloudtrail.exists mytrail
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
conn.get_trail_status(Name=Name)
return {'exists': True}
except ClientError as e:
err = salt.utils.boto3.get_error(e)
if e.response.get('Error', {}).get('Code') == 'TrailNotFoundException':
return {'exists': False}
return {'error': err}
def create(Name,
S3BucketName, S3KeyPrefix=None,
SnsTopicName=None,
IncludeGlobalServiceEvents=None,
IsMultiRegionTrail=None,
EnableLogFileValidation=None,
CloudWatchLogsLogGroupArn=None,
CloudWatchLogsRoleArn=None,
KmsKeyId=None,
region=None, key=None, keyid=None, profile=None):
'''
Given a valid config, create a trail.
Returns {created: true} if the trail was created and returns
{created: False} if the trail was not created.
CLI Example:
.. code-block:: bash
salt myminion boto_cloudtrail.create my_trail my_bucket
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
kwargs = {}
for arg in ('S3KeyPrefix', 'SnsTopicName', 'IncludeGlobalServiceEvents',
'IsMultiRegionTrail',
'EnableLogFileValidation', 'CloudWatchLogsLogGroupArn',
'CloudWatchLogsRoleArn', 'KmsKeyId'):
if locals()[arg] is not None:
kwargs[arg] = locals()[arg]
trail = conn.create_trail(Name=Name,
S3BucketName=S3BucketName,
**kwargs)
if trail:
log.info('The newly created trail name is {0}'.format(trail['Name']))
return {'created': True, 'name': trail['Name']}
else:
log.warning('Trail was not created')
return {'created': False}
except ClientError as e:
return {'created': False, 'error': salt.utils.boto3.get_error(e)}
def delete(Name,
region=None, key=None, keyid=None, profile=None):
'''
Given a trail name, delete it.
Returns {deleted: true} if the trail was deleted and returns
{deleted: false} if the trail was not deleted.
CLI Example:
.. code-block:: bash
salt myminion boto_cloudtrail.delete mytrail
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
conn.delete_trail(Name=Name)
return {'deleted': True}
except ClientError as e:
return {'deleted': False, 'error': salt.utils.boto3.get_error(e)}
def describe(Name,
region=None, key=None, keyid=None, profile=None):
'''
Given a trail name describe its properties.
Returns a dictionary of interesting properties.
CLI Example:
.. code-block:: bash
salt myminion boto_cloudtrail.describe mytrail
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
trails = conn.describe_trails(trailNameList=[Name])
if trails and len(trails.get('trailList', [])) > 0:
keys = ('Name', 'S3BucketName', 'S3KeyPrefix',
'SnsTopicName', 'IncludeGlobalServiceEvents',
'IsMultiRegionTrail',
'HomeRegion', 'TrailARN',
'LogFileValidationEnabled', 'CloudWatchLogsLogGroupArn',
'CloudWatchLogsRoleArn', 'KmsKeyId')
trail = trails['trailList'].pop()
return {'trail': dict([(k, trail.get(k)) for k in keys])}
else:
return {'trail': None}
except ClientError as e:
err = salt.utils.boto3.get_error(e)
if e.response.get('Error', {}).get('Code') == 'TrailNotFoundException':
return {'trail': None}
return {'error': salt.utils.boto3.get_error(e)}
def status(Name,
region=None, key=None, keyid=None, profile=None):
'''
Given a trail name describe its properties.
Returns a dictionary of interesting properties.
CLI Example:
.. code-block:: bash
salt myminion boto_cloudtrail.describe mytrail
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
trail = conn.get_trail_status(Name=Name)
if trail:
keys = ('IsLogging', 'LatestDeliveryError', 'LatestNotificationError',
'LatestDeliveryTime', 'LatestNotificationTime',
'StartLoggingTime', 'StopLoggingTime',
'LatestCloudWatchLogsDeliveryError',
'LatestCloudWatchLogsDeliveryTime',
'LatestDigestDeliveryTime', 'LatestDigestDeliveryError',
'LatestDeliveryAttemptTime',
'LatestNotificationAttemptTime',
'LatestNotificationAttemptSucceeded',
'LatestDeliveryAttemptSucceeded',
'TimeLoggingStarted',
'TimeLoggingStopped')
return {'trail': dict([(k, trail.get(k)) for k in keys])}
else:
return {'trail': None}
except ClientError as e:
err = salt.utils.boto3.get_error(e)
if e.response.get('Error', {}).get('Code') == 'TrailNotFoundException':
return {'trail': None}
return {'error': salt.utils.boto3.get_error(e)}
def list(region=None, key=None, keyid=None, profile=None):
'''
List all trails
Returns list of trails
CLI Example:
.. code-block:: yaml
policies:
- {...}
- {...}
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
trails = conn.describe_trails()
if not bool(trails.get('trailList')):
log.warning('No trails found')
return {'trails': trails.get('trailList', [])}
except ClientError as e:
return {'error': salt.utils.boto3.get_error(e)}
def update(Name,
S3BucketName, S3KeyPrefix=None,
SnsTopicName=None,
IncludeGlobalServiceEvents=None,
IsMultiRegionTrail=None,
EnableLogFileValidation=None,
CloudWatchLogsLogGroupArn=None,
CloudWatchLogsRoleArn=None,
KmsKeyId=None,
region=None, key=None, keyid=None, profile=None):
'''
Given a valid config, update a trail.
Returns {created: true} if the trail was created and returns
{created: False} if the trail was not created.
CLI Example:
.. code-block:: bash
salt myminion boto_cloudtrail.update my_trail my_bucket
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
kwargs = {}
for arg in ('S3KeyPrefix', 'SnsTopicName', 'IncludeGlobalServiceEvents',
'IsMultiRegionTrail',
'EnableLogFileValidation', 'CloudWatchLogsLogGroupArn',
'CloudWatchLogsRoleArn', 'KmsKeyId'):
if locals()[arg] is not None:
kwargs[arg] = locals()[arg]
trail = conn.update_trail(Name=Name,
S3BucketName=S3BucketName,
**kwargs)
if trail:
log.info('The updated trail name is {0}'.format(trail['Name']))
return {'updated': True, 'name': trail['Name']}
else:
log.warning('Trail was not created')
return {'updated': False}
except ClientError as e:
return {'updated': False, 'error': salt.utils.boto3.get_error(e)}
def start_logging(Name,
region=None, key=None, keyid=None, profile=None):
'''
Start logging for a trail
Returns {started: true} if the trail was started and returns
{started: False} if the trail was not started.
CLI Example:
.. code-block:: bash
salt myminion boto_cloudtrail.start_logging my_trail
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
conn.start_logging(Name=Name)
return {'started': True}
except ClientError as e:
return {'started': False, 'error': salt.utils.boto3.get_error(e)}
def stop_logging(Name,
region=None, key=None, keyid=None, profile=None):
'''
Stop logging for a trail
Returns {stopped: true} if the trail was stopped and returns
{stopped: False} if the trail was not stopped.
CLI Example:
.. code-block:: bash
salt myminion boto_cloudtrail.stop_logging my_trail
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
conn.stop_logging(Name=Name)
return {'stopped': True}
except ClientError as e:
return {'stopped': False, 'error': salt.utils.boto3.get_error(e)}
def _get_trail_arn(name, region=None, key=None, keyid=None, profile=None):
if name.startswith('arn:aws:cloudtrail:'):
return name
account_id = __salt__['boto_iam.get_account_id'](
region=region, key=key, keyid=keyid, profile=profile
)
if profile and 'region' in profile:
region = profile['region']
if region is None:
region = 'us-east-1'
return 'arn:aws:cloudtrail:{0}:{1}:trail/{2}'.format(region, account_id, name)
def add_tags(Name,
region=None, key=None, keyid=None, profile=None, **kwargs):
'''
Add tags to a trail
Returns {tagged: true} if the trail was tagged and returns
{tagged: False} if the trail was not tagged.
CLI Example:
.. code-block:: bash
salt myminion boto_cloudtrail.add_tags my_trail tag_a=tag_value tag_b=tag_value
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
tagslist = []
for k, v in kwargs.iteritems():
if str(k).startswith('__'):
continue
tagslist.append({'Key': str(k), 'Value': str(v)})
conn.add_tags(ResourceId=_get_trail_arn(Name,
region=region, key=key, keyid=keyid,
profile=profile), TagsList=tagslist)
return {'tagged': True}
except ClientError as e:
return {'tagged': False, 'error': salt.utils.boto3.get_error(e)}
def remove_tags(Name,
region=None, key=None, keyid=None, profile=None, **kwargs):
'''
Remove tags from a trail
Returns {tagged: true} if the trail was tagged and returns
{tagged: False} if the trail was not tagged.
CLI Example:
.. code-block:: bash
salt myminion boto_cloudtrail.remove_tags my_trail tag_a=tag_value tag_b=tag_value
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
tagslist = []
for k, v in kwargs.iteritems():
if str(k).startswith('__'):
continue
tagslist.append({'Key': str(k), 'Value': str(v)})
conn.remove_tags(ResourceId=_get_trail_arn(Name,
region=region, key=key, keyid=keyid,
profile=profile), TagsList=tagslist)
return {'tagged': True}
except ClientError as e:
return {'tagged': False, 'error': salt.utils.boto3.get_error(e)}
def list_tags(Name,
region=None, key=None, keyid=None, profile=None):
'''
List tags of a trail
Returns:
tags:
- {...}
- {...}
CLI Example:
.. code-block:: bash
salt myminion boto_cloudtrail.list_tags my_trail
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
rid = _get_trail_arn(Name,
region=region, key=key, keyid=keyid,
profile=profile)
ret = conn.list_tags(ResourceIdList=[rid])
tlist = ret.get('ResourceTagList', []).pop().get('TagsList')
tagdict = {}
for tag in tlist:
tagdict[tag.get('Key')] = tag.get('Value')
return {'tags': tagdict}
except ClientError as e:
return {'error': salt.utils.boto3.get_error(e)}
| 30.846614 | 109 | 0.614982 |
from __future__ import absolute_import
import logging
from distutils.version import LooseVersion as _LooseVersion
import salt.utils.boto3
import salt.utils.compat
import salt.utils
log = logging.getLogger(__name__)
try:
import boto
import boto3
from botocore.exceptions import ClientError
logging.getLogger('boto3').setLevel(logging.CRITICAL)
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def __virtual__():
required_boto3_version = '1.2.5'
if not HAS_BOTO:
return (False, 'The boto_cloudtrial module could not be loaded: boto libraries not found')
elif _LooseVersion(boto3.__version__) < _LooseVersion(required_boto3_version):
return (False, 'The boto_cloudtrial module could not be loaded: '
'boto version {0} or later must be installed.'.format(required_boto3_version))
else:
return True
def __init__(opts):
salt.utils.compat.pack_dunder(__name__)
if HAS_BOTO:
__utils__['boto3.assign_funcs'](__name__, 'cloudtrail')
def exists(Name,
region=None, key=None, keyid=None, profile=None):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
conn.get_trail_status(Name=Name)
return {'exists': True}
except ClientError as e:
err = salt.utils.boto3.get_error(e)
if e.response.get('Error', {}).get('Code') == 'TrailNotFoundException':
return {'exists': False}
return {'error': err}
def create(Name,
S3BucketName, S3KeyPrefix=None,
SnsTopicName=None,
IncludeGlobalServiceEvents=None,
IsMultiRegionTrail=None,
EnableLogFileValidation=None,
CloudWatchLogsLogGroupArn=None,
CloudWatchLogsRoleArn=None,
KmsKeyId=None,
region=None, key=None, keyid=None, profile=None):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
kwargs = {}
for arg in ('S3KeyPrefix', 'SnsTopicName', 'IncludeGlobalServiceEvents',
'IsMultiRegionTrail',
'EnableLogFileValidation', 'CloudWatchLogsLogGroupArn',
'CloudWatchLogsRoleArn', 'KmsKeyId'):
if locals()[arg] is not None:
kwargs[arg] = locals()[arg]
trail = conn.create_trail(Name=Name,
S3BucketName=S3BucketName,
**kwargs)
if trail:
log.info('The newly created trail name is {0}'.format(trail['Name']))
return {'created': True, 'name': trail['Name']}
else:
log.warning('Trail was not created')
return {'created': False}
except ClientError as e:
return {'created': False, 'error': salt.utils.boto3.get_error(e)}
def delete(Name,
region=None, key=None, keyid=None, profile=None):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
conn.delete_trail(Name=Name)
return {'deleted': True}
except ClientError as e:
return {'deleted': False, 'error': salt.utils.boto3.get_error(e)}
def describe(Name,
region=None, key=None, keyid=None, profile=None):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
trails = conn.describe_trails(trailNameList=[Name])
if trails and len(trails.get('trailList', [])) > 0:
keys = ('Name', 'S3BucketName', 'S3KeyPrefix',
'SnsTopicName', 'IncludeGlobalServiceEvents',
'IsMultiRegionTrail',
'HomeRegion', 'TrailARN',
'LogFileValidationEnabled', 'CloudWatchLogsLogGroupArn',
'CloudWatchLogsRoleArn', 'KmsKeyId')
trail = trails['trailList'].pop()
return {'trail': dict([(k, trail.get(k)) for k in keys])}
else:
return {'trail': None}
except ClientError as e:
err = salt.utils.boto3.get_error(e)
if e.response.get('Error', {}).get('Code') == 'TrailNotFoundException':
return {'trail': None}
return {'error': salt.utils.boto3.get_error(e)}
def status(Name,
region=None, key=None, keyid=None, profile=None):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
trail = conn.get_trail_status(Name=Name)
if trail:
keys = ('IsLogging', 'LatestDeliveryError', 'LatestNotificationError',
'LatestDeliveryTime', 'LatestNotificationTime',
'StartLoggingTime', 'StopLoggingTime',
'LatestCloudWatchLogsDeliveryError',
'LatestCloudWatchLogsDeliveryTime',
'LatestDigestDeliveryTime', 'LatestDigestDeliveryError',
'LatestDeliveryAttemptTime',
'LatestNotificationAttemptTime',
'LatestNotificationAttemptSucceeded',
'LatestDeliveryAttemptSucceeded',
'TimeLoggingStarted',
'TimeLoggingStopped')
return {'trail': dict([(k, trail.get(k)) for k in keys])}
else:
return {'trail': None}
except ClientError as e:
err = salt.utils.boto3.get_error(e)
if e.response.get('Error', {}).get('Code') == 'TrailNotFoundException':
return {'trail': None}
return {'error': salt.utils.boto3.get_error(e)}
def list(region=None, key=None, keyid=None, profile=None):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
trails = conn.describe_trails()
if not bool(trails.get('trailList')):
log.warning('No trails found')
return {'trails': trails.get('trailList', [])}
except ClientError as e:
return {'error': salt.utils.boto3.get_error(e)}
def update(Name,
S3BucketName, S3KeyPrefix=None,
SnsTopicName=None,
IncludeGlobalServiceEvents=None,
IsMultiRegionTrail=None,
EnableLogFileValidation=None,
CloudWatchLogsLogGroupArn=None,
CloudWatchLogsRoleArn=None,
KmsKeyId=None,
region=None, key=None, keyid=None, profile=None):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
kwargs = {}
for arg in ('S3KeyPrefix', 'SnsTopicName', 'IncludeGlobalServiceEvents',
'IsMultiRegionTrail',
'EnableLogFileValidation', 'CloudWatchLogsLogGroupArn',
'CloudWatchLogsRoleArn', 'KmsKeyId'):
if locals()[arg] is not None:
kwargs[arg] = locals()[arg]
trail = conn.update_trail(Name=Name,
S3BucketName=S3BucketName,
**kwargs)
if trail:
log.info('The updated trail name is {0}'.format(trail['Name']))
return {'updated': True, 'name': trail['Name']}
else:
log.warning('Trail was not created')
return {'updated': False}
except ClientError as e:
return {'updated': False, 'error': salt.utils.boto3.get_error(e)}
def start_logging(Name,
region=None, key=None, keyid=None, profile=None):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
conn.start_logging(Name=Name)
return {'started': True}
except ClientError as e:
return {'started': False, 'error': salt.utils.boto3.get_error(e)}
def stop_logging(Name,
region=None, key=None, keyid=None, profile=None):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
conn.stop_logging(Name=Name)
return {'stopped': True}
except ClientError as e:
return {'stopped': False, 'error': salt.utils.boto3.get_error(e)}
def _get_trail_arn(name, region=None, key=None, keyid=None, profile=None):
if name.startswith('arn:aws:cloudtrail:'):
return name
account_id = __salt__['boto_iam.get_account_id'](
region=region, key=key, keyid=keyid, profile=profile
)
if profile and 'region' in profile:
region = profile['region']
if region is None:
region = 'us-east-1'
return 'arn:aws:cloudtrail:{0}:{1}:trail/{2}'.format(region, account_id, name)
def add_tags(Name,
region=None, key=None, keyid=None, profile=None, **kwargs):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
tagslist = []
for k, v in kwargs.iteritems():
if str(k).startswith('__'):
continue
tagslist.append({'Key': str(k), 'Value': str(v)})
conn.add_tags(ResourceId=_get_trail_arn(Name,
region=region, key=key, keyid=keyid,
profile=profile), TagsList=tagslist)
return {'tagged': True}
except ClientError as e:
return {'tagged': False, 'error': salt.utils.boto3.get_error(e)}
def remove_tags(Name,
region=None, key=None, keyid=None, profile=None, **kwargs):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
tagslist = []
for k, v in kwargs.iteritems():
if str(k).startswith('__'):
continue
tagslist.append({'Key': str(k), 'Value': str(v)})
conn.remove_tags(ResourceId=_get_trail_arn(Name,
region=region, key=key, keyid=keyid,
profile=profile), TagsList=tagslist)
return {'tagged': True}
except ClientError as e:
return {'tagged': False, 'error': salt.utils.boto3.get_error(e)}
def list_tags(Name,
region=None, key=None, keyid=None, profile=None):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
rid = _get_trail_arn(Name,
region=region, key=key, keyid=keyid,
profile=profile)
ret = conn.list_tags(ResourceIdList=[rid])
tlist = ret.get('ResourceTagList', []).pop().get('TagsList')
tagdict = {}
for tag in tlist:
tagdict[tag.get('Key')] = tag.get('Value')
return {'tags': tagdict}
except ClientError as e:
return {'error': salt.utils.boto3.get_error(e)}
| true | true |
1c3039638c64ad046537ebe5a2c3abd3d2dd9cc1 | 5,342 | py | Python | scripts/makebrainstools.py | reckbo/ppl | 916d96188a43bbc5915020edfa12f14895b5f66c | [
"BSD-3-Clause"
] | null | null | null | scripts/makebrainstools.py | reckbo/ppl | 916d96188a43bbc5915020edfa12f14895b5f66c | [
"BSD-3-Clause"
] | null | null | null | scripts/makebrainstools.py | reckbo/ppl | 916d96188a43bbc5915020edfa12f14895b5f66c | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
from __future__ import print_function
from plumbum import local, FG, cli
from plumbum.cmd import git, cmake, make
import logging
from util import logfmt
import sys
logger = logging.getLogger()
logging.basicConfig(level=logging.DEBUG, format=logfmt(__file__))
class App(cli.Application):
DESCRIPTION = "Downloads and compiles BRAINSTools binaries. Output is 'BRAINSTools-bin-<hash>'."
prefix = cli.SwitchAttr('-d', cli.ExistingDirectory, help="Root directory in which to install repo", default=local.path('/data/pnl/soft'))
githash = cli.SwitchAttr('-g', help='GitHub hash commit. If omitted will get latest commit from the master branch.')
def main():
srcdir = self.prefix / "BRAINSTools"
blddir = self.prefix / "BRAINSTools-build"
logging.info("Get source:")
if not srcdir.exists():
repo = 'https://github.com/BRAINSia/BRAINSTools.git'
git("clone", repo, srcdir)
else:
with local.cwd(srcdir):
git("fetch", "origin")
if self.githash is not None:
git("checkout", args.githash)
clone_hash = git("rev-parse", "--short", "HEAD")[:-1] # remove trailing \n
logging.info("Build code:")
blddir.mkdir()
with local.cwd(blddir):
cmake(srcdir
,"-DBRAINSTools_INSTALL_DEVELOPMENT=OFF"
,"-DBRAINSTools_MAX_TEST_LEVEL=0"
,"-DBRAINSTools_SUPERBUILD=ON"
,"-DBRAINSTools_USE_QT=OFF"
,"-DBRAINS_DEBUG_IMAGE_WRITE=OFF"
,"-DBUILD_STYLE_UTILS=OFF"
,"-DBUILD_TESTING=OFF"
,"-DCMAKE_BUILD_TYPE=Release"
,"-DCMAKE_COLOR_MAKEFILE=ON"
,"-DCMAKE_EXE_LINKER_FLAGS=' '"
,"-DCMAKE_EXE_LINKER_FLAGS_DEBUG="
,"-DCMAKE_EXE_LINKER_FLAGS_MINSIZEREL="
,"-DCMAKE_EXE_LINKER_FLAGS_RELEASE="
,"-DCMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO="
,"-DCMAKE_EXPORT_COMPILE_COMMANDS=OFF"
,"-DCMAKE_INSTALL_PREFIX:PATH=/usr/local"
,"-DCMAKE_MODULE_LINKER_FLAGS=' '"
,"-DCMAKE_MODULE_LINKER_FLAGS_DEBUG="
,"-DCMAKE_MODULE_LINKER_FLAGS_MINSIZEREL="
,"-DCMAKE_MODULE_LINKER_FLAGS_RELEASE="
,"-DCMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO="
,"-DCMAKE_PROJECT_NAME:STATIC=SuperBuild_BRAINSTools"
,"-DCMAKE_SHARED_LINKER_FLAGS=' '"
,"-DCMAKE_SHARED_LINKER_FLAGS_DEBUG="
,"-DCMAKE_SHARED_LINKER_FLAGS_MINSIZEREL="
,"-DCMAKE_SHARED_LINKER_FLAGS_RELEASE="
,"-DCMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO="
,"-DCMAKE_SKIP_INSTALL_RPATH=NO"
,"-DCMAKE_SKIP_RPATH=NO"
,"-DCMAKE_STATIC_LINKER_FLAGS="
,"-DCMAKE_STATIC_LINKER_FLAGS_DEBUG="
,"-DCMAKE_STATIC_LINKER_FLAGS_MINSIZEREL="
,"-DCMAKE_STATIC_LINKER_FLAGS_RELEASE="
,"-DCMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO="
,"-DCMAKE_USE_RELATIVE_PATHS=OFF"
,"-DCMAKE_VERBOSE_MAKEFILE=FALSE"
,"-DCOVERAGE_EXTRA_FLAGS=-l"
,"-DCTEST_SUBMIT_RETRY_COUNT=3"
,"-DCTEST_SUBMIT_RETRY_DELAY=5"
,"-DDART_TESTING_TIMEOUT=1500"
,"-DEXTERNAL_PROJECT_BUILD_TYPE=Release"
,"-DFORCE_EXTERNAL_BUILDS=OFF"
,"-DITK_VERSION_MAJOR=4"
,"-DSuperBuild_BRAINSTools_BUILD_DICOM_SUPPORT=ON"
,"-DSuperBuild_BRAINSTools_USE_CTKAPPLAUNCHER=OFF"
,"-DSuperBuild_BRAINSTools_USE_GIT_PROTOCOL=ON"
,"-DUSE_ANTS=ON"
,"-DUSE_AutoWorkup=OFF"
,"-DUSE_BRAINSABC=OFF"
,"-DUSE_BRAINSConstellationDetector=OFF"
,"-DUSE_BRAINSContinuousClass=OFF"
,"-DUSE_BRAINSCreateLabelMapFromProbabilityMaps=OFF"
,"-DUSE_BRAINSCut=OFF"
,"-DUSE_BRAINSDWICleanup=OFF"
,"-DUSE_BRAINSDemonWarp=OFF"
,"-DUSE_BRAINSFit=OFF"
,"-DUSE_BRAINSInitializedControlPoints=OFF"
,"-DUSE_BRAINSLabelStats=OFF"
,"-DUSE_BRAINSLandmarkInitializer=OFF"
,"-DUSE_BRAINSMultiModeSegment=OFF"
,"-DUSE_BRAINSMultiSTAPLE=OFF"
,"-DUSE_BRAINSMush=OFF"
,"-DUSE_BRAINSPosteriorToContinuousClass=OFF"
,"-DUSE_BRAINSROIAuto=OFF"
,"-DUSE_BRAINSResample=OFF"
,"-DUSE_BRAINSSnapShotWriter=OFF"
,"-DUSE_BRAINSStripRotation=OFF"
,"-DUSE_BRAINSSurfaceTools=OFF"
,"-DUSE_BRAINSTalairach=OFF"
,"-DUSE_BRAINSTransformConvert=OFF"
,"-DUSE_ConvertBetweenFileFormats=ON"
,"-DUSE_DWIConvert=ON"
,"-DUSE_DebugImageViewer=OFF"
,"-DUSE_GTRACT=OFF"
,"-DUSE_ICCDEF=OFF"
,"-DUSE_ImageCalculator=OFF"
,"-DUSE_ReferenceAtlas=OFF"
,"-DUSE_SYSTEM_DCMTK=OFF"
,"-DUSE_SYSTEM_ITK=OFF"
,"-DUSE_SYSTEM_SlicerExecutionModel=OFF"
,"-DUSE_SYSTEM_VTK=OFF"
,"-DVTK_GIT_REPOSITORY=git://vtk.org/VTK.git"
)
make['all'] & FG
outbin = self.prefix / 'BRAINSTools-bin-'+clone_hash
(blddir / 'bin').move(outbin)
if __name__ == '__main__':
main()
| 41.734375 | 142 | 0.610446 |
from __future__ import print_function
from plumbum import local, FG, cli
from plumbum.cmd import git, cmake, make
import logging
from util import logfmt
import sys
logger = logging.getLogger()
logging.basicConfig(level=logging.DEBUG, format=logfmt(__file__))
class App(cli.Application):
    """Clone (or update) the BRAINSTools sources and compile them.

    The compiled binaries end up in '<prefix>/BRAINSTools-bin-<short-git-hash>'.
    Invoke via App.run() (plumbum cli.Application entry point).
    """

    DESCRIPTION = "Downloads and compiles BRAINSTools binaries. Output is 'BRAINSTools-bin-<hash>'."

    # Root directory under which the repo, build tree, and binaries are placed.
    prefix = cli.SwitchAttr(
        '-d', cli.ExistingDirectory,
        help="Root directory in which to install repo",
        default=local.path('/data/pnl/soft'))

    # Optional commit to build; when omitted, whatever HEAD is after clone/fetch is used.
    githash = cli.SwitchAttr(
        '-g',
        help='GitHub hash commit. If omitted will get latest commit from the master branch.')

    # BUG FIX: was `def main():` — plumbum calls this as a bound method and the
    # body reads self.prefix / self.githash, so `self` is required.
    def main(self):
        srcdir = self.prefix / "BRAINSTools"
        blddir = self.prefix / "BRAINSTools-build"

        logging.info("Get source:")
        if not srcdir.exists():
            repo = 'https://github.com/BRAINSia/BRAINSTools.git'
            git("clone", repo, srcdir)
        else:
            with local.cwd(srcdir):
                git("fetch", "origin")

        # Checkout and rev-parse must run inside the repo; the extracted original's
        # indentation was ambiguous here, but running them outside srcdir would
        # operate on whatever directory the script was launched from.
        with local.cwd(srcdir):
            if self.githash is not None:
                # BUG FIX: was `args.githash` — `args` is undefined; the switch
                # value lives on self.
                git("checkout", self.githash)
            # [:-1] strips the trailing newline git prints.
            clone_hash = git("rev-parse", "--short", "HEAD")[:-1]

        logging.info("Build code:")
        blddir.mkdir()
        with local.cwd(blddir):
            cmake(srcdir
                  ,"-DBRAINSTools_INSTALL_DEVELOPMENT=OFF"
                  ,"-DBRAINSTools_MAX_TEST_LEVEL=0"
                  ,"-DBRAINSTools_SUPERBUILD=ON"
                  ,"-DBRAINSTools_USE_QT=OFF"
                  ,"-DBRAINS_DEBUG_IMAGE_WRITE=OFF"
                  ,"-DBUILD_STYLE_UTILS=OFF"
                  ,"-DBUILD_TESTING=OFF"
                  ,"-DCMAKE_BUILD_TYPE=Release"
                  ,"-DCMAKE_COLOR_MAKEFILE=ON"
                  ,"-DCMAKE_EXE_LINKER_FLAGS=' '"
                  ,"-DCMAKE_EXE_LINKER_FLAGS_DEBUG="
                  ,"-DCMAKE_EXE_LINKER_FLAGS_MINSIZEREL="
                  ,"-DCMAKE_EXE_LINKER_FLAGS_RELEASE="
                  ,"-DCMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO="
                  ,"-DCMAKE_EXPORT_COMPILE_COMMANDS=OFF"
                  ,"-DCMAKE_INSTALL_PREFIX:PATH=/usr/local"
                  ,"-DCMAKE_MODULE_LINKER_FLAGS=' '"
                  ,"-DCMAKE_MODULE_LINKER_FLAGS_DEBUG="
                  ,"-DCMAKE_MODULE_LINKER_FLAGS_MINSIZEREL="
                  ,"-DCMAKE_MODULE_LINKER_FLAGS_RELEASE="
                  ,"-DCMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO="
                  ,"-DCMAKE_PROJECT_NAME:STATIC=SuperBuild_BRAINSTools"
                  ,"-DCMAKE_SHARED_LINKER_FLAGS=' '"
                  ,"-DCMAKE_SHARED_LINKER_FLAGS_DEBUG="
                  ,"-DCMAKE_SHARED_LINKER_FLAGS_MINSIZEREL="
                  ,"-DCMAKE_SHARED_LINKER_FLAGS_RELEASE="
                  ,"-DCMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO="
                  ,"-DCMAKE_SKIP_INSTALL_RPATH=NO"
                  ,"-DCMAKE_SKIP_RPATH=NO"
                  ,"-DCMAKE_STATIC_LINKER_FLAGS="
                  ,"-DCMAKE_STATIC_LINKER_FLAGS_DEBUG="
                  ,"-DCMAKE_STATIC_LINKER_FLAGS_MINSIZEREL="
                  ,"-DCMAKE_STATIC_LINKER_FLAGS_RELEASE="
                  ,"-DCMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO="
                  ,"-DCMAKE_USE_RELATIVE_PATHS=OFF"
                  ,"-DCMAKE_VERBOSE_MAKEFILE=FALSE"
                  ,"-DCOVERAGE_EXTRA_FLAGS=-l"
                  ,"-DCTEST_SUBMIT_RETRY_COUNT=3"
                  ,"-DCTEST_SUBMIT_RETRY_DELAY=5"
                  ,"-DDART_TESTING_TIMEOUT=1500"
                  ,"-DEXTERNAL_PROJECT_BUILD_TYPE=Release"
                  ,"-DFORCE_EXTERNAL_BUILDS=OFF"
                  ,"-DITK_VERSION_MAJOR=4"
                  ,"-DSuperBuild_BRAINSTools_BUILD_DICOM_SUPPORT=ON"
                  ,"-DSuperBuild_BRAINSTools_USE_CTKAPPLAUNCHER=OFF"
                  ,"-DSuperBuild_BRAINSTools_USE_GIT_PROTOCOL=ON"
                  ,"-DUSE_ANTS=ON"
                  ,"-DUSE_AutoWorkup=OFF"
                  ,"-DUSE_BRAINSABC=OFF"
                  ,"-DUSE_BRAINSConstellationDetector=OFF"
                  ,"-DUSE_BRAINSContinuousClass=OFF"
                  ,"-DUSE_BRAINSCreateLabelMapFromProbabilityMaps=OFF"
                  ,"-DUSE_BRAINSCut=OFF"
                  ,"-DUSE_BRAINSDWICleanup=OFF"
                  ,"-DUSE_BRAINSDemonWarp=OFF"
                  ,"-DUSE_BRAINSFit=OFF"
                  ,"-DUSE_BRAINSInitializedControlPoints=OFF"
                  ,"-DUSE_BRAINSLabelStats=OFF"
                  ,"-DUSE_BRAINSLandmarkInitializer=OFF"
                  ,"-DUSE_BRAINSMultiModeSegment=OFF"
                  ,"-DUSE_BRAINSMultiSTAPLE=OFF"
                  ,"-DUSE_BRAINSMush=OFF"
                  ,"-DUSE_BRAINSPosteriorToContinuousClass=OFF"
                  ,"-DUSE_BRAINSROIAuto=OFF"
                  ,"-DUSE_BRAINSResample=OFF"
                  ,"-DUSE_BRAINSSnapShotWriter=OFF"
                  ,"-DUSE_BRAINSStripRotation=OFF"
                  ,"-DUSE_BRAINSSurfaceTools=OFF"
                  ,"-DUSE_BRAINSTalairach=OFF"
                  ,"-DUSE_BRAINSTransformConvert=OFF"
                  ,"-DUSE_ConvertBetweenFileFormats=ON"
                  ,"-DUSE_DWIConvert=ON"
                  ,"-DUSE_DebugImageViewer=OFF"
                  ,"-DUSE_GTRACT=OFF"
                  ,"-DUSE_ICCDEF=OFF"
                  ,"-DUSE_ImageCalculator=OFF"
                  ,"-DUSE_ReferenceAtlas=OFF"
                  ,"-DUSE_SYSTEM_DCMTK=OFF"
                  ,"-DUSE_SYSTEM_ITK=OFF"
                  ,"-DUSE_SYSTEM_SlicerExecutionModel=OFF"
                  ,"-DUSE_SYSTEM_VTK=OFF"
                  ,"-DVTK_GIT_REPOSITORY=git://vtk.org/VTK.git"
                  )
            # Stream make output to the console (plumbum foreground execution).
            make['all'] & FG

        # BUG FIX: parenthesized the string concatenation — the original
        # `self.prefix / 'BRAINSTools-bin-'+clone_hash` applied `+` to the
        # LocalPath produced by `/`, which relies on fragile operator behavior.
        outbin = self.prefix / ('BRAINSTools-bin-' + clone_hash)
        (blddir / 'bin').move(outbin)
if __name__ == '__main__':
    # BUG FIX: `main` is a method of the plumbum cli.Application subclass `App`,
    # not a module-level function — calling bare main() raises NameError.
    # The plumbum entry point is App.run(), which parses argv and invokes App.main.
    App.run()
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.